diff --git a/packages/create-llama/README.md b/packages/create-llama/README.md
new file mode 100644
index 0000000000..5cb6102661
--- /dev/null
+++ b/packages/create-llama/README.md
@@ -0,0 +1,54 @@
+# Create LlamaIndex App
+
+The easiest way to get started with LlamaIndex is by using `create-llama`. This CLI tool enables you to quickly start building a new LlamaIndex application, with everything set up for you.
+To get started, use the following command:
+
+### Interactive
+
+You can create a new project interactively by running:
+
+```bash
+npx create-llama@latest
+# or
+npm create llama
+# or
+yarn create llama
+# or
+pnpm create llama
+```
+
+You will be asked for the name of your project, and then which framework you want to use to
+create a TypeScript project:
+
+```bash
+✔ Which framework would you like to use? › NextJS
+```
+
+You can choose between NextJS and Express.
+
+### Non-interactive
+
+You can also pass command line arguments to set up a new project
+non-interactively. See `create-llama --help`:
+
+```bash
+create-llama [options]
+
+Options:
+ -V, --version output the version number
+
+
+ --use-npm
+
+ Explicitly tell the CLI to bootstrap the app using npm
+
+ --use-pnpm
+
+ Explicitly tell the CLI to bootstrap the app using pnpm
+
+ --use-yarn
+
+ Explicitly tell the CLI to bootstrap the app using Yarn
+
+```
+
diff --git a/packages/create-llama/create-app.ts b/packages/create-llama/create-app.ts
new file mode 100644
index 0000000000..70d7b40dc4
--- /dev/null
+++ b/packages/create-llama/create-app.ts
@@ -0,0 +1,109 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import path from "path";
+import { green } from "picocolors";
+import { tryGitInit } from "./helpers/git";
+import { isFolderEmpty } from "./helpers/is-folder-empty";
+import { getOnline } from "./helpers/is-online";
+import { isWriteable } from "./helpers/is-writeable";
+import { makeDir } from "./helpers/make-dir";
+
+import fs from "fs";
+import terminalLink from "terminal-link";
+import type { InstallTemplateArgs } from "./templates";
+import { installTemplate } from "./templates";
+
+export async function createApp({
+ template,
+ framework,
+ engine,
+ ui,
+ appPath,
+ packageManager,
+ eslint,
+ frontend,
+ openAIKey,
+}: Omit<
+ InstallTemplateArgs,
+ "appName" | "root" | "isOnline" | "customApiPath"
+> & {
+ appPath: string;
+ frontend: boolean;
+}): Promise<void> {
+ const root = path.resolve(appPath);
+
+ if (!(await isWriteable(path.dirname(root)))) {
+ console.error(
+ "The application path is not writable, please check folder permissions and try again.",
+ );
+ console.error(
+ "It is likely you do not have write permissions for this folder.",
+ );
+ process.exit(1);
+ }
+
+ const appName = path.basename(root);
+
+ await makeDir(root);
+ if (!isFolderEmpty(root, appName)) {
+ process.exit(1);
+ }
+
+ const useYarn = packageManager === "yarn";
+ const isOnline = !useYarn || (await getOnline());
+
+ console.log(`Creating a new LlamaIndex app in ${green(root)}.`);
+ console.log();
+
+ const args = {
+ appName,
+ root,
+ template,
+ framework,
+ engine,
+ ui,
+ packageManager,
+ isOnline,
+ eslint,
+ openAIKey,
+ };
+
+ if (frontend) {
+ // install backend
+ const backendRoot = path.join(root, "backend");
+ await makeDir(backendRoot);
+ await installTemplate({ ...args, root: backendRoot, backend: true });
+ // install frontend
+ const frontendRoot = path.join(root, "frontend");
+ await makeDir(frontendRoot);
+ await installTemplate({
+ ...args,
+ root: frontendRoot,
+ framework: "nextjs",
+ customApiPath: "http://localhost:8000/api/chat",
+ backend: false,
+ });
+ // copy readme for fullstack
+ await fs.promises.copyFile(
+ path.join(__dirname, "templates", "README-fullstack.md"),
+ path.join(root, "README.md"),
+ );
+ } else {
+ await installTemplate({ ...args, backend: true });
+ }
+
+ process.chdir(root);
+ if (tryGitInit(root)) {
+ console.log("Initialized a git repository.");
+ console.log();
+ }
+
+ console.log(`${green("Success!")} Created ${appName} at ${appPath}`);
+
+ console.log(
+ `Now have a look at the ${terminalLink(
+ "README.md",
+ `file://${appName}/README.md`,
+ )} and learn how to get started.`,
+ );
+ console.log();
+}
diff --git a/packages/create-llama/helpers/copy.ts b/packages/create-llama/helpers/copy.ts
new file mode 100644
index 0000000000..a5b722ba34
--- /dev/null
+++ b/packages/create-llama/helpers/copy.ts
@@ -0,0 +1,50 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import { async as glob } from "fast-glob";
+import fs from "fs";
+import path from "path";
+
+interface CopyOption {
+ cwd?: string;
+ rename?: (basename: string) => string;
+ parents?: boolean;
+}
+
+const identity = (x: string) => x;
+
+export const copy = async (
+ src: string | string[],
+ dest: string,
+ { cwd, rename = identity, parents = true }: CopyOption = {},
+) => {
+ const source = typeof src === "string" ? [src] : src;
+
+ if (source.length === 0 || !dest) {
+ throw new TypeError("`src` and `dest` are required");
+ }
+
+ const sourceFiles = await glob(source, {
+ cwd,
+ dot: true,
+ absolute: false,
+ stats: false,
+ });
+
+ const destRelativeToCwd = cwd ? path.resolve(cwd, dest) : dest;
+
+ return Promise.all(
+ sourceFiles.map(async (p) => {
+ const dirname = path.dirname(p);
+ const basename = rename(path.basename(p));
+
+ const from = cwd ? path.resolve(cwd, p) : p;
+ const to = parents
+ ? path.join(destRelativeToCwd, dirname, basename)
+ : path.join(destRelativeToCwd, basename);
+
+ // Ensure the destination directory exists
+ await fs.promises.mkdir(path.dirname(to), { recursive: true });
+
+ return fs.promises.copyFile(from, to);
+ }),
+ );
+};
diff --git a/packages/create-llama/helpers/get-pkg-manager.ts b/packages/create-llama/helpers/get-pkg-manager.ts
new file mode 100644
index 0000000000..0187c88a49
--- /dev/null
+++ b/packages/create-llama/helpers/get-pkg-manager.ts
@@ -0,0 +1,15 @@
+export type PackageManager = "npm" | "pnpm" | "yarn";
+
+export function getPkgManager(): PackageManager {
+ const userAgent = process.env.npm_config_user_agent || "";
+
+ if (userAgent.startsWith("yarn")) {
+ return "yarn";
+ }
+
+ if (userAgent.startsWith("pnpm")) {
+ return "pnpm";
+ }
+
+ return "npm";
+}
diff --git a/packages/create-llama/helpers/git.ts b/packages/create-llama/helpers/git.ts
new file mode 100644
index 0000000000..fc27e60995
--- /dev/null
+++ b/packages/create-llama/helpers/git.ts
@@ -0,0 +1,58 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import { execSync } from "child_process";
+import fs from "fs";
+import path from "path";
+
+function isInGitRepository(): boolean {
+ try {
+ execSync("git rev-parse --is-inside-work-tree", { stdio: "ignore" });
+ return true;
+ } catch (_) {}
+ return false;
+}
+
+function isInMercurialRepository(): boolean {
+ try {
+ execSync("hg --cwd . root", { stdio: "ignore" });
+ return true;
+ } catch (_) {}
+ return false;
+}
+
+function isDefaultBranchSet(): boolean {
+ try {
+ execSync("git config init.defaultBranch", { stdio: "ignore" });
+ return true;
+ } catch (_) {}
+ return false;
+}
+
+export function tryGitInit(root: string): boolean {
+ let didInit = false;
+ try {
+ execSync("git --version", { stdio: "ignore" });
+ if (isInGitRepository() || isInMercurialRepository()) {
+ return false;
+ }
+
+ execSync("git init", { stdio: "ignore" });
+ didInit = true;
+
+ if (!isDefaultBranchSet()) {
+ execSync("git checkout -b main", { stdio: "ignore" });
+ }
+
+ execSync("git add -A", { stdio: "ignore" });
+    execSync('git commit -m "Initial commit from Create Llama"', {
+ stdio: "ignore",
+ });
+ return true;
+ } catch (e) {
+ if (didInit) {
+ try {
+ fs.rmSync(path.join(root, ".git"), { recursive: true, force: true });
+ } catch (_) {}
+ }
+ return false;
+ }
+}
diff --git a/packages/create-llama/helpers/install.ts b/packages/create-llama/helpers/install.ts
new file mode 100644
index 0000000000..9f0f203562
--- /dev/null
+++ b/packages/create-llama/helpers/install.ts
@@ -0,0 +1,50 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import spawn from "cross-spawn";
+import { yellow } from "picocolors";
+import type { PackageManager } from "./get-pkg-manager";
+
+/**
+ * Spawn a package manager installation based on user preference.
+ *
+ * @returns A Promise that resolves once the installation is finished.
+ */
+export async function callPackageManager(
+ /** Indicate which package manager to use. */
+ packageManager: PackageManager,
+ /** Indicate whether there is an active Internet connection.*/
+ isOnline: boolean,
+ args: string[] = ["install"],
+): Promise<void> {
+ if (!isOnline) {
+ console.log(
+ yellow("You appear to be offline.\nFalling back to the local cache."),
+ );
+ args.push("--offline");
+ }
+ /**
+ * Return a Promise that resolves once the installation is finished.
+ */
+ return new Promise((resolve, reject) => {
+ /**
+ * Spawn the installation process.
+ */
+ const child = spawn(packageManager, args, {
+ stdio: "inherit",
+ env: {
+ ...process.env,
+ ADBLOCK: "1",
+ // we set NODE_ENV to development as pnpm skips dev
+ // dependencies when production
+ NODE_ENV: "development",
+ DISABLE_OPENCOLLECTIVE: "1",
+ },
+ });
+ child.on("close", (code) => {
+ if (code !== 0) {
+ reject({ command: `${packageManager} ${args.join(" ")}` });
+ return;
+ }
+ resolve();
+ });
+ });
+}
diff --git a/packages/create-llama/helpers/is-folder-empty.ts b/packages/create-llama/helpers/is-folder-empty.ts
new file mode 100644
index 0000000000..927a344c00
--- /dev/null
+++ b/packages/create-llama/helpers/is-folder-empty.ts
@@ -0,0 +1,62 @@
+/* eslint-disable import/no-extraneous-dependencies */
+import fs from "fs";
+import path from "path";
+import { blue, green } from "picocolors";
+
+export function isFolderEmpty(root: string, name: string): boolean {
+ const validFiles = [
+ ".DS_Store",
+ ".git",
+ ".gitattributes",
+ ".gitignore",
+ ".gitlab-ci.yml",
+ ".hg",
+ ".hgcheck",
+ ".hgignore",
+ ".idea",
+ ".npmignore",
+ ".travis.yml",
+ "LICENSE",
+ "Thumbs.db",
+ "docs",
+ "mkdocs.yml",
+ "npm-debug.log",
+ "yarn-debug.log",
+ "yarn-error.log",
+ "yarnrc.yml",
+ ".yarn",
+ ];
+
+ const conflicts = fs
+ .readdirSync(root)
+ .filter((file) => !validFiles.includes(file))
+ // Support IntelliJ IDEA-based editors
+ .filter((file) => !/\.iml$/.test(file));
+
+ if (conflicts.length > 0) {
+ console.log(
+ `The directory ${green(name)} contains files that could conflict:`,
+ );
+ console.log();
+ for (const file of conflicts) {
+ try {
+ const stats = fs.lstatSync(path.join(root, file));
+ if (stats.isDirectory()) {
+ console.log(` ${blue(file)}/`);
+ } else {
+ console.log(` ${file}`);
+ }
+ } catch {
+ console.log(` ${file}`);
+ }
+ }
+ console.log();
+ console.log(
+ "Either try using a new directory name, or remove the files listed above.",
+ );
+ console.log();
+ return false;
+ }
+
+ return true;
+}
diff --git a/packages/create-llama/helpers/is-online.ts b/packages/create-llama/helpers/is-online.ts
new file mode 100644
index 0000000000..eab6980053
--- /dev/null
+++ b/packages/create-llama/helpers/is-online.ts
@@ -0,0 +1,40 @@
+import { execSync } from "child_process";
+import dns from "dns";
+import url from "url";
+
+function getProxy(): string | undefined {
+ if (process.env.https_proxy) {
+ return process.env.https_proxy;
+ }
+
+ try {
+ const httpsProxy = execSync("npm config get https-proxy").toString().trim();
+ return httpsProxy !== "null" ? httpsProxy : undefined;
+ } catch (e) {
+ return;
+ }
+}
+
+export function getOnline(): Promise<boolean> {
+ return new Promise((resolve) => {
+ dns.lookup("registry.yarnpkg.com", (registryErr) => {
+ if (!registryErr) {
+ return resolve(true);
+ }
+
+ const proxy = getProxy();
+ if (!proxy) {
+ return resolve(false);
+ }
+
+ const { hostname } = url.parse(proxy);
+ if (!hostname) {
+ return resolve(false);
+ }
+
+ dns.lookup(hostname, (proxyErr) => {
+ resolve(proxyErr == null);
+ });
+ });
+ });
+}
diff --git a/packages/create-llama/helpers/is-url.ts b/packages/create-llama/helpers/is-url.ts
new file mode 100644
index 0000000000..eb87b97525
--- /dev/null
+++ b/packages/create-llama/helpers/is-url.ts
@@ -0,0 +1,8 @@
+export function isUrl(url: string): boolean {
+ try {
+ new URL(url);
+ return true;
+ } catch (error) {
+ return false;
+ }
+}
diff --git a/packages/create-llama/helpers/is-writeable.ts b/packages/create-llama/helpers/is-writeable.ts
new file mode 100644
index 0000000000..fa29d60558
--- /dev/null
+++ b/packages/create-llama/helpers/is-writeable.ts
@@ -0,0 +1,10 @@
+import fs from "fs";
+
+export async function isWriteable(directory: string): Promise<boolean> {
+ try {
+ await fs.promises.access(directory, (fs.constants || fs).W_OK);
+ return true;
+ } catch (err) {
+ return false;
+ }
+}
diff --git a/packages/create-llama/helpers/make-dir.ts b/packages/create-llama/helpers/make-dir.ts
new file mode 100644
index 0000000000..2c258fd6b5
--- /dev/null
+++ b/packages/create-llama/helpers/make-dir.ts
@@ -0,0 +1,8 @@
+import fs from "fs";
+
+export function makeDir(
+ root: string,
+ options = { recursive: true },
+): Promise<string | undefined> {
+ return fs.promises.mkdir(root, options);
+}
diff --git a/packages/create-llama/helpers/validate-pkg.ts b/packages/create-llama/helpers/validate-pkg.ts
new file mode 100644
index 0000000000..68317653c8
--- /dev/null
+++ b/packages/create-llama/helpers/validate-pkg.ts
@@ -0,0 +1,20 @@
+// eslint-disable-next-line import/no-extraneous-dependencies
+import validateProjectName from "validate-npm-package-name";
+
+export function validateNpmName(name: string): {
+ valid: boolean;
+ problems?: string[];
+} {
+ const nameValidation = validateProjectName(name);
+ if (nameValidation.validForNewPackages) {
+ return { valid: true };
+ }
+
+ return {
+ valid: false,
+ problems: [
+ ...(nameValidation.errors || []),
+ ...(nameValidation.warnings || []),
+ ],
+ };
+}
diff --git a/packages/create-llama/index.ts b/packages/create-llama/index.ts
new file mode 100644
index 0000000000..b1009a4dbd
--- /dev/null
+++ b/packages/create-llama/index.ts
@@ -0,0 +1,399 @@
+#!/usr/bin/env node
+/* eslint-disable import/no-extraneous-dependencies */
+import ciInfo from "ci-info";
+import Commander from "commander";
+import Conf from "conf";
+import fs from "fs";
+import path from "path";
+import { blue, bold, cyan, green, red, yellow } from "picocolors";
+import prompts from "prompts";
+import checkForUpdate from "update-check";
+import { createApp } from "./create-app";
+import { getPkgManager } from "./helpers/get-pkg-manager";
+import { isFolderEmpty } from "./helpers/is-folder-empty";
+import { validateNpmName } from "./helpers/validate-pkg";
+import packageJson from "./package.json";
+
+let projectPath: string = "";
+
+const handleSigTerm = () => process.exit(0);
+
+process.on("SIGINT", handleSigTerm);
+process.on("SIGTERM", handleSigTerm);
+
+const onPromptState = (state: any) => {
+ if (state.aborted) {
+ // If we don't re-enable the terminal cursor before exiting
+ // the program, the cursor will remain hidden
+ process.stdout.write("\x1B[?25h");
+ process.stdout.write("\n");
+ process.exit(1);
+ }
+};
+
+const program = new Commander.Command(packageJson.name)
+ .version(packageJson.version)
+  .arguments("<project-directory>")
+  .usage(`${green("<project-directory>")} [options]`)
+ .action((name) => {
+ projectPath = name;
+ })
+ .option(
+ "--eslint",
+ `
+
+ Initialize with eslint config.
+`,
+ )
+ .option(
+ "--use-npm",
+ `
+
+ Explicitly tell the CLI to bootstrap the application using npm
+`,
+ )
+ .option(
+ "--use-pnpm",
+ `
+
+ Explicitly tell the CLI to bootstrap the application using pnpm
+`,
+ )
+ .option(
+ "--use-yarn",
+ `
+
+ Explicitly tell the CLI to bootstrap the application using Yarn
+`,
+ )
+ .option(
+ "--reset-preferences",
+ `
+
+ Explicitly tell the CLI to reset any stored preferences
+`,
+ )
+ .allowUnknownOption()
+ .parse(process.argv);
+
+const packageManager = !!program.useNpm
+ ? "npm"
+ : !!program.usePnpm
+ ? "pnpm"
+ : !!program.useYarn
+ ? "yarn"
+ : getPkgManager();
+
+async function run(): Promise<void> {
+ const conf = new Conf({ projectName: "create-llama" });
+
+ if (program.resetPreferences) {
+ conf.clear();
+ console.log(`Preferences reset successfully`);
+ return;
+ }
+
+ if (typeof projectPath === "string") {
+ projectPath = projectPath.trim();
+ }
+
+ if (!projectPath) {
+ const res = await prompts({
+ onState: onPromptState,
+ type: "text",
+ name: "path",
+ message: "What is your project named?",
+ initial: "my-app",
+ validate: (name) => {
+ const validation = validateNpmName(path.basename(path.resolve(name)));
+ if (validation.valid) {
+ return true;
+ }
+ return "Invalid project name: " + validation.problems![0];
+ },
+ });
+
+ if (typeof res.path === "string") {
+ projectPath = res.path.trim();
+ }
+ }
+
+ if (!projectPath) {
+ console.log(
+ "\nPlease specify the project directory:\n" +
+        `  ${cyan(program.name())} ${green("<project-directory>")}\n` +
+ "For example:\n" +
+ ` ${cyan(program.name())} ${green("my-next-app")}\n\n` +
+ `Run ${cyan(`${program.name()} --help`)} to see all options.`,
+ );
+ process.exit(1);
+ }
+
+ const resolvedProjectPath = path.resolve(projectPath);
+ const projectName = path.basename(resolvedProjectPath);
+
+ const { valid, problems } = validateNpmName(projectName);
+ if (!valid) {
+ console.error(
+ `Could not create a project called ${red(
+ `"${projectName}"`,
+ )} because of npm naming restrictions:`,
+ );
+
+ problems!.forEach((p) => console.error(` ${red(bold("*"))} ${p}`));
+ process.exit(1);
+ }
+
+ /**
+ * Verify the project dir is empty or doesn't exist
+ */
+ const root = path.resolve(resolvedProjectPath);
+ const appName = path.basename(root);
+ const folderExists = fs.existsSync(root);
+
+ if (folderExists && !isFolderEmpty(root, appName)) {
+ process.exit(1);
+ }
+
+ const preferences = (conf.get("preferences") || {}) as Record<
+ string,
+ boolean | string
+ >;
+
+ const defaults: typeof preferences = {
+ template: "simple",
+ framework: "nextjs",
+ engine: "simple",
+ ui: "html",
+ eslint: true,
+ frontend: false,
+ openAIKey: "",
+ };
+ const getPrefOrDefault = (field: string) =>
+ preferences[field] ?? defaults[field];
+
+ const handlers = {
+ onCancel: () => {
+ console.error("Exiting.");
+ process.exit(1);
+ },
+ };
+
+ if (!program.template) {
+ if (ciInfo.isCI) {
+ program.template = getPrefOrDefault("template");
+ } else {
+ const { template } = await prompts(
+ {
+ type: "select",
+ name: "template",
+ message: "Which template would you like to use?",
+ choices: [
+ { title: "Chat without streaming", value: "simple" },
+ { title: "Chat with streaming", value: "streaming" },
+ ],
+ initial: 1,
+ },
+ handlers,
+ );
+ program.template = template;
+ preferences.template = template;
+ }
+ }
+
+ if (!program.framework) {
+ if (ciInfo.isCI) {
+ program.framework = getPrefOrDefault("framework");
+ } else {
+ const { framework } = await prompts(
+ {
+ type: "select",
+ name: "framework",
+ message: "Which framework would you like to use?",
+ choices: [
+ { title: "NextJS", value: "nextjs" },
+ { title: "Express", value: "express" },
+ { title: "FastAPI (Python)", value: "fastapi" },
+ ],
+ initial: 0,
+ },
+ handlers,
+ );
+ program.framework = framework;
+ preferences.framework = framework;
+ }
+ }
+
+ if (program.framework === "express" || program.framework === "fastapi") {
+ // if a backend-only framework is selected, ask whether we should create a frontend
+ if (!program.frontend) {
+ if (ciInfo.isCI) {
+ program.frontend = getPrefOrDefault("frontend");
+ } else {
+ const styledNextJS = blue("NextJS");
+ const styledBackend = green(
+ program.framework === "express"
+ ? "Express "
+ : program.framework === "fastapi"
+ ? "FastAPI (Python) "
+ : "",
+ );
+ const { frontend } = await prompts({
+ onState: onPromptState,
+ type: "toggle",
+ name: "frontend",
+ message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`,
+ initial: getPrefOrDefault("frontend"),
+ active: "Yes",
+ inactive: "No",
+ });
+ program.frontend = Boolean(frontend);
+ preferences.frontend = Boolean(frontend);
+ }
+ }
+ }
+
+ if (program.framework === "nextjs" || program.frontend) {
+ if (!program.ui) {
+ if (ciInfo.isCI) {
+ program.ui = getPrefOrDefault("ui");
+ } else {
+ const { ui } = await prompts(
+ {
+ type: "select",
+ name: "ui",
+ message: "Which UI would you like to use?",
+ choices: [
+ { title: "Just HTML", value: "html" },
+ { title: "Shadcn", value: "shadcn" },
+ ],
+ initial: 0,
+ },
+ handlers,
+ );
+ program.ui = ui;
+ preferences.ui = ui;
+ }
+ }
+ }
+
+ if (program.framework === "express" || program.framework === "nextjs") {
+ if (!program.engine) {
+ if (ciInfo.isCI) {
+ program.engine = getPrefOrDefault("engine");
+ } else {
+ const { engine } = await prompts(
+ {
+ type: "select",
+ name: "engine",
+ message: "Which chat engine would you like to use?",
+ choices: [
+ { title: "SimpleChatEngine", value: "simple" },
+ { title: "ContextChatEngine", value: "context" },
+ ],
+ initial: 0,
+ },
+ handlers,
+ );
+ program.engine = engine;
+ preferences.engine = engine;
+ }
+ }
+ }
+
+ if (!program.openAIKey) {
+ const { key } = await prompts(
+ {
+ type: "text",
+ name: "key",
+ message: "Please provide your OpenAI API key (leave blank to skip):",
+ },
+ handlers,
+ );
+ program.openAIKey = key;
+ preferences.openAIKey = key;
+ }
+
+ if (
+ program.framework !== "fastapi" &&
+ !process.argv.includes("--eslint") &&
+ !process.argv.includes("--no-eslint")
+ ) {
+ if (ciInfo.isCI) {
+ program.eslint = getPrefOrDefault("eslint");
+ } else {
+ const styledEslint = blue("ESLint");
+ const { eslint } = await prompts({
+ onState: onPromptState,
+ type: "toggle",
+ name: "eslint",
+ message: `Would you like to use ${styledEslint}?`,
+ initial: getPrefOrDefault("eslint"),
+ active: "Yes",
+ inactive: "No",
+ });
+ program.eslint = Boolean(eslint);
+ preferences.eslint = Boolean(eslint);
+ }
+ }
+
+ await createApp({
+ template: program.template,
+ framework: program.framework,
+ engine: program.engine,
+ ui: program.ui,
+ appPath: resolvedProjectPath,
+ packageManager,
+ eslint: program.eslint,
+ frontend: program.frontend,
+ openAIKey: program.openAIKey,
+ });
+ conf.set("preferences", preferences);
+}
+
+const update = checkForUpdate(packageJson).catch(() => null);
+
+async function notifyUpdate(): Promise<void> {
+ try {
+ const res = await update;
+ if (res?.latest) {
+ const updateMessage =
+ packageManager === "yarn"
+ ? "yarn global add create-llama"
+ : packageManager === "pnpm"
+ ? "pnpm add -g create-llama"
+ : "npm i -g create-llama";
+
+ console.log(
+ yellow(bold("A new version of `create-llama` is available!")) +
+ "\n" +
+ "You can update by running: " +
+ cyan(updateMessage) +
+ "\n",
+ );
+ }
+ process.exit();
+ } catch {
+ // ignore error
+ }
+}
+
+run()
+ .then(notifyUpdate)
+ .catch(async (reason) => {
+ console.log();
+ console.log("Aborting installation.");
+ if (reason.command) {
+ console.log(` ${cyan(reason.command)} has failed.`);
+ } else {
+ console.log(
+ red("Unexpected error. Please report it as a bug:") + "\n",
+ reason,
+ );
+ }
+ console.log();
+
+ await notifyUpdate();
+
+ process.exit(1);
+ });
diff --git a/packages/create-llama/package.json b/packages/create-llama/package.json
new file mode 100644
index 0000000000..56e1aee0be
--- /dev/null
+++ b/packages/create-llama/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "create-llama",
+ "version": "0.0.34",
+ "keywords": [
+ "rag",
+ "llamaindex",
+ "next.js"
+ ],
+ "description": "Create LlamaIndex-powered apps with one command",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/run-llama/LlamaIndexTS",
+ "directory": "packages/create-llama"
+ },
+ "license": "MIT",
+ "bin": {
+ "create-llama": "./dist/index.js"
+ },
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "dev": "ncc build ./index.ts -w -o dist/",
+ "build": "ncc build ./index.ts -o ./dist/ --minify --no-cache --no-source-map-register",
+ "lint": "eslint . --ignore-pattern dist",
+ "prepublishOnly": "cd ../../ && turbo run build"
+ },
+ "devDependencies": {
+ "@types/async-retry": "1.4.2",
+ "@types/ci-info": "2.0.0",
+ "@types/cross-spawn": "6.0.0",
+ "@types/node": "^20.2.5",
+ "@types/prompts": "2.0.1",
+ "@types/tar": "6.1.5",
+ "@types/validate-npm-package-name": "3.0.0",
+ "@vercel/ncc": "0.34.0",
+ "async-retry": "1.3.1",
+ "async-sema": "3.0.1",
+ "ci-info": "watson/ci-info#f43f6a1cefff47fb361c88cf4b943fdbcaafe540",
+ "commander": "2.20.0",
+ "conf": "10.2.0",
+ "cross-spawn": "7.0.3",
+ "fast-glob": "3.3.1",
+ "got": "10.7.0",
+ "picocolors": "1.0.0",
+ "prompts": "2.1.0",
+ "tar": "6.1.15",
+ "update-check": "1.5.4",
+ "validate-npm-package-name": "3.0.0",
+ "terminal-link": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=16.14.0"
+ }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/.gitignore b/packages/create-llama/templates/.gitignore
new file mode 100644
index 0000000000..ec6c67b630
--- /dev/null
+++ b/packages/create-llama/templates/.gitignore
@@ -0,0 +1,3 @@
+__pycache__
+poetry.lock
+storage
diff --git a/packages/create-llama/templates/README-fullstack.md b/packages/create-llama/templates/README-fullstack.md
new file mode 100644
index 0000000000..5a41b8cfc3
--- /dev/null
+++ b/packages/create-llama/templates/README-fullstack.md
@@ -0,0 +1,18 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, startup the backend as described in the [backend README](./backend/README.md).
+
+Second, run the development server of the frontend as described in the [frontend README](./frontend/README.md).
+
+Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (Typescript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/components/data/101.pdf b/packages/create-llama/templates/components/data/101.pdf
new file mode 100644
index 0000000000..ae5acffd53
Binary files /dev/null and b/packages/create-llama/templates/components/data/101.pdf differ
diff --git a/packages/create-llama/templates/components/engines/context/constants.mjs b/packages/create-llama/templates/components/engines/context/constants.mjs
new file mode 100644
index 0000000000..8cfb403c37
--- /dev/null
+++ b/packages/create-llama/templates/components/engines/context/constants.mjs
@@ -0,0 +1,4 @@
+export const STORAGE_DIR = "./data";
+export const STORAGE_CACHE_DIR = "./cache";
+export const CHUNK_SIZE = 512;
+export const CHUNK_OVERLAP = 20;
diff --git a/packages/create-llama/templates/components/engines/context/generate.mjs b/packages/create-llama/templates/components/engines/context/generate.mjs
new file mode 100644
index 0000000000..8420dd5f81
--- /dev/null
+++ b/packages/create-llama/templates/components/engines/context/generate.mjs
@@ -0,0 +1,48 @@
+import {
+ serviceContextFromDefaults,
+ SimpleDirectoryReader,
+ storageContextFromDefaults,
+ VectorStoreIndex,
+} from "llamaindex";
+
+import {
+ CHUNK_OVERLAP,
+ CHUNK_SIZE,
+ STORAGE_CACHE_DIR,
+ STORAGE_DIR,
+} from "./constants.mjs";
+
+async function getRuntime(func) {
+ const start = Date.now();
+ await func();
+ const end = Date.now();
+ return end - start;
+}
+
+async function generateDatasource(serviceContext) {
+ console.log(`Generating storage context...`);
+ // Split documents, create embeddings and store them in the storage context
+ const ms = await getRuntime(async () => {
+ const storageContext = await storageContextFromDefaults({
+ persistDir: STORAGE_CACHE_DIR,
+ });
+ const documents = await new SimpleDirectoryReader().loadData({
+ directoryPath: STORAGE_DIR,
+ });
+ await VectorStoreIndex.fromDocuments(documents, {
+ storageContext,
+ serviceContext,
+ });
+ });
+ console.log(`Storage context successfully generated in ${ms / 1000}s.`);
+}
+
+(async () => {
+ const serviceContext = serviceContextFromDefaults({
+ chunkSize: CHUNK_SIZE,
+ chunkOverlap: CHUNK_OVERLAP,
+ });
+
+ await generateDatasource(serviceContext);
+ console.log("Finished generating storage.");
+})();
diff --git a/packages/create-llama/templates/components/engines/context/index.ts b/packages/create-llama/templates/components/engines/context/index.ts
new file mode 100644
index 0000000000..cdd93809dc
--- /dev/null
+++ b/packages/create-llama/templates/components/engines/context/index.ts
@@ -0,0 +1,44 @@
+import {
+ ContextChatEngine,
+ LLM,
+ serviceContextFromDefaults,
+ SimpleDocumentStore,
+ storageContextFromDefaults,
+ VectorStoreIndex,
+} from "llamaindex";
+import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";
+
+async function getDataSource(llm: LLM) {
+ const serviceContext = serviceContextFromDefaults({
+ llm,
+ chunkSize: CHUNK_SIZE,
+ chunkOverlap: CHUNK_OVERLAP,
+ });
+ let storageContext = await storageContextFromDefaults({
+ persistDir: `${STORAGE_CACHE_DIR}`,
+ });
+
+ const numberOfDocs = Object.keys(
+ (storageContext.docStore as SimpleDocumentStore).toDict(),
+ ).length;
+ if (numberOfDocs === 0) {
+ throw new Error(
+ `StorageContext is empty - call 'npm run generate' to generate the storage first`,
+ );
+ }
+ return await VectorStoreIndex.init({
+ storageContext,
+ serviceContext,
+ });
+}
+
+export async function createChatEngine(llm: LLM) {
+ const index = await getDataSource(llm);
+ const retriever = index.asRetriever();
+ retriever.similarityTopK = 5;
+
+ return new ContextChatEngine({
+ chatModel: llm,
+ retriever,
+ });
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/README-template.md b/packages/create-llama/templates/components/ui/shadcn/README-template.md
new file mode 100644
index 0000000000..ebfcf48c99
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/README-template.md
@@ -0,0 +1 @@
+Using the chat component from https://github.com/marcusschiesser/ui (based on https://ui.shadcn.com/)
diff --git a/packages/create-llama/templates/components/ui/shadcn/button.tsx b/packages/create-llama/templates/components/ui/shadcn/button.tsx
new file mode 100644
index 0000000000..662b0404d8
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/button.tsx
@@ -0,0 +1,56 @@
+import { Slot } from "@radix-ui/react-slot";
+import { cva, type VariantProps } from "class-variance-authority";
+import * as React from "react";
+
+import { cn } from "./lib/utils";
+
+const buttonVariants = cva(
+ "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50",
+ {
+ variants: {
+ variant: {
+ default: "bg-primary text-primary-foreground hover:bg-primary/90",
+ destructive:
+ "bg-destructive text-destructive-foreground hover:bg-destructive/90",
+ outline:
+ "border border-input bg-background hover:bg-accent hover:text-accent-foreground",
+ secondary:
+ "bg-secondary text-secondary-foreground hover:bg-secondary/80",
+ ghost: "hover:bg-accent hover:text-accent-foreground",
+ link: "text-primary underline-offset-4 hover:underline",
+ },
+ size: {
+ default: "h-10 px-4 py-2",
+ sm: "h-9 rounded-md px-3",
+ lg: "h-11 rounded-md px-8",
+ icon: "h-10 w-10",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ size: "default",
+ },
+ },
+);
+
+export interface ButtonProps
+  extends React.ButtonHTMLAttributes<HTMLButtonElement>,
+    VariantProps<typeof buttonVariants> {
+ asChild?: boolean;
+}
+
+const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
+ ({ className, variant, size, asChild = false, ...props }, ref) => {
+ const Comp = asChild ? Slot : "button";
+ return (
+      <Comp className={cn(buttonVariants({ variant, size, className }))} ref={ref} {...props} />
+ );
+ },
+);
+Button.displayName = "Button";
+
+export { Button, buttonVariants };
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat-actions.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/chat-actions.tsx
new file mode 100644
index 0000000000..151ef61a94
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat-actions.tsx
@@ -0,0 +1,28 @@
+import { PauseCircle, RefreshCw } from "lucide-react";
+
+import { Button } from "../button";
+import { ChatHandler } from "./chat.interface";
+
+export default function ChatActions(
+ props: Pick & {
+ showReload?: boolean;
+ showStop?: boolean;
+ },
+) {
+ return (
+
+ {props.showStop && (
+
+ )}
+ {props.showReload && (
+
+ )}
+
+ );
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat-avatar.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/chat-avatar.tsx
new file mode 100644
index 0000000000..ce04e306a7
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat-avatar.tsx
@@ -0,0 +1,25 @@
+import { User2 } from "lucide-react";
+import Image from "next/image";
+
+export default function ChatAvatar({ role }: { role: string }) {
+ if (role === "user") {
+ return (
+
+
+
+ );
+ }
+
+ return (
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat-input.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/chat-input.tsx
new file mode 100644
index 0000000000..1a0cc3e0cc
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat-input.tsx
@@ -0,0 +1,29 @@
+import { Button } from "../button";
+import { Input } from "../input";
+import { ChatHandler } from "./chat.interface";
+
+export default function ChatInput(
+ props: Pick<
+ ChatHandler,
+ "isLoading" | "handleSubmit" | "handleInputChange" | "input"
+ >,
+) {
+ return (
+
+ );
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat-message.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/chat-message.tsx
new file mode 100644
index 0000000000..9ada08a3d7
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat-message.tsx
@@ -0,0 +1,33 @@
+import { Check, Copy } from "lucide-react";
+
+import { Button } from "../button";
+import ChatAvatar from "./chat-avatar";
+import { Message } from "./chat.interface";
+import Markdown from "./markdown";
+import { useCopyToClipboard } from "./use-copy-to-clipboard";
+
+export default function ChatMessage(chatMessage: Message) {
+ const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
+ return (
+
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat-messages.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/chat-messages.tsx
new file mode 100644
index 0000000000..dd0a442b6c
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat-messages.tsx
@@ -0,0 +1,51 @@
+import { useEffect, useRef } from "react";
+
+import ChatActions from "./chat-actions";
+import ChatMessage from "./chat-message";
+import { ChatHandler } from "./chat.interface";
+
+export default function ChatMessages(
+ props: Pick,
+) {
+ const scrollableChatContainerRef = useRef(null);
+ const messageLength = props.messages.length;
+ const lastMessage = props.messages[messageLength - 1];
+
+ const scrollToBottom = () => {
+ if (scrollableChatContainerRef.current) {
+ scrollableChatContainerRef.current.scrollTop =
+ scrollableChatContainerRef.current.scrollHeight;
+ }
+ };
+
+ const isLastMessageFromAssistant =
+ messageLength > 0 && lastMessage?.role !== "user";
+ const showReload =
+ props.reload && !props.isLoading && isLastMessageFromAssistant;
+ const showStop = props.stop && props.isLoading;
+
+ useEffect(() => {
+ scrollToBottom();
+ }, [messageLength, lastMessage]);
+
+ return (
+
+
+ {props.messages.map((m) => (
+
+ ))}
+
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/chat.interface.ts b/packages/create-llama/templates/components/ui/shadcn/chat/chat.interface.ts
new file mode 100644
index 0000000000..3256f7f031
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/chat.interface.ts
@@ -0,0 +1,15 @@
+// A single chat message exchanged between the user and the assistant.
+export interface Message {
+  id: string;
+  content: string;
+  // Sender of the message, e.g. "user" or "assistant".
+  role: string;
+}
+
+// Props contract shared by the chat UI components (messages list, input
+// box, actions). NOTE(review): generic type parameters may have been
+// lost in extraction here (e.g. React.ChangeEvent<HTMLInputElement>) —
+// verify against the original.
+export interface ChatHandler {
+  messages: Message[];
+  input: string;
+  isLoading: boolean;
+  handleSubmit: (e: React.FormEvent) => void;
+  handleInputChange: (e: React.ChangeEvent) => void;
+  // Optional: regenerate the last assistant response.
+  reload?: () => void;
+  // Optional: abort the in-flight response.
+  stop?: () => void;
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/codeblock.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/codeblock.tsx
new file mode 100644
index 0000000000..10598223b8
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/codeblock.tsx
@@ -0,0 +1,139 @@
+"use client"
+
+import React, { FC, memo } from "react"
+import { Check, Copy, Download } from "lucide-react"
+import { Prism, SyntaxHighlighterProps } from "react-syntax-highlighter"
+import { coldarkDark } from "react-syntax-highlighter/dist/cjs/styles/prism"
+
+import { Button } from "../button"
+import { useCopyToClipboard } from "./use-copy-to-clipboard"
+
+// TODO: Remove this when @type/react-syntax-highlighter is updated
+const SyntaxHighlighter = Prism as unknown as FC
+
+interface Props {
+ language: string
+ value: string
+}
+
+interface languageMap {
+ [key: string]: string | undefined
+}
+
+export const programmingLanguages: languageMap = {
+ javascript: ".js",
+ python: ".py",
+ java: ".java",
+ c: ".c",
+ cpp: ".cpp",
+ "c++": ".cpp",
+ "c#": ".cs",
+ ruby: ".rb",
+ php: ".php",
+ swift: ".swift",
+ "objective-c": ".m",
+ kotlin: ".kt",
+ typescript: ".ts",
+ go: ".go",
+ perl: ".pl",
+ rust: ".rs",
+ scala: ".scala",
+ haskell: ".hs",
+ lua: ".lua",
+ shell: ".sh",
+ sql: ".sql",
+ html: ".html",
+ css: ".css",
+ // add more file extensions here, make sure the key is same as language prop in CodeBlock.tsx component
+}
+
+// Produce a random identifier of `length` characters, optionally
+// lower-cased (used to suggest unique download file names).
+export const generateRandomString = (length: number, lowercase = false) => {
+  // Alphabet excludes similar looking characters like Z, 2, I, 1, O, 0.
+  const chars = "ABCDEFGHJKLMNPQRSTUVWXY3456789"
+  const picks: string[] = []
+  for (let i = 0; i < length; i++) {
+    picks.push(chars.charAt(Math.floor(Math.random() * chars.length)))
+  }
+  const result = picks.join("")
+  return lowercase ? result.toLowerCase() : result
+}
+
+const CodeBlock: FC = memo(({ language, value }) => {
+ const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 })
+
+  // Offer the code block's contents as a downloadable file, using the
+  // language's conventional extension and a randomized suggested name.
+  const downloadAsFile = () => {
+    // File downloads need the DOM; bail out during SSR.
+    if (typeof window === "undefined") {
+      return
+    }
+    const fileExtension = programmingLanguages[language] || ".file"
+    const suggestedFileName = `file-${generateRandomString(
+      3,
+      true
+    )}${fileExtension}`
+    // Fix: the original passed `"Enter file name" || ""`; the `|| ""` is
+    // dead code because a non-empty string literal is always truthy.
+    const fileName = window.prompt("Enter file name", suggestedFileName)
+
+    if (!fileName) {
+      // User pressed cancel on prompt.
+      return
+    }
+
+    // Create a temporary object URL and a hidden anchor to trigger the
+    // browser's download flow, then clean both up.
+    const blob = new Blob([value], { type: "text/plain" })
+    const url = URL.createObjectURL(blob)
+    const link = document.createElement("a")
+    link.download = fileName
+    link.href = url
+    link.style.display = "none"
+    document.body.appendChild(link)
+    link.click()
+    document.body.removeChild(link)
+    URL.revokeObjectURL(url)
+  }
+
+ const onCopy = () => {
+ if (isCopied) return
+ copyToClipboard(value)
+ }
+
+ return (
+
+
+
{language}
+
+
+
+
+
+
+ {value}
+
+
+ )
+})
+CodeBlock.displayName = "CodeBlock"
+
+export { CodeBlock }
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/index.ts b/packages/create-llama/templates/components/ui/shadcn/chat/index.ts
new file mode 100644
index 0000000000..0b8104960c
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/index.ts
@@ -0,0 +1,5 @@
+import ChatInput from "./chat-input";
+import ChatMessages from "./chat-messages";
+
+export { type ChatHandler, type Message } from "./chat.interface";
+export { ChatMessages, ChatInput };
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/markdown.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/markdown.tsx
new file mode 100644
index 0000000000..31b78242d2
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/markdown.tsx
@@ -0,0 +1,59 @@
+import { FC, memo } from "react"
+import ReactMarkdown, { Options } from "react-markdown"
+import remarkGfm from "remark-gfm"
+import remarkMath from "remark-math"
+
+import { CodeBlock } from "./codeblock"
+
+const MemoizedReactMarkdown: FC = memo(
+ ReactMarkdown,
+ (prevProps, nextProps) =>
+ prevProps.children === nextProps.children &&
+ prevProps.className === nextProps.className
+)
+
+export default function Markdown({ content }: { content: string }) {
+ return (
+ {children}
+ },
+ code({ node, inline, className, children, ...props }) {
+ if (children.length) {
+ if (children[0] == "▍") {
+ return (
+ ▍
+ )
+ }
+
+ children[0] = (children[0] as string).replace("`▍`", "▍")
+ }
+
+ const match = /language-(\w+)/.exec(className || "")
+
+ if (inline) {
+ return (
+
+ {children}
+
+ )
+ }
+
+ return (
+
+ )
+ },
+ }}
+ >
+ {content}
+
+ )
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/chat/use-copy-to-clipboard.tsx b/packages/create-llama/templates/components/ui/shadcn/chat/use-copy-to-clipboard.tsx
new file mode 100644
index 0000000000..62f7156dca
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/chat/use-copy-to-clipboard.tsx
@@ -0,0 +1,33 @@
+'use client'
+
+import * as React from 'react'
+
+export interface useCopyToClipboardProps {
+  timeout?: number
+}
+
+/**
+ * React hook exposing a `copyToClipboard(value)` helper plus an
+ * `isCopied` flag that automatically resets after `timeout` ms.
+ */
+export function useCopyToClipboard({
+  timeout = 2000
+}: useCopyToClipboardProps) {
+  const [isCopied, setIsCopied] = React.useState(false)
+
+  const copyToClipboard = (value: string) => {
+    // Bail out during SSR, when the async Clipboard API is unavailable,
+    // or when there is nothing to copy.
+    const clipboardReady =
+      typeof window !== 'undefined' && !!navigator.clipboard?.writeText
+    if (!clipboardReady || !value) {
+      return
+    }
+
+    navigator.clipboard.writeText(value).then(() => {
+      setIsCopied(true)
+      setTimeout(() => setIsCopied(false), timeout)
+    })
+  }
+
+  return { isCopied, copyToClipboard }
+}
diff --git a/packages/create-llama/templates/components/ui/shadcn/input.tsx b/packages/create-llama/templates/components/ui/shadcn/input.tsx
new file mode 100644
index 0000000000..edfa129e62
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/input.tsx
@@ -0,0 +1,25 @@
+import * as React from "react";
+
+import { cn } from "./lib/utils";
+
+export interface InputProps
+ extends React.InputHTMLAttributes {}
+
+const Input = React.forwardRef(
+ ({ className, type, ...props }, ref) => {
+ return (
+
+ );
+ },
+);
+Input.displayName = "Input";
+
+export { Input };
diff --git a/packages/create-llama/templates/components/ui/shadcn/lib/utils.ts b/packages/create-llama/templates/components/ui/shadcn/lib/utils.ts
new file mode 100644
index 0000000000..a5ef193506
--- /dev/null
+++ b/packages/create-llama/templates/components/ui/shadcn/lib/utils.ts
@@ -0,0 +1,6 @@
+import { clsx, type ClassValue } from "clsx";
+import { twMerge } from "tailwind-merge";
+
+// Combine class values via clsx (handles arrays/conditionals), then let
+// tailwind-merge resolve conflicting Tailwind utilities so the
+// last-specified class wins.
+export function cn(...inputs: ClassValue[]) {
+  return twMerge(clsx(inputs));
+}
diff --git a/packages/create-llama/templates/index.ts b/packages/create-llama/templates/index.ts
new file mode 100644
index 0000000000..98eece524f
--- /dev/null
+++ b/packages/create-llama/templates/index.ts
@@ -0,0 +1,296 @@
+import { copy } from "../helpers/copy";
+import { callPackageManager } from "../helpers/install";
+
+import fs from "fs/promises";
+import os from "os";
+import path from "path";
+import { bold, cyan } from "picocolors";
+import { version } from "../package.json";
+
+import { PackageManager } from "../helpers/get-pkg-manager";
+import {
+ InstallTemplateArgs,
+ TemplateEngine,
+ TemplateFramework,
+} from "./types";
+
+const envFileNameMap: Record = {
+ nextjs: ".env.local",
+ express: ".env",
+ fastapi: ".env",
+};
+
+// Write the framework-appropriate env file (.env.local for NextJS,
+// .env otherwise) containing the user's OpenAI key, if one was given.
+const createEnvLocalFile = async (
+  root: string,
+  framework: TemplateFramework,
+  openAIKey?: string,
+) => {
+  // Without a key there is nothing to write.
+  if (!openAIKey) return;
+  const envFileName = envFileNameMap[framework];
+  if (!envFileName) return;
+  await fs.writeFile(
+    path.join(root, envFileName),
+    `OPENAI_API_KEY=${openAIKey}\n`,
+  );
+  console.log(`Created '${envFileName}' file containing OPENAI_API_KEY`);
+};
+
+// Copy the bundled sample data into `<root>/data` and, for the context
+// engine, immediately run the generate script so the index exists
+// before first start.
+const copyTestData = async (
+  root: string,
+  framework: TemplateFramework,
+  packageManager?: PackageManager,
+  engine?: TemplateEngine,
+) => {
+  // FastAPI templates always get the data; TS templates only when the
+  // "context" engine was selected.
+  if (engine === "context" || framework === "fastapi") {
+    const srcPath = path.join(__dirname, "components", "data");
+    const destPath = path.join(root, "data");
+    console.log(`\nCopying test data to ${cyan(destPath)}\n`);
+    await copy("**", destPath, {
+      parents: true,
+      cwd: srcPath,
+    });
+  }
+
+  // Generation requires a package manager, so it is skipped when none
+  // is passed (e.g. Python projects).
+  // NOTE(review): the log always says "npm run generate" even when
+  // another package manager is in use.
+  if (packageManager && engine === "context") {
+    console.log(
+      `\nRunning ${cyan("npm run generate")} to generate the context data.\n`,
+    );
+    await callPackageManager(packageManager, true, ["run", "generate"]);
+    console.log();
+  }
+};
+
+// Map template file names to their final names in the generated app.
+const rename = (name: string) => {
+  // Dotfiles are presumably stored without the leading dot so packaging
+  // doesn't drop them — TODO confirm; restore the dot on install.
+  if (name === "gitignore" || name === "eslintrc.json") {
+    return `.${name}`;
+  }
+  // README.md is ignored by webpack-asset-relocator-loader used by ncc:
+  // https://github.com/vercel/webpack-asset-relocator-loader/blob/e9308683d47ff507253e37c9bcbb99474603192b/src/asset-relocator.js#L227
+  if (name === "README-template.md") {
+    return "README.md";
+  }
+  return name;
+};
+
+/**
+ * Install a LlamaIndex internal TypeScript template (NextJS or Express)
+ * to a given `root` directory: copies the template files, wires in the
+ * selected chat engine and UI, patches package.json, and installs
+ * dependencies.
+ */
+const installTSTemplate = async ({
+  appName,
+  root,
+  packageManager,
+  isOnline,
+  template,
+  framework,
+  engine,
+  ui,
+  eslint,
+  customApiPath,
+}: InstallTemplateArgs) => {
+  console.log(bold(`Using ${packageManager}.`));
+
+  /**
+   * Copy the template files to the target directory.
+   */
+  console.log("\nInitializing project with template:", template, "\n");
+  const templatePath = path.join(__dirname, "types", template, framework);
+  const copySource = ["**"];
+  // Leave out the ESLint config when the user opted out of linting.
+  if (!eslint) copySource.push("!eslintrc.json");
+
+  await copy(copySource, root, {
+    parents: true,
+    cwd: templatePath,
+    rename,
+  });
+
+  /**
+   * Copy the selected chat engine files to the target directory and reference it.
+   */
+  let relativeEngineDestPath;
+  const compPath = path.join(__dirname, "components");
+  if (engine && (framework === "express" || framework === "nextjs")) {
+    console.log("\nUsing chat engine:", engine, "\n");
+    const enginePath = path.join(compPath, "engines", engine);
+    // NextJS keeps the engine next to its API route; Express puts it
+    // under the controllers directory.
+    relativeEngineDestPath =
+      framework === "nextjs"
+        ? path.join("app", "api", "chat")
+        : path.join("src", "controllers");
+    await copy("**", path.join(root, relativeEngineDestPath, "engine"), {
+      parents: true,
+      cwd: enginePath,
+    });
+  }
+
+  /**
+   * Copy the selected UI files to the target directory and reference it.
+   */
+  if (framework === "nextjs" && ui !== "html") {
+    console.log("\nUsing UI:", ui, "\n");
+    const uiPath = path.join(compPath, "ui", ui);
+    const destUiPath = path.join(root, "app", "components", "ui");
+    // remove the default ui folder
+    await fs.rm(destUiPath, { recursive: true });
+    // copy the selected ui folder
+    await copy("**", destUiPath, {
+      parents: true,
+      cwd: uiPath,
+      rename,
+    });
+  }
+
+  /**
+   * Update the package.json scripts.
+   */
+  const packageJsonFile = path.join(root, "package.json");
+  const packageJson: any = JSON.parse(
+    await fs.readFile(packageJsonFile, "utf8"),
+  );
+  packageJson.name = appName;
+  packageJson.version = "0.1.0";
+
+  // Pin llamaindex to the same version as this CLI package.
+  packageJson.dependencies = {
+    ...packageJson.dependencies,
+    llamaindex: version,
+  };
+
+  if (framework === "nextjs" && customApiPath) {
+    console.log(
+      "\nUsing external API with custom API path:",
+      customApiPath,
+      "\n",
+    );
+    // remove the default api folder
+    const apiPath = path.join(root, "app", "api");
+    await fs.rm(apiPath, { recursive: true });
+    // modify the dev script to use the custom api path
+    // NOTE(review): `VAR=value cmd` prefix syntax is POSIX-only and
+    // won't work in Windows cmd.exe — verify intended platforms.
+    packageJson.scripts = {
+      ...packageJson.scripts,
+      dev: `NEXT_PUBLIC_CHAT_API=${customApiPath} next dev`,
+    };
+  }
+
+  if (engine === "context" && relativeEngineDestPath) {
+    // add generate script if using context engine
+    packageJson.scripts = {
+      ...packageJson.scripts,
+      generate: `node ${path.join(
+        relativeEngineDestPath,
+        "engine",
+        "generate.mjs",
+      )}`,
+    };
+  }
+
+  if (framework === "nextjs" && ui === "shadcn") {
+    // add shadcn dependencies to package.json
+    packageJson.dependencies = {
+      ...packageJson.dependencies,
+      "tailwind-merge": "^2",
+      "@radix-ui/react-slot": "^1",
+      "class-variance-authority": "^0.7",
+      "lucide-react": "^0.291",
+      remark: "^14.0.3",
+      "remark-code-import": "^1.2.0",
+      "remark-gfm": "^3.0.1",
+      "remark-math": "^5.1.1",
+      "react-markdown": "^8.0.7",
+      "react-syntax-highlighter": "^15.5.0",
+    };
+
+    packageJson.devDependencies = {
+      ...packageJson.devDependencies,
+      "@types/react-syntax-highlighter": "^15.5.6",
+    };
+  }
+
+  if (!eslint) {
+    // Remove packages starting with "eslint" from devDependencies
+    packageJson.devDependencies = Object.fromEntries(
+      Object.entries(packageJson.devDependencies).filter(
+        ([key]) => !key.startsWith("eslint"),
+      ),
+    );
+  }
+  await fs.writeFile(
+    packageJsonFile,
+    JSON.stringify(packageJson, null, 2) + os.EOL,
+  );
+
+  console.log("\nInstalling dependencies:");
+  for (const dependency in packageJson.dependencies)
+    console.log(`- ${cyan(dependency)}`);
+
+  console.log("\nInstalling devDependencies:");
+  for (const dependency in packageJson.devDependencies)
+    console.log(`- ${cyan(dependency)}`);
+
+  console.log();
+
+  await callPackageManager(packageManager, isOnline);
+};
+
+// Scaffold a Python (FastAPI) project: files are copied, but dependency
+// installation is left to the user (poetry install).
+// NOTE(review): the Pick<> type arguments appear lost in extraction —
+// likely Pick<InstallTemplateArgs, "root" | "template" | "framework">.
+const installPythonTemplate = async ({
+  root,
+  template,
+  framework,
+}: Pick) => {
+  console.log("\nInitializing Python project with template:", template, "\n");
+  const templatePath = path.join(__dirname, "types", template, framework);
+  await copy("**", root, {
+    parents: true,
+    cwd: templatePath,
+    // Same renaming rules as the TS path, minus eslintrc handling.
+    rename(name) {
+      switch (name) {
+        case "gitignore": {
+          return `.${name}`;
+        }
+        // README.md is ignored by webpack-asset-relocator-loader used by ncc:
+        // https://github.com/vercel/webpack-asset-relocator-loader/blob/e9308683d47ff507253e37c9bcbb99474603192b/src/asset-relocator.js#L227
+        case "README-template.md": {
+          return "README.md";
+        }
+        default: {
+          return name;
+        }
+      }
+    },
+  });
+
+  console.log(
+    "\nPython project, dependencies won't be installed automatically.\n",
+  );
+};
+
+/**
+ * Entry point: scaffold the chosen template into `props.root`.
+ * `backend` selects whether env file and test data are also produced.
+ */
+export const installTemplate = async (
+  props: InstallTemplateArgs & { backend: boolean },
+) => {
+  process.chdir(props.root);
+  if (props.framework === "fastapi") {
+    await installPythonTemplate(props);
+  } else {
+    await installTSTemplate(props);
+  }
+
+  if (props.backend) {
+    // This is a backend, so we need to copy the test data and create the env file.
+
+    // Copy the environment file to the target directory.
+    await createEnvLocalFile(props.root, props.framework, props.openAIKey);
+
+    // Copy test pdf file
+    await copyTestData(
+      props.root,
+      props.framework,
+      props.packageManager,
+      props.engine,
+    );
+  }
+};
+
+export * from "./types";
diff --git a/packages/create-llama/templates/types.ts b/packages/create-llama/templates/types.ts
new file mode 100644
index 0000000000..4bcaf5c7bb
--- /dev/null
+++ b/packages/create-llama/templates/types.ts
@@ -0,0 +1,20 @@
+import { PackageManager } from "../helpers/get-pkg-manager";
+
+// Chat API style: plain request/response vs. streamed responses.
+export type TemplateType = "simple" | "streaming";
+// Target framework of the generated app.
+export type TemplateFramework = "nextjs" | "express" | "fastapi";
+// Chat engine: "simple" = stateless LLM chat; "context" = RAG over ./data.
+export type TemplateEngine = "simple" | "context";
+// Frontend component flavor.
+export type TemplateUI = "html" | "shadcn";
+
+// Everything installTemplate() needs to scaffold a project.
+export interface InstallTemplateArgs {
+  appName: string;
+  // Absolute path of the directory to scaffold into.
+  root: string;
+  packageManager: PackageManager;
+  // Whether the npm registry is reachable (passed to the installer).
+  isOnline: boolean;
+  template: TemplateType;
+  framework: TemplateFramework;
+  engine?: TemplateEngine;
+  ui: TemplateUI;
+  // Whether to keep ESLint config/dependencies in the generated app.
+  eslint: boolean;
+  // NextJS only: point the UI at an external chat API instead of the
+  // built-in route (sets NEXT_PUBLIC_CHAT_API in the dev script).
+  customApiPath?: string;
+  // If provided, written to the generated env file as OPENAI_API_KEY.
+  openAIKey?: string;
+}
diff --git a/packages/create-llama/templates/types/simple/express/README-template.md b/packages/create-llama/templates/types/simple/express/README-template.md
new file mode 100644
index 0000000000..7ea94ab755
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/README-template.md
@@ -0,0 +1,50 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Express](https://expressjs.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```
+npm install
+```
+
+Second, run the development server:
+
+```
+npm run dev
+```
+
+Then call the express API endpoint `/api/chat` to see the result:
+
+```
+curl --location 'localhost:8000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```
+
+You can start editing the API by modifying `src/controllers/chat.controller.ts`. The endpoint auto-updates as you save the file.
+
+## Production
+
+First, build the project:
+
+```
+npm run build
+```
+
+You can then run the production server:
+
+```
+NODE_ENV=production npm run start
+```
+
+> Note that the `NODE_ENV` environment variable is set to `production`. This disables CORS for all origins.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/simple/express/eslintrc.json b/packages/create-llama/templates/types/simple/express/eslintrc.json
new file mode 100644
index 0000000000..c19581799d
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "eslint:recommended"
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/simple/express/index.ts b/packages/create-llama/templates/types/simple/express/index.ts
new file mode 100644
index 0000000000..70a43ab580
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/index.ts
@@ -0,0 +1,26 @@
+import cors from "cors";
+import "dotenv/config";
+import express, { Express, Request, Response } from "express";
+import chatRouter from "./src/routes/chat.route";
+
+const app: Express = express();
+const port = 8000;
+
+// Outside production we allow any origin so a separately-served
+// frontend can reach this API during development.
+const nodeEnv = process.env["NODE_ENV"];
+if (!nodeEnv || nodeEnv === "development") {
+  console.warn("Running in development mode - allowing CORS for all origins");
+  app.use(cors());
+}
+
+// Parse JSON request bodies.
+app.use(express.json());
+
+// Simple landing endpoint.
+app.get("/", (req: Request, res: Response) => {
+  res.send("LlamaIndex Express Server");
+});
+
+app.use("/api/chat", chatRouter);
+
+app.listen(port, () => {
+  console.log(`⚡️[server]: Server is running at http://localhost:${port}`);
+});
diff --git a/packages/create-llama/templates/types/simple/express/package.json b/packages/create-llama/templates/types/simple/express/package.json
new file mode 100644
index 0000000000..5590207146
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/package.json
@@ -0,0 +1,27 @@
+{
+ "name": "llama-index-express",
+ "version": "1.0.0",
+ "main": "dist/index.js",
+ "type": "module",
+ "scripts": {
+ "build": "tsup index.ts --format esm --dts",
+ "start": "node dist/index.js",
+ "dev": "concurrently \"tsup index.ts --format esm --dts --watch\" \"nodemon -q dist/index.js\""
+ },
+ "dependencies": {
+ "cors": "^2.8.5",
+ "dotenv": "^16.3.1",
+ "express": "^4",
+ "llamaindex": "0.0.31"
+ },
+ "devDependencies": {
+ "@types/cors": "^2.8.16",
+ "@types/express": "^4",
+ "@types/node": "^20",
+ "concurrently": "^8",
+ "eslint": "^8",
+ "nodemon": "^3",
+ "tsup": "^7",
+ "typescript": "^5"
+ }
+}
diff --git a/packages/create-llama/templates/types/simple/express/src/controllers/chat.controller.ts b/packages/create-llama/templates/types/simple/express/src/controllers/chat.controller.ts
new file mode 100644
index 0000000000..476c0c35d5
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/src/controllers/chat.controller.ts
@@ -0,0 +1,37 @@
+import { NextFunction, Request, Response } from "express";
+import { ChatMessage, OpenAI } from "llamaindex";
+import { createChatEngine } from "./engine";
+
+/**
+ * POST /api/chat — send the conversation to the chat engine and return
+ * the assistant's reply. Expects `{ messages: ChatMessage[] }` where
+ * the last message is from the user.
+ */
+export const chat = async (req: Request, res: Response, next: NextFunction) => {
+  try {
+    const { messages }: { messages: ChatMessage[] } = req.body;
+    // Fix: validate BEFORE mutating. The original called
+    // `messages.pop()` unconditionally, which throws a TypeError
+    // (-> 500) when `messages` is missing, instead of the intended 400.
+    const lastMessage =
+      Array.isArray(messages) && messages.length > 0
+        ? messages.pop()
+        : undefined;
+    if (!lastMessage || lastMessage.role !== "user") {
+      return res.status(400).json({
+        error:
+          "messages are required in the request body and the last message must be from the user",
+      });
+    }
+
+    const llm = new OpenAI({
+      model: "gpt-3.5-turbo",
+    });
+
+    const chatEngine = await createChatEngine(llm);
+
+    // `messages` now holds only the history (last entry was popped off).
+    const response = await chatEngine.chat(lastMessage.content, messages);
+    const result: ChatMessage = {
+      role: "assistant",
+      content: response.response,
+    };
+
+    return res.status(200).json({
+      result,
+    });
+  } catch (error) {
+    console.error("[LlamaIndex]", error);
+    return res.status(500).json({
+      error: (error as Error).message,
+    });
+  }
+};
diff --git a/packages/create-llama/templates/types/simple/express/src/controllers/engine/index.ts b/packages/create-llama/templates/types/simple/express/src/controllers/engine/index.ts
new file mode 100644
index 0000000000..abb02e90cd
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/src/controllers/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+// Build the default chat engine: a SimpleChatEngine that forwards
+// messages straight to the given LLM (no retrieval/context).
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
diff --git a/packages/create-llama/templates/types/simple/express/src/routes/chat.route.ts b/packages/create-llama/templates/types/simple/express/src/routes/chat.route.ts
new file mode 100644
index 0000000000..bdfeb08534
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/src/routes/chat.route.ts
@@ -0,0 +1,8 @@
+import express from "express";
+import { chat } from "../controllers/chat.controller";
+
+// Router mounted at /api/chat (see index.ts).
+const llmRouter = express.Router();
+
+// POST / — run a chat completion via the chat controller.
+llmRouter.route("/").post(chat);
+
+export default llmRouter;
diff --git a/packages/create-llama/templates/types/simple/express/tsconfig.json b/packages/create-llama/templates/types/simple/express/tsconfig.json
new file mode 100644
index 0000000000..e886da1ef3
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/express/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "compilerOptions": {
+ "target": "es2016",
+ "esModuleInterop": true,
+ "forceConsistentCasingInFileNames": true,
+ "strict": true,
+ "skipLibCheck": true,
+ "moduleResolution": "node"
+ }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/simple/fastapi/README-template.md b/packages/create-llama/templates/types/simple/fastapi/README-template.md
new file mode 100644
index 0000000000..f0b92bdfce
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/README-template.md
@@ -0,0 +1,42 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [FastAPI](https://fastapi.tiangolo.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, set up the environment:
+
+```
+poetry install
+poetry shell
+```
+
+Second, run the development server:
+
+```
+python main.py
+```
+
+Then call the API endpoint `/api/chat` to see the result:
+
+```
+curl --location 'localhost:8000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```
+
+You can start editing the API by modifying `app/api/routers/chat.py`. The endpoint auto-updates as you save the file.
+
+Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API.
+
+The API allows CORS for all origins to simplify development. You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`:
+
+```
+ENVIRONMENT=prod uvicorn main:app
+```
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
+
+You can check out [the LlamaIndex GitHub repository](https://github.com/run-llama/llama_index) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/__init__.py b/packages/create-llama/templates/types/simple/fastapi/app/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/api/__init__.py b/packages/create-llama/templates/types/simple/fastapi/app/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/api/routers/__init__.py b/packages/create-llama/templates/types/simple/fastapi/app/api/routers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/api/routers/chat.py b/packages/create-llama/templates/types/simple/fastapi/app/api/routers/chat.py
new file mode 100644
index 0000000000..81f602edbe
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/app/api/routers/chat.py
@@ -0,0 +1,56 @@
+from typing import List
+
+from app.utils.index import get_index
+from fastapi import APIRouter, Depends, HTTPException, status
+from llama_index import VectorStoreIndex
+from llama_index.llms.base import MessageRole, ChatMessage
+from pydantic import BaseModel
+
+chat_router = r = APIRouter()
+
+
+class _Message(BaseModel):
+ role: MessageRole
+ content: str
+
+
+class _ChatData(BaseModel):
+ messages: List[_Message]
+
+
+class _Result(BaseModel):
+ result: _Message
+
+
+@r.post("")
+async def chat(
+ data: _ChatData,
+ index: VectorStoreIndex = Depends(get_index),
+) -> _Result:
+ # check preconditions and get last message
+ if len(data.messages) == 0:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="No messages provided",
+ )
+ lastMessage = data.messages.pop()
+ if lastMessage.role != MessageRole.USER:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Last message must be from user",
+ )
+ # convert messages coming from the request to type ChatMessage
+ messages = [
+ ChatMessage(
+ role=m.role,
+ content=m.content,
+ )
+ for m in data.messages
+ ]
+
+ # query chat engine
+ chat_engine = index.as_chat_engine()
+ response = chat_engine.chat(lastMessage.content, messages)
+ return _Result(
+ result=_Message(role=MessageRole.ASSISTANT, content=response.response)
+ )
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/utils/__init__.py b/packages/create-llama/templates/types/simple/fastapi/app/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/simple/fastapi/app/utils/index.py b/packages/create-llama/templates/types/simple/fastapi/app/utils/index.py
new file mode 100644
index 0000000000..076ca76631
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/app/utils/index.py
@@ -0,0 +1,33 @@
+import logging
+import os
+
+from llama_index import (
+    SimpleDirectoryReader,
+    StorageContext,
+    VectorStoreIndex,
+    load_index_from_storage,
+)
+
+
+STORAGE_DIR = "./storage"  # directory to cache the generated index
+DATA_DIR = "./data"  # directory containing the documents to index
+
+
+def get_index():
+    """Return the vector index, building it from ./data on first use and
+    persisting it to ./storage so later calls just reload it.
+
+    NOTE(review): used as a FastAPI dependency, so this runs (and
+    reloads the index from disk) on every request — consider caching.
+    """
+    logger = logging.getLogger("uvicorn")
+    # check if storage already exists
+    if not os.path.exists(STORAGE_DIR):
+        logger.info("Creating new index")
+        # load the documents and create the index
+        documents = SimpleDirectoryReader(DATA_DIR).load_data()
+        index = VectorStoreIndex.from_documents(documents)
+        # store it for later
+        index.storage_context.persist(STORAGE_DIR)
+        logger.info(f"Finished creating new index. Stored in {STORAGE_DIR}")
+    else:
+        # load the existing index
+        logger.info(f"Loading index from {STORAGE_DIR}...")
+        storage_context = StorageContext.from_defaults(persist_dir=STORAGE_DIR)
+        index = load_index_from_storage(storage_context)
+        logger.info(f"Finished loading index from {STORAGE_DIR}")
+    return index
diff --git a/packages/create-llama/templates/types/simple/fastapi/gitignore b/packages/create-llama/templates/types/simple/fastapi/gitignore
new file mode 100644
index 0000000000..069fcb4020
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/gitignore
@@ -0,0 +1,2 @@
+__pycache__
+storage
diff --git a/packages/create-llama/templates/types/simple/fastapi/main.py b/packages/create-llama/templates/types/simple/fastapi/main.py
new file mode 100644
index 0000000000..9dc1a0afb6
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/main.py
@@ -0,0 +1,31 @@
import logging
import os
import uvicorn
from app.api.routers.chat import chat_router
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from dotenv import load_dotenv

# Load variables from a local .env file (e.g. API keys) before the app reads them.
load_dotenv()

app = FastAPI()

environment = os.getenv("ENVIRONMENT", "dev")  # defaults to "dev" when unset


if environment == "dev":
    # Development convenience: allow any origin so a separately-served
    # frontend can call this API. Set ENVIRONMENT to anything else to disable.
    logger = logging.getLogger("uvicorn")
    logger.warning("Running in development mode - allowing CORS for all origins")
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

app.include_router(chat_router, prefix="/api/chat")


if __name__ == "__main__":
    # reload=True restarts the server on code changes (development use).
    uvicorn.run(app="main:app", host="0.0.0.0", reload=True)
diff --git a/packages/create-llama/templates/types/simple/fastapi/pyproject.toml b/packages/create-llama/templates/types/simple/fastapi/pyproject.toml
new file mode 100644
index 0000000000..59d182bbb4
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/fastapi/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "llamaindex-fastapi"
+version = "0.1.0"
+description = ""
+authors = ["Marcus Schiesser "]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11,<3.12"
+fastapi = "^0.104.1"
+uvicorn = { extras = ["standard"], version = "^0.23.2" }
+llama-index = "^0.8.56"
+pypdf = "^3.17.0"
+python-dotenv = "^1.0.0"
+
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/packages/create-llama/templates/types/simple/fastapi/tests/__init__.py b/packages/create-llama/templates/types/simple/fastapi/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/simple/nextjs/.env.example b/packages/create-llama/templates/types/simple/nextjs/.env.example
new file mode 100644
index 0000000000..7ac0a01551
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/.env.example
@@ -0,0 +1,3 @@
+# Rename this file to `.env.local` to use environment variables locally with `next dev`
+# https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables
+MY_HOST="example.com"
diff --git a/packages/create-llama/templates/types/simple/nextjs/README-template.md b/packages/create-llama/templates/types/simple/nextjs/README-template.md
new file mode 100644
index 0000000000..1509ded7c3
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/README-template.md
@@ -0,0 +1,30 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```
+npm install
+```
+
+Second, run the development server:
+
+```
+npm run dev
+```
+
+Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
+
+This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (Typescript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/api/chat/engine/index.ts b/packages/create-llama/templates/types/simple/nextjs/app/api/chat/engine/index.ts
new file mode 100644
index 0000000000..abb02e90cd
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/api/chat/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+ return new SimpleChatEngine({
+ llm,
+ });
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/api/chat/route.ts b/packages/create-llama/templates/types/simple/nextjs/app/api/chat/route.ts
new file mode 100644
index 0000000000..097341ab43
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/api/chat/route.ts
@@ -0,0 +1,47 @@
+import { ChatMessage, OpenAI } from "llamaindex";
+import { NextRequest, NextResponse } from "next/server";
+import { createChatEngine } from "./engine";
+
+export const runtime = "nodejs";
+export const dynamic = "force-dynamic";
+
+export async function POST(request: NextRequest) {
+ try {
+ const body = await request.json();
+ const { messages }: { messages: ChatMessage[] } = body;
+ const lastMessage = messages.pop();
+ if (!messages || !lastMessage || lastMessage.role !== "user") {
+ return NextResponse.json(
+ {
+ error:
+ "messages are required in the request body and the last message must be from the user",
+ },
+ { status: 400 },
+ );
+ }
+
+ const llm = new OpenAI({
+ model: "gpt-3.5-turbo",
+ });
+
+ const chatEngine = await createChatEngine(llm);
+
+ const response = await chatEngine.chat(lastMessage.content, messages);
+ const result: ChatMessage = {
+ role: "assistant",
+ content: response.response,
+ };
+
+ return NextResponse.json({ result });
+ } catch (error) {
+ console.error("[LlamaIndex]", error);
+ return NextResponse.json(
+ {
+ error: (error as Error).message,
+ },
+ {
+ status: 500,
+ },
+ );
+ }
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/chat-section.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/chat-section.tsx
new file mode 100644
index 0000000000..133a0a884b
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/chat-section.tsx
@@ -0,0 +1,80 @@
+"use client";
+
+import { nanoid } from "nanoid";
+import { useState } from "react";
+import { ChatInput, ChatInputProps, ChatMessages, Message } from "./ui/chat";
+
+function useChat(): ChatInputProps & { messages: Message[] } {
+ const [messages, setMessages] = useState([]);
+ const [isLoading, setIsLoading] = useState(false);
+ const [input, setInput] = useState("");
+
+ const getAssistantMessage = async (messages: Message[]) => {
+ const response = await fetch(
+ process.env.NEXT_PUBLIC_CHAT_API ?? "/api/chat",
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({
+ messages,
+ }),
+ },
+ );
+ const data = await response.json();
+ const assistantMessage = data.result as Message;
+ return assistantMessage;
+ };
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault();
+ if (!input) return;
+
+ setIsLoading(true);
+
+ try {
+ const newMessages = [
+ ...messages,
+ { id: nanoid(), content: input, role: "user" },
+ ];
+ setMessages(newMessages);
+ setInput("");
+ const assistantMessage = await getAssistantMessage(newMessages);
+ setMessages([...newMessages, { ...assistantMessage, id: nanoid() }]);
+ } catch (error: any) {
+ console.log(error);
+ alert(error.message);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const handleInputChange = (e: any): void => {
+ setInput(e.target.value);
+ };
+
+ return {
+ messages,
+ isLoading,
+ input,
+ handleSubmit,
+ handleInputChange,
+ };
+}
+
+export default function ChatSection() {
+ const { messages, isLoading, input, handleSubmit, handleInputChange } =
+ useChat();
+ return (
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/header.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/header.tsx
new file mode 100644
index 0000000000..2b0e488f76
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/header.tsx
@@ -0,0 +1,28 @@
import Image from "next/image";

// Page header shown above the chat section.
// NOTE(review): the JSX markup of this component (including the next/image
// usage implied by the import) appears to have been stripped during
// extraction - only text nodes such as "Get started by editing app/page.tsx"
// remain. Restore the markup from the create-llama template before shipping.
export default function Header() {
  return (

      Get started by editing
      app/page.tsx



  );
}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-avatar.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-avatar.tsx
new file mode 100644
index 0000000000..cd241104e4
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-avatar.tsx
@@ -0,0 +1,34 @@
+"use client";
+
+import Image from "next/image";
+import { Message } from "./chat-messages";
+
+export default function ChatAvatar(message: Message) {
+ if (message.role === "user") {
+ return (
+
+ );
+ }
+
+ return (
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-input.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-input.tsx
new file mode 100644
index 0000000000..3eb979b027
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-input.tsx
@@ -0,0 +1,42 @@
+"use client";
+
+export interface ChatInputProps {
+ /** The current value of the input */
+ input?: string;
+ /** An input/textarea-ready onChange handler to control the value of the input */
+ handleInputChange?: (
+ e:
+ | React.ChangeEvent
+ | React.ChangeEvent,
+ ) => void;
+ /** Form submission handler to automatically reset input and append a user message */
+ handleSubmit: (e: React.FormEvent) => void;
+ isLoading: boolean;
+}
+
+export default function ChatInput(props: ChatInputProps) {
+ return (
+ <>
+
+ >
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-item.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-item.tsx
new file mode 100644
index 0000000000..2244f729a8
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-item.tsx
@@ -0,0 +1,13 @@
+"use client";
+
+import ChatAvatar from "./chat-avatar";
+import { Message } from "./chat-messages";
+
+export default function ChatItem(message: Message) {
+ return (
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-messages.tsx b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-messages.tsx
new file mode 100644
index 0000000000..65eacabbfb
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/chat-messages.tsx
@@ -0,0 +1,38 @@
+"use client";
+
+import { useEffect, useRef } from "react";
+import ChatItem from "./chat-item";
+
+export interface Message {
+ id: string;
+ content: string;
+ role: string;
+}
+
+export default function ChatMessages({ messages }: { messages: Message[] }) {
+ const scrollableChatContainerRef = useRef(null);
+
+ const scrollToBottom = () => {
+ if (scrollableChatContainerRef.current) {
+ scrollableChatContainerRef.current.scrollTop =
+ scrollableChatContainerRef.current.scrollHeight;
+ }
+ };
+
+ useEffect(() => {
+ scrollToBottom();
+ }, [messages.length]);
+
+ return (
+
+
+ {messages.map((m: Message) => (
+
+ ))}
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/index.ts b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/index.ts
new file mode 100644
index 0000000000..4ccc54926f
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/components/ui/chat/index.ts
@@ -0,0 +1,6 @@
+import ChatInput from "./chat-input";
+import ChatMessages from "./chat-messages";
+
+export type { ChatInputProps } from "./chat-input";
+export type { Message } from "./chat-messages";
+export { ChatMessages, ChatInput };
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/favicon.ico b/packages/create-llama/templates/types/simple/nextjs/app/favicon.ico
new file mode 100644
index 0000000000..a1eaef62f2
Binary files /dev/null and b/packages/create-llama/templates/types/simple/nextjs/app/favicon.ico differ
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/globals.css b/packages/create-llama/templates/types/simple/nextjs/app/globals.css
new file mode 100644
index 0000000000..09b85ed2c9
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/globals.css
@@ -0,0 +1,94 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+@layer base {
+ :root {
+ --background: 0 0% 100%;
+ --foreground: 222.2 47.4% 11.2%;
+
+ --muted: 210 40% 96.1%;
+ --muted-foreground: 215.4 16.3% 46.9%;
+
+ --popover: 0 0% 100%;
+ --popover-foreground: 222.2 47.4% 11.2%;
+
+ --border: 214.3 31.8% 91.4%;
+ --input: 214.3 31.8% 91.4%;
+
+ --card: 0 0% 100%;
+ --card-foreground: 222.2 47.4% 11.2%;
+
+ --primary: 222.2 47.4% 11.2%;
+ --primary-foreground: 210 40% 98%;
+
+ --secondary: 210 40% 96.1%;
+ --secondary-foreground: 222.2 47.4% 11.2%;
+
+ --accent: 210 40% 96.1%;
+ --accent-foreground: 222.2 47.4% 11.2%;
+
+ --destructive: 0 100% 50%;
+ --destructive-foreground: 210 40% 98%;
+
+ --ring: 215 20.2% 65.1%;
+
+ --radius: 0.5rem;
+ }
+
+ .dark {
+ --background: 224 71% 4%;
+ --foreground: 213 31% 91%;
+
+ --muted: 223 47% 11%;
+ --muted-foreground: 215.4 16.3% 56.9%;
+
+ --accent: 216 34% 17%;
+ --accent-foreground: 210 40% 98%;
+
+ --popover: 224 71% 4%;
+ --popover-foreground: 215 20.2% 65.1%;
+
+ --border: 216 34% 17%;
+ --input: 216 34% 17%;
+
+ --card: 224 71% 4%;
+ --card-foreground: 213 31% 91%;
+
+ --primary: 210 40% 98%;
+ --primary-foreground: 222.2 47.4% 1.2%;
+
+ --secondary: 222.2 47.4% 11.2%;
+ --secondary-foreground: 210 40% 98%;
+
+ --destructive: 0 63% 31%;
+ --destructive-foreground: 210 40% 98%;
+
+ --ring: 216 34% 17%;
+
+ --radius: 0.5rem;
+ }
+}
+
+@layer base {
+ * {
+ @apply border-border;
+ }
+ body {
+ @apply bg-background text-foreground;
+ font-feature-settings:
+ "rlig" 1,
+ "calt" 1;
+ }
+ .background-gradient {
+ background-color: #fff;
+ background-image: radial-gradient(
+ at 21% 11%,
+ rgba(186, 186, 233, 0.53) 0,
+ transparent 50%
+ ),
+ radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
+ radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
+ radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
+ }
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/layout.tsx b/packages/create-llama/templates/types/simple/nextjs/app/layout.tsx
new file mode 100644
index 0000000000..fb09770627
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/layout.tsx
@@ -0,0 +1,22 @@
+import type { Metadata } from "next";
+import { Inter } from "next/font/google";
+import "./globals.css";
+
+const inter = Inter({ subsets: ["latin"] });
+
+export const metadata: Metadata = {
+ title: "Create Llama App",
+ description: "Generated by create-llama",
+};
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/app/page.tsx b/packages/create-llama/templates/types/simple/nextjs/app/page.tsx
new file mode 100644
index 0000000000..31f51facb2
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/app/page.tsx
@@ -0,0 +1,11 @@
+import ChatSection from "@/app/components/chat-section";
+import Header from "@/app/components/header";
+
+export default function Home() {
+ return (
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/eslintrc.json b/packages/create-llama/templates/types/simple/nextjs/eslintrc.json
new file mode 100644
index 0000000000..bffb357a71
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "next/core-web-vitals"
+}
diff --git a/packages/create-llama/templates/types/simple/nextjs/gitignore b/packages/create-llama/templates/types/simple/nextjs/gitignore
new file mode 100644
index 0000000000..8f322f0d8f
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/gitignore
@@ -0,0 +1,35 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/packages/create-llama/templates/types/simple/nextjs/next-env.d.ts b/packages/create-llama/templates/types/simple/nextjs/next-env.d.ts
new file mode 100644
index 0000000000..4f11a03dc6
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/next-env.d.ts
@@ -0,0 +1,5 @@
+///
+///
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/basic-features/typescript for more information.
diff --git a/packages/create-llama/templates/types/simple/nextjs/next.config.js b/packages/create-llama/templates/types/simple/nextjs/next.config.js
new file mode 100644
index 0000000000..0b2c2bf173
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/next.config.js
@@ -0,0 +1,8 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
  experimental: {
    // Exclude "llamaindex" from the server-components bundle; it is resolved
    // from node_modules at runtime instead of being compiled by Next.js.
    serverComponentsExternalPackages: ["llamaindex"],
  },
}

module.exports = nextConfig
diff --git a/packages/create-llama/templates/types/simple/nextjs/package.json b/packages/create-llama/templates/types/simple/nextjs/package.json
new file mode 100644
index 0000000000..990b41c832
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/package.json
@@ -0,0 +1,28 @@
+{
+ "name": "llama-index-nextjs",
+ "version": "1.0.0",
+ "scripts": {
+ "dev": "next dev",
+ "build": "next build",
+ "start": "next start",
+ "lint": "next lint"
+ },
+ "dependencies": {
+ "llamaindex": "0.0.31",
+ "nanoid": "^5",
+ "next": "^13",
+ "react": "^18",
+ "react-dom": "^18"
+ },
+ "devDependencies": {
+ "@types/node": "^20",
+ "@types/react": "^18",
+ "@types/react-dom": "^18",
+ "autoprefixer": "^10",
+ "eslint": "^8",
+ "eslint-config-next": "^13",
+ "postcss": "^8",
+ "tailwindcss": "^3",
+ "typescript": "^5"
+ }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/simple/nextjs/postcss.config.js b/packages/create-llama/templates/types/simple/nextjs/postcss.config.js
new file mode 100644
index 0000000000..33ad091d26
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/postcss.config.js
@@ -0,0 +1,6 @@
// PostCSS pipeline: run Tailwind first, then add vendor prefixes.
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}
diff --git a/packages/create-llama/templates/types/simple/nextjs/public/llama.png b/packages/create-llama/templates/types/simple/nextjs/public/llama.png
new file mode 100644
index 0000000000..d4efba3b81
Binary files /dev/null and b/packages/create-llama/templates/types/simple/nextjs/public/llama.png differ
diff --git a/packages/create-llama/templates/types/simple/nextjs/tailwind.config.ts b/packages/create-llama/templates/types/simple/nextjs/tailwind.config.ts
new file mode 100644
index 0000000000..aa5580affa
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/tailwind.config.ts
@@ -0,0 +1,78 @@
+import type { Config } from "tailwindcss";
+import { fontFamily } from "tailwindcss/defaultTheme";
+
+const config: Config = {
+ darkMode: ["class"],
+ content: ["app/**/*.{ts,tsx}", "components/**/*.{ts,tsx}"],
+ theme: {
+ container: {
+ center: true,
+ padding: "2rem",
+ screens: {
+ "2xl": "1400px",
+ },
+ },
+ extend: {
+ colors: {
+ border: "hsl(var(--border))",
+ input: "hsl(var(--input))",
+ ring: "hsl(var(--ring))",
+ background: "hsl(var(--background))",
+ foreground: "hsl(var(--foreground))",
+ primary: {
+ DEFAULT: "hsl(var(--primary))",
+ foreground: "hsl(var(--primary-foreground))",
+ },
+ secondary: {
+ DEFAULT: "hsl(var(--secondary))",
+ foreground: "hsl(var(--secondary-foreground))",
+ },
+ destructive: {
+ DEFAULT: "hsl(var(--destructive) / )",
+ foreground: "hsl(var(--destructive-foreground) / )",
+ },
+ muted: {
+ DEFAULT: "hsl(var(--muted))",
+ foreground: "hsl(var(--muted-foreground))",
+ },
+ accent: {
+ DEFAULT: "hsl(var(--accent))",
+ foreground: "hsl(var(--accent-foreground))",
+ },
+ popover: {
+ DEFAULT: "hsl(var(--popover))",
+ foreground: "hsl(var(--popover-foreground))",
+ },
+ card: {
+ DEFAULT: "hsl(var(--card))",
+ foreground: "hsl(var(--card-foreground))",
+ },
+ },
+ borderRadius: {
+ xl: `calc(var(--radius) + 4px)`,
+ lg: `var(--radius)`,
+ md: `calc(var(--radius) - 2px)`,
+ sm: "calc(var(--radius) - 4px)",
+ },
+ fontFamily: {
+ sans: ["var(--font-sans)", ...fontFamily.sans],
+ },
+ keyframes: {
+ "accordion-down": {
+ from: { height: "0" },
+ to: { height: "var(--radix-accordion-content-height)" },
+ },
+ "accordion-up": {
+ from: { height: "var(--radix-accordion-content-height)" },
+ to: { height: "0" },
+ },
+ },
+ animation: {
+ "accordion-down": "accordion-down 0.2s ease-out",
+ "accordion-up": "accordion-up 0.2s ease-out",
+ },
+ },
+ },
+ plugins: [],
+};
+export default config;
diff --git a/packages/create-llama/templates/types/simple/nextjs/tsconfig.json b/packages/create-llama/templates/types/simple/nextjs/tsconfig.json
new file mode 100644
index 0000000000..c714696378
--- /dev/null
+++ b/packages/create-llama/templates/types/simple/nextjs/tsconfig.json
@@ -0,0 +1,27 @@
+{
+ "compilerOptions": {
+ "target": "es5",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": true,
+ "skipLibCheck": true,
+ "strict": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "bundler",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "incremental": true,
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "paths": {
+ "@/*": ["./*"]
+ }
+ },
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/packages/create-llama/templates/types/streaming/express/README-template.md b/packages/create-llama/templates/types/streaming/express/README-template.md
new file mode 100644
index 0000000000..7ea94ab755
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/README-template.md
@@ -0,0 +1,50 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Express](https://expressjs.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```
+npm install
+```
+
+Second, run the development server:
+
+```
+npm run dev
+```
+
+Then call the express API endpoint `/api/chat` to see the result:
+
+```
+curl --location 'localhost:8000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```
+
+You can start editing the API by modifying `src/controllers/chat.controller.ts`. The endpoint auto-updates as you save the file.
+
+## Production
+
+First, build the project:
+
+```
+npm run build
+```
+
+You can then run the production server:
+
+```
+NODE_ENV=production npm run start
+```
+
+> Note that the `NODE_ENV` environment variable is set to `production`. This disables the permissive CORS configuration used in development, so cross-origin requests are no longer allowed by default.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (Typescript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/streaming/express/eslintrc.json b/packages/create-llama/templates/types/streaming/express/eslintrc.json
new file mode 100644
index 0000000000..c19581799d
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "eslint:recommended"
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/streaming/express/index.ts b/packages/create-llama/templates/types/streaming/express/index.ts
new file mode 100644
index 0000000000..70a43ab580
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/index.ts
@@ -0,0 +1,26 @@
+import cors from "cors";
+import "dotenv/config";
+import express, { Express, Request, Response } from "express";
+import chatRouter from "./src/routes/chat.route";
+
+const app: Express = express();
+const port = 8000;
+
+const env = process.env["NODE_ENV"];
+const isDevelopment = !env || env === "development";
+if (isDevelopment) {
+ console.warn("Running in development mode - allowing CORS for all origins");
+ app.use(cors());
+}
+
+app.use(express.json());
+
+app.get("/", (req: Request, res: Response) => {
+ res.send("LlamaIndex Express Server");
+});
+
+app.use("/api/chat", chatRouter);
+
+app.listen(port, () => {
+ console.log(`⚡️[server]: Server is running at http://localhost:${port}`);
+});
diff --git a/packages/create-llama/templates/types/streaming/express/package.json b/packages/create-llama/templates/types/streaming/express/package.json
new file mode 100644
index 0000000000..72f127b407
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/package.json
@@ -0,0 +1,28 @@
+{
+ "name": "llama-index-express-streaming",
+ "version": "1.0.0",
+ "main": "dist/index.js",
+ "type": "module",
+ "scripts": {
+ "build": "tsup index.ts --format esm --dts",
+ "start": "node dist/index.js",
+ "dev": "concurrently \"tsup index.ts --format esm --dts --watch\" \"nodemon -q dist/index.js\""
+ },
+ "dependencies": {
+ "ai": "^2",
+ "cors": "^2.8.5",
+ "dotenv": "^16.3.1",
+ "express": "^4",
+ "llamaindex": "0.0.31"
+ },
+ "devDependencies": {
+ "@types/cors": "^2.8.16",
+ "@types/express": "^4",
+ "@types/node": "^20",
+ "concurrently": "^8",
+ "eslint": "^8",
+ "nodemon": "^3",
+ "tsup": "^7",
+ "typescript": "^5"
+ }
+}
diff --git a/packages/create-llama/templates/types/streaming/express/src/controllers/chat.controller.ts b/packages/create-llama/templates/types/streaming/express/src/controllers/chat.controller.ts
new file mode 100644
index 0000000000..162b5db74d
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/src/controllers/chat.controller.ts
@@ -0,0 +1,36 @@
+import { streamToResponse } from "ai";
+import { NextFunction, Request, Response } from "express";
+import { ChatMessage, OpenAI } from "llamaindex";
+import { createChatEngine } from "./engine";
+import { LlamaIndexStream } from "./llamaindex-stream";
+
+export const chat = async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const { messages }: { messages: ChatMessage[] } = req.body;
+ const lastMessage = messages.pop();
+ if (!messages || !lastMessage || lastMessage.role !== "user") {
+ return res.status(400).json({
+ error:
+ "messages are required in the request body and the last message must be from the user",
+ });
+ }
+
+ const llm = new OpenAI({
+ model: "gpt-3.5-turbo",
+ });
+
+ const chatEngine = await createChatEngine(llm);
+
+ const response = await chatEngine.chat(lastMessage.content, messages, true);
+
+ // Transform the response into a readable stream
+ const stream = LlamaIndexStream(response);
+
+ streamToResponse(stream, res);
+ } catch (error) {
+ console.error("[LlamaIndex]", error);
+ return res.status(500).json({
+ error: (error as Error).message,
+ });
+ }
+};
diff --git a/packages/create-llama/templates/types/streaming/express/src/controllers/engine/index.ts b/packages/create-llama/templates/types/streaming/express/src/controllers/engine/index.ts
new file mode 100644
index 0000000000..abb02e90cd
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/src/controllers/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+ return new SimpleChatEngine({
+ llm,
+ });
+}
diff --git a/packages/create-llama/templates/types/streaming/express/src/controllers/llamaindex-stream.ts b/packages/create-llama/templates/types/streaming/express/src/controllers/llamaindex-stream.ts
new file mode 100644
index 0000000000..12328de875
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/src/controllers/llamaindex-stream.ts
@@ -0,0 +1,35 @@
+import {
+ createCallbacksTransformer,
+ createStreamDataTransformer,
+ trimStartOfStreamHelper,
+ type AIStreamCallbacksAndOptions,
+} from "ai";
+
+function createParser(res: AsyncGenerator) {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ return new ReadableStream({
+ async pull(controller): Promise {
+ const { value, done } = await res.next();
+ if (done) {
+ controller.close();
+ return;
+ }
+
+ const text = trimStartOfStream(value ?? "");
+ if (text) {
+ controller.enqueue(text);
+ }
+ },
+ });
+}
+
+export function LlamaIndexStream(
+ res: AsyncGenerator,
+ callbacks?: AIStreamCallbacksAndOptions,
+): ReadableStream {
+ return createParser(res)
+ .pipeThrough(createCallbacksTransformer(callbacks))
+ .pipeThrough(
+ createStreamDataTransformer(callbacks?.experimental_streamData),
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/express/src/routes/chat.route.ts b/packages/create-llama/templates/types/streaming/express/src/routes/chat.route.ts
new file mode 100644
index 0000000000..bdfeb08534
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/src/routes/chat.route.ts
@@ -0,0 +1,8 @@
+import express from "express";
+import { chat } from "../controllers/chat.controller";
+
+const llmRouter = express.Router();
+
+llmRouter.route("/").post(chat);
+
+export default llmRouter;
diff --git a/packages/create-llama/templates/types/streaming/express/tsconfig.json b/packages/create-llama/templates/types/streaming/express/tsconfig.json
new file mode 100644
index 0000000000..e886da1ef3
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/express/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "compilerOptions": {
+ "target": "es2016",
+ "esModuleInterop": true,
+ "forceConsistentCasingInFileNames": true,
+ "strict": true,
+ "skipLibCheck": true,
+ "moduleResolution": "node"
+ }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/streaming/fastapi/README-template.md b/packages/create-llama/templates/types/streaming/fastapi/README-template.md
new file mode 100644
index 0000000000..f0b92bdfce
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/README-template.md
@@ -0,0 +1,42 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [FastAPI](https://fastapi.tiangolo.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, setup the environment:
+
+```
+poetry install
+poetry shell
+```
+
+Second, run the development server:
+
+```
+python main.py
+```
+
+Then call the API endpoint `/api/chat` to see the result:
+
+```
+curl --location 'localhost:8000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```
+
+You can start editing the API by modifying `app/api/routers/chat.py`. The endpoint auto-updates as you save the file.
+
+Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API.
+
+The API allows CORS for all origins to simplify development. You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`:
+
+```
+ENVIRONMENT=prod uvicorn main:app
+```
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
+
+You can check out [the LlamaIndex GitHub repository](https://github.com/run-llama/llama_index) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/__init__.py b/packages/create-llama/templates/types/streaming/fastapi/app/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/api/__init__.py b/packages/create-llama/templates/types/streaming/fastapi/app/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/api/routers/__init__.py b/packages/create-llama/templates/types/streaming/fastapi/app/api/routers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/api/routers/chat.py b/packages/create-llama/templates/types/streaming/fastapi/app/api/routers/chat.py
new file mode 100644
index 0000000000..36b618e232
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/app/api/routers/chat.py
@@ -0,0 +1,65 @@
+from typing import List
+
+from fastapi.responses import StreamingResponse
+
+from app.utils.json import json_to_model
+from app.utils.index import get_index
+from fastapi import APIRouter, Depends, HTTPException, Request, status
+from llama_index import VectorStoreIndex
+from llama_index.llms.base import MessageRole, ChatMessage
+from pydantic import BaseModel
+
+chat_router = r = APIRouter()
+
+
+class _Message(BaseModel):
+ role: MessageRole
+ content: str
+
+
+class _ChatData(BaseModel):
+ messages: List[_Message]
+
+
+@r.post("")
+async def chat(
+ request: Request,
+ # Note: To support clients sending a JSON object using content-type "text/plain",
+ # we need to use Depends(json_to_model(_ChatData)) here
+ data: _ChatData = Depends(json_to_model(_ChatData)),
+ index: VectorStoreIndex = Depends(get_index),
+):
+ # check preconditions and get last message
+ if len(data.messages) == 0:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="No messages provided",
+ )
+ lastMessage = data.messages.pop()
+ if lastMessage.role != MessageRole.USER:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Last message must be from user",
+ )
+ # convert messages coming from the request to type ChatMessage
+ messages = [
+ ChatMessage(
+ role=m.role,
+ content=m.content,
+ )
+ for m in data.messages
+ ]
+
+ # query chat engine
+ chat_engine = index.as_chat_engine()
+ response = chat_engine.stream_chat(lastMessage.content, messages)
+
+ # stream response
+ async def event_generator():
+ for token in response.response_gen:
+ # If client closes connection, stop sending events
+ if await request.is_disconnected():
+ break
+ yield token
+
+ return StreamingResponse(event_generator(), media_type="text/plain")
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/utils/__init__.py b/packages/create-llama/templates/types/streaming/fastapi/app/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/utils/index.py b/packages/create-llama/templates/types/streaming/fastapi/app/utils/index.py
new file mode 100644
index 0000000000..076ca76631
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/app/utils/index.py
@@ -0,0 +1,33 @@
+import logging
+import os
+
+from llama_index import (
+ SimpleDirectoryReader,
+ StorageContext,
+ VectorStoreIndex,
+ load_index_from_storage,
+)
+
+
+STORAGE_DIR = "./storage" # directory to cache the generated index
+DATA_DIR = "./data" # directory containing the documents to index
+
+
+def get_index():
+ logger = logging.getLogger("uvicorn")
+ # check if storage already exists
+ if not os.path.exists(STORAGE_DIR):
+ logger.info("Creating new index")
+ # load the documents and create the index
+ documents = SimpleDirectoryReader(DATA_DIR).load_data()
+ index = VectorStoreIndex.from_documents(documents)
+ # store it for later
+ index.storage_context.persist(STORAGE_DIR)
+ logger.info(f"Finished creating new index. Stored in {STORAGE_DIR}")
+ else:
+ # load the existing index
+ logger.info(f"Loading index from {STORAGE_DIR}...")
+ storage_context = StorageContext.from_defaults(persist_dir=STORAGE_DIR)
+ index = load_index_from_storage(storage_context)
+ logger.info(f"Finished loading index from {STORAGE_DIR}")
+ return index
diff --git a/packages/create-llama/templates/types/streaming/fastapi/app/utils/json.py b/packages/create-llama/templates/types/streaming/fastapi/app/utils/json.py
new file mode 100644
index 0000000000..d9a847f53e
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/app/utils/json.py
@@ -0,0 +1,22 @@
+import json
+from typing import TypeVar
+from fastapi import HTTPException, Request
+
+from pydantic import BaseModel, ValidationError
+
+
+T = TypeVar("T", bound=BaseModel)
+
+
+def json_to_model(cls: T):
+ async def get_json(request: Request) -> T:
+ body = await request.body()
+ try:
+ data_dict = json.loads(body.decode("utf-8"))
+ return cls(**data_dict)
+ except (json.JSONDecodeError, ValidationError) as e:
+ raise HTTPException(
+ status_code=400, detail=f"Could not decode JSON: {str(e)}"
+ )
+
+ return get_json
diff --git a/packages/create-llama/templates/types/streaming/fastapi/gitignore b/packages/create-llama/templates/types/streaming/fastapi/gitignore
new file mode 100644
index 0000000000..069fcb4020
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/gitignore
@@ -0,0 +1,2 @@
+__pycache__
+storage
diff --git a/packages/create-llama/templates/types/streaming/fastapi/main.py b/packages/create-llama/templates/types/streaming/fastapi/main.py
new file mode 100644
index 0000000000..9dc1a0afb6
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/main.py
@@ -0,0 +1,31 @@
+import logging
+import os
+import uvicorn
+from app.api.routers.chat import chat_router
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from dotenv import load_dotenv
+
+load_dotenv()
+
+app = FastAPI()
+
+environment = os.getenv("ENVIRONMENT", "dev") # Default to 'dev' if not set
+
+
+if environment == "dev":
+ logger = logging.getLogger("uvicorn")
+ logger.warning("Running in development mode - allowing CORS for all origins")
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+app.include_router(chat_router, prefix="/api/chat")
+
+
+if __name__ == "__main__":
+ uvicorn.run(app="main:app", host="0.0.0.0", reload=True)
diff --git a/packages/create-llama/templates/types/streaming/fastapi/pyproject.toml b/packages/create-llama/templates/types/streaming/fastapi/pyproject.toml
new file mode 100644
index 0000000000..f5b75b3cfd
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/fastapi/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "llamaindex-fastapi-streaming"
+version = "0.1.0"
+description = ""
+authors = ["Marcus Schiesser "]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11,<3.12"
+fastapi = "^0.104.1"
+uvicorn = { extras = ["standard"], version = "^0.23.2" }
+llama-index = "^0.8.56"
+pypdf = "^3.17.0"
+python-dotenv = "^1.0.0"
+
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/packages/create-llama/templates/types/streaming/fastapi/tests/__init__.py b/packages/create-llama/templates/types/streaming/fastapi/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/create-llama/templates/types/streaming/nextjs/.env.example b/packages/create-llama/templates/types/streaming/nextjs/.env.example
new file mode 100644
index 0000000000..7ac0a01551
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/.env.example
@@ -0,0 +1,3 @@
+# Rename this file to `.env.local` to use environment variables locally with `next dev`
+# https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables
+MY_HOST="example.com"
diff --git a/packages/create-llama/templates/types/streaming/nextjs/README-template.md b/packages/create-llama/templates/types/streaming/nextjs/README-template.md
new file mode 100644
index 0000000000..1509ded7c3
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/README-template.md
@@ -0,0 +1,30 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```
+npm install
+```
+
+Second, run the development server:
+
+```
+npm run dev
+```
+
+Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
+
+This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/engine/index.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/engine/index.ts
new file mode 100644
index 0000000000..abb02e90cd
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+ return new SimpleChatEngine({
+ llm,
+ });
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
new file mode 100644
index 0000000000..12328de875
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
@@ -0,0 +1,35 @@
+import {
+ createCallbacksTransformer,
+ createStreamDataTransformer,
+ trimStartOfStreamHelper,
+ type AIStreamCallbacksAndOptions,
+} from "ai";
+
+function createParser(res: AsyncGenerator) {
+ const trimStartOfStream = trimStartOfStreamHelper();
+ return new ReadableStream({
+ async pull(controller): Promise {
+ const { value, done } = await res.next();
+ if (done) {
+ controller.close();
+ return;
+ }
+
+ const text = trimStartOfStream(value ?? "");
+ if (text) {
+ controller.enqueue(text);
+ }
+ },
+ });
+}
+
+export function LlamaIndexStream(
+ res: AsyncGenerator,
+ callbacks?: AIStreamCallbacksAndOptions,
+): ReadableStream {
+ return createParser(res)
+ .pipeThrough(createCallbacksTransformer(callbacks))
+ .pipeThrough(
+ createStreamDataTransformer(callbacks?.experimental_streamData),
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
new file mode 100644
index 0000000000..989a5fec48
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -0,0 +1,49 @@
+import { Message, StreamingTextResponse } from "ai";
+import { OpenAI } from "llamaindex";
+import { NextRequest, NextResponse } from "next/server";
+import { createChatEngine } from "./engine";
+import { LlamaIndexStream } from "./llamaindex-stream";
+
+export const runtime = "nodejs";
+export const dynamic = "force-dynamic";
+
+export async function POST(request: NextRequest) {
+ try {
+ const body = await request.json();
+ const { messages }: { messages: Message[] } = body;
+ const lastMessage = messages.pop();
+ if (!messages || !lastMessage || lastMessage.role !== "user") {
+ return NextResponse.json(
+ {
+ error:
+ "messages are required in the request body and the last message must be from the user",
+ },
+ { status: 400 },
+ );
+ }
+
+ const llm = new OpenAI({
+ model: "gpt-3.5-turbo",
+ });
+
+ const chatEngine = await createChatEngine(llm);
+
+ const response = await chatEngine.chat(lastMessage.content, messages, true);
+
+ // Transform the response into a readable stream
+ const stream = LlamaIndexStream(response);
+
+ // Return a StreamingTextResponse, which can be consumed by the client
+ return new StreamingTextResponse(stream);
+ } catch (error) {
+ console.error("[LlamaIndex]", error);
+ return NextResponse.json(
+ {
+ error: (error as Error).message,
+ },
+ {
+ status: 500,
+ },
+ );
+ }
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
new file mode 100644
index 0000000000..04098fcdf4
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import { useChat } from "ai/react";
+import { ChatInput, ChatMessages } from "./ui/chat";
+
+export default function ChatSection() {
+ const {
+ messages,
+ input,
+ isLoading,
+ handleSubmit,
+ handleInputChange,
+ reload,
+ stop,
+ } = useChat({ api: process.env.NEXT_PUBLIC_CHAT_API });
+
+ return (
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/header.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/header.tsx
new file mode 100644
index 0000000000..2b0e488f76
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/header.tsx
@@ -0,0 +1,28 @@
+import Image from "next/image";
+
+export default function Header() {
+ return (
+
+
+ Get started by editing
+ app/page.tsx
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-avatar.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-avatar.tsx
new file mode 100644
index 0000000000..cd241104e4
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-avatar.tsx
@@ -0,0 +1,34 @@
+"use client";
+
+import Image from "next/image";
+import { Message } from "./chat-messages";
+
+export default function ChatAvatar(message: Message) {
+ if (message.role === "user") {
+ return (
+
+ );
+ }
+
+ return (
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx
new file mode 100644
index 0000000000..3eb979b027
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx
@@ -0,0 +1,42 @@
+"use client";
+
+export interface ChatInputProps {
+ /** The current value of the input */
+ input?: string;
+ /** An input/textarea-ready onChange handler to control the value of the input */
+ handleInputChange?: (
+ e:
+ | React.ChangeEvent
+ | React.ChangeEvent,
+ ) => void;
+ /** Form submission handler to automatically reset input and append a user message */
+ handleSubmit: (e: React.FormEvent) => void;
+ isLoading: boolean;
+}
+
+export default function ChatInput(props: ChatInputProps) {
+ return (
+ <>
+
+ >
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-item.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-item.tsx
new file mode 100644
index 0000000000..2244f729a8
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-item.tsx
@@ -0,0 +1,13 @@
+"use client";
+
+import ChatAvatar from "./chat-avatar";
+import { Message } from "./chat-messages";
+
+export default function ChatItem(message: Message) {
+ return (
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx
new file mode 100644
index 0000000000..0e97839401
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx
@@ -0,0 +1,48 @@
+"use client";
+
+import { useEffect, useRef } from "react";
+import ChatItem from "./chat-item";
+
+export interface Message {
+ id: string;
+ content: string;
+ role: string;
+}
+
+export default function ChatMessages({
+ messages,
+ isLoading,
+ reload,
+ stop,
+}: {
+ messages: Message[];
+ isLoading?: boolean;
+ stop?: () => void;
+ reload?: () => void;
+}) {
+ const scrollableChatContainerRef = useRef(null);
+
+ const scrollToBottom = () => {
+ if (scrollableChatContainerRef.current) {
+ scrollableChatContainerRef.current.scrollTop =
+ scrollableChatContainerRef.current.scrollHeight;
+ }
+ };
+
+ useEffect(() => {
+ scrollToBottom();
+ }, [messages.length]);
+
+ return (
+
+
+ {messages.map((m: Message) => (
+
+ ))}
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
new file mode 100644
index 0000000000..4ccc54926f
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
@@ -0,0 +1,6 @@
+import ChatInput from "./chat-input";
+import ChatMessages from "./chat-messages";
+
+export type { ChatInputProps } from "./chat-input";
+export type { Message } from "./chat-messages";
+export { ChatMessages, ChatInput };
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/favicon.ico b/packages/create-llama/templates/types/streaming/nextjs/app/favicon.ico
new file mode 100644
index 0000000000..a1eaef62f2
Binary files /dev/null and b/packages/create-llama/templates/types/streaming/nextjs/app/favicon.ico differ
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/globals.css b/packages/create-llama/templates/types/streaming/nextjs/app/globals.css
new file mode 100644
index 0000000000..09b85ed2c9
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/globals.css
@@ -0,0 +1,94 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+@layer base {
+ :root {
+ --background: 0 0% 100%;
+ --foreground: 222.2 47.4% 11.2%;
+
+ --muted: 210 40% 96.1%;
+ --muted-foreground: 215.4 16.3% 46.9%;
+
+ --popover: 0 0% 100%;
+ --popover-foreground: 222.2 47.4% 11.2%;
+
+ --border: 214.3 31.8% 91.4%;
+ --input: 214.3 31.8% 91.4%;
+
+ --card: 0 0% 100%;
+ --card-foreground: 222.2 47.4% 11.2%;
+
+ --primary: 222.2 47.4% 11.2%;
+ --primary-foreground: 210 40% 98%;
+
+ --secondary: 210 40% 96.1%;
+ --secondary-foreground: 222.2 47.4% 11.2%;
+
+ --accent: 210 40% 96.1%;
+ --accent-foreground: 222.2 47.4% 11.2%;
+
+ --destructive: 0 100% 50%;
+ --destructive-foreground: 210 40% 98%;
+
+ --ring: 215 20.2% 65.1%;
+
+ --radius: 0.5rem;
+ }
+
+ .dark {
+ --background: 224 71% 4%;
+ --foreground: 213 31% 91%;
+
+ --muted: 223 47% 11%;
+ --muted-foreground: 215.4 16.3% 56.9%;
+
+ --accent: 216 34% 17%;
+ --accent-foreground: 210 40% 98%;
+
+ --popover: 224 71% 4%;
+ --popover-foreground: 215 20.2% 65.1%;
+
+ --border: 216 34% 17%;
+ --input: 216 34% 17%;
+
+ --card: 224 71% 4%;
+ --card-foreground: 213 31% 91%;
+
+ --primary: 210 40% 98%;
+ --primary-foreground: 222.2 47.4% 1.2%;
+
+ --secondary: 222.2 47.4% 11.2%;
+ --secondary-foreground: 210 40% 98%;
+
+ --destructive: 0 63% 31%;
+ --destructive-foreground: 210 40% 98%;
+
+ --ring: 216 34% 17%;
+
+ --radius: 0.5rem;
+ }
+}
+
+@layer base {
+ * {
+ @apply border-border;
+ }
+ body {
+ @apply bg-background text-foreground;
+ font-feature-settings:
+ "rlig" 1,
+ "calt" 1;
+ }
+ .background-gradient {
+ background-color: #fff;
+ background-image: radial-gradient(
+ at 21% 11%,
+ rgba(186, 186, 233, 0.53) 0,
+ transparent 50%
+ ),
+ radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
+ radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
+ radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
+ }
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/layout.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/layout.tsx
new file mode 100644
index 0000000000..fb09770627
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/layout.tsx
@@ -0,0 +1,22 @@
+import type { Metadata } from "next";
+import { Inter } from "next/font/google";
+import "./globals.css";
+
+const inter = Inter({ subsets: ["latin"] });
+
+export const metadata: Metadata = {
+ title: "Create Llama App",
+ description: "Generated by create-llama",
+};
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/page.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/page.tsx
new file mode 100644
index 0000000000..ef00262b4a
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/page.tsx
@@ -0,0 +1,11 @@
+import Header from "@/app/components/header";
+import ChatSection from "./components/chat-section";
+
+export default function Home() {
+ return (
+
+
+
+
+ );
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/eslintrc.json b/packages/create-llama/templates/types/streaming/nextjs/eslintrc.json
new file mode 100644
index 0000000000..bffb357a71
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/eslintrc.json
@@ -0,0 +1,3 @@
+{
+ "extends": "next/core-web-vitals"
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/gitignore b/packages/create-llama/templates/types/streaming/nextjs/gitignore
new file mode 100644
index 0000000000..8f322f0d8f
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/gitignore
@@ -0,0 +1,35 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/packages/create-llama/templates/types/streaming/nextjs/next-env.d.ts b/packages/create-llama/templates/types/streaming/nextjs/next-env.d.ts
new file mode 100644
index 0000000000..4f11a03dc6
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/next-env.d.ts
@@ -0,0 +1,5 @@
+///
+///
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/basic-features/typescript for more information.
diff --git a/packages/create-llama/templates/types/streaming/nextjs/next.config.js b/packages/create-llama/templates/types/streaming/nextjs/next.config.js
new file mode 100644
index 0000000000..0b2c2bf173
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/next.config.js
@@ -0,0 +1,8 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {
+ experimental: {
+ serverComponentsExternalPackages: ["llamaindex"],
+ },
+}
+
+module.exports = nextConfig
diff --git a/packages/create-llama/templates/types/streaming/nextjs/package.json b/packages/create-llama/templates/types/streaming/nextjs/package.json
new file mode 100644
index 0000000000..e9f23201d8
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/package.json
@@ -0,0 +1,28 @@
+{
+ "name": "llama-index-nextjs-streaming",
+ "version": "1.0.0",
+ "scripts": {
+ "dev": "next dev",
+ "build": "next build",
+ "start": "next start",
+ "lint": "next lint"
+ },
+ "dependencies": {
+ "ai": "^2",
+ "llamaindex": "0.0.31",
+ "next": "^13",
+ "react": "^18",
+ "react-dom": "^18"
+ },
+ "devDependencies": {
+ "@types/node": "^20",
+ "@types/react": "^18",
+ "@types/react-dom": "^18",
+ "autoprefixer": "^10",
+ "eslint": "^8",
+ "eslint-config-next": "^13",
+ "postcss": "^8",
+ "tailwindcss": "^3",
+ "typescript": "^5"
+ }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/types/streaming/nextjs/postcss.config.js b/packages/create-llama/templates/types/streaming/nextjs/postcss.config.js
new file mode 100644
index 0000000000..33ad091d26
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/postcss.config.js
@@ -0,0 +1,6 @@
+module.exports = {
+ plugins: {
+ tailwindcss: {},
+ autoprefixer: {},
+ },
+}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/public/llama.png b/packages/create-llama/templates/types/streaming/nextjs/public/llama.png
new file mode 100644
index 0000000000..d4efba3b81
Binary files /dev/null and b/packages/create-llama/templates/types/streaming/nextjs/public/llama.png differ
diff --git a/packages/create-llama/templates/types/streaming/nextjs/tailwind.config.ts b/packages/create-llama/templates/types/streaming/nextjs/tailwind.config.ts
new file mode 100644
index 0000000000..aa5580affa
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/tailwind.config.ts
@@ -0,0 +1,78 @@
+import type { Config } from "tailwindcss";
+import { fontFamily } from "tailwindcss/defaultTheme";
+
+const config: Config = {
+ darkMode: ["class"],
+ content: ["app/**/*.{ts,tsx}", "components/**/*.{ts,tsx}"],
+ theme: {
+ container: {
+ center: true,
+ padding: "2rem",
+ screens: {
+ "2xl": "1400px",
+ },
+ },
+ extend: {
+ colors: {
+ border: "hsl(var(--border))",
+ input: "hsl(var(--input))",
+ ring: "hsl(var(--ring))",
+ background: "hsl(var(--background))",
+ foreground: "hsl(var(--foreground))",
+ primary: {
+ DEFAULT: "hsl(var(--primary))",
+ foreground: "hsl(var(--primary-foreground))",
+ },
+ secondary: {
+ DEFAULT: "hsl(var(--secondary))",
+ foreground: "hsl(var(--secondary-foreground))",
+ },
+ destructive: {
+ DEFAULT: "hsl(var(--destructive) / )",
+ foreground: "hsl(var(--destructive-foreground) / )",
+ },
+ muted: {
+ DEFAULT: "hsl(var(--muted))",
+ foreground: "hsl(var(--muted-foreground))",
+ },
+ accent: {
+ DEFAULT: "hsl(var(--accent))",
+ foreground: "hsl(var(--accent-foreground))",
+ },
+ popover: {
+ DEFAULT: "hsl(var(--popover))",
+ foreground: "hsl(var(--popover-foreground))",
+ },
+ card: {
+ DEFAULT: "hsl(var(--card))",
+ foreground: "hsl(var(--card-foreground))",
+ },
+ },
+ borderRadius: {
+ xl: `calc(var(--radius) + 4px)`,
+ lg: `var(--radius)`,
+ md: `calc(var(--radius) - 2px)`,
+ sm: "calc(var(--radius) - 4px)",
+ },
+ fontFamily: {
+ sans: ["var(--font-sans)", ...fontFamily.sans],
+ },
+ keyframes: {
+ "accordion-down": {
+ from: { height: "0" },
+ to: { height: "var(--radix-accordion-content-height)" },
+ },
+ "accordion-up": {
+ from: { height: "var(--radix-accordion-content-height)" },
+ to: { height: "0" },
+ },
+ },
+ animation: {
+ "accordion-down": "accordion-down 0.2s ease-out",
+ "accordion-up": "accordion-up 0.2s ease-out",
+ },
+ },
+ },
+ plugins: [],
+};
+export default config;
diff --git a/packages/create-llama/templates/types/streaming/nextjs/tsconfig.json b/packages/create-llama/templates/types/streaming/nextjs/tsconfig.json
new file mode 100644
index 0000000000..c714696378
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/tsconfig.json
@@ -0,0 +1,27 @@
+{
+ "compilerOptions": {
+ "target": "es5",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": true,
+ "skipLibCheck": true,
+ "strict": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "bundler",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "incremental": true,
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "paths": {
+ "@/*": ["./*"]
+ }
+ },
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/packages/create-llama/tsconfig.json b/packages/create-llama/tsconfig.json
new file mode 100644
index 0000000000..e4edad9e12
--- /dev/null
+++ b/packages/create-llama/tsconfig.json
@@ -0,0 +1,11 @@
+{
+ "compilerOptions": {
+ "target": "es2019",
+ "moduleResolution": "node",
+ "strict": true,
+ "resolveJsonModule": true,
+ "esModuleInterop": true,
+ "skipLibCheck": false
+ },
+ "exclude": ["templates", "dist"]
+}
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index a4d327e57c..1423212575 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -35,6 +35,10 @@ module.exports = {
"NOTION_TOKEN",
"MONGODB_URI",
+
+ "https_proxy",
+ "npm_config_user_agent",
+ "NEXT_PUBLIC_CHAT_API",
],
},
],
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 3b0bcbb70c..1f72896b00 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -18,7 +18,7 @@ importers:
devDependencies:
'@turbo/gen':
specifier: ^1.10.16
- version: 1.10.16(@types/node@20.9.0)(typescript@5.2.2)
+ version: 1.10.16(@types/node@18.18.8)(typescript@5.2.2)
'@types/jest':
specifier: ^29.5.6
version: 29.5.6
@@ -33,7 +33,7 @@ importers:
version: 8.0.3
jest:
specifier: ^29.7.0
- version: 29.7.0(@types/node@20.9.0)
+ version: 29.7.0(@types/node@18.18.8)
prettier:
specifier: ^3.0.3
version: 3.0.3
@@ -203,6 +203,75 @@ importers:
specifier: ^5.2.2
version: 5.2.2
+ packages/create-llama:
+ devDependencies:
+ '@types/async-retry':
+ specifier: 1.4.2
+ version: 1.4.2
+ '@types/ci-info':
+ specifier: 2.0.0
+ version: 2.0.0
+ '@types/cross-spawn':
+ specifier: 6.0.0
+ version: 6.0.0
+ '@types/node':
+ specifier: ^20.2.5
+ version: 20.8.10
+ '@types/prompts':
+ specifier: 2.0.1
+ version: 2.0.1
+ '@types/tar':
+ specifier: 6.1.5
+ version: 6.1.5
+ '@types/validate-npm-package-name':
+ specifier: 3.0.0
+ version: 3.0.0
+ '@vercel/ncc':
+ specifier: 0.34.0
+ version: 0.34.0
+ async-retry:
+ specifier: 1.3.1
+ version: 1.3.1
+ async-sema:
+ specifier: 3.0.1
+ version: 3.0.1
+ ci-info:
+ specifier: watson/ci-info#f43f6a1cefff47fb361c88cf4b943fdbcaafe540
+ version: github.com/watson/ci-info/f43f6a1cefff47fb361c88cf4b943fdbcaafe540
+ commander:
+ specifier: 2.20.0
+ version: 2.20.0
+ conf:
+ specifier: 10.2.0
+ version: 10.2.0
+ cross-spawn:
+ specifier: 7.0.3
+ version: 7.0.3
+ fast-glob:
+ specifier: 3.3.1
+ version: 3.3.1
+ got:
+ specifier: 10.7.0
+ version: 10.7.0
+ picocolors:
+ specifier: 1.0.0
+ version: 1.0.0
+ prompts:
+ specifier: 2.1.0
+ version: 2.1.0
+ tar:
+ specifier: 6.1.15
+ version: 6.1.15
+ terminal-link:
+ specifier: ^3.0.0
+ version: 3.0.0
+ update-check:
+ specifier: 1.5.4
+ version: 1.5.4
+ validate-npm-package-name:
+ specifier: 3.0.0
+ version: 3.0.0
+
packages/eslint-config-custom:
dependencies:
eslint-config-next:
@@ -3127,7 +3196,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
jest-message-util: 29.7.0
jest-util: 29.7.0
@@ -3148,14 +3217,14 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
ansi-escapes: 4.3.2
chalk: 4.1.2
ci-info: 3.9.0
exit: 0.1.2
graceful-fs: 4.2.11
jest-changed-files: 29.7.0
- jest-config: 29.7.0(@types/node@20.8.10)
+ jest-config: 29.7.0(@types/node@18.18.8)
jest-haste-map: 29.7.0
jest-message-util: 29.7.0
jest-regex-util: 29.6.3
@@ -3183,7 +3252,7 @@ packages:
dependencies:
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
jest-mock: 29.7.0
dev: true
@@ -3210,7 +3279,7 @@ packages:
dependencies:
'@jest/types': 29.6.3
'@sinonjs/fake-timers': 10.3.0
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
jest-message-util: 29.7.0
jest-mock: 29.7.0
jest-util: 29.7.0
@@ -3243,7 +3312,7 @@ packages:
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.19
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
collect-v8-coverage: 1.0.2
exit: 0.1.2
@@ -3330,7 +3399,7 @@ packages:
'@jest/schemas': 29.6.3
'@types/istanbul-lib-coverage': 2.0.4
'@types/istanbul-reports': 3.0.2
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
'@types/yargs': 17.0.28
chalk: 4.1.2
@@ -3621,6 +3690,11 @@ packages:
engines: {node: '>=6'}
dev: false
+ /@sindresorhus/is@2.1.1:
+ resolution: {integrity: sha512-/aPsuoj/1Dw/kzhkgz+ES6TxG0zfTMGLwuK2ZG00k/iJzYHTLCE8mVU8EPqEOp/lmxPoq1C1C9RYToRKb2KEfg==}
+ engines: {node: '>=10'}
+ dev: true
+
/@sinonjs/commons@3.0.0:
resolution: {integrity: sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==}
dependencies:
@@ -3808,6 +3882,13 @@ packages:
defer-to-connect: 1.1.3
dev: false
+ /@szmarczak/http-timer@4.0.6:
+ resolution: {integrity: sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==}
+ engines: {node: '>=10'}
+ dependencies:
+ defer-to-connect: 2.0.1
+ dev: true
+
/@tootallnate/quickjs-emscripten@0.23.0:
resolution: {integrity: sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==}
dev: true
@@ -3837,7 +3918,7 @@ packages:
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
dev: true
- /@turbo/gen@1.10.16(@types/node@20.9.0)(typescript@5.2.2):
+ /@turbo/gen@1.10.16(@types/node@18.18.8)(typescript@5.2.2):
resolution: {integrity: sha512-PzyluADjVuy5OcIi+/aRcD70OElQpRVRDdfZ9fH8G5Fv75lQcNrjd1bBGKmhjSw+g+eTEkXMGnY7s6gsCYjYTQ==}
hasBin: true
dependencies:
@@ -3849,7 +3930,7 @@ packages:
minimatch: 9.0.3
node-plop: 0.26.3
proxy-agent: 6.3.1
- ts-node: 10.9.1(@types/node@20.9.0)(typescript@5.2.2)
+ ts-node: 10.9.1(@types/node@18.18.8)(typescript@5.2.2)
update-check: 1.5.4
validate-npm-package-name: 5.0.0
transitivePeerDependencies:
@@ -3878,6 +3959,12 @@ packages:
update-check: 1.5.4
dev: true
+ /@types/async-retry@1.4.2:
+ resolution: {integrity: sha512-GUDuJURF0YiJZ+CBjNQA0+vbP/VHlJbB0sFqkzsV7EcOPRfurVonXpXKAt3w8qIjM1TEzpz6hc6POocPvHOS3w==}
+ dependencies:
+ '@types/retry': 0.12.0
+ dev: true
+
/@types/babel__core@7.20.2:
resolution: {integrity: sha512-pNpr1T1xLUc2l3xJKuPtsEky3ybxN3m4fJkknfIpTCTfIZCDW57oAg+EfCgIIp2rvCe0Wn++/FfodDS4YXxBwA==}
dependencies:
@@ -3911,28 +3998,47 @@ packages:
resolution: {integrity: sha512-oyl4jvAfTGX9Bt6Or4H9ni1Z447/tQuxnZsytsCaExKlmJiU8sFgnIBRzJUpKwB5eWn9HuBYlUlVA74q/yN0eQ==}
dependencies:
'@types/connect': 3.4.36
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/bonjour@3.5.11:
resolution: {integrity: sha512-isGhjmBtLIxdHBDl2xGwUzEM8AOyOvWsADWq7rqirdi/ZQoHnLWErHvsThcEzTX8juDRiZtzp2Qkv5bgNh6mAg==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
+ /@types/cacheable-request@6.0.3:
+ resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==}
+ dependencies:
+ '@types/http-cache-semantics': 4.0.3
+ '@types/keyv': 3.1.4
+ '@types/node': 18.18.8
+ '@types/responselike': 1.0.1
+ dev: true
+
+ /@types/ci-info@2.0.0:
+ resolution: {integrity: sha512-5R2/MHILQLDCzTuhs1j4Qqq8AaKUf7Ma4KSSkCtc12+fMs47zfa34qhto9goxpyX00tQK1zxB885VCiawZ5Qhg==}
+ dev: true
+
/@types/connect-history-api-fallback@1.5.1:
resolution: {integrity: sha512-iaQslNbARe8fctL5Lk+DsmgWOM83lM+7FzP0eQUJs1jd3kBE8NWqBTIT2S8SqQOJjxvt2eyIjpOuYeRXq2AdMw==}
dependencies:
'@types/express-serve-static-core': 4.17.37
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/connect@3.4.36:
resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
+ /@types/cross-spawn@6.0.0:
+ resolution: {integrity: sha512-evp2ZGsFw9YKprDbg8ySgC9NA15g3YgiI8ANkGmKKvvi0P2aDGYLPxQIC5qfeKNUOe3TjABVGuah6omPRpIYhg==}
+ dependencies:
+ '@types/node': 18.18.8
+ dev: true
+
/@types/eslint-scope@3.7.5:
resolution: {integrity: sha512-JNvhIEyxVW6EoMIFIvj93ZOywYFatlpu9deeH6eSx6PE3WHYvHaQtmHmQeNw7aA81bYGBPPQqdtBm6b1SsQMmA==}
dependencies:
@@ -3969,7 +4075,7 @@ packages:
/@types/express-serve-static-core@4.17.37:
resolution: {integrity: sha512-ZohaCYTgGFcOP7u6aJOhY9uIZQgZ2vxC2yWoArY+FeDXlqeH66ZVBjgvg+RLVAS/DWNq4Ap9ZXu1+SUQiiWYMg==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
'@types/qs': 6.9.8
'@types/range-parser': 1.2.5
'@types/send': 0.17.2
@@ -3988,13 +4094,13 @@ packages:
resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==}
dependencies:
'@types/minimatch': 5.1.2
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: true
/@types/graceful-fs@4.1.7:
resolution: {integrity: sha512-MhzcwU8aUygZroVwL2jeYk6JisJrPl/oov/gsgGCue9mkgl9wjGbzReYQClxiUgFDnib9FuHqTndccKeZKxTRw==}
dependencies:
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
dev: true
/@types/hast@2.3.6:
@@ -4010,6 +4116,10 @@ packages:
resolution: {integrity: sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==}
dev: false
+ /@types/http-cache-semantics@4.0.3:
+ resolution: {integrity: sha512-V46MYLFp08Wf2mmaBhvgjStM3tPa+2GAdy/iqoX+noX1//zje2x4XmrIU0cAwyClATsTmahbtoQ2EwP7I5WSiA==}
+ dev: true
+
/@types/http-errors@2.0.2:
resolution: {integrity: sha512-lPG6KlZs88gef6aD85z3HNkztpj7w2R7HmR3gygjfXCQmsLloWNARFkMuzKiiY8FGdh1XDpgBdrSf4aKDiA7Kg==}
dev: false
@@ -4017,7 +4127,7 @@ packages:
/@types/http-proxy@1.17.12:
resolution: {integrity: sha512-kQtujO08dVtQ2wXAuSFfk9ASy3sug4+ogFR8Kd8UgP8PEuc1/G/8yjYRmp//PcDNJEUKOza/MrQu15bouEUCiw==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/inquirer@6.5.0:
@@ -4067,8 +4177,7 @@ packages:
/@types/keyv@3.1.4:
resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==}
dependencies:
- '@types/node': 20.9.0
- dev: false
+ '@types/node': 18.18.8
/@types/lodash-es@4.17.10:
resolution: {integrity: sha512-YJP+w/2khSBwbUSFdGsSqmDvmnN3cCKoPOL7Zjle6s30ZtemkkqhjVfFqGwPN7ASil5VyjE2GtyU/yqYY6mC0A==}
@@ -4104,7 +4213,7 @@ packages:
/@types/node-fetch@2.6.6:
resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==}
dependencies:
- '@types/node': 18.18.7
+ '@types/node': 18.18.8
form-data: 4.0.0
dev: false
@@ -4144,11 +4253,7 @@ packages:
resolution: {integrity: sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w==}
dependencies:
undici-types: 5.26.5
-
- /@types/node@20.9.0:
- resolution: {integrity: sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw==}
- dependencies:
- undici-types: 5.26.5
+ dev: true
/@types/normalize-package-data@2.4.2:
resolution: {integrity: sha512-lqa4UEhhv/2sjjIQgjX8B+RBjj47eo0mzGasklVJ78UKGQY1r0VpB9XHDaZZO9qzEFDdy4MrXLuEaSmPrPSe/A==}
@@ -4172,6 +4277,10 @@ packages:
resolution: {integrity: sha512-+DDIKtFsGMajapzc5A+jL9V1dpLZ5lShAd6Oq0yRu2qFHFr2hhHlZ2rkFiInXOoFSxjxGmyGdCjjHghoHj/x0w==}
dev: true
+ /@types/prompts@2.0.1:
+ resolution: {integrity: sha512-AhtMcmETelF8wFDV1ucbChKhLgsc+ytXZXkNz/nnTAMSDeqsjALknEFxi7ZtLgS/G8bV2rp90LhDW5SGACimIQ==}
+ dev: true
+
/@types/prop-types@15.7.8:
resolution: {integrity: sha512-kMpQpfZKSCBqltAJwskgePRaYRFukDkm1oItcAbC3gNELR20XIBcN9VRgg4+m8DKsTfkWeA4m4Imp4DDuWy7FQ==}
@@ -4213,17 +4322,15 @@ packages:
/@types/responselike@1.0.1:
resolution: {integrity: sha512-TiGnitEDxj2X0j+98Eqk5lv/Cij8oHd32bU4D/Yw6AOq7vvTk0gSD2GPj0G/HkvhMoVsdlhYF4yqqlyPBTM6Sg==}
dependencies:
- '@types/node': 20.9.0
- dev: false
+ '@types/node': 18.18.8
/@types/retry@0.12.0:
resolution: {integrity: sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==}
- dev: false
/@types/sax@1.2.5:
resolution: {integrity: sha512-9jWta97bBVC027/MShr3gLab8gPhKy4l6qpb+UJLF5pDm3501NvA7uvqVCW+REFtx00oTi6Cq9JzLwgq6evVgw==}
dependencies:
- '@types/node': 17.0.45
+ '@types/node': 18.18.8
dev: false
/@types/scheduler@0.16.4:
@@ -4237,7 +4344,7 @@ packages:
resolution: {integrity: sha512-aAG6yRf6r0wQ29bkS+x97BIs64ZLxeE/ARwyS6wrldMm3C1MdKwCcnnEwMC1slI8wuxJOpiUH9MioC0A0i+GJw==}
dependencies:
'@types/mime': 1.3.3
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/serve-index@1.9.2:
@@ -4251,23 +4358,30 @@ packages:
dependencies:
'@types/http-errors': 2.0.2
'@types/mime': 3.0.2
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/sockjs@0.3.34:
resolution: {integrity: sha512-R+n7qBFnm/6jinlteC9DBL5dGiDGjWAvjo4viUanpnc/dG1y7uDoacXPIQ/PQEg1fI912SMHIa014ZjRpvDw4g==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/stack-utils@2.0.1:
resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==}
dev: true
+ /@types/tar@6.1.5:
+ resolution: {integrity: sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==}
+ dependencies:
+ '@types/node': 18.18.8
+ minipass: 4.2.8
+ dev: true
+
/@types/through@0.0.31:
resolution: {integrity: sha512-LpKpmb7FGevYgXnBXYs6HWnmiFyVG07Pt1cnbgM1IhEacITTiUaBXXvOR3Y50ksaJWGSfhbEvQFivQEFGCC55w==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: true
/@types/tinycolor2@1.4.4:
@@ -4282,6 +4396,10 @@ packages:
resolution: {integrity: sha512-BT2Krtx4xaO6iwzwMFUYvWBWkV2pr37zD68Vmp1CDV196MzczBRxuEpD6Pr395HAgebC/co7hOphs53r8V7jew==}
dev: true
+ /@types/validate-npm-package-name@3.0.0:
+ resolution: {integrity: sha512-iFNNIrEaJH1lbPiyX+O/QyxSbKxrTjdNBVZGckt+iEL9So0hdZNBL68sOfHnt2txuUD8UJXvmKv/1DkgkebgUg==}
+ dev: true
+
/@types/webidl-conversions@7.0.2:
resolution: {integrity: sha512-uNv6b/uGRLlCVmelat2rA8bcVd3k/42mV2EmjhPh6JLkd35T5bgwR/t6xy7a9MWhd9sixIeBUzhBenvk3NO+DQ==}
dev: false
@@ -4296,7 +4414,7 @@ packages:
/@types/ws@8.5.6:
resolution: {integrity: sha512-8B5EO9jLVCy+B58PLHvLDuOD8DRVMgQzq8d55SjLCOn9kqGyqOvy27exVaTio1q1nX5zLu8/6N0n2ThSxOM6tg==}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
dev: false
/@types/yargs-parser@21.0.1:
@@ -4372,6 +4490,11 @@ packages:
/@ungap/structured-clone@1.2.0:
resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==}
+ /@vercel/ncc@0.34.0:
+ resolution: {integrity: sha512-G9h5ZLBJ/V57Ou9vz5hI8pda/YQX5HQszCs3AmIus3XzsmRn/0Ptic5otD3xVST8QLKk7AMk7AqpsyQGN7MZ9A==}
+ hasBin: true
+ dev: true
+
/@webassemblyjs/ast@1.11.6:
resolution: {integrity: sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==}
dependencies:
@@ -4563,7 +4686,6 @@ packages:
optional: true
dependencies:
ajv: 8.12.0
- dev: false
/ajv-keywords@3.5.2(ajv@6.12.6):
resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==}
@@ -4596,7 +4718,6 @@ packages:
json-schema-traverse: 1.0.0
require-from-string: 2.0.2
uri-js: 4.4.1
- dev: false
/algoliasearch-helper@3.14.2(algoliasearch@4.20.0):
resolution: {integrity: sha512-FjDSrjvQvJT/SKMW74nPgFpsoPUwZCzGbCqbp8HhBFfSk/OvNFxzCaCmuO0p7AWeLy1gD+muFwQEkBwcl5H4pg==}
@@ -4644,6 +4765,13 @@ packages:
type-fest: 0.21.3
dev: true
+ /ansi-escapes@5.0.0:
+ resolution: {integrity: sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==}
+ engines: {node: '>=12'}
+ dependencies:
+ type-fest: 1.4.0
+ dev: true
+
/ansi-html-community@0.0.8:
resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==}
engines: {'0': node >= 0.8.0}
@@ -4839,6 +4967,16 @@ packages:
tslib: 2.6.2
dev: true
+ /async-retry@1.3.1:
+ resolution: {integrity: sha512-aiieFW/7h3hY0Bq5d+ktDBejxuwR78vRu9hDUdR8rNhSaQ29VzPL4AoIRG7D/c7tdenwOcKvgPM6tIxB3cB6HA==}
+ dependencies:
+ retry: 0.12.0
+ dev: true
+
+ /async-sema@3.0.1:
+ resolution: {integrity: sha512-fKT2riE8EHAvJEfLJXZiATQWqZttjx1+tfgnVshCDrH8vlw4YC8aECe0B8MU184g+aVRFVgmfxFlKZKaozSrNw==}
+ dev: true
+
/asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: false
@@ -4848,6 +4986,11 @@ packages:
engines: {node: '>= 4.0.0'}
dev: false
+ /atomically@1.7.0:
+ resolution: {integrity: sha512-Xcz9l0z7y9yQ9rdDaxlmaI4uJHf/T8g9hOEzJcsEqX2SjCj4J20uK7+ldkDHMbpJDK76wF7xEIgxc/vSlsfw5w==}
+ engines: {node: '>=10.12.0'}
+ dev: true
+
/autoprefixer@10.4.16(postcss@8.4.31):
resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==}
engines: {node: ^10 || ^12 || >=14}
@@ -5313,6 +5456,10 @@ packages:
resolution: {integrity: sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==}
dev: true
+ /builtins@1.0.3:
+ resolution: {integrity: sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==}
+ dev: true
+
/builtins@5.0.1:
resolution: {integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==}
dependencies:
@@ -5358,6 +5505,14 @@ packages:
engines: {node: '>=8'}
dev: true
+ /cacheable-lookup@2.0.1:
+ resolution: {integrity: sha512-EMMbsiOTcdngM/K6gV/OxF2x0t07+vMOWxZNSCRQMjO2MY2nhZQ6OYhOOpyQrbhqsgtvKGI7hcq6xjnA92USjg==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@types/keyv': 3.1.4
+ keyv: 4.5.4
+ dev: true
+
/cacheable-request@6.1.0:
resolution: {integrity: sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==}
engines: {node: '>=8'}
@@ -5371,6 +5526,19 @@ packages:
responselike: 1.0.2
dev: false
+ /cacheable-request@7.0.4:
+ resolution: {integrity: sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==}
+ engines: {node: '>=8'}
+ dependencies:
+ clone-response: 1.0.3
+ get-stream: 5.2.0
+ http-cache-semantics: 4.1.1
+ keyv: 4.5.4
+ lowercase-keys: 2.0.0
+ normalize-url: 6.1.0
+ responselike: 2.0.1
+ dev: true
+
/call-bind@1.0.2:
resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==}
dependencies:
@@ -5555,6 +5723,11 @@ packages:
optionalDependencies:
fsevents: 2.3.3
+ /chownr@2.0.0:
+ resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==}
+ engines: {node: '>=10'}
+ dev: true
+
/chrome-trace-event@1.0.3:
resolution: {integrity: sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==}
engines: {node: '>=6.0'}
@@ -5657,7 +5830,6 @@ packages:
resolution: {integrity: sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==}
dependencies:
mimic-response: 1.0.1
- dev: false
/clone@1.0.4:
resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==}
@@ -5739,8 +5911,8 @@ packages:
engines: {node: '>=16'}
dev: false
- /commander@2.20.3:
- resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
+ /commander@2.20.0:
+ resolution: {integrity: sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==}
/commander@4.1.1:
resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
@@ -5790,6 +5962,22 @@ packages:
/concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
+ /conf@10.2.0:
+ resolution: {integrity: sha512-8fLl9F04EJqjSqH+QjITQfJF8BrOVaYr1jewVgSRAEWePfxT0sku4w2hrGQ60BC/TNLGQ2pgxNlTbWQmMPFvXg==}
+ engines: {node: '>=12'}
+ dependencies:
+ ajv: 8.12.0
+ ajv-formats: 2.1.1(ajv@8.12.0)
+ atomically: 1.7.0
+ debounce-fn: 4.0.0
+ dot-prop: 6.0.1
+ env-paths: 2.2.1
+ json-schema-typed: 7.0.3
+ onetime: 5.1.2
+ pkg-up: 3.1.0
+ semver: 7.5.4
+ dev: true
+
/configstore@5.0.1:
resolution: {integrity: sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==}
engines: {node: '>=8'}
@@ -5964,7 +6152,7 @@ packages:
sha.js: 2.4.11
dev: true
- /create-jest@29.7.0(@types/node@20.9.0):
+ /create-jest@29.7.0(@types/node@18.18.8):
resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@@ -5973,9 +6161,9 @@ packages:
chalk: 4.1.2
exit: 0.1.2
graceful-fs: 4.2.11
- jest-config: 29.7.0(@types/node@20.9.0)
+ jest-config: 29.7.0(@types/node@18.18.8)
jest-util: 29.7.0
- prompts: 2.4.2
+ prompts: 2.1.0
transitivePeerDependencies:
- '@types/node'
- babel-plugin-macros
@@ -6251,6 +6439,13 @@ packages:
engines: {node: '>= 14'}
dev: true
+ /debounce-fn@4.0.0:
+ resolution: {integrity: sha512-8pYCQiL9Xdcg0UPSD3d+0KMlOjp+KGU5EPwYddgzQ7DATsg4fuUDjQtsYLmWjnk2obnNHgV3vE2Y4jejSOJVBQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ mimic-fn: 3.1.0
+ dev: true
+
/debug@2.6.9:
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
peerDependencies:
@@ -6304,6 +6499,13 @@ packages:
mimic-response: 1.0.1
dev: false
+ /decompress-response@5.0.0:
+ resolution: {integrity: sha512-TLZWWybuxWgoW7Lykv+gq9xvzOsUjQ9tF09Tj6NSTYGMTCHNXzrPnD6Hi+TgZq19PyTAGH4Ll/NIM/eTGglnMw==}
+ engines: {node: '>=10'}
+ dependencies:
+ mimic-response: 2.1.0
+ dev: true
+
/dedent@1.5.1:
resolution: {integrity: sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg==}
peerDependencies:
@@ -6381,6 +6583,11 @@ packages:
resolution: {integrity: sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==}
dev: false
+ /defer-to-connect@2.0.1:
+ resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==}
+ engines: {node: '>=10'}
+ dev: true
+
/define-data-property@1.1.0:
resolution: {integrity: sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g==}
engines: {node: '>= 0.4'}
@@ -6678,6 +6885,13 @@ packages:
is-obj: 2.0.0
dev: false
+ /dot-prop@6.0.1:
+ resolution: {integrity: sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==}
+ engines: {node: '>=10'}
+ dependencies:
+ is-obj: 2.0.0
+ dev: true
+
/duck@0.1.12:
resolution: {integrity: sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg==}
dependencies:
@@ -6686,7 +6900,6 @@ packages:
/duplexer3@0.1.5:
resolution: {integrity: sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==}
- dev: false
/duplexer@0.1.2:
resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==}
@@ -6754,7 +6967,6 @@ packages:
resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==}
dependencies:
once: 1.4.0
- dev: false
/enhanced-resolve@5.13.0:
resolution: {integrity: sha512-eyV8f0y1+bzyfh8xAwW/WTSZpLbjhqc4ne9eGSH4Zo2ejdyiNG9pU6mf9DG8a7+Auk6MFTlNOT4Y2y/9k8GKVg==}
@@ -6788,6 +7000,11 @@ packages:
engines: {node: '>=0.12'}
dev: false
+ /env-paths@2.2.1:
+ resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
+ engines: {node: '>=6'}
+ dev: true
+
/error-ex@1.3.2:
resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
dependencies:
@@ -7119,7 +7336,7 @@ packages:
minimatch: 3.1.2
object.values: 1.1.6
resolve: 1.22.4
- semver: 6.3.0
+ semver: 6.3.1
tsconfig-paths: 3.14.2
transitivePeerDependencies:
- eslint-import-resolver-typescript
@@ -7149,7 +7366,7 @@ packages:
minimatch: 3.1.2
object.entries: 1.1.6
object.fromentries: 2.0.6
- semver: 6.3.0
+ semver: 6.3.1
dev: false
/eslint-plugin-react-hooks@4.6.0(eslint@8.53.0):
@@ -7204,7 +7421,7 @@ packages:
object.values: 1.1.6
prop-types: 15.8.1
resolve: 2.0.0-next.4
- semver: 6.3.0
+ semver: 6.3.1
string.prototype.matchall: 4.0.8
dev: false
@@ -7383,7 +7600,7 @@ packages:
resolution: {integrity: sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==}
engines: {node: '>= 0.8'}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
require-like: 0.1.2
dev: false
@@ -7664,7 +7881,6 @@ packages:
engines: {node: '>=6'}
dependencies:
locate-path: 3.0.0
- dev: false
/find-up@4.1.0:
resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==}
@@ -7834,6 +8050,13 @@ packages:
universalify: 2.0.0
dev: false
+ /fs-minipass@2.1.0:
+ resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.6
+ dev: true
+
/fs-monkey@1.0.5:
resolution: {integrity: sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew==}
dev: false
@@ -7940,7 +8163,6 @@ packages:
engines: {node: '>=8'}
dependencies:
pump: 3.0.0
- dev: false
/get-stream@6.0.1:
resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==}
@@ -8113,6 +8335,29 @@ packages:
dependencies:
get-intrinsic: 1.2.2
+ /got@10.7.0:
+ resolution: {integrity: sha512-aWTDeNw9g+XqEZNcTjMMZSy7B7yE9toWOFYip7ofFTLleJhvZwUxxTxkTpKvF+p1SAA4VHmuEy7PiHTHyq8tJg==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@sindresorhus/is': 2.1.1
+ '@szmarczak/http-timer': 4.0.6
+ '@types/cacheable-request': 6.0.3
+ '@types/keyv': 3.1.4
+ '@types/responselike': 1.0.1
+ cacheable-lookup: 2.0.1
+ cacheable-request: 7.0.4
+ decompress-response: 5.0.0
+ duplexer3: 0.1.5
+ get-stream: 5.2.0
+ lowercase-keys: 2.0.0
+ mimic-response: 2.1.0
+ p-cancelable: 2.1.1
+ p-event: 4.2.0
+ responselike: 2.0.1
+ to-readable-stream: 2.1.0
+ type-fest: 0.10.0
+ dev: true
+
/got@9.6.0:
resolution: {integrity: sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==}
engines: {node: '>=8.6'}
@@ -8440,7 +8685,6 @@ packages:
/http-cache-semantics@4.1.1:
resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==}
- dev: false
/http-deceiver@1.2.7:
resolution: {integrity: sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==}
@@ -8948,7 +9192,6 @@ packages:
/is-obj@2.0.0:
resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==}
engines: {node: '>=8'}
- dev: false
/is-path-cwd@2.2.0:
resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==}
@@ -9209,7 +9452,7 @@ packages:
'@jest/expect': 29.7.0
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
co: 4.6.0
dedent: 1.5.1
@@ -9230,7 +9473,7 @@ packages:
- supports-color
dev: true
- /jest-cli@29.7.0(@types/node@20.9.0):
+ /jest-cli@29.7.0(@types/node@18.18.8):
resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@@ -9244,10 +9487,10 @@ packages:
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
chalk: 4.1.2
- create-jest: 29.7.0(@types/node@20.9.0)
+ create-jest: 29.7.0(@types/node@18.18.8)
exit: 0.1.2
import-local: 3.1.0
- jest-config: 29.7.0(@types/node@20.9.0)
+ jest-config: 29.7.0(@types/node@18.18.8)
jest-util: 29.7.0
jest-validate: 29.7.0
yargs: 17.7.2
@@ -9258,7 +9501,7 @@ packages:
- ts-node
dev: true
- /jest-config@29.7.0(@types/node@20.8.10):
+ /jest-config@29.7.0(@types/node@18.18.8):
resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
peerDependencies:
@@ -9273,47 +9516,7 @@ packages:
'@babel/core': 7.23.0
'@jest/test-sequencer': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
- babel-jest: 29.7.0(@babel/core@7.23.0)
- chalk: 4.1.2
- ci-info: 3.9.0
- deepmerge: 4.3.1
- glob: 7.2.3
- graceful-fs: 4.2.11
- jest-circus: 29.7.0
- jest-environment-node: 29.7.0
- jest-get-type: 29.6.3
- jest-regex-util: 29.6.3
- jest-resolve: 29.7.0
- jest-runner: 29.7.0
- jest-util: 29.7.0
- jest-validate: 29.7.0
- micromatch: 4.0.5
- parse-json: 5.2.0
- pretty-format: 29.7.0
- slash: 3.0.0
- strip-json-comments: 3.1.1
- transitivePeerDependencies:
- - babel-plugin-macros
- - supports-color
- dev: true
-
- /jest-config@29.7.0(@types/node@20.9.0):
- resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==}
- engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
- peerDependencies:
- '@types/node': '*'
- ts-node: '>=9.0.0'
- peerDependenciesMeta:
- '@types/node':
- optional: true
- ts-node:
- optional: true
- dependencies:
- '@babel/core': 7.23.0
- '@jest/test-sequencer': 29.7.0
- '@jest/types': 29.6.3
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
babel-jest: 29.7.0(@babel/core@7.23.0)
chalk: 4.1.2
ci-info: 3.9.0
@@ -9373,7 +9576,7 @@ packages:
'@jest/environment': 29.7.0
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
jest-mock: 29.7.0
jest-util: 29.7.0
dev: true
@@ -9389,7 +9592,7 @@ packages:
dependencies:
'@jest/types': 29.6.3
'@types/graceful-fs': 4.1.7
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
anymatch: 3.1.3
fb-watchman: 2.0.2
graceful-fs: 4.2.11
@@ -9440,7 +9643,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
jest-util: 29.7.0
dev: true
@@ -9495,7 +9698,7 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
emittery: 0.13.1
graceful-fs: 4.2.11
@@ -9526,7 +9729,7 @@ packages:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
cjs-module-lexer: 1.2.3
collect-v8-coverage: 1.0.2
@@ -9578,7 +9781,7 @@ packages:
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
chalk: 4.1.2
ci-info: 3.9.0
graceful-fs: 4.2.11
@@ -9602,7 +9805,7 @@ packages:
dependencies:
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
- '@types/node': 20.8.10
+ '@types/node': 18.18.8
ansi-escapes: 4.3.2
chalk: 4.1.2
emittery: 0.13.1
@@ -9614,7 +9817,7 @@ packages:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
merge-stream: 2.0.0
supports-color: 8.1.1
@@ -9622,12 +9825,12 @@ packages:
resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
dependencies:
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
jest-util: 29.7.0
merge-stream: 2.0.0
supports-color: 8.1.1
- /jest@29.7.0(@types/node@20.9.0):
+ /jest@29.7.0(@types/node@18.18.8):
resolution: {integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
hasBin: true
@@ -9640,7 +9843,7 @@ packages:
'@jest/core': 29.7.0
'@jest/types': 29.6.3
import-local: 3.1.0
- jest-cli: 29.7.0(@types/node@20.9.0)
+ jest-cli: 29.7.0(@types/node@18.18.8)
transitivePeerDependencies:
- '@types/node'
- babel-plugin-macros
@@ -9714,7 +9917,10 @@ packages:
/json-schema-traverse@1.0.0:
resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
- dev: false
+
+ /json-schema-typed@7.0.3:
+ resolution: {integrity: sha512-7DE8mpG+/fVw+dTpjbxnx47TaMnDfOI1jwft9g1VybltZCduyRQPJPvc+zzKY9WPHxhPWczyFuYa6I8Mw4iU5A==}
+ dev: true
/json-stable-stringify-without-jsonify@1.0.1:
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
@@ -9881,7 +10087,6 @@ packages:
dependencies:
p-locate: 3.0.0
path-exists: 3.0.0
- dev: false
/locate-path@5.0.0:
resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
@@ -10009,7 +10214,6 @@ packages:
/lowercase-keys@2.0.0:
resolution: {integrity: sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==}
engines: {node: '>=8'}
- dev: false
/lru-cache@4.1.5:
resolution: {integrity: sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==}
@@ -10252,6 +10456,11 @@ packages:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'}
+ /mimic-fn@3.1.0:
+ resolution: {integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==}
+ engines: {node: '>=8'}
+ dev: true
+
/mimic-fn@4.0.0:
resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==}
engines: {node: '>=12'}
@@ -10260,7 +10469,11 @@ packages:
/mimic-response@1.0.1:
resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==}
engines: {node: '>=4'}
- dev: false
+
+ /mimic-response@2.1.0:
+ resolution: {integrity: sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==}
+ engines: {node: '>=8'}
+ dev: true
/min-indent@1.0.1:
resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==}
@@ -10308,6 +10521,31 @@ packages:
/minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
+ /minipass@3.3.6:
+ resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
+ engines: {node: '>=8'}
+ dependencies:
+ yallist: 4.0.0
+ dev: true
+
+ /minipass@4.2.8:
+ resolution: {integrity: sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /minipass@5.0.0:
+ resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /minizlib@2.1.2:
+ resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.6
+ yallist: 4.0.0
+ dev: true
+
/mixme@0.5.9:
resolution: {integrity: sha512-VC5fg6ySUscaWUpI4gxCBTQMH2RdUpNrk+MsbpCYtIvf9SBJdiUey4qE7BXviJsJR4nDQxCZ+3yaYNW3guz/Pw==}
engines: {node: '>= 8.0.0'}
@@ -10320,6 +10558,12 @@ packages:
minimist: 1.2.8
dev: true
+ /mkdirp@1.0.4:
+ resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
+ engines: {node: '>=10'}
+ hasBin: true
+ dev: true
+
/mongodb-connection-string-url@2.6.0:
resolution: {integrity: sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==}
dependencies:
@@ -10587,7 +10831,6 @@ packages:
/normalize-url@6.1.0:
resolution: {integrity: sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==}
engines: {node: '>=10'}
- dev: false
/notion-md-crawler@0.0.2:
resolution: {integrity: sha512-lE3/DFMrg7GSbl1sBfDuLVLyxw+yjdarPVm1JGfQ6eONEbNGgO+BdZxpwwZQ1uYeEJurAXMXb/AXT8GKYjKAyg==}
@@ -10823,6 +11066,18 @@ packages:
engines: {node: '>=6'}
dev: false
+ /p-cancelable@2.1.1:
+ resolution: {integrity: sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /p-event@4.2.0:
+ resolution: {integrity: sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ p-timeout: 3.2.0
+ dev: true
+
/p-filter@2.1.0:
resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==}
engines: {node: '>=8'}
@@ -10830,6 +11085,11 @@ packages:
p-map: 2.1.0
dev: false
+ /p-finally@1.0.0:
+ resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==}
+ engines: {node: '>=4'}
+ dev: true
+
/p-limit@2.3.0:
resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
engines: {node: '>=6'}
@@ -10847,7 +11107,6 @@ packages:
engines: {node: '>=6'}
dependencies:
p-limit: 2.3.0
- dev: false
/p-locate@4.1.0:
resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
@@ -10888,6 +11147,13 @@ packages:
retry: 0.13.1
dev: false
+ /p-timeout@3.2.0:
+ resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==}
+ engines: {node: '>=8'}
+ dependencies:
+ p-finally: 1.0.0
+ dev: true
+
/p-try@2.2.0:
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
engines: {node: '>=6'}
@@ -11036,7 +11302,6 @@ packages:
/path-exists@3.0.0:
resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==}
engines: {node: '>=4'}
- dev: false
/path-exists@4.0.0:
resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
@@ -11136,7 +11401,6 @@ packages:
engines: {node: '>=8'}
dependencies:
find-up: 3.0.0
- dev: false
/portkey-ai@0.1.16:
resolution: {integrity: sha512-EY4FRp6PZSD75Q1o1qc08DfPNTG9FnkUPN3Z1/lEvaq9iFpSO5UekcagUZaKSVhao311qjBjns+kF0rS9ht7iA==}
@@ -11615,6 +11879,62 @@ packages:
typescript: 5.2.2
dev: true
+ /prettier-plugin-tailwindcss@0.3.0(prettier-plugin-organize-imports@3.2.3)(prettier@3.0.3):
+ resolution: {integrity: sha512-009/Xqdy7UmkcTBpwlq7jsViDqXAYSOMLDrHAdTMlVZOrKfM2o9Ci7EMWTMZ7SkKBFTG04UM9F9iM2+4i6boDA==}
+ engines: {node: '>=12.17.0'}
+ peerDependencies:
+ '@ianvs/prettier-plugin-sort-imports': '*'
+ '@prettier/plugin-pug': '*'
+ '@shopify/prettier-plugin-liquid': '*'
+ '@shufo/prettier-plugin-blade': '*'
+ '@trivago/prettier-plugin-sort-imports': '*'
+ prettier: '>=2.2.0'
+ prettier-plugin-astro: '*'
+ prettier-plugin-css-order: '*'
+ prettier-plugin-import-sort: '*'
+ prettier-plugin-jsdoc: '*'
+ prettier-plugin-marko: '*'
+ prettier-plugin-organize-attributes: '*'
+ prettier-plugin-organize-imports: '*'
+ prettier-plugin-style-order: '*'
+ prettier-plugin-svelte: '*'
+ prettier-plugin-twig-melody: '*'
+ peerDependenciesMeta:
+ '@ianvs/prettier-plugin-sort-imports':
+ optional: true
+ '@prettier/plugin-pug':
+ optional: true
+ '@shopify/prettier-plugin-liquid':
+ optional: true
+ '@shufo/prettier-plugin-blade':
+ optional: true
+ '@trivago/prettier-plugin-sort-imports':
+ optional: true
+ prettier-plugin-astro:
+ optional: true
+ prettier-plugin-css-order:
+ optional: true
+ prettier-plugin-import-sort:
+ optional: true
+ prettier-plugin-jsdoc:
+ optional: true
+ prettier-plugin-marko:
+ optional: true
+ prettier-plugin-organize-attributes:
+ optional: true
+ prettier-plugin-organize-imports:
+ optional: true
+ prettier-plugin-style-order:
+ optional: true
+ prettier-plugin-svelte:
+ optional: true
+ prettier-plugin-twig-melody:
+ optional: true
+ dependencies:
+ prettier: 3.0.3
+ prettier-plugin-organize-imports: 3.2.3(prettier@3.0.3)(typescript@5.2.2)
+ dev: true
+
/prettier@2.8.8:
resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==}
engines: {node: '>=10.13.0'}
@@ -11676,12 +11996,21 @@ packages:
asap: 2.0.6
dev: false
+ /prompts@2.1.0:
+ resolution: {integrity: sha512-+x5TozgqYdOwWsQFZizE/Tra3fKvAoy037kOyU6cgz84n8f6zxngLOV4O32kTwt9FcLCxAqw0P/c8rOr9y+Gfg==}
+ engines: {node: '>= 6'}
+ dependencies:
+ kleur: 3.0.3
+ sisteransi: 1.0.5
+ dev: true
+
/prompts@2.4.2:
resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==}
engines: {node: '>= 6'}
dependencies:
kleur: 3.0.3
sisteransi: 1.0.5
+ dev: false
/prop-types@15.8.1:
resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
@@ -11744,7 +12073,6 @@ packages:
dependencies:
end-of-stream: 1.4.4
once: 1.4.0
- dev: false
/punycode@1.4.1:
resolution: {integrity: sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==}
@@ -12354,7 +12682,6 @@ packages:
/require-from-string@2.0.2:
resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
engines: {node: '>=0.10.0'}
- dev: false
/require-like@0.1.2:
resolution: {integrity: sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==}
@@ -12441,6 +12768,12 @@ packages:
lowercase-keys: 1.0.1
dev: false
+ /responselike@2.0.1:
+ resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==}
+ dependencies:
+ lowercase-keys: 2.0.0
+ dev: true
+
/restore-cursor@3.1.0:
resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==}
engines: {node: '>=8'}
@@ -12449,6 +12782,11 @@ packages:
signal-exit: 3.0.7
dev: true
+ /retry@0.12.0:
+ resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==}
+ engines: {node: '>= 4'}
+ dev: true
+
/retry@0.13.1:
resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==}
engines: {node: '>= 4'}
@@ -13346,6 +13684,14 @@ packages:
dependencies:
has-flag: 4.0.0
+ /supports-hyperlinks@2.3.0:
+ resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==}
+ engines: {node: '>=8'}
+ dependencies:
+ has-flag: 4.0.0
+ supports-color: 7.2.0
+ dev: true
+
/supports-preserve-symlinks-flag@1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
@@ -13392,11 +13738,31 @@ packages:
resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==}
engines: {node: '>=6'}
+ /tar@6.1.15:
+ resolution: {integrity: sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==}
+ engines: {node: '>=10'}
+ dependencies:
+ chownr: 2.0.0
+ fs-minipass: 2.1.0
+ minipass: 5.0.0
+ minizlib: 2.1.2
+ mkdirp: 1.0.4
+ yallist: 4.0.0
+ dev: true
+
/term-size@2.2.1:
resolution: {integrity: sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==}
engines: {node: '>=8'}
dev: false
+ /terminal-link@3.0.0:
+ resolution: {integrity: sha512-flFL3m4wuixmf6IfhFJd1YPiLiMuxEc8uHRM1buzIeZPm22Au2pDqBJQgdo7n1WfPU1ONFGv7YDwpFBmHGF6lg==}
+ engines: {node: '>=12'}
+ dependencies:
+ ansi-escapes: 5.0.0
+ supports-hyperlinks: 2.3.0
+ dev: true
+
/terser-webpack-plugin@5.3.9(webpack@5.88.2):
resolution: {integrity: sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==}
engines: {node: '>= 10.13.0'}
@@ -13451,7 +13817,7 @@ packages:
dependencies:
'@jridgewell/source-map': 0.3.5
acorn: 8.10.0
- commander: 2.20.3
+ commander: 2.20.0
source-map-support: 0.5.21
/test-exclude@6.0.0:
@@ -13542,6 +13908,11 @@ packages:
engines: {node: '>=6'}
dev: false
+ /to-readable-stream@2.1.0:
+ resolution: {integrity: sha512-o3Qa6DGg1CEXshSdvWNX2sN4QHqg03SPq7U6jPXRahlQdl5dK8oXjkU/2/sGrnOZKeGV1zLSO8qPwyKklPPE7w==}
+ engines: {node: '>=8'}
+ dev: true
+
/to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
@@ -13626,7 +13997,7 @@ packages:
'@babel/core': 7.23.2
bs-logger: 0.2.6
fast-json-stable-stringify: 2.1.0
- jest: 29.7.0(@types/node@20.9.0)
+ jest: 29.7.0(@types/node@18.18.8)
jest-util: 29.7.0
json5: 2.2.3
lodash.memoize: 4.1.2
@@ -13667,7 +14038,7 @@ packages:
yn: 3.1.1
dev: true
- /ts-node@10.9.1(@types/node@20.9.0)(typescript@5.2.2):
+ /ts-node@10.9.1(@types/node@18.18.8)(typescript@5.2.2):
resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}
hasBin: true
peerDependencies:
@@ -13686,7 +14057,7 @@ packages:
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
- '@types/node': 20.9.0
+ '@types/node': 18.18.8
acorn: 8.10.0
acorn-walk: 8.2.0
arg: 4.1.3
@@ -13852,6 +14223,11 @@ packages:
engines: {node: '>=4'}
dev: true
+ /type-fest@0.10.0:
+ resolution: {integrity: sha512-EUV9jo4sffrwlg8s0zDhP0T2WD3pru5Xi0+HTE3zTUmBaZNhfkite9PdSJwdXLwPVW0jnAHT56pZHIOYckPEiw==}
+ engines: {node: '>=8'}
+ dev: true
+
/type-fest@0.13.1:
resolution: {integrity: sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==}
engines: {node: '>=10'}
@@ -13876,6 +14252,11 @@ packages:
engines: {node: '>=8'}
dev: false
+ /type-fest@1.4.0:
+ resolution: {integrity: sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==}
+ engines: {node: '>=10'}
+ dev: true
+
/type-fest@2.19.0:
resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==}
engines: {node: '>=12.20'}
@@ -14312,6 +14693,12 @@ packages:
spdx-expression-parse: 3.0.1
dev: false
+ /validate-npm-package-name@3.0.0:
+ resolution: {integrity: sha512-M6w37eVCMMouJ9V/sdPGnC5H4uDr73/+xdq0FBLO3TFFX1+7wiUY6Es328NN+y43tmY+doUdN9g9J21vqB7iLw==}
+ dependencies:
+ builtins: 1.0.3
+ dev: true
+
/validate-npm-package-name@5.0.0:
resolution: {integrity: sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==}
engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
@@ -14925,3 +15312,9 @@ packages:
/zwitch@1.0.5:
resolution: {integrity: sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==}
dev: false
+
+ github.com/watson/ci-info/f43f6a1cefff47fb361c88cf4b943fdbcaafe540:
+ resolution: {tarball: https://codeload.github.com/watson/ci-info/tar.gz/f43f6a1cefff47fb361c88cf4b943fdbcaafe540}
+ name: ci-info
+ version: 2.0.0
+ dev: true