Skip to content

Commit

Permalink
feat: init @llamaindex/autotool (#819)
Browse files Browse the repository at this point in the history
  • Loading branch information
himself65 authored May 8, 2024
1 parent 1a45b44 commit b99ab05
Show file tree
Hide file tree
Showing 49 changed files with 2,957 additions and 564 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ jobs:
- name: Install dependencies
run: pnpm install
- name: Build
run: pnpm run build --filter llamaindex
run: pnpm run build
- name: Use Build For Examples
run: pnpm link ../packages/core/
working-directory: ./examples
Expand Down Expand Up @@ -105,7 +105,7 @@ jobs:
- name: Install dependencies
run: pnpm install
- name: Build llamaindex
run: pnpm run build --filter llamaindex
run: pnpm run build
- name: Build ${{ matrix.packages }}
run: pnpm run build
working-directory: packages/core/e2e/examples/${{ matrix.packages }}
Expand All @@ -124,7 +124,7 @@ jobs:
- name: Install dependencies
run: pnpm install
- name: Build
run: pnpm run build --filter llamaindex
run: pnpm run build
- name: Copy examples
run: rsync -rv --exclude=node_modules ./examples ${{ runner.temp }}
- name: Pack @llamaindex/env
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "@llamaindex/monorepo",
"private": true,
"scripts": {
"build": "turbo run build",
"build": "turbo run build --filter=\"!docs\" --filter=\"!*-test\"",
"build:release": "turbo run build lint test --filter=\"!docs\" --filter=\"!*-test\"",
"dev": "turbo run dev",
"format": "prettier --ignore-unknown --cache --check .",
Expand Down
83 changes: 83 additions & 0 deletions packages/autotool/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
# @llamaindex/autotool

> Auto-transpile your JS functions into LLM-agent-compatible tools

## Usage

First, install the package:

```shell
npm install @llamaindex/autotool
pnpm add @llamaindex/autotool
yarn add @llamaindex/autotool
```

Second, add the plugin/loader to your configuration:

### Next.js

```javascript
import { withNext } from "@llamaindex/autotool/next";

/** @type {import('next').NextConfig} */
const nextConfig = {};

export default withNext(nextConfig);
```

### Node.js

```shell
node --import @llamaindex/autotool/node ./path/to/your/script.js
```

Third, add `"use tool"` at the top of your tool file, or rename the file so it ends with `.tool.ts`.

```typescript
"use tool";

export function getWeather(city: string) {
// ...
}
// ...
```

Finally, export a chat handler function to the frontend using a `llamaindex` Agent:

```typescript
"use server";

// imports ...

export async function chatWithAI(message: string): Promise<JSX.Element> {
const agent = new OpenAIAgent({
tools: convertTools("llamaindex"),
});
const uiStream = createStreamableUI();
agent
.chat({
stream: true,
message,
})
.then(async (responseStream) => {
return responseStream.pipeTo(
new WritableStream({
start: () => {
uiStream.append("\n");
},
write: async (message) => {
uiStream.append(message.response.delta);
},
close: () => {
uiStream.done();
},
}),
);
});
return uiStream.value;
}
```

## License

MIT
12 changes: 12 additions & 0 deletions packages/autotool/examples/01_node/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"name": "@llamaindex/autotool-01-node-example",
"type": "module",
"dependencies": {
"@llamaindex/autotool": "workspace:*",
"llamaindex": "workspace:*",
"openai": "^4.33.0"
},
"scripts": {
"start": "node --import tsx --import @llamaindex/autotool/node ./src/index.ts"
}
}
11 changes: 11 additions & 0 deletions packages/autotool/examples/01_node/src/index.tool.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import { getWeather } from "./utils.js";

/**
 * Get current location
 */
export function getCurrentLocation() {
  // Static example implementation; a real tool would query a geolocation API.
  const currentCity = "London";
  console.log("Getting current location");
  return currentCity;
}

export { getWeather };
23 changes: 23 additions & 0 deletions packages/autotool/examples/01_node/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { convertTools } from "@llamaindex/autotool";
import { OpenAI } from "openai";
import "./index.tool.js";

const openai = new OpenAI();
{
  // Ask the model a question; every tool registered via `./index.tool.js`
  // is exposed to the model through `convertTools("openai")`.
  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [
      {
        role: "user",
        content: "What's my current weather?",
      },
    ],
    tools: convertTools("openai"),
    stream: false,
  });

  // `choices` may be empty; guard the index access instead of assuming [0].
  const toolCalls = response.choices[0]?.message.tool_calls ?? [];
  for (const toolCall of toolCalls) {
    // The original loop body evaluated `toolCall.function.name` as a
    // no-op expression statement; log it so the example has observable output.
    console.log(`model requested tool: ${toolCall.function.name}`);
  }
}
8 changes: 8 additions & 0 deletions packages/autotool/examples/01_node/src/utils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
/**
 * Get the weather for a city
 * @param city The city to get the weather for
 * @returns The weather for the city, e.g. "Sunny", "Rainy", etc.
 */
export function getWeather(city: string) {
  // Canned response for the example; a real tool would call a weather API.
  return "The weather in " + city + " is sunny!";
}
9 changes: 9 additions & 0 deletions packages/autotool/examples/01_node/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./lib",
"module": "node16",
"moduleResolution": "node16"
},
"include": ["./src"]
}
3 changes: 3 additions & 0 deletions packages/autotool/examples/02_nextjs/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Rename this file to `.env.local` to use environment variables locally with `next dev`
# https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables
MY_HOST="example.com"
35 changes: 35 additions & 0 deletions packages/autotool/examples/02_nextjs/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
30 changes: 30 additions & 0 deletions packages/autotool/examples/02_nextjs/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).

## Getting Started

First, install the dependencies:

```
npm install
```

Second, run the development server:

```
npm run dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.

## Learn More

To learn more about LlamaIndex, take a look at the following resources:

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).

You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
38 changes: 38 additions & 0 deletions packages/autotool/examples/02_nextjs/actions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
"use server";
import { OpenAIAgent } from "llamaindex";
// import your tools on top, that's it
import { runWithStreamableUI } from "@/context";
import "@/tool";
import { convertTools } from "@llamaindex/autotool";
import { createStreamableUI } from "ai/rsc";
import type { JSX } from "react";

/**
 * Server action: runs an agent chat for `message` and streams the response
 * into a React UI stream.
 *
 * @param message The user's chat message.
 * @returns A streamable React element that updates as response deltas arrive.
 */
export async function chatWithAI(message: string): Promise<JSX.Element> {
  // Agent wired up with every tool registered via the `@/tool` import above.
  const agent = new OpenAIAgent({
    tools: convertTools("llamaindex"),
  });
  const uiStream = createStreamableUI();
  // Deliberately not awaited: the streamable value is returned to the client
  // immediately while deltas continue to arrive in the background.
  runWithStreamableUI(uiStream, () =>
    agent
      .chat({
        stream: true,
        message,
      })
      .then(async (responseStream) => {
        return responseStream.pipeTo(
          new WritableStream({
            start: () => {
              uiStream.append("\n");
            },
            // `chunk` (renamed from `message`, which shadowed the function
            // parameter above) is one streamed agent response delta.
            write: async (chunk) => {
              uiStream.append(chunk.response.delta);
            },
            close: () => {
              uiStream.done();
            },
          }),
        );
      }),
    // Wrap in an arrow function: passing `uiStream.error` directly hands over
    // an unbound method reference, invoking it with `this` detached from the
    // stream instance.
  ).catch((error) => uiStream.error(error));
  return uiStream.value;
}
Binary file not shown.
94 changes: 94 additions & 0 deletions packages/autotool/examples/02_nextjs/app/globals.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
/* Tailwind layer entrypoints. */
@tailwind base;
@tailwind components;
@tailwind utilities;

@layer base {
  /* Light-theme design tokens. Values are space-separated HSL triplets,
     consumed by Tailwind utilities as hsl(var(--token)). */
  :root {
    --background: 0 0% 100%;
    --foreground: 222.2 47.4% 11.2%;

    --muted: 210 40% 96.1%;
    --muted-foreground: 215.4 16.3% 46.9%;

    --popover: 0 0% 100%;
    --popover-foreground: 222.2 47.4% 11.2%;

    --border: 214.3 31.8% 91.4%;
    --input: 214.3 31.8% 91.4%;

    --card: 0 0% 100%;
    --card-foreground: 222.2 47.4% 11.2%;

    --primary: 222.2 47.4% 11.2%;
    --primary-foreground: 210 40% 98%;

    --secondary: 210 40% 96.1%;
    --secondary-foreground: 222.2 47.4% 11.2%;

    --accent: 210 40% 96.1%;
    --accent-foreground: 222.2 47.4% 11.2%;

    --destructive: 0 100% 50%;
    --destructive-foreground: 210 40% 98%;

    --ring: 215 20.2% 65.1%;

    --radius: 0.5rem;
  }

  /* Dark-theme overrides, activated by a `.dark` class on an ancestor. */
  .dark {
    --background: 224 71% 4%;
    --foreground: 213 31% 91%;

    --muted: 223 47% 11%;
    --muted-foreground: 215.4 16.3% 56.9%;

    --accent: 216 34% 17%;
    --accent-foreground: 210 40% 98%;

    --popover: 224 71% 4%;
    --popover-foreground: 215 20.2% 65.1%;

    --border: 216 34% 17%;
    --input: 216 34% 17%;

    --card: 224 71% 4%;
    --card-foreground: 213 31% 91%;

    --primary: 210 40% 98%;
    --primary-foreground: 222.2 47.4% 1.2%;

    --secondary: 222.2 47.4% 11.2%;
    --secondary-foreground: 210 40% 98%;

    --destructive: 0 63% 31%;
    --destructive-foreground: 210 40% 98%;

    --ring: 216 34% 17%;

    --radius: 0.5rem;
  }
}

@layer base {
  /* Default every element's border color to the themed token. */
  * {
    @apply border-border;
  }
  body {
    @apply bg-background text-foreground;
    font-feature-settings:
      "rlig" 1,
      "calt" 1;
  }
  /* Soft multi-spot radial backdrop layered over a white base. */
  .background-gradient {
    background-color: #fff;
    background-image: radial-gradient(
        at 21% 11%,
        rgba(186, 186, 233, 0.53) 0,
        transparent 50%
      ),
      radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
      radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
      radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
  }
}
Loading

0 comments on commit b99ab05

Please sign in to comment.