Skip to content

Commit

Permalink
Merge branch 'main' into himself65/20240501/improve-doc
Browse files Browse the repository at this point in the history
  • Loading branch information
himself65 authored May 3, 2024
2 parents 703a196 + 2fe2b81 commit dd555e2
Show file tree
Hide file tree
Showing 53 changed files with 3,611 additions and 1,578 deletions.
5 changes: 5 additions & 0 deletions .changeset/friendly-lizards-melt.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"llamaindex": patch
---

fix: filter with multiple filters in ChromaDB
5 changes: 5 additions & 0 deletions .changeset/silly-horses-carry.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"llamaindex": patch
---

fix: change <-> to <=> in the SELECT query
7 changes: 7 additions & 0 deletions .changeset/small-actors-explode.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"llamaindex": patch
"@llamaindex/core-test": patch
"@llamaindex/core-e2e": patch
---

fix: anthropic agent on multiple chat
26 changes: 26 additions & 0 deletions apps/docs/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,31 @@
# docs

## 0.0.12

### Patch Changes

- Updated dependencies [1dce275]
- Updated dependencies [d10533e]
- Updated dependencies [2008efe]
- Updated dependencies [5e61934]
- Updated dependencies [9e74a43]
- Updated dependencies [ee719a1]
- [email protected]

## 0.0.11

### Patch Changes

- Updated dependencies [e8c41c5]
- [email protected]

## 0.0.10

### Patch Changes

- Updated dependencies [61103b6]
- [email protected]

## 0.0.9

### Patch Changes
Expand Down
26 changes: 8 additions & 18 deletions apps/docs/blog/2024-04-26-v0.3.0.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,8 @@ export class MyAgent extends AgentRunner<MyLLM> {
// create store is a function to create a store for each task, by default it only includes `messages` and `toolOutputs`
createStore = AgentRunner.defaultCreateStore;

static taskHandler: TaskHandler<Anthropic> = async (step) => {
const { input } = step;
static taskHandler: TaskHandler<Anthropic> = async (step, enqueueOutput) => {
const { llm, stream } = step.context;
if (input) {
step.context.store.messages = [...step.context.store.messages, input];
}
// initialize the input
const response = await llm.chat({
stream,
Expand All @@ -90,27 +86,21 @@ export class MyAgent extends AgentRunner<MyLLM> {
];
// your logic here to decide whether to continue the task
const shouldContinue = Math.random(); /* <-- replace with your logic here */
enqueueOutput({
taskStep: step,
output: response,
isLast: !shouldContinue,
});
if (shouldContinue) {
const content = await someHeavyFunctionCall();
// if you want to continue the task, you can insert your new context for the next task step
step.context.store.messages = [
...step.context.store.messages,
{
content: "INSERT MY NEW DATA",
content,
role: "user",
},
];
return {
taskStep: step,
output: response,
isLast: false,
};
} else {
// if you want to end the task, you can return the response with `isLast: true`
return {
taskStep: step,
output: response,
isLast: true,
};
}
};
}
Expand Down
2 changes: 1 addition & 1 deletion apps/docs/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "docs",
"version": "0.0.9",
"version": "0.0.12",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
Expand Down
1 change: 1 addition & 0 deletions examples/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
DEBUG=llamaindex
39 changes: 39 additions & 0 deletions examples/agent/openai-task.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import { ChatResponseChunk, OpenAIAgent } from "llamaindex";
import { ReadableStream } from "node:stream/web";
import {
  getCurrentIDTool,
  getUserInfoTool,
  getWeatherTool,
} from "./utils/tools";

/**
 * Step-wise agent example: create a task on an OpenAIAgent and iterate
 * its steps. Non-final steps carry tool/function-call activity; the
 * final step streams the assistant's answer to stdout.
 */
async function main() {
  // Create an OpenAIAgent with the function tools
  const agent = new OpenAIAgent({
    tools: [getCurrentIDTool, getUserInfoTool, getWeatherTool],
  });

  // second argument `true` requests streaming output for each step
  const task = await agent.createTask(
    "What is my current address weather based on my profile?",
    true,
  );

  for await (const stepOutput of task) {
    const stream = stepOutput.output as ReadableStream<ChatResponseChunk>;
    if (stepOutput.isLast) {
      // final step: stream the model's answer to stdout
      for await (const chunk of stream) {
        process.stdout.write(chunk.delta);
      }
      process.stdout.write("\n");
    } else {
      // handling function call
      console.log("handling function call...");
      for await (const chunk of stream) {
        console.log("debug:", JSON.stringify(chunk.raw));
      }
    }
  }
}

void main()
  .then(() => {
    console.log("Done");
  })
  // surface failures (bad API key, network, …) instead of an unhandled rejection
  .catch(console.error);
2 changes: 1 addition & 1 deletion examples/agent/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async function main() {
message: "How much is 5 + 5? then divide by 2",
});

console.log(String(response));
console.log(response.response.message);
}

void main().then(() => {
Expand Down
40 changes: 40 additions & 0 deletions examples/agent/react-task.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import { ChatResponseChunk, ReActAgent } from "llamaindex";
import { ReadableStream } from "node:stream/web";
import {
  getCurrentIDTool,
  getUserInfoTool,
  getWeatherTool,
} from "./utils/tools";

/**
 * Step-wise ReAct agent example: create a task on a ReActAgent and
 * iterate its steps. Non-final steps carry tool/function-call activity;
 * the final step streams the assistant's answer to stdout.
 */
async function main() {
  // Create a ReActAgent with the function tools
  const agent = new ReActAgent({
    tools: [getCurrentIDTool, getUserInfoTool, getWeatherTool],
  });

  // second argument `true` requests streaming output for each step
  const task = await agent.createTask(
    "What is my current address weather based on my profile?",
    true,
  );

  for await (const stepOutput of task) {
    const stream = stepOutput.output as ReadableStream<ChatResponseChunk>;
    if (stepOutput.isLast) {
      // final step: stream the model's answer to stdout
      for await (const chunk of stream) {
        process.stdout.write(chunk.delta);
      }
      process.stdout.write("\n");
    } else {
      // handling function call
      console.log("handling function call...");
      for await (const chunk of stream) {
        console.log("debug:", JSON.stringify(chunk.raw));
      }
    }
    console.log("---");
  }
}

void main()
  .then(() => {
    console.log("Done");
  })
  // surface failures (bad API key, network, …) instead of an unhandled rejection
  .catch(console.error);
97 changes: 0 additions & 97 deletions examples/agent/step_wise_openai.ts

This file was deleted.

54 changes: 54 additions & 0 deletions examples/agent/utils/tools.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import { FunctionTool } from "llamaindex";

/** Handler: produce a fresh random user id (no arguments). */
function fetchCurrentId() {
  console.log("Getting user id...");
  return crypto.randomUUID();
}

/** Tool exposing {@link fetchCurrentId} to the agent. */
export const getCurrentIDTool = FunctionTool.from(fetchCurrentId, {
  name: "get_user_id",
  description: "Get a random user id",
});

/** Handler: look up a (stubbed) user profile for the given id. */
function fetchUserInfo({ userId }: { userId: string }) {
  console.log("Getting user info...", userId);
  return `Name: Alex; Address: 1234 Main St, CA; User ID: ${userId}`;
}

/** Tool exposing {@link fetchUserInfo}; requires a `userId` string. */
export const getUserInfoTool = FunctionTool.from(fetchUserInfo, {
  name: "get_user_info",
  description: "Get user info",
  parameters: {
    type: "object",
    properties: {
      userId: {
        type: "string",
        description: "The user id",
      },
    },
    required: ["userId"],
  },
});

/** Handler: return a (stubbed) weather report for an address. */
function fetchWeather({ address }: { address: string }) {
  console.log("Getting weather...", address);
  return `${address} is in a sunny location!`;
}

/** Tool exposing {@link fetchWeather}; requires an `address` string. */
export const getWeatherTool = FunctionTool.from(fetchWeather, {
  name: "get_weather",
  description: "Get the current weather for a location",
  parameters: {
    type: "object",
    properties: {
      address: {
        type: "string",
        description: "The address",
      },
    },
    required: ["address"],
  },
});
22 changes: 22 additions & 0 deletions examples/huggingface/chat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { HuggingFaceInferenceAPI } from "llamaindex";

/**
 * Minimal chat example against the Hugging Face Inference API.
 * Requires the HUGGING_FACE_TOKEN environment variable.
 */
(async () => {
  if (!process.env.HUGGING_FACE_TOKEN) {
    throw new Error("Please set the HUGGING_FACE_TOKEN environment variable.");
  }
  const hf = new HuggingFaceInferenceAPI({
    accessToken: process.env.HUGGING_FACE_TOKEN,
    model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
  });
  const result = await hf.chat({
    messages: [
      { content: "You want to talk in rhymes.", role: "system" },
      {
        content:
          "How much wood would a woodchuck chuck if a woodchuck could chuck wood?",
        role: "user",
      },
    ],
  });
  console.log(result);
  // catch so a missing token or API failure is reported, not an unhandled rejection
})().catch(console.error);
File renamed without changes.
Loading

0 comments on commit dd555e2

Please sign in to comment.