diff --git a/.changeset/good-badgers-train.md b/.changeset/good-badgers-train.md
deleted file mode 100644
index b47a4aaf08..0000000000
--- a/.changeset/good-badgers-train.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-"llamaindex": patch
----
-
-Add HTMLReader (thanks @mtutty)
diff --git a/.changeset/heavy-beans-cry.md b/.changeset/heavy-beans-cry.md
new file mode 100644
index 0000000000..0a1cc4be7c
--- /dev/null
+++ b/.changeset/heavy-beans-cry.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+gpt-4-1106-preview and gpt-4-vision-preview from OpenAI dev day
diff --git a/.changeset/shiny-lions-clap.md b/.changeset/shiny-lions-clap.md
deleted file mode 100644
index f9fef764ec..0000000000
--- a/.changeset/shiny-lions-clap.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-"llamaindex": patch
----
-
-Add observer/filter to the SimpleDirectoryReader (thanks @mtutty)
diff --git a/apps/simple/CHANGELOG.md b/apps/simple/CHANGELOG.md
index 4e6452ff5e..56e00da09e 100644
--- a/apps/simple/CHANGELOG.md
+++ b/apps/simple/CHANGELOG.md
@@ -1,5 +1,13 @@
 # simple

+## 0.0.30
+
+### Patch Changes
+
+- Updated dependencies [90c0b83]
+- Updated dependencies [dfd22aa]
+  - llamaindex@0.0.32
+
 ## 0.0.29

 ### Patch Changes
diff --git a/apps/simple/gptllama.ts b/apps/simple/gptllama.ts
index 1ecb2a1b01..5497b5e492 100644
--- a/apps/simple/gptllama.ts
+++ b/apps/simple/gptllama.ts
@@ -6,7 +6,7 @@ import readline from "node:readline/promises";
 import { ChatMessage, LlamaDeuce, OpenAI } from "llamaindex";

 (async () => {
-  const gpt4 = new OpenAI({ model: "gpt-4", temperature: 0.9 });
+  const gpt4 = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.9 });
   const l2 = new LlamaDeuce({
     model: "Llama-2-70b-chat-4bit",
     temperature: 0.9,
diff --git a/apps/simple/openai.ts b/apps/simple/openai.ts
index 4c7856be0a..00a992abe3 100644
--- a/apps/simple/openai.ts
+++ b/apps/simple/openai.ts
@@ -1,7 +1,7 @@
 import { OpenAI } from "llamaindex";

 (async () => {
-  const llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0.0 });
+  const llm = new OpenAI({ model: "gpt-4-1106-preview", temperature: 0.1 });

   // complete api
   const response1 = await llm.complete("How are you?");
diff --git a/apps/simple/package.json b/apps/simple/package.json
index 3eed79be91..fd55ed574c 100644
--- a/apps/simple/package.json
+++ b/apps/simple/package.json
@@ -1,5 +1,5 @@
 {
-  "version": "0.0.29",
+  "version": "0.0.30",
   "private": true,
   "name": "simple",
   "dependencies": {
diff --git a/apps/simple/vision.ts b/apps/simple/vision.ts
new file mode 100644
index 0000000000..27797d00db
--- /dev/null
+++ b/apps/simple/vision.ts
@@ -0,0 +1,15 @@
+import { OpenAI } from "llamaindex";
+
+(async () => {
+  const llm = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.1 });
+
+  // complete api
+  const response1 = await llm.complete("How are you?");
+  console.log(response1.message.content);
+
+  // chat api
+  const response2 = await llm.chat([
+    { content: "Tell me a joke!", role: "user" },
+  ]);
+  console.log(response2.message.content);
+})();
diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md
index 1f667df47b..271c0120e2 100644
--- a/packages/core/CHANGELOG.md
+++ b/packages/core/CHANGELOG.md
@@ -1,5 +1,12 @@
 # llamaindex

+## 0.0.32
+
+### Patch Changes
+
+- 90c0b83: Add HTMLReader (thanks @mtutty)
+- dfd22aa: Add observer/filter to the SimpleDirectoryReader (thanks @mtutty)
+
 ## 0.0.31

 ### Patch Changes
diff --git a/packages/core/package.json b/packages/core/package.json
index 0fa85b45ee..c059232c5b 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,6 +1,6 @@
 {
   "name": "llamaindex",
-  "version": "0.0.31",
+  "version": "0.0.32",
   "license": "MIT",
   "dependencies": {
     "@anthropic-ai/sdk": "^0.8.1",
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 9d69743f57..0d659abe1a 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -101,6 +101,8 @@ export interface LLM {

 export const GPT4_MODELS = {
   "gpt-4": { contextWindow: 8192 },
+  "gpt-4-1106-preview": { contextWindow: 128000 },
+  "gpt-4-vision-preview": { contextWindow: 8192 },
   "gpt-4-32k": { contextWindow: 32768 },
 };

@@ -648,7 +650,7 @@ export class Anthropic implements LLM {
     this.callbackManager = init?.callbackManager;
   }
-
+
   tokens(messages: ChatMessage[]): number {
     throw new Error("Method not implemented.");
   }
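Context for the `GPT4_MODELS` change: this diff registers `gpt-4-1106-preview` with a 128000-token context window and `gpt-4-vision-preview` with 8192 tokens, and `apps/simple/openai.ts` / `apps/simple/vision.ts` demonstrate the `complete`/`chat` calls against them. The sketch below is a minimal illustration (not part of this PR) of choosing between the two previews based on those declared context windows; it assumes `GPT4_MODELS` is re-exported from the `llamaindex` package root, which the diff only shows for `packages/core/src/llm/LLM.ts`, so treat that import as an assumption.

```ts
// Minimal sketch, not part of this PR. Assumes GPT4_MODELS is re-exported from
// the "llamaindex" package root; the diff only shows it in src/llm/LLM.ts.
import { GPT4_MODELS, OpenAI } from "llamaindex";

(async () => {
  // Context windows as declared in this diff:
  //   gpt-4-1106-preview   -> 128000 tokens
  //   gpt-4-vision-preview ->   8192 tokens
  const needLongContext = true; // e.g. prompt larger than the 8k vision window
  const model = needLongContext ? "gpt-4-1106-preview" : "gpt-4-vision-preview";
  console.log(`${model} context window:`, GPT4_MODELS[model].contextWindow);

  const llm = new OpenAI({ model, temperature: 0.1 });
  const response = await llm.complete("Summarize the release notes above.");
  console.log(response.message.content);
})();
```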