refactor: move settings.llm into core package (run-llama#1165)
himself65 authored Sep 9, 2024
1 parent 72d65dd commit 4998843
Showing 3 changed files with 39 additions and 8 deletions.
11 changes: 11 additions & 0 deletions packages/core/src/global/settings.ts
@@ -1,4 +1,5 @@
 import type { Tokenizer } from "@llamaindex/env";
+import type { LLM } from "../llms";
 import {
   type CallbackManager,
   getCallbackManager,
@@ -10,13 +11,23 @@ import {
   setChunkSize,
   withChunkSize,
 } from "./settings/chunk-size";
+import { getLLM, setLLM, withLLM } from "./settings/llm";
 import {
   getTokenizer,
   setTokenizer,
   withTokenizer,
 } from "./settings/tokenizer";
 
 export const Settings = {
+  get llm() {
+    return getLLM();
+  },
+  set llm(llm) {
+    setLLM(llm);
+  },
+  withLLM<Result>(llm: LLM, fn: () => Result): Result {
+    return withLLM(llm, fn);
+  },
   get tokenizer() {
     return getTokenizer();
   },
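
The hunk above gives the core Settings object an `llm` getter/setter pair plus a `withLLM` scoping helper. A minimal usage sketch (the import specifiers are assumptions, not part of this diff; any `LLM` implementation works in place of `OpenAI`):

import { OpenAI } from "llamaindex"; // assumed entry point for an LLM implementation
import { Settings } from "@llamaindex/core/global"; // assumed export path

// Set the process-wide default once, near the top of the application:
Settings.llm = new OpenAI();

// Reads go through getLLM(), which throws if no LLM was ever set:
const llm = Settings.llm;
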
23 changes: 23 additions & 0 deletions packages/core/src/global/settings/llm.ts
@@ -0,0 +1,23 @@
+import { AsyncLocalStorage } from "@llamaindex/env";
+import type { LLM } from "../../llms";
+
+const llmAsyncLocalStorage = new AsyncLocalStorage<LLM>();
+let globalLLM: LLM | undefined;
+
+export function getLLM(): LLM {
+  const currentLLM = globalLLM ?? llmAsyncLocalStorage.getStore();
+  if (!currentLLM) {
+    throw new Error(
+      "Cannot find LLM, please set `Settings.llm = ...` on the top of your code",
+    );
+  }
+  return currentLLM;
+}
+
+export function setLLM(llm: LLM): void {
+  globalLLM = llm;
+}
+
+export function withLLM<Result>(llm: LLM, fn: () => Result): Result {
+  return llmAsyncLocalStorage.run(llm, fn);
+}
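
This new module holds the actual state: a module-level slot for the global default and an AsyncLocalStorage for call-scoped overrides, with `getLLM()` consulting the global slot first and throwing when both are empty. A behavioral sketch (paths as seen from `packages/core/src/global`; `fakeLLM` is a hypothetical stand-in for an `LLM` instance):

import type { LLM } from "../llms";
import { getLLM, setLLM, withLLM } from "./settings/llm";

declare const fakeLLM: LLM; // hypothetical: any object satisfying the LLM interface

// Before anything is set, getLLM() throws:
//   Error: Cannot find LLM, please set `Settings.llm = ...` on the top of your code

// withLLM() scopes an LLM to a callback via AsyncLocalStorage.run():
withLLM(fakeLLM, () => {
  getLLM(); // fakeLLM, visible anywhere in this call tree
});

// setLLM() fills the global slot, which getLLM() checks first:
setLLM(fakeLLM);
getLLM(); // fakeLLM
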
13 changes: 5 additions & 8 deletions packages/llamaindex/src/Settings.ts
@@ -27,7 +27,6 @@ export type PromptConfig = {
 
 export interface Config {
   prompt: PromptConfig;
-  llm: LLM | null;
   promptHelper: PromptHelper | null;
   embedModel: BaseEmbedding | null;
   nodeParser: NodeParser | null;
@@ -41,12 +40,10 @@
  */
 class GlobalSettings implements Config {
   #prompt: PromptConfig = {};
-  #llm: LLM | null = null;
   #promptHelper: PromptHelper | null = null;
   #nodeParser: NodeParser | null = null;
   #chunkOverlap?: number;
 
-  #llmAsyncLocalStorage = new AsyncLocalStorage<LLM>();
   #promptHelperAsyncLocalStorage = new AsyncLocalStorage<PromptHelper>();
   #nodeParserAsyncLocalStorage = new AsyncLocalStorage<NodeParser>();
   #chunkOverlapAsyncLocalStorage = new AsyncLocalStorage<number>();
@@ -62,19 +59,19 @@ class GlobalSettings implements Config {
   }
 
   get llm(): LLM {
-    if (this.#llm === null) {
-      this.#llm = new OpenAI();
+    if (CoreSettings.llm === null) {
+      CoreSettings.llm = new OpenAI();
     }
 
-    return this.#llmAsyncLocalStorage.getStore() ?? this.#llm;
+    return CoreSettings.llm;
   }
 
   set llm(llm: LLM) {
-    this.#llm = llm;
+    CoreSettings.llm = llm;
   }
 
   withLLM<Result>(llm: LLM, fn: () => Result): Result {
-    return this.#llmAsyncLocalStorage.run(llm, fn);
+    return CoreSettings.withLLM(llm, fn);
   }
 
   get promptHelper(): PromptHelper {
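
After these deletions the legacy `GlobalSettings` class in the `llamaindex` package keeps no LLM state of its own: it lazily seeds the core slot with `new OpenAI()` on first read and forwards reads, writes, and `withLLM` scoping to `CoreSettings`. A sketch of the net effect (import specifiers assumed; `myLLM` is a hypothetical `LLM` instance):

import type { LLM } from "@llamaindex/core/llms"; // assumed export path
import { Settings } from "llamaindex";
import { Settings as CoreSettings } from "@llamaindex/core/global"; // assumed export path

declare const myLLM: LLM; // hypothetical

Settings.llm = myLLM; // writes through to the core slot
CoreSettings.llm;     // same instance: one source of truth for both packages
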
