-
Notifications
You must be signed in to change notification settings - Fork 85
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
20 changed files
with
1,366 additions
and
3 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
decopilot is deco.cx's LLM interface | ||
|
||
This app wraps various LLM clients into a comprehensive set of loaders/actions/workflows | ||
allowing for a standardized use of different architectures integrated within deco.cx's base engine | ||
|
||
To better understand how to set up your API tokens and use the features in this app, follow the guide below: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,122 @@ | ||
import type { AppContext } from "../../mod.ts"; | ||
|
||
import type { | ||
Attachment, | ||
LLMChainResponseType, | ||
LLMResponseType, | ||
Provider, | ||
TextOnly, | ||
} from "../../types.ts"; | ||
|
||
// import { LLMAgent } from "../../types.ts"; | ||
import runPrompt from "../prompt/runPrompt.ts"; | ||
import getSavedPrompts from "../../loaders/getSavedPrompts.ts"; | ||
|
||
/** Input for running a saved chain by name. */
interface Props {
  /**
   * Name of the chain to execute; options are populated dynamically.
   * @format dynamic-options
   * @options decopilot-app/loaders/listAvailableChains.ts
   */
  name: string;
  // Optional attachments fed into the first step of the chain.
  attachments?: Attachment[];
}
|
||
export default async function action( | ||
{ name, attachments }: Props, | ||
_req: Request, | ||
ctx: AppContext, | ||
): Promise<LLMChainResponseType> { | ||
const chain = ctx.chains.find((p) => p.name === name); | ||
|
||
if (!chain) { | ||
throw new Error(`Chain with name: ${name} not found`); | ||
} | ||
|
||
let runResponse: LLMResponseType | null = null; | ||
|
||
const providerArray: Provider[] = []; | ||
const modelArray: string[] = []; | ||
|
||
let runAttachments = attachments; // Initial attachments from Props | ||
console.log(runAttachments); | ||
|
||
// Check the ChainType first | ||
if (chain.chainType === "Simple") { | ||
// Process Simple chain (only Prompts) | ||
for (const block of chain.blockNames) { | ||
if (block.blockType === "Prompt") { | ||
// const selected_prompt: Prompt | null | ||
const selected_prompt = getSavedPrompts( | ||
{ name: block.blockNames }, | ||
_req, | ||
ctx, | ||
); | ||
|
||
if (!selected_prompt) { | ||
throw new Error( | ||
`Prompt with Agent Name ${block.blockNames} not found`, | ||
); | ||
} | ||
|
||
// const promptData = { | ||
// name: selected_prompt.name, // Assuming the Prompt has an agentName property | ||
// // prompt: selected_prompt.prompt, | ||
// runAttachments | ||
// }; | ||
|
||
// Call runPrompt and use the output as the next attachment | ||
const response = await runPrompt( | ||
{ promptName: selected_prompt.name, attachments }, | ||
_req, | ||
ctx, | ||
); | ||
|
||
console.log(response); | ||
|
||
providerArray.push(response.provider); | ||
modelArray.push(response.model); | ||
|
||
const response_message = response.llm_response.map( | ||
(resp) => resp?.message?.content, | ||
).filter( | ||
(content) => content !== null, | ||
) as string[]; | ||
|
||
runAttachments = [ | ||
reassembleAttachmentToText(response_message.join("\n")), | ||
]; | ||
console.log(runAttachments); | ||
// Store the last response content | ||
runResponse = response; | ||
} | ||
} | ||
} else if (chain.chainType === "Complex") { | ||
// Process Complex chain (may include LLMAgents) | ||
// (Implement Complex chain handling here) | ||
} | ||
|
||
if (!runResponse) { | ||
throw new Error( | ||
"No valid response was received during the chain execution.", | ||
); | ||
} | ||
|
||
const response: LLMChainResponseType = { | ||
id: "chain_response_id", // Replace with an actual ID | ||
created: Date.now(), | ||
provider: providerArray, // Example provider | ||
model: modelArray, // Example model | ||
llm_response: runResponse.llm_response, | ||
}; | ||
|
||
return response; | ||
} | ||
|
||
// Helper functions to identify the content type | ||
function reassembleAttachmentToText(message: string): TextOnly { | ||
// Simply reassemble the message into the expected TextOnly format | ||
return { | ||
type: "TextOnly", | ||
call_text: message, | ||
}; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
// import { shortcircuit } from "@deco/deco"; | ||
import { callAntropic, callOpenAI } from "../../clients/llmClientObjects.ts"; | ||
import type { AppContext } from "../../mod.ts"; | ||
import type { Attachment, LLMResponseType, Prompt } from "../../types.ts"; | ||
|
||
/**
 * Input for running a single prompt. Exactly one of `promptName` or
 * `inlinePrompt` must be provided — the action rejects both or neither.
 */
interface Props {
  // Name of a saved prompt to look up in `ctx.content`.
  promptName?: string;
  // A prompt object supplied directly, bypassing the saved-prompt lookup.
  inlinePrompt?: Prompt;
  // Optional attachments forwarded to the provider client.
  attachments?: Attachment[];
}
|
||
export default async function action( | ||
{ | ||
promptName, | ||
inlinePrompt, | ||
attachments, | ||
}: Props, | ||
_req: Request, | ||
ctx: AppContext, | ||
): Promise<LLMResponseType> { | ||
let prompt: Prompt | undefined; | ||
|
||
if (!promptName && !inlinePrompt) { | ||
throw new Error(`No prompt provided`); | ||
} | ||
if (promptName && inlinePrompt) { | ||
throw new Error(`Only provide prompt name or inline prompt, not both.`); | ||
} | ||
|
||
if (promptName) { | ||
prompt = ctx.content.find((p) => p.name === promptName); | ||
if (!prompt) { | ||
throw new Error(`Prompt with name: ${promptName} not found`); | ||
} | ||
} else if (inlinePrompt) { | ||
prompt = inlinePrompt; | ||
} | ||
|
||
// Type guard to ensure 'prompt' is defined before proceeding | ||
if (!prompt) { | ||
throw new Error("Prompt is undefined"); | ||
} | ||
|
||
if (prompt.provider === "Anthropic") { | ||
return await callAntropic(prompt, ctx, attachments ?? []); | ||
} | ||
|
||
if (prompt.provider === "OpenAI") { | ||
return await callOpenAI(prompt, ctx, attachments ?? []); | ||
} | ||
// if (prompt.provider === "Custom") { | ||
// return await callCustomProvider(prompt, ctx, attachments); | ||
// } | ||
|
||
throw new Error(`Provider ${prompt.provider} is not supported`); | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
// import { shortcircuit } from "@deco/deco"; | ||
import { callAntropic, callOpenAI } from "../../clients/llmClientObjects.ts"; | ||
import type { AppContext } from "../../mod.ts"; | ||
import type { Attachment, LLMResponseType } from "../../types.ts"; | ||
|
||
/** Input for running a saved prompt selected by name. */
interface Props {
  /**
   * Name of the saved prompt to run; options are populated dynamically.
   * @format dynamic-options
   * @options decopilot-app/loaders/listAvailablePrompts.ts
   */
  called_prompt: string;
  // Optional attachments forwarded to the provider client.
  attachments?: Attachment[];
}
|
||
export default async function action( | ||
{ called_prompt, attachments }: Props, | ||
_req: Request, | ||
ctx: AppContext, | ||
): Promise<LLMResponseType> { | ||
const prompt = ctx.content.find((p) => p.name === called_prompt); | ||
|
||
if (!prompt) { | ||
throw new Error(`Prompt with Name: ${called_prompt} not found`); | ||
} | ||
|
||
if (prompt.provider === "Anthropic") { | ||
return await callAntropic(prompt, ctx, attachments ?? []); | ||
} | ||
|
||
if (prompt.provider === "OpenAI") { | ||
return await callOpenAI(prompt, ctx, attachments ?? []); | ||
} | ||
// if (prompt.provider === "Custom") { | ||
// return await callCustomProvider(prompt, ctx, attachments); | ||
// } | ||
|
||
throw new Error(`Provider ${prompt.provider} is not supported`); | ||
} |
Oops, something went wrong.