Accioly/decopilot app (#874)
paccioly authored Sep 24, 2024
1 parent 180b9f3 commit 50bdd2d
Showing 20 changed files with 1,366 additions and 3 deletions.
Binary file added .DS_Store
Binary file not shown.
16 changes: 15 additions & 1 deletion anthropic/actions/code.ts
@@ -1,6 +1,17 @@
import { shortcircuit } from "@deco/deco";
import { AppContext } from "../mod.ts";
import { Anthropic } from "../deps.ts";

export const allowedModels = [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
"claude-2.1",
"claude-2.0",
"claude-instant-1.2",
] as const;

export interface Props {
/**
* @description The system prompt to be used for the AI Assistant.
@@ -13,14 +24,15 @@ export interface Props {
/**
* @description The model that will complete your prompt.
*/
- model?: Anthropic.Model;
+ model?: typeof allowedModels[number];
/**
* @description The maximum number of tokens to generate.
*
* Different models have different maximum values for this parameter. See
* [models](https://docs.anthropic.com/en/docs/models-overview) for details.
*/
max_tokens?: number;
/**
* @description Amount of randomness injected into the response (ranges from 0.0 to 1.0).
*/
temperature?: number;
}

export default async function chat(
@@ -29,6 +41,7 @@ export default async function chat(
messages,
model = "claude-3-opus-20240229",
max_tokens = 4096,
temperature = 0.0,
}: Props,
_req: Request,
ctx: AppContext,
@@ -42,6 +55,7 @@
model,
max_tokens,
messages,
temperature,
});

return msg;
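The `as const` assertion is what makes the new whitelist work at the type level: `typeof allowedModels[number]` resolves to a union of the literal model strings, so an out-of-list model id fails at compile time rather than at request time. A minimal standalone sketch of the pattern (array contents abbreviated):

```ts
// Sketch of the `as const` whitelist pattern used in code.ts.
const allowedModels = [
  "claude-3-opus-20240229",
  "claude-3-haiku-20240307",
] as const;

// Indexed access over the readonly tuple yields a union of literals:
// "claude-3-opus-20240229" | "claude-3-haiku-20240307"
type Model = typeof allowedModels[number];

const ok: Model = "claude-3-haiku-20240307"; // compiles
// const bad: Model = "gpt-4"; // type error: not in the whitelist
console.log(ok);
```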
12 changes: 10 additions & 2 deletions anthropic/actions/invoke.ts
@@ -28,6 +28,7 @@ export interface Props {
* @description The tool choice to be used for the AI Assistant.
*/
tool_choice?: Anthropic.MessageCreateParams["tool_choice"];
/**
* @description Amount of randomness injected into the response (ranges from 0.0 to 1.0).
*/
temperature?: number;
}

export default async function invoke(
@@ -38,6 +39,7 @@ export default async function invoke(
max_tokens = 4096,
availableFunctions = [],
tool_choice = { type: "auto" },
temperature = 0.0,
}: Props,
_req: Request,
ctx: AppContext,
@@ -53,10 +55,16 @@
model,
max_tokens,
messages,
- tools,
- tool_choice,
temperature,
};

if (tools?.length) {
params.tools = tools;
params.tool_choice = tool_choice;
}

console.log(tools);

try {
const msg = await ctx.anthropic.messages.create(params);
return msg;
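The refactor above attaches `tools` and `tool_choice` only when at least one tool is defined, presumably because a `tool_choice` sent without any `tools` would be rejected by the API. The guard in isolation, as a sketch with simplified stand-in types (the real code uses Anthropic's SDK types):

```ts
// Sketch of the conditional-parameter guard from invoke.ts.
interface ToolParams {
  model: string;
  tools?: { name: string }[];
  tool_choice?: { type: "auto" | "any" };
}

function withTools(
  base: ToolParams,
  tools?: { name: string }[],
): ToolParams {
  // Only attach tool fields when there is at least one tool; an empty
  // array or undefined leaves the request without tool parameters.
  if (tools?.length) {
    return { ...base, tools, tool_choice: { type: "auto" } };
  }
  return base;
}

console.log(withTools({ model: "claude-3-opus-20240229" }, []));
// -> { model: "claude-3-opus-20240229" } (no tool fields attached)
```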
1 change: 1 addition & 0 deletions deco.ts
@@ -11,6 +11,7 @@ const compatibilityApps = [{
const config = {
apps: [
app("posthog"),
app("decopilot-app"),
app("smarthint"),
app("ra-trustvox"),
app("anthropic"),
6 changes: 6 additions & 0 deletions decopilot-app/README.md
@@ -0,0 +1,6 @@
decopilot is deco.cx's LLM interface.

This app wraps various LLM clients into a comprehensive set of loaders/actions/workflows,
allowing for standardized use of different architectures integrated within deco.cx's base engine.

To better understand how to set up your API tokens and use the features in this app, follow the guide below:
122 changes: 122 additions & 0 deletions decopilot-app/actions/chain/runChain.ts
@@ -0,0 +1,122 @@
import type { AppContext } from "../../mod.ts";

import type {
Attachment,
LLMChainResponseType,
LLMResponseType,
Provider,
TextOnly,
} from "../../types.ts";

// import { LLMAgent } from "../../types.ts";
import runPrompt from "../prompt/runPrompt.ts";
import getSavedPrompts from "../../loaders/getSavedPrompts.ts";

interface Props {
/**
* @format dynamic-options
* @options decopilot-app/loaders/listAvailableChains.ts
*/
name: string;
attachments?: Attachment[];
}

export default async function action(
{ name, attachments }: Props,
_req: Request,
ctx: AppContext,
): Promise<LLMChainResponseType> {
const chain = ctx.chains.find((p) => p.name === name);

if (!chain) {
throw new Error(`Chain with name: ${name} not found`);
}

let runResponse: LLMResponseType | null = null;

const providerArray: Provider[] = [];
const modelArray: string[] = [];

let runAttachments = attachments; // Initial attachments from Props
console.log(runAttachments);

// Check the ChainType first
if (chain.chainType === "Simple") {
// Process Simple chain (only Prompts)
for (const block of chain.blockNames) {
if (block.blockType === "Prompt") {
// const selected_prompt: Prompt | null
const selected_prompt = await getSavedPrompts(
{ name: block.blockNames },
_req,
ctx,
);

if (!selected_prompt) {
throw new Error(
`Prompt with Agent Name ${block.blockNames} not found`,
);
}

// const promptData = {
// name: selected_prompt.name, // Assuming the Prompt has an agentName property
// // prompt: selected_prompt.prompt,
// runAttachments
// };

// Call runPrompt and use the output as the next attachment
const response = await runPrompt(
{ promptName: selected_prompt.name, attachments: runAttachments },
_req,
ctx,
);

console.log(response);

providerArray.push(response.provider);
modelArray.push(response.model);

const response_message = response.llm_response.map(
(resp) => resp?.message?.content,
).filter(
(content) => content !== null,
) as string[];

runAttachments = [
reassembleAttachmentToText(response_message.join("\n")),
];
console.log(runAttachments);
// Store the last response content
runResponse = response;
}
}
} else if (chain.chainType === "Complex") {
// Process Complex chain (may include LLMAgents)
// (Implement Complex chain handling here)
}

if (!runResponse) {
throw new Error(
"No valid response was received during the chain execution.",
);
}

const response: LLMChainResponseType = {
id: "chain_response_id", // Replace with an actual ID
created: Date.now(),
provider: providerArray, // Providers used at each step of the chain
model: modelArray, // Models used at each step of the chain
llm_response: runResponse.llm_response,
};

return response;
}

// Helper: reassemble raw text into the TextOnly attachment format
function reassembleAttachmentToText(message: string): TextOnly {
// Simply reassemble the message into the expected TextOnly format
return {
type: "TextOnly",
call_text: message,
};
}
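The core of the Simple chain is a fold: each prompt's text output is reassembled into a `TextOnly` attachment and fed to the next prompt. Stripped of the deco plumbing, the control flow looks roughly like this (types reduced to the minimum, step functions assumed):

```ts
// Reduced sketch of the Simple-chain control flow in runChain.ts:
// every step consumes the previous step's output as its attachment.
type TextOnly = { type: "TextOnly"; call_text: string };
type Step = (attachments: TextOnly[]) => Promise<string>;

async function runSimpleChain(
  steps: Step[],
  initial: TextOnly[],
): Promise<string> {
  let attachments = initial;
  let lastOutput = "";
  for (const step of steps) {
    lastOutput = await step(attachments);
    // Reassemble the raw text into the attachment shape the next step
    // expects, mirroring reassembleAttachmentToText above.
    attachments = [{ type: "TextOnly", call_text: lastOutput }];
  }
  return lastOutput;
}

// Usage with two toy steps:
const upper: Step = async (a) =>
  a.map((x) => x.call_text.toUpperCase()).join("\n");
const exclaim: Step = async (a) =>
  a.map((x) => x.call_text + "!").join("\n");
runSimpleChain([upper, exclaim], [{ type: "TextOnly", call_text: "hello" }])
  .then(console.log); // -> "HELLO!"
```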
56 changes: 56 additions & 0 deletions decopilot-app/actions/prompt/runPrompt.ts
@@ -0,0 +1,56 @@
// import { shortcircuit } from "@deco/deco";
import { callAntropic, callOpenAI } from "../../clients/llmClientObjects.ts";
import type { AppContext } from "../../mod.ts";
import type { Attachment, LLMResponseType, Prompt } from "../../types.ts";

interface Props {
promptName?: string;
inlinePrompt?: Prompt;
attachments?: Attachment[];
}

export default async function action(
{
promptName,
inlinePrompt,
attachments,
}: Props,
_req: Request,
ctx: AppContext,
): Promise<LLMResponseType> {
let prompt: Prompt | undefined;

if (!promptName && !inlinePrompt) {
throw new Error(`No prompt provided`);
}
if (promptName && inlinePrompt) {
throw new Error(`Only provide prompt name or inline prompt, not both.`);
}

if (promptName) {
prompt = ctx.content.find((p) => p.name === promptName);
if (!prompt) {
throw new Error(`Prompt with name: ${promptName} not found`);
}
} else if (inlinePrompt) {
prompt = inlinePrompt;
}

// Type guard to ensure 'prompt' is defined before proceeding
if (!prompt) {
throw new Error("Prompt is undefined");
}

if (prompt.provider === "Anthropic") {
return await callAntropic(prompt, ctx, attachments ?? []);
}

if (prompt.provider === "OpenAI") {
return await callOpenAI(prompt, ctx, attachments ?? []);
}
// if (prompt.provider === "Custom") {
// return await callCustomProvider(prompt, ctx, attachments);
// }

throw new Error(`Provider ${prompt.provider} is not supported`);
}
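The provider check here is a plain if-chain; as more providers land ("Custom" is already stubbed out), a dispatch table keeps the branching in one place. A sketch of that alternative under assumed, simplified types (`callAntropic`/`callOpenAI` declared as stand-ins for the real exports of llmClientObjects.ts):

```ts
// Sketch: table-driven provider dispatch as an alternative to the
// if-chain in runPrompt.ts. Types are simplified stand-ins.
type LLMCall = (
  prompt: unknown,
  ctx: unknown,
  attachments: unknown[],
) => Promise<unknown>;

declare const callAntropic: LLMCall; // stand-in for llmClientObjects.ts
declare const callOpenAI: LLMCall;

const providerCalls: Record<string, LLMCall | undefined> = {
  Anthropic: callAntropic,
  OpenAI: callOpenAI,
  // Custom: callCustomProvider, // still commented out in the source
};

async function dispatch(
  provider: string,
  prompt: unknown,
  ctx: unknown,
  attachments: unknown[] = [],
): Promise<unknown> {
  const call = providerCalls[provider];
  if (!call) throw new Error(`Provider ${provider} is not supported`);
  return await call(prompt, ctx, attachments);
}
```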
38 changes: 38 additions & 0 deletions decopilot-app/actions/prompt/runSavedPrompts.ts
@@ -0,0 +1,38 @@
// import { shortcircuit } from "@deco/deco";
import { callAntropic, callOpenAI } from "../../clients/llmClientObjects.ts";
import type { AppContext } from "../../mod.ts";
import type { Attachment, LLMResponseType } from "../../types.ts";

interface Props {
/**
* @format dynamic-options
* @options decopilot-app/loaders/listAvailablePrompts.ts
*/
called_prompt: string;
attachments?: Attachment[];
}

export default async function action(
{ called_prompt, attachments }: Props,
_req: Request,
ctx: AppContext,
): Promise<LLMResponseType> {
const prompt = ctx.content.find((p) => p.name === called_prompt);

if (!prompt) {
throw new Error(`Prompt with Name: ${called_prompt} not found`);
}

if (prompt.provider === "Anthropic") {
return await callAntropic(prompt, ctx, attachments ?? []);
}

if (prompt.provider === "OpenAI") {
return await callOpenAI(prompt, ctx, attachments ?? []);
}
// if (prompt.provider === "Custom") {
// return await callCustomProvider(prompt, ctx, attachments);
// }

throw new Error(`Provider ${prompt.provider} is not supported`);
}
