
Commit

Revert "Migrating to use @deco/deco from jsr (#845)"
This reverts commit 5f5beca.
guitavano committed Sep 11, 2024
1 parent 42c6aab commit 0aef1aa
Showing 168 changed files with 2,845 additions and 1,147 deletions.
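As the commit title indicates, the revert swaps the @deco/deco and @deco/deco/o11y JSR entrypoints introduced by #845 back to the pre-migration deep-path imports into the deco runtime. The following is a minimal TypeScript sketch of that pattern, using only specifiers that appear in the hunks below; the exact set of symbols varies per file.

// Removed by this revert (JSR entrypoints added in #845):
// import { type Resolvable, badRequest, notFound, shortcircuit } from "@deco/deco";
// import { logger, meter, ValueType } from "@deco/deco/o11y";

// Restored by this revert (deep-path imports into the deco runtime):
import { type Resolvable } from "deco/engine/core/resolver.ts";
import { badRequest, notFound } from "deco/mod.ts";
import { shortcircuit } from "deco/engine/errors.ts";
import { logger } from "deco/observability/otel/config.ts";
import { meter } from "deco/observability/otel/metrics.ts";
import { ValueType } from "deco/deps.ts";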
15 changes: 11 additions & 4 deletions admin/types.ts
@@ -1,41 +1,48 @@
import { type Resolvable } from "deco/engine/core/resolver.ts";
import { type fjp } from "./deps.ts";
import { type Resolvable } from "@deco/deco";

export interface Pagination<T> {
data: T[];
page: number;
pageSize: number;
total: number;
}

export interface PatchState {
type: "patch-state";
payload: fjp.Operation[];
revision: string;
}

export interface FetchState {
type: "fetch-state";
}

export interface StatePatched {
type: "state-patched";
payload: fjp.Operation[];
revision: string;
// Maybe add data and user info in here
metadata?: unknown;
}

export interface StateFetched {
type: "state-fetched";
payload: State;
}

export interface OperationFailed {
type: "operation-failed";
code: "UNAUTHORIZED" | "INTERNAL_SERVER_ERROR";
reason: string;
}
export type Acked<T> = T & {
ack: string;
};

export type Acked<T> = T & { ack: string };

export interface State {
decofile: Record<string, Resolvable>;
revision: string;
}

export type Commands = PatchState | FetchState;
export type Events = StatePatched | StateFetched | OperationFailed;
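For orientation only (not part of the diff): the Commands/Events unions above describe a small state-sync protocol, and a hypothetical consumer could narrow on the type field as sketched below. The import path and handler name are assumptions for illustration; patch application is elided.

import type { Events, State } from "./admin/types.ts"; // path assumed for illustration

// Minimal sketch of an event consumer.
function reduceState(current: State, event: Events): State {
  switch (event.type) {
    case "state-patched":
      // event.payload carries fjp.Operation[] patches; applying them is omitted here.
      return { ...current, revision: event.revision };
    case "state-fetched":
      return event.payload;
    case "operation-failed":
      console.error(`${event.code}: ${event.reason}`);
      return current;
  }
}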
12 changes: 11 additions & 1 deletion ai-assistants/actions/awsUploadImage.ts
@@ -1,38 +1,47 @@
import { logger } from "deco/observability/otel/config.ts";
import { meter } from "deco/observability/otel/metrics.ts";
import base64ToBlob from "../utils/blobConversion.ts";
import { AssistantIds } from "../types.ts";
import { ValueType } from "deco/deps.ts";
import { AppContext } from "../mod.ts";
import { logger, meter, ValueType } from "@deco/deco/o11y";

const stats = {
awsUploadImageError: meter.createCounter("assistant_aws_upload_error", {
unit: "1",
valueType: ValueType.INT,
}),
};

export interface AWSUploadImageProps {
file: string | ArrayBuffer | null;
assistantIds?: AssistantIds;
}

// TODO(ItamarRocha): Check if possible to upload straight to bucket instead of using presigned url
async function getSignedUrl(
mimetype: string,
ctx: AppContext,
): Promise<string> {
const randomID = crypto.randomUUID();
const name = `${randomID}.${mimetype.split("/")[1]}`;

// Get signed URL from S3
const s3Params = {
Bucket: ctx.assistantAwsProps?.assistantBucketName.get?.() ?? "",
Key: name,
ContentType: mimetype,
ACL: "public-read",
};

const uploadURL = await ctx.s3?.getSignedUrlPromise("putObject", s3Params);
return uploadURL as string;
}

async function uploadFileToS3(presignedUrl: string, data: Blob) {
const response = await fetch(presignedUrl, { method: "PUT", body: data });
return response;
}

// TODO(ItamarRocha): Rate limit
export default async function awsUploadImage(
awsUploadImageProps: AWSUploadImageProps,
@@ -48,6 +57,7 @@ export default async function awsUploadImage(
);
const uploadURL = await getSignedUrl(blobData.type, ctx);
const uploadResponse = await uploadFileToS3(uploadURL, blobData);

if (!uploadResponse.ok) {
stats.awsUploadImageError.add(1, {
assistantId,
50 changes: 32 additions & 18 deletions ai-assistants/actions/chat.ts
@@ -1,12 +1,15 @@
import { AppContext } from "../mod.ts";

import { badRequest, notFound } from "deco/mod.ts";
import { messageProcessorFor } from "../chat/messages.ts";
import { Notify, Queue } from "../deps.ts";
import { badRequest, notFound } from "@deco/deco";

export interface Props {
thread?: string;
assistant: string;
message?: string;
}

/**
* Processes messages from the message queue.
* @param {Queue<ChatMessage>} q - The message queue.
@@ -32,29 +35,35 @@ const process = async (
]);
}
};

export interface MessageContentText {
type: "text";
value: string;
options?: string[];
}

export interface MessageContentFile {
type: "file";
fileId: string;
}

export interface ReplyMessage {
threadId: string;
messageId: string;
type: "message" | "error";
content: Array<MessageContentText | MessageContentFile>;
role: "user" | "assistant";
}

export interface FunctionCall {
name: string;
props: unknown;
}

export interface FunctionCallReply<T> extends FunctionCall {
response: T;
}

export interface ReplyStartFunctionCall {
threadId: string;
messageId: string;
@@ -67,14 +76,17 @@ export interface ReplyFunctionCalls<T> {
type: "function_calls";
content: FunctionCallReply<T>[];
}

export type Reply<T> =
| ReplyMessage
| ReplyFunctionCalls<T>
| ReplyStartFunctionCall;

export interface ChatMessage {
text: string;
reply: <T = unknown>(reply: Reply<T>) => void;
}

/**
* Initializes a WebSocket chat connection and processes incoming messages.
* @param {Props} props - The properties for the chat session.
@@ -86,24 +98,21 @@ export default async function openChat(
props: Props,
req: Request,
ctx: AppContext,
): Promise<
Response | {
replies: Reply<unknown>[];
thread: string;
}
> {
): Promise<Response | { replies: Reply<unknown>[]; thread: string }> {
if (!props.assistant) {
notFound();
}
const assistant = ctx.assistants[props.assistant];
if (!assistant) {
notFound();
}

const threads = ctx.openAI.beta.threads;
const threadId = props.thread;
const threadPromise = threadId
? threads.retrieve(threadId)
: threads.create();

const processorPromise = assistant.then(async (aiAssistant) =>
messageProcessorFor(aiAssistant, ctx, await threadPromise)
);
@@ -119,6 +128,7 @@ export default async function openChat(
});
return { replies, thread: (await threadPromise).id };
}

const { socket, response } = Deno.upgradeWebSocket(req);
const abort = new Notify();
const messagesQ = new Queue<ChatMessage>();
@@ -128,6 +138,7 @@ export default async function openChat(
reply: (replyMsg) => socket.send(JSON.stringify(replyMsg)),
});
}

/**
* Handles the WebSocket connection on open event.
*/
@@ -145,17 +156,19 @@ export default async function openChat(
}),
);
assistant.then((aiAssistant) => {
socket.send(JSON.stringify({
isWelcomeMessage: true,
threadId: aiAssistant.threadId,
assistantId: aiAssistant.id,
type: "message",
content: [{
type: "text",
value: aiAssistant.welcomeMessage ?? "Welcome to the chat!",
}],
role: "assistant",
}));
socket.send(
JSON.stringify({
isWelcomeMessage: true,
threadId: aiAssistant.threadId,
assistantId: aiAssistant.id,
type: "message",
content: [{
type: "text",
value: aiAssistant.welcomeMessage ?? "Welcome to the chat!",
}],
role: "assistant",
}),
);
});
};
/**
@@ -164,6 +177,7 @@ export default async function openChat(
socket.onclose = () => {
abort.notifyAll();
};

/**
* Handles the WebSocket connection on message event.
* @param {MessageEvent} event - The WebSocket message event.
9 changes: 7 additions & 2 deletions ai-assistants/actions/describeImage.ts
@@ -1,7 +1,10 @@
import { logger } from "deco/observability/otel/config.ts";
import { meter } from "deco/observability/otel/metrics.ts";
import { AssistantIds } from "../types.ts";
import { ValueType } from "deco/deps.ts";
import { AppContext } from "../mod.ts";
import { logger, meter, ValueType } from "@deco/deco/o11y";
import { shortcircuit } from "@deco/deco";
import { shortcircuit } from "deco/engine/errors.ts";

const stats = {
promptTokens: meter.createHistogram("assistant_image_prompt_tokens", {
description: "Tokens used in Sales Assistant Describe Image Input - OpenAI",
@@ -17,11 +20,13 @@ const stats = {
valueType: ValueType.INT,
}),
};

export interface DescribeImageProps {
uploadURL: string;
userPrompt: string;
assistantIds?: AssistantIds;
}

// TODO(ItamarRocha): Rate limit
// TODO(@ItamarRocha): Refactor to use https://github.com/deco-cx/apps/blob/main/openai/loaders/vision.ts
export default async function describeImage(
9 changes: 8 additions & 1 deletion ai-assistants/actions/transcribeAudio.ts
@@ -1,7 +1,10 @@
import { logger } from "deco/observability/otel/config.ts";
import base64ToBlob from "../utils/blobConversion.ts";
import { meter } from "deco/observability/otel/metrics.ts";
import { AssistantIds } from "../types.ts";
import { ValueType } from "deco/deps.ts";
import { AppContext } from "../mod.ts";
import { logger, meter, ValueType } from "@deco/deco/o11y";

const stats = {
audioSize: meter.createHistogram("assistant_transcribe_audio_size", {
description:
@@ -17,11 +20,13 @@ const stats = {
},
),
};

export interface TranscribeAudioProps {
file: string | ArrayBuffer | null;
assistantIds?: AssistantIds;
audioDuration: number;
}

// TODO(ItamarRocha): Rate limit
export default async function transcribeAudio(
transcribeAudioProps: TranscribeAudioProps,
@@ -36,12 +41,14 @@ export default async function transcribeAudio(
});
throw new Error("Audio file is empty");
}

const blobData = base64ToBlob(
transcribeAudioProps.file,
"audio",
transcribeAudioProps.assistantIds,
);
const file = new File([blobData], "input.wav", { type: "audio/wav" });

stats.audioSize.record(transcribeAudioProps.audioDuration, {
assistant_id: assistantId,
});
(Diffs for the remaining changed files are not shown here.)
