Skip to content

Commit

Permalink
Update for breaking changes
Browse files Browse the repository at this point in the history
  • Loading branch information
mattjohnsonpint committed Jan 30, 2025
1 parent ebc33f3 commit e2c3efe
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions function-calling/api-as/assembly/tool-helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {
ToolMessage,
ResponseFormat,
CompletionMessage,
ToolChoice,
} from "@hypermode/modus-sdk-as/models/openai/chat"

/**
Expand All @@ -28,7 +29,7 @@ export function llmWithTools(
): ResponseWithLogs {
var logs: string[] = []
var final_response = ""
var tool_messages: ToolMessage[] = []
var tool_messages: ToolMessage<string>[] = []
var message: CompletionMessage | null = null
var loops: u8 = 0
// we loop until we get a response or we reach the maximum number of loops (3)
Expand Down Expand Up @@ -60,15 +61,15 @@ function getLLMResponse(
system_prompt: string,
question: string,
last_message: CompletionMessage | null = null,
tools_messages: ToolMessage[] = [],
tools_messages: ToolMessage<string>[] = [],
): CompletionMessage {
const input = model.createInput([new SystemMessage(system_prompt), new UserMessage(question)])
/*
* adding tools messages (response from tools) to the input
* first we need to add the last completion message so the LLM can match the tool messages with the tool call
*/
if (last_message != null) {
input.messages.push(last_message)
input.messages.push(last_message.toAssistantMessage())
}
for (var i = 0; i < tools_messages.length; i++) {
input.messages.push(tools_messages[i])
Expand All @@ -77,7 +78,7 @@ function getLLMResponse(
input.responseFormat = ResponseFormat.Text
input.tools = tools

input.toolChoice = "auto" // "auto", "required", or "none", or a function in json format
input.toolChoice = ToolChoice.Auto // Auto, Required, None, or Function("name")

const message = model.invoke(input).choices[0].message
return message
Expand All @@ -90,8 +91,8 @@ function getLLMResponse(
function aggregateToolsResponse(
toolCalls: ToolCall[],
toolCallBack: (toolCall: ToolCall) => string,
): ToolMessage[] {
var messages: ToolMessage[] = []
): ToolMessage<string>[] {
var messages: ToolMessage<string>[] = []
for (let i = 0; i < toolCalls.length; i++) {
const content = toolCallBack(toolCalls[i])
const toolCallResponse = new ToolMessage(content, toolCalls[i].id)
Expand Down

0 comments on commit e2c3efe

Please sign in to comment.