chore: Update Recipes for Modus 0.17.x (#62)
mattjohnsonpint authored Jan 30, 2025
1 parent 603a2d1 commit a3353bc
Showing 18 changed files with 713 additions and 656 deletions.
153 changes: 80 additions & 73 deletions dgraph-101/api-as/package-lock.json

Some generated files are not rendered by default.

16 changes: 8 additions & 8 deletions dgraph-101/api-as/package.json
@@ -9,19 +9,19 @@
     "pretty:check": "prettier --check ."
   },
   "dependencies": {
-    "@hypermode/modus-sdk-as": "^0.16.0",
+    "@hypermode/modus-sdk-as": "^0.17.0",
     "assemblyscript-json": "^1.1.0",
-    "json-as": "0.9.28"
+    "json-as": "0.9.29"
   },
   "devDependencies": {
-    "@eslint/js": "^9.12.0",
+    "@eslint/js": "^9.19.0",
     "@types/eslint__js": "^8.42.3",
-    "assemblyscript": "^0.27.30",
+    "assemblyscript": "^0.27.32",
     "assemblyscript-prettier": "^3.0.1",
-    "eslint": "^9.12.0",
-    "prettier": "^3.3.3",
-    "typescript": "^5.6.3",
-    "typescript-eslint": "^8.9.0",
+    "eslint": "^9.19.0",
+    "prettier": "^3.4.2",
+    "typescript": "^5.7.3",
+    "typescript-eslint": "^8.22.0",
     "visitor-as": "^0.11.4"
   },
   "overrides": {
13 changes: 7 additions & 6 deletions function-calling/api-as/assembly/tool-helper.ts
@@ -7,6 +7,7 @@ import {
   ToolMessage,
   ResponseFormat,
   CompletionMessage,
+  ToolChoice,
 } from "@hypermode/modus-sdk-as/models/openai/chat"

 /**
@@ -28,7 +29,7 @@ export function llmWithTools(
 ): ResponseWithLogs {
   var logs: string[] = []
   var final_response = ""
-  var tool_messages: ToolMessage[] = []
+  var tool_messages: ToolMessage<string>[] = []
   var message: CompletionMessage | null = null
   var loops: u8 = 0
   // we loop until we get a response or we reach the maximum number of loops (3)
@@ -60,15 +61,15 @@ function getLLMResponse(
   system_prompt: string,
   question: string,
   last_message: CompletionMessage | null = null,
-  tools_messages: ToolMessage[] = [],
+  tools_messages: ToolMessage<string>[] = [],
 ): CompletionMessage {
   const input = model.createInput([new SystemMessage(system_prompt), new UserMessage(question)])
   /*
    * adding tools messages (response from tools) to the input
    * first we need to add the last completion message so the LLM can match the tool messages with the tool call
    */
   if (last_message != null) {
-    input.messages.push(last_message)
+    input.messages.push(last_message.toAssistantMessage())
   }
   for (var i = 0; i < tools_messages.length; i++) {
     input.messages.push(tools_messages[i])
@@ -77,7 +78,7 @@
   input.responseFormat = ResponseFormat.Text
   input.tools = tools

-  input.toolChoice = "auto" // "auto "required" or "none" or a function in json format
+  input.toolChoice = ToolChoice.Auto // Auto, Required, None, or Function("name")

   const message = model.invoke(input).choices[0].message
   return message
@@ -90,8 +91,8 @@
 function aggregateToolsResponse(
   toolCalls: ToolCall[],
   toolCallBack: (toolCall: ToolCall) => string,
-): ToolMessage[] {
-  var messages: ToolMessage[] = []
+): ToolMessage<string>[] {
+  var messages: ToolMessage<string>[] = []
   for (let i = 0; i < toolCalls.length; i++) {
     const content = toolCallBack(toolCalls[i])
     const toolCallResponse = new ToolMessage(content, toolCalls[i].id)
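
Taken together, the tool-helper.ts hunks cover the three 0.17.x API changes this commit adapts to: ToolMessage is now generic over its content type, the previous completion is re-added to the conversation via toAssistantMessage() instead of being pushed directly, and toolChoice takes a ToolChoice value rather than a raw string. The sketch below is not part of the commit; it compresses the recipe's loop into a single round trip to show the new shapes, and it assumes the OpenAIChatModel and Tool exports and the message.toolCalls field from the surrounding recipe, none of which appear in this diff.

import {
  OpenAIChatModel,
  SystemMessage,
  UserMessage,
  Tool,
  ToolCall,
  ToolMessage,
  ToolChoice,
  ResponseFormat,
  CompletionMessage,
} from "@hypermode/modus-sdk-as/models/openai/chat"

// Sketch only: one question, at most one round of tool calls.
// model, tools, and runTool are supplied by the caller, as in the recipe;
// the Tool type and message.toolCalls are assumptions not visible in this diff.
export function oneToolRound(
  model: OpenAIChatModel,
  tools: Tool[],
  question: string,
  runTool: (call: ToolCall) => string,
): string {
  const input = model.createInput([
    new SystemMessage("You are a helpful assistant."),
    new UserMessage(question),
  ])
  input.responseFormat = ResponseFormat.Text
  input.tools = tools
  input.toolChoice = ToolChoice.Auto // 0.17.x enum, no longer the string "auto"

  const message: CompletionMessage = model.invoke(input).choices[0].message
  if (message.toolCalls.length == 0) {
    return message.content // the model answered without calling a tool
  }

  // Re-add the assistant turn so tool call ids can be matched,
  // then answer each call with a generic ToolMessage<string>.
  input.messages.push(message.toAssistantMessage())
  for (let i = 0; i < message.toolCalls.length; i++) {
    const content = runTool(message.toolCalls[i])
    input.messages.push(new ToolMessage<string>(content, message.toolCalls[i].id))
  }
  return model.invoke(input).choices[0].message.content
}

Making ToolMessage generic and replacing the toolChoice string with an enum moves these contracts into the type system, so an invalid choice value now fails at compile time rather than at the model endpoint.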