Skip to content

Commit

Permalink
Working Ollama chat for 'generateText' but not 'streamText' json pars…
Browse files Browse the repository at this point in the history
…ing error!
  • Loading branch information
KastanDay committed Jun 21, 2024
1 parent 6426832 commit bd7a5dd
Show file tree
Hide file tree
Showing 3 changed files with 81 additions and 6 deletions.
28 changes: 23 additions & 5 deletions src/app/api/chat/ollama/route.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import { createOllama } from 'ollama-ai-provider'
import { streamText } from 'ai'

// import { OllamaModel } from '~/types/OllamaProvider'

const ollama = createOllama({
Expand All @@ -14,12 +16,28 @@ export async function POST(req: Request) {
*/
console.log('In ollama POST endpoint')

// const res = ollama.chat('llama3:70b-instruct', )//, numCtx=8192)
const model = ollama('llama3:70b-instruct')
model.doStream
const messages = [
{
role: 'user',
content: 'why is the sky blue?',
},
]

const result = await streamText({
maxRetries: 5,
maxTokens: 512,
model: ollama('llama3:70b-instruct'),
prompt: 'Invent a new holiday and describe its traditions.',
temperature: 0.3,
})

for await (const textPart of result.textStream) {
process.stdout.write(textPart)
}

// return new StreamingTextResponse(res.doStream)
// return res.doStream
console.log()
console.log('Token usage:', await result.usage)
console.log('Finish reason:', await result.finishReason)
}

export async function GET() {
Expand Down
5 changes: 4 additions & 1 deletion src/pages/api/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import {
import { OpenAIModel, OpenAIModelID, OpenAIModels } from '@/types/openai'
import { decrypt, isEncrypted } from '~/utils/crypto'
import { LLMProvider, ProviderNames } from '~/types/LLMProvider'
import { getOllamaModels } from '~/utils/modelProviders/ollama'
import { getOllamaModels, runOllamaChat } from '~/utils/modelProviders/ollama'

export const config = {
runtime: 'edge',
Expand All @@ -36,6 +36,9 @@ const handler = async (req: Request): Promise<Response> => {
const ollamaModels = await getOllamaModels()
console.log('Ollama Models in models.ts: ', ollamaModels)

// Test chat function
const ret = await runOllamaChat()

// Iterate over the providers, check if their key works. Return all available models...
// each model provider should have at least `/chat` and `/models` endpoints

Expand Down
54 changes: 54 additions & 0 deletions src/utils/modelProviders/ollama.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,64 @@
import { generateText, streamText } from 'ai'
import { createOllama } from 'ollama-ai-provider'

/**
 * Shape of a single model entry returned by the Ollama server.
 *
 * NOTE(review): none of the visible code populates this type — field
 * semantics below are inferred from the names and should be confirmed
 * against the `/api/ps` response handling in `getOllamaModels`.
 */
export interface OllamaModel {
  // Model identifier string, presumably as reported by Ollama
  // (e.g. 'llama3:70b-instruct') — TODO confirm.
  id: string
  // Human-readable display name for the model.
  name: string
  // Parameter-count label (e.g. '70B') — presumably taken from the
  // Ollama process listing; verify against the fetch in getOllamaModels.
  parameterSize: string
  // Maximum context window in tokens — TODO confirm source of this value.
  tokenLimit: number
}

/**
 * Runs a one-shot (non-streaming) chat completion against the NCSA-hosted
 * Ollama server and returns the generated text.
 *
 * NOTE(review): `streamText` currently fails against this provider with a
 * JSON parsing error (see the commit message / TODO below), so this uses
 * `generateText` until that is resolved.
 *
 * @returns the model's generated text.
 * @throws whatever `generateText` throws on network/provider failure —
 *         callers (e.g. models.ts) should handle rejection.
 */
export const runOllamaChat = async () => {
  console.log('In ollama POST endpoint')

  const ollama = createOllama({
    // Point the provider at the self-hosted Ollama instance.
    baseURL: 'https://ollama.ncsa.ai/api',
  })

  const result = await generateText({
    maxTokens: 1024,
    model: ollama('llama3:70b-instruct'),
    prompt: 'Invent a new holiday and describe its traditions.',
  })

  console.log('OLLAMA RESULT', result.text)

  // TODO: switch to a streaming response once the streamText JSON-parse
  // failure with this provider is fixed. Server-side streaming pattern:
  // https://sdk.vercel.ai/examples/next-app/chat/stream-chat-completion#server
  // (A `streamText({ messages, temperature, ... })` sketch previously lived
  // here; chunks from result.textStream failed JSON.parse.)

  // Return the text so callers can use the reply; previously this function
  // resolved to undefined even though models.ts captured its result.
  return result.text
}

export const getOllamaModels = async () => {
const response = await fetch('https://ollama.ncsa.ai/api/ps')
if (!response.ok) {
Expand Down

0 comments on commit bd7a5dd

Please sign in to comment.