Skip to content

Commit

Permalink
restore support for model select dropdown from ollama api in provider
Browse files Browse the repository at this point in the history
  • Loading branch information
rjmacarthy committed Apr 9, 2024
1 parent 0392530 commit 375531e
Show file tree
Hide file tree
Showing 6 changed files with 158 additions and 73 deletions.
24 changes: 22 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -318,14 +318,34 @@
"default": true,
"description": "Enable this setting to allow twinny to keep making subsequent completion requests to the API after the last completion request was accepted."
},
"twinny.keepAlive": {
"twinny.ollamaHostname": {
"order": 13,
"type": "string",
"default": "0.0.0.0",
"description": "Hostname for Ollama API.",
"required": true
},
"twinny.ollamaApiPort": {
"order": 14,
"type": "number",
"default": 11434,
"description": "The API port, usually `11434`",
"required": false
},
"twinny.keepAlive": {
"order": 15,
"type": "string",
"default": "5m",
"description": "Keep models in memory by making requests with keep_alive=-1. Applicable only for Ollama API."
},
"twinny.ollamaUseTls": {
"order": 25,
"type": "boolean",
"default": false,
"description": "Enables TLS encryption for Ollama API connections."
},
"twinny.enableLogging": {
"order": 14,
"order": 26,
"type": "boolean",
"default": true,
"description": "Enable twinny debug mode"
Expand Down
38 changes: 13 additions & 25 deletions src/extension/ollama-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,37 +4,25 @@ import { Logger } from '../common/logger'
export class OllamaService {
private logger: Logger
private _config = workspace.getConfiguration('twinny')
private _apiHostname = this._config.get('apiHostname') as string
private _chatApiPort = this._config.get('chatApiPort') as string
private _fimApiPort = this._config.get('fimApiPort') as string
private _useTls = this._config.get('useTls') as boolean
private _baseUrlChat: string
private _baseUrlFim: string
private _baseUrl: string

constructor() {
this.logger = new Logger()
const useTls = this._useTls
const protocol = useTls ? 'https' : 'http'
this._baseUrlChat = `${protocol}://${this._apiHostname}:${this._chatApiPort}`
this._baseUrlFim = `${protocol}://${this._apiHostname}:${this._fimApiPort}`
const protocol = (this._config.get('ollamaUseTls') as boolean)
? 'https'
: 'http'
const hostname = this._config.get('ollamaHostname') as string
const port = this._config.get('ollamaApiPort') as string
this._baseUrl = `${protocol}://${hostname}:${port}`
}

public fetchModels = async (resource = '/api/tags') => {
const chatModelsRes = (await fetch(this._baseUrlChat + resource)) || []
const fimModelsRes = await fetch(this._baseUrlFim + resource)
const { models: chatModels } = await chatModelsRes.json()
const { models: fimModels } = await fimModelsRes.json()
const models = new Set()
if (Array.isArray(chatModels)) {
for (const model of chatModels) {
models.add(model)
}
try {
const response = await fetch(`${this._baseUrl}${resource}`)
const { models } = await response.json()
return Array.isArray(models) ? [...new Set(models)] : []
} catch (err) {
return []
}
if (Array.isArray(fimModels)) {
for (const model of fimModels) {
models.add(model)
}
}
return Array.from(models)
}
}
21 changes: 21 additions & 0 deletions src/webview/hooks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { useEffect, useState } from 'react'

import { MESSAGE_KEY, MESSAGE_NAME } from '../common/constants'
import {
ApiModel,
ClientMessage,
LanguageType,
ServerMessage,
Expand Down Expand Up @@ -281,3 +282,23 @@ export const useConfigurationSetting = (key: string) => {

return { configurationSetting }
}

/**
 * React hook that requests the Ollama model list from the extension
 * host (via `postMessage`) and listens for the response message.
 *
 * @returns `{ models }` — the fetched models, or `[]` until the
 *   response arrives.
 */
export const useOllamaModels = () => {
  const [models, setModels] = useState<ApiModel[] | undefined>([])

  const handler = (event: MessageEvent) => {
    const message: ServerMessage<ApiModel[]> = event.data
    if (message?.type === MESSAGE_NAME.twinnyFetchOllamaModels) {
      setModels(message?.value.data)
    }
  }

  useEffect(() => {
    global.vscode.postMessage({
      type: MESSAGE_NAME.twinnyFetchOllamaModels
    })
    window.addEventListener('message', handler)
    // BUG FIX: the original returned this cleanup from the message
    // handler itself (dead code), leaking the listener on unmount.
    // The cleanup must be returned from the effect.
    return () => window.removeEventListener('message', handler)
  }, [])

  return { models }
}
35 changes: 35 additions & 0 deletions src/webview/model-select.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import { VSCodeDropdown } from '@vscode/webview-ui-toolkit/react'

import { getModelShortName } from './utils'
import { ApiModel } from '../common/types'

interface Props {
  /** Currently selected model name (controlled value). */
  model: string | undefined
  /** Called with the newly selected model name. */
  setModel: (model: string) => void
  /** Models fetched from the Ollama API; `undefined` while loading. */
  models: ApiModel[] | undefined
}

/**
 * Dropdown for choosing an Ollama model by name.
 *
 * The toolkit's `onChange` is typed loosely, so the event is cast to a
 * standard select change event before reading the value.
 */
export const ModelSelect = ({ model, models, setModel }: Props) => {
  const handleOnChange = (e: unknown): void => {
    const event = e as React.ChangeEvent<HTMLSelectElement>
    const selectedValue = event?.target.value || ''
    setModel(selectedValue)
  }

  return (
    <div>
      <div>
        <label htmlFor="modelName">Model name*</label>
      </div>
      {/* id matches the label's htmlFor (the original label pointed at
          nothing); model names are unique per Ollama instance, so they
          are stable React keys (fixes index-as-key). */}
      <VSCodeDropdown id="modelName" onChange={handleOnChange} value={model}>
        {models?.map((model) => {
          return (
            <option value={model.name} key={model.name}>
              {getModelShortName(model.name)}
            </option>
          )
        })}
      </VSCodeDropdown>
    </div>
  )
}
Loading

0 comments on commit 375531e

Please sign in to comment.