From 7e9a80a9b71e4c82b9b90d33b016a9130560853b Mon Sep 17 00:00:00 2001 From: Nicolas Bonamy Date: Tue, 14 May 2024 17:45:34 -0500 Subject: [PATCH] prompt anywhere --- README.md | 1 + build/build_number.txt | 2 +- defaults/settings.json | 5 ++ package-lock.json | 24 ++++++- package.json | 1 + src/App.vue | 4 +- src/automations/anywhere.ts | 106 +++++++++++++++++++++++++++++ src/automations/commander.ts | 11 +-- src/components/Prompt.vue | 21 ++++-- src/main.ts | 37 +++++++++- src/main/shortcuts.ts | 1 + src/main/window.ts | 54 ++++++++++++++- src/preload.ts | 5 ++ src/screens/PromptAnywhere.vue | 49 +++++++++++++ src/settings/SettingsShortcuts.vue | 10 +++ src/types/automation.d.ts | 1 + src/types/config.d.ts | 1 + src/types/index.d.ts | 4 ++ 18 files changed, 323 insertions(+), 14 deletions(-) create mode 100644 src/automations/anywhere.ts create mode 100644 src/screens/PromptAnywhere.vue diff --git a/README.md b/README.md index 54b6395..41b39e4 100644 --- a/README.md +++ b/README.md @@ -73,6 +73,7 @@ To use Internet search you need a [Tavily API key](https://app.tavily.com/home). ## DONE +- [x] Prompt anywhere - [x] Cancel commands - [x] GPT-4o support - [x] Different default engine/model for commands diff --git a/build/build_number.txt b/build/build_number.txt index f84d24e..a14f8d5 100644 --- a/build/build_number.txt +++ b/build/build_number.txt @@ -1 +1 @@ -178 +179 diff --git a/defaults/settings.json b/defaults/settings.json index bc388c5..3a4d578 100644 --- a/defaults/settings.json +++ b/defaults/settings.json @@ -34,6 +34,11 @@ "key": "Space", "alt": true, "ctrl": true + }, + "anywhere": { + "key": "Space", + "shift": true, + "ctrl": true } }, "engines": { diff --git a/package-lock.json b/package-lock.json index 6d78e24..9786bb3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "witsy", - "version": "1.5.5", + "version": "1.6.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "witsy", - "version": "1.5.5", + "version": "1.6.0", "license": "MIT", "dependencies": { "@anthropic-ai/sdk": "^0.20.4", @@ -26,6 +26,7 @@ "katex": "^0.16.10", "markdown-it": "^14.1.0", "markdown-it-mark": "^4.0.0", + "markdown-to-text": "^0.1.1", "minimatch": "^9.0.4", "mitt": "^3.0.1", "officeparser": "^4.1.1", @@ -3297,6 +3298,11 @@ "@types/responselike": "^1.0.0" } }, + "node_modules/@types/chai": { + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.16.tgz", + "integrity": "sha512-PatH4iOdyh3MyWtmHVFXLWCCIhUbopaltqddG9BzB+gMIzee2MJrvd+jouii9Z3wzQJruGWAm7WOMjgfG8hQlQ==" + }, "node_modules/@types/concat-stream": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/@types/concat-stream/-/concat-stream-1.6.1.tgz", @@ -3449,6 +3455,11 @@ "dev": true, "optional": true }, + "node_modules/@types/mocha": { + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.2.3.tgz", + "integrity": "sha512-ekGvFhFgrc2zYQoX4JeZPmVzZxw6Dtllga7iGHzfbYIYkAMUx/sAFP2GdFpLff+vdHXu5fl7WX9AT+TtqYcsyw==" + }, "node_modules/@types/node": { "version": "20.12.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz", @@ -12902,6 +12913,15 @@ "resolved": "https://registry.npmjs.org/markdown-it-mark/-/markdown-it-mark-4.0.0.tgz", "integrity": "sha512-YLhzaOsU9THO/cal0lUjfMjrqSMPjjyjChYM7oyj4DnyaXEzA8gnW6cVJeyCrCVeyesrY2PlEdUYJSPFYL4Nkg==" }, + "node_modules/markdown-to-text": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/markdown-to-text/-/markdown-to-text-0.1.1.tgz", + 
"integrity": "sha512-co/J5l8mJ2RK9wD/nQRGwO7JxoeyfvVNtOZll016EdAX2qYkwCWMdtYvJO42b41Ho7GFEJMuly9llf0Nj+ReQw==", + "dependencies": { + "@types/chai": "^4.2.14", + "@types/mocha": "^8.2.0" + } + }, "node_modules/matcher": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/matcher/-/matcher-3.0.0.tgz", diff --git a/package.json b/package.json index ad73baa..03c98c6 100644 --- a/package.json +++ b/package.json @@ -80,6 +80,7 @@ "katex": "^0.16.10", "markdown-it": "^14.1.0", "markdown-it-mark": "^4.0.0", + "markdown-to-text": "^0.1.1", "minimatch": "^9.0.4", "mitt": "^3.0.1", "officeparser": "^4.1.1", diff --git a/src/App.vue b/src/App.vue index eb30b7e..7a4d1ad 100644 --- a/src/App.vue +++ b/src/App.vue @@ -8,6 +8,7 @@ import { ref, computed, onMounted } from 'vue' import Main from './screens/Main.vue' import Wait from './screens/Wait.vue' import Commands from './screens/Commands.vue' +import PromptAnywhere from './screens/PromptAnywhere.vue' // add platform name onMounted(() => { @@ -30,7 +31,8 @@ const routes = { '/': Main, '/chat': Main, '/wait': Wait, - '/command': Commands + '/command': Commands, + '/prompt': PromptAnywhere, } const currentPath = ref(window.location.hash) diff --git a/src/automations/anywhere.ts b/src/automations/anywhere.ts new file mode 100644 index 0000000..5c46c26 --- /dev/null +++ b/src/automations/anywhere.ts @@ -0,0 +1,106 @@ + +import { Configuration } from '../types/config.d' +import { App } from 'electron' +import { loadSettings } from '../main/config' +import { igniteEngine } from '../services/llm' +import { LlmResponse } from '../types/llm.d' +import removeMarkdown from 'markdown-to-text' +import LlmEngine from '../services/engine' +import Automator from './automator' +import Message from '../models/message' +import * as window from '../main/window' + +export default class PromptAnywhere { + + private llm: LlmEngine + private cancelled: boolean + + constructor(llm?: LlmEngine) { + this.llm = llm + this.cancelled = false + } + + cancel = async () => { + + // close stuff + await window.closeWaitingPanel(); + await window.restoreWindows(); + await window.releaseFocus(); + + // record + this.cancelled = true; + + } + + execPrompt = async (app: App, prompt: string): Promise => { + + try { + + // config + const config: Configuration = loadSettings(app); + const engine = config.llm.engine; + const model = config.getActiveModel(); + + // open waiting panel + window.openWaitingPanel(); + + // we need an llm + if (!this.llm) { + this.llm = igniteEngine(engine, config); + if (!this.llm) { + throw new Error(`Invalid LLM engine: ${engine}`) + } + } + + // now prompt llm + console.debug(`Prompting with ${prompt.slice(0, 50)}…`); + const response = await this.promptLlm(model, prompt); + const result = removeMarkdown(response.content, { + stripListLeaders: false, + listUnicodeChar: '' + }); + + // if cancelled + if (this.cancelled) { + console.debug('Discarding LLM output as command was cancelled'); + return + } + + // done + await window.closeWaitingPanel(); + await window.restoreWindows(); + await window.releaseFocus(); + + // now paste + console.debug(`Processing LLM output: ${result.slice(0, 50)}…`); + + // we need an automator + const automator = new Automator(); + await automator.pasteText(result) + + + } catch (error) { + console.error('Error while testing', error); + } + + // done waiting + await window.closeWaitingPanel(true); + await window.restoreWindows(); + await window.releaseFocus(); + + } + + private promptLlm = (model: string, prompt: string): 
+
+    // build messages
+    const messages: Message[] = [
+      new Message('user', prompt)
+    ]
+
+    // now get it
+    return this.llm.complete(messages, { model: model })
+
+  }
+
+
+}
\ No newline at end of file
diff --git a/src/automations/commander.ts b/src/automations/commander.ts
index 4f7ff3a..c32bd05 100644
--- a/src/automations/commander.ts
+++ b/src/automations/commander.ts
@@ -5,13 +5,13 @@ import { RunCommandResponse } from '../types/automation.d'
 import { LlmResponse } from '../types/llm.d'
 import { App, BrowserWindow, Notification } from 'electron'
 import { loadSettings } from '../main/config'
-import { igniteEngine } from '../services/llm'
-import * as window from '../main/window'
-
+import { igniteEngine } from '../services/llm'
+import removeMarkdown from 'markdown-to-text'
 import Message from '../models/message'
 import Automator from './automator'
 import LlmEngine from '../services/engine'
 import { v4 as uuidv4 } from 'uuid'
+import * as window from '../main/window'
 
 const textCache: strDict = {}
 
@@ -144,7 +144,10 @@ export default class Commander {
     // now prompt llm
     console.debug(`Prompting with ${result.prompt.slice(0, 50)}…`);
     const response = await this.promptLlm(model, result.prompt);
-    result.response = response.content;
+    result.response = removeMarkdown(response.content, {
+      stripListLeaders: false,
+      listUnicodeChar: ''
+    });
 
     // if cancelled
     if (this.cancelled) {
diff --git a/src/components/Prompt.vue b/src/components/Prompt.vue
index 4a1fb04..919810b 100644
--- a/src/components/Prompt.vue
+++ b/src/components/Prompt.vue
@@ -1,14 +1,14 @@