diff --git a/.eslintrc.json b/.eslintrc.json index 634d89c2b..2200e0303 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -11,20 +11,14 @@ } ], "@typescript-eslint/no-explicit-any": "warn", + "@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/explicit-module-boundary-types": "off", "@typescript-eslint/no-empty-function": "off", "prefer-const": "warn", "react-hooks/set-state-in-effect": "warn" }, - "overrides": [ - { - "files": ["src/main/**/*.ts", "src/main/**/*.tsx"], - "rules": { - "@typescript-eslint/no-var-requires": "off" - } - } - ], + "overrides": [], "env": { "browser": true, "es2020": true, diff --git a/.gitignore b/.gitignore index 8d9e1a2ad..1096dd4f6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Dependencies node_modules/ dist/ +out/ release/ # Logs @@ -59,8 +60,7 @@ Thumbs.db .notes .claude/ -# Local generated worktrees for perf experiments -worktrees/ + # Checkouts directory .checkouts/ diff --git a/.nvmrc b/.nvmrc index 442c7587a..d845d9d88 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -22.20.0 +24.14.0 diff --git a/.prettierignore b/.prettierignore index 227943b01..09dada1cb 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,7 @@ **/.next **/build **/.turbo +**/out # ignore config files that shouldn't be formatted **/.env* diff --git a/.prettierrc b/.prettierrc index cb35617fa..3453c182a 100644 --- a/.prettierrc +++ b/.prettierrc @@ -6,5 +6,15 @@ "tabWidth": 2, "bracketSpacing": true, "bracketSameLine": false, - "plugins": ["prettier-plugin-tailwindcss"] + "plugins": ["prettier-plugin-tailwindcss", "@ianvs/prettier-plugin-sort-imports"], + "tailwindStylesheet": "./src/renderer/index.css", + "importOrder": [ + "^@/(.*)$", + "^@root/(.*)$", + "^@shared/(.*)$", + "^@main/(.*)$", + "^@renderer/(.*)$", + "^[./]" + ], + "importOrderTypeScriptVersion": "5.0.0" } diff --git a/AGENTS.md b/AGENTS.md index 4a99a3a78..78d8332ce 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,7 +1,7 @@ --- default_branch: main
package_manager: pnpm -node_version: "22.20.0" +node_version: "24.x.x" start_command: "pnpm run d" dev_command: "pnpm run dev" build_command: "pnpm run build" @@ -9,7 +10,7 @@ test_commands: - "pnpm run format" - "pnpm run lint" - "pnpm run type-check" - - "pnpm exec vitest run" + - "pnpm run test" ports: dev: 3000 required_env: [] @@ -23,329 +24,51 @@ optional_env: - CODEX_APPROVAL_POLICY --- -# Emdash - -Cross-platform Electron app that orchestrates multiple CLI coding agents (Claude Code, Codex, Qwen Code, Amp, etc.) in parallel. Each agent runs in its own Git worktree for isolation. Also supports remote development over SSH. - -### Tech Stack - -- **Runtime**: Electron 30.5.1, Node.js >=20.0.0 <23.0.0 (recommended: 22.20.0 via `.nvmrc`) -- **Frontend**: React 18, TypeScript 5.3, Vite 5, Tailwind CSS 3 -- **Backend**: Node.js, TypeScript, Drizzle ORM 0.32, SQLite3 5.1 -- **Editor**: Monaco Editor 0.55, **Terminal**: @xterm/xterm 6.0 + node-pty 1.0 -- **Native Modules**: node-pty, sqlite3, keytar 7.9 (require `pnpm run rebuild` after updates) -- **SSH**: ssh2 1.17 -- **UI**: Radix UI primitives, lucide-react icons, framer-motion - -## Quickstart - -1. `nvm use` (installs Node 22.20.0 if missing) or install Node 22.x manually. -2. `pnpm run d` to install dependencies and launch Electron + Vite. -3. If `pnpm run d` fails mid-stream, rerun `pnpm install`, then `pnpm run dev` (main + renderer). 
- -## Development Commands - -```bash -# Quick start (installs deps, starts dev) -pnpm run d - -# Development (runs main + renderer concurrently) -pnpm run dev -pnpm run dev:main # Electron main process only (tsc + electron) -pnpm run dev:renderer # Vite dev server only (port 3000) - -# Quality checks (ALWAYS run before committing) -pnpm run format # Format with Prettier -pnpm run lint # ESLint -pnpm run type-check # TypeScript type checking (uses tsconfig.json — renderer/shared/types) -pnpm exec vitest run # Run all tests - -# Run a specific test -pnpm exec vitest run src/test/main/WorktreeService.test.ts - -# Native modules -pnpm run rebuild # Rebuild native modules for Electron -pnpm run reset # Clean install (removes node_modules, reinstalls) - -# Building & Packaging -pnpm run build # Build main + renderer -pnpm run package:mac # macOS .dmg (arm64) -pnpm run package:linux # Linux AppImage/deb (x64) -pnpm run package:win # Windows nsis/portable (x64) -``` - -## Testing - -Tests use `vi.mock()` to stub `electron`, `DatabaseService`, `logger`, etc. Integration tests create real git repos in `os.tmpdir()`. No shared test setup file — mocks are per-file. - -- **Framework**: Vitest (configured in `vite.config.ts`, `environment: 'node'`) -- **Test locations**: `src/test/main/` (15 service tests), `src/test/renderer/` (3 UI tests), `src/main/utils/__tests__/` (2 utility tests) - -## Guardrails - -- **ALWAYS** run `pnpm run format`, `pnpm run lint`, `pnpm run type-check`, and `pnpm exec vitest run` before committing. -- **NEVER** modify `drizzle/meta/` or numbered migration files — always use `drizzle-kit generate`. -- **NEVER** modify `build/` entitlements or updater config without review. -- **ALWAYS** use feature branches or worktrees; never commit directly to `main`. -- Do limit edits to `src/**`, `docs/**`, or config files you fully understand; keep `dist/`, `release/`, and `build/` untouched. 
-- Don't modify telemetry defaults or updater logic unless intentional and reviewed. -- Don't run commands that mutate global environments (global package installs, git pushes) from agent scripts. -- Put temporary notes or scratch content in `.notes/` (gitignored). - -## Architecture - -### Process Model - -- **Main process** (`src/main/`): Electron main — IPC handlers, services, database, PTY management -- **Renderer process** (`src/renderer/`): React UI built with Vite — components, hooks, terminal panes -- **Shared** (`src/shared/`): Provider registry (21 agent definitions), PTY ID helpers, shared utilities - -### Boot Sequence - -`entry.ts` → `main.ts` → IPC registration → window creation - -- `entry.ts` — Sets app name (must happen before `app.getPath('userData')`, or Electron defaults to `~/Library/Application Support/Electron`). Monkey-patches `Module._resolveFilename` to resolve `@shared/*` and `@/*` path aliases at runtime in compiled JS. -- `main.ts` — Loads `.env`, fixes PATH for CLI discovery on macOS/Linux/Windows (adds Homebrew, npm global, nvm paths so agents like `gh`, `codex`, `claude` are found when launched from Finder), detects `SSH_AUTH_SOCK` from user's login shell, then initializes Electron windows and registers all IPC handlers. -- `preload.ts` — Exposes secure `electronAPI` to renderer via `contextBridge`. 
- -### Main Process (`src/main/`) - -**Key services** (`src/main/services/`): -- `WorktreeService.ts` — Git worktree lifecycle, file preservation patterns -- `WorktreePoolService.ts` — Worktree pooling/reuse for instant task starts -- `DatabaseService.ts` — All SQLite CRUD operations -- `ptyManager.ts` — PTY (pseudo-terminal) lifecycle, session isolation, agent spawning -- `SkillsService.ts` — Cross-agent skill installation and catalog management -- `GitHubService.ts` / `GitService.ts` — Git and GitHub operations via `gh` CLI -- `PrGenerationService.ts` — Automated PR generation -- `TaskLifecycleService.ts` — Task lifecycle orchestration -- `TerminalSnapshotService.ts` — Terminal state snapshots -- `TerminalConfigParser.ts` — Terminal configuration parsing -- `RepositoryManager.ts` — Repository management -- `RemotePtyService.ts` / `RemoteGitService.ts` — Remote development over SSH -- `ssh/` — SSH connection management, credentials (via keytar), host key verification - -Note: Some IPC handler files are colocated in `services/` (e.g., `worktreeIpc.ts`, `ptyIpc.ts`, `updateIpc.ts`, `lifecycleIpc.ts`, `planLockIpc.ts`, `fsIpc.ts`). 
- -**IPC Handlers** (`src/main/ipc/`): -- 25+ handler files total (19 in `ipc/` + 6 colocated in `services/`) covering app, db, git, github, browser, connections, project, settings, telemetry, SSH, Linear, Jira, skills, and more -- All return `{ success: boolean, data?: any, error?: string }` format -- Types defined in `src/renderer/types/electron-api.d.ts` (~1,870 lines) - -**Database** (`src/main/db/`): -- Schema: `schema.ts` — Migrations: `drizzle/` (auto-generated) -- Locations: macOS `~/Library/Application Support/emdash/emdash.db`, Linux `~/.config/emdash/emdash.db`, Windows `%APPDATA%\emdash\emdash.db` -- Override with `EMDASH_DB_FILE` env var - -### Renderer Process (`src/renderer/`) - -**Key components** (`components/`): -- `App.tsx` — Root orchestration (~790 lines), located at `src/renderer/App.tsx` -- `EditorMode.tsx` — Monaco code editor -- `ChatInterface.tsx` — Conversation UI -- `FileChangesPanel.tsx` / `ChangesDiffModal.tsx` — Diff visualization and review -- `CommandPalette.tsx` — Command/action palette -- `FileExplorer/` — File tree navigation -- `BrowserPane.tsx` — Webview preview -- `skills/` — Skills catalog and management UI -- `ssh/` — SSH connection UI components - -**Key hooks** (`hooks/`, 42 total): -- `useAppInitialization` — Two-round project/task loading (fast skeleton then full), restores last active project/task from localStorage -- `useTaskManagement` — Full task lifecycle (~864 lines): create, delete, rename, archive, restore. 
Handles optimistic UI removal with rollback, lifecycle teardown, PTY cleanup -- `useCliAgentDetection` — Detects which CLI agents are installed on the system -- `useInitialPromptInjection` / `usePendingInjection` — Manages initial prompt sent to agents on task start - -### Path Aliases - -**Important**: `@/*` resolves differently in main vs renderer: - -| Alias | tsconfig.json (renderer) | tsconfig.main.json (main) | -|-------|-------------------------|--------------------------| -| `@/*` | `src/renderer/*` | `src/*` | -| `@shared/*` | `src/shared/*` | `src/shared/*` | -| `#types/*` | `src/types/*` | _(not available)_ | -| `#types` | `src/types/index.ts` | _(not available)_ | - -At runtime in compiled main process, `entry.ts` monkey-patches `Module._resolveFilename` to map `@shared/*` → `dist/main/shared/*` and `@/*` → `dist/main/main/*`. - -Main uses `module: "CommonJS"` (required by Electron), renderer uses `module: "ESNext"` (Vite handles compilation). - -### IPC Pattern - -```typescript -// Main (src/main/ipc/exampleIpc.ts) -ipcMain.handle('example:action', async (_event, args) => { - try { - return { success: true, data: await service.doSomething(args) }; - } catch (error) { - return { success: false, error: error.message }; - } -}); - -// Renderer — call via window.electronAPI -const result = await window.electronAPI.exampleAction({ id: '123' }); -``` - -All new IPC methods must be declared in `src/renderer/types/electron-api.d.ts`. - -### Services - -Singleton classes with module-level export: -```typescript -export class ExampleService { /* ... */ } -export const exampleService = new ExampleService(); -``` - -## Provider Registry (`src/shared/providers/registry.ts`) - -All 21 CLI agents are defined as `ProviderDefinition` objects. Key fields: - -- `cli` — binary name, `commands` — detection commands (may differ from cli) -- `autoApproveFlag` — e.g. 
`--dangerously-skip-permissions` for Claude -- `initialPromptFlag` — how to pass the initial prompt (`-i`, positional, etc.) -- `useKeystrokeInjection` — `true` for agents with no CLI prompt flag (Amp, OpenCode); Emdash types the prompt into the TUI after startup -- `sessionIdFlag` — only Claude; enables multi-chat session isolation via `--session-id` -- `resumeFlag` — e.g. `-c -r` for Claude, `--continue` for Kilocode - -To add a new provider: add a definition here AND add any API key to the `AGENT_ENV_VARS` list in `ptyManager.ts`. - -## PTY Management (`src/main/services/ptyManager.ts`) - -Three spawn modes: -1. **`startPty()`** — Shell-based: `{cli} {args}; exec {shell} -il` (user gets a shell after agent exits) -2. **`startDirectPty()`** — Direct spawn without shell wrapper using cached CLI path. Faster. Falls back to `startPty` when CLI path isn't cached or `shellSetup` is configured. -3. **`startSshPty()`** — Wraps `ssh -tt {target}` for remote development. - -**Session isolation**: For Claude, generates a deterministic UUID from task/conversation ID for `--session-id`/`--resume`. Session map persisted to `{userData}/pty-session-map.json`. - -**PTY ID format** (`src/shared/ptyId.ts`): `{providerId}-main-{taskId}` or `{providerId}-chat-{conversationId}`. - -**Environment**: PTYs use a minimal env (not `process.env`). The `AGENT_ENV_VARS` list in `ptyManager.ts` is the definitive passthrough list for API keys. Data is flushed over IPC every 16ms. - -## Worktree System - -**WorktreeService** (`src/main/services/WorktreeService.ts`): -- Creates worktrees at `../worktrees/{slugged-name}-{3-char-hash}` on branch `{prefix}/{slugged-name}-{hash}` -- Branch prefix defaults to `emdash`, configurable in settings -- Preserves gitignored files (`.env`, `.envrc`, etc.) 
from main repo to worktree -- Custom preserve patterns via `.emdash.json` at project root: `{ "preservePatterns": [".claude/**"] }` - -**WorktreePoolService** (`src/main/services/WorktreePoolService.ts`): -Eliminates 3-7s worktree creation delay: -1. Pre-creates a `_reserve/{hash}` worktree in the background on project open -2. On task creation, instant `git worktree move` + `git branch -m` rename -3. Replenishes reserve in background after claiming -4. Reserves expire after 30 minutes; orphaned reserves cleaned on startup - -## Multi-Chat Conversations - -Tasks can have multiple conversation tabs, each with their own provider and PTY. Database `conversations` table tracks `isMain`, `provider`, `displayOrder`. For Claude, each conversation gets its own session UUID. - -## Skills System - -Implements the [Agent Skills](https://agentskills.io) standard — cross-agent reusable skill packages (`SKILL.md` with YAML frontmatter). - -- **Central storage**: `~/.agentskills/{skill-name}/`, metadata in `~/.agentskills/.emdash/` -- **Agent sync**: Symlinks from central storage into each agent's native directory (`~/.claude/commands/`, `~/.codex/skills/`, etc.) -- **Aggregated catalog**: Merges from OpenAI repo, Anthropic repo, and local user-created skills -- **Key files**: `src/shared/skills/` (types, validation, agent targets), `src/main/services/SkillsService.ts` (core logic), `src/main/ipc/skillsIpc.ts`, `src/renderer/components/skills/`, `src/main/services/skills/bundled-catalog.json` (offline fallback) - -## SSH Remote Development - -Orchestrates agents on remote machines over SSH. - -- **Connections**: Password, key, or agent auth. Credentials stored via `keytar` in OS keychain. 
-- **Remote worktrees**: Created at `/.emdash/worktrees//` on the server -- **Remote PTY**: Agent shells via `ssh2`'s shell API, streaming to UI in real-time -- **Key files**: `src/main/services/ssh/` (SshService, SshCredentialService, SshHostKeyService), `src/main/services/RemotePtyService.ts`, `src/main/services/RemoteGitService.ts`, `src/main/utils/shellEscape.ts` - -**Local-only (not yet remote)**: file diffs, file watching, branch push, worktree pooling, GitHub/PR features. - -**Security**: Shell args escaped via `quoteShellArg()` from `src/main/utils/shellEscape.ts`. Env var keys validated against `^[A-Za-z_][A-Za-z0-9_]*$`. Remote PTY restricted to allowlisted shell binaries. File access gated by `isPathSafe()`. - -## Database & Migrations - -- Schema in `src/main/db/schema.ts` → `pnpm exec drizzle-kit generate` to create migrations -- Browse: `pnpm exec drizzle-kit studio` -- Locations: macOS `~/Library/Application Support/emdash/emdash.db`, Linux `~/.config/emdash/emdash.db`, Windows `%APPDATA%\emdash\emdash.db` -- **NEVER** manually edit files in `drizzle/meta/` or numbered SQL migrations - -## Code Style - -- **TypeScript**: Strict mode enabled in both tsconfigs. Prefer explicit types over `any`. Type imports: `import type { Foo } from './bar'` -- **React**: Functional components with hooks. Both named and default exports are used. -- **File naming**: Components PascalCase (`FileExplorer.tsx`), hooks/utilities camelCase with `use` prefix (`useTaskManagement.ts`) or kebab-case (`use-toast.ts`). Tests: `*.test.ts` -- **Error handling**: Main → `log.error()` from `../lib/logger`, Renderer → `console.error()` or toast, IPC → `{ success: false, error }` -- **Styling**: Tailwind CSS classes - -## Project Configuration - -- **`.emdash.json`** at project root: `{ "preservePatterns": [".claude/**"] }` — controls which gitignored files are copied to worktrees. Also supports `shellSetup` for lifecycle scripts. 
-- **Branch prefix**: Configurable via app settings (`repository.branchPrefix`), defaults to `emdash` - -## Environment Variables - -All optional: -- `EMDASH_DB_FILE` — Override database file path -- `EMDASH_DISABLE_NATIVE_DB` — Disable native SQLite driver -- `EMDASH_DISABLE_CLONE_CACHE` — Disable clone caching -- `EMDASH_DISABLE_PTY` — Disable PTY support (used in tests) -- `TELEMETRY_ENABLED` — Toggle anonymous telemetry (PostHog) -- `CODEX_SANDBOX_MODE` / `CODEX_APPROVAL_POLICY` — Codex agent configuration - -## Hot Reload - -- **Renderer changes**: Hot-reload via Vite -- **Main process changes**: Require Electron restart (Ctrl+C → `pnpm run dev`) -- **Native modules**: Require `pnpm run rebuild` - -## CI/CD +# Emdash Agent Guide -- **`code-consistency-check.yml`** (every PR): format check, type check, vitest (workflow name: "CI Check") -- **`release.yml`** (on `v*` tags): per-platform builds. Mac builds each arch separately to prevent native module architecture mismatches. Mac release includes signing + notarization. +Start here. Load only the linked `agents/` docs that are relevant to the task. -## Common Pitfalls +## Start Here -1. **PTY resize after exit**: PTYs must be cleaned up on exit. Use `removePty()` in exit handlers. -2. **Worktree path resolution**: Always resolve paths from `WorktreeService`, not manually. -3. **IPC type safety**: Define all new IPC methods in `electron-api.d.ts`. -4. **Native module issues**: After updating node-pty/sqlite3/keytar, run `pnpm run rebuild`. Last resort: `pnpm run reset`. -5. **Monaco disposal**: Editor instances must be disposed to prevent memory leaks. -6. **CLI not found in agent**: If agents can't find `gh`, `codex`, etc., the PATH setup in `main.ts` may need updating for the platform. -7. **New provider integration**: Must add to registry in `src/shared/providers/registry.ts` AND add any API key to `AGENT_ENV_VARS` in `ptyManager.ts`. -8. 
**SSH shell injection**: All remote shell arguments must use `quoteShellArg()` from `src/main/utils/shellEscape.ts`. +- Repo map: `agents/README.md` +- Setup and commands: `agents/quickstart.md` +- System overview: `agents/architecture/overview.md` +- Validation flow: `agents/workflows/testing.md` -## Risky Areas +## Read By Task -- `src/main/db/**` + `drizzle/` — Schema migrations; mismatches can corrupt user data. -- `build/` entitlements and updater config — Incorrect changes break signing/auto-update. -- Native dependencies (`sqlite3`, `node-pty`, `keytar`) — Rebuilding is slow; avoid upgrading casually. -- PTY/terminal management — Race conditions or unhandled exits can kill agent runs. -- SSH services (`src/main/services/ssh/**`, `src/main/utils/shellEscape.ts`) — Security-critical: remote connections, credentials, shell command construction. +- Main-process changes: `agents/architecture/main-process.md` +- Renderer/UI changes: `agents/architecture/renderer.md` +- Shared types or provider metadata: `agents/architecture/shared.md` +- Worktree behavior or `.emdash.json`: `agents/workflows/worktrees.md` +- SSH or remote project work: `agents/workflows/remote-development.md` +- Provider integration or CLI behavior: `agents/integrations/providers.md` +- MCP changes: `agents/integrations/mcp.md` -## Git Workflow +## High-Risk Areas -- Worktrees: `../worktrees/{workspace-name}-{hash}`, agents run there -- Conventional commits: `feat:`, `fix:`, `refactor:`, `docs:`, `chore:`, `test:` -- Example: `fix(agent): resolve worktree path issue (#123)` +- Database and migrations: `agents/risky-areas/database.md` +- PTY/session orchestration: `agents/risky-areas/pty.md` +- SSH and shell escaping: `agents/risky-areas/ssh.md` +- Auto-update and packaging: `agents/risky-areas/updater.md` -## Key Configuration Files +## Conventions -- `vite.config.ts` — Renderer build + Vitest test config -- `drizzle.config.ts` — Database migration config (supports `EMDASH_DB_FILE` override) -- 
`tsconfig.json` — Renderer/shared TypeScript config (`module: ESNext`, `noEmit: true` — Vite does compilation) -`tsconfig.main.json` — Main process TypeScript config (`module: CommonJS` — required by Electron main) -`tailwind.config.js` — Tailwind configuration -`.nvmrc` — Node version (22.20.0) -Electron Builder config is in `package.json` under `"build"` key +- IPC contract and typing: `agents/conventions/ipc.md` +- Main process patterns (controllers, services, Result type, events): `agents/conventions/main-patterns.md` +- Renderer patterns (modals, views, PTY frontend, React Query contexts): `agents/conventions/renderer-patterns.md` +- TypeScript and React norms: `agents/conventions/typescript.md` +- Config files and repo rules: `agents/conventions/config-files.md` +- Never do re-exports; always import from the original source. -## Pre-PR Checklist +## Non-Negotiables -- [ ] Dev server runs: `pnpm run d` (or `pnpm run dev`) starts cleanly. -- [ ] Code is formatted: `pnpm run format`. -- [ ] Lint passes: `pnpm run lint`. -- [ ] Types check: `pnpm run type-check`. -- [ ] Tests pass: `pnpm exec vitest run`. -- [ ] No stray build artifacts or secrets committed. -- [ ] Documented any schema or config changes impacting users. +- Run `pnpm run format`, `pnpm run lint`, `pnpm run type-check`, and `pnpm run test` before merging. +- Do not hand-edit numbered Drizzle migrations or `drizzle/meta/`. +- New RPC methods go in the appropriate `src/main/core/*/controller.ts` and are auto-registered via `src/main/rpc.ts`. +- Only use manual IPC in `electron-api.d.ts` for methods requiring `event.sender`. +- New modals must be registered in `src/renderer/core/modal/registry.ts`. +- New views must be registered in `src/renderer/core/view/registry.ts`. +- Treat `src/main/core/pty/`, `src/main/core/ssh/`, `src/main/db/`, and updater code as high risk. 
+- Avoid editing `dist/`, `release/`, and `build/` unless the task is explicitly about packaging or updater/signing behavior. +- The docs app in `docs/` is separate from the Electron renderer and also defaults to port `3000`. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 30bd3d68f..49ff15847 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,7 +6,7 @@ Thanks for your interest in contributing! We favor small, focused PRs and clear Prerequisites -- **Node.js 20.0.0+ (recommended: 22.20.0)** and Git +- **Node.js 24.0.0+ (recommended: 24.14.0)** and Git - Optional (recommended for end‑to‑end testing): - GitHub CLI (`brew install gh`; then `gh auth login`) - At least one supported coding agent CLI (see docs for list) diff --git a/agents/README.md b/agents/README.md new file mode 100644 index 000000000..ea886141c --- /dev/null +++ b/agents/README.md @@ -0,0 +1,29 @@ +# Agent Docs + +This directory is the system of record for agent-facing repo guidance. Keep topic pages small, specific, and mechanically checkable where possible. + +## Recommended Reading Order + +1. `quickstart.md` +2. `architecture/overview.md` +3. the task-specific page for the area you are changing + +## Directory Layout + +- `architecture/` + - system structure and major code ownership boundaries +- `workflows/` + - task-oriented procedures like testing, worktrees, and remote development +- `integrations/` + - provider, MCP, and external service guidance +- `risky-areas/` + - places where incorrect changes are expensive +- `conventions/` + - coding contracts and repo rules + +## Maintenance Rules + +- Prefer one page per concrete topic. +- Avoid volatile counts unless you can verify them cheaply. +- Link to the source-of-truth file paths. +- Update the smallest relevant page instead of expanding `AGENTS.md`. 
diff --git a/agents/architecture/main-process.md b/agents/architecture/main-process.md new file mode 100644 index 000000000..c28915b8d --- /dev/null +++ b/agents/architecture/main-process.md @@ -0,0 +1,51 @@ +# Main Process + +## Structure + +The main process is organized into domain modules under `src/main/core/`. Each domain typically has a `controller.ts` (RPC handlers) and service/implementation files. + +## Domain Modules (`src/main/core/`) + +- **account** — Emdash account service, credential store, provider token registry +- **app** — App lifecycle service and controller +- **conversations** — Conversation CRUD, session start, agent event classifiers (per-provider terminal output parsers) +- **dependencies** — CLI agent detection, probing, dependency management +- **editor** — Editor buffer service for Monaco integration +- **fs** — Filesystem operations with provider pattern (`local-fs.ts`, `ssh-fs.ts`) +- **git** — Git operations (`git-service.ts`, `git-repo-utils.ts`, `detectGitInfo.ts`) +- **github** — GitHub auth, PRs, issues, repos (via `gh` CLI) +- **jira** — Jira integration +- **linear** — Linear integration +- **mcp** — MCP service, adapters, config IO, catalog +- **projects** — Project management with provider pattern (`local-project-provider.ts`), worktree service, project settings, CRUD operations +- **pty** — PTY lifecycle (`local-pty.ts`, `ssh2-pty.ts`), session registry, env setup, spawn utilities +- **repository** — Repository controller +- **settings** — App settings service and schema, provider settings (separate controller) +- **shared** — Shared utilities (OAuth flow) +- **skills** — Skills service and controller +- **ssh** — SSH connection management, credentials, config parsing, client proxy +- **tasks** — Task CRUD (create, delete, archive, restore, provision) +- **terminals** — Terminal lifecycle with provider pattern (`local-terminal-provider.ts`, `ssh-terminal-provider.ts`), lifecycle scripts +- **updates** — Auto-update service + 
+## Other Main Process Areas + +- `src/main/app/` — Menu, protocol handler, window creation +- `src/main/lib/` — Logger, telemetry, events, result type, updater error +- `src/main/db/` — Database schema and initialization +- `src/main/utils/` — Shell environment, shell escaping, child process env, external links +- `src/main/services/AgentEventService.ts` — Forwards agent hook events to renderer windows + +## IPC / RPC Structure + +- All domain controllers are assembled into a typed RPC router in `src/main/rpc.ts`. +- RPC primitives live in `src/shared/ipc/rpc.ts` (`createRPCRouter`, `createRPCController`, `createRPCClient`). +- Event primitives live in `src/shared/ipc/events.ts`. +- A small number of manual IPC handlers remain in `electron-api.d.ts` for methods requiring `event.sender` (PTY start/input/resize/kill, fsList, openIn). + +## When Editing Here + +- Check `agents/conventions/main-patterns.md` for controller, service, Result type, and event patterns. +- Check `agents/conventions/ipc.md` for the RPC controller pattern and typing rules. +- Check `agents/risky-areas/pty.md` before touching PTY or provider spawn behavior. +- Check `agents/risky-areas/database.md` before changing persistence or migrations. 
diff --git a/agents/architecture/overview.md b/agents/architecture/overview.md new file mode 100644 index 000000000..c4562db13 --- /dev/null +++ b/agents/architecture/overview.md @@ -0,0 +1,30 @@ +# Architecture Overview + +## Process Model + +- `src/main/`: Electron main process — app lifecycle, RPC controllers, domain services, database, PTY orchestration, updater, SSH +- `src/preload/`: Electron preload bridge — exposes typed `invoke`, `eventSend`, `eventOn` to renderer +- `src/renderer/`: React UI — views, components, hooks, contexts, typed RPC client +- `src/shared/`: Provider registry, IPC primitives (RPC + events), MCP types, skills types, shared domain types +- `docs/`: Separate Next.js + Fumadocs site + +## Boot Sequence + +`src/main/index.ts` → app lifecycle → IPC/RPC registration → window creation → renderer + +- `index.ts` — Loads `.env`, normalizes PATH, initializes database, registers all RPC controllers via `src/main/rpc.ts`, creates the main window. +- `src/main/rpc.ts` — Assembles the typed RPC router from domain controllers (`src/main/core/*/controller.ts`). +- `src/preload/index.ts` — Exposes `window.electronAPI` (`invoke`, `eventSend`, `eventOn`) via `contextBridge`. +- `src/renderer/core/ipc.ts` — Creates the typed RPC client and event emitter used throughout the renderer. + +## Build Tooling + +- `electron.vite.config.ts` — electron-vite config for main, preload, and renderer builds. +- `vitest.config.ts` — Vitest config with two test projects: `node` (main + renderer unit tests) and `browser` (Playwright-backed renderer tests). +- Single `tsconfig.json` for all targets. 
+ +## Read Next + +- Main process details: `main-process.md` +- Renderer details: `renderer.md` +- Shared modules and provider registry: `shared.md` diff --git a/agents/architecture/renderer.md b/agents/architecture/renderer.md new file mode 100644 index 000000000..32a0101db --- /dev/null +++ b/agents/architecture/renderer.md @@ -0,0 +1,50 @@ +# Renderer + +## Main Entry Points + +- `src/renderer/App.tsx`: top-level provider composition +- `src/renderer/views/Workspace.tsx`: main post-onboarding shell +- `src/renderer/components/MainContent.tsx`: switches between views (projects, tasks, settings, skills, MCP, home) +- `src/renderer/core/ipc.ts`: typed RPC client (`rpc`) and event emitter (`events`) used throughout renderer + +## View Areas (`src/renderer/views/`) + +- `projects/` — project management: active project, pending project, create task modal, settings panel, task panel, branch selector, titlebar +- `tasks/` — task experience: + - `conversations/` — conversation panel and tabs + - `diff-viewer/` — file changes panel, diff views (file, stacked), PR section, git state providers + - `editor/` — Monaco code editor, file tree, editor providers, conflict dialog + - `terminals/` — terminal panel and tabs + - `hooks/` — task-scoped hooks (use-task, use-conversations, use-terminals, use-task-view-navigation) +- `settings/` — settings view +- `home-view.tsx`, `mcp-view.tsx`, `skills-view.tsx`, `Welcome.tsx` + +## Component Areas (`src/renderer/components/`) + +- `sidebar/` — app sidebar +- `diff/` — diff-related components +- `skills/` — skills catalog and management +- `mcp/` — MCP server management +- `kanban/` — kanban board +- `integrations/` — integration management +- `ssh/` — SSH connection UI +- `FileExplorer/` — file tree navigation +- `settings/` — settings components +- `projects/` — project-related components +- `ui/` — shared UI primitives + +## Supporting Structure + +- Context providers: `src/renderer/contexts/` +- Hooks: `src/renderer/hooks/` +- 
Client-side state helpers, stores, and utilities: `src/renderer/lib/` +- Core infrastructure: `src/renderer/core/` (IPC client, modals, project state, PTY helpers, view management) + +## When Editing Here + +- Check `agents/conventions/renderer-patterns.md` for modal, view, PTY frontend, and context patterns. +- Call RPC methods via the typed `rpc` client from `src/renderer/core/ipc.ts` (e.g., `rpc.tasks.create(...)`). +- New modals must be registered in `src/renderer/core/modal/registry.ts`. +- New views must be registered in `src/renderer/core/view/registry.ts`. +- Only methods in `src/renderer/types/electron-api.d.ts` use direct `window.electronAPI` calls (PTY ops, fsList, openIn). +- If you change user-visible workflows, update the matching page in `docs/` when appropriate. diff --git a/agents/architecture/shared.md b/agents/architecture/shared.md new file mode 100644 index 000000000..4affe2cc5 --- /dev/null +++ b/agents/architecture/shared.md @@ -0,0 +1,45 @@ +# Shared Modules + +## Main Shared Areas + +- Provider registry: + - `src/shared/agent-provider-registry.ts` +- IPC primitives: + - `src/shared/ipc/rpc.ts` — typed RPC router, controller, and client + - `src/shared/ipc/events.ts` — typed event emitter +- Typed event definitions: + - `src/shared/events/` — `agentEvents.ts`, `appEvents.ts`, `editorEvents.ts`, `fsEvents.ts`, `githubEvents.ts`, `hostPreviewEvents.ts`, `lifecycleEvents.ts`, `ptyEvents.ts`, `sshEvents.ts` +- MCP types: + - `src/shared/mcp/` +- Skills types and validation: + - `src/shared/skills/` +- Domain type modules (flat files): + - `conversations.ts`, `fs.ts`, `git.ts`, `github.ts`, `hostPreview.ts`, `lifecycle.ts`, `projects.ts`, `pull-requests.ts`, `ssh.ts`, `tasks.ts`, `terminals.ts`, `urls.ts`, `utils.ts` +- PTY helpers: + - `ptyId.ts`, `ptySessionId.ts` +- App settings types: + - `app-settings.ts` + +## Path Aliases + +All aliases are defined in a single `tsconfig.json` and mirrored in `electron.vite.config.ts`: + +| Alias | Resolves 
to | +| --- | --- | +| `@/*` | `src/*` | +| `@renderer/*` | `src/renderer/*` | +| `@main/*` | `src/main/*` | +| `@shared/*` | `src/shared/*` | +| `@root/*` | `./*` | + +Aliases are resolved at build time by electron-vite. No runtime monkey-patching is needed. + +## Provider Registry Rules + +When adding a provider: + +1. update `src/shared/agent-provider-registry.ts` +2. add any required env passthrough in `src/main/core/pty/pty-env.ts` +3. add an agent event classifier in `src/main/core/conversations/impl/agent-event-classifiers/` +4. update renderer surfaces that assume provider metadata +5. add tests for non-standard spawn or detection behavior diff --git a/agents/conventions/config-files.md b/agents/conventions/config-files.md new file mode 100644 index 000000000..6a5632af5 --- /dev/null +++ b/agents/conventions/config-files.md @@ -0,0 +1,20 @@ +# Config Files And Repo Rules + +## Key Files + +- `package.json` +- `electron.vite.config.ts` +- `vitest.config.ts` +- `tsconfig.json` +- `drizzle.config.ts` +- `.emdash.json` +- `.nvmrc` +- `.husky/` +- `.github/workflows/` +- `flake.nix` + +## Repo Rules + +- avoid editing `dist/`, `release/`, and `build/` unless the task is explicitly about packaging or signing +- the docs app in `docs/` is separate from the Electron renderer +- update the narrowest relevant page in `agents/` instead of growing `AGENTS.md` diff --git a/agents/conventions/ipc.md b/agents/conventions/ipc.md new file mode 100644 index 000000000..aadfadb56 --- /dev/null +++ b/agents/conventions/ipc.md @@ -0,0 +1,45 @@ +# IPC Conventions + +## RPC Pattern + +The primary IPC mechanism is a typed RPC system: + +- **Controllers**: `src/main/core/*/controller.ts` — define handler functions using `createRPCController`. +- **Router**: `src/main/rpc.ts` — assembles all controllers into a typed router using `createRPCRouter`. +- **Registration**: `registerRPCRouter(router, ipcMain)` in `src/main/index.ts` — auto-registers `namespace.method` channels. 
+- **Client**: `src/renderer/core/ipc.ts` — creates a proxy-based typed client using `createRPCClient`. + +```ts +// Main — src/main/core/example/controller.ts +import { createRPCController } from '@shared/ipc/rpc'; +export const exampleController = createRPCController({ + async doSomething(id: string) { + return await service.doSomething(id); + }, +}); + +// Renderer — call via typed client +import { rpc } from '@renderer/core/ipc'; +const result = await rpc.example.doSomething('123'); +``` + +## Manual IPC (electron-api.d.ts) + +A small set of IPC methods that depend on `event.sender` remain as manual handlers declared in `src/renderer/types/electron-api.d.ts` (~92 lines): + +- PTY operations: `ptyStart`, `ptyStartDirect`, `ptyInput`, `ptyResize`, `ptyKill` +- Filesystem listing: `fsList` +- Open in external app: `openIn` +- Update events: `onUpdateEvent` + +## Event System + +Typed events use `createEventEmitter` from `src/shared/ipc/events.ts`. Event type definitions live in `src/shared/events/`. + +## Rules + +- Prefer the RPC pattern for new IPC methods — add a handler to the appropriate controller. +- Only use manual IPC when `event.sender` is required. +- Keep the RPC router type (`RpcRouter`) importable by the renderer for type inference. +- Prefer existing service boundaries over adding logic directly inside controllers. +- Update tests when controller shape or IPC wiring changes. 
diff --git a/agents/conventions/main-patterns.md b/agents/conventions/main-patterns.md new file mode 100644 index 000000000..86193b912 --- /dev/null +++ b/agents/conventions/main-patterns.md @@ -0,0 +1,105 @@ +# Main Process Patterns + +## Controller Pattern + +Each domain in `src/main/core/` exposes a `controller.ts` that defines RPC handlers: + +```ts +// src/main/core/tasks/controller.ts +import { createRPCController } from '@shared/ipc/rpc'; +import { createTask } from './createTask'; +import { getTasks } from './getTasks'; + +export const taskController = createRPCController({ + createTask, + getTasks, + deleteTask, + // ... +}); +``` + +Controllers are assembled into the router in `src/main/rpc.ts`: + +```ts +export const rpcRouter = createRPCRouter({ + tasks: taskController, + projects: projectController, + // ... +}); +``` + +**Rules:** +- Controller handlers are imported functions — keep logic in separate operation files, not inline +- Each controller becomes an RPC namespace (e.g., `rpc.tasks.createTask(...)` on the renderer) +- New domains need their controller added to `src/main/rpc.ts` + + +## Service Pattern + +For stateful concerns, use singleton classes: + +```ts +export class AppService { + private cache = new Map(); + + async initialize() { /* ... */ } + async doSomething(id: string) { /* ... 
*/ } +} + +export const appService = new AppService(); +``` + +**Rules:** +- Module-level singleton export +- Initialization method called from `src/main/index.ts` +- Services hold long-lived state (caches, subscriptions, connections) + +## Provider Pattern + +For domain logic with multiple backends (local vs SSH): + +``` +src/main/core/projects/ +├── project-provider.ts # Interface +├── impl/ +│ ├── local-project-provider.ts +│ └── _ssh-project-provider.ts # Prefixed with _ = not yet implemented +└── project-manager.ts # Orchestrates providers +``` + +Used in: projects, filesystem (`local-fs.ts` / `ssh-fs.ts`), terminals (`local-terminal-provider.ts` / `ssh-terminal-provider.ts`) + +## Result Type (`src/main/lib/result.ts`) + +Explicit error handling via discriminated union: + +```ts +import { ok, err, type Result } from '../lib/result'; + +async function doSomething(): Promise> { + if (problem) return err({ type: 'not_found' as const }); + return ok(data); +} +``` + +**Rules:** +- Prefer `Result` over thrown exceptions for expected failure modes +- Controllers convert Result types to IPC-compatible responses + +## Event System (`src/main/lib/events.ts`) + +Topic-based event emitter for main ↔ renderer communication: + +```ts +import { events } from '../lib/events'; + +// Emit to a specific topic (e.g., session ID) +events.emit(ptyDataChannel, buffer, sessionId); + +// Listen on a specific topic +const unsub = events.on(ptyDataChannel, (data) => {...}, sessionId); +``` + +Channel naming: without topic → `eventName`, with topic → `eventName.{topic}` + +Event type definitions live in `src/shared/events/`. diff --git a/agents/conventions/renderer-patterns.md b/agents/conventions/renderer-patterns.md new file mode 100644 index 000000000..96976cf1e --- /dev/null +++ b/agents/conventions/renderer-patterns.md @@ -0,0 +1,93 @@ +# Renderer Patterns + +## Modal System (`src/renderer/core/modal/`) + +All modals use a registry-based system. 
Only one modal can be active at a time. + +- `registry.ts` — central registry mapping modal IDs to components +- `modal-provider.tsx` — React context managing active modal state +- `modal-renderer.tsx` — renders the currently active modal + +**Adding a modal:** +1. Create the component accepting `BaseModalProps` (provides `onSuccess` and `onClose` callbacks) +2. Register it in `registry.ts` +3. Open it via the hook: + +```tsx +const { showModal } = useModalContext(); +showModal('myModal', { projectId: '123', onSuccess: (result) => {...} }); +``` + +**Rules:** +- All modals must be registered in `registry.ts` +- `showModal` is type-safe — TypeScript infers required args from the registry +- `hasActiveCloseGuard` prevents dismissal during critical operations + +## View System (`src/renderer/core/view/`) + +Views use a registry + parameterized navigation pattern. + +- `registry.ts` — view definitions with optional `WrapView`, `TitlebarSlot`, `MainPanel`, `RightPanel` +- `provider.tsx` — state management, navigation, param persistence +- `layout-provider.tsx` — panel collapse/expand/drag state + +**Key behaviors:** +- `navigate(viewId, params?)` is type-safe; params are optional when all fields are optional +- Params persist per-view (navigating away and back preserves params) +- Modal automatically closes on navigation +- `updateViewParams(viewId, partial)` updates params without re-navigating + +**Rules:** +- Views are singletons — one per ViewId +- MainPanel is required; RightPanel and WrapView are optional +- Add new views to `registry.ts` + +## PTY Frontend (`src/renderer/core/pty/`) + +Terminal sessions use a registry + pool pattern. 
+ +- `pty.ts` — `FrontendPty` class with `FrontendPtyRegistry` (module-level singleton, survives React unmounts) +- `pty-pool.ts` — `TerminalPool` managing up to 16 reusable xterm.js instances +- `use-pty.ts` — React hook integrating FrontendPty + TerminalPool +- `pty-session-context.tsx` — context for session registration +- `pty-pane.tsx` — terminal component (forwardRef) + +**Lifecycle:** register → attach → detach → unregister + +**Rules:** +- `registerSession()` must happen BEFORE RPC starts the PTY to avoid missing output +- `FrontendPty` buffers output (max 1 MB) when no xterm is attached, drains on `attach()` +- Terminal instances are never disposed — they're parked off-screen and reused from the pool +- `sessionId` format: `makePtySessionId(projectId, taskId, conversationId)` — deterministic +- Panel drag pauses resizing to avoid jank (`panelDragStore`) + +## React Query Context Pattern + +Context providers use React Query for data fetching with optimistic updates: + +```tsx +// Pattern used in AppSettingsProvider, ProjectProvider, etc. 
+const { data } = useQuery({ queryKey: ['resource'], queryFn: () => rpc.ns.get() }); +const mutation = useMutation({ + mutationFn: (args) => rpc.ns.update(args), + onMutate: async (args) => { + // optimistic update via queryClient.setQueryData + }, + onError: () => { + // rollback via queryClient.setQueryData with previous snapshot + }, +}); +``` + +**Rules:** +- Contexts combine React Query + local state, not standalone useState +- Use `useAppSettingsKey(key)` for fine-grained per-setting hooks +- Optimistic updates must include rollback on error + +## State Outside React + +For state that must survive React unmounts or be shared across unrelated components: + +- **`useSyncExternalStore`-compatible stores** — e.g., `panelDragStore` in `src/renderer/lib/` +- **Module-level singletons** — e.g., `FrontendPtyRegistry`, `TerminalPool` +- **Manager classes** — e.g., `PendingInjectionManager`, `TaskTerminalsStore` diff --git a/agents/conventions/typescript.md b/agents/conventions/typescript.md new file mode 100644 index 000000000..28b5eaa02 --- /dev/null +++ b/agents/conventions/typescript.md @@ -0,0 +1,23 @@ +# TypeScript And React Conventions + +## TypeScript + +- strict mode is enabled in `tsconfig.json` +- always use explicit types, do not use `any` +- prefer module imports at the top of the file, never use require() +- single `tsconfig.json` for all targets (main, preload, renderer, shared) + +## Renderer + +- functional React components and hooks +- context providers under `src/renderer/contexts/` +- hooks under `src/renderer/hooks/` +- client-side stores and helpers under `src/renderer/lib/` +- core infrastructure under `src/renderer/core/` (IPC client, modal management, view state) +- view-level components under `src/renderer/views/` + +## Naming + +- components: PascalCase +- hooks: `useX` camelCase or existing patterns like `use-toast.ts` +- tests: `*.test.ts` diff --git a/agents/integrations/mcp.md b/agents/integrations/mcp.md new file mode 100644 index 
000000000..04834e5c8 --- /dev/null +++ b/agents/integrations/mcp.md @@ -0,0 +1,26 @@ +# MCP + +## Main Files + +- `src/main/core/mcp/services/McpService.ts` +- `src/main/core/mcp/utils/` — adapters, catalog, config IO, config paths, conversion +- `src/main/core/mcp/controller.ts` +- `src/shared/mcp/` +- `src/renderer/components/mcp/` +- `src/renderer/views/mcp-view.tsx` + +## Current Behavior + +- MCP server configs are read, adapted, merged, and written across supported agent ecosystems +- provider-specific config formats are handled through adapters in `src/main/core/mcp/utils/` +- the renderer MCP UI manages installed servers and catalog entries + +## Important Constraint + +- Codex currently supports stdio MCP servers only + +## Rules + +- do not assume all providers support the same MCP transport types +- keep canonical MCP data in shared types and adapt at the edges +- if you add provider-specific MCP behavior, update both service and UI compatibility handling diff --git a/agents/integrations/providers.md b/agents/integrations/providers.md new file mode 100644 index 000000000..48c815197 --- /dev/null +++ b/agents/integrations/providers.md @@ -0,0 +1,40 @@ +# Providers + +## Source Of Truth + +- `src/shared/agent-provider-registry.ts` +- `src/main/core/dependencies/dependency-manager.ts` +- `src/main/core/pty/` + +## Current Providers (22) + +codex, claude, qwen, droid, gemini, cursor, copilot, amp, opencode, charm, auggie, goose, kimi, kilocode, kiro, rovo, cline, continue, codebuff, mistral, pi, autohand + +## Provider Metadata Includes + +- CLI and detection commands +- version args +- install command and docs URL +- auto-approve flags +- initial prompt handling +- keystroke injection behavior +- resume and session flags +- optional plan activation and auto-start commands + +## Agent Event Classifiers + +Each provider has a terminal output classifier in `src/main/core/conversations/impl/agent-event-classifiers/`. 
These parse agent terminal output to detect events (task completion, errors, etc.) and forward them to the renderer via `AgentEventService`. + +## Provider Runtime Notes + +- Claude uses deterministic `--session-id` values for conversation isolation. +- Agents with no CLI prompt flag (e.g., Amp, OpenCode) use keystroke injection — Emdash types the prompt into the TUI after startup. +- `src/main/services/AgentEventService.ts` forwards hook events to renderer windows and can show OS notifications. + +## Adding Or Changing A Provider + +1. update `src/shared/agent-provider-registry.ts` +2. update allowlisted agent env vars in `src/main/core/pty/pty-env.ts` if needed +3. add an agent event classifier in `src/main/core/conversations/impl/agent-event-classifiers/` +4. validate detection behavior in `src/main/core/dependencies/` +5. add or update tests for any non-standard behavior diff --git a/agents/quickstart.md b/agents/quickstart.md new file mode 100644 index 000000000..6823b4646 --- /dev/null +++ b/agents/quickstart.md @@ -0,0 +1,41 @@ +# Quickstart + +## Toolchain + +- Node: `24.14.0` from `.nvmrc` +- Package manager: `pnpm@10.28.2` +- Electron app root: this repo +- Docs app: `docs/` + +## Core Commands + +```bash +pnpm run d +pnpm run dev +pnpm run dev:main +pnpm run dev:renderer +pnpm run build +pnpm run rebuild +pnpm run reset +``` + +## Validation Commands + +```bash +pnpm run format +pnpm run lint +pnpm run type-check +pnpm test run +``` + +## Docs Commands + +```bash +pnpm run docs:build +``` + +## Important Notes + +- The docs app and the Electron renderer both default to port `3000`. +- After native dependency changes (`sqlite3`, `node-pty`, `keytar`), run `pnpm run rebuild`. +- Husky and lint-staged run formatting and linting on staged files during commit. 
diff --git a/agents/risky-areas/database.md b/agents/risky-areas/database.md new file mode 100644 index 000000000..738ee8b9f --- /dev/null +++ b/agents/risky-areas/database.md @@ -0,0 +1,20 @@ +# Risky Area: Database + +## Main Files + +- `src/main/db/schema.ts` +- `src/main/db/initialize.ts` +- `drizzle/` + +## Rules + +- never hand-edit numbered migrations +- never hand-edit `drizzle/meta/` +- use `pnpm exec drizzle-kit generate` for new migrations +- treat schema invariants and data migrations as high risk + +## Current Behavior + +- database path is resolved by main-process db path helpers +- `EMDASH_DB_FILE` overrides the default location +- database initialization happens in `src/main/db/initialize.ts` diff --git a/agents/risky-areas/pty.md b/agents/risky-areas/pty.md new file mode 100644 index 000000000..b257bbc87 --- /dev/null +++ b/agents/risky-areas/pty.md @@ -0,0 +1,24 @@ +# Risky Area: PTY And Sessions + +## Main Files + +- `src/main/core/pty/` — `local-pty.ts`, `ssh2-pty.ts`, `pty.ts`, `pty-env.ts`, `pty-session-registry.ts`, `spawn-utils.ts`, `exit-signals.ts`, `controller.ts` +- `src/main/core/terminals/` — terminal lifecycle, local and SSH terminal providers +- `src/main/core/conversations/impl/agent-event-classifiers/` — per-provider terminal output parsers +- `src/main/services/AgentEventService.ts` + +## Core Risks + +- PTY cleanup and exit handling +- resize behavior +- shell quoting and Windows command wrapping +- tmux lifecycle +- provider-specific resume/session behavior +- env passthrough safety + +## Rules + +- use the allowlisted env passthrough model in `src/main/core/pty/pty-env.ts` +- do not weaken quoting or spawn behavior casually +- validate both direct spawn and shell-wrapped spawn cases when changing PTY startup logic +- confirm renderer event flow if hook payload or notification behavior changes diff --git a/agents/risky-areas/ssh.md b/agents/risky-areas/ssh.md new file mode 100644 index 000000000..3fb8293b0 --- /dev/null +++ 
b/agents/risky-areas/ssh.md @@ -0,0 +1,16 @@ +# Risky Area: SSH And Shell Escaping + +## Main Files + +- `src/main/core/ssh/` — `ssh-connection-manager.ts`, `ssh-credential-service.ts`, `ssh-client-proxy.ts`, `sshConfigParser.ts`, `build-connect-config.ts`, `controller.ts` +- `src/main/core/fs/impl/ssh-fs.ts` +- `src/main/core/pty/ssh2-pty.ts` +- `src/main/core/terminals/impl/ssh-terminal-provider.ts` +- `src/main/utils/shellEscape.ts` + +## Rules + +- treat remote shell construction as security-sensitive +- use shared escaping and validation helpers +- do not bypass path-safety or shell validation helpers +- verify how a change affects both connection setup and command execution diff --git a/agents/risky-areas/updater.md b/agents/risky-areas/updater.md new file mode 100644 index 000000000..7830a7f26 --- /dev/null +++ b/agents/risky-areas/updater.md @@ -0,0 +1,23 @@ +# Risky Area: Updater And Packaging + +## Main Files + +- `src/main/core/updates/AutoUpdateService.ts` +- `src/main/core/updates/controller.ts` +- `build/` +- `package.json` +- `.github/workflows/release.yml` +- `.github/workflows/windows-beta-build.yml` +- `.github/workflows/nix-build.yml` + +## Rules + +- avoid changing updater defaults casually +- treat signing, notarization, packaging targets, and native rebuild flow as release-critical +- keep build output directories and packaging config stable unless the task is explicitly about release behavior + +## Current Notes + +- macOS and Linux release jobs rebuild native modules for the target Electron version +- Windows beta builds intentionally use Node 20 in CI for native module stability +- changelog and auto-update behavior are separate but related surfaces in the app diff --git a/agents/workflows/remote-development.md b/agents/workflows/remote-development.md new file mode 100644 index 000000000..4ac29de06 --- /dev/null +++ b/agents/workflows/remote-development.md @@ -0,0 +1,26 @@ +# Remote Development + +## Main Files + +- `src/main/core/ssh/` — 
connection management, credentials, config parsing +- `src/main/core/pty/ssh2-pty.ts` +- `src/main/core/fs/impl/ssh-fs.ts` +- `src/main/core/terminals/impl/ssh-terminal-provider.ts` +- `src/main/utils/shellEscape.ts` + +## Current Model + +- remote projects are backed by SSH connections +- remote worktrees live under `/.emdash/worktrees//` +- remote PTYs stream agent shells back to the renderer + +## Authentication And Storage + +- SSH credentials are managed through the SSH services and OS-backed secret storage +- host key handling is implemented under `src/main/core/ssh/` + +## Rules + +- treat all shell construction as security-sensitive +- use shared SSH and shell-escaping helpers instead of ad hoc quoting +- confirm whether a feature is local-only before assuming parity on remote projects diff --git a/agents/workflows/testing.md b/agents/workflows/testing.md new file mode 100644 index 000000000..afaf488e8 --- /dev/null +++ b/agents/workflows/testing.md @@ -0,0 +1,41 @@ +# Testing And Validation + +## Core Local Gate + +Run these before merging: + +```bash +pnpm run format +pnpm run lint +pnpm run type-check +pnpm run test +``` + +## Test Layout + +- main-process tests: colocated in `src/main/core/**/*.test.ts` +- renderer unit tests: `src/renderer/tests/` +- renderer browser tests: `src/renderer/tests/browser/` (run via Playwright) + +## Current Setup + +- Vitest config is in `vitest.config.ts` (separate from the build config in `electron.vite.config.ts`). +- Two test projects: + - `node` — all `src/**/*.test.ts` files excluding `_*` dirs and browser tests + - `browser` — `src/renderer/tests/browser/**/*.test.{ts,tsx}` via `@vitest/browser-playwright` +- Tests use per-file `vi.mock()` setup. +- Integration-style tests create temporary repos and worktrees in `os.tmpdir()`. 
+ +## CI Notes + +- `.github/workflows/code-consistency-check.yml` currently enforces: + - `pnpm run format:check` + - `pnpm run type-check` + - `pnpm exec vitest run` +- Lint is still expected locally even though it is not enabled in that workflow yet. + +## Focused Validation + +- after IPC/RPC changes: rerun the affected Vitest file and confirm the controller is wired in `src/main/rpc.ts` +- after worktree or PTY changes: rerun the closest `src/main/core/` test files +- after docs changes: run `pnpm run docs:build` diff --git a/agents/workflows/worktrees.md b/agents/workflows/worktrees.md new file mode 100644 index 000000000..e800eb229 --- /dev/null +++ b/agents/workflows/worktrees.md @@ -0,0 +1,33 @@ +# Worktrees + +## Main Files + +- `src/main/core/projects/worktrees/worktree-service.ts` +- `src/main/core/projects/project-manager.ts` +- `src/main/core/terminals/runLifecycleScript.ts` +- `.emdash.json` + +## Current Behavior + +- task worktrees are created under `../worktrees/` +- branch prefix defaults to `emdash` and is configurable in app settings +- selected gitignored files are preserved into worktrees +- worktree creation is managed by the project provider pattern + +## `.emdash.json` + +Current supported keys: + +- `preservePatterns` +- `scripts.setup` +- `scripts.run` +- `scripts.teardown` +- `shellSetup` +- `tmux` + +## Rules + +- do not hardcode worktree paths; use service helpers +- use lifecycle config for repo-specific bootstrap and teardown behavior +- `shellSetup` runs inside each PTY before the interactive shell starts +- tmux wrapping is project-configurable and affects PTY lifecycle behavior diff --git a/components.json b/components.json index 0d76b71de..b0732a2d7 100644 --- a/components.json +++ b/components.json @@ -1,10 +1,10 @@ { "$schema": "https://ui.shadcn.com/schema.json", - "style": "new-york", + "style": "base-vega", "rsc": false, "tsx": true, "tailwind": { - "config": "tailwind.config.js", + "config": "", "css": 
"src/renderer/index.css", "baseColor": "neutral", "cssVariables": true, @@ -12,11 +12,11 @@ }, "iconLibrary": "lucide", "aliases": { - "components": "@/components", - "utils": "@/lib/utils", - "ui": "@/components/ui", - "lib": "@/lib", - "hooks": "@/hooks" + "components": "@renderer/components", + "utils": "@renderer/lib/utils", + "ui": "@renderer/components/ui", + "lib": "@renderer/lib", + "hooks": "@renderer/hooks" }, "registries": { "@ai-elements": "https://registry.ai-sdk.dev/{name}.json" diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 000000000..a16ff31ce --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,50 @@ +# ────────────────────────────────────────────────────────────────────────────── +# Emdash SSH Dev Container — docker-compose +# +# Usage: +# cp .env.example .env # fill in your API keys +# docker compose up --build -d +# +# Connect from emdash: +# host: localhost port: 2222 user: devuser auth: password pass: devpass +# ────────────────────────────────────────────────────────────────────────────── +services: + ssh-dev: + build: + context: ./docker-ssh + dockerfile: dockerfile + container_name: emdash-ssh-dev + ports: + - "2222:22" + environment: + # API keys forwarded into SSH sessions via ~/.ssh/environment. + # Define these in a .env file next to this compose file (gitignored). 
+ - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-} + - OPENAI_API_KEY=${OPENAI_API_KEY:-} + - GH_TOKEN=${GH_TOKEN:-} + - GITHUB_TOKEN=${GITHUB_TOKEN:-} + - GEMINI_API_KEY=${GEMINI_API_KEY:-} + - GOOGLE_API_KEY=${GOOGLE_API_KEY:-} + - AMP_API_KEY=${AMP_API_KEY:-} + - DASHSCOPE_API_KEY=${DASHSCOPE_API_KEY:-} + - KIMI_API_KEY=${KIMI_API_KEY:-} + - MISTRAL_API_KEY=${MISTRAL_API_KEY:-} + - CODEBUFF_API_KEY=${CODEBUFF_API_KEY:-} + - FACTORY_API_KEY=${FACTORY_API_KEY:-} + - CURSOR_API_KEY=${CURSOR_API_KEY:-} + volumes: + # ── Project mounts ─────────────────────────────────────────────────── + # Option A: mount a real local git repo from your Mac (recommended): + # - /Users/yourname/code/myrepo:/home/devuser/projects/myrepo + # + # Option B: use a named volume for a self-contained sandbox: + - projects:/home/devuser/projects + restart: unless-stopped + # Needed for tmux PTY allocation over SSH + tty: true + stdin_open: true + shm_size: '256mb' + +volumes: + projects: + driver: local \ No newline at end of file diff --git a/docker-ssh/dockerfile b/docker-ssh/dockerfile new file mode 100644 index 000000000..05e8fa9f7 --- /dev/null +++ b/docker-ssh/dockerfile @@ -0,0 +1,128 @@ +# ────────────────────────────────────────────────────────────────────────────── +# Emdash SSH Dev Container +# +# A self-contained Linux environment you can SSH into from emdash's SSH remote +# development feature. Includes git, gh, tmux, Node.js, and Claude Code out +# of the box. Other agents (codex, gemini, etc.) can be installed on top. 
+# +# Usage: +# docker compose up --build -d +# Then add an SSH connection in emdash: +# host: localhost port: 2222 user: devuser auth: password pass: devpass +# ────────────────────────────────────────────────────────────────────────────── +FROM ubuntu:24.04 + +# Avoid interactive prompts during package install +ENV DEBIAN_FRONTEND=noninteractive + +# ── System packages ───────────────────────────────────────────────────────── +RUN apt-get update && apt-get install -y --no-install-recommends \ + # SSH server + openssh-server \ + # Core tools + git \ + curl \ + wget \ + tmux \ + sudo \ + ca-certificates \ + gnupg \ + # Shell utilities + bash \ + zsh \ + vim \ + nano \ + less \ + # Build tools (needed by some npm native modules) + build-essential \ + python3 \ + # Process utilities + procps \ + && rm -rf /var/lib/apt/lists/* + +# ── GitHub CLI (gh) ───────────────────────────────────────────────────────── +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + | gpg --dearmor -o /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \ + > /etc/apt/sources.list.d/github-cli.list \ + && apt-get update \ + && apt-get install -y gh \ + && rm -rf /var/lib/apt/lists/* + +# ── Node.js 22 LTS (via NodeSource) ───────────────────────────────────────── +RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \ + && apt-get install -y nodejs \ + && rm -rf /var/lib/apt/lists/* + +# ── Global npm config: faster installs, no fund/audit noise ───────────────── +RUN npm config set fund false && npm config set audit false + +# ── Claude Code ───────────────────────────────────────────────────────────── +# Primary agent. Installed globally so it's on PATH for all users. 
+RUN npm install -g @anthropic-ai/claude-code + +# ── Optional: additional agents (uncomment as needed) ─────────────────────── +# RUN npm install -g @openai/codex # OpenAI Codex CLI +# RUN npm install -g @google/gemini-cli # Gemini CLI +# RUN npm install -g opencode # OpenCode + +# ── Dev user ───────────────────────────────────────────────────────────────── +# 'devuser' is the account emdash will SSH into. +# Password is 'devpass' — change or replace with key auth for security. +RUN useradd -m -s /bin/bash devuser \ + && echo 'devuser:devpass' | chpasswd \ + && usermod -aG sudo devuser \ + && echo 'devuser ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers.d/devuser + +# ── SSH server config ──────────────────────────────────────────────────────── +# Generate host keys and configure sshd for interactive use. +RUN mkdir -p /var/run/sshd \ + && ssh-keygen -A + +# Allow password auth and keep SSH sessions alive. +# PermitUserEnvironment lets ~/.ssh/environment set API keys on login. +RUN sed -i 's/#PasswordAuthentication yes/PasswordAuthentication yes/' /etc/ssh/sshd_config \ + && sed -i 's/PasswordAuthentication no/PasswordAuthentication yes/' /etc/ssh/sshd_config \ + && echo 'PermitUserEnvironment yes' >> /etc/ssh/sshd_config \ + && echo 'ClientAliveInterval 60' >> /etc/ssh/sshd_config \ + && echo 'ClientAliveCountMax 10' >> /etc/ssh/sshd_config \ + && echo 'X11Forwarding no' >> /etc/ssh/sshd_config + +# ── Git global defaults for devuser ───────────────────────────────────────── +USER devuser +WORKDIR /home/devuser + +RUN git config --global user.email "devuser@emdash-dev" \ + && git config --global user.name "Emdash Dev" \ + && git config --global init.defaultBranch main \ + # Required: allow git operations in worktrees under /home/devuser + && git config --global safe.directory '*' + +# ── Projects directory ─────────────────────────────────────────────────────── +# Mount your git repos here, e.g.: +# volumes: +# - /path/to/my/project:/home/devuser/projects/myproject +RUN 
mkdir -p /home/devuser/projects + +# ── tmux default config: no status bar clutter ────────────────────────────── +RUN echo 'set -g default-terminal "xterm-256color"' > /home/devuser/.tmux.conf \ + && echo 'set -g history-limit 50000' >> /home/devuser/.tmux.conf \ + && echo 'set-option -g allow-rename on' >> /home/devuser/.tmux.conf + +# ── Optional: SSH authorized_keys ─────────────────────────────────────────── +# Uncomment and mount your public key for key-based auth: +# COPY --chown=devuser:devuser authorized_keys /home/devuser/.ssh/authorized_keys +# RUN chmod 700 /home/devuser/.ssh && chmod 600 /home/devuser/.ssh/authorized_keys +RUN mkdir -p /home/devuser/.ssh && chmod 700 /home/devuser/.ssh + +# ── Startup ────────────────────────────────────────────────────────────────── +# sshd must run as root; use an entrypoint script to inject env vars +# from docker-compose into the user's environment file, then start sshd. +USER root + +COPY --chmod=755 entrypoint.sh /entrypoint.sh + +EXPOSE 22 + +ENTRYPOINT ["/entrypoint.sh"] \ No newline at end of file diff --git a/docker-ssh/entrypoint.sh b/docker-ssh/entrypoint.sh new file mode 100644 index 000000000..a5a53c665 --- /dev/null +++ b/docker-ssh/entrypoint.sh @@ -0,0 +1,35 @@ +#!/bin/bash +set -e + +# Write any ANTHROPIC_API_KEY / GH_TOKEN / etc. passed as container env vars +# into devuser's ~/.ssh/environment so they're available in SSH sessions +# (requires PermitUserEnvironment yes in sshd_config). 
+ENV_FILE=/home/devuser/.ssh/environment +: > "$ENV_FILE" +chown devuser:devuser "$ENV_FILE" +chmod 600 "$ENV_FILE" + +AGENT_VARS=( + ANTHROPIC_API_KEY + OPENAI_API_KEY + GH_TOKEN + GITHUB_TOKEN + GEMINI_API_KEY + GOOGLE_API_KEY + AMP_API_KEY + DASHSCOPE_API_KEY + KIMI_API_KEY + MISTRAL_API_KEY + CODEBUFF_API_KEY + FACTORY_API_KEY + CURSOR_API_KEY +) + +for var in "${AGENT_VARS[@]}"; do + val="${!var:-}" + if [ -n "$val" ]; then + echo "${var}=${val}" >> "$ENV_FILE" + fi +done + +exec /usr/sbin/sshd -D -e \ No newline at end of file diff --git a/docs/app/[[...slug]]/layout.tsx b/docs/app/[[...slug]]/layout.tsx index b70a3b456..db4f0cf25 100644 --- a/docs/app/[[...slug]]/layout.tsx +++ b/docs/app/[[...slug]]/layout.tsx @@ -1,7 +1,7 @@ -import { source } from '@/lib/source'; import { DocsLayout } from 'fumadocs-ui/layouts/docs'; import type { ReactNode } from 'react'; import { baseOptions } from '@/lib/layout.shared'; +import { source } from '@/lib/source'; export default function Layout({ children }: { children: ReactNode }) { return ( diff --git a/docs/app/[[...slug]]/page.tsx b/docs/app/[[...slug]]/page.tsx index fea5b88f5..af4996ace 100644 --- a/docs/app/[[...slug]]/page.tsx +++ b/docs/app/[[...slug]]/page.tsx @@ -1,11 +1,11 @@ -import { source } from '@/lib/source'; -import { DocsPage, DocsBody, DocsDescription, DocsTitle } from 'fumadocs-ui/page'; -import { notFound } from 'next/navigation'; +import { getGithubLastEdit } from 'fumadocs-core/content/github'; import defaultMdxComponents from 'fumadocs-ui/mdx'; +import { DocsBody, DocsDescription, DocsPage, DocsTitle } from 'fumadocs-ui/page'; import type { Metadata } from 'next'; +import { notFound } from 'next/navigation'; import { CopyMarkdownButton } from '@/components/CopyMarkdownButton'; import { LastUpdated } from '@/components/LastUpdated'; -import { getGithubLastEdit } from 'fumadocs-core/content/github'; +import { source } from '@/lib/source'; async function getLastModifiedFromGitHub(filePath: string): 
Promise { if (process.env.NODE_ENV === 'development') { diff --git a/docs/app/api/search/route.ts b/docs/app/api/search/route.ts index b02d7cd3a..97cf05821 100644 --- a/docs/app/api/search/route.ts +++ b/docs/app/api/search/route.ts @@ -1,5 +1,5 @@ -import { source } from '@/lib/source'; import { createFromSource } from 'fumadocs-core/search/server'; +import { source } from '@/lib/source'; export const { GET } = createFromSource(source, { language: 'english', diff --git a/docs/lib/layout.shared.tsx b/docs/lib/layout.shared.tsx index 89bcc593d..40fb73b9b 100644 --- a/docs/lib/layout.shared.tsx +++ b/docs/lib/layout.shared.tsx @@ -1,5 +1,5 @@ -import Image from 'next/image'; import type { BaseLayoutProps } from 'fumadocs-ui/layouts/shared'; +import Image from 'next/image'; function Logo() { return ( diff --git a/docs/lib/source.ts b/docs/lib/source.ts index b4d70baec..3668dc1d1 100644 --- a/docs/lib/source.ts +++ b/docs/lib/source.ts @@ -1,6 +1,6 @@ import { loader } from 'fumadocs-core/source'; -import * as icons from 'lucide-static'; import { docs } from 'fumadocs-mdx:collections/server'; +import * as icons from 'lucide-static'; export const source = loader({ source: docs.toFumadocsSource(), diff --git a/drizzle.config.ts b/drizzle.config.ts index c103c130d..c7a40cae4 100644 --- a/drizzle.config.ts +++ b/drizzle.config.ts @@ -1,27 +1,13 @@ +import { resolve } from 'node:path'; import { defineConfig } from 'drizzle-kit'; -import { join } from 'path'; -import { homedir } from 'os'; +import { defaultDbFilePath, resolveDefaultUserDataPath } from './src/main/db/default-path'; -function resolveDefaultDbFile() { - const explicit = process.env.EMDASH_DB_FILE; - if (explicit && explicit.length > 0) { - return explicit; +function resolveDbUrl(): string { + const explicit = process.env.EMDASH_DB_FILE?.trim(); + if (explicit) { + return resolve(explicit); } - - const home = process.env.HOME ?? 
homedir(); - const platform = process.platform; - - if (platform === 'darwin') { - return join(home, 'Library', 'Application Support', 'emdash', 'emdash.db'); - } - - if (platform === 'win32') { - const appData = process.env.APPDATA ?? join(home, 'AppData', 'Roaming'); - return join(appData, 'emdash', 'emdash.db'); - } - - const xdgData = process.env.XDG_DATA_HOME ?? join(home, '.local', 'share'); - return join(xdgData, 'emdash', 'emdash.db'); + return defaultDbFilePath(resolveDefaultUserDataPath()); } export default defineConfig({ @@ -29,7 +15,7 @@ export default defineConfig({ out: './drizzle', dialect: 'sqlite', dbCredentials: { - url: resolveDefaultDbFile(), + url: resolveDbUrl(), }, strict: true, verbose: true, diff --git a/drizzle/0000_initial.sql b/drizzle/0000_initial.sql deleted file mode 100644 index 461359314..000000000 --- a/drizzle/0000_initial.sql +++ /dev/null @@ -1,50 +0,0 @@ -CREATE TABLE `conversations` ( - `id` text PRIMARY KEY NOT NULL, - `workspace_id` text NOT NULL, - `title` text NOT NULL, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY (`workspace_id`) REFERENCES `workspaces`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -CREATE INDEX `idx_conversations_workspace_id` ON `conversations` (`workspace_id`);--> statement-breakpoint -CREATE TABLE `messages` ( - `id` text PRIMARY KEY NOT NULL, - `conversation_id` text NOT NULL, - `content` text NOT NULL, - `sender` text NOT NULL, - `timestamp` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `metadata` text, - FOREIGN KEY (`conversation_id`) REFERENCES `conversations`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -CREATE INDEX `idx_messages_conversation_id` ON `messages` (`conversation_id`);--> statement-breakpoint -CREATE INDEX `idx_messages_timestamp` ON `messages` (`timestamp`);--> statement-breakpoint -CREATE TABLE `projects` ( - `id` text PRIMARY KEY NOT NULL, - 
`name` text NOT NULL, - `path` text NOT NULL, - `git_remote` text, - `git_branch` text, - `github_repository` text, - `github_connected` integer DEFAULT 0 NOT NULL, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL -); ---> statement-breakpoint -CREATE UNIQUE INDEX `idx_projects_path` ON `projects` (`path`);--> statement-breakpoint -CREATE TABLE `workspaces` ( - `id` text PRIMARY KEY NOT NULL, - `project_id` text NOT NULL, - `name` text NOT NULL, - `branch` text NOT NULL, - `path` text NOT NULL, - `status` text DEFAULT 'idle' NOT NULL, - `agent_id` text, - `metadata` text, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -CREATE INDEX `idx_workspaces_project_id` ON `workspaces` (`project_id`); \ No newline at end of file diff --git a/drizzle/0000_mean_wither.sql b/drizzle/0000_mean_wither.sql new file mode 100644 index 000000000..0203d1e66 --- /dev/null +++ b/drizzle/0000_mean_wither.sql @@ -0,0 +1,150 @@ +CREATE TABLE `app_settings` ( + `key` text PRIMARY KEY NOT NULL, + `value` text NOT NULL, + `updated_at` integer DEFAULT CURRENT_TIMESTAMP NOT NULL +); +--> statement-breakpoint +CREATE TABLE `conversations` ( + `id` text PRIMARY KEY NOT NULL, + `project_id` text NOT NULL, + `task_id` text NOT NULL, + `title` text NOT NULL, + `provider` text, + `config` text, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `editor_buffers` ( + `id` text PRIMARY KEY NOT NULL, + `project_id` text NOT NULL, + `task_id` text NOT NULL, + `file_path` 
text NOT NULL, + `content` text NOT NULL, + `updated_at` integer NOT NULL, + FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `kv` ( + `key` text PRIMARY KEY NOT NULL, + `value` text NOT NULL, + `updated_at` integer DEFAULT CURRENT_TIMESTAMP NOT NULL +); +--> statement-breakpoint +CREATE TABLE `line_comments` ( + `id` text PRIMARY KEY NOT NULL, + `task_id` text NOT NULL, + `file_path` text NOT NULL, + `line_number` integer NOT NULL, + `line_content` text, + `content` text NOT NULL, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `sent_at` text, + FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `messages` ( + `id` text PRIMARY KEY NOT NULL, + `conversation_id` text NOT NULL, + `content` text NOT NULL, + `sender` text NOT NULL, + `timestamp` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `metadata` text, + FOREIGN KEY (`conversation_id`) REFERENCES `conversations`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `projects` ( + `id` text PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `path` text NOT NULL, + `workspace_provider` text DEFAULT 'local' NOT NULL, + `base_ref` text, + `git_remote` text, + `ssh_connection_id` text, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + FOREIGN KEY (`ssh_connection_id`) REFERENCES `ssh_connections`(`id`) ON UPDATE no action ON DELETE set null +); +--> statement-breakpoint +CREATE TABLE `pull_requests` ( + `id` text PRIMARY KEY NOT NULL, + `provider` text DEFAULT 'github' NOT NULL, + `name_with_owner` text DEFAULT '' NOT NULL, + `url` text NOT NULL, + `title` text NOT NULL, + `status` text DEFAULT 'open' NOT 
NULL, + `author` text, + `is_draft` integer, + `metadata` text, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `fetched_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL +); +--> statement-breakpoint +CREATE TABLE `ssh_connections` ( + `id` text PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `host` text NOT NULL, + `port` integer DEFAULT 22 NOT NULL, + `username` text NOT NULL, + `auth_type` text DEFAULT 'agent' NOT NULL, + `private_key_path` text, + `use_agent` integer DEFAULT 0 NOT NULL, + `metadata` text, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL +); +--> statement-breakpoint +CREATE TABLE `tasks` ( + `id` text PRIMARY KEY NOT NULL, + `project_id` text NOT NULL, + `name` text NOT NULL, + `status` text NOT NULL, + `source_branch` text NOT NULL, + `task_branch` text, + `linked_issue` text, + `archived_at` text, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `tasks_pull_requests` ( + `task_id` text NOT NULL, + `pull_request_url` text NOT NULL, + PRIMARY KEY(`task_id`, `pull_request_url`), + FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`pull_request_url`) REFERENCES `pull_requests`(`url`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE TABLE `terminals` ( + `id` text PRIMARY KEY NOT NULL, + `project_id` text NOT NULL, + `task_id` text NOT NULL, + `ssh` integer DEFAULT 0 NOT NULL, + `name` text NOT NULL, + `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, + FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade, + FOREIGN KEY (`task_id`) REFERENCES 
`tasks`(`id`) ON UPDATE no action ON DELETE cascade +); +--> statement-breakpoint +CREATE UNIQUE INDEX `idx_app_settings_key` ON `app_settings` (`key`);--> statement-breakpoint +CREATE INDEX `idx_conversations_task_id` ON `conversations` (`task_id`);--> statement-breakpoint +CREATE INDEX `idx_editor_buffers_task_file` ON `editor_buffers` (`task_id`,`file_path`);--> statement-breakpoint +CREATE UNIQUE INDEX `idx_kv_key` ON `kv` (`key`);--> statement-breakpoint +CREATE INDEX `idx_line_comments_task_file` ON `line_comments` (`task_id`,`file_path`);--> statement-breakpoint +CREATE INDEX `idx_messages_conversation_id` ON `messages` (`conversation_id`);--> statement-breakpoint +CREATE INDEX `idx_messages_timestamp` ON `messages` (`timestamp`);--> statement-breakpoint +CREATE UNIQUE INDEX `idx_projects_path` ON `projects` (`path`);--> statement-breakpoint +CREATE INDEX `idx_projects_ssh_connection_id` ON `projects` (`ssh_connection_id`);--> statement-breakpoint +CREATE UNIQUE INDEX `idx_pull_requests_url` ON `pull_requests` (`url`);--> statement-breakpoint +CREATE INDEX `idx_pull_requests_name_with_owner` ON `pull_requests` (`name_with_owner`);--> statement-breakpoint +CREATE UNIQUE INDEX `idx_ssh_connections_name` ON `ssh_connections` (`name`);--> statement-breakpoint +CREATE INDEX `idx_ssh_connections_host` ON `ssh_connections` (`host`);--> statement-breakpoint +CREATE INDEX `idx_tasks_project_id` ON `tasks` (`project_id`);--> statement-breakpoint +CREATE INDEX `idx_terminals_task_id` ON `terminals` (`task_id`); \ No newline at end of file diff --git a/drizzle/0001_add_base_ref_to_projects.sql b/drizzle/0001_add_base_ref_to_projects.sql deleted file mode 100644 index 463896901..000000000 --- a/drizzle/0001_add_base_ref_to_projects.sql +++ /dev/null @@ -1,11 +0,0 @@ -ALTER TABLE `projects` ADD COLUMN `base_ref` text; ---> statement-breakpoint -UPDATE `projects` -SET `base_ref` = CASE - WHEN git_branch IS NOT NULL AND length(trim(git_branch)) > 0 THEN - CASE - WHEN 
instr(git_branch, '/') > 0 THEN trim(git_branch) - ELSE printf('%s/%s', COALESCE(NULLIF(trim(git_remote), ''), 'origin'), trim(git_branch)) - END - ELSE NULL -END; diff --git a/drizzle/0002_lyrical_impossible_man.sql b/drizzle/0002_lyrical_impossible_man.sql deleted file mode 100644 index d37950258..000000000 --- a/drizzle/0002_lyrical_impossible_man.sql +++ /dev/null @@ -1,34 +0,0 @@ -ALTER TABLE `workspaces` RENAME TO `tasks`;--> statement-breakpoint -PRAGMA foreign_keys=OFF;--> statement-breakpoint -CREATE TABLE `__new_tasks` ( - `id` text PRIMARY KEY NOT NULL, - `project_id` text NOT NULL, - `name` text NOT NULL, - `branch` text NOT NULL, - `path` text NOT NULL, - `status` text DEFAULT 'idle' NOT NULL, - `agent_id` text, - `metadata` text, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY (`project_id`) REFERENCES `projects`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -INSERT INTO `__new_tasks`("id", "project_id", "name", "branch", "path", "status", "agent_id", "metadata", "created_at", "updated_at") SELECT "id", "project_id", "name", "branch", "path", "status", "agent_id", "metadata", "created_at", "updated_at" FROM `tasks`;--> statement-breakpoint -DROP TABLE `tasks`;--> statement-breakpoint -ALTER TABLE `__new_tasks` RENAME TO `tasks`;--> statement-breakpoint -PRAGMA foreign_keys=ON;--> statement-breakpoint -CREATE INDEX `idx_tasks_project_id` ON `tasks` (`project_id`);--> statement-breakpoint -CREATE TABLE `__new_conversations` ( - `id` text PRIMARY KEY NOT NULL, - `task_id` text NOT NULL, - `title` text NOT NULL, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -INSERT INTO `__new_conversations`("id", "task_id", "title", "created_at", "updated_at") SELECT "id", 
"workspace_id", "title", "created_at", "updated_at" FROM `conversations`;--> statement-breakpoint -DROP TABLE `conversations`;--> statement-breakpoint -ALTER TABLE `__new_conversations` RENAME TO `conversations`;--> statement-breakpoint -CREATE INDEX `idx_conversations_task_id` ON `conversations` (`task_id`); \ No newline at end of file diff --git a/drizzle/0003_add_run_config_status_to_projects.sql b/drizzle/0003_add_run_config_status_to_projects.sql deleted file mode 100644 index 206054283..000000000 --- a/drizzle/0003_add_run_config_status_to_projects.sql +++ /dev/null @@ -1,8 +0,0 @@ -ALTER TABLE `projects` ADD COLUMN `run_config_status` text; ---> statement-breakpoint -ALTER TABLE `projects` ADD COLUMN `run_config_error` text; ---> statement-breakpoint -ALTER TABLE `projects` ADD COLUMN `run_config_provider` text; ---> statement-breakpoint -ALTER TABLE `projects` ADD COLUMN `run_config_updated_at` text; - diff --git a/drizzle/0004_add_line_comments.sql b/drizzle/0004_add_line_comments.sql deleted file mode 100644 index ab3800919..000000000 --- a/drizzle/0004_add_line_comments.sql +++ /dev/null @@ -1,14 +0,0 @@ -CREATE TABLE `line_comments` ( - `id` text PRIMARY KEY NOT NULL, - `task_id` text NOT NULL, - `file_path` text NOT NULL, - `line_number` integer NOT NULL, - `line_content` text, - `content` text NOT NULL, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `sent_at` text, - FOREIGN KEY (`task_id`) REFERENCES `tasks`(`id`) ON UPDATE no action ON DELETE cascade -); ---> statement-breakpoint -CREATE INDEX `idx_line_comments_task_file` ON `line_comments` (`task_id`, `file_path`); diff --git a/drizzle/0005_add_use_worktree_to_tasks.sql b/drizzle/0005_add_use_worktree_to_tasks.sql deleted file mode 100644 index 0c0ef463d..000000000 --- a/drizzle/0005_add_use_worktree_to_tasks.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE `tasks` ADD COLUMN `use_worktree` integer DEFAULT 1 NOT NULL; ---> 
statement-breakpoint -UPDATE `tasks` SET `use_worktree` = 1 WHERE `use_worktree` IS NULL; diff --git a/drizzle/0006_add_multi_chat_support.sql b/drizzle/0006_add_multi_chat_support.sql deleted file mode 100644 index f6030ced8..000000000 --- a/drizzle/0006_add_multi_chat_support.sql +++ /dev/null @@ -1,11 +0,0 @@ --- Add multi-chat support fields to conversations table -ALTER TABLE conversations ADD COLUMN provider TEXT; -ALTER TABLE conversations ADD COLUMN is_active INTEGER DEFAULT 0; -ALTER TABLE conversations ADD COLUMN display_order INTEGER DEFAULT 0; -ALTER TABLE conversations ADD COLUMN metadata TEXT; - --- Update existing conversations to be active (first chat in each task) -UPDATE conversations SET is_active = 1, display_order = 0 WHERE is_active IS NULL; - --- Create index for quick active conversation lookup -CREATE INDEX IF NOT EXISTS idx_conversations_active ON conversations (task_id, is_active); \ No newline at end of file diff --git a/drizzle/0007_add_is_main_to_conversations.sql b/drizzle/0007_add_is_main_to_conversations.sql deleted file mode 100644 index cfd00f951..000000000 --- a/drizzle/0007_add_is_main_to_conversations.sql +++ /dev/null @@ -1,13 +0,0 @@ --- Add is_main column to conversations table -ALTER TABLE conversations ADD COLUMN is_main INTEGER NOT NULL DEFAULT 0; - --- Mark first conversation of each task as main (for backward compatibility) -UPDATE conversations -SET is_main = 1 -WHERE id IN ( - SELECT id FROM ( - SELECT id, ROW_NUMBER() OVER (PARTITION BY task_id ORDER BY created_at ASC) as rn - FROM conversations - ) t - WHERE rn = 1 -); \ No newline at end of file diff --git a/drizzle/0008_add_archived_at_to_tasks.sql b/drizzle/0008_add_archived_at_to_tasks.sql deleted file mode 100644 index d0e1ee173..000000000 --- a/drizzle/0008_add_archived_at_to_tasks.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE `tasks` ADD COLUMN `archived_at` text; diff --git a/drizzle/0009_add_ssh_support.sql b/drizzle/0009_add_ssh_support.sql deleted file 
mode 100644 index d8a1906b2..000000000 --- a/drizzle/0009_add_ssh_support.sql +++ /dev/null @@ -1,26 +0,0 @@ --- Create ssh_connections table -CREATE TABLE `ssh_connections` ( - `id` text PRIMARY KEY NOT NULL, - `name` text NOT NULL, - `host` text NOT NULL, - `port` integer DEFAULT 22 NOT NULL, - `username` text NOT NULL, - `auth_type` text DEFAULT 'agent' NOT NULL, - `private_key_path` text, - `use_agent` integer DEFAULT 0 NOT NULL, - `created_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL, - `updated_at` text DEFAULT CURRENT_TIMESTAMP NOT NULL -); - --- Add indexes for ssh_connections -CREATE UNIQUE INDEX `idx_ssh_connections_name` ON `ssh_connections` (`name`); -CREATE INDEX `idx_ssh_connections_host` ON `ssh_connections` (`host`); - --- Add columns to projects table for SSH support -ALTER TABLE `projects` ADD COLUMN `ssh_connection_id` text REFERENCES ssh_connections(id) ON DELETE SET NULL; -ALTER TABLE `projects` ADD COLUMN `is_remote` integer DEFAULT 0 NOT NULL; -ALTER TABLE `projects` ADD COLUMN `remote_path` text; - --- Add indexes for projects -CREATE INDEX `idx_projects_ssh_connection_id` ON `projects` (`ssh_connection_id`); -CREATE INDEX `idx_projects_is_remote` ON `projects` (`is_remote`); diff --git a/drizzle/meta/0000_snapshot.json b/drizzle/meta/0000_snapshot.json index c56f856ab..531ec7521 100644 --- a/drizzle/meta/0000_snapshot.json +++ b/drizzle/meta/0000_snapshot.json @@ -1,9 +1,46 @@ { "version": "6", "dialect": "sqlite", - "id": "b932945e-f26a-4c07-9c63-08179d95d5bc", + "id": "77f94ade-90d8-472e-8df1-20e888388354", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { + "app_settings": { + "name": "app_settings", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": 
false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": { + "idx_app_settings_key": { + "name": "idx_app_settings_key", + "columns": ["key"], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, "conversations": { "name": "conversations", "columns": { @@ -14,8 +51,15 @@ "notNull": true, "autoincrement": false }, - "workspace_id": { - "name": "workspace_id", + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", "type": "text", "primaryKey": false, "notNull": true, @@ -28,6 +72,20 @@ "notNull": true, "autoincrement": false }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "config": { + "name": "config", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "created_at": { "name": "created_at", "type": "text", @@ -46,26 +104,237 @@ } }, "indexes": { - "idx_conversations_workspace_id": { - "name": "idx_conversations_workspace_id", - "columns": ["workspace_id"], + "idx_conversations_task_id": { + "name": "idx_conversations_task_id", + "columns": ["task_id"], "isUnique": false } }, "foreignKeys": { - "conversations_workspace_id_workspaces_id_fk": { - "name": "conversations_workspace_id_workspaces_id_fk", + "conversations_project_id_projects_id_fk": { + "name": "conversations_project_id_projects_id_fk", + "tableFrom": "conversations", + "tableTo": "projects", + "columnsFrom": ["project_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "conversations_task_id_tasks_id_fk": { + "name": "conversations_task_id_tasks_id_fk", "tableFrom": "conversations", - "tableTo": "workspaces", - "columnsFrom": ["workspace_id"], + "tableTo": "tasks", + "columnsFrom": ["task_id"], + "columnsTo": ["id"], + "onDelete": 
"cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "editor_buffers": { + "name": "editor_buffers", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "idx_editor_buffers_task_file": { + "name": "idx_editor_buffers_task_file", + "columns": ["task_id", "file_path"], + "isUnique": false + } + }, + "foreignKeys": { + "editor_buffers_project_id_projects_id_fk": { + "name": "editor_buffers_project_id_projects_id_fk", + "tableFrom": "editor_buffers", + "tableTo": "projects", + "columnsFrom": ["project_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "editor_buffers_task_id_tasks_id_fk": { + "name": "editor_buffers_task_id_tasks_id_fk", + "tableFrom": "editor_buffers", + "tableTo": "tasks", + "columnsFrom": ["task_id"], "columnsTo": ["id"], "onDelete": "cascade", "onUpdate": "no action" } }, "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} + "uniqueConstraints": {} + }, + "kv": { + "name": "kv", + "columns": { + "key": { + "name": "key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": 
true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": { + "idx_kv_key": { + "name": "idx_kv_key", + "columns": ["key"], + "isUnique": true + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "line_comments": { + "name": "line_comments", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "file_path": { + "name": "file_path", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "line_number": { + "name": "line_number", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "line_content": { + "name": "line_content", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "sent_at": { + "name": "sent_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": { + "idx_line_comments_task_file": { + "name": "idx_line_comments_task_file", + "columns": ["task_id", "file_path"], + "isUnique": false + } + }, + "foreignKeys": { + "line_comments_task_id_tasks_id_fk": { + "name": "line_comments_task_id_tasks_id_fk", + "tableFrom": "line_comments", + 
"tableTo": "tasks", + "columnsFrom": ["task_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} }, "messages": { "name": "messages", @@ -138,8 +407,7 @@ } }, "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} + "uniqueConstraints": {} }, "projects": { "name": "projects", @@ -165,6 +433,21 @@ "notNull": true, "autoincrement": false }, + "workspace_provider": { + "name": "workspace_provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'local'" + }, + "base_ref": { + "name": "base_ref", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "git_remote": { "name": "git_remote", "type": "text", @@ -172,28 +455,235 @@ "notNull": false, "autoincrement": false }, - "git_branch": { - "name": "git_branch", + "ssh_connection_id": { + "name": "ssh_connection_id", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, - "github_repository": { - "name": "github_repository", + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": { + "idx_projects_path": { + "name": "idx_projects_path", + "columns": ["path"], + "isUnique": true + }, + "idx_projects_ssh_connection_id": { + "name": "idx_projects_ssh_connection_id", + "columns": ["ssh_connection_id"], + "isUnique": false + } + }, + "foreignKeys": { + "projects_ssh_connection_id_ssh_connections_id_fk": { + "name": "projects_ssh_connection_id_ssh_connections_id_fk", + "tableFrom": "projects", + "tableTo": "ssh_connections", + "columnsFrom": ["ssh_connection_id"], + "columnsTo": ["id"], + "onDelete": "set 
null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "pull_requests": { + "name": "pull_requests", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "provider": { + "name": "provider", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'github'" + }, + "name_with_owner": { + "name": "name_with_owner", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "''" + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'open'" + }, + "author": { + "name": "author", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "is_draft": { + "name": "is_draft", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "fetched_at": { + "name": "fetched_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + } + }, + "indexes": { + "idx_pull_requests_url": { + "name": "idx_pull_requests_url", + "columns": ["url"], + "isUnique": true + }, + 
"idx_pull_requests_name_with_owner": { + "name": "idx_pull_requests_name_with_owner", + "columns": ["name_with_owner"], + "isUnique": false + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "ssh_connections": { + "name": "ssh_connections", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "host": { + "name": "host", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "port": { + "name": "port", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 22 + }, + "username": { + "name": "username", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "auth_type": { + "name": "auth_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'agent'" + }, + "private_key_path": { + "name": "private_key_path", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, - "github_connected": { - "name": "github_connected", + "use_agent": { + "name": "use_agent", "type": "integer", "primaryKey": false, "notNull": true, "autoincrement": false, "default": 0 }, + "metadata": { + "name": "metadata", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "created_at": { "name": "created_at", "type": "text", @@ -212,19 +702,23 @@ } }, "indexes": { - "idx_projects_path": { - "name": "idx_projects_path", - "columns": ["path"], + "idx_ssh_connections_name": { + "name": "idx_ssh_connections_name", + "columns": ["name"], "isUnique": true + }, + "idx_ssh_connections_host": { + "name": "idx_ssh_connections_host", + "columns": ["host"], + "isUnique": false } }, "foreignKeys": {}, "compositePrimaryKeys": {}, - 
"uniqueConstraints": {}, - "checkConstraints": {} + "uniqueConstraints": {} }, - "workspaces": { - "name": "workspaces", + "tasks": { + "name": "tasks", "columns": { "id": { "name": "id", @@ -247,37 +741,36 @@ "notNull": true, "autoincrement": false }, - "branch": { - "name": "branch", + "status": { + "name": "status", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, - "path": { - "name": "path", + "source_branch": { + "name": "source_branch", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, - "status": { - "name": "status", + "task_branch": { + "name": "task_branch", "type": "text", "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "'idle'" + "notNull": false, + "autoincrement": false }, - "agent_id": { - "name": "agent_id", + "linked_issue": { + "name": "linked_issue", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, - "metadata": { - "name": "metadata", + "archived_at": { + "name": "archived_at", "type": "text", "primaryKey": false, "notNull": false, @@ -301,16 +794,16 @@ } }, "indexes": { - "idx_workspaces_project_id": { - "name": "idx_workspaces_project_id", + "idx_tasks_project_id": { + "name": "idx_tasks_project_id", "columns": ["project_id"], "isUnique": false } }, "foreignKeys": { - "workspaces_project_id_projects_id_fk": { - "name": "workspaces_project_id_projects_id_fk", - "tableFrom": "workspaces", + "tasks_project_id_projects_id_fk": { + "name": "tasks_project_id_projects_id_fk", + "tableFrom": "tasks", "tableTo": "projects", "columnsFrom": ["project_id"], "columnsTo": ["id"], @@ -319,11 +812,142 @@ } }, "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} + "uniqueConstraints": {} + }, + "tasks_pull_requests": { + "name": "tasks_pull_requests", + "columns": { + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "pull_request_url": 
{ + "name": "pull_request_url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": { + "tasks_pull_requests_task_id_tasks_id_fk": { + "name": "tasks_pull_requests_task_id_tasks_id_fk", + "tableFrom": "tasks_pull_requests", + "tableTo": "tasks", + "columnsFrom": ["task_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "tasks_pull_requests_pull_request_url_pull_requests_url_fk": { + "name": "tasks_pull_requests_pull_request_url_pull_requests_url_fk", + "tableFrom": "tasks_pull_requests", + "tableTo": "pull_requests", + "columnsFrom": ["pull_request_url"], + "columnsTo": ["url"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": { + "tasks_pull_requests_task_id_pull_request_url_pk": { + "columns": ["task_id", "pull_request_url"], + "name": "tasks_pull_requests_task_id_pull_request_url_pk" + } + }, + "uniqueConstraints": {} + }, + "terminals": { + "name": "terminals", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "project_id": { + "name": "project_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "ssh": { + "name": "ssh", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" + }, + "updated_at": { + "name": "updated_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "CURRENT_TIMESTAMP" 
+ } + }, + "indexes": { + "idx_terminals_task_id": { + "name": "idx_terminals_task_id", + "columns": ["task_id"], + "isUnique": false + } + }, + "foreignKeys": { + "terminals_project_id_projects_id_fk": { + "name": "terminals_project_id_projects_id_fk", + "tableFrom": "terminals", + "tableTo": "projects", + "columnsFrom": ["project_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "terminals_task_id_tasks_id_fk": { + "name": "terminals_task_id_tasks_id_fk", + "tableFrom": "terminals", + "tableTo": "tasks", + "columnsFrom": ["task_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} } }, - "views": {}, "enums": {}, "_meta": { "schemas": {}, diff --git a/drizzle/meta/0001_snapshot.json b/drizzle/meta/0001_snapshot.json deleted file mode 100644 index 90204e260..000000000 --- a/drizzle/meta/0001_snapshot.json +++ /dev/null @@ -1,343 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "ec5ad35e-22ec-4c0b-9d48-cfb1033d9d93", - "prevId": "b932945e-f26a-4c07-9c63-08179d95d5bc", - "tables": { - "conversations": { - "name": "conversations", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "workspace_id": { - "name": "workspace_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "title": { - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - } - }, - "indexes": { - "idx_conversations_workspace_id": { - "name": 
"idx_conversations_workspace_id", - "columns": ["workspace_id"], - "isUnique": false - } - }, - "foreignKeys": { - "conversations_workspace_id_workspaces_id_fk": { - "name": "conversations_workspace_id_workspaces_id_fk", - "tableFrom": "conversations", - "tableTo": "workspaces", - "columnsFrom": ["workspace_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "messages": { - "name": "messages", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "conversation_id": { - "name": "conversation_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "content": { - "name": "content", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "sender": { - "name": "sender", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "timestamp": { - "name": "timestamp", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "metadata": { - "name": "metadata", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - } - }, - "indexes": { - "idx_messages_conversation_id": { - "name": "idx_messages_conversation_id", - "columns": ["conversation_id"], - "isUnique": false - }, - "idx_messages_timestamp": { - "name": "idx_messages_timestamp", - "columns": ["timestamp"], - "isUnique": false - } - }, - "foreignKeys": { - "messages_conversation_id_conversations_id_fk": { - "name": "messages_conversation_id_conversations_id_fk", - "tableFrom": "messages", - "tableTo": "conversations", - "columnsFrom": ["conversation_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - 
"checkConstraints": {} - }, - "projects": { - "name": "projects", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "path": { - "name": "path", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "git_remote": { - "name": "git_remote", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "git_branch": { - "name": "git_branch", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "github_repository": { - "name": "github_repository", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "github_connected": { - "name": "github_connected", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "base_ref": { - "name": "base_ref", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - } - }, - "indexes": { - "idx_projects_path": { - "name": "idx_projects_path", - "columns": ["path"], - "isUnique": true - } - }, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "workspaces": { - "name": "workspaces", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "project_id": { - "name": "project_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": 
false - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "branch": { - "name": "branch", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "path": { - "name": "path", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "status": { - "name": "status", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "'idle'" - }, - "agent_id": { - "name": "agent_id", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "metadata": { - "name": "metadata", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - } - }, - "indexes": { - "idx_workspaces_project_id": { - "name": "idx_workspaces_project_id", - "columns": ["project_id"], - "isUnique": false - } - }, - "foreignKeys": { - "workspaces_project_id_projects_id_fk": { - "name": "workspaces_project_id_projects_id_fk", - "tableFrom": "workspaces", - "tableTo": "projects", - "columnsFrom": ["project_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} diff --git a/drizzle/meta/0002_snapshot.json b/drizzle/meta/0002_snapshot.json deleted file mode 100644 index fc0d0457f..000000000 --- a/drizzle/meta/0002_snapshot.json +++ /dev/null @@ -1,347 +0,0 @@ -{ - "version": "6", - 
"dialect": "sqlite", - "id": "f61fcd81-f000-4e2d-84f2-2f79133ae5d6", - "prevId": "ec5ad35e-22ec-4c0b-9d48-cfb1033d9d93", - "tables": { - "conversations": { - "name": "conversations", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "task_id": { - "name": "task_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "title": { - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - } - }, - "indexes": { - "idx_conversations_task_id": { - "name": "idx_conversations_task_id", - "columns": ["task_id"], - "isUnique": false - } - }, - "foreignKeys": { - "conversations_task_id_tasks_id_fk": { - "name": "conversations_task_id_tasks_id_fk", - "tableFrom": "conversations", - "tableTo": "tasks", - "columnsFrom": ["task_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "messages": { - "name": "messages", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "conversation_id": { - "name": "conversation_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "content": { - "name": "content", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "sender": { - "name": "sender", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "timestamp": { - "name": "timestamp", 
- "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "metadata": { - "name": "metadata", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - } - }, - "indexes": { - "idx_messages_conversation_id": { - "name": "idx_messages_conversation_id", - "columns": ["conversation_id"], - "isUnique": false - }, - "idx_messages_timestamp": { - "name": "idx_messages_timestamp", - "columns": ["timestamp"], - "isUnique": false - } - }, - "foreignKeys": { - "messages_conversation_id_conversations_id_fk": { - "name": "messages_conversation_id_conversations_id_fk", - "tableFrom": "messages", - "tableTo": "conversations", - "columnsFrom": ["conversation_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "projects": { - "name": "projects", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "path": { - "name": "path", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "git_remote": { - "name": "git_remote", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "git_branch": { - "name": "git_branch", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "base_ref": { - "name": "base_ref", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "github_repository": { - "name": "github_repository", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "github_connected": { - "name": "github_connected", - "type": "integer", - "primaryKey": false, - "notNull": true, - 
"autoincrement": false, - "default": 0 - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - } - }, - "indexes": { - "idx_projects_path": { - "name": "idx_projects_path", - "columns": ["path"], - "isUnique": true - } - }, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "tasks": { - "name": "tasks", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "project_id": { - "name": "project_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "branch": { - "name": "branch", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "path": { - "name": "path", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "status": { - "name": "status", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "'idle'" - }, - "agent_id": { - "name": "agent_id", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "metadata": { - "name": "metadata", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" - }, - "updated_at": { - "name": "updated_at", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": "CURRENT_TIMESTAMP" 
- } - }, - "indexes": { - "idx_tasks_project_id": { - "name": "idx_tasks_project_id", - "columns": ["project_id"], - "isUnique": false - } - }, - "foreignKeys": { - "tasks_project_id_projects_id_fk": { - "name": "tasks_project_id_projects_id_fk", - "tableFrom": "tasks", - "tableTo": "projects", - "columnsFrom": ["project_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": { - "\"workspaces\"": "\"tasks\"" - }, - "columns": { - "\"conversations\".\"workspace_id\"": "\"conversations\".\"task_id\"" - } - }, - "internal": { - "indexes": {} - } -} diff --git a/drizzle/meta/_journal.json b/drizzle/meta/_journal.json index ec51ccb78..9e0d0b88f 100644 --- a/drizzle/meta/_journal.json +++ b/drizzle/meta/_journal.json @@ -5,71 +5,8 @@ { "idx": 0, "version": "6", - "when": 1761210820918, - "tag": "0000_initial", - "breakpoints": true - }, - { - "idx": 1, - "version": "6", - "when": 1761240000000, - "tag": "0001_add_base_ref_to_projects", - "breakpoints": true - }, - { - "idx": 2, - "version": "6", - "when": 1765592430354, - "tag": "0002_lyrical_impossible_man", - "breakpoints": true - }, - { - "idx": 3, - "version": "6", - "when": 1765592430355, - "tag": "0003_add_run_config_status_to_projects", - "breakpoints": true - }, - { - "idx": 4, - "version": "6", - "when": 1765592430356, - "tag": "0004_add_line_comments", - "breakpoints": true - }, - { - "idx": 5, - "version": "6", - "when": 1765592430357, - "tag": "0005_add_use_worktree_to_tasks", - "breakpoints": true - }, - { - "idx": 6, - "version": "6", - "when": 1765592430358, - "tag": "0006_add_multi_chat_support", - "breakpoints": true - }, - { - "idx": 7, - "version": "6", - "when": 1765592430359, - "tag": "0007_add_is_main_to_conversations", - "breakpoints": true - }, - { - "idx": 8, - "version": "6", - "when": 1765592430360, - 
"tag": "0008_add_archived_at_to_tasks", - "breakpoints": true - }, - { - "idx": 9, - "version": "6", - "when": 1738857600000, - "tag": "0009_add_ssh_support", + "when": 1774036008845, + "tag": "0000_mean_wither", "breakpoints": true } ] diff --git a/electron.vite.config.ts b/electron.vite.config.ts new file mode 100644 index 000000000..873c71f14 --- /dev/null +++ b/electron.vite.config.ts @@ -0,0 +1,42 @@ +import { resolve } from 'node:path'; +import tailwindcss from '@tailwindcss/vite'; +import react from '@vitejs/plugin-react'; +import { defineConfig } from 'electron-vite'; + +export default defineConfig({ + main: { + root: 'src/main', + resolve: { + alias: { + '@': resolve('src'), + '@main': resolve('src/main'), + '@shared': resolve('src/shared'), + '@root': resolve('.'), + }, + }, + }, + preload: { + root: 'src/preload', + resolve: { + alias: { + '@shared': resolve('src/shared'), + '@root': resolve('.'), + }, + }, + }, + renderer: { + root: 'src/renderer', + plugins: [react(), tailwindcss()], + resolve: { + alias: { + '@': resolve('src'), + '@renderer': resolve('src/renderer'), + '@shared': resolve('src/shared'), + '@root': resolve('.'), + }, + }, + server: { + port: 3000, + }, + }, +}); diff --git a/package.json b/package.json index 08106fa09..cf2d0840c 100644 --- a/package.json +++ b/package.json @@ -2,29 +2,33 @@ "name": "emdash", "version": "0.4.24", "description": "A cross-platform Electron app that orchestrates multiple coding agents in parallel", - "main": "dist/main/main/entry.js", + "type": "module", + "main": "./out/main/index.js", "packageManager": "pnpm@10.28.2", "scripts": { "d": "pnpm install && pnpm run dev", - "dev": "concurrently \"pnpm run dev:main\" \"pnpm run dev:renderer\"", - "dev:main": "tsc -p tsconfig.main.json && node scripts/copy-main-assets.cjs && electron dist/main/main/entry.js --dev", - "dev:renderer": "vite", + "dev": "electron-vite dev", + "dev:main": "electron-vite dev --watch main", + "dev:renderer": "electron-vite dev --watch 
renderer", + "db:generate": "drizzle-kit generate", + "db:reset": "rm -f \"$HOME/Library/Application Support/Emdash/emdash2.db\" \"$HOME/.config/Emdash/emdash2.db\"", "test": "vitest run", - "build": "pnpm run build:main && pnpm run build:renderer", - "build:main": "tsc -p tsconfig.main.json && node scripts/copy-main-assets.cjs", - "build:renderer": "vite build", + "build": "electron-vite build", + "build:main": "electron-vite build --filter main", + "build:renderer": "electron-vite build --filter renderer", "package": "pnpm run build && electron-builder", "package:mac": "pnpm run build && electron-builder --mac", "package:linux": "pnpm run build && electron-builder --linux --publish never", "package:win": "pnpm run build && electron-builder --win --publish never", - "postinstall": "node scripts/postinstall.cjs", - "rebuild": "electron-rebuild -f -v 30.5.1 --only=sqlite3,node-pty,keytar", - "clean": "node scripts/clean.cjs", + "postinstall": "node --experimental-strip-types scripts/postinstall.ts", + "rebuild": "electron-rebuild -f -v 40.7.0 --only=better-sqlite3,node-pty,keytar", + "clean": "rm -rf node_modules dist", "reset": "pnpm run clean && pnpm install", "lint": "eslint . 
--ext .ts,.tsx", "format": "prettier --write .", "format:check": "prettier --check .", - "type-check": "tsc --noEmit", + "type-check": "tsc --noEmit && tsc -p tsconfig.main.json --noEmit", + "typecheck": "tsc --noEmit", "docs": "cd docs && pnpm run dev", "docs:build": "cd docs && pnpm run build", "prepare": "husky" @@ -41,24 +45,28 @@ "email": "support@emdash.sh" }, "engines": { - "node": ">=20.0.0 <23.0.0", + "node": ">=24.0.0", "pnpm": ">=10.28.0" }, "devDependencies": { "@electron/rebuild": "^4.0.1", + "@ianvs/prettier-plugin-sort-imports": "^4.7.1", + "@tailwindcss/vite": "^4.2.1", "@types/node": "^20.10.0", - "@types/react": "^18.2.45", - "@types/react-dom": "^18.2.18", + "@types/react": "^19.0.0", + "@types/react-dom": "^19.0.0", "@types/react-syntax-highlighter": "^15.5.13", "@typescript-eslint/eslint-plugin": "^6.14.0", "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react": "^4.7.0", - "autoprefixer": "^10.4.16", - "better-sqlite3": "^12.6.0", + "@vitest/browser": "^4.1.0", + "@vitest/browser-playwright": "^4.1.0", + "@vitest/browser-preview": "^4.1.0", "concurrently": "^8.2.2", "drizzle-kit": "^0.24.2", - "electron": "^30.5.1", - "electron-builder": "^26.0.12", + "electron": "^40.7.0", + "electron-builder": "^26.8.1", + "electron-vite": "^5.0.0", "eslint": "^8.55.0", "eslint-plugin-import": "^2.32.0", "eslint-plugin-react-hooks": "^7.0.0", @@ -67,14 +75,20 @@ "postcss": "^8.4.32", "prettier": "3.6.2", "prettier-plugin-tailwindcss": "^0.6.14", - "tailwindcss": "^3.3.6", + "tailwindcss": "^4.2.1", "typescript": "^5.3.3", - "vite": "^5.0.10", - "vitest": "^3.2.4" + "vite": "^6.4.1", + "vitest": "^4.1.0", + "vitest-browser-react": "^2.1.0" }, "dependencies": { + "@base-ui/react": "^1.3.0", "@isaacs/brace-expansion": "^5.0.1", + "@linear/sdk": "^77.0.0", "@monaco-editor/react": "^4.7.0", + "@octokit/auth-oauth-device": "^8.0.3", + "@octokit/rest": "^22.0.1", + "@parcel/watcher": "^2.5.6", "@radix-ui/react-accordion": "^1.2.12", 
"@radix-ui/react-alert-dialog": "^1.1.15", "@radix-ui/react-checkbox": "^1.3.3", @@ -91,51 +105,65 @@ "@radix-ui/react-slot": "^1.2.3", "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-toast": "^1.2.15", + "@radix-ui/react-toggle": "^1.1.10", + "@radix-ui/react-toggle-group": "^1.1.11", "@radix-ui/react-tooltip": "^1.2.8", "@radix-ui/react-use-controllable-state": "^1.2.2", + "@tanstack/react-form": "^1.28.4", + "@tanstack/react-hotkeys": "^0.4.1", "@tanstack/react-query": "^5.90.21", + "@tanstack/react-virtual": "^3.13.23", + "@types/better-sqlite3": "^7.6.13", "@types/react-window": "^1.8.8", - "@types/sqlite3": "^3.1.11", "@types/ssh2": "^1.15.5", + "@xterm/addon-canvas": "^0.7.0", "@xterm/addon-fit": "^0.11.0", - "@xterm/addon-serialize": "^0.14.0", "@xterm/addon-web-links": "^0.12.0", - "@xterm/addon-webgl": "^0.19.0", "@xterm/xterm": "^6.0.0", + "allotment": "^1.20.5", + "better-sqlite3": "^12.6.0", + "chokidar": "^5.0.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", + "devicon": "^2.17.0", "dotenv": "^17.2.3", "drizzle-orm": "^0.32.1", "electron-updater": "^6.3.9", "fix-path": "^4.0.0", "framer-motion": "^12.33.0", + "glob": "^13.0.6", "human-id": "^4.1.2", "ignore": "^5.3.1", + "jsonc-parser": "^3.3.1", "keytar": "^7.9.0", "lucide-react": "^0.564.0", "minimatch": "^10.1.1", + "mobx": "^6.15.0", + "mobx-react-lite": "^4.1.1", "monaco-editor": "^0.55.1", "motion": "^12.23.12", "nbranch": "^0.1.0", "node-pty": "1.0.0", "posthog-js": "^1.297.2", - "react": "^18.2.0", - "react-dom": "^18.2.0", + "react": "^19.2.0", + "react-dom": "^19.2.0", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", - "react-resizable-panels": "^3.0.6", + "react-resizable-panels": "^4.7.3", "react-syntax-highlighter": "^15.6.6", "react-window": "^2.2.4", "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^4.0.1", - "sqlite3": "^5.1.7", + "smol-toml": "^1.6.0", + "sonner": "^2.0.7", "ssh2": "^1.17.0", "streamdown": "^1.3.0", 
"tailwind-merge": "^2.6.0", - "tailwindcss-animate": "^1.0.7", - "tsconfig-paths": "^3.15.0" + "tsconfig-paths": "^3.15.0", + "tw-animate-css": "^1.4.0", + "zod": "^4.3.6" }, "build": { "appId": "com.emdash", @@ -154,12 +182,12 @@ ], "generateUpdatesFilesForAllChannels": true, "files": [ - "dist/**/*", + "out/**/*", "node_modules/**/*", "drizzle/**/*" ], "asarUnpack": [ - "node_modules/sqlite3/**", + "node_modules/better-sqlite3/**", "node_modules/node-pty/**", "node_modules/keytar/**", "**/*.node" @@ -249,13 +277,12 @@ }, "pnpm": { "onlyBuiltDependencies": [ - "sqlite3", + "better-sqlite3", "node-pty", "keytar", "electron" ], "ignoredBuiltDependencies": [ - "better-sqlite3", "core-js", "cpu-features", "esbuild", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ea650ed58..11598abd7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,93 +8,129 @@ importers: .: dependencies: + '@base-ui/react': + specifier: ^1.3.0 + version: 1.3.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@isaacs/brace-expansion': specifier: ^5.0.1 version: 5.0.1 + '@linear/sdk': + specifier: ^77.0.0 + version: 77.0.0(graphql@16.13.1) '@monaco-editor/react': specifier: ^4.7.0 - version: 4.7.0(monaco-editor@0.55.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@octokit/auth-oauth-device': + specifier: ^8.0.3 + version: 8.0.3 + '@octokit/rest': + specifier: ^22.0.1 + version: 22.0.1 + '@parcel/watcher': + specifier: ^2.5.6 + version: 2.5.6 '@radix-ui/react-accordion': specifier: ^1.2.12 - version: 1.2.12(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-alert-dialog': specifier: ^1.1.15 - version: 
1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-checkbox': specifier: ^1.3.3 - version: 1.3.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.3.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-collapsible': specifier: ^1.1.12 - version: 1.1.12(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-context-menu': specifier: ^2.2.16 - version: 2.2.16(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.2.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-dialog': specifier: ^1.1.15 - version: 1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-label': specifier: ^2.1.8 - version: 2.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-popover': specifier: ^1.1.15 - version: 1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + 
version: 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-progress': specifier: ^1.1.8 - version: 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-radio-group': specifier: ^1.3.8 - version: 1.3.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.3.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-scroll-area': specifier: ^1.2.10 - version: 1.2.10(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-select': specifier: ^2.2.6 - version: 2.2.6(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.2.6(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-separator': specifier: ^1.1.7 - version: 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-slot': specifier: ^1.2.3 - version: 1.2.4(@types/react@18.3.28)(react@18.3.1) + version: 1.2.4(@types/react@19.2.14)(react@19.2.4) '@radix-ui/react-switch': specifier: ^1.2.6 - version: 
1.2.6(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.6(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-toast': specifier: ^1.2.15 - version: 1.2.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-toggle': + specifier: ^1.1.10 + version: 1.1.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-toggle-group': + specifier: ^1.1.11 + version: 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-tooltip': specifier: ^1.2.8 - version: 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-use-controllable-state': specifier: ^1.2.2 - version: 1.2.2(@types/react@18.3.28)(react@18.3.1) + version: 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@tanstack/react-form': + specifier: ^1.28.4 + version: 1.28.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/react-hotkeys': + specifier: ^0.4.1 + version: 0.4.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/react-query': specifier: ^5.90.21 - version: 5.90.21(react@18.3.1) + version: 5.90.21(react@19.2.4) + '@tanstack/react-virtual': + specifier: ^3.13.23 + version: 3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@types/better-sqlite3': + specifier: ^7.6.13 + version: 7.6.13 '@types/react-window': specifier: ^1.8.8 version: 1.8.8 - '@types/sqlite3': 
- specifier: ^3.1.11 - version: 3.1.11 '@types/ssh2': specifier: ^1.15.5 version: 1.15.5 + '@xterm/addon-canvas': + specifier: ^0.7.0 + version: 0.7.0(@xterm/xterm@6.0.0) '@xterm/addon-fit': specifier: ^0.11.0 version: 0.11.0 - '@xterm/addon-serialize': - specifier: ^0.14.0 - version: 0.14.0 '@xterm/addon-web-links': specifier: ^0.12.0 version: 0.12.0 - '@xterm/addon-webgl': - specifier: ^0.19.0 - version: 0.19.0 '@xterm/xterm': specifier: ^6.0.0 version: 6.0.0 + allotment: + specifier: ^1.20.5 + version: 1.20.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + better-sqlite3: + specifier: ^12.6.0 + version: 12.6.2 + chokidar: + specifier: ^5.0.0 + version: 5.0.0 class-variance-authority: specifier: ^0.7.1 version: 0.7.1 @@ -103,13 +139,16 @@ importers: version: 2.1.1 cmdk: specifier: ^1.1.1 - version: 1.1.1(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + devicon: + specifier: ^2.17.0 + version: 2.17.0 dotenv: specifier: ^17.2.3 version: 17.2.4 drizzle-orm: specifier: ^0.32.1 - version: 0.32.2(@opentelemetry/api@1.9.0)(@types/react@18.3.28)(better-sqlite3@12.6.2)(react@18.3.1)(sqlite3@5.1.7) + version: 0.32.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.2.14)(better-sqlite3@12.6.2)(react@19.2.4)(sqlite3@5.1.7) electron-updater: specifier: ^6.3.9 version: 6.7.3 @@ -118,28 +157,40 @@ importers: version: 4.0.0 framer-motion: specifier: ^12.33.0 - version: 12.33.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 12.33.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + glob: + specifier: ^13.0.6 + version: 13.0.6 human-id: specifier: ^4.1.2 version: 4.1.3 ignore: specifier: ^5.3.1 version: 5.3.2 + jsonc-parser: + specifier: ^3.3.1 + version: 3.3.1 keytar: specifier: ^7.9.0 version: 7.9.0 lucide-react: specifier: ^0.564.0 - version: 
0.564.0(react@18.3.1) + version: 0.564.0(react@19.2.4) minimatch: specifier: ^10.1.1 version: 10.1.2 + mobx: + specifier: ^6.15.0 + version: 6.15.0 + mobx-react-lite: + specifier: ^4.1.1 + version: 4.1.1(mobx@6.15.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) monaco-editor: specifier: ^0.55.1 version: 0.55.1 motion: specifier: ^12.23.12 - version: 12.33.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 12.33.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) nbranch: specifier: ^0.1.0 version: 0.1.0 @@ -150,26 +201,26 @@ importers: specifier: ^1.297.2 version: 1.342.1 react: - specifier: ^18.2.0 - version: 18.3.1 + specifier: ^19.2.0 + version: 19.2.4 react-dom: - specifier: ^18.2.0 - version: 18.3.1(react@18.3.1) + specifier: ^19.2.0 + version: 19.2.4(react@19.2.4) react-icons: specifier: ^5.5.0 - version: 5.5.0(react@18.3.1) + version: 5.5.0(react@19.2.4) react-markdown: specifier: ^10.1.0 - version: 10.1.0(@types/react@18.3.28)(react@18.3.1) + version: 10.1.0(@types/react@19.2.14)(react@19.2.4) react-resizable-panels: - specifier: ^3.0.6 - version: 3.0.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + specifier: ^4.7.3 + version: 4.7.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-syntax-highlighter: specifier: ^15.6.6 - version: 15.6.6(react@18.3.1) + version: 15.6.6(react@19.2.4) react-window: specifier: ^2.2.4 - version: 2.2.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.2.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) rehype-raw: specifier: ^7.0.0 version: 7.0.0 @@ -179,37 +230,49 @@ importers: remark-gfm: specifier: ^4.0.1 version: 4.0.1 - sqlite3: - specifier: ^5.1.7 - version: 5.1.7 + smol-toml: + specifier: ^1.6.0 + version: 1.6.0 + sonner: + specifier: ^2.0.7 + version: 2.0.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4) ssh2: specifier: ^1.17.0 version: 1.17.0 streamdown: specifier: ^1.3.0 - version: 1.6.11(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@18.3.1) + version: 
1.6.11(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@19.2.4) tailwind-merge: specifier: ^2.6.0 version: 2.6.1 - tailwindcss-animate: - specifier: ^1.0.7 - version: 1.0.7(tailwindcss@3.4.19(yaml@2.8.2)) tsconfig-paths: specifier: ^3.15.0 version: 3.15.0 + tw-animate-css: + specifier: ^1.4.0 + version: 1.4.0 + zod: + specifier: ^4.3.6 + version: 4.3.6 devDependencies: '@electron/rebuild': specifier: ^4.0.1 version: 4.0.3 + '@ianvs/prettier-plugin-sort-imports': + specifier: ^4.7.1 + version: 4.7.1(prettier@3.6.2) + '@tailwindcss/vite': + specifier: ^4.2.1 + version: 4.2.1(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) '@types/node': specifier: ^20.10.0 version: 20.19.32 '@types/react': - specifier: ^18.2.45 - version: 18.3.28 + specifier: ^19.0.0 + version: 19.2.14 '@types/react-dom': - specifier: ^18.2.18 - version: 18.3.7(@types/react@18.3.28) + specifier: ^19.0.0 + version: 19.2.3(@types/react@19.2.14) '@types/react-syntax-highlighter': specifier: ^15.5.13 version: 15.5.13 @@ -221,13 +284,16 @@ importers: version: 6.21.0(eslint@8.57.1)(typescript@5.9.3) '@vitejs/plugin-react': specifier: ^4.7.0 - version: 4.7.0(vite@5.4.21(@types/node@20.19.32)) - autoprefixer: - specifier: ^10.4.16 - version: 10.4.24(postcss@8.5.6) - better-sqlite3: - specifier: ^12.6.0 - version: 12.6.2 + version: 4.7.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + '@vitest/browser': + specifier: ^4.1.0 + version: 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) + '@vitest/browser-playwright': + specifier: ^4.1.0 + version: 4.1.0(playwright@1.58.2)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) + '@vitest/browser-preview': + specifier: ^4.1.0 + version: 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) concurrently: specifier: ^8.2.2 version: 8.2.2 @@ -235,11 
+301,14 @@ importers: specifier: ^0.24.2 version: 0.24.2 electron: - specifier: ^30.5.1 - version: 30.5.1 + specifier: ^40.7.0 + version: 40.7.0 electron-builder: - specifier: ^26.0.12 + specifier: ^26.8.1 version: 26.8.1(electron-builder-squirrel-windows@24.13.3) + electron-vite: + specifier: ^5.0.0 + version: 5.0.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) eslint: specifier: ^8.55.0 version: 8.57.1 @@ -263,29 +332,28 @@ importers: version: 3.6.2 prettier-plugin-tailwindcss: specifier: ^0.6.14 - version: 0.6.14(prettier@3.6.2) + version: 0.6.14(@ianvs/prettier-plugin-sort-imports@4.7.1(prettier@3.6.2))(prettier@3.6.2) tailwindcss: - specifier: ^3.3.6 - version: 3.4.19(yaml@2.8.2) + specifier: ^4.2.1 + version: 4.2.1 typescript: specifier: ^5.3.3 version: 5.9.3 vite: - specifier: ^5.0.10 - version: 5.4.21(@types/node@20.19.32) + specifier: ^6.4.1 + version: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) vitest: - specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@20.19.32) + specifier: ^4.1.0 + version: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + vitest-browser-react: + specifier: ^2.1.0 + version: 2.1.0(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0) packages: 7zip-bin@5.2.0: resolution: {integrity: sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A==} - '@alloc/quick-lru@5.2.0': - resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} - engines: {node: '>=10'} - '@antfu/install-pkg@1.1.0': resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} @@ -348,6 +416,12 @@ packages: 
engines: {node: '>=6.0.0'} hasBin: true + '@babel/plugin-transform-arrow-functions@7.27.1': + resolution: {integrity: sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + '@babel/plugin-transform-react-jsx-self@7.27.1': resolution: {integrity: sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==} engines: {node: '>=6.9.0'} @@ -376,6 +450,30 @@ packages: resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} engines: {node: '>=6.9.0'} + '@base-ui/react@1.3.0': + resolution: {integrity: sha512-FwpKqZbPz14AITp1CVgf4AjhKPe1OeeVKSBMdgD10zbFlj3QSWelmtCMLi2+/PFZZcIm3l87G7rwtCZJwHyXWA==} + engines: {node: '>=14.0.0'} + peerDependencies: + '@types/react': ^17 || ^18 || ^19 + react: ^17 || ^18 || ^19 + react-dom: ^17 || ^18 || ^19 + peerDependenciesMeta: + '@types/react': + optional: true + + '@base-ui/utils@0.2.6': + resolution: {integrity: sha512-yQ+qeuqohwhsNpoYDqqXaLllYAkPCP4vYdDrVo8FQXaAPfHWm1pG/Vm+jmGTA5JFS0BAIjookyapuJFY8F9PIw==} + peerDependencies: + '@types/react': ^17 || ^18 || ^19 + react: ^17 || ^18 || ^19 + react-dom: ^17 || ^18 || ^19 + peerDependenciesMeta: + '@types/react': + optional: true + + '@blazediff/core@1.9.1': + resolution: {integrity: sha512-ehg3jIkYKulZh+8om/O25vkvSsXXwC+skXmyA87FFx6A/45eqOkZsBltMw/TVteb0mloiGT8oGRTcjRAz66zaA==} + '@braintree/sanitize-url@7.1.2': resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==} @@ -463,9 +561,9 @@ packages: cpu: [ppc64] os: [aix] - '@esbuild/aix-ppc64@0.21.5': - resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} - engines: {node: '>=12'} + '@esbuild/aix-ppc64@0.25.12': + resolution: {integrity: 
sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} + engines: {node: '>=18'} cpu: [ppc64] os: [aix] @@ -481,9 +579,9 @@ packages: cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.21.5': - resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} - engines: {node: '>=12'} + '@esbuild/android-arm64@0.25.12': + resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} + engines: {node: '>=18'} cpu: [arm64] os: [android] @@ -499,9 +597,9 @@ packages: cpu: [arm] os: [android] - '@esbuild/android-arm@0.21.5': - resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} - engines: {node: '>=12'} + '@esbuild/android-arm@0.25.12': + resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} + engines: {node: '>=18'} cpu: [arm] os: [android] @@ -517,9 +615,9 @@ packages: cpu: [x64] os: [android] - '@esbuild/android-x64@0.21.5': - resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} - engines: {node: '>=12'} + '@esbuild/android-x64@0.25.12': + resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} + engines: {node: '>=18'} cpu: [x64] os: [android] @@ -535,9 +633,9 @@ packages: cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.21.5': - resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} - engines: {node: '>=12'} + '@esbuild/darwin-arm64@0.25.12': + resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} + engines: {node: '>=18'} cpu: [arm64] os: [darwin] @@ -553,9 +651,9 @@ packages: cpu: [x64] os: [darwin] - 
'@esbuild/darwin-x64@0.21.5': - resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} - engines: {node: '>=12'} + '@esbuild/darwin-x64@0.25.12': + resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} + engines: {node: '>=18'} cpu: [x64] os: [darwin] @@ -571,9 +669,9 @@ packages: cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.21.5': - resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} - engines: {node: '>=12'} + '@esbuild/freebsd-arm64@0.25.12': + resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} + engines: {node: '>=18'} cpu: [arm64] os: [freebsd] @@ -589,9 +687,9 @@ packages: cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.21.5': - resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} - engines: {node: '>=12'} + '@esbuild/freebsd-x64@0.25.12': + resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} + engines: {node: '>=18'} cpu: [x64] os: [freebsd] @@ -607,9 +705,9 @@ packages: cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.21.5': - resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} - engines: {node: '>=12'} + '@esbuild/linux-arm64@0.25.12': + resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} + engines: {node: '>=18'} cpu: [arm64] os: [linux] @@ -625,9 +723,9 @@ packages: cpu: [arm] os: [linux] - '@esbuild/linux-arm@0.21.5': - resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} - engines: {node: '>=12'} + '@esbuild/linux-arm@0.25.12': + 
resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} + engines: {node: '>=18'} cpu: [arm] os: [linux] @@ -643,9 +741,9 @@ packages: cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.21.5': - resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} - engines: {node: '>=12'} + '@esbuild/linux-ia32@0.25.12': + resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} + engines: {node: '>=18'} cpu: [ia32] os: [linux] @@ -661,9 +759,9 @@ packages: cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.21.5': - resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} - engines: {node: '>=12'} + '@esbuild/linux-loong64@0.25.12': + resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} + engines: {node: '>=18'} cpu: [loong64] os: [linux] @@ -679,9 +777,9 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.21.5': - resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} - engines: {node: '>=12'} + '@esbuild/linux-mips64el@0.25.12': + resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} + engines: {node: '>=18'} cpu: [mips64el] os: [linux] @@ -697,9 +795,9 @@ packages: cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.21.5': - resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} - engines: {node: '>=12'} + '@esbuild/linux-ppc64@0.25.12': + resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} + engines: {node: '>=18'} cpu: [ppc64] os: [linux] @@ -715,9 +813,9 @@ packages: 
cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.21.5': - resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} - engines: {node: '>=12'} + '@esbuild/linux-riscv64@0.25.12': + resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} + engines: {node: '>=18'} cpu: [riscv64] os: [linux] @@ -733,9 +831,9 @@ packages: cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.21.5': - resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} - engines: {node: '>=12'} + '@esbuild/linux-s390x@0.25.12': + resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} + engines: {node: '>=18'} cpu: [s390x] os: [linux] @@ -751,12 +849,18 @@ packages: cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.21.5': - resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} - engines: {node: '>=12'} + '@esbuild/linux-x64@0.25.12': + resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} + engines: {node: '>=18'} cpu: [x64] os: [linux] + '@esbuild/netbsd-arm64@0.25.12': + resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.18.20': resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} engines: {node: '>=12'} @@ -769,12 +873,18 @@ packages: cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.21.5': - resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} - engines: {node: '>=12'} + '@esbuild/netbsd-x64@0.25.12': + resolution: {integrity: 
sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} + engines: {node: '>=18'} cpu: [x64] os: [netbsd] + '@esbuild/openbsd-arm64@0.25.12': + resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.18.20': resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} engines: {node: '>=12'} @@ -787,12 +897,18 @@ packages: cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.21.5': - resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} - engines: {node: '>=12'} + '@esbuild/openbsd-x64@0.25.12': + resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} + engines: {node: '>=18'} cpu: [x64] os: [openbsd] + '@esbuild/openharmony-arm64@0.25.12': + resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + '@esbuild/sunos-x64@0.18.20': resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} engines: {node: '>=12'} @@ -805,9 +921,9 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.21.5': - resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} - engines: {node: '>=12'} + '@esbuild/sunos-x64@0.25.12': + resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} + engines: {node: '>=18'} cpu: [x64] os: [sunos] @@ -823,9 +939,9 @@ packages: cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.21.5': - resolution: {integrity: 
sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} - engines: {node: '>=12'} + '@esbuild/win32-arm64@0.25.12': + resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} + engines: {node: '>=18'} cpu: [arm64] os: [win32] @@ -841,9 +957,9 @@ packages: cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.21.5': - resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} - engines: {node: '>=12'} + '@esbuild/win32-ia32@0.25.12': + resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} + engines: {node: '>=18'} cpu: [ia32] os: [win32] @@ -859,9 +975,9 @@ packages: cpu: [x64] os: [win32] - '@esbuild/win32-x64@0.21.5': - resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} - engines: {node: '>=12'} + '@esbuild/win32-x64@0.25.12': + resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} + engines: {node: '>=18'} cpu: [x64] os: [win32] @@ -886,21 +1002,41 @@ packages: '@floating-ui/core@1.7.4': resolution: {integrity: sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==} + '@floating-ui/core@1.7.5': + resolution: {integrity: sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ==} + '@floating-ui/dom@1.7.5': resolution: {integrity: sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==} + '@floating-ui/dom@1.7.6': + resolution: {integrity: sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ==} + '@floating-ui/react-dom@2.1.7': resolution: {integrity: sha512-0tLRojf/1Go2JgEVm+3Frg9A3IW8bJgKgdO0BN5RkF//ufuz2joZM63Npau2ff3J6lUVYgDSNzNkR+aH3IVfjg==} 
peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' + '@floating-ui/react-dom@2.1.8': + resolution: {integrity: sha512-cC52bHwM/n/CxS87FH0yWdngEZrjdtLW/qVruo68qg+prK7ZQ4YGdut2GyDVpoGeAYe/h899rVeOVm6Oi40k2A==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + '@floating-ui/utils@0.2.10': resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==} + '@floating-ui/utils@0.2.11': + resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + '@gar/promisify@1.1.3': resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==} + '@graphql-typed-document-node/core@3.2.0': + resolution: {integrity: sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + '@humanwhocodes/config-array@0.13.0': resolution: {integrity: sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==} engines: {node: '>=10.10.0'} @@ -914,6 +1050,24 @@ packages: resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} deprecated: Use @eslint/object-schema instead + '@ianvs/prettier-plugin-sort-imports@4.7.1': + resolution: {integrity: sha512-jmTNYGlg95tlsoG3JLCcuC4BrFELJtLirLAkQW/71lXSyOhVt/Xj7xWbbGcuVbNq1gwWgSyMrPjJc9Z30hynVw==} + peerDependencies: + '@prettier/plugin-oxc': ^0.0.4 || ^0.1.0 + '@vue/compiler-sfc': 2.7.x || 3.x + content-tag: ^4.0.0 + prettier: 2 || 3 || ^4.0.0-0 + prettier-plugin-ember-template-tag: ^2.1.0 + peerDependenciesMeta: + '@prettier/plugin-oxc': + optional: true + '@vue/compiler-sfc': + optional: true + content-tag: + optional: true + prettier-plugin-ember-template-tag: + optional: true + 
'@iconify/types@2.0.0': resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} @@ -952,6 +1106,10 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@linear/sdk@77.0.0': + resolution: {integrity: sha512-pI7ZNh/UZF12JoVhCi8tdubYnCp2DJ1hkraMA+kDyYqjU+elKOVd5yAdN/SoAR17V7Kg8dtDabgArjknvUnQSA==} + engines: {node: '>=18.x'} + '@malept/cross-spawn-promise@1.1.1': resolution: {integrity: sha512-RTBGWL5FWQcg9orDOCcp4LvItNzUPcyEU9bwaeJX0rJ1IQxzucC48Y0/sQLp/g6t99IQgAlGIaesJS+gTn7tVQ==} engines: {node: '>= 10'} @@ -1005,6 +1163,70 @@ packages: engines: {node: '>=10'} deprecated: This functionality has been moved to @npmcli/fs + '@octokit/auth-oauth-device@8.0.3': + resolution: {integrity: sha512-zh2W0mKKMh/VWZhSqlaCzY7qFyrgd9oTWmTmHaXnHNeQRCZr/CXy2jCgHo4e4dJVTiuxP5dLa0YM5p5QVhJHbw==} + engines: {node: '>= 20'} + + '@octokit/auth-token@6.0.0': + resolution: {integrity: sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==} + engines: {node: '>= 20'} + + '@octokit/core@7.0.6': + resolution: {integrity: sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q==} + engines: {node: '>= 20'} + + '@octokit/endpoint@11.0.3': + resolution: {integrity: sha512-FWFlNxghg4HrXkD3ifYbS/IdL/mDHjh9QcsNyhQjN8dplUoZbejsdpmuqdA76nxj2xoWPs7p8uX2SNr9rYu0Ag==} + engines: {node: '>= 20'} + + '@octokit/graphql@9.0.3': + resolution: {integrity: sha512-grAEuupr/C1rALFnXTv6ZQhFuL1D8G5y8CN04RgrO4FIPMrtm+mcZzFG7dcBm+nq+1ppNixu+Jd78aeJOYxlGA==} + engines: {node: '>= 20'} + + '@octokit/oauth-authorization-url@8.0.0': + resolution: {integrity: sha512-7QoLPRh/ssEA/HuHBHdVdSgF8xNLz/Bc5m9fZkArJE5bb6NmVkDm3anKxXPmN1zh6b5WKZPRr3697xKT/yM3qQ==} + engines: {node: '>= 20'} + + '@octokit/oauth-methods@6.0.2': + resolution: {integrity: 
sha512-HiNOO3MqLxlt5Da5bZbLV8Zarnphi4y9XehrbaFMkcoJ+FL7sMxH/UlUsCVxpddVu4qvNDrBdaTVE2o4ITK8ng==} + engines: {node: '>= 20'} + + '@octokit/openapi-types@27.0.0': + resolution: {integrity: sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA==} + + '@octokit/plugin-paginate-rest@14.0.0': + resolution: {integrity: sha512-fNVRE7ufJiAA3XUrha2omTA39M6IXIc6GIZLvlbsm8QOQCYvpq/LkMNGyFlB1d8hTDzsAXa3OKtybdMAYsV/fw==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-request-log@6.0.0': + resolution: {integrity: sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-rest-endpoint-methods@17.0.0': + resolution: {integrity: sha512-B5yCyIlOJFPqUUeiD0cnBJwWJO8lkJs5d8+ze9QDP6SvfiXSz1BF+91+0MeI1d2yxgOhU/O+CvtiZ9jSkHhFAw==} + engines: {node: '>= 20'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/request-error@7.1.0': + resolution: {integrity: sha512-KMQIfq5sOPpkQYajXHwnhjCC0slzCNScLHs9JafXc4RAJI+9f+jNDlBNaIMTvazOPLgb4BnlhGJOTbnN0wIjPw==} + engines: {node: '>= 20'} + + '@octokit/request@10.0.8': + resolution: {integrity: sha512-SJZNwY9pur9Agf7l87ywFi14W+Hd9Jg6Ifivsd33+/bGUQIjNujdFiXII2/qSlN2ybqUHfp5xpekMEjIBTjlSw==} + engines: {node: '>= 20'} + + '@octokit/rest@22.0.1': + resolution: {integrity: sha512-Jzbhzl3CEexhnivb1iQ0KJ7s5vvjMWcmRtq5aUsKmKDrRW6z3r84ngmiFKFvpZjpiU/9/S6ITPFRpn5s/3uQJw==} + engines: {node: '>= 20'} + + '@octokit/types@16.0.0': + resolution: {integrity: sha512-sKq+9r1Mm4efXW1FCk7hFSeJo4QKreL/tTbR0rz/qx/r1Oa2VV83LTA/H/MuCOX7uCIJmQVRKBcbmWoySjAnSg==} + '@opentelemetry/api-logs@0.208.0': resolution: {integrity: sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==} engines: {node: '>=8.0.0'} @@ -1077,10 +1299,101 @@ packages: resolution: {integrity: 
sha512-R5R9tb2AXs2IRLNKLBJDynhkfmx7mX0vi8NkhZb3gUkPWHn6HXk5J8iQ/dql0U3ApfWym4kXXmBDRGO+oeOfjg==} engines: {node: '>=14'} + '@parcel/watcher-android-arm64@2.5.6': + resolution: {integrity: sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [android] + + '@parcel/watcher-darwin-arm64@2.5.6': + resolution: {integrity: sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [darwin] + + '@parcel/watcher-darwin-x64@2.5.6': + resolution: {integrity: sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [darwin] + + '@parcel/watcher-freebsd-x64@2.5.6': + resolution: {integrity: sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + + '@parcel/watcher-linux-arm-glibc@2.5.6': + resolution: {integrity: sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + libc: [glibc] + + '@parcel/watcher-linux-arm-musl@2.5.6': + resolution: {integrity: sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + libc: [musl] + + '@parcel/watcher-linux-arm64-glibc@2.5.6': + resolution: {integrity: sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@parcel/watcher-linux-arm64-musl@2.5.6': + resolution: {integrity: sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + 
'@parcel/watcher-linux-x64-glibc@2.5.6': + resolution: {integrity: sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@parcel/watcher-linux-x64-musl@2.5.6': + resolution: {integrity: sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@parcel/watcher-win32-arm64@2.5.6': + resolution: {integrity: sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [win32] + + '@parcel/watcher-win32-ia32@2.5.6': + resolution: {integrity: sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + os: [win32] + + '@parcel/watcher-win32-x64@2.5.6': + resolution: {integrity: sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [win32] + + '@parcel/watcher@2.5.6': + resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==} + engines: {node: '>= 10.0.0'} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} + '@polka/url@1.0.0-next.29': + resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + '@posthog/core@1.20.1': resolution: {integrity: sha512-uoTmWkYCtLYFpiK37/JCq+BuCA/OZn1qQZn5cPv1EEKt3ni3Zgg48xWCnSEyGFl5KKSXlfCruiRTwnbAtCgrBA==} @@ -1533,6 +1846,32 @@ packages: '@types/react-dom': optional: true + '@radix-ui/react-toggle-group@1.1.11': + resolution: {integrity: 
sha512-5umnS0T8JQzQT6HbPyO7Hh9dgd82NmS36DQr+X/YJ9ctFNCiiQd6IJAYYZ33LUwm8M+taCz5t2ui29fHZc4Y6Q==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-toggle@1.1.10': + resolution: {integrity: sha512-lS1odchhFTeZv3xwHH31YPObmJn8gOg7Lq12inrr0+BH/l3Tsq32VfjqH1oh80ARM3mlkfMic15n0kg4sD1poQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/react-tooltip@1.2.8': resolution: {integrity: sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==} peerDependencies: @@ -1803,18 +2142,174 @@ packages: resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} engines: {node: '>=10'} + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + '@szmarczak/http-timer@4.0.6': resolution: {integrity: sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==} engines: {node: '>=10'} + '@tailwindcss/node@4.2.1': + resolution: {integrity: sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==} + + '@tailwindcss/oxide-android-arm64@4.2.1': + resolution: {integrity: sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [android] + + '@tailwindcss/oxide-darwin-arm64@4.2.1': + resolution: {integrity: 
sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [darwin] + + '@tailwindcss/oxide-darwin-x64@4.2.1': + resolution: {integrity: sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw==} + engines: {node: '>= 20'} + cpu: [x64] + os: [darwin] + + '@tailwindcss/oxide-freebsd-x64@4.2.1': + resolution: {integrity: sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [freebsd] + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.1': + resolution: {integrity: sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw==} + engines: {node: '>= 20'} + cpu: [arm] + os: [linux] + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.1': + resolution: {integrity: sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-arm64-musl@4.2.1': + resolution: {integrity: sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-linux-x64-gnu@4.2.1': + resolution: {integrity: sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-x64-musl@4.2.1': + resolution: {integrity: sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-wasm32-wasi@4.2.1': + resolution: {integrity: sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q==} + engines: {node: '>=14.0.0'} + cpu: 
[wasm32] + bundledDependencies: + - '@napi-rs/wasm-runtime' + - '@emnapi/core' + - '@emnapi/runtime' + - '@tybys/wasm-util' + - '@emnapi/wasi-threads' + - tslib + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.1': + resolution: {integrity: sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [win32] + + '@tailwindcss/oxide-win32-x64-msvc@4.2.1': + resolution: {integrity: sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [win32] + + '@tailwindcss/oxide@4.2.1': + resolution: {integrity: sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw==} + engines: {node: '>= 20'} + + '@tailwindcss/vite@4.2.1': + resolution: {integrity: sha512-TBf2sJjYeb28jD2U/OhwdW0bbOsxkWPwQ7SrqGf9sVcoYwZj7rkXljroBO9wKBut9XnmQLXanuDUeqQK0lGg/w==} + peerDependencies: + vite: ^5.2.0 || ^6 || ^7 + + '@tanstack/devtools-event-client@0.4.1': + resolution: {integrity: sha512-GRxmPw4OHZ2oZeIEUkEwt/NDvuEqzEYRAjzUVMs+I0pd4C7k1ySOiuJK2CqF+K/yEAR3YZNkW3ExrpDarh9Vwg==} + engines: {node: '>=18'} + + '@tanstack/form-core@1.28.4': + resolution: {integrity: sha512-2eox5ePrJ6kvA1DXD5QHk/GeGr3VFZ0uYR63UgQOe7bUg6h1JfXaIMqTjZK9sdGyE4oRNqFpoW54H0pZM7nObQ==} + + '@tanstack/hotkeys@0.4.1': + resolution: {integrity: sha512-EGHqcdKP2jzy0dEkahA3ABtEXohMqPlU3Ac04sBQjgesJqr9xWuesJotOfWPh3P68kQQg8krNAtFTydIN3+WSw==} + engines: {node: '>=18'} + + '@tanstack/pacer-lite@0.1.1': + resolution: {integrity: sha512-y/xtNPNt/YeyoVxE/JCx+T7yjEzpezmbb+toK8DDD1P4m7Kzs5YR956+7OKexG3f8aXgC3rLZl7b1V+yNUSy5w==} + engines: {node: '>=18'} + '@tanstack/query-core@5.90.20': resolution: {integrity: sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==} + '@tanstack/react-form@1.28.4': + resolution: {integrity: 
sha512-ZGBwl9JM2u0kol7jAWpqAkr2JSHfXJaLPsFDZWPf+ewpVkwngTTW/rGgtoDe5uVpHoDIpOhzpPCAh6O1SjGEOg==} + peerDependencies: + '@tanstack/react-start': '*' + react: ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@tanstack/react-start': + optional: true + + '@tanstack/react-hotkeys@0.4.1': + resolution: {integrity: sha512-hFh/kKQODn4kSytfIsEE/Vf1AaAb+NAFi4lx+OB49NmKY5z/BNH1/uEdYlVgOEvnDm4QrCISIMBOVpMgK5QNQg==} + engines: {node: '>=18'} + peerDependencies: + react: '>=16.8' + react-dom: '>=16.8' + '@tanstack/react-query@5.90.21': resolution: {integrity: sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==} peerDependencies: react: ^18 || ^19 + '@tanstack/react-store@0.9.2': + resolution: {integrity: sha512-Vt5usJE5sHG/cMechQfmwvwne6ktGCELe89Lmvoxe3LKRoFrhPa8OCKWs0NliG8HTJElEIj7PLtaBQIcux5pAQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@tanstack/react-virtual@3.13.23': + resolution: {integrity: sha512-XnMRnHQ23piOVj2bzJqHrRrLg4r+F86fuBcwteKfbIjJrtGxb4z7tIvPVAe4B+4UVwo9G4Giuz5fmapcrnZ0OQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@tanstack/store@0.9.2': + resolution: {integrity: sha512-K013lUJEFJK2ofFQ/hZKJUmCnpcV00ebLyOyFOWQvyQHUOZp/iYO84BM6aOGiV81JzwbX0APTVmW8YI7yiG5oA==} + + '@tanstack/virtual-core@3.13.23': + resolution: {integrity: sha512-zSz2Z2HNyLjCplANTDyl3BcdQJc2k1+yyFoKhNRmCr7V7dY8o8q5m8uFTI1/Pg1kL+Hgrz6u3Xo6eFUB7l66cg==} + + '@testing-library/dom@10.4.1': + resolution: {integrity: sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==} + engines: {node: '>=18'} + + '@testing-library/user-event@14.6.1': + resolution: {integrity: sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==} + engines: {node: '>=12', npm: '>=6'} + peerDependencies: + 
'@testing-library/dom': '>=7.21.4' + '@tootallnate/once@1.1.2': resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} engines: {node: '>= 6'} @@ -1823,6 +2318,9 @@ packages: resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} engines: {node: '>= 10'} + '@types/aria-query@5.0.4': + resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -1835,6 +2333,9 @@ packages: '@types/babel__traverse@7.28.0': resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} + '@types/better-sqlite3@7.6.13': + resolution: {integrity: sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA==} + '@types/cacheable-request@6.0.3': resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} @@ -1985,16 +2486,16 @@ packages: '@types/node@20.19.32': resolution: {integrity: sha512-Ez8QE4DMfhjjTsES9K2dwfV258qBui7qxUsoaixZDiTzbde4U12e1pXGNu/ECsUIOi5/zoCxAQxIhQnaUQ2VvA==} + '@types/node@24.11.0': + resolution: {integrity: sha512-fPxQqz4VTgPI/IQ+lj9r0h+fDR66bzoeMGHp8ASee+32OSGIkeASsoZuJixsQoVef1QJbeubcPBxKk22QVoWdw==} + '@types/plist@3.0.5': resolution: {integrity: sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA==} - '@types/prop-types@15.7.15': - resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} - - '@types/react-dom@18.3.7': - resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} + '@types/react-dom@19.2.3': 
+ resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} peerDependencies: - '@types/react': ^18.0.0 + '@types/react': ^19.2.0 '@types/react-syntax-highlighter@15.5.13': resolution: {integrity: sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==} @@ -2002,8 +2503,8 @@ packages: '@types/react-window@1.8.8': resolution: {integrity: sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==} - '@types/react@18.3.28': - resolution: {integrity: sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==} + '@types/react@19.2.14': + resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} '@types/responselike@1.0.3': resolution: {integrity: sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==} @@ -2011,9 +2512,6 @@ packages: '@types/semver@7.7.1': resolution: {integrity: sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==} - '@types/sqlite3@3.1.11': - resolution: {integrity: sha512-KYF+QgxAnnAh7DWPdNDroxkDI3/MspH1NMx6m/N/6fT1G6+jvsw4/ZePt8R8cr7ta58aboeTfYFBDxTJ5yv15w==} - '@types/ssh2@1.15.5': resolution: {integrity: sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==} @@ -2102,51 +2600,66 @@ packages: peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 - '@vitest/expect@3.2.4': - resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==} + '@vitest/browser-playwright@4.1.0': + resolution: {integrity: sha512-2RU7pZELY9/aVMLmABNy1HeZ4FX23FXGY1jRuHLHgWa2zaAE49aNW2GLzebW+BmbTZIKKyFF1QXvk7DEWViUCQ==} + peerDependencies: + playwright: '*' + vitest: 4.1.0 - '@vitest/mocker@3.2.4': - resolution: {integrity: 
sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==} + '@vitest/browser-preview@4.1.0': + resolution: {integrity: sha512-P2bxouIqn8yKpjKaVuOwwCjgdbhHWfx0dmDoe44+OdMyxz1dkEvmxEorx+usyLLr98AM2jUI2aYLZ6eMYhx2BQ==} + peerDependencies: + vitest: 4.1.0 + + '@vitest/browser@4.1.0': + resolution: {integrity: sha512-tG/iOrgbiHQks0ew7CdelUyNEHkv8NLrt+CqdTivIuoSnXvO7scWMn4Kqo78/UGY1NJ6Hv+vp8BvRnED/bjFdQ==} + peerDependencies: + vitest: 4.1.0 + + '@vitest/expect@4.1.0': + resolution: {integrity: sha512-EIxG7k4wlWweuCLG9Y5InKFwpMEOyrMb6ZJ1ihYu02LVj/bzUwn2VMU+13PinsjRW75XnITeFrQBMH5+dLvCDA==} + + '@vitest/mocker@4.1.0': + resolution: {integrity: sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==} peerDependencies: msw: ^2.4.9 - vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 + vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 peerDependenciesMeta: msw: optional: true vite: optional: true - '@vitest/pretty-format@3.2.4': - resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==} + '@vitest/pretty-format@4.1.0': + resolution: {integrity: sha512-3RZLZlh88Ib0J7NQTRATfc/3ZPOnSUn2uDBUoGNn5T36+bALixmzphN26OUD3LRXWkJu4H0s5vvUeqBiw+kS0A==} - '@vitest/runner@3.2.4': - resolution: {integrity: sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==} + '@vitest/runner@4.1.0': + resolution: {integrity: sha512-Duvx2OzQ7d6OjchL+trw+aSrb9idh7pnNfxrklo14p3zmNL4qPCDeIJAK+eBKYjkIwG96Bc6vYuxhqDXQOWpoQ==} - '@vitest/snapshot@3.2.4': - resolution: {integrity: sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==} + '@vitest/snapshot@4.1.0': + resolution: {integrity: sha512-0Vy9euT1kgsnj1CHttwi9i9o+4rRLEaPRSOJ5gyv579GJkNpgJK+B4HSv/rAWixx2wdAFci1X4CEPjiu2bXIMg==} - '@vitest/spy@3.2.4': - resolution: {integrity: 
sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==} + '@vitest/spy@4.1.0': + resolution: {integrity: sha512-pz77k+PgNpyMDv2FV6qmk5ZVau6c3R8HC8v342T2xlFxQKTrSeYw9waIJG8KgV9fFwAtTu4ceRzMivPTH6wSxw==} - '@vitest/utils@3.2.4': - resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + '@vitest/utils@4.1.0': + resolution: {integrity: sha512-XfPXT6a8TZY3dcGY8EdwsBulFCIw+BeeX0RZn2x/BtiY/75YGh8FeWGG8QISN/WhaqSrE2OrlDgtF8q5uhOTmw==} '@xmldom/xmldom@0.8.11': resolution: {integrity: sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==} engines: {node: '>=10.0.0'} + '@xterm/addon-canvas@0.7.0': + resolution: {integrity: sha512-LF5LYcfvefJuJ7QotNRdRSPc9YASAVDeoT5uyXS/nZshZXjYplGXRECBGiznwvhNL2I8bq1Lf5MzRwstsYQ2Iw==} + peerDependencies: + '@xterm/xterm': ^5.0.0 + '@xterm/addon-fit@0.11.0': resolution: {integrity: sha512-jYcgT6xtVYhnhgxh3QgYDnnNMYTcf8ElbxxFzX0IZo+vabQqSPAjC3c1wJrKB5E19VwQei89QCiZZP86DCPF7g==} - '@xterm/addon-serialize@0.14.0': - resolution: {integrity: sha512-uteyTU1EkrQa2Ux6P/uFl2fzmXI46jy5uoQMKEOM0fKTyiW7cSn0WrFenHm5vO5uEXX/GpwW/FgILvv3r0WbkA==} - '@xterm/addon-web-links@0.12.0': resolution: {integrity: sha512-4Smom3RPyVp7ZMYOYDoC/9eGJJJqYhnPLGGqJ6wOBfB8VxPViJNSKdgRYb8NpaM6YSelEKbA2SStD7lGyqaobw==} - '@xterm/addon-webgl@0.19.0': - resolution: {integrity: sha512-b3fMOsyLVuCeNJWxolACEUED0vm7qC0cy4wRvf3oURSzDTYVQiGPhTnhWZwIHdvC48Y+oLhvYXnY4XDXPoJo6A==} - '@xterm/xterm@6.0.0': resolution: {integrity: sha512-TQwDdQGtwwDt+2cgKDLn0IRaSxYu1tSUjgKarSDkUM0ZNiSRXFpjxEsvc/Zgc5kq5omJ+V0a8/kIM2WD3sMOYg==} @@ -2191,6 +2704,12 @@ packages: ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + allotment@1.20.5: + resolution: {integrity: sha512-7i4NT7ieXEyAd5lBrXmE7WHz/e7hRuo97+j+TwrPE85ha6kyFURoc76nom0dWSZ1pTKVEAMJy/+f3/Isfu/41A==} + 
peerDependencies: + react: ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^17.0.0 || ^18.0.0 || ^19.0.0 + ansi-escapes@7.3.0: resolution: {integrity: sha512-BvU8nYgGQBxcmMuEeUEmNTvrMVjJNSH7RgW24vXexN4Ven6qCvy4TntnvlnwnMLTVlcRQQdbRY8NKnaIoeWDNg==} engines: {node: '>=18'} @@ -2207,17 +2726,14 @@ packages: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + ansi-styles@6.2.3: resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} engines: {node: '>=12'} - any-promise@1.3.0: - resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} - - anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - app-builder-bin@4.0.0: resolution: {integrity: sha512-xwdG0FJPQMe0M0UA4Tz0zEB8rBJTRA5a476ZawAqiBkMv16GRK5xpXThOjMaEOFnZ6zabejjG4J3da0SXG63KA==} @@ -2258,9 +2774,6 @@ packages: engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} deprecated: This package is no longer supported. 
- arg@5.0.2: - resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} @@ -2268,6 +2781,9 @@ packages: resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} engines: {node: '>=10'} + aria-query@5.3.0: + resolution: {integrity: sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==} + array-buffer-byte-length@1.0.2: resolution: {integrity: sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==} engines: {node: '>= 0.4'} @@ -2329,13 +2845,6 @@ packages: resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} engines: {node: '>= 4.0.0'} - autoprefixer@10.4.24: - resolution: {integrity: sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==} - engines: {node: ^10 || ^12 || >=14} - hasBin: true - peerDependencies: - postcss: ^8.1.0 - available-typed-arrays@1.0.7: resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} engines: {node: '>= 0.4'} @@ -2346,6 +2855,10 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + balanced-match@4.0.4: + resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} + base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} @@ -2356,14 +2869,13 @@ packages: bcrypt-pbkdf@1.0.2: resolution: {integrity: 
sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} + before-after-hook@4.0.0: + resolution: {integrity: sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==} + better-sqlite3@12.6.2: resolution: {integrity: sha512-8VYKM3MjCa9WcaSAI3hzwhmyHVlH8tiGFwf0RlTsZPWJ1I5MkzjiudCo4KC4DxOaL/53A5B1sI/IbldNFDbsKA==} engines: {node: 20.x || 22.x || 23.x || 24.x || 25.x} - binary-extensions@2.3.0: - resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} - engines: {node: '>=8'} - bindings@1.5.0: resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} @@ -2386,6 +2898,10 @@ packages: brace-expansion@2.0.2: resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + brace-expansion@5.0.4: + resolution: {integrity: sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==} + engines: {node: 18 || 20 || >=22} + braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} @@ -2462,18 +2978,14 @@ packages: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} - camelcase-css@2.0.1: - resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} - engines: {node: '>= 6'} - caniuse-lite@1.0.30001769: resolution: {integrity: sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} - chai@5.3.3: - resolution: {integrity: 
sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==} + chai@6.2.2: + resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} engines: {node: '>=18'} chalk@4.1.2: @@ -2501,10 +3013,6 @@ packages: character-reference-invalid@2.0.1: resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} - check-error@2.1.3: - resolution: {integrity: sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==} - engines: {node: '>= 16'} - chevrotain-allstar@0.3.1: resolution: {integrity: sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==} peerDependencies: @@ -2513,9 +3021,9 @@ packages: chevrotain@11.0.3: resolution: {integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==} - chokidar@3.6.0: - resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} - engines: {node: '>= 8.10.0'} + chokidar@5.0.0: + resolution: {integrity: sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==} + engines: {node: '>= 20.19.0'} chownr@1.1.4: resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} @@ -2546,6 +3054,9 @@ packages: class-variance-authority@0.7.1: resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} + classnames@2.5.1: + resolution: {integrity: sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==} + clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} @@ -2619,10 +3130,6 @@ packages: resolution: {integrity: 
sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==} engines: {node: '>=20'} - commander@4.1.1: - resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} - engines: {node: '>= 6'} - commander@5.1.0: resolution: {integrity: sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==} engines: {node: '>= 6'} @@ -2702,11 +3209,6 @@ packages: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} - cssesc@3.0.0: - resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} - engines: {node: '>=4'} - hasBin: true - csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} @@ -2909,10 +3411,6 @@ packages: resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} engines: {node: '>=10'} - deep-eql@5.0.2: - resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} - engines: {node: '>=6'} - deep-extend@0.6.0: resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} engines: {node: '>=4.0.0'} @@ -2963,12 +3461,12 @@ packages: detect-node@2.1.0: resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==} + devicon@2.17.0: + resolution: {integrity: sha512-2nKUdjobJlmRSaCHa50PGsVq0VDURnq9gVzQoJggsM/NKN0tLhC/Uq2zmy2pH36Q/1q3gvYwp/GjTgv/R0Ysbg==} + devlop@1.1.0: resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} - didyoumean@1.2.2: - resolution: {integrity: 
sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} - dir-compare@3.3.0: resolution: {integrity: sha512-J7/et3WlGUCxjdnD3HAAzQ6nsnc0WL6DD7WcwJb7c39iH1+AWfg+9OqzJNaI6PkBwBvm1mhZNL9iY/nRiZXlPg==} @@ -2979,9 +3477,6 @@ packages: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} - dlv@1.1.3: - resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==} - dmg-builder@26.8.1: resolution: {integrity: sha512-glMJgnTreo8CFINujtAhCgN96QAqApDMZ8Vl1r8f0QT8QprvC1UCltV4CcWj20YoIyLZx6IUskaJZ0NV8fokcg==} @@ -2999,6 +3494,9 @@ packages: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} + dom-accessibility-api@0.5.16: + resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} + dompurify@3.2.7: resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==} @@ -3153,8 +3651,19 @@ packages: electron-updater@6.7.3: resolution: {integrity: sha512-EgkT8Z9noqXKbwc3u5FkJA+r48jwZ5DTUiOkJMOTEEH//n5Am6wfQGz7nvSFEA2oIAMv9jRzn5JKTyWeSKOPgg==} - electron@30.5.1: - resolution: {integrity: sha512-AhL7+mZ8Lg14iaNfoYTkXQ2qee8mmsQyllKdqxlpv/zrKgfxz6jNVtcRRbQtLxtF8yzcImWdfTQROpYiPumdbw==} + electron-vite@5.0.0: + resolution: {integrity: sha512-OHp/vjdlubNlhNkPkL/+3JD34ii5ov7M0GpuXEVdQeqdQ3ulvVR7Dg/rNBLfS5XPIFwgoBLDf9sjjrL+CuDyRQ==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@swc/core': ^1.0.0 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0 + peerDependenciesMeta: + '@swc/core': + optional: true + + electron@40.7.0: + resolution: {integrity: sha512-oQe76S/3V1rcb0+i45hAxnCH8udkRZSaHUNwglzNAEKbB94LSJ1qwbFo8+uRc2UsYZgCqSIMRcyX40GyOkD+Xw==} engines: {node: '>= 12.20.55'} 
hasBin: true @@ -3173,6 +3682,10 @@ packages: end-of-stream@1.4.5: resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} + enhanced-resolve@5.20.0: + resolution: {integrity: sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==} + engines: {node: '>=10.13.0'} + entities@6.0.1: resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} engines: {node: '>=0.12'} @@ -3200,8 +3713,8 @@ packages: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} - es-module-lexer@1.7.0: - resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + es-module-lexer@2.0.0: + resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==} es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} @@ -3237,9 +3750,9 @@ packages: engines: {node: '>=12'} hasBin: true - esbuild@0.21.5: - resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} - engines: {node: '>=12'} + esbuild@0.25.12: + resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} + engines: {node: '>=18'} hasBin: true escalade@3.2.0: @@ -3364,6 +3877,9 @@ packages: resolution: {integrity: sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA==} engines: {'0': node >=0.6.0} + fast-content-type-parse@3.0.0: + resolution: {integrity: sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==} + fast-deep-equal@3.1.3: resolution: {integrity: 
sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -3443,9 +3959,6 @@ packages: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} - fraction.js@5.3.4: - resolution: {integrity: sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==} - framer-motion@12.33.0: resolution: {integrity: sha512-ca8d+rRPcDP5iIF+MoT3WNc0KHJMjIyFAbtVLvM9eA7joGSpeqDfiNH/kCs1t4CHi04njYvWyj0jS4QlEK/rJQ==} peerDependencies: @@ -3490,6 +4003,11 @@ packages: fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -3573,6 +4091,10 @@ packages: deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true + glob@13.0.6: + resolution: {integrity: sha512-Wjlyrolmm8uDpm/ogGyXZXb1Z+Ca2B8NbJwqBVg0axK9GbBeoS7yGV6vjXnYdGm6X53iehEuxxbyiKp8QmN4Vw==} + engines: {node: 18 || 20 || >=22} + glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me @@ -3611,6 +4133,10 @@ packages: graphemer@1.4.0: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} + graphql@16.13.1: + resolution: {integrity: sha512-gGgrVCoDKlIZ8fIqXBBb0pPKqDgki0Z/FSKNiQzSGj2uEYHr1tq5wmBegGwJx6QB5S5cM0khSBpi/JFHMCvsmQ==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + hachure-fill@0.5.2: resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} @@ -3844,10 +4370,6 @@ packages: resolution: {integrity: sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==} engines: {node: '>= 0.4'} - is-binary-path@2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} - is-boolean-object@1.2.2: resolution: {integrity: sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==} engines: {node: '>= 0.4'} @@ -4012,10 +4534,6 @@ packages: engines: {node: '>=10'} hasBin: true - jiti@1.21.7: - resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} - hasBin: true - jiti@2.6.1: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true @@ -4023,9 +4541,6 @@ packages: js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - js-tokens@9.0.1: - resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} - js-yaml@4.1.1: resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} hasBin: true @@ -4047,6 +4562,9 @@ packages: 
json-stringify-safe@5.0.1: resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==} + json-with-bigint@3.5.7: + resolution: {integrity: sha512-7ei3MdAI5+fJPVnKlW77TKNKwQ5ppSzWvhPuSuINT/GYW9ZOC1eRKOuhV9yHG5aEsUPj9BBx5JIekkmoLHxZOw==} + json5@1.0.2: resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} hasBin: true @@ -4056,6 +4574,9 @@ packages: engines: {node: '>=6'} hasBin: true + jsonc-parser@3.3.1: + resolution: {integrity: sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==} + jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} @@ -4096,12 +4617,79 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lilconfig@3.1.3: - resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} - engines: {node: '>=14'} + lightningcss-android-arm64@1.31.1: + resolution: {integrity: sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.31.1: + resolution: {integrity: sha512-02uTEqf3vIfNMq3h/z2cJfcOXnQ0GRwQrkmPafhueLb2h7mqEidiCzkE4gBMEH65abHRiQvhdcQ+aP0D0g67sg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.31.1: + resolution: {integrity: sha512-1ObhyoCY+tGxtsz1lSx5NXCj3nirk0Y0kB/g8B8DT+sSx4G9djitg9ejFnjb3gJNWo7qXH4DIy2SUHvpoFwfTA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.31.1: + resolution: {integrity: sha512-1RINmQKAItO6ISxYgPwszQE1BrsVU5aB45ho6O42mu96UiZBxEXsuQ7cJW4zs4CEodPUioj/QrXW1r9pLUM74A==} + engines: {node: '>= 
12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.31.1: + resolution: {integrity: sha512-OOCm2//MZJ87CdDK62rZIu+aw9gBv4azMJuA8/KB74wmfS3lnC4yoPHm0uXZ/dvNNHmnZnB8XLAZzObeG0nS1g==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.31.1: + resolution: {integrity: sha512-WKyLWztD71rTnou4xAD5kQT+982wvca7E6QoLpoawZ1gP9JM0GJj4Tp5jMUh9B3AitHbRZ2/H3W5xQmdEOUlLg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + lightningcss-linux-arm64-musl@1.31.1: + resolution: {integrity: sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + lightningcss-linux-x64-gnu@1.31.1: + resolution: {integrity: sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + lightningcss-linux-x64-musl@1.31.1: + resolution: {integrity: sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + + lightningcss-win32-arm64-msvc@1.31.1: + resolution: {integrity: sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.31.1: + resolution: {integrity: sha512-I9aiFrbd7oYHwlnQDqr1Roz+fTz61oDDJX7n9tYF9FJymH1cIN1DtKw3iYt6b8WZgEjoNwVSncwF4wx/ZedMhw==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] - lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + lightningcss@1.31.1: + resolution: {integrity: sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ==} + engines: {node: '>= 12.0.0'} 
lint-staged@16.3.0: resolution: {integrity: sha512-YVHHy/p6U4/No9Af+35JLh3umJ9dPQnGTvNCbfO/T5fC60us0jFnc+vw33cqveI+kqxIFJQakcMVTO2KM+653A==} @@ -4122,6 +4710,12 @@ packages: lodash-es@4.17.23: resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} + lodash.clamp@4.0.3: + resolution: {integrity: sha512-HvzRFWjtcguTW7yd8NJBshuNaCa8aqNFtnswdT7f/cMd/1YKy5Zzoq4W/Oxvnx9l7aeY258uSdDfM793+eLsVg==} + + lodash.debounce@4.0.8: + resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.defaults@4.2.0: resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} @@ -4164,13 +4758,6 @@ packages: longest-streak@3.1.0: resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} - loose-envify@1.4.0: - resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} - hasBin: true - - loupe@3.2.1: - resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==} - lowercase-keys@2.0.0: resolution: {integrity: sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==} engines: {node: '>=8'} @@ -4181,6 +4768,10 @@ packages: lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@11.2.6: + resolution: {integrity: sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==} + engines: {node: 20 || >=22} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -4198,6 +4789,10 @@ packages: peerDependencies: react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 + 
lz-string@1.5.0: + resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} + hasBin: true + magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} @@ -4441,6 +5036,10 @@ packages: resolution: {integrity: sha512-fu656aJ0n2kcXwsnwnv9g24tkU5uSmOlTjd6WyyaKm2Z+h1qmY6bAjrcaIxF/BslFqbZ8UBtbJi7KgQOZD2PTw==} engines: {node: 20 || >=22} + minimatch@10.2.4: + resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} + engines: {node: 18 || 20 || >=22} + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -4499,6 +5098,10 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} + minipass@7.1.3: + resolution: {integrity: sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==} + engines: {node: '>=16 || 14 >=14.17'} + minizlib@2.1.2: resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} engines: {node: '>= 8'} @@ -4518,6 +5121,22 @@ packages: mlly@1.8.0: resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} + mobx-react-lite@4.1.1: + resolution: {integrity: sha512-iUxiMpsvNraCKXU+yPotsOncNNmyeS2B5DKL+TL6Tar/xm+wwNJAubJmtRSeAoYawdZqwv8Z/+5nPRHeQxTiXg==} + peerDependencies: + mobx: ^6.9.0 + react: ^16.8.0 || ^17 || ^18 || ^19 + react-dom: '*' + react-native: '*' + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true + + mobx@6.15.0: + resolution: {integrity: sha512-UczzB+0nnwGotYSgllfARAqWCJ5e/skuV2K/l+Zyck/H6pJIhLXuBnz+6vn2i211o7DtbE78HQtsYEKICHGI+g==} + 
monaco-editor@0.55.1: resolution: {integrity: sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==} @@ -4541,12 +5160,13 @@ packages: react-dom: optional: true + mrmime@2.0.1: + resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} + engines: {node: '>=10'} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - mz@2.7.0: - resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} - nan@2.25.0: resolution: {integrity: sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g==} @@ -4640,14 +5260,6 @@ packages: engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} deprecated: This package is no longer supported. - object-assign@4.1.1: - resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} - engines: {node: '>=0.10.0'} - - object-hash@3.0.0: - resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} - engines: {node: '>= 6'} - object-inspect@1.13.4: resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} engines: {node: '>= 0.4'} @@ -4672,6 +5284,9 @@ packages: resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} engines: {node: '>= 0.4'} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -4762,6 +5377,10 @@ packages: resolution: {integrity: 
sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} + path-scurry@2.0.2: + resolution: {integrity: sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg==} + engines: {node: 18 || 20 || >=22} + path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -4769,10 +5388,6 @@ packages: pathe@2.0.3: resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} - pathval@2.0.1: - resolution: {integrity: sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==} - engines: {node: '>= 14.16'} - pe-library@0.4.1: resolution: {integrity: sha512-eRWB5LBz7PpDu4PUlwT0PhnQfTQJlDDdPa35urV4Osrm0t0AqQFGn+UIkU3klZvwJ8KPO3VbBFsXquA6p6kqZw==} engines: {node: '>=12', npm: '>=6'} @@ -4791,73 +5406,36 @@ packages: resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} engines: {node: '>=12'} - pify@2.3.0: - resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} - engines: {node: '>=0.10.0'} - - pirates@4.0.7: - resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} - engines: {node: '>= 6'} - pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + playwright-core@1.58.2: + resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} + engines: {node: '>=18'} + hasBin: true + + playwright@1.58.2: + resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==} + engines: {node: 
'>=18'} + hasBin: true + plist@3.1.0: resolution: {integrity: sha512-uysumyrvkUX0rX/dEVqt8gC3sTBzd4zoWfLeS29nb53imdaXVvLINYXTI2GNqzaMuvacNx4uJQ8+b3zXR0pkgQ==} engines: {node: '>=10.4.0'} - points-on-curve@0.2.0: - resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} - - points-on-path@0.2.1: - resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} - - possible-typed-array-names@1.1.0: - resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} - engines: {node: '>= 0.4'} - - postcss-import@15.1.0: - resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==} - engines: {node: '>=14.0.0'} - peerDependencies: - postcss: ^8.0.0 - - postcss-js@4.1.0: - resolution: {integrity: sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==} - engines: {node: ^12 || ^14 || >= 16} - peerDependencies: - postcss: ^8.4.21 - - postcss-load-config@6.0.1: - resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} - engines: {node: '>= 18'} - peerDependencies: - jiti: '>=1.21.0' - postcss: '>=8.0.9' - tsx: ^4.8.1 - yaml: ^2.4.2 - peerDependenciesMeta: - jiti: - optional: true - postcss: - optional: true - tsx: - optional: true - yaml: - optional: true + pngjs@7.0.0: + resolution: {integrity: sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==} + engines: {node: '>=14.19.0'} - postcss-nested@6.2.0: - resolution: {integrity: sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==} - engines: {node: '>=12.0'} - peerDependencies: - postcss: ^8.2.14 + points-on-curve@0.2.0: + resolution: {integrity: 
sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} - postcss-selector-parser@6.1.2: - resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==} - engines: {node: '>=4'} + points-on-path@0.2.1: + resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} - postcss-value-parser@4.2.0: - resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + possible-typed-array-names@1.1.0: + resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} + engines: {node: '>= 0.4'} postcss@8.5.6: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} @@ -4944,6 +5522,10 @@ packages: engines: {node: '>=14'} hasBin: true + pretty-format@27.5.1: + resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + prismjs@1.27.0: resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==} engines: {node: '>=6'} @@ -5009,16 +5591,19 @@ packages: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true - react-dom@18.3.1: - resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} + react-dom@19.2.4: + resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==} peerDependencies: - react: ^18.3.1 + react: ^19.2.4 react-icons@5.5.0: resolution: {integrity: sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==} 
peerDependencies: react: '*' + react-is@17.0.2: + resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} + react-markdown@10.1.0: resolution: {integrity: sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==} peerDependencies: @@ -5049,11 +5634,11 @@ packages: '@types/react': optional: true - react-resizable-panels@3.0.6: - resolution: {integrity: sha512-b3qKHQ3MLqOgSS+FRYKapNkJZf5EQzuf6+RLiq1/IlTHw99YrZ2NJZLk4hQIzTnnIkRg2LUqyVinu6YWWpUYew==} + react-resizable-panels@4.7.3: + resolution: {integrity: sha512-PYcYMLtvJD+Pr0TQNeMvddcnLOwUa/Yb4iNwU7ThNLlHaQYEEC9MIBWHaBGODzYuXIkPRZ/OWe5sbzG1Rzq5ew==} peerDependencies: - react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc - react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 react-style-singleton@2.2.3: resolution: {integrity: sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==} @@ -5076,17 +5661,14 @@ packages: react: ^18.0.0 || ^19.0.0 react-dom: ^18.0.0 || ^19.0.0 - react@18.3.1: - resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} + react@19.2.4: + resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} read-binary-file-arch@1.0.6: resolution: {integrity: sha512-BNg9EN3DD3GsDXX7Aa8O4p92sryjkmzYYgmgTAc6CA4uGLEDzFfxOxugu21akOxpcXHiEgsYkC6nPsQvLLLmEg==} hasBin: true - read-cache@1.0.0: - resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} - read-config-file@6.3.2: resolution: {integrity: sha512-M80lpCjnE6Wt6zb98DoW8WHR09nzMSpu8XHtPkiTHrJ5Az9CybfeQhTJ8D7saeBHpGhLPIVyA8lcL6ZmdKwY6Q==} engines: {node: '>=12.0.0'} @@ -5101,9 +5683,9 @@ packages: 
readdir-glob@1.1.3: resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} - readdirp@3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} + readdirp@5.0.0: + resolution: {integrity: sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==} + engines: {node: '>= 20.19.0'} reflect.getprototypeof@1.0.10: resolution: {integrity: sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==} @@ -5183,6 +5765,9 @@ packages: resolution: {integrity: sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA==} engines: {node: '>=12', npm: '>=6'} + reselect@5.1.1: + resolution: {integrity: sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==} + resolve-alpn@1.2.1: resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} @@ -5277,8 +5862,8 @@ packages: resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==} engines: {node: '>=11.0.0'} - scheduler@0.23.2: - resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} semver-compare@1.0.0: resolution: {integrity: sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==} @@ -5374,6 +5959,10 @@ packages: resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==} engines: {node: '>=10'} + sirv@3.0.2: + resolution: {integrity: 
sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==} + engines: {node: '>=18'} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -5390,6 +5979,10 @@ packages: resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + smol-toml@1.6.0: + resolution: {integrity: sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==} + engines: {node: '>= 18'} + socks-proxy-agent@6.2.1: resolution: {integrity: sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==} engines: {node: '>= 10'} @@ -5402,6 +5995,12 @@ packages: resolution: {integrity: sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==} engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + sonner@2.0.7: + resolution: {integrity: sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 || ^19.0.0-rc + react-dom: ^18.0.0 || ^19.0.0 || ^19.0.0-rc + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} @@ -5450,8 +6049,8 @@ packages: state-local@1.0.7: resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} - std-env@3.10.0: - resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + std-env@4.0.0: + resolution: {integrity: sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==} stop-iteration-iterator@1.1.0: resolution: {integrity: 
sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} @@ -5530,9 +6129,6 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} - strip-literal@3.1.0: - resolution: {integrity: sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==} - style-to-js@1.1.21: resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==} @@ -5542,11 +6138,6 @@ packages: stylis@4.3.6: resolution: {integrity: sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==} - sucrase@3.35.1: - resolution: {integrity: sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==} - engines: {node: '>=16 || 14 >=14.17'} - hasBin: true - suffix-thumb@5.0.2: resolution: {integrity: sha512-I5PWXAFKx3FYnI9a+dQMWNqTxoRt6vdBdb0O+BJ1sxXCWtSoQCusc13E58f+9p4MYx/qCnEMkD5jac6K2j3dgA==} @@ -5566,21 +6157,21 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + tabbable@6.4.0: + resolution: {integrity: sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg==} + tailwind-merge@2.6.1: resolution: {integrity: sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==} tailwind-merge@3.4.0: resolution: {integrity: sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g==} - tailwindcss-animate@1.0.7: - resolution: {integrity: sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==} - peerDependencies: - tailwindcss: '>=3.0.0 || insiders' + tailwindcss@4.2.1: + resolution: {integrity: 
sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw==} - tailwindcss@3.4.19: - resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==} - engines: {node: '>=14.0.0'} - hasBin: true + tapable@2.3.0: + resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} + engines: {node: '>=6'} tar-fs@2.1.4: resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} @@ -5604,13 +6195,6 @@ packages: text-table@0.2.0: resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} - thenify-all@1.6.0: - resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} - engines: {node: '>=0.8'} - - thenify@3.3.1: - resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} - tiny-async-pool@1.3.0: resolution: {integrity: sha512-01EAw5EDrcVrdgyCLgoSPvqznC0sVxDSVeiOz09FUpjh71G79VCqneOr+xvt7T1r76CF6ZZfPjHorN2+d+3mqA==} @@ -5620,9 +6204,6 @@ packages: tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} - tinyexec@0.3.2: - resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} - tinyexec@1.0.2: resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} engines: {node: '>=18'} @@ -5631,16 +6212,8 @@ packages: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} - tinypool@1.1.1: - resolution: {integrity: 
sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==} - engines: {node: ^18.0.0 || >=20.0.0} - - tinyrainbow@2.0.0: - resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} - engines: {node: '>=14.0.0'} - - tinyspy@4.0.4: - resolution: {integrity: sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==} + tinyrainbow@3.1.0: + resolution: {integrity: sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==} engines: {node: '>=14.0.0'} tmp-promise@3.0.3: @@ -5654,6 +6227,10 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + totalist@3.0.1: + resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + tree-kill@1.2.2: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true @@ -5677,9 +6254,6 @@ packages: resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} engines: {node: '>=6.10'} - ts-interface-checker@0.1.13: - resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} - tsconfig-paths@3.15.0: resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} @@ -5689,6 +6263,9 @@ packages: tunnel-agent@0.6.0: resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} + tw-animate-css@1.4.0: + resolution: {integrity: sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ==} + tweetnacl@0.14.5: resolution: {integrity: 
sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} @@ -5738,6 +6315,9 @@ packages: undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici-types@7.16.0: + resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + unified@11.0.5: resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} @@ -5776,6 +6356,9 @@ packages: unist-util-visit@5.1.0: resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + universal-user-agent@7.0.3: + resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==} + universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} @@ -5813,6 +6396,17 @@ packages: '@types/react': optional: true + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + usehooks-ts@3.1.1: + resolution: {integrity: sha512-I4diPp9Cq6ieSUH2wu+fDAVQO43xwtulo+fKEidHUwZPnYImbtkTjzIJYcDcJqxgmX31GVqNFURodvcgHcW0pA==} + engines: {node: '>=16.15.0'} + peerDependencies: + react: ^16.8.0 || ^17 || ^18 || ^19 || ^19.0.0-rc + utf8-byte-length@1.0.5: resolution: {integrity: sha512-Xn0w3MtiQ6zoz2vFyUVruaCL53O/DwUvkEeOvj+uulMm0BkUGYWmBYVyElqZaSLhY6ZD0ulfU3aBra2aVT4xfA==} @@ -5836,27 +6430,27 @@ packages: vfile@6.0.3: resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} - vite-node@3.2.4: - resolution: {integrity: 
sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==} + vite@6.4.1: + resolution: {integrity: sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true - - vite@5.4.21: - resolution: {integrity: sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' less: '*' lightningcss: ^1.21.0 sass: '*' sass-embedded: '*' stylus: '*' sugarss: '*' - terser: ^5.4.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 peerDependenciesMeta: '@types/node': optional: true + jiti: + optional: true less: optional: true lightningcss: @@ -5871,27 +6465,52 @@ packages: optional: true terser: optional: true + tsx: + optional: true + yaml: + optional: true - vitest@3.2.4: - resolution: {integrity: sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==} - engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + vitest-browser-react@2.1.0: + resolution: {integrity: sha512-/cOVQ+dZojhavfsbHjcfzB3zrUxG39HIbGdvK9vSBdGc8b8HRu5Bql0p8aXtKw4sb8/E8n5XEncQxvqHtfjjag==} + peerDependencies: + '@types/react': ^18.0.0 || ^19.0.0 + '@types/react-dom': ^18.0.0 || ^19.0.0 + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + vitest: ^4.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + vitest@4.1.0: + resolution: {integrity: sha512-YbDrMF9jM2Lqc++2530UourxZHmkKLxrs4+mYhEwqWS97WJ7wOYEkcr+QfRgJ3PW9wz3odRijLZjHEaRLTNbqw==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' - '@types/debug': ^4.1.12 - '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 - '@vitest/browser': 3.2.4 - '@vitest/ui': 3.2.4 + 
'@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.1.0 + '@vitest/browser-preview': 4.1.0 + '@vitest/browser-webdriverio': 4.1.0 + '@vitest/ui': 4.1.0 happy-dom: '*' jsdom: '*' + vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 peerDependenciesMeta: '@edge-runtime/vm': optional: true - '@types/debug': + '@opentelemetry/api': optional: true '@types/node': optional: true - '@vitest/browser': + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': optional: true '@vitest/ui': optional: true @@ -5982,6 +6601,18 @@ packages: wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + ws@8.19.0: + resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + xmlbuilder@15.1.1: resolution: {integrity: sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==} engines: {node: '>=8.0'} @@ -6044,8 +6675,6 @@ snapshots: 7zip-bin@5.2.0: {} - '@alloc/quick-lru@5.2.0': {} - '@antfu/install-pkg@1.1.0': dependencies: package-manager-detector: 1.6.0 @@ -6130,6 +6759,11 @@ snapshots: dependencies: '@babel/types': 7.29.0 + '@babel/plugin-transform-arrow-functions@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.29.0)': dependencies: '@babel/core': 7.29.0 @@ -6165,6 +6799,32 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 + '@base-ui/react@1.3.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + 
'@babel/runtime': 7.28.6 + '@base-ui/utils': 0.2.6(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@floating-ui/react-dom': 2.1.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@floating-ui/utils': 0.2.11 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + tabbable: 6.4.0 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + + '@base-ui/utils@0.2.6(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@babel/runtime': 7.28.6 + '@floating-ui/utils': 0.2.11 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + reselect: 5.1.1 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + + '@blazediff/core@1.9.1': {} + '@braintree/sanitize-url@7.1.2': {} '@chevrotain/cst-dts-gen@11.0.3': @@ -6324,7 +6984,7 @@ snapshots: '@esbuild/aix-ppc64@0.19.12': optional: true - '@esbuild/aix-ppc64@0.21.5': + '@esbuild/aix-ppc64@0.25.12': optional: true '@esbuild/android-arm64@0.18.20': @@ -6333,7 +6993,7 @@ snapshots: '@esbuild/android-arm64@0.19.12': optional: true - '@esbuild/android-arm64@0.21.5': + '@esbuild/android-arm64@0.25.12': optional: true '@esbuild/android-arm@0.18.20': @@ -6342,7 +7002,7 @@ snapshots: '@esbuild/android-arm@0.19.12': optional: true - '@esbuild/android-arm@0.21.5': + '@esbuild/android-arm@0.25.12': optional: true '@esbuild/android-x64@0.18.20': @@ -6351,7 +7011,7 @@ snapshots: '@esbuild/android-x64@0.19.12': optional: true - '@esbuild/android-x64@0.21.5': + '@esbuild/android-x64@0.25.12': optional: true '@esbuild/darwin-arm64@0.18.20': @@ -6360,7 +7020,7 @@ snapshots: '@esbuild/darwin-arm64@0.19.12': optional: true - '@esbuild/darwin-arm64@0.21.5': + '@esbuild/darwin-arm64@0.25.12': optional: true '@esbuild/darwin-x64@0.18.20': @@ -6369,7 +7029,7 @@ snapshots: '@esbuild/darwin-x64@0.19.12': optional: true - '@esbuild/darwin-x64@0.21.5': + '@esbuild/darwin-x64@0.25.12': optional: true 
'@esbuild/freebsd-arm64@0.18.20': @@ -6378,7 +7038,7 @@ snapshots: '@esbuild/freebsd-arm64@0.19.12': optional: true - '@esbuild/freebsd-arm64@0.21.5': + '@esbuild/freebsd-arm64@0.25.12': optional: true '@esbuild/freebsd-x64@0.18.20': @@ -6387,7 +7047,7 @@ snapshots: '@esbuild/freebsd-x64@0.19.12': optional: true - '@esbuild/freebsd-x64@0.21.5': + '@esbuild/freebsd-x64@0.25.12': optional: true '@esbuild/linux-arm64@0.18.20': @@ -6396,7 +7056,7 @@ snapshots: '@esbuild/linux-arm64@0.19.12': optional: true - '@esbuild/linux-arm64@0.21.5': + '@esbuild/linux-arm64@0.25.12': optional: true '@esbuild/linux-arm@0.18.20': @@ -6405,7 +7065,7 @@ snapshots: '@esbuild/linux-arm@0.19.12': optional: true - '@esbuild/linux-arm@0.21.5': + '@esbuild/linux-arm@0.25.12': optional: true '@esbuild/linux-ia32@0.18.20': @@ -6414,7 +7074,7 @@ snapshots: '@esbuild/linux-ia32@0.19.12': optional: true - '@esbuild/linux-ia32@0.21.5': + '@esbuild/linux-ia32@0.25.12': optional: true '@esbuild/linux-loong64@0.18.20': @@ -6423,7 +7083,7 @@ snapshots: '@esbuild/linux-loong64@0.19.12': optional: true - '@esbuild/linux-loong64@0.21.5': + '@esbuild/linux-loong64@0.25.12': optional: true '@esbuild/linux-mips64el@0.18.20': @@ -6432,7 +7092,7 @@ snapshots: '@esbuild/linux-mips64el@0.19.12': optional: true - '@esbuild/linux-mips64el@0.21.5': + '@esbuild/linux-mips64el@0.25.12': optional: true '@esbuild/linux-ppc64@0.18.20': @@ -6441,7 +7101,7 @@ snapshots: '@esbuild/linux-ppc64@0.19.12': optional: true - '@esbuild/linux-ppc64@0.21.5': + '@esbuild/linux-ppc64@0.25.12': optional: true '@esbuild/linux-riscv64@0.18.20': @@ -6450,7 +7110,7 @@ snapshots: '@esbuild/linux-riscv64@0.19.12': optional: true - '@esbuild/linux-riscv64@0.21.5': + '@esbuild/linux-riscv64@0.25.12': optional: true '@esbuild/linux-s390x@0.18.20': @@ -6459,7 +7119,7 @@ snapshots: '@esbuild/linux-s390x@0.19.12': optional: true - '@esbuild/linux-s390x@0.21.5': + '@esbuild/linux-s390x@0.25.12': optional: true '@esbuild/linux-x64@0.18.20': @@ 
-6468,7 +7128,10 @@ snapshots: '@esbuild/linux-x64@0.19.12': optional: true - '@esbuild/linux-x64@0.21.5': + '@esbuild/linux-x64@0.25.12': + optional: true + + '@esbuild/netbsd-arm64@0.25.12': optional: true '@esbuild/netbsd-x64@0.18.20': @@ -6477,7 +7140,10 @@ snapshots: '@esbuild/netbsd-x64@0.19.12': optional: true - '@esbuild/netbsd-x64@0.21.5': + '@esbuild/netbsd-x64@0.25.12': + optional: true + + '@esbuild/openbsd-arm64@0.25.12': optional: true '@esbuild/openbsd-x64@0.18.20': @@ -6486,7 +7152,10 @@ snapshots: '@esbuild/openbsd-x64@0.19.12': optional: true - '@esbuild/openbsd-x64@0.21.5': + '@esbuild/openbsd-x64@0.25.12': + optional: true + + '@esbuild/openharmony-arm64@0.25.12': optional: true '@esbuild/sunos-x64@0.18.20': @@ -6495,7 +7164,7 @@ snapshots: '@esbuild/sunos-x64@0.19.12': optional: true - '@esbuild/sunos-x64@0.21.5': + '@esbuild/sunos-x64@0.25.12': optional: true '@esbuild/win32-arm64@0.18.20': @@ -6504,7 +7173,7 @@ snapshots: '@esbuild/win32-arm64@0.19.12': optional: true - '@esbuild/win32-arm64@0.21.5': + '@esbuild/win32-arm64@0.25.12': optional: true '@esbuild/win32-ia32@0.18.20': @@ -6513,7 +7182,7 @@ snapshots: '@esbuild/win32-ia32@0.19.12': optional: true - '@esbuild/win32-ia32@0.21.5': + '@esbuild/win32-ia32@0.25.12': optional: true '@esbuild/win32-x64@0.18.20': @@ -6522,7 +7191,7 @@ snapshots: '@esbuild/win32-x64@0.19.12': optional: true - '@esbuild/win32-x64@0.21.5': + '@esbuild/win32-x64@0.25.12': optional: true '@eslint-community/eslint-utils@4.9.1(eslint@8.57.1)': @@ -6552,22 +7221,43 @@ snapshots: dependencies: '@floating-ui/utils': 0.2.10 + '@floating-ui/core@1.7.5': + dependencies: + '@floating-ui/utils': 0.2.11 + '@floating-ui/dom@1.7.5': dependencies: '@floating-ui/core': 1.7.4 '@floating-ui/utils': 0.2.10 - '@floating-ui/react-dom@2.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@floating-ui/dom@1.7.6': + dependencies: + '@floating-ui/core': 1.7.5 + '@floating-ui/utils': 0.2.11 + + 
'@floating-ui/react-dom@2.1.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@floating-ui/dom': 1.7.5 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + '@floating-ui/react-dom@2.1.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@floating-ui/dom': 1.7.6 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) '@floating-ui/utils@0.2.10': {} + '@floating-ui/utils@0.2.11': {} + '@gar/promisify@1.1.3': optional: true + '@graphql-typed-document-node/core@3.2.0(graphql@16.13.1)': + dependencies: + graphql: 16.13.1 + '@humanwhocodes/config-array@0.13.0': dependencies: '@humanwhocodes/object-schema': 2.0.3 @@ -6580,6 +7270,17 @@ snapshots: '@humanwhocodes/object-schema@2.0.3': {} + '@ianvs/prettier-plugin-sort-imports@4.7.1(prettier@3.6.2)': + dependencies: + '@babel/generator': 7.29.1 + '@babel/parser': 7.29.0 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + prettier: 3.6.2 + semver: 7.7.4 + transitivePeerDependencies: + - supports-color + '@iconify/types@2.0.0': {} '@iconify/utils@3.1.0': @@ -6626,6 +7327,12 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@linear/sdk@77.0.0(graphql@16.13.1)': + dependencies: + '@graphql-typed-document-node/core': 3.2.0(graphql@16.13.1) + transitivePeerDependencies: + - graphql + '@malept/cross-spawn-promise@1.1.1': dependencies: cross-spawn: 7.0.6 @@ -6651,12 +7358,12 @@ snapshots: dependencies: state-local: 1.0.7 - '@monaco-editor/react@4.7.0(monaco-editor@0.55.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@monaco-editor/react@4.7.0(monaco-editor@0.55.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@monaco-editor/loader': 1.7.0 monaco-editor: 0.55.1 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) '@nodelib/fs.scandir@2.1.5': dependencies: @@ -6696,6 +7403,85 @@ snapshots: rimraf: 3.0.2 optional: true + 
'@octokit/auth-oauth-device@8.0.3': + dependencies: + '@octokit/oauth-methods': 6.0.2 + '@octokit/request': 10.0.8 + '@octokit/types': 16.0.0 + universal-user-agent: 7.0.3 + + '@octokit/auth-token@6.0.0': {} + + '@octokit/core@7.0.6': + dependencies: + '@octokit/auth-token': 6.0.0 + '@octokit/graphql': 9.0.3 + '@octokit/request': 10.0.8 + '@octokit/request-error': 7.1.0 + '@octokit/types': 16.0.0 + before-after-hook: 4.0.0 + universal-user-agent: 7.0.3 + + '@octokit/endpoint@11.0.3': + dependencies: + '@octokit/types': 16.0.0 + universal-user-agent: 7.0.3 + + '@octokit/graphql@9.0.3': + dependencies: + '@octokit/request': 10.0.8 + '@octokit/types': 16.0.0 + universal-user-agent: 7.0.3 + + '@octokit/oauth-authorization-url@8.0.0': {} + + '@octokit/oauth-methods@6.0.2': + dependencies: + '@octokit/oauth-authorization-url': 8.0.0 + '@octokit/request': 10.0.8 + '@octokit/request-error': 7.1.0 + '@octokit/types': 16.0.0 + + '@octokit/openapi-types@27.0.0': {} + + '@octokit/plugin-paginate-rest@14.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/types': 16.0.0 + + '@octokit/plugin-request-log@6.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + + '@octokit/plugin-rest-endpoint-methods@17.0.0(@octokit/core@7.0.6)': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/types': 16.0.0 + + '@octokit/request-error@7.1.0': + dependencies: + '@octokit/types': 16.0.0 + + '@octokit/request@10.0.8': + dependencies: + '@octokit/endpoint': 11.0.3 + '@octokit/request-error': 7.1.0 + '@octokit/types': 16.0.0 + fast-content-type-parse: 3.0.0 + json-with-bigint: 3.5.7 + universal-user-agent: 7.0.3 + + '@octokit/rest@22.0.1': + dependencies: + '@octokit/core': 7.0.6 + '@octokit/plugin-paginate-rest': 14.0.0(@octokit/core@7.0.6) + '@octokit/plugin-request-log': 6.0.0(@octokit/core@7.0.6) + '@octokit/plugin-rest-endpoint-methods': 17.0.0(@octokit/core@7.0.6) + + '@octokit/types@16.0.0': + dependencies: + '@octokit/openapi-types': 27.0.0 + 
'@opentelemetry/api-logs@0.208.0': dependencies: '@opentelemetry/api': 1.9.0 @@ -6772,9 +7558,71 @@ snapshots: '@opentelemetry/semantic-conventions@1.39.0': {} + '@parcel/watcher-android-arm64@2.5.6': + optional: true + + '@parcel/watcher-darwin-arm64@2.5.6': + optional: true + + '@parcel/watcher-darwin-x64@2.5.6': + optional: true + + '@parcel/watcher-freebsd-x64@2.5.6': + optional: true + + '@parcel/watcher-linux-arm-glibc@2.5.6': + optional: true + + '@parcel/watcher-linux-arm-musl@2.5.6': + optional: true + + '@parcel/watcher-linux-arm64-glibc@2.5.6': + optional: true + + '@parcel/watcher-linux-arm64-musl@2.5.6': + optional: true + + '@parcel/watcher-linux-x64-glibc@2.5.6': + optional: true + + '@parcel/watcher-linux-x64-musl@2.5.6': + optional: true + + '@parcel/watcher-win32-arm64@2.5.6': + optional: true + + '@parcel/watcher-win32-ia32@2.5.6': + optional: true + + '@parcel/watcher-win32-x64@2.5.6': + optional: true + + '@parcel/watcher@2.5.6': + dependencies: + detect-libc: 2.1.2 + is-glob: 4.0.3 + node-addon-api: 7.1.1 + picomatch: 4.0.3 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.6 + '@parcel/watcher-darwin-arm64': 2.5.6 + '@parcel/watcher-darwin-x64': 2.5.6 + '@parcel/watcher-freebsd-x64': 2.5.6 + '@parcel/watcher-linux-arm-glibc': 2.5.6 + '@parcel/watcher-linux-arm-musl': 2.5.6 + '@parcel/watcher-linux-arm64-glibc': 2.5.6 + '@parcel/watcher-linux-arm64-musl': 2.5.6 + '@parcel/watcher-linux-x64-glibc': 2.5.6 + '@parcel/watcher-linux-x64-musl': 2.5.6 + '@parcel/watcher-win32-arm64': 2.5.6 + '@parcel/watcher-win32-ia32': 2.5.6 + '@parcel/watcher-win32-x64': 2.5.6 + '@pkgjs/parseargs@0.11.0': optional: true + '@polka/url@1.0.0-next.29': {} + '@posthog/core@1.20.1': dependencies: cross-spawn: 7.0.6 @@ -6808,532 +7656,558 @@ snapshots: '@radix-ui/primitive@1.1.3': {} - '@radix-ui/react-accordion@1.2.12(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + 
'@radix-ui/react-accordion@1.2.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 
18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-alert-dialog@1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-alert-dialog@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dialog': 1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-arrow@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + 
'@radix-ui/react-arrow@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-checkbox@1.3.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-checkbox@1.3.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-collapsible@1.1.12(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-collapsible@1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-id': 
1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-collection@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-collection@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + 
'@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-context-menu@2.2.16(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-context-menu@2.2.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-menu': 2.1.16(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 
19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-context@1.1.2(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-context@1.1.2(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-context@1.1.3(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-context@1.1.3(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-dialog@1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dialog@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - 
'@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) aria-hidden: 1.2.6 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.7.2(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-direction@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-direction@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 
optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-focus-guards@1.1.3(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-focus-guards@1.1.3(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + 
'@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-id@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-id@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-label@2.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-label@2.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + 
'@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-menu@2.1.16(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-menu@2.1.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - 
'@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-roving-focus': 
1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) aria-hidden: 1.2.6 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.7.2(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-popover@1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-popover@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 
1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) aria-hidden: 1.2.6 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.7.2(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-remove-scroll: 
2.7.2(@types/react@19.2.14)(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) - - '@radix-ui/react-popper@1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@floating-ui/react-dom': 2.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-arrow': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-rect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.28)(react@18.3.1) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-popper@1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@floating-ui/react-dom': 2.1.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + 
'@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-rect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.14)(react@19.2.4) '@radix-ui/rect': 1.1.1 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-portal@1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-portal@1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-presence@1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-presence@1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - 
'@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-primitive@2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-primitive@2.1.4(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-primitive@2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-slot': 1.2.4(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-slot': 1.2.4(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - 
'@radix-ui/react-progress@1.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-progress@1.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-context': 1.1.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-context': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-radio-group@1.3.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-radio-group@1.3.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 
1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 
1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-scroll-area@1.2.10(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + 
'@radix-ui/react-scroll-area@1.2.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/number': 1.1.1 '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - 
'@radix-ui/react-select@2.2.6(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-select@2.2.6(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/number': 1.1.1 '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-previous': 
1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-visually-hidden': 
1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) aria-hidden: 1.2.6 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.7.2(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-separator@1.1.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-slot@1.2.3(@types/react@19.2.14)(react@19.2.4)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 - '@radix-ui/react-separator@1.1.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-slot@1.2.4(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-primitive': 2.1.4(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 - '@radix-ui/react-slot@1.2.3(@types/react@18.3.28)(react@18.3.1)': + 
'@radix-ui/react-switch@1.2.6(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-slot@1.2.4(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-toast@1.2.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-switch@1.2.6(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-toggle-group@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-toggle': 1.1.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-toast@1.2.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-toggle@1.1.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - 
'@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-tooltip@1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-tooltip@1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 
1.1.5(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.3(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) optionalDependencies: - 
'@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) - '@radix-ui/react-use-callback-ref@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-controllable-state@1.2.2(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-use-effect-event': 0.0.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-effect-event@0.0.2(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - 
'@radix-ui/react-use-layout-effect@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-previous@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-previous@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-rect@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-rect@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: '@radix-ui/rect': 1.1.1 - react: 18.3.1 + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-use-size@1.1.1(@types/react@18.3.28)(react@18.3.1)': + '@radix-ui/react-use-size@1.1.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.28)(react@18.3.1) - react: 18.3.1 + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.4) + react: 19.2.4 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) 
optionalDependencies: - '@types/react': 18.3.28 - '@types/react-dom': 18.3.7(@types/react@18.3.28) + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) '@radix-ui/rect@1.1.1': {} @@ -7442,31 +8316,164 @@ snapshots: dependencies: '@shikijs/types': 3.22.0 - '@shikijs/types@3.22.0': + '@shikijs/types@3.22.0': + dependencies: + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + '@shikijs/vscode-textmate@10.0.2': {} + + '@sindresorhus/is@4.6.0': {} + + '@standard-schema/spec@1.1.0': {} + + '@szmarczak/http-timer@4.0.6': + dependencies: + defer-to-connect: 2.0.1 + + '@tailwindcss/node@4.2.1': + dependencies: + '@jridgewell/remapping': 2.3.5 + enhanced-resolve: 5.20.0 + jiti: 2.6.1 + lightningcss: 1.31.1 + magic-string: 0.30.21 + source-map-js: 1.2.1 + tailwindcss: 4.2.1 + + '@tailwindcss/oxide-android-arm64@4.2.1': + optional: true + + '@tailwindcss/oxide-darwin-arm64@4.2.1': + optional: true + + '@tailwindcss/oxide-darwin-x64@4.2.1': + optional: true + + '@tailwindcss/oxide-freebsd-x64@4.2.1': + optional: true + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.1': + optional: true + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.1': + optional: true + + '@tailwindcss/oxide-linux-arm64-musl@4.2.1': + optional: true + + '@tailwindcss/oxide-linux-x64-gnu@4.2.1': + optional: true + + '@tailwindcss/oxide-linux-x64-musl@4.2.1': + optional: true + + '@tailwindcss/oxide-wasm32-wasi@4.2.1': + optional: true + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.1': + optional: true + + '@tailwindcss/oxide-win32-x64-msvc@4.2.1': + optional: true + + '@tailwindcss/oxide@4.2.1': + optionalDependencies: + '@tailwindcss/oxide-android-arm64': 4.2.1 + '@tailwindcss/oxide-darwin-arm64': 4.2.1 + '@tailwindcss/oxide-darwin-x64': 4.2.1 + '@tailwindcss/oxide-freebsd-x64': 4.2.1 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.1 + '@tailwindcss/oxide-linux-arm64-gnu': 4.2.1 + '@tailwindcss/oxide-linux-arm64-musl': 4.2.1 + '@tailwindcss/oxide-linux-x64-gnu': 4.2.1 + 
'@tailwindcss/oxide-linux-x64-musl': 4.2.1 + '@tailwindcss/oxide-wasm32-wasi': 4.2.1 + '@tailwindcss/oxide-win32-arm64-msvc': 4.2.1 + '@tailwindcss/oxide-win32-x64-msvc': 4.2.1 + + '@tailwindcss/vite@4.2.1(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': + dependencies: + '@tailwindcss/node': 4.2.1 + '@tailwindcss/oxide': 4.2.1 + tailwindcss: 4.2.1 + vite: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) + + '@tanstack/devtools-event-client@0.4.1': {} + + '@tanstack/form-core@1.28.4': + dependencies: + '@tanstack/devtools-event-client': 0.4.1 + '@tanstack/pacer-lite': 0.1.1 + '@tanstack/store': 0.9.2 + + '@tanstack/hotkeys@0.4.1': + dependencies: + '@tanstack/store': 0.9.2 + + '@tanstack/pacer-lite@0.1.1': {} + + '@tanstack/query-core@5.90.20': {} + + '@tanstack/react-form@1.28.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/form-core': 1.28.4 + '@tanstack/react-store': 0.9.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + transitivePeerDependencies: + - react-dom + + '@tanstack/react-hotkeys@0.4.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/hotkeys': 0.4.1 + '@tanstack/react-store': 0.9.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + + '@tanstack/react-query@5.90.21(react@19.2.4)': + dependencies: + '@tanstack/query-core': 5.90.20 + react: 19.2.4 + + '@tanstack/react-store@0.9.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/store': 0.9.2 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + use-sync-external-store: 1.6.0(react@19.2.4) + + '@tanstack/react-virtual@3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@shikijs/vscode-textmate': 10.0.2 - '@types/hast': 3.0.4 + '@tanstack/virtual-core': 3.13.23 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) - '@shikijs/vscode-textmate@10.0.2': {} + '@tanstack/store@0.9.2': {} - 
'@sindresorhus/is@4.6.0': {} + '@tanstack/virtual-core@3.13.23': {} - '@szmarczak/http-timer@4.0.6': + '@testing-library/dom@10.4.1': dependencies: - defer-to-connect: 2.0.1 - - '@tanstack/query-core@5.90.20': {} + '@babel/code-frame': 7.29.0 + '@babel/runtime': 7.28.6 + '@types/aria-query': 5.0.4 + aria-query: 5.3.0 + dom-accessibility-api: 0.5.16 + lz-string: 1.5.0 + picocolors: 1.1.1 + pretty-format: 27.5.1 - '@tanstack/react-query@5.90.21(react@18.3.1)': + '@testing-library/user-event@14.6.1(@testing-library/dom@10.4.1)': dependencies: - '@tanstack/query-core': 5.90.20 - react: 18.3.1 + '@testing-library/dom': 10.4.1 '@tootallnate/once@1.1.2': optional: true '@tootallnate/once@2.0.0': {} + '@types/aria-query@5.0.4': {} + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.29.0 @@ -7488,6 +8495,10 @@ snapshots: dependencies: '@babel/types': 7.29.0 + '@types/better-sqlite3@7.6.13': + dependencies: + '@types/node': 20.19.32 + '@types/cacheable-request@6.0.3': dependencies: '@types/http-cache-semantics': 4.2.0 @@ -7669,29 +8680,30 @@ snapshots: dependencies: undici-types: 6.21.0 + '@types/node@24.11.0': + dependencies: + undici-types: 7.16.0 + '@types/plist@3.0.5': dependencies: '@types/node': 20.19.32 xmlbuilder: 15.1.1 optional: true - '@types/prop-types@15.7.15': {} - - '@types/react-dom@18.3.7(@types/react@18.3.28)': + '@types/react-dom@19.2.3(@types/react@19.2.14)': dependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 '@types/react-syntax-highlighter@15.5.13': dependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 '@types/react-window@1.8.8': dependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - '@types/react@18.3.28': + '@types/react@19.2.14': dependencies: - '@types/prop-types': 15.7.15 csstype: 3.2.3 '@types/responselike@1.0.3': @@ -7700,10 +8712,6 @@ snapshots: '@types/semver@7.7.1': {} - '@types/sqlite3@3.1.11': - dependencies: - '@types/node': 20.19.32 - '@types/ssh2@1.15.5': dependencies: '@types/node': 
18.19.130 @@ -7813,7 +8821,7 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@vitejs/plugin-react@4.7.0(vite@5.4.21(@types/node@20.19.32))': + '@vitejs/plugin-react@4.7.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': dependencies: '@babel/core': 7.29.0 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.29.0) @@ -7821,62 +8829,103 @@ snapshots: '@rolldown/pluginutils': 1.0.0-beta.27 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 5.4.21(@types/node@20.19.32) + vite: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) transitivePeerDependencies: - supports-color - '@vitest/expect@3.2.4': + '@vitest/browser-playwright@4.1.0(playwright@1.58.2)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0)': + dependencies: + '@vitest/browser': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) + '@vitest/mocker': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + playwright: 1.58.2 + tinyrainbow: 3.1.0 + vitest: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + transitivePeerDependencies: + - bufferutil + - msw + - utf-8-validate + - vite + + '@vitest/browser-preview@4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0)': + dependencies: + '@testing-library/dom': 10.4.1 + '@testing-library/user-event': 14.6.1(@testing-library/dom@10.4.1) + '@vitest/browser': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) + vitest: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + 
transitivePeerDependencies: + - bufferutil + - msw + - utf-8-validate + - vite + + '@vitest/browser@4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0)': + dependencies: + '@blazediff/core': 1.9.1 + '@vitest/mocker': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + '@vitest/utils': 4.1.0 + magic-string: 0.30.21 + pngjs: 7.0.0 + sirv: 3.0.2 + tinyrainbow: 3.1.0 + vitest: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + ws: 8.19.0 + transitivePeerDependencies: + - bufferutil + - msw + - utf-8-validate + - vite + + '@vitest/expect@4.1.0': dependencies: + '@standard-schema/spec': 1.1.0 '@types/chai': 5.2.3 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - tinyrainbow: 2.0.0 + '@vitest/spy': 4.1.0 + '@vitest/utils': 4.1.0 + chai: 6.2.2 + tinyrainbow: 3.1.0 - '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@20.19.32))': + '@vitest/mocker@4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': dependencies: - '@vitest/spy': 3.2.4 + '@vitest/spy': 4.1.0 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 5.4.21(@types/node@20.19.32) + vite: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) - '@vitest/pretty-format@3.2.4': + '@vitest/pretty-format@4.1.0': dependencies: - tinyrainbow: 2.0.0 + tinyrainbow: 3.1.0 - '@vitest/runner@3.2.4': + '@vitest/runner@4.1.0': dependencies: - '@vitest/utils': 3.2.4 + '@vitest/utils': 4.1.0 pathe: 2.0.3 - strip-literal: 3.1.0 - '@vitest/snapshot@3.2.4': + '@vitest/snapshot@4.1.0': dependencies: - '@vitest/pretty-format': 3.2.4 + '@vitest/pretty-format': 4.1.0 + '@vitest/utils': 4.1.0 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@3.2.4': - dependencies: - tinyspy: 4.0.4 + '@vitest/spy@4.1.0': {} - '@vitest/utils@3.2.4': + 
'@vitest/utils@4.1.0': dependencies: - '@vitest/pretty-format': 3.2.4 - loupe: 3.2.1 - tinyrainbow: 2.0.0 + '@vitest/pretty-format': 4.1.0 + convert-source-map: 2.0.0 + tinyrainbow: 3.1.0 '@xmldom/xmldom@0.8.11': {} - '@xterm/addon-fit@0.11.0': {} + '@xterm/addon-canvas@0.7.0(@xterm/xterm@6.0.0)': + dependencies: + '@xterm/xterm': 6.0.0 - '@xterm/addon-serialize@0.14.0': {} + '@xterm/addon-fit@0.11.0': {} '@xterm/addon-web-links@0.12.0': {} - '@xterm/addon-webgl@0.19.0': {} - '@xterm/xterm@6.0.0': {} abbrev@1.1.1: @@ -7920,6 +8969,17 @@ snapshots: json-schema-traverse: 0.4.1 uri-js: 4.4.1 + allotment@1.20.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + classnames: 2.5.1 + eventemitter3: 5.0.4 + fast-deep-equal: 3.1.3 + lodash.clamp: 4.0.3 + lodash.debounce: 4.0.8 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + usehooks-ts: 3.1.1(react@19.2.4) + ansi-escapes@7.3.0: dependencies: environment: 1.1.0 @@ -7932,14 +8992,9 @@ snapshots: dependencies: color-convert: 2.0.1 - ansi-styles@6.2.3: {} - - any-promise@1.3.0: {} + ansi-styles@5.2.0: {} - anymatch@3.1.3: - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 + ansi-styles@6.2.3: {} app-builder-bin@4.0.0: {} @@ -8067,14 +9122,16 @@ snapshots: readable-stream: 3.6.2 optional: true - arg@5.0.2: {} - argparse@2.0.1: {} aria-hidden@1.2.6: dependencies: tslib: 2.8.1 + aria-query@5.3.0: + dependencies: + dequal: 2.0.3 + array-buffer-byte-length@1.0.2: dependencies: call-bound: 1.0.4 @@ -8149,15 +9206,6 @@ snapshots: at-least-node@1.0.0: {} - autoprefixer@10.4.24(postcss@8.5.6): - dependencies: - browserslist: 4.28.1 - caniuse-lite: 1.0.30001769 - fraction.js: 5.3.4 - picocolors: 1.1.1 - postcss: 8.5.6 - postcss-value-parser: 4.2.0 - available-typed-arrays@1.0.7: dependencies: possible-typed-array-names: 1.1.0 @@ -8166,6 +9214,8 @@ snapshots: balanced-match@1.0.2: {} + balanced-match@4.0.4: {} + base64-js@1.5.1: {} baseline-browser-mapping@2.9.19: {} @@ -8174,13 +9224,13 @@ snapshots: 
dependencies: tweetnacl: 0.14.5 + before-after-hook@4.0.0: {} + better-sqlite3@12.6.2: dependencies: bindings: 1.5.0 prebuild-install: 7.1.3 - binary-extensions@2.3.0: {} - bindings@1.5.0: dependencies: file-uri-to-path: 1.0.0 @@ -8209,6 +9259,10 @@ snapshots: dependencies: balanced-match: 1.0.2 + brace-expansion@5.0.4: + dependencies: + balanced-match: 4.0.4 + braces@3.0.3: dependencies: fill-range: 7.1.1 @@ -8363,19 +9417,11 @@ snapshots: callsites@3.1.0: {} - camelcase-css@2.0.1: {} - caniuse-lite@1.0.30001769: {} ccount@2.0.1: {} - chai@5.3.3: - dependencies: - assertion-error: 2.0.1 - check-error: 2.1.3 - deep-eql: 5.0.2 - loupe: 3.2.1 - pathval: 2.0.1 + chai@6.2.2: {} chalk@4.1.2: dependencies: @@ -8396,8 +9442,6 @@ snapshots: character-reference-invalid@2.0.1: {} - check-error@2.1.3: {} - chevrotain-allstar@0.3.1(chevrotain@11.0.3): dependencies: chevrotain: 11.0.3 @@ -8412,17 +9456,9 @@ snapshots: '@chevrotain/utils': 11.0.3 lodash-es: 4.17.21 - chokidar@3.6.0: + chokidar@5.0.0: dependencies: - anymatch: 3.1.3 - braces: 3.0.3 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.3 + readdirp: 5.0.0 chownr@1.1.4: {} @@ -8442,6 +9478,8 @@ snapshots: dependencies: clsx: 2.1.1 + classnames@2.5.1: {} + clean-stack@2.2.0: optional: true @@ -8480,14 +9518,14 @@ snapshots: clsx@2.1.1: {} - cmdk@1.1.1(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + cmdk@1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-dialog': 1.1.15(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.28)(react@18.3.1) - '@radix-ui/react-primitive': 
2.1.4(@types/react-dom@18.3.7(@types/react@18.3.28))(@types/react@18.3.28)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.4) + '@radix-ui/react-primitive': 2.1.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) transitivePeerDependencies: - '@types/react' - '@types/react-dom' @@ -8513,8 +9551,6 @@ snapshots: commander@14.0.3: {} - commander@4.1.1: {} - commander@5.1.0: {} commander@7.2.0: {} @@ -8601,8 +9637,6 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 - cssesc@3.0.0: {} - csstype@3.2.3: {} cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.1): @@ -8829,8 +9863,6 @@ snapshots: dependencies: mimic-response: 3.1.0 - deep-eql@5.0.2: {} - deep-extend@0.6.0: {} deep-is@0.1.4: {} @@ -8873,12 +9905,12 @@ snapshots: detect-node@2.1.0: optional: true + devicon@2.17.0: {} + devlop@1.1.0: dependencies: dequal: 2.0.3 - didyoumean@1.2.2: {} - dir-compare@3.3.0: dependencies: buffer-equal: 1.0.1 @@ -8893,8 +9925,6 @@ snapshots: dependencies: path-type: 4.0.0 - dlv@1.1.3: {} - dmg-builder@26.8.1(electron-builder-squirrel-windows@24.13.3): dependencies: app-builder-lib: 26.8.1(dmg-builder@26.8.1)(electron-builder-squirrel-windows@24.13.3) @@ -8928,6 +9958,8 @@ snapshots: dependencies: esutils: 2.0.3 + dom-accessibility-api@0.5.16: {} + dompurify@3.2.7: optionalDependencies: '@types/trusted-types': 2.0.7 @@ -8957,12 +9989,13 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.32.2(@opentelemetry/api@1.9.0)(@types/react@18.3.28)(better-sqlite3@12.6.2)(react@18.3.1)(sqlite3@5.1.7): + 
drizzle-orm@0.32.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.2.14)(better-sqlite3@12.6.2)(react@19.2.4)(sqlite3@5.1.7): optionalDependencies: '@opentelemetry/api': 1.9.0 - '@types/react': 18.3.28 + '@types/better-sqlite3': 7.6.13 + '@types/react': 19.2.14 better-sqlite3: 12.6.2 - react: 18.3.1 + react: 19.2.4 sqlite3: 5.1.7 dunder-proto@1.0.1: @@ -9045,10 +10078,22 @@ snapshots: transitivePeerDependencies: - supports-color - electron@30.5.1: + electron-vite@5.0.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)): + dependencies: + '@babel/core': 7.29.0 + '@babel/plugin-transform-arrow-functions': 7.27.1(@babel/core@7.29.0) + cac: 6.7.14 + esbuild: 0.25.12 + magic-string: 0.30.21 + picocolors: 1.1.1 + vite: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) + transitivePeerDependencies: + - supports-color + + electron@40.7.0: dependencies: '@electron/get': 2.0.3 - '@types/node': 20.19.32 + '@types/node': 24.11.0 extract-zip: 2.0.1 transitivePeerDependencies: - supports-color @@ -9068,6 +10113,11 @@ snapshots: dependencies: once: 1.4.0 + enhanced-resolve@5.20.0: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.0 + entities@6.0.1: {} env-paths@2.2.1: {} @@ -9137,7 +10187,7 @@ snapshots: es-errors@1.3.0: {} - es-module-lexer@1.7.0: {} + es-module-lexer@2.0.0: {} es-object-atoms@1.1.1: dependencies: @@ -9221,31 +10271,34 @@ snapshots: '@esbuild/win32-ia32': 0.19.12 '@esbuild/win32-x64': 0.19.12 - esbuild@0.21.5: + esbuild@0.25.12: optionalDependencies: - '@esbuild/aix-ppc64': 0.21.5 - '@esbuild/android-arm': 0.21.5 - '@esbuild/android-arm64': 0.21.5 - '@esbuild/android-x64': 0.21.5 - '@esbuild/darwin-arm64': 0.21.5 - '@esbuild/darwin-x64': 0.21.5 - '@esbuild/freebsd-arm64': 0.21.5 - '@esbuild/freebsd-x64': 0.21.5 - '@esbuild/linux-arm': 0.21.5 - '@esbuild/linux-arm64': 0.21.5 - '@esbuild/linux-ia32': 0.21.5 - '@esbuild/linux-loong64': 0.21.5 - '@esbuild/linux-mips64el': 0.21.5 - 
'@esbuild/linux-ppc64': 0.21.5 - '@esbuild/linux-riscv64': 0.21.5 - '@esbuild/linux-s390x': 0.21.5 - '@esbuild/linux-x64': 0.21.5 - '@esbuild/netbsd-x64': 0.21.5 - '@esbuild/openbsd-x64': 0.21.5 - '@esbuild/sunos-x64': 0.21.5 - '@esbuild/win32-arm64': 0.21.5 - '@esbuild/win32-ia32': 0.21.5 - '@esbuild/win32-x64': 0.21.5 + '@esbuild/aix-ppc64': 0.25.12 + '@esbuild/android-arm': 0.25.12 + '@esbuild/android-arm64': 0.25.12 + '@esbuild/android-x64': 0.25.12 + '@esbuild/darwin-arm64': 0.25.12 + '@esbuild/darwin-x64': 0.25.12 + '@esbuild/freebsd-arm64': 0.25.12 + '@esbuild/freebsd-x64': 0.25.12 + '@esbuild/linux-arm': 0.25.12 + '@esbuild/linux-arm64': 0.25.12 + '@esbuild/linux-ia32': 0.25.12 + '@esbuild/linux-loong64': 0.25.12 + '@esbuild/linux-mips64el': 0.25.12 + '@esbuild/linux-ppc64': 0.25.12 + '@esbuild/linux-riscv64': 0.25.12 + '@esbuild/linux-s390x': 0.25.12 + '@esbuild/linux-x64': 0.25.12 + '@esbuild/netbsd-arm64': 0.25.12 + '@esbuild/netbsd-x64': 0.25.12 + '@esbuild/openbsd-arm64': 0.25.12 + '@esbuild/openbsd-x64': 0.25.12 + '@esbuild/openharmony-arm64': 0.25.12 + '@esbuild/sunos-x64': 0.25.12 + '@esbuild/win32-arm64': 0.25.12 + '@esbuild/win32-ia32': 0.25.12 + '@esbuild/win32-x64': 0.25.12 escalade@3.2.0: {} @@ -9420,6 +10473,8 @@ snapshots: extsprintf@1.4.1: optional: true + fast-content-type-parse@3.0.0: {} + fast-deep-equal@3.1.3: {} fast-glob@3.3.3: @@ -9502,16 +10557,14 @@ snapshots: format@0.2.2: {} - fraction.js@5.3.4: {} - - framer-motion@12.33.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + framer-motion@12.33.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: motion-dom: 12.33.0 motion-utils: 12.29.2 tslib: 2.8.1 optionalDependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) fs-constants@1.0.0: {} @@ -9550,6 +10603,9 @@ snapshots: fs.realpath@1.0.0: {} + fsevents@2.3.2: + optional: true + fsevents@2.3.3: optional: true @@ -9643,6 +10699,12 @@ snapshots: package-json-from-dist: 
1.0.1 path-scurry: 1.11.1 + glob@13.0.6: + dependencies: + minimatch: 10.2.4 + minipass: 7.1.3 + path-scurry: 2.0.2 + glob@7.2.3: dependencies: fs.realpath: 1.0.0 @@ -9702,6 +10764,8 @@ snapshots: graphemer@1.4.0: {} + graphql@16.13.1: {} + hachure-fill@0.5.2: {} has-bigints@1.1.0: {} @@ -10023,10 +11087,6 @@ snapshots: dependencies: has-bigints: 1.1.0 - is-binary-path@2.1.0: - dependencies: - binary-extensions: 2.3.0 - is-boolean-object@1.2.2: dependencies: call-bound: 1.0.4 @@ -10172,14 +11232,10 @@ snapshots: filelist: 1.0.4 picocolors: 1.1.1 - jiti@1.21.7: {} - jiti@2.6.1: {} js-tokens@4.0.0: {} - js-tokens@9.0.1: {} - js-yaml@4.1.1: dependencies: argparse: 2.0.1 @@ -10195,12 +11251,16 @@ snapshots: json-stringify-safe@5.0.1: optional: true + json-with-bigint@3.5.7: {} + json5@1.0.2: dependencies: minimist: 1.2.8 json5@2.2.3: {} + jsonc-parser@3.3.1: {} + jsonfile@4.0.0: optionalDependencies: graceful-fs: 4.2.11 @@ -10249,9 +11309,54 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - lilconfig@3.1.3: {} + lightningcss-android-arm64@1.31.1: + optional: true + + lightningcss-darwin-arm64@1.31.1: + optional: true + + lightningcss-darwin-x64@1.31.1: + optional: true + + lightningcss-freebsd-x64@1.31.1: + optional: true + + lightningcss-linux-arm-gnueabihf@1.31.1: + optional: true + + lightningcss-linux-arm64-gnu@1.31.1: + optional: true + + lightningcss-linux-arm64-musl@1.31.1: + optional: true + + lightningcss-linux-x64-gnu@1.31.1: + optional: true + + lightningcss-linux-x64-musl@1.31.1: + optional: true + + lightningcss-win32-arm64-msvc@1.31.1: + optional: true + + lightningcss-win32-x64-msvc@1.31.1: + optional: true - lines-and-columns@1.2.4: {} + lightningcss@1.31.1: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.31.1 + lightningcss-darwin-arm64: 1.31.1 + lightningcss-darwin-x64: 1.31.1 + lightningcss-freebsd-x64: 1.31.1 + lightningcss-linux-arm-gnueabihf: 1.31.1 + lightningcss-linux-arm64-gnu: 1.31.1 + 
lightningcss-linux-arm64-musl: 1.31.1 + lightningcss-linux-x64-gnu: 1.31.1 + lightningcss-linux-x64-musl: 1.31.1 + lightningcss-win32-arm64-msvc: 1.31.1 + lightningcss-win32-x64-msvc: 1.31.1 lint-staged@16.3.0: dependencies: @@ -10280,6 +11385,10 @@ snapshots: lodash-es@4.17.23: {} + lodash.clamp@4.0.3: {} + + lodash.debounce@4.0.8: {} + lodash.defaults@4.2.0: {} lodash.difference@4.5.0: {} @@ -10315,12 +11424,6 @@ snapshots: longest-streak@3.1.0: {} - loose-envify@1.4.0: - dependencies: - js-tokens: 4.0.0 - - loupe@3.2.1: {} - lowercase-keys@2.0.0: {} lowlight@1.20.0: @@ -10330,6 +11433,8 @@ snapshots: lru-cache@10.4.3: {} + lru-cache@11.2.6: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -10338,13 +11443,15 @@ snapshots: dependencies: yallist: 4.0.0 - lucide-react@0.542.0(react@18.3.1): + lucide-react@0.542.0(react@19.2.4): dependencies: - react: 18.3.1 + react: 19.2.4 - lucide-react@0.564.0(react@18.3.1): + lucide-react@0.564.0(react@19.2.4): dependencies: - react: 18.3.1 + react: 19.2.4 + + lz-string@1.5.0: {} magic-string@0.30.21: dependencies: @@ -10852,6 +11959,10 @@ snapshots: dependencies: '@isaacs/brace-expansion': 5.0.1 + minimatch@10.2.4: + dependencies: + brace-expansion: 5.0.4 + minimatch@3.1.2: dependencies: brace-expansion: 1.1.12 @@ -10916,6 +12027,8 @@ snapshots: minipass@7.1.2: {} + minipass@7.1.3: {} + minizlib@2.1.2: dependencies: minipass: 3.3.6 @@ -10936,6 +12049,16 @@ snapshots: pkg-types: 1.3.1 ufo: 1.6.3 + mobx-react-lite@4.1.1(mobx@6.15.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + mobx: 6.15.0 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + + mobx@6.15.0: {} + monaco-editor@0.55.1: dependencies: dompurify: 3.2.7 @@ -10947,21 +12070,17 @@ snapshots: motion-utils@12.29.2: {} - motion@12.33.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + motion@12.33.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - framer-motion: 
12.33.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + framer-motion: 12.33.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) tslib: 2.8.1 optionalDependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) - ms@2.1.3: {} + mrmime@2.0.1: {} - mz@2.7.0: - dependencies: - any-promise: 1.3.0 - object-assign: 4.1.1 - thenify-all: 1.6.0 + ms@2.1.3: {} nan@2.25.0: {} @@ -11066,10 +12185,6 @@ snapshots: set-blocking: 2.0.0 optional: true - object-assign@4.1.1: {} - - object-hash@3.0.0: {} - object-inspect@1.13.4: {} object-keys@1.1.1: {} @@ -11103,6 +12218,8 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 + obug@2.1.1: {} + once@1.4.0: dependencies: wrappy: 1.0.2 @@ -11213,12 +12330,15 @@ snapshots: lru-cache: 10.4.3 minipass: 7.1.2 + path-scurry@2.0.2: + dependencies: + lru-cache: 11.2.6 + minipass: 7.1.3 + path-type@4.0.0: {} pathe@2.0.3: {} - pathval@2.0.1: {} - pe-library@0.4.1: {} pend@1.2.0: {} @@ -11229,22 +12349,28 @@ snapshots: picomatch@4.0.3: {} - pify@2.3.0: {} - - pirates@4.0.7: {} - pkg-types@1.3.1: dependencies: confbox: 0.1.8 mlly: 1.8.0 pathe: 2.0.3 + playwright-core@1.58.2: {} + + playwright@1.58.2: + dependencies: + playwright-core: 1.58.2 + optionalDependencies: + fsevents: 2.3.2 + plist@3.1.0: dependencies: '@xmldom/xmldom': 0.8.11 base64-js: 1.5.1 xmlbuilder: 15.1.1 + pngjs@7.0.0: {} + points-on-curve@0.2.0: {} points-on-path@0.2.1: @@ -11254,38 +12380,6 @@ snapshots: possible-typed-array-names@1.1.0: {} - postcss-import@15.1.0(postcss@8.5.6): - dependencies: - postcss: 8.5.6 - postcss-value-parser: 4.2.0 - read-cache: 1.0.0 - resolve: 1.22.11 - - postcss-js@4.1.0(postcss@8.5.6): - dependencies: - camelcase-css: 2.0.1 - postcss: 8.5.6 - - postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.2): - dependencies: - lilconfig: 3.1.3 - optionalDependencies: - jiti: 1.21.7 - postcss: 8.5.6 - yaml: 2.8.2 - - postcss-nested@6.2.0(postcss@8.5.6): - dependencies: - postcss: 8.5.6 - 
postcss-selector-parser: 6.1.2 - - postcss-selector-parser@6.1.2: - dependencies: - cssesc: 3.0.0 - util-deprecate: 1.0.2 - - postcss-value-parser@4.2.0: {} - postcss@8.5.6: dependencies: nanoid: 3.3.11 @@ -11327,12 +12421,20 @@ snapshots: prelude-ls@1.2.1: {} - prettier-plugin-tailwindcss@0.6.14(prettier@3.6.2): + prettier-plugin-tailwindcss@0.6.14(@ianvs/prettier-plugin-sort-imports@4.7.1(prettier@3.6.2))(prettier@3.6.2): dependencies: prettier: 3.6.2 + optionalDependencies: + '@ianvs/prettier-plugin-sort-imports': 4.7.1(prettier@3.6.2) prettier@3.6.2: {} + pretty-format@27.5.1: + dependencies: + ansi-regex: 5.0.1 + ansi-styles: 5.2.0 + react-is: 17.0.2 + prismjs@1.27.0: {} prismjs@1.30.0: {} @@ -11398,26 +12500,27 @@ snapshots: minimist: 1.2.8 strip-json-comments: 2.0.1 - react-dom@18.3.1(react@18.3.1): + react-dom@19.2.4(react@19.2.4): dependencies: - loose-envify: 1.4.0 - react: 18.3.1 - scheduler: 0.23.2 + react: 19.2.4 + scheduler: 0.27.0 - react-icons@5.5.0(react@18.3.1): + react-icons@5.5.0(react@19.2.4): dependencies: - react: 18.3.1 + react: 19.2.4 - react-markdown@10.1.0(@types/react@18.3.28)(react@18.3.1): + react-is@17.0.2: {} + + react-markdown@10.1.0(@types/react@19.2.14)(react@19.2.4): dependencies: '@types/hast': 3.0.4 '@types/mdast': 4.0.4 - '@types/react': 18.3.28 + '@types/react': 19.2.14 devlop: 1.1.0 hast-util-to-jsx-runtime: 2.3.6 html-url-attributes: 3.0.1 mdast-util-to-hast: 13.2.1 - react: 18.3.1 + react: 19.2.4 remark-parse: 11.0.0 remark-rehype: 11.1.2 unified: 11.0.5 @@ -11428,56 +12531,54 @@ snapshots: react-refresh@0.17.0: {} - react-remove-scroll-bar@2.3.8(@types/react@18.3.28)(react@18.3.1): + react-remove-scroll-bar@2.3.8(@types/react@19.2.14)(react@19.2.4): dependencies: - react: 18.3.1 - react-style-singleton: 2.2.3(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.4) tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - 
react-remove-scroll@2.7.2(@types/react@18.3.28)(react@18.3.1): + react-remove-scroll@2.7.2(@types/react@19.2.14)(react@19.2.4): dependencies: - react: 18.3.1 - react-remove-scroll-bar: 2.3.8(@types/react@18.3.28)(react@18.3.1) - react-style-singleton: 2.2.3(@types/react@18.3.28)(react@18.3.1) + react: 19.2.4 + react-remove-scroll-bar: 2.3.8(@types/react@19.2.14)(react@19.2.4) + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.4) tslib: 2.8.1 - use-callback-ref: 1.3.3(@types/react@18.3.28)(react@18.3.1) - use-sidecar: 1.1.3(@types/react@18.3.28)(react@18.3.1) + use-callback-ref: 1.3.3(@types/react@19.2.14)(react@19.2.4) + use-sidecar: 1.1.3(@types/react@19.2.14)(react@19.2.4) optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - react-resizable-panels@3.0.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-resizable-panels@4.7.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) - react-style-singleton@2.2.3(@types/react@18.3.28)(react@18.3.1): + react-style-singleton@2.2.3(@types/react@19.2.14)(react@19.2.4): dependencies: get-nonce: 1.0.1 - react: 18.3.1 + react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - react-syntax-highlighter@15.6.6(react@18.3.1): + react-syntax-highlighter@15.6.6(react@19.2.4): dependencies: '@babel/runtime': 7.28.6 highlight.js: 10.7.3 highlightjs-vue: 1.0.0 lowlight: 1.20.0 prismjs: 1.30.0 - react: 18.3.1 + react: 19.2.4 refractor: 3.6.0 - react-window@2.2.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-window@2.2.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) - react@18.3.1: - dependencies: - loose-envify: 1.4.0 + react@19.2.4: {} read-binary-file-arch@1.0.6: dependencies: @@ -11485,10 +12586,6 @@ snapshots: 
transitivePeerDependencies: - supports-color - read-cache@1.0.0: - dependencies: - pify: 2.3.0 - read-config-file@6.3.2: dependencies: config-file-ts: 0.2.6 @@ -11518,9 +12615,7 @@ snapshots: dependencies: minimatch: 5.1.6 - readdirp@3.6.0: - dependencies: - picomatch: 2.3.1 + readdirp@5.0.0: {} reflect.getprototypeof@1.0.10: dependencies: @@ -11654,6 +12749,8 @@ snapshots: dependencies: pe-library: 0.4.1 + reselect@5.1.1: {} + resolve-alpn@1.2.1: {} resolve-from@4.0.0: {} @@ -11781,9 +12878,7 @@ snapshots: sax@1.4.4: {} - scheduler@0.23.2: - dependencies: - loose-envify: 1.4.0 + scheduler@0.27.0: {} semver-compare@1.0.0: optional: true @@ -11899,6 +12994,12 @@ snapshots: dependencies: semver: 7.7.4 + sirv@3.0.2: + dependencies: + '@polka/url': 1.0.0-next.29 + mrmime: 2.0.1 + totalist: 3.0.1 + slash@3.0.0: {} slice-ansi@3.0.0: @@ -11915,6 +13016,8 @@ snapshots: smart-buffer@4.2.0: {} + smol-toml@1.6.0: {} + socks-proxy-agent@6.2.1: dependencies: agent-base: 6.0.2 @@ -11937,6 +13040,11 @@ snapshots: ip-address: 10.1.0 smart-buffer: 4.2.0 + sonner@2.0.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + source-map-js@1.2.1: {} source-map-support@0.5.21: @@ -11966,6 +13074,7 @@ snapshots: transitivePeerDependencies: - bluebird - supports-color + optional: true ssh2@1.17.0: dependencies: @@ -11990,7 +13099,7 @@ snapshots: state-local@1.0.7: {} - std-env@3.10.0: {} + std-env@4.0.0: {} stop-iteration-iterator@1.1.0: dependencies: @@ -11999,17 +13108,17 @@ snapshots: stopword@3.1.5: {} - streamdown@1.6.11(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@18.3.1): + streamdown@1.6.11(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@19.2.4): dependencies: clsx: 2.1.1 hast: 1.0.0 hast-util-to-jsx-runtime: 2.3.6 html-url-attributes: 3.0.1 katex: 0.16.28 - lucide-react: 0.542.0(react@18.3.1) + lucide-react: 0.542.0(react@19.2.4) marked: 16.4.2 mermaid: 11.12.2 - react: 
18.3.1 + react: 19.2.4 rehype-harden: 1.1.7 rehype-katex: 7.0.1 rehype-raw: 7.0.0 @@ -12108,10 +13217,6 @@ snapshots: strip-json-comments@3.1.1: {} - strip-literal@3.1.0: - dependencies: - js-tokens: 9.0.1 - style-to-js@1.1.21: dependencies: style-to-object: 1.0.14 @@ -12122,16 +13227,6 @@ snapshots: stylis@4.3.6: {} - sucrase@3.35.1: - dependencies: - '@jridgewell/gen-mapping': 0.3.13 - commander: 4.1.1 - lines-and-columns: 1.2.4 - mz: 2.7.0 - pirates: 4.0.7 - tinyglobby: 0.2.15 - ts-interface-checker: 0.1.13 - suffix-thumb@5.0.2: {} sumchecker@3.0.1: @@ -12150,41 +13245,15 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} + tabbable@6.4.0: {} + tailwind-merge@2.6.1: {} tailwind-merge@3.4.0: {} - tailwindcss-animate@1.0.7(tailwindcss@3.4.19(yaml@2.8.2)): - dependencies: - tailwindcss: 3.4.19(yaml@2.8.2) + tailwindcss@4.2.1: {} - tailwindcss@3.4.19(yaml@2.8.2): - dependencies: - '@alloc/quick-lru': 5.2.0 - arg: 5.0.2 - chokidar: 3.6.0 - didyoumean: 1.2.2 - dlv: 1.1.3 - fast-glob: 3.3.3 - glob-parent: 6.0.2 - is-glob: 4.0.3 - jiti: 1.21.7 - lilconfig: 3.1.3 - micromatch: 4.0.8 - normalize-path: 3.0.0 - object-hash: 3.0.0 - picocolors: 1.1.1 - postcss: 8.5.6 - postcss-import: 15.1.0(postcss@8.5.6) - postcss-js: 4.1.0(postcss@8.5.6) - postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.2) - postcss-nested: 6.2.0(postcss@8.5.6) - postcss-selector-parser: 6.1.2 - resolve: 1.22.11 - sucrase: 3.35.1 - transitivePeerDependencies: - - tsx - - yaml + tapable@2.3.0: {} tar-fs@2.1.4: dependencies: @@ -12225,14 +13294,6 @@ snapshots: text-table@0.2.0: {} - thenify-all@1.6.0: - dependencies: - thenify: 3.3.1 - - thenify@3.3.1: - dependencies: - any-promise: 1.3.0 - tiny-async-pool@1.3.0: dependencies: semver: 5.7.2 @@ -12241,8 +13302,6 @@ snapshots: tinybench@2.9.0: {} - tinyexec@0.3.2: {} - tinyexec@1.0.2: {} tinyglobby@0.2.15: @@ -12250,11 +13309,7 @@ snapshots: fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 - tinypool@1.1.1: {} - - tinyrainbow@2.0.0: {} - - 
tinyspy@4.0.4: {} + tinyrainbow@3.1.0: {} tmp-promise@3.0.3: dependencies: @@ -12266,6 +13321,8 @@ snapshots: dependencies: is-number: 7.0.0 + totalist@3.0.1: {} + tree-kill@1.2.2: {} trim-lines@3.0.1: {} @@ -12282,8 +13339,6 @@ snapshots: ts-dedent@2.2.0: {} - ts-interface-checker@0.1.13: {} - tsconfig-paths@3.15.0: dependencies: '@types/json5': 0.0.29 @@ -12297,6 +13352,8 @@ snapshots: dependencies: safe-buffer: 5.2.1 + tw-animate-css@1.4.0: {} + tweetnacl@0.14.5: {} type-check@0.4.0: @@ -12356,6 +13413,8 @@ snapshots: undici-types@6.21.0: {} + undici-types@7.16.0: {} + unified@11.0.5: dependencies: '@types/unist': 3.0.3 @@ -12417,6 +13476,8 @@ snapshots: unist-util-is: 6.0.1 unist-util-visit-parents: 6.0.2 + universal-user-agent@7.0.3: {} + universalify@0.1.2: {} universalify@2.0.1: {} @@ -12431,20 +13492,29 @@ snapshots: dependencies: punycode: 2.3.1 - use-callback-ref@1.3.3(@types/react@18.3.28)(react@18.3.1): + use-callback-ref@1.3.3(@types/react@19.2.14)(react@19.2.4): dependencies: - react: 18.3.1 + react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 - use-sidecar@1.1.3(@types/react@18.3.28)(react@18.3.1): + use-sidecar@1.1.3(@types/react@19.2.14)(react@19.2.4): dependencies: detect-node-es: 1.1.0 - react: 18.3.1 + react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.28 + '@types/react': 19.2.14 + + use-sync-external-store@1.6.0(react@19.2.4): + dependencies: + react: 19.2.4 + + usehooks-ts@3.1.1(react@19.2.4): + dependencies: + lodash.debounce: 4.0.8 + react: 19.2.4 utf8-byte-length@1.0.5: {} @@ -12474,71 +13544,59 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.3 - vite-node@3.2.4(@types/node@20.19.32): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 5.4.21(@types/node@20.19.32) - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - 
vite@5.4.21(@types/node@20.19.32): + vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2): dependencies: - esbuild: 0.21.5 + esbuild: 0.25.12 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 postcss: 8.5.6 rollup: 4.57.1 + tinyglobby: 0.2.15 optionalDependencies: '@types/node': 20.19.32 fsevents: 2.3.3 + jiti: 2.6.1 + lightningcss: 1.31.1 + yaml: 2.8.2 - vitest@3.2.4(@types/debug@4.1.12)(@types/node@20.19.32): + vitest-browser-react@2.1.0(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0): dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@20.19.32)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - debug: 4.4.3 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + vitest: 4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + vitest@4.1.0(@opentelemetry/api@1.9.0)(@types/node@20.19.32)(@vitest/browser-playwright@4.1.0)(@vitest/browser-preview@4.1.0)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)): + dependencies: + '@vitest/expect': 4.1.0 + '@vitest/mocker': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + '@vitest/pretty-format': 4.1.0 + '@vitest/runner': 4.1.0 + '@vitest/snapshot': 4.1.0 + '@vitest/spy': 4.1.0 + '@vitest/utils': 4.1.0 + es-module-lexer: 2.0.0 expect-type: 1.3.0 magic-string: 0.30.21 + obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 - std-env: 3.10.0 + std-env: 4.0.0 tinybench: 2.9.0 - tinyexec: 0.3.2 + tinyexec: 1.0.2 tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - 
vite: 5.4.21(@types/node@20.19.32) - vite-node: 3.2.4(@types/node@20.19.32) + tinyrainbow: 3.1.0 + vite: 6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: - '@types/debug': 4.1.12 + '@opentelemetry/api': 1.9.0 '@types/node': 20.19.32 + '@vitest/browser-playwright': 4.1.0(playwright@1.58.2)(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) + '@vitest/browser-preview': 4.1.0(vite@6.4.1(@types/node@20.19.32)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))(vitest@4.1.0) transitivePeerDependencies: - - less - - lightningcss - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser vscode-jsonrpc@8.2.0: {} @@ -12646,6 +13704,8 @@ snapshots: wrappy@1.0.2: {} + ws@8.19.0: {} + xmlbuilder@15.1.1: {} xtend@4.0.2: {} diff --git a/postcss.config.js b/postcss.config.js deleted file mode 100644 index 12a703d90..000000000 --- a/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -}; diff --git a/scripts/clean.cjs b/scripts/clean.cjs deleted file mode 100644 index 870f1861c..000000000 --- a/scripts/clean.cjs +++ /dev/null @@ -1,8 +0,0 @@ -const fs = require('node:fs'); -const path = require('node:path'); - -const repoRoot = path.resolve(__dirname, '..'); - -for (const dir of ['node_modules', 'dist']) { - fs.rmSync(path.join(repoRoot, dir), { recursive: true, force: true }); -} diff --git a/scripts/copy-main-assets.cjs b/scripts/copy-main-assets.cjs deleted file mode 100644 index 1b8dbb388..000000000 --- a/scripts/copy-main-assets.cjs +++ /dev/null @@ -1,29 +0,0 @@ -const fs = require('node:fs'); -const path = require('node:path'); - -const repoRoot = path.resolve(__dirname, '..'); - -function copyFile(src, dest) { - fs.mkdirSync(path.dirname(dest), { recursive: true }); - fs.copyFileSync(src, dest); -} - -const assets = [ - { - src: path.join(repoRoot, 'src', 'main', 
'appConfig.json'), - dest: path.join(repoRoot, 'dist', 'main', 'appConfig.json'), - }, - { - src: path.join(repoRoot, 'src', 'main', 'services', 'skills', 'bundled-catalog.json'), - dest: path.join(repoRoot, 'dist', 'main', 'main', 'services', 'skills', 'bundled-catalog.json'), - }, -]; - -for (const asset of assets) { - if (!fs.existsSync(asset.src)) { - // eslint-disable-next-line no-console - console.error(`copy-main-assets: missing source file: ${asset.src}`); - process.exit(1); - } - copyFile(asset.src, asset.dest); -} diff --git a/scripts/emdash-run.ts b/scripts/emdash-run.ts deleted file mode 100644 index 81f2a9ad8..000000000 --- a/scripts/emdash-run.ts +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env node -import { readFileSync } from 'node:fs'; -import { resolve } from 'node:path'; - -import { ContainerConfigError, resolveContainerConfig } from '../src/shared/container/config'; -import { generateMockStartEvents } from '../src/shared/container/mockRunner'; -import { PortManager } from '../src/shared/container/portManager'; - -interface StartCommandOptions { - workspaceId: string; - configPath: string; - runId?: string; - mode?: 'container' | 'host'; -} - -interface ParsedArgs { - command: 'start' | 'help'; - options: StartCommandOptions; -} - -function parseArgs(argv: string[]): ParsedArgs { - const [commandRaw, ...rest] = argv; - const command = (commandRaw ?? 'help') as ParsedArgs['command']; - - if (command !== 'start') { - return { command: 'help', options: { workspaceId: '', configPath: '.emdash/config.json' } }; - } - - const options: StartCommandOptions = { - workspaceId: '', - configPath: '.emdash/config.json', - }; - - for (let i = 0; i < rest.length; i += 1) { - const arg = rest[i]; - if (!arg) continue; - switch (arg) { - case '--workspace': - case '-w': - options.workspaceId = rest[i + 1] ?? ''; - i += 1; - break; - case '--config': - case '-c': - options.configPath = rest[i + 1] ?? 
options.configPath; - i += 1; - break; - case '--mode': - options.mode = (rest[i + 1] ?? 'container') as 'container' | 'host'; - i += 1; - break; - case '--run-id': - options.runId = rest[i + 1] ?? options.runId; - i += 1; - break; - default: - break; - } - } - - return { command, options }; -} - -function printUsage(): void { - process.stderr.write(`Usage: emdash-run start --workspace [--config path]\n`); -} - -function loadConfig(configPath: string) { - const absolute = resolve(process.cwd(), configPath); - const raw = readFileSync(absolute, 'utf8'); - return JSON.parse(raw) as unknown; -} - -async function runStart(options: StartCommandOptions): Promise { - if (!options.workspaceId) { - throw new Error('Missing required --workspace argument'); - } - - const rawConfig = loadConfig(options.configPath); - const resolvedConfig = resolveContainerConfig(rawConfig); - - const portManager = new PortManager(); - const events = await generateMockStartEvents({ - workspaceId: options.workspaceId, - config: resolvedConfig, - portAllocator: portManager, - runId: options.runId, - mode: options.mode, - }); - - for (const event of events) { - process.stdout.write(`${JSON.stringify(event)}\n`); - } -} - -async function main(argv: string[]): Promise { - const parsed = parseArgs(argv); - if (parsed.command !== 'start') { - printUsage(); - return; - } - - try { - await runStart(parsed.options); - } catch (error) { - if (error instanceof ContainerConfigError) { - process.stderr.write(`Invalid config: ${error.message}\n`); - } else if (error instanceof Error) { - process.stderr.write(`${error.message}\n`); - } - process.exitCode = 1; - } -} - -void main(process.argv.slice(2)); diff --git a/scripts/postinstall.cjs b/scripts/postinstall.ts similarity index 84% rename from scripts/postinstall.cjs rename to scripts/postinstall.ts index 59ecb1d6c..d5fd406ee 100644 --- a/scripts/postinstall.cjs +++ b/scripts/postinstall.ts @@ -1,5 +1,8 @@ -const { spawnSync } = 
require('node:child_process'); -const path = require('node:path'); +import { spawnSync } from 'node:child_process'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); if (process.env.CI || process.env.EMDASH_SKIP_ELECTRON_REBUILD === '1') { process.exit(0); @@ -7,6 +10,7 @@ if (process.env.CI || process.env.EMDASH_SKIP_ELECTRON_REBUILD === '1') { function getElectronRebuildBin() { const binName = process.platform === 'win32' ? 'electron-rebuild.cmd' : 'electron-rebuild'; + return path.resolve(__dirname, '..', 'node_modules', '.bin', binName); } @@ -36,7 +40,7 @@ const disablePty = process.env.EMDASH_DISABLE_PTY === '1'; const disableNativeDb = process.env.EMDASH_DISABLE_NATIVE_DB === '1'; const nativeModules = []; -if (!disableNativeDb) nativeModules.push('sqlite3'); +if (!disableNativeDb) nativeModules.push('better-sqlite3'); if (!disablePty) nativeModules.push('node-pty'); nativeModules.push('keytar'); diff --git a/src/main/app/lifecycle.ts b/src/main/app/lifecycle.ts deleted file mode 100644 index 61a9171cd..000000000 --- a/src/main/app/lifecycle.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { app, BrowserWindow } from 'electron'; -import { createMainWindow } from './window'; - -export function registerAppLifecycle() { - app.on('window-all-closed', () => { - if (process.platform !== 'darwin') { - app.quit(); - } - }); - - app.on('activate', () => { - if (BrowserWindow.getAllWindows().length === 0) { - createMainWindow(); - } - }); -} diff --git a/src/main/app/menu.ts b/src/main/app/menu.ts index 573b7b056..09d72fdc6 100644 --- a/src/main/app/menu.ts +++ b/src/main/app/menu.ts @@ -1,14 +1,13 @@ -import { Menu, shell, app, BrowserWindow, nativeImage } from 'electron'; -import { EMDASH_RELEASES_URL, EMDASH_DOCS_URL } from '@shared/urls'; - -function getFocusedWindow(): BrowserWindow | null { - return BrowserWindow.getFocusedWindow() ?? BrowserWindow.getAllWindows()[0] ?? 
null; -} - -function sendToRenderer(channel: string) { - const win = getFocusedWindow(); - if (win) win.webContents.send(channel); -} +import { app, Menu, shell } from 'electron'; +import { + menuCheckForUpdatesChannel, + menuCloseTabChannel, + menuOpenSettingsChannel, + menuRedoChannel, + menuUndoChannel, +} from '@shared/events/appEvents'; +import { EMDASH_DOCS_URL, EMDASH_RELEASES_URL } from '@shared/urls'; +import { events } from '@main/lib/events'; export function setupApplicationMenu(): void { const isMac = process.platform === 'darwin'; @@ -28,11 +27,11 @@ export function setupApplicationMenu(): void { { label: 'Settings\u2026', accelerator: 'CmdOrCtrl+,', - click: () => sendToRenderer('menu:open-settings'), + click: () => events.emit(menuOpenSettingsChannel, undefined), }, { label: 'Check for Updates\u2026', - click: () => sendToRenderer('menu:check-for-updates'), + click: () => events.emit(menuCheckForUpdatesChannel, undefined), }, { type: 'separator' as const }, { role: 'services' as const }, @@ -60,7 +59,7 @@ export function setupApplicationMenu(): void { { label: 'Settings\u2026', accelerator: 'CmdOrCtrl+,', - click: () => sendToRenderer('menu:open-settings'), + click: () => events.emit(menuOpenSettingsChannel, undefined), }, { type: 'separator' as const }, ] @@ -69,7 +68,7 @@ export function setupApplicationMenu(): void { ? { label: 'Close Tab', accelerator: 'CmdOrCtrl+W', - click: () => sendToRenderer('menu:close-tab'), + click: () => events.emit(menuCloseTabChannel, undefined), } : { role: 'quit' as const }, ], @@ -81,12 +80,12 @@ export function setupApplicationMenu(): void { { label: 'Undo', accelerator: 'CmdOrCtrl+Z', - click: () => sendToRenderer('menu:undo'), + click: () => events.emit(menuUndoChannel, undefined), }, { label: 'Redo', accelerator: isMac ? 
'Shift+CmdOrCtrl+Z' : 'CmdOrCtrl+Y', - click: () => sendToRenderer('menu:redo'), + click: () => events.emit(menuRedoChannel, undefined), }, { type: 'separator' as const }, { role: 'cut' as const }, @@ -131,7 +130,7 @@ export function setupApplicationMenu(): void { { type: 'separator' as const }, { label: 'Check for Updates\u2026', - click: () => sendToRenderer('menu:check-for-updates'), + click: () => events.emit(menuCheckForUpdatesChannel, undefined), }, ] : []), diff --git a/src/main/app/protocol.ts b/src/main/app/protocol.ts new file mode 100644 index 000000000..4243a60fb --- /dev/null +++ b/src/main/app/protocol.ts @@ -0,0 +1,39 @@ +import { join, normalize, sep } from 'node:path'; +import { net, protocol } from 'electron'; + +export const APP_SCHEME = 'app'; +export const APP_ORIGIN = `${APP_SCHEME}://emdash`; + +export function registerAppScheme(): void { + protocol.registerSchemesAsPrivileged([ + { + scheme: APP_SCHEME, + privileges: { + standard: true, + secure: true, + supportFetchAPI: true, + corsEnabled: true, + }, + }, + ]); +} + +export function setupAppProtocol(rendererRoot: string): void { + const root = normalize(rendererRoot); + + protocol.handle(APP_SCHEME, async (request) => { + const { pathname } = new URL(request.url); + const relPath = decodeURIComponent(pathname).replace(/^\/+/, ''); + const resolved = normalize(join(root, relPath || 'index.html')); + + if (!resolved.startsWith(root + sep) && resolved !== root) { + return new Response(null, { status: 403 }); + } + + try { + return await net.fetch(`file://${resolved}`); + } catch { + return net.fetch(`file://${join(root, 'index.html')}`); + } + }); +} diff --git a/src/main/app/staticServer.ts b/src/main/app/staticServer.ts deleted file mode 100644 index a56522575..000000000 --- a/src/main/app/staticServer.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { createServer, type IncomingMessage, type ServerResponse } from 'http'; -import { promises as fs } from 'fs'; -import { extname, join, normalize, 
sep } from 'path'; -import type { AddressInfo } from 'net'; - -let serverUrl: string | null = null; -let serverStarted = false; - -const DEFAULT_RENDERER_PORT = 12112; -const RENDERER_PORT_RANGE = 100; - -const MIME_MAP: Record = { - '.html': 'text/html; charset=utf-8', - '.js': 'application/javascript; charset=utf-8', - '.mjs': 'application/javascript; charset=utf-8', - '.cjs': 'application/javascript; charset=utf-8', - '.css': 'text/css; charset=utf-8', - '.json': 'application/json; charset=utf-8', - '.svg': 'image/svg+xml', - '.png': 'image/png', - '.jpg': 'image/jpeg', - '.jpeg': 'image/jpeg', - '.gif': 'image/gif', - '.webp': 'image/webp', - '.ico': 'image/x-icon', - '.woff2': 'font/woff2', -}; - -function getMime(filePath: string) { - return MIME_MAP[extname(filePath).toLowerCase()] ?? 'application/octet-stream'; -} - -function isPathInside(parent: string, child: string) { - const parentPath = normalize(parent + sep); - const childPath = normalize(child); - return childPath.startsWith(parentPath); -} - -function getRendererPortCandidates(): number[] { - const raw = process.env.EMDASH_RENDERER_PORT; - const parsed = raw ? Number.parseInt(raw, 10) : Number.NaN; - const start = Number.isFinite(parsed) && parsed > 0 ? 
parsed : DEFAULT_RENDERER_PORT; - return Array.from({ length: RENDERER_PORT_RANGE }, (_, i) => start + i); -} - -async function listenWithFallback(server: ReturnType): Promise { - const candidates = getRendererPortCandidates(); - - for (const port of candidates) { - try { - const addr = await new Promise((resolve, reject) => { - let onError: (error: unknown) => void; - let onListening: () => void; - - const cleanup = () => { - server.removeListener('error', onError); - server.removeListener('listening', onListening); - }; - - onError = (error: unknown) => { - cleanup(); - reject(error); - }; - onListening = () => { - cleanup(); - resolve(server.address() as AddressInfo); - }; - - server.once('error', onError); - server.once('listening', onListening); - server.listen(port, '127.0.0.1'); - }); - - if (!addr || typeof addr.port !== 'number') { - throw new Error('Failed to start renderer server'); - } - return addr; - } catch (error) { - const code = (error as any)?.code; - if (code === 'EADDRINUSE') { - if (server.listening) { - await new Promise((resolve) => server.close(() => resolve())); - } - continue; - } - throw error; - } - } - - // As a last resort, pick an ephemeral port (should be extremely rare). 
- const addr = await new Promise((resolve, reject) => { - server.once('error', reject); - server.listen(0, '127.0.0.1', () => resolve(server.address() as AddressInfo)); - }); - - if (!addr || typeof addr.port !== 'number') { - throw new Error('Failed to start renderer server'); - } - return addr; -} - -export async function ensureRendererServer(root: string): Promise { - if (serverStarted && serverUrl) return serverUrl; - - const server = createServer(async (req: IncomingMessage, res: ServerResponse) => { - try { - if (!req.url) { - res.writeHead(400); - res.end(); - return; - } - - const url = new URL(req.url, 'http://localhost'); - const isHead = req.method === 'HEAD'; - - const rawPath = decodeURIComponent(url.pathname || '/'); - const safePath = normalize(rawPath).replace(/^(\.\.[/\\])+/, ''); - let filePath = join(root, safePath); - - // Block path traversal - if (!isPathInside(root, filePath)) { - res.writeHead(403); - res.end(); - return; - } - - let stat; - try { - stat = await fs.stat(filePath); - } catch { - stat = null; - } - - if (!stat || stat.isDirectory()) { - filePath = join(root, 'index.html'); - } - - const data = await fs.readFile(filePath); - res.writeHead(200, { - 'Content-Type': getMime(filePath), - 'Cache-Control': 'no-cache, no-store, must-revalidate', - }); - if (!isHead) res.write(data); - res.end(); - } catch { - res.writeHead(500); - res.end(); - } - }); - - const addr = await listenWithFallback(server); - serverUrl = `http://127.0.0.1:${addr.port}/index.html`; - serverStarted = true; - - return serverUrl!; -} diff --git a/src/main/app/window.ts b/src/main/app/window.ts index ff7da73af..78ab0f39e 100644 --- a/src/main/app/window.ts +++ b/src/main/app/window.ts @@ -1,60 +1,46 @@ -import { BrowserWindow, app } from 'electron'; -import { join } from 'path'; -import { isDev } from '../utils/dev'; -import { registerExternalLinkHandlers } from '../utils/externalLinks'; -import { ensureRendererServer } from './staticServer'; +import { join } 
from 'node:path'; +import { BrowserWindow } from 'electron'; +import appIcon from '@/assets/images/emdash/emdash_logo.png?asset'; +import { capture, checkAndReportDailyActiveUser } from '@main/lib/telemetry'; +import { registerExternalLinkHandlers } from '@main/utils/externalLinks'; +import { APP_ORIGIN } from './protocol'; let mainWindow: BrowserWindow | null = null; export function createMainWindow(): BrowserWindow { - // In development, resolve icon from src/assets - // In production (packaged), electron-builder handles the icon - const iconPath = isDev - ? join(__dirname, '..', '..', '..', 'src', 'assets', 'images', 'emdash', 'emdash_logo.png') - : undefined; - mainWindow = new BrowserWindow({ width: 1400, height: 900, minWidth: 700, minHeight: 500, title: 'Emdash', - ...(iconPath && { icon: iconPath }), + // In production, electron-builder injects the icon from the app bundle. + ...(import.meta.env.DEV && { icon: appIcon }), webPreferences: { nodeIntegration: false, contextIsolation: true, + // Required for ESM preload scripts (.mjs) + sandbox: false, // Allow using in renderer for in‑app browser pane. // The webview runs in a separate process; nodeIntegration remains disabled. webviewTag: true, - // __dirname here resolves to dist/main/main/app at runtime (dev) - // Preload is emitted to dist/main/main/preload.js - preload: join(__dirname, '..', 'preload.js'), + // __dirname resolves to out/main/ at runtime; preload is at out/preload/index.mjs + preload: join(__dirname, '../preload/index.mjs'), }, - ...(process.platform === 'darwin' ? { titleBarStyle: 'hiddenInset' } : {}), + ...(process.platform === 'darwin' + ? { titleBarStyle: 'hiddenInset', trafficLightPosition: { x: 10, y: 10 } } + : {}), show: false, }); - if (isDev) { - mainWindow.loadURL('http://localhost:3000'); + if (import.meta.env.DEV) { + mainWindow.loadURL(process.env.ELECTRON_RENDERER_URL!); } else { - // Serve renderer over an HTTP origin in production so embeds work. 
- const rendererRoot = join(app.getAppPath(), 'dist', 'renderer'); - void ensureRendererServer(rendererRoot) - .then((url: string) => { - if (mainWindow && !mainWindow.isDestroyed()) { - mainWindow.loadURL(url); - } - }) - .catch(() => { - // Fallback to file load if server fails for any reason. - if (mainWindow && !mainWindow.isDestroyed()) { - mainWindow.loadFile(join(rendererRoot, 'index.html')); - } - }); + mainWindow.loadURL(`${APP_ORIGIN}/index.html`); } // Route external links to the user’s default browser - registerExternalLinkHandlers(mainWindow, isDev); + registerExternalLinkHandlers(mainWindow, import.meta.env.DEV); // Show when ready mainWindow.once('ready-to-show', () => { @@ -63,12 +49,9 @@ export function createMainWindow(): BrowserWindow { // Track window focus for telemetry mainWindow.on('focus', () => { - // Lazy import to avoid circular dependencies - void import('../telemetry').then(({ capture, checkAndReportDailyActiveUser }) => { - void capture('app_window_focused'); - // Also check for daily active user when window gains focus - checkAndReportDailyActiveUser(); - }); + capture('app_window_focused'); + mainWindow?.setWindowButtonVisibility(true); + checkAndReportDailyActiveUser(); }); // Cleanup reference on close diff --git a/src/main/config/github.config.ts b/src/main/config/github.config.ts deleted file mode 100644 index 102d94b08..000000000 --- a/src/main/config/github.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * GitHub OAuth configuration for Device Flow authentication. - * No client secret needed - Device Flow is designed for desktop/CLI apps. 
- */ -export const GITHUB_CONFIG = { - clientId: 'Ov23ligC35uHWopzCeWf', - scopes: ['repo', 'read:user', 'read:org'], -}; diff --git a/src/main/core/account/config.ts b/src/main/core/account/config.ts new file mode 100644 index 000000000..98c62ef4e --- /dev/null +++ b/src/main/core/account/config.ts @@ -0,0 +1,6 @@ +export const ACCOUNT_CONFIG = { + authServer: { + baseUrl: 'https://auth.emdash.sh', + authTimeoutMs: Number(process.env.EMDASH_AUTH_TIMEOUT_MS || 300000), + }, +}; diff --git a/src/main/core/account/controller.ts b/src/main/core/account/controller.ts new file mode 100644 index 000000000..a9c6d7398 --- /dev/null +++ b/src/main/core/account/controller.ts @@ -0,0 +1,53 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { log } from '@main/lib/logger'; +import { emdashAccountService } from './services/emdash-account-service'; + +export const accountController = createRPCController({ + getSession: async () => { + try { + return await emdashAccountService.getSession(); + } catch (error) { + log.error('Failed to get account session:', error); + return { user: null, isSignedIn: false, hasAccount: false }; + } + }, + + signIn: async (provider?: string) => { + try { + const result = await emdashAccountService.signIn(provider); + return { success: true, user: result.user }; + } catch (error) { + log.error('Account sign-in failed:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Sign-in failed', + }; + } + }, + + signOut: async () => { + try { + await emdashAccountService.signOut(); + return { success: true }; + } catch (error) { + log.error('Account sign-out failed:', error); + return { success: false, error: 'Sign-out failed' }; + } + }, + + checkHealth: async () => { + try { + return await emdashAccountService.checkServerHealth(); + } catch { + return false; + } + }, + + validateSession: async () => { + try { + return await emdashAccountService.validateSession(); + } catch { + return false; + } + }, +}); diff --git a/src/main/core/account/provider-token-registry.test.ts b/src/main/core/account/provider-token-registry.test.ts new file mode 100644 index 000000000..57afa5a2b --- /dev/null +++ b/src/main/core/account/provider-token-registry.test.ts @@ -0,0 +1,42 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { providerTokenRegistry } from './provider-token-registry'; + +describe('providerTokenRegistry', () => { + beforeEach(() => { + providerTokenRegistry.clear(); + }); + + it('dispatches to registered handler', async () => { + const handler = vi.fn().mockResolvedValue(undefined); + providerTokenRegistry.register('github', handler); + + await providerTokenRegistry.dispatch('github', 'ghp_token123'); + + expect(handler).toHaveBeenCalledWith('ghp_token123'); + }); + + it('is a no-op when no handler is registered for the provider', async () => { + await expect(providerTokenRegistry.dispatch('gitlab', 'token')).resolves.not.toThrow(); + }); + + it('propagates handler errors', async () => { + const handler = vi.fn().mockRejectedValue(new Error('keytar failed')); + providerTokenRegistry.register('github', handler); + + await expect(providerTokenRegistry.dispatch('github', 'token')).rejects.toThrow( + 'keytar failed' + ); + }); + + it('replaces handler on re-registration', async () => { + const first = vi.fn(); + const second = vi.fn(); + providerTokenRegistry.register('github', first); + 
providerTokenRegistry.register('github', second); + + await providerTokenRegistry.dispatch('github', 'token'); + + expect(first).not.toHaveBeenCalled(); + expect(second).toHaveBeenCalledWith('token'); + }); +}); diff --git a/src/main/core/account/provider-token-registry.ts b/src/main/core/account/provider-token-registry.ts new file mode 100644 index 000000000..deb8842f4 --- /dev/null +++ b/src/main/core/account/provider-token-registry.ts @@ -0,0 +1,20 @@ +type ProviderTokenHandler = (token: string) => Promise; + +const handlers = new Map(); + +export const providerTokenRegistry = { + register(provider: string, handler: ProviderTokenHandler): void { + handlers.set(provider, handler); + }, + + async dispatch(provider: string, token: string): Promise { + const handler = handlers.get(provider); + if (!handler) return; + await handler(token); + }, + + /** For testing only — removes all registered handlers. */ + clear(): void { + handlers.clear(); + }, +}; diff --git a/src/main/core/account/services/credential-store.test.ts b/src/main/core/account/services/credential-store.test.ts new file mode 100644 index 000000000..3a92709ec --- /dev/null +++ b/src/main/core/account/services/credential-store.test.ts @@ -0,0 +1,66 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { AccountCredentialStore } from './credential-store'; + +const mockGetPassword = vi.fn(); +const mockSetPassword = vi.fn(); +const mockDeletePassword = vi.fn(); + +vi.mock('keytar', () => ({ + getPassword: (...args: unknown[]) => mockGetPassword(...args), + setPassword: (...args: unknown[]) => mockSetPassword(...args), + deletePassword: (...args: unknown[]) => mockDeletePassword(...args), +})); + +describe('AccountCredentialStore', () => { + let store: AccountCredentialStore; + + beforeEach(() => { + vi.clearAllMocks(); + store = new AccountCredentialStore(); + }); + + describe('get()', () => { + it('returns token from keytar', async () => { + 
mockGetPassword.mockResolvedValue('session-token-123'); + const token = await store.get(); + expect(token).toBe('session-token-123'); + expect(mockGetPassword).toHaveBeenCalledWith('emdash-account', 'session-token'); + }); + + it('returns null when keytar has no token', async () => { + mockGetPassword.mockResolvedValue(null); + const token = await store.get(); + expect(token).toBeNull(); + }); + + it('returns null on keytar error', async () => { + mockGetPassword.mockRejectedValue(new Error('keytar unavailable')); + const token = await store.get(); + expect(token).toBeNull(); + }); + }); + + describe('set()', () => { + it('stores token in keytar', async () => { + await store.set('new-token'); + expect(mockSetPassword).toHaveBeenCalledWith('emdash-account', 'session-token', 'new-token'); + }); + + it('throws on keytar error', async () => { + mockSetPassword.mockRejectedValue(new Error('keytar write failed')); + await expect(store.set('token')).rejects.toThrow('keytar write failed'); + }); + }); + + describe('clear()', () => { + it('deletes token from keytar', async () => { + await store.clear(); + expect(mockDeletePassword).toHaveBeenCalledWith('emdash-account', 'session-token'); + }); + + it('does not throw on keytar error', async () => { + mockDeletePassword.mockRejectedValue(new Error('keytar error')); + await expect(store.clear()).resolves.not.toThrow(); + }); + }); +}); diff --git a/src/main/core/account/services/credential-store.ts b/src/main/core/account/services/credential-store.ts new file mode 100644 index 000000000..163a08c2a --- /dev/null +++ b/src/main/core/account/services/credential-store.ts @@ -0,0 +1,35 @@ +import keytar from 'keytar'; +import { log } from '@main/lib/logger'; + +const SERVICE_NAME = 'emdash-account'; +const SESSION_ACCOUNT = 'session-token'; + +export class AccountCredentialStore { + async get(): Promise { + try { + return await keytar.getPassword(SERVICE_NAME, SESSION_ACCOUNT); + } catch (error) { + log.error('Failed to retrieve 
session token:', error); + return null; + } + } + + async set(token: string): Promise { + try { + await keytar.setPassword(SERVICE_NAME, SESSION_ACCOUNT, token); + } catch (error) { + log.error('Failed to store session token:', error); + throw error; + } + } + + async clear(): Promise { + try { + await keytar.deletePassword(SERVICE_NAME, SESSION_ACCOUNT); + } catch (error) { + log.error('Failed to clear session token:', error); + } + } +} + +export const accountCredentialStore = new AccountCredentialStore(); diff --git a/src/main/core/account/services/emdash-account-service.test.ts b/src/main/core/account/services/emdash-account-service.test.ts new file mode 100644 index 000000000..765ea2cfb --- /dev/null +++ b/src/main/core/account/services/emdash-account-service.test.ts @@ -0,0 +1,270 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { EmdashAccountService } from './emdash-account-service'; + +const mockCredGet = vi.fn(); +const mockCredSet = vi.fn(); +const mockCredClear = vi.fn(); +vi.mock('./credential-store', () => ({ + accountCredentialStore: { + get: (...args: unknown[]) => mockCredGet(...args), + set: (...args: unknown[]) => mockCredSet(...args), + clear: (...args: unknown[]) => mockCredClear(...args), + }, +})); + +const mockKvGet = vi.fn(); +const mockKvSet = vi.fn(); +vi.mock('@main/db/kv', () => ({ + KV: class { + get(...args: unknown[]) { + return mockKvGet(...args); + } + set(...args: unknown[]) { + return mockKvSet(...args); + } + }, +})); + +const mockExecuteOAuthFlow = vi.fn(); +vi.mock('@main/core/shared/oauth-flow', () => ({ + executeOAuthFlow: (...args: unknown[]) => mockExecuteOAuthFlow(...args), +})); + +const mockDispatch = vi.fn().mockResolvedValue(undefined); +vi.mock('../provider-token-registry', () => ({ + providerTokenRegistry: { + dispatch: (...args: unknown[]) => mockDispatch(...args), + }, +})); + +vi.mock('../config', () => ({ + ACCOUNT_CONFIG: { + authServer: { baseUrl: 'https://auth.test.emdash.sh', 
authTimeoutMs: 5000 }, + }, +})); + +const mockFetch = vi.fn(); +vi.stubGlobal('fetch', mockFetch); + +describe('EmdashAccountService', () => { + let service: EmdashAccountService; + + beforeEach(() => { + vi.clearAllMocks(); + mockKvGet.mockResolvedValue(null); + mockKvSet.mockResolvedValue(undefined); + service = new EmdashAccountService(); + }); + + describe('getSession()', () => { + it('returns no account when profile cache is empty', async () => { + const session = await service.getSession(); + expect(session).toEqual({ user: null, isSignedIn: false, hasAccount: false }); + }); + + it('returns hasAccount true but not signed in when profile exists but no token', async () => { + mockKvGet.mockResolvedValue({ + hasAccount: true, + userId: 'u1', + username: 'test', + avatarUrl: '', + email: 'test@test.com', + lastValidated: '2026-01-01', + }); + const session = await service.getSession(); + expect(session.hasAccount).toBe(true); + expect(session.isSignedIn).toBe(false); + expect(session.user).toBeNull(); + }); + }); + + describe('loadSessionToken()', () => { + it('loads token from credential store', async () => { + mockCredGet.mockResolvedValue('token-123'); + await service.loadSessionToken(); + expect(mockCredGet).toHaveBeenCalled(); + }); + }); + + describe('signIn()', () => { + it('calls executeOAuthFlow and stores session', async () => { + const oauthResponse = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { + userId: 'u1', + username: 'testuser', + avatarUrl: 'https://img.com/a', + email: 'a@b.com', + }, + }; + mockExecuteOAuthFlow.mockResolvedValue(oauthResponse); + + const result = await service.signIn(); + + expect(mockExecuteOAuthFlow).toHaveBeenCalledWith(expect.objectContaining({})); + expect(mockCredSet).toHaveBeenCalledWith('session-abc'); + expect(mockKvSet).toHaveBeenCalledWith( + 'profile', + expect.objectContaining({ hasAccount: true, username: 'testuser' }) + ); + expect(result).toEqual({ + 
providerToken: 'ghp_123', + provider: 'github', + user: oauthResponse.user, + }); + }); + + it('passes provider as extraParam when specified', async () => { + const oauthResponse = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(oauthResponse); + + await service.signIn('github'); + + expect(mockExecuteOAuthFlow).toHaveBeenCalledWith(expect.objectContaining({})); + }); + + it('dispatches provider token via registry when provider token is present', async () => { + const oauthResponse = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(oauthResponse); + + await service.signIn(); + + expect(mockDispatch).toHaveBeenCalledWith('github', 'ghp_123'); + }); + + it('throws when provider token persistence fails', async () => { + const oauthResponse = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(oauthResponse); + mockDispatch.mockRejectedValueOnce(new Error('keytar failed')); + + await expect(service.signIn()).rejects.toThrow('keytar failed'); + }); + + it('does not dispatch when provider token is absent', async () => { + const oauthResponse = { + sessionToken: 'session-abc', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(oauthResponse); + + await service.signIn(); + + expect(mockDispatch).not.toHaveBeenCalled(); + }); + }); + + describe('signOut()', () => { + it('clears session token and preserves hasAccount in profile cache', async () => { + const exchangeResult = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { 
userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(exchangeResult); + await service.signIn(); + vi.clearAllMocks(); + mockKvSet.mockResolvedValue(undefined); + + await service.signOut(); + + expect(mockCredClear).toHaveBeenCalled(); + expect(mockKvSet).toHaveBeenCalledWith( + 'profile', + expect.objectContaining({ hasAccount: true }) + ); + mockKvGet.mockResolvedValue({ + hasAccount: true, + userId: 'u1', + username: 'test', + avatarUrl: '', + email: '', + }); + const session = await service.getSession(); + expect(session.isSignedIn).toBe(false); + expect(session.hasAccount).toBe(true); + }); + }); + + describe('validateSession()', () => { + it('returns false when no session token', async () => { + const result = await service.validateSession(); + expect(result).toBe(false); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it('clears session on 401 and preserves hasAccount', async () => { + const exchangeResult = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(exchangeResult); + await service.signIn(); + vi.clearAllMocks(); + mockKvSet.mockResolvedValue(undefined); + + mockFetch.mockResolvedValue({ ok: false, status: 401 }); + const result = await service.validateSession(); + + expect(result).toBe(false); + expect(mockCredClear).toHaveBeenCalled(); + expect(mockKvSet).toHaveBeenCalledWith( + 'profile', + expect.objectContaining({ hasAccount: true }) + ); + }); + + it('returns true on network error (optimistic)', async () => { + const exchangeResult = { + sessionToken: 'session-abc', + accessToken: 'ghp_123', + providerId: 'github', + user: { userId: 'u1', username: 'test', avatarUrl: '', email: '' }, + }; + mockExecuteOAuthFlow.mockResolvedValue(exchangeResult); + await service.signIn(); + vi.clearAllMocks(); + + mockFetch.mockRejectedValue(new 
Error('network error')); + const result = await service.validateSession(); + expect(result).toBe(true); + }); + }); + + describe('checkServerHealth()', () => { + it('returns true on successful health check', async () => { + mockFetch.mockResolvedValue({ ok: true }); + const result = await service.checkServerHealth(); + expect(result).toBe(true); + expect(mockFetch).toHaveBeenCalledWith( + 'https://auth.test.emdash.sh/health', + expect.objectContaining({ signal: expect.any(AbortSignal) }) + ); + }); + + it('returns false on network error', async () => { + mockFetch.mockRejectedValue(new Error('network error')); + const result = await service.checkServerHealth(); + expect(result).toBe(false); + }); + }); +}); diff --git a/src/main/core/account/services/emdash-account-service.ts b/src/main/core/account/services/emdash-account-service.ts new file mode 100644 index 000000000..8a66f2deb --- /dev/null +++ b/src/main/core/account/services/emdash-account-service.ts @@ -0,0 +1,174 @@ +import { executeOAuthFlow } from '@main/core/shared/oauth-flow'; +import { KV } from '@main/db/kv'; +import { ACCOUNT_CONFIG } from '../config'; +import { providerTokenRegistry } from '../provider-token-registry'; +import { accountCredentialStore } from './credential-store'; + +export interface AccountUser { + userId: string; + username: string; + avatarUrl: string; + email: string; +} + +export interface CachedProfile { + hasAccount: boolean; + userId: string; + username: string; + avatarUrl: string; + email: string; + lastValidated: string; +} + +export interface SignInResult { + providerToken?: string; + provider?: string; + user: AccountUser; +} + +export interface SessionState { + user: AccountUser | null; + isSignedIn: boolean; + hasAccount: boolean; +} + +interface AccountKVSchema extends Record { + profile: CachedProfile; +} + +const accountKV = new KV('account'); + +export class EmdashAccountService { + private cachedProfile: CachedProfile | null = null; + private sessionToken: string 
| null = null; + + async getSession(): Promise { + this.cachedProfile = await accountKV.get('profile'); + const hasAccount = this.cachedProfile?.hasAccount === true; + const isSignedIn = hasAccount && this.sessionToken !== null; + return { + user: + isSignedIn && this.cachedProfile + ? { + userId: this.cachedProfile.userId, + username: this.cachedProfile.username, + avatarUrl: this.cachedProfile.avatarUrl, + email: this.cachedProfile.email, + } + : null, + isSignedIn, + hasAccount, + }; + } + + async loadSessionToken(): Promise { + this.sessionToken = await accountCredentialStore.get(); + } + + /** make provider optional and remove default in case emdash starts supporting more providers */ + async signIn(provider: string = 'github'): Promise { + const { baseUrl } = ACCOUNT_CONFIG.authServer; + + const extraParams: Record = {}; + + if (provider) { + extraParams.provider_id = provider; + } + + const raw = await executeOAuthFlow({ + authorizeUrl: `${baseUrl}/sign-in`, + exchangeUrl: `${baseUrl}/api/v1/auth/electron/exchange`, + successRedirectUrl: `${baseUrl}/auth/success`, + errorRedirectUrl: `${baseUrl}/auth/error`, + extraParams, + timeoutMs: ACCOUNT_CONFIG.authServer.authTimeoutMs, + }); + + const sessionToken = raw.sessionToken as string; + const user = raw.user as AccountUser; + if (!sessionToken || !user) { + throw new Error('Invalid sign-in response: missing sessionToken or user'); + } + + await accountCredentialStore.set(sessionToken); + this.sessionToken = sessionToken; + + const profile: CachedProfile = { + hasAccount: true, + userId: user.userId, + username: user.username, + avatarUrl: user.avatarUrl, + email: user.email, + lastValidated: new Date().toISOString(), + }; + this.cachedProfile = profile; + await accountKV.set('profile', profile); + + const accessToken = raw.accessToken as string | undefined; + const providerId = raw.providerId as string | undefined; + if (accessToken && providerId) { + await providerTokenRegistry.dispatch(providerId, 
accessToken); + } + + return { + providerToken: accessToken || undefined, + provider: providerId || undefined, + user, + }; + } + + async signOut(): Promise { + this.sessionToken = null; + await accountCredentialStore.clear(); + if (this.cachedProfile) { + this.cachedProfile.hasAccount = true; + await accountKV.set('profile', this.cachedProfile); + } + } + + async checkServerHealth(): Promise { + const { baseUrl } = ACCOUNT_CONFIG.authServer; + try { + const response = await fetch(`${baseUrl}/health`, { + signal: AbortSignal.timeout(3000), + }); + return response.ok; + } catch { + return false; + } + } + + async validateSession(): Promise { + const token = this.sessionToken; + if (!token) return false; + + const { baseUrl } = ACCOUNT_CONFIG.authServer; + try { + const response = await fetch(`${baseUrl}/api/auth/get-session`, { + method: 'POST', + headers: { Authorization: `Bearer ${token}` }, + signal: AbortSignal.timeout(3000), + }); + + if (!response.ok) { + this.sessionToken = null; + await accountCredentialStore.clear(); + if (this.cachedProfile) { + this.cachedProfile.hasAccount = true; + await accountKV.set('profile', this.cachedProfile); + } + return false; + } + + if (this.cachedProfile) { + this.cachedProfile.lastValidated = new Date().toISOString(); + await accountKV.set('profile', this.cachedProfile); + } + return true; + } catch { + return this.sessionToken !== null; + } + } +} + +export const emdashAccountService = new EmdashAccountService(); diff --git a/src/main/core/app/controller.ts b/src/main/core/app/controller.ts new file mode 100644 index 000000000..58f894905 --- /dev/null +++ b/src/main/core/app/controller.ts @@ -0,0 +1,45 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import type { OpenInAppId } from '@shared/openInApps'; +import { appService } from './service'; + +export const appController = createRPCController({ + openExternal: async (url: string) => { + try { + await appService.openExternal(url); + return { success: true }; + 
} catch (error) { + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + clipboardWriteText: async (text: string) => { + try { + appService.clipboardWriteText(text); + return { success: true }; + } catch (error) { + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + openIn: async (args: { + app: OpenInAppId; + path: string; + isRemote?: boolean; + sshConnectionId?: string | null; + }) => { + try { + await appService.openIn(args); + return { success: true }; + } catch (error) { + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + checkInstalledApps: () => appService.checkInstalledApps(), + listInstalledFonts: async (args?: { refresh?: boolean }) => { + const { fonts, cached, error } = await appService.listInstalledFonts(args?.refresh); + return { success: !error, fonts, cached, ...(error ? { error } : {}) }; + }, + openSelectDirectoryDialog: (args: { title: string; message: string }) => + appService.openSelectDirectoryDialog(args), + getAppVersion: () => appService.getCachedAppVersion(), + getElectronVersion: () => process.versions.electron, + getPlatform: () => process.platform, +}); diff --git a/src/main/core/app/service.ts b/src/main/core/app/service.ts new file mode 100644 index 000000000..7b43ed56e --- /dev/null +++ b/src/main/core/app/service.ts @@ -0,0 +1,332 @@ +import { exec } from 'node:child_process'; +import { homedir } from 'node:os'; +import { join } from 'node:path'; +import { eq } from 'drizzle-orm'; +import { clipboard, dialog, shell } from 'electron'; +import { appPasteChannel, appRedoChannel, appUndoChannel } from '@shared/events/appEvents'; +import { + getAppById, + getResolvedLabel, + OPEN_IN_APPS, + type OpenInAppId, + type PlatformConfig, + type PlatformKey, +} from '@shared/openInApps'; +import { getMainWindow } from '@main/app/window'; +import { db } from '@main/db/client'; +import { 
sshConnections } from '@main/db/schema'; +import { events } from '@main/lib/events'; +import { buildExternalToolEnv } from '@main/utils/childProcessEnv'; +import { + buildGhosttyRemoteExecArgs, + buildRemoteEditorUrl, + buildRemoteSshCommand, +} from '@main/utils/remoteOpenIn'; +import { + checkCommand, + checkMacApp, + checkMacAppByName, + escapeAppleScriptString, + execFileCommand, + listInstalledFontsAll, + resolveAppVersion, +} from './utils'; + +const FONT_CACHE_TTL_MS = 5 * 60 * 1_000; + +class AppService { + private cachedAppVersion: string | null = null; + private cachedAppVersionPromise: Promise | null = null; + private cachedInstalledFonts: { fonts: string[]; fetchedAt: number } | null = null; + + initialize(): void { + void this.getCachedAppVersion(); + + events.on(appUndoChannel, () => { + getMainWindow()?.webContents.undo(); + }); + events.on(appRedoChannel, () => { + getMainWindow()?.webContents.redo(); + }); + events.on(appPasteChannel, () => { + getMainWindow()?.webContents.paste(); + }); + } + + getCachedAppVersion(): Promise { + if (this.cachedAppVersion) return Promise.resolve(this.cachedAppVersion); + if (!this.cachedAppVersionPromise) { + this.cachedAppVersionPromise = resolveAppVersion().then((version) => { + this.cachedAppVersion = version; + return version; + }); + } + return this.cachedAppVersionPromise; + } + + async listInstalledFonts( + refresh?: boolean + ): Promise<{ fonts: string[]; cached: boolean; error?: string }> { + const now = Date.now(); + if ( + !refresh && + this.cachedInstalledFonts && + now - this.cachedInstalledFonts.fetchedAt < FONT_CACHE_TTL_MS + ) { + return { fonts: this.cachedInstalledFonts.fonts, cached: true }; + } + try { + const fonts = await listInstalledFontsAll(); + this.cachedInstalledFonts = { fonts, fetchedAt: now }; + return { fonts, cached: false }; + } catch (error) { + return { + fonts: this.cachedInstalledFonts?.fonts ?? [], + cached: Boolean(this.cachedInstalledFonts), + error: error instanceof Error ? 
error.message : String(error), + }; + } + } + + async checkInstalledApps(): Promise> { + const platform = process.platform as PlatformKey; + const availability: Record = {}; + + for (const openInApp of Object.values(OPEN_IN_APPS)) { + const platformConfig = openInApp.platforms[platform]; + if (!platformConfig && !openInApp.alwaysAvailable) { + availability[openInApp.id] = false; + continue; + } + if (openInApp.alwaysAvailable) { + availability[openInApp.id] = true; + continue; + } + try { + let isAvailable = false; + if (platformConfig?.bundleIds) { + for (const bundleId of platformConfig.bundleIds) { + if (await checkMacApp(bundleId)) { + isAvailable = true; + break; + } + } + } + if (!isAvailable && platformConfig?.appNames) { + for (const appName of platformConfig.appNames) { + if (await checkMacAppByName(appName)) { + isAvailable = true; + break; + } + } + } + if (!isAvailable && platformConfig?.checkCommands) { + for (const cmd of platformConfig.checkCommands) { + if (await checkCommand(cmd)) { + isAvailable = true; + break; + } + } + } + availability[openInApp.id] = isAvailable; + } catch (error) { + console.error(`Error checking installed app ${openInApp.id}:`, error); + availability[openInApp.id] = false; + } + } + + return availability; + } + + async openExternal(url: string): Promise { + if (!url || typeof url !== 'string') throw new Error('Invalid URL'); + let parsedUrl: URL; + try { + parsedUrl = new URL(url); + } catch { + throw new Error('Invalid URL format'); + } + if (!['http:', 'https:'].includes(parsedUrl.protocol)) { + throw new Error( + `Protocol "${parsedUrl.protocol}" is not allowed. 
Only http and https URLs are permitted.` + ); + } + await shell.openExternal(url); + } + + clipboardWriteText(text: string): void { + if (typeof text !== 'string') throw new Error('Invalid clipboard text'); + clipboard.writeText(text); + } + + async openIn(args: { + app: OpenInAppId; + path: string; + isRemote?: boolean; + sshConnectionId?: string | null; + }): Promise { + const { path: target, app: appId, isRemote = false, sshConnectionId } = args; + + if (!target || typeof target !== 'string' || !appId) { + throw new Error('Invalid arguments'); + } + + const platform = process.platform as PlatformKey; + const appConfig = getAppById(appId); + if (!appConfig) throw new Error('Invalid app ID'); + + const platformConfig = appConfig.platforms?.[platform]; + const label = getResolvedLabel(appConfig, platform); + + if (!platformConfig && !appConfig.alwaysAvailable) { + throw new Error(`${label} is not available on this platform.`); + } + + if (isRemote && sshConnectionId) { + await this.openInRemote({ appId, appConfig, label, target, platform, sshConnectionId }); + return; + } + + await this.openInLocal({ label, target, platformConfig }); + } + + private async openInRemote(args: { + appId: OpenInAppId; + appConfig: ReturnType; + label: string; + target: string; + platform: PlatformKey; + sshConnectionId: string; + }): Promise { + const { appId, appConfig, label, target, platform, sshConnectionId } = args; + + const [connection] = await db + .select() + .from(sshConnections) + .where(eq(sshConnections.id, sshConnectionId)) + .limit(1); + + if (!connection) throw new Error('SSH connection not found'); + + const { host, username, port } = connection; + + if (appId === 'vscode' || appId === 'cursor') { + await shell.openExternal(buildRemoteEditorUrl(appId, host, username, target)); + return; + } + + if ((appId === 'terminal' || appId === 'iterm2') && platform === 'darwin') { + const sshCommand = buildRemoteSshCommand({ host, username, port, targetPath: target }); + const 
escapedCommand = escapeAppleScriptString(sshCommand); + const appName = appId === 'terminal' ? 'Terminal' : 'iTerm'; + const script = + appId === 'terminal' + ? `tell application "${appName}" to do script "${escapedCommand}"` + : `tell application "${appName}" to create window with default profile command "${escapedCommand}"`; + await execFileCommand('osascript', [ + '-e', + script, + '-e', + `tell application "${appName}" to activate`, + ]); + return; + } + + if (appId === 'warp' && platform === 'darwin') { + const sshCommand = buildRemoteSshCommand({ host, username, port, targetPath: target }); + await shell.openExternal(`warp://action/new_window?cmd=${encodeURIComponent(sshCommand)}`); + return; + } + + if (appId === 'ghostty') { + const ghosttyExecArgs = buildGhosttyRemoteExecArgs({ + host, + username, + port, + targetPath: target, + }); + const attempts = + platform === 'darwin' + ? [ + { + file: 'open', + args: ['-n', '-b', 'com.mitchellh.ghostty', '--args', '-e', ...ghosttyExecArgs], + }, + { file: 'open', args: ['-na', 'Ghostty', '--args', '-e', ...ghosttyExecArgs] }, + { file: 'ghostty', args: ['-e', ...ghosttyExecArgs] }, + ] + : [{ file: 'ghostty', args: ['-e', ...ghosttyExecArgs] }]; + + let lastError: unknown = null; + for (const attempt of attempts) { + try { + await execFileCommand(attempt.file, attempt.args); + return; + } catch (error) { + lastError = error; + } + } + if (lastError instanceof Error) throw lastError; + throw new Error('Unable to launch Ghostty'); + } + + if (appConfig?.supportsRemote) { + throw new Error(`Remote SSH not yet implemented for ${label}`); + } + } + + private async openInLocal(args: { + label: string; + target: string; + platformConfig: PlatformConfig | undefined; + }): Promise { + const { label, target, platformConfig } = args; + + if (platformConfig?.openUrls) { + for (const urlTemplate of platformConfig.openUrls) { + const url = urlTemplate + .replace('{{path_url}}', encodeURIComponent(target)) + .replace('{{path}}', 
target); + try { + await shell.openExternal(url); + return; + } catch { + // try next URL + } + } + throw new Error( + `${label} is not installed or its URI scheme is not registered on this platform.` + ); + } + + const quoted = (p: string) => `'${p.replace(/'/g, "'\\''")}'`; + const commands: string[] = platformConfig?.openCommands ?? []; + const command = commands + .map((cmd) => cmd.replace('{{path}}', quoted(target)).replace('{{path_raw}}', target)) + .join(' || '); + + if (!command) throw new Error('Unsupported platform or app'); + + await new Promise((resolve, reject) => { + exec(command, { cwd: target, env: buildExternalToolEnv() }, (err) => { + if (err) return reject(err); + resolve(); + }); + }); + } + + async openSelectDirectoryDialog(args: { + title: string; + message: string; + }): Promise { + const result = await dialog.showOpenDialog(getMainWindow()!, { + title: args.title, + properties: ['openDirectory'], + message: args.message, + }); + if (result.canceled) return undefined; + return result.filePaths[0]; + } +} + +export const appService = new AppService(); diff --git a/src/main/core/app/utils.ts b/src/main/core/app/utils.ts new file mode 100644 index 000000000..22844d694 --- /dev/null +++ b/src/main/core/app/utils.ts @@ -0,0 +1,186 @@ +import { exec, execFile } from 'node:child_process'; +import { readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { app } from 'electron'; +import { buildExternalToolEnv } from '@main/utils/childProcessEnv'; + +const UNKNOWN_VERSION = 'unknown'; + +export const execCommand = ( + command: string, + opts?: { maxBuffer?: number; timeout?: number } +): Promise => + new Promise((resolve, reject) => { + exec( + command, + { + maxBuffer: opts?.maxBuffer ?? 8 * 1024 * 1024, + timeout: opts?.timeout ?? 30_000, + env: buildExternalToolEnv(), + }, + (error, stdout) => { + if (error) return reject(error); + resolve(stdout ?? 
''); + } + ); + }); + +export const execFileCommand = ( + file: string, + args: string[], + opts?: { timeout?: number } +): Promise => + new Promise((resolve, reject) => { + execFile( + file, + args, + { + timeout: opts?.timeout ?? 30_000, + env: buildExternalToolEnv(), + }, + (error) => { + if (error) return reject(error); + resolve(); + } + ); + }); + +export const escapeAppleScriptString = (value: string): string => + value.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); + +// ─── Font discovery ─────────────────────────────────────────────────────────── + +const dedupeAndSortFonts = (fonts: string[]): string[] => { + const unique = Array.from(new Set(fonts.map((f) => f.trim()).filter(Boolean))); + return unique.sort((a, b) => a.localeCompare(b)); +}; + +const listInstalledFontsMac = async (): Promise => { + const stdout = await execCommand('system_profiler SPFontsDataType -json', { + maxBuffer: 24 * 1024 * 1024, + timeout: 60_000, + }); + const parsed = JSON.parse(stdout) as { + SPFontsDataType?: Array<{ + typefaces?: Array<{ family?: string; fullname?: string }>; + _name?: string; + }>; + }; + const fonts: string[] = []; + for (const item of parsed.SPFontsDataType ?? []) { + for (const typeface of item.typefaces ?? 
[]) { + if (typeface.family) fonts.push(typeface.family); + } + } + return dedupeAndSortFonts(fonts); +}; + +const listInstalledFontsLinux = async (): Promise => { + const stdout = await execCommand('fc-list : family', { timeout: 30_000 }); + const fonts = stdout + .split('\n') + .flatMap((line) => line.split(',')) + .map((font) => font.trim()) + .filter(Boolean); + return dedupeAndSortFonts(fonts); +}; + +const listInstalledFontsWindows = async (): Promise => { + const script = + "$fonts = Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Fonts';" + + "$props = $fonts.PSObject.Properties | Where-Object { $_.Name -notlike 'PS*' };" + + "$props | ForEach-Object { ($_.Name -replace '\\s*\\(.*\\)$','').Trim() }"; + const stdout = await execCommand(`powershell -NoProfile -Command "${script}"`, { + timeout: 30_000, + }); + return dedupeAndSortFonts( + stdout + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + ); +}; + +export const listInstalledFontsAll = async (): Promise => { + switch (process.platform) { + case 'darwin': + return listInstalledFontsMac(); + case 'linux': + return listInstalledFontsLinux(); + case 'win32': + return listInstalledFontsWindows(); + default: + return []; + } +}; + +// ─── App version ───────────────────────────────────────────────────────────── + +const readPackageVersion = async (packageJsonPath: string): Promise => { + try { + const packageJson = JSON.parse(await readFile(packageJsonPath, 'utf-8')); + if (packageJson.name === 'emdash' && packageJson.version) { + return packageJson.version as string; + } + } catch { + // Ignore missing or malformed package.json; try the next path. 
+ } + return null; +}; + +export const resolveAppVersion = async (): Promise => { + try { + const version = app.getVersion(); + if (version && version !== '0.0.0') return version; + } catch { + // fall through + } + + const possiblePaths = [ + join(__dirname, '../../package.json'), + join(process.cwd(), 'package.json'), + join(app.getAppPath(), 'package.json'), + ]; + + for (const packageJsonPath of possiblePaths) { + const version = await readPackageVersion(packageJsonPath); + if (version) return version; + } + + try { + return app.getVersion(); + } catch { + return UNKNOWN_VERSION; + } +}; + +// ─── Installed-app detection ───────────────────────────────────────────────── + +export const checkCommand = (cmd: string): Promise => + new Promise((resolve) => { + exec(`command -v ${cmd} >/dev/null 2>&1`, { env: buildExternalToolEnv() }, (error) => { + resolve(!error); + }); + }); + +export const checkMacApp = (bundleId: string): Promise => + new Promise((resolve) => { + exec( + `mdfind "kMDItemCFBundleIdentifier == '${bundleId}'"`, + { env: buildExternalToolEnv() }, + (error, stdout) => { + resolve(!error && stdout.trim().length > 0); + } + ); + }); + +export const checkMacAppByName = (appName: string): Promise => + new Promise((resolve) => { + exec( + `osascript -e 'id of application "${appName}"' 2>/dev/null`, + { env: buildExternalToolEnv() }, + (error) => { + resolve(!error); + } + ); + }); diff --git a/src/main/core/conversations/controller.ts b/src/main/core/conversations/controller.ts new file mode 100644 index 000000000..4be68ffe9 --- /dev/null +++ b/src/main/core/conversations/controller.ts @@ -0,0 +1,12 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { createConversation } from './createConversation'; +import { deleteConversation } from './deleteConversation'; +import { getConversations } from './getConversations'; +import { renameConversation } from './renameConversation'; + +export const conversationController = createRPCController({ + 
getConversations, + createConversation, + deleteConversation, + renameConversation, +}); diff --git a/src/main/core/conversations/createConversation.ts b/src/main/core/conversations/createConversation.ts new file mode 100644 index 000000000..2b172a383 --- /dev/null +++ b/src/main/core/conversations/createConversation.ts @@ -0,0 +1,40 @@ +import { randomUUID } from 'node:crypto'; +import { sql } from 'drizzle-orm'; +import { Conversation, CreateConversationParams } from '@shared/conversations'; +import { db } from '@main/db/client'; +import { conversations } from '@main/db/schema'; +import { resolveTask } from '../projects/utils'; +import { mapConversationRowToConversation } from './utils'; + +export async function createConversation(params: CreateConversationParams): Promise { + const id = params.id ?? randomUUID(); + + const [row] = await db + .insert(conversations) + .values({ + id, + projectId: params.projectId, + taskId: params.taskId, + title: params.title, + provider: params.provider, + createdAt: sql`CURRENT_TIMESTAMP`, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .returning(); + + const task = resolveTask(params.projectId, params.taskId); + if (!task) { + throw new Error('Task not found'); + } + + const conversation = mapConversationRowToConversation(row); + + await task.conversations.startSession( + conversation, + params.initialSize, + false, + params.initialPrompt + ); + + return mapConversationRowToConversation(row); +} diff --git a/src/main/core/conversations/deleteConversation.ts b/src/main/core/conversations/deleteConversation.ts new file mode 100644 index 000000000..4fd2c3469 --- /dev/null +++ b/src/main/core/conversations/deleteConversation.ts @@ -0,0 +1,23 @@ +import { and, eq } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { conversations } from '@main/db/schema'; +import { resolveTask } from '../projects/utils'; + +export async function deleteConversation( + projectId: string, + taskId: string, + conversationId: string +): 
Promise { + await db + .delete(conversations) + .where( + and( + eq(conversations.id, conversationId), + eq(conversations.projectId, projectId), + eq(conversations.taskId, taskId) + ) + ); + + const task = resolveTask(projectId, taskId); + await task?.conversations.stopSession(conversationId); +} diff --git a/src/main/core/conversations/getConversations.ts b/src/main/core/conversations/getConversations.ts new file mode 100644 index 000000000..4e2cee63b --- /dev/null +++ b/src/main/core/conversations/getConversations.ts @@ -0,0 +1,13 @@ +import { eq, isNull } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { conversations, tasks } from '@main/db/schema'; +import { mapConversationRowToConversation } from './utils'; + +export async function getConversations() { + const rows = await db + .select({ conversation: conversations }) + .from(conversations) + .innerJoin(tasks, eq(conversations.taskId, tasks.id)) + .where(isNull(tasks.archivedAt)); + return rows.map(({ conversation }) => mapConversationRowToConversation(conversation, false)); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/amp.ts b/src/main/core/conversations/impl/agent-event-classifiers/amp.ts new file mode 100644 index 000000000..6ab4b4967 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/amp.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createAmpClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command|Type your message/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if 
(/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/auggie.ts b/src/main/core/conversations/impl/agent-event-classifiers/auggie.ts new file mode 100644 index 000000000..0095936d7 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/auggie.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createAuggieClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/auggie\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Please provide/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git 
a/src/main/core/conversations/impl/agent-event-classifiers/autohand.ts b/src/main/core/conversations/impl/agent-event-classifiers/autohand.ts new file mode 100644 index 000000000..0ae320fce --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/autohand.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createAutohandClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/base.ts b/src/main/core/conversations/impl/agent-event-classifiers/base.ts new file mode 100644 index 000000000..bf1315a8e --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/base.ts @@ -0,0 +1,69 @@ +import type { NotificationType } from '@shared/events/agentEvents'; + +export type ClassificationResult = + | { + type: 'notification'; + notificationType: NotificationType; + message?: string; + } + | { + type: 'stop'; + message?: string; + } + | { + type: 'error'; + message?: string; + } + | undefined; + 
+export interface ProviderClassifier { + /** + * Feed a new data chunk. Returns classification result if pattern detected, undefined otherwise. + * Maintains internal buffer for multi-chunk pattern matching. + */ + classify(chunk: string): ClassificationResult; + + /** Reset buffer state (called on session restart) */ + reset(): void; +} + +type ClassifyFn = (text: string) => ClassificationResult; + +const MAX_BUFFER = 4096; // 4KB sliding window + +function stripAnsi(s: string): string { + return s + .replace(/\x1b\[[0-9;]*[A-Za-z]/g, '') + .replace(/\r/g, '') + .replace(/\x1b\][^\x07]*\x07/g, ''); +} + +/** + * Factory function that creates a classifier with buffering and ANSI stripping. + * The provided callback receives cleaned text and returns classification result. + */ +export function createProviderClassifier(classifyFn: ClassifyFn): ProviderClassifier { + let buffer = ''; + + return { + classify(chunk: string): ClassificationResult { + // Append to buffer + buffer += chunk; + + // Trim from front if too large (keep most recent data) + if (buffer.length > MAX_BUFFER) { + buffer = buffer.slice(-MAX_BUFFER); + } + + // Strip ANSI codes for pattern matching + const clean = stripAnsi(buffer); + + // Call provider-specific classification + return classifyFn(clean); + }, + + reset(): void { + buffer = ''; + }, + }; +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/charm.ts b/src/main/core/conversations/impl/agent-event-classifiers/charm.ts new file mode 100644 index 000000000..e4f6bc8ed --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/charm.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createCharmClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 
'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/crush\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|Choose|Select/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/claude.ts b/src/main/core/conversations/impl/agent-event-classifiers/claude.ts new file mode 100644 index 000000000..4bd2228ba --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/claude.ts @@ -0,0 +1,49 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createClaudeClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + // Check last ~500 chars for prompt patterns + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Next command|Use \/login/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful|logged in/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please 
(provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/cline.ts b/src/main/core/conversations/impl/agent-event-classifiers/cline.ts new file mode 100644 index 000000000..cbfe6759f --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/cline.ts @@ -0,0 +1,63 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createClineClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Task completion + if (/Task completed|Done\./i.test(tail)) { + return { + type: 'stop', + message: 'Task completed', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/cline\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Please.*:/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/codebuff.ts 
b/src/main/core/conversations/impl/agent-event-classifiers/codebuff.ts new file mode 100644 index 000000000..a0637009d --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/codebuff.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createCodebuffClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/codebuff\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|Enter.*command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/codex.ts b/src/main/core/conversations/impl/agent-event-classifiers/codex.ts new file mode 100644 index 000000000..7bfc25905 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/codex.ts @@ -0,0 +1,56 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createCodexClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if 
(/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting input|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Command menu patterns + if (/\/(status|approvals|model)\b/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful|API key accepted/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/continue.ts b/src/main/core/conversations/impl/agent-event-classifiers/continue.ts new file mode 100644 index 000000000..4b76dca77 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/continue.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createContinueClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/cn\s*>|continue\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + 
// Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Next step/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/copilot.ts b/src/main/core/conversations/impl/agent-event-classifiers/copilot.ts new file mode 100644 index 000000000..2ecc1a928 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/copilot.ts @@ -0,0 +1,52 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createCopilotClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if ( + /Do you want to|Confirm with number keys|approve all file operations|Yes, and approve/i.test( + tail + ) + ) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Press Enter|Next step/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/cursor.ts 
b/src/main/core/conversations/impl/agent-event-classifiers/cursor.ts new file mode 100644 index 000000000..5e5e38404 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/cursor.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createCursorClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Add a follow-up/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/Auto\s*[\r\n]+\s*\/\s*commands/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/droid.ts b/src/main/core/conversations/impl/agent-event-classifiers/droid.ts new file mode 100644 index 000000000..e5c4a2883 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/droid.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createDroidClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if 
(/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful|API key valid/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/gemini.ts b/src/main/core/conversations/impl/agent-event-classifiers/gemini.ts new file mode 100644 index 000000000..f528162b5 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/gemini.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createGeminiClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success (Gemini may show API key validation) + if (/Successfully authenticated|Login successful|API key valid/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please 
(provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/generic.ts b/src/main/core/conversations/impl/agent-event-classifiers/generic.ts new file mode 100644 index 000000000..efb6e10c3 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/generic.ts @@ -0,0 +1,74 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +/** + * Generic fallback classifier for providers without specific patterns. + * Uses common patterns across most CLI agents. + */ +export function createGenericClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts (y/n, confirm, approve, etc.) 
+ if (/\[y\/n\]|\[Y\/N\]|Continue\?/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Task completion (stop signal) + if (/✓|✔|Task completed|Finished|Done\./i.test(tail)) { + return { + type: 'stop', + message: 'Task completed', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/Add a follow-up/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful|API key (accepted|valid)/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/goose.ts b/src/main/core/conversations/impl/agent-event-classifiers/goose.ts new file mode 100644 index 000000000..4aafa8a87 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/goose.ts @@ -0,0 +1,63 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createGooseClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Session 
started/resumed could be idle + if (/Session.*started|Session.*resumed/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/goose\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Choose/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/index.ts b/src/main/core/conversations/impl/agent-event-classifiers/index.ts new file mode 100644 index 000000000..da1ce5343 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/index.ts @@ -0,0 +1,57 @@ +import type { AgentProviderId } from '@shared/agent-provider-registry'; +import { createAmpClassifier } from './amp'; +import { createAuggieClassifier } from './auggie'; +import { createAutohandClassifier } from './autohand'; +import type { ProviderClassifier } from './base'; +import { createCharmClassifier } from './charm'; +import { createClaudeClassifier } from './claude'; +import { createClineClassifier } from './cline'; +import { createCodebuffClassifier } from './codebuff'; +import { createCodexClassifier } from './codex'; +import { createContinueClassifier } from './continue'; +import { createCopilotClassifier } from './copilot'; +import { createCursorClassifier } from './cursor'; +import { createDroidClassifier } from './droid'; +import { createGeminiClassifier } from 
'./gemini'; +import { createGenericClassifier } from './generic'; +import { createGooseClassifier } from './goose'; +import { createKilocodeClassifier } from './kilocode'; +import { createKimiClassifier } from './kimi'; +import { createKiroClassifier } from './kiro'; +import { createMistralClassifier } from './mistral'; +import { createOpenCodeClassifier } from './opencode'; +import { createPiClassifier } from './pi'; +import { createQwenClassifier } from './qwen'; +import { createRovoClassifier } from './rovo'; + +export type { ProviderClassifier, ClassificationResult } from './base'; + +const classifierFactories: Record ProviderClassifier> = { + amp: createAmpClassifier, + auggie: createAuggieClassifier, + autohand: createAutohandClassifier, + charm: createCharmClassifier, + claude: createClaudeClassifier, + cline: createClineClassifier, + codebuff: createCodebuffClassifier, + codex: createCodexClassifier, + continue: createContinueClassifier, + copilot: createCopilotClassifier, + cursor: createCursorClassifier, + droid: createDroidClassifier, + gemini: createGeminiClassifier, + goose: createGooseClassifier, + kilocode: createKilocodeClassifier, + kimi: createKimiClassifier, + kiro: createKiroClassifier, + mistral: createMistralClassifier, + opencode: createOpenCodeClassifier, + pi: createPiClassifier, + qwen: createQwenClassifier, + rovo: createRovoClassifier, +}; + +export function createClassifier(providerId: AgentProviderId): ProviderClassifier { + const factory = classifierFactories[providerId]; + return factory ? 
factory() : createGenericClassifier(); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/kilocode.ts b/src/main/core/conversations/impl/agent-event-classifiers/kilocode.ts new file mode 100644 index 000000000..a21405575 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/kilocode.ts @@ -0,0 +1,77 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createKilocodeClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Task completion (potential stop signal) + if (/✓\s*Task Completed|Checkpoint Saved/i.test(tail)) { + return { + type: 'stop', + message: 'Task completed', + }; + } + + // Idle/ready prompts + if (/Type a message or \/command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/What would you like to work on/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/\/help for commands|\/mode to switch mode|! 
for shell mode/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/kimi.ts b/src/main/core/conversations/impl/agent-event-classifiers/kimi.ts new file mode 100644 index 000000000..5f97bb875 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/kimi.ts @@ -0,0 +1,55 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createKimiClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command|\/help|\/setup/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/qwen\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Please.*:/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if 
(/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/kiro.ts b/src/main/core/conversations/impl/agent-event-classifiers/kiro.ts new file mode 100644 index 000000000..23b90023a --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/kiro.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createKiroClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command|Kiro CLI/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/mistral.ts b/src/main/core/conversations/impl/agent-event-classifiers/mistral.ts new file mode 100644 index 000000000..b8f0aeb6c --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/mistral.ts @@ -0,0 +1,77 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createMistralClassifier() { + return createProviderClassifier((text: string): 
ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts (y/n confirmations) + if (/\[y\/n\]|\[Y\/N\]|Continue\?|Approve|Reject|Cancel/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Task completion + if (/✓|✔|Completed|Finished|Done\.|Task completed/i.test(tail)) { + return { + type: 'stop', + message: 'Task completed', + }; + } + + // Idle/ready prompts + if (/Type.*message|Enter.*prompt/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/What would you like|How can I help/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/\bvibe\s*>|›|»|>/i.test(tail) && tail.length < 100) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful|API key accepted/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|Please.*:/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/opencode.ts b/src/main/core/conversations/impl/agent-event-classifiers/opencode.ts new file mode 100644 index 000000000..571af4e16 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/opencode.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createOpenCodeClassifier() { + return createProviderClassifier((text: string): 
ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command|Type your message/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/pi.ts b/src/main/core/conversations/impl/agent-event-classifiers/pi.ts new file mode 100644 index 000000000..da1038dbd --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/pi.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createPiClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // 
Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/agent-event-classifiers/qwen.ts b/src/main/core/conversations/impl/agent-event-classifiers/qwen.ts new file mode 100644 index 000000000..55fe8f799 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/qwen.ts @@ -0,0 +1,63 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createQwenClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Task completion + if (/Task completed|Finished/i.test(tail)) { + return { + type: 'stop', + message: 'Task completed', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + if (/qwen\s*>/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success + if (/Successfully authenticated|Login successful/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How can I|Please.*:/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git 
a/src/main/core/conversations/impl/agent-event-classifiers/rovo.ts b/src/main/core/conversations/impl/agent-event-classifiers/rovo.ts new file mode 100644 index 000000000..a38074279 --- /dev/null +++ b/src/main/core/conversations/impl/agent-event-classifiers/rovo.ts @@ -0,0 +1,48 @@ +import { createProviderClassifier, type ClassificationResult } from './base'; + +export function createRovoClassifier() { + return createProviderClassifier((text: string): ClassificationResult => { + const tail = text.slice(-500); + + // Permission/approval prompts + if (/approve|reject|permission|allow|confirm/i.test(tail)) { + return { + type: 'notification', + notificationType: 'permission_prompt', + }; + } + + // Idle/ready prompts + if (/Ready|Awaiting|Press Enter|Next command|rovodev/i.test(tail)) { + return { + type: 'notification', + notificationType: 'idle_prompt', + }; + } + + // Auth success (rovodev may show connecting/session messages) + if (/Successfully authenticated|Login successful|connected|session started/i.test(text)) { + return { + type: 'notification', + notificationType: 'auth_success', + }; + } + + // Questions/elicitation + if (/What.*\?|How.*\?|Which.*\?|Please (provide|specify|clarify)/i.test(tail)) { + return { + type: 'notification', + notificationType: 'elicitation_dialog', + }; + } + + // Error detection + if (/error:|fatal:|exception|failed/i.test(text)) { + return { + type: 'error', + }; + } + + return undefined; + }); +} diff --git a/src/main/core/conversations/impl/local-conversation.ts b/src/main/core/conversations/impl/local-conversation.ts new file mode 100644 index 000000000..fd453f4b8 --- /dev/null +++ b/src/main/core/conversations/impl/local-conversation.ts @@ -0,0 +1,124 @@ +import { Conversation } from '@shared/conversations'; +import { agentSessionExitedChannel } from '@shared/events/agentEvents'; +import { makePtySessionId } from '@shared/ptySessionId'; +import type { ConversationProvider } from '@main/core/conversations/types'; +import { 
spawnLocalPty } from '@main/core/pty/local-pty'; +import { Pty } from '@main/core/pty/pty'; +import { buildAgentEnv } from '@main/core/pty/pty-env'; +import { ptySessionRegistry } from '@main/core/pty/pty-session-registry'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; +import { buildAgentCommand, wireAgentClassifier } from './shared'; + +const DEFAULT_COLS = 80; +const DEFAULT_ROWS = 24; + +export class LocalConversationProvider implements ConversationProvider { + private sessions = new Map(); + private readonly projectId: string; + private readonly taskPath: string; + private readonly taskId: string; + + constructor({ + projectId, + taskPath, + taskId, + }: { + projectId: string; + taskPath: string; + taskId: string; + }) { + this.projectId = projectId; + this.taskPath = taskPath; + this.taskId = taskId; + } + + async startSession( + conversation: Conversation, + initialSize: { cols: number; rows: number } = { cols: DEFAULT_COLS, rows: DEFAULT_ROWS }, + isResuming: boolean = false, + initialPrompt?: string + ): Promise { + const sessionId = makePtySessionId( + conversation.projectId, + conversation.taskId, + conversation.id + ); + if (this.sessions.has(sessionId)) return; + + const { command, args } = await buildAgentCommand({ + providerId: conversation.providerId, + autoApprove: conversation.autoApprove, + sessionId: conversation.id, + isResuming, + initialPrompt, + }); + + const pty = spawnLocalPty({ + id: sessionId, + command, + args, + cwd: this.taskPath, + env: buildAgentEnv(), + cols: initialSize.cols, + rows: initialSize.rows, + }); + + wireAgentClassifier({ + pty, + providerId: conversation.providerId, + projectId: conversation.projectId, + taskId: conversation.taskId, + conversationId: conversation.id, + }); + + pty.onExit(({ exitCode }) => { + ptySessionRegistry.unregister(sessionId); + const shouldRespawn = this.sessions.has(sessionId); + this.sessions.delete(sessionId); + events.emit(agentSessionExitedChannel, { 
+ sessionId, + projectId: conversation.projectId, + conversationId: conversation.id, + taskId: conversation.taskId, + exitCode, + }); + if (shouldRespawn) { + setTimeout(() => { + this.startSession(conversation, initialSize, isResuming, initialPrompt).catch((e) => { + log.error('LocalConversationProvider: respawn failed', { + conversationId: conversation.id, + error: String(e), + }); + }); + }, 500); + } + }); + + ptySessionRegistry.register(sessionId, pty); + this.sessions.set(sessionId, pty); + } + + async stopSession(conversationId: string): Promise { + const sessionId = makePtySessionId(this.projectId, this.taskId, conversationId); + const pty = this.sessions.get(sessionId); + if (!pty) return; + try { + pty.kill(); + } catch (e) { + log.warn('LocalAgentProvider: error killing PTY', { sessionId, error: String(e) }); + } + this.sessions.delete(sessionId); + ptySessionRegistry.unregister(sessionId); + } + + async destroyAll(): Promise { + for (const [sessionId, pty] of this.sessions) { + try { + pty.kill(); + } catch {} + ptySessionRegistry.unregister(sessionId); + } + this.sessions.clear(); + } +} diff --git a/src/main/core/conversations/impl/shared.ts b/src/main/core/conversations/impl/shared.ts new file mode 100644 index 000000000..a2dee46de --- /dev/null +++ b/src/main/core/conversations/impl/shared.ts @@ -0,0 +1,74 @@ +import { AgentProviderId } from '@shared/agent-provider-registry'; +import { agentEventChannel } from '@shared/events/agentEvents'; +import { Pty } from '@main/core/pty/pty'; +import { providerOverrideSettings } from '@main/core/settings/provider-settings-service'; +import { events } from '@main/lib/events'; +import { createClassifier } from './agent-event-classifiers'; + +export function wireAgentClassifier({ + pty, + providerId, + projectId, + taskId, + conversationId, +}: { + pty: Pty; + providerId: AgentProviderId; + projectId: string; + taskId: string; + conversationId: string; +}): void { + const classifier = 
createClassifier(providerId); + pty.onData((chunk) => { + const result = classifier.classify(chunk); + if (result) { + events.emit(agentEventChannel, { + type: result.type, + conversationId: conversationId, + taskId: taskId, + projectId: projectId, + timestamp: Date.now(), + payload: { + message: result.message, + notificationType: result.type === 'notification' ? result.notificationType : undefined, + }, + }); + } + }); +} + +export async function buildAgentCommand({ + providerId, + autoApprove, + initialPrompt, + sessionId, + isResuming, +}: { + providerId: AgentProviderId; + autoApprove?: boolean; + initialPrompt?: string; + sessionId: string; + isResuming?: boolean; +}) { + const providerConfig = await providerOverrideSettings.getItem(providerId); + + const cli = providerConfig?.cli; + const args: string[] = []; + + if (isResuming && providerConfig?.sessionIdFlag) { + args.push(providerConfig?.sessionIdFlag, sessionId); + } + + if (autoApprove && providerConfig?.autoApproveFlag) { + args.push(providerConfig?.autoApproveFlag); + } + + if (!isResuming && initialPrompt && providerConfig?.initialPromptFlag) { + args.push(providerConfig?.initialPromptFlag, initialPrompt); + args.push(initialPrompt); + } + + args.push(...(providerConfig?.defaultArgs ?? 
[])); + + return { command: cli!, args }; +} diff --git a/src/main/core/conversations/impl/ssh-conversation.ts b/src/main/core/conversations/impl/ssh-conversation.ts new file mode 100644 index 000000000..42fa04418 --- /dev/null +++ b/src/main/core/conversations/impl/ssh-conversation.ts @@ -0,0 +1,103 @@ +import { AgentProviderId } from '@shared/agent-provider-registry'; +import { Conversation } from '@shared/conversations'; +import { agentSessionExitedChannel } from '@shared/events/agentEvents'; +import { makePtySessionId } from '@shared/ptySessionId'; +import type { + ConversationProvider, + ConversationStartOptions, + CreateSessionError, +} from '@main/core/conversations/types'; +import { Pty } from '@main/core/pty/pty'; +import { ptySessionRegistry } from '@main/core/pty/pty-session-registry'; +import { buildSshCommandString, resolveSpawnParams } from '@main/core/pty/spawn-utils'; +import { openSsh2Pty } from '@main/core/pty/ssh2-pty'; +import type { SshClientProxy } from '@main/core/ssh/ssh-client-proxy'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; +import { ok, Result } from '@main/lib/result'; +import type { AgentSessionConfig } from './agent-session'; +import { wireAgentClassifier } from './shared'; + +export class SshConversationProvider implements ConversationProvider { + private sessions = new Map(); + + constructor( + private readonly projectId: string, + private readonly taskId: string, + private readonly proxy: SshClientProxy + ) {} + + async startSession(conversation: Conversation): Promise { + const sessionId = makePtySessionId(opts.projectId, opts.taskId, opts.conversationId); + + if (this.sessions.has(sessionId)) return ok(); + + const cfg: AgentSessionConfig = { + taskId: opts.taskId, + conversationId: opts.conversationId, + providerId: opts.providerId as AgentProviderId, + command: opts.command, + args: opts.args, + cwd: opts.cwd, + sessionId: opts.agentSessionId, + shellSetup: opts.shellSetup, + 
tmuxSessionName: opts.tmuxSessionName, + autoApprove: opts.autoApprove ?? false, + resume: opts.resume ?? false, + }; + + const { command, args, cwd } = resolveSpawnParams('agent', cfg); + const sshCommand = buildSshCommandString(command, args, cwd); + + const result = await openSsh2Pty(this.proxy.client, { + id: sessionId, + command: sshCommand, + cols: 80, + rows: 24, + }); + + if (!result.success) return result; + + const pty = result.data; + + wireAgentClassifier(pty, sessionId, cfg); + + pty.onExit(({ exitCode }) => { + this.sessions.delete(sessionId); + events.emit( + agentSessionExitedChannel, + { sessionId, conversationId: cfg.conversationId, taskId: cfg.taskId, exitCode }, + cfg.taskId + ); + }); + + ptySessionRegistry.register(sessionId, pty); + this.sessions.set(sessionId, pty); + + log.info('SshAgentProvider: session started', { sessionId, cwd }); + return ok(); + } + + async stopSession(conversationId: string): Promise { + const sessionId = makePtySessionId(this.projectId, this.taskId, conversationId); + const pty = this.sessions.get(sessionId); + if (!pty) return; + try { + pty.kill(); + } catch (e) { + log.warn('SshAgentProvider: error killing PTY', { sessionId, error: String(e) }); + } + this.sessions.delete(sessionId); + ptySessionRegistry.unregister(sessionId); + } + + async destroyAll(): Promise { + for (const [sessionId, pty] of this.sessions) { + try { + pty.kill(); + } catch {} + ptySessionRegistry.unregister(sessionId); + } + this.sessions.clear(); + } +} diff --git a/src/main/core/conversations/renameConversation.ts b/src/main/core/conversations/renameConversation.ts new file mode 100644 index 000000000..6de5ee170 --- /dev/null +++ b/src/main/core/conversations/renameConversation.ts @@ -0,0 +1,24 @@ +import { eq, sql } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { conversations } from '@main/db/schema'; + +const MAX_TITLE_LENGTH = 64; + +export async function renameConversation(conversationId: string, newTitle: 
string): Promise { + const trimmed = newTitle.trim(); + if (trimmed.length > MAX_TITLE_LENGTH) { + throw new Error(`Conversation title cannot exceed ${MAX_TITLE_LENGTH} characters`); + } + + const result = await db + .update(conversations) + .set({ + title: trimmed, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .where(eq(conversations.id, conversationId)); + + if (result.changes === 0) { + throw new Error(`Conversation not found: ${conversationId}`); + } +} diff --git a/src/main/core/conversations/types.ts b/src/main/core/conversations/types.ts new file mode 100644 index 000000000..3c3542e21 --- /dev/null +++ b/src/main/core/conversations/types.ts @@ -0,0 +1,16 @@ +import { Conversation } from '@shared/conversations'; + +export interface ConversationProvider { + startSession( + conversation: Conversation, + initialSize?: { cols: number; rows: number }, + isResuming?: boolean, + initialPrompt?: string + ): Promise; + stopSession(conversationId: string): Promise; + destroyAll(): Promise; +} + +export type ConversationConfig = { + autoApprove?: boolean; +}; diff --git a/src/main/core/conversations/utils.ts b/src/main/core/conversations/utils.ts new file mode 100644 index 000000000..ef291a848 --- /dev/null +++ b/src/main/core/conversations/utils.ts @@ -0,0 +1,18 @@ +import { AgentProviderId } from '@shared/agent-provider-registry'; +import { Conversation } from '@shared/conversations'; +import { ConversationRow } from '@main/db/schema'; + +export function mapConversationRowToConversation( + row: ConversationRow, + resume: boolean = false +): Conversation { + return { + id: row.id, + title: row.title, + taskId: row.taskId, + projectId: row.projectId, + providerId: row.provider as AgentProviderId, + autoApprove: row.config ? 
JSON.parse(row.config).autoApprove : undefined, + resume: resume, + }; +} diff --git a/src/main/core/dependencies/controller.ts b/src/main/core/dependencies/controller.ts new file mode 100644 index 000000000..c2e43c005 --- /dev/null +++ b/src/main/core/dependencies/controller.ts @@ -0,0 +1,12 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { localDependencyManager } from './dependency-manager'; +import { DependencyCategory, DependencyId } from './types'; + +export const dependenciesController = createRPCController({ + getAll: () => Object.fromEntries(localDependencyManager.getAll()), + get: (id: DependencyId) => localDependencyManager.get(id), + getByCategory: (cat: DependencyCategory) => localDependencyManager.getByCategory(cat), + probe: (id: DependencyId) => localDependencyManager.probe(id), + probeAll: () => localDependencyManager.probeAll(), + probeCategory: (cat: DependencyCategory) => localDependencyManager.probeCategory(cat), +}); diff --git a/src/main/core/dependencies/dependency-manager.ts b/src/main/core/dependencies/dependency-manager.ts new file mode 100644 index 000000000..b170fb152 --- /dev/null +++ b/src/main/core/dependencies/dependency-manager.ts @@ -0,0 +1,218 @@ +import os from 'node:os'; +import { dependencyStatusUpdatedChannel } from '@shared/events/appEvents'; +import { spawnLocalPty } from '@main/core/pty/local-pty'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; +import { resolveCommandPath, runVersionProbe } from './probe'; +import { DEPENDENCIES, getDependencyDescriptor } from './registry'; +import type { + DependencyCategory, + DependencyDescriptor, + DependencyId, + DependencyState, + DependencyStatus, + ProbeResult, +} from './types'; + +const VERSION_RE = /(\d+\.\d+[\d.]*)/; + +function resolveProbeStatus( + descriptor: DependencyDescriptor, + resolvedPath: string | null, + probe: ProbeResult +): DependencyStatus { + if (descriptor.resolveStatus) { + return 
descriptor.resolveStatus(probe); + } + if (resolvedPath !== null) return 'available'; + if (probe.exitCode !== null && (probe.stdout || probe.stderr)) return 'available'; + if (probe.timedOut && probe.stdout) return 'available'; + return probe.exitCode === null ? 'missing' : 'error'; +} + +function extractVersion(probe: ProbeResult): string | null { + const raw = (probe.stdout || probe.stderr).trim(); + const firstLine = raw.split('\n')[0]?.trim() ?? ''; + // Extract a version-like token, e.g. "git version 2.39.0" → "2.39.0" + const m = VERSION_RE.exec(firstLine); + return m ? m[1] : firstLine || null; +} + +function dependencyStateFromProbeResult( + descriptor: DependencyDescriptor, + resolvedPath: string | null, + probe: ProbeResult | null +): DependencyState { + let status: DependencyStatus; + let version: string | null = null; + + if (probe === null) { + status = resolvedPath !== null ? 'available' : 'missing'; + } else { + status = resolveProbeStatus(descriptor, resolvedPath, probe); + } + + if (status === 'available' && probe) { + version = extractVersion(probe); + } + + return { + id: descriptor.id, + status, + version, + path: resolvedPath, + checkedAt: Date.now(), + error: status === 'error' ? probe?.stderr?.trim() || 'Unknown error' : undefined, + }; +} + +export class LocalDependencyManager { + private state = new Map(); + + /** + * Kick off background probing for all dependencies. Returns immediately; + * results stream in via `dependencyStatusUpdatedChannel` events. + */ + initialize(): void { + void this.probeAll(); + } + + getAll(): Map { + return new Map(this.state); + } + + get(id: DependencyId): DependencyState | undefined { + return this.state.get(id); + } + + getByCategory(cat: DependencyCategory): DependencyState[] { + return [...this.state.values()].filter((s) => { + const desc = getDependencyDescriptor(s.id); + return desc?.category === cat; + }); + } + + /** + * Two-phase probe for a single dependency: + * 1. 
Resolve path (fast, ~5ms) — emits an event immediately. + * 2. Run version probe (slow, up to 10s) — emits a second event on completion. + */ + async probe(id: DependencyId): Promise { + const descriptor = getDependencyDescriptor(id); + if (!descriptor) { + throw new Error(`Unknown dependency id: ${id}`); + } + + // Phase 1: path resolution + const resolvedPath = await this.resolveFirstPath(descriptor); + const pathState = dependencyStateFromProbeResult(descriptor, resolvedPath, null); + this.updateState(pathState); + + if (pathState.status === 'missing') { + return pathState; + } + + // Phase 2: version probe + const versionArgs = descriptor.versionArgs ?? ['--version']; + const probeResult = await runVersionProbe( + descriptor.commands[0] ?? id, + resolvedPath, + versionArgs + ); + const fullState = dependencyStateFromProbeResult(descriptor, resolvedPath, probeResult); + this.updateState(fullState); + + return fullState; + } + + async probeAll(): Promise { + await Promise.all( + DEPENDENCIES.map((d) => + this.probe(d.id).catch((err) => { + log.warn(`[DependencyManager] Failed to probe ${d.id}:`, err); + }) + ) + ); + } + + async probeCategory(cat: DependencyCategory): Promise { + const targets = DEPENDENCIES.filter((d) => d.category === cat); + await Promise.all( + targets.map((d) => + this.probe(d.id).catch((err) => { + log.warn(`[DependencyManager] Failed to probe ${d.id}:`, err); + }) + ) + ); + } + + /** + * Run the installCommand for a dependency, then re-probe to update state. + * Returns the updated DependencyState after installation attempt. 
+ */ + async install(id: DependencyId): Promise { + const descriptor = getDependencyDescriptor(id); + if (!descriptor) { + throw new Error(`Unknown dependency id: ${id}`); + } + if (!descriptor.installCommand) { + throw new Error(`No install command for dependency: ${id}`); + } + + log.info(`[DependencyManager] Installing ${id}: ${descriptor.installCommand}`); + + await this.runWithLocalPty(descriptor.installCommand); + + return this.probe(id); + } + + private runWithLocalPty(command: string): Promise { + return new Promise((resolve, reject) => { + const shell = process.env.SHELL ?? '/bin/sh'; + const result = spawnLocalPty({ + id: `install:${crypto.randomUUID()}`, + command: shell, + args: ['-c', command], + cwd: os.homedir(), + env: process.env as Record, + cols: 80, + rows: 24, + }); + + if (!result.success) { + const msg = + result.error.kind === 'spawn-failed' ? result.error.message : 'PTY support is disabled'; + reject(new Error(msg)); + return; + } + + const chunks: string[] = []; + result.data.onData((chunk: string) => chunks.push(chunk)); + result.data.onExit(({ exitCode }) => { + if (exitCode) { + log.info(`[DependencyManager] Install succeeded`); + resolve(); + } else { + const output = chunks.join('').trim(); + log.error(`[DependencyManager] Install failed`, { exitCode, output }); + reject(new Error(`Install failed (exit ${exitCode ?? 
'?'}): ${output}`)); + } + }); + }); + } + + private async resolveFirstPath(descriptor: DependencyDescriptor): Promise { + for (const command of descriptor.commands) { + const path = await resolveCommandPath(command); + if (path) return path; + } + return null; + } + + private updateState(state: DependencyState): void { + this.state.set(state.id, state); + events.emit(dependencyStatusUpdatedChannel, { id: state.id, state }); + } +} + +export const localDependencyManager = new LocalDependencyManager(); diff --git a/src/main/core/dependencies/probe.ts b/src/main/core/dependencies/probe.ts new file mode 100644 index 000000000..71bec2841 --- /dev/null +++ b/src/main/core/dependencies/probe.ts @@ -0,0 +1,73 @@ +import { execFile, spawn } from 'node:child_process'; +import { promisify } from 'node:util'; +import type { ProbeResult } from './types'; + +const execFileAsync = promisify(execFile); + +const WHICH_TIMEOUT_MS = 5_000; +const VERSION_PROBE_TIMEOUT_MS = 10_000; + +/** + * Resolves the absolute path of a command binary using `which` (Unix) or `where` (Windows). + * Returns `null` if the command is not found or the resolution fails. + */ +export async function resolveCommandPath(command: string): Promise { + const resolver = process.platform === 'win32' ? 'where' : 'which'; + try { + const { stdout } = await execFileAsync(resolver, [command], { + timeout: WHICH_TIMEOUT_MS, + encoding: 'utf8', + }); + const firstLine = stdout.trim().split('\n')[0]?.trim(); + return firstLine ?? null; + } catch { + return null; + } +} + +/** + * Spawns `command args` and collects stdout/stderr up to a timeout. + * Never throws — all failures are captured in the returned `ProbeResult`. + */ +export async function runVersionProbe( + command: string, + resolvedPath: string | null, + args: string[], + timeoutMs: number = VERSION_PROBE_TIMEOUT_MS +): Promise { + const bin = resolvedPath ?? 
command; + + return new Promise((resolve) => { + let stdout = ''; + let stderr = ''; + let settled = false; + + const proc = spawn(bin, args, { timeout: timeoutMs, windowsHide: true }); + + const finish = (exitCode: number | null, timedOut: boolean) => { + if (settled) return; + settled = true; + resolve({ command, path: resolvedPath, stdout, stderr, exitCode, timedOut }); + }; + + proc.stdout?.on('data', (chunk: Buffer) => { + stdout += chunk.toString(); + }); + proc.stderr?.on('data', (chunk: Buffer) => { + stderr += chunk.toString(); + }); + + proc.on('close', (code) => finish(code, false)); + proc.on('error', () => finish(null, false)); + + const timer = setTimeout(() => { + if (!settled) { + proc.kill(); + finish(null, true); + } + }, timeoutMs); + + proc.on('close', () => clearTimeout(timer)); + proc.on('error', () => clearTimeout(timer)); + }); +} diff --git a/src/main/core/dependencies/registry.ts b/src/main/core/dependencies/registry.ts new file mode 100644 index 000000000..e9ddcfa96 --- /dev/null +++ b/src/main/core/dependencies/registry.ts @@ -0,0 +1,96 @@ +import { listDetectableProviders } from '@shared/agent-provider-registry'; +import type { DependencyDescriptor, DependencyStatus, ProbeResult } from './types'; + +const CORE_DEPENDENCIES: DependencyDescriptor[] = [ + { + id: 'git', + name: 'Git', + category: 'core', + commands: ['git'], + versionArgs: ['--version'], + docUrl: 'https://git-scm.com', + installHint: 'Install Git from https://git-scm.com/downloads', + }, + { + id: 'gh', + name: 'GitHub CLI', + category: 'core', + commands: ['gh'], + versionArgs: ['--version'], + docUrl: 'https://cli.github.com', + installHint: 'Run: brew install gh (or see https://cli.github.com)', + installCommand: (() => { + switch (process.platform) { + case 'darwin': + return 'brew install gh'; + case 'linux': + return 'sudo apt update && sudo apt install -y gh'; + case 'win32': + return 'winget install GitHub.cli'; + default: + return undefined; + } + })(), + }, + { 
+ id: 'tmux', + name: 'tmux', + category: 'core', + commands: ['tmux'], + versionArgs: ['-V'], + docUrl: 'https://github.com/tmux/tmux', + installHint: 'Run: brew install tmux', + }, + { + id: 'ssh', + name: 'SSH', + category: 'core', + commands: ['ssh'], + versionArgs: ['-V'], + docUrl: 'https://www.openssh.com', + }, + { + id: 'node', + name: 'Node.js', + category: 'core', + commands: ['node'], + versionArgs: ['--version'], + docUrl: 'https://nodejs.org', + installHint: 'Install Node.js from https://nodejs.org or via nvm', + }, +]; + +/** + * Agents that output their version on stderr, time out during probing, or return + * a non-zero exit code are still "available" if a path was resolved or any output + * was produced. This mirrors the logic in ConnectionsService.resolveStatus(). + */ +function agentResolveStatus(result: ProbeResult): DependencyStatus { + if (result.path !== null) return 'available'; + if (result.timedOut && result.stdout) return 'available'; + if (result.exitCode !== null && (result.stdout || result.stderr)) return 'available'; + return result.exitCode === null ? 'missing' : 'error'; +} + +function buildAgentDependencies(): DependencyDescriptor[] { + return listDetectableProviders().map((provider) => ({ + id: provider.id, + name: provider.name, + category: 'agent' as const, + commands: provider.commands ?? [provider.cli ?? provider.id], + versionArgs: provider.versionArgs ?? ['--version'], + docUrl: provider.docUrl, + installHint: provider.installCommand ? 
`Run: ${provider.installCommand}` : undefined, + installCommand: provider.installCommand, + resolveStatus: agentResolveStatus, + })); +} + +export const DEPENDENCIES: DependencyDescriptor[] = [ + ...CORE_DEPENDENCIES, + ...buildAgentDependencies(), +]; + +export function getDependencyDescriptor(id: string): DependencyDescriptor | undefined { + return DEPENDENCIES.find((d) => d.id === id); +} diff --git a/src/main/core/dependencies/types.ts b/src/main/core/dependencies/types.ts new file mode 100644 index 000000000..127a48c70 --- /dev/null +++ b/src/main/core/dependencies/types.ts @@ -0,0 +1,47 @@ +import type { AgentProviderId } from '@shared/agent-provider-registry'; + +export type DependencyCategory = 'core' | 'agent'; + +export type CoreDependencyId = 'git' | 'gh' | 'tmux' | 'ssh' | 'node'; + +export type DependencyId = CoreDependencyId | AgentProviderId; + +export interface ProbeResult { + command: string; + path: string | null; + stdout: string; + stderr: string; + exitCode: number | null; + timedOut: boolean; +} + +export type DependencyStatus = 'available' | 'missing' | 'error'; + +export interface DependencyDescriptor { + id: DependencyId; + name: string; + category: DependencyCategory; + /** Binary names to try in order; first success wins. */ + commands: string[]; + /** Args passed when probing for a version string. Defaults to ['--version']. */ + versionArgs?: string[]; + docUrl?: string; + /** Human-readable installation hint shown in UI. */ + installHint?: string; + /** Machine-executable install command, e.g. "npm install -g @openai/codex". */ + installCommand?: string; + /** + * Override the default status resolution logic. + * Useful for CLIs that exit non-zero on `--version` but are still available. 
+ */ + resolveStatus?: (result: ProbeResult) => DependencyStatus; +} + +export interface DependencyState { + id: DependencyId; + status: DependencyStatus; + version: string | null; + path: string | null; + checkedAt: number; + error?: string; +} diff --git a/src/main/core/editor/controller.ts b/src/main/core/editor/controller.ts new file mode 100644 index 000000000..22a3666cf --- /dev/null +++ b/src/main/core/editor/controller.ts @@ -0,0 +1,13 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { editorBufferService } from './editor-buffer-service'; + +export const editorBufferController = createRPCController({ + saveBuffer: (projectId: string, taskId: string, filePath: string, content: string) => + editorBufferService.saveBuffer(projectId, taskId, filePath, content), + + clearBuffer: (projectId: string, taskId: string, filePath: string) => + editorBufferService.clearBuffer(projectId, taskId, filePath), + + listBuffers: (projectId: string, taskId: string) => + editorBufferService.listBuffers(projectId, taskId), +}); diff --git a/src/main/core/editor/editor-buffer-service.ts b/src/main/core/editor/editor-buffer-service.ts new file mode 100644 index 000000000..1527baa78 --- /dev/null +++ b/src/main/core/editor/editor-buffer-service.ts @@ -0,0 +1,48 @@ +import { and, eq, lt } from 'drizzle-orm'; +import { db } from '@/main/db/client'; +import { editorBuffers } from '@/main/db/schema'; + +export class EditorBufferService { + async saveBuffer( + projectId: string, + taskId: string, + filePath: string, + content: string + ): Promise { + const id = `${projectId}:${taskId}:${filePath}`; + await db + .insert(editorBuffers) + .values({ id, projectId, taskId, filePath, content, updatedAt: Date.now() }) + .onConflictDoUpdate({ + target: editorBuffers.id, + set: { content, updatedAt: Date.now() }, + }); + } + + async clearBuffer(projectId: string, taskId: string, filePath: string): Promise { + const id = `${projectId}:${taskId}:${filePath}`; + await 
db.delete(editorBuffers).where(eq(editorBuffers.id, id)); + } + + async clearAllForTask(taskId: string): Promise { + await db.delete(editorBuffers).where(eq(editorBuffers.taskId, taskId)); + } + + async listBuffers( + projectId: string, + taskId: string + ): Promise<{ filePath: string; content: string }[]> { + const rows = await db + .select({ filePath: editorBuffers.filePath, content: editorBuffers.content }) + .from(editorBuffers) + .where(and(eq(editorBuffers.projectId, projectId), eq(editorBuffers.taskId, taskId))); + return rows; + } + + async pruneStale(olderThanMs: number): Promise { + const cutoff = Date.now() - olderThanMs; + await db.delete(editorBuffers).where(lt(editorBuffers.updatedAt, cutoff)); + } +} + +export const editorBufferService = new EditorBufferService(); diff --git a/src/main/core/fs/controller.ts b/src/main/core/fs/controller.ts new file mode 100644 index 000000000..740d7ad21 --- /dev/null +++ b/src/main/core/fs/controller.ts @@ -0,0 +1,273 @@ +import { planEventChannel } from '@shared/events/appEvents'; +import { fsWatchEventChannel } from '@shared/events/fsEvents'; +import { createRPCController } from '@shared/ipc/rpc'; +import { events } from '@main/lib/events'; +import { err, ok } from '@main/lib/result'; +import { resolveTask } from '../projects/utils'; +import { + FileSystemErrorCodes, + type FileWatcher, + type ListOptions, + type SearchOptions, +} from './types'; + +// One watcher per (projectId, taskId) pair, shared across all consumers via labels. +// Local: single recursive @parcel/watcher subscription — update() is a no-op. +// SSH: poll-based — update() receives the union of all labels' paths to poll. +const watcherRegistry = new Map(); +// Per-label path groups, keyed by `${projectId}::${taskId}` → label → paths. +// Paths are forwarded to update() for SSH compatibility; local ignores them. 
+const watcherLabeledPaths = new Map>(); + +export const filesController = createRPCController({ + listFiles: async (projectId: string, taskId: string, dirPath: string, options?: ListOptions) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const result = await env.fs.list(dirPath, options); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + readFile: async (projectId: string, taskId: string, filePath: string, maxBytes?: number) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const result = await env.fs.read(filePath, maxBytes); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + writeFile: async (projectId: string, taskId: string, filePath: string, content: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const result = await env.fs.write(filePath, content); + return ok(result); + } catch (e) { + if ( + e instanceof Error && + (e as unknown as { code?: string }).code === FileSystemErrorCodes.PERMISSION_DENIED + ) { + events.emit(planEventChannel, { + type: 'write_blocked' as const, + root: projectId, + relPath: filePath, + message: e.message, + }); + } + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + removeFile: async (projectId: string, taskId: string, filePath: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + if (!env.fs.remove) { + return err({ + type: 'fs_error' as const, + message: 'remove not supported by this 
filesystem', + }); + } + + try { + const result = await env.fs.remove(filePath); + return ok(result); + } catch (e) { + if ( + e instanceof Error && + (e as unknown as { code?: string }).code === FileSystemErrorCodes.PERMISSION_DENIED + ) { + events.emit(planEventChannel, { + type: 'remove_blocked' as const, + root: projectId, + relPath: filePath, + message: e.message, + }); + } + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + readImage: async (projectId: string, taskId: string, filePath: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + if (!env.fs.readImage) { + return err({ + type: 'fs_error' as const, + message: 'readImage not supported by this filesystem', + }); + } + + try { + const result = await env.fs.readImage(filePath); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + searchFiles: async ( + projectId: string, + taskId: string, + query: string, + options?: SearchOptions + ) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const result = await env.fs.search(query, options); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + statFile: async (projectId: string, taskId: string, filePath: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const entry = await env.fs.stat(filePath); + return ok({ entry }); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + fileExists: async (projectId: string, taskId: string, filePath: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return 
err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + try { + const exists = await env.fs.exists(filePath); + return ok({ exists }); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + getProjectConfig: async (projectId: string, taskId: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + if (!env.fs.getProjectConfig) { + return err({ + type: 'fs_error' as const, + message: 'getProjectConfig not supported by this filesystem', + }); + } + + try { + const result = await env.fs.getProjectConfig(); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + saveProjectConfig: async (projectId: string, taskId: string, content: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + if (!env.fs.saveProjectConfig) { + return err({ + type: 'fs_error' as const, + message: 'saveProjectConfig not supported by this filesystem', + }); + } + + try { + const result = await env.fs.saveProjectConfig(content); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + saveAttachment: async (projectId: string, taskId: string, srcPath: string, subdir?: string) => { + const env = resolveTask(projectId, taskId); + if (!env) + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + + if (!env.fs.saveAttachment) { + return err({ + type: 'fs_error' as const, + message: 'saveAttachment not supported by this filesystem', + }); + } + + try { + const result = await env.fs.saveAttachment(srcPath, subdir); + return ok(result); + } catch (e) { + return err({ type: 'fs_error' as const, message: String(e) }); + } + }, + + watchSetPaths: async 
(projectId: string, taskId: string, paths: string[], label = 'default') => { + const env = resolveTask(projectId, taskId); + if (!env) { + return err({ type: 'not_found' as const, entity: 'filesystem' as const, detail: undefined }); + } + + if (!env.fs.watch) { + return ok({ supported: false as const }); + } + + const key = `${projectId}::${taskId}`; + const groups = watcherLabeledPaths.get(key) ?? new Map(); + groups.set(label, paths); + watcherLabeledPaths.set(key, groups); + const union = [...new Set([...groups.values()].flat())]; + + const existing = watcherRegistry.get(key); + if (existing) { + // For SSH: update the union of watched paths across all labels. + // For local: update() is a no-op since the recursive watcher covers everything. + existing.update(union); + } else { + const watcher = env.fs.watch((evts) => { + events.emit(fsWatchEventChannel, { projectId, taskId, events: evts }, taskId); + }); + watcher.update(union); + watcherRegistry.set(key, watcher); + } + return ok({ supported: true as const }); + }, + + watchStop: async (projectId: string, taskId: string, label = 'default') => { + const key = `${projectId}::${taskId}`; + const groups = watcherLabeledPaths.get(key); + groups?.delete(label); + if (!groups?.size) { + watcherLabeledPaths.delete(key); + watcherRegistry.get(key)?.close(); + watcherRegistry.delete(key); + } else { + const union = [...new Set([...groups.values()].flat())]; + watcherRegistry.get(key)?.update(union); + } + return ok({}); + }, +}); diff --git a/src/main/services/fs/__tests__/LocalFileSystem.test.ts b/src/main/core/fs/impl/local-fs.test.ts similarity index 98% rename from src/main/services/fs/__tests__/LocalFileSystem.test.ts rename to src/main/core/fs/impl/local-fs.test.ts index 6a9f4c2f6..1f6709bf4 100644 --- a/src/main/services/fs/__tests__/LocalFileSystem.test.ts +++ b/src/main/core/fs/impl/local-fs.test.ts @@ -1,9 +1,9 @@ -import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import fs from 'fs'; 
-import path from 'path'; -import os from 'os'; -import { LocalFileSystem } from '../LocalFileSystem'; -import { FileSystemError, FileSystemErrorCodes } from '../types'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { FileSystemError } from '../types'; +import { LocalFileSystem } from './local-fs'; describe('LocalFileSystem', () => { let tempDir: string; diff --git a/src/main/services/fs/LocalFileSystem.ts b/src/main/core/fs/impl/local-fs.ts similarity index 64% rename from src/main/services/fs/LocalFileSystem.ts rename to src/main/core/fs/impl/local-fs.ts index 578dced44..89d7c08fe 100644 --- a/src/main/services/fs/LocalFileSystem.ts +++ b/src/main/core/fs/impl/local-fs.ts @@ -1,26 +1,24 @@ -/** - * Local FileSystem implementation - * Wraps Node.js fs operations for local disk access with security and performance features - */ - -import { promises as fs, createReadStream } from 'fs'; -import type { Stats } from 'fs'; -import { join, resolve, relative, dirname, extname, sep } from 'path'; -import { createInterface } from 'readline'; +import { createReadStream, promises as fs, statSync, type Stats } from 'node:fs'; +import { basename, dirname, extname, join, relative, resolve, sep } from 'node:path'; +import { createInterface } from 'node:readline'; +import parcelWatcher from '@parcel/watcher'; +import ignore from 'ignore'; +import type { FileWatchEvent } from '@shared/fs'; import { - IFileSystem, - FileListResult, - ReadResult, - WriteResult, - SearchResult, - ListOptions, - SearchOptions, + DEFAULT_EMDASH_CONFIG, FileEntry, - SearchMatch, + FileListResult, FileSystemError, FileSystemErrorCodes, -} from './types'; -import { GitIgnoreParser } from '../../utils/gitIgnore'; + FileSystemProvider, + FileWatcher, + ListOptions, + ReadResult, + SearchMatch, + SearchOptions, + SearchResult, + WriteResult, +} from '../types'; // Binary file extensions to 
skip during search const BINARY_EXTENSIONS = new Set([ @@ -84,6 +82,41 @@ const SEARCH_IGNORES = new Set([ '.parcel-cache', ]); +// Directory names to exclude from the filesystem watcher. +// Note: .git is intentionally omitted so that index/HEAD change events +// reach monaco-model-registry for git model refreshes. +const WATCH_IGNORED_NAMES = [ + '.svn', + '.hg', + 'node_modules', + 'dist', + 'build', + '.next', + '.nuxt', + 'coverage', + '__pycache__', + '.pytest_cache', + 'venv', + '.venv', + 'target', + '.terraform', + '.serverless', + 'worktrees', + '.emdash', + '.conductor', + '.cursor', + '.claude', + '.amp', + '.codex', + '.aider', + '.continue', + '.cody', + '.windsurf', +]; + +// Glob patterns for parcel/watcher ignore option, derived from WATCH_IGNORED_NAMES. +const WATCH_IGNORE_GLOBS = WATCH_IGNORED_NAMES.map((n) => `**/${n}/**`); + // Allowed image extensions for readImage const ALLOWED_IMAGE_EXTENSIONS = new Set([ '.png', @@ -108,7 +141,9 @@ const IMAGE_MIME_TYPES: Record = { '.ico': 'image/x-icon', }; -export class LocalFileSystem implements IFileSystem { +export class LocalFileSystem implements FileSystemProvider { + private listAbort: AbortController | null = null; + constructor(private projectPath: string) { if (!projectPath) { throw new FileSystemError('Project path is required', FileSystemErrorCodes.INVALID_PATH); @@ -116,6 +151,17 @@ export class LocalFileSystem implements IFileSystem { this.projectPath = resolve(projectPath); } + /** + * Cancel any in-flight list() traversal. Used by the IPC layer for per-sender debouncing. + * The in-process traversal checks the abort signal and exits early on the next tick. 
+ */ + cancelPendingList(): void { + if (this.listAbort) { + this.listAbort.abort(); + this.listAbort = null; + } + } + /** * Resolve and validate a relative path, ensuring it doesn't escape the project root */ @@ -185,18 +231,21 @@ export class LocalFileSystem implements IFileSystem { const maxEntries = options.maxEntries || 10000; const timeBudgetMs = options.timeBudgetMs || 30000; + const abort = new AbortController(); + this.listAbort = abort; + let truncated = false; let truncateReason: 'maxEntries' | 'timeBudget' | undefined; const listDir = async (dirPath: string, recursive: boolean) => { - // Check time budget + if (abort.signal.aborted) return; + if (Date.now() - startTime > timeBudgetMs) { truncated = true; truncateReason = 'timeBudget'; return; } - // Check entry limit if (entries.length >= maxEntries) { truncated = true; truncateReason = 'maxEntries'; @@ -206,25 +255,23 @@ export class LocalFileSystem implements IFileSystem { let items; try { items = await fs.readdir(dirPath, { withFileTypes: true }); - } catch (err) { - // Skip directories we can't read + } catch { return; } for (const item of items) { - // Check time budget periodically + if (abort.signal.aborted) return; + if (entries.length % 100 === 0 && Date.now() - startTime > timeBudgetMs) { truncated = true; truncateReason = 'timeBudget'; return; } - // Skip hidden files if not included if (!options.includeHidden && item.name.startsWith('.')) { continue; } - // Skip ignored directories if (this.shouldIgnore(item.name)) { continue; } @@ -242,7 +289,6 @@ export class LocalFileSystem implements IFileSystem { mode: stat.mode, }; - // Apply filter if specified if (options.filter) { const filterRegex = new RegExp(options.filter); if (!filterRegex.test(item.name)) { @@ -252,14 +298,12 @@ export class LocalFileSystem implements IFileSystem { entries.push(entry); - // Check entry limit if (entries.length >= maxEntries) { truncated = true; truncateReason = 'maxEntries'; return; } - // Recurse into 
subdirectories if (recursive && item.isDirectory()) { await listDir(itemPath, true); } @@ -271,6 +315,10 @@ export class LocalFileSystem implements IFileSystem { await listDir(fullPath, options.recursive || false); + if (this.listAbort === abort) { + this.listAbort = null; + } + return { entries, total: entries.length, @@ -376,7 +424,6 @@ export class LocalFileSystem implements IFileSystem { async search(query: string, options: SearchOptions = {}): Promise { const pattern = options.pattern || query; - const startTime = Date.now(); const matches: SearchMatch[] = []; const maxResults = options.maxResults || 10000; const fileExtensions = options.fileExtensions; @@ -385,15 +432,11 @@ export class LocalFileSystem implements IFileSystem { let filesSearched = 0; let truncated = false; - const searchRegex = caseSensitive - ? new RegExp(pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')) - : new RegExp(pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'i'); - - let gitIgnore: GitIgnoreParser | undefined; + let gitIgnore: ReturnType | undefined; try { const gitIgnorePath = join(this.projectPath, '.gitignore'); const content = await fs.readFile(gitIgnorePath, 'utf-8'); - gitIgnore = new GitIgnoreParser(content); + gitIgnore = ignore().add(content); } catch { // Ignore error reading .gitignore } @@ -525,9 +568,10 @@ export class LocalFileSystem implements IFileSystem { try { await fs.unlink(fullPath); return { success: true }; - } catch (err: any) { + } catch (err: unknown) { + const code = (err as NodeJS.ErrnoException).code; // Attempt chmod retry on permission error - if (err.code === 'EACCES' || err.code === 'EPERM') { + if (code === 'EACCES' || code === 'EPERM') { try { await fs.chmod(fullPath, 0o666); await fs.unlink(fullPath); @@ -536,7 +580,102 @@ export class LocalFileSystem implements IFileSystem { return { success: false, error: `Permission denied: ${path}` }; } } - return { success: false, error: err.message }; + return { success: false, error: err instanceof Error ? 
err.message : String(err) }; + } + } + + async getProjectConfig(): Promise<{ success: boolean; content?: string; error?: string }> { + const configPath = join(this.projectPath, '.emdash.json'); + try { + try { + const content = await fs.readFile(configPath, 'utf-8'); + return { success: true, content }; + } catch (err: unknown) { + const code = (err as NodeJS.ErrnoException).code; + if (code !== 'ENOENT') throw err; + // File doesn't exist — create with defaults + await fs.writeFile(configPath, DEFAULT_EMDASH_CONFIG, 'utf-8'); + return { success: true, content: DEFAULT_EMDASH_CONFIG }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err) }; + } + } + + async saveProjectConfig(content: string): Promise<{ success: boolean; error?: string }> { + try { + JSON.parse(content); + } catch { + return { success: false, error: 'Invalid JSON format' }; + } + const configPath = join(this.projectPath, '.emdash.json'); + try { + await fs.writeFile(configPath, content, 'utf-8'); + return { success: true }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err) }; + } + } + + async saveAttachment( + srcPath: string, + subdir?: string + ): Promise<{ + success: boolean; + absPath?: string; + relPath?: string; + fileName?: string; + error?: string; + }> { + const ALLOWED_ATTACHMENT_EXTENSIONS = new Set([ + '.png', + '.jpg', + '.jpeg', + '.gif', + '.webp', + '.bmp', + '.svg', + ]); + + try { + try { + await fs.access(srcPath); + } catch { + return { success: false, error: 'Source file not found' }; + } + + const ext = extname(srcPath).toLowerCase(); + if (!ALLOWED_ATTACHMENT_EXTENSIONS.has(ext)) { + return { success: false, error: 'Unsupported attachment type' }; + } + + const destDir = join(this.projectPath, '.emdash', subdir ?? 
'attachments'); + await fs.mkdir(destDir, { recursive: true }); + + const baseName = basename(srcPath); + let destName = baseName; + let counter = 1; + let destAbs = join(destDir, destName); + + while (true) { + try { + await fs.access(destAbs); + // File exists — try next name + const nameWithoutExt = basename(baseName, ext); + destName = `${nameWithoutExt}-${counter}${ext}`; + destAbs = join(destDir, destName); + counter++; + } catch { + // File does not exist — safe to write here + break; + } + } + + await fs.copyFile(srcPath, destAbs); + const relPath = relative(this.projectPath, destAbs); + return { success: true, absPath: destAbs, relPath, fileName: destName }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err) }; } } @@ -590,8 +729,84 @@ export class LocalFileSystem implements IFileSystem { mimeType, size: stat.size, }; - } catch (err: any) { - return { success: false, error: err.message }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err) }; } } + + async mkdir(dirPath: string, options?: { recursive?: boolean }): Promise { + await fs.mkdir(this.resolvePath(dirPath), { recursive: options?.recursive ?? false }); + } + + watch( + callback: (events: FileWatchEvent[]) => void, + options: { debounceMs?: number } = {} + ): FileWatcher { + const stabilityMs = options.debounceMs ?? 200; + let pending: FileWatchEvent[] = []; + let flushTimer: ReturnType | null = null; + // Set when the async subscribe resolves; used by close() if it resolves after close() is called. 
+ let resolvedSub: parcelWatcher.AsyncSubscription | null = null; + let closed = false; + + const flush = () => { + if (pending.length) { + callback(pending); + pending = []; + } + }; + + const enqueue = (evt: FileWatchEvent) => { + pending.push(evt); + if (flushTimer) clearTimeout(flushTimer); + flushTimer = setTimeout(flush, stabilityMs); + }; + + const toRel = (absPath: string) => relative(this.projectPath, absPath).replace(/\\/g, '/'); + + void parcelWatcher + .subscribe( + this.projectPath, + (err, events) => { + if (err) return; + for (const e of events) { + const rel = toRel(e.path); + // Skip paths outside the project root (shouldn't happen, but guard anyway). + if (rel.startsWith('..')) continue; + + let entryType: 'file' | 'directory' = 'file'; + if (e.type !== 'delete') { + try { + entryType = statSync(e.path).isDirectory() ? 'directory' : 'file'; + } catch { + // File removed between the event and the stat — treat as file. + } + } + const type = e.type === 'update' ? ('modify' as const) : e.type; + enqueue({ type, entryType, path: rel }); + } + }, + { ignore: WATCH_IGNORE_GLOBS } + ) + .then((sub) => { + if (closed) { + void sub.unsubscribe(); + } else { + resolvedSub = sub; + } + }) + .catch(() => { + // Subscription failed (e.g. project path removed before watch started). + }); + + return { + // No-op: the recursive subscription already covers the entire worktree. 
+      update(_paths: string[]) {},
+      close() {
+        closed = true;
+        if (flushTimer) clearTimeout(flushTimer);
+        if (resolvedSub) void resolvedSub.unsubscribe();
+      },
+    };
+  }
+}
diff --git a/src/main/services/fs/RemoteFileSystem.ts b/src/main/core/fs/impl/ssh-fs.ts
similarity index 78%
rename from src/main/services/fs/RemoteFileSystem.ts
rename to src/main/core/fs/impl/ssh-fs.ts
index 1f0535948..ed16a4163 100644
--- a/src/main/services/fs/RemoteFileSystem.ts
+++ b/src/main/core/fs/impl/ssh-fs.ts
@@ -3,21 +3,24 @@
  * Uses SFTP over SSH for remote filesystem operations
  */
 
-import { SFTPWrapper } from 'ssh2';
+import type { SFTPWrapper } from 'ssh2';
+import type { FileWatchEvent } from '@shared/fs';
+import { quoteShellArg } from '../../../utils/shellEscape';
+import type { SshClientProxy } from '../../ssh/ssh-client-proxy';
 import {
-  IFileSystem,
-  FileListResult,
-  ReadResult,
-  WriteResult,
-  SearchResult,
-  ListOptions,
-  SearchOptions,
+  DEFAULT_EMDASH_CONFIG,
   FileEntry,
+  FileListResult,
   FileSystemError,
   FileSystemErrorCodes,
-} from './types';
-import { SshService } from '../ssh/SshService';
-import { quoteShellArg } from '../../utils/shellEscape';
+  FileSystemProvider,
+  FileWatcher,
+  ListOptions,
+  ReadResult,
+  SearchOptions,
+  SearchResult,
+  WriteResult,
+} from '../types';
 
 /**
  * Allowed image extensions for readImage
@@ -35,21 +38,16 @@ const MAX_READ_SIZE = 100 * 1024 * 1024;
 const DEFAULT_MAX_BYTES = 200 * 1024;
 
 /**
- * RemoteFileSystem implements IFileSystem using SFTP over SSH.
+ * SshFileSystem implements FileSystemProvider using SFTP over SSH.
*/ -export class RemoteFileSystem implements IFileSystem { +export class SshFileSystem implements FileSystemProvider { + private cachedSftp: SFTPWrapper | undefined; + constructor( - private sshService: SshService, - private connectionId: string, - private remotePath: string + private readonly proxy: SshClientProxy, + private readonly remotePath: string ) { - if (!sshService) { - throw new FileSystemError('SSH service is required', FileSystemErrorCodes.CONNECTION_ERROR); - } - if (!connectionId) { - throw new FileSystemError('Connection ID is required', FileSystemErrorCodes.CONNECTION_ERROR); - } if (!remotePath) { throw new FileSystemError('Remote path is required', FileSystemErrorCodes.INVALID_PATH); } @@ -57,13 +55,52 @@ export class RemoteFileSystem implements IFileSystem { this.remotePath = remotePath.replace(/\\/g, '/'); } + // ─── Private helpers ────────────────────────────────────────────────────── + + private getSftp(): Promise { + if (this.cachedSftp) return Promise.resolve(this.cachedSftp); + return new Promise((resolve, reject) => { + this.proxy.client.sftp((err, sftp) => { + if (err) return reject(err); + this.cachedSftp = sftp; + sftp.on('close', () => { + this.cachedSftp = undefined; + }); + resolve(sftp); + }); + }); + } + + private exec(command: string): Promise<{ stdout: string; stderr: string; exitCode: number }> { + const full = `bash -l -c ${quoteShellArg(command)}`; + return new Promise((resolve, reject) => { + this.proxy.client.exec(full, (err, stream) => { + if (err) return reject(err); + let stdout = ''; + let stderr = ''; + stream.on('close', (code: number | null) => { + resolve({ stdout: stdout.trim(), stderr: stderr.trim(), exitCode: code ?? 
-1 }); + }); + stream.on('data', (d: Buffer) => { + stdout += d.toString('utf-8'); + }); + stream.stderr.on('data', (d: Buffer) => { + stderr += d.toString('utf-8'); + }); + stream.on('error', reject); + }); + }); + } + + // ─── IFileSystem ────────────────────────────────────────────────────────── + /** * List directory contents via SFTP */ async list(path: string = '', options?: ListOptions): Promise { const startTime = Date.now(); const fullPath = this.resolveRemotePath(path); - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); return new Promise((resolve, reject) => { sftp.readdir(fullPath, (err, list) => { @@ -156,7 +193,7 @@ export class RemoteFileSystem implements IFileSystem { */ async read(path: string, maxBytes: number = DEFAULT_MAX_BYTES): Promise { const fullPath = this.resolveRemotePath(path); - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); return new Promise((resolve, reject) => { sftp.open(fullPath, 'r', (err, handle) => { @@ -217,7 +254,7 @@ export class RemoteFileSystem implements IFileSystem { */ async write(path: string, content: string): Promise { const fullPath = this.resolveRemotePath(path); - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); // Ensure parent directory exists const lastSlash = fullPath.lastIndexOf('/'); @@ -312,7 +349,7 @@ export class RemoteFileSystem implements IFileSystem { .join(' '); try { - const result = await this.sshService.executeCommand(this.connectionId, command); + const result = await this.exec(command); const lines = result.stdout.split('\n').filter((line) => line.trim()); @@ -383,12 +420,24 @@ export class RemoteFileSystem implements IFileSystem { } } + async mkdir(dirPath: string, options?: { recursive?: boolean }): Promise { + const fullPath = this.resolveRemotePath(dirPath); + const sftp = await this.getSftp(); + if (options?.recursive) { + await 
this.ensureRemoteDir(sftp, fullPath); + } else { + await new Promise((resolve, reject) => { + sftp.mkdir(fullPath, (err) => (err ? reject(this.mapSftpError(err, fullPath)) : resolve())); + }); + } + } + /** * Get file/directory metadata via SFTP */ async stat(path: string): Promise { const fullPath = this.resolveRemotePath(path); - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); return new Promise((resolve, reject) => { sftp.stat(fullPath, (err, stats) => { @@ -441,7 +490,7 @@ export class RemoteFileSystem implements IFileSystem { const command = `grep -rn ${caseFlag} ${includeFilter} -e ${escapedPattern} ${quoteShellArg(basePath)} 2>/dev/null | head -n ${maxResults}`; try { - const result = await this.sshService.executeCommand(this.connectionId, command); + const result = await this.exec(command); // If grep returns non-zero exit but no stderr, it just means no matches if (result.exitCode !== 0 && result.exitCode !== 1) { @@ -449,7 +498,7 @@ export class RemoteFileSystem implements IFileSystem { return { matches: [], total: 0, filesSearched: 0 }; } - const matches: import('./types').SearchMatch[] = []; + const matches: import('../types').SearchMatch[] = []; const lines = result.stdout.split('\n').filter((line) => line.trim()); const seenFiles = new Set(); @@ -520,12 +569,12 @@ export class RemoteFileSystem implements IFileSystem { return { success: false, error: `File not found: ${path}` }; } - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); if (entry.type === 'dir') { // For directories, use SSH exec to recursively remove const command = `rm -rf ${quoteShellArg(fullPath)}`; - const result = await this.sshService.executeCommand(this.connectionId, command); + const result = await this.exec(command); if (result.exitCode !== 0) { return { success: false, error: result.stderr || 'Failed to remove directory' }; @@ -570,7 +619,7 @@ export class RemoteFileSystem 
implements IFileSystem { } const fullPath = this.resolveRemotePath(path); - const sftp = await this.sshService.getSftp(this.connectionId); + const sftp = await this.getSftp(); return new Promise((resolve, reject) => { sftp.open(fullPath, 'r', (err, handle) => { @@ -637,20 +686,46 @@ export class RemoteFileSystem implements IFileSystem { } /** - * Ensure connection is active, reconnect if needed - * Note: Actual reconnection logic is handled by SshService + * Read (or auto-create) the project's .emdash.json config file via SFTP */ - private async ensureConnected(): Promise { - const connections = this.sshService.listConnections(); - if (!connections.includes(this.connectionId)) { - throw new FileSystemError( - 'SSH connection not found', - FileSystemErrorCodes.CONNECTION_ERROR, - this.connectionId - ); + async getProjectConfig(): Promise<{ success: boolean; content?: string; error?: string }> { + try { + const result = await this.read('.emdash.json').catch(async (err: unknown) => { + const code = (err as FileSystemError).code; + if (code !== FileSystemErrorCodes.NOT_FOUND) throw err; + // File doesn't exist — create with defaults then return defaults + await this.write('.emdash.json', DEFAULT_EMDASH_CONFIG); + return { + content: DEFAULT_EMDASH_CONFIG, + truncated: false, + totalSize: Buffer.byteLength(DEFAULT_EMDASH_CONFIG), + }; + }); + return { success: true, content: result.content }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err) }; + } + } + + /** + * Write the project's .emdash.json config file via SFTP after validating JSON + */ + async saveProjectConfig(content: string): Promise<{ success: boolean; error?: string }> { + try { + JSON.parse(content); + } catch { + return { success: false, error: 'Invalid JSON format' }; + } + try { + await this.write('.emdash.json', content); + return { success: true }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err) }; } } + // ─── Private utilities ──────────────────────────────────────────────────── + /** * Build absolute remote path from relative path * Provides path traversal protection @@ -803,4 +878,66 @@ export class RemoteFileSystem implements IFileSystem { // Default to unknown error return new FileSystemError(`Filesystem error: ${message}`, FileSystemErrorCodes.UNKNOWN, path); } + + watch( + callback: (events: FileWatchEvent[]) => void, + options: { debounceMs?: number } = {} + ): FileWatcher { + const interval = options.debounceMs ?? 4000; + let watched: string[] = []; + // Map from dirPath → previous entries (keyed by relative entry path) + const snapshots = new Map>(); + + const poll = async () => { + for (const dirPath of watched) { + let result: FileListResult | null = null; + try { + result = await this.list(dirPath, { includeHidden: true }); + } catch { + continue; + } + + const currMap = new Map(result.entries.map((e) => [e.path, e])); + const prevMap = snapshots.get(dirPath); + snapshots.set(dirPath, currMap); + + if (!prevMap) continue; + + const evts: FileWatchEvent[] = []; + for (const [p, e] of currMap) { + if (!prevMap.has(p)) + evts.push({ + type: 'create', + entryType: e.type === 'dir' ? 'directory' : 'file', + path: p, + }); + } + for (const [p, e] of prevMap) { + if (!currMap.has(p)) + evts.push({ + type: 'delete', + entryType: e.type === 'dir' ? 
'directory' : 'file', + path: p, + }); + } + if (evts.length) callback(evts); + } + }; + + const timer = setInterval(() => { + void poll(); + }, interval); + + return { + update(paths: string[]) { + watched = paths; + for (const p of snapshots.keys()) { + if (!paths.includes(p)) snapshots.delete(p); + } + }, + close() { + clearInterval(timer); + }, + }; + } } diff --git a/src/main/services/fs/types.ts b/src/main/core/fs/types.ts similarity index 69% rename from src/main/services/fs/types.ts rename to src/main/core/fs/types.ts index a34047fe2..394b6d061 100644 --- a/src/main/services/fs/types.ts +++ b/src/main/core/fs/types.ts @@ -3,6 +3,19 @@ * Provides unified interface for local and remote (SSH/SFTP) filesystem operations */ +import type { FileWatchEvent } from '@shared/fs'; + +export type { FileWatchEvent }; + +/** + * Handle returned by FileSystemProvider.watch(). + * Call update() to change the set of watched paths, close() to stop. + */ +export interface FileWatcher { + update(paths: string[]): void; + close(): void; +} + /** * File entry metadata returned by filesystem operations */ @@ -129,7 +142,7 @@ export interface SearchMatch { * Filesystem interface abstraction * Implementations: LocalFileSystem (local disk), RemoteFileSystem (SFTP over SSH) */ -export interface IFileSystem { +export interface FileSystemProvider { /** * List directory contents * @param path - Directory path relative to project root @@ -181,7 +194,7 @@ export interface IFileSystem { * @param path - File path relative to project root * @returns Promise resolving to success status */ - remove?(path: string): Promise<{ success: boolean; error?: string }>; + remove(path: string): Promise<{ success: boolean; error?: string }>; /** * Read image file as base64 data URL @@ -195,7 +208,75 @@ export interface IFileSystem { size?: number; error?: string; }>; + + /** + * Read (or auto-create) the project's .emdash.json config file + * @returns Promise resolving to the config file content + */ + 
getProjectConfig?(): Promise<{ success: boolean; content?: string; error?: string }>; + + /** + * Write the project's .emdash.json config file after validating JSON + * @param content - JSON string to write + * @returns Promise resolving to success status + */ + saveProjectConfig?(content: string): Promise<{ success: boolean; error?: string }>; + + /** + * Copy a local file into the project's .emdash attachments directory. + * Only supported on local filesystems (srcPath is an absolute local path). + * @param srcPath - Absolute local path of the source file + * @param subdir - Subdirectory inside .emdash/ (defaults to "attachments") + * @returns Promise resolving to the saved file paths + */ + saveAttachment?( + srcPath: string, + subdir?: string + ): Promise<{ + success: boolean; + absPath?: string; + relPath?: string; + fileName?: string; + error?: string; + }>; + + mkdir(diPath: string, options?: { recursive?: boolean }): Promise; + + /** + * Watch the worktree for filesystem changes. Returns a FileWatcher handle; + * call update() to hint which paths matter (SSH uses this for polling), + * call close() to stop. Batches events and delivers them via callback. + * Optional — not all implementations support watching. + * + * Local: uses @parcel/watcher for a single recursive native-OS subscription. + * SSH: polls directories passed to update() at a fixed interval. + */ + watch?( + callback: (events: FileWatchEvent[]) => void, + options?: { debounceMs?: number } + ): FileWatcher; +} + +/** + * Default content written to .emdash.json when the file is first created. + * Shared between LocalFileSystem and SshFileSystem so both produce identical defaults. 
+ */ +export const DEFAULT_EMDASH_CONFIG = `{ + "preservePatterns": [ + ".env", + ".env.keys", + ".env.local", + ".env.*.local", + ".envrc", + "docker-compose.override.yml" + ], + "scripts": { + "setup": "", + "run": "", + "teardown": "" + } } +`; /** * Base error class for filesystem operations diff --git a/src/main/core/git/controller.ts b/src/main/core/git/controller.ts new file mode 100644 index 000000000..5fb2c7b10 --- /dev/null +++ b/src/main/core/git/controller.ts @@ -0,0 +1,374 @@ +import type { DiffBase } from '@shared/git'; +import { createRPCController } from '@shared/ipc/rpc'; +import { resolveTask } from '@main/core/projects/utils'; +import { log } from '@main/lib/logger'; +import { err, ok } from '@main/lib/result'; + +export const gitController = createRPCController({ + getStatus: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const changes = await env.git.getStatus(); + return ok({ changes }); + } catch (e) { + log.error('gitCtrl.getStatus failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getChangedFiles: async (projectId: string, taskId: string, base: DiffBase) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const changes = await env.git.getChangedFiles(base); + return ok({ changes }); + } catch (e) { + log.error('gitCtrl.getChangedFiles failed', { projectId, taskId, base, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getFileAtHead: async (projectId: string, taskId: string, filePath: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const content = await env.git.getFileAtHead(filePath); + return ok({ content }); + } catch (e) { + log.error('gitCtrl.getFileAtHead failed', { 
projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getFileAtRef: async (projectId: string, taskId: string, filePath: string, ref: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const content = await env.git.getFileAtRef(filePath, ref); + return ok({ content }); + } catch (e) { + log.error('gitCtrl.getFileAtRef failed', { projectId, taskId, filePath, ref, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getFileAtIndex: async (projectId: string, taskId: string, filePath: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const content = await env.git.getFileAtIndex(filePath); + return ok({ content }); + } catch (e) { + log.error('gitCtrl.getFileAtIndex failed', { projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getFileDiff: async (projectId: string, taskId: string, filePath: string, base?: DiffBase) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const diff = await env.git.getFileDiff(filePath, base); + return ok({ diff }); + } catch (e) { + log.error('gitCtrl.getFileDiff failed', { projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + stageFile: async (projectId: string, taskId: string, filePath: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.stageFiles([filePath]); + return ok(); + } catch (e) { + log.error('gitCtrl.stageFile failed', { projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + stageFiles: async (projectId: string, taskId: string, 
filePaths: string[]) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.stageFiles(filePaths); + return ok(); + } catch (e) { + log.error('gitCtrl.stageFiles failed', { projectId, taskId, filePaths, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + stageAllFiles: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.stageAllFiles(); + return ok(); + } catch (e) { + log.error('gitCtrl.stageAllFiles failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + unstageFile: async (projectId: string, taskId: string, filePath: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.unstageFiles([filePath]); + return ok(); + } catch (e) { + log.error('gitCtrl.unstageFile failed', { projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + unstageFiles: async (projectId: string, taskId: string, filePaths: string[]) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.unstageFiles(filePaths); + return ok(); + } catch (e) { + log.error('gitCtrl.unstageFiles failed', { projectId, taskId, filePaths, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + unstageAllFiles: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.unstageAllFiles(); + return ok(); + } catch (e) { + log.error('gitCtrl.unstageAllFiles failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: 
String(e) }); + } + }, + + revertFile: async (projectId: string, taskId: string, filePath: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.revertFiles([filePath]); + return ok(); + } catch (e) { + log.error('gitCtrl.revertFile failed', { projectId, taskId, filePath, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + revertFiles: async (projectId: string, taskId: string, filePaths: string[]) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.revertFiles(filePaths); + return ok(); + } catch (e) { + log.error('gitCtrl.revertFiles failed', { projectId, taskId, filePaths, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + revertAllFiles: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.revertAllFiles(); + return ok(); + } catch (e) { + log.error('gitCtrl.revertAllFiles failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + commit: async (projectId: string, taskId: string, message: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const result = await env.git.commit(message); + return ok({ hash: result.hash }); + } catch (e) { + log.error('gitCtrl.commit failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + fetch: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + await env.git.fetch(); + return ok(); + } catch (e) { + log.error('gitCtrl.fetch failed', { projectId, taskId, error: e }); + 
return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + push: async (projectId: string, taskId: string) => { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + try { + const result = await env.git.push(); + if (!result.success) { + log.error('gitCtrl.push failed', { projectId, taskId, error: result.error }); + return err({ ...result.error, type: 'git_error' as const }); + } + return ok({ output: result.data.output }); + } catch (e) { + log.error('gitCtrl.push failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + pull: async (projectId: string, taskId: string) => { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + try { + const result = await env.git.pull(); + if (!result.success) { + log.error('gitCtrl.pull failed', { projectId, taskId, error: result.error }); + return err({ ...result.error, type: 'git_error' as const }); + } + return ok({ output: result.data.output }); + } catch (e) { + log.error('gitCtrl.pull failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + softReset: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const result = await env.git.softReset(); + return ok({ subject: result.subject, body: result.body }); + } catch (e) { + log.error('gitCtrl.softReset failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getLog: async ( + projectId: string, + taskId: string, + maxCount?: number, + skip?: number, + knownAheadCount?: number + ) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const result = await env.git.getLog({ maxCount, skip, knownAheadCount }); 
+ return ok({ commits: result.commits, aheadCount: result.aheadCount }); + } catch (e) { + log.error('gitCtrl.getLog failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getLatestCommit: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const commit = await env.git.getLatestCommit(); + return ok({ commit }); + } catch (e) { + log.error('gitCtrl.getLatestCommit failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getCommitFiles: async (projectId: string, taskId: string, commitHash: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const files = await env.git.getCommitFiles(commitHash); + return ok({ files }); + } catch (e) { + log.error('gitCtrl.getCommitFiles failed', { projectId, taskId, commitHash, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getCommitFileDiff: async ( + projectId: string, + taskId: string, + commitHash: string, + filePath: string + ) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const diff = await env.git.getCommitFileDiff(commitHash, filePath); + return ok({ diff }); + } catch (e) { + log.error('gitCtrl.getCommitFileDiff failed', { + projectId, + taskId, + commitHash, + filePath, + error: e, + }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getBranchStatus: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const status = await env.git.getBranchStatus(); + return ok(status); + } catch (e) { + log.error('gitCtrl.getBranchStatus failed', { projectId, taskId, error: e }); + 
return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getBranches: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const branches = await env.git.getBranches(); + return ok({ branches }); + } catch (e) { + log.error('gitCtrl.getBranches failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + getDefaultBranch: async (projectId: string, taskId: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const defaultBranch = await env.git.getDefaultBranch(); + return ok(defaultBranch); + } catch (e) { + log.error('gitCtrl.getDefaultBranch failed', { projectId, taskId, error: e }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, + + renameBranch: async (projectId: string, taskId: string, oldBranch: string, newBranch: string) => { + try { + const env = resolveTask(projectId, taskId); + if (!env) return err({ type: 'not_found' as const }); + const result = await env.git.renameBranch(oldBranch, newBranch); + return ok({ remotePushed: result.remotePushed }); + } catch (e) { + log.error('gitCtrl.renameBranch failed', { + projectId, + taskId, + oldBranch, + newBranch, + error: e, + }); + return err({ type: 'git_error' as const, message: String(e) }); + } + }, +}); diff --git a/src/main/core/git/impl/detectGitInfo.ts b/src/main/core/git/impl/detectGitInfo.ts new file mode 100644 index 000000000..ed93c192e --- /dev/null +++ b/src/main/core/git/impl/detectGitInfo.ts @@ -0,0 +1,119 @@ +import { exec } from 'node:child_process'; +import fs from 'node:fs'; +import { join } from 'node:path'; +import { promisify } from 'node:util'; + +const execAsync = promisify(exec); + +const DEFAULT_REMOTE = 'origin'; +const DEFAULT_BRANCH = 'main'; + +export interface GitInfo { + isGitRepo: boolean; + 
remote?: string; + branch?: string; + baseRef: string; + rootPath: string; +} + +export function checkIsValidDirectory(path: string): boolean { + return fs.existsSync(path) && fs.statSync(path).isDirectory(); +} + +async function resolveRealPath(target: string): Promise { + try { + return await fs.promises.realpath(target); + } catch { + return target; + } +} + +function normalizeRemoteName(remote?: string | null): string { + if (!remote) return DEFAULT_REMOTE; + const trimmed = remote.trim(); + if (!trimmed) return ''; + if (/^[A-Za-z0-9._-]+$/.test(trimmed) && !trimmed.includes('://')) return trimmed; + return DEFAULT_REMOTE; +} + +function computeBaseRef(remote?: string | null, branch?: string | null): string { + const remoteName = normalizeRemoteName(remote); + if (branch?.trim()) { + const trimmed = branch.trim(); + if (trimmed.includes('/')) return trimmed; + return remoteName ? `${remoteName}/${trimmed}` : trimmed; + } + return remoteName ? `${remoteName}/${DEFAULT_BRANCH}` : DEFAULT_BRANCH; +} + +async function detectDefaultBranch( + projectPath: string, + remote?: string | null +): Promise { + const remoteName = normalizeRemoteName(remote); + if (!remoteName) { + try { + const { stdout } = await execAsync('git branch --show-current', { cwd: projectPath }); + return stdout.trim() || null; + } catch { + return null; + } + } + try { + const { stdout } = await execAsync(`git remote show ${remoteName}`, { cwd: projectPath }); + const match = stdout.match(/HEAD branch:\s*(\S+)/); + return match ? 
match[1] : null; + } catch { + return null; + } +} + +export async function detectGitInfo(projectPath: string): Promise { + const resolvedPath = await resolveRealPath(projectPath); + const isGitRepo = fs.existsSync(join(resolvedPath, '.git')); + + if (!isGitRepo) { + return { isGitRepo: false, baseRef: DEFAULT_BRANCH, rootPath: resolvedPath }; + } + + let remote: string | undefined; + try { + const { stdout } = await execAsync('git remote get-url origin', { cwd: resolvedPath }); + remote = stdout.trim() || undefined; + } catch {} + + let branch: string | undefined; + try { + const { stdout } = await execAsync('git branch --show-current', { cwd: resolvedPath }); + branch = stdout.trim() || undefined; + } catch {} + + if (!branch) { + const defaultBranch = await detectDefaultBranch(resolvedPath, remote); + branch = defaultBranch ?? undefined; + } + + let rootPath = resolvedPath; + try { + const { stdout } = await execAsync('git rev-parse --show-toplevel', { cwd: resolvedPath }); + const trimmed = stdout.trim(); + if (trimmed) rootPath = await resolveRealPath(trimmed); + } catch {} + + return { + isGitRepo: true, + remote, + branch, + baseRef: computeBaseRef(remote, branch), + rootPath, + }; +} + +export async function isGitRepository(projectPath: string): Promise { + const resolvedPath = await resolveRealPath(projectPath); + return fs.existsSync(join(resolvedPath, '.git')); +} + +export function checkIsGithubRemote(remote?: string): boolean { + return remote ? 
/github\.com[:/]/i.test(remote) : false; +} diff --git a/src/main/core/git/impl/git-repo-utils.test.ts b/src/main/core/git/impl/git-repo-utils.test.ts new file mode 100644 index 000000000..795b9ca93 --- /dev/null +++ b/src/main/core/git/impl/git-repo-utils.test.ts @@ -0,0 +1,310 @@ +import { describe, expect, it, vi } from 'vitest'; +import type { FileSystemProvider } from '@main/core/fs/types'; +import type { ExecFn } from '@main/core/utils/exec'; +import { cloneRepository, ensurePullRequestBranch, initializeNewProject } from './git-repo-utils'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** Build an ExecFn that records every call and returns pre-baked responses. */ +function makeExec(map: Record = {}): ExecFn & { calls: string[][] } { + const calls: string[][] = []; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const fn = async (_cmd: string, args: string[] = [], _opts?: any) => { + calls.push(args); + const key = args.join(' '); + if (key in map) { + return { stdout: map[key], stderr: '' }; + } + // Default: succeed with empty stdout + return { stdout: '', stderr: '' }; + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (fn as any).calls = calls; + return fn as ExecFn & { calls: string[][] }; +} + +/** Build an ExecFn that fails for a specific key. 
*/ +function makeFailingExec( + failKey: string, + errorMessage = 'command failed', + fallbackMap: Record = {} +): ExecFn & { calls: string[][] } { + const calls: string[][] = []; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const fn = async (_cmd: string, args: string[] = [], _opts?: any) => { + calls.push(args); + const key = args.join(' '); + if (key === failKey) { + throw new Error(errorMessage); + } + if (key in fallbackMap) { + return { stdout: fallbackMap[key], stderr: '' }; + } + return { stdout: '', stderr: '' }; + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (fn as any).calls = calls; + return fn as ExecFn & { calls: string[][] }; +} + +/** Build an ExecFn that fails for any key in a set. */ +function makeExecWithFailKeys( + failKeys: Set, + errorMessage = 'command failed' +): ExecFn & { calls: string[][] } { + const calls: string[][] = []; + const fn = async (_cmd: string, args: string[] = []) => { + calls.push(args); + const key = args.join(' '); + if (failKeys.has(key)) { + throw new Error(errorMessage); + } + return { stdout: '', stderr: '' }; + }; + return Object.assign(fn, { calls }) as ExecFn & { calls: string[][] }; +} + +function makeStubFs(overrides: Partial = {}): FileSystemProvider { + return { + list: vi.fn().mockResolvedValue({ entries: [], total: 0 }), + read: vi.fn().mockResolvedValue({ content: '', truncated: false, totalSize: 0 }), + write: vi.fn().mockResolvedValue({ success: true, bytesWritten: 0 }), + exists: vi.fn().mockResolvedValue(true), + stat: vi.fn().mockResolvedValue(null), + search: vi.fn().mockResolvedValue({ matches: [], total: 0 }), + remove: vi.fn().mockResolvedValue({ success: true }), + mkdir: vi.fn().mockResolvedValue(undefined), + ...overrides, + }; +} + +// --------------------------------------------------------------------------- +// cloneRepository +// --------------------------------------------------------------------------- + +describe('cloneRepository', () => { + 
it('creates parent directory and runs git clone', async () => { + const exec = makeExec(); + const fs = makeStubFs(); + + const result = await cloneRepository( + 'https://github.com/org/repo.git', + '/projects/repo', + exec, + fs + ); + + expect(result).toEqual({ success: true }); + expect(fs.mkdir).toHaveBeenCalledWith('/projects', { recursive: true }); + expect(exec.calls).toEqual([['clone', 'https://github.com/org/repo.git', '/projects/repo']]); + }); + + it('returns error on clone failure', async () => { + const exec = makeFailingExec( + 'clone https://github.com/org/repo.git /projects/repo', + 'fatal: repository not found' + ); + const fs = makeStubFs(); + + const result = await cloneRepository( + 'https://github.com/org/repo.git', + '/projects/repo', + exec, + fs + ); + + expect(result.success).toBe(false); + expect(result.error).toContain('repository not found'); + }); + + it('returns error when mkdir fails', async () => { + const exec = makeExec(); + const fs = makeStubFs({ + mkdir: vi.fn().mockRejectedValue(new Error('permission denied')), + }); + + const result = await cloneRepository( + 'https://github.com/org/repo.git', + '/projects/repo', + exec, + fs + ); + + expect(result.success).toBe(false); + expect(result.error).toContain('permission denied'); + }); + + it('handles deeply nested local path', async () => { + const exec = makeExec(); + const fs = makeStubFs(); + + await cloneRepository('git@github.com:org/repo.git', '/a/b/c/d/repo', exec, fs); + + expect(fs.mkdir).toHaveBeenCalledWith('/a/b/c/d', { recursive: true }); + }); +}); + +// --------------------------------------------------------------------------- +// initializeNewProject +// --------------------------------------------------------------------------- + +describe('initializeNewProject', () => { + it('writes README, stages, commits, and pushes to main', async () => { + const exec = makeExec(); + const fs = makeStubFs(); + + await initializeNewProject( + { + repoUrl: 
'https://github.com/org/repo.git', + localPath: '/projects/repo', + name: 'My Project', + description: 'A cool project', + }, + exec, + fs + ); + + // README written with description + expect(fs.write).toHaveBeenCalledWith('README.md', '# My Project\n\nA cool project\n'); + + // Correct git commands in order + expect(exec.calls).toEqual([ + ['add', 'README.md'], + ['commit', '-m', 'Initial commit'], + ['push', '-u', 'origin', 'main'], + ]); + }); + + it('writes README without description when not provided', async () => { + const exec = makeExec(); + const fs = makeStubFs(); + + await initializeNewProject( + { + repoUrl: 'https://github.com/org/repo.git', + localPath: '/projects/repo', + name: 'Bare Project', + }, + exec, + fs + ); + + expect(fs.write).toHaveBeenCalledWith('README.md', '# Bare Project\n'); + }); + + it('falls back to master when push to main fails', async () => { + const exec = makeFailingExec('push -u origin main'); + const fs = makeStubFs(); + + await initializeNewProject( + { + repoUrl: 'https://github.com/org/repo.git', + localPath: '/projects/repo', + name: 'Project', + }, + exec, + fs + ); + + // Should have tried main first, then master + expect(exec.calls).toEqual([ + ['add', 'README.md'], + ['commit', '-m', 'Initial commit'], + ['push', '-u', 'origin', 'main'], + ['push', '-u', 'origin', 'master'], + ]); + }); + + it('throws when both main and master push fail', async () => { + const failKeys = new Set(['push -u origin main', 'push -u origin master']); + const exec = makeExecWithFailKeys(failKeys, 'push failed'); + + const fs = makeStubFs(); + + await expect( + initializeNewProject( + { + repoUrl: 'https://github.com/org/repo.git', + localPath: '/projects/repo', + name: 'Project', + }, + exec, + fs + ) + ).rejects.toThrow('Failed to push to remote repository'); + }); + + it('passes cwd option to all git commands', async () => { + const execOpts: Array<{ args: string[]; opts: { cwd?: string } | undefined }> = []; + const exec: ExecFn = async 
(_cmd, args = [], opts) => { + execOpts.push({ args: [...args], opts }); + return { stdout: '', stderr: '' }; + }; + const fs = makeStubFs(); + + await initializeNewProject( + { + repoUrl: 'https://github.com/org/repo.git', + localPath: '/my/project', + name: 'Test', + }, + exec, + fs + ); + + for (const entry of execOpts) { + expect(entry.opts).toEqual({ cwd: '/my/project' }); + } + }); +}); + +// --------------------------------------------------------------------------- +// ensurePullRequestBranch +// --------------------------------------------------------------------------- + +describe('ensurePullRequestBranch', () => { + it('fetches PR ref into a named branch', async () => { + const exec = makeExec(); + + const result = await ensurePullRequestBranch('/projects/repo', 42, 'feature-branch', exec); + + expect(result).toBe('feature-branch'); + expect(exec.calls).toEqual([ + ['fetch', 'origin', 'refs/pull/42/head:refs/heads/feature-branch', '--force'], + ]); + }); + + it('uses pr/{number} as branch name when branchName is empty', async () => { + const exec = makeExec(); + + const result = await ensurePullRequestBranch('/projects/repo', 99, '', exec); + + expect(result).toBe('pr/99'); + expect(exec.calls[0]).toContain('refs/pull/99/head:refs/heads/pr/99'); + }); + + it('passes cwd to exec', async () => { + let capturedOpts: { cwd?: string; timeout?: number; maxBuffer?: number } | undefined; + const exec: ExecFn = async (_cmd, _args = [], opts) => { + capturedOpts = opts; + return { stdout: '', stderr: '' }; + }; + + await ensurePullRequestBranch('/my/repo', 1, 'branch', exec); + + expect(capturedOpts).toEqual({ cwd: '/my/repo' }); + }); + + it('throws when git fetch fails (no fallback)', async () => { + const exec = makeFailingExec( + 'fetch origin refs/pull/5/head:refs/heads/pr-branch --force', + 'fatal: could not read from remote' + ); + + await expect(ensurePullRequestBranch('/projects/repo', 5, 'pr-branch', exec)).rejects.toThrow( + 'could not read from remote' 
+ ); + }); +}); diff --git a/src/main/core/git/impl/git-repo-utils.ts b/src/main/core/git/impl/git-repo-utils.ts new file mode 100644 index 000000000..3c544fa06 --- /dev/null +++ b/src/main/core/git/impl/git-repo-utils.ts @@ -0,0 +1,126 @@ +/** + * Standalone git utility functions for repository-level operations that don't + * belong on the path-scoped GitService (e.g. cloning, initial project setup, + * fetching PR refs). + * + * All functions accept an ExecFn + FileSystemProvider so they remain testable + * without touching the real filesystem or spawning real processes. + */ + +import type { FileSystemProvider } from '@main/core/fs/types'; +import type { ExecFn } from '@main/core/utils/exec'; + +// --------------------------------------------------------------------------- +// cloneRepository +// --------------------------------------------------------------------------- + +/** + * Clone a git repository to a local path. + * Creates parent directories if they don't exist. + */ +export async function cloneRepository( + repoUrl: string, + localPath: string, + exec: ExecFn, + fs: FileSystemProvider +): Promise<{ success: boolean; error?: string }> { + try { + const dir = parentDir(localPath); + await fs.mkdir(dir, { recursive: true }); + await exec('git', ['clone', repoUrl, localPath]); + return { success: true }; + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : 'Clone failed', + }; + } +} + +// --------------------------------------------------------------------------- +// initializeNewProject +// --------------------------------------------------------------------------- + +export interface InitializeNewProjectParams { + repoUrl: string; + localPath: string; + name: string; + description?: string; +} + +/** + * Initialize a freshly-cloned (empty) project with a README and initial commit. + * + * Steps: + * 1. Write a README.md + * 2. `git add README.md` + * 3. `git commit -m "Initial commit"` + * 4. 
`git push -u origin main` (falls back to `master` if `main` fails) + */ +export async function initializeNewProject( + params: InitializeNewProjectParams, + exec: ExecFn, + fs: FileSystemProvider +): Promise { + const { localPath, name, description } = params; + + const exists = await fs.exists(localPath); + if (!exists) { + throw new Error('Local path does not exist after clone'); + } + + const readmeContent = description ? `# ${name}\n\n${description}\n` : `# ${name}\n`; + await fs.write('README.md', readmeContent); + + const opts = { cwd: localPath }; + await exec('git', ['add', 'README.md'], opts); + await exec('git', ['commit', '-m', 'Initial commit'], opts); + + try { + await exec('git', ['push', '-u', 'origin', 'main'], opts); + } catch { + try { + await exec('git', ['push', '-u', 'origin', 'master'], opts); + } catch { + throw new Error('Failed to push to remote repository'); + } + } +} + +// --------------------------------------------------------------------------- +// ensurePullRequestBranch +// --------------------------------------------------------------------------- + +/** + * Fetch a pull request head ref into a local branch. + * + * Runs: + * git fetch origin refs/pull/{prNumber}/head:refs/heads/{safeBranch} --force + */ +export async function ensurePullRequestBranch( + projectPath: string, + prNumber: number, + branchName: string, + exec: ExecFn +): Promise { + const safeBranch = branchName || `pr/${prNumber}`; + + await exec( + 'git', + ['fetch', 'origin', `refs/pull/${prNumber}/head:refs/heads/${safeBranch}`, '--force'], + { cwd: projectPath } + ); + + return safeBranch; +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** Extract parent directory from a path (last `/`-separated segment removed). 
*/ +function parentDir(p: string): string { + const sep = p.lastIndexOf('/'); + if (sep <= 0) return '/'; + return p.slice(0, sep); +} diff --git a/src/main/core/git/impl/git-service.test.ts b/src/main/core/git/impl/git-service.test.ts new file mode 100644 index 000000000..5774d532d --- /dev/null +++ b/src/main/core/git/impl/git-service.test.ts @@ -0,0 +1,313 @@ +import { describe, expect, it } from 'vitest'; +import type { FileSystemProvider } from '@main/core/fs/types'; +import type { ExecFn } from '@main/core/utils/exec'; +import { GitService } from './git-service'; +import { computeBaseRef } from './git-utils'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** + * Builds an ExecFn that returns pre-baked responses keyed by the joined args + * string. Throws for any unmapped key (surfaces missing mocks early). + */ +function makeExec(map: Record): ExecFn { + return async (_cmd: string, args: string[] = []) => { + const key = args.join(' '); + if (key in map) { + return { stdout: map[key], stderr: '' }; + } + throw Object.assign(new Error(`Unexpected git command: git ${key}`), { + stdout: '', + stderr: `fatal: not expected`, + code: 128, + }); + }; +} + +/** + * Like makeExec but silently returns '' for unmapped keys. Useful when a + * method makes optional/fallback calls that aren't relevant to the test. + */ +function makePermissiveExec(map: Record): ExecFn { + return async (_cmd: string, args: string[] = []) => ({ + stdout: map[args.join(' ')] ?? 
'', + stderr: '', + }); +} + +const BRANCH_FORMAT = + 'branch -a --format=%(refname:short)|%(upstream:short)|%(upstream:track)|%(refname)'; + +const stubFs = {} as FileSystemProvider; + +function makeService(exec: ExecFn): GitService { + return new GitService('/repo', exec, stubFs); +} + +// --------------------------------------------------------------------------- +// getBranches() +// --------------------------------------------------------------------------- + +describe('GitService.getBranches', () => { + it('returns an empty array when stdout is empty', async () => { + const svc = makeService(makeExec({ [BRANCH_FORMAT]: '' })); + expect(await svc.getBranches()).toEqual([]); + }); + + it('categorises a plain local branch correctly', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'main|||refs/heads/main\n', + }) + ); + const branches = await svc.getBranches(); + expect(branches).toHaveLength(1); + expect(branches[0]).toMatchObject({ type: 'local', branch: 'main' }); + }); + + it('categorises a remote tracking branch as type=remote (regression: remotes/ prefix bug)', async () => { + // %(refname:short) gives "origin/main" — not "remotes/origin/main". + // The old code checked startsWith('remotes/') which never matched, so all + // remote branches were misclassified as local. 
+ const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'origin/main|||refs/remotes/origin/main\n', + }) + ); + const branches = await svc.getBranches(); + expect(branches).toHaveLength(1); + expect(branches[0]).toMatchObject({ type: 'remote', branch: 'main', remote: 'origin' }); + }); + + it('skips remotes/origin/HEAD entries', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'origin/HEAD|||refs/remotes/origin/HEAD\n', + }) + ); + expect(await svc.getBranches()).toHaveLength(0); + }); + + it('parses bracketed tracking info [ahead 1, behind 2] (Apple git 2.39.5 format)', async () => { + // Apple git 2.39.5 outputs %(upstream:track) with brackets: [ahead 1, behind 2] + // The ,nobrackets modifier was only added in git 2.40 and caused a fatal error. + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'feature|origin/feature|[ahead 1, behind 2]|refs/heads/feature\n', + }) + ); + const branches = await svc.getBranches(); + expect(branches).toHaveLength(1); + expect(branches[0]).toMatchObject({ + type: 'local', + branch: 'feature', + remote: 'origin', + divergence: { ahead: 1, behind: 2 }, + }); + }); + + it('parses unbracketed tracking info (newer git format: ahead 1, behind 2)', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'feature|origin/feature|ahead 1, behind 2|refs/heads/feature\n', + }) + ); + const branches = await svc.getBranches(); + expect(branches[0]).toMatchObject({ + divergence: { ahead: 1, behind: 2 }, + }); + }); + + it('handles a local branch that is only ahead (no behind)', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'feat|origin/feat|[ahead 3]|refs/heads/feat\n', + }) + ); + const [branch] = await svc.getBranches(); + expect(branch).toMatchObject({ divergence: { ahead: 3, behind: 0 } }); + }); + + it('handles a local branch that is only behind (no ahead)', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'feat|origin/feat|[behind 
5]|refs/heads/feat\n', + }) + ); + const [branch] = await svc.getBranches(); + expect(branch).toMatchObject({ divergence: { ahead: 0, behind: 5 } }); + }); + + it('returns no divergence when track field is empty', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'main|origin/main||refs/heads/main\n', + }) + ); + const [branch] = await svc.getBranches(); + expect(branch).toMatchObject({ type: 'local', branch: 'main' }); + expect((branch as { divergence?: unknown }).divergence).toBeUndefined(); + }); + + it('returns a local branch with no upstream and no divergence', async () => { + const svc = makeService( + makeExec({ + [BRANCH_FORMAT]: 'orphan|||refs/heads/orphan\n', + }) + ); + const [branch] = await svc.getBranches(); + expect(branch).toMatchObject({ type: 'local', branch: 'orphan' }); + expect((branch as { remote?: unknown }).remote).toBeUndefined(); + }); + + it('correctly splits a mixed list into local and remote counts', async () => { + const lines = [ + 'main|||refs/heads/main', + 'feature|origin/feature|[ahead 1]|refs/heads/feature', + 'origin/main|||refs/remotes/origin/main', + 'origin/develop|||refs/remotes/origin/develop', + 'origin/HEAD|||refs/remotes/origin/HEAD', // should be skipped + ].join('\n'); + + const svc = makeService(makeExec({ [BRANCH_FORMAT]: lines })); + const branches = await svc.getBranches(); + const local = branches.filter((b) => b.type === 'local'); + const remote = branches.filter((b) => b.type === 'remote'); + + expect(local).toHaveLength(2); + expect(remote).toHaveLength(2); + }); +}); + +// --------------------------------------------------------------------------- +// getBranchStatus() +// --------------------------------------------------------------------------- + +describe('GitService.getBranchStatus', () => { + it('returns branch name with ahead/behind when upstream is configured', async () => { + const svc = makeService( + makeExec({ + 'branch --show-current': 'main', + 'rev-parse --abbrev-ref 
--symbolic-full-name @{upstream}': 'refs/remotes/origin/main', + 'rev-list --left-right --count @{upstream}...HEAD': '2\t3', + }) + ); + const status = await svc.getBranchStatus(); + expect(status).toEqual({ + branch: 'main', + upstream: 'refs/remotes/origin/main', + ahead: 3, + behind: 2, + }); + }); + + it('returns zeros when no upstream is configured', async () => { + // Both upstream calls throw — the service should swallow them and default to 0. + const exec: ExecFn = async (_cmd, args = []) => { + const key = args.join(' '); + if (key === 'branch --show-current') return { stdout: 'local-only', stderr: '' }; + throw Object.assign(new Error('no upstream'), { code: 128 }); + }; + const svc = makeService(exec); + const status = await svc.getBranchStatus(); + expect(status).toEqual({ branch: 'local-only', upstream: undefined, ahead: 0, behind: 0 }); + }); +}); + +// --------------------------------------------------------------------------- +// getDefaultBranch() +// --------------------------------------------------------------------------- + +describe('GitService.getDefaultBranch', () => { + it('resolves from symbolic-ref cache (heuristic 1) when branch exists locally', async () => { + const svc = makeService( + makePermissiveExec({ + 'symbolic-ref refs/remotes/origin/HEAD --short': 'origin/main', + 'rev-parse --verify refs/heads/main': 'abc123', + }) + ); + const result = await svc.getDefaultBranch(); + expect(result).toEqual({ name: 'main', remote: 'origin', existsLocally: true }); + }); + + it('resolves from symbolic-ref cache when branch does NOT exist locally', async () => { + const exec: ExecFn = async (_cmd, args = []) => { + const key = args.join(' '); + if (key === 'symbolic-ref refs/remotes/origin/HEAD --short') { + return { stdout: 'origin/main', stderr: '' }; + } + // rev-parse for refs/heads/main throws → existsLocally = false + throw Object.assign(new Error('no branch'), { code: 128 }); + }; + const svc = makeService(exec); + const result = await 
svc.getDefaultBranch(); + expect(result).toEqual({ name: 'main', remote: 'origin', existsLocally: false }); + }); + + it('falls back to local branch candidate "main" when symbolic-ref fails', async () => { + const exec: ExecFn = async (_cmd, args = []) => { + const key = args.join(' '); + // heuristic 1 fails + if (key === 'symbolic-ref refs/remotes/origin/HEAD --short') { + throw Object.assign(new Error('no HEAD'), { code: 128 }); + } + // heuristic 2 fails + if (key === 'remote show origin') { + throw Object.assign(new Error('no remote'), { code: 128 }); + } + // heuristic 3: "main" exists locally + if (key === 'rev-parse --verify refs/heads/main') { + return { stdout: 'abc123', stderr: '' }; + } + throw Object.assign(new Error('unexpected'), { code: 128 }); + }; + const svc = makeService(exec); + const result = await svc.getDefaultBranch(); + expect(result).toEqual({ name: 'main', remote: undefined, existsLocally: true }); + }); + + it('falls back to "main" convention when no heuristic resolves', async () => { + const exec: ExecFn = async () => { + throw Object.assign(new Error('nothing works'), { code: 128 }); + }; + const svc = makeService(exec); + const result = await svc.getDefaultBranch(); + expect(result).toEqual({ name: 'main', remote: undefined, existsLocally: false }); + }); +}); + +// --------------------------------------------------------------------------- +// computeBaseRef() — pure utility, no mocking needed +// --------------------------------------------------------------------------- + +describe('computeBaseRef', () => { + it('prefixes branch with remote name when both are provided', () => { + // computeBaseRef(baseRef, remote, branch) — remote is the 2nd argument + expect(computeBaseRef(undefined, 'origin', 'main')).toBe('origin/main'); + }); + + it('uses the provided baseRef when it already contains a slash', () => { + expect(computeBaseRef('origin/develop')).toBe('origin/develop'); + }); + + it('falls back to remote/main when no branch is 
provided', () => { + expect(computeBaseRef(undefined, 'origin')).toBe('origin/main'); + }); + + it('maps a URL remote to "origin" and combines with branch', () => { + // A URL remote (contains "://") is normalised to the "origin" remote name. + expect(computeBaseRef(undefined, 'https://github.com/org/repo.git', 'main')).toBe( + 'origin/main' + ); + }); + + it('returns "main" when all arguments are absent', () => { + expect(computeBaseRef()).toBe('main'); + }); + + it('strips a leading slash from a baseRef that has no remote', () => { + expect(computeBaseRef('/main')).toBe('main'); + }); +}); diff --git a/src/main/core/git/impl/git-service.ts b/src/main/core/git/impl/git-service.ts new file mode 100644 index 000000000..cedaf7ad7 --- /dev/null +++ b/src/main/core/git/impl/git-service.ts @@ -0,0 +1,970 @@ +import type { + Branch, + Commit, + CommitFile, + DefaultBranch, + DiffBase, + DiffLine, + DiffResult, + GitChange, + GitInfo, + LocalBranch, + PullError, + PushError, + RemoteBranch, +} from '@shared/git'; +import type { FileSystemProvider } from '@main/core/fs/types'; +import type { ExecFn } from '@main/core/utils/exec'; +import { err, ok, type Result } from '@main/lib/result'; +import { GitProvider } from '../types'; +import { + computeBaseRef, + mapStatus, + MAX_DIFF_CONTENT_BYTES, + MAX_DIFF_OUTPUT_BYTES, + parseDiffLines, + stripTrailingNewline, +} from './git-utils'; + +export class GitService implements GitProvider { + constructor( + private readonly path: string, + private readonly exec: ExecFn, + private readonly fs: FileSystemProvider + ) {} + + // --------------------------------------------------------------------------- + // Status & staging + // --------------------------------------------------------------------------- + + async getStatus(): Promise { + try { + await this.exec('git', ['rev-parse', '--is-inside-work-tree'], { cwd: this.path }); + } catch { + return []; + } + + const { stdout: statusOutput } = await this.exec( + 'git', + ['status', 
'--porcelain', '--untracked-files=all'], + { cwd: this.path } + ); + + if (!statusOutput.trim()) return []; + + const statusLines = statusOutput + .split('\n') + .map((l) => l.replace(/\r$/, '')) + .filter((l) => l.length > 0); + + // Fetch all staged and unstaged numstat counts in two parallel commands + // instead of N×2 sequential per-file invocations. + const parseNumstat = ( + stdout: string + ): Map => { + const map = new Map(); + for (const l of stdout + .trim() + .split('\n') + .filter((s) => s.trim())) { + const [addStr, delStr, ...pathParts] = l.split('\t'); + const filePath = pathParts.join('\t'); + if (!filePath) continue; + const existing = map.get(filePath) ?? { additions: 0, deletions: 0 }; + existing.additions += addStr === '-' ? 0 : Number.parseInt(addStr ?? '0', 10) || 0; + existing.deletions += delStr === '-' ? 0 : Number.parseInt(delStr ?? '0', 10) || 0; + map.set(filePath, existing); + } + return map; + }; + + const [stagedNumstat, unstagedNumstat] = await Promise.all([ + this.exec('git', ['diff', '--numstat', '--cached'], { cwd: this.path }) + .then((r) => parseNumstat(r.stdout)) + .catch(() => new Map()), + this.exec('git', ['diff', '--numstat'], { cwd: this.path }) + .then((r) => parseNumstat(r.stdout)) + .catch(() => new Map()), + ]); + + const changes: GitChange[] = []; + + for (const line of statusLines) { + const statusCode = line.substring(0, 2); + let filePath = line.substring(3); + if (statusCode.includes('R') && filePath.includes('->')) { + const parts = filePath.split('->'); + filePath = (parts[parts.length - 1] ?? '').trim(); + } + + const status = mapStatus(statusCode); + const isStaged = statusCode[0] !== ' ' && statusCode[0] !== '?'; + + const staged = stagedNumstat.get(filePath); + const unstaged = unstagedNumstat.get(filePath); + let additions = (staged?.additions ?? 0) + (unstaged?.additions ?? 0); + const deletions = (staged?.deletions ?? 0) + (unstaged?.deletions ?? 
0); + + // Untracked files don't appear in git diff output; count lines from content. + if (additions === 0 && deletions === 0 && statusCode.includes('?')) { + try { + const result = await this.fs.read(filePath, MAX_DIFF_CONTENT_BYTES); + if (!result.truncated) { + additions = (result.content.match(/\n/g) ?? []).length; + } + } catch {} + } + + changes.push({ path: filePath, status, additions, deletions, isStaged }); + } + + return changes; + } + + async stageFiles(filePaths: string[]): Promise { + if (filePaths.length === 0) return; + await this.exec('git', ['add', '--', ...filePaths], { cwd: this.path }); + } + + async stageAllFiles(): Promise { + await this.exec('git', ['add', '-A'], { cwd: this.path }); + } + + async unstageFiles(filePaths: string[]): Promise { + if (filePaths.length === 0) return; + try { + await this.exec('git', ['reset', 'HEAD', '--', ...filePaths], { cwd: this.path }); + } catch { + // Fallback for edge cases (e.g. new files with no HEAD): unstage each via rm --cached + for (const filePath of filePaths) { + try { + await this.exec('git', ['reset', 'HEAD', '--', filePath], { cwd: this.path }); + } catch { + await this.exec('git', ['rm', '--cached', '--', filePath], { cwd: this.path }); + } + } + } + } + + async unstageAllFiles(): Promise { + try { + await this.exec('git', ['reset', 'HEAD'], { cwd: this.path }); + } catch { + // Repo may have no commits yet; ignore. 
+ } + } + + async revertFiles(filePaths: string[]): Promise { + if (filePaths.length === 0) return; + + // Determine which files exist in HEAD in a single command + let trackedPaths = new Set(); + try { + const { stdout } = await this.exec( + 'git', + ['ls-tree', '--name-only', 'HEAD', '--', ...filePaths], + { cwd: this.path } + ); + trackedPaths = new Set(stdout.trim().split('\n').filter(Boolean)); + } catch { + // Empty repo — no HEAD yet, all files are untracked + } + + const tracked = filePaths.filter((f) => trackedPaths.has(f)); + const untracked = filePaths.filter((f) => !trackedPaths.has(f)); + + if (tracked.length > 0) { + await this.exec('git', ['checkout', 'HEAD', '--', ...tracked], { cwd: this.path }); + } + + // Untracked files don't exist in git history — remove them from disk + for (const filePath of untracked) { + try { + const exists = await this.fs.exists(filePath); + if (exists) await this.fs.remove(filePath); + } catch {} + } + } + + async revertAllFiles(): Promise { + // Reset index and working tree for all tracked changes back to HEAD, + // then remove any untracked files/directories. + try { + await this.exec('git', ['reset', '--hard', 'HEAD'], { cwd: this.path }); + } catch { + // Repo may have no commits yet; ignore. 
+ } + await this.exec('git', ['clean', '-fd'], { cwd: this.path }); + } + + // --------------------------------------------------------------------------- + // Diffs + // --------------------------------------------------------------------------- + + async getFileAtHead(filePath: string): Promise { + return this.getFileAtRef(filePath, 'HEAD'); + } + + async getFileAtRef(filePath: string, ref: string): Promise { + try { + const { stdout } = await this.exec('git', ['show', `${ref}:${filePath}`], { + cwd: this.path, + maxBuffer: MAX_DIFF_CONTENT_BYTES, + }); + return stripTrailingNewline(stdout); + } catch { + return null; + } + } + + async getFileAtIndex(filePath: string): Promise { + try { + const { stdout } = await this.exec('git', ['show', `:0:${filePath}`], { + cwd: this.path, + maxBuffer: MAX_DIFF_CONTENT_BYTES, + }); + return stripTrailingNewline(stdout); + } catch { + return null; + } + } + + async getFileDiff(filePath: string, base: DiffBase = 'HEAD'): Promise { + const isBranchDiff = base !== 'HEAD' && base !== 'staged'; + const diffArgs = + base === 'staged' + ? ['diff', '--no-color', '--unified=2000', '--cached', '--', filePath] + : isBranchDiff + ? ['diff', '--no-color', '--unified=2000', `${base}...HEAD`, '--', filePath] + : ['diff', '--no-color', '--unified=2000', 'HEAD', '--', filePath]; + + let diffStdout: string | undefined; + try { + const { stdout } = await this.exec('git', diffArgs, { + cwd: this.path, + maxBuffer: MAX_DIFF_OUTPUT_BYTES, + }); + diffStdout = stdout; + } catch {} + + const originalRef = isBranchDiff ? 
base : 'HEAD'; + + const getOriginalContent = async (): Promise => { + try { + const { stdout } = await this.exec('git', ['show', `${originalRef}:${filePath}`], { + cwd: this.path, + maxBuffer: MAX_DIFF_CONTENT_BYTES, + }); + return stripTrailingNewline(stdout); + } catch { + return undefined; + } + }; + + const getModifiedContent = async (): Promise => { + if (isBranchDiff) { + try { + const { stdout } = await this.exec('git', ['show', `HEAD:${filePath}`], { + cwd: this.path, + maxBuffer: MAX_DIFF_CONTENT_BYTES, + }); + return stripTrailingNewline(stdout); + } catch { + return undefined; + } + } + try { + const result = await this.fs.read(filePath, MAX_DIFF_CONTENT_BYTES); + if (result.truncated) return undefined; + return stripTrailingNewline(result.content); + } catch { + return undefined; + } + }; + + if (diffStdout !== undefined) { + const { lines, isBinary } = parseDiffLines(diffStdout); + if (isBinary) return { lines: [], isBinary: true }; + + const [originalContent, modifiedContent] = await Promise.all([ + getOriginalContent(), + getModifiedContent(), + ]); + + if (lines.length === 0) { + if (modifiedContent !== undefined) { + return { + lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), + modifiedContent, + }; + } + if (originalContent !== undefined) { + return { + lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), + originalContent, + }; + } + return { lines: [] }; + } + return { lines, originalContent, modifiedContent }; + } + + const [originalContent, modifiedContent] = await Promise.all([ + getOriginalContent(), + getModifiedContent(), + ]); + + if (modifiedContent !== undefined) { + return { + lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), + originalContent, + modifiedContent, + }; + } + if (originalContent !== undefined) { + return { + lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), + originalContent, + }; + } + 
return { lines: [] }; + } + + async getCommitFileDiff(commitHash: string, filePath: string): Promise { + const getContentAt = async (ref: string): Promise => { + try { + const { stdout } = await this.exec('git', ['show', `${ref}:${filePath}`], { + cwd: this.path, + maxBuffer: MAX_DIFF_CONTENT_BYTES, + }); + return stripTrailingNewline(stdout); + } catch { + return undefined; + } + }; + + let hasParent = true; + try { + await this.exec('git', ['rev-parse', '--verify', `${commitHash}~1`], { cwd: this.path }); + } catch { + hasParent = false; + } + + if (!hasParent) { + const modifiedContent = await getContentAt(commitHash); + if (modifiedContent === undefined) return { lines: [] }; + if (modifiedContent === '') return { lines: [], modifiedContent }; + return { + lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), + modifiedContent, + }; + } + + let diffStdout: string | undefined; + try { + const { stdout } = await this.exec( + 'git', + ['diff', '--no-color', '--unified=2000', `${commitHash}~1`, commitHash, '--', filePath], + { cwd: this.path, maxBuffer: MAX_DIFF_OUTPUT_BYTES } + ); + diffStdout = stdout; + } catch {} + + let diffLines: DiffLine[] = []; + if (diffStdout !== undefined) { + const { lines, isBinary } = parseDiffLines(diffStdout); + if (isBinary) return { lines: [], isBinary: true }; + diffLines = lines; + } + + const [originalContent, modifiedContent] = await Promise.all([ + getContentAt(`${commitHash}~1`), + getContentAt(commitHash), + ]); + + if (diffLines.length > 0) return { lines: diffLines, originalContent, modifiedContent }; + + if (modifiedContent !== undefined && modifiedContent !== '') { + return { + lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), + originalContent, + modifiedContent, + }; + } + if (originalContent !== undefined) { + return { + lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), + originalContent, + modifiedContent, + }; + } + 
return { lines: [], originalContent, modifiedContent }; + } + + // --------------------------------------------------------------------------- + // Commit log + // --------------------------------------------------------------------------- + + async getLog(options?: { + maxCount?: number; + skip?: number; + knownAheadCount?: number; + }): Promise<{ commits: Commit[]; aheadCount: number }> { + const { maxCount = 50, skip = 0, knownAheadCount } = options ?? {}; + + let aheadCount = knownAheadCount ?? -1; + if (aheadCount < 0) { + aheadCount = 0; + try { + const { stdout } = await this.exec('git', ['rev-list', '--count', '@{upstream}..HEAD'], { + cwd: this.path, + }); + aheadCount = Number.parseInt(stdout.trim(), 10) || 0; + } catch { + try { + const { stdout: branchOut } = await this.exec( + 'git', + ['rev-parse', '--abbrev-ref', 'HEAD'], + { cwd: this.path } + ); + const currentBranch = branchOut.trim(); + const { stdout } = await this.exec( + 'git', + ['rev-list', '--count', `origin/${currentBranch}..HEAD`], + { cwd: this.path } + ); + aheadCount = Number.parseInt(stdout.trim(), 10) || 0; + } catch { + try { + const { stdout: defaultBranchOut } = await this.exec( + 'git', + ['symbolic-ref', '--short', 'refs/remotes/origin/HEAD'], + { cwd: this.path } + ); + const defaultBranch = defaultBranchOut.trim(); + const { stdout } = await this.exec( + 'git', + ['rev-list', '--count', `${defaultBranch}..HEAD`], + { cwd: this.path } + ); + aheadCount = Number.parseInt(stdout.trim(), 10) || 0; + } catch { + aheadCount = 0; + } + } + } + } + + const FIELD_SEP = '---FIELD_SEP---'; + const RECORD_SEP = '---RECORD_SEP---'; + const format = `${RECORD_SEP}%H${FIELD_SEP}%s${FIELD_SEP}%an${FIELD_SEP}%aI${FIELD_SEP}%D${FIELD_SEP}%b`; + const { stdout } = await this.exec( + 'git', + ['log', `--max-count=${maxCount}`, `--skip=${skip}`, `--pretty=format:${format}`, '--'], + { cwd: this.path } + ); + + if (!stdout.trim()) return { commits: [], aheadCount }; + + const commits = stdout + 
.split(RECORD_SEP) + .filter((entry) => entry.trim()) + .map((entry, index) => { + const parts = entry.trim().split(FIELD_SEP); + const refs = parts[4] || ''; + const tags = refs + .split(',') + .map((r) => r.trim()) + .filter((r) => r.startsWith('tag: ')) + .map((r) => r.slice(5)); + return { + hash: parts[0] || '', + subject: parts[1] || '', + body: (parts[5] || '').trim(), + author: parts[2] || '', + date: parts[3] || '', + isPushed: skip + index >= aheadCount, + tags, + }; + }); + + return { commits, aheadCount }; + } + + async getLatestCommit(): Promise { + const { commits } = await this.getLog({ maxCount: 1 }); + return commits[0] || null; + } + + async getChangedFiles(base: DiffBase): Promise { + const ref = base === 'staged' ? '--cached' : String(base); + + const parseNumstat = ( + stdout: string + ): Map => { + const map = new Map(); + for (const l of stdout + .trim() + .split('\n') + .filter((s) => s.trim())) { + const [addStr, delStr, ...pathParts] = l.split('\t'); + const filePath = pathParts.join('\t'); + if (!filePath) continue; + const existing = map.get(filePath) ?? { additions: 0, deletions: 0 }; + existing.additions += addStr === '-' ? 0 : Number.parseInt(addStr ?? '0', 10) || 0; + existing.deletions += delStr === '-' ? 0 : Number.parseInt(delStr ?? '0', 10) || 0; + map.set(filePath, existing); + } + return map; + }; + + const diffArgs = + base === 'staged' ? ['diff', '--numstat', '--cached'] : ['diff', '--numstat', ref]; + const nameArgs = + base === 'staged' ? 
['diff', '--name-status', '--cached'] : ['diff', '--name-status', ref]; + + const [numstatResult, nameStatusResult] = await Promise.all([ + this.exec('git', diffArgs, { cwd: this.path }).catch(() => ({ stdout: '' })), + this.exec('git', nameArgs, { cwd: this.path }).catch(() => ({ stdout: '' })), + ]); + + const numstatMap = parseNumstat(numstatResult.stdout); + + const changes: GitChange[] = []; + for (const line of nameStatusResult.stdout.trim().split('\n').filter(Boolean)) { + const parts = line.split('\t'); + const code = parts[0] ?? ''; + const filePath = (parts[parts.length - 1] ?? '').trim(); + if (!filePath) continue; + + const stat = numstatMap.get(filePath); + changes.push({ + path: filePath, + status: mapStatus(code), + additions: stat?.additions ?? 0, + deletions: stat?.deletions ?? 0, + isStaged: base === 'staged', + }); + } + + return changes; + } + + async getCommitFiles(commitHash: string): Promise { + const { stdout } = await this.exec( + 'git', + [ + 'diff-tree', + '--root', + '--no-commit-id', + '-r', + '-m', + '--first-parent', + '--numstat', + commitHash, + ], + { cwd: this.path } + ); + + const { stdout: nameStatus } = await this.exec( + 'git', + [ + 'diff-tree', + '--root', + '--no-commit-id', + '-r', + '-m', + '--first-parent', + '--name-status', + commitHash, + ], + { cwd: this.path } + ); + + const statLines = stdout.trim().split('\n').filter(Boolean); + const statusLines = nameStatus.trim().split('\n').filter(Boolean); + + const statusMap = new Map(); + for (const line of statusLines) { + const [code, ...pathParts] = line.split('\t'); + const filePath = pathParts[pathParts.length - 1] || ''; + statusMap.set(filePath, mapStatus(code ?? '')); + } + + return statLines.map((line) => { + const [addStr, delStr, ...pathParts] = line.split('\t'); + const filePath = pathParts.join('\t'); + return { + path: filePath, + status: statusMap.get(filePath) || 'modified', + additions: addStr === '-' ? 
0 : Number.parseInt(addStr || '0', 10) || 0, + deletions: delStr === '-' ? 0 : Number.parseInt(delStr || '0', 10) || 0, + }; + }); + } + + // --------------------------------------------------------------------------- + // Mutations + // --------------------------------------------------------------------------- + + async commit(message: string): Promise<{ hash: string }> { + if (!message || !message.trim()) throw new Error('Commit message cannot be empty'); + await this.exec('git', ['commit', '-m', message], { cwd: this.path }); + const { stdout } = await this.exec('git', ['rev-parse', 'HEAD'], { cwd: this.path }); + return { hash: stdout.trim() }; + } + + async fetch(): Promise { + await this.exec('git', ['fetch'], { cwd: this.path }); + } + + async push(): Promise> { + const doPush = async (args: string[]): Promise => { + const { stdout, stderr } = await this.exec('git', args, { cwd: this.path }); + return (stdout || stderr || '').trim(); + }; + + try { + const output = await doPush(['push']); + return ok({ output }); + } catch (error: unknown) { + const stderr = (error as { stderr?: string })?.stderr || ''; + const message = stderr || String(error); + + if ( + stderr.includes('has no upstream branch') || + stderr.includes('no upstream configured') || + stderr.includes('upstream branch of your current branch does not match') + ) { + try { + const { stdout: branchOut } = await this.exec('git', ['branch', '--show-current'], { + cwd: this.path, + }); + const output = await doPush(['push', '--set-upstream', 'origin', branchOut.trim()]); + return ok({ output }); + } catch (upstreamError: unknown) { + const upstreamStderr = (upstreamError as { stderr?: string })?.stderr || ''; + return err({ type: 'error', message: upstreamStderr || String(upstreamError) }); + } + } + + if (stderr.includes('[rejected]') || stderr.includes('Updates were rejected')) { + return err({ type: 'rejected', message }); + } + + return err({ type: 'error', message }); + } + } + + async 
publishBranch(branchName: string): Promise> { + const doPush = async (args: string[]): Promise => { + const { stdout, stderr } = await this.exec('git', args, { cwd: this.path }); + return (stdout || stderr || '').trim(); + }; + + try { + const output = await doPush(['push', '--set-upstream', 'origin', branchName]); + return ok({ output }); + } catch (error: unknown) { + const stderr = (error as { stderr?: string })?.stderr || ''; + const message = stderr || String(error); + if (stderr.includes('[rejected]') || stderr.includes('Updates were rejected')) { + return err({ type: 'rejected', message }); + } + return err({ type: 'error', message }); + } + } + + async pull(): Promise> { + try { + const { stdout } = await this.exec('git', ['pull'], { cwd: this.path }); + return ok({ output: stdout.trim() }); + } catch (error: unknown) { + const stdout = (error as { stdout?: string })?.stdout || ''; + const stderr = (error as { stderr?: string })?.stderr || ''; + const message = stderr || String(error); + + if (stdout.includes('CONFLICT') || stderr.includes('CONFLICT')) { + let conflictedFiles: string[] = []; + try { + const { stdout: conflictOut } = await this.exec( + 'git', + ['diff', '--name-only', '--diff-filter=U'], + { cwd: this.path } + ); + conflictedFiles = conflictOut + .split('\n') + .map((f) => f.trim()) + .filter(Boolean); + } catch {} + return err({ type: 'conflict', conflictedFiles, message }); + } + + return err({ type: 'error', message }); + } + } + + async softReset(): Promise<{ subject: string; body: string }> { + try { + await this.exec('git', ['rev-parse', '--verify', 'HEAD~1'], { cwd: this.path }); + } catch { + throw new Error('Cannot undo the initial commit'); + } + + const { commits: log } = await this.getLog({ maxCount: 1 }); + if (log[0]?.isPushed) { + throw new Error('Cannot undo a commit that has already been pushed'); + } + + const { stdout: subject } = await this.exec('git', ['log', '-1', '--pretty=format:%s'], { + cwd: this.path, + }); + const 
{ stdout: body } = await this.exec('git', ['log', '-1', '--pretty=format:%b'], { + cwd: this.path, + }); + + await this.exec('git', ['reset', '--soft', 'HEAD~1'], { cwd: this.path }); + + return { subject: subject.trim(), body: body.trim() }; + } + + // --------------------------------------------------------------------------- + // Branch info + // --------------------------------------------------------------------------- + + async getBranchStatus(): Promise<{ + branch: string; + upstream?: string; + ahead: number; + behind: number; + }> { + const { stdout: branchOut } = await this.exec('git', ['branch', '--show-current'], { + cwd: this.path, + }); + const branch = branchOut.trim(); + + let upstream: string | undefined; + let ahead = 0; + let behind = 0; + + try { + const { stdout } = await this.exec( + 'git', + ['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{upstream}'], + { cwd: this.path } + ); + upstream = stdout.trim() || undefined; + } catch {} + + try { + const { stdout } = await this.exec( + 'git', + ['rev-list', '--left-right', '--count', '@{upstream}...HEAD'], + { cwd: this.path } + ); + const parts = stdout.trim().split(/\s+/); + if (parts.length >= 2) { + behind = Number.parseInt(parts[0] || '0', 10) || 0; + ahead = Number.parseInt(parts[1] || '0', 10) || 0; + } + } catch {} + + return { branch, upstream, ahead, behind }; + } + + async getBranches(): Promise { + const { stdout } = await this.exec( + 'git', + ['branch', '-a', '--format=%(refname:short)|%(upstream:short)|%(upstream:track)|%(refname)'], + { cwd: this.path } + ); + + const branches: Branch[] = []; + + for (const line of stdout.split('\n')) { + const trimmed = line.trim(); + if (!trimmed) continue; + + const [refname, upstreamRef, track, fullRef] = trimmed.split('|'); + + if (fullRef?.startsWith('refs/remotes/')) { + const withoutPrefix = fullRef.slice('refs/remotes/'.length); + if (withoutPrefix.includes('HEAD')) continue; + const slashIdx = withoutPrefix.indexOf('/'); + const 
remote = slashIdx === -1 ? withoutPrefix : withoutPrefix.slice(0, slashIdx); + const branchName = slashIdx === -1 ? '' : withoutPrefix.slice(slashIdx + 1); + const entry: RemoteBranch = { type: 'remote', branch: branchName, remote }; + branches.push(entry); + } else { + const entry: LocalBranch = { type: 'local', branch: refname }; + if (upstreamRef) { + const slashIdx = upstreamRef.indexOf('/'); + entry.remote = slashIdx === -1 ? upstreamRef : upstreamRef.slice(0, slashIdx); + if (track) { + const ahead = Number.parseInt(/ahead (\d+)/.exec(track)?.[1] ?? '0', 10); + const behind = Number.parseInt(/behind (\d+)/.exec(track)?.[1] ?? '0', 10); + entry.divergence = { ahead, behind }; + } + } + branches.push(entry); + } + } + + return branches; + } + + async getDefaultBranch(): Promise { + // Heuristic 1: ask the remote what its HEAD points to (fast, no network call needed + // because git caches this in refs/remotes/origin/HEAD after a fetch/clone). + try { + const { stdout } = await this.exec( + 'git', + ['symbolic-ref', 'refs/remotes/origin/HEAD', '--short'], + { cwd: this.path } + ); + const ref = stdout.trim(); // e.g. "origin/main" + if (ref) { + const slashIdx = ref.indexOf('/'); + const remote = slashIdx === -1 ? 'origin' : ref.slice(0, slashIdx); + const name = slashIdx === -1 ? ref : ref.slice(slashIdx + 1); + const existsLocally = await this._branchExistsLocally(name); + return { name, remote, existsLocally }; + } + } catch {} + + // Heuristic 2: ask the remote directly (requires a network call). + try { + const { stdout } = await this.exec('git', ['remote', 'show', 'origin'], { cwd: this.path }); + const match = /HEAD branch:\s*(\S+)/.exec(stdout); + if (match?.[1]) { + const name = match[1]; + const existsLocally = await this._branchExistsLocally(name); + return { name, remote: 'origin', existsLocally }; + } + } catch {} + + // Heuristic 3: fall back to well-known default branch names in preference order. 
+ for (const candidate of ['main', 'master', 'develop', 'trunk']) { + if (await this._branchExistsLocally(candidate)) { + return { name: candidate, remote: undefined, existsLocally: true }; + } + } + + // Last resort: return "main" as a convention. + return { name: 'main', remote: undefined, existsLocally: false }; + } + + private async _branchExistsLocally(branch: string): Promise { + try { + await this.exec('git', ['rev-parse', '--verify', `refs/heads/${branch}`], { + cwd: this.path, + }); + return true; + } catch { + return false; + } + } + + async getRemotes(): Promise<{ name: string; url: string }[]> { + try { + const { stdout } = await this.exec('git', ['remote', '-v'], { cwd: this.path }); + const seen = new Set(); + const remotes: { name: string; url: string }[] = []; + for (const line of stdout.split('\n')) { + const match = /^(\S+)\s+(\S+)\s+\(fetch\)$/.exec(line.trim()); + if (match?.[1] && match[2] && !seen.has(match[1])) { + seen.add(match[1]); + remotes.push({ name: match[1], url: match[2] }); + } + } + return remotes; + } catch { + return []; + } + } + + async createBranch(name: string, from: string, syncWithRemote = true): Promise { + if (syncWithRemote) { + await this.exec('git', ['fetch', 'origin'], { cwd: this.path }).catch(() => {}); + } + const base = syncWithRemote ? 
`origin/${from}` : `refs/heads/${from}`; + await this.exec('git', ['branch', '--no-track', name, base], { cwd: this.path }); + } + + async renameBranch(oldBranch: string, newBranch: string): Promise<{ remotePushed: boolean }> { + let remotePushed = false; + try { + const { stdout } = await this.exec('git', ['config', '--get', `branch.${oldBranch}.remote`], { + cwd: this.path, + }); + remotePushed = Boolean(stdout.trim()); + } catch {} + + await this.exec('git', ['branch', '-m', oldBranch, newBranch], { cwd: this.path }); + + if (remotePushed) { + try { + await this.exec('git', ['push', 'origin', '--delete', oldBranch], { cwd: this.path }); + } catch {} + await this.exec('git', ['push', '-u', 'origin', newBranch], { cwd: this.path }); + } + + return { remotePushed }; + } + + async deleteBranch(branch: string, force = true): Promise { + const flag = force ? '-D' : '-d'; + await this.exec('git', ['branch', flag, branch], { cwd: this.path }); + } + + // --------------------------------------------------------------------------- + // Repo info + // --------------------------------------------------------------------------- + + async detectInfo(): Promise { + try { + await this.exec('git', ['rev-parse', '--is-inside-work-tree'], { cwd: this.path }); + } catch { + return { isGitRepo: false, baseRef: 'main', rootPath: this.path }; + } + + let remote: string | undefined; + try { + const { stdout } = await this.exec('git', ['remote', 'get-url', 'origin'], { + cwd: this.path, + }); + remote = stdout.trim() || undefined; + } catch {} + + let branch: string | undefined; + try { + const { stdout } = await this.exec('git', ['branch', '--show-current'], { cwd: this.path }); + branch = stdout.trim() || undefined; + } catch {} + + if (!branch) { + try { + const { stdout } = await this.exec('git', ['remote', 'show', 'origin'], { cwd: this.path }); + const match = /HEAD branch:\s*(\S+)/.exec(stdout); + branch = match?.[1] ?? 
undefined; + } catch {} + } + + let rootPath = this.path; + try { + const { stdout } = await this.exec('git', ['rev-parse', '--show-toplevel'], { + cwd: this.path, + }); + const trimmed = stdout.trim(); + if (trimmed) rootPath = trimmed; + } catch {} + + return { + isGitRepo: true, + remote, + branch, + baseRef: computeBaseRef(remote, branch), + rootPath, + }; + } +} diff --git a/src/main/core/git/impl/git-utils.ts b/src/main/core/git/impl/git-utils.ts new file mode 100644 index 000000000..1804b7c32 --- /dev/null +++ b/src/main/core/git/impl/git-utils.ts @@ -0,0 +1,103 @@ +import type { DiffLine, GitChangeStatus } from '@shared/git'; + +/** Maximum bytes for fetching file content in diffs. */ +export const MAX_DIFF_CONTENT_BYTES = 512 * 1024; + +/** Maximum bytes for `git diff` output (larger than content limit due to headers/context). */ +export const MAX_DIFF_OUTPUT_BYTES = 10 * 1024 * 1024; + +/** Headers emitted by `git diff` that should be skipped when parsing hunks. */ +const DIFF_HEADER_PREFIXES = [ + 'diff ', + 'index ', + '--- ', + '+++ ', + '@@', + 'new file mode', + 'old file mode', + 'deleted file mode', + 'similarity index', + 'rename from', + 'rename to', + 'Binary files', +]; + +/** + * Map a git status code (porcelain or diff-tree) to a typed GitChangeStatus. + * Works for both two-char porcelain codes (e.g. ' M', 'A ', '??') and + * single-letter diff-tree codes (e.g. 'A', 'D', 'R100'). + */ +export function mapStatus(code: string): GitChangeStatus { + if (code.includes('U') || code === 'AA' || code === 'DD') return 'conflicted'; + if (code.includes('A') || code.includes('?')) return 'added'; + if (code.includes('D')) return 'deleted'; + if (code.includes('R')) return 'renamed'; + return 'modified'; +} + +/** Strip exactly one trailing newline, if present. */ +export function stripTrailingNewline(s: string): string { + return s.endsWith('\n') ? s.slice(0, -1) : s; +} + +/** Parse raw `git diff` output into structured diff lines, skipping headers. 
*/ +export function parseDiffLines(stdout: string): { lines: DiffLine[]; isBinary: boolean } { + const result: DiffLine[] = []; + for (const line of stdout.split('\n')) { + if (!line) continue; + if (DIFF_HEADER_PREFIXES.some((p) => line.startsWith(p))) continue; + const prefix = line[0]; + const content = line.slice(1); + if (prefix === '\\') continue; + if (prefix === ' ') result.push({ left: content, right: content, type: 'context' }); + else if (prefix === '-') result.push({ left: content, type: 'del' }); + else if (prefix === '+') result.push({ right: content, type: 'add' }); + else result.push({ left: line, right: line, type: 'context' }); + } + const isBinary = result.length === 0 && stdout.includes('Binary files'); + return { lines: result, isBinary }; +} + +/** + * Strips the remote prefix from a fully-qualified remote tracking ref. + * e.g. "origin/main" → "main", "main" → "main" + */ +export function bareRefName(ref: string): string { + const slash = ref.indexOf('/'); + return slash !== -1 ? ref.slice(slash + 1) : ref; +} + +export function computeBaseRef( + baseRef?: string | null, + remote?: string | null, + branch?: string | null +): string { + const remoteName = (() => { + const trimmed = (remote ?? '').trim(); + if (!trimmed) return ''; + if (/^[A-Za-z0-9._-]+$/.test(trimmed) && !trimmed.includes('://')) return trimmed; + return 'origin'; + })(); + + const normalize = (value?: string | null): string | undefined => { + if (!value) return undefined; + const trimmed = value.trim(); + if (!trimmed || trimmed.includes('://')) return undefined; + + if (trimmed.includes('/')) { + const [head, ...rest] = trimmed.split('/'); + const branchPart = rest.join('/').replace(/^\/+/, ''); + if (head && branchPart) return `${head}/${branchPart}`; + if (!head && branchPart) { + return remoteName ? `${remoteName}/${branchPart}` : branchPart; + } + return undefined; + } + + const suffix = trimmed.startsWith('/') ? trimmed.slice(1) : trimmed; + return remoteName ? 
`${remoteName}/${suffix}` : suffix; + }; + + const defaultBranch = remoteName ? `${remoteName}/main` : 'main'; + return normalize(baseRef) ?? normalize(branch) ?? defaultBranch; +} diff --git a/src/main/core/git/types.ts b/src/main/core/git/types.ts new file mode 100644 index 000000000..bc59c7c18 --- /dev/null +++ b/src/main/core/git/types.ts @@ -0,0 +1,62 @@ +import { + Branch, + Commit, + CommitFile, + DefaultBranch, + DiffBase, + DiffResult, + GitChange, + GitInfo, + PullError, + PushError, +} from '@shared/git'; +import type { Result } from '@main/lib/result'; + +export interface GitProvider { + getStatus(): Promise; + getChangedFiles(base: DiffBase): Promise; + + getFileDiff(filePath: string, base?: DiffBase): Promise; + getFileAtHead(filePath: string): Promise; + getFileAtRef(filePath: string, ref: string): Promise; + getFileAtIndex(filePath: string): Promise; + getCommitFileDiff(commitHash: string, filePath: string): Promise; + + stageFiles(filePaths: string[]): Promise; + stageAllFiles(): Promise; + unstageFiles(filePaths: string[]): Promise; + unstageAllFiles(): Promise; + revertFiles(filePaths: string[]): Promise; + revertAllFiles(): Promise; + + getLog(options?: { + maxCount?: number; + skip?: number; + knownAheadCount?: number; + }): Promise<{ commits: Commit[]; aheadCount: number }>; + getLatestCommit(): Promise; + getCommitFiles(commitHash: string): Promise; + + commit(message: string): Promise<{ hash: string }>; + fetch(): Promise; + push(): Promise>; + publishBranch(branchName: string): Promise>; + pull(): Promise>; + softReset(): Promise<{ subject: string; body: string }>; + + getBranchStatus(): Promise<{ + branch: string; + upstream?: string; + ahead: number; + behind: number; + }>; + + getBranches(): Promise; + getDefaultBranch(): Promise; + getRemotes(): Promise<{ name: string; url: string }[]>; + createBranch(name: string, from: string, syncWithRemote?: boolean): Promise; + renameBranch(oldBranch: string, newBranch: string): Promise<{ 
remotePushed: boolean }>; + deleteBranch(branch: string, force?: boolean): Promise; + + detectInfo(): Promise; +} diff --git a/src/main/core/github/controller.ts b/src/main/core/github/controller.ts new file mode 100644 index 000000000..2223263bb --- /dev/null +++ b/src/main/core/github/controller.ts @@ -0,0 +1,334 @@ +import { homedir } from 'node:os'; +import * as path from 'node:path'; +import type { + GitHubAuthResponse, + GitHubConnectResponse, + GitHubStatusResponse, +} from '@shared/github'; +import { createRPCController } from '@shared/ipc/rpc'; +import { ACCOUNT_CONFIG } from '@main/core/account/config'; +import { LocalFileSystem } from '@main/core/fs/impl/local-fs'; +import { cloneRepository, initializeNewProject } from '@main/core/git/impl/git-repo-utils'; +import { githubAuthService } from '@main/core/github/services/github-auth-service'; +import { issueService } from '@main/core/github/services/issue-service'; +import { repoService } from '@main/core/github/services/repo-service'; +import { getLocalExec } from '@main/core/utils/exec'; +import { log } from '@main/lib/logger'; + +export const githubController = createRPCController({ + getStatus: async (): Promise => { + try { + const authenticated = await githubAuthService.isAuthenticated(); + + const [user, tokenSource] = authenticated + ? 
await Promise.all([ + githubAuthService.getCurrentUser(), + githubAuthService.getTokenSource(), + ]) + : [null, null]; + + return { authenticated, user, tokenSource }; + } catch (error) { + log.error('GitHub status check failed:', error); + return { authenticated: false, user: null, tokenSource: null }; + } + }, + + auth: async (): Promise => { + try { + return await githubAuthService.startDeviceFlowAuth(); + } catch (error) { + log.error('GitHub authentication failed:', error); + return { success: false, error: 'Authentication failed' }; + } + }, + + connectOAuth: async (): Promise => { + try { + const { baseUrl } = ACCOUNT_CONFIG.authServer; + return await githubAuthService.startOAuthFlow(baseUrl); + } catch (error) { + log.error('GitHub OAuth connect failed:', error); + return { success: false, error: 'OAuth connection failed' }; + } + }, + + authCancel: async () => { + try { + githubAuthService.cancelAuth(); + return { success: true }; + } catch (error) { + log.error('Failed to cancel GitHub auth:', error); + return { success: false, error: 'Failed to cancel' }; + } + }, + + isAuthenticated: async () => { + try { + return await githubAuthService.isAuthenticated(); + } catch (error) { + log.error('GitHub authentication check failed:', error); + return false; + } + }, + + logout: async () => { + try { + await githubAuthService.logout(); + return { success: true }; + } catch (error) { + log.error('GitHub logout failed:', error); + return { success: false, error: 'Logout failed' }; + } + }, + + getUser: async () => { + try { + return await githubAuthService.getCurrentUser(); + } catch (error) { + log.error('Failed to get user info:', error); + return null; + } + }, + + storeToken: async (token: string) => { + try { + await githubAuthService.storeToken(token); + return { success: true }; + } catch (error) { + log.error('Failed to store token:', error); + return { success: false, error: 'Failed to store token' }; + } + }, + + issuesList: async (nameWithOwner: string, 
limit?: number) => { + try { + const issues = await issueService.listIssues(nameWithOwner, limit ?? 50); + return { success: true, issues }; + } catch (error) { + const message = error instanceof Error ? error.message : 'Unable to list issues'; + return { success: false, error: message }; + } + }, + + issuesSearch: async (nameWithOwner: string, searchTerm: string, limit?: number) => { + try { + const issues = await issueService.searchIssues(nameWithOwner, searchTerm, limit ?? 20); + return { success: true, issues }; + } catch (error) { + const message = error instanceof Error ? error.message : 'Unable to search issues'; + return { success: false, error: message }; + } + }, + + issuesGet: async (nameWithOwner: string, issueNumber: number) => { + try { + const issue = await issueService.getIssue(nameWithOwner, issueNumber); + return { success: !!issue, issue: issue ?? undefined }; + } catch (error) { + const message = error instanceof Error ? error.message : 'Unable to get issue'; + return { success: false, error: message }; + } + }, + + // -- Repositories -------------------------------------------------------- + + getRepositories: async () => { + try { + return await repoService.listRepositories(); + } catch (error) { + log.error('Failed to get repositories:', error); + return []; + } + }, + + getOwners: async () => { + try { + const owners = await repoService.getOwners(); + return { success: true, owners }; + } catch (error) { + log.error('Failed to get owners:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to get owners', + }; + } + }, + + createRepository: async (params: { + name: string; + owner: string; + description?: string; + isPrivate?: boolean; + visibility?: 'public' | 'private'; + }) => { + try { + const isPrivate = params.isPrivate ?? 
params.visibility === 'private'; + const repoInfo = await repoService.createRepository({ + name: params.name, + owner: params.owner, + description: params.description, + isPrivate, + }); + return { + success: true, + repoUrl: repoInfo.url, + nameWithOwner: repoInfo.nameWithOwner, + defaultBranch: repoInfo.defaultBranch, + }; + } catch (error) { + log.error('Failed to create repository:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to create repository', + }; + } + }, + + deleteRepository: async (owner: string, name: string) => { + try { + await repoService.deleteRepository(owner, name); + return { success: true }; + } catch (error) { + log.error('Failed to delete repository:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to delete repository', + }; + } + }, + + validateRepoName: async (name: string, owner?: string) => { + try { + const formatValidation = repoService.validateRepositoryName(name); + if (!formatValidation.valid) { + return { + success: true, + valid: false, + exists: false, + error: formatValidation.error, + }; + } + + if (owner) { + const exists = await repoService.checkRepositoryExists(owner, name); + if (exists) { + return { + success: true, + valid: true, + exists: true, + error: `Repository ${owner}/${name} already exists`, + }; + } + } + + return { + success: true, + valid: true, + exists: false, + }; + } catch (error) { + log.error('Failed to validate repo name:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Validation failed', + }; + } + }, + + checkRepositoryExists: async (owner: string, name: string) => { + try { + const exists = await repoService.checkRepositoryExists(owner, name); + return { success: true, exists }; + } catch (error) { + log.error('Failed to check repository existence:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to check repository', + }; + } + }, + + cloneRepository: async (repoUrl: string, localPath: string) => { + try { + const exec = getLocalExec(); + const fs = new LocalFileSystem(path.dirname(localPath)); + return await cloneRepository(repoUrl, localPath, exec, fs); + } catch (error) { + log.error('Failed to clone repository:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Clone failed', + }; + } + }, + + createNewProject: async (params: { + name: string; + owner: string; + isPrivate: boolean; + description?: string; + }) => { + const { name, owner, isPrivate, description } = params; + + let repoUrl: string | undefined; + let nameWithOwner: string | undefined; + let defaultBranch: string | undefined; + let githubRepoCreated = false; + + try { + const repoInfo = await repoService.createRepository({ name, owner, isPrivate, description }); + repoUrl = repoInfo.url; + nameWithOwner = repoInfo.nameWithOwner; + defaultBranch = repoInfo.defaultBranch; + githubRepoCreated = true; + + const cloneUrl = `https://github.com/${nameWithOwner}.git`; + const settings = {}; + const projectDir = + (settings as { projects?: { defaultDirectory?: string } }).projects?.defaultDirectory ?? + path.join(homedir(), 'emdash-projects'); + const localPath = path.join(projectDir, name); + const exec = getLocalExec(); + const fs = new LocalFileSystem(path.dirname(localPath)); + const cloneResult = await cloneRepository(cloneUrl, localPath, exec, fs); + if (!cloneResult.success) { + throw new Error(cloneResult.error ?? 
'Clone failed'); + } + + const projectFs = new LocalFileSystem(localPath); + await initializeNewProject( + { repoUrl: cloneUrl, localPath, name, description }, + exec, + projectFs + ); + + return { + success: true, + projectPath: localPath, + repoUrl, + nameWithOwner, + defaultBranch, + githubRepoCreated, + }; + } catch (error) { + log.error('Failed to create new project:', error); + + if (githubRepoCreated && nameWithOwner) { + try { + const [repoOwner, repoName] = nameWithOwner.split('/'); + await repoService.deleteRepository(repoOwner, repoName); + } catch (cleanupError) { + log.error('Failed to clean up GitHub repo after project creation failure:', cleanupError); + } + } + + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to create project', + repoUrl, + githubRepoCreated, + }; + } + }, +}); diff --git a/src/main/core/github/services/gh-cli-token.test.ts b/src/main/core/github/services/gh-cli-token.test.ts new file mode 100644 index 000000000..0eb890b93 --- /dev/null +++ b/src/main/core/github/services/gh-cli-token.test.ts @@ -0,0 +1,45 @@ +import { describe, expect, it } from 'vitest'; +import type { ExecFn } from '@main/core/utils/exec'; +import { extractGhCliToken, isGhCliAuthenticated } from './gh-cli-token'; + +function makeExec(responses: Record): ExecFn { + return async (command: string, args?: string[]) => { + const key = [command, ...(args || [])].join(' '); + const response = responses[key]; + if (!response) throw new Error(`Command not found: ${key}`); + return response; + }; +} + +describe('isGhCliAuthenticated', () => { + it('returns true when gh auth status succeeds', async () => { + const exec = makeExec({ 'gh auth status': { stdout: '', stderr: '' } }); + expect(await isGhCliAuthenticated(exec)).toBe(true); + }); + + it('returns false when gh auth status fails', async () => { + const exec: ExecFn = async () => { + throw new Error('not authenticated'); + }; + expect(await 
isGhCliAuthenticated(exec)).toBe(false); + }); +}); + +describe('extractGhCliToken', () => { + it('returns trimmed token from gh auth token', async () => { + const exec = makeExec({ 'gh auth token': { stdout: 'gho_abc123\n', stderr: '' } }); + expect(await extractGhCliToken(exec)).toBe('gho_abc123'); + }); + + it('returns null when gh auth token fails', async () => { + const exec: ExecFn = async () => { + throw new Error('no token'); + }; + expect(await extractGhCliToken(exec)).toBeNull(); + }); + + it('returns null for empty stdout', async () => { + const exec = makeExec({ 'gh auth token': { stdout: '', stderr: '' } }); + expect(await extractGhCliToken(exec)).toBeNull(); + }); +}); diff --git a/src/main/core/github/services/gh-cli-token.ts b/src/main/core/github/services/gh-cli-token.ts new file mode 100644 index 000000000..f73033093 --- /dev/null +++ b/src/main/core/github/services/gh-cli-token.ts @@ -0,0 +1,20 @@ +import type { ExecFn } from '@main/core/utils/exec'; + +export async function isGhCliAuthenticated(exec: ExecFn): Promise { + try { + await exec('gh', ['auth', 'status']); + return true; + } catch { + return false; + } +} + +export async function extractGhCliToken(exec: ExecFn): Promise { + try { + const { stdout } = await exec('gh', ['auth', 'token']); + const token = stdout.trim(); + return token || null; + } catch { + return null; + } +} diff --git a/src/main/core/github/services/github-auth-service.test.ts b/src/main/core/github/services/github-auth-service.test.ts new file mode 100644 index 000000000..6e39da0bd --- /dev/null +++ b/src/main/core/github/services/github-auth-service.test.ts @@ -0,0 +1,193 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { GitHubAuthServiceImpl } from './github-auth-service'; + +const mockGetPassword = vi.fn(); +const mockSetPassword = vi.fn(); +const mockDeletePassword = vi.fn(); + +vi.mock('keytar', () => ({ + default: { + getPassword: (...args: unknown[]) => mockGetPassword(...args), + 
setPassword: (...args: unknown[]) => mockSetPassword(...args), + deletePassword: (...args: unknown[]) => mockDeletePassword(...args), + }, + getPassword: (...args: unknown[]) => mockGetPassword(...args), + setPassword: (...args: unknown[]) => mockSetPassword(...args), + deletePassword: (...args: unknown[]) => mockDeletePassword(...args), +})); + +const mockExtractGhCliToken = vi.fn(); +vi.mock('./gh-cli-token', () => ({ + extractGhCliToken: (...args: unknown[]) => mockExtractGhCliToken(...args), +})); + +vi.mock('@main/core/utils/exec', () => ({ + getLocalExec: () => vi.fn(), +})); + +vi.mock('@main/lib/events', () => ({ + events: { emit: vi.fn() }, +})); + +vi.mock('@main/lib/logger', () => ({ + log: { error: vi.fn(), warn: vi.fn(), info: vi.fn() }, +})); + +vi.mock('@shared/events/githubEvents', () => ({ + githubAuthDeviceCodeChannel: { name: 'github:auth:device-code' }, + githubAuthSuccessChannel: { name: 'github:auth:success' }, + githubAuthErrorChannel: { name: 'github:auth:error' }, + githubAuthCancelledChannel: { name: 'github:auth:cancelled' }, +})); + +const mockExecuteOAuthFlow = vi.fn(); +vi.mock('@main/core/shared/oauth-flow', () => ({ + executeOAuthFlow: (...args: unknown[]) => mockExecuteOAuthFlow(...args), +})); + +describe('GitHubAuthServiceImpl', () => { + let service: GitHubAuthServiceImpl; + + beforeEach(() => { + vi.clearAllMocks(); + service = new GitHubAuthServiceImpl(); + }); + + describe('getToken()', () => { + it('returns token from keytar when found, skips gh CLI', async () => { + mockGetPassword.mockResolvedValue('ghp_stored_token'); + + const token = await service.getToken(); + + expect(token).toBe('ghp_stored_token'); + expect(mockGetPassword).toHaveBeenCalledWith('emdash-github', 'github-token'); + expect(mockExtractGhCliToken).not.toHaveBeenCalled(); + }); + + it('tries gh CLI when keytar is empty, stores in keytar, returns token', async () => { + mockGetPassword.mockResolvedValueOnce(null).mockResolvedValueOnce(null); + 
mockExtractGhCliToken.mockResolvedValue('gho_cli_token'); + mockSetPassword.mockResolvedValue(undefined); + + const token = await service.getToken(); + + expect(token).toBe('gho_cli_token'); + expect(mockExtractGhCliToken).toHaveBeenCalled(); + expect(mockSetPassword).toHaveBeenCalledWith( + 'emdash-github', + 'github-token', + 'gho_cli_token' + ); + expect(mockSetPassword).toHaveBeenCalledWith('emdash-github', 'github-token-source', 'cli'); + }); + + it('clears stale cli-managed keytar token when gh auth is logged out', async () => { + mockGetPassword.mockResolvedValueOnce('gho_old_cli_token').mockResolvedValueOnce('cli'); + mockExtractGhCliToken.mockResolvedValue(null); + mockDeletePassword.mockResolvedValue(true); + + const token = await service.getToken(); + + expect(token).toBeNull(); + expect(mockDeletePassword).toHaveBeenCalledWith('emdash-github', 'github-token'); + expect(mockDeletePassword).toHaveBeenCalledWith('emdash-github', 'github-token-source'); + }); + + it('returns null when nothing is found', async () => { + mockGetPassword.mockResolvedValueOnce(null).mockResolvedValueOnce(null); + mockExtractGhCliToken.mockResolvedValue(null); + + const token = await service.getToken(); + + expect(token).toBeNull(); + }); + }); + + describe('storeToken()', () => { + it('stores token in keytar', async () => { + mockSetPassword.mockResolvedValue(undefined); + + await service.storeToken('ghp_new_token'); + + expect(mockSetPassword).toHaveBeenCalledWith( + 'emdash-github', + 'github-token', + 'ghp_new_token' + ); + expect(mockSetPassword).toHaveBeenCalledWith( + 'emdash-github', + 'github-token-source', + 'keytar' + ); + }); + }); + + describe('logout()', () => { + it('deletes token from keytar', async () => { + mockDeletePassword.mockResolvedValue(true); + + await service.logout(); + + expect(mockDeletePassword).toHaveBeenCalledWith('emdash-github', 'github-token'); + expect(mockDeletePassword).toHaveBeenCalledWith('emdash-github', 'github-token-source'); + }); + 
}); + + describe('isAuthenticated()', () => { + it('returns false when no token is available', async () => { + mockGetPassword.mockResolvedValueOnce(null).mockResolvedValueOnce(null); + mockExtractGhCliToken.mockResolvedValue(null); + + const result = await service.isAuthenticated(); + + expect(result).toBe(false); + }); + + it('returns true when a token is available', async () => { + mockGetPassword.mockResolvedValueOnce('ghp_some_token').mockResolvedValueOnce('keytar'); + + const result = await service.isAuthenticated(); + + expect(result).toBe(true); + }); + }); + + describe('startOAuthFlow()', () => { + it('executes OAuth flow and stores token on success', async () => { + mockExecuteOAuthFlow.mockResolvedValue({ accessToken: 'ghp_new' }); + mockSetPassword.mockResolvedValue(undefined); + vi.spyOn(service, 'getUserInfo').mockResolvedValue({ + id: 1, + login: 'testuser', + name: 'Test', + email: '', + avatar_url: '', + }); + + const result = await service.startOAuthFlow('https://auth.test'); + + expect(mockExecuteOAuthFlow).toHaveBeenCalledWith( + expect.objectContaining({ + authorizeUrl: 'https://auth.test/auth/github', + }) + ); + expect(mockSetPassword).toHaveBeenCalledWith('emdash-github', 'github-token', 'ghp_new'); + expect(mockSetPassword).toHaveBeenCalledWith( + 'emdash-github', + 'github-token-source', + 'keytar' + ); + expect(result.success).toBe(true); + expect(result.token).toBe('ghp_new'); + }); + + it('returns error when no access token in response', async () => { + mockExecuteOAuthFlow.mockResolvedValue({ sessionToken: 'session-only' }); + + const result = await service.startOAuthFlow('https://auth.test'); + + expect(result.success).toBe(false); + expect(result.error).toBe('No access token in response'); + }); + }); +}); diff --git a/src/main/core/github/services/github-auth-service.ts b/src/main/core/github/services/github-auth-service.ts new file mode 100644 index 000000000..b1964f690 --- /dev/null +++ 
b/src/main/core/github/services/github-auth-service.ts @@ -0,0 +1,264 @@ +import { createOAuthDeviceAuth } from '@octokit/auth-oauth-device'; +import { Octokit } from '@octokit/rest'; +import keytar from 'keytar'; +import { + githubAuthCancelledChannel, + githubAuthDeviceCodeChannel, + githubAuthErrorChannel, + githubAuthSuccessChannel, +} from '@shared/events/githubEvents'; +import type { GitHubConnectResponse, GitHubUser } from '@shared/github'; +import { executeOAuthFlow } from '@main/core/shared/oauth-flow'; +import { getLocalExec } from '@main/core/utils/exec'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; +import { extractGhCliToken } from './gh-cli-token'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export type AuthResult = GitHubConnectResponse; + +export interface DeviceCodeResult { + success: boolean; + device_code?: string; + user_code?: string; + verification_uri?: string; + expires_in?: number; + interval?: number; + error?: string; +} + +/** + * Manages GitHub authentication tokens regardless of how they were obtained + * (Emdash Account OAuth, Device Flow, PAT, or extracted from gh CLI). 
+ */ +export type TokenSource = 'keytar' | 'cli' | null; + +export interface GitHubAuthService { + getToken(): Promise; + getTokenSource(): Promise; + isAuthenticated(): Promise; + getCurrentUser(): Promise; + getUserInfo(token: string): Promise; + startOAuthFlow(authServerBaseUrl: string): Promise; + startDeviceFlowAuth(): Promise; + storeToken(token: string): Promise; + cancelAuth(): void; + logout(): Promise; +} + +const SERVICE_NAME = 'emdash-github'; +const ACCOUNT_NAME = 'github-token'; +const TOKEN_SOURCE_ACCOUNT_NAME = 'github-token-source'; + +const GITHUB_CONFIG = { + clientId: 'Ov23ligC35uHWopzCeWf', + scopes: ['repo', 'read:user', 'read:org'], +} as const; + +export class GitHubAuthServiceImpl implements GitHubAuthService { + private deviceFlowAbortController: AbortController | null = null; + + private async getStoredTokenRecord(): Promise<{ token: string | null; source: TokenSource }> { + try { + const [token, rawSource] = await Promise.all([ + keytar.getPassword(SERVICE_NAME, ACCOUNT_NAME), + keytar.getPassword(SERVICE_NAME, TOKEN_SOURCE_ACCOUNT_NAME), + ]); + const source: TokenSource = rawSource === 'cli' || rawSource === 'keytar' ? rawSource : null; + return { token: token ?? 
null, source }; + } catch { + return { token: null, source: null }; + } + } + + private async clearStoredToken(): Promise { + await Promise.all([ + keytar.deletePassword(SERVICE_NAME, ACCOUNT_NAME), + keytar.deletePassword(SERVICE_NAME, TOKEN_SOURCE_ACCOUNT_NAME), + ]); + } + + async getToken(): Promise { + const { token: storedToken, source } = await this.getStoredTokenRecord(); + const exec = getLocalExec(); + + if (storedToken && source === 'cli') { + const cliToken = await extractGhCliToken(exec); + if (!cliToken) { + try { + await this.clearStoredToken(); + } catch (error) { + log.warn('Failed to clear stale CLI token from keytar:', error); + } + return null; + } + if (cliToken !== storedToken) { + try { + await this.storeToken(cliToken, 'cli'); + } catch (error) { + log.warn('Failed to sync refreshed CLI token to keytar:', error); + } + return cliToken; + } + return storedToken; + } + + if (storedToken) return storedToken; + + const cliToken = await extractGhCliToken(exec); + if (!cliToken) return null; + + try { + await this.storeToken(cliToken, 'cli'); + } catch (error) { + log.warn('Failed to cache CLI token in keytar:', error); + } + return cliToken; + } + + async getTokenSource(): Promise { + const token = await this.getToken(); + if (!token) return null; + + const { source } = await this.getStoredTokenRecord(); + if (source) return source; + + const cliToken = await extractGhCliToken(getLocalExec()); + if (cliToken && cliToken === token) return 'cli'; + + return 'keytar'; + } + + async isAuthenticated(): Promise { + const token = await this.getToken(); + return token !== null; + } + + async getCurrentUser(): Promise { + const token = await this.getToken(); + if (!token) return null; + return this.getUserInfo(token); + } + + async getUserInfo(token: string): Promise { + try { + const octokit = new Octokit({ auth: token }); + const { data } = await octokit.rest.users.getAuthenticated(); + return { + id: data.id, + login: data.login, + name: data.name ?? 
'', + email: data.email ?? '', + avatar_url: data.avatar_url, + }; + } catch { + return null; + } + } + + async startOAuthFlow(authServerBaseUrl: string): Promise { + try { + const raw = await executeOAuthFlow({ + authorizeUrl: `${authServerBaseUrl}/auth/github`, + exchangeUrl: `${authServerBaseUrl}/api/v1/auth/electron/exchange`, + successRedirectUrl: `${authServerBaseUrl}/auth/success`, + errorRedirectUrl: `${authServerBaseUrl}/auth/error`, + }); + + const accessToken = raw.accessToken as string; + if (!accessToken) { + return { success: false, error: 'No access token in response' }; + } + + await this.storeToken(accessToken); + const user = await this.getUserInfo(accessToken); + return { success: true, token: accessToken, user: user || undefined }; + } catch (error) { + log.warn('GitHub OAuth flow failed:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'OAuth authentication failed', + }; + } + } + + async startDeviceFlowAuth(): Promise { + this.deviceFlowAbortController = new AbortController(); + const { signal } = this.deviceFlowAbortController; + + try { + const auth = createOAuthDeviceAuth({ + clientId: GITHUB_CONFIG.clientId, + scopes: [...GITHUB_CONFIG.scopes], + onVerification: (verification) => { + events.emit(githubAuthDeviceCodeChannel, { + userCode: verification.user_code, + verificationUri: verification.verification_uri, + expiresIn: verification.expires_in, + interval: verification.interval, + }); + }, + }); + + const authPromise = auth({ type: 'oauth' }); + + const cancelPromise = new Promise((_, reject) => { + signal.addEventListener('abort', () => { + reject(new Error('Auth cancelled')); + }); + }); + + const result = await Promise.race([authPromise, cancelPromise]); + const token = result.token; + + await this.storeToken(token); + + const user = await this.getUserInfo(token); + + if (user) { + events.emit(githubAuthSuccessChannel, { token, user }); + } + + return { + success: true, + device_code: 
undefined, + user_code: undefined, + verification_uri: undefined, + }; + } catch (error) { + if (signal.aborted) { + events.emit(githubAuthCancelledChannel, undefined); + return { success: false, error: 'Auth cancelled' }; + } + + const message = error instanceof Error ? error.message : String(error); + events.emit(githubAuthErrorChannel, { error: 'device_flow_error', message }); + return { success: false, error: message }; + } finally { + this.deviceFlowAbortController = null; + } + } + + async storeToken(token: string, source: Exclude = 'keytar'): Promise { + await Promise.all([ + keytar.setPassword(SERVICE_NAME, ACCOUNT_NAME, token), + keytar.setPassword(SERVICE_NAME, TOKEN_SOURCE_ACCOUNT_NAME, source), + ]); + } + + cancelAuth(): void { + if (this.deviceFlowAbortController) { + this.deviceFlowAbortController.abort(); + this.deviceFlowAbortController = null; + } + } + + async logout(): Promise { + await this.clearStoredToken(); + } +} + +export const githubAuthService = new GitHubAuthServiceImpl(); diff --git a/src/main/core/github/services/issue-service.test.ts b/src/main/core/github/services/issue-service.test.ts new file mode 100644 index 000000000..c461ab0f3 --- /dev/null +++ b/src/main/core/github/services/issue-service.test.ts @@ -0,0 +1,169 @@ +import type { Octokit } from '@octokit/rest'; +import { describe, expect, it, vi } from 'vitest'; +import { issueService } from './issue-service'; +import { getOctokit } from './octokit-provider'; + +vi.mock('./octokit-provider', () => ({ + getOctokit: vi.fn(), +})); + +const mockGetOctokit = vi.mocked(getOctokit); + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeOctokit(overrides: { + listForRepo?: ReturnType; + issuesAndPullRequests?: ReturnType; + issuesGet?: ReturnType; +}): Octokit { + return { + rest: { + issues: { + listForRepo: overrides.listForRepo ?? 
vi.fn(), + get: overrides.issuesGet ?? vi.fn(), + }, + search: { + issuesAndPullRequests: overrides.issuesAndPullRequests ?? vi.fn(), + }, + }, + } as unknown as Octokit; +} + +const restIssue = { + number: 1, + title: 'Test issue', + html_url: 'https://github.com/owner/repo/issues/1', + state: 'open', + created_at: '2024-01-01T00:00:00Z', + updated_at: '2024-01-02T00:00:00Z', + comments: 3, + user: { login: 'alice', avatar_url: 'https://avatar.test/alice' }, + assignees: [{ login: 'bob', avatar_url: 'https://avatar.test/bob' }], + labels: [{ name: 'bug', color: 'fc2929' }], +}; + +const expectedIssue = { + number: 1, + title: 'Test issue', + url: 'https://github.com/owner/repo/issues/1', + state: 'open', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-02T00:00:00Z', + comments: 3, + user: { login: 'alice', avatarUrl: 'https://avatar.test/alice' }, + assignees: [{ login: 'bob', avatarUrl: 'https://avatar.test/bob' }], + labels: [{ name: 'bug', color: 'fc2929' }], +}; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('GitHubIssueServiceImpl', () => { + describe('listIssues', () => { + it('maps REST response to camelCase', async () => { + const listForRepo = vi.fn().mockResolvedValue({ data: [restIssue] }); + mockGetOctokit.mockResolvedValue(makeOctokit({ listForRepo })); + + const result = await issueService.listIssues('owner/repo', 30); + + expect(listForRepo).toHaveBeenCalledWith({ + owner: 'owner', + repo: 'repo', + state: 'open', + per_page: 30, + sort: 'updated', + direction: 'desc', + }); + expect(result).toEqual([expectedIssue]); + }); + + it('filters out pull requests', async () => { + const pr = { ...restIssue, number: 2, pull_request: { url: 'https://...' 
} }; + const listForRepo = vi.fn().mockResolvedValue({ data: [restIssue, pr] }); + mockGetOctokit.mockResolvedValue(makeOctokit({ listForRepo })); + + const result = await issueService.listIssues('owner/repo'); + + expect(result).toHaveLength(1); + expect(result[0].number).toBe(1); + }); + + it('returns empty array on error', async () => { + const listForRepo = vi.fn().mockRejectedValue(new Error('Network error')); + mockGetOctokit.mockResolvedValue(makeOctokit({ listForRepo })); + + expect(await issueService.listIssues('owner/repo')).toEqual([]); + }); + + it('clamps limit to 1-100', async () => { + const listForRepo = vi.fn().mockResolvedValue({ data: [] }); + mockGetOctokit.mockResolvedValue(makeOctokit({ listForRepo })); + + await issueService.listIssues('owner/repo', 0); + expect(listForRepo).toHaveBeenCalledWith(expect.objectContaining({ per_page: 1 })); + + listForRepo.mockClear(); + await issueService.listIssues('owner/repo', 999); + expect(listForRepo).toHaveBeenCalledWith(expect.objectContaining({ per_page: 100 })); + }); + }); + + describe('searchIssues', () => { + it('maps search results to camelCase', async () => { + const issuesAndPullRequests = vi.fn().mockResolvedValue({ data: { items: [restIssue] } }); + mockGetOctokit.mockResolvedValue(makeOctokit({ issuesAndPullRequests })); + + const result = await issueService.searchIssues('owner/repo', 'bug fix', 15); + + expect(issuesAndPullRequests).toHaveBeenCalledWith({ + q: 'bug fix repo:owner/repo is:issue is:open', + per_page: 15, + sort: 'updated', + order: 'desc', + }); + expect(result).toEqual([expectedIssue]); + }); + + it('returns empty for blank search term', async () => { + const issuesAndPullRequests = vi.fn(); + mockGetOctokit.mockResolvedValue(makeOctokit({ issuesAndPullRequests })); + + expect(await issueService.searchIssues('owner/repo', ' ')).toEqual([]); + expect(await issueService.searchIssues('owner/repo', '')).toEqual([]); + expect(issuesAndPullRequests).not.toHaveBeenCalled(); + }); + 
+ it('returns empty on error', async () => { + const issuesAndPullRequests = vi.fn().mockRejectedValue(new Error('API error')); + mockGetOctokit.mockResolvedValue(makeOctokit({ issuesAndPullRequests })); + + expect(await issueService.searchIssues('owner/repo', 'query')).toEqual([]); + }); + }); + + describe('getIssue', () => { + it('maps detail response to camelCase with body', async () => { + const issuesGet = vi.fn().mockResolvedValue({ data: { ...restIssue, body: 'Issue body' } }); + mockGetOctokit.mockResolvedValue(makeOctokit({ issuesGet })); + + const result = await issueService.getIssue('owner/repo', 42); + + expect(issuesGet).toHaveBeenCalledWith({ + owner: 'owner', + repo: 'repo', + issue_number: 42, + }); + expect(result).toEqual({ ...expectedIssue, body: 'Issue body' }); + }); + + it('returns null on error', async () => { + const issuesGet = vi.fn().mockRejectedValue(new Error('Not found')); + mockGetOctokit.mockResolvedValue(makeOctokit({ issuesGet })); + + expect(await issueService.getIssue('owner/repo', 99)).toBeNull(); + }); + }); +}); diff --git a/src/main/core/github/services/issue-service.ts b/src/main/core/github/services/issue-service.ts new file mode 100644 index 000000000..250770cfb --- /dev/null +++ b/src/main/core/github/services/issue-service.ts @@ -0,0 +1,142 @@ +import type { Octokit } from '@octokit/rest'; +import { getOctokit } from './octokit-provider'; +import { splitRepo } from './utils'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export interface GitHubIssue { + number: number; + title: string; + url: string; + state: string; + createdAt: string | null; + updatedAt: string | null; + comments: number; + user: { login: string; avatarUrl: string } | null; + assignees: Array<{ login: string; avatarUrl: string }>; + labels: Array<{ name: string; color: string }>; +} + +export interface GitHubIssueDetail 
extends GitHubIssue { + body: string | null; +} + +export interface GitHubIssueService { + listIssues(nameWithOwner: string, limit?: number): Promise; + searchIssues(nameWithOwner: string, searchTerm: string, limit?: number): Promise; + getIssue(nameWithOwner: string, issueNumber: number): Promise; +} + +// --------------------------------------------------------------------------- +// REST response shape (internal) +// --------------------------------------------------------------------------- + +interface RestIssue { + number: number; + title: string; + html_url: string; + state: string; + created_at: string | null; + updated_at: string | null; + comments: number; + user: { login: string; avatar_url: string } | null; + assignees: Array<{ login: string; avatar_url: string }> | null; + labels: Array; + body?: string | null; + pull_request?: unknown; +} + +// --------------------------------------------------------------------------- +// Implementation +// --------------------------------------------------------------------------- + +export class GitHubIssueServiceImpl implements GitHubIssueService { + constructor(private readonly getOctokit: () => Promise) {} + + async listIssues(nameWithOwner: string, limit: number = 50): Promise { + const { owner, repo } = splitRepo(nameWithOwner); + try { + const octokit = await this.getOctokit(); + const { data } = await octokit.rest.issues.listForRepo({ + owner, + repo, + state: 'open', + per_page: Math.min(Math.max(limit, 1), 100), + sort: 'updated', + direction: 'desc', + }); + return data + .filter((issue) => !issue.pull_request) + .map((item) => this.mapIssue(item as unknown as RestIssue)); + } catch { + return []; + } + } + + async searchIssues( + nameWithOwner: string, + searchTerm: string, + limit: number = 20 + ): Promise { + const term = searchTerm.trim(); + if (!term) return []; + const { owner, repo } = splitRepo(nameWithOwner); + try { + const octokit = await this.getOctokit(); + const { data } = await 
octokit.rest.search.issuesAndPullRequests({ + q: `${term} repo:${owner}/${repo} is:issue is:open`, + per_page: Math.min(Math.max(limit, 1), 100), + sort: 'updated', + order: 'desc', + }); + return data.items.map((item) => this.mapIssue(item as unknown as RestIssue)); + } catch { + return []; + } + } + + async getIssue(nameWithOwner: string, issueNumber: number): Promise { + const { owner, repo } = splitRepo(nameWithOwner); + try { + const octokit = await this.getOctokit(); + const { data } = await octokit.rest.issues.get({ + owner, + repo, + issue_number: issueNumber, + }); + return this.mapIssueDetail(data as unknown as RestIssue); + } catch { + return null; + } + } + + private mapIssue(item: RestIssue): GitHubIssue { + return { + number: item.number, + title: item.title, + url: item.html_url, + state: item.state, + createdAt: item.created_at, + updatedAt: item.updated_at, + comments: item.comments, + user: item.user ? { login: item.user.login, avatarUrl: item.user.avatar_url } : null, + assignees: (item.assignees ?? []).map((a) => ({ login: a.login, avatarUrl: a.avatar_url })), + labels: (item.labels ?? []).map((l) => + typeof l === 'string' + ? { name: l, color: '' } + : { name: l.name ?? '', color: l.color ?? '' } + ), + }; + } + + private mapIssueDetail(item: RestIssue): GitHubIssueDetail { + return { + ...this.mapIssue(item), + body: item.body ?? 
null, + }; + } +} + +export const issueService = new GitHubIssueServiceImpl(getOctokit); diff --git a/src/main/core/github/services/octokit-provider.ts b/src/main/core/github/services/octokit-provider.ts new file mode 100644 index 000000000..04dd40b08 --- /dev/null +++ b/src/main/core/github/services/octokit-provider.ts @@ -0,0 +1,20 @@ +import { Octokit } from '@octokit/rest'; +import { githubAuthService } from './github-auth-service'; + +let cachedOctokit: Octokit | null = null; +let cachedToken: string | null = null; + +export async function getOctokit(): Promise { + const token = await githubAuthService.getToken(); + if (!token) throw new Error('Not authenticated'); + if (token !== cachedToken) { + cachedOctokit = new Octokit({ auth: token }); + cachedToken = token; + } + return cachedOctokit!; +} + +export function clearOctokitCache(): void { + cachedOctokit = null; + cachedToken = null; +} diff --git a/src/main/core/github/services/pr-queries.ts b/src/main/core/github/services/pr-queries.ts new file mode 100644 index 000000000..8f4bf9c3c --- /dev/null +++ b/src/main/core/github/services/pr-queries.ts @@ -0,0 +1,135 @@ +export const PR_SUMMARY_FRAGMENT = ` + fragment PrSummaryFields on PullRequest { + number + title + url + state + isDraft + createdAt + updatedAt + headRefName + headRefOid + baseRefName + body + additions + deletions + changedFiles + mergeable + mergeStateStatus + author { login } + headRepository { + nameWithOwner + url + owner { login } + } + labels(first: 10) { nodes { name color } } + assignees(first: 10) { nodes { login avatarUrl } } + reviewDecision + latestReviews(first: 10) { + nodes { + author { login } + state + } + } + reviewRequests(first: 10) { + nodes { + requestedReviewer { + ... on User { login } + ... on Team { name } + } + } + } + } +`; + +export const LIST_PRS_QUERY = ` + query listPullRequests($owner: String!, $repo: String!, $limit: Int!) 
{ + repository(owner: $owner, name: $repo) { + pullRequests(states: OPEN, first: $limit, orderBy: { field: UPDATED_AT, direction: DESC }) { + totalCount + nodes { ...PrSummaryFields } + } + } + } + ${PR_SUMMARY_FRAGMENT} +`; + +export const SEARCH_PRS_QUERY = ` + query searchPullRequests($query: String!, $limit: Int!) { + search(query: $query, type: ISSUE, first: $limit) { + issueCount + nodes { + ... on PullRequest { ...PrSummaryFields } + } + } + } + ${PR_SUMMARY_FRAGMENT} +`; + +export const GET_PR_DETAIL_QUERY = ` + query getPullRequest($owner: String!, $repo: String!, $number: Int!) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $number) { + ...PrSummaryFields + } + } + } + ${PR_SUMMARY_FRAGMENT} +`; + +export const GET_PR_CHECK_RUNS_QUERY = ` + query getPrCheckRuns($owner: String!, $repo: String!, $number: Int!, $cursor: String) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $number) { + commits(last: 1) { + nodes { + commit { + statusCheckRollup { + contexts(first: 100, after: $cursor) { + pageInfo { hasNextPage endCursor } + nodes { + ... on CheckRun { + __typename + name + status + conclusion + detailsUrl + startedAt + completedAt + checkSuite { + app { name logoUrl } + workflowRun { + workflow { name } + } + } + } + ... 
on StatusContext { + __typename + context + state + targetUrl + createdAt + } + } + } + } + } + } + } + } + } + } +`; + +export const SYNC_PRS_QUERY = ` + query syncPullRequests($owner: String!, $repo: String!, $cursor: String) { + repository(owner: $owner, name: $repo) { + pullRequests(first: 100, after: $cursor, orderBy: { field: UPDATED_AT, direction: DESC }) { + totalCount + pageInfo { hasNextPage endCursor } + nodes { ...PrSummaryFields } + } + } + } + ${PR_SUMMARY_FRAGMENT} +`; diff --git a/src/main/core/github/services/repo-service.test.ts b/src/main/core/github/services/repo-service.test.ts new file mode 100644 index 000000000..73793eaa5 --- /dev/null +++ b/src/main/core/github/services/repo-service.test.ts @@ -0,0 +1,260 @@ +import type { Octokit } from '@octokit/rest'; +import { describe, expect, it, vi } from 'vitest'; +import { getOctokit } from './octokit-provider'; +import { repoService } from './repo-service'; + +vi.mock('./octokit-provider', () => ({ + getOctokit: vi.fn(), +})); + +const mockGetOctokit = vi.mocked(getOctokit); + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeOctokit( + overrides: Partial<{ + reposListForAuthenticatedUser: ReturnType; + usersGetAuthenticated: ReturnType; + orgsListForAuthenticatedUser: ReturnType; + reposCreateForAuthenticatedUser: ReturnType; + reposCreateInOrg: ReturnType; + reposDelete: ReturnType; + reposGet: ReturnType; + }> = {} +): Octokit { + return { + rest: { + repos: { + listForAuthenticatedUser: + overrides.reposListForAuthenticatedUser ?? vi.fn().mockResolvedValue({ data: [] }), + createForAuthenticatedUser: overrides.reposCreateForAuthenticatedUser ?? vi.fn(), + createInOrg: overrides.reposCreateInOrg ?? vi.fn(), + delete: overrides.reposDelete ?? vi.fn().mockResolvedValue({}), + get: overrides.reposGet ?? 
vi.fn(), + }, + users: { + getAuthenticated: + overrides.usersGetAuthenticated ?? + vi.fn().mockResolvedValue({ data: { login: 'testuser' } }), + }, + orgs: { + listForAuthenticatedUser: + overrides.orgsListForAuthenticatedUser ?? vi.fn().mockResolvedValue({ data: [] }), + }, + }, + } as unknown as Octokit; +} + +// REST-shaped mock data (snake_case) +const restRepo = { + id: 1, + name: 'my-repo', + full_name: 'testuser/my-repo', + description: 'A test repo', + html_url: 'https://github.com/testuser/my-repo', + clone_url: 'https://github.com/testuser/my-repo.git', + ssh_url: 'git@github.com:testuser/my-repo.git', + default_branch: 'main', + private: false, + updated_at: '2024-01-01T00:00:00Z', + language: 'TypeScript', + stargazers_count: 10, + forks_count: 2, +}; + +// Expected camelCase output +const expectedRepo = { + id: 1, + name: 'my-repo', + nameWithOwner: 'testuser/my-repo', + description: 'A test repo', + url: 'https://github.com/testuser/my-repo', + cloneUrl: 'https://github.com/testuser/my-repo.git', + sshUrl: 'git@github.com:testuser/my-repo.git', + defaultBranch: 'main', + isPrivate: false, + updatedAt: '2024-01-01T00:00:00Z', + language: 'TypeScript', + stargazersCount: 10, + forksCount: 2, +}; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('GitHubRepositoryServiceImpl', () => { + describe('listRepositories', () => { + it('maps REST response to camelCase', async () => { + const octokit = makeOctokit({ + reposListForAuthenticatedUser: vi.fn().mockResolvedValue({ data: [restRepo] }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + const result = await repoService.listRepositories(); + + expect(result).toEqual([expectedRepo]); + }); + }); + + describe('getOwners', () => { + it('returns user + orgs', async () => { + const octokit = makeOctokit({ + orgsListForAuthenticatedUser: vi.fn().mockResolvedValue({ data: [{ 
login: 'acme' }] }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + const owners = await repoService.getOwners(); + + expect(owners).toEqual([ + { login: 'testuser', type: 'User' }, + { login: 'acme', type: 'Organization' }, + ]); + }); + + it('returns user only if orgs fail', async () => { + const octokit = makeOctokit({ + orgsListForAuthenticatedUser: vi.fn().mockRejectedValue(new Error('forbidden')), + }); + mockGetOctokit.mockResolvedValue(octokit); + + const owners = await repoService.getOwners(); + + expect(owners).toEqual([{ login: 'testuser', type: 'User' }]); + }); + }); + + describe('createRepository', () => { + it('creates for authenticated user', async () => { + const octokit = makeOctokit({ + reposCreateForAuthenticatedUser: vi.fn().mockResolvedValue({ + data: { + html_url: 'https://github.com/testuser/new', + default_branch: 'main', + full_name: 'testuser/new', + }, + }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + const result = await repoService.createRepository({ + name: 'new', + owner: 'testuser', + isPrivate: false, + }); + + expect(octokit.rest.repos.createForAuthenticatedUser).toHaveBeenCalled(); + expect(result).toEqual({ + url: 'https://github.com/testuser/new', + defaultBranch: 'main', + nameWithOwner: 'testuser/new', + }); + }); + + it('creates in org when owner differs', async () => { + const octokit = makeOctokit({ + reposCreateInOrg: vi.fn().mockResolvedValue({ + data: { + html_url: 'https://github.com/acme/new', + default_branch: 'main', + full_name: 'acme/new', + }, + }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + await repoService.createRepository({ name: 'new', owner: 'acme', isPrivate: true }); + + expect(octokit.rest.repos.createInOrg).toHaveBeenCalledWith( + expect.objectContaining({ org: 'acme', name: 'new', private: true }) + ); + }); + }); + + describe('deleteRepository', () => { + it('calls repos.delete', async () => { + const octokit = makeOctokit(); + mockGetOctokit.mockResolvedValue(octokit); 
+ + await repoService.deleteRepository('testuser', 'old-repo'); + + expect(octokit.rest.repos.delete).toHaveBeenCalledWith({ + owner: 'testuser', + repo: 'old-repo', + }); + }); + }); + + describe('checkRepositoryExists', () => { + it('returns true when found', async () => { + const octokit = makeOctokit({ + reposGet: vi.fn().mockResolvedValue({ data: {} }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + expect(await repoService.checkRepositoryExists('testuser', 'repo')).toBe(true); + }); + + it('returns false on 404', async () => { + const octokit = makeOctokit({ + reposGet: vi.fn().mockRejectedValue({ status: 404 }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + expect(await repoService.checkRepositoryExists('testuser', 'missing')).toBe(false); + }); + + it('throws on non-404 errors', async () => { + const octokit = makeOctokit({ + reposGet: vi.fn().mockRejectedValue({ status: 500 }), + }); + mockGetOctokit.mockResolvedValue(octokit); + + await expect(repoService.checkRepositoryExists('testuser', 'repo')).rejects.toEqual({ + status: 500, + }); + }); + }); + + describe('validateRepositoryName', () => { + it('accepts valid names', () => { + expect(repoService.validateRepositoryName('my-repo')).toEqual({ valid: true }); + expect(repoService.validateRepositoryName('repo.js')).toEqual({ valid: true }); + expect(repoService.validateRepositoryName('my_repo_123')).toEqual({ valid: true }); + }); + + it('rejects empty names', () => { + expect(repoService.validateRepositoryName('')).toEqual({ + valid: false, + error: 'Repository name is required', + }); + }); + + it('rejects names over 100 chars', () => { + expect(repoService.validateRepositoryName('a'.repeat(101)).valid).toBe(false); + }); + + it('rejects invalid characters', () => { + expect(repoService.validateRepositoryName('my repo').valid).toBe(false); + expect(repoService.validateRepositoryName('repo@name').valid).toBe(false); + }); + + it('rejects names starting/ending with special chars', () => { 
+ expect(repoService.validateRepositoryName('-repo').valid).toBe(false); + expect(repoService.validateRepositoryName('repo-').valid).toBe(false); + expect(repoService.validateRepositoryName('.repo').valid).toBe(false); + }); + + it('rejects all-dots names', () => { + expect(repoService.validateRepositoryName('...').valid).toBe(false); + }); + + it('rejects reserved names', () => { + expect(repoService.validateRepositoryName('CON').valid).toBe(false); + expect(repoService.validateRepositoryName('nul').valid).toBe(false); + expect(repoService.validateRepositoryName('COM1').valid).toBe(false); + }); + }); +}); diff --git a/src/main/core/github/services/repo-service.ts b/src/main/core/github/services/repo-service.ts new file mode 100644 index 000000000..0648aec2e --- /dev/null +++ b/src/main/core/github/services/repo-service.ts @@ -0,0 +1,218 @@ +import type { Octokit } from '@octokit/rest'; +import { getOctokit } from './octokit-provider'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export interface GitHubRepo { + id: number; + name: string; + nameWithOwner: string; + description: string | null; + url: string; + cloneUrl: string; + sshUrl: string; + defaultBranch: string; + isPrivate: boolean; + updatedAt: string | null; + language: string | null; + stargazersCount: number; + forksCount: number; +} + +export interface GitHubOwner { + login: string; + type: 'User' | 'Organization'; +} + +export interface GitHubRepositoryService { + listRepositories(): Promise; + getOwners(): Promise; + createRepository(params: { + name: string; + description?: string; + owner: string; + isPrivate: boolean; + }): Promise<{ url: string; defaultBranch: string; nameWithOwner: string }>; + deleteRepository(owner: string, name: string): Promise; + checkRepositoryExists(owner: string, name: string): Promise; + validateRepositoryName(name: string): { valid: 
boolean; error?: string }; +} + +// --------------------------------------------------------------------------- +// REST response shape (internal) +// --------------------------------------------------------------------------- + +interface RestRepo { + id: number; + name: string; + full_name: string; + description: string | null; + html_url: string; + clone_url: string; + ssh_url: string; + default_branch: string; + private: boolean; + updated_at: string | null; + language: string | null; + stargazers_count: number; + forks_count: number; +} + +// --------------------------------------------------------------------------- +// Implementation +// --------------------------------------------------------------------------- + +const RESERVED_NAMES = new Set([ + 'con', + 'prn', + 'aux', + 'nul', + 'com1', + 'com2', + 'com3', + 'com4', + 'com5', + 'com6', + 'com7', + 'com8', + 'com9', + 'lpt1', + 'lpt2', + 'lpt3', + 'lpt4', + 'lpt5', + 'lpt6', + 'lpt7', + 'lpt8', + 'lpt9', +]); + +export class GitHubRepositoryServiceImpl implements GitHubRepositoryService { + constructor(private readonly getOctokit: () => Promise) {} + + async listRepositories(): Promise { + const octokit = await this.getOctokit(); + const { data } = await octokit.rest.repos.listForAuthenticatedUser({ + per_page: 100, + sort: 'updated', + direction: 'desc', + }); + return data.map((item) => this.mapRepo(item as unknown as RestRepo)); + } + + async getOwners(): Promise { + const octokit = await this.getOctokit(); + const { data: user } = await octokit.rest.users.getAuthenticated(); + const owners: GitHubOwner[] = [{ login: user.login, type: 'User' }]; + + try { + const { data: orgs } = await octokit.rest.orgs.listForAuthenticatedUser(); + for (const org of orgs) { + owners.push({ login: org.login, type: 'Organization' }); + } + } catch {} + + return owners; + } + + async createRepository(params: { + name: string; + description?: string; + owner: string; + isPrivate: boolean; + }): Promise<{ url: string; 
defaultBranch: string; nameWithOwner: string }> { + const octokit = await this.getOctokit(); + const { data: user } = await octokit.rest.users.getAuthenticated(); + const isCurrentUser = params.owner === user.login; + + const createParams = { + name: params.name, + description: params.description, + private: params.isPrivate, + }; + + const { data } = isCurrentUser + ? await octokit.rest.repos.createForAuthenticatedUser(createParams) + : await octokit.rest.repos.createInOrg({ ...createParams, org: params.owner }); + + return { + url: data.html_url, + defaultBranch: data.default_branch || 'main', + nameWithOwner: data.full_name, + }; + } + + async deleteRepository(owner: string, name: string): Promise { + const octokit = await this.getOctokit(); + await octokit.rest.repos.delete({ owner, repo: name }); + } + + async checkRepositoryExists(owner: string, name: string): Promise { + const octokit = await this.getOctokit(); + try { + await octokit.rest.repos.get({ owner, repo: name }); + return true; + } catch (err: unknown) { + if (err != null && typeof err === 'object' && 'status' in err && err.status === 404) { + return false; + } + throw err; + } + } + + validateRepositoryName(name: string): { valid: boolean; error?: string } { + if (!name || name.length === 0) { + return { valid: false, error: 'Repository name is required' }; + } + + if (name.length > 100) { + return { valid: false, error: 'Repository name must be 100 characters or fewer' }; + } + + if (!/^[a-zA-Z0-9._-]+$/.test(name)) { + return { + valid: false, + error: 'Repository name may only contain letters, numbers, hyphens, underscores, and dots', + }; + } + + if (/^\.+$/.test(name)) { + return { valid: false, error: 'Repository name cannot consist entirely of dots' }; + } + + if (/^[-._]/.test(name) || /[-._]$/.test(name)) { + return { + valid: false, + error: 'Repository name must not start or end with a hyphen, dot, or underscore', + }; + } + + if (RESERVED_NAMES.has(name.toLowerCase())) { + return { 
valid: false, error: `"${name}" is a reserved name and cannot be used` }; + } + + return { valid: true }; + } + + private mapRepo(item: RestRepo): GitHubRepo { + return { + id: item.id, + name: item.name, + nameWithOwner: item.full_name, + description: item.description, + url: item.html_url, + cloneUrl: item.clone_url, + sshUrl: item.ssh_url, + defaultBranch: item.default_branch, + isPrivate: item.private, + updatedAt: item.updated_at, + language: item.language, + stargazersCount: item.stargazers_count, + forksCount: item.forks_count, + }; + } +} + +export const repoService = new GitHubRepositoryServiceImpl(getOctokit); diff --git a/src/main/core/github/services/utils.ts b/src/main/core/github/services/utils.ts new file mode 100644 index 000000000..1e1d61765 --- /dev/null +++ b/src/main/core/github/services/utils.ts @@ -0,0 +1,23 @@ +export function splitRepo(nameWithOwner: string): { owner: string; repo: string } { + const idx = nameWithOwner.indexOf('/'); + if (idx === -1) { + throw new Error(`Invalid nameWithOwner: "${nameWithOwner}" (expected "owner/repo")`); + } + return { owner: nameWithOwner.slice(0, idx), repo: nameWithOwner.slice(idx + 1) }; +} + +/** + * Extract a GitHub `owner/repo` string from a git remote URL. + * Handles both HTTPS (`https://github.com/owner/repo.git`) and + * SSH (`git@github.com:owner/repo.git`) formats. + * Returns `null` if the URL is not a recognisable GitHub remote. + */ +export function parseNameWithOwner(remoteUrl: string): string | null { + // https://github.com/owner/repo[.git][/?#...] 
+ const https = /github\.com\/([^/]+\/[^/?#]+?)(?:\.git)?(?:[/?#]|$)/.exec(remoteUrl); + if (https) return https[1]; + // git@github.com:owner/repo[.git] + const ssh = /github\.com:([^/]+\/[^/?#]+?)(?:\.git)?$/.exec(remoteUrl); + if (ssh) return ssh[1]; + return null; +} diff --git a/src/main/services/JiraService.ts b/src/main/core/jira/JiraService.ts similarity index 70% rename from src/main/services/JiraService.ts rename to src/main/core/jira/JiraService.ts index 18ca5cb40..6dbd7b5e7 100644 --- a/src/main/services/JiraService.ts +++ b/src/main/core/jira/JiraService.ts @@ -1,15 +1,12 @@ +import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs'; import { request } from 'node:https'; +import { join } from 'node:path'; import { URL } from 'node:url'; import { app } from 'electron'; -import { existsSync, readFileSync, writeFileSync, unlinkSync } from 'node:fs'; -import { join } from 'node:path'; - -type JiraCreds = { siteUrl: string; email: string }; +import type { Issue } from '@shared/tasks'; +import { capture } from '@main/lib/telemetry'; -function encodeBasic(email: string, token: string) { - const raw = `${email}:${token}`; - return Buffer.from(raw).toString('base64'); -} +// ── Public types ──────────────────────────────────────────────────────────── export interface JiraConnectionStatus { connected: boolean; @@ -19,6 +16,97 @@ export interface JiraConnectionStatus { error?: string; } +export interface JiraIssueStatus { + name: string; +} + +export interface JiraIssueProject { + key: string; + name: string; +} + +export interface JiraIssueAssignee { + displayName: string; + name: string; +} + +export interface JiraIssue { + id: string; + key: string; + summary: string; + description: string | null; + url: string; + status: JiraIssueStatus | null; + project: JiraIssueProject | null; + assignee: JiraIssueAssignee | null; + updatedAt: string | null; +} + +export function toGeneralIssue(issue: JiraIssue): Issue { + return { + provider: 'jira', + 
identifier: issue.key, + title: issue.summary, + url: issue.url, + description: issue.description ?? undefined, + updatedAt: issue.updatedAt ?? undefined, + }; +} + +// ── Internal types ─────────────────────────────────────────────────────────── + +type JiraCreds = { siteUrl: string; email: string }; + +interface JiraUser { + accountId?: string; + displayName?: string; + name?: string; + errorMessages?: string[]; +} + +interface RawJiraIssueFields { + summary?: string; + description?: AdfNode | null; + updated?: string | null; + project?: { key?: string; name?: string } | null; + status?: { name?: string } | null; + assignee?: { displayName?: string; name?: string } | null; +} + +interface RawJiraIssue { + id?: string; + key?: string; + fields?: RawJiraIssueFields; + errorMessages?: string[]; +} + +interface RawJiraSearchResult { + issues?: RawJiraIssue[]; +} + +interface AdfNode { + type?: string; + text?: string; + content?: AdfNode[]; +} + +interface JiraPickerSection { + issues?: Array<{ key?: string }>; +} + +interface JiraPickerResult { + sections?: JiraPickerSection[]; +} + +// ── Helpers ────────────────────────────────────────────────────────────────── + +function encodeBasic(email: string, token: string) { + const raw = `${email}:${token}`; + return Buffer.from(raw).toString('base64'); +} + +// ── Service ────────────────────────────────────────────────────────────────── + export default class JiraService { private readonly SERVICE = 'emdash-jira'; private readonly ACCOUNT = 'api-token'; @@ -29,7 +117,7 @@ export default class JiraService { try { if (!existsSync(this.CONF_FILE)) return null; const raw = readFileSync(this.CONF_FILE, 'utf8'); - const obj = JSON.parse(raw); + const obj = JSON.parse(raw) as Partial; const siteUrl = String(obj?.siteUrl || '').trim(); const email = String(obj?.email || '').trim(); if (!siteUrl || !email) return null; @@ -41,8 +129,7 @@ export default class JiraService { private writeCreds(creds: JiraCreds) { const { siteUrl, 
email } = creds; - const obj: any = { siteUrl, email }; - writeFileSync(this.CONF_FILE, JSON.stringify(obj), 'utf8'); + writeFileSync(this.CONF_FILE, JSON.stringify({ siteUrl, email }), 'utf8'); } async saveCredentials( @@ -59,13 +146,10 @@ export default class JiraService { const keytar = await import('keytar'); await keytar.setPassword(this.SERVICE, this.ACCOUNT, token); this.writeCreds({ siteUrl, email }); - // Track connection - void import('../telemetry').then(({ capture }) => { - void capture('jira_connected'); - }); + capture('jira_connected'); return { success: true, displayName: me?.displayName }; - } catch (e: any) { - return { success: false, error: e?.message || String(e) }; + } catch (e) { + return { success: false, error: e instanceof Error ? e.message : String(e) }; } } @@ -79,13 +163,10 @@ export default class JiraService { try { if (existsSync(this.CONF_FILE)) unlinkSync(this.CONF_FILE); } catch {} - // Track disconnection - void import('../telemetry').then(({ capture }) => { - void capture('jira_disconnected'); - }); + capture('jira_disconnected'); return { success: true }; - } catch (e: any) { - return { success: false, error: e?.message || String(e) }; + } catch (e) { + return { success: false, error: e instanceof Error ? e.message : String(e) }; } } @@ -108,12 +189,12 @@ export default class JiraService { displayName: me?.displayName, siteUrl: creds.siteUrl, }; - } catch (e: any) { - return { connected: false, error: e?.message || String(e) }; + } catch (e) { + return { connected: false, error: e instanceof Error ? 
e.message : String(e) }; } } - async initialFetch(limit = 50): Promise { + async initialFetch(limit = 50): Promise { const { siteUrl, email, token } = await this.requireAuth(); const jqlCandidates: string[] = []; // Pragmatic fallbacks that typically work with limited permissions @@ -135,7 +216,7 @@ export default class JiraService { try { const keys = await this.getRecentIssueKeys(siteUrl, email, token, limit); if (keys.length > 0) { - const results: any[] = []; + const results: RawJiraIssue[] = []; for (const key of keys.slice(0, limit)) { try { const issue = await this.getIssueByKey(siteUrl, email, token, key); @@ -152,7 +233,7 @@ export default class JiraService { return []; } - async searchIssues(searchTerm: string, limit = 20): Promise { + async searchIssues(searchTerm: string, limit = 20): Promise { const term = (searchTerm || '').trim(); if (!term) return []; const { siteUrl, email, token } = await this.requireAuth(); @@ -172,10 +253,10 @@ export default class JiraService { return { ...creds, token }; } - private async getMyself(siteUrl: string, email: string, token: string): Promise { + private async getMyself(siteUrl: string, email: string, token: string): Promise { const url = new URL('/rest/api/3/myself', siteUrl); const body = await this.doGet(url, email, token); - const data = JSON.parse(body || '{}'); + const data = JSON.parse(body || '{}') as JiraUser; if (!data || data.errorMessages) { throw new Error('Failed to verify Jira token.'); } @@ -188,7 +269,7 @@ export default class JiraService { token: string, jql: string, limit: number - ) { + ): Promise { const url = new URL('/rest/api/3/search', siteUrl); const payload = JSON.stringify({ jql, @@ -198,7 +279,7 @@ export default class JiraService { const body = await this.doRequest(url, email, token, 'POST', payload, { 'Content-Type': 'application/json', }); - const data = JSON.parse(body || '{}'); + const data = JSON.parse(body || '{}') as RawJiraSearchResult; return Array.isArray(data?.issues) ? 
data.issues : []; } @@ -251,7 +332,7 @@ export default class JiraService { } // Enhanced search that supports direct issue-key lookups and robust quoting - async smartSearchIssues(searchTerm: string, limit = 20): Promise { + async smartSearchIssues(searchTerm: string, limit = 20): Promise { const term = (searchTerm || '').trim(); if (!term) return []; const { siteUrl, email, token } = await this.requireAuth(); @@ -284,9 +365,9 @@ export default class JiraService { try { const url = new URL('/rest/api/3/project', siteUrl); const body = await this.doGet(url, email, token); - const data = JSON.parse(body || '[]'); + const data = JSON.parse(body || '[]') as Array<{ key?: string }>; if (!Array.isArray(data)) return []; - return data.map((p: any) => String(p?.key || '')).filter(Boolean); + return data.map((p) => String(p?.key || '')).filter(Boolean); } catch { return []; } @@ -297,11 +378,11 @@ export default class JiraService { email: string, token: string, key: string - ): Promise { + ): Promise { const url = new URL(`/rest/api/3/issue/${encodeURIComponent(key)}`, siteUrl); url.searchParams.set('fields', 'summary,description,updated,project,status,assignee'); const body = await this.doGet(url, email, token); - const data = JSON.parse(body || '{}'); + const data = JSON.parse(body || '{}') as RawJiraIssue; if (!data || data.errorMessages) return null; return data; } @@ -317,7 +398,7 @@ export default class JiraService { url.searchParams.set('query', ''); url.searchParams.set('currentJQL', ''); const body = await this.doGet(url, email, token); - const data = JSON.parse(body || '{}'); + const data = JSON.parse(body || '{}') as JiraPickerResult; const keys: string[] = []; const sections = Array.isArray(data?.sections) ? 
data.sections : []; for (const sec of sections) { @@ -332,17 +413,17 @@ export default class JiraService { return keys; } - private static flattenAdf(node: any): string { + private static flattenAdf(node: AdfNode | string | null | undefined): string { if (!node) return ''; if (typeof node === 'string') return node; if (node.type === 'text') return node.text || ''; if (Array.isArray(node.content)) { - const parts = node.content.map((c: any) => JiraService.flattenAdf(c)); + const parts = node.content.map((c) => JiraService.flattenAdf(c)); // Add newlines between block-level nodes (paragraphs, headings, etc.) - if (['doc', 'bulletList', 'orderedList'].includes(node.type)) { + if (['doc', 'bulletList', 'orderedList'].includes(node.type ?? '')) { return parts.join('\n'); } - if (['paragraph', 'heading', 'listItem'].includes(node.type)) { + if (['paragraph', 'heading', 'listItem'].includes(node.type ?? '')) { return parts.join(''); } return parts.join(''); @@ -350,23 +431,32 @@ export default class JiraService { return ''; } - private normalizeIssues(siteUrl: string, rawIssues: any[]): any[] { + private normalizeIssues(siteUrl: string, rawIssues: RawJiraIssue[]): JiraIssue[] { const base = siteUrl.replace(/\/$/, ''); return (rawIssues || []).map((it) => { - const fields = it?.fields || {}; + const fields = it?.fields ?? {}; return { id: String(it?.id || it?.key || ''), key: String(it?.key || ''), summary: String(fields?.summary || ''), description: fields?.description ? JiraService.flattenAdf(fields.description) : null, url: `${base}/browse/${it?.key}`, - status: fields?.status ? { name: fields.status.name } : null, - project: fields?.project ? { key: fields.project.key, name: fields.project.name } : null, - assignee: fields?.assignee - ? { displayName: fields.assignee.displayName, name: fields.assignee.name } - : null, - updatedAt: fields?.updated || null, + status: fields?.status?.name ? 
{ name: fields.status.name } : null, + project: + fields?.project?.key && fields?.project?.name + ? { key: fields.project.key, name: fields.project.name } + : null, + assignee: + fields?.assignee?.displayName != null + ? { + displayName: fields.assignee.displayName ?? '', + name: fields.assignee.name ?? '', + } + : null, + updatedAt: fields?.updated ?? null, }; }); } } + +export const jiraService = new JiraService(); diff --git a/src/main/core/jira/controller.ts b/src/main/core/jira/controller.ts new file mode 100644 index 000000000..249b4860a --- /dev/null +++ b/src/main/core/jira/controller.ts @@ -0,0 +1,38 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { jiraService } from './JiraService'; + +export const jiraController = createRPCController({ + saveCredentials: async (args: { siteUrl: string; email: string; token: string }) => { + const siteUrl = String(args?.siteUrl || '').trim(); + const email = String(args?.email || '').trim(); + const token = String(args?.token || '').trim(); + if (!siteUrl || !email || !token) { + return { success: false, error: 'Site URL, email, and API token are required.' }; + } + return jiraService.saveCredentials(siteUrl, email, token); + }, + + clearCredentials: async () => jiraService.clearCredentials(), + + checkConnection: async () => jiraService.checkConnection(), + + initialFetch: async (limit?: number) => { + try { + const issues = await jiraService.initialFetch( + typeof limit === 'number' && Number.isFinite(limit) ? limit : 50 + ); + return { success: true, issues }; + } catch (e: unknown) { + return { success: false, error: e instanceof Error ? e.message : String(e) }; + } + }, + + searchIssues: async (searchTerm: string, limit?: number) => { + try { + const issues = await jiraService.smartSearchIssues(searchTerm, limit ?? 20); + return { success: true, issues }; + } catch (e: unknown) { + return { success: false, error: e instanceof Error ? 
e.message : String(e) }; + } + }, +}); diff --git a/src/main/core/line-comments.ts b/src/main/core/line-comments.ts new file mode 100644 index 000000000..c23b29269 --- /dev/null +++ b/src/main/core/line-comments.ts @@ -0,0 +1,76 @@ +import { and, asc, eq, inArray, isNull, sql } from 'drizzle-orm'; +import { createRPCController } from '../../shared/ipc/rpc'; +import { formatCommentsForAgent } from '../../shared/lineComments'; +import { db } from '../db/client'; +import { lineComments, type LineCommentInsert } from '../db/schema'; + +type LineCommentCreateInput = Omit; + +export const lineCommentsController = createRPCController({ + create: async (input: LineCommentCreateInput) => { + const id = `comment-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; + await db.insert(lineComments).values({ + id, + taskId: input.taskId, + filePath: input.filePath, + lineNumber: input.lineNumber, + lineContent: input.lineContent ?? null, + content: input.content, + updatedAt: sql`CURRENT_TIMESTAMP`, + }); + return { id }; + }, + + get: async (args: { taskId: string; filePath?: string }) => { + const comments = args.filePath + ? 
await db + .select() + .from(lineComments) + .where( + sql`${lineComments.taskId} = ${args.taskId} AND ${lineComments.filePath} = ${args.filePath}` + ) + .orderBy(asc(lineComments.lineNumber)) + : await db + .select() + .from(lineComments) + .where(eq(lineComments.taskId, args.taskId)) + .orderBy(asc(lineComments.lineNumber)); + return { comments }; + }, + + update: async (input: { id: string; content: string }) => { + await db + .update(lineComments) + .set({ content: input.content, updatedAt: sql`CURRENT_TIMESTAMP` }) + .where(eq(lineComments.id, input.id)); + }, + + delete: async (id: string) => { + await db.delete(lineComments).where(eq(lineComments.id, id)); + }, + + getFormatted: async (taskId: string) => { + const comments = await db + .select() + .from(lineComments) + .where(eq(lineComments.taskId, taskId)) + .orderBy(asc(lineComments.lineNumber)); + const formatted = formatCommentsForAgent(comments); + return { formatted }; + }, + + markSent: async (commentIds: string[]) => { + if (commentIds.length === 0) return; + const now = new Date().toISOString(); + await db.update(lineComments).set({ sentAt: now }).where(inArray(lineComments.id, commentIds)); + }, + + getUnsent: async (taskId: string) => { + const comments = await db + .select() + .from(lineComments) + .where(and(eq(lineComments.taskId, taskId), isNull(lineComments.sentAt))) + .orderBy(asc(lineComments.filePath), asc(lineComments.lineNumber)); + return { comments }; + }, +}); diff --git a/src/main/core/linear/LinearService.ts b/src/main/core/linear/LinearService.ts new file mode 100644 index 000000000..1adcc43c0 --- /dev/null +++ b/src/main/core/linear/LinearService.ts @@ -0,0 +1,228 @@ +import { LinearClient } from '@linear/sdk'; +import keytar from 'keytar'; +import type { Issue } from '@shared/tasks'; +import { capture } from '@main/lib/telemetry'; + +export interface LinearIssue { + id: string; + identifier: string; + title: string; + description: string | null; + url: string; + state: { name: 
string; type: string; color: string } | null; + team: { name: string; key: string } | null; + project: { name: string } | null; + assignee: { displayName: string; name: string } | null; + updatedAt: string; +} + +export interface LinearConnectionStatus { + connected: boolean; + workspaceName?: string; + error?: string; +} + +export function toGeneralIssue(issue: LinearIssue): Issue { + return { + provider: 'linear', + identifier: issue.identifier, + title: issue.title, + url: issue.url, + description: issue.description ?? undefined, + updatedAt: issue.updatedAt, + }; +} + +const ISSUES_QUERY = ` + query ListIssues($limit: Int!) { + issues( + first: $limit, + orderBy: updatedAt, + filter: { state: { type: { nin: ["completed", "cancelled"] } } } + ) { + nodes { + id + identifier + title + description + url + state { name type color } + team { name key } + project { name } + assignee { displayName name } + updatedAt + } + } + } +`; + +const SEARCH_QUERY = ` + query SearchIssues($term: String!, $limit: Int!) 
{ + searchIssues(term: $term, first: $limit) { + nodes { + id + identifier + title + description + url + state { name type color } + team { name key } + project { name } + assignee { displayName name } + updatedAt + } + } + } +`; + +export class LinearService { + private readonly SERVICE_NAME = 'emdash-linear'; + private readonly ACCOUNT_NAME = 'api-token'; + + // In-memory token cache: undefined = not yet loaded, null = no token, string = valid token + private _cachedToken: string | null | undefined = undefined; + + private _client: LinearClient | null = null; + private _clientToken: string | null = null; + + private getClient(token: string): LinearClient { + if (!this._client || this._clientToken !== token) { + this._client = new LinearClient({ apiKey: token }); + this._clientToken = token; + } + return this._client; + } + + async saveToken( + token: string + ): Promise<{ success: boolean; workspaceName?: string; error?: string }> { + try { + const client = this.getClient(token); + const viewer = await client.viewer; + const org = await viewer.organization; + await this.storeToken(token); + capture('linear_connected'); + return { + success: true, + workspaceName: org?.name ?? viewer.displayName ?? undefined, + }; + } catch (error) { + const message = + error instanceof Error + ? error.message + : 'Failed to validate Linear token. 
Please try again.'; + return { success: false, error: message }; + } + } + + async clearToken(): Promise<{ success: boolean; error?: string }> { + try { + await keytar.deletePassword(this.SERVICE_NAME, this.ACCOUNT_NAME); + this._cachedToken = null; + this._client = null; + this._clientToken = null; + capture('linear_disconnected'); + return { success: true }; + } catch (error) { + console.error('Failed to clear Linear token:', error); + return { + success: false, + error: 'Unable to remove Linear token from keychain.', + }; + } + } + + async checkConnection(): Promise { + try { + const token = await this.getStoredToken(); + if (!token) { + return { connected: false }; + } + const client = this.getClient(token); + const viewer = await client.viewer; + const org = await viewer.organization; + return { + connected: true, + workspaceName: org?.name ?? viewer.displayName ?? undefined, + }; + } catch (error) { + const message = + error instanceof Error ? error.message : 'Failed to verify Linear connection.'; + return { connected: false, error: message }; + } + } + + async initialFetch(limit = 50): Promise { + const token = await this.getStoredToken(); + if (!token) { + throw new Error('Linear token not set. Connect Linear in settings first.'); + } + + const sanitizedLimit = Math.min(Math.max(limit, 1), 200); + const client = this.getClient(token); + + const { data } = await client.client.rawRequest< + { issues: { nodes: LinearIssue[] } }, + { limit: number } + >(ISSUES_QUERY, { limit: sanitizedLimit }); + + return data?.issues?.nodes ?? []; + } + + async searchIssues(searchTerm: string, limit = 20): Promise { + const token = await this.getStoredToken(); + if (!token) { + throw new Error('Linear token not set. 
Connect Linear in settings first.'); + } + + if (!searchTerm.trim()) { + return []; + } + + const sanitizedLimit = Math.min(Math.max(limit, 1), 200); + const client = this.getClient(token); + + try { + const { data } = await client.client.rawRequest< + { searchIssues: { nodes: LinearIssue[] } }, + { term: string; limit: number } + >(SEARCH_QUERY, { + term: searchTerm.trim(), + limit: sanitizedLimit, + }); + + return data?.searchIssues?.nodes ?? []; + } catch (error) { + console.error('[Linear] searchIssues error:', error); + return []; + } + } + + private async storeToken(token: string): Promise { + const clean = token.trim(); + if (!clean) { + throw new Error('Linear token cannot be empty.'); + } + + try { + await keytar.setPassword(this.SERVICE_NAME, this.ACCOUNT_NAME, clean); + this._cachedToken = clean; + } catch (error) { + console.error('Failed to store Linear token:', error); + throw new Error('Unable to store Linear token securely.'); + } + } + + private async getStoredToken(): Promise { + if (this._cachedToken !== undefined) return this._cachedToken; + try { + this._cachedToken = await keytar.getPassword(this.SERVICE_NAME, this.ACCOUNT_NAME); + return this._cachedToken; + } catch (error) { + console.error('Failed to read Linear token from keychain:', error); + return null; + } + } +} + +export const linearService = new LinearService(); diff --git a/src/main/ipc/linearIpc.ts b/src/main/core/linear/controller.ts similarity index 60% rename from src/main/ipc/linearIpc.ts rename to src/main/core/linear/controller.ts index fe4942749..cc3c47c1a 100644 --- a/src/main/ipc/linearIpc.ts +++ b/src/main/core/linear/controller.ts @@ -1,26 +1,19 @@ -import { ipcMain } from 'electron'; -import LinearService from '../services/LinearService'; +import { createRPCController } from '@shared/ipc/rpc'; +import { linearService } from './LinearService'; -const linearService = new LinearService(); - -export function registerLinearIpc() { - ipcMain.handle('linear:saveToken', async 
(_event, token: string) => { +export const linearController = createRPCController({ + saveToken: async (token: string) => { if (!token || typeof token !== 'string') { return { success: false, error: 'A Linear API token is required.' }; } return linearService.saveToken(token); - }); + }, - ipcMain.handle('linear:checkConnection', async () => { - return linearService.checkConnection(); - }); + checkConnection: async () => linearService.checkConnection(), - ipcMain.handle('linear:clearToken', async () => { - return linearService.clearToken(); - }); + clearToken: async () => linearService.clearToken(), - ipcMain.handle('linear:initialFetch', async (_event, limit?: number) => { + initialFetch: async (limit?: number) => { try { const issues = await linearService.initialFetch( typeof limit === 'number' && Number.isFinite(limit) ? limit : undefined @@ -31,13 +24,12 @@ export function registerLinearIpc() { error instanceof Error ? error.message : 'Unable to fetch initial Linear issues right now.'; return { success: false, error: message }; } - }); + }, - ipcMain.handle('linear:searchIssues', async (_event, searchTerm: string, limit?: number) => { + searchIssues: async (searchTerm: string, limit?: number) => { if (!searchTerm || typeof searchTerm !== 'string') { return { success: false, error: 'Search term is required.' }; } - try { const issues = await linearService.searchIssues(searchTerm, limit ?? 20); return { success: true, issues }; } catch (error) { const message = error instanceof Error ? 
error.message : 'Unable to search Linear issues right now.'; return { success: false, error: message }; } - }); -} - -export default registerLinearIpc; + }, +}); diff --git a/src/main/core/mcp/controller.ts b/src/main/core/mcp/controller.ts new file mode 100644 index 000000000..4c485230b --- /dev/null +++ b/src/main/core/mcp/controller.ts @@ -0,0 +1,72 @@ +import { AGENT_PROVIDERS } from '@shared/agent-provider-registry'; +import { createRPCController } from '@shared/ipc/rpc'; +import type { McpProvidersResponse, McpServer } from '@shared/mcp/types'; +import { localDependencyManager } from '@main/core/dependencies/dependency-manager'; +import type { DependencyId } from '@main/core/dependencies/types'; +import { log } from '@main/lib/logger'; +import { mcpService } from './services/McpService'; +import { agentSupportsHttp, getAllMcpAgentIds } from './utils/config-paths'; + +function mapProviders(agentIds: string[]): McpProvidersResponse[] { + return agentIds.map((id) => { + const provider = AGENT_PROVIDERS.find((p) => p.id === id); + const dep = localDependencyManager.get(id as DependencyId); + return { + id, + name: provider?.name ?? id, + installed: dep?.status === 'available', + supportsHttp: agentSupportsHttp(id), + }; + }); +} + +export const mcpController = createRPCController({ + loadAll: async () => { + try { + const data = await mcpService.loadAll(); + return { success: true, data }; + } catch (error) { + log.error('Failed to load MCP servers:', error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + + saveServer: async (server: McpServer) => { + try { + await mcpService.saveServer(server); + return { success: true }; + } catch (error) { + log.error('Failed to save MCP server:', error); + return { success: false, error: error instanceof Error ? 
error.message : String(error) }; + } + }, + + removeServer: async (serverName: string) => { + try { + await mcpService.removeServer(serverName); + return { success: true }; + } catch (error) { + log.error('Failed to remove MCP server:', error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + + getProviders: async () => { + try { + return { success: true, data: mapProviders(getAllMcpAgentIds()) }; + } catch (error) { + log.error('Failed to get MCP providers:', error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, + + refreshProviders: async () => { + try { + await localDependencyManager.probeCategory('agent'); + return { success: true, data: mapProviders(getAllMcpAgentIds()) }; + } catch (error) { + log.error('Failed to refresh MCP providers:', error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } + }, +}); diff --git a/src/main/core/mcp/services/McpService.test.ts b/src/main/core/mcp/services/McpService.test.ts new file mode 100644 index 000000000..4a03a048e --- /dev/null +++ b/src/main/core/mcp/services/McpService.test.ts @@ -0,0 +1,177 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import type { AgentMcpMeta, ServerMap } from '@shared/mcp/types'; +import * as configIO from '../utils/config-io'; +import * as configPaths from '../utils/config-paths'; +import { McpService } from './McpService'; + +vi.mock('../utils/config-io', () => ({ + readServers: vi.fn(), + writeServers: vi.fn(), +})); + +vi.mock('../utils/config-paths', () => ({ + getAgentMcpMeta: vi.fn(), + getAllMcpAgentIds: vi.fn(() => ['claude', 'cursor']), +})); + +vi.mock('../utils/catalog', () => ({ + loadCatalog: vi.fn(() => []), + getCatalogServerConfig: vi.fn(), +})); + +vi.mock('@main/lib/logger', () => ({ + log: { error: vi.fn(), warn: vi.fn(), info: vi.fn() }, +})); + +const mockReadServers = 
vi.mocked(configIO.readServers); +const mockWriteServers = vi.mocked(configIO.writeServers); +const mockGetMeta = vi.mocked(configPaths.getAgentMcpMeta); + +const claudeMeta: AgentMcpMeta = { + agentId: 'claude', + configPath: '/home/test/.claude.json', + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'passthrough', +}; + +const cursorMeta: AgentMcpMeta = { + agentId: 'cursor', + configPath: '/home/test/.cursor/mcp.json', + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'cursor', +}; + +describe('McpService', () => { + let service: McpService; + + beforeEach(() => { + vi.clearAllMocks(); + service = new McpService(); + mockGetMeta.mockImplementation((id: string) => { + if (id === 'claude') return claudeMeta; + if (id === 'cursor') return cursorMeta; + return undefined; + }); + }); + + describe('loadAll', () => { + it('reads servers from all agents and normalizes to McpServer[]', async () => { + mockReadServers + .mockResolvedValueOnce({ myServer: { command: 'npx', args: ['-y', 'foo'] } }) // claude + .mockResolvedValueOnce({}); // cursor + + const result = await service.loadAll(); + expect(result.installed).toHaveLength(1); + expect(result.installed[0].name).toBe('myServer'); + expect(result.installed[0].transport).toBe('stdio'); + expect(result.installed[0].providers).toContain('claude'); + }); + + it('deduplicates servers by name, merging providers', async () => { + mockReadServers + .mockResolvedValueOnce({ shared: { command: 'npx', args: ['foo'] } }) + .mockResolvedValueOnce({ shared: { command: 'npx', args: ['foo'] } }); + + const result = await service.loadAll(); + expect(result.installed).toHaveLength(1); + expect(result.installed[0].providers).toContain('claude'); + expect(result.installed[0].providers).toContain('cursor'); + }); + + it('skips agents that throw on read', async () => { + mockReadServers + .mockRejectedValueOnce(new Error('malformed')) + 
.mockResolvedValueOnce({ s1: { command: 'npx' } }); + + const result = await service.loadAll(); + expect(result.installed).toHaveLength(1); + }); + }); + + describe('saveServer', () => { + it('writes to selected providers using forward adapters', async () => { + mockReadServers.mockImplementation(() => Promise.resolve({})); + mockWriteServers.mockResolvedValue(undefined); + + await service.saveServer({ + name: 'myServer', + transport: 'stdio', + command: 'npx', + args: ['-y', 'foo'], + + providers: ['claude'], + }); + + expect(mockWriteServers).toHaveBeenCalledTimes(1); + expect(mockWriteServers.mock.calls[0][0]).toBe(claudeMeta); + const written = mockWriteServers.mock.calls[0][1] as ServerMap; + expect(written.myServer).toBeDefined(); + }); + + it('removes server from deselected providers', async () => { + mockReadServers.mockImplementation(() => + Promise.resolve({ myServer: { command: 'npx' }, other: { command: 'y' } }) + ); + mockWriteServers.mockResolvedValue(undefined); + + // Save with only cursor selected — should remove from claude + await service.saveServer({ + name: 'myServer', + transport: 'stdio', + command: 'npx', + args: [], + + providers: ['cursor'], + }); + + // Claude should have myServer removed + const claudeCall = mockWriteServers.mock.calls.find((c) => c[0] === claudeMeta); + if (claudeCall) { + expect((claudeCall[1] as ServerMap).myServer).toBeUndefined(); + } + }); + + it('rejects empty server name', async () => { + await expect( + service.saveServer({ + name: '', + transport: 'stdio', + command: 'npx', + providers: ['claude'], + }) + ).rejects.toThrow('Invalid server name'); + }); + + it('rejects server name with invalid characters', async () => { + await expect( + service.saveServer({ + name: 'my server/hack', + transport: 'stdio', + command: 'npx', + providers: ['claude'], + }) + ).rejects.toThrow('Invalid server name'); + }); + }); + + describe('removeServer', () => { + it('removes server from all agent configs', async () => { + 
mockReadServers.mockImplementation(() =>
+        Promise.resolve({ toRemove: { command: 'npx' }, keep: { command: 'y' } })
+      );
+      mockWriteServers.mockResolvedValue(undefined);
+
+      await service.removeServer('toRemove');
+
+      for (const call of mockWriteServers.mock.calls) {
+        const servers = call[1] as ServerMap;
+        expect(servers.toRemove).toBeUndefined();
+        expect(servers.keep).toBeDefined();
+      }
+    });
+  });
+});
diff --git a/src/main/core/mcp/services/McpService.ts b/src/main/core/mcp/services/McpService.ts
new file mode 100644
index 000000000..ee310f16e
--- /dev/null
+++ b/src/main/core/mcp/services/McpService.ts
@@ -0,0 +1,161 @@
+import type { McpLoadAllResponse, McpServer, RawServerEntry, ServerMap } from '@shared/mcp/types';
+import { log } from '@main/lib/logger';
+import { adaptForward, adaptReverse } from '../utils/adapters';
+import { loadCatalog } from '../utils/catalog';
+import { readServers, writeServers } from '../utils/config-io';
+import { getAgentMcpMeta, getAllMcpAgentIds } from '../utils/config-paths';
+import { mcpServerToRaw, rawEntryToMcpFields, rawToMcpServer } from '../utils/conversion';
+
+// Aggregates MCP server configuration across all supported agents' config files.
+export class McpService {
+  private _writeLock = Promise.resolve();
+
+  // Serializes read-modify-write cycles so concurrent IPC calls cannot
+  // interleave writes to the same agent config files.
+  private async withWriteLock<T>(fn: () => Promise<T>): Promise<T> {
+    const prev = this._writeLock;
+    let resolve: () => void;
+    this._writeLock = new Promise<void>((r) => {
+      resolve = r;
+    });
+    await prev;
+    try {
+      return await fn();
+    } finally {
+      resolve!();
+    }
+  }
+
+  // Reads every agent's config, normalizes entries to canonical form and
+  // deduplicates by server name, merging the provider lists.
+  async loadAll(): Promise<McpLoadAllResponse> {
+    return this.withWriteLock(async () => {
+      const agentIds = getAllMcpAgentIds();
+      const serversByName = new Map<string, { server: McpServer; providers: Set<string> }>();
+
+      for (const agentId of agentIds) {
+        const meta = getAgentMcpMeta(agentId);
+        if (!meta) continue;
+
+        let rawServers: ServerMap;
+        try {
+          rawServers = await readServers(meta);
+        } catch (err) {
+          log.warn(`Failed to read MCP config for ${agentId}:`, err);
+          continue;
+        }
+
+        const canonical = adaptReverse(meta.adapter, rawServers);
+
+        for (const [name, raw] of Object.entries(canonical)) {
+          const existing = serversByName.get(name);
+          if (existing) {
+            existing.providers.add(agentId);
+            // Keep whichever duplicate representation carries more fields.
+            const newServer = rawToMcpServer(name, raw, existing.providers);
+            const existingKeyCount = Object.keys(rawEntryToMcpFields(existing.server)).length;
+            const newKeyCount = Object.keys(rawEntryToMcpFields(newServer)).length;
+            if (newKeyCount > existingKeyCount) {
+              existing.server = newServer;
+            }
+          } else {
+            const providers = new Set([agentId]);
+            serversByName.set(name, {
+              server: rawToMcpServer(name, raw, providers),
+              providers,
+            });
+          }
+        }
+      }
+
+      const installed: McpServer[] = [];
+      for (const { server, providers } of serversByName.values()) {
+        server.providers = Array.from(providers);
+        installed.push(server);
+      }
+
+      const catalog = loadCatalog();
+
+      return { installed, catalog };
+    });
+  }
+
+  // Writes the server to each selected provider's config (via its forward
+  // adapter) and removes it from deselected providers. Throws if any write fails.
+  async saveServer(server: McpServer): Promise<void> {
+    // Name is interpolated into config objects; restrict to safe characters.
+    if (!server.name || !/^[\w\-._]+$/.test(server.name)) {
+      throw new Error(`Invalid server name: "${server.name}"`);
+    }
+    return this.withWriteLock(async () => {
+      const allAgentIds = getAllMcpAgentIds();
+      const selectedProviders = new Set(server.providers);
+      const raw = mcpServerToRaw(server);
+
+      const failures: string[] = [];
+
+      for (const agentId of allAgentIds) {
+        const meta = getAgentMcpMeta(agentId);
+        if (!meta) continue;
+
+        let existing: ServerMap;
+        try {
+          existing = await readServers(meta);
+        } catch {
+          existing = {};
+        }
+
+        if (selectedProviders.has(agentId)) {
+          const adapted = adaptForward(meta.adapter, { [server.name]: raw });
+          const adaptedEntry = adapted[server.name];
+          if (adaptedEntry) {
+            existing[server.name] = adaptedEntry;
+          }
+        } else if (server.name in existing) {
+          delete existing[server.name];
+        } else {
+          // Neither selected nor present — nothing to write for this agent.
+          continue;
+        }
+
+        try {
+          await writeServers(meta, existing);
+        } catch (err) {
+          log.error(`Failed to write MCP config for ${agentId}:`, err);
+          failures.push(agentId);
+        }
+      }
+
+      if (failures.length) {
+        throw new Error(`Failed to write config for: ${failures.join(', ')}`);
+      }
+    });
+  }
+
+  // Removes the named server from every agent config that contains it.
+  async removeServer(serverName: string): Promise<void> {
+    return this.withWriteLock(async () => {
+      const allAgentIds = getAllMcpAgentIds();
+      const failures: string[] = [];
+
+      for (const agentId of allAgentIds) {
+        const meta = getAgentMcpMeta(agentId);
+        if (!meta) continue;
+
+        let existing: ServerMap;
+        try {
+          existing = await readServers(meta);
+        } catch {
+          continue;
+        }
+
+        if (!(serverName in existing)) continue;
+
+        delete existing[serverName];
+
+        try {
+          await writeServers(meta, existing);
+        } catch (err) {
+          log.error(`Failed to write MCP config for ${agentId}:`, err);
+          failures.push(agentId);
+        }
+      }
+
+      if (failures.length) {
+        throw new Error(`Failed to write config for: ${failures.join(', ')}`);
+      }
+    });
+  }
+}
+
+export const mcpService = new McpService();
diff --git a/src/main/core/mcp/utils/adapters.test.ts b/src/main/core/mcp/utils/adapters.test.ts
new file mode 100644
index 000000000..2d0939b08
--- /dev/null
+++ b/src/main/core/mcp/utils/adapters.test.ts
@@ -0,0 +1,215 @@
+import { describe, expect, it } from 'vitest';
+import type { ServerMap } from '@shared/mcp/types';
+import { adaptForward, adaptReverse } from './adapters';
+
+describe('adaptForward (canonical → agent)', () => {
+  const stdioServer = { command: 'npx', args: ['-y', 'foo'] };
+  const httpServer = { type: 'http', url: 'https://example.com/mcp', headers: { 'X-Key': 'abc' } };
+
+  describe('passthrough', () => {
+    it('returns servers unchanged', () => {
+      const servers: ServerMap = { s1: stdioServer, s2: httpServer };
+      expect(adaptForward('passthrough', servers)).toEqual(servers);
+    });
+  });
+
+  describe('gemini', () => {
+    it('renames url to httpUrl and adds Accept header for HTTP servers', () => {
+      const result = adaptForward('gemini', { s1: httpServer });
+      expect(result.s1).toEqual({
+        httpUrl: 'https://example.com/mcp',
+        headers: { 'X-Key': 'abc', Accept: 'application/json, text/event-stream' },
+      });
+    });
+
+    it('leaves
stdio servers unchanged', () => { + const result = adaptForward('gemini', { s1: stdioServer }); + expect(result.s1).toEqual(stdioServer); + }); + + it('does not overwrite existing Accept header', () => { + const server = { type: 'http', url: 'https://x.com', headers: { Accept: 'custom' } }; + const result = adaptForward('gemini', { s1: server }); + expect((result.s1 as any).headers.Accept).toBe('custom'); + }); + }); + + describe('cursor', () => { + it('keeps only url and headers for HTTP servers', () => { + const result = adaptForward('cursor', { s1: httpServer }); + expect(result.s1).toEqual({ url: 'https://example.com/mcp', headers: { 'X-Key': 'abc' } }); + expect((result.s1 as any).type).toBeUndefined(); + }); + + it('leaves stdio servers unchanged', () => { + const result = adaptForward('cursor', { s1: stdioServer }); + expect(result.s1).toEqual(stdioServer); + }); + }); + + describe('codex', () => { + it('drops HTTP servers', () => { + const result = adaptForward('codex', { s1: stdioServer, s2: httpServer }); + expect(result.s1).toEqual(stdioServer); + expect(result.s2).toBeUndefined(); + }); + }); + + describe('opencode', () => { + it('transforms HTTP to remote type with Accept and enabled', () => { + const result = adaptForward('opencode', { s1: httpServer }); + expect(result.s1).toEqual({ + type: 'remote', + url: 'https://example.com/mcp', + headers: { 'X-Key': 'abc', Accept: 'application/json, text/event-stream' }, + enabled: true, + }); + }); + + it('transforms stdio to local type with command array and enabled', () => { + const result = adaptForward('opencode', { s1: stdioServer }); + expect(result.s1).toEqual({ + type: 'local', + command: ['npx', '-y', 'foo'], + enabled: true, + }); + }); + }); + + describe('copilot', () => { + it('adds tools: ["*"] if missing', () => { + const result = adaptForward('copilot', { s1: stdioServer }); + expect((result.s1 as any).tools).toEqual(['*']); + }); + + it('preserves existing tools', () => { + const server = { 
...stdioServer, tools: ['read'] }; + const result = adaptForward('copilot', { s1: server }); + expect((result.s1 as any).tools).toEqual(['read']); + }); + }); +}); + +describe('adaptReverse (agent → canonical)', () => { + describe('passthrough', () => { + it('returns servers unchanged', () => { + const servers: ServerMap = { s1: { command: 'npx', args: ['-y', 'foo'] } }; + expect(adaptReverse('passthrough', servers)).toEqual(servers); + }); + }); + + describe('gemini', () => { + it('renames httpUrl back to url and adds type: http', () => { + const servers: ServerMap = { + s1: { + httpUrl: 'https://example.com', + headers: { Accept: 'application/json, text/event-stream' }, + }, + }; + const result = adaptReverse('gemini', servers); + expect(result.s1).toMatchObject({ type: 'http', url: 'https://example.com' }); + expect((result.s1 as any).httpUrl).toBeUndefined(); + }); + + it('strips injected Accept header during reverse', () => { + const servers: ServerMap = { + s1: { + httpUrl: 'https://example.com', + headers: { Accept: 'application/json, text/event-stream', 'X-Key': 'abc' }, + }, + }; + const result = adaptReverse('gemini', servers); + expect((result.s1 as any).headers).toEqual({ 'X-Key': 'abc' }); + }); + + it('removes headers entirely when only injected Accept remains', () => { + const servers: ServerMap = { + s1: { + httpUrl: 'https://example.com', + headers: { Accept: 'application/json, text/event-stream' }, + }, + }; + const result = adaptReverse('gemini', servers); + expect((result.s1 as any).headers).toBeUndefined(); + }); + + it('preserves custom Accept header', () => { + const servers: ServerMap = { + s1: { + httpUrl: 'https://example.com', + headers: { Accept: 'text/html' }, + }, + }; + const result = adaptReverse('gemini', servers); + expect((result.s1 as any).headers).toEqual({ Accept: 'text/html' }); + }); + }); + + describe('cursor', () => { + it('adds type: http when url is present and no command', () => { + const servers: ServerMap = { s1: { 
url: 'https://example.com', headers: {} } }; + const result = adaptReverse('cursor', servers); + expect((result.s1 as any).type).toBe('http'); + }); + + it('leaves stdio servers unchanged', () => { + const servers: ServerMap = { s1: { command: 'npx', args: ['foo'] } }; + const result = adaptReverse('cursor', servers); + expect(result.s1).toEqual({ command: 'npx', args: ['foo'] }); + }); + }); + + describe('codex', () => { + it('returns servers as-is (all are stdio)', () => { + const servers: ServerMap = { s1: { command: 'npx', args: ['foo'] } }; + expect(adaptReverse('codex', servers)).toEqual(servers); + }); + }); + + describe('opencode', () => { + it('converts remote type back to http canonical', () => { + const servers: ServerMap = { + s1: { type: 'remote', url: 'https://example.com', headers: {}, enabled: true }, + }; + const result = adaptReverse('opencode', servers); + expect(result.s1).toMatchObject({ type: 'http', url: 'https://example.com' }); + expect((result.s1 as any).enabled).toBeUndefined(); + }); + + it('strips injected Accept header during reverse', () => { + const servers: ServerMap = { + s1: { + type: 'remote', + url: 'https://example.com', + headers: { Accept: 'application/json, text/event-stream', 'X-Key': 'abc' }, + enabled: true, + }, + }; + const result = adaptReverse('opencode', servers); + expect((result.s1 as any).headers).toEqual({ 'X-Key': 'abc' }); + }); + + it('converts local type back to stdio canonical', () => { + const servers: ServerMap = { + s1: { type: 'local', command: ['npx', '-y', 'foo'], enabled: true }, + }; + const result = adaptReverse('opencode', servers); + expect(result.s1).toMatchObject({ command: 'npx', args: ['-y', 'foo'] }); + expect((result.s1 as any).type).toBeUndefined(); + }); + }); + + describe('copilot', () => { + it('strips tools: ["*"]', () => { + const servers: ServerMap = { s1: { command: 'npx', args: ['foo'], tools: ['*'] } }; + const result = adaptReverse('copilot', servers); + expect((result.s1 as 
any).tools).toBeUndefined();
+    });
+
+    it('preserves non-wildcard tools', () => {
+      const servers: ServerMap = { s1: { command: 'npx', args: ['foo'], tools: ['read'] } };
+      const result = adaptReverse('copilot', servers);
+      expect((result.s1 as any).tools).toEqual(['read']);
+    });
+  });
+});
diff --git a/src/main/core/mcp/utils/adapters.ts b/src/main/core/mcp/utils/adapters.ts
new file mode 100644
index 000000000..9a897c9d6
--- /dev/null
+++ b/src/main/core/mcp/utils/adapters.ts
@@ -0,0 +1,236 @@
+import type { AdapterType, RawServerEntry, ServerMap } from '@shared/mcp/types';
+
+// ── Helpers ────────────────────────────────────────────────────────────────
+
+function isHttpServer(s: RawServerEntry): boolean {
+  return s.type === 'http';
+}
+
+function isStdio(s: RawServerEntry): boolean {
+  return !isHttpServer(s) && s.command !== undefined;
+}
+
+// Accept header we inject for HTTP transports; stripped again on reverse so
+// round-tripping a config leaves it unchanged.
+const INJECTED_ACCEPT = 'application/json, text/event-stream';
+
+function ensureHeader(headers: Record<string, unknown>, key: string, val: string): void {
+  if (typeof headers[key] !== 'string') {
+    headers[key] = val;
+  }
+}
+
+function stripInjectedHeaders(entry: RawServerEntry): void {
+  if (typeof entry.headers !== 'object' || entry.headers === null) return;
+  const headers = entry.headers as Record<string, unknown>;
+  if (headers.Accept === INJECTED_ACCEPT) {
+    delete headers.Accept;
+    if (!Object.keys(headers).length) {
+      delete entry.headers;
+    }
+  }
+}
+
+function deepClone<T>(obj: T): T {
+  return JSON.parse(JSON.stringify(obj));
+}
+
+// Applies fn to HTTP entries only; everything else is deep-copied as-is.
+function transformHttpServers(
+  servers: ServerMap,
+  fn: (s: RawServerEntry) => RawServerEntry
+): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null && isHttpServer(v)) {
+      result[k] = fn(deepClone(v));
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+// ── Forward Adapters (canonical → agent) ───────────────────────────────────
+
+function fwdPassthrough(servers: ServerMap): ServerMap {
+  return deepClone(servers);
+}
+
+function fwdGemini(servers: ServerMap): ServerMap {
+  return transformHttpServers(servers, (s) => {
+    const url = s.url ?? '';
+    const headers: Record<string, unknown> = {
+      ...((s.headers as Record<string, unknown>) ?? {}),
+    };
+    ensureHeader(headers, 'Accept', 'application/json, text/event-stream');
+    const result: RawServerEntry = { httpUrl: url, headers };
+    if (s.env && typeof s.env === 'object') result.env = s.env;
+    return result;
+  });
+}
+
+function fwdCursor(servers: ServerMap): ServerMap {
+  return transformHttpServers(servers, (s) => {
+    const url = s.url ?? '';
+    const headers = s.headers ?? {};
+    const result: RawServerEntry = { url, headers };
+    if (s.env && typeof s.env === 'object') result.env = s.env;
+    return result;
+  });
+}
+
+// Codex only supports stdio transports; HTTP servers are dropped.
+function fwdCodex(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null && isStdio(v)) {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+function fwdOpencode(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v !== 'object' || v === null) {
+      result[k] = v;
+      continue;
+    }
+    if (isHttpServer(v)) {
+      const headers: Record<string, unknown> = {
+        ...((v.headers as Record<string, unknown>) ?? {}),
+      };
+      ensureHeader(headers, 'Accept', 'application/json, text/event-stream');
+      const entry: RawServerEntry = { type: 'remote', url: v.url ?? '', headers, enabled: true };
+      if (v.env && typeof v.env === 'object') entry.env = v.env;
+      result[k] = entry;
+    } else if (isStdio(v)) {
+      // opencode takes the command + args as a single string array.
+      const cmdVec: string[] = [];
+      if (typeof v.command === 'string' && v.command) cmdVec.push(v.command);
+      if (Array.isArray(v.args)) cmdVec.push(...(v.args as string[]));
+      const entry: RawServerEntry = { type: 'local', command: cmdVec, enabled: true };
+      if (v.env && typeof v.env === 'object') entry.env = v.env;
+      result[k] = entry;
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+function fwdCopilot(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null && !('tools' in v)) {
+      result[k] = { ...deepClone(v), tools: ['*'] };
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+// ── Reverse Adapters (agent → canonical) ───────────────────────────────────
+
+function revPassthrough(servers: ServerMap): ServerMap {
+  return deepClone(servers);
+}
+
+function revGemini(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null && 'httpUrl' in v) {
+      const { httpUrl, ...rest } = v;
+      const entry = { ...rest, type: 'http', url: httpUrl } as RawServerEntry;
+      stripInjectedHeaders(entry);
+      result[k] = entry;
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+function revCursor(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null && 'url' in v && !('command' in v)) {
+      result[k] = { ...deepClone(v), type: 'http' };
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+function revCodex(servers: ServerMap): ServerMap {
+  return deepClone(servers);
+}
+
+function revOpencode(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v !== 'object' || v === null) {
+      result[k] = v;
+      continue;
+    }
+    if (v.type === 'remote') {
+      const { type: _, enabled: _e, ...rest } = v;
+      const entry = { ...rest, type: 'http' } as RawServerEntry;
+      stripInjectedHeaders(entry);
+      result[k] = entry;
+    } else if (v.type === 'local' && Array.isArray(v.command)) {
+      const cmdArr = v.command as string[];
+      const [command, ...args] = cmdArr;
+      const entry: RawServerEntry = {};
+      if (command) entry.command = command;
+      if (args.length) entry.args = args;
+      result[k] = entry;
+    } else {
+      result[k] = deepClone(v);
+    }
+  }
+  return result;
+}
+
+function revCopilot(servers: ServerMap): ServerMap {
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(servers)) {
+    if (typeof v === 'object' && v !== null) {
+      const clone = deepClone(v);
+      if (Array.isArray(clone.tools) && clone.tools.length === 1 && clone.tools[0] === '*') {
+        delete clone.tools;
+      }
+      result[k] = clone;
+    } else {
+      result[k] = v;
+    }
+  }
+  return result;
+}
+
+// ── Public API ─────────────────────────────────────────────────────────────
+
+const FORWARD: Record<AdapterType, (servers: ServerMap) => ServerMap> = {
+  passthrough: fwdPassthrough,
+  gemini: fwdGemini,
+  cursor: fwdCursor,
+  codex: fwdCodex,
+  opencode: fwdOpencode,
+  copilot: fwdCopilot,
+};
+
+const REVERSE: Record<AdapterType, (servers: ServerMap) => ServerMap> = {
+  passthrough: revPassthrough,
+  gemini: revGemini,
+  cursor: revCursor,
+  codex: revCodex,
+  opencode: revOpencode,
+  copilot: revCopilot,
+};
+
+export function adaptForward(adapter: AdapterType, servers: ServerMap): ServerMap {
+  return FORWARD[adapter](servers);
+}
+
+export function adaptReverse(adapter: AdapterType, servers: ServerMap): ServerMap {
+  return REVERSE[adapter](servers);
+}
diff --git a/src/main/core/mcp/utils/catalog.ts b/src/main/core/mcp/utils/catalog.ts
new file mode 100644
index 000000000..492938a0a
--- /dev/null
+++ b/src/main/core/mcp/utils/catalog.ts
@@ -0,0 +1,17 @@
+import { catalogData } from
'@shared/mcp/catalog'; +import type { McpCatalogEntry, RawServerEntry } from '@shared/mcp/types'; + +export function loadCatalog(): McpCatalogEntry[] { + return Object.entries(catalogData).map(([key, entry]) => ({ + key, + name: entry.name, + description: entry.description, + docsUrl: entry.docsUrl, + defaultConfig: entry.config, + credentialKeys: entry.credentialKeys, + })); +} + +export function getCatalogServerConfig(key: string): RawServerEntry | undefined { + return catalogData[key]?.config; +} diff --git a/src/main/core/mcp/utils/config-io.test.ts b/src/main/core/mcp/utils/config-io.test.ts new file mode 100644 index 000000000..915e33c08 --- /dev/null +++ b/src/main/core/mcp/utils/config-io.test.ts @@ -0,0 +1,128 @@ +import * as fs from 'fs/promises'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import type { AgentMcpMeta } from '@shared/mcp/types'; +import { readServers, writeServers } from './config-io'; + +vi.mock('fs/promises', () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + mkdir: vi.fn(), +})); + +vi.mock('../../../main/lib/logger', () => ({ + log: { error: vi.fn(), warn: vi.fn(), info: vi.fn() }, +})); + +const mockFs = vi.mocked(fs); + +function makeMeta(overrides: Partial = {}): AgentMcpMeta { + return { + agentId: 'claude', + configPath: '/home/test/.claude.json', + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'passthrough', + ...overrides, + }; +} + +describe('readServers', () => { + beforeEach(() => vi.clearAllMocks()); + + it('reads JSON config and extracts servers at path', async () => { + mockFs.readFile.mockResolvedValue(JSON.stringify({ mcpServers: { s1: { command: 'npx' } } })); + const result = await readServers(makeMeta()); + expect(result).toEqual({ s1: { command: 'npx' } }); + }); + + it('returns empty object when file does not exist', async () => { + const err = new Error('ENOENT') as NodeJS.ErrnoException; + err.code = 'ENOENT'; + 
mockFs.readFile.mockRejectedValue(err); + const result = await readServers(makeMeta()); + expect(result).toEqual({}); + }); + + it('returns empty object when servers path not found in config', async () => { + mockFs.readFile.mockResolvedValue(JSON.stringify({ other: 'stuff' })); + const result = await readServers(makeMeta()); + expect(result).toEqual({}); + }); + + it('handles nested serversPath', async () => { + const meta = makeMeta({ + serversPath: ['settings', 'mcp', 'servers'], + template: { settings: { mcp: { servers: {} } } }, + }); + mockFs.readFile.mockResolvedValue( + JSON.stringify({ settings: { mcp: { servers: { s1: { command: 'x' } } } } }) + ); + const result = await readServers(meta); + expect(result).toEqual({ s1: { command: 'x' } }); + }); + + it('reads TOML config for codex', async () => { + const meta = makeMeta({ + agentId: 'codex', + configPath: '/home/test/.codex/config.toml', + serversPath: ['mcp_servers'], + isToml: true, + adapter: 'codex', + }); + mockFs.readFile.mockResolvedValue( + '[mcp_servers.myserver]\ncommand = "npx"\nargs = ["-y", "foo"]\n' + ); + const result = await readServers(meta); + expect(result.myserver).toBeDefined(); + expect((result.myserver as any).command).toBe('npx'); + }); +}); + +describe('writeServers', () => { + beforeEach(() => vi.clearAllMocks()); + + it('writes JSON config merging servers at path', async () => { + mockFs.readFile.mockResolvedValue(JSON.stringify({ otherKey: 'keep' })); + mockFs.mkdir.mockResolvedValue(undefined); + mockFs.writeFile.mockResolvedValue(undefined); + + await writeServers(makeMeta(), { s1: { command: 'npx' } }); + + const written = JSON.parse(mockFs.writeFile.mock.calls[0][1] as string); + expect(written.mcpServers).toEqual({ s1: { command: 'npx' } }); + expect(written.otherKey).toBe('keep'); + }); + + it('preserves all sibling keys outside the servers path', async () => { + mockFs.readFile.mockResolvedValue( + JSON.stringify({ + theme: 'dark', + mcpServers: { oldServer: { command: 
'old' } },
+        anotherKey: [1, 2, 3],
+      })
+    );
+    mockFs.mkdir.mockResolvedValue(undefined);
+    mockFs.writeFile.mockResolvedValue(undefined);
+
+    await writeServers(makeMeta(), { newServer: { command: 'new' } });
+
+    const written = JSON.parse(mockFs.writeFile.mock.calls[0][1] as string);
+    expect(written.theme).toBe('dark');
+    expect(written.anotherKey).toEqual([1, 2, 3]);
+    expect(written.mcpServers).toEqual({ newServer: { command: 'new' } });
+  });
+
+  it('creates file from template when file does not exist', async () => {
+    const err = new Error('ENOENT') as NodeJS.ErrnoException;
+    err.code = 'ENOENT';
+    mockFs.readFile.mockRejectedValue(err);
+    mockFs.mkdir.mockResolvedValue(undefined);
+    mockFs.writeFile.mockResolvedValue(undefined);
+
+    await writeServers(makeMeta(), { s1: { command: 'npx' } });
+
+    const written = JSON.parse(mockFs.writeFile.mock.calls[0][1] as string);
+    expect(written.mcpServers).toEqual({ s1: { command: 'npx' } });
+  });
+});
diff --git a/src/main/core/mcp/utils/config-io.ts b/src/main/core/mcp/utils/config-io.ts
new file mode 100644
index 000000000..2abf2d6c6
--- /dev/null
+++ b/src/main/core/mcp/utils/config-io.ts
@@ -0,0 +1,117 @@
+import * as fs from 'fs/promises';
+import path from 'path';
+import * as jsoncParser from 'jsonc-parser';
+import * as toml from 'smol-toml';
+import type { AgentMcpMeta, RawServerEntry, ServerMap } from '@shared/mcp/types';
+import { log } from '@main/lib/logger';
+
+// ── Read ───────────────────────────────────────────────────────────────────
+
+// Reads the server map from an agent config file (JSON/JSONC/TOML).
+// Missing file or invalid JSON yields {}; other I/O errors are rethrown.
+export async function readServers(meta: AgentMcpMeta): Promise<ServerMap> {
+  let content: string;
+  try {
+    content = await fs.readFile(meta.configPath, 'utf-8');
+  } catch (err: unknown) {
+    if ((err as NodeJS.ErrnoException).code === 'ENOENT') return {};
+    throw err;
+  }
+
+  if (!content.trim()) return {};
+
+  let parsed: Record<string, unknown>;
+  if (meta.isToml) {
+    parsed = toml.parse(content) as Record<string, unknown>;
+  } else if (meta.configPath.endsWith('.jsonc')) {
+    const errors: jsoncParser.ParseError[] = [];
+    parsed = (jsoncParser.parse(content, errors) ?? {}) as Record<string, unknown>;
+    if (errors.length) {
+      log.warn(`JSONC parse errors in ${meta.configPath}:`, errors);
+    }
+  } else {
+    try {
+      parsed = JSON.parse(content);
+    } catch {
+      log.warn(`Invalid JSON in ${meta.configPath}, returning empty`);
+      return {};
+    }
+  }
+
+  return extractAtPath(parsed, meta.serversPath);
+}
+
+// Walks pathSegments into obj, returning only object-valued entries.
+function extractAtPath(obj: Record<string, unknown>, pathSegments: string[]): ServerMap {
+  let current: unknown = obj;
+  for (const key of pathSegments) {
+    if (typeof current !== 'object' || current === null) return {};
+    current = (current as Record<string, unknown>)[key];
+    if (current === undefined) return {};
+  }
+  if (typeof current !== 'object' || current === null || Array.isArray(current)) return {};
+  const result: ServerMap = {};
+  for (const [k, v] of Object.entries(current as Record<string, unknown>)) {
+    if (typeof v === 'object' && v !== null && !Array.isArray(v)) {
+      result[k] = v as RawServerEntry;
+    }
+  }
+  return result;
+}
+
+// ── Write ──────────────────────────────────────────────────────────────────
+
+// Replaces the server map at meta.serversPath, preserving all sibling keys.
+// JSONC files are edited textually so comments survive.
+export async function writeServers(meta: AgentMcpMeta, servers: ServerMap): Promise<void> {
+  await fs.mkdir(path.dirname(meta.configPath), { recursive: true });
+  let existing: Record<string, unknown>;
+  let existingRaw: string | undefined;
+  try {
+    existingRaw = await fs.readFile(meta.configPath, 'utf-8');
+  } catch (err: unknown) {
+    if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err;
+  }
+
+  if (meta.isToml) {
+    existing = existingRaw
+      ? (toml.parse(existingRaw) as Record<string, unknown>)
+      : { ...meta.template };
+    setAtPath(existing, meta.serversPath, servers);
+    await fs.writeFile(
+      meta.configPath,
+      toml.stringify(existing as Parameters<typeof toml.stringify>[0])
+    );
+    return;
+  }
+
+  if (meta.configPath.endsWith('.jsonc') && existingRaw) {
+    let modified = existingRaw;
+    const edits = jsoncParser.modify(modified, meta.serversPath, servers, {});
+    modified = jsoncParser.applyEdits(modified, edits);
+    await fs.writeFile(meta.configPath, modified);
+    return;
+  }
+
+  if (existingRaw) {
+    try {
+      existing = JSON.parse(existingRaw);
+    } catch {
+      log.warn(`Invalid JSON in ${meta.configPath}, resetting to template`);
+      existing = JSON.parse(JSON.stringify(meta.template));
+    }
+  } else {
+    existing = JSON.parse(JSON.stringify(meta.template));
+  }
+  setAtPath(existing, meta.serversPath, servers);
+  await fs.writeFile(meta.configPath, JSON.stringify(existing, null, 2));
+}
+
+// Sets value at the nested path, creating intermediate objects as needed.
+function setAtPath(obj: Record<string, unknown>, pathSegments: string[], value: unknown): void {
+  let current: Record<string, unknown> = obj;
+  for (let i = 0; i < pathSegments.length - 1; i++) {
+    const key = pathSegments[i];
+    if (typeof current[key] !== 'object' || current[key] === null) {
+      current[key] = {};
+    }
+    current = current[key] as Record<string, unknown>;
+  }
+  if (pathSegments.length > 0) {
+    current[pathSegments[pathSegments.length - 1]] = value;
+  }
+}
diff --git a/src/main/core/mcp/utils/config-paths.test.ts b/src/main/core/mcp/utils/config-paths.test.ts
new file mode 100644
index 000000000..bc4b89ecf
--- /dev/null
+++ b/src/main/core/mcp/utils/config-paths.test.ts
@@ -0,0 +1,94 @@
+import { describe, expect, it, vi } from 'vitest';
+import { getAgentMcpMeta, getAllMcpAgentIds } from './config-paths';
+
+vi.mock('os', () => ({
+  default: { homedir: () => '/home/testuser' },
+  homedir: () => '/home/testuser',
+}));
+
+describe('getAgentMcpMeta', () => {
+  it('returns correct meta for claude', () => {
+    const meta = getAgentMcpMeta('claude');
+    expect(meta).toBeDefined();
expect(meta!.configPath).toBe('/home/testuser/.claude.json'); + expect(meta!.serversPath).toEqual(['mcpServers']); + expect(meta!.adapter).toBe('passthrough'); + expect(meta!.isToml).toBe(false); + }); + + it('returns correct meta for cursor', () => { + const meta = getAgentMcpMeta('cursor'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toBe('/home/testuser/.cursor/mcp.json'); + expect(meta!.adapter).toBe('cursor'); + }); + + it('returns correct meta for codex (toml)', () => { + const meta = getAgentMcpMeta('codex'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toContain('config.toml'); + expect(meta!.isToml).toBe(true); + expect(meta!.adapter).toBe('codex'); + }); + + it('returns correct meta for amp', () => { + const meta = getAgentMcpMeta('amp'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toBe('/home/testuser/.config/amp/settings.json'); + expect(meta!.adapter).toBe('passthrough'); + }); + + it('returns correct meta for gemini', () => { + const meta = getAgentMcpMeta('gemini'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toBe('/home/testuser/.gemini/settings.json'); + expect(meta!.serversPath).toEqual(['mcpServers']); + expect(meta!.adapter).toBe('gemini'); + }); + + it('returns correct meta for qwen (uses gemini adapter)', () => { + const meta = getAgentMcpMeta('qwen'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toBe('/home/testuser/.qwen/settings.json'); + expect(meta!.adapter).toBe('gemini'); + }); + + it('returns correct meta for opencode', () => { + const meta = getAgentMcpMeta('opencode'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toContain('opencode'); + expect(meta!.adapter).toBe('opencode'); + }); + + it('returns correct meta for copilot', () => { + const meta = getAgentMcpMeta('copilot'); + expect(meta).toBeDefined(); + expect(meta!.configPath).toBe('/home/testuser/.copilot/mcp-config.json'); + expect(meta!.adapter).toBe('copilot'); + }); + + it('returns correct meta for 
droid (passthrough)', () => { + const meta = getAgentMcpMeta('droid'); + expect(meta).toBeDefined(); + expect(meta!.adapter).toBe('passthrough'); + }); + + it('returns undefined for unknown agent', () => { + const meta = getAgentMcpMeta('unknown-agent'); + expect(meta).toBeUndefined(); + }); + + it('getAllMcpAgentIds returns all supported agents', () => { + const ids = getAllMcpAgentIds(); + expect(ids).toContain('claude'); + expect(ids).toContain('cursor'); + expect(ids).toContain('codex'); + expect(ids).toContain('amp'); + expect(ids).toContain('gemini'); + expect(ids).toContain('qwen'); + expect(ids).toContain('opencode'); + expect(ids).toContain('copilot'); + expect(ids).toContain('droid'); + expect(ids.length).toBe(9); + }); +}); diff --git a/src/main/core/mcp/utils/config-paths.ts b/src/main/core/mcp/utils/config-paths.ts new file mode 100644 index 000000000..96e0ea3a0 --- /dev/null +++ b/src/main/core/mcp/utils/config-paths.ts @@ -0,0 +1,110 @@ +import os from 'os'; +import path from 'path'; +import type { AdapterType, AgentMcpMeta } from '@shared/mcp/types'; + +interface AgentConfigDef { + pathSegments: string[]; + serversPath: string[]; + template: Record; + isToml: boolean; + adapter: AdapterType; + supportsHttp: boolean; +} + +const AGENT_CONFIGS: Record = { + claude: { + pathSegments: ['.claude.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'passthrough', + supportsHttp: true, + }, + cursor: { + pathSegments: ['.cursor', 'mcp.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'cursor', + supportsHttp: true, + }, + codex: { + pathSegments: ['.codex', 'config.toml'], + serversPath: ['mcp_servers'], + template: { mcp_servers: {} }, + isToml: true, + adapter: 'codex', + supportsHttp: false, + }, + amp: { + pathSegments: ['.config', 'amp', 'settings.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 
'passthrough', + supportsHttp: true, + }, + gemini: { + pathSegments: ['.gemini', 'settings.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'gemini', + supportsHttp: true, + }, + qwen: { + pathSegments: ['.qwen', 'settings.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'gemini', + supportsHttp: true, + }, + opencode: { + pathSegments: ['.config', 'opencode', 'opencode.json'], + serversPath: ['mcp'], + template: { mcp: {} }, + isToml: false, + adapter: 'opencode', + supportsHttp: true, + }, + copilot: { + pathSegments: ['.copilot', 'mcp-config.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'copilot', + supportsHttp: true, + }, + droid: { + pathSegments: ['.droid', 'settings.json'], + serversPath: ['mcpServers'], + template: { mcpServers: {} }, + isToml: false, + adapter: 'passthrough', + supportsHttp: true, + }, +}; + +export function getAgentMcpMeta(agentId: string): AgentMcpMeta | undefined { + const def = AGENT_CONFIGS[agentId]; + if (!def) return undefined; + + const home = os.homedir(); + return { + agentId, + configPath: path.join(home, ...def.pathSegments), + serversPath: def.serversPath, + template: def.template, + isToml: def.isToml, + adapter: def.adapter, + }; +} + +export function getAllMcpAgentIds(): string[] { + return Object.keys(AGENT_CONFIGS); +} + +export function agentSupportsHttp(agentId: string): boolean { + return AGENT_CONFIGS[agentId]?.supportsHttp ?? 
true; +} diff --git a/src/main/core/mcp/utils/conversion.ts b/src/main/core/mcp/utils/conversion.ts new file mode 100644 index 000000000..2211fbadc --- /dev/null +++ b/src/main/core/mcp/utils/conversion.ts @@ -0,0 +1,49 @@ +import type { McpServer, RawServerEntry } from '@shared/mcp/types'; + +export function rawToMcpServer( + name: string, + raw: RawServerEntry, + providers: Set +): McpServer { + const isHttp = raw.type === 'http' || ('url' in raw && !('command' in raw)); + return { + name, + transport: isHttp ? 'http' : 'stdio', + command: typeof raw.command === 'string' ? raw.command : undefined, + args: Array.isArray(raw.args) ? (raw.args as string[]) : undefined, + url: typeof raw.url === 'string' ? raw.url : undefined, + headers: + typeof raw.headers === 'object' && raw.headers !== null + ? (raw.headers as Record) + : undefined, + env: + typeof raw.env === 'object' && raw.env !== null + ? (raw.env as Record) + : undefined, + providers: Array.from(providers), + }; +} + +export function mcpServerToRaw(server: McpServer): RawServerEntry { + const raw: RawServerEntry = {}; + if (server.transport === 'http') { + raw.type = 'http'; + if (server.url) raw.url = server.url; + if (server.headers && Object.keys(server.headers).length) raw.headers = server.headers; + } else { + if (server.command) raw.command = server.command; + if (server.args?.length) raw.args = server.args; + } + if (server.env && Object.keys(server.env).length) raw.env = server.env; + return raw; +} + +export function rawEntryToMcpFields(server: McpServer): Record { + const fields: Record = {}; + if (server.command) fields.command = server.command; + if (server.args?.length) fields.args = server.args; + if (server.url) fields.url = server.url; + if (server.headers) fields.headers = server.headers; + if (server.env) fields.env = server.env; + return fields; +} diff --git a/src/main/core/projects/controller.ts b/src/main/core/projects/controller.ts new file mode 100644 index 000000000..728263a81 --- 
/dev/null +++ b/src/main/core/projects/controller.ts @@ -0,0 +1,23 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { createLocalProject, createSshProject } from './operations/createProject'; +import { deleteProject } from './operations/deleteProject'; +import { getProjectBootstrapStatus } from './operations/getProjectBootstrapStatus'; +import { getLocalProjectByPath, getProjects, getSshProjectByPath } from './operations/getProjects'; +import { getProjectSettings } from './operations/getProjectSettings'; +import { openProject } from './operations/openProject'; +import { renameProject } from './operations/renameProject'; +import { updateProjectSettings } from './operations/updateProjectSettings'; + +export const projectController = createRPCController({ + createLocalProject, + createSshProject, + getProjects, + deleteProject, + renameProject, + getLocalProjectByPath, + getSshProjectByPath, + getProjectSettings, + updateProjectSettings, + getProjectBootstrapStatus, + openProject, +}); diff --git a/src/main/core/projects/impl/_ssh-project-provider.ts b/src/main/core/projects/impl/_ssh-project-provider.ts new file mode 100644 index 000000000..56b099b44 --- /dev/null +++ b/src/main/core/projects/impl/_ssh-project-provider.ts @@ -0,0 +1,231 @@ +import { randomUUID } from 'node:crypto'; +import path from 'node:path'; +import type { SFTPWrapper } from 'ssh2'; +import { Conversation } from '@shared/conversations'; +import { SshProject } from '@shared/projects'; +import type { Task, TaskBootstrapStatus } from '@shared/tasks'; +import { Terminal } from '@shared/terminals'; +import { SshConversationProvider } from '@main/core/conversations/impl/ssh-conversation'; +import { SshFileSystem } from '@main/core/fs/impl/ssh-fs'; +import { GitService } from '@main/core/git/impl/git-service'; +import { openSsh2Pty } from '@main/core/pty/ssh2-pty'; +import type { SshClientProxy } from '@main/core/ssh/ssh-client-proxy'; +import { + sshConnectionManager, + type 
SshConnectionEvent, +} from '@main/core/ssh/ssh-connection-manager'; +import { SshTerminalProvider } from '@main/core/terminals/impl/ssh-terminal-provider'; +import { getSshExec } from '@main/core/utils/exec'; +import { log } from '@main/lib/logger'; +import { ok, type Result } from '@main/lib/result'; +import { quoteShellArg } from '@main/utils/shellEscape'; +import type { + ProjectProvider, + ProvisionTaskError, + TaskProvider, + TeardownTaskError, +} from '../project-provider'; +import { ProjectSettingsProvider } from '../settings/schema'; + +export async function createSshProvider(project: SshProject): Promise { + try { + const proxy = await sshConnectionManager.connect(project.connectionId); + return new SshProjectProvider(project.id, project.connectionId, proxy); + } catch (error) { + log.warn('createSshProvider: SSH connection failed', { + projectId: project.id, + error: error instanceof Error ? error.message : String(error), + }); + throw error; + } +} + +export class SshProjectProvider implements ProjectProvider { + readonly type = 'ssh'; + readonly settings: ProjectSettingsProvider; + + private environments = new Map(); + private agentProviders = new Map(); + private terminalProviders = new Map(); + private cachedSftp: SFTPWrapper | undefined; + + constructor( + private readonly projectId: string, + private readonly connectionId: string, + private readonly proxy: SshClientProxy + ) { + sshConnectionManager.on('connection-event', this.handleConnectionEvent); + } + + private handleConnectionEvent = (evt: SshConnectionEvent): void => { + if (evt.type === 'reconnected' && evt.connectionId === this.connectionId) { + this.rehydrateTerminals().catch((e: unknown) => { + log.error('SshProjectProvider: rehydrateTerminals failed after reconnect', { + projectId: this.projectId, + connectionId: this.connectionId, + error: String(e), + }); + }); + } + }; + + private getSftp(): Promise { + if (this.cachedSftp) return Promise.resolve(this.cachedSftp); + return new 
Promise((resolve, reject) => { + this.proxy.client.sftp((err, sftp) => { + if (err) return reject(err); + this.cachedSftp = sftp; + sftp.on('close', () => { + this.cachedSftp = undefined; + }); + resolve(sftp); + }); + }); + } + + async provisionTask( + task: Task, + conversations: Conversation[], + terminals: Terminal[] + ): Promise> { + const existing = this.environments.get(task.id); + if (existing) return ok(existing); + + const fs = new SshFileSystem(this.proxy, workingDirectory); + const git = new GitService(workingDirectory, getSshExec(this.proxy), fs); + + const agentProvider = new SshConversationProvider(this.projectId, taskId, this.proxy); + const terminalProvider = new SshTerminalProvider(this.projectId, taskId, this.proxy); + + this.agentProviders.set(taskId, agentProvider); + this.terminalProviders.set(taskId, terminalProvider); + + const getPty = async () => { + const command = `cd ${quoteShellArg(workingDirectory)} && exec $SHELL -l`; + const result = await openSsh2Pty(this.proxy.client, { + id: crypto.randomUUID(), + command, + cols: 80, + rows: 24, + }); + if (!result.success) { + throw new Error(`Failed to spawn lifecycle PTY: ${result.error.kind}`); + } + return result.data; + }; + + const taskEnv: TaskProvider = { + taskId, + taskPath: workingDirectory, + taskBranch: task.taskBranch, + sourceBranch: task.sourceBranch, + fs, + git, + agentProvider, + terminals: terminalProvider, + getPty, + }; + + this.environments.set(taskId, taskEnv); + + // Hydrate existing terminal sessions immediately on startup. 
+ await Promise.all( + terminals.map((term) => + terminalProvider + .spawnTerminal({ + projectId: this.projectId, + terminalId: term.id, + taskId, + cwd: workingDirectory, + }) + .catch((e: unknown) => { + log.error('SshEnvironmentProvider: failed to hydrate terminal', { + terminalId: term.id, + error: String(e), + }); + }) + ) + ); + + return ok(taskEnv); + } + + async retryTaskProvision( + task: Task, + conversations: Conversation[], + terminals: Terminal[] + ): Promise> { + this.environments.delete(task.id); + return this.provisionTask(task, conversations, terminals); + } + + getTask(taskId: string): TaskProvider | undefined { + return this.environments.get(taskId); + } + + getTaskBootstrapStatus(taskId: string): TaskBootstrapStatus { + if (this.environments.has(taskId)) return { status: 'ready' }; + return { status: 'not-started' }; + } + + async teardownTask(taskId: string): Promise> { + this.agentProviders.get(taskId)?.destroyAll(); + this.terminalProviders.get(taskId)?.destroyAll(); + this.agentProviders.delete(taskId); + this.terminalProviders.delete(taskId); + this.environments.delete(taskId); + return ok(); + } + + async retryTaskTeardown(taskId: string): Promise> { + return this.teardownTask(taskId); + } + + async removeTaskWorktree(_taskBranch: string): Promise { + // Not implemented for SSH providers + } + + async cleanup(): Promise { + sshConnectionManager.off('connection-event', this.handleConnectionEvent); + await Promise.all(Array.from(this.environments.keys()).map((id) => this.teardownTask(id))); + } + + /** + * Re-spawn all terminal sessions for every active task after an SSH reconnect. + * Agent sessions are intentionally excluded — they must be restarted manually. 
+ */ + private async rehydrateTerminals(): Promise { + await Promise.all( + Array.from(this.terminalProviders.values()).map((provider) => + provider.rehydrate().catch((e: unknown) => { + log.error('SshEnvironmentProvider: rehydrateTerminals failed for a provider', { + error: String(e), + }); + }) + ) + ); + } + + /** + * Upload local files into the task's working directory via SFTP and return + * their remote paths. + */ + async uploadFiles(taskId: string, localPaths: string[]): Promise { + const env = this.environments.get(taskId); + if (!env) throw new Error(`No provisioned environment for task: ${taskId}`); + + const sftp = await this.getSftp(); + const destDir = env.taskPath; + + return Promise.all( + localPaths.map(async (localPath) => { + const remoteName = `${randomUUID()}-${path.basename(localPath)}`; + const remotePath = `${destDir}/${remoteName}`; + await new Promise((resolve, reject) => { + sftp.fastPut(localPath, remotePath, (e) => (e ? reject(e) : resolve())); + }); + return remotePath; + }) + ); + } +} diff --git a/src/main/core/projects/impl/_vm-project-provider.ts b/src/main/core/projects/impl/_vm-project-provider.ts new file mode 100644 index 000000000..e05e3f1a6 --- /dev/null +++ b/src/main/core/projects/impl/_vm-project-provider.ts @@ -0,0 +1,57 @@ +import { Conversation } from '@shared/conversations'; +import { Task } from '@shared/tasks'; +import { Terminal } from '@shared/terminals'; +import { err, ok, type Result } from '@main/lib/result'; +import type { + ProjectProvider, + ProvisionTaskError, + TaskProvider, + TeardownTaskError, +} from '../project-provider'; + +const VM_NOT_IMPLEMENTED: ProvisionTaskError = { + type: 'error', + message: + 'VmEnvironmentProvider is not yet implemented. 
' + + 'Set project.environmentProvider to "local" or "ssh" for now.', +}; + +export class VmEnvironmentProvider implements ProjectProvider { + readonly type = 'vm'; + + async provisionTask( + _task: Task, + _conversations: Conversation[], + _terminals: Terminal[] + ): Promise> { + return err(VM_NOT_IMPLEMENTED); + } + + async retryTaskProvision( + _task: Task, + _conversations: Conversation[], + _terminals: Terminal[] + ): Promise> { + return err(VM_NOT_IMPLEMENTED); + } + + getTask(_taskId: string): TaskProvider | undefined { + return undefined; + } + + async teardownTask(_taskId: string): Promise> { + return ok(); + } + + async retryTaskTeardown(_taskId: string): Promise> { + return ok(); + } + + async removeTaskWorktree(_taskBranch: string): Promise { + // Not implemented for VM providers + } + + async cleanup(): Promise { + // No-op until implemented. + } +} diff --git a/src/main/core/projects/impl/local-project-provider.ts b/src/main/core/projects/impl/local-project-provider.ts new file mode 100644 index 000000000..cf064b130 --- /dev/null +++ b/src/main/core/projects/impl/local-project-provider.ts @@ -0,0 +1,287 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { Conversation } from '@shared/conversations'; +import { LocalProject } from '@shared/projects'; +import { Task, type TaskBootstrapStatus } from '@shared/tasks'; +import { createScriptTerminalId, Terminal } from '@shared/terminals'; +import { LocalConversationProvider } from '@main/core/conversations/impl/local-conversation'; +import { LocalFileSystem } from '@main/core/fs/impl/local-fs'; +import type { FileSystemProvider } from '@main/core/fs/types'; +import { GitService } from '@main/core/git/impl/git-service'; +import { bareRefName } from '@main/core/git/impl/git-utils'; +import type { GitProvider } from '@main/core/git/types'; +import { appSettingsService } from '@main/core/settings/settings-service'; +import { TaskLifecycleService } from '@main/core/tasks/task-lifecycle-service'; 
+import { LocalTerminalProvider } from '@main/core/terminals/impl/local-terminal-provider'; +import { getLocalExec } from '@main/core/utils/exec'; +import { log } from '@main/lib/logger'; +import { err, ok, type Result } from '@main/lib/result'; +import type { + ProjectProvider, + ProvisionTaskError, + TaskProvider, + TeardownTaskError, +} from '../project-provider'; +import { LocalProjectSettingsProvider } from '../settings/project-settings'; +import type { ProjectSettingsProvider } from '../settings/schema'; +import { TimeoutSignal, withTimeout } from '../utils'; +import { WorktreeService } from '../worktrees/worktree-service'; + +const TASK_TIMEOUT_MS = 60_000; + +function toProvisionError(e: unknown): ProvisionTaskError { + if (e instanceof TimeoutSignal) return { type: 'timeout', message: e.message, timeout: e.ms }; + return { type: 'error', message: e instanceof Error ? e.message : String(e) }; +} + +function toTeardownError(e: unknown): TeardownTaskError { + if (e instanceof TimeoutSignal) return { type: 'timeout', message: e.message, timeout: e.ms }; + return { type: 'error', message: e instanceof Error ? 
e.message : String(e) }; +} + +export async function createLocalProvider(project: LocalProject): Promise { + const defaultWorktreeDirectory = (await appSettingsService.get('localProject')) + .defaultWorktreeDirectory; + const worktreePoolPath = path.join(defaultWorktreeDirectory, project.name); + + await fs.promises.mkdir(worktreePoolPath, { recursive: true }); + + return new LocalProjectProvider(project, { worktreePoolPath }); +} + +export class LocalProjectProvider implements ProjectProvider { + readonly type = 'local'; + readonly settings: ProjectSettingsProvider; + readonly git: GitProvider; + readonly fs: FileSystemProvider; + + private tasks = new Map(); + private provisioningTasks = new Map>>(); + private tearingDownTasks = new Map>>(); + private bootstrapErrors = new Map(); + private worktreeService: WorktreeService; + + constructor( + private readonly project: LocalProject, + options: { + worktreePoolPath: string; + } + ) { + this.settings = new LocalProjectSettingsProvider(project.path, bareRefName(project.baseRef)); + this.fs = new LocalFileSystem(project.path); + this.git = new GitService(project.path, getLocalExec(), this.fs); + this.worktreeService = new WorktreeService({ + worktreePoolPath: options.worktreePoolPath, + repoPath: project.path, + projectSettings: this.settings, + exec: getLocalExec(), + }); + } + + async provisionTask( + task: Task, + conversations: Conversation[], + terminals: Terminal[] + ): Promise> { + if (this.tasks.has(task.id)) return ok(this.tasks.get(task.id)!); + if (this.provisioningTasks.has(task.id)) return this.provisioningTasks.get(task.id)!; + + const promise = withTimeout( + this.doProvisionTask(task, conversations, terminals), + TASK_TIMEOUT_MS + ) + .then((taskEnv) => { + this.tasks.set(task.id, taskEnv); + this.provisioningTasks.delete(task.id); + return ok(taskEnv); + }) + .catch((e) => { + const provisionError = toProvisionError(e); + this.bootstrapErrors.set(task.id, provisionError); + 
this.provisioningTasks.delete(task.id); + log.error('LocalProjectProvider: failed to provision task', { + taskId: task.id, + error: String(e), + }); + return err(provisionError); + }); + + this.provisioningTasks.set(task.id, promise); + return promise; + } + + private async doProvisionTask( + task: Task, + conversations: Conversation[], + terminals: Terminal[] + ): Promise { + log.debug('LocalProjectProvider: doProvisionTask START', { taskId: task.id }); + + let workDir: string; + + if (task.taskBranch) { + const existing = await this.worktreeService.getWorktree(task.taskBranch); + if (existing) { + workDir = existing; + } else { + const result = await this.worktreeService.serveWorktree(task.sourceBranch, task.taskBranch); + if (!result.success) { + switch (result.error.type) { + case 'reserve-failed': + throw new Error(`Could not prepare worktree for branch "${task.sourceBranch}"`); + case 'worktree-setup-failed': + throw new Error(`Failed to set up worktree for task`); + } + } + workDir = result.data; + } + } else { + workDir = this.project.path; + } + + const taskFs = new LocalFileSystem(workDir); + const taskGit = new GitService(workDir, getLocalExec(), taskFs); + const conversationProvider = new LocalConversationProvider({ + projectId: this.project.id, + taskPath: workDir, + taskId: task.id, + }); + + const terminalProvider = new LocalTerminalProvider({ + projectId: this.project.id, + taskId: task.id, + taskPath: workDir, + }); + + const taskEnv: TaskProvider = { + taskId: task.id, + taskPath: workDir, + taskBranch: task.taskBranch, + sourceBranch: task.sourceBranch, + fs: taskFs, + git: taskGit, + conversations: conversationProvider, + terminals: terminalProvider, + }; + + const scripts = (await this.settings.get()).scripts; + + const userShell = + process.env.SHELL ?? (process.platform === 'darwin' ? 
'/bin/zsh' : '/bin/bash'); + + if (scripts?.setup) { + const id = await createScriptTerminalId({ + projectId: this.project.id, + taskId: task.id, + type: 'setup', + script: scripts.setup, + }); + terminalProvider.spawnTerminal( + { id, projectId: this.project.id, taskId: task.id, name: '' }, + { cols: 80, rows: 24 }, + { command: userShell, args: ['-c', scripts.setup] } + ); + } + + Promise.all( + terminals.map((term) => + terminalProvider.spawnTerminal(term).catch((e) => { + log.error('LocalEnvironmentProvider: failed to hydrate terminal', { + terminalId: term.id, + error: String(e), + }); + }) + ) + ); + + Promise.all( + conversations.map((conv) => + conversationProvider.startSession(conv).catch((e) => { + log.error('LocalEnvironmentProvider: failed to hydrate conversation', { + conversationId: conv.id, + error: String(e), + }); + }) + ) + ); + + log.debug('LocalProjectProvider: doProvisionTask DONE', { taskId: task.id }); + return taskEnv; + } + + getTask(taskId: string): TaskProvider | undefined { + return this.tasks.get(taskId); + } + + getTaskBootstrapStatus(taskId: string): TaskBootstrapStatus { + if (this.tasks.has(taskId)) return { status: 'ready' }; + if (this.provisioningTasks.has(taskId)) return { status: 'bootstrapping' }; + const bootstrapError = this.bootstrapErrors.get(taskId); + if (bootstrapError) return { status: 'error', message: bootstrapError.message }; + return { status: 'not-started' }; + } + + async teardownTask(taskId: string): Promise> { + if (this.tearingDownTasks.has(taskId)) return this.tearingDownTasks.get(taskId)!; + const task = this.tasks.get(taskId); + if (!task) return ok(); + + const promise = withTimeout(this.doTeardownTask(task), TASK_TIMEOUT_MS) + .then(() => ok()) + .catch((e) => { + log.error('LocalProjectProvider: failed to teardown task', { + taskId, + error: String(e), + }); + return err(toTeardownError(e)); + }) + .finally(() => { + this.tasks.delete(taskId); + this.tearingDownTasks.delete(taskId); + }); + + 
this.tearingDownTasks.set(taskId, promise); + return promise; + } + + private async doTeardownTask(task: TaskProvider): Promise { + const taskLifecycleService = new TaskLifecycleService({ + projectId: this.project.id, + taskId: task.taskId, + taskPath: task.taskPath, + terminals: task.terminals, + }); + + const scripts = (await this.settings.get())?.scripts; + + if (scripts?.teardown) { + taskLifecycleService.runLifecycleScript({ + type: 'teardown', + script: scripts?.teardown, + }); + } + + await task.conversations.destroyAll(); + await task.terminals.destroyAll(); + + if (task.taskBranch) { + await this.removeTaskWorktree(task.taskBranch).catch((e) => { + log.warn('LocalProjectProvider: worktree removal failed', { + taskId: task.taskId, + error: String(e), + }); + }); + } + } + + async removeTaskWorktree(taskBranch: string): Promise { + const worktreePath = await this.worktreeService.getWorktree(taskBranch); + if (worktreePath) { + await this.worktreeService.removeWorktree(worktreePath); + } + } + + async cleanup(): Promise { + await Promise.all(Array.from(this.tasks.keys()).map((id) => this.teardownTask(id))); + } +} diff --git a/src/main/core/projects/operations/createProject.ts b/src/main/core/projects/operations/createProject.ts new file mode 100644 index 000000000..831b79b0a --- /dev/null +++ b/src/main/core/projects/operations/createProject.ts @@ -0,0 +1,107 @@ +import { randomUUID } from 'node:crypto'; +import { sql } from 'drizzle-orm'; +import { type LocalProject, type SshProject } from '@shared/projects'; +import { LocalFileSystem } from '@main/core/fs/impl/local-fs'; +import { SshFileSystem } from '@main/core/fs/impl/ssh-fs'; +import { checkIsValidDirectory } from '@main/core/git/impl/detectGitInfo'; +import { GitService } from '@main/core/git/impl/git-service'; +import { projectManager } from '@main/core/projects/project-manager'; +import { sshConnectionManager } from '@main/core/ssh/ssh-connection-manager'; +import { getLocalExec, getSshExec } from 
'@main/core/utils/exec'; +import { db } from '@main/db/client'; +import { projects } from '@main/db/schema'; + +export type CreateLocalProjectParams = { + id?: string; + path: string; + name: string; +}; + +export async function createLocalProject(params: CreateLocalProjectParams): Promise { + const isValidDirectory = checkIsValidDirectory(params.path); + if (!isValidDirectory) { + throw new Error('Invalid directory'); + } + + const fs = new LocalFileSystem(params.path); + const git = new GitService(params.path, getLocalExec(), fs); + + const gitInfo = await git.detectInfo(); + if (!gitInfo.isGitRepo) { + throw new Error('Invalid git repository'); + } + + const [row] = await db + .insert(projects) + .values({ + id: params.id ?? randomUUID(), + name: params.name, + path: gitInfo.rootPath, + workspaceProvider: 'local', + baseRef: gitInfo.baseRef, + gitRemote: gitInfo.remote ?? null, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .returning(); + + const project = { + type: 'local' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? gitInfo.baseRef, + gitRemote: row.gitRemote ?? undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; + + await projectManager.openProject(project); + + return project; +} + +export type CreateSshProjectParams = { + id?: string; + name: string; + path: string; + connectionId: string; +}; + +export async function createSshProject(params: CreateSshProjectParams): Promise { + const sshProxy = await sshConnectionManager.connect(params.connectionId); + + const sshFs = new SshFileSystem(sshProxy, params.path); + const git = new GitService(params.path, getSshExec(sshProxy), sshFs); + + const gitInfo = await git.detectInfo(); + + const [row] = await db + .insert(projects) + .values({ + id: params.id ?? randomUUID(), + name: params.name, + path: params.path, + workspaceProvider: 'ssh', + sshConnectionId: params.connectionId, + baseRef: gitInfo.baseRef, + gitRemote: gitInfo.remote ?? 
null, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .returning(); + + const project = { + type: 'ssh' as const, + id: row.id, + name: row.name, + path: row.path, + connectionId: params.connectionId, + baseRef: row.baseRef ?? gitInfo.baseRef, + gitRemote: row.gitRemote ?? undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; + + await projectManager.openProject(project); + + return project; +} diff --git a/src/main/core/projects/operations/deleteProject.ts b/src/main/core/projects/operations/deleteProject.ts new file mode 100644 index 000000000..5695fc3be --- /dev/null +++ b/src/main/core/projects/operations/deleteProject.ts @@ -0,0 +1,9 @@ +import { eq } from 'drizzle-orm'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { projects } from '@main/db/schema'; + +export async function deleteProject(id: string): Promise { + await db.delete(projects).where(eq(projects.id, id)); + await projectManager.closeProject(id); +} diff --git a/src/main/core/projects/operations/getProjectBootstrapStatus.ts b/src/main/core/projects/operations/getProjectBootstrapStatus.ts new file mode 100644 index 000000000..e1db0828e --- /dev/null +++ b/src/main/core/projects/operations/getProjectBootstrapStatus.ts @@ -0,0 +1,11 @@ +import type { ProjectBootstrapStatus } from '@shared/projects'; +import { projectManager } from '@main/core/projects/project-manager'; +import { log } from '@main/lib/logger'; + +export async function getProjectBootstrapStatus( + projectId: string +): Promise { + const status = projectManager.getProjectBootstrapStatus(projectId); + log.debug('getProjectBootstrapStatus', { projectId, status: status.status }); + return status; +} diff --git a/src/main/core/projects/operations/getProjectSettings.ts b/src/main/core/projects/operations/getProjectSettings.ts new file mode 100644 index 000000000..df8c04fdb --- /dev/null +++ b/src/main/core/projects/operations/getProjectSettings.ts @@ -0,0 
+1,10 @@ +import { projectManager } from '../project-manager'; +import { ProjectSettings } from '../settings/schema'; + +export async function getProjectSettings(projectId: string): Promise { + const project = projectManager.getProject(projectId); + if (!project) { + throw new Error(`Project ${projectId} not found`); + } + return project.settings.get(); +} diff --git a/src/main/core/projects/operations/getProjects.ts b/src/main/core/projects/operations/getProjects.ts new file mode 100644 index 000000000..c5f26fb8f --- /dev/null +++ b/src/main/core/projects/operations/getProjects.ts @@ -0,0 +1,100 @@ +import { and, desc, eq } from 'drizzle-orm'; +import type { LocalProject, SshProject } from '@shared/projects'; +import { db } from '@main/db/client'; +import { projects } from '@main/db/schema'; + +export async function getProjects(): Promise<(LocalProject | SshProject)[]> { + const rows = await db.select().from(projects).orderBy(desc(projects.updatedAt)); + return rows.map((row) => + row.workspaceProvider === 'local' + ? { + type: 'local' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + } + : { + type: 'ssh' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? undefined, + connectionId: row.sshConnectionId!, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + } + ); +} + +export async function getProjectById( + projectId: string +): Promise { + const [row] = await db.select().from(projects).where(eq(projects.id, projectId)).limit(1); + if (!row) return undefined; + if (row.workspaceProvider === 'local') { + return { + type: 'local' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? 
undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; + } + return { + type: 'ssh' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? undefined, + connectionId: row.sshConnectionId!, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} + +export async function getLocalProjectByPath(path: string): Promise { + const [row] = await db.select().from(projects).where(eq(projects.path, path)).limit(1); + if (!row) return undefined; + return { + type: 'local' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} + +export async function getSshProjectByPath( + path: string, + connectionId: string +): Promise { + const [row] = await db + .select() + .from(projects) + .where(and(eq(projects.path, path), eq(projects.sshConnectionId, connectionId))) + .limit(1); + if (!row) return undefined; + return { + type: 'ssh' as const, + id: row.id, + name: row.name, + path: row.path, + baseRef: row.baseRef ?? 'main', + gitRemote: row.gitRemote ?? 
undefined, + connectionId: row.sshConnectionId!, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/main/core/projects/operations/openProject.ts b/src/main/core/projects/operations/openProject.ts new file mode 100644 index 000000000..8553f785a --- /dev/null +++ b/src/main/core/projects/operations/openProject.ts @@ -0,0 +1,5 @@ +import { projectManager } from '@main/core/projects/project-manager'; + +export async function openProject(projectId: string): Promise { + await projectManager.openProjectById(projectId); +} diff --git a/src/main/core/projects/operations/renameProject.ts b/src/main/core/projects/operations/renameProject.ts new file mode 100644 index 000000000..1866eac12 --- /dev/null +++ b/src/main/core/projects/operations/renameProject.ts @@ -0,0 +1,14 @@ +import { eq } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { projects } from '@main/db/schema'; + +export async function renameProject({ + projectId, + name, +}: { + projectId: string; + name: string; + renameProjectDirectory?: boolean; +}): Promise { + await db.update(projects).set({ name }).where(eq(projects.id, projectId)); +} diff --git a/src/main/core/projects/operations/updateProjectSettings.ts b/src/main/core/projects/operations/updateProjectSettings.ts new file mode 100644 index 000000000..51aab7865 --- /dev/null +++ b/src/main/core/projects/operations/updateProjectSettings.ts @@ -0,0 +1,13 @@ +import { projectManager } from '../project-manager'; +import { ProjectSettings } from '../settings/schema'; + +export async function updateProjectSettings( + projectId: string, + settings: ProjectSettings +): Promise { + const project = projectManager.getProject(projectId); + if (!project) { + throw new Error(`Project ${projectId} not found`); + } + await project.settings.update(settings); +} diff --git a/src/main/core/projects/project-manager.ts b/src/main/core/projects/project-manager.ts new file mode 100644 index 000000000..5db8d4000 --- /dev/null +++ 
b/src/main/core/projects/project-manager.ts @@ -0,0 +1,134 @@ +import type { LocalProject, ProjectBootstrapStatus, SshProject } from '@shared/projects'; +import { log } from '@main/lib/logger'; +import { err, ok, type Result } from '@main/lib/result'; +import { getProjectById, getProjects } from '../projects/operations/getProjects'; +import { createLocalProvider } from './impl/local-project-provider'; +import type { ProjectProvider } from './project-provider'; +import { TimeoutSignal, withTimeout } from './utils'; + +const PROVIDER_TIMEOUT_MS = 60_000; + +type ProviderError = { + type: 'error'; + message: string; +}; + +type TimeoutError = { + type: 'timeout'; + message: string; + timeout: number; +}; + +type InitializeProviderError = TimeoutError | ProviderError; +type TeardownProviderError = TimeoutError | ProviderError; + +function toInitError(e: unknown): InitializeProviderError { + if (e instanceof TimeoutSignal) return { type: 'timeout', message: e.message, timeout: e.ms }; + return { type: 'error', message: e instanceof Error ? e.message : String(e) }; +} + +function toTeardownError(e: unknown): TeardownProviderError { + if (e instanceof TimeoutSignal) return { type: 'timeout', message: e.message, timeout: e.ms }; + return { type: 'error', message: e instanceof Error ? 
e.message : String(e) }; +} + +class ProjectManager { + private initializingProviders = new Map< + string, + Promise> + >(); + private providers = new Map(); + private tearingDownProviders = new Map>>(); + private initializationErrors = new Map(); + + async initialize(): Promise { + const allProjects = await getProjects(); + + await Promise.allSettled( + allProjects.map(async (project) => { + await this.openProject(project); + }) + ); + } + + async openProject( + project: LocalProject | SshProject + ): Promise> { + if (this.providers.has(project.id)) return ok(this.providers.get(project.id)!); + if (this.initializingProviders.has(project.id)) + return this.initializingProviders.get(project.id)!; + + const promise = withTimeout(createProvider(project), PROVIDER_TIMEOUT_MS) + .then((provider) => { + this.providers.set(project.id, provider); + this.initializingProviders.delete(project.id); + return ok(provider); + }) + .catch((e) => { + const initError = toInitError(e); + this.initializationErrors.set(project.id, initError); + this.initializingProviders.delete(project.id); + log.error('ProjectManager: error during project initialization', { + projectId: project.id, + ...initError, + }); + return err(initError); + }); + + this.initializingProviders.set(project.id, promise); + return promise; + } + + async closeProject(projectId: string): Promise> { + if (this.tearingDownProviders.has(projectId)) return this.tearingDownProviders.get(projectId)!; + const provider = this.providers.get(projectId); + if (!provider) return ok(); + + const promise = withTimeout(provider.cleanup(), PROVIDER_TIMEOUT_MS) + .then(() => ok()) + .catch((e) => { + const error = toTeardownError(e); + log.error('ProjectManager: error during project teardown', { projectId, ...error }); + return err(error); + }) + .finally(() => { + this.providers.delete(projectId); + this.tearingDownProviders.delete(projectId); + }); + + this.tearingDownProviders.set(projectId, promise); + return promise; + } + + 
getProject(projectId: string): ProjectProvider | undefined { + return this.providers.get(projectId); + } + + getProjectBootstrapStatus(projectId: string): ProjectBootstrapStatus { + if (this.providers.has(projectId)) return { status: 'ready' }; + if (this.initializingProviders.has(projectId)) return { status: 'bootstrapping' }; + const initError = this.initializationErrors.get(projectId); + if (initError) return { status: 'error', message: initError.message }; + return { status: 'not-started' }; + } + + async openProjectById(projectId: string): Promise { + const project = await getProjectById(projectId); + if (!project) throw new Error(`Project not found: ${projectId}`); + await this.openProject(project); + } + + async shutdown(): Promise { + const ids = Array.from(this.providers.keys()); + await Promise.allSettled(ids.map((id) => this.closeProject(id))); + } +} + +async function createProvider(project: LocalProject | SshProject): Promise { + if (project.type === 'ssh') { + throw new Error('SSH projects are not yet supported'); + } + return createLocalProvider(project); +} + +export const projectManager = new ProjectManager(); diff --git a/src/main/core/projects/project-provider.ts b/src/main/core/projects/project-provider.ts new file mode 100644 index 000000000..945c32f4e --- /dev/null +++ b/src/main/core/projects/project-provider.ts @@ -0,0 +1,51 @@ +import { Conversation } from '@shared/conversations'; +import { Task, TaskBootstrapStatus } from '@shared/tasks'; +import { Terminal } from '@shared/terminals'; +import type { FileSystemProvider } from '@main/core/fs/types'; +import type { Result } from '@main/lib/result'; +import { ConversationProvider } from '../conversations/types'; +import type { GitProvider } from '../git/types'; +import { TerminalProvider } from '../terminals/terminal-provider'; +import { ProjectSettingsProvider } from './settings/schema'; + +export type BaseTaskProvisionArgs = { + taskId: string; + conversations: Conversation[]; + terminals: 
Terminal[]; +}; + +export type ProvisionTaskError = + | { type: 'timeout'; message: string; timeout: number } + | { type: 'error'; message: string }; + +export type TeardownTaskError = + | { type: 'timeout'; message: string; timeout: number } + | { type: 'error'; message: string }; + +export interface TaskProvider { + readonly taskId: string; + readonly taskPath: string; + readonly taskBranch: string | undefined; + readonly sourceBranch: string; + readonly fs: FileSystemProvider; + readonly git: GitProvider; + readonly conversations: ConversationProvider; + readonly terminals: TerminalProvider; +} + +export interface ProjectProvider { + readonly type: string; + readonly settings: ProjectSettingsProvider; + readonly git: GitProvider; + readonly fs: FileSystemProvider; + provisionTask( + args: Task, + conversations: Conversation[], + terminals: Terminal[] + ): Promise>; + getTask(taskId: string): TaskProvider | undefined; + getTaskBootstrapStatus(taskId: string): TaskBootstrapStatus; + teardownTask(taskId: string): Promise>; + removeTaskWorktree(taskBranch: string): Promise; + cleanup(): Promise; +} diff --git a/src/main/core/projects/settings/project-settings.ts b/src/main/core/projects/settings/project-settings.ts new file mode 100644 index 000000000..9720dbd39 --- /dev/null +++ b/src/main/core/projects/settings/project-settings.ts @@ -0,0 +1,104 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { SshFileSystem } from '@main/core/fs/impl/ssh-fs'; +import { appSettingsService } from '@main/core/settings/settings-service'; +import { ProjectSettings, ProjectSettingsProvider, projectSettingsSchema } from './schema'; + +export class LocalProjectSettingsProvider implements ProjectSettingsProvider { + constructor( + private readonly projectPath: string, + private readonly defaultBranchFallback: string = 'main' + ) {} + + async get(): Promise { + const settingsPath = path.join(this.projectPath, '.emdash.json'); + if (!fs.existsSync(settingsPath)) { + 
const defaultSettings = projectSettingsSchema.parse(JSON.parse('{}')); + return defaultSettings; + } + const settings = projectSettingsSchema.parse(JSON.parse(fs.readFileSync(settingsPath, 'utf8'))); + return settings; + } + + async update(settings: ProjectSettings): Promise { + const settingsPath = path.join(this.projectPath, '.emdash.json'); + fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2)); + } + + async ensure(): Promise { + const settingsPath = path.join(this.projectPath, '.emdash.json'); + if (!fs.existsSync(settingsPath)) { + const defaultSettings = projectSettingsSchema.parse(JSON.parse('{}')); + fs.writeFileSync(settingsPath, JSON.stringify(defaultSettings, null, 2)); + } + } + + async getDefaultBranch(): Promise { + const settings = await this.get(); + return settings.defaultBranch ?? this.defaultBranchFallback; + } + + async getRemote(): Promise { + const settings = await this.get(); + return settings.remote ?? 'origin'; + } + + async getWorktreeDirectory(): Promise { + const settings = await this.get(); + if (settings.worktreeDirectory) { + return settings.worktreeDirectory; + } + return (await appSettingsService.get('localProject')).defaultWorktreeDirectory; + } +} + +export class SshProjectSettingsProvider implements ProjectSettingsProvider { + constructor( + private readonly fs: SshFileSystem, + private readonly defaultBranchFallback: string = 'main' + ) {} + + async get(): Promise { + const exists = await this.fs.exists('.emdash.json'); + if (!exists) { + const defaultSettings = projectSettingsSchema.parse(JSON.parse('{}')); + await this.fs.write('.emdash.json', JSON.stringify(defaultSettings, null, 2)); + return defaultSettings; + } + + const settings = projectSettingsSchema.parse( + JSON.parse((await this.fs.read('.emdash.json')).content) + ); + return settings; + } + + async update(settings: ProjectSettings): Promise { + await this.fs.write('.emdash.json', JSON.stringify(settings, null, 2)); + } + + async ensure(): Promise { + 
const exists = await this.fs.exists('.emdash.json'); + if (!exists) { + const defaultSettings = projectSettingsSchema.parse(JSON.parse('{}')); + await this.fs.write('.emdash.json', JSON.stringify(defaultSettings, null, 2)); + } + } + + async getDefaultBranch(): Promise { + const settings = await this.get(); + return settings.defaultBranch ?? this.defaultBranchFallback; + } + + async getRemote(): Promise { + const settings = await this.get(); + return settings.remote ?? 'origin'; + } + + async getWorktreeDirectory(): Promise { + const settings = await this.get(); + if (settings.worktreeDirectory) { + return settings.worktreeDirectory; + } + return path.join('emdash', 'worktrees'); + } +} diff --git a/src/main/core/projects/settings/schema.ts b/src/main/core/projects/settings/schema.ts new file mode 100644 index 000000000..68a0758eb --- /dev/null +++ b/src/main/core/projects/settings/schema.ts @@ -0,0 +1,38 @@ +import z from 'zod'; + +export const projectSettingsSchema = z.object({ + preservePatterns: z + .array(z.string()) + .optional() + .default([ + '.env', + '.env.keys', + '.env.local', + '.env.*.local', + '.envrc', + 'docker-compose.override.yml', + ]), + shellSetup: z.string().optional(), + tmux: z.boolean().optional(), + scripts: z + .object({ + setup: z.string().optional(), + run: z.string().optional(), + teardown: z.string().optional(), + }) + .optional(), + worktreeDirectory: z.string().optional(), + defaultBranch: z.string().optional(), + remote: z.string().optional(), +}); + +export type ProjectSettings = z.infer; + +export interface ProjectSettingsProvider { + getDefaultBranch(): Promise; + getRemote(): Promise; + getWorktreeDirectory(): Promise; + get(): Promise; + update(settings: ProjectSettings): Promise; + ensure(): Promise; +} diff --git a/src/main/core/projects/utils.ts b/src/main/core/projects/utils.ts new file mode 100644 index 000000000..a79c79f56 --- /dev/null +++ b/src/main/core/projects/utils.ts @@ -0,0 +1,19 @@ +import { projectManager } 
from './project-manager'; + +export function resolveTask(projectId: string, taskId: string) { + return projectManager.getProject(projectId)?.getTask(taskId) ?? null; +} + +export class TimeoutSignal extends Error { + constructor(readonly ms: number) { + super(`Operation timed out after ${ms}ms`); + } +} + +export function withTimeout(promise: Promise, ms: number): Promise { + let timer: ReturnType; + const timeout = new Promise((_, reject) => { + timer = setTimeout(() => reject(new TimeoutSignal(ms)), ms); + }); + return Promise.race([promise, timeout]).finally(() => clearTimeout(timer)); +} diff --git a/src/main/core/projects/worktrees/utils.ts b/src/main/core/projects/worktrees/utils.ts new file mode 100644 index 000000000..0312bfa1e --- /dev/null +++ b/src/main/core/projects/worktrees/utils.ts @@ -0,0 +1,35 @@ +import fs from 'fs'; +import path from 'path'; +import { SshFileSystem } from '@main/core/fs/impl/ssh-fs'; + +export const ensureLocalWorktreeDirectory = ({ + directory, + projectName, +}: { + directory?: string; + projectName: string; +}): string => { + directory = directory ?? path.join('emdash', 'projects', 'worktrees', projectName); + if (!fs.existsSync(directory)) { + fs.mkdirSync(directory, { recursive: true }); + } + return directory; +}; + +export const ensureSshWorktreeDirectory = async ({ + directory, + projectName, + rootFs, +}: { + directory?: string; + projectName: string; + rootFs: SshFileSystem; +}): Promise => { + directory = directory ?? 
path.join('emdash', 'projects', 'worktrees', projectName); + + const exists = await rootFs.exists(directory); + if (!exists) { + await rootFs.mkdir(directory, { recursive: true }); + } + return directory; +}; diff --git a/src/main/core/projects/worktrees/worktree-service.test.ts b/src/main/core/projects/worktrees/worktree-service.test.ts new file mode 100644 index 000000000..81e9e35c5 --- /dev/null +++ b/src/main/core/projects/worktrees/worktree-service.test.ts @@ -0,0 +1,446 @@ +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { getLocalExec, type ExecFn } from '@main/core/utils/exec'; +import type { ProjectSettingsProvider } from '../settings/schema'; +import { WorktreeService } from './worktree-service'; + +// --------------------------------------------------------------------------- +// Test helpers +// --------------------------------------------------------------------------- + +async function initRepo(dir: string, exec: ExecFn): Promise { + await exec('git', ['init'], { cwd: dir }); + // Force "main" as the initial branch regardless of system git config. + await exec('git', ['symbolic-ref', 'HEAD', 'refs/heads/main'], { cwd: dir }); + await exec('git', ['config', 'user.email', 'test@test.com'], { cwd: dir }); + await exec('git', ['config', 'user.name', 'Test'], { cwd: dir }); + // A commit is required before worktrees or additional branches can be created. 
+ await exec('git', ['commit', '--allow-empty', '-m', 'init'], { cwd: dir }); +} + +function makeSettings( + preservePatterns: string[] = [], + defaultBranch = 'main' +): ProjectSettingsProvider { + return { + get: async () => ({ preservePatterns }), + update: async () => {}, + ensure: async () => {}, + getWorktreeDirectory: async () => '', + getDefaultBranch: async () => defaultBranch, + getRemote: async () => 'origin', + } as ProjectSettingsProvider; +} + +async function listWorktrees(repoDir: string, exec: ExecFn): Promise { + const { stdout } = await exec('git', ['worktree', 'list', '--porcelain'], { cwd: repoDir }); + return stdout + .trim() + .split(/\n\n/) + .filter(Boolean) + .map((block) => { + const line = block.split('\n').find((l) => l.startsWith('worktree ')); + return line ? line.slice('worktree '.length) : ''; + }) + .filter(Boolean); +} + +// --------------------------------------------------------------------------- +// Suite +// --------------------------------------------------------------------------- + +describe('WorktreeService', () => { + let repoDir: string; + let poolDir: string; + let exec: ExecFn; + + beforeEach(async () => { + repoDir = fs.mkdtempSync(path.join(os.tmpdir(), 'wt-repo-')); + poolDir = fs.mkdtempSync(path.join(os.tmpdir(), 'wt-pool-')); + exec = getLocalExec(); + await initRepo(repoDir, exec); + }); + + afterEach(() => { + fs.rmSync(repoDir, { recursive: true, force: true }); + fs.rmSync(poolDir, { recursive: true, force: true }); + }); + + function makeService( + overrides: Partial<{ + worktreePoolPath: string; + repoPath: string; + exec: ExecFn; + projectSettings: ProjectSettingsProvider; + }> = {} + ): WorktreeService { + return new WorktreeService({ + worktreePoolPath: poolDir, + repoPath: repoDir, + exec, + projectSettings: makeSettings(), + ...overrides, + }); + } + + // ------------------------------------------------------------------------- + // ensureReserve / createReserve + // 
------------------------------------------------------------------------- + + describe('ensureReserve', () => { + it('creates reserve worktree and branch from scratch', async () => { + const svc = makeService(); + await svc.ensureReserve('main'); + + const reservePath = path.join(poolDir, '_reserve-main'); + expect(fs.existsSync(reservePath)).toBe(true); + + // The reserve branch must be registered in git + const { stdout } = await exec('git', ['rev-parse', '--verify', '_reserve-main'], { + cwd: repoDir, + }); + expect(stdout.trim()).toBeTruthy(); + }); + + it('is idempotent — calling twice does not throw or duplicate the worktree', async () => { + const svc = makeService(); + await svc.ensureReserve('main'); + await svc.ensureReserve('main'); // second call — must be a no-op + + const worktrees = await listWorktrees(repoDir, exec); + const reserveEntries = worktrees.filter((p) => p.endsWith('_reserve-main')); + expect(reserveEntries).toHaveLength(1); + }); + + it('deduplicates concurrent calls via reserveInProgress map', async () => { + const svc = makeService(); + // Fire three simultaneous calls (the constructor also fires one in the background). + await Promise.all([ + svc.ensureReserve('main'), + svc.ensureReserve('main'), + svc.ensureReserve('main'), + ]); + + const worktrees = await listWorktrees(repoDir, exec); + const reserveEntries = worktrees.filter((p) => p.endsWith('_reserve-main')); + expect(reserveEntries).toHaveLength(1); + }); + + it('re-adds worktree when branch exists but directory was deleted (stale branch)', async () => { + // Regression: previously failed with "branch already exists" because createReserve + // attempted `worktree add -b` even when the branch was already present. + const svc = makeService(); + await svc.ensureReserve('main'); + + const reservePath = path.join(poolDir, '_reserve-main'); + // Simulate accidental directory deletion without a proper worktree remove. 
+ fs.rmSync(reservePath, { recursive: true, force: true }); + // Prune stale git worktree entry so git no longer considers the branch checked out. + await exec('git', ['worktree', 'prune'], { cwd: repoDir }); + + // Now the branch exists but has no live worktree — ensureReserve must recover. + await svc.ensureReserve('main'); + expect(fs.existsSync(reservePath)).toBe(true); + }); + + it('migrates reserve from old pool path to new pool path (pool path changed)', async () => { + // Regression: previously failed with "already checked out at " after the + // worktree pool was reorganised to include a per-project subdirectory. + const poolA = fs.mkdtempSync(path.join(os.tmpdir(), 'wt-pool-a-')); + const poolB = fs.mkdtempSync(path.join(os.tmpdir(), 'wt-pool-b-')); + + try { + const svc1 = makeService({ worktreePoolPath: poolA }); + await svc1.ensureReserve('main'); + expect(fs.existsSync(path.join(poolA, '_reserve-main'))).toBe(true); + + // New service uses poolB — must detect the branch is checked out at poolA + // and move it rather than trying to create a fresh worktree. + const svc2 = makeService({ worktreePoolPath: poolB }); + await svc2.ensureReserve('main'); + + expect(fs.existsSync(path.join(poolB, '_reserve-main'))).toBe(true); + expect(fs.existsSync(path.join(poolA, '_reserve-main'))).toBe(false); + } finally { + fs.rmSync(poolA, { recursive: true, force: true }); + fs.rmSync(poolB, { recursive: true, force: true }); + } + }); + + it('recovers from a stale directory that exists on disk but is not a registered git worktree', async () => { + // Regression: previously claimReserve would call `git worktree move` on the stale + // directory and fail with "is not a working tree". + const svc = makeService(); + await svc.ensureReserve('main'); + + const reservePath = path.join(poolDir, '_reserve-main'); + // Remove the worktree registration from git (keeps the branch, removes tracking). 
+ await exec('git', ['worktree', 'remove', '--force', reservePath], { cwd: repoDir }); + // Leave behind an empty directory to simulate a stale/orphaned path. + await fs.promises.mkdir(reservePath, { recursive: true }); + + // ensureReserve must detect the stale directory, clean it up, and recreate properly. + await svc.ensureReserve('main'); + + // The directory must now be a valid, git-tracked worktree. + const { stdout } = await exec('git', ['rev-parse', '--git-dir'], { cwd: reservePath }); + expect(stdout.trim()).toBeTruthy(); + }); + + it('handles a sourceBranch that contains slashes (e.g. feature/main)', async () => { + // Slashes in branch names are slugified to dashes so the reserve lives as a + // flat directory under the pool rather than creating nested subdirectories. + await exec('git', ['branch', 'feature/main'], { cwd: repoDir }); + + const svc = makeService(); + await svc.ensureReserve('feature/main'); + + // Reserve lives at poolDir/_reserve-feature-main (slash → dash, flat path) + const reservePath = path.join(poolDir, '_reserve-feature-main'); + expect(fs.existsSync(reservePath)).toBe(true); + }); + + it('serializes concurrent ensureReserve calls for different branches', async () => { + // Regression: two simultaneous git worktree add commands on the same repo can + // race on git ref/lock files and fail. The gitOpQueue must serialize them. 
+ await exec('git', ['branch', 'other'], { cwd: repoDir }); + + const svc = makeService(); + await Promise.all([svc.ensureReserve('main'), svc.ensureReserve('other')]); + + expect(fs.existsSync(path.join(poolDir, '_reserve-main'))).toBe(true); + expect(fs.existsSync(path.join(poolDir, '_reserve-other'))).toBe(true); + }); + }); + + // ------------------------------------------------------------------------- + // serveWorktree + // ------------------------------------------------------------------------- + + describe('serveWorktree', () => { + it('serves the reserve and returns the target worktree path', async () => { + await exec('git', ['branch', 'emdash/mytask-abc'], { cwd: repoDir }); + const svc = makeService(); + const result = await svc.serveWorktree('main', 'emdash/mytask-abc'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(result.data).toBe(path.join(poolDir, 'emdash', 'mytask-abc')); + expect(fs.existsSync(result.data)).toBe(true); + }); + + it('creates parent directory for branch names with slashes', async () => { + // Regression: git worktree move does not create intermediate directories. + // The parent of poolDir/emdash/mytask-xyz must be created before the move. + await exec('git', ['branch', 'emdash/mytask-xyz'], { cwd: repoDir }); + const svc = makeService(); + const result = await svc.serveWorktree('main', 'emdash/mytask-xyz'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(fs.existsSync(result.data)).toBe(true); + expect(fs.existsSync(path.join(poolDir, 'emdash'))).toBe(true); + }); + + it('auto-creates reserve when called without a prior ensureReserve', async () => { + await exec('git', ['branch', 'task/demand-test'], { cwd: repoDir }); + const svc = makeService(); + // Do NOT call ensureReserve — serveWorktree must create the reserve on demand. 
+ const result = await svc.serveWorktree('main', 'task/demand-test'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(fs.existsSync(result.data)).toBe(true); + }); + + it('replenishes the default-branch reserve after serving it', async () => { + await exec('git', ['branch', 'task/replenish-test'], { cwd: repoDir }); + const svc = makeService(); + await svc.serveWorktree('main', 'task/replenish-test'); + + // The background replenishment is fire-and-forget; calling ensureReserve + // joins the in-progress promise or finds it already complete. + await svc.ensureReserve('main'); + + expect(fs.existsSync(path.join(poolDir, '_reserve-main'))).toBe(true); + }); + + it('replenishes reserve when sourceBranch matches the defaultBranch (bare name equality check)', async () => { + // Regression: when defaultBranch was "origin/main" but sourceBranch was "main" + // the equality check (sourceBranch === this.defaultBranch) never fired. + // With the fix, defaultBranch is always the bare name so replenishment works. + await exec('git', ['branch', 'task/replenish-equality'], { cwd: repoDir }); + const svc = makeService({ projectSettings: makeSettings([], 'main') }); + await svc.serveWorktree('main', 'task/replenish-equality'); + + // Join any in-progress background replenishment. + await svc.ensureReserve('main'); + + expect(fs.existsSync(path.join(poolDir, '_reserve-main'))).toBe(true); + }); + + it('does not replenish reserve when sourceBranch is not the default branch', async () => { + await exec('git', ['branch', 'other'], { cwd: repoDir }); + await exec('git', ['branch', 'task/other-task', 'other'], { cwd: repoDir }); + + const svc = makeService(); + await svc.ensureReserve('other'); + await svc.serveWorktree('other', 'task/other-task'); + + // No automatic replenishment for non-default branches. + // The reserve path should not exist at this point. 
+ await new Promise((r) => setTimeout(r, 50)); // give any background work a moment + expect(fs.existsSync(path.join(poolDir, '_reserve-other'))).toBe(false); + }); + + it('copies preserved files into the served worktree', async () => { + fs.writeFileSync(path.join(repoDir, '.env'), 'SECRET=abc'); + await exec('git', ['branch', 'task/env-test'], { cwd: repoDir }); + + const svc = makeService({ projectSettings: makeSettings(['.env']) }); + const result = await svc.serveWorktree('main', 'task/env-test'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(fs.existsSync(path.join(result.data, '.env'))).toBe(true); + expect(fs.readFileSync(path.join(result.data, '.env'), 'utf8')).toBe('SECRET=abc'); + }); + + it('copies preserved files in nested subdirectories', async () => { + fs.mkdirSync(path.join(repoDir, '.claude'), { recursive: true }); + fs.writeFileSync(path.join(repoDir, '.claude', 'settings.json'), '{}'); + await exec('git', ['branch', 'task/nested-env'], { cwd: repoDir }); + + const svc = makeService({ projectSettings: makeSettings(['.claude/**']) }); + const result = await svc.serveWorktree('main', 'task/nested-env'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(fs.existsSync(path.join(result.data, '.claude', 'settings.json'))).toBe(true); + }); + + it('completes successfully when preserve pattern matches no files', async () => { + await exec('git', ['branch', 'task/no-match'], { cwd: repoDir }); + const svc = makeService({ projectSettings: makeSettings(['.env.nonexistent']) }); + const result = await svc.serveWorktree('main', 'task/no-match'); + expect(result.success).toBe(true); + }); + + it('recovers when reserve directory is stale (exists on disk but not tracked by git)', async () => { + // Regression: previously failed with "fatal: '..._reserve-main' is not a working tree" + // because serveWorktree only checked fs.existsSync and 
skipped ensureReserve entirely. + await exec('git', ['branch', 'task/stale-recover'], { cwd: repoDir }); + const svc = makeService(); + await svc.ensureReserve('main'); + + const reservePath = path.join(poolDir, '_reserve-main'); + // Unregister the worktree from git and recreate an empty stale directory. + await exec('git', ['worktree', 'remove', '--force', reservePath], { cwd: repoDir }); + await fs.promises.mkdir(reservePath, { recursive: true }); + + // serveWorktree must detect the stale reserve, rebuild it, and complete normally. + const result = await svc.serveWorktree('main', 'task/stale-recover'); + + expect(result.success).toBe(true); + if (!result.success) throw new Error('expected success'); + expect(fs.existsSync(result.data)).toBe(true); + }); + + it('returns existing path when called twice with the same branchName (duplicate serve)', async () => { + // Regression: previously attempted a second git worktree move which failed + // because the target path was already occupied. 
+ await exec('git', ['branch', 'task/dup'], { cwd: repoDir }); + const svc = makeService(); + const first = await svc.serveWorktree('main', 'task/dup'); + const second = await svc.serveWorktree('main', 'task/dup'); + + expect(first.success).toBe(true); + expect(second.success).toBe(true); + if (!first.success || !second.success) throw new Error('expected success'); + expect(first.data).toBe(second.data); + expect(fs.existsSync(first.data)).toBe(true); + }); + + it('returns reserve-failed when sourceBranch does not exist', async () => { + await exec('git', ['branch', 'task/no-source'], { cwd: repoDir }); + const svc = makeService(); + const result = await svc.serveWorktree('nonexistent-branch', 'task/no-source'); + expect(result.success).toBe(false); + if (result.success) throw new Error('expected failure'); + expect(result.error.type).toBe('reserve-failed'); + }); + }); + + // ------------------------------------------------------------------------- + // getWorktree + // ------------------------------------------------------------------------- + + describe('getWorktree', () => { + it('returns the path when the worktree directory exists', async () => { + const svc = makeService(); + await svc.ensureReserve('main'); + + const result = await svc.getWorktree('_reserve-main'); + expect(result).toBe(path.join(poolDir, '_reserve-main')); + }); + + it('returns undefined for a branch name with no matching directory', async () => { + const svc = makeService(); + const result = await svc.getWorktree('nonexistent-branch'); + expect(result).toBeUndefined(); + }); + + it('returns path for a served worktree', async () => { + await exec('git', ['branch', 'emdash/my-task'], { cwd: repoDir }); + const svc = makeService(); + await svc.serveWorktree('main', 'emdash/my-task'); + + const result = await svc.getWorktree('emdash/my-task'); + expect(result).toBe(path.join(poolDir, 'emdash', 'my-task')); + }); + + it('finds worktree by branch name when directory has a different name (renamed 
branch)', async () => { + await exec('git', ['branch', 'emdash/old-name'], { cwd: repoDir }); + const svc = makeService(); + await svc.serveWorktree('main', 'emdash/old-name'); + + const oldPath = path.join(poolDir, 'emdash', 'old-name'); + expect(fs.existsSync(oldPath)).toBe(true); + + await exec('git', ['branch', '-m', 'emdash/old-name', 'emdash/new-name'], { + cwd: oldPath, + }); + + const result = await svc.getWorktree('emdash/new-name'); + expect(result).toBe(fs.realpathSync(oldPath)); + }); + }); + + // ------------------------------------------------------------------------- + // removeWorktree + // ------------------------------------------------------------------------- + + describe('removeWorktree', () => { + it('removes the worktree directory and prunes the git entry', async () => { + const svc = makeService(); + await svc.ensureReserve('main'); + + const reservePath = path.join(poolDir, '_reserve-main'); + expect(fs.existsSync(reservePath)).toBe(true); + + await svc.removeWorktree(reservePath); + + expect(fs.existsSync(reservePath)).toBe(false); + + const worktrees = await listWorktrees(repoDir, exec); + expect(worktrees.some((p) => p.endsWith('_reserve-main'))).toBe(false); + }); + + it('does not throw when the path does not exist', async () => { + const svc = makeService(); + await expect(svc.removeWorktree(path.join(poolDir, 'nonexistent'))).resolves.not.toThrow(); + }); + }); +}); diff --git a/src/main/core/projects/worktrees/worktree-service.ts b/src/main/core/projects/worktrees/worktree-service.ts new file mode 100644 index 000000000..68bccc46e --- /dev/null +++ b/src/main/core/projects/worktrees/worktree-service.ts @@ -0,0 +1,244 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { glob } from 'glob'; +import { ExecFn } from '@main/core/utils/exec'; +import { log } from '@main/lib/logger'; +import { err, ok, Result } from '@main/lib/result'; +import { ProjectSettingsProvider } from '../settings/schema'; + +export type 
ServeWorktreeError =
  | { type: 'reserve-failed'; sourceBranch: string; cause: unknown }
  | { type: 'worktree-setup-failed'; cause: unknown };

/**
 * Derive a stable, filesystem-safe reserve identifier for a source branch.
 * Slashes are flattened to dashes so the reserve always lives as a flat
 * directory under the pool (e.g. "feature/foo" → "_reserve-feature-foo").
 */
function createStableWorktreeReserveId(sourceBranch: string): string {
  return `_reserve-${sourceBranch.replace(/\//g, '-')}`;
}

/**
 * Maintains a pool of git worktrees under `worktreePoolPath` for one
 * repository, including a pre-created "reserve" worktree per source branch so
 * that serving a task worktree is a fast `git worktree move` instead of a
 * slow `git worktree add`.
 *
 * All mutating git operations are funnelled through a single promise queue
 * (`gitOpQueue`) because concurrent `git worktree` commands can corrupt each
 * other's bookkeeping.
 */
export class WorktreeService {
  /** In-flight reserve creations keyed by source branch (dedupes concurrent callers). */
  private readonly reserveInProgress = new Map<string, Promise<void>>();
  /** Serialised queue of mutating git operations; each op chains onto the previous one. */
  private gitOpQueue: Promise<unknown> = Promise.resolve();
  private readonly worktreePoolPath: string;
  private readonly repoPath: string;
  private readonly exec: ExecFn;
  private readonly projectSettings: ProjectSettingsProvider;

  constructor(args: {
    worktreePoolPath: string;
    repoPath: string;
    exec: ExecFn;
    projectSettings: ProjectSettingsProvider;
  }) {
    this.worktreePoolPath = args.worktreePoolPath;
    this.repoPath = args.repoPath;
    this.projectSettings = args.projectSettings;
    this.exec = args.exec;

    // Best-effort warm-up: prune stale worktree entries, then pre-create the
    // reserve for the default branch. Failures are swallowed deliberately —
    // the reserve is (re)created on demand in serveWorktree.
    this.exec('git', ['worktree', 'prune'], { cwd: this.repoPath })
      .catch(() => {})
      .then(() => this.projectSettings.getDefaultBranch())
      .then((branch) => this.ensureReserve(branch))
      .catch(() => {});
  }

  /** True when `worktreePath` is a directory git recognises as a worktree. */
  private async isValidWorktree(worktreePath: string): Promise<boolean> {
    try {
      await this.exec('git', ['rev-parse', '--git-dir'], { cwd: worktreePath });
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Append `fn` to the serialised git-op queue. The returned promise settles
   * with `fn`'s own result; a rejection does not poison the queue because the
   * stored tail swallows errors.
   */
  private enqueueGitOp<T>(fn: () => Promise<T>): Promise<T> {
    // `then(fn, fn)` runs `fn` regardless of whether the previous op failed.
    const result = this.gitOpQueue.then(fn, fn);
    this.gitOpQueue = result.catch(() => {});
    return result;
  }

  /**
   * Ensure a valid reserve worktree exists for `sourceBranch`, creating it
   * through the serialised queue. Concurrent calls for the same branch share
   * one in-flight creation.
   */
  async ensureReserve(sourceBranch: string): Promise<void> {
    const reservePath = path.join(
      this.worktreePoolPath,
      createStableWorktreeReserveId(sourceBranch)
    );
    if (fs.existsSync(reservePath)) {
      if (await this.isValidWorktree(reservePath)) return;
      // Stale directory — remove it and prune git bookkeeping before recreating.
      await fs.promises.rm(reservePath, { recursive: true, force: true });
      await this.exec('git', ['worktree', 'prune'], { cwd: this.repoPath }).catch(() => {});
    }
    const inProgress = this.reserveInProgress.get(sourceBranch);
    if (inProgress) return inProgress;
    const creation = this.enqueueGitOp(() => this.createReserve(sourceBranch)).finally(() => {
      this.reserveInProgress.delete(sourceBranch);
    });
    this.reserveInProgress.set(sourceBranch, creation);
    return creation;
  }

  /**
   * Queue-internal variant of ensureReserve. Must ONLY be called from inside
   * an enqueueGitOp callback — calling ensureReserve there would deadlock by
   * re-entering the same serialised queue.
   */
  private async doEnsureReserve(sourceBranch: string): Promise<void> {
    const reservePath = path.join(
      this.worktreePoolPath,
      createStableWorktreeReserveId(sourceBranch)
    );
    if (fs.existsSync(reservePath)) {
      if (await this.isValidWorktree(reservePath)) return;
      await fs.promises.rm(reservePath, { recursive: true, force: true });
      await this.exec('git', ['worktree', 'prune'], { cwd: this.repoPath }).catch(() => {});
    }
    await this.createReserve(sourceBranch);
  }

  /** Create the reserve branch + worktree for `sourceBranch` (queue-internal). */
  private async createReserve(sourceBranch: string): Promise<void> {
    await this.ensureWorktreePoolDirExists();
    const reserveBranchName = createStableWorktreeReserveId(sourceBranch);
    const worktreePath = path.join(this.worktreePoolPath, reserveBranchName);
    // Check whether the reserve branch exists in git at all.
    let branchExists = false;
    try {
      await this.exec('git', ['rev-parse', '--verify', reserveBranchName], { cwd: this.repoPath });
      branchExists = true;
    } catch {}
    if (!branchExists) {
      // Case 1: fresh — create the branch and worktree together.
      // Use refs/heads/ prefix to avoid ambiguity when a tag exists with the same name.
      await this.exec(
        'git',
        ['worktree', 'add', '-b', reserveBranchName, worktreePath, `refs/heads/${sourceBranch}`],
        { cwd: this.repoPath }
      );
      return;
    }
    // Cases 2 & 3: branch exists — try to re-add the worktree at the expected path.
    // If the branch is already checked out at a different (stale) path, git will
    // reject the add and tell us where it lives — move it instead.
    try {
      await this.exec('git', ['worktree', 'add', worktreePath, reserveBranchName], {
        cwd: this.repoPath,
      });
    } catch (e: unknown) {
      const stderr = (e as { stderr?: string })?.stderr ?? '';
      const match = /already checked out at '(.+)'/.exec(stderr);
      if (match?.[1]) {
        // Case 3: branch is checked out at an old/different path — move it here.
        await this.exec('git', ['worktree', 'move', match[1], worktreePath], {
          cwd: this.repoPath,
        });
      } else {
        throw e;
      }
    }
  }

  /** Create the pool directory (and parents) if missing. */
  private async ensureWorktreePoolDirExists(): Promise<void> {
    await fs.promises.mkdir(this.worktreePoolPath, { recursive: true });
  }

  /**
   * Resolve the on-disk path of the pool worktree that has `branchName`
   * checked out, or undefined when none exists. Falls back to scanning
   * `git worktree list --porcelain` so a renamed branch is still found even
   * though its directory keeps the old name.
   */
  async getWorktree(branchName: string): Promise<string | undefined> {
    const worktreePath = path.join(this.worktreePoolPath, branchName);
    if (fs.existsSync(worktreePath)) return worktreePath;

    try {
      const realPoolPath = fs.realpathSync(this.worktreePoolPath);
      const { stdout } = await this.exec('git', ['worktree', 'list', '--porcelain'], {
        cwd: this.repoPath,
      });
      const branchLine = `branch refs/heads/${branchName}`;
      for (const block of stdout.split('\n\n')) {
        if (block.split('\n').some((line) => line === branchLine)) {
          const match = /^worktree (.+)$/m.exec(block);
          // NOTE(review): bare prefix check could also match siblings like
          // "<pool>-backup" — confirm the pool path is always a dedicated dir.
          if (match?.[1]?.startsWith(realPoolPath)) return match[1];
        }
      }
    } catch {}
    return undefined;
  }

  /**
   * Serve a worktree for `branchName`, consuming (moving) the reserve of
   * `sourceBranch`. Queued so it never races other git mutations.
   */
  async serveWorktree(
    sourceBranch: string,
    branchName: string
  ): Promise<Result<string, ServeWorktreeError>> {
    await this.ensureWorktreePoolDirExists();
    return this.enqueueGitOp(() => this.doServeWorktree(sourceBranch, branchName));
  }

  private async doServeWorktree(
    sourceBranch: string,
    branchName: string
  ): Promise<Result<string, ServeWorktreeError>> {
    const reserveBranchName = createStableWorktreeReserveId(sourceBranch);
    const reservePath = path.join(this.worktreePoolPath, reserveBranchName);
    const targetPath = path.join(this.worktreePoolPath, branchName);

    // Fast path: worktree already exists on disk.
    if (fs.existsSync(targetPath)) return ok(targetPath);

    // Ensure the reserve worktree is ready. Use doEnsureReserve (not
    // ensureReserve) because we're already inside enqueueGitOp — calling
    // ensureReserve here would deadlock by re-enqueuing into the same queue.
    if (!fs.existsSync(reservePath) || !(await this.isValidWorktree(reservePath))) {
      try {
        await this.doEnsureReserve(sourceBranch);
      } catch (cause) {
        return err({ type: 'reserve-failed', sourceBranch, cause });
      }
    }

    try {
      await fs.promises.mkdir(path.dirname(targetPath), { recursive: true });
      // Move the reserve worktree directory to the task's permanent path.
      await this.exec('git', ['worktree', 'move', reservePath, targetPath], {
        cwd: this.repoPath,
      });
      // Switch the worktree HEAD to the already-created task branch (fast: same commit).
      await this.exec('git', ['switch', branchName], { cwd: targetPath });
      // Clean up the now-unused reserve branch.
      await this.exec('git', ['branch', '-D', reserveBranchName], { cwd: this.repoPath });
    } catch (cause) {
      return err({ type: 'worktree-setup-failed', cause });
    }

    // Best-effort: copy user-configured preserved files (e.g. .env) into the
    // new worktree; failure is logged but never fails the serve.
    await this.copyPreservedFiles(targetPath).catch((e) => {
      log.warn('WorktreeService: failed to copy preserved files', { targetPath, error: String(e) });
    });

    // Replenish the reserve eagerly, but only for the default branch.
    const defaultBranch = await this.projectSettings.getDefaultBranch();
    if (sourceBranch === defaultBranch) {
      this.ensureReserve(sourceBranch).catch(() => {});
    }

    return ok(targetPath);
  }

  /** Relocate a worktree on disk while keeping git's bookkeeping consistent. */
  async moveWorktree(oldPath: string, newPath: string): Promise<void> {
    await this.exec('git', ['worktree', 'move', oldPath, newPath], { cwd: this.repoPath });
  }

  /** Delete a worktree directory (best-effort) and prune git's stale entry. */
  async removeWorktree(worktreePath: string): Promise<void> {
    await fs.promises.rm(worktreePath, { recursive: true, force: true }).catch(() => {});
    await this.exec('git', ['worktree', 'prune'], { cwd: this.repoPath }).catch(() => {});
  }

  /**
   * Copy files matching the project's `preservePatterns` from the main repo
   * checkout into `targetPath` (e.g. untracked .env files).
   */
  private async copyPreservedFiles(targetPath: string): Promise<void> {
    const settings =
await this.projectSettings.get(); + const patterns = settings.preservePatterns ?? []; + for (const pattern of patterns) { + // glob the pattern against the main repo + const matches = await glob(pattern, { + cwd: this.repoPath, + dot: true, // match dotfiles like .env + absolute: false, + }); + for (const relPath of matches) { + const src = path.join(this.repoPath, relPath); + const stat = await fs.promises.stat(src).catch(() => null); + // Skip directories — glob patterns like `.claude/**` may match the dir itself. + if (!stat || !stat.isFile()) continue; + const dest = path.join(targetPath, relPath); + // ensure parent directory exists (e.g. nested paths) + await fs.promises.mkdir(path.dirname(dest), { recursive: true }); + await fs.promises.copyFile(src, dest); + } + } + } +} diff --git a/src/main/core/pty/controller.ts b/src/main/core/pty/controller.ts new file mode 100644 index 000000000..89faecf1d --- /dev/null +++ b/src/main/core/pty/controller.ts @@ -0,0 +1,86 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { log } from '@main/lib/logger'; +import { err, ok } from '@main/lib/result'; +import { projectManager } from '../projects/project-manager'; +import { ptySessionRegistry } from './pty-session-registry'; + +export const ptyController = createRPCController({ + /** Send raw input data to a PTY session. */ + sendInput: (sessionId: string, data: string) => { + const pty = ptySessionRegistry.get(sessionId); + if (!pty) return err({ type: 'not_found' as const }); + pty.write(data); + return ok(); + }, + + /** Resize a PTY session to the given terminal dimensions. */ + resize: (sessionId: string, cols: number, rows: number) => { + const pty = ptySessionRegistry.get(sessionId); + if (!pty) return err({ type: 'not_found' as const }); + pty.resize(cols, rows); + return ok(); + }, + + /** + * Atomically return the ring buffer and register the renderer as a consumer + * for future IPC delivery. Non-destructive — the ring buffer is kept intact. 
+ * Called once by the renderer when connecting a FrontendPty to a session. + */ + subscribe: (sessionId: string) => { + return ok({ buffer: ptySessionRegistry.subscribe(sessionId) }); + }, + + /** + * Remove the renderer's consumer registration for a session. + * Called when the renderer disposes its FrontendPty. + */ + unsubscribe: (sessionId: string) => { + ptySessionRegistry.unsubscribe(sessionId); + return ok(); + }, + + /** Kill a PTY session and clean it up immediately. */ + kill: (sessionId: string) => { + const pty = ptySessionRegistry.get(sessionId); + if (pty) { + try { + pty.kill(); + } catch (e) { + log.warn('ptyController.kill: error killing PTY', { sessionId, error: String(e) }); + } + } + ptySessionRegistry.unregister(sessionId); + return ok(); + }, + + /** + * Upload local files into the task's working directory on a remote SSH host + * and return their remote paths. Uses the SFTP subsystem of the already- + * connected ssh2 client — no local ssh/scp binaries are involved. + * + * The session ID encodes the project and task (`projectId:taskId:leafId`), + * so no extra arguments are needed to locate the destination. 
+ */ + uploadFiles: async (args: { sessionId: string; localPaths: string[] }) => { + try { + const [projectId, taskId] = args.sessionId.split(':'); + if (!projectId || !taskId) { + return err({ type: 'invalid_session' as const }); + } + + const provider = projectManager.getProject(projectId); + if (!provider || provider.type !== 'ssh') { + return err({ type: 'not_ssh' as const }); + } + + const remotePaths = await provider.uploadFiles(taskId, args.localPaths); + return ok({ remotePaths }); + } catch (e: unknown) { + log.error('pty:uploadFiles failed', { + sessionId: args.sessionId, + error: (e as Error)?.message || e, + }); + return err({ type: 'upload_failed' as const, message: String((e as Error)?.message || e) }); + } + }, +}); diff --git a/src/main/core/pty/exit-signals.ts b/src/main/core/pty/exit-signals.ts new file mode 100644 index 000000000..4b174d322 --- /dev/null +++ b/src/main/core/pty/exit-signals.ts @@ -0,0 +1,106 @@ +export type PtySignal = + | 'SIGHUP' // 1 — hangup / PTY master closed (common when emdash window closes) + | 'SIGINT' // 2 — Ctrl+C (user interrupt) + | 'SIGQUIT' // 3 — Ctrl+\ (quit + core dump) + | 'SIGILL' // 4 — illegal instruction + | 'SIGTRAP' // 5 — trace / breakpoint trap + | 'SIGABRT' // 6 — abort() called + | 'SIGBUS' // 7 — bus error (bad memory access) + | 'SIGFPE' // 8 — floating-point exception + | 'SIGKILL' // 9 — force kill (cannot be caught or ignored) + | 'SIGUSR1' // 10 — user-defined signal 1 + | 'SIGSEGV' // 11 — invalid memory reference (segfault) + | 'SIGUSR2' // 12 — user-defined signal 2 + | 'SIGPIPE' // 13 — write to closed pipe (agent output discarded) + | 'SIGALRM' // 14 — alarm timer expired + | 'SIGTERM' // 15 — graceful termination request (default `kill` signal) + | 'SIGCHLD' // 17 — child process state changed + | 'SIGCONT' // 18 — continue a stopped process + | 'SIGSTOP' // 19 — stop process (cannot be caught or ignored) + | 'SIGTSTP' // 20 — Ctrl+Z (stop from terminal) + | 'SIGTTIN' // 21 — background 
process attempted terminal read + | 'SIGTTOU' // 22 — background process attempted terminal write + | 'SIGURG' // 23 — urgent data available on socket + | 'SIGXCPU' // 24 — CPU time limit exceeded + | 'SIGXFSZ' // 25 — file size limit exceeded + | 'SIGVTALRM' // 26 — virtual timer expired + | 'SIGPROF' // 27 — profiling timer expired + | 'SIGWINCH' // 28 — terminal window resized (rarely surfaces as an exit signal) + | 'SIGPWR' // 30 — power failure + | 'SIGSYS'; // 31 — bad system call + +export const SIGNAL_BY_NUMBER: Readonly> = { + 1: 'SIGHUP', + 2: 'SIGINT', + 3: 'SIGQUIT', + 4: 'SIGILL', + 5: 'SIGTRAP', + 6: 'SIGABRT', + 7: 'SIGBUS', + 8: 'SIGFPE', + 9: 'SIGKILL', + 10: 'SIGUSR1', + 11: 'SIGSEGV', + 12: 'SIGUSR2', + 13: 'SIGPIPE', + 14: 'SIGALRM', + 15: 'SIGTERM', + 17: 'SIGCHLD', + 18: 'SIGCONT', + 19: 'SIGSTOP', + 20: 'SIGTSTP', + 21: 'SIGTTIN', + 22: 'SIGTTOU', + 23: 'SIGURG', + 24: 'SIGXCPU', + 25: 'SIGXFSZ', + 26: 'SIGVTALRM', + 27: 'SIGPROF', + 28: 'SIGWINCH', + 30: 'SIGPWR', + 31: 'SIGSYS', +}; + +const KNOWN_SIGNAL_NAMES = new Set(Object.values(SIGNAL_BY_NUMBER)); + +export function normalizeSignal(raw: number | string | null | undefined): PtySignal | undefined { + if (raw == null) return undefined; + if (typeof raw === 'number') return SIGNAL_BY_NUMBER[raw]; + const canonical = raw.startsWith('SIG') ? raw : `SIG${raw}`; + return KNOWN_SIGNAL_NAMES.has(canonical) ? 
(canonical as PtySignal) : undefined; +} + +export const EXIT_CODE_MEANINGS: Readonly> = { + 0: 'Success', + 1: 'General error', + 2: 'Misuse of shell built-in', + 126: 'Command not executable (permission denied)', + 127: 'Command not found', + 128: 'Invalid argument to exit()', + 129: 'Terminated by SIGHUP (PTY closed)', + 130: 'Terminated by SIGINT (Ctrl+C)', + 131: 'Terminated by SIGQUIT (Ctrl+\\)', + 134: 'Terminated by SIGABRT', + 137: 'Killed by SIGKILL (force kill / OOM)', + 139: 'Terminated by SIGSEGV (segfault)', + 141: 'Terminated by SIGPIPE (broken pipe)', + 143: 'Terminated by SIGTERM (graceful stop)', +}; + +export function getExitCodeMeaning(exitCode: number): string { + const knownExitCode = EXIT_CODE_MEANINGS[exitCode]; + if (knownExitCode) { + return knownExitCode; + } + if (signalFromExitCode(exitCode)) { + return `Terminated by ${signalFromExitCode(exitCode)}`; + } + return `Unknown exit code: ${exitCode}`; +} + +function signalFromExitCode(exitCode: number): PtySignal | undefined { + if (exitCode > 128 && exitCode <= 159) { + return SIGNAL_BY_NUMBER[exitCode - 128]; + } + return undefined; +} diff --git a/src/main/core/pty/local-pty.ts b/src/main/core/pty/local-pty.ts new file mode 100644 index 000000000..f1b69af41 --- /dev/null +++ b/src/main/core/pty/local-pty.ts @@ -0,0 +1,119 @@ +import path from 'node:path'; +import * as nodePty from 'node-pty'; +import type { IPty } from 'node-pty'; +import { log } from '@main/lib/logger'; +import { normalizeSignal } from './exit-signals'; +import type { Pty, PtyDimensions, PtyExitInfo } from './pty'; + +export interface LocalSpawnOptions extends PtyDimensions { + id: string; + command: string; + args: string[]; + cwd: string; + env: Record; +} + +const MIN_COLS = 2; +const MIN_ROWS = 1; + +export function spawnLocalPty(options: LocalSpawnOptions): LocalPtySession { + const { id, command, args, cwd, env, cols, rows } = options; + const spawnSpec = resolveWindowsPtySpawn(command, args); + + 
log.info('LocalPtySession:spawn', { + id, + command: spawnSpec.command, + args: spawnSpec.args, + cwd, + cols, + rows, + }); + + try { + const proc = nodePty.spawn(spawnSpec.command, spawnSpec.args, { + name: 'xterm-256color', + cols, + rows, + cwd, + env, + }); + return new LocalPtySession(id, proc); + } catch (e: unknown) { + const message = e instanceof Error ? e.message : String(e); + throw new Error(`Failed to spawn PTY: ${message}`); + } +} + +export class LocalPtySession implements Pty { + readonly id: string; + + constructor( + id: string, + private readonly proc: IPty + ) { + this.id = id; + } + + write(data: string): void { + this.proc.write(data); + } + + resize(cols: number, rows: number): void { + const c = Number.isFinite(cols) ? Math.max(MIN_COLS, Math.floor(cols)) : MIN_COLS; + const r = Number.isFinite(rows) ? Math.max(MIN_ROWS, Math.floor(rows)) : MIN_ROWS; + try { + this.proc.resize(c, r); + } catch (e: unknown) { + const msg = e instanceof Error ? e.message : String(e); + if (/EBADF|ENOTTY|ioctl\(2\) failed|not open|Napi::Error/.test(msg)) { + return; + } + log.error('LocalPtySession:resize failed', { cols: c, rows: r, error: msg }); + } + } + + kill(): void { + this.proc.kill(); + } + + onData(handler: (data: string) => void): void { + this.proc.onData(handler); + } + + onExit(handler: (info: PtyExitInfo) => void): void { + this.proc.onExit(({ exitCode, signal }) => { + handler({ exitCode, signal: normalizeSignal(signal) }); + }); + } +} + +function resolveWindowsPtySpawn( + command: string, + args: string[] +): { command: string; args: string[] } { + if (process.platform !== 'win32') return { command, args }; + + const quoteForCmdExe = (input: string): string => { + if (input.length === 0) return '""'; + if (!/[\s"^&|<>()%!]/.test(input)) return input; + return `"${input + .replace(/%/g, '%%') + .replace(/!/g, '^!') + .replace(/(["^&|<>()])/g, '^$1')}"`; + }; + + const ext = path.extname(command).toLowerCase(); + if (ext === '.cmd' || ext === 
'.bat') { + const comspec = process.env.ComSpec || String.raw`C:\\Windows\\System32\\cmd.exe`; + const fullCommandString = [command, ...args].map(quoteForCmdExe).join(' '); + return { command: comspec, args: ['/d', '/s', '/c', fullCommandString] }; + } + if (ext === '.ps1') { + return { + command: 'powershell.exe', + args: ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', command, ...args], + }; + } + + return { command, args }; +} diff --git a/src/main/core/pty/pty-env.ts b/src/main/core/pty/pty-env.ts new file mode 100644 index 000000000..6f9716dd1 --- /dev/null +++ b/src/main/core/pty/pty-env.ts @@ -0,0 +1,309 @@ +import { execSync } from 'node:child_process'; +import os from 'node:os'; +import { join } from 'node:path'; +import { detectSshAuthSock } from '@main/utils/shellEnv'; + +export const AGENT_ENV_VARS = [ + 'AMP_API_KEY', + 'ANTHROPIC_API_KEY', + 'AUTOHAND_API_KEY', + 'AUGMENT_SESSION_AUTH', + 'AWS_ACCESS_KEY_ID', + 'AWS_DEFAULT_REGION', + 'AWS_PROFILE', + 'AWS_REGION', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_SESSION_TOKEN', + 'AZURE_OPENAI_API_ENDPOINT', + 'AZURE_OPENAI_API_KEY', + 'AZURE_OPENAI_KEY', + 'CODEBUFF_API_KEY', + 'COPILOT_CLI_TOKEN', + 'CURSOR_API_KEY', + 'DASHSCOPE_API_KEY', + 'FACTORY_API_KEY', + 'GEMINI_API_KEY', + 'GH_TOKEN', + 'GITHUB_TOKEN', + 'GOOGLE_API_KEY', + 'GOOGLE_APPLICATION_CREDENTIALS', + 'GOOGLE_CLOUD_LOCATION', + 'GOOGLE_CLOUD_PROJECT', + 'HTTP_PROXY', + 'HTTPS_PROXY', + 'KIMI_API_KEY', + 'MISTRAL_API_KEY', + 'MOONSHOT_API_KEY', + 'NO_PROXY', + 'OPENAI_API_KEY', + 'OPENAI_BASE_URL', +] as const; + +const DISPLAY_ENV_VARS = [ + 'DISPLAY', // X11 display server + 'XAUTHORITY', // X11 auth cookie (often non-standard path on Wayland+GNOME) + 'WAYLAND_DISPLAY', // Wayland compositor socket + 'XDG_RUNTIME_DIR', // Contains Wayland/D-Bus sockets (e.g. 
/run/user/1000) + 'XDG_CURRENT_DESKTOP', // Used by xdg-open for DE detection + 'XDG_SESSION_TYPE', // Used by browsers/toolkits to select X11 vs Wayland + 'DBUS_SESSION_BUS_ADDRESS', // Needed by gio open and desktop portals +] as const; + +function getDisplayEnv(): Record { + const env: Record = {}; + for (const key of DISPLAY_ENV_VARS) { + const val = process.env[key]; + if (val) env[key] = val; + } + return env; +} + +let _localPath: string | undefined; +let _sshAuthSock: string | null | undefined; + +/** + * Lazily compute and cache an enriched PATH for local PTY sessions. + * + * When the app is launched from a GUI (Finder, app launcher, etc.) it inherits + * a minimal PATH that is missing Homebrew, nvm, npm-global, etc. This + * function replicates what the old top-level PATH blocks in main.ts did, but + * runs exactly once on first use and is cached for all subsequent calls. + */ +function resolveLocalPath(): string { + if (_localPath !== undefined) return _localPath; + + const sep = process.platform === 'win32' ? 
';' : ':'; + const cur = process.env.PATH || process.env.Path || ''; + const parts = cur.split(sep).filter(Boolean); + + if (process.platform === 'darwin') { + for (const p of [ + '/opt/homebrew/bin', + '/usr/local/bin', + '/opt/homebrew/sbin', + '/usr/local/sbin', + ]) { + if (!parts.includes(p)) parts.unshift(p); + } + try { + const shell = process.env.SHELL || '/bin/zsh'; + const raw = execSync(`${shell} -ilc 'echo -n $PATH'`, { + encoding: 'utf8', + timeout: 3000, + env: { + ...process.env, + DISABLE_AUTO_UPDATE: 'true', + ZSH_TMUX_AUTOSTART: 'false', + ZSH_TMUX_AUTOSTARTED: 'true', + }, + }); + if (raw) { + const entries = (raw + sep + parts.join(sep)).split(/[:\n]/).filter(Boolean); + _localPath = Array.from(new Set(entries.filter((p) => p.startsWith('/')))).join(sep); + return _localPath; + } + } catch {} + } + + if (process.platform === 'linux') { + const home = os.homedir(); + for (const p of [ + join(home, '.nvm/versions/node', process.version, 'bin'), + join(home, '.npm-global/bin'), + join(home, '.local/bin'), + '/usr/local/bin', + ]) { + if (!parts.includes(p)) parts.unshift(p); + } + try { + const shell = process.env.SHELL || '/bin/bash'; + const raw = execSync(`${shell} -ilc 'echo -n $PATH'`, { + encoding: 'utf8', + timeout: 3000, + env: { + ...process.env, + DISABLE_AUTO_UPDATE: 'true', + ZSH_TMUX_AUTOSTART: 'false', + ZSH_TMUX_AUTOSTARTED: 'true', + }, + }); + if (raw) { + const entries = (raw + sep + parts.join(sep)).split(/[:\n]/).filter(Boolean); + _localPath = Array.from(new Set(entries.filter((p) => p.startsWith('/')))).join(sep); + return _localPath; + } + } catch {} + } + + if (process.platform === 'win32') { + const npmPath = join(process.env.APPDATA || '', 'npm'); + if (npmPath && !parts.includes(npmPath)) parts.unshift(npmPath); + } + + _localPath = parts.join(sep); + return _localPath; +} + +/** + * Lazily detect and cache the SSH_AUTH_SOCK path. + * + * GUI-launched apps don't inherit the shell's SSH agent socket. 
This + * function runs the detection once and caches the result so subsequent + * calls are free. + */ +function resolveSshAuthSock(): string | undefined { + if (_sshAuthSock !== undefined) return _sshAuthSock ?? undefined; + const sock = detectSshAuthSock(); + _sshAuthSock = sock ?? null; + return sock; +} + +function getWindowsEssentialEnv(resolvedPath: string): Record { + const home = os.homedir(); + return { + PATH: resolvedPath, + PATHEXT: process.env.PATHEXT || '.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC', + SystemRoot: process.env.SystemRoot || 'C:\\Windows', + ComSpec: process.env.ComSpec || 'C:\\Windows\\System32\\cmd.exe', + TEMP: process.env.TEMP || process.env.TMP || '', + TMP: process.env.TMP || process.env.TEMP || '', + USERPROFILE: process.env.USERPROFILE || home, + APPDATA: process.env.APPDATA || '', + LOCALAPPDATA: process.env.LOCALAPPDATA || '', + HOMEDRIVE: process.env.HOMEDRIVE || '', + HOMEPATH: process.env.HOMEPATH || '', + USERNAME: process.env.USERNAME || os.userInfo().username, + ProgramFiles: process.env.ProgramFiles || 'C:\\Program Files', + 'ProgramFiles(x86)': process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', + ProgramData: process.env.ProgramData || 'C:\\ProgramData', + CommonProgramFiles: process.env.CommonProgramFiles || 'C:\\Program Files\\Common Files', + 'CommonProgramFiles(x86)': + process.env['CommonProgramFiles(x86)'] || 'C:\\Program Files (x86)\\Common Files', + ProgramW6432: process.env.ProgramW6432 || 'C:\\Program Files', + CommonProgramW6432: process.env.CommonProgramW6432 || 'C:\\Program Files\\Common Files', + }; +} + +export interface AgentEnvOptions { + /** + * Pass through AGENT_ENV_VARS from process.env. + * Defaults to true — set false only for tests or sandboxed environments. + */ + agentApiVars?: boolean; + + /** + * Include SHELL in the env (needed for shell-wrapper spawns so the shell + * can reconstruct login env via -il flags). 
+ */ + includeShellVar?: boolean; + + /** + * Emdash hook server connection details. When set, injects + * EMDASH_HOOK_PORT, EMDASH_PTY_ID, and EMDASH_HOOK_TOKEN so agent CLIs + * can call back on lifecycle events. + */ + hook?: { + port: number; + ptyId: string; + token: string; + }; + + /** + * Per-provider custom env vars configured by the user. + * Keys are validated against ^[A-Za-z_][A-Za-z0-9_]*$. + */ + customVars?: Record; +} + +/** + * Build an environment for a user-facing interactive terminal session. + * + * Unlike buildAgentEnv, this inherits process.env wholesale so the terminal + * feels identical to one opened in Ghostty or Terminal.app — the user's + * EDITOR, MANPATH, JAVA_HOME, custom vars, etc. are all present. + * + * TERM, COLORTERM, TERM_PROGRAM, and SHELL are always set or overridden so + * the shell and programs inside it report the correct terminal identity. + * SSH_AUTH_SOCK is injected via the same cached detector used for agents, + * since GUI-launched apps often don't inherit it from the user's login shell. + */ +export function buildTerminalEnv(): Record { + // Inherit the full process environment, stripping undefined values. + const env: Record = {}; + for (const [key, val] of Object.entries(process.env)) { + if (val !== undefined) env[key] = val; + } + + // Terminal identity — always override so xterm capabilities are correct. + env.TERM = 'xterm-256color'; + env.COLORTERM = 'truecolor'; + env.TERM_PROGRAM = 'emdash'; + + // Ensure SHELL reflects the user's configured shell (may be absent in GUI). + env.SHELL = process.env.SHELL ?? (process.platform === 'darwin' ? '/bin/zsh' : '/bin/bash'); + + // Inject SSH_AUTH_SOCK when the app was launched from a GUI launcher that + // didn't inherit it from the user's agent-forwarding shell session. 
+ if (!env.SSH_AUTH_SOCK) { + const sshAuthSock = resolveSshAuthSock(); + if (sshAuthSock) env.SSH_AUTH_SOCK = sshAuthSock; + } + + return env; +} + +/** + * Build a clean, minimal PTY environment from scratch. + * + * Does NOT inherit process.env wholesale — only well-known variables are + * forwarded. Login shells (-il) will rebuild PATH, NVM, etc. from the user's + * shell config files. Direct spawns (no shell) receive PATH so the CLI can + * find its own dependencies. + */ +export function buildAgentEnv(options: AgentEnvOptions = {}): Record { + const { agentApiVars = true, includeShellVar = false, hook, customVars } = options; + + const resolvedPath = resolveLocalPath(); + const env: Record = { + TERM: 'xterm-256color', + COLORTERM: 'truecolor', + TERM_PROGRAM: 'emdash', + HOME: process.env.HOME || os.homedir(), + USER: process.env.USER || os.userInfo().username, + PATH: resolvedPath, + ...(process.env.LANG && { LANG: process.env.LANG }), + ...(process.env.TMPDIR && { TMPDIR: process.env.TMPDIR }), + ...getDisplayEnv(), + ...(process.platform === 'win32' ? 
getWindowsEssentialEnv(resolvedPath) : {}), + }; + + const sshAuthSock = resolveSshAuthSock(); + if (sshAuthSock) env.SSH_AUTH_SOCK = sshAuthSock; + + if (includeShellVar) { + env.SHELL = process.env.SHELL || '/bin/bash'; + } + + if (agentApiVars) { + for (const key of AGENT_ENV_VARS) { + const val = process.env[key]; + if (val) env[key] = val; + } + } + + if (hook && hook.port > 0) { + env.EMDASH_HOOK_PORT = String(hook.port); + env.EMDASH_PTY_ID = hook.ptyId; + env.EMDASH_HOOK_TOKEN = hook.token; + } + + if (customVars) { + for (const [key, val] of Object.entries(customVars)) { + if (typeof val === 'string' && /^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) { + env[key] = val; + } + } + } + + return env; +} diff --git a/src/main/core/pty/pty-session-registry.ts b/src/main/core/pty/pty-session-registry.ts new file mode 100644 index 000000000..869c4cdaf --- /dev/null +++ b/src/main/core/pty/pty-session-registry.ts @@ -0,0 +1,97 @@ +import { ptyDataChannel, ptyExitChannel, ptyInputChannel } from '@shared/events/ptyEvents'; +import { events } from '@main/lib/events'; +import type { Pty } from './pty'; + +const FLUSH_INTERVAL_MS = 16; // ~60 fps +const RING_BUFFER_CAP = 64 * 1024; // 64 KB per session + +export class PtySessionRegistry { + private ptyMap: Map = new Map(); + private ptyInputSubscriptions: Map void> = new Map(); + private ringBuffers: Map = new Map(); + private activeConsumers: Set = new Set(); + + register(sessionId: string, pty: Pty): void { + // Clear any stale ring buffer and consumer from a previous PTY at this sessionId (respawn) + this.ringBuffers.delete(sessionId); + this.activeConsumers.delete(sessionId); + + this.ptyMap.set(sessionId, pty); + + let buffer = ''; + let flushTimer: ReturnType | null = null; + + const flush = () => { + if (buffer) { + events.emit(ptyDataChannel, buffer, sessionId); + buffer = ''; + } + flushTimer = null; + }; + + pty.onData((data) => { + buffer += data; + if (!flushTimer) { + flushTimer = setTimeout(flush, 
FLUSH_INTERVAL_MS); + } + // Accumulate into ring buffer for late-connecting renderers + let rb = (this.ringBuffers.get(sessionId) ?? '') + data; + if (rb.length > RING_BUFFER_CAP) rb = rb.slice(-RING_BUFFER_CAP); + this.ringBuffers.set(sessionId, rb); + }); + + pty.onExit((info) => { + // Flush any buffered output before emitting exit + if (flushTimer !== null) { + clearTimeout(flushTimer); + flush(); + } + events.emit(ptyExitChannel, info, sessionId); + this.unregister(sessionId); + }); + + const off = events.on( + ptyInputChannel, + (data) => { + pty.write(data); + }, + sessionId + ); + + this.ptyInputSubscriptions.set(sessionId, off); + } + + unregister(sessionId: string): void { + this.ptyMap.delete(sessionId); + this.ptyInputSubscriptions.get(sessionId)?.(); + this.ptyInputSubscriptions.delete(sessionId); + this.ringBuffers.delete(sessionId); + this.activeConsumers.delete(sessionId); + } + + get(sessionId: string): Pty | undefined { + return this.ptyMap.get(sessionId); + } + + /** + * Atomically snapshot the ring buffer and register a consumer for future + * IPC delivery. Returns the current ring buffer without deleting it. + * Safe: runs in one synchronous tick — no PTY data can arrive between + * snapshot and consumer registration. + */ + subscribe(sessionId: string): string { + const buf = this.ringBuffers.get(sessionId) ?? ''; + this.activeConsumers.add(sessionId); + return buf; + } + + /** + * Remove the consumer registration for a session. + * Called when the renderer disposes its FrontendPty. 
+ */ + unsubscribe(sessionId: string): void { + this.activeConsumers.delete(sessionId); + } +} + +export const ptySessionRegistry = new PtySessionRegistry(); diff --git a/src/main/core/pty/pty.ts b/src/main/core/pty/pty.ts new file mode 100644 index 000000000..1e22f4c25 --- /dev/null +++ b/src/main/core/pty/pty.ts @@ -0,0 +1,17 @@ +export type PtyExitInfo = { + exitCode?: number; + signal?: number | string; +}; + +export interface PtyDimensions { + cols: number; + rows: number; +} + +export interface Pty { + write(data: string): void; + resize(cols: number, rows: number): void; + kill(): void; + onData(handler: (data: string) => void): void; + onExit(handler: (info: PtyExitInfo) => void): void; +} diff --git a/src/main/core/pty/spawn-utils.ts b/src/main/core/pty/spawn-utils.ts new file mode 100644 index 000000000..102b2dbe8 --- /dev/null +++ b/src/main/core/pty/spawn-utils.ts @@ -0,0 +1,91 @@ +import type { AgentSessionConfig } from '../conversations/impl/agent-session'; +import type { GeneralSessionConfig } from '../terminals/impl/general-session'; + +export type SessionType = 'agent' | 'general' | 'lifecycle'; +export type SessionConfig = AgentSessionConfig | GeneralSessionConfig; + +export interface SpawnParams { + command: string; + args: string[]; + cwd: string; +} + +/** + * Derive the executable, arguments, and working directory from a session config. + * Applies shellSetup and tmux wrapping where relevant. + */ +export function resolveSpawnParams(type: SessionType, config: SessionConfig): SpawnParams { + const shell = process.env.SHELL ?? '/bin/sh'; + + switch (type) { + case 'agent': { + const cfg = config as AgentSessionConfig; + const baseCmd = [cfg.command, ...cfg.args].join(' '); + const fullCmd = cfg.shellSetup ? 
`${cfg.shellSetup} && ${baseCmd}` : baseCmd; + + if (cfg.tmuxSessionName) { + return buildTmuxParams(shell, cfg.tmuxSessionName, fullCmd, cfg.cwd); + } + + return { + command: shell, + args: ['-c', fullCmd], + cwd: cfg.cwd, + }; + } + + case 'general': { + const cfg = config as GeneralSessionConfig; + if (cfg.shellSetup) { + return { + command: shell, + args: ['-c', `${cfg.shellSetup} && exec ${shell} -il`], + cwd: cfg.cwd, + }; + } + return { command: shell, args: ['-il'], cwd: cfg.cwd }; + } + + default: { + throw new Error(`Unsupported session type: ${type}`); + } + } +} + +/** + * Build spawn params that wrap a command in a tmux session for persistence. + * + * Behaviour: + * - If a tmux session named `sessionName` already exists → attach to it. + * - Otherwise → create a detached session running `cmd`, then attach. + */ +export function buildTmuxParams( + shell: string, + sessionName: string, + cmd: string, + cwd: string +): SpawnParams { + const quotedName = JSON.stringify(sessionName); + const quotedCmd = JSON.stringify(cmd); + + const checkExists = `tmux has-session -t ${quotedName} 2>/dev/null`; + const newSession = `tmux new-session -d -s ${quotedName} ${quotedCmd}`; + const attach = `tmux attach-session -t ${quotedName}`; + + const tmuxCmd = `(${checkExists} && ${attach}) || (${newSession} && ${attach})`; + + return { + command: shell, + args: ['-c', tmuxCmd], + cwd, + }; +} + +/** + * Build a single shell command string for use with `sshClient.exec()`. + * Combines the binary + args and ensures the cwd is honoured remotely. 
+ */ +export function buildSshCommandString(command: string, args: string[], cwd: string): string { + const invocation = [command, ...args].join(' '); + return `cd ${JSON.stringify(cwd)} && ${invocation}`; +} diff --git a/src/main/core/pty/ssh2-pty.ts b/src/main/core/pty/ssh2-pty.ts new file mode 100644 index 000000000..30074a8ab --- /dev/null +++ b/src/main/core/pty/ssh2-pty.ts @@ -0,0 +1,89 @@ +import type { Client, ClientChannel } from 'ssh2'; +import { log } from '@main/lib/logger'; +import { err, ok, type Result } from '@main/lib/result'; +import { normalizeSignal } from './exit-signals'; +import type { Pty, PtyDimensions, PtyExitInfo } from './pty'; + +export type Ssh2OpenError = { + readonly kind: 'channel-open-failed'; + readonly message: string; +}; + +export interface Ssh2SpawnOptions extends PtyDimensions { + id: string; + command: string; +} + +export class Ssh2PtySession implements Pty { + readonly id: string; + + constructor( + id: string, + private readonly channel: ClientChannel + ) { + this.id = id; + } + + write(data: string): void { + this.channel.write(data); + } + + resize(cols: number, rows: number): void { + try { + this.channel.setWindow(rows, cols, 0, 0); + } catch (err: unknown) { + log.warn('Ssh2PtySession:resize failed', { + cols, + rows, + error: String((err as Error)?.message ?? err), + }); + } + } + + kill(): void { + try { + this.channel.close(); + } catch {} + } + + onData(handler: (data: string) => void): void { + this.channel.on('data', (chunk: Buffer) => { + handler(chunk.toString('utf-8')); + }); + } + + onExit(handler: (info: PtyExitInfo) => void): void { + this.channel.on('close', (exitCode: number | null, signal: string | null) => { + handler({ exitCode: exitCode ?? 
undefined, signal: normalizeSignal(signal) }); + }); + } +} + +export async function openSsh2Pty( + sshClient: Client, + options: Ssh2SpawnOptions +): Promise> { + const { id, command, cols, rows } = options; + return new Promise((resolve) => { + sshClient.exec( + command, + { + pty: { + term: 'xterm-256color', + cols, + rows, + // width/height in pixels — set to 0, terminal uses cols/rows instead + width: 0, + height: 0, + }, + }, + (e, channel) => { + if (e) { + const message = e instanceof Error ? e.message : String(e); + return resolve(err({ kind: 'channel-open-failed', message })); + } + resolve(ok(new Ssh2PtySession(id, channel))); + } + ); + }); +} diff --git a/src/main/core/pull-requests/controller.ts b/src/main/core/pull-requests/controller.ts new file mode 100644 index 000000000..392ccb1b9 --- /dev/null +++ b/src/main/core/pull-requests/controller.ts @@ -0,0 +1,143 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { log } from '@main/lib/logger'; +import { prService } from './pr-service'; + +export const pullRequestController = createRPCController({ + // ── DB-cached reads ──────────────────────────────────────────────────── + listPullRequests: async (nameWithOwner: string) => { + try { + const prs = await prService.listPullRequests(nameWithOwner); + return { success: true, prs, totalCount: prs.length }; + } catch (error) { + log.error('Failed to list pull requests:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to list pull requests', + }; + } + }, + + getPullRequest: async (nameWithOwner: string, prNumber: number, invalidate = false) => { + try { + const pr = await prService.getPullRequest(nameWithOwner, prNumber, invalidate); + if (!pr) return { success: false, error: 'Pull request not found' }; + return { success: true, pr }; + } catch (error) { + log.error('Failed to get pull request:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Unable to get pull request', + }; + } + }, + + getPullRequestsForTask: async (projectId: string, taskId: string, invalidate = false) => { + return prService.getPullRequestsForTask(projectId, taskId, invalidate); + }, + + // ── Mutations ────────────────────────────────────────────────────────── + createPullRequest: async (params: { + nameWithOwner: string; + head: string; + base: string; + title: string; + body?: string; + draft: boolean; + }) => { + try { + const result = await prService.createPullRequest(params); + return { success: true, url: result.url, number: result.number }; + } catch (error) { + log.error('Failed to create pull request:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to create pull request', + }; + } + }, + + mergePullRequest: async ( + nameWithOwner: string, + prNumber: number, + options: { strategy: 'merge' | 'squash' | 'rebase'; commitHeadOid?: string } + ) => { + try { + const result = await prService.mergePullRequest(nameWithOwner, prNumber, options); + return { success: true, sha: result.sha, merged: result.merged }; + } catch (error) { + log.error('Failed to merge pull request:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to merge pull request', + }; + } + }, + + // ── Pass-through reads ───────────────────────────────────────────────── + getCheckRuns: async (nameWithOwner: string, prNumber: number) => { + try { + const checks = await prService.getCheckRuns(nameWithOwner, prNumber); + return { success: true, checks }; + } catch (error) { + log.error('Failed to get check runs:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Unable to get check runs', + }; + } + }, + + getPrComments: async (nameWithOwner: string, prNumber: number) => { + try { + const result = await prService.getPrComments(nameWithOwner, prNumber); + return { success: true, ...result }; + } catch (error) { + log.error('Failed to get PR comments:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to get PR comments', + }; + } + }, + + getPullRequestFiles: async (nameWithOwner: string, prNumber: number) => { + try { + const files = await prService.getPullRequestFiles(nameWithOwner, prNumber); + return { success: true, files }; + } catch (error) { + log.error('Failed to get pull request files:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to get pull request files', + }; + } + }, + + // ── Pass-through mutations ───────────────────────────────────────────── + addPrComment: async (nameWithOwner: string, prNumber: number, body: string) => { + try { + const result = await prService.addPrComment(nameWithOwner, prNumber, body); + return { success: true, id: result.id }; + } catch (error) { + log.error('Failed to add PR comment:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Unable to add comment', + }; + } + }, + + // ── Bootstrap sync ───────────────────────────────────────────────────── + syncPullRequests: async (nameWithOwner: string) => { + try { + await prService.syncPullRequests(nameWithOwner); + return { success: true }; + } catch (error) { + log.error('Failed to sync pull requests:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Unable to sync pull requests', + }; + } + }, +}); diff --git a/src/main/core/pull-requests/pr-service.test.ts b/src/main/core/pull-requests/pr-service.test.ts new file mode 100644 index 000000000..dc691a4ec --- /dev/null +++ b/src/main/core/pull-requests/pr-service.test.ts @@ -0,0 +1,582 @@ +import type { Octokit } from '@octokit/rest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +// Retrieve the exposed mock handles after the module is set up +import { _mocks as dbMocks } from '@main/db/client'; +import { PrService } from './pr-service'; + +vi.mock('@main/core/github/services/octokit-provider', () => ({ + getOctokit: vi.fn(), +})); + +vi.mock('@main/core/projects/project-manager', () => ({ + projectManager: { getProject: vi.fn() }, +})); + +vi.mock('@main/core/projects/utils', () => ({ + resolveTask: vi.fn(), +})); + +// --------------------------------------------------------------------------- +// DB mock — makes upsert a transparent passthrough during unit tests. +// The db module is mocked with a chainable builder. mockReturning and +// mockOrderBy are exposed so individual tests can control return values. 
+// --------------------------------------------------------------------------- + +vi.mock('@main/db/client', () => { + const mockReturning = vi.fn().mockResolvedValue([]); + const mockOnConflict = vi.fn().mockReturnValue({ returning: mockReturning }); + const mockValues = vi.fn().mockReturnValue({ onConflictDoUpdate: mockOnConflict }); + const mockInsert = vi.fn().mockReturnValue({ values: mockValues }); + + const mockOrderBy = vi.fn().mockResolvedValue([]); + const mockLimit = vi.fn().mockResolvedValue([]); + const mockWhere = vi.fn().mockReturnValue({ orderBy: mockOrderBy, limit: mockLimit }); + const mockFrom = vi.fn().mockReturnValue({ where: mockWhere, orderBy: mockOrderBy }); + const mockSelect = vi.fn().mockReturnValue({ from: mockFrom }); + + // Expose via module so tests can access them via import + return { + db: { insert: mockInsert, select: mockSelect }, + _mocks: { mockReturning, mockOrderBy }, + }; +}); + +const { mockReturning, mockOrderBy } = dbMocks as { + mockReturning: ReturnType; + mockOrderBy: ReturnType; +}; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeOctokitFactory( + graphqlMock?: ReturnType, + restMock?: Record +): () => Promise { + const octokit = { + graphql: graphqlMock ?? vi.fn(), + rest: restMock ?? {}, + } as unknown as Octokit; + return async () => octokit; +} + +/** Produce a fake DB row that round-trips through dbRowToUnified → expectedUnified. 
*/ +function makeFakeDbRow(pr: { + url: string; + provider: string; + nameWithOwner: string; + title: string; + status: string; + author: unknown; + isDraft: boolean; + metadata: unknown; + createdAt: string; + updatedAt: string; +}) { + return { + id: pr.url, + provider: pr.provider, + nameWithOwner: pr.nameWithOwner, + url: pr.url, + title: pr.title, + status: pr.status, + author: JSON.stringify(pr.author), + isDraft: Number(pr.isDraft), + metadata: JSON.stringify(pr.metadata), + createdAt: pr.createdAt, + updatedAt: pr.updatedAt, + fetchedAt: '2024-01-02T00:00:00Z', + }; +} + +const NAME_WITH_OWNER = 'acme/my-repo'; + +const gqlPrNode = { + number: 42, + title: 'feat: add widget', + url: 'https://github.com/acme/my-repo/pull/42', + state: 'OPEN' as const, + isDraft: false, + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-02T00:00:00Z', + headRefName: 'feat/widget', + headRefOid: 'abc123', + baseRefName: 'main', + body: 'PR description', + additions: 10, + deletions: 5, + changedFiles: 3, + mergeable: 'MERGEABLE' as const, + mergeStateStatus: 'CLEAN' as const, + author: { login: 'dev' }, + headRepository: { + nameWithOwner: 'acme/my-repo', + url: 'https://github.com/acme/my-repo', + owner: { login: 'acme' }, + }, + labels: { nodes: [{ name: 'enhancement', color: '84b6eb' }] }, + assignees: { nodes: [{ login: 'dev', avatarUrl: 'https://avatar.test/dev' }] }, + reviewDecision: null, + latestReviews: { nodes: [{ author: { login: 'reviewer' }, state: 'APPROVED' }] }, + reviewRequests: { nodes: [{ requestedReviewer: { login: 'pending-reviewer' } }] }, +}; + +const expectedUnified = { + id: 'https://github.com/acme/my-repo/pull/42', + identifier: '#42', + nameWithOwner: 'acme/my-repo', + provider: 'github', + url: 'https://github.com/acme/my-repo/pull/42', + title: 'feat: add widget', + status: 'open', + author: { userName: 'dev', displayName: 'dev' }, + isDraft: false, + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-02T00:00:00Z', + metadata: { + 
number: 42, + headRefName: 'feat/widget', + headRefOid: 'abc123', + baseRefName: 'main', + headRepository: { + nameWithOwner: 'acme/my-repo', + url: 'https://github.com/acme/my-repo', + owner: { login: 'acme' }, + }, + labels: [{ name: 'enhancement', color: '84b6eb' }], + assignees: [{ login: 'dev', avatarUrl: 'https://avatar.test/dev' }], + reviewDecision: null, + reviewers: [ + { login: 'pending-reviewer', state: 'PENDING' }, + { login: 'reviewer', state: 'APPROVED' }, + ], + body: 'PR description', + additions: 10, + deletions: 5, + changedFiles: 3, + mergeable: 'MERGEABLE', + mergeStateStatus: 'CLEAN', + }, +}; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('PrService.listPullRequests (invalidate=true)', () => { + let graphqlMock: ReturnType; + let svc: PrService; + + beforeEach(() => { + vi.clearAllMocks(); + graphqlMock = vi.fn(); + svc = new PrService(makeOctokitFactory(graphqlMock)); + mockReturning.mockResolvedValue([makeFakeDbRow(expectedUnified)]); + }); + + it('uses repository query when no search', async () => { + graphqlMock.mockResolvedValue({ + repository: { pullRequests: { totalCount: 1, nodes: [gqlPrNode] } }, + }); + + const result = await svc.listPullRequests(NAME_WITH_OWNER, { limit: 10 }, true); + + expect(graphqlMock).toHaveBeenCalledWith(expect.stringContaining('listPullRequests'), { + owner: 'acme', + repo: 'my-repo', + limit: 10, + }); + expect(result).toEqual([expectedUnified]); + }); + + it('uses search query when searchQuery provided', async () => { + graphqlMock.mockResolvedValue({ + search: { nodes: [gqlPrNode] }, + }); + + const result = await svc.listPullRequests( + NAME_WITH_OWNER, + { searchQuery: 'widget', limit: 5 }, + true + ); + + expect(graphqlMock).toHaveBeenCalledWith(expect.stringContaining('searchPullRequests'), { + query: `widget repo:${NAME_WITH_OWNER} is:pr is:open`, + limit: 5, + }); 
+ expect(result).toHaveLength(1); + }); + + it('clamps limit to [1, 100]', async () => { + graphqlMock.mockResolvedValue({ + repository: { pullRequests: { nodes: [] } }, + }); + + await svc.listPullRequests(NAME_WITH_OWNER, { limit: 999 }, true); + expect(graphqlMock).toHaveBeenCalledWith( + expect.stringContaining('listPullRequests'), + expect.objectContaining({ limit: 100 }) + ); + + graphqlMock.mockClear(); + await svc.listPullRequests(NAME_WITH_OWNER, { limit: 0 }, true); + expect(graphqlMock).toHaveBeenCalledWith( + expect.stringContaining('listPullRequests'), + expect.objectContaining({ limit: 1 }) + ); + }); + + it('builds reviewer list from reviewRequests and latestReviews', async () => { + graphqlMock.mockResolvedValue({ + repository: { pullRequests: { nodes: [gqlPrNode] } }, + }); + + const result = await svc.listPullRequests(NAME_WITH_OWNER, {}, true); + + expect(result[0].metadata.reviewers).toEqual([ + { login: 'pending-reviewer', state: 'PENDING' }, + { login: 'reviewer', state: 'APPROVED' }, + ]); + }); + + it('returns data from DB without fetching GitHub when invalidate=false', async () => { + mockOrderBy.mockResolvedValue([makeFakeDbRow(expectedUnified)]); + + const result = await svc.listPullRequests(NAME_WITH_OWNER); + + expect(graphqlMock).not.toHaveBeenCalled(); + expect(result).toEqual([expectedUnified]); + }); +}); + +describe('PrService.getPullRequest (invalidate=true)', () => { + let graphqlMock: ReturnType; + let svc: PrService; + + beforeEach(() => { + vi.clearAllMocks(); + graphqlMock = vi.fn(); + svc = new PrService(makeOctokitFactory(graphqlMock)); + mockReturning.mockResolvedValue([makeFakeDbRow(expectedUnified)]); + }); + + it('fetches a single PR by number and returns unified model', async () => { + graphqlMock.mockResolvedValue({ + repository: { pullRequest: gqlPrNode }, + }); + + const result = await svc.getPullRequest(NAME_WITH_OWNER, 42, true); + + 
expect(graphqlMock).toHaveBeenCalledWith(expect.stringContaining('getPullRequest'), { + owner: 'acme', + repo: 'my-repo', + number: 42, + }); + expect(result).toEqual(expectedUnified); + }); + + it('returns null when PR not found', async () => { + graphqlMock.mockResolvedValue({ repository: { pullRequest: null } }); + + expect(await svc.getPullRequest(NAME_WITH_OWNER, 999, true)).toBeNull(); + }); + + it('throws on error', async () => { + graphqlMock.mockRejectedValue(new Error('not found')); + + await expect(svc.getPullRequest(NAME_WITH_OWNER, 999, true)).rejects.toThrow('not found'); + }); +}); + +describe('PrService (REST-backed methods)', () => { + const mockCreate = vi.fn(); + const mockMerge = vi.fn(); + const mockListFiles = vi.fn(); + const mockListReviews = vi.fn(); + const mockListComments = vi.fn(); + const mockCreateComment = vi.fn(); + const mockGraphql = vi.fn(); + const mockPaginate = vi.fn(); + + const mockOctokit = { + rest: { + pulls: { + create: mockCreate, + merge: mockMerge, + listFiles: mockListFiles, + listReviews: mockListReviews, + }, + issues: { + listComments: mockListComments, + createComment: mockCreateComment, + }, + }, + graphql: mockGraphql, + paginate: mockPaginate, + } as unknown as Octokit; + + const getOctokit = vi.fn().mockResolvedValue(mockOctokit); + let service: PrService; + + beforeEach(() => { + vi.clearAllMocks(); + service = new PrService(getOctokit); + mockReturning.mockResolvedValue([makeFakeDbRow(expectedUnified)]); + }); + + it('createPullRequest returns URL and number', async () => { + mockCreate.mockResolvedValue({ + data: { html_url: 'https://github.com/owner/repo/pull/1', number: 1 }, + }); + // Subsequent getPullRequest(invalidate=true) call + mockGraphql.mockResolvedValue({ repository: { pullRequest: gqlPrNode } }); + + const result = await service.createPullRequest({ + nameWithOwner: 'owner/repo', + head: 'feature', + base: 'main', + title: 'Test PR', + draft: false, + }); + + 
expect(mockCreate).toHaveBeenCalledWith({ + owner: 'owner', + repo: 'repo', + head: 'feature', + base: 'main', + title: 'Test PR', + body: undefined, + draft: false, + }); + expect(result).toEqual({ + url: 'https://github.com/owner/repo/pull/1', + number: 1, + }); + }); + + it('mergePullRequest supports merge strategy and optional sha', async () => { + mockMerge.mockResolvedValue({ + data: { sha: 'abc123', merged: true, message: 'Pull Request successfully merged' }, + }); + // Subsequent getPullRequest(invalidate=true) call + mockGraphql.mockResolvedValue({ repository: { pullRequest: gqlPrNode } }); + + const result = await service.mergePullRequest('owner/repo', 42, { + strategy: 'squash', + commitHeadOid: 'def456', + }); + + expect(mockMerge).toHaveBeenCalledWith({ + owner: 'owner', + repo: 'repo', + pull_number: 42, + merge_method: 'squash', + sha: 'def456', + }); + expect(result).toEqual({ sha: 'abc123', merged: true }); + }); + + it('getCheckRuns maps check run buckets and status contexts', async () => { + mockGraphql.mockResolvedValue({ + repository: { + pullRequest: { + commits: { + nodes: [ + { + commit: { + statusCheckRollup: { + contexts: { + pageInfo: { hasNextPage: false, endCursor: null }, + nodes: [ + { + __typename: 'CheckRun', + name: 'CI', + status: 'COMPLETED', + conclusion: 'SUCCESS', + detailsUrl: 'https://github.com/runs/1', + startedAt: '2026-01-01T00:00:00Z', + completedAt: '2026-01-01T00:02:00Z', + checkSuite: { + workflowRun: { workflow: { name: 'Build' } }, + }, + }, + { + __typename: 'CheckRun', + name: 'Lint', + status: 'IN_PROGRESS', + conclusion: null, + detailsUrl: null, + startedAt: '2026-01-01T00:01:00Z', + completedAt: null, + checkSuite: null, + }, + { + __typename: 'CheckRun', + name: 'CodeQL', + status: 'COMPLETED', + conclusion: 'NEUTRAL', + detailsUrl: null, + startedAt: '2026-01-01T00:01:00Z', + completedAt: '2026-01-01T00:05:00Z', + checkSuite: null, + }, + { + __typename: 'StatusContext', + context: 'deploy/preview', + 
state: 'FAILURE', + targetUrl: 'https://deploy.example.com', + createdAt: '2026-01-01T00:00:00Z', + }, + ], + }, + }, + }, + }, + ], + }, + }, + }, + }); + + const result = await service.getCheckRuns('owner/repo', 42); + + expect(result).toEqual([ + { + name: 'CI', + bucket: 'pass', + workflowName: 'Build', + detailsUrl: 'https://github.com/runs/1', + startedAt: '2026-01-01T00:00:00Z', + completedAt: '2026-01-01T00:02:00Z', + }, + { + name: 'Lint', + bucket: 'pending', + workflowName: undefined, + detailsUrl: undefined, + startedAt: '2026-01-01T00:01:00Z', + completedAt: undefined, + }, + { + name: 'CodeQL', + bucket: 'skipping', + workflowName: undefined, + detailsUrl: undefined, + startedAt: '2026-01-01T00:01:00Z', + completedAt: '2026-01-01T00:05:00Z', + }, + { + name: 'deploy/preview', + bucket: 'fail', + detailsUrl: 'https://deploy.example.com', + startedAt: '2026-01-01T00:00:00Z', + }, + ]); + }); + + it('getPrComments filters out PENDING reviews', async () => { + mockPaginate + .mockResolvedValueOnce([ + { + id: 1, + user: { login: 'alice', avatar_url: 'https://a.com/alice.png' }, + body: 'Looks good', + created_at: '2026-01-01T00:00:00Z', + }, + ]) + .mockResolvedValueOnce([ + { + id: 2, + user: { login: 'bob', avatar_url: 'https://a.com/bob.png' }, + body: 'LGTM', + submitted_at: '2026-01-01T01:00:00Z', + state: 'APPROVED', + commit_id: 'abc', + }, + { + id: 3, + user: { login: 'carol' }, + body: '', + submitted_at: '2026-01-01T02:00:00Z', + state: 'COMMENTED', + commit_id: 'def', + }, + { + id: 4, + user: { login: 'dana' }, + body: 'Still working on this', + submitted_at: null, + updated_at: '2026-01-01T03:00:00Z', + state: 'PENDING', + commit_id: 'ghi', + }, + ]); + + const result = await service.getPrComments('owner/repo', 42); + + expect(result.comments).toHaveLength(1); + expect(result.comments[0].author.login).toBe('alice'); + expect(result.reviews).toHaveLength(1); + expect(result.reviews[0].state).toBe('APPROVED'); + expect(result.reviews.every((r) 
=> r.state !== 'PENDING')).toBe(true); + }); + + it('addPrComment returns created id', async () => { + mockCreateComment.mockResolvedValue({ data: { id: 99 } }); + + const result = await service.addPrComment('owner/repo', 42, 'Nice work!'); + + expect(mockCreateComment).toHaveBeenCalledWith({ + owner: 'owner', + repo: 'repo', + issue_number: 42, + body: 'Nice work!', + }); + expect(result).toEqual({ id: 99 }); + }); + + it('getPullRequestFiles maps paginated response', async () => { + mockPaginate.mockResolvedValue([ + { + filename: 'src/foo.ts', + status: 'modified', + additions: 10, + deletions: 3, + patch: '@@ -1,3 +1,10 @@\n+added line', + }, + { + filename: 'src/bar.ts', + status: 'added', + additions: 25, + deletions: 0, + patch: '@@ -0,0 +1,25 @@\n+new file', + }, + ]); + + const result = await service.getPullRequestFiles('owner/repo', 42); + + expect(mockPaginate).toHaveBeenCalledWith(mockListFiles, { + owner: 'owner', + repo: 'repo', + pull_number: 42, + per_page: 100, + }); + expect(result).toEqual([ + { + filename: 'src/foo.ts', + status: 'modified', + additions: 10, + deletions: 3, + patch: '@@ -1,3 +1,10 @@\n+added line', + }, + { + filename: 'src/bar.ts', + status: 'added', + additions: 25, + deletions: 0, + patch: '@@ -0,0 +1,25 @@\n+new file', + }, + ]); + }); +}); diff --git a/src/main/core/pull-requests/pr-service.ts b/src/main/core/pull-requests/pr-service.ts new file mode 100644 index 000000000..12cc793f4 --- /dev/null +++ b/src/main/core/pull-requests/pr-service.ts @@ -0,0 +1,615 @@ +import type { Octokit } from '@octokit/rest'; +import { desc, eq, sql } from 'drizzle-orm'; +import type { + CheckRunBucket, + GitHubReviewer, + PrCheckRun, + PrCommentsResult, + PullRequest, + PullRequestFile, + PullRequestStatus, +} from '@shared/pull-requests'; +import { getOctokit } from '@main/core/github/services/octokit-provider'; +import { + GET_PR_CHECK_RUNS_QUERY, + GET_PR_DETAIL_QUERY, + LIST_PRS_QUERY, + SEARCH_PRS_QUERY, + SYNC_PRS_QUERY, +} from 
'@main/core/github/services/pr-queries'; +import { parseNameWithOwner, splitRepo } from '@main/core/github/services/utils'; +import { projectManager } from '@main/core/projects/project-manager'; +import { resolveTask } from '@main/core/projects/utils'; +import { db } from '@main/db/client'; +import { pullRequests } from '@main/db/schema'; +import { log } from '@main/lib/logger'; +import { err, ok } from '@main/lib/result'; + +// --------------------------------------------------------------------------- +// Public payload types +// --------------------------------------------------------------------------- + +export type TaskPrsPayload = { + prs: PullRequest[]; + nameWithOwner: string | null; + taskBranch: string | null; +}; + +export type ListPrOptions = { + limit?: number; + searchQuery?: string; +}; + +// --------------------------------------------------------------------------- +// GraphQL response shapes (internal) +// --------------------------------------------------------------------------- + +interface GqlPrNode { + number: number; + title: string; + url: string; + state: 'OPEN' | 'CLOSED' | 'MERGED'; + isDraft: boolean; + createdAt: string; + updatedAt: string; + headRefName: string; + headRefOid: string; + baseRefName: string; + body: string | null; + additions: number; + deletions: number; + changedFiles: number; + mergeable: 'MERGEABLE' | 'CONFLICTING' | 'UNKNOWN'; + mergeStateStatus: 'CLEAN' | 'DIRTY' | 'BEHIND' | 'BLOCKED' | 'HAS_HOOKS' | 'UNSTABLE' | 'UNKNOWN'; + author: { login: string } | null; + headRepository: { nameWithOwner: string; url: string; owner: { login: string } } | null; + labels: { nodes: Array<{ name: string; color: string }> }; + assignees: { nodes: Array<{ login: string; avatarUrl: string }> }; + reviewDecision: 'APPROVED' | 'CHANGES_REQUESTED' | 'REVIEW_REQUIRED' | null; + latestReviews: { nodes: Array<{ author: { login: string } | null; state: string }> }; + reviewRequests: { nodes: Array<{ requestedReviewer: { login?: string; 
name?: string } | null }> }; +} + +interface GqlCheckRunNode { + __typename: 'CheckRun'; + name: string; + status: string; + conclusion: string | null; + detailsUrl: string | null; + startedAt: string | null; + completedAt: string | null; + checkSuite: { + app: { name: string; logoUrl: string } | null; + workflowRun: { workflow: { name: string } } | null; + } | null; +} + +interface GqlStatusContextNode { + __typename: 'StatusContext'; + context: string; + state: string; + targetUrl: string | null; + createdAt: string; +} + +interface GqlCheckRunsResponse { + repository: { + pullRequest: { + commits: { + nodes: Array<{ + commit: { + statusCheckRollup: { + contexts: { + pageInfo: { hasNextPage: boolean; endCursor: string | null }; + nodes: Array; + }; + } | null; + }; + }>; + }; + } | null; + }; +} + +// --------------------------------------------------------------------------- +// Bucket mappers +// --------------------------------------------------------------------------- + +function mapCheckRunBucket(status: string, conclusion: string | null): CheckRunBucket { + if ( + status === 'IN_PROGRESS' || + status === 'QUEUED' || + status === 'WAITING' || + status === 'PENDING' + ) + return 'pending'; + if (!conclusion) return 'pending'; + switch (conclusion) { + case 'SUCCESS': + return 'pass'; + case 'NEUTRAL': + case 'SKIPPED': + case 'STALE': + return 'skipping'; + case 'FAILURE': + case 'TIMED_OUT': + case 'ACTION_REQUIRED': + case 'STARTUP_FAILURE': + return 'fail'; + case 'CANCELLED': + return 'cancel'; + default: + return 'fail'; + } +} + +function mapStatusContextBucket(state: string): CheckRunBucket { + switch (state) { + case 'SUCCESS': + return 'pass'; + case 'FAILURE': + case 'ERROR': + return 'fail'; + default: + return 'pending'; + } +} + +// --------------------------------------------------------------------------- +// PrService +// --------------------------------------------------------------------------- + +export class PrService { + 
constructor(private readonly getOctokit: () => Promise) {} + + // ── DB-cached reads ────────────────────────────────────────────────────── + + async listPullRequests( + nameWithOwner: string, + options: ListPrOptions = {}, + invalidate = false + ): Promise { + if (invalidate) { + const limit = Math.min(Math.max(options.limit ?? 30, 1), 100); + const searchQuery = options.searchQuery?.trim(); + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + + let fresh: PullRequest[]; + if (searchQuery) { + const response = await octokit.graphql<{ + search: { nodes: GqlPrNode[] }; + }>(SEARCH_PRS_QUERY, { + query: `${searchQuery} repo:${owner}/${repo} is:pr is:open`, + limit, + }); + fresh = response.search.nodes.map((n) => this.gqlToUnified(n, nameWithOwner)); + } else { + const response = await octokit.graphql<{ + repository: { pullRequests: { nodes: GqlPrNode[] } }; + }>(LIST_PRS_QUERY, { owner, repo, limit }); + fresh = response.repository.pullRequests.nodes.map((n) => + this.gqlToUnified(n, nameWithOwner) + ); + } + + return this.upsertMany(fresh); + } + + return this.fromDb(nameWithOwner); + } + + async getPullRequest( + nameWithOwner: string, + prNumber: number, + invalidate = false + ): Promise { + if (invalidate) { + const { owner, repo } = splitRepo(nameWithOwner); + const octokit = await this.getOctokit(); + const response = await octokit.graphql<{ + repository: { pullRequest: GqlPrNode | null }; + }>(GET_PR_DETAIL_QUERY, { owner, repo, number: prNumber }); + const node = response.repository.pullRequest; + if (!node) return null; + return this.upsert(this.gqlToUnified(node, nameWithOwner)); + } + + const rows = await db + .select() + .from(pullRequests) + .where(eq(pullRequests.nameWithOwner, nameWithOwner)) + .limit(1); + + // Try to find by number in the metadata JSON — fall back to null + for (const row of rows) { + const pr = this.dbRowToUnified(row); + if (pr.metadata.number === prNumber) return pr; + } + return null; 
+ } + + async getPullRequestsForTask( + projectId: string, + taskId: string, + invalidate = false + ): Promise< + | { success: true; data: TaskPrsPayload } + | { success: false; error: { type: 'not_found' } | string } + > { + try { + const project = projectManager.getProject(projectId); + const env = resolveTask(projectId, taskId); + if (!project || !env) return err({ type: 'not_found' as const }); + if (!env.taskBranch) { + return ok({ prs: [], nameWithOwner: null, taskBranch: null }); + } + + const taskBranch = env.taskBranch; + const remoteName = await project.settings.getRemote(); + const remotes = await env.git.getRemotes(); + const remoteUrl = remotes.find((r) => r.name === remoteName)?.url; + const nameWithOwner = remoteUrl ? parseNameWithOwner(remoteUrl) : null; + + if (!nameWithOwner) { + return ok({ prs: [], nameWithOwner: null, taskBranch }); + } + + if (invalidate) { + const octokit = await this.getOctokit(); + const response = await octokit.graphql<{ search: { nodes: GqlPrNode[] } }>( + SEARCH_PRS_QUERY, + { query: `repo:${nameWithOwner} is:pr head:${taskBranch}`, limit: 25 } + ); + const fresh = response.search.nodes.map((n) => this.gqlToUnified(n, nameWithOwner)); + const prs = await this.upsertMany(fresh); + return ok({ prs, nameWithOwner, taskBranch }); + } + + const prs = await this.fromDb(nameWithOwner); + const taskPrs = prs.filter((pr) => pr.metadata.headRefName === taskBranch); + return ok({ prs: taskPrs, nameWithOwner, taskBranch }); + } catch (error) { + log.error('Failed to get pull requests for task:', error); + const env2 = resolveTask(projectId, taskId); + return ok({ + prs: [], + nameWithOwner: null, + taskBranch: env2?.taskBranch ?? 
null, + }); + } + } + + // ── Mutations (always refresh cache after write) ───────────────────────── + + async createPullRequest(params: { + nameWithOwner: string; + head: string; + base: string; + title: string; + body?: string; + draft: boolean; + }): Promise<{ url: string; number: number }> { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(params.nameWithOwner); + const response = await octokit.rest.pulls.create({ + owner, + repo, + head: params.head, + base: params.base, + title: params.title, + body: params.body, + draft: params.draft, + }); + const { html_url: url, number } = response.data; + await this.getPullRequest(params.nameWithOwner, number, true); + return { url, number }; + } + + async mergePullRequest( + nameWithOwner: string, + prNumber: number, + options: { strategy: 'merge' | 'squash' | 'rebase'; commitHeadOid?: string } + ): Promise<{ sha: string | null; merged: boolean }> { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + const response = await octokit.rest.pulls.merge({ + owner, + repo, + pull_number: prNumber, + merge_method: options.strategy, + sha: options.commitHeadOid, + }); + await this.getPullRequest(nameWithOwner, prNumber, true); + return { sha: response.data.sha ?? 
null, merged: response.data.merged }; + } + + // ── Pass-through reads (no DB involvement) ─────────────────────────────── + + async getCheckRuns(nameWithOwner: string, prNumber: number): Promise { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + + const allNodes: Array = []; + let cursor: string | undefined; + + for (;;) { + const response: GqlCheckRunsResponse = await octokit.graphql(GET_PR_CHECK_RUNS_QUERY, { + owner, + repo, + number: prNumber, + cursor, + }); + const contexts = + response.repository.pullRequest?.commits.nodes[0]?.commit?.statusCheckRollup?.contexts; + if (!contexts) break; + allNodes.push(...contexts.nodes); + if (!contexts.pageInfo.hasNextPage) break; + cursor = contexts.pageInfo.endCursor ?? undefined; + } + + return allNodes.map((node) => { + if (node.__typename === 'CheckRun') { + return { + name: node.name, + bucket: mapCheckRunBucket(node.status, node.conclusion), + workflowName: node.checkSuite?.workflowRun?.workflow?.name, + appName: node.checkSuite?.app?.name, + appLogoUrl: node.checkSuite?.app?.logoUrl, + detailsUrl: node.detailsUrl ?? undefined, + startedAt: node.startedAt ?? undefined, + completedAt: node.completedAt ?? undefined, + }; + } + return { + name: node.context, + bucket: mapStatusContextBucket(node.state), + detailsUrl: node.targetUrl ?? 
undefined, + startedAt: node.createdAt, + }; + }); + } + + async getPullRequestFiles(nameWithOwner: string, prNumber: number): Promise { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + const files = await octokit.paginate(octokit.rest.pulls.listFiles, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + return files.map((f) => ({ + filename: f.filename, + status: f.status, + additions: f.additions, + deletions: f.deletions, + patch: f.patch, + })); + } + + async getPrComments(nameWithOwner: string, prNumber: number): Promise { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + + const [commentsData, reviewsData] = await Promise.all([ + octokit.paginate(octokit.rest.issues.listComments, { + owner, + repo, + issue_number: prNumber, + per_page: 100, + }), + octokit.paginate(octokit.rest.pulls.listReviews, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }), + ]); + + return { + comments: commentsData.map((c) => ({ + id: c.id, + author: { login: c.user?.login ?? 'unknown', avatarUrl: c.user?.avatar_url }, + body: c.body ?? '', + createdAt: c.created_at, + })), + reviews: reviewsData + .filter( + (r) => + r.state !== 'PENDING' && + (r.body || r.state === 'APPROVED' || r.state === 'CHANGES_REQUESTED') + ) + .map((r) => { + const fallbackSubmittedAt = + 'updated_at' in r && typeof r.updated_at === 'string' ? r.updated_at : undefined; + return { + id: r.id, + author: { login: r.user?.login ?? 'unknown', avatarUrl: r.user?.avatar_url }, + body: r.body ?? '', + submittedAt: r.submitted_at ?? 
fallbackSubmittedAt, + state: r.state, + }; + }), + }; + } + + async addPrComment( + nameWithOwner: string, + prNumber: number, + body: string + ): Promise<{ id: number }> { + const octokit = await this.getOctokit(); + const { owner, repo } = splitRepo(nameWithOwner); + const response = await octokit.rest.issues.createComment({ + owner, + repo, + issue_number: prNumber, + body, + }); + return { id: response.data.id }; + } + + // ── Project bootstrap sync ─────────────────────────────────────────────── + + async syncPullRequests(nameWithOwner: string): Promise { + const sinceUpdatedAt = await this.getLatestUpdatedAt(nameWithOwner); + const { owner, repo } = splitRepo(nameWithOwner); + const octokit = await this.getOctokit(); + const toUpsert: PullRequest[] = []; + let cursor: string | undefined; + + for (;;) { + const response = await octokit.graphql<{ + repository: { + pullRequests: { + pageInfo: { hasNextPage: boolean; endCursor: string | null }; + nodes: GqlPrNode[]; + }; + }; + }>(SYNC_PRS_QUERY, { owner, repo, cursor }); + + const { nodes, pageInfo } = response.repository.pullRequests; + let reachedCursor = false; + for (const node of nodes) { + if (sinceUpdatedAt && node.updatedAt <= sinceUpdatedAt) { + reachedCursor = true; + break; + } + toUpsert.push(this.gqlToUnified(node, nameWithOwner)); + } + + if (reachedCursor || !pageInfo.hasNextPage) break; + cursor = pageInfo.endCursor ?? 
undefined; + } + + if (toUpsert.length > 0) { + await this.upsertMany(toUpsert); + } + } + + // ── Private: DB helpers ────────────────────────────────────────────────── + + private async upsert(pr: PullRequest): Promise { + const serialized = this.serialize(pr); + const [row] = await db + .insert(pullRequests) + .values({ id: pr.url, ...serialized, fetchedAt: sql`CURRENT_TIMESTAMP` }) + .onConflictDoUpdate({ + target: pullRequests.url, + set: { ...serialized, fetchedAt: sql`CURRENT_TIMESTAMP` }, + }) + .returning(); + return this.dbRowToUnified(row); + } + + private async upsertMany(prs: PullRequest[]): Promise { + return Promise.all(prs.map((pr) => this.upsert(pr))); + } + + private async fromDb(nameWithOwner: string): Promise { + const rows = await db + .select() + .from(pullRequests) + .where(eq(pullRequests.nameWithOwner, nameWithOwner)) + .orderBy(desc(pullRequests.updatedAt)); + return rows.map((r) => this.dbRowToUnified(r)); + } + + private async getLatestUpdatedAt(nameWithOwner: string): Promise { + const [row] = await db + .select({ updatedAt: pullRequests.updatedAt }) + .from(pullRequests) + .where(eq(pullRequests.nameWithOwner, nameWithOwner)) + .orderBy(desc(pullRequests.updatedAt)) + .limit(1); + return row?.updatedAt; + } + + // ── Private: GraphQL → unified model ──────────────────────────────────── + + private gqlToUnified(node: GqlPrNode, nameWithOwner: string): PullRequest { + const status: PullRequestStatus = + node.state === 'MERGED' ? 'merged' : node.state === 'CLOSED' ? 'closed' : 'open'; + + const reviewerMap = new Map(); + for (const req of node.reviewRequests.nodes) { + const login = req.requestedReviewer?.login ?? 
req.requestedReviewer?.name; + if (login) reviewerMap.set(login, { login, state: 'PENDING' }); + } + for (const review of node.latestReviews.nodes) { + const login = review.author?.login; + if (login) reviewerMap.set(login, { login, state: review.state as GitHubReviewer['state'] }); + } + + return { + id: node.url, + identifier: `#${node.number}`, + nameWithOwner, + provider: 'github', + url: node.url, + title: node.title, + status, + author: node.author ? { userName: node.author.login, displayName: node.author.login } : null, + isDraft: node.isDraft, + createdAt: node.createdAt, + updatedAt: node.updatedAt, + metadata: { + number: node.number, + headRefName: node.headRefName, + headRefOid: node.headRefOid, + baseRefName: node.baseRefName, + headRepository: node.headRepository, + labels: node.labels.nodes, + assignees: node.assignees.nodes, + reviewDecision: node.reviewDecision, + reviewers: Array.from(reviewerMap.values()), + additions: node.additions, + deletions: node.deletions, + changedFiles: node.changedFiles, + mergeable: node.mergeable, + mergeStateStatus: node.mergeStateStatus, + body: node.body, + }, + }; + } + + // ── Private: DB serialize / deserialize ────────────────────────────────── + + private serialize(pr: PullRequest) { + return { + provider: pr.provider, + nameWithOwner: pr.nameWithOwner, + url: pr.url, + title: pr.title, + status: pr.status, + author: JSON.stringify(pr.author), + isDraft: Number(pr.isDraft), + metadata: JSON.stringify(pr.metadata), + createdAt: pr.createdAt, + updatedAt: pr.updatedAt, + }; + } + + private dbRowToUnified(row: typeof pullRequests.$inferSelect): PullRequest { + const metadata = JSON.parse(row.metadata ?? '{}') as PullRequest['metadata']; + const identifier = + row.provider === 'github' && 'number' in metadata ? 
`#${metadata.number}` : row.url; + return { + id: row.id, + identifier, + nameWithOwner: row.nameWithOwner, + provider: row.provider as PullRequest['provider'], + url: row.url, + title: row.title, + status: row.status as PullRequest['status'], + author: row.author ? JSON.parse(row.author) : null, + isDraft: Boolean(row.isDraft), + metadata, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; + } +} + +export const prService = new PrService(getOctokit); diff --git a/src/main/core/repository/controller.ts b/src/main/core/repository/controller.ts new file mode 100644 index 000000000..c10f8b3d2 --- /dev/null +++ b/src/main/core/repository/controller.ts @@ -0,0 +1,30 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { projectManager } from '../projects/project-manager'; + +export const repositoryController = createRPCController({ + getBranches: async (projectId: string) => { + const project = projectManager.getProject(projectId); + if (!project) { + throw new Error('Project not found'); + } + return project.git.getBranches(); + }, + getDefaultBranch: async (projectId: string) => { + const project = projectManager.getProject(projectId); + if (!project) { + throw new Error('Project not found'); + } + const [name, remote] = await Promise.all([ + project.settings.getDefaultBranch(), + project.settings.getRemote(), + ]); + return { name, remote, existsLocally: true }; + }, + getRemotes: async (projectId: string) => { + const project = projectManager.getProject(projectId); + if (!project) { + throw new Error('Project not found'); + } + return project.git.getRemotes(); + }, +}); diff --git a/src/main/core/settings/controller.ts b/src/main/core/settings/controller.ts new file mode 100644 index 000000000..7c3fafc7e --- /dev/null +++ b/src/main/core/settings/controller.ts @@ -0,0 +1,24 @@ +import { createRPCController } from '@/shared/ipc/rpc'; +import { appSettingsService, type AppSettings, type AppSettingsKey } from './settings-service'; + +export 
const appSettingsController = createRPCController({ + get: (key: T): Promise => appSettingsService.get(key), + + getAll: (): Promise => appSettingsService.getAll(), + + getWithMeta: ( + key: T + ): Promise<{ + value: AppSettings[T]; + defaults: AppSettings[T]; + overrides: Partial; + }> => appSettingsService.getWithMeta(key), + + update: (key: T, value: AppSettings[T]): Promise => + appSettingsService.update(key, value), + + reset: (key: T): Promise => appSettingsService.reset(key), + + resetField: (key: T, field: string): Promise => + appSettingsService.resetField(key, field as keyof AppSettings[T]), +}); diff --git a/src/main/core/settings/override-settings.ts b/src/main/core/settings/override-settings.ts new file mode 100644 index 000000000..162b7793f --- /dev/null +++ b/src/main/core/settings/override-settings.ts @@ -0,0 +1,112 @@ +import { eq } from 'drizzle-orm'; +import type { ZodType } from 'zod'; +import { db } from '@main/db/client'; +import { appSettings } from '@main/db/schema'; +import { computeTrueOverrides, mergeDeep } from './utils'; + +export class OverrideSettings { + private cache: Record | null = null; + + constructor( + private readonly storageKey: string, + private readonly getExternalDefaults: () => Record, + private readonly itemSchema: ZodType> + ) {} + + private async readRawOverrides(): Promise>> { + const [row] = await db + .select() + .from(appSettings) + .where(eq(appSettings.key, this.storageKey)) + .execute(); + if (!row) return {}; + try { + return JSON.parse(row.value) as Record>; + } catch { + return {}; + } + } + + private async storeOverrides(overrides: Record>): Promise { + if (Object.keys(overrides).length === 0) { + await db.delete(appSettings).where(eq(appSettings.key, this.storageKey)).execute(); + } else { + const serialized = JSON.stringify(overrides); + await db + .insert(appSettings) + .values({ key: this.storageKey, value: serialized }) + .onConflictDoUpdate({ target: appSettings.key, set: { value: serialized } }) + 
.execute(); + } + this.cache = null; + } + + async getAll(): Promise> { + if (this.cache) return this.cache; + + const externalDefaults = this.getExternalDefaults(); + const storedOverrides = await this.readRawOverrides(); + const result: Record = {}; + const allIds = new Set([...Object.keys(externalDefaults), ...Object.keys(storedOverrides)]); + + for (const id of allIds) { + const def = (externalDefaults[id] ?? {}) as Record; + const override = (storedOverrides[id] ?? {}) as Record; + result[id] = mergeDeep(def, override) as TConfig; + } + + this.cache = result; + return result; + } + + async getItem(id: string): Promise { + const all = await this.getAll(); + return all[id]; + } + + async getItemWithMeta(id: string): Promise<{ + value: TConfig; + defaults: TConfig; + overrides: Partial; + } | null> { + const externalDefaults = this.getExternalDefaults(); + const defaults = externalDefaults[id]; + if (!defaults) return null; + + const storedOverrides = await this.readRawOverrides(); + const itemOverrides = (storedOverrides[id] ?? {}) as Record; + const trueOverrides = computeTrueOverrides( + itemOverrides, + defaults as Record + ) as Partial; + const value = mergeDeep(defaults as Record, itemOverrides) as TConfig; + + return { value, defaults, overrides: trueOverrides }; + } + + async updateItem(id: string, config: Partial): Promise { + const externalDefaults = this.getExternalDefaults(); + const defaults = (externalDefaults[id] ?? 
{}) as Record; + const validated = this.itemSchema.parse(config) as Record; + const delta = computeTrueOverrides(validated, defaults) as Partial; + + const storedOverrides = await this.readRawOverrides(); + if (Object.keys(delta).length === 0) { + delete storedOverrides[id]; + } else { + storedOverrides[id] = delta; + } + await this.storeOverrides(storedOverrides); + } + + async resetItem(id: string): Promise { + const storedOverrides = await this.readRawOverrides(); + delete storedOverrides[id]; + await this.storeOverrides(storedOverrides); + } + + async resetAll(): Promise { + await db.delete(appSettings).where(eq(appSettings.key, this.storageKey)).execute(); + this.cache = null; + } +} diff --git a/src/main/core/settings/provider-settings-controller.ts b/src/main/core/settings/provider-settings-controller.ts new file mode 100644 index 000000000..752539d14 --- /dev/null +++ b/src/main/core/settings/provider-settings-controller.ts @@ -0,0 +1,25 @@ +import { createRPCController } from '@/shared/ipc/rpc'; +import type { ProviderCustomConfig } from '@shared/app-settings'; +import { providerOverrideSettings } from './provider-settings-service'; + +export const providerSettingsController = createRPCController({ + getAll: (): Promise> => providerOverrideSettings.getAll(), + + getItem: (id: string): Promise => + providerOverrideSettings.getItem(id), + + getItemWithMeta: ( + id: string + ): Promise<{ + value: ProviderCustomConfig; + defaults: ProviderCustomConfig; + overrides: Partial; + } | null> => providerOverrideSettings.getItemWithMeta(id), + + updateItem: (id: string, config: Partial): Promise => + providerOverrideSettings.updateItem(id, config), + + resetItem: (id: string): Promise => providerOverrideSettings.resetItem(id), + + resetAll: (): Promise => providerOverrideSettings.resetAll(), +}); diff --git a/src/main/core/settings/provider-settings-service.ts b/src/main/core/settings/provider-settings-service.ts new file mode 100644 index 000000000..7ea7d3477 --- 
/dev/null +++ b/src/main/core/settings/provider-settings-service.ts @@ -0,0 +1,9 @@ +import type { ProviderCustomConfig } from '@shared/app-settings'; +import { OverrideSettings } from './override-settings'; +import { providerConfigDefaults, providerCustomConfigEntrySchema } from './schema'; + +export const providerOverrideSettings = new OverrideSettings( + 'providerConfigs', + () => providerConfigDefaults as Record, + providerCustomConfigEntrySchema +); diff --git a/src/main/core/settings/schema.ts b/src/main/core/settings/schema.ts new file mode 100644 index 000000000..62a539dc9 --- /dev/null +++ b/src/main/core/settings/schema.ts @@ -0,0 +1,121 @@ +import z from 'zod'; +import { AGENT_PROVIDER_IDS, AGENT_PROVIDERS } from '@shared/agent-provider-registry'; +import { openInAppIdSchema } from '@shared/openInApps'; +import { DEFAULT_AGENT_ID } from './settings-registry'; + +export const localProjectSettingsSchema = z.object({ + defaultProjectsDirectory: z.string(), + defaultWorktreeDirectory: z.string(), + branchPrefix: z.string(), + pushOnCreate: z.boolean(), +}); + +export const notificationSettingsSchema = z.object({ + enabled: z.boolean(), + sound: z.boolean(), + osNotifications: z.boolean(), + soundFocusMode: z.enum(['always', 'unfocused']), +}); + +export const taskSettingsSchema = z.object({ + autoGenerateName: z.boolean(), + autoApproveByDefault: z.boolean(), + autoTrustWorktrees: z.boolean(), +}); + +export const terminalSettingsSchema = z.object({ + fontFamily: z.string().optional(), + autoCopyOnSelection: z.boolean(), +}); + +export const themeSchema = z + .optional(z.enum(['light', 'dark', 'dark-black', 'system'])) + .default('system'); + +export const defaultAgentSchema = z.optional(z.enum(AGENT_PROVIDER_IDS)).default(DEFAULT_AGENT_ID); + +export const keyboardSettingsSchema = z + .optional( + z.object({ + commandPalette: z.string().optional(), + settings: z.string().optional(), + toggleLeftSidebar: z.string().optional(), + toggleRightSidebar: 
z.string().optional(), + toggleTheme: z.string().optional(), + toggleKanban: z.string().optional(), + toggleEditor: z.string().optional(), + closeModal: z.string().optional(), + nextProject: z.string().optional(), + prevProject: z.string().optional(), + newTask: z.string().optional(), + nextAgent: z.string().optional(), + prevAgent: z.string().optional(), + openInEditor: z.string().optional(), + }) + ) + .default({}); + +export const providerCustomConfigEntrySchema = z.object({ + cli: z.string().optional(), + resumeFlag: z.string().optional(), + defaultArgs: z.array(z.string()).optional(), + autoApproveFlag: z.string().optional(), + initialPromptFlag: z.string().optional(), + sessionIdFlag: z.string().optional(), + extraArgs: z.string().optional(), + env: z.record(z.string(), z.string()).optional(), +}); + +export const providerConfigDefaults = Object.fromEntries( + AGENT_PROVIDERS.filter( + (p) => p.cli || p.resumeFlag || p.autoApproveFlag || p.initialPromptFlag || p.defaultArgs + ).map((p) => [ + p.id, + { + ...(p.cli ? { cli: p.cli } : {}), + ...(p.resumeFlag ? { resumeFlag: p.resumeFlag } : {}), + ...(p.autoApproveFlag ? { autoApproveFlag: p.autoApproveFlag } : {}), + ...(p.initialPromptFlag !== undefined ? { initialPromptFlag: p.initialPromptFlag } : {}), + ...(p.defaultArgs ? { defaultArgs: p.defaultArgs } : {}), + ...(p.sessionIdFlag ? 
{ sessionIdFlag: p.sessionIdFlag } : {}), + }, + ]) +); + +export const interfaceSettingsSchema = z.object({ + taskHoverAction: z.enum(['delete', 'archive']), + autoRightSidebarBehavior: z.boolean(), +}); + +export const browserPreviewSettingsSchema = z.object({ enabled: z.boolean() }); + +export const openInSettingsSchema = z.object({ + default: openInAppIdSchema, + hidden: z.array(openInAppIdSchema), +}); + +export const APP_SETTINGS_SCHEMA_MAP = { + localProject: localProjectSettingsSchema, + tasks: taskSettingsSchema, + defaultAgent: defaultAgentSchema, + keyboard: keyboardSettingsSchema, + notifications: notificationSettingsSchema, + theme: themeSchema, + openIn: openInSettingsSchema, + interface: interfaceSettingsSchema, + terminal: terminalSettingsSchema, + browserPreview: browserPreviewSettingsSchema, +} as const; + +export const appSettingsSchema = z.object({ + localProject: localProjectSettingsSchema, + tasks: taskSettingsSchema, + defaultAgent: defaultAgentSchema, + keyboard: keyboardSettingsSchema, + notifications: notificationSettingsSchema, + theme: themeSchema, + openIn: openInSettingsSchema, + interface: interfaceSettingsSchema, + terminal: terminalSettingsSchema, + browserPreview: browserPreviewSettingsSchema, +}); diff --git a/src/main/core/settings/settings-registry.ts b/src/main/core/settings/settings-registry.ts new file mode 100644 index 000000000..60643b454 --- /dev/null +++ b/src/main/core/settings/settings-registry.ts @@ -0,0 +1,52 @@ +import { homedir } from 'node:os'; +import { join } from 'node:path'; +import type { AppSettings, AppSettingsKey } from '@shared/app-settings'; +import type { OpenInAppId } from '@shared/openInApps'; + +export const DEFAULT_AGENT_ID = 'claude'; + +type SettingsDefaultsMap = { + [K in AppSettingsKey]: AppSettings[K] | (() => AppSettings[K]); +}; + +export const SETTINGS_DEFAULTS = { + localProject: () => ({ + defaultProjectsDirectory: join(homedir(), 'emdash', 'repositories'), + defaultWorktreeDirectory: 
join(homedir(), 'emdash', 'worktrees'), + branchPrefix: 'emdash', + pushOnCreate: true, + }), + tasks: { + autoGenerateName: true, + autoApproveByDefault: false, + autoTrustWorktrees: true, + }, + notifications: { + enabled: true, + sound: true, + osNotifications: true, + soundFocusMode: 'always' as const, + }, + terminal: { + autoCopyOnSelection: false, + }, + theme: 'system' as const, + defaultAgent: DEFAULT_AGENT_ID, + keyboard: {}, + openIn: { + default: 'terminal' as const, + hidden: [] as OpenInAppId[], + }, + interface: { + taskHoverAction: 'delete' as const, + autoRightSidebarBehavior: false, + }, + browserPreview: { + enabled: true, + }, +} satisfies SettingsDefaultsMap; + +export function getDefaultForKey(key: K): AppSettings[K] { + const d = SETTINGS_DEFAULTS[key]; + return (typeof d === 'function' ? (d as () => AppSettings[K])() : d) as AppSettings[K]; +} diff --git a/src/main/core/settings/settings-service.ts b/src/main/core/settings/settings-service.ts new file mode 100644 index 000000000..2f3100d48 --- /dev/null +++ b/src/main/core/settings/settings-service.ts @@ -0,0 +1,142 @@ +import { eq } from 'drizzle-orm'; +import { AppSettingsKeys, type AppSettings, type AppSettingsKey } from '@shared/app-settings'; +import { db } from '@main/db/client'; +import { appSettings } from '@main/db/schema'; +import { APP_SETTINGS_SCHEMA_MAP } from './schema'; +import { getDefaultForKey } from './settings-registry'; +import { computeDelta, computeTrueOverrides, isDeepEqual, isPlainObject, mergeDeep } from './utils'; + +export type { AppSettings, AppSettingsKey } from '@shared/app-settings'; +export { AppSettingsKeys } from '@shared/app-settings'; + +class Settings { + private cache: Partial = {}; + + private async readRaw(key: AppSettingsKey): Promise { + const [row] = await db.select().from(appSettings).where(eq(appSettings.key, key)).execute(); + if (!row) return null; + try { + return JSON.parse(row.value); + } catch { + return null; + } + } + + private async 
storeRaw(key: AppSettingsKey, value: unknown): Promise { + const serialized = JSON.stringify(value); + await db + .insert(appSettings) + .values({ key, value: serialized }) + .onConflictDoUpdate({ target: appSettings.key, set: { value: serialized } }) + .execute(); + } + + private async deleteRow(key: AppSettingsKey): Promise { + await db.delete(appSettings).where(eq(appSettings.key, key)).execute(); + } + + async get(key: K): Promise { + if (key in this.cache) return this.cache[key] as AppSettings[K]; + + const defaults = getDefaultForKey(key); + const raw = await this.readRaw(key); + + let value: AppSettings[K]; + if (raw === null || raw === undefined) { + value = defaults; + } else if (isPlainObject(raw) && isPlainObject(defaults)) { + value = mergeDeep(defaults as Record, raw) as AppSettings[K]; + } else { + value = raw as AppSettings[K]; + } + + this.cache[key] = value; + return value; + } + + async getWithMeta( + key: K + ): Promise<{ + value: AppSettings[K]; + defaults: AppSettings[K]; + overrides: Partial; + }> { + const defaults = getDefaultForKey(key); + const raw = await this.readRaw(key); + + if (raw === null || raw === undefined) { + return { value: defaults, defaults, overrides: {} as Partial }; + } + + let value: AppSettings[K]; + let overrides: Partial; + + if (isPlainObject(raw) && isPlainObject(defaults)) { + value = mergeDeep(defaults as Record, raw) as AppSettings[K]; + overrides = computeTrueOverrides(raw, defaults as Record) as Partial< + AppSettings[K] + >; + } else { + value = raw as AppSettings[K]; + overrides = (isDeepEqual(raw, defaults) ? 
{} : raw) as Partial; + } + + return { value, defaults, overrides }; + } + + async update(key: K, value: AppSettings[K]): Promise { + const validated = APP_SETTINGS_SCHEMA_MAP[key].parse(value) as AppSettings[K]; + const defaults = getDefaultForKey(key); + + if (isPlainObject(validated) && isPlainObject(defaults)) { + const delta = computeDelta( + validated as Record, + defaults as Record + ); + if (Object.keys(delta).length === 0) { + await this.deleteRow(key); + } else { + await this.storeRaw(key, delta); + } + } else if (isDeepEqual(validated, defaults)) { + await this.deleteRow(key); + } else { + await this.storeRaw(key, validated); + } + + delete this.cache[key]; + } + + async reset(key: K): Promise { + await this.deleteRow(key); + delete this.cache[key]; + } + + async resetField(key: K, field: keyof AppSettings[K]): Promise { + const raw = await this.readRaw(key); + if (!isPlainObject(raw)) return; + + const delta = { ...raw }; + delete delta[field as string]; + + if (Object.keys(delta).length === 0) { + await this.deleteRow(key); + } else { + await this.storeRaw(key, delta); + } + delete this.cache[key]; + } + + async getAll(): Promise { + const entries = await Promise.all( + AppSettingsKeys.map(async (key) => [key, await this.get(key)] as const) + ); + return Object.fromEntries(entries) as AppSettings; + } + + async initialize(): Promise { + await this.getAll(); + } +} + +export const appSettingsService = new Settings(); diff --git a/src/main/core/settings/utils.ts b/src/main/core/settings/utils.ts new file mode 100644 index 000000000..cde998e48 --- /dev/null +++ b/src/main/core/settings/utils.ts @@ -0,0 +1,53 @@ +export function isDeepEqual(a: unknown, b: unknown): boolean { + return JSON.stringify(a) === JSON.stringify(b); +} + +export function isPlainObject(v: unknown): v is Record { + return typeof v === 'object' && v !== null && !Array.isArray(v); +} + +export function mergeDeep( + base: Record, + overrides: Record +): Record { + const result = { 
...base }; + for (const [k, v] of Object.entries(overrides)) { + if (v === undefined) continue; + const baseVal = base[k]; + if (isPlainObject(v) && isPlainObject(baseVal)) { + result[k] = mergeDeep(baseVal, v); + } else { + result[k] = v; + } + } + return result; +} + +export function computeDelta( + value: Record, + defaults: Record +): Record { + const delta: Record = {}; + for (const [k, v] of Object.entries(value)) { + if (!isDeepEqual(v, defaults[k])) { + delta[k] = v; + } + } + return delta; +} + +// Returns only fields in `stored` that differ from `defaults`. +// Handles legacy rows that stored the full value — fields at their default +// value are excluded from the result (they are not "truly overridden"). +export function computeTrueOverrides( + stored: Record, + defaults: Record +): Record { + const overrides: Record = {}; + for (const [k, v] of Object.entries(stored)) { + if (!isDeepEqual(v, defaults[k])) { + overrides[k] = v; + } + } + return overrides; +} diff --git a/src/main/core/shared/oauth-flow.test.ts b/src/main/core/shared/oauth-flow.test.ts new file mode 100644 index 000000000..8b7708cfb --- /dev/null +++ b/src/main/core/shared/oauth-flow.test.ts @@ -0,0 +1,132 @@ +import * as http from 'http'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { executeOAuthFlow } from './oauth-flow'; + +const mockOpenExternal = vi.fn().mockResolvedValue(undefined); +vi.mock('electron', () => ({ + shell: { openExternal: (...args: unknown[]) => mockOpenExternal(...args) }, +})); + +const mockFetch = vi.fn(); +vi.stubGlobal('fetch', mockFetch); + +function httpGet(url: string): Promise { + return new Promise((resolve, reject) => { + http + .get(url, (res) => { + res.resume(); + res.on('end', resolve); + }) + .on('error', reject); + }); +} + +describe('executeOAuthFlow', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('opens browser with PKCE params and exchanges code', async () => { + const flowPromise = executeOAuthFlow({ + 
authorizeUrl: 'https://auth.test/sign-in', + exchangeUrl: 'https://auth.test/api/exchange', + successRedirectUrl: 'https://auth.test/auth/success', + errorRedirectUrl: 'https://auth.test/auth/error', + timeoutMs: 5000, + }); + + await vi.waitFor(() => expect(mockOpenExternal).toHaveBeenCalled()); + + const openedUrl = new URL(mockOpenExternal.mock.calls[0][0]); + expect(openedUrl.origin).toBe('https://auth.test'); + expect(openedUrl.pathname).toBe('/sign-in'); + expect(openedUrl.searchParams.has('state')).toBe(true); + expect(openedUrl.searchParams.has('redirect_uri')).toBe(true); + expect(openedUrl.searchParams.get('code_challenge_method')).toBe('S256'); + expect(openedUrl.searchParams.has('code_challenge')).toBe(true); + + const mockResponse = { token: 'abc', user: { id: '1' } }; + mockFetch.mockResolvedValueOnce({ + ok: true, + json: () => Promise.resolve(mockResponse), + }); + + const redirectUri = openedUrl.searchParams.get('redirect_uri')!; + const state = openedUrl.searchParams.get('state')!; + await httpGet(`${redirectUri}?state=${encodeURIComponent(state)}&code=test-code`); + + const result = await flowPromise; + expect(result).toEqual(mockResponse); + }); + + it('passes extra query params to authorize URL', async () => { + const flowPromise = executeOAuthFlow({ + authorizeUrl: 'https://auth.test/authorize/github', + exchangeUrl: 'https://auth.test/api/exchange', + successRedirectUrl: 'https://auth.test/auth/success', + errorRedirectUrl: 'https://auth.test/auth/error', + extraParams: { provider: 'github', scope: 'repo' }, + timeoutMs: 5000, + }); + + await vi.waitFor(() => expect(mockOpenExternal).toHaveBeenCalled()); + + const openedUrl = new URL(mockOpenExternal.mock.calls[0][0]); + expect(openedUrl.searchParams.get('provider')).toBe('github'); + expect(openedUrl.searchParams.get('scope')).toBe('repo'); + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: () => Promise.resolve({ accessToken: 'tok' }), + }); + + const redirectUri = 
openedUrl.searchParams.get('redirect_uri')!; + const state = openedUrl.searchParams.get('state')!; + await httpGet(`${redirectUri}?state=${encodeURIComponent(state)}&code=c`); + + await flowPromise; + }); + + it('rejects on state mismatch', async () => { + const flowPromise = executeOAuthFlow({ + authorizeUrl: 'https://auth.test/sign-in', + exchangeUrl: 'https://auth.test/api/exchange', + successRedirectUrl: 'https://auth.test/auth/success', + errorRedirectUrl: 'https://auth.test/auth/error', + timeoutMs: 5000, + }); + + await vi.waitFor(() => expect(mockOpenExternal).toHaveBeenCalled()); + + const openedUrl = new URL(mockOpenExternal.mock.calls[0][0]); + const redirectUri = openedUrl.searchParams.get('redirect_uri')!; + await httpGet(`${redirectUri}?state=wrong-state&code=test-code`); + + await expect(flowPromise).rejects.toThrow('State mismatch'); + }); + + it('rejects on exchange failure', async () => { + const flowPromise = executeOAuthFlow({ + authorizeUrl: 'https://auth.test/sign-in', + exchangeUrl: 'https://auth.test/api/exchange', + successRedirectUrl: 'https://auth.test/auth/success', + errorRedirectUrl: 'https://auth.test/auth/error', + timeoutMs: 5000, + }); + + await vi.waitFor(() => expect(mockOpenExternal).toHaveBeenCalled()); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 400, + json: () => Promise.resolve({ error: 'invalid code' }), + }); + + const openedUrl = new URL(mockOpenExternal.mock.calls[0][0]); + const redirectUri = openedUrl.searchParams.get('redirect_uri')!; + const state = openedUrl.searchParams.get('state')!; + await httpGet(`${redirectUri}?state=${encodeURIComponent(state)}&code=c`); + + await expect(flowPromise).rejects.toThrow('invalid code'); + }); +}); diff --git a/src/main/core/shared/oauth-flow.ts b/src/main/core/shared/oauth-flow.ts new file mode 100644 index 000000000..45d74da5e --- /dev/null +++ b/src/main/core/shared/oauth-flow.ts @@ -0,0 +1,163 @@ +import { createHash, randomBytes } from 'crypto'; +import * as 
http from 'http'; +import { shell } from 'electron'; + +export interface OAuthFlowOptions { + authorizeUrl: string; + exchangeUrl: string; + successRedirectUrl: string; + errorRedirectUrl: string; + extraParams?: Record; + timeoutMs?: number; +} + +/** + * Execute a full PKCE OAuth flow: + * 1. Generate PKCE challenge + state + * 2. Start ephemeral loopback HTTP server + * 3. Open browser to authorizeUrl with PKCE params + * 4. Wait for callback with code + * 5. Exchange code for tokens + * + * Returns the raw JSON response from the exchange endpoint. + * Each caller extracts what it needs. + */ +export async function executeOAuthFlow( + options: OAuthFlowOptions +): Promise> { + const { + authorizeUrl, + exchangeUrl, + successRedirectUrl, + errorRedirectUrl, + extraParams, + timeoutMs = 300_000, + } = options; + + const state = randomBytes(12).toString('base64url'); + const codeVerifier = randomBytes(32).toString('base64url'); + const codeChallenge = createHash('sha256').update(codeVerifier).digest('base64url'); + + const { code } = await startLoopbackServer({ + authorizeUrl, + successRedirectUrl, + errorRedirectUrl, + state, + codeChallenge, + extraParams, + timeoutMs, + }); + + return exchangeCode(exchangeUrl, state, code, codeVerifier); +} + +// --------------------------------------------------------------------------- +// Internal helpers +// --------------------------------------------------------------------------- + +interface LoopbackOptions { + authorizeUrl: string; + successRedirectUrl: string; + errorRedirectUrl: string; + state: string; + codeChallenge: string; + extraParams?: Record; + timeoutMs: number; +} + +function startLoopbackServer(opts: LoopbackOptions): Promise<{ code: string }> { + const { + authorizeUrl, + successRedirectUrl, + errorRedirectUrl, + state, + codeChallenge, + extraParams, + timeoutMs, + } = opts; + + return new Promise((resolve, reject) => { + const server = http.createServer((req, res) => { + if (!req.url) { + 
res.writeHead(400).end(); + return; + } + + const url = new URL(req.url, `http://${req.headers.host}`); + if (url.pathname !== '/callback') { + res.writeHead(404).end(); + return; + } + + const returnedState = url.searchParams.get('state'); + const code = url.searchParams.get('code'); + + if (returnedState !== state || !code) { + res.writeHead(302, { Location: errorRedirectUrl }).end(); + reject(new Error('State mismatch or missing code in OAuth callback')); + setTimeout(() => server.close(), 1000); + return; + } + + res.writeHead(302, { Location: successRedirectUrl }).end(); + resolve({ code }); + setTimeout(() => server.close(), 2000); + }); + + const timeout = setTimeout(() => { + server.close(); + reject(new Error('OAuth authentication timed out')); + }, timeoutMs); + + server.on('error', (err) => { + clearTimeout(timeout); + reject(err); + }); + + server.on('close', () => { + clearTimeout(timeout); + }); + + server.listen(0, '127.0.0.1', () => { + const port = (server.address() as { port: number }).port; + const redirectUri = `http://127.0.0.1:${port}/callback`; + + const params = new URLSearchParams({ + state, + redirect_uri: redirectUri, + code_challenge: codeChallenge, + code_challenge_method: 'S256', + ...extraParams, + }); + + const fullUrl = `${authorizeUrl}?${params.toString()}`; + + shell.openExternal(fullUrl).catch((err) => { + clearTimeout(timeout); + server.close(); + reject(new Error(`Failed to open browser: ${err.message}`)); + }); + }); + }); +} + +async function exchangeCode( + exchangeUrl: string, + state: string, + code: string, + codeVerifier: string +): Promise> { + const response = await fetch(exchangeUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ state, code, code_verifier: codeVerifier }), + signal: AbortSignal.timeout(30_000), + }); + + if (!response.ok) { + const payload = (await response.json().catch(() => null)) as { error?: string } | null; + throw new Error(payload?.error || 
`Token exchange failed (${response.status})`); + } + + return (await response.json()) as Record; +} diff --git a/src/main/services/SkillsService.ts b/src/main/core/skills/SkillsService.ts similarity index 96% rename from src/main/services/SkillsService.ts rename to src/main/core/skills/SkillsService.ts index d42397096..e8d7af87b 100644 --- a/src/main/services/SkillsService.ts +++ b/src/main/core/skills/SkillsService.ts @@ -1,11 +1,12 @@ -import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; -import * as https from 'https'; -import { log } from '../lib/logger'; -import { parseFrontmatter, isValidSkillName, generateSkillMd } from '@shared/skills/validation'; +import * as fs from 'node:fs'; +import * as https from 'node:https'; +import * as os from 'node:os'; +import * as path from 'node:path'; import { agentTargets, skillScanPaths } from '@shared/skills/agentTargets'; -import type { CatalogSkill, CatalogIndex, DetectedAgent } from '@shared/skills/types'; +import type { CatalogIndex, CatalogSkill, DetectedAgent } from '@shared/skills/types'; +import { generateSkillMd, isValidSkillName, parseFrontmatter } from '@shared/skills/validation'; +import { log } from '@main/lib/logger'; +import bundledCatalog from './bundled-catalog.json'; const SKILLS_ROOT = path.join(os.homedir(), '.agentskills'); const EMDASH_META = path.join(SKILLS_ROOT, '.emdash'); @@ -75,7 +76,7 @@ export class SkillsService { // No disk cache — fall back to bundled catalog } - const bundled = await this.loadBundledCatalog(); + const bundled = this.loadBundledCatalog(); this.catalogCache = bundled; return this.mergeInstalledState(bundled); } @@ -416,14 +417,8 @@ export class SkillsService { // --- Private helpers --- - private async loadBundledCatalog(): Promise { - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const bundled = require('./skills/bundled-catalog.json') as CatalogIndex; - return bundled; - } catch { - return { version: 1, lastUpdated: 
new Date().toISOString(), skills: [] }; - } + private loadBundledCatalog(): CatalogIndex { + return bundledCatalog as CatalogIndex; } private async mergeInstalledState(catalog: CatalogIndex): Promise { diff --git a/src/main/services/skills/bundled-catalog.json b/src/main/core/skills/bundled-catalog.json similarity index 100% rename from src/main/services/skills/bundled-catalog.json rename to src/main/core/skills/bundled-catalog.json diff --git a/src/main/ipc/skillsIpc.ts b/src/main/core/skills/controller.ts similarity index 62% rename from src/main/ipc/skillsIpc.ts rename to src/main/core/skills/controller.ts index a5cb63c69..122fa1255 100644 --- a/src/main/ipc/skillsIpc.ts +++ b/src/main/core/skills/controller.ts @@ -1,9 +1,9 @@ -import { ipcMain } from 'electron'; -import { skillsService } from '../services/SkillsService'; -import { log } from '../lib/logger'; +import { createRPCController } from '@/shared/ipc/rpc'; +import { skillsService } from '@main/core/skills/SkillsService'; +import { log } from '@main/lib/logger'; -export function registerSkillsIpc(): void { - ipcMain.handle('skills:getCatalog', async () => { +export const skillsController = createRPCController({ + getCatalog: async () => { try { const catalog = await skillsService.getCatalogIndex(); return { success: true, data: catalog }; @@ -11,9 +11,9 @@ export function registerSkillsIpc(): void { log.error('Failed to get skills catalog:', error); return { success: false, error: error instanceof Error ? error.message : String(error) }; } - }); + }, - ipcMain.handle('skills:refreshCatalog', async () => { + refreshCatalog: async () => { try { const catalog = await skillsService.refreshCatalog(); return { success: true, data: catalog }; @@ -21,9 +21,9 @@ export function registerSkillsIpc(): void { log.error('Failed to refresh skills catalog:', error); return { success: false, error: error instanceof Error ? 
error.message : String(error) }; } - }); + }, - ipcMain.handle('skills:install', async (_, args: { skillId: string }) => { + install: async (args: { skillId: string }) => { try { const skill = await skillsService.installSkill(args.skillId); return { success: true, data: skill }; @@ -31,9 +31,9 @@ export function registerSkillsIpc(): void { log.error('Failed to install skill:', error); return { success: false, error: error instanceof Error ? error.message : String(error) }; } - }); + }, - ipcMain.handle('skills:uninstall', async (_, args: { skillId: string }) => { + uninstall: async (args: { skillId: string }) => { try { await skillsService.uninstallSkill(args.skillId); return { success: true }; @@ -41,9 +41,9 @@ export function registerSkillsIpc(): void { log.error('Failed to uninstall skill:', error); return { success: false, error: error instanceof Error ? error.message : String(error) }; } - }); + }, - ipcMain.handle('skills:getDetail', async (_, args: { skillId: string }) => { + getDetail: async (args: { skillId: string }) => { try { const skill = await skillsService.getSkillDetail(args.skillId); return { success: true, data: skill }; @@ -51,9 +51,9 @@ export function registerSkillsIpc(): void { log.error('Failed to get skill detail:', error); return { success: false, error: error instanceof Error ? error.message : String(error) }; } - }); + }, - ipcMain.handle('skills:getDetectedAgents', async () => { + getDetectedAgents: async () => { try { const agents = await skillsService.getDetectedAgents(); return { success: true, data: agents }; @@ -61,18 +61,15 @@ export function registerSkillsIpc(): void { log.error('Failed to detect agents:', error); return { success: false, error: error instanceof Error ? 
error.message : String(error) }; } - }); + }, - ipcMain.handle( - 'skills:create', - async (_, args: { name: string; description: string; content?: string }) => { - try { - const skill = await skillsService.createSkill(args.name, args.description, args.content); - return { success: true, data: skill }; - } catch (error) { - log.error('Failed to create skill:', error); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } + create: async (args: { name: string; description: string; content?: string }) => { + try { + const skill = await skillsService.createSkill(args.name, args.description, args.content); + return { success: true, data: skill }; + } catch (error) { + log.error('Failed to create skill:', error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; } - ); -} + }, +}); diff --git a/src/main/core/ssh/build-connect-config.ts b/src/main/core/ssh/build-connect-config.ts new file mode 100644 index 000000000..aa8ef52d7 --- /dev/null +++ b/src/main/core/ssh/build-connect-config.ts @@ -0,0 +1,63 @@ +import { readFile } from 'node:fs/promises'; +import { homedir } from 'node:os'; +import type { ConnectConfig } from 'ssh2'; +import { sshCredentialService } from '@main/core/ssh/ssh-credential-service'; +import { resolveIdentityAgent } from '@main/core/ssh/sshConfigParser'; +import type { SshConnectionRow } from '@main/db/schema'; + +/** + * Build an ssh2 `ConnectConfig` from a stored `SshConnectionRow`. 
+ */ +export async function buildConnectConfigFromRow( + row: SshConnectionRow +): Promise { + const base: ConnectConfig = { + host: row.host, + port: row.port, + username: row.username, + readyTimeout: 20_000, + keepaliveInterval: 60_000, + keepaliveCountMax: 3, + }; + + switch (row.authType) { + case 'password': { + const password = await sshCredentialService.getPassword(row.id); + if (!password) { + throw new Error(`No password found for SSH connection '${row.name}' (id: ${row.id})`); + } + return { ...base, password }; + } + + case 'key': { + if (!row.privateKeyPath) { + throw new Error(`Private key path is required for SSH connection '${row.name}'`); + } + let keyPath = row.privateKeyPath; + if (keyPath.startsWith('~/')) { + keyPath = keyPath.replace('~', homedir()); + } else if (keyPath === '~') { + keyPath = homedir(); + } + const privateKey = await readFile(keyPath, 'utf-8'); + const passphrase = await sshCredentialService.getPassphrase(row.id); + return { ...base, privateKey, ...(passphrase ? { passphrase } : {}) }; + } + + case 'agent': { + const identityAgent = await resolveIdentityAgent(row.host); + const agent = identityAgent || process.env.SSH_AUTH_SOCK; + if (!agent) { + throw new Error( + `SSH agent socket not found for connection '${row.name}'. ` + + 'Ensure the SSH agent is running or use key/password auth.' 
+ ); + } + return { ...base, agent }; + } + + default: { + throw new Error(`Unsupported SSH auth type: ${(row as { authType: string }).authType}`); + } + } +} diff --git a/src/main/core/ssh/controller.ts b/src/main/core/ssh/controller.ts new file mode 100644 index 000000000..6baadbaeb --- /dev/null +++ b/src/main/core/ssh/controller.ts @@ -0,0 +1,223 @@ +import { randomUUID } from 'node:crypto'; +import { readFileSync } from 'node:fs'; +import { homedir } from 'node:os'; +import { eq } from 'drizzle-orm'; +import { Client } from 'ssh2'; +import { createRPCController } from '@/shared/ipc/rpc'; +import type { ConnectionState, ConnectionTestResult, FileEntry, SshConfig } from '@shared/ssh'; +import { db } from '@main/db/client'; +import { sshConnections as sshConnectionsTable, type SshConnectionInsert } from '@main/db/schema'; +import { log } from '@main/lib/logger'; +import { sshConnectionManager } from './ssh-connection-manager'; +import { sshCredentialService } from './ssh-credential-service'; +import { resolveIdentityAgent } from './utils'; + +export const sshController = createRPCController({ + /** List all saved SSH connections (no secrets). */ + getConnections: async (): Promise => { + const rows = await db.select().from(sshConnectionsTable); + return rows.map((row) => ({ + id: row.id, + name: row.name, + host: row.host, + port: row.port, + worktreesDir: row.metadata ? JSON.parse(row.metadata).worktreesDir : undefined, + username: row.username, + authType: row.authType as 'password' | 'key' | 'agent', + privateKeyPath: row.privateKeyPath ?? undefined, + useAgent: row.useAgent === 1, + })); + }, + + /** Create or update an SSH connection, storing secrets in the OS keychain. 
*/ + saveConnection: async ( + config: Omit & { password?: string; passphrase?: string } + ): Promise => { + const connectionId = randomUUID(); + + if (config.password) { + await sshCredentialService.storePassword(connectionId, config.password); + } + if (config.passphrase) { + await sshCredentialService.storePassphrase(connectionId, config.passphrase); + } + + const { password: _p, passphrase: _pp, ...dbConfig } = config; + + const metadata: Record = { + worktreesDir: config.worktreesDir, + }; + + const insertData: SshConnectionInsert = { + id: connectionId, + name: dbConfig.name, + host: dbConfig.host, + port: dbConfig.port, + metadata: JSON.stringify(metadata), + username: dbConfig.username, + authType: dbConfig.authType, + privateKeyPath: dbConfig.privateKeyPath ?? null, + useAgent: dbConfig.useAgent ? 1 : 0, + }; + + await db + .insert(sshConnectionsTable) + .values(insertData) + .onConflictDoUpdate({ + target: sshConnectionsTable.id, + set: { + name: insertData.name, + host: insertData.host, + port: insertData.port, + metadata: insertData.metadata, + username: insertData.username, + authType: insertData.authType, + privateKeyPath: insertData.privateKeyPath, + useAgent: insertData.useAgent, + updatedAt: new Date().toISOString(), + }, + }); + + return { ...dbConfig, id: connectionId, worktreesDir: config.worktreesDir }; + }, + + /** Delete a saved SSH connection and its stored credentials. */ + deleteConnection: async (id: string): Promise => { + if (sshConnectionManager.isConnected(id)) { + await sshConnectionManager.disconnect(id).catch((e) => { + log.warn('sshController.deleteConnection: error disconnecting', { + connectionId: id, + error: String(e), + }); + }); + } + await sshCredentialService.deleteAllCredentials(id); + await db.delete(sshConnectionsTable).where(eq(sshConnectionsTable.id, id)); + }, + + /** Test a connection without persisting anything. 
*/ + testConnection: async ( + config: SshConfig & { password?: string; passphrase?: string } + ): Promise => { + return new Promise(async (resolve) => { + const client = new Client(); + const debugLogs: string[] = []; + const startTime = Date.now(); + + client.on('ready', () => { + const latency = Date.now() - startTime; + client.end(); + resolve({ success: true, latency, debugLogs }); + }); + + client.on('error', (err: Error) => { + resolve({ success: false, error: err.message, debugLogs }); + }); + + try { + const connectConfig: Parameters[0] = { + host: config.host, + port: config.port, + username: config.username, + readyTimeout: 10_000, + debug: (info: string) => debugLogs.push(info), + }; + + if (config.authType === 'password') { + connectConfig.password = config.password; + } else if (config.authType === 'key' && config.privateKeyPath) { + let keyPath = config.privateKeyPath; + if (keyPath.startsWith('~/')) keyPath = keyPath.replace('~', homedir()); + connectConfig.privateKey = readFileSync(keyPath); + if (config.passphrase) connectConfig.passphrase = config.passphrase; + } else if (config.authType === 'agent') { + const identityAgent = await resolveIdentityAgent(config.host); + connectConfig.agent = identityAgent || process.env.SSH_AUTH_SOCK; + } + + client.connect(connectConfig); + } catch (e) { + resolve({ success: false, error: (e as Error).message, debugLogs }); + } + }); + }, + + /** Intentionally close a connection and stop auto-reconnect. */ + disconnect: async (connectionId: string): Promise => { + await sshConnectionManager.disconnect(connectionId); + }, + + /** Returns whether the connection is currently live. */ + getState: async (connectionId: string): Promise<'connected' | 'disconnected'> => { + return sshConnectionManager.isConnected(connectionId) ? 'connected' : 'disconnected'; + }, + /** Returns the current ConnectionState for every connection tracked by the manager. 
*/ + getConnectionState: async (): Promise> => { + return sshConnectionManager.getAllConnectionStates(); + }, + + /** Rename a saved SSH connection without changing any other fields. */ + renameConnection: async (id: string, name: string): Promise => { + const [row] = await db.select().from(sshConnectionsTable).where(eq(sshConnectionsTable.id, id)); + if (!row) throw new Error(`SSH connection ${id} not found`); + await db + .update(sshConnectionsTable) + .set({ name, updatedAt: new Date().toISOString() }) + .where(eq(sshConnectionsTable.id, id)); + }, + + /** List files/directories at a remote path via SFTP. */ + listFiles: async ({ + connectionId, + path: remotePath, + }: { + connectionId: string; + path: string; + }): Promise => { + let proxy = sshConnectionManager.getProxy(connectionId); + + if (!proxy || !proxy.isConnected) { + proxy = await sshConnectionManager.connect(connectionId); + } + + return new Promise((resolve, reject) => { + proxy!.client.sftp((err, sftp) => { + if (err) { + reject(new Error(`SFTP error: ${err.message}`)); + return; + } + sftp.readdir(remotePath, (readdirErr, list) => { + if (readdirErr) { + reject(new Error(`readdir error: ${readdirErr.message}`)); + return; + } + const entries: FileEntry[] = list + .map((item) => { + const mode = item.attrs.mode ?? 0; + const isDir = (mode & 0o170000) === 0o040000; + const isLink = (mode & 0o170000) === 0o120000; + const entryType: FileEntry['type'] = isLink + ? 'symlink' + : isDir + ? 'directory' + : 'file'; + const fullPath = `${remotePath.replace(/\/$/, '')}/${item.filename}`; + return { + path: fullPath, + name: item.filename, + type: entryType, + size: item.attrs.size ?? 0, + modifiedAt: new Date((item.attrs.mtime ?? 
0) * 1000), + }; + }) + .sort((a, b) => { + if (a.type === 'directory' && b.type !== 'directory') return -1; + if (a.type !== 'directory' && b.type === 'directory') return 1; + return a.name.localeCompare(b.name); + }); + resolve(entries); + }); + }); + }); + }, +}); diff --git a/src/main/core/ssh/ssh-client-proxy.ts b/src/main/core/ssh/ssh-client-proxy.ts new file mode 100644 index 000000000..3f9c75054 --- /dev/null +++ b/src/main/core/ssh/ssh-client-proxy.ts @@ -0,0 +1,42 @@ +import type { Client } from 'ssh2'; + +/** + * Stable reference to an ssh2 Client that survives reconnects. + * + * Services like SshFileSystem and SshGitService hold a SshClientProxy + * rather than a raw Client. SshConnectionManager calls update() each time + * a connection is established (including after reconnect) and invalidate() + * when the connection drops. Callers that access proxy.client at call time + * therefore always get the current live Client without needing to be + * rebuilt or replaced. + */ +export class SshClientProxy { + private _client: Client | null = null; + + /** Called by SshConnectionManager when a connection becomes ready. */ + update(client: Client): void { + this._client = client; + } + + /** Called by SshConnectionManager when the connection drops. */ + invalidate(): void { + this._client = null; + } + + /** + * The live ssh2 Client. Throws if the connection is not currently + * established. Callers should check isConnected first if they want to + * avoid throwing. + */ + get client(): Client { + if (!this._client) { + throw new Error('SSH connection is not available'); + } + return this._client; + } + + /** True while an active connection is held. 
*/ + get isConnected(): boolean { + return this._client !== null; + } +} diff --git a/src/main/core/ssh/ssh-connection-manager.ts b/src/main/core/ssh/ssh-connection-manager.ts new file mode 100644 index 000000000..b7ef426dd --- /dev/null +++ b/src/main/core/ssh/ssh-connection-manager.ts @@ -0,0 +1,366 @@ +import { EventEmitter } from 'node:events'; +import { eq } from 'drizzle-orm'; +import { Client, type ConnectConfig } from 'ssh2'; +import { sshConnectionEventChannel } from '@shared/events/sshEvents'; +import type { ConnectionState } from '@shared/ssh'; +import { db } from '@main/db/client'; +import { sshConnections } from '@main/db/schema'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; +import { buildConnectConfigFromRow } from './build-connect-config'; +import { SshClientProxy } from './ssh-client-proxy'; + +// ─── Error classes ──────────────────────────────────────────────────────────── + +export class SshAuthError extends Error { + constructor(message: string) { + super(message); + this.name = 'SshAuthError'; + } +} + +export class SshTimeoutError extends Error { + constructor(message: string) { + super(message); + this.name = 'SshTimeoutError'; + } +} + +export class SshConnectionError extends Error { + constructor(message: string) { + super(message); + this.name = 'SshConnectionError'; + } +} + +// ─── Types ──────────────────────────────────────────────────────────────────── + +export type SshConnectionEvent = + | { type: 'connected'; connectionId: string; proxy: SshClientProxy } + | { type: 'disconnected'; connectionId: string } + | { type: 'reconnecting'; connectionId: string; attempt: number; delayMs: number } + | { type: 'reconnected'; connectionId: string; proxy: SshClientProxy } + | { type: 'reconnect-failed'; connectionId: string } + | { type: 'error'; connectionId: string; error: Error }; + +/** Delays (ms) between successive reconnect attempts. Length = max attempts. 
*/ +const RECONNECT_DELAYS_MS = [1_000, 2_000, 5_000, 10_000, 20_000]; + +interface ReconnectState { + attempt: number; + timer: NodeJS.Timeout | undefined; +} + +// ─── Implementation ────────────────────────────────────────────────────────── + +export class SshConnectionManager extends EventEmitter { + /** One stable proxy per connection ID — survives reconnects. */ + private proxies: Map = new Map(); + + private pendingConnections: Map> = new Map(); + + /** Tracks ongoing reconnect backoff state per connection. */ + private reconnecting: Map = new Map(); + + /** + * IDs for which disconnect() was called — these are excluded from + * auto-reconnect so an intentional teardown is never silently restarted. + */ + private intentionalDisconnects: Set = new Set(); + + // ─── Public API ────────────────────────────────────────────────────────── + + /** + * Connect and register a client under the given ID. + * + * - Reuses an existing connection if already in the pool. + * - Concurrent calls for the same ID coalesce to a single attempt. + * - Throws SshAuthError, SshTimeoutError, or SshConnectionError on failure. 
+ */ + async connect(id: string): Promise { + this.intentionalDisconnects.delete(id); + + const existing = this.proxies.get(id); + if (existing?.isConnected) return existing; + + const pending = this.pendingConnections.get(id); + if (pending) return await pending; + + const [row] = await db.select().from(sshConnections).where(eq(sshConnections.id, id)).limit(1); + + if (!row) { + throw new SshConnectionError(`SSH connection '${id}' not found`); + } + + const config = await buildConnectConfigFromRow(row); + if (!config) { + throw new SshConnectionError(`SSH connection '${id}' has unsupported auth configuration`); + } + const connectionPromise = this.createConnection(id, config); + this.pendingConnections.set(id, connectionPromise); + + try { + return await connectionPromise; + } finally { + this.pendingConnections.delete(id); + } + } + + /** Get the stable SshClientProxy for a connection, or undefined. */ + getProxy(id: string): SshClientProxy | undefined { + return this.proxies.get(id); + } + + /** Returns true if the connection is currently live. */ + isConnected(id: string): boolean { + return this.proxies.get(id)?.isConnected ?? false; + } + + /** IDs of all connections that have a proxy (connected or reconnecting). */ + getConnectionIds(): string[] { + return Array.from(this.proxies.keys()); + } + + /** Returns the current ConnectionState for a single connection ID. */ + getConnectionState(id: string): ConnectionState { + if (this.proxies.get(id)?.isConnected) return 'connected'; + if (this.reconnecting.has(id)) return 'reconnecting'; + if (this.pendingConnections.has(id)) return 'connecting'; + return 'disconnected'; + } + + /** Returns the current ConnectionState for every tracked connection. */ + getAllConnectionStates(): Record { + const result: Record = {}; + for (const id of this.proxies.keys()) { + result[id] = this.getConnectionState(id); + } + return result; + } + + /** + * Gracefully close a connection and permanently stop reconnection for it. 
+ * This is an intentional teardown — auto-reconnect will NOT fire afterward. + */ + async disconnect(id: string): Promise { + this.intentionalDisconnects.add(id); + this.cancelReconnect(id); + + const proxy = this.proxies.get(id); + if (!proxy?.isConnected) { + log.warn('SshConnectionManager: disconnect called for unknown/inactive connection', { + connectionId: id, + }); + this.proxies.delete(id); + return; + } + + log.info('SshConnectionManager: disconnecting', { connectionId: id }); + + const client = proxy.client; + return new Promise((resolve) => { + const timeout = setTimeout(() => { + log.warn('SshConnectionManager: disconnect timed out, forcing close', { connectionId: id }); + proxy.invalidate(); + this.proxies.delete(id); + resolve(); + }, 5_000); + + client.once('close', () => { + clearTimeout(timeout); + proxy.invalidate(); + this.proxies.delete(id); + resolve(); + }); + + client.end(); + }); + } + + /** Gracefully close all connections. */ + async disconnectAll(): Promise { + const ids = Array.from(this.proxies.keys()); + log.info('SshConnectionManager: disconnecting all connections', { count: ids.length }); + await Promise.all(ids.map((id) => this.disconnect(id))); + } + + // ─── Private ───────────────────────────────────────────────────────────── + + private createConnection(id: string, config: ConnectConfig): Promise { + log.info('SshConnectionManager: creating connection', { + connectionId: id, + host: config.host, + username: config.username, + }); + + // Ensure a stable proxy exists for this ID. + const proxy = this.proxies.get(id) ?? 
new SshClientProxy(); + this.proxies.set(id, proxy); + + const client = new Client(); + + return new Promise((resolve, reject) => { + let resolved = false; + const resolveOnce = (p: SshClientProxy) => { + if (!resolved) { + resolved = true; + resolve(p); + } + }; + + client.on('error', (error: Error) => { + log.error('SshConnectionManager: connection error', { + connectionId: id, + error: error.message, + }); + + this.emit('connection-event', { + type: 'error', + connectionId: id, + error, + } satisfies SshConnectionEvent); + + reject(classifyError(error)); + }); + + client.on('close', () => { + log.info('SshConnectionManager: connection closed', { connectionId: id }); + + // Only react if this client is still the one backing the proxy. + if (proxy.isConnected && proxy.client === client) { + proxy.invalidate(); + + this.emit('connection-event', { + type: 'disconnected', + connectionId: id, + } satisfies SshConnectionEvent); + + events.emit(sshConnectionEventChannel, { type: 'disconnected', connectionId: id }); + + // Auto-reconnect unless this was an intentional disconnect or the + // initial handshake never succeeded (resolved = false still). + if (!this.intentionalDisconnects.has(id) && resolved) { + this.scheduleReconnect(id); + } + } + }); + + client.on('ready', () => { + log.info('SshConnectionManager: connection ready', { connectionId: id }); + + proxy.update(client); + + const isReconnect = this.reconnecting.has(id); + this.cancelReconnect(id); + + this.emit('connection-event', { + type: isReconnect ? 'reconnected' : 'connected', + connectionId: id, + proxy, + } satisfies SshConnectionEvent); + + events.emit(sshConnectionEventChannel, { + type: isReconnect ? 'reconnected' : 'connected', + connectionId: id, + }); + + resolveOnce(proxy); + }); + + client.connect(config); + }); + } + + private scheduleReconnect(id: string): void { + const state = this.reconnecting.get(id) ?? 
{ attempt: 0, timer: undefined }; + const attempt = state.attempt + 1; + + if (attempt > RECONNECT_DELAYS_MS.length) { + log.error('SshConnectionManager: max reconnect attempts reached', { connectionId: id }); + this.reconnecting.delete(id); + this.emit('connection-event', { + type: 'reconnect-failed', + connectionId: id, + } satisfies SshConnectionEvent); + events.emit(sshConnectionEventChannel, { type: 'reconnect-failed', connectionId: id }); + return; + } + + const delayMs = RECONNECT_DELAYS_MS[attempt - 1]!; + + log.info('SshConnectionManager: scheduling reconnect', { + connectionId: id, + attempt, + delayMs, + }); + + this.emit('connection-event', { + type: 'reconnecting', + connectionId: id, + attempt, + delayMs, + } satisfies SshConnectionEvent); + + events.emit(sshConnectionEventChannel, { + type: 'reconnecting', + connectionId: id, + attempt, + delayMs, + }); + + const timer = setTimeout(() => { + if (this.intentionalDisconnects.has(id)) { + this.reconnecting.delete(id); + return; + } + + const connectionPromise = this.connect(id); + this.pendingConnections.set(id, connectionPromise); + + connectionPromise + .then(() => { + this.pendingConnections.delete(id); + }) + .catch((error: unknown) => { + this.pendingConnections.delete(id); + // Auth failures won't resolve with retries — stop immediately. 
+ if (error instanceof SshAuthError) { + log.error('SshConnectionManager: reconnect stopped — auth failure', { + connectionId: id, + }); + this.reconnecting.delete(id); + this.emit('connection-event', { + type: 'reconnect-failed', + connectionId: id, + } satisfies SshConnectionEvent); + events.emit(sshConnectionEventChannel, { type: 'reconnect-failed', connectionId: id }); + } else { + this.scheduleReconnect(id); + } + }); + }, delayMs); + + this.reconnecting.set(id, { attempt, timer }); + } + + private cancelReconnect(id: string): void { + const state = this.reconnecting.get(id); + if (state?.timer !== undefined) { + clearTimeout(state.timer); + } + this.reconnecting.delete(id); + } +} + +export const sshConnectionManager = new SshConnectionManager(); + +function classifyError(error: Error): SshAuthError | SshTimeoutError | SshConnectionError { + const msg = error.message.toLowerCase(); + if (msg.includes('authentication') || msg.includes('auth') || msg.includes('permission denied')) { + return new SshAuthError(error.message); + } + if (msg.includes('timeout') || msg.includes('timed out')) { + return new SshTimeoutError(error.message); + } + return new SshConnectionError(error.message); +} diff --git a/src/main/services/ssh/SshCredentialService.ts b/src/main/core/ssh/ssh-credential-service.ts similarity index 57% rename from src/main/services/ssh/SshCredentialService.ts rename to src/main/core/ssh/ssh-credential-service.ts index fed78b9cd..66f6c55a8 100644 --- a/src/main/services/ssh/SshCredentialService.ts +++ b/src/main/core/ssh/ssh-credential-service.ts @@ -2,17 +2,7 @@ import keytar from 'keytar'; const SERVICE_NAME = 'emdash-ssh'; -/** - * Service for managing SSH credentials securely. - * Uses system keychain for password and passphrase storage via keytar. 
- */ export class SshCredentialService { - /** - * Store password for a connection - * @param connectionId - Unique identifier for the connection - * @param password - Password to store - * @throws Error if storage fails - */ async storePassword(connectionId: string, password: string): Promise { try { await keytar.setPassword(SERVICE_NAME, `${connectionId}:password`, password); @@ -22,27 +12,15 @@ export class SshCredentialService { } } - /** - * Retrieve password for a connection - * @param connectionId - Unique identifier for the connection - * @returns The stored password or null if not found - * @throws Error if retrieval fails - */ async getPassword(connectionId: string): Promise { try { - const credential = await keytar.getPassword(SERVICE_NAME, `${connectionId}:password`); - return credential; + return await keytar.getPassword(SERVICE_NAME, `${connectionId}:password`); } catch (error) { const message = error instanceof Error ? error.message : String(error); throw new Error(`Failed to retrieve password for connection ${connectionId}: ${message}`); } } - /** - * Delete stored password - * @param connectionId - Unique identifier for the connection - * @throws Error if deletion fails - */ async deletePassword(connectionId: string): Promise { try { await keytar.deletePassword(SERVICE_NAME, `${connectionId}:password`); @@ -52,11 +30,6 @@ export class SshCredentialService { } } - /** - * Checks if a password exists in the keychain. 
- * @param connectionId - Unique identifier for the connection - * @returns True if password exists - */ async hasPassword(connectionId: string): Promise { try { const credential = await keytar.getPassword(SERVICE_NAME, `${connectionId}:password`); @@ -66,12 +39,6 @@ export class SshCredentialService { } } - /** - * Store passphrase for a private key - * @param connectionId - Unique identifier for the connection - * @param passphrase - Passphrase to store - * @throws Error if storage fails - */ async storePassphrase(connectionId: string, passphrase: string): Promise { try { await keytar.setPassword(SERVICE_NAME, `${connectionId}:passphrase`, passphrase); @@ -81,27 +48,15 @@ export class SshCredentialService { } } - /** - * Retrieve passphrase for a private key - * @param connectionId - Unique identifier for the connection - * @returns The stored passphrase or null if not found - * @throws Error if retrieval fails - */ async getPassphrase(connectionId: string): Promise { try { - const credential = await keytar.getPassword(SERVICE_NAME, `${connectionId}:passphrase`); - return credential; + return await keytar.getPassword(SERVICE_NAME, `${connectionId}:passphrase`); } catch (error) { const message = error instanceof Error ? error.message : String(error); throw new Error(`Failed to retrieve passphrase for connection ${connectionId}: ${message}`); } } - /** - * Delete stored passphrase - * @param connectionId - Unique identifier for the connection - * @throws Error if deletion fails - */ async deletePassphrase(connectionId: string): Promise { try { await keytar.deletePassword(SERVICE_NAME, `${connectionId}:passphrase`); @@ -111,11 +66,6 @@ export class SshCredentialService { } } - /** - * Checks if a passphrase exists in the keychain. 
- * @param connectionId - Unique identifier for the connection - * @returns True if passphrase exists - */ async hasPassphrase(connectionId: string): Promise { try { const credential = await keytar.getPassword(SERVICE_NAME, `${connectionId}:passphrase`); @@ -125,43 +75,28 @@ export class SshCredentialService { } } - /** - * Store both password and passphrase in one call - * @param connectionId - Unique identifier for the connection - * @param credentials - Object containing optional password and passphrase - * @throws Error if any storage operation fails - */ async storeCredentials( connectionId: string, credentials: { password?: string; passphrase?: string } ): Promise { const operations: Promise[] = []; - if (credentials.password) { operations.push(this.storePassword(connectionId, credentials.password)); } if (credentials.passphrase) { operations.push(this.storePassphrase(connectionId, credentials.passphrase)); } - if (operations.length > 0) { await Promise.all(operations); } } - /** - * Delete all credentials for a connection - * @param connectionId - Unique identifier for the connection - * @throws Error if any deletion operation fails - */ async deleteAllCredentials(connectionId: string): Promise { await Promise.all([ - this.deletePassword(connectionId).catch(() => { - // Ignore errors for individual deletions - }), - this.deletePassphrase(connectionId).catch(() => { - // Ignore errors for individual deletions - }), + this.deletePassword(connectionId).catch(() => {}), + this.deletePassphrase(connectionId).catch(() => {}), ]); } } + +export const sshCredentialService = new SshCredentialService(); diff --git a/src/main/utils/sshConfigParser.ts b/src/main/core/ssh/sshConfigParser.ts similarity index 95% rename from src/main/utils/sshConfigParser.ts rename to src/main/core/ssh/sshConfigParser.ts index 6a1d8efee..a05825aaa 100644 --- a/src/main/utils/sshConfigParser.ts +++ b/src/main/core/ssh/sshConfigParser.ts @@ -1,7 +1,7 @@ -import { readFile } from 
'fs/promises'; -import { homedir } from 'os'; -import { join } from 'path'; -import type { SshConfigHost } from '../../shared/ssh/types'; +import { readFile } from 'node:fs/promises'; +import { homedir } from 'node:os'; +import { join } from 'node:path'; +import type { SshConfigHost } from '@shared/ssh'; /** * Strips surrounding quotes (single or double) from a value string. diff --git a/src/main/core/ssh/utils.ts b/src/main/core/ssh/utils.ts new file mode 100644 index 000000000..2329ba996 --- /dev/null +++ b/src/main/core/ssh/utils.ts @@ -0,0 +1,15 @@ +import { parseSshConfigFile } from '@main/core/ssh/sshConfigParser'; + +export async function resolveIdentityAgent(hostname: string): Promise { + try { + const hosts = await parseSshConfigFile(); + const match = hosts.find( + (h) => + h.host.toLowerCase() === hostname.toLowerCase() || + h.hostname?.toLowerCase() === hostname.toLowerCase() + ); + return match?.identityAgent; + } catch { + return undefined; + } +} diff --git a/src/main/core/tasks/archiveTask.ts b/src/main/core/tasks/archiveTask.ts new file mode 100644 index 000000000..98755a927 --- /dev/null +++ b/src/main/core/tasks/archiveTask.ts @@ -0,0 +1,19 @@ +import { eq, sql } from 'drizzle-orm'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; + +export async function archiveTask(projectId: string, taskId: string): Promise { + const project = projectManager.getProject(projectId); + + await db + .update(tasks) + .set({ + status: 'archived', + archivedAt: sql`CURRENT_TIMESTAMP`, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .where(eq(tasks.id, taskId)); + + await project?.teardownTask(taskId); +} diff --git a/src/main/core/tasks/controller.ts b/src/main/core/tasks/controller.ts new file mode 100644 index 000000000..1ea393afb --- /dev/null +++ b/src/main/core/tasks/controller.ts @@ -0,0 +1,24 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { 
archiveTask } from './archiveTask'; +import { createTask } from './createTask'; +import { deleteTask } from './deleteTask'; +import { getBootstrapStatus } from './getBootstrapStatus'; +import { getTasks } from './getTasks'; +import { provisionTask } from './provisionTask'; +import { renameTask } from './renameTask'; +import { restoreTask } from './restoreTask'; +import { retryProvisionTask } from './retryProvisionTask'; +import { retryTeardownTask } from './retryTeardownTask'; + +export const taskController = createRPCController({ + createTask, + getTasks, + deleteTask, + archiveTask, + restoreTask, + renameTask, + provisionTask, + retryProvisionTask, + retryTeardownTask, + getBootstrapStatus, +}); diff --git a/src/main/core/tasks/core.ts b/src/main/core/tasks/core.ts new file mode 100644 index 000000000..e8e491b8f --- /dev/null +++ b/src/main/core/tasks/core.ts @@ -0,0 +1,17 @@ +import { Issue, Task, TaskLifecycleStatus } from '@shared/tasks'; +import { TaskRow } from '@main/db/schema'; + +export function mapTaskRowToTask(row: TaskRow): Task { + return { + id: row.id, + projectId: row.projectId, + name: row.name, + status: row.status as TaskLifecycleStatus, + sourceBranch: row.sourceBranch, + taskBranch: row.taskBranch ?? undefined, + linkedIssue: row.linkedIssue ? (JSON.parse(row.linkedIssue) as Issue) : undefined, + archivedAt: row.archivedAt ?? 
undefined, + createdAt: row.createdAt, + updatedAt: row.updatedAt, + }; +} diff --git a/src/main/core/tasks/createTask.ts b/src/main/core/tasks/createTask.ts new file mode 100644 index 000000000..7b0431949 --- /dev/null +++ b/src/main/core/tasks/createTask.ts @@ -0,0 +1,63 @@ +import { sql } from 'drizzle-orm'; +import type { CreateTaskParams, Task, TaskLifecycleStatus } from '@shared/tasks'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import { appSettingsService } from '../settings/settings-service'; + +export async function createTask(params: CreateTaskParams): Promise { + const id = params.id; + const suffix = Math.random().toString(36).slice(2, 7); + const branchPrefix = (await appSettingsService.get('localProject')).branchPrefix ?? ''; + + const taskBranch = params.taskBranch + ? branchPrefix + ? `${branchPrefix}/${params.taskBranch}-${suffix}` + : `${params.taskBranch}-${suffix}` + : undefined; + + const project = projectManager.getProject(params.projectId); + if (!project) { + throw new Error('Project not found'); + } + + if (taskBranch) { + await project.git.createBranch(taskBranch, params.sourceBranch); + if (params.pushBranch) { + await project.git.publishBranch(taskBranch); + } + } + + const [taskRow] = await db + .insert(tasks) + .values({ + id, + projectId: params.projectId, + name: params.name, + taskBranch: taskBranch, + status: 'todo' as TaskLifecycleStatus, + sourceBranch: params.sourceBranch, + linkedIssue: params.linkedIssue ? JSON.stringify(params.linkedIssue) : null, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .returning(); + + const task: Task = { + id, + projectId: params.projectId, + name: params.name, + status: 'todo' as TaskLifecycleStatus, + sourceBranch: params.sourceBranch, + taskBranch: taskBranch, + linkedIssue: params.linkedIssue ? 
params.linkedIssue : undefined, + createdAt: taskRow.createdAt, + updatedAt: taskRow.updatedAt, + }; + + const provisionResult = await project.provisionTask(task, [], []); + if (!provisionResult.success) { + throw new Error(`Failed to provision task: ${provisionResult.error.message}`); + } + + return task; +} diff --git a/src/main/core/tasks/deleteTask.ts b/src/main/core/tasks/deleteTask.ts new file mode 100644 index 000000000..ab4ce0c54 --- /dev/null +++ b/src/main/core/tasks/deleteTask.ts @@ -0,0 +1,26 @@ +import { eq } from 'drizzle-orm'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import { log } from '@main/lib/logger'; + +export async function deleteTask(projectId: string, taskId: string): Promise { + const [task] = await db.select().from(tasks).where(eq(tasks.id, taskId)).limit(1); + if (!task) return; + + const project = projectManager.getProject(projectId); + + await db.delete(tasks).where(eq(tasks.id, taskId)); + + if (project) { + project.teardownTask(taskId).catch((e) => { + log.warn('deleteTask: teardown failed', { taskId, error: String(e) }); + }); + + if (task.taskBranch) { + project.git.deleteBranch(task.taskBranch).catch((e) => { + log.warn('deleteTask: branch deletion failed', { taskId, error: String(e) }); + }); + } + } +} diff --git a/src/main/core/tasks/getBootstrapStatus.ts b/src/main/core/tasks/getBootstrapStatus.ts new file mode 100644 index 000000000..c72ec86c6 --- /dev/null +++ b/src/main/core/tasks/getBootstrapStatus.ts @@ -0,0 +1,15 @@ +import type { TaskBootstrapStatus } from '@shared/tasks'; +import { projectManager } from '@main/core/projects/project-manager'; +import { log } from '@main/lib/logger'; + +export async function getBootstrapStatus( + projectId: string, + taskId: string +): Promise { + const project = projectManager.getProject(projectId); + if (!project) throw new Error(`Project not found: ${projectId}`); + + const 
status = project.getTaskBootstrapStatus(taskId); + log.debug('getBootstrapStatus', { taskId, status: status.status }); + return status; +} diff --git a/src/main/core/tasks/getTasks.ts b/src/main/core/tasks/getTasks.ts new file mode 100644 index 000000000..10ba3e8e8 --- /dev/null +++ b/src/main/core/tasks/getTasks.ts @@ -0,0 +1,17 @@ +import { and, desc, eq } from 'drizzle-orm'; +import { Task } from '@shared/tasks'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import { mapTaskRowToTask } from './core'; + +export async function getTasks(projectId?: string): Promise { + const rows = projectId + ? await db + .select() + .from(tasks) + .where(and(eq(tasks.projectId, projectId))) + .orderBy(desc(tasks.updatedAt)) + : await db.select().from(tasks).orderBy(desc(tasks.updatedAt)); + + return rows.map((row) => mapTaskRowToTask(row)); +} diff --git a/src/main/core/tasks/provisionTask.ts b/src/main/core/tasks/provisionTask.ts new file mode 100644 index 000000000..ddcc32ab5 --- /dev/null +++ b/src/main/core/tasks/provisionTask.ts @@ -0,0 +1,34 @@ +import { eq } from 'drizzle-orm'; +import { mapConversationRowToConversation } from '@main/core/conversations/utils'; +import { projectManager } from '@main/core/projects/project-manager'; +import { mapTerminalRowToTerminal } from '@main/core/terminals/core'; +import { db } from '@main/db/client'; +import { conversations, tasks, terminals } from '@main/db/schema'; +import { mapTaskRowToTask } from './core'; + +export async function provisionTask(taskId: string): Promise { + const [row] = await db.select().from(tasks).where(eq(tasks.id, taskId)); + if (!row) throw new Error(`Task not found: ${taskId}`); + + const task = mapTaskRowToTask(row); + const project = projectManager.getProject(task.projectId); + if (!project) throw new Error(`Project not found: ${task.projectId}`); + + if (project.getTask(taskId)) return; + + const [existingTerminals, existingConversations] = await Promise.all([ + db + 
.select() + .from(terminals) + .where(eq(terminals.taskId, taskId)) + .then((rows) => rows.map(mapTerminalRowToTerminal)), + db + .select() + .from(conversations) + .where(eq(conversations.taskId, taskId)) + .then((rows) => rows.map((r) => mapConversationRowToConversation(r, true))), + ]); + + const result = await project.provisionTask(task, existingConversations, existingTerminals); + if (!result.success) throw new Error(`Failed to provision task: ${result.error.message}`); +} diff --git a/src/main/core/tasks/renameTask.ts b/src/main/core/tasks/renameTask.ts new file mode 100644 index 000000000..a8e4a81a5 --- /dev/null +++ b/src/main/core/tasks/renameTask.ts @@ -0,0 +1,37 @@ +import { eq, sql } from 'drizzle-orm'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import { appSettingsService } from '../settings/settings-service'; + +export async function renameTask( + projectId: string, + taskId: string, + newName: string +): Promise { + const [row] = await db.select().from(tasks).where(eq(tasks.id, taskId)).limit(1); + if (!row) throw new Error(`Task not found: ${taskId}`); + + const project = projectManager.getProject(projectId); + if (!project) throw new Error(`Project not found: ${projectId}`); + + const oldBranch = row.taskBranch; + let newBranch: string | null = null; + + if (oldBranch) { + const suffix = Math.random().toString(36).slice(2, 7); + const branchPrefix = (await appSettingsService.get('localProject')).branchPrefix ?? ''; + newBranch = branchPrefix ? `${branchPrefix}/${newName}-${suffix}` : `${newName}-${suffix}`; + + await project.git.renameBranch(oldBranch, newBranch); + } + + await db + .update(tasks) + .set({ + name: newName, + taskBranch: newBranch ?? 
row.taskBranch, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .where(eq(tasks.id, taskId)); +} diff --git a/src/main/core/tasks/restoreTask.ts b/src/main/core/tasks/restoreTask.ts new file mode 100644 index 000000000..fa4026721 --- /dev/null +++ b/src/main/core/tasks/restoreTask.ts @@ -0,0 +1,22 @@ +import { eq, sql } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import type { TaskMetadata } from './core'; + +export async function restoreTask(id: string): Promise { + const [row] = await db.select().from(tasks).where(eq(tasks.id, id)).limit(1); + if (!row) return; + + const meta: TaskMetadata = row.metadata ? JSON.parse(row.metadata) : {}; + meta.lifecycleStatus = 'in_progress'; + + await db + .update(tasks) + .set({ + archivedAt: null, + status: 'in_progress', + metadata: JSON.stringify(meta), + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .where(eq(tasks.id, id)); +} diff --git a/src/main/core/tasks/retryProvisionTask.ts b/src/main/core/tasks/retryProvisionTask.ts new file mode 100644 index 000000000..b567e2515 --- /dev/null +++ b/src/main/core/tasks/retryProvisionTask.ts @@ -0,0 +1,32 @@ +import { eq } from 'drizzle-orm'; +import { mapConversationRowToConversation } from '@main/core/conversations/utils'; +import { projectManager } from '@main/core/projects/project-manager'; +import { mapTerminalRowToTerminal } from '@main/core/terminals/core'; +import { db } from '@main/db/client'; +import { conversations, tasks, terminals } from '@main/db/schema'; +import { mapTaskRowToTask } from './core'; + +export async function retryProvisionTask(taskId: string): Promise { + const [row] = await db.select().from(tasks).where(eq(tasks.id, taskId)); + if (!row) throw new Error(`Task not found: ${taskId}`); + + const task = mapTaskRowToTask(row); + const project = projectManager.getProject(task.projectId); + if (!project) throw new Error(`Project not found: ${task.projectId}`); + + const [existingTerminals, 
existingConversations] = await Promise.all([ + db + .select() + .from(terminals) + .where(eq(terminals.taskId, taskId)) + .then((rows) => rows.map(mapTerminalRowToTerminal)), + db + .select() + .from(conversations) + .where(eq(conversations.taskId, taskId)) + .then((rows) => rows.map((r) => mapConversationRowToConversation(r, true))), + ]); + + const result = await project.retryTaskProvision(task, existingConversations, existingTerminals); + if (!result.success) throw new Error(`Failed to provision task: ${result.error.message}`); +} diff --git a/src/main/core/tasks/retryTeardownTask.ts b/src/main/core/tasks/retryTeardownTask.ts new file mode 100644 index 000000000..f38b50b24 --- /dev/null +++ b/src/main/core/tasks/retryTeardownTask.ts @@ -0,0 +1,17 @@ +import { eq } from 'drizzle-orm'; +import { projectManager } from '@main/core/projects/project-manager'; +import { db } from '@main/db/client'; +import { tasks } from '@main/db/schema'; +import { mapTaskRowToTask } from './core'; + +export async function retryTeardownTask(taskId: string): Promise { + const [row] = await db.select().from(tasks).where(eq(tasks.id, taskId)); + if (!row) throw new Error(`Task not found: ${taskId}`); + + const task = mapTaskRowToTask(row); + const project = projectManager.getProject(task.projectId); + if (!project) throw new Error(`Project not found: ${task.projectId}`); + + const result = await project.retryTaskTeardown(taskId); + if (!result.success) throw new Error(`Failed to teardown task: ${result.error.message}`); +} diff --git a/src/main/core/tasks/task-lifecycle-service.ts b/src/main/core/tasks/task-lifecycle-service.ts new file mode 100644 index 000000000..f7f938826 --- /dev/null +++ b/src/main/core/tasks/task-lifecycle-service.ts @@ -0,0 +1,93 @@ +import { makePtySessionId } from '@shared/ptySessionId'; +import { createScriptTerminalId } from '@shared/terminals'; +import { spawnLocalPty } from '../pty/local-pty'; +import { Pty } from '../pty/pty'; +import { buildTerminalEnv } 
from '../pty/pty-env'; +import { ptySessionRegistry } from '../pty/pty-session-registry'; +import type { TerminalProvider } from '../terminals/terminal-provider'; + +const DEFAULT_COLS = 80; +const DEFAULT_ROWS = 24; + +export class TaskLifecycleService { + private sessions = new Map(); + private readonly projectId: string; + private readonly taskId: string; + private readonly taskPath: string; + private readonly terminals: TerminalProvider; + + constructor({ + projectId, + taskId, + taskPath, + terminals, + }: { + projectId: string; + taskId: string; + taskPath: string; + terminals: TerminalProvider; + }) { + this.projectId = projectId; + this.taskId = taskId; + this.taskPath = taskPath; + this.terminals = terminals; + } + + async runLifecycleScript( + script: { + type: 'setup' | 'run' | 'teardown'; + script: string; + }, + options: { shouldRespawn?: boolean; initialSize?: { cols: number; rows: number } } = {} + ): Promise { + const { shouldRespawn = false, initialSize = { cols: DEFAULT_COLS, rows: DEFAULT_ROWS } } = + options; + + const id = await createScriptTerminalId({ + projectId: this.projectId, + taskId: this.taskId, + type: script.type, + script: script.script, + }); + + if (this.sessions.has(id)) return; + + const userShell = + process.env.SHELL ?? (process.platform === 'darwin' ? 
'/bin/zsh' : '/bin/bash'); + + if (shouldRespawn) { + this.terminals.spawnTerminal( + { id, projectId: this.projectId, taskId: this.taskId, name: script.type }, + initialSize, + { + command: userShell, + args: ['-c', script.script], + } + ); + return; + } + + const sessionId = makePtySessionId(this.projectId, this.taskId, id); + + const pty = spawnLocalPty({ + id: sessionId, + command: userShell, + args: ['-c', script.script], + cwd: this.taskPath, + env: buildTerminalEnv(), + cols: initialSize.cols, + rows: initialSize.rows, + }); + + ptySessionRegistry.register(sessionId, pty); + this.sessions.set(id, pty); + + return new Promise((resolve) => { + pty.onExit(() => { + this.sessions.delete(id); + ptySessionRegistry.unregister(sessionId); + resolve(); + }); + }); + } +} diff --git a/src/main/core/terminals/controller.ts b/src/main/core/terminals/controller.ts new file mode 100644 index 000000000..8bbb8742e --- /dev/null +++ b/src/main/core/terminals/controller.ts @@ -0,0 +1,14 @@ +import { createRPCController } from '@shared/ipc/rpc'; +import { createTerminal } from './createTerminal'; +import { deleteTerminal } from './deleteTerminal'; +import { getAllTerminals } from './getAllTerminals'; +import { renameTerminal } from './renameTerminal'; +import { runLifecycleScript } from './runLifecycleScript'; + +export const terminalsController = createRPCController({ + getAllTerminals, + createTerminal, + deleteTerminal, + renameTerminal, + runLifecycleScript, +}); diff --git a/src/main/core/terminals/core.ts b/src/main/core/terminals/core.ts new file mode 100644 index 000000000..4446eaf27 --- /dev/null +++ b/src/main/core/terminals/core.ts @@ -0,0 +1,12 @@ +import { Terminal } from '@shared/terminals'; +import { TerminalRow } from '@main/db/schema'; + +export function mapTerminalRowToTerminal(row: TerminalRow): Terminal { + return { + id: row.id, + taskId: row.taskId, + ssh: row.ssh === 1, + projectId: row.projectId, + name: row.name, + }; +} diff --git 
a/src/main/core/terminals/createTerminal.ts b/src/main/core/terminals/createTerminal.ts new file mode 100644 index 000000000..3db1388ea --- /dev/null +++ b/src/main/core/terminals/createTerminal.ts @@ -0,0 +1,31 @@ +import { sql } from 'drizzle-orm'; +import type { CreateTerminalParams, Terminal } from '@shared/terminals'; +import { db } from '@main/db/client'; +import { terminals } from '@main/db/schema'; +import { resolveTask } from '../projects/utils'; +import { mapTerminalRowToTerminal } from './core'; + +export async function createTerminal(params: CreateTerminalParams): Promise { + const { id: terminalId, initialSize = { cols: 80, rows: 24 } } = params; + + const [row] = await db + .insert(terminals) + .values({ + id: terminalId, + projectId: params.projectId, + taskId: params.taskId, + name: params.name, + ssh: 0, + updatedAt: sql`CURRENT_TIMESTAMP`, + }) + .returning(); + + const task = resolveTask(params.projectId, params.taskId); + if (!task) { + throw new Error('Task not found'); + } + + await task.terminals.spawnTerminal(mapTerminalRowToTerminal(row), initialSize); + + return mapTerminalRowToTerminal(row); +} diff --git a/src/main/core/terminals/deleteTerminal.ts b/src/main/core/terminals/deleteTerminal.ts new file mode 100644 index 000000000..39391fb87 --- /dev/null +++ b/src/main/core/terminals/deleteTerminal.ts @@ -0,0 +1,27 @@ +import { and, eq } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { terminals } from '@main/db/schema'; +import { resolveTask } from '../projects/utils'; + +export async function deleteTerminal({ + projectId, + taskId, + terminalId, +}: { + projectId: string; + taskId: string; + terminalId: string; +}) { + await db + .delete(terminals) + .where( + and( + eq(terminals.id, terminalId), + eq(terminals.projectId, projectId), + eq(terminals.taskId, taskId) + ) + ); + + const task = resolveTask(projectId, taskId); + await task?.terminals.killTerminal(terminalId); +} diff --git 
a/src/main/core/terminals/getAllTerminals.ts b/src/main/core/terminals/getAllTerminals.ts new file mode 100644 index 000000000..cfb15627f --- /dev/null +++ b/src/main/core/terminals/getAllTerminals.ts @@ -0,0 +1,14 @@ +import { eq, isNull } from 'drizzle-orm'; +import type { Terminal } from '@shared/terminals'; +import { db } from '@main/db/client'; +import { tasks, terminals } from '@main/db/schema'; +import { mapTerminalRowToTerminal } from './core'; + +export async function getAllTerminals(): Promise { + const rows = await db + .select({ terminal: terminals }) + .from(terminals) + .innerJoin(tasks, eq(terminals.taskId, tasks.id)) + .where(isNull(tasks.archivedAt)); + return rows.map(({ terminal }) => mapTerminalRowToTerminal(terminal)); +} diff --git a/src/main/core/terminals/impl/general-session.ts b/src/main/core/terminals/impl/general-session.ts new file mode 100644 index 000000000..04c05a8df --- /dev/null +++ b/src/main/core/terminals/impl/general-session.ts @@ -0,0 +1,13 @@ +export interface GeneralSession { + type: 'general'; + config: GeneralSessionConfig; +} + +export interface GeneralSessionConfig { + taskId?: string; + cwd: string; + /** Project root — used to resolve .emdash.json shellSetup. */ + projectPath?: string; + /** Shell command prepended before the interactive shell: `${shellSetup} && exec $SHELL`. 
*/ + shellSetup?: string; +} diff --git a/src/main/core/terminals/impl/local-terminal-provider.ts b/src/main/core/terminals/impl/local-terminal-provider.ts new file mode 100644 index 000000000..27f2f0fd4 --- /dev/null +++ b/src/main/core/terminals/impl/local-terminal-provider.ts @@ -0,0 +1,98 @@ +import { makePtySessionId } from '@shared/ptySessionId'; +import { Terminal } from '@shared/terminals'; +import { spawnLocalPty } from '@main/core/pty/local-pty'; +import { Pty } from '@main/core/pty/pty'; +import { buildTerminalEnv } from '@main/core/pty/pty-env'; +import { ptySessionRegistry } from '@main/core/pty/pty-session-registry'; +import { log } from '@main/lib/logger'; +import { TerminalProvider } from '../terminal-provider'; + +const DEFAULT_COLS = 80; +const DEFAULT_ROWS = 24; + +export class LocalTerminalProvider implements TerminalProvider { + private sessions = new Map(); + private readonly projectId: string; + private readonly taskId: string; + private readonly taskPath: string; + + constructor({ + projectId, + taskId, + taskPath, + }: { + projectId: string; + taskId: string; + taskPath: string; + }) { + this.projectId = projectId; + this.taskId = taskId; + this.taskPath = taskPath; + } + + async spawnTerminal( + terminal: Terminal, + initialSize: { cols: number; rows: number } = { cols: DEFAULT_COLS, rows: DEFAULT_ROWS }, + command?: { command: string; args: string[] } + ): Promise { + const sessionId = makePtySessionId(terminal.projectId, terminal.taskId, terminal.id); + + const userShell = + process.env.SHELL ?? (process.platform === 'darwin' ? '/bin/zsh' : '/bin/bash'); + + const pty = spawnLocalPty({ + id: sessionId, + command: command?.command ?? userShell, + // -l: login shell — sources /etc/profile, ~/.zprofile, ~/.bash_profile, + // etc., giving the user the same environment as any other terminal app. + // Only applied when using the default shell; explicit commands control + // their own args. + args: command?.args ?? 
(process.platform !== 'win32' ? ['-l'] : []), + cwd: this.taskPath, + env: buildTerminalEnv(), + cols: initialSize.cols, + rows: initialSize.rows, + }); + + pty.onExit(() => { + ptySessionRegistry.unregister(sessionId); + const shouldRespawn = this.sessions.has(sessionId); + this.sessions.delete(sessionId); + if (shouldRespawn) { + setTimeout(() => { + this.spawnTerminal(terminal).catch((e) => { + log.error('LocalTerminalProvider: respawn failed', { + terminalId: terminal.id, + error: String(e), + }); + }); + }, 500); + } + }); + + ptySessionRegistry.register(sessionId, pty); + this.sessions.set(sessionId, pty); + } + + async killTerminal(terminalId: string): Promise { + const sessionId = makePtySessionId(this.projectId, this.taskId, terminalId); + const pty = this.sessions.get(sessionId); + if (pty) { + try { + pty.kill(); + } catch {} + this.sessions.delete(sessionId); + ptySessionRegistry.unregister(sessionId); + } + } + + async destroyAll(): Promise { + for (const [sessionId, pty] of this.sessions) { + try { + pty.kill(); + } catch {} + ptySessionRegistry.unregister(sessionId); + } + this.sessions.clear(); + } +} diff --git a/src/main/core/terminals/impl/ssh-terminal-provider.ts b/src/main/core/terminals/impl/ssh-terminal-provider.ts new file mode 100644 index 000000000..76762617c --- /dev/null +++ b/src/main/core/terminals/impl/ssh-terminal-provider.ts @@ -0,0 +1,126 @@ +import { makePtySessionId } from '@shared/ptySessionId'; +import { Pty } from '@main/core/pty/pty'; +import { ptySessionRegistry } from '@main/core/pty/pty-session-registry'; +import { buildSshCommandString, resolveSpawnParams } from '@main/core/pty/spawn-utils'; +import { openSsh2Pty } from '@main/core/pty/ssh2-pty'; +import type { SshClientProxy } from '@main/core/ssh/ssh-client-proxy'; +import { + CreateSessionError, + TerminalProvider, + TerminalSpawnOptions, +} from '@main/core/terminals/terminal-provider'; +import { log } from '@main/lib/logger'; +import { ok, Result } from 
'@main/lib/result'; +import type { GeneralSessionConfig } from './general-session'; + +export class SshTerminalProvider implements TerminalProvider { + private sessions = new Map(); + /** Terminals explicitly killed by the user — suppresses auto-respawn. */ + private deletedTerminals = new Set(); + /** Stored spawn options per terminal ID — used for rehydration on reconnect. */ + private terminalOpts = new Map(); + + constructor( + private readonly projectId: string, + private readonly taskId: string, + private readonly proxy: SshClientProxy + ) {} + + async spawnTerminal(opts: TerminalSpawnOptions): Promise> { + const sessionId = makePtySessionId(opts.projectId, opts.taskId, opts.terminalId); + + // Store opts for rehydration on reconnect. + this.terminalOpts.set(opts.terminalId, opts); + + const cfg: GeneralSessionConfig = { + taskId: opts.taskId, + cwd: opts.cwd, + shellSetup: opts.shellSetup, + }; + + const { command, args, cwd } = resolveSpawnParams('general', cfg); + const sshCommand = buildSshCommandString(command, args, cwd); + + const result = await openSsh2Pty(this.proxy.client, { + id: sessionId, + command: sshCommand, + cols: 80, + rows: 24, + }); + + if (!result.success) { + log.error('SshTerminalProvider: failed to spawn terminal PTY', { + terminalId: opts.terminalId, + error: result.error, + }); + return result; + } + + const pty = result.data; + + pty.onExit(() => { + this.sessions.delete(sessionId); + ptySessionRegistry.unregister(sessionId); + if (!this.deletedTerminals.has(opts.terminalId)) { + // Skip auto-respawn if the connection is currently down — the + // EnvironmentProviderManager will trigger rehydrate() on reconnect. 
+ if (!this.proxy.isConnected) return; + setTimeout(() => { + this.spawnTerminal(opts).catch((e) => { + log.error('SshTerminalProvider: respawn failed', { + terminalId: opts.terminalId, + error: String(e), + }); + }); + }, 500); + } + }); + + ptySessionRegistry.register(sessionId, pty); + this.sessions.set(sessionId, pty); + return ok(); + } + + /** + * Re-spawn all terminals whose sessions are no longer active (e.g. after + * an SSH reconnect). Skips user-deleted terminals and terminals that are + * already running. + */ + async rehydrate(): Promise { + for (const [terminalId, opts] of this.terminalOpts) { + if (this.deletedTerminals.has(terminalId)) continue; + const sessionId = makePtySessionId(opts.projectId, opts.taskId, opts.terminalId); + if (this.sessions.has(sessionId)) continue; + await this.spawnTerminal(opts).catch((e) => { + log.error('SshTerminalProvider: rehydrate failed', { + terminalId, + error: String(e), + }); + }); + } + } + + killTerminal(terminalId: string): void { + this.deletedTerminals.add(terminalId); + const sessionId = makePtySessionId(this.projectId, this.taskId, terminalId); + const pty = this.sessions.get(sessionId); + if (pty) { + try { + pty.kill(); + } catch {} + this.sessions.delete(sessionId); + ptySessionRegistry.unregister(sessionId); + } + setTimeout(() => this.deletedTerminals.delete(terminalId), 10_000); + } + + destroyAll(): void { + for (const [sessionId, pty] of this.sessions) { + try { + pty.kill(); + } catch {} + ptySessionRegistry.unregister(sessionId); + } + this.sessions.clear(); + } +} diff --git a/src/main/core/terminals/renameTerminal.ts b/src/main/core/terminals/renameTerminal.ts new file mode 100644 index 000000000..c3c5631b4 --- /dev/null +++ b/src/main/core/terminals/renameTerminal.ts @@ -0,0 +1,10 @@ +import { eq, sql } from 'drizzle-orm'; +import { db } from '@main/db/client'; +import { terminals } from '@main/db/schema'; + +export async function renameTerminal(terminalId: string, name: string) { + await db 
+ .update(terminals) + .set({ name, updatedAt: sql`CURRENT_TIMESTAMP` }) + .where(eq(terminals.id, terminalId)); +} diff --git a/src/main/core/terminals/runLifecycleScript.ts b/src/main/core/terminals/runLifecycleScript.ts new file mode 100644 index 000000000..a5a7e14b1 --- /dev/null +++ b/src/main/core/terminals/runLifecycleScript.ts @@ -0,0 +1,30 @@ +import { projectManager } from '../projects/project-manager'; +import { TaskLifecycleService } from '../tasks/task-lifecycle-service'; + +export async function runLifecycleScript({ + projectId, + taskId, + type, +}: { + projectId: string; + taskId: string; + type: 'setup' | 'run' | 'teardown'; +}) { + const project = projectManager.getProject(projectId); + if (!project) throw new Error('Project not found'); + + const script = (await project?.settings.get())?.scripts?.[type]; + if (!script) return; + + const task = project?.getTask(taskId); + if (!task) throw new Error('Task not found'); + + const lifecycle = new TaskLifecycleService({ + projectId, + taskId, + taskPath: task.taskPath, + terminals: task.terminals, + }); + + await lifecycle.runLifecycleScript({ type, script }, { shouldRespawn: true }); +} diff --git a/src/main/core/terminals/terminal-provider.ts b/src/main/core/terminals/terminal-provider.ts new file mode 100644 index 000000000..7c42abeea --- /dev/null +++ b/src/main/core/terminals/terminal-provider.ts @@ -0,0 +1,11 @@ +import { Terminal } from '@shared/terminals'; + +export interface TerminalProvider { + spawnTerminal( + terminal: Terminal, + initialSize: { cols: number; rows: number }, + command?: { command: string; args: string[] } + ): Promise; + killTerminal(terminalId: string): Promise; + destroyAll(): Promise; +} diff --git a/src/main/services/AutoUpdateService.ts b/src/main/core/updates/AutoUpdateService.ts similarity index 94% rename from src/main/services/AutoUpdateService.ts rename to src/main/core/updates/AutoUpdateService.ts index 27bf94cb8..3ccab00e7 100644 --- 
a/src/main/services/AutoUpdateService.ts +++ b/src/main/core/updates/AutoUpdateService.ts @@ -1,7 +1,11 @@ +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; import { app, BrowserWindow } from 'electron'; -import { autoUpdater, UpdateInfo } from 'electron-updater'; -import { log } from '../lib/logger'; -import { formatUpdaterError, sanitizeUpdaterLogArgs } from '../lib/updaterError'; +import _electronUpdater, { type UpdateInfo } from 'electron-updater'; +import { log } from '@main/lib/logger'; +import { formatUpdaterError, sanitizeUpdaterLogArgs } from '@main/lib/updaterError'; + +const { autoUpdater } = _electronUpdater; // Update check intervals (in milliseconds) const UPDATE_CHECK_INTERVALS = { @@ -83,23 +87,19 @@ class AutoUpdateService { private getAppVersion(): string { try { - const { readFileSync } = require('fs'); - const { join } = require('path'); - - // In development, look for package.json in project root - const isDev = !app.isPackaged || process.env.NODE_ENV === 'development'; - - const possiblePaths = isDev - ? 
[ - join(__dirname, '../../../../package.json'), // from dist/main/main/services - join(__dirname, '../../../package.json'), - join(process.cwd(), 'package.json'), - ] - : [join(app.getAppPath(), 'package.json')]; - - for (const path of possiblePaths) { + // app.getVersion() reads from package.json in both dev and production + const version = app.getVersion(); + if (version) return version; + + // Fallback: read package.json directly (dev environment) + const possiblePaths = [ + join(__dirname, '../../package.json'), // from out/main/ + join(process.cwd(), 'package.json'), + join(app.getAppPath(), 'package.json'), + ]; + for (const pkgPath of possiblePaths) { try { - const packageJson = JSON.parse(readFileSync(path, 'utf-8')); + const packageJson = JSON.parse(readFileSync(pkgPath, 'utf-8')); if (packageJson.name === 'emdash' && packageJson.version) { return packageJson.version; } @@ -107,11 +107,9 @@ class AutoUpdateService { continue; } } - - // Fallback: hardcoded version for dev - return '0.3.46'; + return '0.0.0'; } catch { - return '0.3.46'; + return '0.0.0'; } } diff --git a/src/main/services/updateIpc.ts b/src/main/core/updates/controller.ts similarity index 64% rename from src/main/services/updateIpc.ts rename to src/main/core/updates/controller.ts index 9f328df70..5234f7574 100644 --- a/src/main/services/updateIpc.ts +++ b/src/main/core/updates/controller.ts @@ -1,6 +1,7 @@ -import { app, ipcMain } from 'electron'; -import { formatUpdaterError } from '../lib/updaterError'; -import { autoUpdateService } from './AutoUpdateService'; +import { app, shell } from 'electron'; +import { createRPCController } from '@shared/ipc/rpc'; +import { autoUpdateService, type UpdateSettings } from '@main/core/updates/AutoUpdateService'; +import { formatUpdaterError } from './updaterError'; const DEV_HINT_CHECK = 'Updates are disabled in development.'; const DEV_HINT_DOWNLOAD = 'Cannot download updates in development.'; @@ -29,60 +30,45 @@ function getLatestDownloadUrl(): 
string { } } -export function registerUpdateIpc() { - // AutoUpdateService handles all initialization and event listeners - - ipcMain.handle('update:check', async () => { +export const updateController = createRPCController({ + check: async () => { try { - // Always skip in dev mode - no exceptions if (isDev) { - return { - success: false, - error: DEV_HINT_CHECK, - devDisabled: true, - } as any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return { success: false, error: DEV_HINT_CHECK, devDisabled: true } as any; } - // Delegate to AutoUpdateService to avoid race conditions const result = await autoUpdateService.checkForUpdates(false); return { success: true, result: result ?? null }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:download', async () => { + download: async () => { try { - // Always skip in dev mode - no exceptions if (isDev) { - return { - success: false, - error: DEV_HINT_DOWNLOAD, - devDisabled: true, - } as any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return { success: false, error: DEV_HINT_DOWNLOAD, devDisabled: true } as any; } - // Delegate to AutoUpdateService to avoid race conditions await autoUpdateService.downloadUpdate(); return { success: true }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:quit-and-install', async () => { + quitAndInstall: async () => { try { - // Delegate to AutoUpdateService which handles rollback info autoUpdateService.quitAndInstall(); return { success: true }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:open-latest', async () => { + openLatest: async () => { try { - const { shell } = require('electron'); await shell.openExternal(getLatestDownloadUrl()); - // Gracefully quit after opening the external download link so the user can install setTimeout(() 
=> { try { app.quit(); @@ -92,54 +78,53 @@ export function registerUpdateIpc() { } catch (error) { return { success: false, error: error instanceof Error ? error.message : String(error) }; } - }); + }, - // Expose app version for simple comparisons on renderer - ipcMain.handle('update:get-version', () => app.getVersion()); + getVersion: () => app.getVersion(), - // Enhanced IPC handlers for AutoUpdateService - ipcMain.handle('update:get-state', async () => { + getState: async () => { try { const state = autoUpdateService.getState(); return { success: true, data: state }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:get-settings', async () => { + getSettings: async () => { try { const settings = autoUpdateService.getSettings(); return { success: true, data: settings }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:update-settings', async (_event, settings: any) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + updateSettings: async (settings: Partial) => { try { await autoUpdateService.updateSettings(settings); return { success: true }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:get-release-notes', async () => { + getReleaseNotes: async () => { try { const notes = await autoUpdateService.fetchReleaseNotes(); return { success: true, data: notes }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); + }, - ipcMain.handle('update:check-now', async () => { + checkNow: async () => { try { const result = await autoUpdateService.checkForUpdates(false); return { success: true, data: result }; } catch (error) { return { success: false, error: formatUpdaterError(error) }; } - }); -} + }, +}); diff --git a/src/main/core/updates/updaterError.ts b/src/main/core/updates/updaterError.ts new file mode 100644 
index 000000000..2bd4ec047 --- /dev/null +++ b/src/main/core/updates/updaterError.ts @@ -0,0 +1,29 @@ +// Utilities to keep updater errors/logs concise and scrub HTML bodies. +export function stripMarkupAndTruncate(raw: string): string { + if (!raw) return 'Unknown update error'; + + const withoutData = raw.includes('Data:') ? raw.slice(0, raw.indexOf('Data:')) : raw; + const noHtml = withoutData.replace(/ 240 ? `${collapsed.slice(0, 240)}…` : collapsed; +} + +export function formatUpdaterError(error: any): string { + const status = error?.statusCode || error?.code || error?.status; + const statusText = error?.statusMessage || error?.description; + if (status) { + const base = `Update request failed with HTTP ${status}`; + return statusText ? `${base}: ${stripMarkupAndTruncate(String(statusText))}` : base; + } + const message = error instanceof Error ? error.message : String(error ?? 'Unknown update error'); + return stripMarkupAndTruncate(message); +} + +export function sanitizeUpdaterLogArgs(args: any[]) { + return args.map((arg) => { + if (arg instanceof Error) return formatUpdaterError(arg); + if (typeof arg === 'string') return stripMarkupAndTruncate(arg); + return arg; + }); +} diff --git a/src/main/core/utils/exec.ts b/src/main/core/utils/exec.ts new file mode 100644 index 000000000..7f15d2e8a --- /dev/null +++ b/src/main/core/utils/exec.ts @@ -0,0 +1,80 @@ +import { execFile } from 'node:child_process'; +import fs from 'node:fs'; +import { promisify } from 'node:util'; +import { quoteShellArg } from '../../utils/shellEscape'; +import type { SshClientProxy } from '../ssh/ssh-client-proxy'; + +const execFileAsync = promisify(execFile); + +function resolveGitBin(): string { + const candidates = [ + (process.env.GIT_PATH || '').trim(), + '/opt/homebrew/bin/git', + '/usr/local/bin/git', + '/usr/bin/git', + ].filter(Boolean) as string[]; + for (const p of candidates) { + try { + if (fs.existsSync(p)) return p; + } catch {} + } + return 'git'; +} + +const GIT = 
resolveGitBin(); + +export type ExecFn = ( + command: string, + args?: string[], + options?: { cwd?: string; timeout?: number; maxBuffer?: number } +) => Promise<{ stdout: string; stderr: string }>; + +export function getLocalExec(): ExecFn { + return ( + command: string, + args: string[] = [], + options: { cwd?: string; timeout?: number; maxBuffer?: number } = {} + ) => { + const bin = command === 'git' ? GIT : command; + return execFileAsync(bin, args, options); + }; +} + +export function getSshExec(proxy: SshClientProxy): ExecFn { + return ( + command: string, + args: string[] = [], + { cwd }: { cwd?: string; timeout?: number; maxBuffer?: number } = {} + ) => { + const escaped = args.map(quoteShellArg).join(' '); + const inner = args.length ? `${command} ${escaped}` : command; + const withCwd = cwd ? `cd ${quoteShellArg(cwd)} && ${inner}` : inner; + const full = `bash -l -c ${quoteShellArg(withCwd)}`; + + return new Promise((resolve, reject) => { + proxy.client.exec(full, (execErr, stream) => { + if (execErr) return reject(execErr); + let stdout = ''; + let stderr = ''; + stream.on('close', (code: number | null) => { + if ((code ?? 
0) === 0) { + resolve({ stdout, stderr }); + } else { + const e = Object.assign(new Error(stderr || `Process exited with code ${code}`), { + stdout, + stderr, + }); + reject(e); + } + }); + stream.on('data', (d: Buffer) => { + stdout += d.toString('utf-8'); + }); + stream.stderr.on('data', (d: Buffer) => { + stderr += d.toString('utf-8'); + }); + stream.on('error', reject); + }); + }); + }; +} diff --git a/src/main/db/client.ts b/src/main/db/client.ts new file mode 100644 index 000000000..fcc6f4c91 --- /dev/null +++ b/src/main/db/client.ts @@ -0,0 +1,12 @@ +import Database from 'better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; +import { resolveDatabasePath } from './path'; +import * as schema from './schema'; + +export type AppDb = ReturnType>; + +export const sqlite = new Database(resolveDatabasePath()); +sqlite.pragma('journal_mode = WAL'); +sqlite.pragma('busy_timeout = 5000'); + +export const db = drizzle(sqlite, { schema }); diff --git a/src/main/db/default-path.ts b/src/main/db/default-path.ts new file mode 100644 index 000000000..12a524c32 --- /dev/null +++ b/src/main/db/default-path.ts @@ -0,0 +1,39 @@ +import { homedir } from 'node:os'; +import { join } from 'node:path'; + +export const CURRENT_DB_FILENAME = 'emdash2.db'; +export const LEGACY_DB_FILENAMES = ['database.sqlite', 'orcbench.db']; + +/** + * Returns the platform-specific default userData directory for Emdash without + * requiring the Electron `app` module. Matches what `app.getPath('userData')` + * returns in a packaged build (productName = "Emdash"). + * + * Pass this result as `userDataPath` to `resolveDatabasePath()` when running + * outside of Electron (e.g. drizzle-kit CLI). + */ +export function resolveDefaultUserDataPath(): string { + const home = process.env.HOME ?? 
homedir(); + const platform = process.platform; + + if (platform === 'darwin') { + return join(home, 'Library', 'Application Support', 'Emdash'); + } + + if (platform === 'win32') { + const appData = process.env.APPDATA ?? join(home, 'AppData', 'Roaming'); + return join(appData, 'Emdash'); + } + + const xdgConfig = process.env.XDG_CONFIG_HOME ?? join(home, '.config'); + return join(xdgConfig, 'Emdash'); +} + +/** + * Returns the default database file path given a resolved userData directory. + * Does not check for file existence or perform any migration — suitable for + * contexts that only need a path (e.g. drizzle-kit config). + */ +export function defaultDbFilePath(userDataPath: string): string { + return join(userDataPath, CURRENT_DB_FILENAME); +} diff --git a/src/main/db/drizzleClient.ts b/src/main/db/drizzleClient.ts index d4ef5c0c7..1bc0d1dcf 100644 --- a/src/main/db/drizzleClient.ts +++ b/src/main/db/drizzleClient.ts @@ -1,227 +1,48 @@ -import type sqlite3Type from 'sqlite3'; -import { - type AsyncBatchRemoteCallback, - type RemoteCallback, - type SqliteRemoteDatabase, - drizzle, -} from 'drizzle-orm/sqlite-proxy'; - -import * as schema from './schema'; +import Database from 'better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; import { resolveDatabasePath } from './path'; +import * as schema from './schema'; type AppSchema = typeof schema; -export type DrizzleDb = SqliteRemoteDatabase; - -interface InternalClient { - client: DrizzleClient; - owned: boolean; -} +export type DrizzleDb = ReturnType>; export interface DrizzleClient { db: DrizzleDb; - sqlite: sqlite3Type.Database; - close: () => Promise; + sqlite: Database.Database; + close: () => void; } export interface CreateDrizzleClientOptions { - database?: sqlite3Type.Database; + database?: Database.Database; filePath?: string; busyTimeoutMs?: number; - cacheResult?: boolean; } -let sqliteModulePromise: Promise | null = null; -let cachedInternal: InternalClient | null = null; - const 
DEFAULT_BUSY_TIMEOUT_MS = 5000; -async function loadSqliteModule(): Promise { - if (!sqliteModulePromise) { - sqliteModulePromise = import('sqlite3').then((mod) => mod as unknown as typeof sqlite3Type); - } - return sqliteModulePromise; -} - -function normalizeParams(params: unknown[] | undefined): unknown[] { - return Array.isArray(params) ? params : []; -} - -function createCallbacks(db: sqlite3Type.Database) { - const runStatement = ( - sql: string, - params: unknown[] - ): Promise<{ rows: unknown[]; lastID: number; changes: number }> => - new Promise((resolve, reject) => { - db.run(sql, params, function (err) { - if (err) { - reject(err); - return; - } - resolve({ - rows: [], - lastID: this.lastID, - changes: this.changes, - }); - }); - }); - - const allStatement = (sql: string, params: unknown[]): Promise => - new Promise((resolve, reject) => { - db.all(sql, params, (err, rows) => { - if (err) { - reject(err); - return; - } - resolve(rows); - }); - }); - - const getStatement = (sql: string, params: unknown[]): Promise => - new Promise((resolve, reject) => { - db.get(sql, params, (err, row) => { - if (err) { - reject(err); - } else { - resolve(row ?? undefined); - } - }); - }); - - const mapRowToValues = (row: unknown): unknown[] => { - if (Array.isArray(row)) { - return row; - } - if (row && typeof row === 'object') { - return Object.values(row as Record); - } - return []; - }; - - const remote: RemoteCallback = async (sql, params, method) => { - const normalized = normalizeParams(params); - - switch (method) { - case 'run': { - const result = await runStatement(sql, normalized); - return { - rows: result.rows, - lastID: result.lastID, - changes: result.changes, - } as any; - } - case 'all': { - const rows = await allStatement(sql, normalized); - return { rows: rows.map(mapRowToValues) } as any; - } - case 'get': { - const row = await getStatement(sql, normalized); - return { - rows: row === undefined ? 
null : mapRowToValues(row), - } as any; - } - case 'values': { - const rows = await allStatement(sql, normalized); - const values = rows.map((row) => - Array.isArray(row) ? row : Object.values(row as Record) - ); - return { rows: values } as any; - } - default: { - throw new Error(`Unsupported sqlite method "${method}"`); - } - } - }; - - const batch: AsyncBatchRemoteCallback = async (operations) => { - const results: any[] = []; - for (const op of operations) { - results.push(await remote(op.sql, op.params, op.method)); - } - return results; - }; - - return { remote, batch }; -} - -async function openDatabase( - filePath: string, - busyTimeoutMs: number -): Promise { - const sqliteModule = await loadSqliteModule(); - const db = await new Promise((resolve, reject) => { - const instance = new sqliteModule.Database(filePath, (err) => { - if (err) { - reject(err); - } else { - resolve(instance); - } - }); - }); - - if (typeof db.configure === 'function') { - db.configure('busyTimeout', busyTimeoutMs); - } - +function openDatabase(filePath: string, busyTimeoutMs: number): Database.Database { + const db = new Database(filePath); + db.pragma('journal_mode = WAL'); + db.pragma(`busy_timeout = ${busyTimeoutMs}`); return db; } -export async function createDrizzleClient( - options: CreateDrizzleClientOptions = {} -): Promise { +export function createDrizzleClient(options: CreateDrizzleClientOptions = {}): DrizzleClient { if (process.env.EMDASH_DISABLE_NATIVE_DB === '1') { throw new Error('Native SQLite database is disabled via EMDASH_DISABLE_NATIVE_DB=1'); } const busyTimeout = options.busyTimeoutMs ?? DEFAULT_BUSY_TIMEOUT_MS; - const db = - options.database ?? - (await openDatabase(options.filePath ?? resolveDatabasePath(), busyTimeout)); - - const { remote, batch } = createCallbacks(db); - const drizzleDb = drizzle(remote, batch, { schema }); + const sqlite = + options.database ?? openDatabase(options.filePath ?? 
resolveDatabasePath(), busyTimeout); + const db = drizzle(sqlite, { schema }); const client: DrizzleClient = { - db: drizzleDb, - sqlite: db, - close: () => - new Promise((resolve, reject) => { - db.close((err) => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }), + db, + sqlite, + close: () => sqlite.close(), }; - const shouldCache = options.cacheResult ?? (!options.database && options.filePath === undefined); - - if (shouldCache) { - cachedInternal = { - client, - owned: !options.database, - }; - } - return client; } - -export async function getDrizzleClient(): Promise { - if (cachedInternal) { - return cachedInternal.client; - } - - return await createDrizzleClient(); -} - -export async function resetDrizzleClient(): Promise { - if (!cachedInternal) return; - - if (cachedInternal.owned) { - await cachedInternal.client.close().catch(() => {}); - } - - cachedInternal = null; -} diff --git a/src/main/db/initialize.ts b/src/main/db/initialize.ts new file mode 100644 index 000000000..a713df77a --- /dev/null +++ b/src/main/db/initialize.ts @@ -0,0 +1,64 @@ +import { createHash } from 'node:crypto'; +import type BetterSqlite3 from 'better-sqlite3'; +import journal from '@root/drizzle/meta/_journal.json'; +import { sqlite } from './client'; + +// Vite bundles all migration SQL files at build time — no runtime path resolution needed. +// Each value is the raw SQL string content of the file. 
+const sqlFiles = import.meta.glob('@root/drizzle/*.sql', { + query: '?raw', + import: 'default', + eager: true, +}) as Record; + +type JournalEntry = { idx: number; when: number; tag: string; breakpoints: boolean }; + +function runBundledMigrations(connection: BetterSqlite3.Database): void { + connection.exec(` + CREATE TABLE IF NOT EXISTS __drizzle_migrations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + hash TEXT NOT NULL, + created_at NUMERIC + ) + `); + + const lastRow = connection + .prepare('SELECT created_at FROM __drizzle_migrations ORDER BY created_at DESC LIMIT 1') + .get() as { created_at: number } | undefined; + const lastTimestamp = lastRow?.created_at ?? 0; + + connection.transaction(() => { + for (const entry of (journal as { entries: JournalEntry[] }).entries) { + if (entry.when <= lastTimestamp) continue; + + const sqlKey = Object.keys(sqlFiles).find((k) => k.includes(entry.tag)); + if (!sqlKey) throw new Error(`Missing bundled SQL for migration: ${entry.tag}`); + + const sql = sqlFiles[sqlKey]; + const hash = createHash('sha256').update(sql).digest('hex'); + + for (const stmt of sql.split('--> statement-breakpoint')) { + const trimmed = stmt.trim(); + if (trimmed) connection.exec(trimmed); + } + + connection + .prepare('INSERT INTO __drizzle_migrations (hash, created_at) VALUES (?, ?)') + .run(hash, entry.when); + } + })(); +} + +/** + * Runs all pending migrations against the shared SQLite connection and validates + * the schema contract. Call this once in main.ts before any db queries run. + * + * Throws `DatabaseSchemaMismatchError` when required columns/tables are missing + * after migration (e.g. the user downgraded from a newer build). + * + * Returns the raw better-sqlite3 handle so the caller can close it on shutdown. 
+ */ +export async function initializeDatabase(): Promise { + runBundledMigrations(sqlite); + return sqlite; +} diff --git a/src/main/db/kv.ts b/src/main/db/kv.ts new file mode 100644 index 000000000..a0eece1e3 --- /dev/null +++ b/src/main/db/kv.ts @@ -0,0 +1,49 @@ +import { eq } from 'drizzle-orm'; +import { db } from './client'; +import { kv } from './schema'; + +export class KV> { + constructor(private readonly namespace: string) {} + + private prefixed(key: string): string { + return `${this.namespace}:${key}`; + } + + async get(key: K): Promise { + const rows = await db + .select({ value: kv.value }) + .from(kv) + .where(eq(kv.key, this.prefixed(key))) + .limit(1); + + const raw = rows[0]?.value; + if (raw === undefined || raw === null) return null; + + try { + return JSON.parse(raw) as TSchema[K]; + } catch { + return null; + } + } + + async set(key: K, value: TSchema[K]): Promise { + try { + const serialised = JSON.stringify(value); + const now = Date.now(); + await db + .insert(kv) + .values({ key: this.prefixed(key), value: serialised, updatedAt: now }) + .onConflictDoUpdate({ target: kv.key, set: { value: serialised, updatedAt: now } }); + } catch { + // kv table may not exist yet during the first-run migration window + } + } + + async del(key: K): Promise { + try { + await db.delete(kv).where(eq(kv.key, this.prefixed(key))); + } catch { + // kv table may not exist yet during the first-run migration window + } + } +} diff --git a/src/main/db/path.ts b/src/main/db/path.ts index c30bc5aa0..d919cabe0 100644 --- a/src/main/db/path.ts +++ b/src/main/db/path.ts @@ -1,9 +1,7 @@ -import { existsSync, renameSync } from 'fs'; -import { dirname, join, resolve } from 'path'; +import { existsSync, renameSync } from 'node:fs'; +import { dirname, join, resolve } from 'node:path'; import { app } from 'electron'; - -const CURRENT_DB_FILENAME = 'emdash.db'; -const LEGACY_DB_FILENAMES = ['database.sqlite', 'orcbench.db']; +import { CURRENT_DB_FILENAME, LEGACY_DB_FILENAMES } 
from './default-path'; export interface ResolveDatabasePathOptions { userDataPath?: string; @@ -62,83 +60,3 @@ export const databaseFilenames = { current: CURRENT_DB_FILENAME, legacy: [...LEGACY_DB_FILENAMES], }; - -export function resolveMigrationsPath(): string | null { - const { realpathSync } = require('fs'); - const appPath = app.getAppPath(); - const resourcesPath = process.resourcesPath ?? appPath; - - // Resolve symlinks to get actual paths (handles Homebrew, symlinks, etc.) - const resolveRealPath = (p: string): string | null => { - try { - return realpathSync(p); - } catch { - return null; - } - }; - - // Get the executable directory (handles more cases) - const exePath = app.getPath('exe'); - const exeDir = dirname(exePath); - - const candidates = [ - // Standard Electron paths - join(appPath, 'drizzle'), - join(appPath, '..', 'drizzle'), - join(resourcesPath, 'drizzle'), - - // Handle ASAR unpacked - join(resourcesPath, 'app.asar.unpacked', 'drizzle'), - - // Handle Homebrew and other symlinked installations - ...(resolveRealPath(appPath) - ? [ - join(resolveRealPath(appPath)!, 'drizzle'), - join(resolveRealPath(appPath)!, '..', 'drizzle'), - ] - : []), - - // Handle macOS app bundle structure - join(exeDir, '..', 'Resources', 'drizzle'), - join(exeDir, '..', 'Resources', 'app', 'drizzle'), - join(exeDir, '..', 'Resources', 'app.asar.unpacked', 'drizzle'), - - // Development paths - join(process.cwd(), 'drizzle'), - join(__dirname, '..', '..', '..', 'drizzle'), - - // Handle translocated apps on macOS - ...(process.platform === 'darwin' && appPath.includes('AppTranslocation') - ? 
[join(appPath.split('AppTranslocation')[0], 'drizzle')] - : []), - ]; - - // Remove duplicates and try each candidate - const uniqueCandidates = [...new Set(candidates.filter(Boolean))]; - - for (const candidate of uniqueCandidates) { - if (existsSync(candidate)) { - // Verify it's actually a directory with migration files - try { - const files = require('fs').readdirSync(candidate); - if (files.some((f: string) => f.endsWith('.sql'))) { - console.log(`Found migrations at: ${candidate}`); - return candidate; - } - } catch { - // Not a valid directory, continue - } - } - } - - // Log diagnostic information to help debug - console.error('Failed to find drizzle migrations folder. Searched paths:'); - console.error('- appPath:', appPath); - console.error('- resourcesPath:', resourcesPath); - console.error('- exeDir:', exeDir); - console.error('- cwd:', process.cwd()); - console.error('- __dirname:', __dirname); - console.error('- Candidates checked:', uniqueCandidates); - - return null; -} diff --git a/src/main/db/schema.ts b/src/main/db/schema.ts index 44082d084..908a713a4 100644 --- a/src/main/db/schema.ts +++ b/src/main/db/schema.ts @@ -1,5 +1,12 @@ import { relations, sql } from 'drizzle-orm'; -import { index, integer, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core'; +import { + index, + integer, + primaryKey, + sqliteTable, + text, + uniqueIndex, +} from 'drizzle-orm/sqlite-core'; export const sshConnections = sqliteTable( 'ssh_connections', @@ -12,6 +19,7 @@ export const sshConnections = sqliteTable( authType: text('auth_type').notNull().default('agent'), // 'password' | 'key' | 'agent' privateKeyPath: text('private_key_path'), // optional, for key auth useAgent: integer('use_agent').notNull().default(0), // boolean, 0=false, 1=true + metadata: text('metadata'), // JSON for additional connection-specific data createdAt: text('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), @@ -31,16 +39,12 @@ export const projects = sqliteTable( id: 
text('id').primaryKey(), name: text('name').notNull(), path: text('path').notNull(), - gitRemote: text('git_remote'), - gitBranch: text('git_branch'), + workspaceProvider: text('workspace_provider').notNull().default('local'), // 'local' | 'ssh' | 'vm' baseRef: text('base_ref'), - githubRepository: text('github_repository'), - githubConnected: integer('github_connected').notNull().default(0), + gitRemote: text('git_remote'), sshConnectionId: text('ssh_connection_id').references(() => sshConnections.id, { onDelete: 'set null', }), - isRemote: integer('is_remote').notNull().default(0), // boolean, 0=false, 1=true - remotePath: text('remote_path'), // path on remote server createdAt: text('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), @@ -51,7 +55,20 @@ export const projects = sqliteTable( (table) => ({ pathIdx: uniqueIndex('idx_projects_path').on(table.path), sshConnectionIdIdx: index('idx_projects_ssh_connection_id').on(table.sshConnectionId), - isRemoteIdx: index('idx_projects_is_remote').on(table.isRemote), + }) +); + +export const appSettings = sqliteTable( + 'app_settings', + { + key: text('key').primaryKey(), + value: text('value').notNull(), + updatedAt: integer('updated_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + }, + (table) => ({ + keyIdx: uniqueIndex('idx_app_settings_key').on(table.key), }) ); @@ -63,12 +80,10 @@ export const tasks = sqliteTable( .notNull() .references(() => projects.id, { onDelete: 'cascade' }), name: text('name').notNull(), - branch: text('branch').notNull(), - path: text('path').notNull(), - status: text('status').notNull().default('idle'), - agentId: text('agent_id'), - metadata: text('metadata'), - useWorktree: integer('use_worktree').notNull().default(1), + status: text('status').notNull(), + sourceBranch: text('source_branch').notNull(), + taskBranch: text('task_branch'), + linkedIssue: text('linked_issue'), archivedAt: text('archived_at'), // null = active, timestamp = archived createdAt: text('created_at') 
.notNull() @@ -82,19 +97,62 @@ export const tasks = sqliteTable( }) ); +export const pullRequests = sqliteTable( + 'pull_requests', + { + id: text('id').primaryKey(), + provider: text('provider').notNull().default('github'), + nameWithOwner: text('name_with_owner').notNull().default(''), + url: text('url').notNull(), + title: text('title').notNull(), + status: text('status').notNull().default('open'), + author: text('author'), + isDraft: integer('is_draft'), + metadata: text('metadata'), + createdAt: text('created_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + updatedAt: text('updated_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + fetchedAt: text('fetched_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + }, + (table) => ({ + urlIdx: uniqueIndex('idx_pull_requests_url').on(table.url), + nameWithOwnerIdx: index('idx_pull_requests_name_with_owner').on(table.nameWithOwner), + }) +); + +export const tasksPullRequests = sqliteTable( + 'tasks_pull_requests', + { + taskId: text('task_id') + .notNull() + .references(() => tasks.id, { onDelete: 'cascade' }), + pullRequestUrl: text('pull_request_url') + .notNull() + .references(() => pullRequests.url, { onDelete: 'cascade' }), + }, + (table) => ({ + pk: primaryKey({ columns: [table.taskId, table.pullRequestUrl] }), + }) +); + export const conversations = sqliteTable( 'conversations', { id: text('id').primaryKey(), + projectId: text('project_id') + .notNull() + .references(() => projects.id, { onDelete: 'cascade' }), taskId: text('task_id') .notNull() .references(() => tasks.id, { onDelete: 'cascade' }), title: text('title').notNull(), - provider: text('provider'), // AI provider for this chat (claude, codex, qwen, etc.) 
- isActive: integer('is_active').notNull().default(0), // 1 if this is the active chat for the task - isMain: integer('is_main').notNull().default(0), // 1 if this is the main/primary chat (gets full persistence) - displayOrder: integer('display_order').notNull().default(0), // Order in the tab bar - metadata: text('metadata'), // JSON for additional chat-specific data + provider: text('provider'), + config: text('config'), createdAt: text('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), @@ -104,7 +162,30 @@ export const conversations = sqliteTable( }, (table) => ({ taskIdIdx: index('idx_conversations_task_id').on(table.taskId), - activeIdx: index('idx_conversations_active').on(table.taskId, table.isActive), // Index for quick active conversation lookup + }) +); + +export const terminals = sqliteTable( + 'terminals', + { + id: text('id').primaryKey(), + projectId: text('project_id') + .notNull() + .references(() => projects.id, { onDelete: 'cascade' }), + taskId: text('task_id') + .notNull() + .references(() => tasks.id, { onDelete: 'cascade' }), + ssh: integer('ssh').notNull().default(0), // boolean, 0=false, 1=true + name: text('name').notNull(), + createdAt: text('created_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + updatedAt: text('updated_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + }, + (table) => ({ + taskIdIdx: index('idx_terminals_task_id').on(table.taskId), }) ); @@ -152,6 +233,42 @@ export const lineComments = sqliteTable( }) ); +export const editorBuffers = sqliteTable( + 'editor_buffers', + { + id: text('id').primaryKey(), // `${projectId}:${taskId}:${filePath}` + projectId: text('project_id') + .notNull() + .references(() => projects.id, { onDelete: 'cascade' }), + taskId: text('task_id') + .notNull() + .references(() => tasks.id, { onDelete: 'cascade' }), + filePath: text('file_path').notNull(), + content: text('content').notNull(), + updatedAt: integer('updated_at').notNull(), + }, + (table) => ({ + taskFileIdx: 
index('idx_editor_buffers_task_file').on(table.taskId, table.filePath), + }) +); + +export const kv = sqliteTable( + 'kv', + { + key: text('key').primaryKey(), + value: text('value').notNull(), + updatedAt: integer('updated_at') + .notNull() + .default(sql`CURRENT_TIMESTAMP`), + }, + (table) => ({ + keyIdx: uniqueIndex('idx_kv_key').on(table.key), + }) +); + +export type KvRow = typeof kv.$inferSelect; +export type KvInsert = typeof kv.$inferInsert; + export const sshConnectionsRelations = relations(sshConnections, ({ many }) => ({ projects: many(projects), })); @@ -200,6 +317,9 @@ export type SshConnectionInsert = typeof sshConnections.$inferInsert; export type ProjectRow = typeof projects.$inferSelect; export type TaskRow = typeof tasks.$inferSelect; export type ConversationRow = typeof conversations.$inferSelect; +export type TerminalRow = typeof terminals.$inferSelect; export type MessageRow = typeof messages.$inferSelect; export type LineCommentRow = typeof lineComments.$inferSelect; export type LineCommentInsert = typeof lineComments.$inferInsert; +export type EditorBufferRow = typeof editorBuffers.$inferSelect; +export type EditorBufferInsert = typeof editorBuffers.$inferInsert; diff --git a/src/main/db/sshRepository.ts b/src/main/db/sshRepository.ts index 375adb72d..aea6e27ce 100644 --- a/src/main/db/sshRepository.ts +++ b/src/main/db/sshRepository.ts @@ -1,10 +1,10 @@ import { eq } from 'drizzle-orm'; -import { getDrizzleClient } from './drizzleClient'; +import { db } from './client'; import { - sshConnections, projects, - type SshConnectionRow, + sshConnections, type SshConnectionInsert, + type SshConnectionRow, } from './schema'; export class SshRepository { @@ -20,7 +20,6 @@ export class SshRepository { async createConnection( data: Omit ): Promise { - const { db } = await getDrizzleClient(); const id = `ssh_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; const result = await db @@ -37,13 +36,11 @@ export class SshRepository { } async 
getConnection(id: string): Promise { - const { db } = await getDrizzleClient(); const result = await db.select().from(sshConnections).where(eq(sshConnections.id, id)); return result[0]; } async getAllConnections(): Promise { - const { db } = await getDrizzleClient(); return db.select().from(sshConnections); } @@ -51,7 +48,6 @@ export class SshRepository { id: string, data: Partial ): Promise { - const { db } = await getDrizzleClient(); const result = await db .update(sshConnections) .set({ @@ -64,12 +60,10 @@ export class SshRepository { } async deleteConnection(id: string): Promise { - const { db } = await getDrizzleClient(); - // First update any projects using this connection await db .update(projects) - .set({ sshConnectionId: null, isRemote: 0 }) + .set({ sshConnectionId: null }) .where(eq(projects.sshConnectionId, id)); // Then delete the connection @@ -77,7 +71,6 @@ export class SshRepository { } async getProjectsForConnection(connectionId: string): Promise { - const { db } = await getDrizzleClient(); const result = await db .select({ id: projects.id }) .from(projects) diff --git a/src/main/entry.ts b/src/main/entry.ts deleted file mode 100644 index 7e3ec37b0..000000000 --- a/src/main/entry.ts +++ /dev/null @@ -1,42 +0,0 @@ -// Runtime entry that installs path aliases for compiled JS before loading the app. -// This avoids '@shared/*' resolution failures in the compiled Electron main process. -import path from 'node:path'; - -// Ensure app name is set BEFORE any module reads app.getPath('userData'). -// In dev builds, if userData is resolved before app name is set, Electron defaults to -// ~/Library/Application Support/Electron which leads to confusing "missing DB/migrations" behavior. -try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const { app } = require('electron'); - app.setName('Emdash'); -} catch {} - -// Install minimal path alias resolver without external deps. 
-// Maps: -// @shared/* -> dist/main/shared/* -// @/* -> dist/main/main/* -try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const Module = require('module'); - const base = path.join(__dirname, '..'); // dist/main - const sharedBase = path.join(base, 'shared'); - const mainBase = path.join(base, 'main'); - const orig = Module._resolveFilename; - Module._resolveFilename = function (request: string, parent: any, isMain: boolean, options: any) { - if (typeof request === 'string') { - if (request.startsWith('@shared/')) { - const mapped = path.join(sharedBase, request.slice('@shared/'.length)); - return orig.call(this, mapped, parent, isMain, options); - } - if (request.startsWith('@/')) { - const mapped = path.join(mainBase, request.slice('@/'.length)); - return orig.call(this, mapped, parent, isMain, options); - } - } - return orig.call(this, request, parent, isMain, options); - }; -} catch {} - -// Load the actual application bootstrap -// eslint-disable-next-line @typescript-eslint/no-var-requires -require('./main'); diff --git a/src/main/errorTracking.ts b/src/main/errorTracking.ts deleted file mode 100644 index 08e22d6e1..000000000 --- a/src/main/errorTracking.ts +++ /dev/null @@ -1,394 +0,0 @@ -import { app } from 'electron'; -import * as telemetry from './telemetry'; -import { log } from './lib/logger'; - -/** - * Error tracking module for comprehensive error reporting with PostHog. 
- */ - -interface ErrorContext { - // User context - github_username?: string | null; - - // Operation context - operation?: string; - service?: string; - component?: string; - - // Error classification - error_type?: string; - severity?: 'low' | 'medium' | 'high' | 'critical'; - - // Agent/Provider context - provider?: string; - task_id?: string; - workspace_id?: string; - - // Project context - project_id?: string; - project_path?: string; - - // Git/Worktree context - branch_name?: string; - worktree_path?: string; - git_operation?: string; - - // Additional debugging info - [key: string]: any; -} - -class ErrorTracking { - private githubUsername: string | null = null; - private sessionErrors: number = 0; - private lastErrorTimestamp: number = 0; - - /** - * Initialize error tracking - */ - async init() { - try { - this.githubUsername = await this.fetchGithubUsername(); - if (this.githubUsername) { - log.info('ErrorTracking initialized with GitHub user', { username: this.githubUsername }); - } - } catch { - // Silent fail - } - } - - async captureException(error: Error | unknown, context?: ErrorContext): Promise { - try { - // Rate limiting to prevent error spam - const now = Date.now(); - if (now - this.lastErrorTimestamp < 100) { - return; // Skip if error happened within 100ms - } - this.lastErrorTimestamp = now; - this.sessionErrors++; - - // Build error object - const errorObj = error instanceof Error ? 
error : new Error(String(error)); - const errorMessage = errorObj.message || 'Unknown error'; - const errorStack = errorObj.stack || ''; - - if (!this.githubUsername) { - this.githubUsername = await this.fetchGithubUsername(); - } - - // Determine severity if not provided - const severity = context?.severity || this.determineSeverity(errorMessage, context); - - // Build comprehensive error properties following PostHog's $exception format - const properties: Record = { - // PostHog required fields for error tracking - $exception_message: errorMessage.slice(0, 500), // Required by PostHog - $exception_type: context?.error_type || this.classifyError(errorMessage), // Required - $exception_stack_trace_raw: errorStack.slice(0, 2000), // Required for stack traces - $exception_fingerprint: `${context?.service || 'unknown'}_${context?.operation || 'unknown'}_${context?.error_type || this.classifyError(errorMessage)}`, // For grouping - - // Additional context - severity, - - // User context - github_username: this.githubUsername, - - // Session context - session_errors: this.sessionErrors, - app_version: this.getAppVersion(), - electron_version: process.versions.electron, - platform: process.platform, - arch: process.arch, - is_dev: !app.isPackaged, - - // Operation context - operation: context?.operation, - service: context?.service, - component: context?.component || 'main', - - // Agent/Provider context - provider: context?.provider, - task_id: context?.task_id, - workspace_id: context?.workspace_id, - - // Project context - project_id: context?.project_id, - project_path: context?.project_path, - - // Git context - branch_name: context?.branch_name, - worktree_path: context?.worktree_path, - git_operation: context?.git_operation, - - // Timestamp - error_timestamp: new Date().toISOString(), - - // Additional custom context - ...this.sanitizeContext(context), - }; - - // Filter out undefined/null values - const cleanProperties = Object.fromEntries( - 
Object.entries(properties).filter(([_, v]) => v !== undefined && v !== null) - ); - - // Send to PostHog using proper exception tracking - telemetry.captureException(errorObj, cleanProperties); - - // Also log locally for debugging - log.error('Exception captured', { - message: errorMessage, - severity, - operation: context?.operation, - service: context?.service, - }); - } catch (trackingError) { - // Never let error tracking crash the app - log.warn('Failed to capture exception', { error: trackingError }); - } - } - - /** - * Capture a critical error that might affect app stability - */ - async captureCriticalError(error: Error | unknown, context?: ErrorContext): Promise { - await this.captureException(error, { - ...context, - severity: 'critical', - }); - } - - /** - * Track agent provider spawn errors - */ - async captureAgentSpawnError( - error: Error | unknown, - provider: string, - taskId: string, - additionalContext?: Partial - ): Promise { - await this.captureException(error, { - operation: 'agent_spawn', - service: 'ptyManager', - error_type: 'spawn_error', - severity: 'high', - provider, - task_id: taskId, - ...additionalContext, - }); - } - - /** - * Track project initialization errors - */ - async captureProjectError( - error: Error | unknown, - operation: 'create' | 'clone' | 'open' | 'delete', - projectPath?: string, - additionalContext?: Partial - ): Promise { - await this.captureException(error, { - operation: `project_${operation}`, - service: 'projectIpc', - error_type: 'project_error', - severity: operation === 'create' || operation === 'clone' ? 
'high' : 'medium', - project_path: projectPath, - ...additionalContext, - }); - } - - /** - * Track worktree creation errors - */ - async captureWorktreeError( - error: Error | unknown, - operation: string, - worktreePath?: string, - branchName?: string, - additionalContext?: Partial - ): Promise { - await this.captureException(error, { - operation: `worktree_${operation}`, - service: 'WorktreeService', - error_type: 'worktree_error', - severity: 'high', - worktree_path: worktreePath, - branch_name: branchName, - ...additionalContext, - }); - } - - /** - * Track GitHub API errors - */ - async captureGitHubError( - error: Error | unknown, - operation: string, - additionalContext?: Partial - ): Promise { - await this.captureException(error, { - operation: `github_${operation}`, - service: 'GitHubService', - error_type: 'github_error', - severity: this.isAuthError(error) ? 'critical' : 'medium', - ...additionalContext, - }); - } - - /** - * Track database errors - */ - async captureDatabaseError( - error: Error | unknown, - operation: string, - additionalContext?: Partial - ): Promise { - await this.captureException(error, { - operation: `db_${operation}`, - service: 'DatabaseService', - error_type: 'database_error', - severity: 'high', - ...additionalContext, - }); - } - - /** - * Update GitHub username (call when user authenticates) - */ - async updateGithubUsername(username: string | null) { - this.githubUsername = username; - } - - // Private helper methods - - private async fetchGithubUsername(): Promise { - try { - // Lazy import to avoid circular dependencies - const { githubService } = require('./services/GitHubService'); - const user = await githubService.getCurrentUser(); - return user?.login || null; - } catch { - return null; - } - } - - private getAppVersion(): string { - try { - return app.getVersion(); - } catch { - return 'unknown'; - } - } - - private determineSeverity( - errorMessage: string, - context?: ErrorContext - ): ErrorContext['severity'] { - 
// Critical errors - if ( - errorMessage.includes('FATAL') || - errorMessage.includes('CRASH') || - errorMessage.includes('out of memory') || - context?.error_type === 'database_error' - ) { - return 'critical'; - } - - // High severity - if ( - errorMessage.includes('spawn') || - errorMessage.includes('PTY') || - errorMessage.includes('worktree') || - errorMessage.includes('permission denied') || - context?.operation?.includes('agent_spawn') - ) { - return 'high'; - } - - // Low severity - if ( - errorMessage.includes('canceled') || - errorMessage.includes('aborted') || - errorMessage.includes('timeout') - ) { - return 'low'; - } - - return 'medium'; - } - - private classifyError(errorMessage: string): string { - if (errorMessage.includes('spawn') || errorMessage.includes('PTY')) { - return 'spawn_error'; - } - if (errorMessage.includes('git') || errorMessage.includes('worktree')) { - return 'git_error'; - } - if (errorMessage.includes('database') || errorMessage.includes('sqlite')) { - return 'database_error'; - } - if (errorMessage.includes('network') || errorMessage.includes('fetch')) { - return 'network_error'; - } - if (errorMessage.includes('permission') || errorMessage.includes('EACCES')) { - return 'permission_error'; - } - if (errorMessage.includes('not found') || errorMessage.includes('ENOENT')) { - return 'file_not_found'; - } - return 'unknown_error'; - } - - private isAuthError(error: unknown): boolean { - const message = error instanceof Error ? 
error.message : String(error); - return ( - message.includes('auth') || - message.includes('unauthorized') || - message.includes('401') || - message.includes('403') - ); - } - - private sanitizeContext(context?: ErrorContext): Record { - if (!context) return {}; - - // Remove sensitive keys and limit string lengths - const sanitized: Record = {}; - const sensitiveKeys = ['password', 'token', 'secret', 'key', 'auth']; - - for (const [key, value] of Object.entries(context)) { - // Skip if already processed or sensitive - if (['severity', 'operation', 'service', 'component', 'error_type'].includes(key)) { - continue; - } - if (sensitiveKeys.some((sensitive) => key.toLowerCase().includes(sensitive))) { - continue; - } - - // Sanitize value - if (typeof value === 'string') { - sanitized[key] = value.slice(0, 200); - } else if (typeof value === 'number' || typeof value === 'boolean') { - sanitized[key] = value; - } else if (value === null || value === undefined) { - // Skip null/undefined - } else { - // Convert objects to string with limit - try { - sanitized[key] = JSON.stringify(value).slice(0, 200); - } catch { - // Skip if can't stringify - } - } - } - - return sanitized; - } -} - -// Export singleton instance -export const errorTracking = new ErrorTracking(); - -// Export helper for backward compatibility -export function captureException(error: Error | unknown, context?: ErrorContext) { - return errorTracking.captureException(error, context); -} diff --git a/src/main/index.ts b/src/main/index.ts new file mode 100644 index 000000000..65235f70b --- /dev/null +++ b/src/main/index.ts @@ -0,0 +1,128 @@ +import { join } from 'node:path'; +import dotenv from 'dotenv'; +import { app, BrowserWindow, dialog, ipcMain } from 'electron'; +import dockIcon from '@/assets/images/emdash/icon-dock.png?asset'; +import { registerRPCRouter } from '@shared/ipc/rpc'; +import { setupApplicationMenu } from './app/menu'; +import { registerAppScheme, setupAppProtocol } from 
'./app/protocol'; +import { createMainWindow } from './app/window'; +import { providerTokenRegistry } from './core/account/provider-token-registry'; +import { emdashAccountService } from './core/account/services/emdash-account-service'; +import { appService } from './core/app/service'; +import { localDependencyManager } from './core/dependencies/dependency-manager'; +import { editorBufferService } from './core/editor/editor-buffer-service'; +import { githubAuthService } from './core/github/services/github-auth-service'; +import { projectManager } from './core/projects/project-manager'; +import { appSettingsService } from './core/settings/settings-service'; +import { autoUpdateService } from './core/updates/AutoUpdateService'; +import { initializeDatabase } from './db/initialize'; +import { log } from './lib/logger'; +import * as telemetry from './lib/telemetry'; +import { rpcRouter } from './rpc'; + +dotenv.config({ path: join(__dirname, '..', '..', '.env') }); + +if (process.platform === 'linux') { + app.commandLine.appendSwitch('ozone-platform-hint', 'auto'); +} + +registerAppScheme(); + +app.setName('Emdash'); + +app.on('second-instance', () => { + const win = BrowserWindow.getAllWindows()[0]; + if (win?.isMinimized()) win.restore(); + win?.focus(); +}); + +if (!import.meta.env.DEV && !app.requestSingleInstanceLock()) { + app.quit(); + process.exit(0); +} + +if (import.meta.env.DEV) { + try { + app.dock?.setIcon(dockIcon); + } catch (err) { + log.warn('Failed to set dock icon:', err); + } +} + +app.on('window-all-closed', () => { + if (process.platform !== 'darwin') { + app.quit(); + } +}); + +app.on('activate', () => { + if (BrowserWindow.getAllWindows().length === 0) { + createMainWindow(); + } +}); + +app.whenReady().then(async () => { + try { + await initializeDatabase(); + const BUFFER_STALE_MS = 30 * 24 * 60 * 60 * 1000; // 30 days + editorBufferService.pruneStale(BUFFER_STALE_MS).catch((e) => { + log.warn('Failed to prune stale editor buffers:', e); + }); 
+ } catch (error) { + log.error('Failed to initialize database:', error); + dialog.showErrorBox( + 'Database Initialization Failed', + `Emdash could not start because the database failed to initialize.\n\n${error instanceof Error ? error.message : String(error)}` + ); + app.quit(); + return; + } + + try { + await telemetry.init({ installSource: app.isPackaged ? 'dmg' : 'dev' }); + } catch (e) { + log.warn('telemetry init failed:', e); + } + + appService.initialize(); + appSettingsService.initialize(); + + emdashAccountService.loadSessionToken().catch((e) => { + log.warn('Failed to load account session token:', e); + }); + + providerTokenRegistry.register('github', (token) => githubAuthService.storeToken(token)); + + registerRPCRouter(rpcRouter, ipcMain); + + projectManager.initialize().catch((e) => { + log.error('Failed to initialize environment providers:', e); + }); + + localDependencyManager.probeAll().catch((e) => { + log.error('Failed to probe dependencies:', e); + }); + + setupAppProtocol(join(app.getAppPath(), 'out', 'renderer')); + setupApplicationMenu(); + createMainWindow(); + + try { + await autoUpdateService.initialize(); + } catch (error) { + if (app.isPackaged) { + log.error('Failed to initialize auto-update service:', error); + } + } +}); + +app.on('before-quit', () => { + telemetry.capture('app_session'); + telemetry.capture('app_closed'); + telemetry.shutdown(); + + autoUpdateService.shutdown(); + projectManager.shutdown().catch((e) => { + log.error('Failed to shutdown project manager:', e); + }); +}); diff --git a/src/main/ipc/appIpc.ts b/src/main/ipc/appIpc.ts deleted file mode 100644 index b43c64df1..000000000 --- a/src/main/ipc/appIpc.ts +++ /dev/null @@ -1,630 +0,0 @@ -import { app, clipboard, ipcMain, shell } from 'electron'; -import { exec, execFile } from 'child_process'; -import { readFile } from 'fs/promises'; -import { join } from 'path'; -import { ensureProjectPrepared } from '../services/ProjectPrep'; -import { getAppSettings } from 
'../settings'; -import { - getAppById, - getResolvedLabel, - OPEN_IN_APPS, - type OpenInAppId, - type PlatformKey, -} from '@shared/openInApps'; -import { databaseService } from '../services/DatabaseService'; -import { buildExternalToolEnv } from '../utils/childProcessEnv'; -import { - buildGhosttyRemoteExecArgs, - buildRemoteEditorUrl, - buildRemoteSshCommand, -} from '../utils/remoteOpenIn'; - -const UNKNOWN_VERSION = 'unknown'; - -let cachedAppVersion: string | null = null; -let cachedAppVersionPromise: Promise | null = null; -const FONT_CACHE_TTL_MS = 5 * 60 * 1000; -let cachedInstalledFonts: { fonts: string[]; fetchedAt: number } | null = null; - -const execCommand = ( - command: string, - opts?: { maxBuffer?: number; timeout?: number } -): Promise => { - return new Promise((resolve, reject) => { - exec( - command, - { - maxBuffer: opts?.maxBuffer ?? 8 * 1024 * 1024, - timeout: opts?.timeout ?? 30000, - env: buildExternalToolEnv(), - }, - (error, stdout) => { - if (error) return reject(error); - resolve(stdout ?? ''); - } - ); - }); -}; - -const execFileCommand = ( - file: string, - args: string[], - opts?: { timeout?: number } -): Promise => { - return new Promise((resolve, reject) => { - execFile( - file, - args, - { - timeout: opts?.timeout ?? 
30000, - env: buildExternalToolEnv(), - }, - (error) => { - if (error) return reject(error); - resolve(); - } - ); - }); -}; - -const escapeAppleScriptString = (value: string): string => - value.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); - -const dedupeAndSortFonts = (fonts: string[]): string[] => { - const unique = Array.from(new Set(fonts.map((font) => font.trim()).filter(Boolean))); - return unique.sort((a, b) => a.localeCompare(b)); -}; - -const listInstalledFontsMac = async (): Promise => { - const stdout = await execCommand('system_profiler SPFontsDataType -json', { - maxBuffer: 24 * 1024 * 1024, - timeout: 60000, - }); - const parsed = JSON.parse(stdout) as { - SPFontsDataType?: Array<{ - typefaces?: Array<{ family?: string; fullname?: string }>; - _name?: string; - }>; - }; - const fonts: string[] = []; - for (const item of parsed.SPFontsDataType ?? []) { - for (const typeface of item.typefaces ?? []) { - if (typeface.family) fonts.push(typeface.family); - } - } - return dedupeAndSortFonts(fonts); -}; - -const listInstalledFontsLinux = async (): Promise => { - const stdout = await execCommand('fc-list : family', { timeout: 30000 }); - const fonts = stdout - .split('\n') - .flatMap((line) => line.split(',')) - .map((font) => font.trim()) - .filter(Boolean); - return dedupeAndSortFonts(fonts); -}; - -const listInstalledFontsWindows = async (): Promise => { - const script = - "$fonts = Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Fonts';" + - "$props = $fonts.PSObject.Properties | Where-Object { $_.Name -notlike 'PS*' };" + - "$props | ForEach-Object { ($_.Name -replace '\\s*\\(.*\\)$','').Trim() }"; - const stdout = await execCommand(`powershell -NoProfile -Command "${script}"`, { - timeout: 30000, - }); - const fonts = stdout - .split('\n') - .map((line) => line.trim()) - .filter(Boolean); - return dedupeAndSortFonts(fonts); -}; - -const listInstalledFonts = async (): Promise => { - switch (process.platform) { - case 
'darwin': - return listInstalledFontsMac(); - case 'linux': - return listInstalledFontsLinux(); - case 'win32': - return listInstalledFontsWindows(); - default: - return []; - } -}; - -const readPackageVersion = async (packageJsonPath: string): Promise => { - try { - const packageJson = JSON.parse(await readFile(packageJsonPath, 'utf-8')); - if (packageJson.name === 'emdash' && packageJson.version) { - return packageJson.version; - } - } catch { - // Ignore missing or malformed package.json; try the next path. - } - return null; -}; - -const resolveAppVersion = async (): Promise => { - // In development, we need to look for package.json in the project root. - const isDev = !app.isPackaged || process.env.NODE_ENV === 'development'; - - const possiblePaths = isDev - ? [ - join(__dirname, '../../../../package.json'), // from dist/main/main/ipc in dev - join(__dirname, '../../../package.json'), // alternative dev path - join(process.cwd(), 'package.json'), // current working directory - ] - : [ - join(__dirname, '../../package.json'), // from dist/main/ipc in production - join(app.getAppPath(), 'package.json'), // production build - ]; - - for (const packageJsonPath of possiblePaths) { - const version = await readPackageVersion(packageJsonPath); - if (version) { - return version; - } - } - - // In dev, never use app.getVersion() as it returns Electron version. 
- if (isDev) { - return UNKNOWN_VERSION; - } - - try { - return app.getVersion(); - } catch (error) { - void error; - return UNKNOWN_VERSION; - } -}; - -const getCachedAppVersion = (): Promise => { - if (cachedAppVersion) { - return Promise.resolve(cachedAppVersion); - } - - if (!cachedAppVersionPromise) { - cachedAppVersionPromise = resolveAppVersion().then((version) => { - cachedAppVersion = version; - return version; - }); - } - - return cachedAppVersionPromise; -}; - -export function registerAppIpc() { - void getCachedAppVersion(); - - ipcMain.handle('app:undo', async (event) => { - try { - event.sender.undo(); - return { success: true }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - ipcMain.handle('app:redo', async (event) => { - try { - event.sender.redo(); - return { success: true }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - ipcMain.handle('app:openExternal', async (_event, url: string) => { - try { - if (!url || typeof url !== 'string') throw new Error('Invalid URL'); - - // Security: Validate URL protocol to prevent local file access and dangerous protocols - const ALLOWED_PROTOCOLS = ['http:', 'https:']; - let parsedUrl: URL; - - try { - parsedUrl = new URL(url); - } catch { - throw new Error('Invalid URL format'); - } - - if (!ALLOWED_PROTOCOLS.includes(parsedUrl.protocol)) { - throw new Error( - `Protocol "${parsedUrl.protocol}" is not allowed. Only http and https URLs are permitted.` - ); - } - - await shell.openExternal(url); - return { success: true }; - } catch (error) { - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - ipcMain.handle('app:clipboard-write-text', async (_event, text: string) => { - try { - if (typeof text !== 'string') throw new Error('Invalid clipboard text'); - clipboard.writeText(text); - return { success: true }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - ipcMain.handle('app:paste', async (event) => { - try { - const webContents = event.sender; - if (!webContents) { - return { success: false, error: 'No webContents available' }; - } - webContents.paste(); - return { success: true }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - ipcMain.handle( - 'app:openIn', - async ( - _event, - args: { - app: OpenInAppId; - path: string; - isRemote?: boolean; - sshConnectionId?: string | null; - } - ) => { - const target = args?.path; - const appId = args?.app; - const isRemote = args?.isRemote || false; - const sshConnectionId = args?.sshConnectionId; - - if (!target || typeof target !== 'string' || !appId) { - return { success: false, error: 'Invalid arguments' }; - } - try { - const platform = process.platform as PlatformKey; - const appConfig = getAppById(appId); - if (!appConfig) { - return { success: false, error: 'Invalid app ID' }; - } - - const platformConfig = appConfig.platforms?.[platform]; - const label = getResolvedLabel(appConfig, platform); - if (!platformConfig && !appConfig.alwaysAvailable) { - return { success: false, error: `${label} is not available on this platform.` }; - } - - // Handle remote SSH connections for supported editors and terminals - if (isRemote && sshConnectionId) { - try { - const connection = await databaseService.getSshConnection(sshConnectionId); - if (!connection) { - return { success: false, error: 'SSH connection not found' }; - } - - // Construct remote SSH URL or command based on the app - // Security: Escape all 
user-controlled values to prevent command injection - if (appId === 'vscode') { - // VS Code Remote SSH URL format: - // vscode://vscode-remote/ssh-remote+user%40hostname/path - const remoteUrl = buildRemoteEditorUrl( - 'vscode', - connection.host, - connection.username, - target - ); - await shell.openExternal(remoteUrl); - return { success: true }; - } else if (appId === 'cursor') { - // Cursor uses its own URL scheme for remote SSH - const remoteUrl = buildRemoteEditorUrl( - 'cursor', - connection.host, - connection.username, - target - ); - await shell.openExternal(remoteUrl); - return { success: true }; - } else if (appId === 'terminal' && platform === 'darwin') { - // macOS Terminal.app - execute SSH command - const sshCommand = buildRemoteSshCommand({ - host: connection.host, - username: connection.username, - port: connection.port, - targetPath: target, - }); - const escapedCommand = escapeAppleScriptString(sshCommand); - - await execFileCommand('osascript', [ - '-e', - `tell application "Terminal" to do script "${escapedCommand}"`, - '-e', - 'tell application "Terminal" to activate', - ]); - return { success: true }; - } else if (appId === 'iterm2' && platform === 'darwin') { - // iTerm2 - execute SSH command - const sshCommand = buildRemoteSshCommand({ - host: connection.host, - username: connection.username, - port: connection.port, - targetPath: target, - }); - const escapedCommand = escapeAppleScriptString(sshCommand); - - await execFileCommand('osascript', [ - '-e', - `tell application "iTerm" to create window with default profile command "${escapedCommand}"`, - '-e', - 'tell application "iTerm" to activate', - ]); - return { success: true }; - } else if (appId === 'warp' && platform === 'darwin') { - // Warp - use URL scheme with SSH command - const sshCommand = buildRemoteSshCommand({ - host: connection.host, - username: connection.username, - port: connection.port, - targetPath: target, - }); - await shell.openExternal( - 
`warp://action/new_window?cmd=${encodeURIComponent(sshCommand)}` - ); - return { success: true }; - } else if (appId === 'ghostty') { - // Ghostty - execute SSH command directly. - // Prefer remote login shell behavior for normal prompt/init scripts while - // keeping deterministic fallbacks when SHELL is missing or invalid. - // Compatibility note: many remote hosts don't ship xterm-ghostty terminfo. - // The argv builder falls back to TERM=xterm-256color only when current TERM - // isn't supported, keeping TUIs (e.g. ranger) working without always downgrading. - const ghosttyExecArgs = buildGhosttyRemoteExecArgs({ - host: connection.host, - username: connection.username, - port: connection.port, - targetPath: target, - }); - - const attempts = - platform === 'darwin' - ? [ - { - file: 'open', - args: [ - '-n', - '-b', - 'com.mitchellh.ghostty', - '--args', - '-e', - ...ghosttyExecArgs, - ], - }, - { - file: 'open', - args: ['-na', 'Ghostty', '--args', '-e', ...ghosttyExecArgs], - }, - { file: 'ghostty', args: ['-e', ...ghosttyExecArgs] }, - ] - : [{ file: 'ghostty', args: ['-e', ...ghosttyExecArgs] }]; - - let lastError: unknown = null; - for (const attempt of attempts) { - try { - await execFileCommand(attempt.file, attempt.args); - return { success: true }; - } catch (error) { - lastError = error; - } - } - - if (lastError instanceof Error) throw lastError; - throw new Error('Unable to launch Ghostty'); - } else if (appConfig.supportsRemote) { - // App claims to support remote but we don't have a handler - return { - success: false, - error: `Remote SSH not yet implemented for ${label}`, - }; - } - } catch (error) { - return { - success: false, - error: `Failed to open remote connection: ${error instanceof Error ? 
error.message : String(error)}`, - }; - } - } - - const quoted = (p: string) => `'${p.replace(/'/g, "'\\''")}'`; - - // Handle URL-based apps (like Warp) - if (platformConfig?.openUrls) { - for (const urlTemplate of platformConfig.openUrls) { - const url = urlTemplate - .replace('{{path_url}}', encodeURIComponent(target)) - .replace('{{path}}', target); - try { - await shell.openExternal(url); - return { success: true }; - } catch (error) { - void error; - } - } - return { - success: false, - error: `${label} is not installed or its URI scheme is not registered on this platform.`, - }; - } - - // Handle command-based apps - const commands = platformConfig?.openCommands || []; - let command = ''; - - if (commands.length > 0) { - command = commands - .map((cmd: string) => { - // Chain both replacements: first {{path}}, then {{path_raw}} - return cmd.replace('{{path}}', quoted(target)).replace('{{path_raw}}', target); - }) - .join(' || '); - } - - if (!command) { - return { success: false, error: 'Unsupported platform or app' }; - } - - if (appConfig.autoInstall) { - try { - const settings = getAppSettings(); - if (settings?.projectPrep?.autoInstallOnOpenInEditor) { - void ensureProjectPrepared(target).catch(() => {}); - } - } catch {} - } - - await new Promise((resolve, reject) => { - exec(command, { cwd: target, env: buildExternalToolEnv() }, (err) => { - if (err) return reject(err); - resolve(); - }); - }); - return { success: true }; - } catch (error) { - const appConfig = getAppById(appId); - const catchLabel = appConfig - ? 
getResolvedLabel(appConfig, process.platform as PlatformKey) - : appId; - return { success: false, error: `Unable to open in ${catchLabel}` }; - } - } - ); - - ipcMain.handle('app:checkInstalledApps', async () => { - const platform = process.platform as PlatformKey; - const availability: Record = {}; - - // Helper to check if a command exists - const checkCommand = (cmd: string): Promise => { - return new Promise((resolve) => { - exec(`command -v ${cmd} >/dev/null 2>&1`, { env: buildExternalToolEnv() }, (error) => { - resolve(!error); - }); - }); - }; - - // Helper to check if macOS app exists by bundle ID - const checkMacApp = (bundleId: string): Promise => { - return new Promise((resolve) => { - exec( - `mdfind "kMDItemCFBundleIdentifier == '${bundleId}'"`, - { env: buildExternalToolEnv() }, - (error, stdout) => { - resolve(!error && stdout.trim().length > 0); - } - ); - }); - }; - - // Helper to check if macOS app exists by name - const checkMacAppByName = (appName: string): Promise => { - return new Promise((resolve) => { - exec( - `osascript -e 'id of application "${appName}"' 2>/dev/null`, - { env: buildExternalToolEnv() }, - (error) => { - resolve(!error); - } - ); - }); - }; - - for (const app of OPEN_IN_APPS) { - // Skip apps that don't have platform-specific config - const platformConfig = app.platforms[platform]; - if (!platformConfig && !app.alwaysAvailable) { - availability[app.id] = false; - continue; - } - - // Always available apps are set to true by default - if (app.alwaysAvailable) { - availability[app.id] = true; - continue; - } - - try { - let isAvailable = false; - - // Check via bundle IDs (macOS) - if (platformConfig?.bundleIds) { - for (const bundleId of platformConfig.bundleIds) { - if (await checkMacApp(bundleId)) { - isAvailable = true; - break; - } - } - } - - // Check via app names (macOS) - if (!isAvailable && platformConfig?.appNames) { - for (const appName of platformConfig.appNames) { - if (await checkMacAppByName(appName)) { - 
isAvailable = true; - break; - } - } - } - - // Check via CLI commands (all platforms) - if (!isAvailable && platformConfig?.checkCommands) { - for (const cmd of platformConfig.checkCommands) { - if (await checkCommand(cmd)) { - isAvailable = true; - break; - } - } - } - - availability[app.id] = isAvailable; - } catch (error) { - console.error(`Error checking installed app ${app.id}:`, error); - availability[app.id] = false; - } - } - - return availability; - }); - - ipcMain.handle('app:listInstalledFonts', async (_event, args?: { refresh?: boolean }) => { - const refresh = Boolean(args?.refresh); - const now = Date.now(); - if ( - !refresh && - cachedInstalledFonts && - now - cachedInstalledFonts.fetchedAt < FONT_CACHE_TTL_MS - ) { - return { success: true, fonts: cachedInstalledFonts.fonts, cached: true }; - } - - try { - const fonts = await listInstalledFonts(); - cachedInstalledFonts = { fonts, fetchedAt: now }; - return { success: true, fonts, cached: false }; - } catch (error) { - return { - success: false, - error: error instanceof Error ? error.message : String(error), - fonts: cachedInstalledFonts?.fonts ?? 
[], - cached: Boolean(cachedInstalledFonts), - }; - } - }); - - // App metadata - ipcMain.handle('app:getAppVersion', () => getCachedAppVersion()); - ipcMain.handle('app:getElectronVersion', () => process.versions.electron); - ipcMain.handle('app:getPlatform', () => process.platform); -} diff --git a/src/main/ipc/browserIpc.ts b/src/main/ipc/browserIpc.ts deleted file mode 100644 index f215cc9e5..000000000 --- a/src/main/ipc/browserIpc.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { ipcMain } from 'electron'; -import { browserViewService } from '../services/browserViewService'; - -export function registerBrowserIpc() { - ipcMain.handle( - 'browser:view:show', - (_e, args: { x: number; y: number; width: number; height: number; url?: string }) => { - const { x, y, width, height, url } = args || ({} as any); - browserViewService.show({ x, y, width, height }, url); - return { ok: true }; - } - ); - ipcMain.handle('browser:view:hide', () => { - browserViewService.hide(); - return { ok: true }; - }); - ipcMain.handle( - 'browser:view:setBounds', - (_e, args: { x: number; y: number; width: number; height: number }) => { - const { x, y, width, height } = args || ({} as any); - browserViewService.setBounds({ x, y, width, height }); - return { ok: true }; - } - ); - ipcMain.handle('browser:view:loadURL', (_e, url: string, forceReload?: boolean) => { - browserViewService.loadURL(url, forceReload); - return { ok: true }; - }); - ipcMain.handle('browser:view:goBack', () => { - browserViewService.goBack(); - return { ok: true }; - }); - ipcMain.handle('browser:view:goForward', () => { - browserViewService.goForward(); - return { ok: true }; - }); - ipcMain.handle('browser:view:reload', () => { - browserViewService.reload(); - return { ok: true }; - }); - ipcMain.handle('browser:view:openDevTools', () => { - browserViewService.openDevTools(); - return { ok: true }; - }); - ipcMain.handle('browser:view:clear', () => { - browserViewService.clear(); - return { ok: true }; - }); -} diff 
--git a/src/main/ipc/connectionsIpc.ts b/src/main/ipc/connectionsIpc.ts deleted file mode 100644 index 376b1b554..000000000 --- a/src/main/ipc/connectionsIpc.ts +++ /dev/null @@ -1,83 +0,0 @@ -import { ipcMain } from 'electron'; -import { connectionsService } from '../services/ConnectionsService'; -import { - getProviderCustomConfig, - getAllProviderCustomConfigs, - updateProviderCustomConfig, - type ProviderCustomConfig, -} from '../settings'; - -export function registerConnectionsIpc() { - ipcMain.handle( - 'providers:getStatuses', - async (_event, opts?: { refresh?: boolean; providers?: string[]; providerId?: string }) => { - const providers = - Array.isArray(opts?.providers) && opts.providers.length > 0 - ? opts.providers - : opts?.providerId - ? [opts.providerId] - : null; - - try { - if (opts?.refresh) { - if (providers && providers.length > 0) { - for (const id of providers) { - await connectionsService.checkProvider(id, 'manual'); - } - } else { - await connectionsService.refreshAllProviderStatuses(); - } - } - const statuses = connectionsService.getCachedProviderStatuses(); - return { success: true, statuses }; - } catch (error) { - return { - success: false, - error: error instanceof Error ? error.message : 'Unknown error', - }; - } - } - ); - - // Get custom config for a specific provider - ipcMain.handle('providers:getCustomConfig', (_event, providerId: string) => { - try { - const config = getProviderCustomConfig(providerId); - return { success: true, config }; - } catch (error) { - return { - success: false, - error: error instanceof Error ? error.message : 'Unknown error', - }; - } - }); - - // Get all custom configs - ipcMain.handle('providers:getAllCustomConfigs', () => { - try { - const configs = getAllProviderCustomConfigs(); - return { success: true, configs }; - } catch (error) { - return { - success: false, - error: error instanceof Error ? 
error.message : 'Unknown error', - }; - } - }); - - // Update custom config for a specific provider - ipcMain.handle( - 'providers:updateCustomConfig', - (_event, providerId: string, config: ProviderCustomConfig | undefined) => { - try { - updateProviderCustomConfig(providerId, config); - return { success: true }; - } catch (error) { - return { - success: false, - error: error instanceof Error ? error.message : 'Unknown error', - }; - } - } - ); -} diff --git a/src/main/ipc/dbIpc.ts b/src/main/ipc/dbIpc.ts deleted file mode 100644 index 317bf943f..000000000 --- a/src/main/ipc/dbIpc.ts +++ /dev/null @@ -1,88 +0,0 @@ -import fs from 'node:fs'; -import path from 'node:path'; -import { databaseService } from '../services/DatabaseService'; -import type { Conversation, Message, Project, Task } from '../services/DatabaseService'; -import { createRPCController } from '../../shared/ipc/rpc'; -import { log } from '../lib/logger'; - -export const databaseController = createRPCController({ - getProjects: (): Promise => databaseService.getProjects(), - - saveProject: (project: Omit): Promise => - databaseService.saveProject(project), - - getTasks: (projectId?: string): Promise => databaseService.getTasks(projectId), - - saveTask: (task: Omit): Promise => - databaseService.saveTask(task), - - deleteProject: (projectId: string): Promise => databaseService.deleteProject(projectId), - - deleteTask: (taskId: string): Promise => databaseService.deleteTask(taskId), - - archiveTask: (taskId: string): Promise => databaseService.archiveTask(taskId), - - restoreTask: (taskId: string): Promise => databaseService.restoreTask(taskId), - - getArchivedTasks: (projectId?: string): Promise => - databaseService.getArchivedTasks(projectId), - - saveConversation: (conversation: Omit): Promise => - databaseService.saveConversation(conversation), - - getConversations: (taskId: string): Promise => - databaseService.getConversations(taskId), - - getOrCreateDefaultConversation: (taskId: string): Promise 
=> - databaseService.getOrCreateDefaultConversation(taskId), - - createConversation: (args: { - taskId: string; - title: string; - provider?: string; - isMain?: boolean; - }): Promise => - databaseService.createConversation(args.taskId, args.title, args.provider, args.isMain), - - deleteConversation: (conversationId: string): Promise => - databaseService.deleteConversation(conversationId), - - setActiveConversation: (args: { taskId: string; conversationId: string }): Promise => - databaseService.setActiveConversation(args.taskId, args.conversationId), - - getActiveConversation: (taskId: string): Promise => - databaseService.getActiveConversation(taskId), - - reorderConversations: (args: { taskId: string; conversationIds: string[] }): Promise => - databaseService.reorderConversations(args.taskId, args.conversationIds), - - updateConversationTitle: (args: { conversationId: string; title: string }): Promise => - databaseService.updateConversationTitle(args.conversationId, args.title), - - saveMessage: (message: Omit): Promise => - databaseService.saveMessage(message), - - getMessages: (conversationId: string): Promise => - databaseService.getMessages(conversationId), - - cleanupSessionDirectory: async (args: { - taskPath: string; - conversationId: string; - }): Promise => { - const sessionDir = path.join(args.taskPath, '.emdash-sessions', args.conversationId); - if (!fs.existsSync(sessionDir)) return; - - fs.rmSync(sessionDir, { recursive: true, force: true }); - log.info('Cleaned up session directory:', sessionDir); - - const parentDir = path.join(args.taskPath, '.emdash-sessions'); - try { - if (fs.readdirSync(parentDir).length === 0) { - fs.rmdirSync(parentDir); - log.info('Removed empty .emdash-sessions directory'); - } - } catch { - // Parent directory removal is best-effort - } - }, -}); diff --git a/src/main/ipc/debugIpc.ts b/src/main/ipc/debugIpc.ts deleted file mode 100644 index e8e32c154..000000000 --- a/src/main/ipc/debugIpc.ts +++ /dev/null @@ -1,24 +0,0 
@@ -import { ipcMain } from 'electron'; -import { dirname } from 'path'; -import * as fs from 'fs'; - -export function registerDebugIpc() { - ipcMain.handle( - 'debug:append-log', - async (_, filePath: string, content: string, options: { reset?: boolean } = {}) => { - try { - if (!filePath) throw new Error('filePath is required'); - - const dir = dirname(filePath); - await fs.promises.mkdir(dir, { recursive: true }); - - const flag = options.reset ? 'w' : 'a'; - await fs.promises.writeFile(filePath, content, { flag, encoding: 'utf8' }); - return { success: true }; - } catch (error) { - console.error('Failed to append debug log:', error); - return { success: false, error: error instanceof Error ? error.message : 'Unknown error' }; - } - } - ); -} diff --git a/src/main/ipc/gitIpc.ts b/src/main/ipc/gitIpc.ts deleted file mode 100644 index e588a51ea..000000000 --- a/src/main/ipc/gitIpc.ts +++ /dev/null @@ -1,2317 +0,0 @@ -import { BrowserWindow, ipcMain } from 'electron'; -import { log } from '../lib/logger'; -import { exec, execFile } from 'child_process'; -import { randomUUID } from 'node:crypto'; -import fs from 'node:fs'; -import path from 'node:path'; -import os from 'node:os'; -import { promisify } from 'util'; -import { - getStatus as gitGetStatus, - getFileDiff as gitGetFileDiff, - stageFile as gitStageFile, - stageAllFiles as gitStageAllFiles, - unstageFile as gitUnstageFile, - revertFile as gitRevertFile, - commit as gitCommit, - push as gitPush, - pull as gitPull, - getLog as gitGetLog, - getLatestCommit as gitGetLatestCommit, - getCommitFiles as gitGetCommitFiles, - getCommitFileDiff as gitGetCommitFileDiff, - softResetLastCommit as gitSoftResetLastCommit, -} from '../services/GitService'; -import { prGenerationService } from '../services/PrGenerationService'; -import { databaseService } from '../services/DatabaseService'; -import { injectIssueFooter } from '../lib/prIssueFooter'; -import { getCreatePrBodyPlan } from '../lib/prCreateBodyPlan'; -import { 
patchCurrentPrBodyWithIssueFooter } from '../lib/prIssueFooterPatch'; -import { resolveRemoteProjectForWorktreePath } from '../utils/remoteProjectResolver'; -import { RemoteGitService } from '../services/RemoteGitService'; -import { sshService } from '../services/ssh/SshService'; - -const remoteGitService = new RemoteGitService(sshService); - -const execAsync = promisify(exec); -const execFileAsync = promisify(execFile); - -const GIT_STATUS_DEBOUNCE_MS = 500; -const supportsRecursiveWatch = process.platform === 'darwin' || process.platform === 'win32'; - -type GitStatusWatchEntry = { - watcher: fs.FSWatcher; - watchIds: Set; - debounceTimer?: NodeJS.Timeout; -}; - -const gitStatusWatchers = new Map(); - -// Remote polling for SSH projects (replaces fs.watch) -const REMOTE_POLL_INTERVAL_MS = 5000; -type RemoteStatusPollEntry = { - intervalId: NodeJS.Timeout; - watchIds: Set; - lastStatusHash: string; - connectionId: string; -}; -const remoteStatusPollers = new Map(); - -const ensureRemoteStatusPoller = ( - taskPath: string, - connectionId: string -): { success: true; watchId: string } => { - const watchId = randomUUID(); - const existing = remoteStatusPollers.get(taskPath); - if (existing) { - existing.watchIds.add(watchId); - return { success: true, watchId }; - } - - const entry: RemoteStatusPollEntry = { - intervalId: setInterval(async () => { - try { - const changes = await remoteGitService.getStatusDetailed(connectionId, taskPath); - // Simple hash: join paths + statuses to detect changes - const hash = changes.map((c) => `${c.path}:${c.status}:${c.isStaged}`).join('|'); - const poller = remoteStatusPollers.get(taskPath); - if (!poller) return; - if (hash !== poller.lastStatusHash) { - poller.lastStatusHash = hash; - broadcastGitStatusChange(taskPath); - } - } catch { - // Connection may have dropped — don't crash, just skip this poll - } - }, REMOTE_POLL_INTERVAL_MS), - watchIds: new Set([watchId]), - lastStatusHash: '', - connectionId, - }; - 
remoteStatusPollers.set(taskPath, entry); - return { success: true, watchId }; -}; - -const releaseRemoteStatusPoller = (taskPath: string, watchId?: string) => { - const entry = remoteStatusPollers.get(taskPath); - if (!entry) return { success: true as const }; - if (watchId) { - entry.watchIds.delete(watchId); - } - if (entry.watchIds.size <= 0) { - clearInterval(entry.intervalId); - remoteStatusPollers.delete(taskPath); - } - return { success: true as const }; -}; - -/** - * Validate that a taskPath is an absolute path pointing to a real directory - * that is a git repository. Returns an error string if invalid, or null if OK. - */ -function validateTaskPath(taskPath: string | undefined): string | null { - if (!taskPath) return 'Missing taskPath'; - if (!path.isAbsolute(taskPath)) return 'taskPath must be absolute'; - try { - const stat = fs.statSync(taskPath); - if (!stat.isDirectory()) return 'taskPath is not a directory'; - } catch { - return 'taskPath does not exist'; - } - return null; -} - -const broadcastGitStatusChange = (taskPath: string, error?: string) => { - const windows = BrowserWindow.getAllWindows(); - windows.forEach((window) => { - try { - window.webContents.send('git:status-changed', { taskPath, error }); - } catch (err) { - log.debug('[git:watch-status] failed to send status change', err); - } - }); -}; - -const ensureGitStatusWatcher = (taskPath: string) => { - if (!supportsRecursiveWatch) { - return { success: false as const, error: 'recursive-watch-unsupported' }; - } - if (!taskPath || !fs.existsSync(taskPath)) { - return { success: false as const, error: 'workspace-unavailable' }; - } - const existing = gitStatusWatchers.get(taskPath); - const watchId = randomUUID(); - if (existing) { - existing.watchIds.add(watchId); - return { success: true as const, watchId }; - } - try { - const watcher = fs.watch(taskPath, { recursive: true }, () => { - const entry = gitStatusWatchers.get(taskPath); - if (!entry) return; - if (entry.debounceTimer) 
clearTimeout(entry.debounceTimer); - entry.debounceTimer = setTimeout(() => { - broadcastGitStatusChange(taskPath); - }, GIT_STATUS_DEBOUNCE_MS); - }); - watcher.on('error', (error) => { - log.warn('[git:watch-status] watcher error', error); - const entry = gitStatusWatchers.get(taskPath); - if (entry?.debounceTimer) clearTimeout(entry.debounceTimer); - try { - entry?.watcher.close(); - } catch {} - gitStatusWatchers.delete(taskPath); - broadcastGitStatusChange(taskPath, 'watcher-error'); - }); - gitStatusWatchers.set(taskPath, { watcher, watchIds: new Set([watchId]) }); - return { success: true as const, watchId }; - } catch (error) { - return { - success: false as const, - error: error instanceof Error ? error.message : 'Failed to watch workspace', - }; - } -}; - -const releaseGitStatusWatcher = (taskPath: string, watchId?: string) => { - const entry = gitStatusWatchers.get(taskPath); - if (!entry) return { success: true as const }; - if (watchId) { - entry.watchIds.delete(watchId); - } - if (entry.watchIds.size <= 0) { - if (entry.debounceTimer) clearTimeout(entry.debounceTimer); - entry.watcher.close(); - gitStatusWatchers.delete(taskPath); - } - return { success: true as const }; -}; - -export function registerGitIpc() { - function resolveGitBin(): string { - // Allow override via env - const fromEnv = (process.env.GIT_PATH || '').trim(); - const candidates = [ - fromEnv, - '/opt/homebrew/bin/git', - '/usr/local/bin/git', - '/usr/bin/git', - ].filter(Boolean) as string[]; - for (const p of candidates) { - try { - if (p && fs.existsSync(p)) return p; - } catch {} - } - // Last resort: try /usr/bin/env git - return 'git'; - } - const GIT = resolveGitBin(); - - // Helper: commit-and-push for remote SSH projects - async function commitAndPushRemote( - connectionId: string, - taskPath: string, - opts: { commitMessage: string; createBranchIfOnDefault: boolean; branchPrefix: string } - ): Promise<{ success: boolean; branch?: string; output?: string; error?: string }> 
{ - const { commitMessage, createBranchIfOnDefault, branchPrefix } = opts; - - // Verify git repo - const verifyResult = await remoteGitService.execGit( - connectionId, - taskPath, - 'rev-parse --is-inside-work-tree' - ); - if (verifyResult.exitCode !== 0) { - return { success: false, error: 'Not a git repository' }; - } - - let activeBranch = await remoteGitService.getCurrentBranch(connectionId, taskPath); - const defaultBranch = await remoteGitService.getDefaultBranchName(connectionId, taskPath); - - // Create feature branch if on default - if (createBranchIfOnDefault && (!activeBranch || activeBranch === defaultBranch)) { - const short = Date.now().toString(36); - const name = `${branchPrefix}/${short}`; - await remoteGitService.createBranch(connectionId, taskPath, name); - activeBranch = name; - } - - // Check for changes - const statusResult = await remoteGitService.execGit( - connectionId, - taskPath, - 'status --porcelain --untracked-files=all' - ); - const hasWorkingChanges = Boolean(statusResult.stdout?.trim()); - - // Read staged files - const readRemoteStagedFiles = async (): Promise => { - const r = await remoteGitService.execGit(connectionId, taskPath, 'diff --cached --name-only'); - return (r.stdout || '') - .split('\n') - .map((f) => f.trim()) - .filter(Boolean); - }; - - let stagedFiles = await readRemoteStagedFiles(); - - // Auto-stage if nothing staged yet - if (hasWorkingChanges && stagedFiles.length === 0) { - await remoteGitService.stageAllFiles(connectionId, taskPath); - } - - // Unstage plan mode artifacts - await remoteGitService.execGit(connectionId, taskPath, 'reset -q .emdash 2>/dev/null || true'); - await remoteGitService.execGit( - connectionId, - taskPath, - 'reset -q PLANNING.md 2>/dev/null || true' - ); - await remoteGitService.execGit( - connectionId, - taskPath, - 'reset -q planning.md 2>/dev/null || true' - ); - - stagedFiles = await readRemoteStagedFiles(); - - // Commit - if (stagedFiles.length > 0) { - const commitResult = 
await remoteGitService.commit(connectionId, taskPath, commitMessage); - if (commitResult.exitCode !== 0 && !/nothing to commit/i.test(commitResult.stderr || '')) { - return { success: false, error: commitResult.stderr || 'Commit failed' }; - } - } - - // Push - const pushResult = await remoteGitService.push(connectionId, taskPath); - if (pushResult.exitCode !== 0) { - const retryResult = await remoteGitService.push(connectionId, taskPath, activeBranch, true); - if (retryResult.exitCode !== 0) { - return { success: false, error: retryResult.stderr || 'Push failed' }; - } - } - - const finalStatus = await remoteGitService.execGit(connectionId, taskPath, 'status -sb'); - return { success: true, branch: activeBranch, output: (finalStatus.stdout || '').trim() }; - } - - // Helper: get PR status for remote SSH projects - async function getPrStatusRemote( - connectionId: string, - taskPath: string - ): Promise<{ success: boolean; pr?: any; error?: string }> { - const queryFields = [ - 'number', - 'url', - 'state', - 'isDraft', - 'mergeStateStatus', - 'headRefName', - 'baseRefName', - 'title', - 'author', - 'additions', - 'deletions', - 'changedFiles', - ]; - const fieldsStr = queryFields.join(','); - - const viewResult = await remoteGitService.execGh( - connectionId, - taskPath, - `pr view --json ${fieldsStr} -q .` - ); - let data = - viewResult.exitCode === 0 && viewResult.stdout.trim() - ? 
JSON.parse(viewResult.stdout.trim()) - : null; - - // Fallback: find by branch name - if (!data) { - const branch = await remoteGitService.getCurrentBranch(connectionId, taskPath); - if (branch) { - const listResult = await remoteGitService.execGh( - connectionId, - taskPath, - `pr list --head ${quoteGhArg(branch)} --json ${fieldsStr} --limit 1` - ); - if (listResult.exitCode === 0 && listResult.stdout.trim()) { - const listData = JSON.parse(listResult.stdout.trim()); - if (Array.isArray(listData) && listData.length > 0) data = listData[0]; - } - } - } - - if (!data) return { success: true, pr: null }; - - // Compute diff stats if missing - const asNumber = (v: any): number | null => - typeof v === 'number' && Number.isFinite(v) ? v : null; - if ( - asNumber(data.additions) === null || - asNumber(data.deletions) === null || - asNumber(data.changedFiles) === null - ) { - const baseRef = typeof data.baseRefName === 'string' ? data.baseRefName.trim() : ''; - const targetRef = baseRef ? `origin/${baseRef}` : ''; - const cmd = targetRef - ? `diff --shortstat ${quoteGhArg(targetRef)}...HEAD` - : 'diff --shortstat HEAD~1..HEAD'; - const diffResult = await remoteGitService.execGit(connectionId, taskPath, cmd); - if (diffResult.exitCode === 0) { - const m = (diffResult.stdout || '').match( - /(\d+)\s+files? 
changed(?:,\s+(\d+)\s+insertions?\(\+\))?(?:,\s+(\d+)\s+deletions?\(-\))?/ - ); - if (m) { - if (asNumber(data.changedFiles) === null && m[1]) data.changedFiles = parseInt(m[1], 10); - if (asNumber(data.additions) === null && m[2]) data.additions = parseInt(m[2], 10); - if (asNumber(data.deletions) === null && m[3]) data.deletions = parseInt(m[3], 10); - } - } - } - - return { success: true, pr: data }; - } - - // Helper: create PR for remote SSH projects - async function createPrRemote( - connectionId: string, - taskPath: string, - opts: { - title?: string; - body?: string; - base?: string; - head?: string; - draft?: boolean; - web?: boolean; - fill?: boolean; - } - ): Promise<{ success: boolean; url?: string; output?: string; error?: string; code?: string }> { - const { title, body, base, head, draft, web, fill } = opts; - const outputs: string[] = []; - - // Enrich body with issue footer - let prBody = body; - try { - const task = await databaseService.getTaskByPath(taskPath); - prBody = injectIssueFooter(body, task?.metadata); - } catch { - // Non-fatal - } - - const { - shouldPatchFilledBody, - shouldUseBodyFile: _unused, - shouldUseFill, - } = getCreatePrBodyPlan({ - fill, - title, - rawBody: body, - enrichedBody: prBody, - }); - - // Stage and commit pending changes - const statusResult = await remoteGitService.execGit( - connectionId, - taskPath, - 'status --porcelain --untracked-files=all' - ); - if (statusResult.stdout?.trim()) { - await remoteGitService.stageAllFiles(connectionId, taskPath); - const commitResult = await remoteGitService.commit( - connectionId, - taskPath, - 'stagehand: prepare pull request' - ); - if (commitResult.exitCode !== 0 && !/nothing to commit/i.test(commitResult.stderr || '')) { - outputs.push(commitResult.stderr || ''); - } - } - - // Push branch - const pushResult = await remoteGitService.push(connectionId, taskPath); - if (pushResult.exitCode !== 0) { - const branch = await remoteGitService.getCurrentBranch(connectionId, 
taskPath); - const retryResult = await remoteGitService.push(connectionId, taskPath, branch, true); - if (retryResult.exitCode !== 0) { - return { - success: false, - error: - 'Failed to push branch to origin. Please check your Git remotes and authentication.', - }; - } - } - outputs.push('git push: success'); - - // Resolve branches - const currentBranch = await remoteGitService.getCurrentBranch(connectionId, taskPath); - const defaultBranch = await remoteGitService.getDefaultBranchName(connectionId, taskPath); - - // Validate commits ahead - const baseRef = base || defaultBranch; - const aheadResult = await remoteGitService.execGit( - connectionId, - taskPath, - `rev-list --count origin/${quoteGhArg(baseRef)}..HEAD` - ); - const aheadCount = parseInt((aheadResult.stdout || '0').trim(), 10) || 0; - if (aheadCount <= 0) { - return { - success: false, - error: `No commits to create a PR. Make a commit on current branch '${currentBranch}' ahead of base '${baseRef}'.`, - }; - } - - // Build gh pr create command - const flags: string[] = []; - if (title) flags.push(`--title ${quoteGhArg(title)}`); - // Can't use --body-file on remote, use --body instead - if (prBody && !shouldUseFill) flags.push(`--body ${quoteGhArg(prBody)}`); - flags.push(`--base ${quoteGhArg(baseRef)}`); - if (head) { - flags.push(`--head ${quoteGhArg(head)}`); - } else if (currentBranch) { - flags.push(`--head ${quoteGhArg(currentBranch)}`); - } - if (draft) flags.push('--draft'); - if (web) flags.push('--web'); - if (shouldUseFill) flags.push('--fill'); - - const createResult = await remoteGitService.execGh( - connectionId, - taskPath, - `pr create ${flags.join(' ')}` - ); - - const combined = [createResult.stdout, createResult.stderr].filter(Boolean).join('\n').trim(); - const urlMatch = combined.match(/https?:\/\/\S+/); - const url = urlMatch ? 
urlMatch[0] : null; - - if (createResult.exitCode !== 0) { - const restrictionRe = - /Auth App access restrictions|authorized OAuth apps|third-parties is limited/i; - const prExistsRe = /already exists|already has.*pull request|pull request for branch/i; - let code: string | undefined; - if (restrictionRe.test(combined)) code = 'ORG_AUTH_APP_RESTRICTED'; - else if (prExistsRe.test(combined)) code = 'PR_ALREADY_EXISTS'; - return { success: false, error: combined, output: combined, code }; - } - - // Patch body if needed - if (shouldPatchFilledBody && url) { - try { - const task = await databaseService.getTaskByPath(taskPath); - if (task?.metadata) { - const editBody = injectIssueFooter(undefined, task.metadata); - if (editBody) { - await remoteGitService.execGh( - connectionId, - taskPath, - `pr edit --body ${quoteGhArg(editBody)}` - ); - } - } - } catch { - // Non-fatal - } - } - - const out = [...outputs, combined].filter(Boolean).join('\n'); - return { success: true, url: url || undefined, output: out }; - } - - // Helper: merge-to-main for remote SSH projects - async function mergeToMainRemote( - connectionId: string, - taskPath: string - ): Promise<{ success: boolean; prUrl?: string; error?: string }> { - const currentBranch = await remoteGitService.getCurrentBranch(connectionId, taskPath); - const defaultBranch = await remoteGitService.getDefaultBranchName(connectionId, taskPath); - - if (!currentBranch) { - return { success: false, error: 'Not on a branch (detached HEAD state).' }; - } - if (currentBranch === defaultBranch) { - return { - success: false, - error: `Already on ${defaultBranch}. 
Create a feature branch first.`, - }; - } - - // Stage and commit pending changes - const statusResult = await remoteGitService.execGit( - connectionId, - taskPath, - 'status --porcelain --untracked-files=all' - ); - if (statusResult.stdout?.trim()) { - await remoteGitService.stageAllFiles(connectionId, taskPath); - const commitResult = await remoteGitService.commit( - connectionId, - taskPath, - 'chore: prepare for merge to main' - ); - if (commitResult.exitCode !== 0 && !/nothing to commit/i.test(commitResult.stderr || '')) { - throw new Error(commitResult.stderr || 'Commit failed'); - } - } - - // Push - const pushResult = await remoteGitService.push(connectionId, taskPath); - if (pushResult.exitCode !== 0) { - const retryResult = await remoteGitService.push(connectionId, taskPath, currentBranch, true); - if (retryResult.exitCode !== 0) { - throw new Error(retryResult.stderr || 'Push failed'); - } - } - - // Create PR - let prUrl = ''; - const createResult = await remoteGitService.execGh( - connectionId, - taskPath, - `pr create --fill --base ${quoteGhArg(defaultBranch)}` - ); - const urlMatch = (createResult.stdout || '').match(/https?:\/\/\S+/); - prUrl = urlMatch ? 
urlMatch[0] : ''; - - if (createResult.exitCode !== 0) { - if (!/already exists|already has.*pull request/i.test(createResult.stderr || '')) { - return { success: false, error: `Failed to create PR: ${createResult.stderr}` }; - } - } - - // Patch PR body with issue footer - try { - const task = await databaseService.getTaskByPath(taskPath); - if (task?.metadata) { - const footer = injectIssueFooter(undefined, task.metadata); - if (footer) { - await remoteGitService.execGh( - connectionId, - taskPath, - `pr edit --body ${quoteGhArg(footer)}` - ); - } - } - } catch { - // Non-fatal - } - - // Merge - const mergeResult = await remoteGitService.execGh(connectionId, taskPath, 'pr merge --merge'); - if (mergeResult.exitCode !== 0) { - return { - success: false, - error: `PR created but merge failed: ${mergeResult.stderr}`, - prUrl, - }; - } - return { success: true, prUrl }; - } - - // Helper: escape arguments for gh CLI commands run over SSH - function quoteGhArg(arg: string): string { - // Use the same POSIX single-quote wrapping as quoteShellArg for consistency - return `'${arg.replace(/'/g, "'\\''")}'`; - } - - ipcMain.handle('git:watch-status', async (_, taskPath: string) => { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - return ensureRemoteStatusPoller(taskPath, remoteProject.sshConnectionId); - } - return ensureGitStatusWatcher(taskPath); - }); - - ipcMain.handle('git:unwatch-status', async (_, taskPath: string, watchId?: string) => { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - return releaseRemoteStatusPoller(taskPath, watchId); - } - return releaseGitStatusWatcher(taskPath, watchId); - }); - - // Git: Status (moved from Codex IPC) - ipcMain.handle('git:get-status', async (_, taskPath: string) => { - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - const changes = await 
remoteGitService.getStatusDetailed( - remoteProject.sshConnectionId, - taskPath - ); - return { success: true, changes }; - } - const changes = await gitGetStatus(taskPath); - return { success: true, changes }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - // Git: Per-file diff (moved from Codex IPC) - ipcMain.handle('git:get-file-diff', async (_, args: { taskPath: string; filePath: string }) => { - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(args.taskPath); - if (remoteProject) { - const diff = await remoteGitService.getFileDiff( - remoteProject.sshConnectionId, - args.taskPath, - args.filePath - ); - return { success: true, diff }; - } - const diff = await gitGetFileDiff(args.taskPath, args.filePath); - return { success: true, diff }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - // Git: Stage file - ipcMain.handle('git:stage-file', async (_, args: { taskPath: string; filePath: string }) => { - try { - log.info('Staging file:', { taskPath: args.taskPath, filePath: args.filePath }); - const remoteProject = await resolveRemoteProjectForWorktreePath(args.taskPath); - if (remoteProject) { - await remoteGitService.stageFile( - remoteProject.sshConnectionId, - args.taskPath, - args.filePath - ); - } else { - await gitStageFile(args.taskPath, args.filePath); - } - log.info('File staged successfully:', args.filePath); - return { success: true }; - } catch (error) { - log.error('Failed to stage file:', { filePath: args.filePath, error }); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - // Git: Stage all files - ipcMain.handle('git:stage-all-files', async (_, args: { taskPath: string }) => { - try { - log.info('Staging all files:', { taskPath: args.taskPath }); - const remoteProject = await resolveRemoteProjectForWorktreePath(args.taskPath); - if (remoteProject) { - await remoteGitService.stageAllFiles(remoteProject.sshConnectionId, args.taskPath); - } else { - await gitStageAllFiles(args.taskPath); - } - log.info('All files staged successfully'); - return { success: true }; - } catch (error) { - log.error('Failed to stage all files:', { taskPath: args.taskPath, error }); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - // Git: Unstage file - ipcMain.handle('git:unstage-file', async (_, args: { taskPath: string; filePath: string }) => { - try { - log.info('Unstaging file:', { taskPath: args.taskPath, filePath: args.filePath }); - const remoteProject = await resolveRemoteProjectForWorktreePath(args.taskPath); - if (remoteProject) { - await remoteGitService.unstageFile( - remoteProject.sshConnectionId, - args.taskPath, - args.filePath - ); - } else { - await gitUnstageFile(args.taskPath, args.filePath); - } - log.info('File unstaged successfully:', args.filePath); - return { success: true }; - } catch (error) { - log.error('Failed to unstage file:', { filePath: args.filePath, error }); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - // Git: Revert file - ipcMain.handle('git:revert-file', async (_, args: { taskPath: string; filePath: string }) => { - try { - log.info('Reverting file:', { taskPath: args.taskPath, filePath: args.filePath }); - const remoteProject = await resolveRemoteProjectForWorktreePath(args.taskPath); - let result: { action: string }; - if (remoteProject) { - result = await remoteGitService.revertFile( - remoteProject.sshConnectionId, - args.taskPath, - args.filePath - ); - } else { - result = await gitRevertFile(args.taskPath, args.filePath); - } - log.info('File operation completed:', { filePath: args.filePath, action: result.action }); - return { success: true, action: result.action }; - } catch (error) { - log.error('Failed to revert file:', { filePath: args.filePath, error }); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - // Git: Generate PR title and description - ipcMain.handle( - 'git:generate-pr-content', - async ( - _, - args: { - taskPath: string; - base?: string; - } - ) => { - const { taskPath, base = 'main' } = args || ({} as { taskPath: string; base?: string }); - try { - // For remote projects, PR content generation still runs locally — it just needs - // the diff text. The prGenerationService can get diff data via the now-remote-aware - // git:get-status and git:get-file-diff handlers, or we pass the taskPath which the - // service uses with local git commands. For remote, we get the diff over SSH and - // pass it to the generation service. 
- const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - const connId = remoteProject.sshConnectionId; - // Get diff text over SSH - const diffResult = await remoteGitService.execGit( - connId, - taskPath, - `diff --stat origin/${quoteGhArg(base)}...HEAD` - ); - const logResult = await remoteGitService.execGit( - connId, - taskPath, - `log --oneline origin/${quoteGhArg(base)}..HEAD` - ); - const diffText = (diffResult.stdout || '').trim(); - const logText = (logResult.stdout || '').trim(); - // Use simple title/description generation from diff summary - const lines = logText.split('\n').filter((l) => l.trim()); - const generatedTitle = lines.length === 1 ? lines[0].replace(/^[a-f0-9]+ /, '') : ''; - return { - success: true, - title: generatedTitle, - description: diffText ? `## Changes\n\n\`\`\`\n${diffText}\n\`\`\`` : '', - }; - } - - // Try to get the task to find which provider was used - let providerId: string | null = null; - try { - const task = await databaseService.getTaskByPath(taskPath); - if (task?.agentId) { - providerId = task.agentId; - log.debug('Found task provider for PR generation', { taskPath, providerId }); - } - } catch (error) { - log.debug('Could not lookup task provider', { error }); - // Non-fatal - continue without provider - } - - const result = await prGenerationService.generatePrContent(taskPath, base, providerId); - return { success: true, ...result }; - } catch (error) { - log.error('Failed to generate PR content:', error); - return { - success: false, - error: error instanceof Error ? 
error.message : String(error), - }; - } - } - ); - - // Git: Create Pull Request via GitHub CLI - ipcMain.handle( - 'git:create-pr', - async ( - _, - args: { - taskPath: string; - title?: string; - body?: string; - base?: string; - head?: string; - draft?: boolean; - web?: boolean; - fill?: boolean; - } - ) => { - const { taskPath, title, body, base, head, draft, web, fill } = - args || - ({} as { - taskPath: string; - title?: string; - body?: string; - base?: string; - head?: string; - draft?: boolean; - web?: boolean; - fill?: boolean; - }); - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - return await createPrRemote(remoteProject.sshConnectionId, taskPath, { - title, - body, - base, - head, - draft, - web, - fill, - }); - } - - const outputs: string[] = []; - let taskMetadata: unknown = undefined; - let prBody = body; - try { - const task = await databaseService.getTaskByPath(taskPath); - taskMetadata = task?.metadata; - prBody = injectIssueFooter(body, task?.metadata); - } catch (error) { - log.debug('Unable to enrich PR body with issue footer', { taskPath, error }); - } - const { shouldPatchFilledBody, shouldUseBodyFile, shouldUseFill } = getCreatePrBodyPlan({ - fill, - title, - rawBody: body, - enrichedBody: prBody, - }); - - // Stage and commit any pending changes - try { - const { stdout: statusOut } = await execAsync( - 'git status --porcelain --untracked-files=all', - { - cwd: taskPath, - } - ); - if (statusOut && statusOut.trim().length > 0) { - const { stdout: addOut, stderr: addErr } = await execAsync('git add -A', { - cwd: taskPath, - }); - if (addOut?.trim()) outputs.push(addOut.trim()); - if (addErr?.trim()) outputs.push(addErr.trim()); - - const commitMsg = 'stagehand: prepare pull request'; - try { - const { stdout: commitOut, stderr: commitErr } = await execAsync( - `git commit -m ${JSON.stringify(commitMsg)}`, - { cwd: taskPath } - ); - if (commitOut?.trim()) 
outputs.push(commitOut.trim()); - if (commitErr?.trim()) outputs.push(commitErr.trim()); - } catch (commitErr) { - const msg = commitErr instanceof Error ? commitErr.message : String(commitErr); - if (msg && /nothing to commit/i.test(msg)) { - outputs.push('git commit: nothing to commit'); - } else { - throw commitErr; - } - } - } - } catch (stageErr) { - log.warn('Failed to stage/commit changes before PR:', stageErr as string); - // Continue; PR may still be created for existing commits - } - - // Ensure branch is pushed to origin so PR includes latest commit - try { - await execAsync('git push', { cwd: taskPath }); - outputs.push('git push: success'); - } catch (pushErr) { - try { - const { stdout: branchOut } = await execAsync('git rev-parse --abbrev-ref HEAD', { - cwd: taskPath, - }); - const branch = branchOut.trim(); - await execAsync(`git push --set-upstream origin ${JSON.stringify(branch)}`, { - cwd: taskPath, - }); - outputs.push(`git push --set-upstream origin ${branch}: success`); - } catch (pushErr2) { - log.error('Failed to push branch before PR:', pushErr2 as string); - return { - success: false, - error: - 'Failed to push branch to origin. 
Please check your Git remotes and authentication.', - }; - } - } - - // Determine current branch and default base branch (fallback to main) - let currentBranch = ''; - try { - const { stdout } = await execAsync('git branch --show-current', { cwd: taskPath }); - currentBranch = (stdout || '').trim(); - } catch {} - let defaultBranch = 'main'; - try { - const { stdout } = await execAsync( - 'gh repo view --json defaultBranchRef -q .defaultBranchRef.name', - { cwd: taskPath } - ); - const db = (stdout || '').trim(); - if (db) defaultBranch = db; - } catch { - try { - const { stdout } = await execAsync( - 'git remote show origin | sed -n "/HEAD branch/s/.*: //p"', - { cwd: taskPath } - ); - const db2 = (stdout || '').trim(); - if (db2) defaultBranch = db2; - } catch {} - } - - // Guard: ensure there is at least one commit ahead of base - try { - const baseRef = base || defaultBranch; - const { stdout: aheadOut } = await execAsync( - `git rev-list --count ${JSON.stringify(`origin/${baseRef}`)}..HEAD`, - { cwd: taskPath } - ); - const aheadCount = parseInt((aheadOut || '0').trim(), 10) || 0; - if (aheadCount <= 0) { - return { - success: false, - error: `No commits to create a PR. 
Make a commit on -current branch '${currentBranch}' ahead of base '${baseRef}'.`, - }; - } - } catch { - // Non-fatal; continue - } - - // Build gh pr create command - const flags: string[] = []; - if (title) flags.push(`--title ${JSON.stringify(title)}`); - - // Use temp file for body to properly handle newlines and multiline content - let bodyFile: string | null = null; - if (shouldUseBodyFile && prBody) { - try { - bodyFile = path.join( - os.tmpdir(), - `gh-pr-body-${Date.now()}-${Math.random().toString(36).substring(7)}.txt` - ); - // Write body with actual newlines preserved - fs.writeFileSync(bodyFile, prBody, 'utf8'); - flags.push(`--body-file ${JSON.stringify(bodyFile)}`); - } catch (writeError) { - log.warn('Failed to write body to temp file, falling back to --body flag', { - writeError, - }); - // Fallback to direct --body flag if temp file creation fails - flags.push(`--body ${JSON.stringify(prBody)}`); - } - } - - if (base || defaultBranch) flags.push(`--base ${JSON.stringify(base || defaultBranch)}`); - if (head) { - flags.push(`--head ${JSON.stringify(head)}`); - } else if (currentBranch) { - flags.push(`--head ${JSON.stringify(currentBranch)}`); - } - if (draft) flags.push('--draft'); - if (web) flags.push('--web'); - if (shouldUseFill) flags.push('--fill'); - - const cmd = `gh pr create ${flags.join(' ')}`.trim(); - - let stdout: string; - let stderr: string; - try { - const result = await execAsync(cmd, { cwd: taskPath }); - stdout = result.stdout || ''; - stderr = result.stderr || ''; - } finally { - // Clean up temp file if it was created - if (bodyFile && fs.existsSync(bodyFile)) { - try { - fs.unlinkSync(bodyFile); - } catch (unlinkError) { - log.debug('Failed to delete temp body file', { bodyFile, unlinkError }); - } - } - } - const out = [...outputs, (stdout || '').trim() || (stderr || '').trim()] - .filter(Boolean) - .join('\n'); - - // Try to extract PR URL from output - const urlMatch = out.match(/https?:\/\/\S+/); - const url = urlMatch ? 
urlMatch[0] : null; - - if (shouldPatchFilledBody) { - try { - const didPatchBody = await patchCurrentPrBodyWithIssueFooter({ - taskPath, - metadata: taskMetadata, - execFile: execFileAsync, - prUrl: url, - }); - if (didPatchBody) { - outputs.push('gh pr edit --body-file: success'); - } - } catch (editError) { - log.warn('Failed to patch PR body with issue footer after --fill create', { - taskPath, - editError, - }); - } - } - - return { success: true, url, output: out }; - } catch (error: any) { - // Capture rich error info from gh/child_process - const errMsg = typeof error?.message === 'string' ? error.message : String(error); - const errStdout = typeof error?.stdout === 'string' ? error.stdout : ''; - const errStderr = typeof error?.stderr === 'string' ? error.stderr : ''; - const combined = [errMsg, errStdout, errStderr].filter(Boolean).join('\n').trim(); - - // Check for various error conditions - const restrictionRe = - /Auth App access restrictions|authorized OAuth apps|third-parties is limited/i; - const prExistsRe = /already exists|already has.*pull request|pull request for branch/i; - - let code: string | undefined; - if (restrictionRe.test(combined)) { - code = 'ORG_AUTH_APP_RESTRICTED'; - log.warn('GitHub org restrictions detected during PR creation'); - } else if (prExistsRe.test(combined)) { - code = 'PR_ALREADY_EXISTS'; - log.info('PR already exists for branch - push was successful'); - } else { - log.error('Failed to create PR:', combined || error); - } - - return { - success: false, - error: combined || errMsg || 'Failed to create PR', - output: combined, - code, - } as any; - } - } - ); - - // Git: Get PR status for current branch via GitHub CLI - ipcMain.handle('git:get-pr-status', async (_, args: { taskPath: string }) => { - const { taskPath } = args || ({} as { taskPath: string }); - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - return await 
getPrStatusRemote(remoteProject.sshConnectionId, taskPath); - } - - // Ensure we're in a git repo - await execAsync('git rev-parse --is-inside-work-tree', { cwd: taskPath }); - - const queryFields = [ - 'number', - 'url', - 'state', - 'isDraft', - 'mergeStateStatus', - 'headRefName', - 'baseRefName', - 'title', - 'author', - 'additions', - 'deletions', - 'changedFiles', - ]; - const cmd = `gh pr view --json ${queryFields.join(',')} -q .`; - try { - const { stdout } = await execAsync(cmd, { cwd: taskPath }); - const json = (stdout || '').trim(); - let data = json ? JSON.parse(json) : null; - - // Fallback: If gh pr view didn't find a PR (e.g. detached head, upstream not set, or fresh branch), - // try finding it by branch name via gh pr list. - if (!data) { - try { - const { stdout: branchOut } = await execAsync('git branch --show-current', { - cwd: taskPath, - }); - const currentBranch = branchOut.trim(); - if (currentBranch) { - const listCmd = `gh pr list --head ${JSON.stringify(currentBranch)} --json ${queryFields.join(',')} --limit 1`; - const { stdout: listOut } = await execAsync(listCmd, { cwd: taskPath }); - const listJson = (listOut || '').trim(); - const listData = listJson ? JSON.parse(listJson) : []; - if (listData.length > 0) { - data = listData[0]; - } - } - } catch (fallbackErr) { - log.warn('Failed to fallback to gh pr list:', fallbackErr); - // Ignore fallback errors and return original null/error - } - } - - if (!data) return { success: false, error: 'No PR data returned' }; - - // Fallback: if GH CLI didn't return diff stats, try to compute locally - const asNumber = (v: any): number | null => - typeof v === 'number' && Number.isFinite(v) - ? v - : typeof v === 'string' && Number.isFinite(Number.parseInt(v, 10)) - ? 
Number.parseInt(v, 10) - : null; - - const hasAdd = asNumber(data?.additions) !== null; - const hasDel = asNumber(data?.deletions) !== null; - const hasFiles = asNumber(data?.changedFiles) !== null; - - if (!hasAdd || !hasDel || !hasFiles) { - const baseRef = typeof data?.baseRefName === 'string' ? data.baseRefName.trim() : ''; - const targetRef = baseRef ? `origin/${baseRef}` : ''; - const shortstatCmd = targetRef - ? `git diff --shortstat ${JSON.stringify(targetRef)}...HEAD` - : 'git diff --shortstat HEAD~1..HEAD'; - try { - const { stdout: diffOut } = await execAsync(shortstatCmd, { cwd: taskPath }); - const statLine = (diffOut || '').trim(); - const m = - statLine && - statLine.match( - /(\d+)\s+files? changed(?:,\s+(\d+)\s+insertions?\(\+\))?(?:,\s+(\d+)\s+deletions?\(-\))?/ - ); - if (m) { - const [, filesStr, addStr, delStr] = m; - if (!hasFiles && filesStr) data.changedFiles = Number.parseInt(filesStr, 10); - if (!hasAdd && addStr) data.additions = Number.parseInt(addStr, 10); - if (!hasDel && delStr) data.deletions = Number.parseInt(delStr, 10); - } - } catch { - // best-effort only; ignore failures - } - } - - return { success: true, pr: data }; - } catch (err) { - const msg = String(err as string); - if (/no pull requests? found/i.test(msg) || /not found/i.test(msg)) { - return { success: true, pr: null }; - } - return { success: false, error: msg || 'Failed to query PR status' }; - } - } catch (error) { - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - // Git: Merge PR via GitHub CLI - ipcMain.handle( - 'git:merge-pr', - async ( - _, - args: { - taskPath: string; - prNumber?: number; - strategy?: 'merge' | 'squash' | 'rebase'; - admin?: boolean; - } - ) => { - const { - taskPath, - prNumber, - strategy = 'merge', - admin = false, - } = (args || {}) as { - taskPath: string; - prNumber?: number; - strategy?: 'merge' | 'squash' | 'rebase'; - admin?: boolean; - }; - - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - const strategyFlag = - strategy === 'squash' ? '--squash' : strategy === 'rebase' ? '--rebase' : '--merge'; - const ghArgs = ['pr', 'merge']; - if (typeof prNumber === 'number' && Number.isFinite(prNumber)) - ghArgs.push(String(prNumber)); - ghArgs.push(strategyFlag); - if (admin) ghArgs.push('--admin'); - const result = await remoteGitService.execGh( - remoteProject.sshConnectionId, - taskPath, - ghArgs.join(' ') - ); - if (result.exitCode !== 0) { - const msg = (result.stderr || '') + (result.stdout || ''); - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - return { success: false, error: msg || 'Failed to merge PR' }; - } - return { - success: true, - output: [result.stdout, result.stderr].filter(Boolean).join('\n').trim(), - }; - } - - await execFileAsync(GIT, ['rev-parse', '--is-inside-work-tree'], { cwd: taskPath }); - - const strategyFlag = - strategy === 'squash' ? '--squash' : strategy === 'rebase' ? 
'--rebase' : '--merge'; - - const ghArgs = ['pr', 'merge']; - if (typeof prNumber === 'number' && Number.isFinite(prNumber)) { - ghArgs.push(String(prNumber)); - } - ghArgs.push(strategyFlag); - if (admin) ghArgs.push('--admin'); - - try { - const { stdout, stderr } = await execFileAsync('gh', ghArgs, { cwd: taskPath }); - const output = [stdout, stderr].filter(Boolean).join('\n').trim(); - return { success: true, output }; - } catch (err) { - const msg = String(err as string); - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - const stderr = (err as any)?.stderr; - const stdout = (err as any)?.stdout; - const combined = [stderr, stdout, msg].filter(Boolean).join('\n').trim(); - return { success: false, error: combined || 'Failed to merge PR' }; - } - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - // Git: Get CI/CD check runs for current branch via GitHub CLI - ipcMain.handle('git:get-check-runs', async (_, args: { taskPath: string }) => { - const { taskPath } = args || ({} as { taskPath: string }); - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - const connId = remoteProject.sshConnectionId; - const fields = 'bucket,completedAt,description,event,link,name,startedAt,state,workflow'; - const checksResult = await remoteGitService.execGh( - connId, - taskPath, - `pr checks --json ${fields}` - ); - if (checksResult.exitCode !== 0) { - const msg = checksResult.stderr || ''; - if (/no pull requests? found/i.test(msg) || /not found/i.test(msg)) { - return { success: true, checks: null }; - } - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - return { success: false, error: msg || 'Failed to query check runs' }; - } - const checks = checksResult.stdout.trim() ? 
JSON.parse(checksResult.stdout.trim()) : []; - - // Fetch html_url from API - try { - const shaResult = await remoteGitService.execGh( - connId, - taskPath, - "pr view --json headRefOid --jq '.headRefOid'" - ); - const sha = shaResult.stdout.trim(); - if (sha) { - const apiResult = await remoteGitService.execGh( - connId, - taskPath, - `api repos/{owner}/{repo}/commits/${sha}/check-runs --jq '.check_runs | map({name: .name, html_url: .html_url}) | .[]'` - ); - const urlMap = new Map(); - for (const line of apiResult.stdout.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.name && entry.html_url) urlMap.set(entry.name, entry.html_url); - } catch {} - } - for (const check of checks) { - const htmlUrl = urlMap.get(check.name); - if (htmlUrl) check.link = htmlUrl; - } - } - } catch { - // Fall back to original link values - } - - return { success: true, checks }; - } - - await execFileAsync(GIT, ['rev-parse', '--is-inside-work-tree'], { cwd: taskPath }); - - const fields = 'bucket,completedAt,description,event,link,name,startedAt,state,workflow'; - try { - const { stdout } = await execFileAsync('gh', ['pr', 'checks', '--json', fields], { - cwd: taskPath, - }); - const json = (stdout || '').trim(); - const checks = json ? JSON.parse(json) : []; - - // Fetch html_url from the GitHub API instead, which always points to the - // actual check run page on GitHub. 
- try { - const { stdout: shaOut } = await execFileAsync( - 'gh', - ['pr', 'view', '--json', 'headRefOid', '--jq', '.headRefOid'], - { cwd: taskPath } - ); - const sha = shaOut.trim(); - if (sha) { - const { stdout: apiOut } = await execFileAsync( - 'gh', - [ - 'api', - `repos/{owner}/{repo}/commits/${sha}/check-runs`, - '--jq', - '.check_runs | map({name: .name, html_url: .html_url}) | .[]', - ], - { cwd: taskPath } - ); - const urlMap = new Map(); - for (const line of apiOut.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.name && entry.html_url) urlMap.set(entry.name, entry.html_url); - } catch {} - } - for (const check of checks) { - const htmlUrl = urlMap.get(check.name); - if (htmlUrl) check.link = htmlUrl; - } - } - } catch { - // Fall back to original link values if API call fails - } - - return { success: true, checks }; - } catch (err) { - const msg = String(err as string); - if (/no pull requests? found/i.test(msg) || /not found/i.test(msg)) { - return { success: true, checks: null }; - } - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - return { success: false, error: msg || 'Failed to query check runs' }; - } - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - }); - - // Git: Get PR comments and reviews via GitHub CLI - ipcMain.handle( - 'git:get-pr-comments', - async (_, args: { taskPath: string; prNumber?: number }) => { - const { taskPath, prNumber } = args || ({} as { taskPath: string; prNumber?: number }); - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - const connId = remoteProject.sshConnectionId; - const ghViewArgs = prNumber - ? 
`pr view ${prNumber} --json comments,reviews,number` - : 'pr view --json comments,reviews,number'; - const viewResult = await remoteGitService.execGh(connId, taskPath, ghViewArgs); - if (viewResult.exitCode !== 0) { - const msg = viewResult.stderr || ''; - if (/no pull requests? found/i.test(msg) || /not found/i.test(msg)) { - return { success: true, comments: [], reviews: [] }; - } - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - return { success: false, error: msg || 'Failed to query PR comments' }; - } - const data = viewResult.stdout.trim() - ? JSON.parse(viewResult.stdout.trim()) - : { comments: [], reviews: [], number: 0 }; - const comments = data.comments || []; - const reviews = data.reviews || []; - - // Fetch avatar URLs via REST API - if (data.number) { - try { - const avatarMap = new Map(); - const setAvatar = (login: string, url: string) => { - avatarMap.set(login, url); - if (login.endsWith('[bot]')) avatarMap.set(login.replace(/\[bot]$/, ''), url); - }; - - const commentsApi = await remoteGitService.execGh( - connId, - taskPath, - `api repos/{owner}/{repo}/issues/${data.number}/comments --jq '.[] | {login: .user.login, avatar_url: .user.avatar_url}'` - ); - for (const line of commentsApi.stdout.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.login && entry.avatar_url) setAvatar(entry.login, entry.avatar_url); - } catch {} - } - - const reviewsApi = await remoteGitService.execGh( - connId, - taskPath, - `api repos/{owner}/{repo}/pulls/${data.number}/reviews --jq '.[] | {login: .user.login, avatar_url: .user.avatar_url}'` - ); - for (const line of reviewsApi.stdout.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.login && entry.avatar_url) setAvatar(entry.login, entry.avatar_url); - } catch {} - } - - for (const c of [...comments, ...reviews]) { - if (c.author?.login) { - 
const avatarUrl = avatarMap.get(c.author.login); - if (avatarUrl) c.author.avatarUrl = avatarUrl; - } - } - } catch { - // Fall back to no avatar URLs - } - } - - return { success: true, comments, reviews }; - } - - await execFileAsync(GIT, ['rev-parse', '--is-inside-work-tree'], { cwd: taskPath }); - - try { - const ghArgs = ['pr', 'view']; - if (prNumber) ghArgs.push(String(prNumber)); - ghArgs.push('--json', 'comments,reviews,number'); - - const { stdout } = await execFileAsync('gh', ghArgs, { cwd: taskPath }); - const json = (stdout || '').trim(); - const data = json ? JSON.parse(json) : { comments: [], reviews: [], number: 0 }; - - const comments = data.comments || []; - const reviews = data.reviews || []; - - // gh pr view doesn't return avatarUrl for authors. - // Fetch from the REST API which includes avatar_url (works for GitHub Apps too). - if (data.number) { - try { - const avatarMap = new Map(); - - const { stdout: commentsApi } = await execFileAsync( - 'gh', - [ - 'api', - `repos/{owner}/{repo}/issues/${data.number}/comments`, - '--jq', - '.[] | {login: .user.login, avatar_url: .user.avatar_url}', - ], - { cwd: taskPath } - ); - const setAvatar = (login: string, url: string) => { - avatarMap.set(login, url); - // REST API returns "app[bot]" while gh CLI returns "app" — store both - if (login.endsWith('[bot]')) avatarMap.set(login.replace(/\[bot]$/, ''), url); - }; - - for (const line of commentsApi.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.login && entry.avatar_url) setAvatar(entry.login, entry.avatar_url); - } catch {} - } - - const { stdout: reviewsApi } = await execFileAsync( - 'gh', - [ - 'api', - `repos/{owner}/{repo}/pulls/${data.number}/reviews`, - '--jq', - '.[] | {login: .user.login, avatar_url: .user.avatar_url}', - ], - { cwd: taskPath } - ); - for (const line of reviewsApi.trim().split('\n')) { - if (!line) continue; - try { - const entry = JSON.parse(line); - if (entry.login && 
entry.avatar_url) setAvatar(entry.login, entry.avatar_url); - } catch {} - } - - for (const c of [...comments, ...reviews]) { - if (c.author?.login) { - const avatarUrl = avatarMap.get(c.author.login); - if (avatarUrl) c.author.avatarUrl = avatarUrl; - } - } - } catch { - // Fall back to no avatar URLs — renderer will use GitHub fallback - } - } - - return { success: true, comments, reviews }; - } catch (err) { - const msg = String(err as string); - if (/no pull requests? found/i.test(msg) || /not found/i.test(msg)) { - return { success: true, comments: [], reviews: [] }; - } - if (/not installed|command not found/i.test(msg)) { - return { success: false, error: msg, code: 'GH_CLI_UNAVAILABLE' }; - } - return { success: false, error: msg || 'Failed to query PR comments' }; - } - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - // Git: Commit all changes and push current branch (create feature branch if on default) - ipcMain.handle( - 'git:commit-and-push', - async ( - _, - args: { - taskPath: string; - commitMessage?: string; - createBranchIfOnDefault?: boolean; - branchPrefix?: string; - } - ) => { - const { - taskPath, - commitMessage = 'chore: apply task changes', - createBranchIfOnDefault = true, - branchPrefix = 'orch', - } = (args || - ({} as { - taskPath: string; - commitMessage?: string; - createBranchIfOnDefault?: boolean; - branchPrefix?: string; - })) as { - taskPath: string; - commitMessage?: string; - createBranchIfOnDefault?: boolean; - branchPrefix?: string; - }; - - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - - if (remoteProject) { - return await commitAndPushRemote(remoteProject.sshConnectionId, taskPath, { - commitMessage, - createBranchIfOnDefault, - branchPrefix, - }); - } - - // Ensure we're in a git repo - await execAsync('git rev-parse --is-inside-work-tree', { cwd: taskPath }); - - // Determine current branch - const { 
stdout: currentBranchOut } = await execAsync('git branch --show-current', { - cwd: taskPath, - }); - const currentBranch = (currentBranchOut || '').trim(); - - // Determine default branch via gh, fallback to main/master - let defaultBranch = 'main'; - try { - const { stdout } = await execAsync( - 'gh repo view --json defaultBranchRef -q .defaultBranchRef.name', - { cwd: taskPath } - ); - const db = (stdout || '').trim(); - if (db) defaultBranch = db; - } catch { - try { - const { stdout } = await execAsync( - 'git remote show origin | sed -n "/HEAD branch/s/.*: //p"', - { cwd: taskPath } - ); - const db2 = (stdout || '').trim(); - if (db2) defaultBranch = db2; - } catch {} - } - - // Optionally create a new branch if on default - let activeBranch = currentBranch; - if (createBranchIfOnDefault && (!currentBranch || currentBranch === defaultBranch)) { - const short = Date.now().toString(36); - const name = `${branchPrefix}/${short}`; - await execAsync(`git checkout -b ${JSON.stringify(name)}`, { cwd: taskPath }); - activeBranch = name; - } - - // Stage (only if needed) and commit - try { - const { stdout: st } = await execAsync('git status --porcelain --untracked-files=all', { - cwd: taskPath, - }); - const hasWorkingChanges = Boolean(st && st.trim().length > 0); - - const readStagedFiles = async () => { - try { - const { stdout } = await execAsync('git diff --cached --name-only', { - cwd: taskPath, - }); - return (stdout || '') - .split('\n') - .map((f) => f.trim()) - .filter(Boolean); - } catch { - return []; - } - }; - - let stagedFiles = await readStagedFiles(); - - // Only auto-stage everything when nothing is staged yet (preserves manual staging choices) - if (hasWorkingChanges && stagedFiles.length === 0) { - await execAsync('git add -A', { cwd: taskPath }); - } - - // Never commit plan mode artifacts - try { - await execAsync('git reset -q .emdash || true', { cwd: taskPath }); - } catch {} - try { - await execAsync('git reset -q PLANNING.md || true', { cwd: 
taskPath }); - } catch {} - try { - await execAsync('git reset -q planning.md || true', { cwd: taskPath }); - } catch {} - - stagedFiles = await readStagedFiles(); - - if (stagedFiles.length > 0) { - try { - await execAsync(`git commit -m ${JSON.stringify(commitMessage)}`, { - cwd: taskPath, - }); - } catch (commitErr) { - const msg = commitErr instanceof Error ? commitErr.message : String(commitErr); - if (!/nothing to commit/i.test(msg)) throw commitErr; - } - } - } catch (e) { - log.warn('Stage/commit step issue:', e instanceof Error ? e.message : String(e)); - throw e; - } - - // Push current branch (set upstream if needed) - try { - await execAsync('git push', { cwd: taskPath }); - } catch (pushErr) { - await execAsync(`git push --set-upstream origin ${JSON.stringify(activeBranch)}`, { - cwd: taskPath, - }); - } - - const { stdout: out } = await execAsync('git status -sb', { cwd: taskPath }); - return { success: true, branch: activeBranch, output: (out || '').trim() }; - } catch (error) { - log.error('Failed to commit and push:', error); - const errObj = error as { stderr?: string; message?: string }; - const errMsg = errObj?.stderr?.trim() || errObj?.message || String(error); - return { success: false, error: errMsg }; - } - } - ); - - // Git: Get branch status (current branch, default branch, ahead/behind counts) - ipcMain.handle('git:get-branch-status', async (_, args: { taskPath: string }) => { - const { taskPath } = args || ({} as { taskPath: string }); - - if (!taskPath) { - return { success: false, error: 'Path does not exist' }; - } - - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - try { - const status = await remoteGitService.getBranchStatus( - remoteProject.sshConnectionId, - taskPath - ); - return { success: true, ...status }; - } catch (error) { - log.error(`getBranchStatus (remote): error for ${taskPath}:`, error); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - } - - // Early exit for missing/invalid local path - if (!fs.existsSync(taskPath)) { - log.warn(`getBranchStatus: path does not exist: ${taskPath}`); - return { success: false, error: 'Path does not exist' }; - } - - // Check if it's a git repo - expected to fail often for non-git paths - try { - await execFileAsync(GIT, ['rev-parse', '--is-inside-work-tree'], { cwd: taskPath }); - } catch { - log.warn(`getBranchStatus: not a git repository: ${taskPath}`); - return { success: false, error: 'Not a git repository' }; - } - - try { - // Current branch - const { stdout: currentBranchOut } = await execFileAsync(GIT, ['branch', '--show-current'], { - cwd: taskPath, - }); - const branch = (currentBranchOut || '').trim(); - - // Determine default branch - let defaultBranch = 'main'; - try { - const { stdout } = await execFileAsync( - 'gh', - ['repo', 'view', '--json', 'defaultBranchRef', '-q', '.defaultBranchRef.name'], - { cwd: taskPath } - ); - const db = (stdout || '').trim(); - if (db) defaultBranch = db; - } catch { - try { - // Use symbolic-ref to resolve origin/HEAD then take the last path part - const { stdout } = await execFileAsync( - GIT, - ['symbolic-ref', '--short', 'refs/remotes/origin/HEAD'], - { cwd: taskPath } - ); - const line = (stdout || '').trim(); - const last = line.split('/').pop(); - if (last) defaultBranch = last; - } catch {} - } - - // Ahead/behind relative to upstream tracking branch - let ahead = 0; - let behind = 0; - try { - // Best case: compare against the upstream tracking branch (@{upstream}) - const { stdout } = await execFileAsync( - GIT, - ['rev-list', '--left-right', '--count', '@{upstream}...HEAD'], - { cwd: taskPath } - ); - const parts = (stdout || '').trim().split(/\s+/); - if (parts.length >= 2) { - behind = parseInt(parts[0] || '0', 10) || 0; - ahead = parseInt(parts[1] || '0', 10) || 0; - } - } catch { - try { - // Fallback: compare against origin/ - const { stdout } = await 
execFileAsync( - GIT, - ['rev-list', '--left-right', '--count', `origin/${branch}...HEAD`], - { cwd: taskPath } - ); - const parts = (stdout || '').trim().split(/\s+/); - if (parts.length >= 2) { - behind = parseInt(parts[0] || '0', 10) || 0; - ahead = parseInt(parts[1] || '0', 10) || 0; - } - } catch { - // No upstream — use git status as last resort - try { - const { stdout } = await execFileAsync(GIT, ['status', '-sb'], { cwd: taskPath }); - const line = (stdout || '').split(/\n/)[0] || ''; - const m = line.match(/ahead\s+(\d+)/i); - const n = line.match(/behind\s+(\d+)/i); - if (m) ahead = parseInt(m[1] || '0', 10) || 0; - if (n) behind = parseInt(n[1] || '0', 10) || 0; - } catch {} - } - } - - // Count commits ahead of origin/ (for PR visibility) - let aheadOfDefault = 0; - if (branch !== defaultBranch) { - try { - const { stdout: countOut } = await execFileAsync( - GIT, - ['rev-list', '--count', `origin/${defaultBranch}..HEAD`], - { cwd: taskPath } - ); - aheadOfDefault = parseInt(countOut.trim(), 10) || 0; - } catch { - // origin/ may not exist - } - } - - return { success: true, branch, defaultBranch, ahead, behind, aheadOfDefault }; - } catch (error) { - log.error(`getBranchStatus: unexpected error for ${taskPath}:`, error); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - ipcMain.handle( - 'git:list-remote-branches', - async (_, args: { projectPath: string; remote?: string }) => { - const { projectPath, remote = 'origin' } = args || ({} as { projectPath: string }); - if (!projectPath) { - return { success: false, error: 'projectPath is required' }; - } - - const remoteProject = await resolveRemoteProjectForWorktreePath(projectPath); - if (remoteProject) { - try { - const branches = await remoteGitService.listBranches( - remoteProject.sshConnectionId, - projectPath, - remote - ); - return { success: true, branches }; - } catch (error) { - log.error('Failed to list branches (remote):', error); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - - try { - await execAsync('git rev-parse --is-inside-work-tree', { cwd: projectPath }); - } catch { - return { success: false, error: 'Not a git repository' }; - } - - try { - // Check if remote exists before attempting to fetch - let hasRemote = false; - try { - await execFileAsync('git', ['remote', 'get-url', remote], { cwd: projectPath }); - hasRemote = true; - // Remote exists, try to fetch - try { - await execFileAsync('git', ['fetch', '--prune', remote], { cwd: projectPath }); - } catch (fetchError) { - log.warn('Failed to fetch remote before listing branches', fetchError); - } - } catch { - // Remote doesn't exist, skip fetch and will use local branches instead - log.debug(`Remote '${remote}' not found, will use local branches`); - } - - let branches: Array<{ ref: string; remote: string; branch: string; label: string }> = []; - - if (hasRemote) { - // List remote branches - const { stdout } = await execFileAsync( - 'git', - ['for-each-ref', '--format=%(refname:short)', `refs/remotes/${remote}`], - { cwd: projectPath } - ); - - branches = - stdout - ?.split('\n') - .map((line) => line.trim()) - .filter((line) => line.length > 0) - .filter((line) => !line.endsWith('/HEAD')) - .map((ref) => { - 
const [remoteAlias, ...rest] = ref.split('/'); - const branch = rest.join('/') || ref; - return { - ref, - remote: remoteAlias || remote, - branch, - label: `${remoteAlias || remote}/${branch}`, - }; - }) ?? []; - - // Also include local-only branches (not on remote) - try { - const { stdout: localStdout } = await execAsync( - 'git for-each-ref --format="%(refname:short)" refs/heads/', - { cwd: projectPath } - ); - - const remoteBranchNames = new Set(branches.map((b) => b.branch)); - - const localOnlyBranches = - localStdout - ?.split('\n') - .map((line) => line.trim()) - .filter((line) => line.length > 0) - .filter((branch) => !remoteBranchNames.has(branch)) - .map((branch) => ({ - ref: branch, - remote: '', - branch, - label: branch, - })) ?? []; - - branches = [...branches, ...localOnlyBranches]; - } catch (localBranchError) { - log.warn('Failed to list local branches', localBranchError); - } - } else { - // No remote - list local branches instead - try { - const { stdout } = await execAsync( - 'git for-each-ref --format="%(refname:short)" refs/heads/', - { cwd: projectPath } - ); - - branches = - stdout - ?.split('\n') - .map((line) => line.trim()) - .filter((line) => line.length > 0) - .map((branch) => ({ - ref: branch, - remote: '', // No remote - branch, - label: branch, // Just the branch name, no remote prefix - })) ?? []; - } catch (localBranchError) { - log.warn('Failed to list local branches', localBranchError); - } - } - - return { success: true, branches }; - } catch (error) { - log.error('Failed to list branches:', error); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - } - ); - - // Git: Merge current branch to main via GitHub (create PR + merge immediately) - ipcMain.handle('git:merge-to-main', async (_, args: { taskPath: string }) => { - const { taskPath } = args || ({} as { taskPath: string }); - - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(taskPath); - if (remoteProject) { - return await mergeToMainRemote(remoteProject.sshConnectionId, taskPath); - } - - // Get current and default branch names - const { stdout: currentOut } = await execAsync('git branch --show-current', { - cwd: taskPath, - }); - const currentBranch = (currentOut || '').trim(); - - let defaultBranch = 'main'; - try { - const { stdout } = await execAsync( - 'gh repo view --json defaultBranchRef -q .defaultBranchRef.name', - { cwd: taskPath } - ); - if (stdout?.trim()) defaultBranch = stdout.trim(); - } catch { - // gh not available or not a GitHub repo - fall back to 'main' - } - - // Validate: on a valid feature branch - if (!currentBranch) { - return { success: false, error: 'Not on a branch (detached HEAD state).' }; - } - if (currentBranch === defaultBranch) { - return { - success: false, - error: `Already on ${defaultBranch}. 
Create a feature branch first.`, - }; - } - - // Stage and commit any pending changes - const { stdout: statusOut } = await execAsync( - 'git status --porcelain --untracked-files=all', - { cwd: taskPath } - ); - if (statusOut?.trim()) { - await execAsync('git add -A', { cwd: taskPath }); - try { - await execAsync('git commit -m "chore: prepare for merge to main"', { cwd: taskPath }); - } catch (e) { - const msg = String(e); - if (!/nothing to commit/i.test(msg)) throw e; - } - } - - // Push branch (set upstream if needed) - try { - await execAsync('git push', { cwd: taskPath }); - } catch { - // No upstream set - push with -u - await execAsync(`git push --set-upstream origin ${JSON.stringify(currentBranch)}`, { - cwd: taskPath, - }); - } - - // Create PR (or use existing) - let prUrl = ''; - let prExists = false; - let taskMetadata: unknown = undefined; - try { - const task = await databaseService.getTaskByPath(taskPath); - taskMetadata = task?.metadata; - } catch (metadataError) { - log.debug('Unable to load task metadata for merge-to-main issue footer', { - taskPath, - metadataError, - }); - } - try { - const prCreateArgs = ['pr', 'create', '--fill', '--base', defaultBranch]; - const { stdout: prOut } = await execFileAsync('gh', prCreateArgs, { cwd: taskPath }); - const urlMatch = prOut?.match(/https?:\/\/\S+/); - prUrl = urlMatch ? 
urlMatch[0] : ''; - prExists = true; - } catch (e) { - const errMsg = (e as { stderr?: string })?.stderr || String(e); - if (!/already exists|already has.*pull request/i.test(errMsg)) { - return { success: false, error: `Failed to create PR: ${errMsg}` }; - } - // PR already exists - continue to merge - prExists = true; - } - - if (prExists) { - try { - await patchCurrentPrBodyWithIssueFooter({ - taskPath, - metadata: taskMetadata, - execFile: execFileAsync, - prUrl, - }); - } catch (editError) { - log.warn('Failed to patch merge-to-main PR body with issue footer', { - taskPath, - editError, - }); - } - } - - // Merge PR (branch cleanup happens when workspace is deleted) - try { - await execAsync('gh pr merge --merge', { cwd: taskPath }); - return { success: true, prUrl }; - } catch (e) { - const errMsg = (e as { stderr?: string })?.stderr || String(e); - return { success: false, error: `PR created but merge failed: ${errMsg}`, prUrl }; - } - } catch (e) { - log.error('Failed to merge to main:', e); - return { success: false, error: (e as { message?: string })?.message || String(e) }; - } - }); - - // Git: Rename branch (local and optionally remote) - ipcMain.handle( - 'git:rename-branch', - async ( - _, - args: { - repoPath: string; - oldBranch: string; - newBranch: string; - } - ) => { - const { repoPath, oldBranch, newBranch } = args; - try { - log.info('Renaming branch:', { repoPath, oldBranch, newBranch }); - - const remoteProject = await resolveRemoteProjectForWorktreePath(repoPath); - if (remoteProject) { - const result = await remoteGitService.renameBranch( - remoteProject.sshConnectionId, - repoPath, - oldBranch, - newBranch - ); - return { success: true, remotePushed: result.remotePushed }; - } - - // Check remote tracking BEFORE rename (git branch -m renames config section) - let remotePushed = false; - let remoteName = 'origin'; - try { - const { stdout: remoteOut } = await execFileAsync( - GIT, - ['config', '--get', `branch.${oldBranch}.remote`], - { 
cwd: repoPath } - ); - if (remoteOut?.trim()) { - remoteName = remoteOut.trim(); - remotePushed = true; - } - } catch { - // Branch wasn't tracking a remote, check if it exists on origin - try { - const { stdout: lsRemote } = await execFileAsync( - GIT, - ['ls-remote', '--heads', 'origin', oldBranch], - { cwd: repoPath } - ); - if (lsRemote?.trim()) { - remotePushed = true; - } - } catch { - // No remote branch - } - } - - // Rename local branch - await execFileAsync(GIT, ['branch', '-m', oldBranch, newBranch], { cwd: repoPath }); - log.info('Local branch renamed successfully'); - - // If pushed to remote, delete old and push new - if (remotePushed) { - log.info('Branch was pushed to remote, updating remote...'); - try { - // Delete old remote branch - await execFileAsync(GIT, ['push', remoteName, '--delete', oldBranch], { - cwd: repoPath, - }); - log.info('Deleted old remote branch'); - } catch (deleteErr) { - // Remote branch might not exist or already deleted - log.warn('Could not delete old remote branch (may not exist):', deleteErr); - } - - // Push new branch and set upstream - await execFileAsync(GIT, ['push', '-u', remoteName, newBranch], { cwd: repoPath }); - log.info('Pushed new branch to remote'); - } - - return { success: true, remotePushed }; - } catch (error) { - log.error('Failed to rename branch:', error); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - ipcMain.handle('git:commit', async (_, args: { taskPath: string; message: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const result = await gitCommit(args.taskPath, args.message); - broadcastGitStatusChange(args.taskPath); - return { success: true, hash: result.hash }; - } catch (error) { - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - ipcMain.handle('git:push', async (_, args: { taskPath: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const result = await gitPush(args.taskPath); - return { success: true, output: result.output }; - } catch (error) { - const errObj = error as { stderr?: string; message?: string }; - return { success: false, error: errObj?.stderr?.trim() || errObj?.message || String(error) }; - } - }); - - ipcMain.handle('git:pull', async (_, args: { taskPath: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const result = await gitPull(args.taskPath); - return { success: true, output: result.output }; - } catch (error) { - const errObj = error as { stderr?: string; message?: string }; - return { success: false, error: errObj?.stderr?.trim() || errObj?.message || String(error) }; - } - }); - - ipcMain.handle( - 'git:get-log', - async ( - _, - args: { taskPath: string; maxCount?: number; skip?: number; aheadCount?: number } - ) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const result = await gitGetLog(args.taskPath, args.maxCount, args.skip, args.aheadCount); - return { success: true, commits: result.commits, aheadCount: result.aheadCount }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - ipcMain.handle('git:get-latest-commit', async (_, args: { taskPath: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const commit = await gitGetLatestCommit(args.taskPath); - return { success: true, commit }; - } catch (error) { - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - ipcMain.handle( - 'git:get-commit-files', - async (_, args: { taskPath: string; commitHash: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - if (!/^[0-9a-f]{4,40}$/i.test(args.commitHash)) { - return { success: false, error: 'Invalid commit hash' }; - } - const files = await gitGetCommitFiles(args.taskPath, args.commitHash); - return { success: true, files }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - ipcMain.handle( - 'git:get-commit-file-diff', - async (_, args: { taskPath: string; commitHash: string; filePath: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - if (!/^[0-9a-f]{4,40}$/i.test(args.commitHash)) { - return { success: false, error: 'Invalid commit hash' }; - } - // filePath is validated by path.resolve check in GitService.getCommitFileDiff - const diff = await gitGetCommitFileDiff(args.taskPath, args.commitHash, args.filePath); - return { success: true, diff }; - } catch (error) { - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - ipcMain.handle('git:soft-reset', async (_, args: { taskPath: string }) => { - try { - const pathErr = validateTaskPath(args.taskPath); - if (pathErr) return { success: false, error: pathErr }; - const result = await gitSoftResetLastCommit(args.taskPath); - broadcastGitStatusChange(args.taskPath); - return { success: true, subject: result.subject, body: result.body }; - } catch (error) { - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); -} diff --git a/src/main/ipc/githubIpc.ts b/src/main/ipc/githubIpc.ts deleted file mode 100644 index dd91494a2..000000000 --- a/src/main/ipc/githubIpc.ts +++ /dev/null @@ -1,517 +0,0 @@ -import { ipcMain, app } from 'electron'; -import { log } from '../lib/logger'; -import { GitHubService } from '../services/GitHubService'; -import { worktreeService } from '../services/WorktreeService'; -import { githubCLIInstaller } from '../services/GitHubCLIInstaller'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import * as path from 'path'; -import * as fs from 'fs'; -import { homedir } from 'os'; -import { quoteShellArg } from '../utils/shellEscape'; - -const execAsync = promisify(exec); -const githubService = new GitHubService(); - -const slugify = (name: string) => - name - .toLowerCase() - .replace(/[^a-z0-9-]/g, '-') - .replace(/-+/g, '-') - .replace(/^-|-$/g, ''); - -export function registerGithubIpc() { - ipcMain.handle('github:connect', async (_, projectPath: string) => { - try { - // Check if GitHub CLI is authenticated - const isAuth = await githubService.isAuthenticated(); - if (!isAuth) { - return { success: false, error: 'GitHub CLI not authenticated' }; - } - - // Get repository info from GitHub CLI - try { - const { stdout } = await execAsync( - 'gh repo view --json name,nameWithOwner,defaultBranchRef', - { cwd: projectPath } - ); - const repoInfo = JSON.parse(stdout); - - return { - success: true, - repository: repoInfo.nameWithOwner, - branch: repoInfo.defaultBranchRef?.name || 'main', - }; - } catch (error) { - return { - success: false, - error: 'Repository not found on GitHub or not connected to GitHub CLI', - }; - } - } catch (error) { - log.error('Failed to connect to GitHub:', error); - return { success: false, error: 'Failed to connect to GitHub' }; - } - }); - - // Start Device Flow authentication with automatic background polling - ipcMain.handle('github:auth', async () => { - 
try { - return await githubService.startDeviceFlowAuth(); - } catch (error) { - log.error('GitHub authentication failed:', error); - return { success: false, error: 'Authentication failed' }; - } - }); - - // Cancel ongoing authentication - ipcMain.handle('github:auth:cancel', async () => { - try { - githubService.cancelAuth(); - return { success: true }; - } catch (error) { - log.error('Failed to cancel GitHub auth:', error); - return { success: false, error: 'Failed to cancel' }; - } - }); - - ipcMain.handle('github:isAuthenticated', async () => { - try { - return await githubService.isAuthenticated(); - } catch (error) { - log.error('GitHub authentication check failed:', error); - return false; - } - }); - - // GitHub status: installed + authenticated + user - ipcMain.handle('github:getStatus', async () => { - try { - let installed = true; - try { - await execAsync('gh --version'); - } catch { - installed = false; - } - - let authenticated = false; - let user: any = null; - if (installed) { - try { - const { stdout } = await execAsync('gh api user'); - user = JSON.parse(stdout); - authenticated = true; - } catch { - authenticated = false; - user = null; - } - } - - return { installed, authenticated, user }; - } catch (error) { - log.error('GitHub status check failed:', error); - return { installed: false, authenticated: false }; - } - }); - - ipcMain.handle('github:getUser', async () => { - try { - const token = await (githubService as any)['getStoredToken'](); - if (!token) return null; - return await githubService.getUserInfo(token); - } catch (error) { - log.error('Failed to get user info:', error); - return null; - } - }); - - ipcMain.handle('github:getRepositories', async () => { - try { - const token = await (githubService as any)['getStoredToken'](); - if (!token) throw new Error('Not authenticated'); - return await githubService.getRepositories(token); - } catch (error) { - log.error('Failed to get repositories:', error); - return []; - } - }); - - 
ipcMain.handle('github:cloneRepository', async (_, repoUrl: string, localPath: string) => { - const q = (s: string) => JSON.stringify(s); - try { - // Opt-out flag for safety or debugging - if (process.env.EMDASH_DISABLE_CLONE_CACHE === '1') { - await execAsync(`git clone ${q(repoUrl)} ${q(localPath)}`); - return { success: true }; - } - - // Ensure parent directory exists - const dir = path.dirname(localPath); - if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }); - - // If already a git repo, short‑circuit - try { - if (fs.existsSync(path.join(localPath, '.git'))) return { success: true }; - } catch {} - - // Use a local bare mirror cache keyed by normalized URL - const cacheRoot = path.join(app.getPath('userData'), 'repo-cache'); - if (!fs.existsSync(cacheRoot)) fs.mkdirSync(cacheRoot, { recursive: true }); - const norm = (u: string) => u.replace(/\.git$/i, '').trim(); - const cacheKey = require('crypto').createHash('sha1').update(norm(repoUrl)).digest('hex'); - const mirrorPath = path.join(cacheRoot, `${cacheKey}.mirror`); - - if (!fs.existsSync(mirrorPath)) { - await execAsync(`git clone --mirror --filter=blob:none ${q(repoUrl)} ${q(mirrorPath)}`); - } else { - try { - await execAsync(`git -C ${q(mirrorPath)} remote set-url origin ${q(repoUrl)}`); - } catch {} - await execAsync(`git -C ${q(mirrorPath)} remote update --prune`); - } - - await execAsync( - `git clone --reference-if-able ${q(mirrorPath)} --dissociate ${q(repoUrl)} ${q(localPath)}` - ); - return { success: true }; - } catch (error) { - log.error('Failed to clone repository via cache:', error); - try { - await execAsync(`git clone ${q(repoUrl)} ${q(localPath)}`); - return { success: true }; - } catch (e2) { - return { success: false, error: e2 instanceof Error ? 
e2.message : 'Clone failed' }; - } - } - }); - - ipcMain.handle('github:logout', async () => { - try { - await githubService.logout(); - return { success: true }; - } catch (error) { - log.error('Failed to logout:', error); - return { success: false, error: error instanceof Error ? error.message : 'Logout failed' }; - } - }); - - // GitHub issues: list/search/get for the repository at projectPath - ipcMain.handle('github:issues:list', async (_e, projectPath: string, limit?: number) => { - if (!projectPath) return { success: false, error: 'Project path is required' }; - try { - const issues = await githubService.listIssues(projectPath, limit ?? 50); - return { success: true, issues }; - } catch (error) { - const message = error instanceof Error ? error.message : 'Unable to list issues'; - return { success: false, error: message }; - } - }); - - ipcMain.handle( - 'github:issues:search', - async (_e, projectPath: string, searchTerm: string, limit?: number) => { - if (!projectPath) return { success: false, error: 'Project path is required' }; - if (!searchTerm || typeof searchTerm !== 'string') { - return { success: false, error: 'Search term is required' }; - } - try { - const issues = await githubService.searchIssues(projectPath, searchTerm, limit ?? 20); - return { success: true, issues }; - } catch (error) { - const message = error instanceof Error ? error.message : 'Unable to search issues'; - return { success: false, error: message }; - } - } - ); - - ipcMain.handle('github:issues:get', async (_e, projectPath: string, number: number) => { - if (!projectPath) return { success: false, error: 'Project path is required' }; - if (!number || !Number.isFinite(number)) { - return { success: false, error: 'Issue number is required' }; - } - try { - const issue = await githubService.getIssue(projectPath, number); - return { success: !!issue, issue: issue ?? undefined }; - } catch (error) { - const message = error instanceof Error ? 
error.message : 'Unable to get issue'; - return { success: false, error: message }; - } - }); - - ipcMain.handle('github:listPullRequests', async (_, args: { projectPath: string }) => { - const projectPath = args?.projectPath; - if (!projectPath) { - return { success: false, error: 'Project path is required' }; - } - - try { - const prs = await githubService.getPullRequests(projectPath); - return { success: true, prs }; - } catch (error) { - log.error('Failed to list pull requests:', error); - const message = - error instanceof Error ? error.message : 'Unable to list pull requests via GitHub CLI'; - return { success: false, error: message }; - } - }); - - ipcMain.handle( - 'github:createPullRequestWorktree', - async ( - _, - args: { - projectPath: string; - projectId: string; - prNumber: number; - prTitle?: string; - taskName?: string; - branchName?: string; - } - ) => { - const { projectPath, projectId, prNumber } = args || ({} as typeof args); - - if (!projectPath || !projectId || !prNumber) { - return { success: false, error: 'Missing required parameters' }; - } - - const defaultSlug = slugify(args.prTitle || `pr-${prNumber}`) || `pr-${prNumber}`; - const taskName = - args.taskName && args.taskName.trim().length > 0 - ? 
args.taskName.trim() - : `pr-${prNumber}-${defaultSlug}`; - const branchName = args.branchName || `pr/${prNumber}`; - - try { - const currentWorktrees = await worktreeService.listWorktrees(projectPath); - const existing = currentWorktrees.find((wt) => wt.branch === branchName); - - if (existing) { - return { success: true, worktree: existing, branchName, taskName: existing.name }; - } - - await githubService.ensurePullRequestBranch(projectPath, prNumber, branchName); - - const worktreesDir = path.resolve(projectPath, '..', 'worktrees'); - const slug = slugify(taskName) || `pr-${prNumber}`; - let worktreePath = path.join(worktreesDir, slug); - - if (fs.existsSync(worktreePath)) { - worktreePath = path.join(worktreesDir, `${slug}-${Date.now()}`); - } - - const worktree = await worktreeService.createWorktreeFromBranch( - projectPath, - taskName, - branchName, - projectId, - { worktreePath } - ); - - return { success: true, worktree, branchName, taskName }; - } catch (error) { - log.error('Failed to create PR worktree:', error); - const message = - error instanceof Error ? error.message : 'Unable to create PR worktree via GitHub CLI'; - return { success: false, error: message }; - } - } - ); - - ipcMain.handle('github:checkCLIInstalled', async () => { - try { - return await githubCLIInstaller.isInstalled(); - } catch (error) { - log.error('Failed to check gh CLI installation:', error); - return false; - } - }); - - ipcMain.handle('github:installCLI', async () => { - try { - return await githubCLIInstaller.install(); - } catch (error) { - log.error('Failed to install gh CLI:', error); - return { - success: false, - error: error instanceof Error ? error.message : 'Installation failed', - }; - } - }); - - ipcMain.handle('github:getOwners', async () => { - try { - const owners = await githubService.getOwners(); - return { success: true, owners }; - } catch (error) { - log.error('Failed to get owners:', error); - return { - success: false, - error: error instanceof Error ? 
error.message : 'Failed to get owners', - }; - } - }); - - ipcMain.handle('github:validateRepoName', async (_, name: string, owner: string) => { - try { - // First validate format - const formatValidation = githubService.validateRepositoryName(name); - if (!formatValidation.valid) { - return { - success: true, - valid: false, - exists: false, - error: formatValidation.error, - }; - } - - // Then check if it exists - const exists = await githubService.checkRepositoryExists(owner, name); - if (exists) { - return { - success: true, - valid: true, - exists: true, - error: `Repository ${owner}/${name} already exists`, - }; - } - - return { - success: true, - valid: true, - exists: false, - }; - } catch (error) { - log.error('Failed to validate repo name:', error); - return { - success: false, - error: error instanceof Error ? error.message : 'Validation failed', - }; - } - }); - - ipcMain.handle( - 'github:createNewProject', - async ( - _, - params: { - name: string; - description?: string; - owner: string; - isPrivate: boolean; - gitignoreTemplate?: string; - } - ) => { - let githubRepoCreated = false; - let localDirCreated = false; - let repoUrl: string | undefined; - let localPath: string | undefined; - - try { - const { name, description, owner, isPrivate, gitignoreTemplate } = params; - - // Validate inputs - const formatValidation = githubService.validateRepositoryName(name); - if (!formatValidation.valid) { - return { - success: false, - error: formatValidation.error || 'Invalid repository name', - }; - } - - // Check if repo already exists - const exists = await githubService.checkRepositoryExists(owner, name); - if (exists) { - return { - success: false, - error: `Repository ${owner}/${name} already exists`, - }; - } - - // Get project directory from settings - const { getAppSettings } = await import('../settings'); - const settings = getAppSettings(); - const projectDir = - settings.projects?.defaultDirectory || path.join(homedir(), 'emdash-projects'); - - // 
Ensure project directory exists - if (!fs.existsSync(projectDir)) { - fs.mkdirSync(projectDir, { recursive: true }); - } - - localPath = path.join(projectDir, name); - if (fs.existsSync(localPath)) { - return { - success: false, - error: `Directory ${localPath} already exists`, - }; - } - - // Create GitHub repository - const repoInfo = await githubService.createRepository({ - name, - description, - owner, - isPrivate, - }); - githubRepoCreated = true; - repoUrl = repoInfo.url; - - // Clone repository - const cloneResult = await githubService.cloneRepository(repoUrl, localPath); - if (!cloneResult.success) { - // Cleanup: delete GitHub repo on clone failure - try { - // Security: Use quoteShellArg to prevent command injection - const repoRef = `${quoteShellArg(owner)}/${quoteShellArg(name)}`; - await execAsync(`gh repo delete ${repoRef} --yes`, { - timeout: 10000, - }); - } catch (cleanupError) { - log.warn('Failed to cleanup GitHub repo after clone failure:', cleanupError); - } - return { - success: false, - error: cloneResult.error || 'Failed to clone repository', - }; - } - localDirCreated = true; - - // Initialize project (create README, commit, push) - await githubService.initializeNewProject({ - repoUrl, - localPath, - name, - description, - }); - - // TODO: Add .gitignore if template specified (for future enhancement) - - return { - success: true, - projectPath: localPath, - repoUrl, - fullName: repoInfo.fullName, - defaultBranch: repoInfo.defaultBranch, - }; - } catch (error) { - log.error('Failed to create new project:', error); - - // Cleanup on failure - if (localDirCreated && localPath && fs.existsSync(localPath)) { - try { - fs.rmSync(localPath, { recursive: true, force: true }); - } catch (cleanupError) { - log.warn('Failed to cleanup local directory:', cleanupError); - } - } - - return { - success: false, - error: error instanceof Error ? 
error.message : 'Failed to create project', - githubRepoCreated, // Inform frontend about orphaned repo - repoUrl, - }; - } - } - ); -} diff --git a/src/main/ipc/hostPreviewIpc.ts b/src/main/ipc/hostPreviewIpc.ts deleted file mode 100644 index 7116d8032..000000000 --- a/src/main/ipc/hostPreviewIpc.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { ipcMain, BrowserWindow } from 'electron'; -import { hostPreviewService } from '../services/hostPreviewService'; - -export function registerHostPreviewIpc() { - ipcMain.handle( - 'preview:host:start', - async ( - _e, - args: { - taskId: string; - taskPath: string; - script?: string; - parentProjectPath?: string; - } - ) => { - const id = String(args?.taskId || '').trim(); - const wp = String(args?.taskPath || '').trim(); - if (!id || !wp) return { ok: false, error: 'taskId and taskPath are required' }; - return hostPreviewService.start(id, wp, { - script: args?.script, - parentProjectPath: args?.parentProjectPath, - }); - } - ); - - ipcMain.handle('preview:host:setup', async (_e, args: { taskId: string; taskPath: string }) => { - const id = String(args?.taskId || '').trim(); - const wp = String(args?.taskPath || '').trim(); - if (!id || !wp) return { ok: false, error: 'taskId and taskPath are required' }; - return hostPreviewService.setup(id, wp); - }); - - ipcMain.handle('preview:host:stop', async (_e, id: string) => { - const wid = String(id || '').trim(); - if (!wid) return { ok: true }; - return hostPreviewService.stop(wid); - }); - - ipcMain.handle('preview:host:stopAll', async (_e, exceptId?: string) => { - const ex = typeof exceptId === 'string' ? 
exceptId : ''; - return hostPreviewService.stopAll(ex); - }); - - const forward = (evt: any) => { - const all = BrowserWindow.getAllWindows(); - for (const win of all) { - try { - win.webContents.send('preview:host:event', evt); - } catch {} - } - }; - hostPreviewService.onEvent(forward); -} diff --git a/src/main/ipc/index.ts b/src/main/ipc/index.ts deleted file mode 100644 index 3a0ee157d..000000000 --- a/src/main/ipc/index.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { registerPtyIpc } from '../services/ptyIpc'; -import { registerWorktreeIpc } from '../services/worktreeIpc'; -import { registerFsIpc } from '../services/fsIpc'; -import { registerLifecycleIpc } from '../services/lifecycleIpc'; -import { registerAppIpc } from './appIpc'; -import { registerProjectIpc } from './projectIpc'; -import { registerProjectSettingsIpc } from './projectSettingsIpc'; -import { registerGithubIpc } from './githubIpc'; -import { databaseController } from './dbIpc'; -import { registerDebugIpc } from './debugIpc'; -import { registerGitIpc } from './gitIpc'; -import { registerLinearIpc } from './linearIpc'; -import { registerConnectionsIpc } from './connectionsIpc'; -import { registerUpdateIpc } from '../services/updateIpc'; -import { registerTelemetryIpc } from './telemetryIpc'; -import { registerJiraIpc } from './jiraIpc'; -import { registerPlanLockIpc } from '../services/planLockIpc'; -import { appSettingsController } from './settingsIpc'; -import { registerHostPreviewIpc } from './hostPreviewIpc'; -import { registerBrowserIpc } from './browserIpc'; -import { registerNetIpc } from './netIpc'; -import { registerLineCommentsIpc } from './lineCommentsIpc'; -import { registerSshIpc } from './sshIpc'; -import { registerSkillsIpc } from './skillsIpc'; -import { createRPCRouter, registerRPCRouter } from '../../shared/ipc/rpc'; -import { ipcMain } from 'electron'; - -export const rpcRouter = createRPCRouter({ - db: databaseController, - appSettings: appSettingsController, -}); - -export type 
RpcRouter = typeof rpcRouter; - -export function registerAllIpc() { - // Register RPC - registerRPCRouter(rpcRouter, ipcMain); - - // Core app/utility IPC - registerAppIpc(); - registerDebugIpc(); - registerTelemetryIpc(); - registerUpdateIpc(); - - // Domain IPC - registerProjectIpc(); - registerProjectSettingsIpc(); - registerGithubIpc(); - registerGitIpc(); - registerHostPreviewIpc(); - registerBrowserIpc(); - registerNetIpc(); - registerLineCommentsIpc(); - - // Existing modules - registerPtyIpc(); - registerWorktreeIpc(); - registerFsIpc(); - registerLifecycleIpc(); - registerLinearIpc(); - registerConnectionsIpc(); - registerJiraIpc(); - registerPlanLockIpc(); - registerSshIpc(); - registerSkillsIpc(); -} diff --git a/src/main/ipc/jiraIpc.ts b/src/main/ipc/jiraIpc.ts deleted file mode 100644 index 75fde6dc5..000000000 --- a/src/main/ipc/jiraIpc.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { ipcMain } from 'electron'; -import JiraService from '../services/JiraService'; - -const jira = new JiraService(); - -export function registerJiraIpc() { - ipcMain.handle( - 'jira:saveCredentials', - async (_e, args: { siteUrl: string; email: string; token: string }) => { - const siteUrl = String(args?.siteUrl || '').trim(); - const email = String(args?.email || '').trim(); - const token = String(args?.token || '').trim(); - if (!siteUrl || !email || !token) { - return { success: false, error: 'Site URL, email, and API token are required.' }; - } - return jira.saveCredentials(siteUrl, email, token); - } - ); - - ipcMain.handle('jira:clearCredentials', async () => jira.clearCredentials()); - ipcMain.handle('jira:checkConnection', async () => jira.checkConnection()); - - ipcMain.handle('jira:initialFetch', async (_e, limit?: number) => { - try { - const issues = await jira.initialFetch( - typeof limit === 'number' && Number.isFinite(limit) ? 
limit : 50 - ); - return { success: true, issues }; - } catch (e: any) { - return { success: false, error: e?.message || String(e) }; - } - }); - - ipcMain.handle('jira:searchIssues', async (_e, searchTerm: string, limit?: number) => { - try { - // Use enhanced search that supports direct key lookups - const issues = await jira.smartSearchIssues(searchTerm, limit ?? 20); - return { success: true, issues }; - } catch (e: any) { - return { success: false, error: e?.message || String(e) }; - } - }); -} - -export default registerJiraIpc; diff --git a/src/main/ipc/lineCommentsIpc.ts b/src/main/ipc/lineCommentsIpc.ts deleted file mode 100644 index c8b4951bf..000000000 --- a/src/main/ipc/lineCommentsIpc.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { ipcMain } from 'electron'; -import { log } from '../lib/logger'; -import { databaseService } from '../services/DatabaseService'; -import { formatCommentsForAgent } from '../../shared/lineComments'; - -export function registerLineCommentsIpc() { - ipcMain.handle('lineComments:create', async (_, input) => { - try { - const id = await databaseService.saveLineComment(input); - return { success: true, id }; - } catch (error) { - log.error('Failed to create line comment:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lineComments:get', async (_, args: { taskId: string; filePath?: string }) => { - try { - const comments = await databaseService.getLineComments(args.taskId, args.filePath); - return { success: true, comments }; - } catch (error) { - log.error('Failed to get line comments:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lineComments:update', async (_, input: { id: string; content: string }) => { - try { - await databaseService.updateLineComment(input.id, input.content); - return { success: true }; - } catch (error) { - log.error('Failed to update line comment:', error); - return { success: false, error: (error as 
Error).message }; - } - }); - - ipcMain.handle('lineComments:delete', async (_, id: string) => { - try { - await databaseService.deleteLineComment(id); - return { success: true }; - } catch (error) { - log.error('Failed to delete line comment:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lineComments:getFormatted', async (_, taskId: string) => { - try { - const comments = await databaseService.getLineComments(taskId); - const formatted = formatCommentsForAgent(comments); - return { success: true, formatted }; - } catch (error) { - log.error('Failed to format line comments:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lineComments:markSent', async (_, commentIds: string[]) => { - try { - await databaseService.markCommentsSent(commentIds); - return { success: true }; - } catch (error) { - log.error('Failed to mark comments as sent:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lineComments:getUnsent', async (_, taskId: string) => { - try { - const comments = await databaseService.getUnsentComments(taskId); - return { success: true, comments }; - } catch (error) { - log.error('Failed to get unsent comments:', error); - return { success: false, error: (error as Error).message }; - } - }); -} diff --git a/src/main/ipc/netIpc.ts b/src/main/ipc/netIpc.ts deleted file mode 100644 index 7c7985f18..000000000 --- a/src/main/ipc/netIpc.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { ipcMain } from 'electron'; -import net from 'node:net'; - -function probePort(host: string, port: number, timeoutMs = 800): Promise { - return new Promise((resolve) => { - let done = false; - const socket = net.createConnection({ host, port }); - const timer = setTimeout( - () => { - if (done) return; - done = true; - try { - socket.destroy(); - } catch {} - resolve(false); - }, - Math.max(1, timeoutMs) - ); - socket.once('connect', () 
=> { - if (done) return; - done = true; - clearTimeout(timer); - try { - socket.destroy(); - } catch {} - resolve(true); - }); - socket.once('error', () => { - if (done) return; - done = true; - clearTimeout(timer); - try { - socket.destroy(); - } catch {} - resolve(false); - }); - }); -} - -export function registerNetIpc() { - ipcMain.handle( - 'net:probePorts', - async (_e, host: string, ports: number[], timeoutMs?: number) => { - const h = (host || 'localhost').trim() || 'localhost'; - const ps = Array.isArray(ports) ? ports.map((p) => Number(p)).filter((p) => p > 0) : []; - const t = typeof timeoutMs === 'number' && timeoutMs > 0 ? timeoutMs : 800; - if (!ps.length) return { reachable: [] }; - const results = await Promise.all(ps.map((p) => probePort(h, p, t))); - const reachable = ps.filter((_, i) => !!results[i]); - return { reachable }; - } - ); -} diff --git a/src/main/ipc/projectIpc.ts b/src/main/ipc/projectIpc.ts deleted file mode 100644 index e6c855154..000000000 --- a/src/main/ipc/projectIpc.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { ipcMain, dialog } from 'electron'; -import { join } from 'path'; -import * as fs from 'fs'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import { getMainWindow } from '../app/window'; -import { errorTracking } from '../errorTracking'; - -const execAsync = promisify(exec); -const DEFAULT_REMOTE = 'origin'; -const DEFAULT_BRANCH = 'main'; - -const normalizeRemoteName = (remote?: string | null) => { - if (!remote) return DEFAULT_REMOTE; - const trimmed = remote.trim(); - if (!trimmed) return ''; // Empty string indicates no remote (local-only repo) - if (/^[A-Za-z0-9._-]+$/.test(trimmed) && !trimmed.includes('://')) { - return trimmed; - } - return DEFAULT_REMOTE; -}; - -const computeBaseRef = (remote?: string | null, branch?: string | null) => { - const remoteName = normalizeRemoteName(remote); - if (branch && branch.trim().length > 0) { - const trimmed = branch.trim(); - if 
(trimmed.includes('/')) return trimmed; - // Prepend remote only if one exists - return remoteName ? `${remoteName}/${trimmed}` : trimmed; - } - // Default: use origin/main if remote exists, otherwise just 'main' - return remoteName ? `${remoteName}/${DEFAULT_BRANCH}` : DEFAULT_BRANCH; -}; - -const detectDefaultBranch = async (projectPath: string, remote?: string | null) => { - const remoteName = normalizeRemoteName(remote); - // If no remote, try to detect the current local branch - if (!remoteName) { - try { - const { stdout } = await execAsync('git branch --show-current', { - cwd: projectPath, - }); - return stdout.trim() || null; - } catch { - return null; - } - } - // Try to get remote's default branch - try { - const { stdout } = await execAsync(`git remote show ${remoteName}`, { - cwd: projectPath, - }); - const match = stdout.match(/HEAD branch:\s*(\S+)/); - return match ? match[1] : null; - } catch { - return null; - } -}; - -export function registerProjectIpc() { - ipcMain.handle('project:open', async () => { - try { - const result = await dialog.showOpenDialog(getMainWindow()!, { - title: 'Open Project', - properties: ['openDirectory'], - message: 'Select a project directory to open', - }); - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, error: 'No directory selected' }; - } - - const projectPath = result.filePaths[0]; - return { success: true, path: projectPath }; - } catch (error) { - console.error('Failed to open project:', error); - - // Track project open errors - await errorTracking.captureProjectError(error, 'open'); - - return { success: false, error: 'Failed to open project directory' }; - } - }); - - ipcMain.handle('git:getInfo', async (_, projectPath: string) => { - try { - const resolveRealPath = async (target: string) => { - try { - return await fs.promises.realpath(target); - } catch { - return target; - } - }; - - const resolvedProjectPath = await resolveRealPath(projectPath); - const gitPath = 
join(resolvedProjectPath, '.git'); - const isGitRepo = fs.existsSync(gitPath); - - if (!isGitRepo) { - return { isGitRepo: false, path: resolvedProjectPath }; - } - - // Get remote URL - let remote: string | null = null; - try { - const { stdout } = await execAsync('git remote get-url origin', { - cwd: resolvedProjectPath, - }); - remote = stdout.trim(); - } catch {} - - // Get current branch - let branch: string | null = null; - try { - const { stdout } = await execAsync('git branch --show-current', { - cwd: resolvedProjectPath, - }); - branch = stdout.trim(); - } catch {} - - let defaultBranch: string | null = null; - if (!branch) { - defaultBranch = await detectDefaultBranch(resolvedProjectPath, remote); - } - - let upstream: string | null = null; - let aheadCount: number | null = null; - let behindCount: number | null = null; - try { - const { stdout } = await execAsync('git rev-parse --abbrev-ref --symbolic-full-name @{u}', { - cwd: resolvedProjectPath, - }); - upstream = stdout.trim(); - } catch {} - - if (upstream) { - try { - const { stdout } = await execAsync('git rev-list --left-right --count HEAD...@{u}', { - cwd: resolvedProjectPath, - }); - const [ahead, behind] = stdout.trim().split(/\s+/); - aheadCount = Number.parseInt(ahead, 10); - behindCount = Number.parseInt(behind, 10); - } catch {} - } - - let rootPath: string | null = null; - try { - const { stdout } = await execAsync('git rev-parse --show-toplevel', { - cwd: resolvedProjectPath, - }); - const trimmed = stdout.trim(); - if (trimmed) { - rootPath = await resolveRealPath(trimmed); - } - } catch {} - - const baseRef = computeBaseRef(remote, branch || defaultBranch); - - const safeAhead = - typeof aheadCount === 'number' && Number.isFinite(aheadCount) ? aheadCount : undefined; - const safeBehind = - typeof behindCount === 'number' && Number.isFinite(behindCount) ? 
behindCount : undefined; - - return { - isGitRepo: true, - remote, - branch, - baseRef, - upstream, - aheadCount: safeAhead, - behindCount: safeBehind, - path: resolvedProjectPath, - rootPath: rootPath || resolvedProjectPath, - }; - } catch (error) { - console.error('Failed to get Git info:', error); - return { isGitRepo: false, error: 'Failed to read Git information', path: projectPath }; - } - }); -} diff --git a/src/main/ipc/projectSettingsIpc.ts b/src/main/ipc/projectSettingsIpc.ts deleted file mode 100644 index a483effc8..000000000 --- a/src/main/ipc/projectSettingsIpc.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { ipcMain } from 'electron'; -import { log } from '../lib/logger'; -import { projectSettingsService } from '../services/ProjectSettingsService'; -import { worktreeService } from '../services/WorktreeService'; - -type ProjectSettingsArgs = { projectId: string }; -type UpdateProjectSettingsArgs = { projectId: string; baseRef: string }; - -const resolveProjectId = (input: ProjectSettingsArgs | string | undefined): string => { - if (!input) return ''; - if (typeof input === 'string') return input; - return input.projectId; -}; - -export function registerProjectSettingsIpc() { - ipcMain.handle('projectSettings:get', async (_event, args: ProjectSettingsArgs | string) => { - try { - const projectId = resolveProjectId(args); - if (!projectId) { - throw new Error('projectId is required'); - } - const settings = await projectSettingsService.getProjectSettings(projectId); - if (!settings) { - return { success: false, error: 'Project not found' }; - } - return { success: true, settings }; - } catch (error) { - log.error('Failed to get project settings', error); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - }); - - ipcMain.handle( - 'projectSettings:update', - async (_event, args: UpdateProjectSettingsArgs | undefined) => { - try { - const projectId = args?.projectId; - const baseRef = args?.baseRef; - if (!projectId) { - throw new Error('projectId is required'); - } - if (typeof baseRef !== 'string') { - throw new Error('baseRef is required'); - } - const trimmed = baseRef.trim(); - if (!trimmed) { - throw new Error('baseRef cannot be empty'); - } - const settings = await projectSettingsService.updateProjectSettings(projectId, { - baseRef: trimmed, - }); - return { success: true, settings }; - } catch (error) { - log.error('Failed to update project settings', error); - return { success: false, error: error instanceof Error ? error.message : String(error) }; - } - } - ); - - ipcMain.handle( - 'projectSettings:fetchBaseRef', - async ( - _event, - args: - | { - projectId: string; - projectPath: string; - } - | undefined - ) => { - try { - const projectId = args?.projectId; - const projectPath = args?.projectPath; - if (!projectId) { - throw new Error('projectId is required'); - } - if (!projectPath) { - throw new Error('projectPath is required'); - } - const info = await worktreeService.fetchLatestBaseRef(projectPath, projectId); - return { - success: true, - baseRef: info.fullRef, - remote: info.remote, - branch: info.branch, - }; - } catch (error) { - log.error('Failed to fetch base branch', error); - return { success: false, error: error instanceof Error ? 
error.message : String(error) }; - } - } - ); -} diff --git a/src/main/ipc/settingsIpc.ts b/src/main/ipc/settingsIpc.ts deleted file mode 100644 index 26d22a89b..000000000 --- a/src/main/ipc/settingsIpc.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { AppSettingsUpdate, getAppSettings, updateAppSettings } from '../settings'; -import { createRPCController } from '../../shared/ipc/rpc'; - -export const appSettingsController = createRPCController({ - get: async () => getAppSettings(), - update: (partial: AppSettingsUpdate) => updateAppSettings(partial || {}), -}); diff --git a/src/main/ipc/sshIpc.ts b/src/main/ipc/sshIpc.ts deleted file mode 100644 index 4da76c7dc..000000000 --- a/src/main/ipc/sshIpc.ts +++ /dev/null @@ -1,891 +0,0 @@ -import { ipcMain } from 'electron'; -import { SSH_IPC_CHANNELS } from '../../shared/ssh/types'; -import { sshService } from '../services/ssh/SshService'; -import { SshCredentialService } from '../services/ssh/SshCredentialService'; -import { SshHostKeyService } from '../services/ssh/SshHostKeyService'; -import { SshConnectionMonitor } from '../services/ssh/SshConnectionMonitor'; -import { getDrizzleClient } from '../db/drizzleClient'; -import { sshConnections as sshConnectionsTable, type SshConnectionInsert } from '../db/schema'; -import { eq, desc } from 'drizzle-orm'; -import { randomUUID } from 'crypto'; -import { quoteShellArg } from '../utils/shellEscape'; -import { parseSshConfigFile, resolveIdentityAgent } from '../utils/sshConfigParser'; -import type { - SshConfig, - ConnectionTestResult, - FileEntry, - ConnectionState, - SshConfigHost, -} from '../../shared/ssh/types'; - -// Initialize services -const credentialService = new SshCredentialService(); -// Host key service initialized for future use (host key verification) -const _hostKeyService = new SshHostKeyService(); -const monitor = new SshConnectionMonitor((id) => sshService.isConnected(id)); - -// When ssh2 detects a dead connection (via keepalive) and emits `close`, -// 
SshService removes it from the pool and emits `disconnected`. -// The monitor reacts by triggering reconnect with exponential backoff. -sshService.on('disconnected', (connectionId: string) => { - monitor.handleDisconnect(connectionId); -}); - -/** - * Maps a database row to SshConfig - */ -function mapRowToConfig(row: { - id: string; - name: string; - host: string; - port: number; - username: string; - authType: string; - privateKeyPath: string | null; - useAgent: number; -}): SshConfig { - return { - id: row.id, - name: row.name, - host: row.host, - port: row.port, - username: row.username, - authType: row.authType as 'password' | 'key' | 'agent', - privateKeyPath: row.privateKeyPath ?? undefined, - useAgent: row.useAgent === 1, - }; -} - -/** - * Validates that a remote path is safe to access. - * - * Uses a two-layer approach: - * 1. Reject any path containing traversal sequences (even after normalization). - * 2. Reject paths that resolve into known-sensitive directories. - * - * The path is resolved against '/' so that relative tricks like - * "foo/../../etc/shadow" are caught. - */ -function isPathSafe(remotePath: string): boolean { - // Must be an absolute path - if (!remotePath.startsWith('/')) { - return false; - } - - // Normalize repeated slashes - const normalized = remotePath.replace(/\/+/g, '/'); - - // Reject any occurrence of '..' as a path component - // This catches ../ /.. and trailing /.. 
- const segments = normalized.split('/'); - if (segments.some((s) => s === '..')) { - return false; - } - - // Block access to sensitive system directories and hidden dotfiles - const restrictedPrefixes = ['/etc/', '/proc/', '/sys/', '/dev/', '/boot/', '/root/']; - for (const prefix of restrictedPrefixes) { - if (normalized.startsWith(prefix) || normalized === prefix.slice(0, -1)) { - return false; - } - } - - // Block .ssh directories anywhere in the path - if (segments.some((s) => s === '.ssh')) { - return false; - } - - return true; -} - -/** - * Classify an SSH error into a safe, non-PII category for telemetry. - */ -function classifySshError(err: any): string { - const msg = String(err?.message || err || '').toLowerCase(); - if (msg.includes('authentication') || msg.includes('auth') || msg.includes('password')) { - return 'auth_failed'; - } - if (msg.includes('timed out') || msg.includes('timeout')) { - return 'timeout'; - } - if ( - msg.includes('econnrefused') || - msg.includes('enotfound') || - msg.includes('enetunreach') || - msg.includes('network') - ) { - return 'network'; - } - if (msg.includes('key') || msg.includes('passphrase') || msg.includes('decrypt')) { - return 'key_error'; - } - return 'unknown'; -} - -/** - * Register all SSH IPC handlers - */ -export function registerSshIpc() { - // Wire up reconnect handler so the monitor's reconnect event actually reconnects (HIGH #9) - monitor.on('reconnect', async (connectionId: string, config: SshConfig, attempt: number) => { - try { - console.log(`[sshIpc] Reconnecting ${connectionId} (attempt ${attempt})...`); - - // Clean up the stale/dead connection before opening a new one - if (sshService.isConnected(connectionId)) { - await sshService.disconnect(connectionId).catch(() => {}); - } - - await sshService.connect(config); - monitor.updateState(connectionId, 'connected'); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_reconnect_attempted', { success: true }); - }); - } catch 
(err: any) { - console.error( - `[sshIpc] Reconnect attempt ${attempt} failed for ${connectionId}:`, - err.message - ); - monitor.updateState(connectionId, 'error', err.message); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_reconnect_attempted', { success: false }); - }); - } - }); - // Test connection - ipcMain.handle( - SSH_IPC_CHANNELS.TEST_CONNECTION, - async ( - _, - config: SshConfig & { password?: string; passphrase?: string } - ): Promise => { - try { - const { Client } = await import('ssh2'); - const debugLogs: string[] = []; - const testClient = new Client(); - - return new Promise(async (resolve) => { - const startTime = Date.now(); - - testClient.on('ready', () => { - const latency = Date.now() - startTime; - testClient.end(); - resolve({ success: true, latency, debugLogs }); - }); - - testClient.on('error', (err: Error) => { - resolve({ success: false, error: err.message, debugLogs }); - }); - - testClient.on('keyboard-interactive', () => { - // Close the connection if keyboard-interactive auth is required - testClient.end(); - resolve({ - success: false, - error: 'Keyboard-interactive authentication not supported', - debugLogs, - }); - }); - - const connectConfig: { - host: string; - port: number; - username: string; - readyTimeout: number; - password?: string; - privateKey?: Buffer; - passphrase?: string; - agent?: string; - debug?: (info: string) => void; - } = { - host: config.host, - port: config.port, - username: config.username, - readyTimeout: 10000, - debug: (info: string) => debugLogs.push(info), - }; - - if (config.authType === 'password') { - connectConfig.password = config.password; - } else if (config.authType === 'key' && config.privateKeyPath) { - const fs = require('fs'); - const os = require('os'); - try { - // Expand ~ to home directory - let keyPath = config.privateKeyPath; - if (keyPath.startsWith('~/')) { - keyPath = keyPath.replace('~', os.homedir()); - } else if (keyPath === '~') { - keyPath = 
os.homedir(); - } - - connectConfig.privateKey = fs.readFileSync(keyPath); - if (config.passphrase) { - connectConfig.passphrase = config.passphrase; - } - } catch (err: any) { - resolve({ - success: false, - error: `Failed to read private key: ${err.message}`, - debugLogs, - }); - return; - } - } else if (config.authType === 'agent') { - const identityAgent = await resolveIdentityAgent(config.host); - connectConfig.agent = identityAgent || process.env.SSH_AUTH_SOCK; - } - - testClient.connect(connectConfig); - }); - } catch (err: any) { - console.error('[sshIpc] Test connection error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Save connection - ipcMain.handle( - SSH_IPC_CHANNELS.SAVE_CONNECTION, - async ( - _, - config: SshConfig & { password?: string; passphrase?: string } - ): Promise<{ success: boolean; connection?: SshConfig; error?: string }> => { - try { - const { db } = await getDrizzleClient(); - - // Generate ID if not provided - const connectionId = - config.id ?? `ssh-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; - - // Save credentials first (secure keychain storage) - if (config.password) { - await credentialService.storePassword(connectionId, config.password); - } - if (config.passphrase) { - await credentialService.storePassphrase(connectionId, config.passphrase); - } - - // Strip sensitive data before saving to DB - const { password: _password, passphrase: _passphrase, ...dbConfig } = config; - - const insertData: SshConnectionInsert = { - id: connectionId, - name: dbConfig.name, - host: dbConfig.host, - port: dbConfig.port, - username: dbConfig.username, - authType: dbConfig.authType, - privateKeyPath: dbConfig.privateKeyPath, - useAgent: dbConfig.useAgent ? 
1 : 0, - }; - - // Insert or update - await db - .insert(sshConnectionsTable) - .values(insertData) - .onConflictDoUpdate({ - target: sshConnectionsTable.id, - set: { - name: insertData.name, - host: insertData.host, - port: insertData.port, - username: insertData.username, - authType: insertData.authType, - privateKeyPath: insertData.privateKeyPath, - useAgent: insertData.useAgent, - updatedAt: new Date().toISOString(), - }, - }); - - void import('../telemetry').then(({ capture }) => { - void capture('ssh_connection_saved', { type: config.authType }); - }); - - return { - success: true, - connection: { - ...dbConfig, - id: connectionId, - }, - }; - } catch (err: any) { - console.error('[sshIpc] Save connection error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Get connections - ipcMain.handle( - SSH_IPC_CHANNELS.GET_CONNECTIONS, - async (): Promise<{ success: boolean; connections?: SshConfig[]; error?: string }> => { - try { - const { db } = await getDrizzleClient(); - - const rows = await db - .select({ - id: sshConnectionsTable.id, - name: sshConnectionsTable.name, - host: sshConnectionsTable.host, - port: sshConnectionsTable.port, - username: sshConnectionsTable.username, - authType: sshConnectionsTable.authType, - privateKeyPath: sshConnectionsTable.privateKeyPath, - useAgent: sshConnectionsTable.useAgent, - }) - .from(sshConnectionsTable) - .orderBy(desc(sshConnectionsTable.updatedAt)); - - return { - success: true, - connections: rows.map(mapRowToConfig), - }; - } catch (err: any) { - console.error('[sshIpc] Get connections error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Delete connection - ipcMain.handle( - SSH_IPC_CHANNELS.DELETE_CONNECTION, - async (_, id: string): Promise<{ success: boolean; error?: string }> => { - try { - // Stop monitoring BEFORE disconnecting so the monitor's - // handleDisconnect listener doesn't trigger a reconnect. 
- monitor.stopMonitoring(id); - if (sshService.isConnected(id)) { - try { - await sshService.disconnect(id); - } catch { - // Best-effort: continue with deletion even if disconnect fails - } - } - - const { db } = await getDrizzleClient(); - - // Delete credentials - await credentialService.deleteAllCredentials(id); - - // Delete from database - await db.delete(sshConnectionsTable).where(eq(sshConnectionsTable.id, id)); - - void import('../telemetry').then(({ capture }) => { - void capture('ssh_connection_deleted'); - }); - - return { success: true }; - } catch (err: any) { - console.error('[sshIpc] Delete connection error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Connect - ipcMain.handle( - SSH_IPC_CHANNELS.CONNECT, - async ( - _, - arg: unknown - ): Promise<{ success: boolean; connectionId?: string; error?: string }> => { - try { - // Accept either a saved connection id (string) or a config object. - if (typeof arg === 'string') { - const id = arg; - const { db } = await getDrizzleClient(); - const rows = await db - .select({ - id: sshConnectionsTable.id, - name: sshConnectionsTable.name, - host: sshConnectionsTable.host, - port: sshConnectionsTable.port, - username: sshConnectionsTable.username, - authType: sshConnectionsTable.authType, - privateKeyPath: sshConnectionsTable.privateKeyPath, - useAgent: sshConnectionsTable.useAgent, - }) - .from(sshConnectionsTable) - .where(eq(sshConnectionsTable.id, id)) - .limit(1); - - const row = rows[0]; - if (!row) { - return { success: false, error: `SSH connection not found: ${id}` }; - } - - const loadedConfig = mapRowToConfig(row); - const connectionId = await sshService.connect(loadedConfig); - // startMonitoring is a no-op if already tracked; updateState - // is a no-op if not tracked. Call both to handle fresh connects - // and re-connects after the monitor gave up (state = disconnected). 
- monitor.startMonitoring(connectionId, loadedConfig); - monitor.updateState(connectionId, 'connected'); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_connect_success', { type: loadedConfig.authType }); - }); - return { success: true, connectionId }; - } - - if (!arg || typeof arg !== 'object') { - return { success: false, error: 'Invalid SSH connect request' }; - } - - const config = arg as SshConfig & { password?: string; passphrase?: string }; - const effectiveId = config.id ?? randomUUID(); - - // If secrets are provided inline, store them for this id. - if (config.authType === 'password' && typeof config.password === 'string') { - await credentialService.storePassword(effectiveId, config.password); - } - if ( - config.authType === 'key' && - typeof config.passphrase === 'string' && - config.passphrase - ) { - await credentialService.storePassphrase(effectiveId, config.passphrase); - } - - // Load credentials from keychain if needed - let password = config.password; - let passphrase = config.passphrase; - - if (config.authType === 'password' && !password) { - password = (await credentialService.getPassword(effectiveId)) ?? undefined; - } - if (config.authType === 'key' && !passphrase) { - passphrase = (await credentialService.getPassphrase(effectiveId)) ?? 
undefined; - } - - const fullConfig = { - ...config, - id: effectiveId, - password, - passphrase, - }; - - const connectionId = await sshService.connect(fullConfig as any); - monitor.startMonitoring(connectionId, fullConfig as any); - monitor.updateState(connectionId, 'connected'); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_connect_success', { type: config.authType }); - }); - return { success: true, connectionId }; - } catch (err: any) { - console.error('[sshIpc] Connection error:', err); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_connect_failed', { error_type: classifySshError(err) }); - }); - return { success: false, error: err.message }; - } - } - ); - - // Disconnect - ipcMain.handle( - SSH_IPC_CHANNELS.DISCONNECT, - async (_, connectionId: string): Promise<{ success: boolean; error?: string }> => { - try { - // Stop monitoring BEFORE disconnecting so the monitor's - // handleDisconnect listener doesn't trigger a reconnect - // for an intentional disconnect. 
- monitor.stopMonitoring(connectionId); - await sshService.disconnect(connectionId); - void import('../telemetry').then(({ capture }) => { - void capture('ssh_disconnected'); - }); - return { success: true }; - } catch (err: any) { - console.error('[sshIpc] Disconnect error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Execute command (guarded: only allow known-safe command prefixes from renderer) - const ALLOWED_COMMAND_PREFIXES = [ - 'git ', - 'ls ', - 'pwd', - 'cat ', - 'head ', - 'tail ', - 'wc ', - 'stat ', - 'file ', - 'which ', - 'echo ', - 'test ', - '[ ', - ]; - - ipcMain.handle( - SSH_IPC_CHANNELS.EXECUTE_COMMAND, - async ( - _, - connectionId: string, - command: string, - cwd?: string - ): Promise<{ - success: boolean; - stdout?: string; - stderr?: string; - exitCode?: number; - error?: string; - }> => { - try { - // Validate the command against the allowlist - const trimmed = command.trimStart(); - const isAllowed = ALLOWED_COMMAND_PREFIXES.some( - (prefix) => trimmed === prefix.trimEnd() || trimmed.startsWith(prefix) - ); - if (!isAllowed) { - console.warn(`[sshIpc] Blocked disallowed command: ${trimmed.slice(0, 80)}`); - return { success: false, error: 'Command not allowed' }; - } - - const result = await sshService.executeCommand(connectionId, command, cwd); - return { success: true, ...result }; - } catch (error: any) { - console.error('[sshIpc] Execute command error:', error); - return { success: false, error: error.message }; - } - } - ); - - // List files - ipcMain.handle( - SSH_IPC_CHANNELS.LIST_FILES, - async ( - _, - connectionId: string, - path: string - ): Promise<{ success: boolean; files?: FileEntry[]; error?: string }> => { - try { - // Validate path to prevent browsing sensitive directories - if (!isPathSafe(path)) { - return { success: false, error: 'Access denied: path is restricted' }; - } - - const sftp = await sshService.getSftp(connectionId); - - return new Promise((resolve) => { - sftp.readdir(path, 
(err, list) => { - if (err) { - resolve({ success: false, error: `Failed to list files: ${err.message}` }); - return; - } - - const entries: FileEntry[] = list.map((item) => { - const isDirectory = item.attrs.isDirectory(); - const isSymlink = item.attrs.isSymbolicLink(); - - let type: 'file' | 'directory' | 'symlink' = 'file'; - if (isDirectory) type = 'directory'; - else if (isSymlink) type = 'symlink'; - - return { - path: `${path}/${item.filename}`.replace(/\/+/g, '/'), - name: item.filename, - type, - size: item.attrs.size, - modifiedAt: new Date(item.attrs.mtime * 1000), - permissions: item.attrs.mode?.toString(8), - }; - }); - - resolve({ success: true, files: entries }); - }); - }); - } catch (error: any) { - console.error('[sshIpc] List files error:', error); - return { success: false, error: error.message }; - } - } - ); - - // Read file - ipcMain.handle( - SSH_IPC_CHANNELS.READ_FILE, - async ( - _, - connectionId: string, - path: string - ): Promise<{ success: boolean; content?: string; error?: string }> => { - try { - // Validate path to prevent access to sensitive files - if (!isPathSafe(path)) { - return { success: false, error: 'Access denied: path is restricted' }; - } - - const sftp = await sshService.getSftp(connectionId); - - return new Promise((resolve) => { - sftp.readFile(path, 'utf-8', (err, data) => { - if (err) { - resolve({ success: false, error: `Failed to read file: ${err.message}` }); - return; - } - resolve({ success: true, content: data.toString() }); - }); - }); - } catch (error: any) { - console.error('[sshIpc] Read file error:', error); - return { success: false, error: error.message }; - } - } - ); - - // Write file - ipcMain.handle( - SSH_IPC_CHANNELS.WRITE_FILE, - async ( - _, - connectionId: string, - path: string, - content: string - ): Promise<{ success: boolean; error?: string }> => { - try { - // Validate path to prevent writing to sensitive files - if (!isPathSafe(path)) { - return { success: false, error: 'Access denied: 
path is restricted' }; - } - - const sftp = await sshService.getSftp(connectionId); - - return new Promise((resolve) => { - sftp.writeFile(path, content, 'utf-8', (err) => { - if (err) { - resolve({ success: false, error: `Failed to write file: ${err.message}` }); - return; - } - resolve({ success: true }); - }); - }); - } catch (error: any) { - console.error('[sshIpc] Write file error:', error); - return { success: false, error: error.message }; - } - } - ); - - // Get state - ipcMain.handle( - SSH_IPC_CHANNELS.GET_STATE, - async ( - _, - connectionId: string - ): Promise<{ success: boolean; state?: ConnectionState; error?: string }> => { - try { - const state = monitor.getState(connectionId); - return { success: true, state }; - } catch (err: any) { - console.error('[sshIpc] Get state error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Get SSH config hosts from ~/.ssh/config - ipcMain.handle( - SSH_IPC_CHANNELS.GET_SSH_CONFIG, - async (): Promise<{ success: boolean; hosts?: SshConfigHost[]; error?: string }> => { - try { - const hosts = await parseSshConfigFile(); - return { success: true, hosts }; - } catch (err: any) { - console.error('[sshIpc] Get SSH config error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Get a specific SSH config host by alias - ipcMain.handle( - SSH_IPC_CHANNELS.GET_SSH_CONFIG_HOST, - async ( - _, - hostAlias: string - ): Promise<{ success: boolean; host?: SshConfigHost; error?: string }> => { - try { - if (!hostAlias || typeof hostAlias !== 'string') { - return { success: false, error: 'Host alias is required' }; - } - - const hosts = await parseSshConfigFile(); - const host = hosts.find((h) => h.host.toLowerCase() === hostAlias.toLowerCase()); - - if (!host) { - return { success: false, error: `Host alias not found: ${hostAlias}` }; - } - - return { success: true, host }; - } catch (err: any) { - console.error('[sshIpc] Get SSH config host error:', err); - return { 
success: false, error: err.message }; - } - } - ); - - // Check if a remote path is a git repository - ipcMain.handle( - SSH_IPC_CHANNELS.CHECK_IS_GIT_REPO, - async ( - _, - connectionId: string, - remotePath: string - ): Promise<{ success: boolean; isGitRepo?: boolean; error?: string }> => { - try { - if (!remotePath || !remotePath.startsWith('/')) { - return { success: false, error: 'An absolute remote path is required' }; - } - if (!isPathSafe(remotePath)) { - return { success: false, error: 'Access denied: path is restricted' }; - } - - const result = await sshService.executeCommand( - connectionId, - `git -C ${quoteShellArg(remotePath)} rev-parse --is-inside-work-tree 2>/dev/null` - ); - const isGitRepo = result.exitCode === 0 && result.stdout.trim() === 'true'; - return { success: true, isGitRepo }; - } catch (err: any) { - console.error('[sshIpc] Check git repo error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Initialize a new git repository on the remote machine - ipcMain.handle( - SSH_IPC_CHANNELS.INIT_REPO, - async ( - _, - connectionId: string, - parentPath: string, - repoName: string - ): Promise<{ success: boolean; path?: string; error?: string }> => { - try { - if (!parentPath || !parentPath.startsWith('/')) { - return { success: false, error: 'An absolute parent path is required' }; - } - if (!isPathSafe(parentPath)) { - return { success: false, error: 'Access denied: path is restricted' }; - } - // Validate repo name: alphanumeric, hyphens, underscores, dots - if (!repoName || !/^[a-zA-Z0-9][a-zA-Z0-9._-]*$/.test(repoName)) { - return { - success: false, - error: - 'Invalid repository name. Use letters, numbers, hyphens, underscores, and dots. 
Must start with a letter or number.', - }; - } - - const repoPath = `${parentPath.replace(/\/+$/, '')}/${repoName}`; - if (!isPathSafe(repoPath)) { - return { success: false, error: 'Access denied: target path is restricted' }; - } - - // Check if directory already exists - const checkResult = await sshService.executeCommand( - connectionId, - `test -d ${quoteShellArg(repoPath)} && echo exists || echo absent` - ); - if (checkResult.stdout.trim() === 'exists') { - return { success: false, error: `Directory already exists: ${repoPath}` }; - } - - // Create directory and initialize git repo - const initResult = await sshService.executeCommand( - connectionId, - `mkdir -p ${quoteShellArg(repoPath)} && git -C ${quoteShellArg(repoPath)} init` - ); - if (initResult.exitCode !== 0) { - return { - success: false, - error: `Failed to initialize repository: ${initResult.stderr || initResult.stdout}`, - }; - } - - void import('../telemetry').then(({ capture }) => { - void capture('ssh_repo_init'); - }); - - return { success: true, path: repoPath }; - } catch (err: any) { - console.error('[sshIpc] Init repo error:', err); - return { success: false, error: err.message }; - } - } - ); - - // Clone a repository on the remote machine - ipcMain.handle( - SSH_IPC_CHANNELS.CLONE_REPO, - async ( - _, - connectionId: string, - repoUrl: string, - targetPath: string - ): Promise<{ success: boolean; path?: string; error?: string }> => { - try { - if (!repoUrl || typeof repoUrl !== 'string') { - return { success: false, error: 'Repository URL is required' }; - } - // Validate URL format - const urlPatterns = [/^https?:\/\/.+/i, /^git@.+:.+/i, /^ssh:\/\/.+/i]; - if (!urlPatterns.some((p) => p.test(repoUrl.trim()))) { - return { - success: false, - error: 'Invalid repository URL. 
Use https://, git@, or ssh:// format.', - }; - } - - if (!targetPath || !targetPath.startsWith('/')) { - return { success: false, error: 'An absolute target path is required' }; - } - if (!isPathSafe(targetPath)) { - return { success: false, error: 'Access denied: path is restricted' }; - } - - // Check if target already exists - const checkResult = await sshService.executeCommand( - connectionId, - `test -e ${quoteShellArg(targetPath)} && echo exists || echo absent` - ); - if (checkResult.stdout.trim() === 'exists') { - return { success: false, error: `Target path already exists: ${targetPath}` }; - } - - // Ensure parent directory exists - const parentDir = targetPath.replace(/\/[^/]+\/?$/, '') || '/'; - await sshService.executeCommand(connectionId, `mkdir -p ${quoteShellArg(parentDir)}`); - - // Clone the repository - const cloneResult = await sshService.executeCommand( - connectionId, - `git clone ${quoteShellArg(repoUrl.trim())} ${quoteShellArg(targetPath)}` - ); - if (cloneResult.exitCode !== 0) { - return { - success: false, - error: `Clone failed: ${cloneResult.stderr || cloneResult.stdout}`, - }; - } - - void import('../telemetry').then(({ capture }) => { - void capture('ssh_repo_clone'); - }); - - return { success: true, path: targetPath }; - } catch (err: any) { - console.error('[sshIpc] Clone repo error:', err); - return { success: false, error: err.message }; - } - } - ); -} diff --git a/src/main/ipc/telemetryIpc.ts b/src/main/ipc/telemetryIpc.ts deleted file mode 100644 index 5e2152b28..000000000 --- a/src/main/ipc/telemetryIpc.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { ipcMain } from 'electron'; -import { - capture, - captureException, - isTelemetryEnabled, - getTelemetryStatus, - setTelemetryEnabledViaUser, - setOnboardingSeen, -} from '../telemetry'; - -// Events allowed from renderer process -// Main process-only events (app_started, app_closed, app_window_focused, github_connection_triggered, -// github_connected, task_snapshot, app_session, 
agent_run_start, agent_run_finish) should NOT be here -const RENDERER_ALLOWED_EVENTS = new Set([ - // Error tracking - '$exception', // PostHog error tracking format - // Legacy - 'feature_used', - 'error', - // Project management - 'project_add_clicked', - 'project_open_clicked', - 'project_added_success', - 'project_deleted', - 'project_view_opened', - // Task management - 'task_created', - 'task_deleted', - 'task_provider_switched', - 'task_custom_named', - 'task_advanced_options_opened', - // Terminal (Right Sidebar) - 'terminal_entered', - 'terminal_command_executed', - 'terminal_new_terminal_created', - 'terminal_deleted', - // Changes (Right Sidebar) - 'changes_viewed', - // Plan mode - 'plan_mode_enabled', - 'plan_mode_disabled', - // Git & Pull Requests - 'pr_created', - 'pr_creation_failed', - 'pr_viewed', - // Linear integration - 'linear_connected', - 'linear_disconnected', - 'linear_issues_searched', - 'linear_issue_selected', - // Jira integration - 'jira_connected', - 'jira_disconnected', - 'jira_issues_searched', - 'jira_issue_selected', - // Container & Dev Environment - 'container_connect_clicked', - 'container_connect_success', - 'container_connect_failed', - // ToolBar Section - 'toolbar_feedback_clicked', - 'toolbar_left_sidebar_clicked', - 'toolbar_right_sidebar_clicked', - 'toolbar_settings_clicked', - 'toolbar_open_in_menu_clicked', - 'toolbar_open_in_selected', - 'toolbar_kanban_toggled', - // Browser Preview - 'browser_preview_opened', - 'browser_preview_closed', - 'browser_preview_url_navigated', - // Skills - 'skills_view_opened', - 'skill_installed', - 'skill_uninstalled', - 'skill_created', - 'skill_detail_viewed', - // Remote Server / SSH - 'remote_project_modal_opened', - 'remote_project_connection_tested', - 'remote_project_created', - 'ssh_settings_opened', - // GitHub issues - 'github_issues_searched', - 'github_issue_selected', - // Task with issue - 'task_created_with_issue', - // Settings & Preferences - 'settings_tab_viewed', 
- 'theme_changed', - 'telemetry_toggled', - 'notification_settings_changed', - 'default_provider_changed', -]); - -export function registerTelemetryIpc() { - ipcMain.handle('telemetry:capture', async (_event, args: { event: string; properties?: any }) => { - try { - if (!isTelemetryEnabled()) return { success: false, disabled: true }; - const ev = String(args?.event || '') as any; - if (!RENDERER_ALLOWED_EVENTS.has(ev)) { - return { success: false, error: 'event_not_allowed' }; - } - const props = - args?.properties && typeof args.properties === 'object' ? args.properties : undefined; - - // Handle $exception events specially for PostHog error tracking - if (ev === '$exception') { - // Extract error details from properties - const errorMessage = props?.$exception_message || 'Unknown error'; - const error = new Error(errorMessage); - error.stack = props?.$exception_stack_trace_raw || ''; - error.name = props?.$exception_type || 'Error'; - - // Call captureException with the error and additional properties - captureException(error, props); - } else { - // Regular telemetry events - capture(ev, props); - } - - return { success: true }; - } catch (e: any) { - return { success: false, error: e?.message || 'capture_failed' }; - } - }); - - ipcMain.handle('telemetry:get-status', async () => { - try { - return { success: true, status: getTelemetryStatus() }; - } catch (e: any) { - return { success: false, error: e?.message || 'status_failed' }; - } - }); - - ipcMain.handle('telemetry:set-enabled', async (_event, enabled: boolean) => { - try { - setTelemetryEnabledViaUser(Boolean(enabled)); - return { success: true, status: getTelemetryStatus() }; - } catch (e: any) { - return { success: false, error: e?.message || 'update_failed' }; - } - }); - - ipcMain.handle('telemetry:set-onboarding-seen', async (_event, flag: boolean) => { - try { - setOnboardingSeen(Boolean(flag)); - return { success: true, status: getTelemetryStatus() }; - } catch (e: any) { - return { success: 
false, error: e?.message || 'update_failed' }; - } - }); -} diff --git a/src/main/lib/events.ts b/src/main/lib/events.ts new file mode 100644 index 000000000..b9172d36d --- /dev/null +++ b/src/main/lib/events.ts @@ -0,0 +1,23 @@ +import { ipcMain } from 'electron'; +import { createEventEmitter, type EmitterAdapter } from '@shared/ipc/events'; +import { getMainWindow } from '@main/app/window'; + +function createMainAdapter(): EmitterAdapter { + return { + emit: (eventName: string, data: unknown, topic?: string) => { + const channel = topic ? `${eventName}.${topic}` : eventName; + const win = getMainWindow(); + if (win && !win.isDestroyed()) { + win.webContents.send(channel, data); + } + }, + on: (eventName: string, cb: (data: unknown) => void, topic?: string) => { + const channel = topic ? `${eventName}.${topic}` : eventName; + const handler = (_e: Electron.IpcMainEvent, data: unknown) => cb(data); + ipcMain.on(channel, handler); + return () => ipcMain.removeListener(channel, handler); + }, + }; +} + +export const events = createEventEmitter(createMainAdapter()); diff --git a/src/main/lib/logger.ts b/src/main/lib/logger.ts index c6e55e853..248d8b0f4 100644 --- a/src/main/lib/logger.ts +++ b/src/main/lib/logger.ts @@ -14,25 +14,25 @@ function enabled(target: Level, current: Level): boolean { const current = envLevel(); export const log = { - debug: (...args: any[]) => { + debug: (...args: unknown[]) => { if (enabled('debug', current)) { // eslint-disable-next-line no-console console.debug(...args); } }, - info: (...args: any[]) => { + info: (...args: unknown[]) => { if (enabled('info', current)) { // eslint-disable-next-line no-console console.info(...args); } }, - warn: (...args: any[]) => { + warn: (...args: unknown[]) => { if (enabled('warn', current)) { // eslint-disable-next-line no-console console.warn(...args); } }, - error: (...args: any[]) => { + error: (...args: unknown[]) => { // eslint-disable-next-line no-console console.error(...args); }, diff --git 
a/src/main/lib/prCreateBodyPlan.ts b/src/main/lib/prCreateBodyPlan.ts deleted file mode 100644 index ca53e9477..000000000 --- a/src/main/lib/prCreateBodyPlan.ts +++ /dev/null @@ -1,28 +0,0 @@ -type CreatePrBodyPlanArgs = { - fill?: boolean; - title?: string; - rawBody?: string; - enrichedBody?: string; -}; - -export type CreatePrBodyPlan = { - shouldPatchFilledBody: boolean; - shouldUseBodyFile: boolean; - shouldUseFill: boolean; -}; - -export function getCreatePrBodyPlan(args: CreatePrBodyPlanArgs): CreatePrBodyPlan { - const { fill, title, rawBody, enrichedBody } = args; - - // When fill is requested with no explicit body, footer-only content must be - // applied after creation so gh can keep its fill-generated body. - const shouldPatchFilledBody = Boolean(fill && !rawBody && enrichedBody); - const shouldUseBodyFile = Boolean(enrichedBody && !shouldPatchFilledBody); - - // Use fill when caller requested it and either: - // - we need to patch footer after gh generates body, or - // - title/body are missing and gh should infer them. 
- const shouldUseFill = Boolean(fill && (shouldPatchFilledBody || !title || !enrichedBody)); - - return { shouldPatchFilledBody, shouldUseBodyFile, shouldUseFill }; -} diff --git a/src/main/lib/prIssueFooter.ts b/src/main/lib/prIssueFooter.ts deleted file mode 100644 index 0bac2c442..000000000 --- a/src/main/lib/prIssueFooter.ts +++ /dev/null @@ -1,56 +0,0 @@ -const START_MARKER = ''; -const END_MARKER = ''; - -const ISSUE_FOOTER_BLOCK_RE = - /\n?[\s\S]*?\n?/g; - -function stripInjectedIssueFooter(body: string): string { - return body.replace(ISSUE_FOOTER_BLOCK_RE, '').trimEnd(); -} - -function getFixesLines(metadata: unknown): string[] { - if (!metadata || typeof metadata !== 'object') { - return []; - } - - const md = metadata as { - githubIssue?: { number?: number | string | null } | null; - linearIssue?: { identifier?: string | null } | null; - }; - - const lines: string[] = []; - - const ghNumberRaw = md.githubIssue?.number; - const ghNumber = - typeof ghNumberRaw === 'number' - ? ghNumberRaw - : typeof ghNumberRaw === 'string' - ? Number.parseInt(ghNumberRaw, 10) - : NaN; - if (Number.isFinite(ghNumber) && ghNumber > 0) { - lines.push(`Fixes #${ghNumber}`); - } - - const linearIdentifier = - typeof md.linearIssue?.identifier === 'string' ? md.linearIssue.identifier.trim() : ''; - if (linearIdentifier) { - lines.push(`Fixes ${linearIdentifier}`); - } - - return Array.from(new Set(lines)); -} - -export function injectIssueFooter(body: string | undefined, metadata: unknown): string | undefined { - const cleaned = stripInjectedIssueFooter(typeof body === 'string' ? body : ''); - const lines = getFixesLines(metadata); - - if (lines.length === 0) { - const trimmed = cleaned.trim(); - return trimmed.length > 0 ? trimmed : undefined; - } - - const footerBlock = [START_MARKER, ...lines, END_MARKER].join('\n'); - const separator = cleaned.trim().length > 0 ? '\n\n' : ''; - const combined = `${cleaned}${separator}${footerBlock}`.trim(); - return combined.length > 0 ? 
combined : undefined; -} diff --git a/src/main/lib/prIssueFooterPatch.ts b/src/main/lib/prIssueFooterPatch.ts deleted file mode 100644 index 7ba8b2891..000000000 --- a/src/main/lib/prIssueFooterPatch.ts +++ /dev/null @@ -1,68 +0,0 @@ -import fs from 'node:fs'; -import os from 'node:os'; -import path from 'node:path'; -import { injectIssueFooter } from './prIssueFooter'; - -type ExecFileResult = { - stdout?: string | Buffer; -}; - -type ExecFileLike = ( - file: string, - args: string[], - options: { cwd: string } -) => Promise; - -type PatchPrIssueFooterArgs = { - taskPath: string; - metadata: unknown; - execFile: ExecFileLike; - prUrl?: string | null; -}; - -function normalizeBodyForComparison(body: string): string { - return body.replace(/\r\n/g, '\n').trimEnd(); -} - -export async function patchCurrentPrBodyWithIssueFooter({ - taskPath, - metadata, - execFile, - prUrl, -}: PatchPrIssueFooterArgs): Promise { - const existingBody = await execFile('gh', ['pr', 'view', '--json', 'body', '-q', '.body'], { - cwd: taskPath, - }); - const existingBodyText = String(existingBody.stdout || ''); - const mergedBody = injectIssueFooter(existingBodyText, metadata); - if (!mergedBody) { - return false; - } - if (normalizeBodyForComparison(mergedBody) === normalizeBodyForComparison(existingBodyText)) { - return false; - } - - const bodyFile = path.join( - os.tmpdir(), - `gh-pr-edit-body-${Date.now()}-${Math.random().toString(36).substring(7)}.txt` - ); - - try { - fs.writeFileSync(bodyFile, mergedBody, 'utf8'); - const editArgs = ['pr', 'edit']; - if (prUrl) { - editArgs.push(prUrl); - } - editArgs.push('--body-file', bodyFile); - await execFile('gh', editArgs, { cwd: taskPath }); - return true; - } finally { - if (fs.existsSync(bodyFile)) { - try { - fs.unlinkSync(bodyFile); - } catch { - // Ignore cleanup errors; caller should not fail due to temp-file deletion. 
- } - } - } -} diff --git a/src/main/lib/result.ts b/src/main/lib/result.ts new file mode 100644 index 000000000..60ba69279 --- /dev/null +++ b/src/main/lib/result.ts @@ -0,0 +1,6 @@ +export type Ok = { readonly success: true; readonly data: T }; +export type Err = { readonly success: false; readonly error: E }; +export type Result = Ok | Err; + +export const ok = (data: T = undefined as T): Ok => ({ success: true, data }); +export const err = (error: E): Err => ({ success: false, error }); diff --git a/src/main/telemetry.ts b/src/main/lib/telemetry.ts similarity index 54% rename from src/main/telemetry.ts rename to src/main/lib/telemetry.ts index 81c592b8a..a286d6e49 100644 --- a/src/main/telemetry.ts +++ b/src/main/lib/telemetry.ts @@ -1,29 +1,10 @@ +import { randomUUID } from 'node:crypto'; import { app } from 'electron'; -// Optional build-time defaults for distribution bundles -// Resolve robustly across dev and packaged layouts. -let appConfig: { posthogHost?: string; posthogKey?: string } = {}; -import { existsSync, readFileSync, writeFileSync } from 'fs'; -import { join } from 'path'; +import rawAppConfig from '@main/appConfig.json'; +import { KV } from '@main/db/kv'; -function loadAppConfig(): { posthogHost?: string; posthogKey?: string } { - try { - const dir = __dirname; // e.g., dist/main/main in dev builds - const candidates = [ - join(dir, 'appConfig.json'), // dist/main/main/appConfig.json - join(dir, '..', 'appConfig.json'), // dist/main/appConfig.json (CI injection path) - ]; - for (const p of candidates) { - if (existsSync(p)) { - const raw = readFileSync(p, 'utf8'); - return JSON.parse(raw); - } - } - } catch { - // fall through - } - return {}; -} -appConfig = loadAppConfig(); +// Build-time defaults from appConfig.json (bundled by electron-vite) +const appConfig: { posthogHost?: string; posthogKey?: string } = rawAppConfig; type TelemetryEvent = // App lifecycle @@ -138,19 +119,32 @@ interface InitOptions { installSource?: string; } +// 
--------------------------------------------------------------------------- +// Module-level state +// --------------------------------------------------------------------------- + let enabled = true; let apiKey: string | undefined; let host: string | undefined; let instanceId: string | undefined; let installSource: string | undefined; let userOptOut: boolean | undefined; -let onboardingSeen: boolean = false; +let onboardingSeen = false; let sessionStartMs: number = Date.now(); let lastActiveDate: string | undefined; let cachedGithubUsername: string | null = null; const libName = 'emdash'; +type TelemetryKVSchema = { + instanceId: string; + enabled: string; + onboardingSeen: string; + lastActiveDate: string; +}; + +const telemetryKV = new KV('telemetry'); + function getVersionSafe(): string { try { return app.getVersion(); @@ -159,59 +153,6 @@ function getVersionSafe(): string { } } -function getInstanceIdPath(): string { - const dir = app.getPath('userData'); - return join(dir, 'telemetry.json'); -} - -function loadOrCreateState(): { - instanceId: string; - enabledOverride?: boolean; - onboardingSeen?: boolean; - lastActiveDate?: string; -} { - try { - const file = getInstanceIdPath(); - if (existsSync(file)) { - const raw = readFileSync(file, 'utf8'); - const parsed = JSON.parse(raw); - if (parsed && typeof parsed.instanceId === 'string' && parsed.instanceId.length > 0) { - const enabledOverride = - typeof parsed.enabled === 'boolean' ? (parsed.enabled as boolean) : undefined; - const onboardingSeen = - typeof parsed.onboardingSeen === 'boolean' ? (parsed.onboardingSeen as boolean) : false; - const lastActiveDate = - typeof parsed.lastActiveDate === 'string' ? 
(parsed.lastActiveDate as string) : undefined; - return { - instanceId: parsed.instanceId as string, - enabledOverride, - onboardingSeen, - lastActiveDate, - }; - } - } - } catch { - // fall through to create - } - const newId = cryptoRandomId(); - try { - writeFileSync(getInstanceIdPath(), JSON.stringify({ instanceId: newId }, null, 2), 'utf8'); - } catch { - // ignore - } - return { instanceId: newId }; -} - -function cryptoRandomId(): string { - try { - const { randomUUID } = require('crypto'); - return randomUUID(); - } catch { - // Very old Node fallback; not expected in Electron 28+ - return Math.random().toString(36).slice(2) + Date.now().toString(36); - } -} - function isEnabled(): boolean { return ( enabled === true && @@ -240,10 +181,9 @@ function getBaseProps() { * Sanitize event properties to prevent PII leakage. * Simple allowlist approach: only allow safe property names and primitive types. */ -function sanitizeEventAndProps(event: TelemetryEvent, props: Record | undefined) { - const sanitized: Record = {}; +function sanitizeEventAndProps(_event: TelemetryEvent, props: Record | undefined) { + const sanitized: Record = {}; - // Simple allowlist of safe properties const allowedProps = new Set([ 'provider', 'source', @@ -279,16 +219,12 @@ function sanitizeEventAndProps(event: TelemetryEvent, props: Record if (props) { for (const [key, value] of Object.entries(props)) { - // Only process allowed property names if (!allowedProps.has(key)) continue; - // Only allow primitive types if (typeof value === 'string') { - // Trim and limit string length to prevent abuse sanitized[key] = value.trim().slice(0, 100); } else if (typeof value === 'number') { - // Clamp numbers to reasonable range - sanitized[key] = Math.max(0, Math.min(value, 1000000)); + sanitized[key] = Math.max(0, Math.min(value, 1_000_000)); } else if (typeof value === 'boolean') { sanitized[key] = value; } @@ -298,32 +234,28 @@ function sanitizeEventAndProps(event: TelemetryEvent, props: Record 
return sanitized; } -/** - * Fetch the current GitHub username if the user is authenticated. - * Returns null if not authenticated or if there's an error. - */ -async function getGithubUsername(): Promise { - try { - // Lazy import to avoid circular dependencies - const { githubService } = require('./services/GitHubService'); - const user = await githubService.getCurrentUser(); - return user?.login || null; - } catch { - // Silently fail if GitHub is not authenticated or there's an error - return null; +function normalizeHost(h: string | undefined): string | undefined { + if (!h) return undefined; + let s = String(h).trim(); + if (!/^https?:\/\//i.test(s)) { + s = 'https://' + s; } + return s.replace(/\/+$/, ''); } +// --------------------------------------------------------------------------- +// PostHog transport +// --------------------------------------------------------------------------- + async function posthogCapture( event: TelemetryEvent, - properties?: Record + properties?: Record ): Promise { if (!isEnabled()) return; try { - // Use global fetch if available (Node 18+/Electron 28+) - const f: any = (globalThis as any).fetch; + const f = (globalThis as { fetch?: typeof fetch }).fetch; if (!f) return; - const u = (host || '').replace(/\/$/, '') + '/capture/'; + const u = (host ?? '').replace(/\/$/, '') + '/capture/'; const body = { api_key: apiKey, event, @@ -343,16 +275,12 @@ async function posthogCapture( } } -/** - * PostHog identify call to associate the instanceId with GitHub username. - * This creates a user profile in PostHog. - */ async function posthogIdentify(username: string): Promise { if (!isEnabled() || !username) return; try { - const f: any = (globalThis as any).fetch; + const f = (globalThis as { fetch?: typeof fetch }).fetch; if (!f) return; - const u = (host || '').replace(/\/$/, '') + '/capture/'; + const u = (host ?? 
'').replace(/\/$/, '') + '/capture/'; const body = { api_key: apiKey, event: '$identify', @@ -374,7 +302,33 @@ async function posthogIdentify(username: string): Promise { } } -export async function init(options?: InitOptions) { +// --------------------------------------------------------------------------- +// Daily active user +// --------------------------------------------------------------------------- + +async function checkDailyActiveUser(): Promise { + if (!isEnabled()) return; + try { + const today = new Date().toISOString().split('T')[0]!; + if (lastActiveDate === today) return; + + void posthogCapture('daily_active_user', { + date: today, + timezone: Intl.DateTimeFormat().resolvedOptions().timeZone || 'unknown', + }); + + lastActiveDate = today; + telemetryKV.set('lastActiveDate', today); + } catch { + // Never let telemetry errors crash the app + } +} + +// --------------------------------------------------------------------------- +// Public API +// --------------------------------------------------------------------------- + +export async function init(options?: InitOptions): Promise { const env = process.env; const enabledEnv = (env.TELEMETRY_ENABLED ?? 'true').toString().toLowerCase(); enabled = enabledEnv !== 'false' && enabledEnv !== '0' && enabledEnv !== 'no'; @@ -384,43 +338,49 @@ export async function init(options?: InitOptions) { env.POSTHOG_HOST || (appConfig?.posthogHost as string | undefined) || undefined ); installSource = options?.installSource || env.INSTALL_SOURCE || undefined; - - const state = loadOrCreateState(); - instanceId = state.instanceId; sessionStartMs = Date.now(); - // If enabledOverride is explicitly false, user opted out; otherwise leave undefined - userOptOut = - typeof state.enabledOverride === 'boolean' ? 
state.enabledOverride === false : undefined; - onboardingSeen = state.onboardingSeen === true; - lastActiveDate = state.lastActiveDate; - - // Fetch GitHub username if available and cache for all future events - cachedGithubUsername = await getGithubUsername(); - - // If we have a GitHub username, identify the user in PostHog - if (cachedGithubUsername) { - void posthogIdentify(cachedGithubUsername); + + // Load persisted state from SQLite KV (all reads are non-blocking best-effort) + let storedInstanceId: string | null = null; + let storedEnabled: string | null = null; + let storedOnboarding: string | null = null; + let storedActiveDate: string | null = null; + try { + [storedInstanceId, storedEnabled, storedOnboarding, storedActiveDate] = await Promise.all([ + telemetryKV.get('instanceId'), + telemetryKV.get('enabled'), + telemetryKV.get('onboardingSeen'), + telemetryKV.get('lastActiveDate'), + ]); + } catch { + // KV unavailable during startup (e.g. DB migration not yet applied) — use in-memory defaults } - // Fire lifecycle start (github_username is now included via getBaseProps) - void posthogCapture('app_started'); + instanceId = storedInstanceId ?? (randomUUID().toString() as string); + if (!storedInstanceId) { + telemetryKV.set('instanceId', instanceId); + } - // Check for daily active user (fires event if it's a new day) - checkDailyActiveUser(); + userOptOut = storedEnabled === 'false' ? true : undefined; + onboardingSeen = storedOnboarding === 'true'; + lastActiveDate = storedActiveDate ?? undefined; + + void posthogCapture('app_started'); + void checkDailyActiveUser(); } /** - * Refresh the cached GitHub username. Call this when the user connects - * their GitHub account so all subsequent events include the username. + * Associate the current anonymous session with a known identity (e.g. GitHub + * username). Call this whenever authentication succeeds — no dynamic imports + * or polling needed. 
*/ -export async function refreshGithubUsername(): Promise { - cachedGithubUsername = await getGithubUsername(); - if (cachedGithubUsername) { - void posthogIdentify(cachedGithubUsername); - } +export function identify(username: string): void { + if (!username) return; + cachedGithubUsername = username; + void posthogIdentify(username); } -export function capture(event: TelemetryEvent, properties?: Record) { +export function capture(event: TelemetryEvent, properties?: Record): void { if (event === 'app_session') { const dur = Math.max(0, Date.now() - (sessionStartMs || Date.now())); void posthogCapture(event, { session_duration_ms: dur }); @@ -431,39 +391,25 @@ export function capture(event: TelemetryEvent, properties?: Record) /** * Capture an exception for PostHog error tracking. - * This sends a properly formatted $exception event as required by PostHog. - * - * @param error - The error object or error message - * @param additionalProperties - Additional context properties */ export function captureException( error: Error | unknown, - additionalProperties?: Record -) { + additionalProperties?: Record +): void { if (!isEnabled()) return; - // Build error object const errorObj = error instanceof Error ? 
error : new Error(String(error)); - const errorMessage = errorObj.message || 'Unknown error'; - const errorStack = errorObj.stack || ''; - // Build PostHog $exception event properties - const properties: Record = { - // Required fields for PostHog error tracking - $exception_message: errorMessage, + void posthogCapture('$exception' as TelemetryEvent, { + $exception_message: errorObj.message || 'Unknown error', $exception_type: errorObj.name || 'Error', - $exception_stack_trace_raw: errorStack, - - // Merge additional properties + $exception_stack_trace_raw: errorObj.stack || '', ...additionalProperties, - }; - - // Send as $exception event (required for PostHog error tracking) - void posthogCapture('$exception' as any, properties); + }); } -export function shutdown() { - // No-op for now (no batching). Left for future posthog-node integration. +export function shutdown(): void { + // No-op — left for future posthog-node batching integration. } export function isTelemetryEnabled(): boolean { @@ -480,120 +426,16 @@ export function getTelemetryStatus() { }; } -export function setTelemetryEnabledViaUser(enabledFlag: boolean) { +export function setTelemetryEnabledViaUser(enabledFlag: boolean): void { userOptOut = !enabledFlag; - // Persist alongside instanceId - try { - const file = getInstanceIdPath(); - let state: any = {}; - if (existsSync(file)) { - try { - state = JSON.parse(readFileSync(file, 'utf8')) || {}; - } catch { - state = {}; - } - } - state.instanceId = instanceId || state.instanceId || cryptoRandomId(); - state.enabled = enabledFlag; // store explicit preference - state.updatedAt = new Date().toISOString(); - writeFileSync(file, JSON.stringify(state, null, 2), 'utf8'); - } catch { - // ignore - } + telemetryKV.set('enabled', String(enabledFlag)); } -function persistState(state: { - instanceId: string; - enabledOverride?: boolean; - onboardingSeen?: boolean; - lastActiveDate?: string; -}) { - try { - const existing = existsSync(getInstanceIdPath()) - ? 
JSON.parse(readFileSync(getInstanceIdPath(), 'utf8')) - : {}; - const merged = { - ...existing, - instanceId: state.instanceId, - enabled: - typeof state.enabledOverride === 'boolean' ? state.enabledOverride : existing.enabled, - onboardingSeen: - typeof state.onboardingSeen === 'boolean' ? state.onboardingSeen : existing.onboardingSeen, - lastActiveDate: - typeof state.lastActiveDate === 'string' ? state.lastActiveDate : existing.lastActiveDate, - createdAt: existing.createdAt || new Date().toISOString(), - updatedAt: new Date().toISOString(), - }; - writeFileSync(getInstanceIdPath(), JSON.stringify(merged, null, 2), 'utf8'); - } catch { - // ignore - } -} - -function normalizeHost(h: string | undefined): string | undefined { - if (!h) return undefined; - let s = String(h).trim(); - if (!/^https?:\/\//i.test(s)) { - s = 'https://' + s; - } - return s.replace(/\/+$/, ''); -} - -/** - * Check if this is a new day of activity and fire daily_active_user event if so. - * This ensures we accurately track DAU even when the app stays open for extended periods. - */ -async function checkDailyActiveUser(): Promise { - // Skip if telemetry is disabled - if (!isEnabled()) return; - - try { - const today = new Date().toISOString().split('T')[0]; // YYYY-MM-DD format - - // If we haven't tracked a date yet or it's a new day, fire the event - if (!lastActiveDate || lastActiveDate !== today) { - // Refresh cached GitHub username (user may have connected since init) - cachedGithubUsername = await getGithubUsername(); - - // Fire the daily active user event (github_username included via getBaseProps) - void posthogCapture('daily_active_user', { - date: today, - timezone: Intl.DateTimeFormat().resolvedOptions().timeZone || 'unknown', - }); - - // Update the last active date in memory - lastActiveDate = today; - - // Persist the new date to storage - persistState({ - instanceId: instanceId || cryptoRandomId(), - enabledOverride: userOptOut === undefined ? 
undefined : !userOptOut, - onboardingSeen, - lastActiveDate: today, - }); - } - } catch (error) { - // Never let telemetry errors crash the app - // Optionally log for debugging: console.error('DAU tracking error:', error); - } +export function setOnboardingSeen(flag: boolean): void { + onboardingSeen = Boolean(flag); + telemetryKV.set('onboardingSeen', String(onboardingSeen)); } -/** - * Export for use in window focus events - */ export async function checkAndReportDailyActiveUser(): Promise { return checkDailyActiveUser(); } - -export function setOnboardingSeen(flag: boolean) { - onboardingSeen = Boolean(flag); - try { - persistState({ - instanceId: instanceId || cryptoRandomId(), - onboardingSeen, - enabledOverride: userOptOut === undefined ? undefined : !userOptOut, - }); - } catch { - // ignore - } -} diff --git a/src/main/lib/telemetryHelpers.ts b/src/main/lib/telemetryHelpers.ts deleted file mode 100644 index d641eb746..000000000 --- a/src/main/lib/telemetryHelpers.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { capture } from '../telemetry'; - -/** - * Capture telemetry event with timing duration. - * Automatically clamps duration to reasonable bounds and includes it in properties. 
- */ -export async function captureWithTiming( - event: string, - operation: () => Promise | T, - additionalProps?: Record -): Promise { - const start = Date.now(); - try { - const result = await operation(); - const duration = Date.now() - start; - void capture(event as any, { - ...additionalProps, - duration_ms: duration, - }); - return result; - } catch (error) { - const duration = Date.now() - start; - void capture(event as any, { - ...additionalProps, - duration_ms: duration, - }); - throw error; - } -} diff --git a/src/main/main.ts b/src/main/main.ts deleted file mode 100644 index acb34d5c3..000000000 --- a/src/main/main.ts +++ /dev/null @@ -1,362 +0,0 @@ -// Load .env FIRST before any imports that might use it -// Use explicit path to ensure .env is loaded from project root -try { - const path = require('path'); - const envPath = path.join(__dirname, '..', '..', '.env'); - require('dotenv').config({ path: envPath }); -} catch (error) { - // dotenv is optional - no error if .env doesn't exist -} - -import { app, BrowserWindow, dialog } from 'electron'; -import { initializeShellEnvironment } from './utils/shellEnv'; -// Ensure PATH matches the user's shell when launched from Finder (macOS) -// so Homebrew/NPM global binaries like `gh` and `codex` are found. 
-try { - // Lazy import to avoid bundler complaints if not present on other platforms - // eslint-disable-next-line @typescript-eslint/no-var-requires - const fixPath = require('fix-path'); - if (typeof fixPath === 'function') fixPath(); -} catch { - // no-op if fix-path isn't available at runtime -} - -if (process.platform === 'darwin') { - const extras = ['/opt/homebrew/bin', '/usr/local/bin', '/opt/homebrew/sbin', '/usr/local/sbin']; - const cur = process.env.PATH || ''; - const parts = cur.split(':').filter(Boolean); - for (const p of extras) { - if (!parts.includes(p)) parts.unshift(p); - } - process.env.PATH = parts.join(':'); - - // As a last resort, ask the user's login shell for PATH and merge it in. - try { - const { execSync } = require('child_process'); - const shell = process.env.SHELL || '/bin/zsh'; - const loginPath = execSync(`${shell} -ilc 'echo -n $PATH'`, { encoding: 'utf8' }); - if (loginPath) { - // Shell noise (nvm messages, ASCII art, motd) gets captured in stdout. - // Split by both : and \n so noise fused with the first real path entry - // (e.g. "nvm output\n/usr/local/bin") is correctly separated. 
- const allEntries = (loginPath + ':' + process.env.PATH).split(/[:\n]/).filter(Boolean); - const validEntries = allEntries.filter((p: string) => p.startsWith('/')); - const merged = new Set(validEntries); - process.env.PATH = Array.from(merged).join(':'); - } - } catch {} -} - -if (process.platform === 'linux') { - try { - const os = require('os'); - const path = require('path'); - const homeDir = os.homedir(); - const extras = [ - path.join(homeDir, '.nvm/versions/node', process.version, 'bin'), - path.join(homeDir, '.npm-global/bin'), - path.join(homeDir, '.local/bin'), - '/usr/local/bin', - ]; - const cur = process.env.PATH || ''; - const parts = cur.split(':').filter(Boolean); - for (const p of extras) { - if (!parts.includes(p)) parts.unshift(p); - } - process.env.PATH = parts.join(':'); - - try { - const { execSync } = require('child_process'); - const shell = process.env.SHELL || '/bin/bash'; - const loginPath = execSync(`${shell} -ilc 'echo -n $PATH'`, { - encoding: 'utf8', - }); - if (loginPath) { - // Shell noise (nvm messages, ASCII art, motd) gets captured in stdout. - // Split by both : and \n so noise fused with the first real path entry - // (e.g. "nvm output\n/usr/local/bin") is correctly separated. - const allEntries = (loginPath + ':' + process.env.PATH).split(/[:\n]/).filter(Boolean); - const validEntries = allEntries.filter((p: string) => p.startsWith('/')); - const merged = new Set(validEntries); - process.env.PATH = Array.from(merged).join(':'); - } - } catch {} - } catch {} -} - -// Enable automatic Wayland/X11 detection on Linux. -// Uses native Wayland when available, falls back to X11 (XWayland) otherwise. -// Must be called before app.whenReady(). 
-if (process.platform === 'linux') { - app.commandLine.appendSwitch('ozone-platform-hint', 'auto'); -} - -if (process.platform === 'win32') { - // Ensure npm global binaries are in PATH for Windows - const npmPath = require('path').join(process.env.APPDATA || '', 'npm'); - const cur = process.env.PATH || ''; - const parts = cur.split(';').filter(Boolean); - if (npmPath && !parts.includes(npmPath)) { - parts.unshift(npmPath); - process.env.PATH = parts.join(';'); - } -} - -// Detect SSH_AUTH_SOCK from user's shell environment -// This is necessary because GUI-launched apps don't inherit shell env vars -try { - initializeShellEnvironment(); -} catch (error) { - // Silent fail - SSH agent auth will fail if user tries to use it - console.log('[main] Failed to initialize shell environment:', error); -} - -import { createMainWindow } from './app/window'; -import { registerAppLifecycle } from './app/lifecycle'; -import { setupApplicationMenu } from './app/menu'; -import { registerAllIpc } from './ipc'; -import { databaseService, DatabaseSchemaMismatchError } from './services/DatabaseService'; -import { connectionsService } from './services/ConnectionsService'; -import { autoUpdateService } from './services/AutoUpdateService'; -import { worktreePoolService } from './services/WorktreePoolService'; -import { sshService } from './services/ssh/SshService'; -import { taskLifecycleService } from './services/TaskLifecycleService'; -import { agentEventService } from './services/AgentEventService'; -import * as telemetry from './telemetry'; -import { errorTracking } from './errorTracking'; -import { join } from 'path'; -import { rmSync } from 'node:fs'; - -// Set app name for macOS dock and menu bar -app.setName('Emdash'); - -// Prevent multiple instances in production (e.g. user clicks icon while auto-updater is restarting). -// Skip in dev so dev server can run alongside the packaged app. 
-const isDev = !app.isPackaged || process.argv.includes('--dev'); -if (!isDev) { - const gotTheLock = app.requestSingleInstanceLock(); - if (!gotTheLock) { - app.quit(); - // Must also exit the process; app.quit() alone still runs the rest of this module - // before the event loop drains, which would register unnecessary listeners and timers. - process.exit(0); - } -} - -app.on('second-instance', () => { - const win = BrowserWindow.getAllWindows()[0]; - if (win) { - if (win.isMinimized()) win.restore(); - win.focus(); - } -}); - -// Set dock icon on macOS in development mode -if (process.platform === 'darwin' && !app.isPackaged) { - const iconPath = join( - __dirname, - '..', - '..', - '..', - 'src', - 'assets', - 'images', - 'emdash', - 'icon-dock.png' - ); - try { - app.dock.setIcon(iconPath); - } catch (err) { - console.warn('Failed to set dock icon:', err); - } -} - -// App bootstrap -app.whenReady().then(async () => { - const resetLocalDatabase = async (dbPath: string) => { - await databaseService.close().catch(() => {}); - for (const filePath of [dbPath, `${dbPath}-wal`, `${dbPath}-shm`]) { - rmSync(filePath, { force: true }); - } - }; - - // Initialize database - let dbInitOk = false; - let dbInitErrorType: string | undefined; - try { - await databaseService.initialize(); - dbInitOk = true; - } catch (error) { - const err = error as unknown; - const asObj = typeof err === 'object' && err !== null ? (err as Record) : null; - const code = asObj && typeof asObj.code === 'string' ? asObj.code : undefined; - const name = asObj && typeof asObj.name === 'string' ? 
asObj.name : undefined; - dbInitErrorType = code || name || 'unknown'; - console.error('Failed to initialize database:', error); - - if (err instanceof DatabaseSchemaMismatchError) { - const missing = err.missingInvariants.map((item) => `• ${item}`).join('\n'); - const result = await dialog.showMessageBox({ - type: 'error', - title: 'Local Data Reset Required', - message: 'Emdash cannot start because your local database schema is incompatible.', - detail: [ - 'Required schema entries are missing:', - missing || '• unknown invariant', - '', - `Database path: ${err.dbPath}`, - '', - 'Choose "Reset Local Data and Relaunch" to delete local Emdash data and start fresh.', - 'This only removes local app data (projects, tasks, conversations). Repository files are not deleted.', - ].join('\n'), - buttons: ['Reset Local Data and Relaunch', 'Quit'], - defaultId: 0, - cancelId: 1, - noLink: true, - }); - - if (result.response === 0) { - try { - await resetLocalDatabase(err.dbPath); - app.relaunch(); - app.exit(0); - return; - } catch (resetError) { - console.error('Failed to reset local database:', resetError); - dialog.showErrorBox( - 'Database Reset Failed', - `Unable to delete local database at:\n${err.dbPath}\n\n${resetError instanceof Error ? resetError.message : String(resetError)}` - ); - } - } - - app.quit(); - return; - } - - if (err instanceof Error && err.message.includes('migrations folder')) { - dialog.showErrorBox( - 'Database Initialization Failed', - 'Unable to initialize the application database.\n\n' + - 'This may be due to:\n' + - '• Running from Downloads or DMG (move to Applications)\n' + - '• Homebrew installation issues (try direct download)\n' + - '• Incomplete installation\n\n' + - 'Please try:\n' + - '1. Move Emdash to Applications folder\n' + - '2. Download directly from GitHub releases\n' + - '3. 
Check console for detailed error information' - ); - } - } - - // Initialize telemetry (privacy-first, with optional GitHub username) - await telemetry.init({ installSource: app.isPackaged ? 'dmg' : 'dev' }); - - // Initialize error tracking - await errorTracking.init(); - - try { - const summary = databaseService.getLastMigrationSummary(); - const toBucket = (n: number) => (n === 0 ? '0' : n === 1 ? '1' : n <= 3 ? '2-3' : '>3'); - telemetry.capture('db_setup', { - outcome: dbInitOk ? 'success' : 'failure', - ...(dbInitOk - ? { - applied_migrations: summary?.appliedCount ?? 0, - applied_migrations_bucket: toBucket(summary?.appliedCount ?? 0), - recovered: summary?.recovered === true, - } - : { - error_type: dbInitErrorType ?? 'unknown', - }), - }); - } catch { - // telemetry must never crash the app - } - - // Best-effort: capture a coarse snapshot of project/task counts (no names/paths) - let localProjectPathsForReserveCleanup: string[] = []; - try { - const [projects, tasks] = await Promise.all([ - databaseService.getProjects(), - databaseService.getTasks(), - ]); - localProjectPathsForReserveCleanup = projects - .filter((project) => !project.isRemote) - .map((project) => project.path); - const projectCount = projects.length; - const taskCount = tasks.length; - const toBucket = (n: number) => - n === 0 ? '0' : n <= 2 ? '1-2' : n <= 5 ? '3-5' : n <= 10 ? 
'6-10' : '>10'; - telemetry.capture('task_snapshot', { - project_count: projectCount, - project_count_bucket: toBucket(projectCount), - task_count: taskCount, - task_count_bucket: toBucket(taskCount), - } as any); - } catch { - // ignore errors — telemetry is best-effort only - } - - // Start agent event HTTP server (receives hook callbacks from CLI agents) - try { - await agentEventService.start(); - } catch (error) { - console.warn('Failed to start agent event service:', error); - } - - // Register IPC handlers - registerAllIpc(); - - // Clean up any orphaned reserve worktrees from previous sessions - worktreePoolService.cleanupOrphanedReserves(localProjectPathsForReserveCleanup).catch((error) => { - console.warn('Failed to cleanup orphaned reserves:', error); - }); - - // Warm provider installation cache - try { - await connectionsService.initProviderStatusCache(); - } catch { - // best-effort; ignore failures - } - - // Set up native application menu (Settings, Edit, View, Window) - setupApplicationMenu(); - - // Create main window - createMainWindow(); - - // Initialize auto-update service after window is created - try { - await autoUpdateService.initialize(); - } catch (error) { - if (app.isPackaged) { - console.error('Failed to initialize auto-update service:', error); - } - } -}); - -// App lifecycle handlers -registerAppLifecycle(); - -// Graceful shutdown telemetry event -app.on('before-quit', () => { - // Session summary with duration (no identifiers) - telemetry.capture('app_session'); - telemetry.capture('app_closed'); - telemetry.shutdown(); - - // Cleanup auto-update service - autoUpdateService.shutdown(); - // Stop agent event HTTP server - agentEventService.stop(); - // Stop any lifecycle run scripts so they do not outlive the app process. 
- taskLifecycleService.shutdown(); - - // Cleanup reserve worktrees (fire and forget - don't block quit) - worktreePoolService.cleanup().catch(() => {}); - - // Disconnect all SSH connections to avoid orphaned sessions on remote hosts - sshService.disconnectAll().catch(() => {}); -}); diff --git a/src/main/preload.ts b/src/main/preload.ts deleted file mode 100644 index 1dfc3f109..000000000 --- a/src/main/preload.ts +++ /dev/null @@ -1,1118 +0,0 @@ -import { contextBridge, ipcRenderer } from 'electron'; -import type { TerminalSnapshotPayload } from './types/terminalSnapshot'; -import type { OpenInAppId } from '../shared/openInApps'; -import type { AgentEvent } from '../shared/agentEvents'; - -// Keep preload self-contained: sandboxed preload cannot reliably require local runtime modules. -const LIFECYCLE_EVENT_CHANNEL = 'lifecycle:event'; -const GIT_STATUS_CHANGED_CHANNEL = 'git:status-changed'; - -const gitStatusChangedListeners = new Set<(data: { taskPath: string; error?: string }) => void>(); -let gitStatusBridgeAttached = false; - -function attachGitStatusBridgeOnce() { - if (gitStatusBridgeAttached) return; - gitStatusBridgeAttached = true; - ipcRenderer.on( - GIT_STATUS_CHANGED_CHANNEL, - (_: Electron.IpcRendererEvent, data: { taskPath: string; error?: string }) => { - for (const listener of gitStatusChangedListeners) { - try { - listener(data); - } catch {} - } - } - ); -} - -// Expose protected methods that allow the renderer process to use -// the ipcRenderer without exposing the entire object -contextBridge.exposeInMainWorld('electronAPI', { - // Generic invoke for the typed RPC client (createRPCClient) - invoke: (channel: string, ...args: unknown[]) => ipcRenderer.invoke(channel, ...args), - - // App info - getAppVersion: () => ipcRenderer.invoke('app:getAppVersion'), - getElectronVersion: () => ipcRenderer.invoke('app:getElectronVersion'), - getPlatform: () => ipcRenderer.invoke('app:getPlatform'), - listInstalledFonts: (args?: { refresh?: boolean }) => 
- ipcRenderer.invoke('app:listInstalledFonts', args), - undo: () => ipcRenderer.invoke('app:undo'), - redo: () => ipcRenderer.invoke('app:redo'), - // Updater - checkForUpdates: () => ipcRenderer.invoke('update:check'), - downloadUpdate: () => ipcRenderer.invoke('update:download'), - quitAndInstallUpdate: () => ipcRenderer.invoke('update:quit-and-install'), - openLatestDownload: () => ipcRenderer.invoke('update:open-latest'), - // Enhanced update methods - getUpdateState: () => ipcRenderer.invoke('update:get-state'), - getUpdateSettings: () => ipcRenderer.invoke('update:get-settings'), - updateUpdateSettings: (settings: any) => ipcRenderer.invoke('update:update-settings', settings), - getReleaseNotes: () => ipcRenderer.invoke('update:get-release-notes'), - checkForUpdatesNow: () => ipcRenderer.invoke('update:check-now'), - onUpdateEvent: (listener: (data: { type: string; payload?: any }) => void) => { - const pairs: Array<[string, string]> = [ - ['update:checking', 'checking'], - ['update:available', 'available'], - ['update:not-available', 'not-available'], - ['update:error', 'error'], - ['update:downloading', 'downloading'], - ['update:download-progress', 'download-progress'], - ['update:downloaded', 'downloaded'], - ['update:installing', 'installing'], - ]; - const handlers: Array<() => void> = []; - for (const [channel, type] of pairs) { - const wrapped = (_: Electron.IpcRendererEvent, payload: any) => listener({ type, payload }); - ipcRenderer.on(channel, wrapped); - handlers.push(() => ipcRenderer.removeListener(channel, wrapped)); - } - return () => handlers.forEach((off) => off()); - }, - - // Open a path in a specific app - openIn: (args: { app: OpenInAppId; path: string }) => ipcRenderer.invoke('app:openIn', args), - - // Check which apps are installed - checkInstalledApps: () => - ipcRenderer.invoke('app:checkInstalledApps') as Promise>, - - // PTY management - ptyStart: (opts: { - id: string; - cwd?: string; - remote?: { connectionId: string }; - 
shell?: string; - env?: Record; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - }) => ipcRenderer.invoke('pty:start', opts), - ptyInput: (args: { id: string; data: string }) => ipcRenderer.send('pty:input', args), - ptyResize: (args: { id: string; cols: number; rows: number }) => - ipcRenderer.send('pty:resize', args), - ptyKill: (id: string) => ipcRenderer.send('pty:kill', { id }), - ptyKillTmux: (id: string) => - ipcRenderer.invoke('pty:killTmux', { id }) as Promise<{ ok: boolean; error?: string }>, - - // Direct PTY spawn (no shell wrapper, bypasses shell config loading) - ptyStartDirect: (opts: { - id: string; - providerId: string; - cwd: string; - remote?: { connectionId: string }; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - clickTime?: number; - env?: Record; - resume?: boolean; - }) => ipcRenderer.invoke('pty:startDirect', opts), - - ptyScpToRemote: (args: { connectionId: string; localPaths: string[] }) => - ipcRenderer.invoke('pty:scp-to-remote', args), - - onPtyData: (id: string, listener: (data: string) => void) => { - const channel = `pty:data:${id}`; - const wrapped = (_: Electron.IpcRendererEvent, data: string) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - ptyGetSnapshot: (args: { id: string }) => ipcRenderer.invoke('pty:snapshot:get', args), - ptySaveSnapshot: (args: { id: string; payload: TerminalSnapshotPayload }) => - ipcRenderer.invoke('pty:snapshot:save', args), - ptyClearSnapshot: (args: { id: string }) => ipcRenderer.invoke('pty:snapshot:clear', args), - onPtyExit: (id: string, listener: (info: { exitCode: number; signal?: number }) => void) => { - const channel = `pty:exit:${id}`; - const wrapped = (_: Electron.IpcRendererEvent, info: { exitCode: number; signal?: number }) => - listener(info); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, 
wrapped); - }, - onPtyStarted: (listener: (data: { id: string }) => void) => { - const channel = 'pty:started'; - const wrapped = (_: Electron.IpcRendererEvent, data: { id: string }) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onAgentEvent: (listener: (event: AgentEvent, meta: { appFocused: boolean }) => void) => { - const channel = 'agent:event'; - const wrapped = ( - _: Electron.IpcRendererEvent, - data: AgentEvent, - meta: { appFocused: boolean } - ) => listener(data, meta); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onNotificationFocusTask: (listener: (taskId: string) => void) => { - const channel = 'notification:focus-task'; - const wrapped = (_: Electron.IpcRendererEvent, taskId: string) => listener(taskId); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - terminalGetTheme: () => ipcRenderer.invoke('terminal:getTheme'), - - // Menu events (main → renderer) - onMenuOpenSettings: (listener: () => void) => { - const channel = 'menu:open-settings'; - const wrapped = () => listener(); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onMenuCheckForUpdates: (listener: () => void) => { - const channel = 'menu:check-for-updates'; - const wrapped = () => listener(); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onMenuUndo: (listener: () => void) => { - const channel = 'menu:undo'; - const wrapped = () => listener(); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onMenuRedo: (listener: () => void) => { - const channel = 'menu:redo'; - const wrapped = () => listener(); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - onMenuCloseTab: (listener: () => void) => 
{ - const channel = 'menu:close-tab'; - const wrapped = () => listener(); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - - // Worktree management - worktreeCreate: (args: { - projectPath: string; - taskName: string; - projectId: string; - baseRef?: string; - }) => ipcRenderer.invoke('worktree:create', args), - worktreeList: (args: { projectPath: string }) => ipcRenderer.invoke('worktree:list', args), - worktreeRemove: (args: { - projectPath: string; - worktreeId: string; - worktreePath?: string; - branch?: string; - taskName?: string; - }) => ipcRenderer.invoke('worktree:remove', args), - worktreeStatus: (args: { worktreePath: string }) => ipcRenderer.invoke('worktree:status', args), - worktreeMerge: (args: { projectPath: string; worktreeId: string }) => - ipcRenderer.invoke('worktree:merge', args), - worktreeGet: (args: { worktreeId: string }) => ipcRenderer.invoke('worktree:get', args), - worktreeGetAll: () => ipcRenderer.invoke('worktree:getAll'), - - // Worktree pool (reserve) management for instant task creation - worktreeEnsureReserve: (args: { projectId: string; projectPath: string; baseRef?: string }) => - ipcRenderer.invoke('worktree:ensureReserve', args), - worktreeHasReserve: (args: { projectId: string }) => - ipcRenderer.invoke('worktree:hasReserve', args), - worktreeClaimReserve: (args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - }) => ipcRenderer.invoke('worktree:claimReserve', args), - worktreeClaimReserveAndSaveTask: (args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - task: { - projectId: string; - name: string; - status: 'active' | 'idle' | 'running'; - agentId?: string | null; - metadata?: any; - useWorktree?: boolean; - }; - }) => ipcRenderer.invoke('worktree:claimReserveAndSaveTask', args), - worktreeRemoveReserve: (args: { projectId: string; projectPath?: string; isRemote?: boolean }) => - 
ipcRenderer.invoke('worktree:removeReserve', args), - - // Lifecycle scripts - lifecycleGetScript: (args: { projectPath: string; phase: 'setup' | 'run' | 'teardown' }) => - ipcRenderer.invoke('lifecycle:getScript', args), - lifecycleSetup: (args: { taskId: string; taskPath: string; projectPath: string }) => - ipcRenderer.invoke('lifecycle:setup', args), - lifecycleRunStart: (args: { taskId: string; taskPath: string; projectPath: string }) => - ipcRenderer.invoke('lifecycle:run:start', args), - lifecycleRunStop: (args: { taskId: string }) => ipcRenderer.invoke('lifecycle:run:stop', args), - lifecycleTeardown: (args: { taskId: string; taskPath: string; projectPath: string }) => - ipcRenderer.invoke('lifecycle:teardown', args), - lifecycleGetState: (args: { taskId: string }) => ipcRenderer.invoke('lifecycle:getState', args), - lifecycleClearTask: (args: { taskId: string }) => ipcRenderer.invoke('lifecycle:clearTask', args), - onLifecycleEvent: (listener: (data: any) => void) => { - const wrapped = (_: Electron.IpcRendererEvent, data: any) => listener(data); - ipcRenderer.on(LIFECYCLE_EVENT_CHANNEL, wrapped); - return () => ipcRenderer.removeListener(LIFECYCLE_EVENT_CHANNEL, wrapped); - }, - - // Filesystem helpers - fsList: ( - root: string, - opts?: { - includeDirs?: boolean; - maxEntries?: number; - timeBudgetMs?: number; - connectionId?: string; - remotePath?: string; - recursive?: boolean; - } - ) => ipcRenderer.invoke('fs:list', { root, ...(opts || {}) }), - fsRead: ( - root: string, - relPath: string, - maxBytes?: number, - remote?: { connectionId: string; remotePath: string } - ) => ipcRenderer.invoke('fs:read', { root, relPath, maxBytes, ...remote }), - fsReadImage: ( - root: string, - relPath: string, - remote?: { connectionId: string; remotePath: string } - ) => ipcRenderer.invoke('fs:read-image', { root, relPath, ...remote }), - fsSearchContent: ( - root: string, - query: string, - options?: { - caseSensitive?: boolean; - maxResults?: number; - 
fileExtensions?: string[]; - }, - remote?: { connectionId: string; remotePath: string } - ) => ipcRenderer.invoke('fs:searchContent', { root, query, options, ...remote }), - fsWriteFile: ( - root: string, - relPath: string, - content: string, - mkdirs?: boolean, - remote?: { connectionId: string; remotePath: string } - ) => ipcRenderer.invoke('fs:write', { root, relPath, content, mkdirs, ...remote }), - fsRemove: ( - root: string, - relPath: string, - remote?: { connectionId: string; remotePath: string } - ) => ipcRenderer.invoke('fs:remove', { root, relPath, ...remote }), - getProjectConfig: (projectPath: string) => - ipcRenderer.invoke('fs:getProjectConfig', { projectPath }), - saveProjectConfig: (projectPath: string, content: string) => - ipcRenderer.invoke('fs:saveProjectConfig', { projectPath, content }), - // Attachments - saveAttachment: (args: { taskPath: string; srcPath: string; subdir?: string }) => - ipcRenderer.invoke('fs:save-attachment', args), - - // Project management - openProject: () => ipcRenderer.invoke('project:open'), - getProjectSettings: (projectId: string) => - ipcRenderer.invoke('projectSettings:get', { projectId }), - updateProjectSettings: (args: { projectId: string; baseRef: string }) => - ipcRenderer.invoke('projectSettings:update', args), - fetchProjectBaseRef: (args: { projectId: string; projectPath: string }) => - ipcRenderer.invoke('projectSettings:fetchBaseRef', args), - getGitInfo: (projectPath: string) => ipcRenderer.invoke('git:getInfo', projectPath), - getGitStatus: (taskPath: string) => ipcRenderer.invoke('git:get-status', taskPath), - watchGitStatus: (taskPath: string) => ipcRenderer.invoke('git:watch-status', taskPath), - unwatchGitStatus: (taskPath: string, watchId?: string) => - ipcRenderer.invoke('git:unwatch-status', taskPath, watchId), - onGitStatusChanged: (listener: (data: { taskPath: string; error?: string }) => void) => { - attachGitStatusBridgeOnce(); - gitStatusChangedListeners.add(listener); - return () => { - 
gitStatusChangedListeners.delete(listener); - }; - }, - getFileDiff: (args: { taskPath: string; filePath: string }) => - ipcRenderer.invoke('git:get-file-diff', args), - stageFile: (args: { taskPath: string; filePath: string }) => - ipcRenderer.invoke('git:stage-file', args), - stageAllFiles: (args: { taskPath: string }) => ipcRenderer.invoke('git:stage-all-files', args), - unstageFile: (args: { taskPath: string; filePath: string }) => - ipcRenderer.invoke('git:unstage-file', args), - revertFile: (args: { taskPath: string; filePath: string }) => - ipcRenderer.invoke('git:revert-file', args), - gitCommit: (args: { taskPath: string; message: string }) => - ipcRenderer.invoke('git:commit', args), - gitPush: (args: { taskPath: string }) => ipcRenderer.invoke('git:push', args), - gitPull: (args: { taskPath: string }) => ipcRenderer.invoke('git:pull', args), - gitGetLog: (args: { taskPath: string; maxCount?: number; skip?: number }) => - ipcRenderer.invoke('git:get-log', args), - gitGetLatestCommit: (args: { taskPath: string }) => - ipcRenderer.invoke('git:get-latest-commit', args), - gitGetCommitFiles: (args: { taskPath: string; commitHash: string }) => - ipcRenderer.invoke('git:get-commit-files', args), - gitGetCommitFileDiff: (args: { taskPath: string; commitHash: string; filePath: string }) => - ipcRenderer.invoke('git:get-commit-file-diff', args), - gitSoftReset: (args: { taskPath: string }) => ipcRenderer.invoke('git:soft-reset', args), - gitCommitAndPush: (args: { - taskPath: string; - commitMessage?: string; - createBranchIfOnDefault?: boolean; - branchPrefix?: string; - }) => ipcRenderer.invoke('git:commit-and-push', args), - generatePrContent: (args: { taskPath: string; base?: string }) => - ipcRenderer.invoke('git:generate-pr-content', args), - createPullRequest: (args: { - taskPath: string; - title?: string; - body?: string; - base?: string; - head?: string; - draft?: boolean; - web?: boolean; - fill?: boolean; - }) => ipcRenderer.invoke('git:create-pr', 
args), - mergeToMain: (args: { taskPath: string }) => ipcRenderer.invoke('git:merge-to-main', args), - mergePr: (args: { - taskPath: string; - prNumber?: number; - strategy?: 'merge' | 'squash' | 'rebase'; - admin?: boolean; - }) => ipcRenderer.invoke('git:merge-pr', args), - getPrStatus: (args: { taskPath: string }) => ipcRenderer.invoke('git:get-pr-status', args), - getCheckRuns: (args: { taskPath: string }) => ipcRenderer.invoke('git:get-check-runs', args), - getPrComments: (args: { taskPath: string; prNumber?: number }) => - ipcRenderer.invoke('git:get-pr-comments', args), - getBranchStatus: (args: { taskPath: string }) => - ipcRenderer.invoke('git:get-branch-status', args), - renameBranch: (args: { repoPath: string; oldBranch: string; newBranch: string }) => - ipcRenderer.invoke('git:rename-branch', args), - listRemoteBranches: (args: { projectPath: string; remote?: string }) => - ipcRenderer.invoke('git:list-remote-branches', args), - openExternal: (url: string) => ipcRenderer.invoke('app:openExternal', url), - clipboardWriteText: (text: string) => ipcRenderer.invoke('app:clipboard-write-text', text), - paste: () => ipcRenderer.invoke('app:paste'), - // Telemetry (minimal, anonymous) - captureTelemetry: (event: string, properties?: Record) => - ipcRenderer.invoke('telemetry:capture', { event, properties }), - getTelemetryStatus: () => ipcRenderer.invoke('telemetry:get-status'), - setTelemetryEnabled: (enabled: boolean) => ipcRenderer.invoke('telemetry:set-enabled', enabled), - setOnboardingSeen: (flag: boolean) => ipcRenderer.invoke('telemetry:set-onboarding-seen', flag), - connectToGitHub: (projectPath: string) => ipcRenderer.invoke('github:connect', projectPath), - - // GitHub integration - githubAuth: () => ipcRenderer.invoke('github:auth'), - githubCancelAuth: () => ipcRenderer.invoke('github:auth:cancel'), - - // GitHub auth event listeners - onGithubAuthDeviceCode: ( - callback: (data: { - userCode: string; - verificationUri: string; - expiresIn: 
number; - interval: number; - }) => void - ) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:device-code', listener); - return () => ipcRenderer.removeListener('github:auth:device-code', listener); - }, - onGithubAuthPolling: (callback: (data: { status: string }) => void) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:polling', listener); - return () => ipcRenderer.removeListener('github:auth:polling', listener); - }, - onGithubAuthSlowDown: (callback: (data: { newInterval: number }) => void) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:slow-down', listener); - return () => ipcRenderer.removeListener('github:auth:slow-down', listener); - }, - onGithubAuthSuccess: (callback: (data: { token: string; user: any }) => void) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:success', listener); - return () => ipcRenderer.removeListener('github:auth:success', listener); - }, - onGithubAuthError: (callback: (data: { error: string; message: string }) => void) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:error', listener); - return () => ipcRenderer.removeListener('github:auth:error', listener); - }, - onGithubAuthCancelled: (callback: () => void) => { - const listener = () => callback(); - ipcRenderer.on('github:auth:cancelled', listener); - return () => ipcRenderer.removeListener('github:auth:cancelled', listener); - }, - onGithubAuthUserUpdated: (callback: (data: { user: any }) => void) => { - const listener = (_: any, data: any) => callback(data); - ipcRenderer.on('github:auth:user-updated', listener); - return () => ipcRenderer.removeListener('github:auth:user-updated', listener); - }, - - githubIsAuthenticated: () => ipcRenderer.invoke('github:isAuthenticated'), - githubGetStatus: () => ipcRenderer.invoke('github:getStatus'), - githubGetUser: 
() => ipcRenderer.invoke('github:getUser'), - githubGetRepositories: () => ipcRenderer.invoke('github:getRepositories'), - githubCloneRepository: (repoUrl: string, localPath: string) => - ipcRenderer.invoke('github:cloneRepository', repoUrl, localPath), - githubGetOwners: () => ipcRenderer.invoke('github:getOwners'), - githubValidateRepoName: (name: string, owner: string) => - ipcRenderer.invoke('github:validateRepoName', name, owner), - githubCreateNewProject: (params: { - name: string; - description?: string; - owner: string; - isPrivate: boolean; - gitignoreTemplate?: string; - }) => ipcRenderer.invoke('github:createNewProject', params), - githubListPullRequests: (projectPath: string) => - ipcRenderer.invoke('github:listPullRequests', { projectPath }), - githubCreatePullRequestWorktree: (args: { - projectPath: string; - projectId: string; - prNumber: number; - prTitle?: string; - taskName?: string; - branchName?: string; - }) => ipcRenderer.invoke('github:createPullRequestWorktree', args), - githubLogout: () => ipcRenderer.invoke('github:logout'), - githubCheckCLIInstalled: () => ipcRenderer.invoke('github:checkCLIInstalled'), - githubInstallCLI: () => ipcRenderer.invoke('github:installCLI'), - // GitHub issues - githubIssuesList: (projectPath: string, limit?: number) => - ipcRenderer.invoke('github:issues:list', projectPath, limit), - githubIssuesSearch: (projectPath: string, searchTerm: string, limit?: number) => - ipcRenderer.invoke('github:issues:search', projectPath, searchTerm, limit), - githubIssueGet: (projectPath: string, number: number) => - ipcRenderer.invoke('github:issues:get', projectPath, number), - // Linear integration - linearSaveToken: (token: string) => ipcRenderer.invoke('linear:saveToken', token), - linearCheckConnection: () => ipcRenderer.invoke('linear:checkConnection'), - linearClearToken: () => ipcRenderer.invoke('linear:clearToken'), - linearInitialFetch: (limit?: number) => ipcRenderer.invoke('linear:initialFetch', limit), - 
linearSearchIssues: (searchTerm: string, limit?: number) => - ipcRenderer.invoke('linear:searchIssues', searchTerm, limit), - // Jira integration - jiraSaveCredentials: (args: { siteUrl: string; email: string; token: string }) => - ipcRenderer.invoke('jira:saveCredentials', args), - jiraClearCredentials: () => ipcRenderer.invoke('jira:clearCredentials'), - jiraCheckConnection: () => ipcRenderer.invoke('jira:checkConnection'), - jiraInitialFetch: (limit?: number) => ipcRenderer.invoke('jira:initialFetch', limit), - jiraSearchIssues: (searchTerm: string, limit?: number) => - ipcRenderer.invoke('jira:searchIssues', searchTerm, limit), - getProviderStatuses: (opts?: { refresh?: boolean; providers?: string[]; providerId?: string }) => - ipcRenderer.invoke('providers:getStatuses', opts ?? {}), - getProviderCustomConfig: (providerId: string) => - ipcRenderer.invoke('providers:getCustomConfig', providerId), - getAllProviderCustomConfigs: () => ipcRenderer.invoke('providers:getAllCustomConfigs'), - updateProviderCustomConfig: (providerId: string, config: any) => - ipcRenderer.invoke('providers:updateCustomConfig', providerId, config), - - // Line comments management - lineCommentsCreate: (input: any) => ipcRenderer.invoke('lineComments:create', input), - lineCommentsGet: (args: { taskId: string; filePath?: string }) => - ipcRenderer.invoke('lineComments:get', args), - lineCommentsUpdate: (input: { id: string; content: string }) => - ipcRenderer.invoke('lineComments:update', input), - lineCommentsDelete: (id: string) => ipcRenderer.invoke('lineComments:delete', id), - lineCommentsGetFormatted: (taskId: string) => - ipcRenderer.invoke('lineComments:getFormatted', taskId), - lineCommentsMarkSent: (commentIds: string[]) => - ipcRenderer.invoke('lineComments:markSent', commentIds), - lineCommentsGetUnsent: (taskId: string) => ipcRenderer.invoke('lineComments:getUnsent', taskId), - - // Debug helpers - debugAppendLog: (filePath: string, content: string, options?: { reset?: 
boolean }) => - ipcRenderer.invoke('debug:append-log', filePath, content, options ?? {}), - - // PlanMode strict lock - planApplyLock: (taskPath: string) => ipcRenderer.invoke('plan:lock', taskPath), - planReleaseLock: (taskPath: string) => ipcRenderer.invoke('plan:unlock', taskPath), - onPlanEvent: ( - listener: (data: { - type: 'write_blocked' | 'remove_blocked'; - root: string; - relPath: string; - code?: string; - message?: string; - }) => void - ) => { - const channel = 'plan:event'; - const wrapped = (_: Electron.IpcRendererEvent, data: any) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - - onProviderStatusUpdated: (listener: (data: { providerId: string; status: any }) => void) => { - const channel = 'provider:status-updated'; - const wrapped = (_: Electron.IpcRendererEvent, data: any) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - - // Host preview (non-container) - hostPreviewStart: (args: { - taskId: string; - taskPath: string; - script?: string; - parentProjectPath?: string; - }) => ipcRenderer.invoke('preview:host:start', args), - hostPreviewSetup: (args: { taskId: string; taskPath: string }) => - ipcRenderer.invoke('preview:host:setup', args), - hostPreviewStop: (taskId: string) => ipcRenderer.invoke('preview:host:stop', taskId), - hostPreviewStopAll: (exceptId?: string) => ipcRenderer.invoke('preview:host:stopAll', exceptId), - onHostPreviewEvent: (listener: (data: any) => void) => { - const channel = 'preview:host:event'; - const wrapped = (_: Electron.IpcRendererEvent, data: any) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - - // Main-managed browser (WebContentsView) - browserShow: (bounds: { x: number; y: number; width: number; height: number }, url?: string) => - ipcRenderer.invoke('browser:view:show', { ...bounds, url }), 
- browserHide: () => ipcRenderer.invoke('browser:view:hide'), - browserSetBounds: (bounds: { x: number; y: number; width: number; height: number }) => - ipcRenderer.invoke('browser:view:setBounds', bounds), - browserLoadURL: (url: string, forceReload?: boolean) => - ipcRenderer.invoke('browser:view:loadURL', url, forceReload), - browserGoBack: () => ipcRenderer.invoke('browser:view:goBack'), - browserGoForward: () => ipcRenderer.invoke('browser:view:goForward'), - browserReload: () => ipcRenderer.invoke('browser:view:reload'), - browserOpenDevTools: () => ipcRenderer.invoke('browser:view:openDevTools'), - browserClear: () => ipcRenderer.invoke('browser:view:clear'), - onBrowserViewEvent: (listener: (data: any) => void) => { - const channel = 'browser:view:event'; - const wrapped = (_: Electron.IpcRendererEvent, data: any) => listener(data); - ipcRenderer.on(channel, wrapped); - return () => ipcRenderer.removeListener(channel, wrapped); - }, - - // Lightweight TCP probe for localhost ports to avoid noisy fetches - netProbePorts: (host: string, ports: number[], timeoutMs?: number) => - ipcRenderer.invoke('net:probePorts', host, ports, timeoutMs), - - // SSH operations (unwrap { success, ... 
} IPC responses) - sshTestConnection: (config: any) => ipcRenderer.invoke('ssh:testConnection', config), - sshSaveConnection: async (config: any) => { - const res = await ipcRenderer.invoke('ssh:saveConnection', config); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'Failed to save SSH connection'); - } - return (res as any).connection; - }, - sshGetConnections: async () => { - const res = await ipcRenderer.invoke('ssh:getConnections'); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'Failed to load SSH connections'); - } - return (res as any).connections || []; - }, - sshDeleteConnection: async (id: string) => { - const res = await ipcRenderer.invoke('ssh:deleteConnection', id); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'Failed to delete SSH connection'); - } - }, - sshConnect: async (arg: any) => { - const res = await ipcRenderer.invoke('ssh:connect', arg); - if (res && typeof res === 'object' && 'success' in res) { - if (!res.success) { - throw new Error((res as any).error || 'SSH connect failed'); - } - return (res as any).connectionId as string; - } - return res as string; - }, - sshDisconnect: async (connectionId: string) => { - const res = await ipcRenderer.invoke('ssh:disconnect', connectionId); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH disconnect failed'); - } - }, - sshExecuteCommand: async (connectionId: string, command: string, cwd?: string) => { - const res = await ipcRenderer.invoke('ssh:executeCommand', connectionId, command, cwd); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH command failed'); - } - return { - stdout: (res as any).stdout || '', - stderr: (res as any).stderr || '', - 
exitCode: (res as any).exitCode ?? -1, - }; - }, - sshListFiles: async (connectionId: string, path: string) => { - const res = await ipcRenderer.invoke('ssh:listFiles', connectionId, path); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH list files failed'); - } - return (res as any).files || []; - }, - sshReadFile: async (connectionId: string, path: string) => { - const res = await ipcRenderer.invoke('ssh:readFile', connectionId, path); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH read file failed'); - } - return (res as any).content || ''; - }, - sshWriteFile: async (connectionId: string, path: string, content: string) => { - const res = await ipcRenderer.invoke('ssh:writeFile', connectionId, path, content); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH write file failed'); - } - }, - sshGetState: async (connectionId: string) => { - const res = await ipcRenderer.invoke('ssh:getState', connectionId); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH get state failed'); - } - return (res as any).state; - }, - sshGetConfig: () => ipcRenderer.invoke('ssh:getSshConfig'), - sshGetSshConfigHost: (hostAlias: string) => ipcRenderer.invoke('ssh:getSshConfigHost', hostAlias), - sshCheckIsGitRepo: async (connectionId: string, remotePath: string) => { - const res = await ipcRenderer.invoke('ssh:checkIsGitRepo', connectionId, remotePath); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH check git repo failed'); - } - return (res as any).isGitRepo as boolean; - }, - sshInitRepo: async (connectionId: string, parentPath: string, repoName: string) => { - const res = await ipcRenderer.invoke('ssh:initRepo', connectionId, 
parentPath, repoName); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH init repo failed'); - } - return (res as any).path as string; - }, - sshCloneRepo: async (connectionId: string, repoUrl: string, targetPath: string) => { - const res = await ipcRenderer.invoke('ssh:cloneRepo', connectionId, repoUrl, targetPath); - if (res && typeof res === 'object' && 'success' in res && !res.success) { - throw new Error((res as any).error || 'SSH clone repo failed'); - } - return (res as any).path as string; - }, - - // Skills management - skillsGetCatalog: () => ipcRenderer.invoke('skills:getCatalog'), - skillsRefreshCatalog: () => ipcRenderer.invoke('skills:refreshCatalog'), - skillsInstall: (args: { skillId: string }) => ipcRenderer.invoke('skills:install', args), - skillsUninstall: (args: { skillId: string }) => ipcRenderer.invoke('skills:uninstall', args), - skillsGetDetail: (args: { skillId: string }) => ipcRenderer.invoke('skills:getDetail', args), - skillsGetDetectedAgents: () => ipcRenderer.invoke('skills:getDetectedAgents'), - skillsCreate: (args: { name: string; description: string }) => - ipcRenderer.invoke('skills:create', args), -}); - -// Type definitions for the exposed API -export interface ElectronAPI { - // App info - getVersion: () => Promise; - getPlatform: () => Promise; - clipboardWriteText: (text: string) => Promise<{ success: boolean; error?: string }>; - paste: () => Promise<{ success: boolean; error?: string }>; - listInstalledFonts: (args?: { - refresh?: boolean; - }) => Promise<{ success: boolean; fonts?: string[]; cached?: boolean; error?: string }>; - // Updater - checkForUpdates: () => Promise<{ success: boolean; result?: any; error?: string }>; - downloadUpdate: () => Promise<{ success: boolean; error?: string }>; - quitAndInstallUpdate: () => Promise<{ success: boolean; error?: string }>; - openLatestDownload: () => Promise<{ success: boolean; error?: string }>; - 
onUpdateEvent: (listener: (data: { type: string; payload?: any }) => void) => () => void; - - // Telemetry (minimal, anonymous) - captureTelemetry: ( - event: string, - properties?: Record - ) => Promise<{ success: boolean; error?: string; disabled?: boolean }>; - getTelemetryStatus: () => Promise<{ - success: boolean; - status?: { - enabled: boolean; - envDisabled: boolean; - userOptOut: boolean; - hasKeyAndHost: boolean; - }; - error?: string; - }>; - setTelemetryEnabled: ( - enabled: boolean - ) => Promise<{ success: boolean; status?: any; error?: string }>; - - // PTY management - ptyStart: (opts: { - id: string; - cwd?: string; - shell?: string; - env?: Record; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - }) => Promise<{ ok: boolean; error?: string }>; - ptyInput: (args: { id: string; data: string }) => void; - ptyResize: (args: { id: string; cols: number; rows: number }) => void; - ptyKill: (id: string) => void; - onPtyData: (id: string, listener: (data: string) => void) => () => void; - ptyGetSnapshot: (args: { id: string }) => Promise<{ - ok: boolean; - snapshot?: any; - error?: string; - }>; - ptySaveSnapshot: (args: { - id: string; - payload: TerminalSnapshotPayload; - }) => Promise<{ ok: boolean; error?: string }>; - ptyClearSnapshot: (args: { id: string }) => Promise<{ ok: boolean }>; - onPtyExit: ( - id: string, - listener: (info: { exitCode: number; signal?: number }) => void - ) => () => void; - // Worktree management - worktreeCreate: (args: { - projectPath: string; - taskName: string; - projectId: string; - baseRef?: string; - }) => Promise<{ success: boolean; worktree?: any; error?: string }>; - worktreeList: (args: { - projectPath: string; - }) => Promise<{ success: boolean; worktrees?: any[]; error?: string }>; - worktreeRemove: (args: { - projectPath: string; - worktreeId: string; - worktreePath?: string; - branch?: string; - taskName?: string; - }) => Promise<{ success: boolean; error?: string }>; - 
worktreeStatus: (args: { - worktreePath: string; - }) => Promise<{ success: boolean; status?: any; error?: string }>; - worktreeMerge: (args: { - projectPath: string; - worktreeId: string; - }) => Promise<{ success: boolean; error?: string }>; - worktreeGet: (args: { - worktreeId: string; - }) => Promise<{ success: boolean; worktree?: any; error?: string }>; - worktreeGetAll: () => Promise<{ success: boolean; worktrees?: any[]; error?: string }>; - // Worktree pool (reserve) management for instant task creation - worktreeEnsureReserve: (args: { - projectId: string; - projectPath: string; - baseRef?: string; - }) => Promise<{ success: boolean; error?: string }>; - worktreeHasReserve: (args: { - projectId: string; - }) => Promise<{ success: boolean; hasReserve?: boolean; error?: string }>; - worktreeClaimReserve: (args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - }) => Promise<{ - success: boolean; - worktree?: any; - needsBaseRefSwitch?: boolean; - error?: string; - }>; - worktreeClaimReserveAndSaveTask: (args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - task: { - projectId: string; - name: string; - status: 'active' | 'idle' | 'running'; - agentId?: string | null; - metadata?: any; - useWorktree?: boolean; - }; - }) => Promise<{ - success: boolean; - worktree?: any; - task?: any; - needsBaseRefSwitch?: boolean; - error?: string; - }>; - worktreeRemoveReserve: (args: { - projectId: string; - }) => Promise<{ success: boolean; error?: string }>; - - // Lifecycle scripts - lifecycleGetScript: (args: { - projectPath: string; - phase: 'setup' | 'run' | 'teardown'; - }) => Promise<{ success: boolean; script?: string | null; error?: string }>; - lifecycleSetup: (args: { - taskId: string; - taskPath: string; - projectPath: string; - }) => Promise<{ success: boolean; skipped?: boolean; error?: string }>; - lifecycleRunStart: (args: { - taskId: string; - taskPath: string; - projectPath: 
string; - }) => Promise<{ success: boolean; skipped?: boolean; error?: string }>; - lifecycleRunStop: (args: { - taskId: string; - }) => Promise<{ success: boolean; skipped?: boolean; error?: string }>; - lifecycleTeardown: (args: { - taskId: string; - taskPath: string; - projectPath: string; - }) => Promise<{ success: boolean; skipped?: boolean; error?: string }>; - lifecycleGetState: (args: { taskId: string }) => Promise<{ - success: boolean; - state?: { - taskId: string; - setup: { - status: 'idle' | 'running' | 'succeeded' | 'failed'; - startedAt?: string; - finishedAt?: string; - exitCode?: number | null; - error?: string | null; - }; - run: { - status: 'idle' | 'running' | 'succeeded' | 'failed'; - startedAt?: string; - finishedAt?: string; - exitCode?: number | null; - error?: string | null; - pid?: number | null; - }; - teardown: { - status: 'idle' | 'running' | 'succeeded' | 'failed'; - startedAt?: string; - finishedAt?: string; - exitCode?: number | null; - error?: string | null; - }; - }; - error?: string; - }>; - lifecycleClearTask: (args: { taskId: string }) => Promise<{ success: boolean; error?: string }>; - onLifecycleEvent: (listener: (data: any) => void) => () => void; - - // Project management - openProject: () => Promise<{ success: boolean; path?: string; error?: string }>; - getGitInfo: (projectPath: string) => Promise<{ - isGitRepo: boolean; - remote?: string; - branch?: string; - baseRef?: string; - upstream?: string; - aheadCount?: number; - behindCount?: number; - path?: string; - rootPath?: string; - error?: string; - }>; - getGitStatus: (taskPath: string) => Promise<{ - success: boolean; - changes?: Array<{ - path: string; - status: string; - additions: number; - deletions: number; - diff?: string; - }>; - error?: string; - }>; - watchGitStatus: (taskPath: string) => Promise<{ - success: boolean; - watchId?: string; - error?: string; - }>; - unwatchGitStatus: ( - taskPath: string, - watchId?: string - ) => Promise<{ - success: boolean; - 
error?: string; - }>; - onGitStatusChanged: ( - listener: (data: { taskPath: string; error?: string }) => void - ) => () => void; - getFileDiff: (args: { taskPath: string; filePath: string }) => Promise<{ - success: boolean; - diff?: { lines: Array<{ left?: string; right?: string; type: 'context' | 'add' | 'del' }> }; - error?: string; - }>; - gitCommitAndPush: (args: { - taskPath: string; - commitMessage?: string; - createBranchIfOnDefault?: boolean; - branchPrefix?: string; - }) => Promise<{ success: boolean; branch?: string; output?: string; error?: string }>; - createPullRequest: (args: { - taskPath: string; - title?: string; - body?: string; - base?: string; - head?: string; - draft?: boolean; - web?: boolean; - fill?: boolean; - }) => Promise<{ success: boolean; url?: string; output?: string; error?: string }>; - connectToGitHub: ( - projectPath: string - ) => Promise<{ success: boolean; repository?: string; branch?: string; error?: string }>; - - // Filesystem helpers - fsList: ( - root: string, - opts?: { - includeDirs?: boolean; - maxEntries?: number; - timeBudgetMs?: number; - connectionId?: string; - remotePath?: string; - } - ) => Promise<{ - success: boolean; - items?: Array<{ path: string; type: 'file' | 'dir' }>; - error?: string; - canceled?: boolean; - truncated?: boolean; - reason?: string; - durationMs?: number; - }>; - fsRead: ( - root: string, - relPath: string, - maxBytes?: number, - remote?: { connectionId: string; remotePath: string } - ) => Promise<{ - success: boolean; - path?: string; - size?: number; - truncated?: boolean; - content?: string; - error?: string; - }>; - - // GitHub integration - githubAuth: () => Promise<{ - success: boolean; - device_code?: string; - user_code?: string; - verification_uri?: string; - expires_in?: number; - interval?: number; - error?: string; - }>; - githubCancelAuth: () => Promise<{ success: boolean; error?: string }>; - - // GitHub auth event listeners (return cleanup function) - onGithubAuthDeviceCode: 
( - callback: (data: { - userCode: string; - verificationUri: string; - expiresIn: number; - interval: number; - }) => void - ) => () => void; - onGithubAuthPolling: (callback: (data: { status: string }) => void) => () => void; - onGithubAuthSlowDown: (callback: (data: { newInterval: number }) => void) => () => void; - onGithubAuthSuccess: (callback: (data: { token: string; user: any }) => void) => () => void; - onGithubAuthError: (callback: (data: { error: string; message: string }) => void) => () => void; - onGithubAuthCancelled: (callback: () => void) => () => void; - onGithubAuthUserUpdated: (callback: (data: { user: any }) => void) => () => void; - - githubIsAuthenticated: () => Promise; - githubGetStatus: () => Promise<{ installed: boolean; authenticated: boolean; user?: any }>; - githubGetUser: () => Promise; - githubGetRepositories: () => Promise; - githubCloneRepository: ( - repoUrl: string, - localPath: string - ) => Promise<{ success: boolean; error?: string }>; - githubListPullRequests: ( - projectPath: string - ) => Promise<{ success: boolean; prs?: any[]; error?: string }>; - githubCreatePullRequestWorktree: (args: { - projectPath: string; - projectId: string; - prNumber: number; - prTitle?: string; - taskName?: string; - branchName?: string; - }) => Promise<{ - success: boolean; - worktree?: any; - branchName?: string; - taskName?: string; - error?: string; - }>; - githubLogout: () => Promise; - githubCheckCLIInstalled: () => Promise; - githubInstallCLI: () => Promise<{ success: boolean; error?: string }>; - - // Host preview (non-container) - hostPreviewStart: (args: { - taskId: string; - taskPath: string; - script?: string; - parentProjectPath?: string; - }) => Promise<{ ok: boolean; error?: string }>; - hostPreviewSetup: (args: { - taskId: string; - taskPath: string; - }) => Promise<{ ok: boolean; error?: string }>; - hostPreviewStop: (taskId: string) => Promise<{ ok: boolean }>; - onHostPreviewEvent: ( - listener: (data: { type: 'url'; taskId: 
string; url: string }) => void - ) => () => void; - - // Main-managed browser (WebContentsView) - browserShow: ( - bounds: { x: number; y: number; width: number; height: number }, - url?: string - ) => Promise<{ ok: boolean }>; - browserHide: () => Promise<{ ok: boolean }>; - browserSetBounds: (bounds: { - x: number; - y: number; - width: number; - height: number; - }) => Promise<{ ok: boolean }>; - browserLoadURL: (url: string) => Promise<{ ok: boolean }>; - browserGoBack: () => Promise<{ ok: boolean }>; - browserGoForward: () => Promise<{ ok: boolean }>; - browserReload: () => Promise<{ ok: boolean }>; - browserOpenDevTools: () => Promise<{ ok: boolean }>; - onBrowserViewEvent: (listener: (data: any) => void) => () => void; - - // TCP probe (no HTTP requests) - netProbePorts: ( - host: string, - ports: number[], - timeoutMs?: number - ) => Promise<{ reachable: number[] }>; - - // SSH operations - sshTestConnection: ( - config: any - ) => Promise<{ success: boolean; latency?: number; error?: string }>; - sshSaveConnection: (config: any) => Promise; - sshGetConnections: () => Promise; - sshDeleteConnection: (id: string) => Promise; - sshConnect: (arg: any) => Promise; - sshDisconnect: (connectionId: string) => Promise; - sshExecuteCommand: ( - connectionId: string, - command: string, - cwd?: string - ) => Promise<{ - stdout: string; - stderr: string; - exitCode: number; - }>; - sshListFiles: (connectionId: string, path: string) => Promise; - sshReadFile: (connectionId: string, path: string) => Promise; - sshWriteFile: (connectionId: string, path: string, content: string) => Promise; - sshGetState: (connectionId: string) => Promise; - sshGetConfig: () => Promise<{ success: boolean; hosts?: any[]; error?: string }>; -} - -declare global { - interface Window { - electronAPI: ElectronAPI; - } -} diff --git a/src/main/rpc.ts b/src/main/rpc.ts new file mode 100644 index 000000000..cef90e47d --- /dev/null +++ b/src/main/rpc.ts @@ -0,0 +1,52 @@ +import { createRPCRouter } 
from '../shared/ipc/rpc'; +import { accountController } from './core/account/controller'; +import { appController } from './core/app/controller'; +import { conversationController } from './core/conversations/controller'; +import { dependenciesController } from './core/dependencies/controller'; +import { editorBufferController } from './core/editor/controller'; +import { filesController } from './core/fs/controller'; +import { gitController } from './core/git/controller'; +import { githubController } from './core/github/controller'; +import { jiraController } from './core/jira/controller'; +import { lineCommentsController } from './core/line-comments'; +import { linearController } from './core/linear/controller'; +import { mcpController } from './core/mcp/controller'; +import { projectController } from './core/projects/controller'; +import { ptyController } from './core/pty/controller'; +import { pullRequestController } from './core/pull-requests/controller'; +import { repositoryController } from './core/repository/controller'; +import { appSettingsController } from './core/settings/controller'; +import { providerSettingsController } from './core/settings/provider-settings-controller'; +import { skillsController } from './core/skills/controller'; +import { sshController } from './core/ssh/controller'; +import { taskController } from './core/tasks/controller'; +import { terminalsController } from './core/terminals/controller'; +import { updateController } from './core/updates/controller'; + +export const rpcRouter = createRPCRouter({ + account: accountController, + app: appController, + appSettings: appSettingsController, + providerSettings: providerSettingsController, + repository: repositoryController, + fs: filesController, + update: updateController, + pty: ptyController, + github: githubController, + jira: jiraController, + linear: linearController, + lineComments: lineCommentsController, + skills: skillsController, + ssh: sshController, + projects: 
projectController, + tasks: taskController, + conversations: conversationController, + terminals: terminalsController, + git: gitController, + dependencies: dependenciesController, + mcp: mcpController, + editorBuffer: editorBufferController, + pullRequests: pullRequestController, +}); + +export type RpcRouter = typeof rpcRouter; diff --git a/src/main/services/AgentEventService.ts b/src/main/services/AgentEventService.ts index 1a893ae39..3a0a5eedc 100644 --- a/src/main/services/AgentEventService.ts +++ b/src/main/services/AgentEventService.ts @@ -1,13 +1,17 @@ -import http from 'http'; -import crypto from 'crypto'; +import crypto from 'node:crypto'; +import http from 'node:http'; +import { eq } from 'drizzle-orm'; import { BrowserWindow, Notification } from 'electron'; -import { log } from '../lib/logger'; -import { parsePtyId, isMainPty } from '@shared/ptyId'; -import { getMainWindow } from '../app/window'; -import { getProvider } from '@shared/providers/registry'; -import type { ProviderId } from '@shared/providers/registry'; -import type { AgentEvent } from '@shared/agentEvents'; -import { getAppSettings } from '../settings'; +import { getProvider, type AgentProviderId } from '@shared/agent-provider-registry'; +import { agentEventChannel, type AgentEvent } from '@shared/events/agentEvents'; +import { notificationFocusTaskChannel } from '@shared/events/appEvents'; +import { parsePtyId } from '@shared/ptyId'; +import { getMainWindow } from '@main/app/window'; +import { appSettingsService } from '@main/core/settings/settings-service'; +import { db } from '@main/db/client'; +import { conversations, tasks } from '@main/db/schema'; +import { events } from '@main/lib/events'; +import { log } from '@main/lib/logger'; class AgentEventService { private server: http.Server | null = null; @@ -66,6 +70,13 @@ class AgentEventService { return; } + const convRows = await db + .select({ taskId: conversations.taskId }) + .from(conversations) + .where(eq(conversations.id, 
parsed.conversationId)) + .limit(1); + const taskId = convRows[0]?.taskId ?? parsed.conversationId; + // Body is the raw Claude Code hook payload JSON const raw = body ? JSON.parse(body) : {}; @@ -81,7 +92,8 @@ class AgentEventService { const event: AgentEvent = { type: type as AgentEvent['type'], ptyId, - taskId: parsed.suffix, + conversationId: parsed.conversationId, + taskId, providerId: parsed.providerId, timestamp: Date.now(), payload: normalizedPayload, @@ -92,15 +104,7 @@ class AgentEventService { await this.maybeShowOsNotification(event, appFocused); - for (const win of windows) { - try { - if (!win.isDestroyed() && !win.webContents.isDestroyed()) { - win.webContents.send('agent:event', event, { appFocused }); - } - } catch { - // Window may have been destroyed between check and send - } - } + events.emit(agentEventChannel, { event, appFocused }); res.writeHead(200); res.end(); @@ -130,20 +134,23 @@ class AgentEventService { private async maybeShowOsNotification(event: AgentEvent, appFocused: boolean): Promise { try { - const settings = getAppSettings(); - if (!settings.notifications?.enabled) return; - if (!settings.notifications?.osNotifications) return; + const notifications = await appSettingsService.get('notifications'); + if (!notifications?.enabled) return; + if (!notifications?.osNotifications) return; if (appFocused) return; if (!Notification.isSupported()) return; - const providerName = getProvider(event.providerId as ProviderId)?.name ?? event.providerId; + const providerName = + getProvider(event.providerId as AgentProviderId)?.name ?? 
event.providerId; - const isMain = isMainPty(event.ptyId); let taskName: string | null = null; - if (isMain) { - const { databaseService } = await import('./DatabaseService'); - const task = await databaseService.getTaskById(event.taskId); - if (task?.name) taskName = task.name; + if (event.taskId) { + const taskRows = await db + .select({ name: tasks.name }) + .from(tasks) + .where(eq(tasks.id, event.taskId)) + .limit(1); + if (taskRows[0]?.name) taskName = taskRows[0].name; } const titleSuffix = taskName ? ` — ${taskName}` : ''; @@ -155,8 +162,8 @@ class AgentEventService { if (win.isMinimized()) win.restore(); win.show(); win.focus(); - if (isMain) { - win.webContents.send('notification:focus-task', event.taskId); + if (event.taskId) { + events.emit(notificationFocusTaskChannel, { taskId: event.taskId }); } } }); diff --git a/src/main/services/ClaudeConfigService.ts b/src/main/services/ClaudeConfigService.ts deleted file mode 100644 index d4753960f..000000000 --- a/src/main/services/ClaudeConfigService.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { existsSync, readFileSync, writeFileSync, renameSync, unlinkSync } from 'fs'; -import { join, resolve } from 'path'; -import { homedir } from 'os'; -import { randomUUID } from 'crypto'; -import { log } from '../lib/logger'; -import { getAppSettings } from '../settings'; - -function getClaudeConfigPath(): string { - return join(homedir(), '.claude.json'); -} - -/** - * Auto-trust a worktree directory for Claude Code if the setting is enabled. - * No-op for non-Claude providers. - */ -export function maybeAutoTrustForClaude(providerId: string, cwd?: string): void { - if (!cwd) return; - if (providerId !== 'claude') return; - if (!getAppSettings().tasks?.autoTrustWorktrees) return; - ensureClaudeTrust(cwd); -} - -/** - * Ensure that Claude Code trusts the given worktree directory by writing - * the trust entry into ~/.claude.json. 
Idempotent and non-fatal — errors - * are logged but never propagated so PTY spawning is never blocked. - */ -export function ensureClaudeTrust(worktreePath: string): void { - try { - const configPath = getClaudeConfigPath(); - const resolvedPath = resolve(worktreePath); - let config: Record = {}; - - if (existsSync(configPath)) { - const raw = readFileSync(configPath, 'utf8'); - config = JSON.parse(raw); - } - - if (!config.projects || typeof config.projects !== 'object' || Array.isArray(config.projects)) { - config.projects = {}; - } - - const existing = config.projects[resolvedPath]; - if ( - existing && - existing.hasTrustDialogAccepted === true && - existing.hasCompletedProjectOnboarding === true - ) { - return; // Already trusted - } - - config.projects[resolvedPath] = { - ...existing, - hasTrustDialogAccepted: true, - hasCompletedProjectOnboarding: true, - }; - - // Atomic write: write to temp file then rename - const tmpPath = configPath + '.' + randomUUID() + '.tmp'; - try { - writeFileSync(tmpPath, JSON.stringify(config, null, 2), 'utf8'); - renameSync(tmpPath, configPath); - } catch (writeErr) { - try { - unlinkSync(tmpPath); - } catch {} - throw writeErr; - } - } catch (err) { - log.warn('ClaudeConfigService: failed to write trust entry', { - path: worktreePath, - error: String((err as Error)?.message || err), - }); - } -} diff --git a/src/main/services/ClaudeHookService.ts b/src/main/services/ClaudeHookService.ts deleted file mode 100644 index 75bcccfec..000000000 --- a/src/main/services/ClaudeHookService.ts +++ /dev/null @@ -1,71 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { log } from '../lib/logger'; - -// Hook command pipes stdin directly to curl via -d @- to avoid any shell -// expansion of the payload (which can contain $, backticks, etc. in -// AI-generated text). The ptyId and event type are sent as HTTP headers -// instead of being embedded in the JSON body. 
-function makeCommand(type: string): string { - return ( - 'curl -sf -X POST ' + - '-H "Content-Type: application/json" ' + - '-H "X-Emdash-Token: $EMDASH_HOOK_TOKEN" ' + - `-H "X-Emdash-Pty-Id: $EMDASH_PTY_ID" ` + - `-H "X-Emdash-Event-Type: ${type}" ` + - '-d @- ' + - '"http://127.0.0.1:$EMDASH_HOOK_PORT/hook" || true' - ); -} - -export class ClaudeHookService { - static writeHookConfig(worktreePath: string): void { - const claudeDir = path.join(worktreePath, '.claude'); - const settingsPath = path.join(claudeDir, 'settings.local.json'); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let existing: Record = {}; - try { - const content = fs.readFileSync(settingsPath, 'utf-8'); - existing = JSON.parse(content); - } catch { - // File doesn't exist or isn't valid JSON — start fresh - } - - // Ensure .claude directory exists - try { - fs.mkdirSync(claudeDir, { recursive: true }); - } catch { - // May already exist - } - - // Merge our hook entries alongside any user-defined hooks. - // Claude Code hook format: [{ matcher?, hooks: [{ type, command }] }] - // We identify our own entries by the EMDASH_HOOK_PORT marker in the - // command string, strip them out, then append a fresh one. This is - // idempotent across restarts and preserves user hooks. - const hooks = existing.hooks || {}; - - for (const eventType of ['Notification', 'Stop'] as const) { - const prev: unknown[] = Array.isArray(hooks[eventType]) ? 
hooks[eventType] : []; - const userEntries = prev.filter( - (entry: any) => !JSON.stringify(entry).includes('EMDASH_HOOK_PORT') - ); - userEntries.push({ - hooks: [{ type: 'command', command: makeCommand(eventType.toLowerCase()) }], - }); - hooks[eventType] = userEntries; - } - - existing.hooks = hooks; - - try { - fs.writeFileSync(settingsPath, JSON.stringify(existing, null, 2) + '\n'); - } catch (err) { - log.warn('ClaudeHookService: failed to write hook config', { - path: settingsPath, - error: String(err), - }); - } - } -} diff --git a/src/main/services/ConnectionsService.ts b/src/main/services/ConnectionsService.ts deleted file mode 100644 index 40c001e56..000000000 --- a/src/main/services/ConnectionsService.ts +++ /dev/null @@ -1,419 +0,0 @@ -import { spawn, execFileSync } from 'child_process'; -import { BrowserWindow } from 'electron'; -import { providerStatusCache, type ProviderStatus } from './providerStatusCache'; -import { listDetectableProviders, type ProviderDefinition } from '@shared/providers/registry'; -import { log } from '../lib/logger'; - -export type CliStatusCode = 'connected' | 'missing' | 'needs_key' | 'error'; - -export interface CliProviderStatus { - id: string; - name: string; - status: CliStatusCode; - version?: string | null; - message?: string | null; - docUrl?: string | null; - command?: string | null; - installCommand?: string | null; -} - -type CliDefinition = ProviderDefinition & { - commands: string[]; - args: string[]; - statusResolver?: (result: CommandResult) => CliStatusCode; - messageResolver?: (result: CommandResult) => string | null; -}; - -interface CommandResult { - command: string; - success: boolean; - error?: Error; - stdout: string; - stderr: string; - status: number | null; - version: string | null; - resolvedPath: string | null; - timedOut?: boolean; - timeoutMs?: number; -} - -const truncate = (input: string, max = 400): string => - input && input.length > max ? 
`${input.slice(0, max)}…` : input; - -const DEFAULT_TIMEOUT_MS = 3000; - -const quoteForCmdExe = (input: string): string => { - if (input.length === 0) return '""'; - if (!/[\s"^&|<>()%!]/.test(input)) return input; - return `"${input - .replace(/%/g, '%%') - .replace(/!/g, '^!') - .replace(/(["^&|<>()])/g, '^$1')}"`; -}; - -export const CLI_DEFINITIONS: CliDefinition[] = listDetectableProviders().map((provider) => ({ - id: provider.id, - name: provider.name, - commands: provider.commands ?? [], - args: provider.versionArgs ?? ['--version'], - docUrl: provider.docUrl, - installCommand: provider.installCommand, - detectable: provider.detectable, -})); - -class ConnectionsService { - private initialized = false; - private timeoutRetryPending = new Set(); - private timeoutRetryTimers = new Map(); - - private clearTimeoutRetry(providerId: string) { - const pendingTimer = this.timeoutRetryTimers.get(providerId); - if (pendingTimer) { - clearTimeout(pendingTimer); - this.timeoutRetryTimers.delete(providerId); - } - this.timeoutRetryPending.delete(providerId); - } - - async initProviderStatusCache() { - if (this.initialized) return; - this.initialized = true; - await providerStatusCache.load(); - - // Check all providers and log a summary - await Promise.all(CLI_DEFINITIONS.map((def) => this.checkProvider(def.id, 'bootstrap'))); - - const statuses = providerStatusCache.getAll(); - const connected = CLI_DEFINITIONS.filter((d) => statuses[d.id]?.installed).map((d) => d.id); - const notInstalled = CLI_DEFINITIONS.filter((d) => !statuses[d.id]?.installed).map((d) => d.id); - - log.info( - `Providers: connected (${connected.join(', ') || 'none'}) | not installed (${notInstalled.join(', ') || 'none'})` - ); - } - - getCachedProviderStatuses(): Record { - return providerStatusCache.getAll(); - } - - async checkProvider( - providerId: string, - reason: 'bootstrap' | 'manual' | 'timeout-retry' = 'manual', - opts?: { timeoutMs?: number; allowRetry?: boolean } - ) { - const def = 
CLI_DEFINITIONS.find((d) => d.id === providerId); - if (!def) return; - - if (reason !== 'timeout-retry' && this.timeoutRetryPending.has(providerId)) { - // Cancel any pending timeout-based retry when a fresh check is requested. - this.clearTimeoutRetry(providerId); - } - - const timeoutMs = opts?.timeoutMs ?? DEFAULT_TIMEOUT_MS; - const commandResult = await this.tryCommands(def, timeoutMs); - const statusCode = await this.resolveStatus(def, commandResult); - this.cacheStatus(def.id, commandResult, statusCode); - - // Only log verbose details for actual errors (not just "not installed") - const isActualError = - (statusCode === 'error' || statusCode === 'needs_key') && commandResult.resolvedPath !== null; // binary was found but something went wrong - - if (isActualError) { - log.warn('provider:error', { - providerId: def.id, - status: statusCode, - command: commandResult.command, - resolvedPath: commandResult.resolvedPath, - exitStatus: commandResult.status, - stderr: commandResult.stderr ? truncate(commandResult.stderr) : null, - stdout: commandResult.stdout ? truncate(commandResult.stdout) : null, - error: commandResult.error - ? 
String(commandResult.error?.message || commandResult.error) - : null, - }); - } - - const shouldRetryTimeout = - commandResult.timedOut && - (commandResult.resolvedPath || commandResult.stdout) && - opts?.allowRetry !== false; - if (shouldRetryTimeout && !this.timeoutRetryPending.has(providerId)) { - this.timeoutRetryPending.add(providerId); - const retryDelayMs = 1500; - const retryTimeoutMs = Math.max(timeoutMs * 2, 12000); - const retryTimer = setTimeout(() => { - this.timeoutRetryTimers.delete(providerId); - void this.checkProvider(providerId, 'timeout-retry', { - timeoutMs: retryTimeoutMs, - allowRetry: false, - }).finally(() => this.timeoutRetryPending.delete(providerId)); - }, retryDelayMs); - this.timeoutRetryTimers.set(providerId, retryTimer); - } - } - - async refreshAllProviderStatuses(): Promise> { - log.info('provider:refreshAll:start'); - await Promise.all( - CLI_DEFINITIONS.map((definition) => this.checkProvider(definition.id, 'manual')) - ); - log.info('provider:refreshAll:done'); - return this.getCachedProviderStatuses(); - } - - private async resolveStatus(def: CliDefinition, result: CommandResult): Promise { - if (def.statusResolver) { - return def.statusResolver(result); - } - - if (result.success) { - return 'connected'; - } - - if (result.resolvedPath) { - return 'connected'; - } - - if (result.timedOut && result.stdout) { - return 'connected'; - } - - if (result.status !== null && !result.timedOut && (result.stdout || result.stderr)) { - return 'connected'; - } - - return result.error ? 'error' : 'missing'; - } - - private resolveMessage( - def: CliDefinition, - result: CommandResult, - status: CliStatusCode - ): string | null { - if (def.id === 'codex') { - return status === 'connected' - ? null - : 'Codex CLI not detected. 
Install @openai/codex to enable Codex agents.'; - } - - if (def.messageResolver) { - return def.messageResolver(result); - } - - if (status === 'missing') { - return `${def.name} was not found in PATH.`; - } - - if (status === 'error') { - if (result.stderr.trim()) { - return result.stderr.trim(); - } - if (result.stdout.trim()) { - return result.stdout.trim(); - } - if (result.error) { - return result.error.message; - } - } - - return null; - } - - private async tryCommands(def: CliDefinition, timeoutMs: number): Promise { - for (const command of def.commands) { - const result = await this.runCommand(command, def.args ?? ['--version'], timeoutMs); - if (result.success) { - return result; - } - - // If the command exists but returned a non-zero status, still return result for diagnostics - if (result.error && (result.error as NodeJS.ErrnoException).code !== 'ENOENT') { - return result; - } - } - - const lastCommand = def.commands[def.commands.length - 1]; - return this.runCommandViaShell(lastCommand, def.args ?? ['--version'], timeoutMs); - } - - /** Run a command through the user's login shell as a fallback for detection. */ - private async runCommandViaShell( - command: string, - args: string[], - timeoutMs: number - ): Promise { - const shell = process.env.SHELL || (process.platform === 'win32' ? 'cmd.exe' : '/bin/sh'); - const fullCmd = [command, ...args].join(' '); - const shellArgs = process.platform === 'win32' ? 
['/c', fullCmd] : ['-lc', fullCmd]; - const result = await this.runCommand(shell, shellArgs, timeoutMs); - - if (result.status === 127) { - return { - ...result, - command, - success: false, - resolvedPath: null, - status: null, - error: new Error(`${command}: command not found (shell fallback)`), - }; - } - - return { ...result, command }; - } - - private async runCommand( - command: string, - args: string[], - timeoutMs: number - ): Promise { - const resolvedPath = this.resolveCommandPath(command); - return new Promise((resolve) => { - try { - const executable = resolvedPath || command; - const lowerExecutable = executable.toLowerCase(); - const shouldUseCmdExe = - process.platform === 'win32' && - (lowerExecutable.endsWith('.cmd') || lowerExecutable.endsWith('.bat')); - - const child = shouldUseCmdExe - ? spawn(process.env.ComSpec || 'cmd.exe', [ - '/d', - '/s', - '/c', - [executable, ...args].map(quoteForCmdExe).join(' '), - ]) - : spawn(command, args); - - let stdout = ''; - let stderr = ''; - let didTimeout = false; - - // timeout for version checks (some CLIs can start slowly) - const timeoutId = setTimeout(() => { - didTimeout = true; - child.kill(); - }, timeoutMs); - - child.stdout?.on('data', (data) => { - stdout += data.toString(); - }); - - child.stderr?.on('data', (data) => { - stderr += data.toString(); - }); - - child.on('error', (error) => { - clearTimeout(timeoutId); - log.warn('provider:command-spawn-error', { - command, - executable, - resolvedPath, - error: error?.message || String(error), - }); - resolve({ - command, - success: false, - error, - stdout: stdout || '', - stderr: stderr || '', - status: null, - version: null, - resolvedPath, - timedOut: didTimeout, - timeoutMs, - }); - }); - - child.on('close', (code) => { - clearTimeout(timeoutId); - - const success = !didTimeout && code === 0; - const version = this.extractVersion(stdout) || this.extractVersion(stderr); - - if (!success) { - log.warn('provider:command-exit-failed', { - command, 
- executable, - resolvedPath, - status: code, - timedOut: didTimeout, - stderr: stderr ? truncate(stderr) : null, - stdout: stdout ? truncate(stdout) : null, - }); - } - - resolve({ - command, - success, - error: didTimeout ? new Error('Command timeout') : undefined, - stdout, - stderr, - status: code, - version, - resolvedPath, - timedOut: didTimeout, - timeoutMs, - }); - }); - } catch (error) { - resolve({ - command, - success: false, - error: error as Error, - stdout: '', - stderr: '', - status: null, - version: null, - resolvedPath, - timedOut: false, - timeoutMs, - }); - } - }); - } - - private extractVersion(output: string): string | null { - if (!output) return null; - const matches = output.match(/\d+\.\d+(\.\d+)?/); - return matches ? matches[0] : null; - } - - private resolveCommandPath(command: string): string | null { - const resolver = process.platform === 'win32' ? 'where' : 'which'; - try { - const result = execFileSync(resolver, [command], { encoding: 'utf8' }); - const lines = result - .split(/\r?\n/) - .map((l) => l.trim()) - .filter(Boolean); - return lines[0] ?? 
null; - } catch { - return null; - } - } - - private cacheStatus(providerId: string, result: CommandResult, statusCode: CliStatusCode) { - const installed = statusCode === 'connected'; - const status: ProviderStatus = { - installed, - path: result.resolvedPath, - version: result.version, - lastChecked: Date.now(), - }; - providerStatusCache.set(providerId, status); - this.emitStatusUpdate(providerId, status); - } - - private emitStatusUpdate(providerId: string, status: ProviderStatus) { - const payload = { providerId, status }; - BrowserWindow.getAllWindows().forEach((win) => { - try { - win.webContents.send('provider:status-updated', payload); - } catch { - // ignore send errors - } - }); - } -} - -export const connectionsService = new ConnectionsService(); diff --git a/src/main/services/DatabaseService.ts b/src/main/services/DatabaseService.ts deleted file mode 100644 index 53a15799f..000000000 --- a/src/main/services/DatabaseService.ts +++ /dev/null @@ -1,1271 +0,0 @@ -import type sqlite3Type from 'sqlite3'; -import { and, asc, desc, eq, inArray, isNull, ne, or, sql } from 'drizzle-orm'; -import { readMigrationFiles } from 'drizzle-orm/migrator'; -import { resolveDatabasePath, resolveMigrationsPath } from '../db/path'; -import { getDrizzleClient } from '../db/drizzleClient'; -import { errorTracking } from '../errorTracking'; -import { - projects as projectsTable, - tasks as tasksTable, - conversations as conversationsTable, - messages as messagesTable, - lineComments as lineCommentsTable, - sshConnections as sshConnectionsTable, - type ProjectRow, - type TaskRow, - type ConversationRow, - type MessageRow, - type LineCommentRow, - type LineCommentInsert, - type SshConnectionRow, - type SshConnectionInsert, -} from '../db/schema'; - -export interface Project { - id: string; - name: string; - path: string; - // Remote project fields (optional for backward compatibility) - isRemote?: boolean; - sshConnectionId?: string | null; - remotePath?: string | null; - 
gitInfo: { - isGitRepo: boolean; - remote?: string; - branch?: string; - baseRef?: string; - }; - githubInfo?: { - repository: string; - connected: boolean; - }; - createdAt: string; - updatedAt: string; -} - -export interface Task { - id: string; - projectId: string; - name: string; - branch: string; - path: string; - status: 'active' | 'idle' | 'running'; - agentId?: string | null; - metadata?: any; - useWorktree?: boolean; - archivedAt?: string | null; - createdAt: string; - updatedAt: string; -} - -export interface Conversation { - id: string; - taskId: string; - title: string; - provider?: string | null; - isActive?: boolean; - isMain?: boolean; - displayOrder?: number; - metadata?: string | null; - createdAt: string; - updatedAt: string; -} - -export interface Message { - id: string; - conversationId: string; - content: string; - sender: 'user' | 'agent'; - timestamp: string; - metadata?: string; // JSON string for additional data -} - -export interface MigrationSummary { - appliedCount: number; - totalMigrations: number; - recovered: boolean; -} - -export class DatabaseSchemaMismatchError extends Error { - readonly code = 'DB_SCHEMA_MISMATCH'; - readonly dbPath: string; - readonly missingInvariants: string[]; - - constructor(dbPath: string, missingInvariants: string[]) { - const suffix = missingInvariants.length > 0 ? 
` (${missingInvariants.join(', ')})` : ''; - super(`Database schema mismatch${suffix}`); - this.name = 'DatabaseSchemaMismatchError'; - this.dbPath = dbPath; - this.missingInvariants = missingInvariants; - } -} - -export class DatabaseService { - private static migrationsApplied = false; - private db: sqlite3Type.Database | null = null; - private sqlite3: typeof sqlite3Type | null = null; - private dbPath: string; - private disabled: boolean = false; - private lastMigrationSummary: MigrationSummary | null = null; - - constructor() { - if (process.env.EMDASH_DISABLE_NATIVE_DB === '1') { - this.disabled = true; - } - this.dbPath = resolveDatabasePath(); - } - - async initialize(): Promise { - if (this.disabled) return Promise.resolve(); - if (!this.sqlite3) { - try { - // Dynamic import to avoid loading native module at startup - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - this.sqlite3 = (await import('sqlite3')) as unknown as typeof sqlite3Type; - } catch (e) { - // Track critical database initialization error - await errorTracking.captureDatabaseError(e, 'initialize_sqlite3_import'); - return Promise.reject(e); - } - } - return new Promise((resolve, reject) => { - this.db = new this.sqlite3!.Database(this.dbPath, async (err) => { - if (err) { - // Track critical database connection error - await errorTracking.captureDatabaseError(err, 'initialize_connection', { - db_path: this.dbPath, - }); - reject(err); - return; - } - - this.ensureMigrations() - .then(async () => { - await this.validateSchemaContract(); - resolve(); - }) - .catch(async (initError) => { - const operation = - initError instanceof DatabaseSchemaMismatchError - ? 
'initialize_schema_contract' - : 'initialize_migrations'; - await errorTracking.captureDatabaseError(initError, operation, { - db_path: this.dbPath, - }); - reject(initError); - }); - }); - }); - } - - getLastMigrationSummary(): MigrationSummary | null { - return this.lastMigrationSummary; - } - - async saveProject(project: Omit): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - const gitRemote = project.gitInfo.remote ?? null; - const gitBranch = project.gitInfo.branch ?? null; - const baseRef = this.computeBaseRef( - project.gitInfo.baseRef, - project.gitInfo.remote, - project.gitInfo.branch - ); - const githubRepository = project.githubInfo?.repository ?? null; - const githubConnected = project.githubInfo?.connected ? 1 : 0; - - // Clean up stale rows that would conflict on id or path but not both. - // This prevents unique constraint errors when re-adding a deleted project. - await db - .delete(projectsTable) - .where( - or( - and(eq(projectsTable.id, project.id), ne(projectsTable.path, project.path)), - and(eq(projectsTable.path, project.path), ne(projectsTable.id, project.id)) - ) - ); - - await db - .insert(projectsTable) - .values({ - id: project.id, - name: project.name, - path: project.path, - gitRemote, - gitBranch, - baseRef: baseRef ?? null, - githubRepository, - githubConnected, - sshConnectionId: project.sshConnectionId ?? null, - isRemote: project.isRemote ? 1 : 0, - remotePath: project.remotePath ?? null, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .onConflictDoUpdate({ - target: projectsTable.path, - set: { - name: project.name, - gitRemote, - gitBranch, - baseRef: baseRef ?? null, - githubRepository, - githubConnected, - sshConnectionId: project.sshConnectionId ?? null, - isRemote: project.isRemote ? 1 : 0, - remotePath: project.remotePath ?? 
null, - updatedAt: sql`CURRENT_TIMESTAMP`, - }, - }); - } - - async getProjects(): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - const rows = await db.select().from(projectsTable).orderBy(desc(projectsTable.updatedAt)); - return rows.map((row) => this.mapDrizzleProjectRow(row)); - } - - async getProjectById(projectId: string): Promise { - if (this.disabled) return null; - if (!projectId) { - throw new Error('projectId is required'); - } - const { db } = await getDrizzleClient(); - const rows = await db - .select() - .from(projectsTable) - .where(eq(projectsTable.id, projectId)) - .limit(1); - - if (rows.length === 0) { - return null; - } - - return this.mapDrizzleProjectRow(rows[0]); - } - - async updateProjectBaseRef(projectId: string, nextBaseRef: string): Promise { - if (this.disabled) return null; - if (!projectId) { - throw new Error('projectId is required'); - } - const trimmed = typeof nextBaseRef === 'string' ? nextBaseRef.trim() : ''; - if (!trimmed) { - throw new Error('baseRef cannot be empty'); - } - - const { db } = await getDrizzleClient(); - const rows = await db - .select({ - id: projectsTable.id, - gitRemote: projectsTable.gitRemote, - gitBranch: projectsTable.gitBranch, - }) - .from(projectsTable) - .where(eq(projectsTable.id, projectId)) - .limit(1); - - if (rows.length === 0) { - throw new Error(`Project not found: ${projectId}`); - } - - const source = rows[0]; - const normalized = this.computeBaseRef(trimmed, source.gitRemote, source.gitBranch); - - await db - .update(projectsTable) - .set({ - baseRef: normalized, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .where(eq(projectsTable.id, projectId)); - - return this.getProjectById(projectId); - } - - async saveTask(task: Omit): Promise { - if (this.disabled) return; - const metadataValue = - typeof task.metadata === 'string' - ? task.metadata - : task.metadata - ? 
JSON.stringify(task.metadata) - : null; - const { db } = await getDrizzleClient(); - await db - .insert(tasksTable) - .values({ - id: task.id, - projectId: task.projectId, - name: task.name, - branch: task.branch, - path: task.path, - status: task.status, - agentId: task.agentId ?? null, - metadata: metadataValue, - useWorktree: task.useWorktree !== false ? 1 : 0, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .onConflictDoUpdate({ - target: tasksTable.id, - set: { - projectId: task.projectId, - name: task.name, - branch: task.branch, - path: task.path, - status: task.status, - agentId: task.agentId ?? null, - metadata: metadataValue, - useWorktree: task.useWorktree !== false ? 1 : 0, - updatedAt: sql`CURRENT_TIMESTAMP`, - }, - }); - } - - async getTasks(projectId?: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - - // Filter out archived tasks by default - const rows: TaskRow[] = projectId - ? await db - .select() - .from(tasksTable) - .where(and(eq(tasksTable.projectId, projectId), isNull(tasksTable.archivedAt))) - .orderBy(desc(tasksTable.updatedAt)) - : await db - .select() - .from(tasksTable) - .where(isNull(tasksTable.archivedAt)) - .orderBy(desc(tasksTable.updatedAt)); - return rows.map((row) => this.mapDrizzleTaskRow(row)); - } - - async getArchivedTasks(projectId?: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - - const rows: TaskRow[] = projectId - ? 
await db - .select() - .from(tasksTable) - .where( - and(eq(tasksTable.projectId, projectId), sql`${tasksTable.archivedAt} IS NOT NULL`) - ) - .orderBy(desc(tasksTable.archivedAt)) - : await db - .select() - .from(tasksTable) - .where(sql`${tasksTable.archivedAt} IS NOT NULL`) - .orderBy(desc(tasksTable.archivedAt)); - return rows.map((row) => this.mapDrizzleTaskRow(row)); - } - - async archiveTask(taskId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db - .update(tasksTable) - .set({ - archivedAt: new Date().toISOString(), - status: 'idle', // Reset status since PTY processes are killed on archive - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .where(eq(tasksTable.id, taskId)); - } - - async restoreTask(taskId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db - .update(tasksTable) - .set({ - archivedAt: null, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .where(eq(tasksTable.id, taskId)); - } - - async getTaskByPath(taskPath: string): Promise { - if (this.disabled) return null; - const { db } = await getDrizzleClient(); - - const rows = await db.select().from(tasksTable).where(eq(tasksTable.path, taskPath)).limit(1); - - if (rows.length === 0) return null; - return this.mapDrizzleTaskRow(rows[0]); - } - - async getTaskById(taskId: string): Promise { - if (this.disabled) return null; - if (!taskId) return null; - const { db } = await getDrizzleClient(); - const rows = await db.select().from(tasksTable).where(eq(tasksTable.id, taskId)).limit(1); - if (rows.length === 0) return null; - return this.mapDrizzleTaskRow(rows[0]); - } - - async deleteProject(projectId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db.delete(projectsTable).where(eq(projectsTable.id, projectId)); - } - - async deleteTask(taskId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await 
db.delete(tasksTable).where(eq(tasksTable.id, taskId)); - } - - // Conversation management methods - async saveConversation( - conversation: Omit - ): Promise { - const { db } = await getDrizzleClient(); - await db - .insert(conversationsTable) - .values({ - id: conversation.id, - taskId: conversation.taskId, - title: conversation.title, - provider: conversation.provider ?? null, - isActive: conversation.isActive ? 1 : 0, - isMain: conversation.isMain ? 1 : 0, - displayOrder: conversation.displayOrder ?? 0, - metadata: conversation.metadata ?? null, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .onConflictDoUpdate({ - target: conversationsTable.id, - set: { - title: conversation.title, - provider: conversation.provider ?? null, - isActive: conversation.isActive ? 1 : 0, - isMain: conversation.isMain ? 1 : 0, - displayOrder: conversation.displayOrder ?? 0, - metadata: conversation.metadata ?? null, - updatedAt: sql`CURRENT_TIMESTAMP`, - }, - }); - } - - async getConversations(taskId: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - const rows = await db - .select() - .from(conversationsTable) - .where(eq(conversationsTable.taskId, taskId)) - .orderBy(asc(conversationsTable.displayOrder), desc(conversationsTable.updatedAt)); - return rows.map((row) => this.mapDrizzleConversationRow(row)); - } - - async getOrCreateDefaultConversation(taskId: string): Promise { - if (this.disabled) { - return { - id: `conv-${taskId}-default`, - taskId, - title: 'Default Conversation', - isMain: true, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - }; - } - const { db } = await getDrizzleClient(); - - const existingRows = await db - .select() - .from(conversationsTable) - .where(eq(conversationsTable.taskId, taskId)) - .orderBy(asc(conversationsTable.createdAt)) - .limit(1); - - if (existingRows.length > 0) { - return this.mapDrizzleConversationRow(existingRows[0]); - } - - const conversationId = 
`conv-${taskId}-${Date.now()}`; - await this.saveConversation({ - id: conversationId, - taskId, - title: 'Default Conversation', - isMain: true, - isActive: true, - }); - - const [createdRow] = await db - .select() - .from(conversationsTable) - .where(eq(conversationsTable.id, conversationId)) - .limit(1); - - if (createdRow) { - return this.mapDrizzleConversationRow(createdRow); - } - - return { - id: conversationId, - taskId, - title: 'Default Conversation', - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - }; - } - - // Message management methods - async saveMessage(message: Omit): Promise { - if (this.disabled) return; - const metadataValue = - typeof message.metadata === 'string' - ? message.metadata - : message.metadata - ? JSON.stringify(message.metadata) - : null; - const { db } = await getDrizzleClient(); - await db.transaction(async (tx) => { - await tx - .insert(messagesTable) - .values({ - id: message.id, - conversationId: message.conversationId, - content: message.content, - sender: message.sender, - metadata: metadataValue, - timestamp: sql`CURRENT_TIMESTAMP`, - }) - .onConflictDoNothing() - .run(); - - await tx - .update(conversationsTable) - .set({ updatedAt: sql`CURRENT_TIMESTAMP` }) - .where(eq(conversationsTable.id, message.conversationId)) - .run(); - }); - } - - async getMessages(conversationId: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - const rows = await db - .select() - .from(messagesTable) - .where(eq(messagesTable.conversationId, conversationId)) - .orderBy(asc(messagesTable.timestamp)); - return rows.map((row) => this.mapDrizzleMessageRow(row)); - } - - async deleteConversation(conversationId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db.delete(conversationsTable).where(eq(conversationsTable.id, conversationId)); - } - - // New multi-chat methods - async createConversation( - taskId: string, - title: 
string, - provider?: string, - isMain?: boolean - ): Promise { - if (this.disabled) { - return { - id: `conv-${taskId}-${Date.now()}`, - taskId, - title, - provider: provider ?? null, - isActive: true, - isMain: isMain ?? false, - displayOrder: 0, - metadata: null, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - }; - } - - const { db } = await getDrizzleClient(); - - // Get the next display order - const existingConversations = await db - .select() - .from(conversationsTable) - .where(eq(conversationsTable.taskId, taskId)); - - const maxOrder = Math.max(...existingConversations.map((c) => c.displayOrder || 0), -1); - - // Check if this should be the main conversation - // If explicitly set as main, check if one already exists - if (isMain === true) { - const hasMain = existingConversations.some((c) => c.isMain === 1); - if (hasMain) { - isMain = false; // Don't allow multiple main conversations - } - } else if (isMain === undefined) { - // If not specified, make it main only if it's the first conversation - isMain = existingConversations.length === 0; - } - - // Deactivate other conversations - await db - .update(conversationsTable) - .set({ isActive: 0 }) - .where(eq(conversationsTable.taskId, taskId)); - - // Create the new conversation - const conversationId = `conv_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - const newConversation = { - id: conversationId, - taskId, - title, - provider: provider ?? null, - isActive: true, - isMain: isMain ?? 
false, - displayOrder: maxOrder + 1, - }; - - await this.saveConversation(newConversation); - - // Fetch the created conversation - const [createdRow] = await db - .select() - .from(conversationsTable) - .where(eq(conversationsTable.id, conversationId)) - .limit(1); - - return this.mapDrizzleConversationRow(createdRow); - } - - async setActiveConversation(taskId: string, conversationId: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - - await db.transaction(async (tx) => { - // Deactivate all conversations for this task - await tx - .update(conversationsTable) - .set({ isActive: 0 }) - .where(eq(conversationsTable.taskId, taskId)); - - // Activate the selected one - await tx - .update(conversationsTable) - .set({ isActive: 1, updatedAt: sql`CURRENT_TIMESTAMP` }) - .where(eq(conversationsTable.id, conversationId)); - }); - } - - async getActiveConversation(taskId: string): Promise { - if (this.disabled) return null; - const { db } = await getDrizzleClient(); - - const results = await db - .select() - .from(conversationsTable) - .where(and(eq(conversationsTable.taskId, taskId), eq(conversationsTable.isActive, 1))) - .limit(1); - - return results[0] ? 
this.mapDrizzleConversationRow(results[0]) : null; - } - - async reorderConversations(taskId: string, conversationIds: string[]): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - - await db.transaction(async (tx) => { - for (let i = 0; i < conversationIds.length; i++) { - await tx - .update(conversationsTable) - .set({ displayOrder: i }) - .where(eq(conversationsTable.id, conversationIds[i])); - } - }); - } - - async updateConversationTitle(conversationId: string, title: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - - await db - .update(conversationsTable) - .set({ title, updatedAt: sql`CURRENT_TIMESTAMP` }) - .where(eq(conversationsTable.id, conversationId)); - } - - // Line comment management methods - async saveLineComment( - input: Omit - ): Promise { - if (this.disabled) return ''; - const id = `comment-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; - const { db } = await getDrizzleClient(); - await db.insert(lineCommentsTable).values({ - id, - taskId: input.taskId, - filePath: input.filePath, - lineNumber: input.lineNumber, - lineContent: input.lineContent ?? 
null, - content: input.content, - updatedAt: sql`CURRENT_TIMESTAMP`, - }); - return id; - } - - async getLineComments(taskId: string, filePath?: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - - if (filePath) { - const rows = await db - .select() - .from(lineCommentsTable) - .where( - sql`${lineCommentsTable.taskId} = ${taskId} AND ${lineCommentsTable.filePath} = ${filePath}` - ) - .orderBy(asc(lineCommentsTable.lineNumber)); - return rows; - } - - const rows = await db - .select() - .from(lineCommentsTable) - .where(eq(lineCommentsTable.taskId, taskId)) - .orderBy(asc(lineCommentsTable.lineNumber)); - return rows; - } - - async updateLineComment(id: string, content: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db - .update(lineCommentsTable) - .set({ - content, - updatedAt: sql`CURRENT_TIMESTAMP`, - }) - .where(eq(lineCommentsTable.id, id)); - } - - async deleteLineComment(id: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - await db.delete(lineCommentsTable).where(eq(lineCommentsTable.id, id)); - } - - async markCommentsSent(commentIds: string[]): Promise { - if (this.disabled || commentIds.length === 0) return; - const { db } = await getDrizzleClient(); - const now = new Date().toISOString(); - await db - .update(lineCommentsTable) - .set({ sentAt: now }) - .where(inArray(lineCommentsTable.id, commentIds)); - } - - async getUnsentComments(taskId: string): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - const rows = await db - .select() - .from(lineCommentsTable) - .where(and(eq(lineCommentsTable.taskId, taskId), isNull(lineCommentsTable.sentAt))) - .orderBy(asc(lineCommentsTable.filePath), asc(lineCommentsTable.lineNumber)); - return rows; - } - - // SSH connection management methods - async saveSshConnection( - connection: Omit & { id?: string } - ): Promise { - if 
(this.disabled) { - throw new Error('Database is disabled'); - } - const { db } = await getDrizzleClient(); - - const id = connection.id ?? `ssh_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - const now = new Date().toISOString(); - - const result = await db - .insert(sshConnectionsTable) - .values({ - ...connection, - id, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: sshConnectionsTable.id, - set: { - name: connection.name, - host: connection.host, - port: connection.port, - username: connection.username, - authType: connection.authType, - privateKeyPath: connection.privateKeyPath ?? null, - useAgent: connection.useAgent, - updatedAt: now, - }, - }) - .returning(); - - return result[0]; - } - - async getSshConnections(): Promise { - if (this.disabled) return []; - const { db } = await getDrizzleClient(); - return db.select().from(sshConnectionsTable).orderBy(sshConnectionsTable.name); - } - - async getSshConnection(id: string): Promise { - if (this.disabled) return null; - const { db } = await getDrizzleClient(); - const rows = await db - .select() - .from(sshConnectionsTable) - .where(eq(sshConnectionsTable.id, id)) - .limit(1); - return rows.length > 0 ? 
rows[0] : null; - } - - async deleteSshConnection(id: string): Promise { - if (this.disabled) return; - const { db } = await getDrizzleClient(); - - // First update any projects using this connection - await db - .update(projectsTable) - .set({ sshConnectionId: null, isRemote: 0 }) - .where(eq(projectsTable.sshConnectionId, id)); - - // Then delete the connection - await db.delete(sshConnectionsTable).where(eq(sshConnectionsTable.id, id)); - } - - private computeBaseRef( - preferred?: string | null, - remote?: string | null, - branch?: string | null - ): string { - const remoteName = this.getRemoteAlias(remote); - const normalize = (value?: string | null): string | undefined => { - if (!value) return undefined; - const trimmed = value.trim(); - if (!trimmed || trimmed.includes('://')) return undefined; - - if (trimmed.includes('/')) { - const [head, ...rest] = trimmed.split('/'); - const branchPart = rest.join('/').replace(/^\/+/, ''); - if (head && branchPart) { - return `${head}/${branchPart}`; - } - if (!head && branchPart) { - // Leading slash - prepend remote if available - return remoteName ? `${remoteName}/${branchPart}` : branchPart; - } - return undefined; - } - - // Plain branch name - prepend remote only if one exists - const suffix = trimmed.replace(/^\/+/, ''); - return remoteName ? `${remoteName}/${suffix}` : suffix; - }; - - // Default: use origin/main if remote exists, otherwise just 'main' - const defaultBranch = remoteName - ? `${remoteName}/${this.defaultBranchName()}` - : this.defaultBranchName(); - return normalize(preferred) ?? normalize(branch) ?? 
defaultBranch; - } - - private defaultRemoteName(): string { - return 'origin'; - } - - private getRemoteAlias(remote?: string | null): string { - if (!remote) return this.defaultRemoteName(); - const trimmed = remote.trim(); - if (!trimmed) return ''; // Empty string indicates no remote (local-only repo) - if (/^[A-Za-z0-9._-]+$/.test(trimmed) && !trimmed.includes('://')) { - return trimmed; - } - return this.defaultRemoteName(); - } - - private defaultBranchName(): string { - return 'main'; - } - - private mapDrizzleProjectRow(row: ProjectRow): Project { - return { - id: row.id, - name: row.name, - path: row.path, - isRemote: row.isRemote === 1, - sshConnectionId: row.sshConnectionId ?? null, - remotePath: row.remotePath ?? null, - gitInfo: { - isGitRepo: !!(row.gitRemote || row.gitBranch), - remote: row.gitRemote ?? undefined, - branch: row.gitBranch ?? undefined, - baseRef: this.computeBaseRef(row.baseRef, row.gitRemote, row.gitBranch), - }, - githubInfo: row.githubRepository - ? { - repository: row.githubRepository, - connected: !!row.githubConnected, - } - : undefined, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - }; - } - - private mapDrizzleTaskRow(row: TaskRow): Task { - return { - id: row.id, - projectId: row.projectId, - name: row.name, - branch: row.branch, - path: row.path, - status: (row.status as Task['status']) ?? 'idle', - agentId: row.agentId ?? null, - metadata: - typeof row.metadata === 'string' && row.metadata.length > 0 - ? this.parseTaskMetadata(row.metadata, row.id) - : null, - useWorktree: row.useWorktree === 1, - archivedAt: row.archivedAt ?? null, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - }; - } - - private mapDrizzleConversationRow(row: ConversationRow): Conversation { - return { - id: row.id, - taskId: row.taskId, - title: row.title, - provider: row.provider ?? 
null, - isActive: row.isActive === 1, - // For backward compatibility: treat missing isMain as true (assume first/only conversation is main) - isMain: row.isMain !== undefined ? row.isMain === 1 : true, - displayOrder: row.displayOrder ?? 0, - metadata: row.metadata ?? null, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - }; - } - - private mapDrizzleMessageRow(row: MessageRow): Message { - return { - id: row.id, - conversationId: row.conversationId, - content: row.content, - sender: row.sender as Message['sender'], - timestamp: row.timestamp, - metadata: row.metadata ?? undefined, - }; - } - - private parseTaskMetadata(serialized: string, taskId: string): any { - try { - return JSON.parse(serialized); - } catch (error) { - console.warn(`Failed to parse task metadata for ${taskId}`, error); - return null; - } - } - - async close(): Promise { - if (this.disabled || !this.db) return; - - return new Promise((resolve, reject) => { - this.db!.close((err) => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); - } - - private async ensureMigrations(): Promise { - if (this.disabled) return; - if (!this.db) throw new Error('Database not initialized'); - if (DatabaseService.migrationsApplied) return; - - const migrationsPath = resolveMigrationsPath(); - if (!migrationsPath) { - // Provide a detailed error message for debugging - const errorMsg = [ - 'Failed to locate database migrations folder.', - 'This can happen when:', - '1. The app was installed via Homebrew (try downloading directly from GitHub)', - '2. The app is running from Downloads/DMG (move it to Applications)', - '3. The installation is incomplete or corrupted', - '4. Security software is blocking file access', - '', - 'To fix: Try downloading and installing Emdash directly from:', - 'https://github.com/generalaction/emdash/releases', - '', - ].join('\n'); - - throw new Error(errorMsg); - } - - // We run schema migrations with foreign_keys disabled. 
- // Many dev DBs were created with foreign_keys=OFF, so legacy data can contain orphans. - // Enabling FK enforcement mid-migration can cause schema transitions (table rebuilds) to fail. - await this.execSql('PRAGMA foreign_keys=OFF;'); - try { - // IMPORTANT: - // Drizzle's built-in migrator for sqlite-proxy decides what to run based on the latest - // `created_at` timestamp in __drizzle_migrations. If a migration is added later but has an - // earlier timestamp than the latest applied migration, Drizzle will skip it forever. - // - // To make migrations robust for dev DBs (and for any DB that may have extra migrations), - // we apply migrations by missing hash instead of timestamp ordering. - const migrations = readMigrationFiles({ migrationsFolder: migrationsPath }); - const tagByWhen = await this.tryLoadMigrationTagByWhen(migrationsPath); - - await this.execSql(` - CREATE TABLE IF NOT EXISTS "__drizzle_migrations" ( - id SERIAL PRIMARY KEY, - hash text NOT NULL, - created_at numeric - ) - `); - - const appliedRows = await this.allSql<{ hash: string }>( - `SELECT hash FROM "__drizzle_migrations"` - ); - const applied = new Set(appliedRows.map((r) => r.hash)); - - // Recovery: if a previous run partially applied the workspace->task migration, finish it. - // Symptom: `tasks` exists, `conversations` still has `workspace_id`, and `__new_conversations` exists. 
- let recovered = false; - if ( - (await this.tableExists('tasks')) && - (await this.tableExists('conversations')) && - (await this.tableExists('__new_conversations')) && - (await this.tableHasColumn('conversations', 'workspace_id')) && - !(await this.tableHasColumn('conversations', 'task_id')) - ) { - // Populate new conversations table from the old one (FK enforcement is OFF, so orphans won't block) - await this.execSql(` - INSERT INTO "__new_conversations"("id", "task_id", "title", "created_at", "updated_at") - SELECT "id", "workspace_id", "title", "created_at", "updated_at" FROM "conversations" - `); - await this.execSql(`DROP TABLE "conversations";`); - await this.execSql(`ALTER TABLE "__new_conversations" RENAME TO "conversations";`); - await this.execSql( - `CREATE INDEX IF NOT EXISTS "idx_conversations_task_id" ON "conversations" ("task_id");` - ); - - // Mark the workspace->task migration as applied (even if it wasn't tracked). - // This prevents the hash-based runner from attempting to re-run it against a partially-migrated DB. - await this.ensureMigrationMarkedApplied( - migrationsPath, - applied, - '0002_lyrical_impossible_man' - ); - recovered = true; - } - - let appliedCount = 0; - for (const migration of migrations) { - if (applied.has(migration.hash)) continue; - - const tag = tagByWhen?.get(migration.folderMillis); - // If the DB already reflects the workspace->task rename (e.g. user manually fixed their DB) - // but the migration hash wasn't recorded, mark it as applied and move on. 
- if ( - tag === '0002_lyrical_impossible_man' && - (await this.tableExists('tasks')) && - !(await this.tableExists('workspaces')) && - (await this.tableExists('conversations')) && - (await this.tableHasColumn('conversations', 'task_id')) - ) { - await this.execSql( - `INSERT INTO "__drizzle_migrations" ("hash", "created_at") VALUES('${migration.hash}', '${migration.folderMillis}')` - ); - applied.add(migration.hash); - continue; - } - - // Execute each statement chunk (drizzle-kit uses '--> statement-breakpoint') - for (const statement of migration.sql) { - // We manage FK enforcement ourselves during migrations. - const trimmed = statement.trim().toUpperCase(); - if (trimmed.startsWith('PRAGMA FOREIGN_KEYS=')) continue; - await this.execSql(statement); - } - - // Record as applied (same schema as Drizzle uses) - await this.execSql( - `INSERT INTO "__drizzle_migrations" ("hash", "created_at") VALUES('${migration.hash}', '${migration.folderMillis}')` - ); - - applied.add(migration.hash); - appliedCount += 1; - } - - this.lastMigrationSummary = { - appliedCount, - totalMigrations: migrations.length, - recovered, - }; - - DatabaseService.migrationsApplied = true; - } finally { - // Restore FK enforcement for normal operation (and ensure it's re-enabled on failure). 
- await this.execSql('PRAGMA foreign_keys=ON;'); - } - } - - private async validateSchemaContract(): Promise { - if (this.disabled) return; - - const missingInvariants: string[] = []; - - if (!(await this.tableHasColumn('projects', 'base_ref'))) { - missingInvariants.push('projects.base_ref'); - } - if (!(await this.tableExists('tasks'))) { - missingInvariants.push('tasks table'); - } - if (!(await this.tableHasColumn('conversations', 'task_id'))) { - missingInvariants.push('conversations.task_id'); - } - - if (missingInvariants.length > 0) { - throw new DatabaseSchemaMismatchError(this.dbPath, missingInvariants); - } - } - - private async tryLoadMigrationTagByWhen( - migrationsFolder: string - ): Promise | null> { - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const fs = require('node:fs'); - // eslint-disable-next-line @typescript-eslint/no-var-requires - const path = require('node:path'); - const journalPath = path.join(migrationsFolder, 'meta', '_journal.json'); - if (!fs.existsSync(journalPath)) return null; - const parsed: unknown = JSON.parse(fs.readFileSync(journalPath, 'utf8')); - if (!parsed || typeof parsed !== 'object') return null; - const entries = (parsed as { entries?: unknown }).entries; - if (!Array.isArray(entries)) return null; - - const map = new Map(); - for (const e of entries) { - if (!e || typeof e !== 'object') continue; - const when = (e as { when?: unknown }).when; - const tag = (e as { tag?: unknown }).tag; - if (typeof when === 'number' && typeof tag === 'string') { - map.set(when, tag); - } - } - return map; - } catch { - return null; - } - } - - private async ensureMigrationMarkedApplied( - migrationsFolder: string, - applied: Set, - tag: string - ): Promise { - // Only mark if the SQL file + journal entry exist. 
- // eslint-disable-next-line @typescript-eslint/no-var-requires - const fs = require('node:fs'); - // eslint-disable-next-line @typescript-eslint/no-var-requires - const path = require('node:path'); - // eslint-disable-next-line @typescript-eslint/no-var-requires - const crypto = require('node:crypto'); - - const journalPath = path.join(migrationsFolder, 'meta', '_journal.json'); - if (!fs.existsSync(journalPath)) return; - const journalParsed: unknown = JSON.parse(fs.readFileSync(journalPath, 'utf8')); - const entries = (journalParsed as { entries?: unknown }).entries; - if (!Array.isArray(entries)) return; - const entry = entries.find((e) => { - if (!e || typeof e !== 'object') return false; - return (e as { tag?: unknown }).tag === tag; - }) as { when?: unknown } | undefined; - if (!entry) return; - - const sqlPath = path.join(migrationsFolder, `${tag}.sql`); - if (!fs.existsSync(sqlPath)) return; - const contents = fs.readFileSync(sqlPath, 'utf8'); - const hash = crypto.createHash('sha256').update(contents).digest('hex'); - - if (applied.has(hash)) return; - const createdAt = typeof entry.when === 'number' ? 
entry.when : Date.now(); - await this.execSql( - `INSERT INTO "__drizzle_migrations" ("hash", "created_at") VALUES('${hash}', '${createdAt}')` - ); - applied.add(hash); - } - - private async tableExists(name: string): Promise { - const rows = await this.allSql<{ name: string }>( - `SELECT name FROM sqlite_master WHERE type='table' AND name='${name.replace(/'/g, "''")}' LIMIT 1` - ); - return rows.length > 0; - } - - private async tableHasColumn(tableName: string, columnName: string): Promise { - if (!(await this.tableExists(tableName))) return false; - const rows = await this.allSql<{ name: string }>( - `PRAGMA table_info("${tableName.replace(/"/g, '""')}")` - ); - return rows.some((r) => r.name === columnName); - } - - private async allSql(query: string): Promise { - if (!this.db) throw new Error('Database not initialized'); - const trimmed = query.trim(); - if (!trimmed) return []; - - return await new Promise((resolve, reject) => { - this.db!.all(trimmed, (err, rows) => { - if (err) { - reject(err); - } else { - resolve((rows ?? []) as T[]); - } - }); - }); - } - - private async execSql(statement: string): Promise { - if (!this.db) throw new Error('Database not initialized'); - const trimmed = statement.trim(); - if (!trimmed) return; - - await new Promise((resolve, reject) => { - this.db!.exec(trimmed, (err) => { - if (err) { - // Handle idempotent migration cases - skip if schema already matches - const msg = err.message ?? 
''; - if (msg.includes('duplicate column name') || msg.includes('already exists')) { - // Schema change already applied, continue - resolve(); - return; - } - reject(err); - } else { - resolve(); - } - }); - }); - } -} - -export const databaseService = new DatabaseService(); diff --git a/src/main/services/GitHubCLIInstaller.ts b/src/main/services/GitHubCLIInstaller.ts deleted file mode 100644 index ac47d7c93..000000000 --- a/src/main/services/GitHubCLIInstaller.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { exec } from 'child_process'; -import { promisify } from 'util'; -import * as os from 'os'; - -const execAsync = promisify(exec); - -export class GitHubCLIInstaller { - /** - * Check if gh CLI is installed - */ - async isInstalled(): Promise { - try { - await execAsync('gh --version'); - return true; - } catch { - return false; - } - } - - /** - * Attempt to install gh CLI automatically - */ - async install(): Promise<{ success: boolean; error?: string }> { - const platform = os.platform(); - - try { - switch (platform) { - case 'darwin': // macOS - return await this.installMacOS(); - case 'linux': - return await this.installLinux(); - case 'win32': - return await this.installWindows(); - default: - return { success: false, error: `Unsupported platform: ${platform}` }; - } - } catch (error) { - console.error('Failed to install gh CLI:', error); - return { - success: false, - error: error instanceof Error ? error.message : 'Installation failed', - }; - } - } - - private async installMacOS(): Promise<{ success: boolean; error?: string }> { - try { - // Check if Homebrew is installed - await execAsync('which brew'); - - // Install gh using Homebrew - await execAsync('brew install gh'); - return { success: true }; - } catch (error) { - return { - success: false, - error: 'Homebrew not found. 
Please install from https://brew.sh/ first.', - }; - } - } - - private async installLinux(): Promise<{ success: boolean; error?: string }> { - try { - // Try apt (Debian/Ubuntu) - await execAsync('sudo apt update && sudo apt install -y gh'); - return { success: true }; - } catch { - return { - success: false, - error: 'Could not install gh CLI. Please install manually: https://cli.github.com/', - }; - } - } - - private async installWindows(): Promise<{ success: boolean; error?: string }> { - try { - // Try winget - await execAsync('winget install GitHub.cli'); - return { success: true }; - } catch { - return { - success: false, - error: 'Could not install gh CLI. Please install manually: https://cli.github.com/', - }; - } - } -} - -export const githubCLIInstaller = new GitHubCLIInstaller(); diff --git a/src/main/services/GitHubService.ts b/src/main/services/GitHubService.ts deleted file mode 100644 index 39bd7f5d3..000000000 --- a/src/main/services/GitHubService.ts +++ /dev/null @@ -1,1125 +0,0 @@ -import { exec, spawn } from 'child_process'; -import { promisify } from 'util'; -import * as path from 'path'; -import * as fs from 'fs'; -import { GITHUB_CONFIG } from '../config/github.config'; -import { getMainWindow } from '../app/window'; -import { errorTracking } from '../errorTracking'; - -const execAsync = promisify(exec); - -export interface GitHubUser { - id: number; - login: string; - name: string; - email: string; - avatar_url: string; -} - -export interface GitHubRepo { - id: number; - name: string; - full_name: string; - description: string | null; - html_url: string; - clone_url: string; - ssh_url: string; - default_branch: string; - private: boolean; - updated_at: string | null; - language: string | null; - stargazers_count: number; - forks_count: number; -} - -export interface GitHubPullRequest { - number: number; - title: string; - headRefName: string; - baseRefName: string; - url: string; - isDraft?: boolean; - updatedAt?: string | null; - headRefOid?: 
string; - author?: { - login?: string; - name?: string; - } | null; - headRepositoryOwner?: { - login?: string; - } | null; - headRepository?: { - name?: string; - nameWithOwner?: string; - url?: string; - } | null; -} - -export interface AuthResult { - success: boolean; - token?: string; - user?: GitHubUser; - error?: string; -} - -export interface DeviceCodeResult { - success: boolean; - device_code?: string; - user_code?: string; - verification_uri?: string; - expires_in?: number; - interval?: number; - error?: string; -} - -export class GitHubService { - private readonly SERVICE_NAME = 'emdash-github'; - private readonly ACCOUNT_NAME = 'github-token'; - - // Polling state management - private isPolling = false; - private pollingInterval: NodeJS.Timeout | null = null; - private currentDeviceCode: string | null = null; - private currentInterval = 5; - - /** - * Authenticate with GitHub using Device Flow - * Returns device code info for the UI to display to the user - */ - async authenticate(): Promise { - return await this.requestDeviceCode(); - } - - /** - * Start Device Flow authentication with automatic background polling - * Emits events to renderer for UI updates - * Returns immediately with device code info - */ - async startDeviceFlowAuth(): Promise { - // Stop any existing polling - this.stopPolling(); - - // Request device code - const deviceCodeResult = await this.requestDeviceCode(); - - if (!deviceCodeResult.success || !deviceCodeResult.device_code) { - // Emit error to renderer - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: deviceCodeResult.error || 'Failed to request device code', - }); - } - return deviceCodeResult; - } - - // Store device code and interval - this.currentDeviceCode = deviceCodeResult.device_code; - this.currentInterval = deviceCodeResult.interval || 5; - this.isPolling = true; - - // Give renderer time to mount modal and subscribe to events - // Then emit 
device code for display - setTimeout(() => { - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:device-code', { - userCode: deviceCodeResult.user_code, - verificationUri: deviceCodeResult.verification_uri, - expiresIn: deviceCodeResult.expires_in, - interval: this.currentInterval, - }); - } - }, 100); // 100ms delay to ensure modal is mounted - - // Start background polling - this.startBackgroundPolling(deviceCodeResult.expires_in || 900); - - return deviceCodeResult; - } - - /** - * Start background polling loop - */ - private startBackgroundPolling(expiresIn: number): void { - if (!this.currentDeviceCode) return; - - const startTime = Date.now(); - const expiresAt = startTime + expiresIn * 1000; - - const poll = async () => { - if (!this.isPolling || !this.currentDeviceCode) { - this.stopPolling(); - return; - } - - // Check if expired - if (Date.now() >= expiresAt) { - this.stopPolling(); - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: 'expired_token', - message: 'Authorization code expired. Please try again.', - }); - } - return; - } - - try { - const result = await this.pollDeviceToken(this.currentDeviceCode, this.currentInterval); - - if (result.success && result.token) { - // Success! 
Emit immediately - this.stopPolling(); - - // Update error tracking with GitHub username - if (result.user?.login) { - await errorTracking.updateGithubUsername(result.user.login); - } - - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:success', { - token: result.token, - user: result.user || undefined, - }); - } - } else if (result.error) { - const mainWindow = getMainWindow(); - - if (result.error === 'authorization_pending') { - // Still waiting - emit polling status - if (mainWindow) { - mainWindow.webContents.send('github:auth:polling', { - status: 'waiting', - }); - } - } else if (result.error === 'slow_down') { - // GitHub wants us to slow down - this.currentInterval += 5; - if (mainWindow) { - mainWindow.webContents.send('github:auth:slow-down', { - newInterval: this.currentInterval, - }); - } - - // Restart interval with new timing - if (this.pollingInterval) { - clearInterval(this.pollingInterval); - this.pollingInterval = setInterval(poll, this.currentInterval * 1000); - } - } else if (result.error === 'expired_token') { - // Code expired - this.stopPolling(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: 'expired_token', - message: 'Authorization code expired. 
Please try again.', - }); - } - } else if (result.error === 'access_denied') { - // User denied - this.stopPolling(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: 'access_denied', - message: 'Authorization was cancelled.', - }); - } - } else { - // Unknown error - this.stopPolling(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: result.error, - message: `Authentication failed: ${result.error}`, - }); - } - } - } - } catch (error) { - console.error('Polling error:', error); - - // Track polling errors - await errorTracking.captureGitHubError(error, 'poll_device_code'); - - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:error', { - error: 'network_error', - message: 'Network error during authentication. Please try again.', - }); - } - this.stopPolling(); - } - }; - - // Start polling with initial interval - setTimeout(poll, this.currentInterval * 1000); - this.pollingInterval = setInterval(poll, this.currentInterval * 1000); - } - - /** - * Stop the background polling - */ - stopPolling(): void { - this.isPolling = false; - if (this.pollingInterval) { - clearInterval(this.pollingInterval); - this.pollingInterval = null; - } - this.currentDeviceCode = null; - this.currentInterval = 5; - } - - /** - * Cancel the authentication flow - */ - cancelAuth(): void { - this.stopPolling(); - const mainWindow = getMainWindow(); - if (mainWindow) { - mainWindow.webContents.send('github:auth:cancelled', {}); - } - } - - /** - * Request a device code from GitHub for Device Flow authentication - */ - async requestDeviceCode(): Promise { - try { - const response = await fetch('https://github.com/login/device/code', { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - client_id: GITHUB_CONFIG.clientId, - scope: GITHUB_CONFIG.scopes.join(' '), - }), - }); - - const data = (await 
response.json()) as { - device_code?: string; - user_code?: string; - verification_uri?: string; - expires_in?: number; - interval?: number; - error?: string; - error_description?: string; - }; - - if (data.device_code && data.user_code && data.verification_uri) { - // Don't auto-open here - let the UI control when to open browser - return { - success: true, - device_code: data.device_code, - user_code: data.user_code, - verification_uri: data.verification_uri, - expires_in: data.expires_in || 900, - interval: data.interval || 5, - }; - } else { - return { - success: false, - error: data.error_description || 'Failed to request device code', - }; - } - } catch (error) { - console.error('Device code request failed:', error); - return { - success: false, - error: 'Network error while requesting device code', - }; - } - } - - /** - * Poll for access token using device code - * Should be called repeatedly until success or error - */ - async pollDeviceToken(deviceCode: string, _interval: number = 5): Promise { - try { - const response = await fetch('https://github.com/login/oauth/access_token', { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - client_id: GITHUB_CONFIG.clientId, - device_code: deviceCode, - grant_type: 'urn:ietf:params:oauth:grant-type:device_code', - }), - }); - - const data = (await response.json()) as { - access_token?: string; - token_type?: string; - scope?: string; - error?: string; - error_description?: string; - }; - - if (data.access_token) { - // We get the token, now fetch user info immediately before returning success - // This ensures the UI has the correct username without a race condition - const token = data.access_token; - const user = await this.getUserInfo(token); - - // Store token and authenticate gh CLI BEFORE returning success. 
- // This must complete synchronously (awaited) so that when the renderer - // receives the success event and checks `gh api user`, the CLI is - // already authenticated. Previously these were deferred via setImmediate, - // causing a race where the status check ran before gh CLI auth finished. - try { - await this.storeToken(token); - } catch (error) { - console.warn('Failed to store token:', error); - } - - try { - await this.authenticateGHCLI(token); - } catch { - // Silent fail - gh CLI might not be installed - } - - const mainWindow = getMainWindow(); - if (user && mainWindow) { - mainWindow.webContents.send('github:auth:user-updated', { - user: user, - }); - } - - return { - success: true, - token: token, - user: user || undefined, - }; - } else if (data.error) { - // Return error to caller - they decide how to handle - return { - success: false, - error: data.error, - }; - } else { - return { - success: false, - error: 'Unknown error during token polling', - }; - } - } catch (error) { - console.error('Token polling failed:', error); - return { - success: false, - error: 'Network error during token polling', - }; - } - } - - /** - * Authenticate gh CLI with the OAuth token - */ - private async authenticateGHCLI(token: string): Promise { - try { - // Check if gh CLI is installed first - await execAsync('gh --version'); - - // Security: Authenticate gh CLI with token via stdin (not shell interpolation) - // This prevents command injection if token contains shell metacharacters - await new Promise((resolve, reject) => { - const child = spawn('gh', ['auth', 'login', '--with-token'], { - stdio: ['pipe', 'pipe', 'pipe'], - }); - - child.on('close', (code) => { - if (code === 0) { - resolve(); - } else { - reject(new Error(`gh auth login failed with code ${code}`)); - } - }); - - child.on('error', reject); - - // Write token to stdin and close it - child.stdin.write(token); - child.stdin.end(); - }); - } catch (error) { - console.warn('Could not authenticate gh CLI 
(may not be installed):', error); - // Don't throw - OAuth still succeeded even if gh CLI isn't available - } - } - - /** - * Execute gh command with automatic re-auth on failure - */ - private async execGH( - command: string, - options?: any - ): Promise<{ stdout: string; stderr: string }> { - try { - const result = await execAsync(command, { encoding: 'utf8', ...options }); - return { - stdout: String(result.stdout), - stderr: String(result.stderr), - }; - } catch (error: any) { - // Check if it's an auth error - if (error.message && error.message.includes('not authenticated')) { - // Try to re-authenticate gh CLI with stored token - const token = await this.getStoredToken(); - if (token) { - await this.authenticateGHCLI(token); - - // Retry the command - const result = await execAsync(command, { encoding: 'utf8', ...options }); - return { - stdout: String(result.stdout), - stderr: String(result.stderr), - }; - } - } - throw error; - } - } - - /** - * List open GitHub issues for the current repo (cwd = projectPath) - */ - async listIssues( - projectPath: string, - limit: number = 50 - ): Promise< - Array<{ - number: number; - title: string; - url?: string; - state?: string; - updatedAt?: string | null; - assignees?: Array<{ login?: string; name?: string }>; - labels?: Array<{ name?: string }>; - }> - > { - const safeLimit = Math.min(Math.max(Number(limit) || 50, 1), 200); - try { - const fields = ['number', 'title', 'url', 'state', 'updatedAt', 'assignees', 'labels']; - const { stdout } = await this.execGH( - `gh issue list --state open --limit ${safeLimit} --json ${fields.join(',')}`, - { cwd: projectPath } - ); - const list = JSON.parse(stdout || '[]'); - if (!Array.isArray(list)) return []; - return list; - } catch (error) { - console.error('Failed to list GitHub issues:', error); - return []; // Return empty array instead of throwing - } - } - - /** Search open issues in current repo */ - async searchIssues( - projectPath: string, - searchTerm: string, - 
limit: number = 20 - ): Promise< - Array<{ - number: number; - title: string; - url?: string; - state?: string; - updatedAt?: string | null; - assignees?: Array<{ login?: string; name?: string }>; - labels?: Array<{ name?: string }>; - }> - > { - const safeLimit = Math.min(Math.max(Number(limit) || 20, 1), 200); - const term = String(searchTerm || '').trim(); - if (!term) return []; - - try { - const fields = ['number', 'title', 'url', 'state', 'updatedAt', 'assignees', 'labels']; - const { stdout } = await this.execGH( - `gh issue list --state open --search ${JSON.stringify(term)} --limit ${safeLimit} --json ${fields.join(',')}`, - { cwd: projectPath } - ); - const list = JSON.parse(stdout || '[]'); - if (!Array.isArray(list)) return []; - return list; - } catch (error) { - // Surface empty results rather than failing hard on weird queries - return []; - } - } - - /** Get a single issue with body for enrichment */ - async getIssue( - projectPath: string, - number: number - ): Promise<{ - number: number; - title?: string; - body?: string; - url?: string; - state?: string; - updatedAt?: string | null; - assignees?: Array<{ login?: string; name?: string }>; - labels?: Array<{ name?: string }>; - } | null> { - try { - const fields = [ - 'number', - 'title', - 'body', - 'url', - 'state', - 'updatedAt', - 'assignees', - 'labels', - ]; - const { stdout } = await this.execGH( - `gh issue view ${JSON.stringify(String(number))} --json ${fields.join(',')}`, - { cwd: projectPath } - ); - const data = JSON.parse(stdout || 'null'); - if (!data || typeof data !== 'object') return null; - return data; - } catch (error) { - console.error('Failed to view GitHub issue:', error); - return null; - } - } - - /** - * Authenticate with GitHub using Personal Access Token - */ - async authenticateWithToken(token: string): Promise { - try { - // Test the token by getting user info - const user = await this.getUserInfo(token); - - if (user) { - // Store token securely - await 
this.storeToken(token); - - // Update error tracking with GitHub username - if (user.login) { - await errorTracking.updateGithubUsername(user.login); - } - - return { success: true, token, user }; - } - - return { success: false, error: 'Invalid token' }; - } catch (error) { - console.error('Token authentication failed:', error); - return { - success: false, - error: 'Invalid token or network error', - }; - } - } - - /** - * Check if user is authenticated - */ - async isAuthenticated(): Promise { - try { - // First check if gh CLI is authenticated system-wide - const isGHAuth = await this.isGHCLIAuthenticated(); - if (isGHAuth) { - return true; - } - - // Fall back to checking stored token - const token = await this.getStoredToken(); - - if (!token) { - // No stored token, user needs to authenticate - return false; - } - - // Test the token by making a simple API call - const user = await this.getUserInfo(token); - return !!user; - } catch (error) { - console.error('Authentication check failed:', error); - return false; - } - } - - /** - * Check if gh CLI is authenticated system-wide - */ - private async isGHCLIAuthenticated(): Promise { - try { - // gh auth status exits with 0 if authenticated, non-zero otherwise - await execAsync('gh auth status'); - return true; - } catch (error) { - // Not authenticated or gh CLI not installed - return false; - } - } - - /** - * Get user information using GitHub API or CLI - */ - async getUserInfo(token: string): Promise { - try { - let userData; - if (token) { - const response = await fetch('https://api.github.com/user', { - headers: { - Authorization: `Bearer ${token}`, - Accept: 'application/vnd.github.v3+json', - 'X-GitHub-Api-Version': '2022-11-28', - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - userData = await response.json(); - } else { - // Use gh CLI to get user info as fallback - const { stdout } = await this.execGH('gh api user'); - userData = 
JSON.parse(stdout); - } - - return { - id: userData.id, - login: userData.login, - name: userData.name || userData.login, - email: userData.email || '', - avatar_url: userData.avatar_url, - }; - } catch (error) { - console.error('Failed to get user info:', error); - return null; - } - } - - /** - * Get current authenticated user information - * This is a convenience method that doesn't require a token parameter - */ - async getCurrentUser(): Promise { - try { - // Check if authenticated first - const isAuth = await this.isAuthenticated(); - if (!isAuth) { - return null; - } - - // Get user info using the existing method - // Note: The token parameter is ignored in getUserInfo since it uses gh CLI - return await this.getUserInfo(''); - } catch (error) { - console.error('Failed to get current user:', error); - return null; - } - } - - /** - * Get user's repositories using GitHub CLI - */ - async getRepositories(_token: string): Promise { - try { - // Use gh CLI to get repositories with correct field names - const { stdout } = await this.execGH( - 'gh repo list --limit 100 --json name,nameWithOwner,description,url,defaultBranchRef,isPrivate,updatedAt,primaryLanguage,stargazerCount,forkCount' - ); - const repos = JSON.parse(stdout); - - return repos.map((repo: any) => ({ - id: Math.random(), // gh CLI doesn't provide ID, so we generate one - name: repo.name, - full_name: repo.nameWithOwner, - description: repo.description, - html_url: repo.url, - clone_url: `https://github.com/${repo.nameWithOwner}.git`, - ssh_url: `git@github.com:${repo.nameWithOwner}.git`, - default_branch: repo.defaultBranchRef?.name || 'main', - private: repo.isPrivate, - updated_at: repo.updatedAt, - language: repo.primaryLanguage?.name || null, - stargazers_count: repo.stargazerCount || 0, - forks_count: repo.forkCount || 0, - })); - } catch (error) { - console.error('Failed to fetch repositories:', error); - throw error; - } - } - - /** - * List open pull requests for the repository located at 
projectPath. - */ - async getPullRequests(projectPath: string): Promise { - try { - const fields = [ - 'number', - 'title', - 'headRefName', - 'baseRefName', - 'url', - 'isDraft', - 'updatedAt', - 'headRefOid', - 'author', - 'headRepositoryOwner', - 'headRepository', - ]; - const { stdout } = await this.execGH(`gh pr list --state open --json ${fields.join(',')}`, { - cwd: projectPath, - }); - const list = JSON.parse(stdout || '[]'); - - if (!Array.isArray(list)) return []; - - return list.map((item: any) => ({ - number: item?.number, - title: item?.title || `PR #${item?.number ?? 'unknown'}`, - headRefName: item?.headRefName || '', - baseRefName: item?.baseRefName || '', - url: item?.url || '', - isDraft: item?.isDraft ?? false, - updatedAt: item?.updatedAt || null, - headRefOid: item?.headRefOid || undefined, - author: item?.author || null, - headRepositoryOwner: item?.headRepositoryOwner || null, - headRepository: item?.headRepository || null, - })); - } catch (error) { - console.error('Failed to list pull requests:', error); - throw error; - } - } - - /** - * Ensure a local branch exists for the given pull request by delegating to gh CLI. - * Returns the branch name that now tracks the PR. 
- */ - async ensurePullRequestBranch( - projectPath: string, - prNumber: number, - branchName: string - ): Promise { - const safeBranch = branchName || `pr/${prNumber}`; - let previousRef: string | null = null; - - try { - const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', { - cwd: projectPath, - }); - const current = (stdout || '').trim(); - if (current) previousRef = current; - } catch { - previousRef = null; - } - - try { - await this.execGH( - `gh pr checkout ${JSON.stringify(String(prNumber))} --branch ${JSON.stringify(safeBranch)} --force`, - { cwd: projectPath } - ); - } catch (error) { - console.error('Failed to checkout pull request branch via gh:', error); - throw error; - } finally { - if (previousRef && previousRef !== safeBranch) { - try { - await execAsync(`git checkout ${JSON.stringify(previousRef)}`, { cwd: projectPath }); - } catch (switchErr) { - console.warn('Failed to restore previous branch after PR checkout:', switchErr); - } - } - } - - return safeBranch; - } - - /** - * Validate repository name format - */ - validateRepositoryName(name: string): { valid: boolean; error?: string } { - if (!name || name.trim().length === 0) { - return { valid: false, error: 'Repository name is required' }; - } - - const trimmed = name.trim(); - - // Check length - if (trimmed.length > 100) { - return { valid: false, error: 'Repository name must be 100 characters or less' }; - } - - // Check for valid characters (alphanumeric, hyphens, underscores, dots) - // GitHub allows: a-z, A-Z, 0-9, -, _, . 
- if (!/^[a-zA-Z0-9._-]+$/.test(trimmed)) { - return { - valid: false, - error: 'Repository name can only contain letters, numbers, hyphens, underscores, and dots', - }; - } - - // Cannot start or end with hyphen, dot, or underscore - if (/^[-._]|[-._]$/.test(trimmed)) { - return { - valid: false, - error: 'Repository name cannot start or end with a hyphen, dot, or underscore', - }; - } - - // Cannot be all dots - if (/^\.+$/.test(trimmed)) { - return { valid: false, error: 'Repository name cannot be all dots' }; - } - - // Reserved names (basic ones, GitHub has more) - const reserved = [ - 'con', - 'prn', - 'aux', - 'nul', - 'com1', - 'com2', - 'com3', - 'com4', - 'com5', - 'com6', - 'com7', - 'com8', - 'com9', - 'lpt1', - 'lpt2', - 'lpt3', - 'lpt4', - 'lpt5', - 'lpt6', - 'lpt7', - 'lpt8', - 'lpt9', - ]; - if (reserved.includes(trimmed.toLowerCase())) { - return { valid: false, error: 'Repository name is reserved' }; - } - - return { valid: true }; - } - - /** - * Check if a repository exists for the given owner and name - */ - async checkRepositoryExists(owner: string, name: string): Promise { - try { - await this.execGH(`gh repo view ${owner}/${name}`); - return true; - } catch { - return false; - } - } - - /** - * Get available owners (user + organizations) - */ - async getOwners(): Promise> { - try { - // Get current user - const { stdout: userStdout } = await this.execGH('gh api user'); - const user = JSON.parse(userStdout); - - const owners: Array<{ login: string; type: 'User' | 'Organization' }> = [ - { login: user.login, type: 'User' }, - ]; - - // Get organizations - try { - const { stdout: orgsStdout } = await this.execGH('gh api user/orgs'); - const orgs = JSON.parse(orgsStdout); - if (Array.isArray(orgs)) { - for (const org of orgs) { - owners.push({ login: org.login, type: 'Organization' }); - } - } - } catch (error) { - // If orgs fetch fails, just continue with user only - console.warn('Failed to fetch organizations:', error); - } - - return owners; 
- } catch (error) { - console.error('Failed to get owners:', error); - throw error; - } - } - - /** - * Create a new GitHub repository - */ - async createRepository(params: { - name: string; - description?: string; - owner: string; - isPrivate: boolean; - }): Promise<{ url: string; defaultBranch: string; fullName: string }> { - try { - const { name, description, owner, isPrivate } = params; - - // Build gh repo create command - const visibilityFlag = isPrivate ? '--private' : '--public'; - let command = `gh repo create ${owner}/${name} ${visibilityFlag} --confirm`; - - if (description && description.trim()) { - // Escape description for shell - const desc = JSON.stringify(description.trim()); - command += ` --description ${desc}`; - } - - await this.execGH(command); - - // Get repository details - const { stdout } = await this.execGH( - `gh repo view ${owner}/${name} --json name,nameWithOwner,url,defaultBranchRef` - ); - const repoInfo = JSON.parse(stdout); - - return { - url: repoInfo.url || `https://github.com/${repoInfo.nameWithOwner}`, - defaultBranch: repoInfo.defaultBranchRef?.name || 'main', - fullName: repoInfo.nameWithOwner || `${owner}/${name}`, - }; - } catch (error) { - console.error('Failed to create repository:', error); - throw error; - } - } - - /** - * Initialize a new project with initial files and commit - */ - async initializeNewProject(params: { - repoUrl: string; - localPath: string; - name: string; - description?: string; - }): Promise { - const { repoUrl, localPath, name, description } = params; - - try { - // Ensure the directory exists (clone should have created it, but just in case) - if (!fs.existsSync(localPath)) { - throw new Error('Local path does not exist after clone'); - } - - // Create README.md - const readmePath = path.join(localPath, 'README.md'); - const readmeContent = description ? 
`# ${name}\n\n${description}\n` : `# ${name}\n`; - fs.writeFileSync(readmePath, readmeContent, 'utf8'); - - // Initialize git, add files, commit, and push - const execOptions = { cwd: localPath }; - - // Add and commit - await execAsync('git add README.md', execOptions); - await execAsync('git commit -m "Initial commit"', execOptions); - - // Push to origin - await execAsync('git push -u origin main', execOptions).catch(async () => { - // If main branch doesn't exist, try master - try { - await execAsync('git push -u origin master', execOptions); - } catch { - // If both fail, let the error propagate - throw new Error('Failed to push to remote repository'); - } - }); - } catch (error) { - console.error('Failed to initialize new project:', error); - throw error; - } - } - - /** - * Clone a repository to local task directory - */ - async cloneRepository( - repoUrl: string, - localPath: string - ): Promise<{ success: boolean; error?: string }> { - try { - // Ensure the local path directory exists - const dir = path.dirname(localPath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } - - // Clone the repository - await execAsync(`git clone "${repoUrl}" "${localPath}"`); - - return { success: true }; - } catch (error) { - console.error('Failed to clone repository:', error); - return { - success: false, - error: error instanceof Error ? 
error.message : 'Clone failed', - }; - } - } - - /** - * Logout and clear stored token - */ - async logout(): Promise { - // Run both operations in parallel since they're independent - await Promise.allSettled([ - // Logout from gh CLI - execAsync('echo Y | gh auth logout --hostname github.com').catch((error) => { - console.warn('Failed to logout from gh CLI (may not be installed or logged in):', error); - }), - // Clear keychain token - (async () => { - try { - const keytar = await import('keytar'); - await keytar.deletePassword(this.SERVICE_NAME, this.ACCOUNT_NAME); - } catch (error) { - console.error('Failed to clear keychain token:', error); - } - })(), - ]); - } - - /** - * Store authentication token securely - */ - private async storeToken(token: string): Promise { - try { - const keytar = await import('keytar'); - await keytar.setPassword(this.SERVICE_NAME, this.ACCOUNT_NAME, token); - } catch (error) { - console.error('Failed to store token:', error); - throw error; - } - } - - /** - * Retrieve stored authentication token - */ - private async getStoredToken(): Promise { - try { - const keytar = await import('keytar'); - return await keytar.getPassword(this.SERVICE_NAME, this.ACCOUNT_NAME); - } catch (error) { - console.error('Failed to retrieve token:', error); - return null; - } - } -} - -// Export singleton instance -export const githubService = new GitHubService(); diff --git a/src/main/services/GitService.ts b/src/main/services/GitService.ts deleted file mode 100644 index 4b4918fb2..000000000 --- a/src/main/services/GitService.ts +++ /dev/null @@ -1,654 +0,0 @@ -import { execFile } from 'child_process'; -import { promisify } from 'util'; -import * as fs from 'fs'; -import * as path from 'path'; -import { - parseDiffLines, - stripTrailingNewline, - MAX_DIFF_CONTENT_BYTES, - MAX_DIFF_OUTPUT_BYTES, -} from '../utils/diffParser'; -import type { DiffLine, DiffResult } from '../utils/diffParser'; - -const execFileAsync = promisify(execFile); -const 
MAX_UNTRACKED_LINECOUNT_BYTES = 512 * 1024; - -async function countFileNewlinesCapped(filePath: string, maxBytes: number): Promise { - let stat: fs.Stats; - try { - stat = await fs.promises.stat(filePath); - } catch { - return null; - } - - if (!stat.isFile() || stat.size > maxBytes) { - return null; - } - - return await new Promise((resolve) => { - let count = 0; - const stream = fs.createReadStream(filePath); - stream.on('data', (chunk: string | Buffer) => { - const buffer = typeof chunk === 'string' ? Buffer.from(chunk) : chunk; - for (let i = 0; i < buffer.length; i++) { - if (buffer[i] === 0x0a) count++; - } - }); - stream.on('error', () => resolve(null)); - stream.on('end', () => resolve(count)); - }); -} - -async function readFileTextCapped(filePath: string, maxBytes: number): Promise { - let stat: fs.Stats; - try { - stat = await fs.promises.stat(filePath); - } catch { - return null; - } - - if (!stat.isFile() || stat.size > maxBytes) { - return null; - } - - try { - return await fs.promises.readFile(filePath, 'utf8'); - } catch { - return null; - } -} - -export type GitChange = { - path: string; - status: string; - additions: number; - deletions: number; - isStaged: boolean; -}; - -export async function getStatus(taskPath: string): Promise { - try { - await execFileAsync('git', ['rev-parse', '--is-inside-work-tree'], { - cwd: taskPath, - }); - } catch { - return []; - } - - const { stdout: statusOutput } = await execFileAsync( - 'git', - ['status', '--porcelain', '--untracked-files=all'], - { - cwd: taskPath, - } - ); - - if (!statusOutput.trim()) return []; - - const changes: GitChange[] = []; - const statusLines = statusOutput - .split('\n') - .map((l) => l.replace(/\r$/, '')) - .filter((l) => l.length > 0); - - for (const line of statusLines) { - const statusCode = line.substring(0, 2); - let filePath = line.substring(3); - if (statusCode.includes('R') && filePath.includes('->')) { - const parts = filePath.split('->'); - filePath = parts[parts.length - 
1].trim(); - } - - let status = 'modified'; - if (statusCode.includes('A') || statusCode.includes('?')) status = 'added'; - else if (statusCode.includes('D')) status = 'deleted'; - else if (statusCode.includes('R')) status = 'renamed'; - else if (statusCode.includes('M')) status = 'modified'; - - // Check if file is staged (first character of status code indicates staged changes) - const isStaged = statusCode[0] !== ' ' && statusCode[0] !== '?'; - let additions = 0; - let deletions = 0; - - const sumNumstat = (stdout: string) => { - const lines = stdout - .trim() - .split('\n') - .filter((l) => l.trim().length > 0); - for (const l of lines) { - const p = l.split('\t'); - if (p.length >= 2) { - const addStr = p[0]; - const delStr = p[1]; - const a = addStr === '-' ? 0 : parseInt(addStr, 10) || 0; - const d = delStr === '-' ? 0 : parseInt(delStr, 10) || 0; - additions += a; - deletions += d; - } - } - }; - - try { - const staged = await execFileAsync('git', ['diff', '--numstat', '--cached', '--', filePath], { - cwd: taskPath, - }); - if (staged.stdout && staged.stdout.trim()) sumNumstat(staged.stdout); - } catch {} - - try { - const unstaged = await execFileAsync('git', ['diff', '--numstat', '--', filePath], { - cwd: taskPath, - }); - if (unstaged.stdout && unstaged.stdout.trim()) sumNumstat(unstaged.stdout); - } catch {} - - if (additions === 0 && deletions === 0 && statusCode.includes('?')) { - const absPath = path.join(taskPath, filePath); - const count = await countFileNewlinesCapped(absPath, MAX_UNTRACKED_LINECOUNT_BYTES); - if (typeof count === 'number') { - additions = count; - } - } - - changes.push({ path: filePath, status, additions, deletions, isStaged }); - } - - return changes; -} - -export async function stageFile(taskPath: string, filePath: string): Promise { - await execFileAsync('git', ['add', '--', filePath], { cwd: taskPath }); -} - -export async function stageAllFiles(taskPath: string): Promise { - await execFileAsync('git', ['add', '-A'], { cwd: 
taskPath }); -} - -export async function unstageFile(taskPath: string, filePath: string): Promise { - try { - await execFileAsync('git', ['reset', 'HEAD', '--', filePath], { cwd: taskPath }); - } catch { - // HEAD may not exist (no commits yet) — use rm --cached instead - await execFileAsync('git', ['rm', '--cached', '--', filePath], { cwd: taskPath }); - } -} - -export async function revertFile( - taskPath: string, - filePath: string -): Promise<{ action: 'unstaged' | 'reverted' }> { - // Validate filePath doesn't escape the worktree - const absPath = path.resolve(taskPath, filePath); - const resolvedTaskPath = path.resolve(taskPath); - if (!absPath.startsWith(resolvedTaskPath + path.sep) && absPath !== resolvedTaskPath) { - throw new Error('File path is outside the worktree'); - } - - // Check if file is tracked in git (exists in HEAD) - let fileExistsInHead = false; - try { - await execFileAsync('git', ['cat-file', '-e', `HEAD:${filePath}`], { cwd: taskPath }); - fileExistsInHead = true; - } catch { - // File doesn't exist in HEAD (it's a new/untracked file), delete it - if (fs.existsSync(absPath)) { - fs.unlinkSync(absPath); - } - return { action: 'reverted' }; - } - - // File exists in HEAD, revert it - if (fileExistsInHead) { - try { - await execFileAsync('git', ['checkout', 'HEAD', '--', filePath], { cwd: taskPath }); - } catch (error) { - // If checkout fails, don't delete the file - throw the error instead - throw new Error( - `Failed to revert file: ${error instanceof Error ? 
error.message : String(error)}` - ); - } - } - return { action: 'reverted' }; -} - -export async function getFileDiff(taskPath: string, filePath: string): Promise { - const absPath = path.resolve(taskPath, filePath); - const resolvedTaskPath = path.resolve(taskPath); - if (!absPath.startsWith(resolvedTaskPath + path.sep) && absPath !== resolvedTaskPath) { - throw new Error('File path is outside the worktree'); - } - - // Helper: fetch content at HEAD with size guard - const getOriginalContent = async (): Promise => { - try { - const { stdout } = await execFileAsync('git', ['show', `HEAD:${filePath}`], { - cwd: taskPath, - maxBuffer: MAX_DIFF_CONTENT_BYTES, - }); - return stripTrailingNewline(stdout); - } catch { - return undefined; - } - }; - - // Helper: read current file from disk with size guard - const getModifiedContent = async (): Promise => { - const content = await readFileTextCapped(path.join(taskPath, filePath), MAX_DIFF_CONTENT_BYTES); - return content !== null ? stripTrailingNewline(content) : undefined; - }; - - // Step 1: Run git diff - let diffStdout: string | undefined; - try { - const { stdout } = await execFileAsync( - 'git', - ['diff', '--no-color', '--unified=2000', 'HEAD', '--', filePath], - { cwd: taskPath, maxBuffer: MAX_DIFF_OUTPUT_BYTES } - ); - diffStdout = stdout; - } catch { - // git diff failed (no HEAD, untracked file, etc.) 
— fall through to content-only path - } - - // Step 2: Parse diff and check binary - if (diffStdout !== undefined) { - const { lines, isBinary } = parseDiffLines(diffStdout); - - if (isBinary) { - return { lines: [], isBinary: true }; - } - - // Step 3: Fetch content (only for non-binary) - const [originalContent, modifiedContent] = await Promise.all([ - getOriginalContent(), - getModifiedContent(), - ]); - - // Step 4: Handle empty diff (untracked or deleted file that git reports as empty diff) - if (lines.length === 0) { - if (modifiedContent !== undefined) { - return { - lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), - modifiedContent, - }; - } - if (originalContent !== undefined) { - return { - lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), - originalContent, - }; - } - return { lines: [] }; - } - - return { lines, originalContent, modifiedContent }; - } - - // Fallback: git diff failed — try content-only approach - const [originalContent, modifiedContent] = await Promise.all([ - getOriginalContent(), - getModifiedContent(), - ]); - - if (modifiedContent !== undefined) { - return { - lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), - originalContent, - modifiedContent, - }; - } - if (originalContent !== undefined) { - return { - lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), - originalContent, - }; - } - return { lines: [] }; -} - -/** Commit staged files (no push). Returns the commit hash. */ -export async function commit(taskPath: string, message: string): Promise<{ hash: string }> { - if (!message || !message.trim()) { - throw new Error('Commit message cannot be empty'); - } - await execFileAsync('git', ['commit', '-m', message], { cwd: taskPath }); - const { stdout } = await execFileAsync('git', ['rev-parse', 'HEAD'], { cwd: taskPath }); - return { hash: stdout.trim() }; -} - -/** Push current branch to origin. 
Sets upstream if needed. */ -export async function push(taskPath: string): Promise<{ output: string }> { - try { - const { stdout } = await execFileAsync('git', ['push'], { cwd: taskPath }); - return { output: stdout.trim() }; - } catch (error: unknown) { - const stderr = (error as { stderr?: string })?.stderr || ''; - // Only fallback to --set-upstream if git tells us there's no upstream - if (stderr.includes('has no upstream branch') || stderr.includes('no upstream configured')) { - const { stdout: branch } = await execFileAsync('git', ['branch', '--show-current'], { - cwd: taskPath, - }); - const { stdout } = await execFileAsync( - 'git', - ['push', '--set-upstream', 'origin', branch.trim()], - { cwd: taskPath } - ); - return { output: stdout.trim() }; - } - throw error; - } -} - -/** Pull from remote. */ -export async function pull(taskPath: string): Promise<{ output: string }> { - const { stdout } = await execFileAsync('git', ['pull'], { cwd: taskPath }); - return { output: stdout.trim() }; -} - -/** Get commit log for the current branch. */ -export async function getLog( - taskPath: string, - maxCount: number = 50, - skip: number = 0, - knownAheadCount?: number -): Promise<{ - commits: Array<{ - hash: string; - subject: string; - body: string; - author: string; - date: string; - isPushed: boolean; - tags: string[]; - }>; - aheadCount: number; -}> { - // Use caller-provided aheadCount for pagination consistency, otherwise compute it. - // Strategy: try upstream tracking branch first, then origin/, then origin/HEAD. - // If none work, assume all commits are pushed (aheadCount = 0). - let aheadCount = knownAheadCount ?? 
-1; - if (aheadCount < 0) { - aheadCount = 0; - try { - // Best case: branch has an upstream tracking ref - const { stdout: countOut } = await execFileAsync( - 'git', - ['rev-list', '--count', '@{upstream}..HEAD'], - { cwd: taskPath } - ); - aheadCount = parseInt(countOut.trim(), 10) || 0; - } catch { - try { - // Fallback: compare against origin/ - const { stdout: branchOut } = await execFileAsync( - 'git', - ['rev-parse', '--abbrev-ref', 'HEAD'], - { cwd: taskPath } - ); - const currentBranch = branchOut.trim(); - const { stdout: countOut } = await execFileAsync( - 'git', - ['rev-list', '--count', `origin/${currentBranch}..HEAD`], - { cwd: taskPath } - ); - aheadCount = parseInt(countOut.trim(), 10) || 0; - } catch { - try { - // Last resort: compare against origin/HEAD (default branch) - const { stdout: defaultBranchOut } = await execFileAsync( - 'git', - ['symbolic-ref', '--short', 'refs/remotes/origin/HEAD'], - { cwd: taskPath } - ); - const defaultBranch = defaultBranchOut.trim(); - const { stdout: countOut } = await execFileAsync( - 'git', - ['rev-list', '--count', `${defaultBranch}..HEAD`], - { cwd: taskPath } - ); - aheadCount = parseInt(countOut.trim(), 10) || 0; - } catch { - // Cannot determine remote state (no remote, detached HEAD, offline, etc.) - // Default to 0 ahead so all commits show as pushed. This avoids false "unpushed" - // indicators when there's genuinely no remote to compare against. 
- aheadCount = 0; - } - } - } - } - - const FIELD_SEP = '---FIELD_SEP---'; - const RECORD_SEP = '---RECORD_SEP---'; - const format = `${RECORD_SEP}%H${FIELD_SEP}%s${FIELD_SEP}%an${FIELD_SEP}%aI${FIELD_SEP}%D${FIELD_SEP}%b`; - const { stdout } = await execFileAsync( - 'git', - ['log', `--max-count=${maxCount}`, `--skip=${skip}`, `--pretty=format:${format}`, '--'], - { cwd: taskPath } - ); - - if (!stdout.trim()) return { commits: [], aheadCount }; - - const commits = stdout - .split(RECORD_SEP) - .filter((entry) => entry.trim()) - .map((entry, index) => { - const parts = entry.trim().split(FIELD_SEP); - // %D outputs ref decorations like "tag: v0.4.2, origin/main, HEAD -> main" - const refs = parts[4] || ''; - const tags = refs - .split(',') - .map((r) => r.trim()) - .filter((r) => r.startsWith('tag: ')) - .map((r) => r.slice(5)); - return { - hash: parts[0] || '', - subject: parts[1] || '', - body: (parts[5] || '').trim(), - author: parts[2] || '', - date: parts[3] || '', - isPushed: skip + index >= aheadCount, - tags, - }; - }); - - return { commits, aheadCount }; -} - -/** Get the latest commit info (subject + body). */ -export async function getLatestCommit( - taskPath: string -): Promise<{ hash: string; subject: string; body: string; isPushed: boolean } | null> { - const { commits } = await getLog(taskPath, 1); - return commits[0] || null; -} - -/** Get files changed in a specific commit. 
*/ -export async function getCommitFiles( - taskPath: string, - commitHash: string -): Promise> { - // Use --root to handle initial commits (no parent) and - // -m --first-parent to handle merge commits (compare against first parent only) - const { stdout } = await execFileAsync( - 'git', - [ - 'diff-tree', - '--root', - '--no-commit-id', - '-r', - '-m', - '--first-parent', - '--numstat', - commitHash, - ], - { cwd: taskPath } - ); - - const { stdout: nameStatus } = await execFileAsync( - 'git', - [ - 'diff-tree', - '--root', - '--no-commit-id', - '-r', - '-m', - '--first-parent', - '--name-status', - commitHash, - ], - { cwd: taskPath } - ); - - const statLines = stdout.trim().split('\n').filter(Boolean); - const statusLines = nameStatus.trim().split('\n').filter(Boolean); - - const statusMap = new Map(); - for (const line of statusLines) { - const [code, ...pathParts] = line.split('\t'); - const filePath = pathParts[pathParts.length - 1] || ''; - const status = - code === 'A' - ? 'added' - : code === 'D' - ? 'deleted' - : code?.startsWith('R') - ? 'renamed' - : 'modified'; - statusMap.set(filePath, status); - } - - return statLines.map((line) => { - const [addStr, delStr, ...pathParts] = line.split('\t'); - const filePath = pathParts.join('\t'); - return { - path: filePath, - status: statusMap.get(filePath) || 'modified', - additions: addStr === '-' ? 0 : parseInt(addStr || '0', 10) || 0, - deletions: delStr === '-' ? 0 : parseInt(delStr || '0', 10) || 0, - }; - }); -} - -/** Get diff for a specific file in a specific commit. 
*/ -export async function getCommitFileDiff( - taskPath: string, - commitHash: string, - filePath: string -): Promise { - const absPath = path.resolve(taskPath, filePath); - const resolvedTaskPath = path.resolve(taskPath); - if (!absPath.startsWith(resolvedTaskPath + path.sep) && absPath !== resolvedTaskPath) { - throw new Error('File path is outside the worktree'); - } - - // Helper: fetch content at a given ref with size guard - const getContentAt = async (ref: string): Promise => { - try { - const { stdout } = await execFileAsync('git', ['show', `${ref}:${filePath}`], { - cwd: taskPath, - maxBuffer: MAX_DIFF_CONTENT_BYTES, - }); - return stripTrailingNewline(stdout); - } catch { - return undefined; - } - }; - - // Check if this is a root commit (no parent) - let hasParent = true; - try { - await execFileAsync('git', ['rev-parse', '--verify', `${commitHash}~1`], { cwd: taskPath }); - } catch { - hasParent = false; - } - - if (!hasParent) { - const modifiedContent = await getContentAt(commitHash); - if (modifiedContent === undefined) { - return { lines: [] }; - } - if (modifiedContent === '') { - return { lines: [], modifiedContent }; - } - return { - lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), - modifiedContent, - }; - } - - // Run diff - let diffStdout: string | undefined; - try { - const { stdout } = await execFileAsync( - 'git', - ['diff', '--no-color', '--unified=2000', `${commitHash}~1`, commitHash, '--', filePath], - { cwd: taskPath, maxBuffer: MAX_DIFF_OUTPUT_BYTES } - ); - diffStdout = stdout; - } catch { - // diff too large or git error — fall through to content-only path - } - - let diffLines: DiffLine[] = []; - if (diffStdout !== undefined) { - const { lines, isBinary } = parseDiffLines(diffStdout); - if (isBinary) { - return { lines: [], isBinary: true }; - } - diffLines = lines; - } - - // Fetch content AFTER binary check to avoid fetching binary blobs - const [originalContent, modifiedContent] = await 
Promise.all([ - getContentAt(`${commitHash}~1`), - getContentAt(commitHash), - ]); - - if (diffLines.length > 0) return { lines: diffLines, originalContent, modifiedContent }; - - // Fallback: diff failed or empty — determine from content - if (modifiedContent !== undefined && modifiedContent !== '') { - return { - lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), - originalContent, - modifiedContent, - }; - } - if (originalContent !== undefined) { - return { - lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), - originalContent, - modifiedContent, - }; - } - return { lines: [], originalContent, modifiedContent }; -} - -/** Soft-reset the latest commit. Returns the commit message that was reset. */ -export async function softResetLastCommit( - taskPath: string -): Promise<{ subject: string; body: string }> { - // Check if HEAD~1 exists (i.e., this isn't the initial commit) - try { - await execFileAsync('git', ['rev-parse', '--verify', 'HEAD~1'], { cwd: taskPath }); - } catch { - throw new Error('Cannot undo the initial commit'); - } - - // Check if the commit has been pushed (safety guard — UI also hides the button) - const { commits: log } = await getLog(taskPath, 1); - if (log[0]?.isPushed) { - throw new Error('Cannot undo a commit that has already been pushed'); - } - - const { stdout: subject } = await execFileAsync('git', ['log', '-1', '--pretty=format:%s'], { - cwd: taskPath, - }); - const { stdout: body } = await execFileAsync('git', ['log', '-1', '--pretty=format:%b'], { - cwd: taskPath, - }); - - await execFileAsync('git', ['reset', '--soft', 'HEAD~1'], { cwd: taskPath }); - - return { subject: subject.trim(), body: body.trim() }; -} diff --git a/src/main/services/LifecycleScriptsService.ts b/src/main/services/LifecycleScriptsService.ts deleted file mode 100644 index 9d11c754b..000000000 --- a/src/main/services/LifecycleScriptsService.ts +++ /dev/null @@ -1,68 +0,0 @@ -import fs from 'fs'; 
-import path from 'path'; -import { log } from '../lib/logger'; -import type { LifecyclePhase, LifecycleScriptConfig } from '@shared/lifecycle'; - -export interface EmdashConfig { - preservePatterns?: string[]; - scripts?: LifecycleScriptConfig; - shellSetup?: string; - tmux?: boolean; -} - -/** - * Manages lifecycle scripts for worktrees. - * Scripts are configured in .emdash.json at the project root. - */ -class LifecycleScriptsService { - /** - * Read .emdash.json config from project root - */ - readConfig(projectPath: string): EmdashConfig | null { - try { - const configPath = path.join(projectPath, '.emdash.json'); - if (!fs.existsSync(configPath)) { - return null; - } - const content = fs.readFileSync(configPath, 'utf8'); - return JSON.parse(content) as EmdashConfig; - } catch (error) { - log.warn('Failed to read .emdash.json', { projectPath, error }); - return null; - } - } - - /** - * Get a specific lifecycle script command if configured. - */ - getScript(projectPath: string, phase: LifecyclePhase): string | null { - const config = this.readConfig(projectPath); - const scripts = config?.scripts; - const script = scripts?.[phase]; - return typeof script === 'string' && script.trim().length > 0 ? script.trim() : null; - } - - /** - * Get the shell setup command if configured in .emdash.json. - * Runs inside every PTY (agent and plain terminal) before the shell starts. - */ - getShellSetup(projectPath: string): string | null { - const config = this.readConfig(projectPath); - const shellSetup = config?.shellSetup; - return typeof shellSetup === 'string' && shellSetup.trim().length > 0 - ? shellSetup.trim() - : null; - } - - /** - * Check if tmux wrapping is enabled for this project in .emdash.json. - * When true, agent PTY sessions are wrapped in named tmux sessions - * for persistence and resumability. 
- */ - getTmuxEnabled(projectPath: string): boolean { - const config = this.readConfig(projectPath); - return config?.tmux === true; - } -} - -export const lifecycleScriptsService = new LifecycleScriptsService(); diff --git a/src/main/services/LinearService.ts b/src/main/services/LinearService.ts deleted file mode 100644 index efebc0f68..000000000 --- a/src/main/services/LinearService.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { request } from 'node:https'; -import { URL } from 'node:url'; - -const LINEAR_API_URL = 'https://api.linear.app/graphql'; - -export interface LinearViewer { - name?: string | null; - displayName?: string | null; - organization?: { - name?: string | null; - } | null; -} - -export interface LinearConnectionStatus { - connected: boolean; - workspaceName?: string; - viewer?: LinearViewer; - error?: string; -} - -interface GraphQLResponse { - data?: T; - errors?: Array<{ message: string }>; -} - -export class LinearService { - private readonly SERVICE_NAME = 'emdash-linear'; - private readonly ACCOUNT_NAME = 'api-token'; - - async saveToken( - token: string - ): Promise<{ success: boolean; workspaceName?: string; error?: string }> { - try { - const viewer = await this.fetchViewer(token); - await this.storeToken(token); - // Track connection - void import('../telemetry').then(({ capture }) => { - void capture('linear_connected'); - }); - return { - success: true, - workspaceName: viewer?.organization?.name ?? viewer?.displayName ?? undefined, - }; - } catch (error) { - const message = - error instanceof Error - ? error.message - : 'Failed to validate Linear token. 
Please try again.'; - return { success: false, error: message }; - } - } - - async clearToken(): Promise<{ success: boolean; error?: string }> { - try { - const keytar = await import('keytar'); - await keytar.deletePassword(this.SERVICE_NAME, this.ACCOUNT_NAME); - // Track disconnection - void import('../telemetry').then(({ capture }) => { - void capture('linear_disconnected'); - }); - return { success: true }; - } catch (error) { - console.error('Failed to clear Linear token:', error); - return { - success: false, - error: 'Unable to remove Linear token from keychain.', - }; - } - } - - async checkConnection(): Promise { - try { - const token = await this.getStoredToken(); - if (!token) { - return { connected: false }; - } - - const viewer = await this.fetchViewer(token); - return { - connected: true, - workspaceName: viewer?.organization?.name ?? viewer?.displayName ?? undefined, - viewer, - }; - } catch (error) { - const message = - error instanceof Error ? error.message : 'Failed to verify Linear connection.'; - return { connected: false, error: message }; - } - } - - async initialFetch(limit = 50): Promise { - const token = await this.getStoredToken(); - if (!token) { - throw new Error('Linear token not set. Connect Linear in settings first.'); - } - - const sanitizedLimit = Math.min(Math.max(limit, 1), 200); - - // Use server-side filter to exclude completed/canceled issues so we get a full - // page of open issues instead of fetching N and discarding closed ones. - const query = ` - query ListIssues($limit: Int!) 
{ - issues( - first: $limit, - orderBy: updatedAt, - filter: { state: { type: { nin: ["completed", "cancelled"] } } } - ) { - nodes { - id - identifier - title - description - url - state { name type color } - team { name key } - project { name } - assignee { displayName name } - updatedAt - } - } - } - `; - - const response = await this.graphql<{ issues: { nodes: any[] } }>(token, query, { - limit: sanitizedLimit, - }); - - return response?.issues?.nodes ?? []; - } - - async searchIssues(searchTerm: string, limit = 20): Promise { - const token = await this.getStoredToken(); - if (!token) { - throw new Error('Linear token not set. Connect Linear in settings first.'); - } - - if (!searchTerm.trim()) { - return []; - } - - const sanitizedLimit = Math.min(Math.max(limit, 1), 200); - - // Use Linear's server-side searchIssues query for full-text search across all issues - const searchQuery = ` - query SearchIssues($term: String!, $limit: Int!) { - searchIssues(term: $term, first: $limit) { - nodes { - id - identifier - title - description - url - state { name type color } - team { name key } - project { name } - assignee { displayName name } - updatedAt - } - } - } - `; - - try { - const searchResponse = await this.graphql<{ searchIssues: { nodes: any[] } }>( - token, - searchQuery, - { - term: searchTerm.trim(), - limit: sanitizedLimit, - } - ); - - return searchResponse?.searchIssues?.nodes ?? 
[]; - } catch (error) { - console.error('[Linear] searchIssues error:', error); - return []; - } - } - - private async fetchViewer(token: string): Promise { - const query = ` - query ViewerInfo { - viewer { - name - displayName - organization { - name - } - } - } - `; - - const data = await this.graphql<{ viewer: LinearViewer }>(token, query); - if (!data?.viewer) { - throw new Error('Unable to retrieve Linear account information.'); - } - return data.viewer; - } - - private async graphql( - token: string, - query: string, - variables?: Record - ): Promise { - const body = JSON.stringify({ query, variables }); - - const requestPromise = new Promise>((resolve, reject) => { - const url = new URL(LINEAR_API_URL); - - const req = request( - { - hostname: url.hostname, - path: url.pathname, - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: token, - 'Content-Length': Buffer.byteLength(body).toString(), - }, - }, - (res) => { - let data = ''; - - res.on('data', (chunk) => { - data += chunk; - }); - - res.on('end', () => { - try { - const parsed = JSON.parse(data) as GraphQLResponse; - resolve(parsed); - } catch (error) { - reject(error); - } - }); - } - ); - - req.on('error', (error) => { - reject(error); - }); - - req.write(body); - req.end(); - }); - - const result = await requestPromise; - - if (result.errors?.length) { - throw new Error(result.errors.map((err) => err.message).join('\n')); - } - - if (!result.data) { - throw new Error('Linear API returned no data.'); - } - - return result.data; - } - - private async storeToken(token: string): Promise { - const clean = token.trim(); - if (!clean) { - throw new Error('Linear token cannot be empty.'); - } - - try { - const keytar = await import('keytar'); - await keytar.setPassword(this.SERVICE_NAME, this.ACCOUNT_NAME, clean); - } catch (error) { - console.error('Failed to store Linear token:', error); - throw new Error('Unable to store Linear token securely.'); - } - } - - private 
async getStoredToken(): Promise { - try { - const keytar = await import('keytar'); - return await keytar.getPassword(this.SERVICE_NAME, this.ACCOUNT_NAME); - } catch (error) { - console.error('Failed to read Linear token from keychain:', error); - return null; - } - } -} - -export default LinearService; diff --git a/src/main/services/PrGenerationService.ts b/src/main/services/PrGenerationService.ts deleted file mode 100644 index 448f68a97..000000000 --- a/src/main/services/PrGenerationService.ts +++ /dev/null @@ -1,701 +0,0 @@ -import { execFile, spawn } from 'child_process'; -import { promisify } from 'util'; -import { log } from '../lib/logger'; -import { getProvider, PROVIDER_IDS, type ProviderId } from '../../shared/providers/registry'; - -const execFileAsync = promisify(execFile); - -export interface GeneratedPrContent { - title: string; - description: string; -} - -/** - * Generates PR title and description using available CLI agents or fallback heuristics - */ -export class PrGenerationService { - /** - * Generate PR title and description based on git changes - * @param taskPath - Path to the task - * @param baseBranch - Base branch to compare against (default: 'main') - * @param preferredProviderId - Optional provider ID to use first (e.g., from task.agentId) - */ - async generatePrContent( - taskPath: string, - baseBranch: string = 'main', - preferredProviderId?: string | null - ): Promise { - try { - // Get git diff and commit messages - const { diff, commits, changedFiles } = await this.getGitContext(taskPath, baseBranch); - - if (!diff && commits.length === 0) { - return this.generateFallbackContent(changedFiles); - } - - // Build ordered list of providers to try: preferred → claude → remaining - const attempted = new Set(); - const tryOrder: ProviderId[] = []; - - if (preferredProviderId && this.isValidProviderId(preferredProviderId)) { - tryOrder.push(preferredProviderId as ProviderId); - } - tryOrder.push('claude'); - for (const id of PROVIDER_IDS) { 
- tryOrder.push(id); - } - - for (const providerId of tryOrder) { - if (attempted.has(providerId)) continue; - attempted.add(providerId); - - try { - const result = await this.generateWithProvider(providerId, taskPath, diff, commits); - if (result) { - log.info(`Generated PR content with ${providerId}`); - return { - title: result.title, - description: this.normalizeMarkdown(result.description), - }; - } - } catch (error) { - log.debug(`Provider ${providerId} generation failed, trying next`, { error }); - } - } - - // Fallback to heuristic-based generation - return this.generateHeuristicContent(diff, commits, changedFiles); - } catch (error) { - log.error('Failed to generate PR content', { error }); - return this.generateFallbackContent([]); - } - } - - /** - * Get git context (diff, commits, changed files) for PR generation - */ - private async getGitContext( - taskPath: string, - baseBranch: string - ): Promise<{ diff: string; commits: string[]; changedFiles: string[] }> { - let diff = ''; - let commits: string[] = []; - let changedFiles: string[] = []; - - try { - // Fetch remote to ensure we have latest state (prevents comparing against stale local branches) - // This is critical: if local main is behind remote, we'd incorrectly include others' commits - // Only fetch if remote exists - try { - await execFileAsync('git', ['remote', 'get-url', 'origin'], { cwd: taskPath }); - // Remote exists, try to fetch - try { - await execFileAsync('git', ['fetch', 'origin', '--quiet'], { cwd: taskPath }); - } catch (fetchError) { - log.debug('Failed to fetch remote, continuing with existing refs', { fetchError }); - } - } catch { - // Remote doesn't exist, skip fetch - log.debug('Remote origin not found, skipping fetch'); - } - - // Always prefer remote branch to avoid stale local branch issues - let baseBranchRef = baseBranch; - let baseBranchExists = false; - - // First try remote branch (most reliable - always up to date) - try { - await execFileAsync('git', 
['rev-parse', '--verify', `origin/${baseBranch}`], { - cwd: taskPath, - }); - baseBranchExists = true; - baseBranchRef = `origin/${baseBranch}`; - } catch { - // Fall back to local branch only if remote doesn't exist - try { - await execFileAsync('git', ['rev-parse', '--verify', baseBranch], { cwd: taskPath }); - baseBranchExists = true; - baseBranchRef = baseBranch; - } catch { - // Base branch doesn't exist, will use working directory diff - } - } - - if (baseBranchExists) { - // Get diff between base branch and current HEAD (committed changes) - try { - const { stdout: diffOut } = await execFileAsync( - 'git', - ['diff', `${baseBranchRef}...HEAD`, '--stat'], - { cwd: taskPath, maxBuffer: 10 * 1024 * 1024 } - ); - diff = diffOut || ''; - - // Get list of changed files from commits - const { stdout: filesOut } = await execFileAsync( - 'git', - ['diff', '--name-only', `${baseBranchRef}...HEAD`], - { cwd: taskPath } - ); - const committedFiles = (filesOut || '') - .split('\n') - .map((f) => f.trim()) - .filter(Boolean); - changedFiles.push(...committedFiles); - - // Get commit messages - const { stdout: commitsOut } = await execFileAsync( - 'git', - ['log', `${baseBranchRef}..HEAD`, '--pretty=format:%s'], - { cwd: taskPath } - ); - commits = (commitsOut || '') - .split('\n') - .map((c) => c.trim()) - .filter(Boolean); - } catch (error) { - log.debug('Failed to get diff/commits from base branch', { error }); - } - } - - // Also include uncommitted changes (working directory) to capture all changes - // This ensures PR description includes changes that will be committed - try { - const { stdout: workingDiff } = await execFileAsync('git', ['diff', '--stat'], { - cwd: taskPath, - maxBuffer: 10 * 1024 * 1024, - }); - const workingDiffText = workingDiff || ''; - - // If we have both committed and uncommitted changes, combine them - if (workingDiffText && diff) { - // Combine diff stats (working directory changes will be added) - diff = `${diff}\n${workingDiffText}`; - } 
else if (workingDiffText && !diff) { - // Only uncommitted changes - diff = workingDiffText; - } - - // Get uncommitted changed files and merge with committed files - const { stdout: filesOut } = await execFileAsync('git', ['diff', '--name-only'], { - cwd: taskPath, - }); - const uncommittedFiles = (filesOut || '') - .split('\n') - .map((f) => f.trim()) - .filter(Boolean); - - // Merge file lists, avoiding duplicates - const allFiles = new Set([...changedFiles, ...uncommittedFiles]); - changedFiles = Array.from(allFiles); - } catch (error) { - log.debug('Failed to get working directory diff', { error }); - } - - // Fallback: if we still have no diff or commits, try staged changes - if (commits.length === 0 && diff.length === 0) { - try { - const { stdout: stagedDiff } = await execFileAsync( - 'git', - ['diff', '--cached', '--stat'], - { cwd: taskPath, maxBuffer: 10 * 1024 * 1024 } - ); - if (stagedDiff) { - diff = stagedDiff; - const { stdout: filesOut } = await execFileAsync( - 'git', - ['diff', '--cached', '--name-only'], - { cwd: taskPath } - ); - changedFiles = (filesOut || '') - .split('\n') - .map((f) => f.trim()) - .filter(Boolean); - } - } catch {} - } - } catch (error) { - log.warn('Failed to get git context', { error }); - } - - return { diff, commits, changedFiles }; - } - - /** - * Check if a provider can be used for non-interactive PR generation. - * Returns false for providers that require TUI keystroke injection, - * have no CLI binary, or can't accept a prompt via CLI args. - */ - private canUseForPrGeneration(providerId: ProviderId): boolean { - const provider = getProvider(providerId); - if (!provider) return false; - if (!provider.cli) return false; - if (provider.useKeystrokeInjection) return false; - if (provider.initialPromptFlag === undefined) return false; - return true; - } - - /** - * Generate PR content using a CLI provider. - * Retries once on parse failure (exit code 0 but malformed output). 
- */ - private async generateWithProvider( - providerId: ProviderId, - taskPath: string, - diff: string, - commits: string[] - ): Promise { - if (!this.canUseForPrGeneration(providerId)) { - return null; - } - - const provider = getProvider(providerId); - const cliCommand = provider!.cli!; - - // Check if provider CLI is available - try { - await execFileAsync(cliCommand, provider!.versionArgs || ['--version'], { - cwd: taskPath, - }); - } catch { - log.debug(`Provider ${providerId} CLI not available`); - return null; - } - - // Build prompt for PR generation - const prompt = this.buildPrGenerationPrompt(diff, commits); - - // Try up to 2 times: retry once if the process succeeded but JSON parsing failed - for (let attempt = 0; attempt < 2; attempt++) { - const { result, shouldRetry } = await this.spawnProvider( - providerId, - cliCommand, - provider, - taskPath, - prompt - ); - if (result) return result; - if (!shouldRetry) break; - log.debug(`Retrying provider ${providerId} (attempt ${attempt + 2}/2) after parse failure`); - } - - return null; - } - - /** - * Spawn a provider CLI process and collect its output. - * Returns the parsed result (if any) and whether a retry is worthwhile. - */ - private spawnProvider( - providerId: ProviderId, - cliCommand: string, - provider: ReturnType, - taskPath: string, - prompt: string - ): Promise<{ result: GeneratedPrContent | null; shouldRetry: boolean }> { - return new Promise((resolve) => { - const timeout = 60000; - let stdout = ''; - let stderr = ''; - let resolved = false; - - const done = (result: GeneratedPrContent | null, shouldRetry: boolean) => { - if (resolved) return; - resolved = true; - resolve({ result, shouldRetry }); - }; - - // Build command arguments — Claude Code gets special `-p` (print) mode - const args: string[] = []; - const isClaudeProvider = providerId === 'claude'; - - if (isClaudeProvider) { - // Use -p (print/non-interactive) mode with structured JSON output. 
- // This eliminates ANSI escapes, progress indicators, and TUI noise. - args.push('-p', prompt, '--output-format', 'json'); - if (provider!.autoApproveFlag) { - args.push(provider!.autoApproveFlag); - } - } else { - if (provider!.defaultArgs?.length) { - args.push(...provider!.defaultArgs); - } - if (provider!.autoApproveFlag) { - args.push(provider!.autoApproveFlag); - } - } - - // Handle prompt for non-Claude providers using the same logic as ptyManager.buildProviderCliArgs(): - // - initialPromptFlag: '' → positional arg (push prompt directly) - // - initialPromptFlag: '-i' / '-t' / etc. → push flag then prompt - // - initialPromptFlag: undefined → provider can't accept a prompt (already filtered by canUseForPrGeneration) - if (!isClaudeProvider && provider!.initialPromptFlag !== undefined) { - if (provider!.initialPromptFlag) { - args.push(provider!.initialPromptFlag); - } - args.push(prompt); - } - - // Spawn the provider CLI - const child = spawn(cliCommand, args, { - cwd: taskPath, - stdio: ['pipe', 'pipe', 'pipe'], - env: { - ...process.env, - TERM: 'xterm-256color', - COLORTERM: 'truecolor', - }, - }); - - // Set timeout - const timeoutId = setTimeout(() => { - try { - child.kill('SIGTERM'); - } catch {} - log.debug(`Provider ${providerId} invocation timed out`); - done(null, false); // Don't retry on timeout - }, timeout); - - // Collect stdout - if (child.stdout) { - child.stdout.on('data', (data: Buffer) => { - stdout += data.toString('utf8'); - }); - } - - // Collect stderr (for debugging) - if (child.stderr) { - child.stderr.on('data', (data: Buffer) => { - stderr += data.toString('utf8'); - }); - } - - // Handle process exit - child.on('exit', (code: number | null, signal: NodeJS.Signals | null) => { - clearTimeout(timeoutId); - - if (code !== 0 && code !== null) { - log.debug(`Provider ${providerId} exited with code ${code}`, { stderr }); - done(null, false); // Don't retry on non-zero exit - return; - } - - if (signal) { - log.debug(`Provider 
${providerId} killed by signal ${signal}`); - done(null, false); - return; - } - - // Try to parse the response - const result = this.parseProviderResponse(stdout); - if (result) { - log.info(`Successfully generated PR content with ${providerId}`); - done(result, false); - } else { - log.debug(`Failed to parse response from ${providerId}`, { stdout, stderr }); - done(null, true); // Retry — process succeeded but parsing failed - } - }); - - // Handle errors - child.on('error', (error: Error) => { - clearTimeout(timeoutId); - log.debug(`Failed to spawn ${providerId}`, { error }); - done(null, false); // Don't retry on spawn failure - }); - - // Close stdin — all providers receive prompts via CLI args, not stdin - if (child.stdin) { - child.stdin.end(); - } - }); - } - - /** - * Build prompt for PR generation - */ - private buildPrGenerationPrompt(diff: string, commits: string[]): string { - const commitContext = - commits.length > 0 ? `\n\nCommits:\n${commits.map((c) => `- ${c}`).join('\n')}` : ''; - const diffContext = diff - ? `\n\nDiff summary:\n${diff.substring(0, 2000)}${diff.length > 2000 ? '...' : ''}` - : ''; - - return `Generate a concise PR title and description based on these changes: - -${commitContext}${diffContext} - -Respond with ONLY valid JSON — no markdown fences, no preamble, no explanation. Your entire response must be exactly one JSON object: -{ - "title": "A concise PR title (max 72 chars, use conventional commit format if applicable)", - "description": "A well-structured markdown description using proper markdown formatting. Use ## for section headers, - or * for lists, \`code\` for inline code, and proper line breaks.\n\nUse actual newlines (\\n in JSON) for line breaks, not literal \\n text. Keep it straightforward and to the point." 
-}`; - } - - /** - * Strip ANSI escape sequences from a string - */ - private stripAnsi(text: string): string { - // Covers CSI sequences, OSC sequences, and other common escape codes - // eslint-disable-next-line no-control-regex - return text.replace( - /\x1b\[[0-9;]*[a-zA-Z]|\x1b\].*?(?:\x07|\x1b\\)|\x1b[^[(\x1b]*?[a-zA-Z]/g, - '' - ); - } - - /** - * Parse provider response into PR content. - * - * Multi-step extraction: - * 1. Strip ANSI escape sequences - * 2. If output is a Claude --output-format json envelope, extract the result field - * 3. Strip markdown code fences - * 4. Try to find a JSON object containing both "title" and "description" keys - * 5. Fall back to greedy match - */ - private parseProviderResponse(response: string): GeneratedPrContent | null { - try { - // Step 1: Strip ANSI escape sequences - let text = this.stripAnsi(response); - - // Step 2: If this is a Claude --output-format json envelope, extract the result field - try { - const envelope = JSON.parse(text); - if (envelope && typeof envelope.result === 'string') { - text = envelope.result; - } - } catch { - // Not a valid JSON envelope — continue with raw text - } - - // Step 3: Strip markdown code fences (```json ... ``` or ``` ... ```) - text = text.replace(/```(?:json)?\s*\n?([\s\S]*?)\n?\s*```/g, '$1'); - - // Step 4: Try to find a JSON object containing both "title" and "description" - const specificMatch = text.match(/\{[^{}]*"title"[^{}]*"description"[^{}]*\}/s); - // Also try reversed key order - const reversedMatch = specificMatch - ? null - : text.match(/\{[^{}]*"description"[^{}]*"title"[^{}]*\}/s); - const jsonStr = specificMatch?.[0] ?? reversedMatch?.[0]; - - // Step 5: Fall back to greedy match only if specific match failed - const fallbackStr = jsonStr ?? 
text.match(/\{[\s\S]*\}/)?.[0]; - - if (fallbackStr) { - const parsed = JSON.parse(fallbackStr); - if (parsed.title && parsed.description) { - let description = String(parsed.description); - - // Handle multiple newline escape scenarios: - // 1. Literal backslash-n sequences (from double-escaped JSON like "\\n") - if (description.includes('\\n')) { - description = description.replace(/\\n/g, '\n'); - } - // 2. Double-backslash newlines - description = description.replace(/\\\\n/g, '\n'); - - description = description.trim(); - - return { - title: parsed.title.trim(), - description, - }; - } - } - } catch (error) { - log.debug('Failed to parse provider response', { error, response }); - } - return null; - } - - /** - * Generate PR content using heuristics based on commits and files - */ - private generateHeuristicContent( - diff: string, - commits: string[], - changedFiles: string[] - ): GeneratedPrContent { - // Use first commit message as title if available (best case) - let title = 'chore: update code'; - if (commits.length > 0) { - // Use the most recent commit message as title - title = commits[0]; - - // Clean up common prefixes that might not be needed in PR title - title = title.replace( - /^(feat|fix|chore|docs|style|refactor|test|perf|ci|build|revert):\s*/i, - '' - ); - - // Ensure title is not too long (GitHub PR title limit is ~72 chars) - if (title.length > 72) { - title = title.substring(0, 69) + '...'; - } - - // Re-add conventional commit prefix if it was there - const firstCommit = commits[0]; - const prefixMatch = firstCommit.match( - /^(feat|fix|chore|docs|style|refactor|test|perf|ci|build|revert):/i - ); - if (prefixMatch && !title.startsWith(prefixMatch[1])) { - title = `${prefixMatch[1]}: ${title}`; - } - } else if (changedFiles.length > 0) { - // Generate title from file changes when no commits available - const mainFile = changedFiles[0]; - const fileParts = mainFile.split('/'); - const fileName = fileParts[fileParts.length - 1]; - const 
baseName = fileName.replace(/\.[^.]*$/, ''); // Remove extension - - // Analyze file patterns to infer intent - if (fileName.match(/test|spec/i)) { - title = 'test: add tests'; - } else if (fileName.match(/fix|bug|error/i)) { - title = 'fix: resolve issue'; - } else if (fileName.match(/feat|feature|add/i)) { - title = 'feat: add feature'; - } else if (baseName.match(/^[A-Z]/)) { - // Capitalized files often indicate new components/features - title = `feat: add ${baseName}`; - } else { - title = `chore: update ${baseName || fileName}`; - } - } - - // Generate description from commits and files - const descriptionParts: string[] = []; - - // Extract diff stats first - let fileCount = 0; - let insertions = 0; - let deletions = 0; - if (diff) { - const statsMatch = diff.match( - /(\d+)\s+files? changed(?:,\s+(\d+)\s+insertions?\(\+\))?(?:,\s+(\d+)\s+deletions?\(-\))?/ - ); - if (statsMatch) { - fileCount = parseInt(statsMatch[1] || '0', 10) || 0; - insertions = parseInt(statsMatch[2] || '0', 10) || 0; - deletions = parseInt(statsMatch[3] || '0', 10) || 0; - } - } - // Fallback to changedFiles length if no diff stats - if (fileCount === 0 && changedFiles.length > 0) { - fileCount = changedFiles.length; - } - - // Add commits section if available - if (commits.length > 0) { - descriptionParts.push('## Changes'); - commits.forEach((commit) => { - descriptionParts.push(`- ${commit}`); - }); - } - - // Add files section - only show if more than 1 file or if we have detailed stats - if (changedFiles.length > 0) { - if (changedFiles.length === 1 && fileCount === 1) { - // Single file: include it inline with summary - descriptionParts.push('\n## Summary'); - descriptionParts.push(`- Updated \`${changedFiles[0]}\``); - if (insertions > 0 || deletions > 0) { - const changes: string[] = []; - if (insertions > 0) changes.push(`+${insertions}`); - if (deletions > 0) changes.push(`-${deletions}`); - if (changes.length > 0) { - descriptionParts.push(`- ${changes.join(', ')} lines`); 
- } - } - } else { - // Multiple files: show list - descriptionParts.push('\n## Files Changed'); - changedFiles.slice(0, 20).forEach((file) => { - descriptionParts.push(`- \`${file}\``); - }); - if (changedFiles.length > 20) { - descriptionParts.push(`\n... and ${changedFiles.length - 20} more files`); - } - - // Add summary stats if available - if (fileCount > 0 || insertions > 0 || deletions > 0) { - descriptionParts.push('\n## Summary'); - if (fileCount > 0) { - descriptionParts.push(`- ${fileCount} file${fileCount !== 1 ? 's' : ''} changed`); - } - if (insertions > 0 || deletions > 0) { - const changes: string[] = []; - if (insertions > 0) changes.push(`+${insertions}`); - if (deletions > 0) changes.push(`-${deletions}`); - descriptionParts.push(`- ${changes.join(', ')} lines`); - } - } - } - } else if (fileCount > 0 || insertions > 0 || deletions > 0) { - // No file list but we have stats - descriptionParts.push('\n## Summary'); - if (fileCount > 0) { - descriptionParts.push(`- ${fileCount} file${fileCount !== 1 ? 's' : ''} changed`); - } - if (insertions > 0 || deletions > 0) { - const changes: string[] = []; - if (insertions > 0) changes.push(`+${insertions}`); - if (deletions > 0) changes.push(`-${deletions}`); - descriptionParts.push(`- ${changes.join(', ')} lines`); - } - } - - const description = descriptionParts.join('\n') || 'No description available.'; - - return { title, description }; - } - - /** - * Generate fallback content when no context is available - */ - private generateFallbackContent(changedFiles: string[]): GeneratedPrContent { - const title = - changedFiles.length > 0 - ? `chore: update ${changedFiles[0].split('/').pop() || 'files'}` - : 'chore: update code'; - - const description = - changedFiles.length > 0 - ? `Updated ${changedFiles.length} file${changedFiles.length !== 1 ? 
's' : ''}.` - : 'No changes detected.'; - - return { title, description }; - } - - /** - * Normalize markdown formatting to ensure proper structure - */ - private normalizeMarkdown(text: string): string { - if (!text) return text; - - // Ensure headers have proper spacing (double newline before headers) - let normalized = text.replace(/\n(##+ )/g, '\n\n$1'); - - // Remove excessive blank lines (more than 2 consecutive) - normalized = normalized.replace(/\n{3,}/g, '\n\n'); - - // Trim trailing whitespace on each line but preserve intentional spacing - normalized = normalized - .split('\n') - .map((line) => line.trimEnd()) - .join('\n'); - - return normalized.trim(); - } - - /** - * Check if a string is a valid provider ID - */ - private isValidProviderId(id: string): id is ProviderId { - return PROVIDER_IDS.includes(id as ProviderId); - } -} - -export const prGenerationService = new PrGenerationService(); diff --git a/src/main/services/ProjectPrep.ts b/src/main/services/ProjectPrep.ts deleted file mode 100644 index 5ed378c15..000000000 --- a/src/main/services/ProjectPrep.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { existsSync } from 'fs'; -import { join } from 'path'; -import { spawn } from 'child_process'; - -function pickNodeInstallCmd(target: string): string[] { - // Prefer package manager based on lockfile presence - if (existsSync(join(target, 'pnpm-lock.yaml'))) { - return ['pnpm install --frozen-lockfile', 'pnpm install']; - } - if (existsSync(join(target, 'yarn.lock'))) { - // Support modern Yarn (Berry) and classic Yarn - return ['yarn install --immutable', 'yarn install --frozen-lockfile', 'yarn install']; - } - if (existsSync(join(target, 'bun.lockb'))) { - return ['bun install']; - } - if (existsSync(join(target, 'package-lock.json'))) { - return ['npm ci', 'npm install']; - } - return ['npm install']; -} - -function runInBackground(cmd: string | string[], cwd: string) { - const command = Array.isArray(cmd) ? 
cmd.filter(Boolean).join(' || ') : cmd; - const child = spawn(command, { - cwd, - shell: true, - stdio: 'ignore', - windowsHide: true, - detached: process.platform !== 'win32', - }); - // Avoid unhandled errors from bubbling; ignore failures silently - child.on('error', () => {}); - child.unref?.(); -} - -/** - * Best-effort dependency prep for common project types. - * Non-blocking; spawns installs in background if needed. - */ -export async function ensureProjectPrepared(targetPath: string) { - try { - // Node projects: if package.json exists and node_modules missing, install deps - const isNode = existsSync(join(targetPath, 'package.json')); - const hasNodeModules = existsSync(join(targetPath, 'node_modules')); - if (isNode && !hasNodeModules) { - const cmds = pickNodeInstallCmd(targetPath); - runInBackground(cmds, targetPath); - } - - // Optional: we could add Python prep here later if desired - } catch { - // ignore - } -} diff --git a/src/main/services/ProjectSettingsService.ts b/src/main/services/ProjectSettingsService.ts deleted file mode 100644 index fcbcad3b3..000000000 --- a/src/main/services/ProjectSettingsService.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type { Project } from './DatabaseService'; -import { databaseService } from './DatabaseService'; - -export interface ProjectSettings { - projectId: string; - name: string; - path: string; - gitRemote?: string; - gitBranch?: string; - baseRef?: string; -} - -class ProjectSettingsService { - async getProjectSettings(projectId: string): Promise { - if (!projectId) { - throw new Error('projectId is required'); - } - const project = await databaseService.getProjectById(projectId); - if (!project) { - return null; - } - return this.toSettings(project); - } - - async updateProjectSettings( - projectId: string, - settings: { baseRef?: string } - ): Promise { - if (!projectId) { - throw new Error('projectId is required'); - } - const nextBaseRef = settings?.baseRef; - if (typeof nextBaseRef !== 'string') { - 
throw new Error('baseRef is required'); - } - - const project = await databaseService.updateProjectBaseRef(projectId, nextBaseRef); - if (!project) { - throw new Error('Project not found'); - } - return this.toSettings(project); - } - - private toSettings(project: Project): ProjectSettings { - return { - projectId: project.id, - name: project.name, - path: project.path, - gitRemote: project.gitInfo.remote, - gitBranch: project.gitInfo.branch, - baseRef: project.gitInfo.baseRef, - }; - } -} - -export const projectSettingsService = new ProjectSettingsService(); diff --git a/src/main/services/RemoteGitService.ts b/src/main/services/RemoteGitService.ts deleted file mode 100644 index a45a2ebf8..000000000 --- a/src/main/services/RemoteGitService.ts +++ /dev/null @@ -1,836 +0,0 @@ -import { SshService } from './ssh/SshService'; -import type { ExecResult } from '../../shared/ssh/types'; -import { quoteShellArg } from '../utils/shellEscape'; -import type { GitChange } from './GitService'; -import { parseDiffLines, stripTrailingNewline, MAX_DIFF_CONTENT_BYTES } from '../utils/diffParser'; -import type { DiffLine, DiffResult } from '../utils/diffParser'; - -export interface WorktreeInfo { - path: string; - branch: string; - isMain: boolean; -} - -export interface GitStatusFile { - status: string; - path: string; -} - -export interface GitStatus { - branch: string; - isClean: boolean; - files: GitStatusFile[]; -} - -export class RemoteGitService { - constructor(private sshService: SshService) {} - - private normalizeRemotePath(p: string): string { - // Remote paths should use forward slashes. 
- return p.replace(/\\/g, '/').replace(/\/+$/g, ''); - } - - async getStatus(connectionId: string, worktreePath: string): Promise { - const result = await this.sshService.executeCommand( - connectionId, - 'git status --porcelain -b', - worktreePath - ); - - if (result.exitCode !== 0) { - throw new Error(`Git status failed: ${result.stderr}`); - } - - const lines = result.stdout.split('\n'); - const branchLine = lines[0]; - const files = lines.slice(1).filter((l) => l.trim()); - - const branchMatch = branchLine.match(/^## (.+?)(?:\...|$)/); - const branch = branchMatch ? branchMatch[1] : 'unknown'; - - return { - branch, - isClean: files.length === 0, - files: files.map((line) => ({ - status: line.substring(0, 2).trim(), - path: line.substring(3), - })), - }; - } - - async getDefaultBranch(connectionId: string, projectPath: string): Promise { - const normalizedProjectPath = this.normalizeRemotePath(projectPath); - - // Try to get the current branch - const currentBranchResult = await this.sshService.executeCommand( - connectionId, - 'git rev-parse --abbrev-ref HEAD', - normalizedProjectPath - ); - - if ( - currentBranchResult.exitCode === 0 && - currentBranchResult.stdout.trim() && - currentBranchResult.stdout.trim() !== 'HEAD' - ) { - return currentBranchResult.stdout.trim(); - } - - // Fallback: check common default branch names - const commonBranches = ['main', 'master', 'develop', 'trunk']; - for (const branch of commonBranches) { - const checkResult = await this.sshService.executeCommand( - connectionId, - `git rev-parse --verify ${quoteShellArg(branch)} 2>/dev/null`, - normalizedProjectPath - ); - if (checkResult.exitCode === 0) { - return branch; - } - } - - return 'HEAD'; - } - - async createWorktree( - connectionId: string, - projectPath: string, - taskName: string, - baseRef?: string - ): Promise { - const normalizedProjectPath = this.normalizeRemotePath(projectPath); - const slug = taskName - .toLowerCase() - .replace(/[^a-z0-9-]/g, '-') - .replace(/-+/g, 
'-') - .replace(/^-|-$/g, ''); - const worktreeName = `${slug || 'task'}-${Date.now()}`; - const relWorktreePath = `.emdash/worktrees/${worktreeName}`; - const worktreePath = `${normalizedProjectPath}/${relWorktreePath}`.replace(/\/+/g, '/'); - - // Create worktrees directory (relative so we avoid quoting issues) - await this.sshService.executeCommand( - connectionId, - 'mkdir -p .emdash/worktrees', - normalizedProjectPath - ); - - // Auto-detect default branch if baseRef is not provided or is invalid - let base = (baseRef || '').trim(); - - // If no base provided, use auto-detection - if (!base) { - base = await this.getDefaultBranch(connectionId, normalizedProjectPath); - } else { - // Always verify the provided branch exists, regardless of what it is - const verifyResult = await this.sshService.executeCommand( - connectionId, - `git rev-parse --verify ${quoteShellArg(base)} 2>/dev/null`, - normalizedProjectPath - ); - - if (verifyResult.exitCode !== 0) { - // Branch doesn't exist, auto-detect the actual default branch - base = await this.getDefaultBranch(connectionId, normalizedProjectPath); - } - } - - if (!base) { - base = 'HEAD'; - } - - const result = await this.sshService.executeCommand( - connectionId, - `git worktree add ${quoteShellArg(relWorktreePath)} -b ${quoteShellArg(worktreeName)} ${quoteShellArg( - base - )}`, - normalizedProjectPath - ); - - if (result.exitCode !== 0) { - throw new Error(`Failed to create worktree: ${result.stderr}`); - } - - return { - path: worktreePath, - branch: worktreeName, - isMain: false, - }; - } - - async removeWorktree( - connectionId: string, - projectPath: string, - worktreePath: string - ): Promise { - const normalizedProjectPath = this.normalizeRemotePath(projectPath); - const normalizedWorktreePath = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - `git worktree remove ${quoteShellArg(normalizedWorktreePath)} --force`, - normalizedProjectPath - ); - - 
if (result.exitCode !== 0) { - throw new Error(`Failed to remove worktree: ${result.stderr}`); - } - } - - async listWorktrees(connectionId: string, projectPath: string): Promise { - const normalizedProjectPath = this.normalizeRemotePath(projectPath); - const result = await this.sshService.executeCommand( - connectionId, - 'git worktree list --porcelain', - normalizedProjectPath - ); - - if (result.exitCode !== 0) { - throw new Error(`Failed to list worktrees: ${result.stderr}`); - } - - // Porcelain output is blocks separated by blank lines. - // Each block begins with: worktree - // Optional: branch - // Optional: detached - const blocks = result.stdout - .split(/\n\s*\n/g) - .map((b) => b.trim()) - .filter(Boolean); - - const out: WorktreeInfo[] = []; - for (const block of blocks) { - const lines = block.split('\n').map((l) => l.trim()); - const wtLine = lines.find((l) => l.startsWith('worktree ')); - if (!wtLine) continue; - const wtPath = wtLine.slice('worktree '.length).trim(); - const branchLine = lines.find((l) => l.startsWith('branch ')); - const branchRef = branchLine ? 
branchLine.slice('branch '.length).trim() : ''; - const branch = branchRef.replace(/^refs\/heads\//, '') || 'HEAD'; - const isMain = this.normalizeRemotePath(wtPath) === normalizedProjectPath; - out.push({ path: wtPath, branch, isMain }); - } - return out; - } - - async getWorktreeStatus( - connectionId: string, - worktreePath: string - ): Promise<{ - hasChanges: boolean; - stagedFiles: string[]; - unstagedFiles: string[]; - untrackedFiles: string[]; - }> { - const normalizedWorktreePath = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - 'git status --porcelain --untracked-files=all', - normalizedWorktreePath - ); - - if (result.exitCode !== 0) { - throw new Error(`Git status failed: ${result.stderr}`); - } - - const stagedFiles: string[] = []; - const unstagedFiles: string[] = []; - const untrackedFiles: string[] = []; - const lines = (result.stdout || '') - .trim() - .split('\n') - .filter((l) => l.length > 0); - - for (const line of lines) { - const status = line.substring(0, 2); - const file = line.substring(3); - if (status.includes('A') || status.includes('M') || status.includes('D')) { - stagedFiles.push(file); - } - if (status[1] === 'M' || status[1] === 'D') { - unstagedFiles.push(file); - } - if (status.includes('??')) { - untrackedFiles.push(file); - } - } - - return { - hasChanges: stagedFiles.length > 0 || unstagedFiles.length > 0 || untrackedFiles.length > 0, - stagedFiles, - unstagedFiles, - untrackedFiles, - }; - } - - async getBranchList(connectionId: string, projectPath: string): Promise { - const result = await this.sshService.executeCommand( - connectionId, - 'git branch -a --format="%(refname:short)"', - this.normalizeRemotePath(projectPath) - ); - - if (result.exitCode !== 0) { - return []; - } - - return result.stdout.split('\n').filter((b) => b.trim()); - } - - async commit( - connectionId: string, - worktreePath: string, - message: string, - files?: string[] - ): Promise { - 
let command = 'git commit'; - - if (files && files.length > 0) { - const fileList = files.map((f) => quoteShellArg(f)).join(' '); - command = `git add ${fileList} && ${command}`; - } - - command += ` -m ${quoteShellArg(message)}`; - - return this.sshService.executeCommand( - connectionId, - command, - this.normalizeRemotePath(worktreePath) - ); - } - - // --------------------------------------------------------------------------- - // Git operations for IPC parity with local GitService - // --------------------------------------------------------------------------- - - /** - * Detailed git status matching the shape returned by local GitService.getStatus(). - * Parses porcelain output, numstat diffs, and untracked file line counts. - */ - async getStatusDetailed(connectionId: string, worktreePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - - // Verify git repo - const verifyResult = await this.sshService.executeCommand( - connectionId, - 'git rev-parse --is-inside-work-tree', - cwd - ); - if (verifyResult.exitCode !== 0) { - return []; - } - - // Get porcelain status - const statusResult = await this.sshService.executeCommand( - connectionId, - 'git status --porcelain --untracked-files=all', - cwd - ); - if (statusResult.exitCode !== 0) { - throw new Error(`Git status failed: ${statusResult.stderr}`); - } - - const statusOutput = statusResult.stdout; - if (!statusOutput.trim()) return []; - - const statusLines = statusOutput - .split('\n') - .map((l) => l.replace(/\r$/, '')) - .filter((l) => l.length > 0); - - // Batch-fetch numstat for staged and unstaged changes (one SSH call each, not per-file) - const [stagedNumstat, unstagedNumstat] = await Promise.all([ - this.sshService.executeCommand(connectionId, 'git diff --numstat --cached', cwd), - this.sshService.executeCommand(connectionId, 'git diff --numstat', cwd), - ]); - - const parseNumstat = (stdout: string): Map => { - const map = new Map(); - for (const line of 
stdout.split('\n').filter((l) => l.trim())) { - const parts = line.split('\t'); - if (parts.length >= 3) { - const add = parts[0] === '-' ? 0 : parseInt(parts[0], 10) || 0; - const del = parts[1] === '-' ? 0 : parseInt(parts[1], 10) || 0; - map.set(parts[2], { add, del }); - } - } - return map; - }; - - const stagedStats = parseNumstat(stagedNumstat.stdout || ''); - const unstagedStats = parseNumstat(unstagedNumstat.stdout || ''); - - // Collect untracked file paths so we can batch their line counts - const untrackedPaths: string[] = []; - - const changes: GitChange[] = []; - for (const line of statusLines) { - const statusCode = line.substring(0, 2); - let filePath = line.substring(3); - if (statusCode.includes('R') && filePath.includes('->')) { - const parts = filePath.split('->'); - filePath = parts[parts.length - 1].trim(); - } - - let status = 'modified'; - if (statusCode.includes('A') || statusCode.includes('?')) status = 'added'; - else if (statusCode.includes('D')) status = 'deleted'; - else if (statusCode.includes('R')) status = 'renamed'; - else if (statusCode.includes('M')) status = 'modified'; - - const isStaged = statusCode[0] !== ' ' && statusCode[0] !== '?'; - - const staged = stagedStats.get(filePath); - const unstaged = unstagedStats.get(filePath); - const additions = (staged?.add ?? 0) + (unstaged?.add ?? 0); - const deletions = (staged?.del ?? 0) + (unstaged?.del ?? 
0); - - if (additions === 0 && deletions === 0 && statusCode.includes('?')) { - untrackedPaths.push(filePath); - } - - changes.push({ path: filePath, status, additions, deletions, isStaged }); - } - - // Batch line-count for untracked files (skip files > 512KB) - if (untrackedPaths.length > 0) { - const escaped = untrackedPaths.map((f) => quoteShellArg(f)).join(' '); - // For each file: if <= 512KB, count newlines; otherwise print -1 - const script = - `for f in ${escaped}; do ` + - `s=$(stat -c%s "$f" 2>/dev/null || stat -f%z "$f" 2>/dev/null); ` + - `if [ "$s" -le ${MAX_DIFF_CONTENT_BYTES} ] 2>/dev/null; then ` + - `wc -l < "$f" 2>/dev/null || echo -1; ` + - `else echo -1; fi; done`; - const countResult = await this.sshService.executeCommand(connectionId, script, cwd); - if (countResult.exitCode === 0) { - const counts = countResult.stdout - .split('\n') - .map((l) => l.trim()) - .filter((l) => l.length > 0); - for (let i = 0; i < untrackedPaths.length && i < counts.length; i++) { - const count = parseInt(counts[i], 10); - if (count >= 0) { - const change = changes.find((c) => c.path === untrackedPaths[i]); - if (change) change.additions = count; - } - } - } - } - - return changes; - } - - /** - * Per-file diff matching the shape returned by local GitService.getFileDiff(). - * Uses a diff-first pattern: run git diff, check for binary, then fetch content only if non-binary. 
- */ - async getFileDiff( - connectionId: string, - worktreePath: string, - filePath: string - ): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - - // Step 1: Run git diff - const diffResult = await this.sshService.executeCommand( - connectionId, - `git diff --no-color --unified=2000 HEAD -- ${quoteShellArg(filePath)}`, - cwd - ); - - // Step 2: Parse and check binary - let diffLines: DiffLine[] = []; - if (diffResult.exitCode === 0 && diffResult.stdout.trim()) { - const { lines, isBinary } = parseDiffLines(diffResult.stdout); - if (isBinary) { - return { lines: [], isBinary: true }; - } - diffLines = lines; - } - - // Step 3: Fetch content ONCE (non-binary only, covers both diff-success and fallback paths) - const [showResult, catResult] = await Promise.all([ - this.sshService.executeCommand( - connectionId, - `s=$(git cat-file -s HEAD:${quoteShellArg(filePath)} 2>/dev/null); ` + - `if [ "$s" -le ${MAX_DIFF_CONTENT_BYTES} ] 2>/dev/null; then git show HEAD:${quoteShellArg(filePath)}; ` + - `else echo "__EMDASH_TOO_LARGE__"; fi`, - cwd - ), - this.sshService.executeCommand( - connectionId, - `s=$(stat -c%s ${quoteShellArg(filePath)} 2>/dev/null || stat -f%z ${quoteShellArg(filePath)} 2>/dev/null); ` + - `if [ "$s" -le ${MAX_DIFF_CONTENT_BYTES} ] 2>/dev/null; then cat ${quoteShellArg(filePath)}; else echo "__EMDASH_TOO_LARGE__"; fi`, - cwd - ), - ]); - - const rawOriginal = - showResult.exitCode === 0 ? stripTrailingNewline(showResult.stdout) : undefined; - const originalContent = rawOriginal === '__EMDASH_TOO_LARGE__' ? undefined : rawOriginal; - - const rawModified = - catResult.exitCode === 0 ? stripTrailingNewline(catResult.stdout) : undefined; - const modifiedContent = rawModified === '__EMDASH_TOO_LARGE__' ? 
undefined : rawModified; - - // Step 4: Return based on what we have - if (diffLines.length > 0) return { lines: diffLines, originalContent, modifiedContent }; - - // Fallback: empty diff or diff failed — determine untracked/deleted from content - if (modifiedContent !== undefined) { - return { - lines: modifiedContent.split('\n').map((l) => ({ right: l, type: 'add' as const })), - modifiedContent, - }; - } - if (originalContent !== undefined) { - return { - lines: originalContent.split('\n').map((l) => ({ left: l, type: 'del' as const })), - originalContent, - }; - } - return { lines: [] }; - } - - async stageFile(connectionId: string, worktreePath: string, filePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - `git add -- ${quoteShellArg(filePath)}`, - cwd - ); - if (result.exitCode !== 0) { - throw new Error(`Failed to stage file: ${result.stderr}`); - } - } - - async stageAllFiles(connectionId: string, worktreePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand(connectionId, 'git add -A', cwd); - if (result.exitCode !== 0) { - throw new Error(`Failed to stage all files: ${result.stderr}`); - } - } - - async unstageFile(connectionId: string, worktreePath: string, filePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - `git reset HEAD -- ${quoteShellArg(filePath)}`, - cwd - ); - if (result.exitCode !== 0) { - throw new Error(`Failed to unstage file: ${result.stderr}`); - } - } - - async revertFile( - connectionId: string, - worktreePath: string, - filePath: string - ): Promise<{ action: 'reverted' }> { - const cwd = this.normalizeRemotePath(worktreePath); - - // Check if file exists in HEAD - const catFileResult = await this.sshService.executeCommand( - connectionId, - `git cat-file -e 
HEAD:${quoteShellArg(filePath)}`, - cwd - ); - - if (catFileResult.exitCode !== 0) { - // File doesn't exist in HEAD — it's untracked. Delete it. - await this.sshService.executeCommand( - connectionId, - `rm -f -- ${quoteShellArg(filePath)}`, - cwd - ); - return { action: 'reverted' }; - } - - // File exists in HEAD — revert it - const checkoutResult = await this.sshService.executeCommand( - connectionId, - `git checkout HEAD -- ${quoteShellArg(filePath)}`, - cwd - ); - if (checkoutResult.exitCode !== 0) { - throw new Error(`Failed to revert file: ${checkoutResult.stderr}`); - } - return { action: 'reverted' }; - } - - // --------------------------------------------------------------------------- - // Commit, push, and branch operations - // --------------------------------------------------------------------------- - - async getCurrentBranch(connectionId: string, worktreePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - 'git branch --show-current', - cwd - ); - return (result.stdout || '').trim(); - } - - /** - * Detect the default branch name using the remote HEAD or common conventions. - * Unlike getDefaultBranch(), this specifically queries origin's default (not current branch). 
- */ - async getDefaultBranchName(connectionId: string, worktreePath: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - - // Try gh CLI first - const ghResult = await this.sshService.executeCommand( - connectionId, - 'gh repo view --json defaultBranchRef -q .defaultBranchRef.name 2>/dev/null', - cwd - ); - if (ghResult.exitCode === 0 && ghResult.stdout.trim()) { - return ghResult.stdout.trim(); - } - - // Fallback: parse git remote show origin - const remoteResult = await this.sshService.executeCommand( - connectionId, - 'git remote show origin 2>/dev/null | sed -n "/HEAD branch/s/.*: //p"', - cwd - ); - if (remoteResult.exitCode === 0 && remoteResult.stdout.trim()) { - return remoteResult.stdout.trim(); - } - - // Fallback: symbolic-ref - const symrefResult = await this.sshService.executeCommand( - connectionId, - 'git symbolic-ref --short refs/remotes/origin/HEAD 2>/dev/null', - cwd - ); - if (symrefResult.exitCode === 0 && symrefResult.stdout.trim()) { - const parts = symrefResult.stdout.trim().split('/'); - return parts[parts.length - 1]; - } - - return 'main'; - } - - async createBranch(connectionId: string, worktreePath: string, name: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - const result = await this.sshService.executeCommand( - connectionId, - `git checkout -b ${quoteShellArg(name)}`, - cwd - ); - if (result.exitCode !== 0) { - throw new Error(`Failed to create branch: ${result.stderr}`); - } - } - - async push( - connectionId: string, - worktreePath: string, - branch?: string, - setUpstream?: boolean - ): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - let cmd = 'git push'; - if (setUpstream && branch) { - cmd = `git push --set-upstream origin ${quoteShellArg(branch)}`; - } - return this.sshService.executeCommand(connectionId, cmd, cwd); - } - - async getBranchStatus( - connectionId: string, - worktreePath: string - ): Promise<{ branch: string; defaultBranch: string; ahead: number; 
behind: number }> { - const cwd = this.normalizeRemotePath(worktreePath); - - const branch = await this.getCurrentBranch(connectionId, worktreePath); - const defaultBranch = await this.getDefaultBranchName(connectionId, worktreePath); - - let ahead = 0; - let behind = 0; - const revListResult = await this.sshService.executeCommand( - connectionId, - `git rev-list --left-right --count origin/${quoteShellArg(defaultBranch)}...HEAD 2>/dev/null`, - cwd - ); - if (revListResult.exitCode === 0) { - const parts = (revListResult.stdout || '').trim().split(/\s+/); - if (parts.length >= 2) { - behind = parseInt(parts[0] || '0', 10) || 0; - ahead = parseInt(parts[1] || '0', 10) || 0; - } - } else { - // Fallback: parse git status -sb - const statusResult = await this.sshService.executeCommand( - connectionId, - 'git status -sb', - cwd - ); - if (statusResult.exitCode === 0) { - const line = (statusResult.stdout || '').split('\n')[0] || ''; - const aheadMatch = line.match(/ahead\s+(\d+)/i); - const behindMatch = line.match(/behind\s+(\d+)/i); - if (aheadMatch) ahead = parseInt(aheadMatch[1], 10) || 0; - if (behindMatch) behind = parseInt(behindMatch[1], 10) || 0; - } - } - - return { branch, defaultBranch, ahead, behind }; - } - - async listBranches( - connectionId: string, - projectPath: string, - remote = 'origin' - ): Promise> { - const cwd = this.normalizeRemotePath(projectPath); - - // Check if remote exists - let hasRemote = false; - const remoteCheck = await this.sshService.executeCommand( - connectionId, - `git remote get-url ${quoteShellArg(remote)} 2>/dev/null`, - cwd - ); - if (remoteCheck.exitCode === 0) { - hasRemote = true; - // Try to fetch (non-fatal) - await this.sshService.executeCommand( - connectionId, - `git fetch --prune ${quoteShellArg(remote)} 2>/dev/null`, - cwd - ); - } - - let branches: Array<{ ref: string; remote: string; branch: string; label: string }> = []; - - if (hasRemote) { - const { stdout } = await this.sshService.executeCommand( - 
connectionId, - `git for-each-ref --format="%(refname:short)" refs/remotes/${quoteShellArg(remote)}`, - cwd - ); - branches = (stdout || '') - .split('\n') - .map((l) => l.trim()) - .filter((l) => l.length > 0 && !l.endsWith('/HEAD')) - .map((ref) => { - const [remoteAlias, ...rest] = ref.split('/'); - const branch = rest.join('/') || ref; - return { - ref, - remote: remoteAlias || remote, - branch, - label: `${remoteAlias || remote}/${branch}`, - }; - }); - - // Include local-only branches - const localResult = await this.sshService.executeCommand( - connectionId, - 'git for-each-ref --format="%(refname:short)" refs/heads/', - cwd - ); - const remoteBranchNames = new Set(branches.map((b) => b.branch)); - const localOnly = (localResult.stdout || '') - .split('\n') - .map((l) => l.trim()) - .filter((l) => l.length > 0 && !remoteBranchNames.has(l)) - .map((branch) => ({ ref: branch, remote: '', branch, label: branch })); - branches = [...branches, ...localOnly]; - } else { - const localResult = await this.sshService.executeCommand( - connectionId, - 'git for-each-ref --format="%(refname:short)" refs/heads/', - cwd - ); - branches = (localResult.stdout || '') - .split('\n') - .map((l) => l.trim()) - .filter((l) => l.length > 0) - .map((branch) => ({ ref: branch, remote: '', branch, label: branch })); - } - - return branches; - } - - async renameBranch( - connectionId: string, - repoPath: string, - oldBranch: string, - newBranch: string - ): Promise<{ remotePushed: boolean }> { - const cwd = this.normalizeRemotePath(repoPath); - - // Check remote tracking before rename - let remotePushed = false; - let remoteName = 'origin'; - const configResult = await this.sshService.executeCommand( - connectionId, - `git config --get branch.${quoteShellArg(oldBranch)}.remote 2>/dev/null`, - cwd - ); - if (configResult.exitCode === 0 && configResult.stdout.trim()) { - remoteName = configResult.stdout.trim(); - remotePushed = true; - } else { - const lsResult = await 
this.sshService.executeCommand( - connectionId, - `git ls-remote --heads origin ${quoteShellArg(oldBranch)} 2>/dev/null`, - cwd - ); - if (lsResult.exitCode === 0 && lsResult.stdout.trim()) { - remotePushed = true; - } - } - - // Rename local branch - const renameResult = await this.sshService.executeCommand( - connectionId, - `git branch -m ${quoteShellArg(oldBranch)} ${quoteShellArg(newBranch)}`, - cwd - ); - if (renameResult.exitCode !== 0) { - throw new Error(`Failed to rename branch: ${renameResult.stderr}`); - } - - // Update remote if needed - if (remotePushed) { - // Delete old remote branch (non-fatal) - await this.sshService.executeCommand( - connectionId, - `git push ${quoteShellArg(remoteName)} --delete ${quoteShellArg(oldBranch)} 2>/dev/null`, - cwd - ); - // Push new branch - const pushResult = await this.sshService.executeCommand( - connectionId, - `git push -u ${quoteShellArg(remoteName)} ${quoteShellArg(newBranch)}`, - cwd - ); - if (pushResult.exitCode !== 0) { - throw new Error(`Failed to push renamed branch: ${pushResult.stderr}`); - } - } - - return { remotePushed }; - } - - // --------------------------------------------------------------------------- - // GitHub CLI operations (run gh commands over SSH) - // --------------------------------------------------------------------------- - - async execGh(connectionId: string, worktreePath: string, ghArgs: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - return this.sshService.executeCommand(connectionId, `gh ${ghArgs}`, cwd); - } - - async execGit(connectionId: string, worktreePath: string, gitArgs: string): Promise { - const cwd = this.normalizeRemotePath(worktreePath); - return this.sshService.executeCommand(connectionId, `git ${gitArgs}`, cwd); - } -} diff --git a/src/main/services/RemotePtyService.ts b/src/main/services/RemotePtyService.ts deleted file mode 100644 index 854f25c8c..000000000 --- a/src/main/services/RemotePtyService.ts +++ /dev/null @@ -1,200 +0,0 @@ 
-import { EventEmitter } from 'events'; -import { SshService } from './ssh/SshService'; -import { quoteShellArg, isValidEnvVarName } from '../utils/shellEscape'; - -export interface RemotePtyOptions { - id: string; - connectionId: string; - cwd: string; - shell: string; - autoApprove?: boolean; - initialPrompt?: string; - env?: Record; -} - -export interface RemotePty { - id: string; - write(data: string): void; - resize(cols: number, rows: number): void; - kill(): void; - onData(callback: (data: string) => void): void; - onExit(callback: (code: number) => void): void; -} - -/** - * Allowlist of shells that can be launched as remote PTYs. - * Only absolute paths to well-known shells are permitted. - */ -const ALLOWED_SHELLS = new Set([ - '/bin/bash', - '/bin/sh', - '/bin/zsh', - '/usr/bin/bash', - '/usr/bin/zsh', - '/usr/bin/fish', - '/usr/local/bin/bash', - '/usr/local/bin/zsh', - '/usr/local/bin/fish', -]); - -/** - * Service for managing remote PTY (pseudo-terminal) sessions over SSH. - * - * This service allows running interactive shell sessions on remote machines, - * including AI agent CLIs like Codex, Claude, etc. It provides: - * - Interactive shell sessions via ssh2 - * - Environment variable support - * - Working directory configuration - * - Auto-approve flag support for agents - * - Proper cleanup on exit - */ -export class RemotePtyService extends EventEmitter { - private ptys: Map = new Map(); - - constructor(private sshService: SshService) { - super(); - } - - /** - * Starts a new remote PTY session on an established SSH connection. 
- * - * @param options - Configuration for the remote PTY session - * @returns The created RemotePty instance - * @throws Error if connection not found or shell creation fails - */ - async startRemotePty(options: RemotePtyOptions): Promise { - const connection = this.sshService.getConnection(options.connectionId); - if (!connection) { - throw new Error(`Connection ${options.connectionId} not found`); - } - - const client = connection.client; - - return new Promise((resolve, reject) => { - client.shell((err, stream) => { - if (err) { - reject(err); - return; - } - - // Build command with environment and cwd - // Validate env var keys to prevent injection (CRITICAL #1) - const envEntries = Object.entries(options.env || {}).filter(([k]) => { - if (!isValidEnvVarName(k)) { - console.warn(`[RemotePtyService] Skipping invalid env var name: ${k}`); - return false; - } - return true; - }); - const envVars = envEntries.map(([k, v]) => `export ${k}=${quoteShellArg(v)}`).join(' && '); - - const cdCommand = options.cwd ? `cd ${quoteShellArg(options.cwd)}` : ''; - const autoApproveFlag = options.autoApprove ? ' --full-auto' : ''; - - // Validate shell against allowlist (HIGH #5) - const shellBinary = options.shell.split(/\s+/)[0]; - if (!ALLOWED_SHELLS.has(shellBinary)) { - reject( - new Error( - `Shell not allowed: ${shellBinary}. 
Allowed: ${[...ALLOWED_SHELLS].join(', ')}` - ) - ); - return; - } - - const fullCommand = [envVars, cdCommand, `${options.shell}${autoApproveFlag}`] - .filter(Boolean) - .join(' && '); - - // Send initial command - stream.write(fullCommand + '\n'); - - // Send initial prompt if provided - if (options.initialPrompt) { - setTimeout(() => { - stream.write(options.initialPrompt + '\n'); - }, 500); - } - - const pty: RemotePty = { - id: options.id, - write: (data: string) => stream.write(data), - // ssh2 expects rows, cols, height, width - resize: (cols: number, rows: number) => stream.setWindow(rows, cols, 0, 0), - kill: () => stream.close(), - onData: (callback) => stream.on('data', (data: Buffer) => callback(data.toString())), - onExit: (callback) => stream.on('close', () => callback(0)), - }; - - this.ptys.set(options.id, pty); - - stream.on('close', () => { - this.ptys.delete(options.id); - this.emit('exit', options.id); - }); - - resolve(pty); - }); - }); - } - - /** - * Writes data to a remote PTY session. - * - * @param ptyId - ID of the PTY session - * @param data - Data to write - */ - write(ptyId: string, data: string): void { - const pty = this.ptys.get(ptyId); - if (pty) { - pty.write(data); - } - } - - /** - * Resizes a remote PTY session. - * - * @param ptyId - ID of the PTY session - * @param cols - Number of columns - * @param rows - Number of rows - */ - resize(ptyId: string, cols: number, rows: number): void { - const pty = this.ptys.get(ptyId); - if (pty) { - pty.resize(cols, rows); - } - } - - /** - * Kills a remote PTY session. - * - * @param ptyId - ID of the PTY session - */ - kill(ptyId: string): void { - const pty = this.ptys.get(ptyId); - if (pty) { - pty.kill(); - this.ptys.delete(ptyId); - } - } - - /** - * Gets a PTY session by ID. 
- * - * @param ptyId - ID of the PTY session - * @returns The RemotePty instance or undefined - */ - getPty(ptyId: string): RemotePty | undefined { - return this.ptys.get(ptyId); - } - - /** - * Checks if a PTY session exists. - * - * @param ptyId - ID of the PTY session - * @returns true if the PTY exists - */ - hasPty(ptyId: string): boolean { - return this.ptys.has(ptyId); - } -} diff --git a/src/main/services/RepositoryManager.ts b/src/main/services/RepositoryManager.ts deleted file mode 100644 index ccd9d8f5b..000000000 --- a/src/main/services/RepositoryManager.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { exec } from 'child_process'; -import { promisify } from 'util'; - -const execAsync = promisify(exec); - -export interface Repo { - id: string; - path: string; - origin: string; - defaultBranch: string; - lastActivity?: string; - changes?: { - added: number; - removed: number; - }; -} - -export class RepositoryManager { - private repos: Map = new Map(); - - async scanRepositories(): Promise { - // Need to implement actual repository scanning - // For now, return empty array - return []; - } - - async addRepository(path: string): Promise { - try { - // Validate that the path is a git repository - const { stdout } = await execAsync(`cd "${path}" && git rev-parse --is-inside-work-tree`); - - if (stdout.trim() !== 'true') { - throw new Error('Not a git repository'); - } - - // Get repository info - const [origin, defaultBranch] = await Promise.all([ - this.getOrigin(path), - this.getDefaultBranch(path), - ]); - - const repo: Repo = { - id: this.generateId(), - path, - origin, - defaultBranch, - lastActivity: new Date().toISOString(), - }; - - this.repos.set(repo.id, repo); - return repo; - } catch (error) { - throw new Error(`Failed to add repository: ${error}`); - } - } - - private async getOrigin(path: string): Promise { - try { - const { stdout } = await execAsync(`cd "${path}" && git remote get-url origin`); - return stdout.trim(); - } catch { - return 'No 
origin'; - } - } - - private async getDefaultBranch(path: string): Promise { - try { - const { stdout } = await execAsync( - `cd "${path}" && git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'` - ); - return stdout.trim() || 'main'; - } catch { - return 'main'; - } - } - - private generateId(): string { - return Math.random().toString(36).substr(2, 9); - } - - getRepository(id: string): Repo | undefined { - return this.repos.get(id); - } - - getAllRepositories(): Repo[] { - return Array.from(this.repos.values()); - } -} diff --git a/src/main/services/TaskLifecycleService.ts b/src/main/services/TaskLifecycleService.ts deleted file mode 100644 index 77f5c25f0..000000000 --- a/src/main/services/TaskLifecycleService.ts +++ /dev/null @@ -1,527 +0,0 @@ -import { EventEmitter } from 'node:events'; -import { spawn, type ChildProcess } from 'node:child_process'; -import path from 'node:path'; -import { promisify } from 'node:util'; -import { lifecycleScriptsService } from './LifecycleScriptsService'; -import { - type LifecycleEvent, - type LifecyclePhase, - type LifecyclePhaseState, - type TaskLifecycleState, -} from '@shared/lifecycle'; -import { getTaskEnvVars } from '@shared/task/envVars'; -import { log } from '../lib/logger'; -import { execFile } from 'node:child_process'; - -const execFileAsync = promisify(execFile); - -type LifecycleResult = { - ok: boolean; - skipped?: boolean; - error?: string; -}; - -class TaskLifecycleService extends EventEmitter { - private states = new Map(); - private runProcesses = new Map(); - private finiteProcesses = new Map>(); - private runStartInflight = new Map>(); - private setupInflight = new Map>(); - private teardownInflight = new Map>(); - private stopIntents = new Set(); - - private nowIso(): string { - return new Date().toISOString(); - } - - private inflightKey(taskId: string, taskPath: string): string { - return `${taskId}::${taskPath}`; - } - - private killProcessTree(proc: ChildProcess, signal: 
NodeJS.Signals): void { - const pid = proc.pid; - if (!pid) return; - - if (process.platform === 'win32') { - const args = ['/PID', String(pid), '/T']; - if (signal === 'SIGKILL') { - args.push('/F'); - } - const killer = spawn('taskkill', args, { stdio: 'ignore' }); - killer.unref(); - return; - } - - try { - // Detached shell commands run as their own process group. - process.kill(-pid, signal); - } catch { - proc.kill(signal); - } - } - - private trackFiniteProcess(taskId: string, proc: ChildProcess): () => void { - const set = this.finiteProcesses.get(taskId) ?? new Set(); - set.add(proc); - this.finiteProcesses.set(taskId, set); - return () => { - const current = this.finiteProcesses.get(taskId); - if (!current) return; - current.delete(proc); - if (current.size === 0) { - this.finiteProcesses.delete(taskId); - } - }; - } - - private async resolveDefaultBranch(projectPath: string): Promise { - try { - const { stdout } = await execFileAsync( - 'git', - ['symbolic-ref', '--short', 'refs/remotes/origin/HEAD'], - { cwd: projectPath } - ); - const ref = stdout.trim(); - if (ref) { - return ref.replace(/^origin\//, ''); - } - } catch {} - - try { - const { stdout } = await execFileAsync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { - cwd: projectPath, - }); - const branch = stdout.trim(); - if (branch && branch !== 'HEAD') { - return branch; - } - } catch {} - - return 'main'; - } - - private async buildLifecycleEnv( - taskId: string, - taskPath: string, - projectPath: string, - taskName?: string - ): Promise { - const defaultBranch = await this.resolveDefaultBranch(projectPath); - taskName = taskName || path.basename(taskPath) || taskId; - const taskEnv = getTaskEnvVars({ - taskId, - taskName, - taskPath, - projectPath, - defaultBranch, - portSeed: taskPath || taskId, - }); - return { ...process.env, ...taskEnv }; - } - - private createPhaseState(): LifecyclePhaseState { - return { status: 'idle', error: null, exitCode: null }; - } - - private 
defaultState(taskId: string): TaskLifecycleState { - return { - taskId, - setup: this.createPhaseState(), - run: { ...this.createPhaseState(), pid: null }, - teardown: this.createPhaseState(), - }; - } - - private ensureState(taskId: string): TaskLifecycleState { - const existing = this.states.get(taskId); - if (existing) return existing; - const state = this.defaultState(taskId); - this.states.set(taskId, state); - return state; - } - - private emitLifecycleEvent( - taskId: string, - phase: LifecyclePhase, - status: LifecycleEvent['status'], - extras?: Partial - ): void { - const evt: LifecycleEvent = { - taskId, - phase, - status, - timestamp: this.nowIso(), - ...(extras || {}), - }; - this.emit('event', evt); - } - - private runFinite( - taskId: string, - taskPath: string, - projectPath: string, - phase: Extract, - taskName?: string - ): Promise { - const script = lifecycleScriptsService.getScript(projectPath, phase); - if (!script) return Promise.resolve({ ok: true, skipped: true }); - - const state = this.ensureState(taskId); - state[phase] = { - status: 'running', - startedAt: this.nowIso(), - finishedAt: undefined, - exitCode: null, - error: null, - }; - this.emitLifecycleEvent(taskId, phase, 'starting'); - - return new Promise((resolve) => { - void (async () => { - let settled = false; - const finish = (result: LifecycleResult, nextState: LifecyclePhaseState): void => { - if (settled) return; - settled = true; - state[phase] = nextState; - resolve(result); - }; - try { - const env = await this.buildLifecycleEnv(taskId, taskPath, projectPath, taskName); - const child = spawn(script, { - cwd: taskPath, - shell: true, - env, - detached: true, - }); - const untrackFinite = this.trackFiniteProcess(taskId, child); - const onData = (buf: Buffer) => { - const line = buf.toString(); - this.emitLifecycleEvent(taskId, phase, 'line', { line }); - }; - child.stdout?.on('data', onData); - child.stderr?.on('data', onData); - child.on('error', (error) => { - 
untrackFinite(); - const message = error?.message || String(error); - this.emitLifecycleEvent(taskId, phase, 'error', { error: message }); - finish( - { ok: false, error: message }, - { - ...state[phase], - status: 'failed', - finishedAt: this.nowIso(), - error: message, - } - ); - }); - child.on('exit', (code) => { - untrackFinite(); - const ok = code === 0; - this.emitLifecycleEvent(taskId, phase, ok ? 'done' : 'error', { - exitCode: code, - ...(ok ? {} : { error: `Exited with code ${String(code)}` }), - }); - finish(ok ? { ok: true } : { ok: false, error: `Exited with code ${String(code)}` }, { - ...state[phase], - status: ok ? 'succeeded' : 'failed', - finishedAt: this.nowIso(), - exitCode: code, - error: ok ? null : `Exited with code ${String(code)}`, - }); - }); - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - this.emitLifecycleEvent(taskId, phase, 'error', { error: message }); - finish( - { ok: false, error: message }, - { - ...state[phase], - status: 'failed', - finishedAt: this.nowIso(), - error: message, - } - ); - } - })(); - }); - } - - async runSetup( - taskId: string, - taskPath: string, - projectPath: string, - taskName?: string - ): Promise { - const key = this.inflightKey(taskId, taskPath); - if (this.setupInflight.has(key)) { - return this.setupInflight.get(key)!; - } - const run = this.runFinite(taskId, taskPath, projectPath, 'setup', taskName).finally(() => { - this.setupInflight.delete(key); - }); - this.setupInflight.set(key, run); - return run; - } - - async startRun( - taskId: string, - taskPath: string, - projectPath: string, - taskName?: string - ): Promise { - const inflight = this.runStartInflight.get(taskId); - if (inflight) return inflight; - - const run = this.startRunInternal(taskId, taskPath, projectPath, taskName).finally(() => { - if (this.runStartInflight.get(taskId) === run) { - this.runStartInflight.delete(taskId); - } - }); - this.runStartInflight.set(taskId, run); - return run; - 
} - - private async startRunInternal( - taskId: string, - taskPath: string, - projectPath: string, - taskName?: string - ): Promise { - const setupScript = lifecycleScriptsService.getScript(projectPath, 'setup'); - if (setupScript) { - const setupStatus = this.ensureState(taskId).setup.status; - if (setupStatus === 'running') { - return { ok: false, error: 'Setup is still running' }; - } - if (setupStatus === 'failed') { - return { ok: false, error: 'Setup failed. Fix setup before starting run' }; - } - if (setupStatus !== 'succeeded') { - return { ok: false, error: 'Setup has not completed yet' }; - } - } - - const script = lifecycleScriptsService.getScript(projectPath, 'run'); - if (!script) return { ok: true, skipped: true }; - - const existing = this.runProcesses.get(taskId); - if (existing && existing.exitCode === null && !existing.killed) { - return { ok: true, skipped: true }; - } - - const state = this.ensureState(taskId); - state.run = { - status: 'running', - startedAt: this.nowIso(), - finishedAt: undefined, - exitCode: null, - error: null, - pid: null, - }; - this.emitLifecycleEvent(taskId, 'run', 'starting'); - - try { - const env = await this.buildLifecycleEnv(taskId, taskPath, projectPath, taskName); - const child = spawn(script, { - cwd: taskPath, - shell: true, - env, - detached: true, - }); - this.runProcesses.set(taskId, child); - state.run.pid = child.pid ?? 
null; - - const onData = (buf: Buffer) => { - const line = buf.toString(); - this.emitLifecycleEvent(taskId, 'run', 'line', { line }); - }; - child.stdout?.on('data', onData); - child.stderr?.on('data', onData); - child.on('error', (error) => { - if (this.runProcesses.get(taskId) !== child) return; - this.runProcesses.delete(taskId); - this.stopIntents.delete(taskId); - const message = error?.message || String(error); - const cur = this.ensureState(taskId); - cur.run = { - ...cur.run, - status: 'failed', - finishedAt: this.nowIso(), - error: message, - }; - this.emitLifecycleEvent(taskId, 'run', 'error', { error: message }); - }); - child.on('exit', (code) => { - if (this.runProcesses.get(taskId) !== child) return; - this.runProcesses.delete(taskId); - const wasStopped = this.stopIntents.has(taskId); - this.stopIntents.delete(taskId); - const cur = this.ensureState(taskId); - cur.run = { - ...cur.run, - status: wasStopped ? 'idle' : code === 0 ? 'succeeded' : 'failed', - finishedAt: this.nowIso(), - exitCode: code, - pid: null, - error: wasStopped || code === 0 ? null : `Exited with code ${String(code)}`, - }; - this.emitLifecycleEvent(taskId, 'run', 'exit', { exitCode: code }); - }); - - return { ok: true }; - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - state.run = { - ...state.run, - status: 'failed', - finishedAt: this.nowIso(), - error: message, - pid: null, - }; - this.emitLifecycleEvent(taskId, 'run', 'error', { error: message }); - return { ok: false, error: message }; - } - } - - stopRun(taskId: string): LifecycleResult { - const proc = this.runProcesses.get(taskId); - if (!proc) return { ok: true, skipped: true }; - - this.stopIntents.add(taskId); - try { - this.killProcessTree(proc, 'SIGTERM'); - setTimeout(() => { - const current = this.runProcesses.get(taskId); - if (!current || current !== proc) return; - this.killProcessTree(proc, 'SIGKILL'); - }, 8_000); - return { ok: true }; - } catch (error) { - this.stopIntents.delete(taskId); - const message = error instanceof Error ? error.message : String(error); - const cur = this.ensureState(taskId); - cur.run = { - ...cur.run, - status: 'failed', - finishedAt: this.nowIso(), - error: message, - }; - log.warn('Failed to stop run process', { taskId, error: message }); - return { ok: false, error: message }; - } - } - - async runTeardown( - taskId: string, - taskPath: string, - projectPath: string, - taskName?: string - ): Promise { - const key = this.inflightKey(taskId, taskPath); - if (this.teardownInflight.has(key)) { - return this.teardownInflight.get(key)!; - } - const run = (async () => { - // Serialize teardown behind setup for this task/worktree key. - const setupRun = this.setupInflight.get(key); - if (setupRun) { - await setupRun.catch(() => {}); - } - - // Ensure a managed run process is stopped before teardown starts. 
- const existingRun = this.runProcesses.get(taskId); - if (existingRun) { - this.stopRun(taskId); - await new Promise((resolve) => { - let done = false; - const finish = () => { - if (done) return; - done = true; - resolve(); - }; - const timer = setTimeout(() => { - log.warn('Timed out waiting for run process to exit before teardown', { taskId }); - finish(); - }, 10_000); - existingRun.once('exit', () => { - clearTimeout(timer); - finish(); - }); - }); - } - return this.runFinite(taskId, taskPath, projectPath, 'teardown', taskName); - })().finally(() => { - this.teardownInflight.delete(key); - }); - this.teardownInflight.set(key, run); - return run; - } - - getState(taskId: string): TaskLifecycleState { - return this.ensureState(taskId); - } - - clearTask(taskId: string): void { - this.states.delete(taskId); - this.stopIntents.delete(taskId); - this.runStartInflight.delete(taskId); - - const prefix = `${taskId}::`; - for (const key of this.setupInflight.keys()) { - if (key.startsWith(prefix)) { - this.setupInflight.delete(key); - } - } - for (const key of this.teardownInflight.keys()) { - if (key.startsWith(prefix)) { - this.teardownInflight.delete(key); - } - } - - const proc = this.runProcesses.get(taskId); - if (proc) { - try { - this.killProcessTree(proc, 'SIGTERM'); - } catch {} - this.runProcesses.delete(taskId); - } - - const finite = this.finiteProcesses.get(taskId); - if (finite) { - for (const child of finite) { - try { - this.killProcessTree(child, 'SIGTERM'); - } catch {} - } - this.finiteProcesses.delete(taskId); - } - } - - shutdown(): void { - for (const [taskId, proc] of this.runProcesses.entries()) { - try { - this.stopIntents.add(taskId); - this.killProcessTree(proc, 'SIGTERM'); - } catch {} - } - for (const procs of this.finiteProcesses.values()) { - for (const proc of procs) { - try { - this.killProcessTree(proc, 'SIGTERM'); - } catch {} - } - } - this.runProcesses.clear(); - this.finiteProcesses.clear(); - this.runStartInflight.clear(); - 
this.setupInflight.clear(); - this.teardownInflight.clear(); - } - - onEvent(listener: (evt: LifecycleEvent) => void): () => void { - this.on('event', listener); - return () => this.off('event', listener); - } -} - -export const taskLifecycleService = new TaskLifecycleService(); diff --git a/src/main/services/TerminalConfigParser.ts b/src/main/services/TerminalConfigParser.ts deleted file mode 100644 index c5bb03928..000000000 --- a/src/main/services/TerminalConfigParser.ts +++ /dev/null @@ -1,837 +0,0 @@ -import { existsSync, readFileSync } from 'fs'; -import { homedir } from 'os'; -import { join } from 'path'; -import { execSync } from 'child_process'; -import { log } from '../lib/logger'; - -export interface TerminalTheme { - background?: string; - foreground?: string; - cursor?: string; - cursorAccent?: string; - selectionBackground?: string; - black?: string; - red?: string; - green?: string; - yellow?: string; - blue?: string; - magenta?: string; - cyan?: string; - white?: string; - brightBlack?: string; - brightRed?: string; - brightGreen?: string; - brightYellow?: string; - brightBlue?: string; - brightMagenta?: string; - brightCyan?: string; - brightWhite?: string; - fontFamily?: string; - fontSize?: number; -} - -export interface TerminalConfig { - terminal: string; - theme: TerminalTheme; -} - -/** - * Detect the user's preferred terminal emulator and extract its theme configuration. 
- * Supports: iTerm2, Terminal.app, Alacritty, Ghostty, Kitty - */ -export function detectAndLoadTerminalConfig(): TerminalConfig | null { - if (process.platform === 'darwin') { - return detectMacOSTerminal(); - } else if (process.platform === 'win32') { - return detectWindowsTerminal(); - } else if (process.platform === 'linux') { - return detectLinuxTerminal(); - } - return null; -} - -function detectMacOSTerminal(): TerminalConfig | null { - // Check iTerm2 first (most popular) - const iterm2Config = loadiTerm2Config(); - if (iterm2Config) { - log.debug('terminalConfig:detected', { terminal: 'iTerm2' }); - return iterm2Config; - } - - // Check Terminal.app - const terminalAppConfig = loadTerminalAppConfig(); - if (terminalAppConfig) { - log.debug('terminalConfig:detected', { terminal: 'Terminal.app' }); - return terminalAppConfig; - } - - // Check Alacritty - const alacrittyConfig = loadAlacrittyConfig(); - if (alacrittyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Alacritty' }); - return alacrittyConfig; - } - - // Check Ghostty - const ghosttyConfig = loadGhosttyConfig(); - if (ghosttyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Ghostty' }); - return ghosttyConfig; - } - - // Check Kitty - const kittyConfig = loadKittyConfig(); - if (kittyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Kitty' }); - return kittyConfig; - } - - return null; -} - -function detectWindowsTerminal(): TerminalConfig | null { - // Windows Terminal stores config in JSON at: - // %LOCALAPPDATA%\Packages\Microsoft.WindowsTerminal_8wekyb3d8bbwe\LocalState\settings.json - try { - const localAppData = process.env.LOCALAPPDATA; - if (!localAppData) return null; - - const settingsPath = join( - localAppData, - 'Packages', - 'Microsoft.WindowsTerminal_8wekyb3d8bbwe', - 'LocalState', - 'settings.json' - ); - - if (existsSync(settingsPath)) { - const config = loadWindowsTerminalConfig(settingsPath); - if (config) { - 
log.debug('terminalConfig:detected', { terminal: 'Windows Terminal' }); - return config; - } - } - } catch (error) { - log.warn('terminalConfig:windowsTerminal:readFailed', { error }); - } - - return null; -} - -function detectLinuxTerminal(): TerminalConfig | null { - // Check common Linux terminals - const alacrittyConfig = loadAlacrittyConfig(); - if (alacrittyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Alacritty' }); - return alacrittyConfig; - } - - const kittyConfig = loadKittyConfig(); - if (kittyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Kitty' }); - return kittyConfig; - } - - const ghosttyConfig = loadGhosttyConfig(); - if (ghosttyConfig) { - log.debug('terminalConfig:detected', { terminal: 'Ghostty' }); - return ghosttyConfig; - } - - // GNOME Terminal uses dconf, which is harder to parse - // We could use dconf read, but it's complex - - return null; -} - -/** - * Load iTerm2 configuration from plist file. - * iTerm2 stores preferences at: ~/Library/Preferences/com.googlecode.iterm2.plist - */ -function loadiTerm2Config(): TerminalConfig | null { - try { - const plistPath = join(homedir(), 'Library', 'Preferences', 'com.googlecode.iterm2.plist'); - if (!existsSync(plistPath)) { - return null; - } - - // Use plutil to convert plist to JSON (macOS built-in) - let jsonContent: string; - try { - jsonContent = execSync(`plutil -convert json -o - "${plistPath}"`, { - encoding: 'utf8', - maxBuffer: 10 * 1024 * 1024, // 10MB - }); - } catch { - // If plutil fails, try reading as XML plist - return loadiTerm2ConfigXML(plistPath); - } - - let plist: any; - try { - plist = JSON.parse(jsonContent); - } catch (error) { - // Silently ignore plist parsing errors - the file may contain non-standard objects - return null; - } - - // iTerm2 stores color schemes in "New Bookmarks" -> "Color Presets" - // We need to find the default profile's color scheme - const newBookmarks = plist['New Bookmarks'] || []; - const defaultProfile = 
Array.isArray(newBookmarks) - ? newBookmarks.find((p: any) => p['Default Bookmark'] === 'Yes') || newBookmarks[0] - : newBookmarks; - - if (!defaultProfile) { - return null; - } - - const colorPresetName = - defaultProfile['Color Preset Name'] || defaultProfile['Custom Color Preset']; - if (!colorPresetName) { - return null; - } - - // Find the color preset - const customColorPresets = plist['Custom Color Presets'] || {}; - const preset = customColorPresets[colorPresetName] || {}; - - // Extract colors - const theme: TerminalTheme = {}; - - // Background and foreground - if (preset['Background Color']) { - theme.background = parseiTerm2Color(preset['Background Color']); - } - if (preset['Foreground Color']) { - theme.foreground = parseiTerm2Color(preset['Foreground Color']); - } - if (preset['Cursor Color']) { - theme.cursor = parseiTerm2Color(preset['Cursor Color']); - } - if (preset['Selection Color']) { - theme.selectionBackground = parseiTerm2Color(preset['Selection Color']); - } - - // ANSI colors - const ansiColors = [ - 'Ansi 0 Color', - 'Ansi 1 Color', - 'Ansi 2 Color', - 'Ansi 3 Color', - 'Ansi 4 Color', - 'Ansi 5 Color', - 'Ansi 6 Color', - 'Ansi 7 Color', - 'Ansi 8 Color', - 'Ansi 9 Color', - 'Ansi 10 Color', - 'Ansi 11 Color', - 'Ansi 12 Color', - 'Ansi 13 Color', - 'Ansi 14 Color', - 'Ansi 15 Color', - ]; - - const colorMap: Record = { - 'Ansi 0 Color': 'black', - 'Ansi 1 Color': 'red', - 'Ansi 2 Color': 'green', - 'Ansi 3 Color': 'yellow', - 'Ansi 4 Color': 'blue', - 'Ansi 5 Color': 'magenta', - 'Ansi 6 Color': 'cyan', - 'Ansi 7 Color': 'white', - 'Ansi 8 Color': 'brightBlack', - 'Ansi 9 Color': 'brightRed', - 'Ansi 10 Color': 'brightGreen', - 'Ansi 11 Color': 'brightYellow', - 'Ansi 12 Color': 'brightBlue', - 'Ansi 13 Color': 'brightMagenta', - 'Ansi 14 Color': 'brightCyan', - 'Ansi 15 Color': 'brightWhite', - }; - - for (const ansiKey of ansiColors) { - if (preset[ansiKey]) { - const colorKey = colorMap[ansiKey]; - if (colorKey) { - const 
parsedColor = parseiTerm2Color(preset[ansiKey]); - if (parsedColor) { - (theme as any)[colorKey] = parsedColor; - } - } - } - } - - // Font - if (defaultProfile['Normal Font']) { - const fontMatch = String(defaultProfile['Normal Font']).match(/^(.+?)\s+(\d+)$/); - if (fontMatch) { - theme.fontFamily = fontMatch[1]; - theme.fontSize = parseInt(fontMatch[2], 10); - } - } - - return { - terminal: 'iTerm2', - theme, - }; - } catch (error) { - log.warn('terminalConfig:iTerm2:parseFailed', { error }); - return null; - } -} - -/** - * Fallback: Try to parse iTerm2 plist as XML - */ -function loadiTerm2ConfigXML(plistPath: string): TerminalConfig | null { - try { - const xmlContent = readFileSync(plistPath, 'utf8'); - // Simple XML parsing for color values - // This is a basic implementation - could be improved - const colorRegex = - /([^<]+)<\/key>\s*[\s\S]*?Red Component<\/key>\s*([\d.]+)<\/real>[\s\S]*?Green Component<\/key>\s*([\d.]+)<\/real>[\s\S]*?Blue Component<\/key>\s*([\d.]+)<\/real>/g; - // This is complex - for now, return null and rely on JSON conversion - return null; - } catch { - return null; - } -} - -/** - * Parse iTerm2 color format (NSColor with Red/Green/Blue/Alpha components) - */ -function parseiTerm2Color(colorObj: any): string | undefined { - if (typeof colorObj === 'string') { - // Already a hex string - return colorObj; - } - - if (colorObj && typeof colorObj === 'object') { - // NSColor format: { "Red Component": 0.5, "Green Component": 0.5, "Blue Component": 0.5, "Alpha Component": 1.0 } - const r = Math.round((colorObj['Red Component'] || 0) * 255); - const g = Math.round((colorObj['Green Component'] || 0) * 255); - const b = Math.round((colorObj['Blue Component'] || 0) * 255); - return `#${r.toString(16).padStart(2, '0')}${g.toString(16).padStart(2, '0')}${b.toString(16).padStart(2, '0')}`; - } - - return undefined; -} - -/** - * Load Terminal.app configuration - */ -function loadTerminalAppConfig(): TerminalConfig | null { - try { - const 
plistPath = join(homedir(), 'Library', 'Preferences', 'com.apple.Terminal.plist'); - if (!existsSync(plistPath)) { - return null; - } - - let jsonContent: string; - try { - jsonContent = execSync(`plutil -convert json -o - "${plistPath}"`, { - encoding: 'utf8', - maxBuffer: 10 * 1024 * 1024, - }); - } catch { - return null; - } - - let plist: any; - try { - plist = JSON.parse(jsonContent); - } catch (error) { - // Silently ignore plist parsing errors - the file may contain non-standard objects - return null; - } - const windowSettings = plist['Window Settings'] || {}; - const defaultProfile = plist['Default Window Settings'] || 'Basic'; - - const profile = windowSettings[defaultProfile]; - if (!profile) { - return null; - } - - const theme: TerminalTheme = {}; - - if (profile['BackgroundColor']) { - theme.background = parseiTerm2Color(profile['BackgroundColor']); - } - if (profile['TextColor']) { - theme.foreground = parseiTerm2Color(profile['TextColor']); - } - if (profile['CursorColor']) { - theme.cursor = parseiTerm2Color(profile['CursorColor']); - } - - // Terminal.app uses similar ANSI color structure - const colorMap: Record = { - ANSIBlackColor: 'black', - ANSIRedColor: 'red', - ANSIGreenColor: 'green', - ANSIYellowColor: 'yellow', - ANSIBlueColor: 'blue', - ANSIMagentaColor: 'magenta', - ANSICyanColor: 'cyan', - ANSIWhiteColor: 'white', - ANSIBrightBlackColor: 'brightBlack', - ANSIBrightRedColor: 'brightRed', - ANSIBrightGreenColor: 'brightGreen', - ANSIBrightYellowColor: 'brightYellow', - ANSIBrightBlueColor: 'brightBlue', - ANSIBrightMagentaColor: 'brightMagenta', - ANSIBrightCyanColor: 'brightCyan', - ANSIBrightWhiteColor: 'brightWhite', - }; - - for (const [key, themeKey] of Object.entries(colorMap)) { - if (profile[key]) { - const parsedColor = parseiTerm2Color(profile[key]); - if (parsedColor) { - (theme as any)[themeKey] = parsedColor; - } - } - } - - if (profile['Font']) { - const fontMatch = String(profile['Font']).match(/^(.+?)\s+(\d+)$/); - if 
(fontMatch) { - theme.fontFamily = fontMatch[1]; - theme.fontSize = parseInt(fontMatch[2], 10); - } - } - - return { - terminal: 'Terminal.app', - theme, - }; - } catch (error) { - log.warn('terminalConfig:TerminalApp:parseFailed', { error }); - return null; - } -} - -/** - * Load Alacritty configuration (TOML format) - */ -function loadAlacrittyConfig(): TerminalConfig | null { - try { - const configPath = join(homedir(), '.config', 'alacritty', 'alacritty.toml'); - if (!existsSync(configPath)) { - // Try YAML format (older versions) - const yamlPath = join(homedir(), '.config', 'alacritty', 'alacritty.yml'); - if (existsSync(yamlPath)) { - return loadAlacrittyYAML(yamlPath); - } - return null; - } - - const content = readFileSync(configPath, 'utf8'); - return parseAlacrittyTOML(content); - } catch (error) { - log.warn('terminalConfig:Alacritty:parseFailed', { error }); - return null; - } -} - -/** - * Parse Alacritty TOML config (simplified parser) - */ -function parseAlacrittyTOML(content: string): TerminalConfig | null { - const theme: TerminalTheme = {}; - - // Simple TOML parsing - extract colors section - const colorsMatch = content.match(/\[colors\]\s*([\s\S]*?)(?=\[|$)/); - if (!colorsMatch) { - return null; - } - - const colorsSection = colorsMatch[1]; - - // Parse background/foreground - const bgMatch = colorsSection.match(/background\s*=\s*['"]([^'"]+)['"]/); - if (bgMatch) { - theme.background = bgMatch[1]; - } - - const fgMatch = colorsSection.match(/foreground\s*=\s*['"]([^'"]+)['"]/); - if (fgMatch) { - theme.foreground = fgMatch[1]; - } - - // Parse cursor - const cursorMatch = colorsSection.match(/cursor\s*=\s*['"]([^'"]+)['"]/); - if (cursorMatch) { - theme.cursor = cursorMatch[1]; - } - - // Parse ANSI colors (simplified - Alacritty uses nested structure) - const ansiColors = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white']; - const brightColors = [ - 'bright_black', - 'bright_red', - 'bright_green', - 'bright_yellow', - 
'bright_blue', - 'bright_magenta', - 'bright_cyan', - 'bright_white', - ]; - - const colorMap: Record = { - black: 'black', - red: 'red', - green: 'green', - yellow: 'yellow', - blue: 'blue', - magenta: 'magenta', - cyan: 'cyan', - white: 'white', - bright_black: 'brightBlack', - bright_red: 'brightRed', - bright_green: 'brightGreen', - bright_yellow: 'brightYellow', - bright_blue: 'brightBlue', - bright_magenta: 'brightMagenta', - bright_cyan: 'brightCyan', - bright_white: 'brightWhite', - }; - - for (const [alacrittyKey, themeKey] of Object.entries(colorMap)) { - const regex = new RegExp(`${alacrittyKey.replace('_', '[-_]')}\\s*=\\s*['"]([^'"]+)['"]`, 'i'); - const match = colorsSection.match(regex); - if (match && match[1]) { - (theme as any)[themeKey] = match[1]; - } - } - - // Parse font - const fontMatch = content.match(/\[font\]\s*([\s\S]*?)(?=\[|$)/); - if (fontMatch) { - const fontSection = fontMatch[1]; - const familyMatch = fontSection.match(/normal\s*=\s*\{[\s\S]*?family\s*=\s*['"]([^'"]+)['"]/); - const sizeMatch = fontSection.match(/size\s*=\s*(\d+)/); - if (familyMatch) { - theme.fontFamily = familyMatch[1]; - } - if (sizeMatch) { - theme.fontSize = parseInt(sizeMatch[1], 10); - } - } - - return { - terminal: 'Alacritty', - theme, - }; -} - -/** - * Parse Alacritty YAML config (simplified) - */ -function loadAlacrittyYAML(yamlPath: string): TerminalConfig | null { - try { - const content = readFileSync(yamlPath, 'utf8'); - // Very basic YAML parsing for colors - // For production, consider using a YAML parser library - const theme: TerminalTheme = {}; - - // Extract basic colors (simplified regex-based parsing) - const bgMatch = content.match(/background:\s*['"]?([^'"]+)['"]?/); - if (bgMatch) { - theme.background = bgMatch[1]; - } - - const fgMatch = content.match(/foreground:\s*['"]?([^'"]+)['"]?/); - if (fgMatch) { - theme.foreground = fgMatch[1]; - } - - return { - terminal: 'Alacritty', - theme, - }; - } catch { - return null; - } -} - -/** - * 
Load Ghostty configuration - */ -function loadGhosttyConfig(): TerminalConfig | null { - try { - const configPath = join(homedir(), '.config', 'ghostty', 'config'); - if (!existsSync(configPath)) { - return null; - } - - const content = readFileSync(configPath, 'utf8'); - return parseGhosttyConfig(content); - } catch (error) { - log.warn('terminalConfig:Ghostty:parseFailed', { error }); - return null; - } -} - -/** - * Parse Ghostty config (key = value format) - */ -function parseGhosttyConfig(content: string): TerminalConfig | null { - const theme: TerminalTheme = {}; - - // Ghostty uses simple key = value format - const lines = content.split('\n'); - for (const line of lines) { - const trimmed = line.trim(); - if (trimmed.startsWith('#') || !trimmed.includes('=')) { - continue; - } - - const [key, ...valueParts] = trimmed.split('='); - const value = valueParts - .join('=') - .trim() - .replace(/^["']|["']$/g, ''); - - switch (key.trim()) { - case 'background': - theme.background = value; - break; - case 'foreground': - theme.foreground = value; - break; - case 'cursor': - theme.cursor = value; - break; - case 'color0': - theme.black = value; - break; - case 'color1': - theme.red = value; - break; - case 'color2': - theme.green = value; - break; - case 'color3': - theme.yellow = value; - break; - case 'color4': - theme.blue = value; - break; - case 'color5': - theme.magenta = value; - break; - case 'color6': - theme.cyan = value; - break; - case 'color7': - theme.white = value; - break; - case 'color8': - theme.brightBlack = value; - break; - case 'color9': - theme.brightRed = value; - break; - case 'color10': - theme.brightGreen = value; - break; - case 'color11': - theme.brightYellow = value; - break; - case 'color12': - theme.brightBlue = value; - break; - case 'color13': - theme.brightMagenta = value; - break; - case 'color14': - theme.brightCyan = value; - break; - case 'color15': - theme.brightWhite = value; - break; - case 'font': - theme.fontFamily = 
value; - break; - case 'font-size': - theme.fontSize = parseInt(value, 10); - break; - } - } - - return { - terminal: 'Ghostty', - theme, - }; -} - -/** - * Load Kitty configuration - */ -function loadKittyConfig(): TerminalConfig | null { - try { - const configPath = join(homedir(), '.config', 'kitty', 'kitty.conf'); - if (!existsSync(configPath)) { - return null; - } - - const content = readFileSync(configPath, 'utf8'); - return parseKittyConfig(content); - } catch (error) { - log.warn('terminalConfig:Kitty:parseFailed', { error }); - return null; - } -} - -/** - * Parse Kitty config (similar to Ghostty format) - */ -function parseKittyConfig(content: string): TerminalConfig | null { - const theme: TerminalTheme = {}; - - const lines = content.split('\n'); - for (const line of lines) { - const trimmed = line.trim(); - if (trimmed.startsWith('#') || !trimmed.includes(' ')) { - continue; - } - - const [key, ...valueParts] = trimmed.split(/\s+/); - const value = valueParts.join(' ').trim(); - - switch (key) { - case 'background': - theme.background = value; - break; - case 'foreground': - theme.foreground = value; - break; - case 'cursor': - theme.cursor = value; - break; - case 'color0': - theme.black = value; - break; - case 'color1': - theme.red = value; - break; - case 'color2': - theme.green = value; - break; - case 'color3': - theme.yellow = value; - break; - case 'color4': - theme.blue = value; - break; - case 'color5': - theme.magenta = value; - break; - case 'color6': - theme.cyan = value; - break; - case 'color7': - theme.white = value; - break; - case 'color8': - theme.brightBlack = value; - break; - case 'color9': - theme.brightRed = value; - break; - case 'color10': - theme.brightGreen = value; - break; - case 'color11': - theme.brightYellow = value; - break; - case 'color12': - theme.brightBlue = value; - break; - case 'color13': - theme.brightMagenta = value; - break; - case 'color14': - theme.brightCyan = value; - break; - case 'color15': - 
theme.brightWhite = value; - break; - case 'font_family': - theme.fontFamily = value; - break; - case 'font_size': - theme.fontSize = parseInt(value, 10); - break; - } - } - - return { - terminal: 'Kitty', - theme, - }; -} - -/** - * Load Windows Terminal configuration - */ -function loadWindowsTerminalConfig(settingsPath: string): TerminalConfig | null { - try { - const content = readFileSync(settingsPath, 'utf8'); - const config = JSON.parse(content); - - // Windows Terminal stores profiles in "profiles.list" - const profiles = config.profiles?.list || []; - const defaultProfile = profiles.find((p: any) => p.default === true) || profiles[0]; - - if (!defaultProfile) { - return null; - } - - const theme: TerminalTheme = {}; - - // Windows Terminal uses color schemes - const colorSchemeName = defaultProfile.colorScheme; - if (colorSchemeName && config.schemes) { - const scheme = config.schemes.find((s: any) => s.name === colorSchemeName); - if (scheme) { - theme.background = scheme.background; - theme.foreground = scheme.foreground; - theme.black = scheme.black; - theme.red = scheme.red; - theme.green = scheme.green; - theme.yellow = scheme.yellow; - theme.blue = scheme.blue; - theme.magenta = scheme.magenta; - theme.cyan = scheme.cyan; - theme.white = scheme.white; - theme.brightBlack = scheme.brightBlack; - theme.brightRed = scheme.brightRed; - theme.brightGreen = scheme.brightGreen; - theme.brightYellow = scheme.brightYellow; - theme.brightBlue = scheme.brightBlue; - theme.brightMagenta = scheme.brightMagenta; - theme.brightCyan = scheme.brightCyan; - theme.brightWhite = scheme.brightWhite; - } - } - - // Font - if (defaultProfile.font) { - theme.fontFamily = defaultProfile.font.face; - theme.fontSize = defaultProfile.font.size; - } - - return { - terminal: 'Windows Terminal', - theme, - }; - } catch (error) { - log.warn('terminalConfig:WindowsTerminal:parseFailed', { error }); - return null; - } -} diff --git a/src/main/services/TerminalSnapshotService.ts 
b/src/main/services/TerminalSnapshotService.ts deleted file mode 100644 index 7f67196c0..000000000 --- a/src/main/services/TerminalSnapshotService.ts +++ /dev/null @@ -1,164 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { app } from 'electron'; -import { log } from '../lib/logger'; -import type { TerminalSnapshotPayload } from '../types/terminalSnapshot'; -import { TERMINAL_SNAPSHOT_VERSION } from '../types/terminalSnapshot'; - -interface StoredSnapshot extends TerminalSnapshotPayload { - bytes: number; -} - -const MAX_SNAPSHOT_BYTES = 8 * 1024 * 1024; -const MAX_TOTAL_BYTES = 64 * 1024 * 1024; - -function resolveBaseDir(): string { - const override = process.env.EMDASH_TERMINAL_SNAPSHOT_DIR; - if (override && override.trim().length > 0) { - return path.resolve(override); - } - try { - return path.join(app.getPath('userData'), 'terminal-snapshots'); - } catch (error) { - log.warn('terminalSnapshotService: unable to resolve userData path, using cwd fallback', { - error, - }); - return path.join(process.cwd(), '.emdash-terminal-snapshots'); - } -} - -const BASE_DIR = resolveBaseDir(); - -function snapshotPath(id: string): string { - const safe = id.replace(/[^a-zA-Z0-9._-]/g, '_'); - return path.join(BASE_DIR, `${safe}.json`); -} - -async function ensureDir(): Promise { - await fs.promises.mkdir(BASE_DIR, { recursive: true }); -} - -async function readSnapshotFile(filePath: string): Promise { - let raw: string; - try { - raw = await fs.promises.readFile(filePath, 'utf8'); - } catch (error) { - if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') { - log.warn('terminalSnapshotService: failed to read snapshot', { filePath, error }); - } - return null; - } - - try { - const parsed = JSON.parse(raw) as TerminalSnapshotPayload; - if (parsed.version !== TERMINAL_SNAPSHOT_VERSION) { - return null; - } - const bytes = Buffer.byteLength(raw, 'utf8'); - return { ...parsed, bytes }; - } catch (error) { - log.warn('terminalSnapshotService: invalid snapshot 
JSON', { - filePath, - error, - bytes: Buffer.byteLength(raw, 'utf8'), - }); - return null; - } -} - -async function removeFile(filePath: string): Promise { - try { - await fs.promises.unlink(filePath); - } catch (error) { - if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') { - log.warn('terminalSnapshotService: failed to delete snapshot', { filePath, error }); - } - } -} - -async function listSnapshots(): Promise< - Array<{ id: string; path: string; stats: StoredSnapshot }> -> { - try { - const entries = await fs.promises.readdir(BASE_DIR); - const result: Array<{ id: string; path: string; stats: StoredSnapshot }> = []; - for (const entry of entries) { - if (!entry.endsWith('.json')) continue; - const filePath = path.join(BASE_DIR, entry); - const stats = await readSnapshotFile(filePath); - if (stats) { - const id = entry.replace(/\.json$/, ''); - result.push({ id, path: filePath, stats }); - } - } - return result; - } catch (error) { - if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') return []; - log.warn('terminalSnapshotService: failed to list snapshots', { error }); - return []; - } -} - -class TerminalSnapshotService { - async getSnapshot(id: string): Promise { - const record = await readSnapshotFile(snapshotPath(id)); - return record ? 
{ ...record } : null; - } - - async saveSnapshot( - id: string, - payload: TerminalSnapshotPayload - ): Promise<{ ok: boolean; error?: string }> { - try { - if (payload.version !== TERMINAL_SNAPSHOT_VERSION) { - return { ok: false, error: 'Unsupported snapshot version' }; - } - - const json = JSON.stringify(payload); - const bytes = Buffer.byteLength(json, 'utf8'); - if (bytes > MAX_SNAPSHOT_BYTES) { - return { ok: false, error: 'Snapshot size exceeds per-task limit' }; - } - - await ensureDir(); - await fs.promises.writeFile(snapshotPath(id), json, 'utf8'); - await this.pruneIfNeeded(id); - return { ok: true }; - } catch (error) { - log.error('terminalSnapshotService: failed to save snapshot', { id, error }); - return { ok: false, error: (error as Error)?.message ?? String(error) }; - } - } - - async deleteSnapshot(id: string): Promise { - await removeFile(snapshotPath(id)); - } - - private async pruneIfNeeded(recentId: string): Promise { - const records = await listSnapshots(); - if (records.length === 0) return; - - let total = records.reduce((sum, rec) => sum + rec.stats.bytes, 0); - if (total <= MAX_TOTAL_BYTES) return; - // Sort by oldest first, prefer to keep the most recent snapshot we just wrote - const ordered = records - .filter((rec) => rec.id !== recentId) - .sort((a, b) => Date.parse(a.stats.createdAt) - Date.parse(b.stats.createdAt)); - - for (const entry of ordered) { - if (total <= MAX_TOTAL_BYTES) break; - await removeFile(entry.path); - total -= entry.stats.bytes; - } - - // As a last resort, keep only the recent snapshot - if (total > MAX_TOTAL_BYTES) { - for (const entry of records) { - if (entry.id === recentId) continue; - await removeFile(entry.path); - } - } - } -} - -export const terminalSnapshotService = new TerminalSnapshotService(); diff --git a/src/main/services/WorktreePoolService.ts b/src/main/services/WorktreePoolService.ts deleted file mode 100644 index db0d0f5ca..000000000 --- a/src/main/services/WorktreePoolService.ts +++ 
/dev/null @@ -1,658 +0,0 @@ -import { execFile } from 'child_process'; -import { promisify } from 'util'; -import path from 'path'; -import fs from 'fs'; -import crypto from 'crypto'; -import { log } from '../lib/logger'; -import { worktreeService, type WorktreeInfo } from './WorktreeService'; - -const execFileAsync = promisify(execFile); - -interface ReserveWorktree { - id: string; - path: string; - branch: string; - projectId: string; - projectPath: string; - baseRef: string; - createdAt: string; -} - -interface ClaimResult { - worktree: WorktreeInfo; - needsBaseRefSwitch: boolean; -} - -/** - * WorktreePoolService maintains a pool of pre-created "reserve" worktrees - * that can be instantly claimed when users create new tasks. - * - * This eliminates the 3-7 second wait for worktree creation by: - * 1. Pre-creating reserve worktrees in the background when projects are opened - * 2. Instantly renaming reserves when tasks are created - * 3. Replenishing the pool in the background after claims - */ -export class WorktreePoolService { - // Keyed by `${projectId}::${baseRef}` to keep reserves base-ref specific. 
- private reserves = new Map(); - private creationInProgress = new Set(); - private readonly RESERVE_PREFIX = '_reserve'; - // Reserves older than this are considered stale and will be recreated - // 30 minutes is reasonable since users don't create tasks that frequently - private readonly MAX_RESERVE_AGE_MS = 30 * 60 * 1000; // 30 minutes - - /** Generate a unique hash for reserve identification */ - private generateReserveHash(): string { - const bytes = crypto.randomBytes(4); - return bytes.readUIntBE(0, 4).toString(36).slice(0, 6).padStart(6, '0'); - } - - /** Get the reserve worktree path for a project */ - private getReservePath(projectPath: string, hash: string): string { - return path.join(projectPath, '..', `worktrees/${this.RESERVE_PREFIX}-${hash}`); - } - - /** Get the reserve branch name */ - private getReserveBranch(hash: string): string { - return `${this.RESERVE_PREFIX}/${hash}`; - } - - private normalizeBaseRef(baseRef?: string): string { - const trimmed = (baseRef || '').trim(); - return trimmed.length > 0 ? trimmed : 'HEAD'; - } - - private getReserveKey(projectId: string, baseRef?: string): string { - return `${projectId}::${this.normalizeBaseRef(baseRef)}`; - } - - private async refreshRefsForReserveCreation( - projectPath: string, - projectId: string - ): Promise { - try { - await execFileAsync('git', ['fetch', '--all', '--prune'], { - cwd: projectPath, - timeout: 15000, - }); - } catch (error) { - log.warn('WorktreePool: Failed to refresh refs during reserve creation', { - projectId, - error, - }); - } - } - - /** - * Resolve HEAD or bare branch names to their remote tracking counterpart. - * After `refreshRefsForReserveCreation` fetches all refs, this ensures the - * worktree is created from the freshly-fetched remote ref rather than a - * potentially stale local branch. 
- */ - private async resolveToRemoteRef(projectPath: string, baseRef: string): Promise { - // Already a remote tracking ref — use as-is - if (baseRef.startsWith('origin/')) return baseRef; - - try { - const branchName = - baseRef === 'HEAD' - ? ( - await execFileAsync('git', ['symbolic-ref', '--short', 'HEAD'], { - cwd: projectPath, - }) - ).stdout.trim() - : baseRef; - - // Verify the remote tracking ref exists (it should after fetch --all) - await execFileAsync('git', ['rev-parse', '--verify', `refs/remotes/origin/${branchName}`], { - cwd: projectPath, - }); - return `origin/${branchName}`; - } catch { - return baseRef; // Fallback to original if resolution fails - } - } - - /** Generate stable ID from path */ - private stableIdFromPath(worktreePath: string): string { - const abs = path.resolve(worktreePath); - const h = crypto.createHash('sha1').update(abs).digest('hex').slice(0, 12); - return `wt-${h}`; - } - - /** Check if a reserve is stale (too old to be useful) */ - private isReserveStale(reserve: ReserveWorktree): boolean { - const age = Date.now() - new Date(reserve.createdAt).getTime(); - return age > this.MAX_RESERVE_AGE_MS; - } - - /** Check if a fresh reserve exists for a project */ - hasReserve(projectId: string): boolean { - for (const [key, reserve] of this.reserves.entries()) { - if (!key.startsWith(`${projectId}::`)) continue; - if (this.isReserveStale(reserve)) { - this.reserves.delete(key); - this.cleanupReserve(reserve).catch(() => {}); - continue; - } - return true; - } - return false; - } - - /** Get the reserve for a project (if any) */ - getReserve(projectId: string): ReserveWorktree | undefined { - for (const [key, reserve] of this.reserves.entries()) { - if (!key.startsWith(`${projectId}::`)) continue; - if (this.isReserveStale(reserve)) { - this.reserves.delete(key); - this.cleanupReserve(reserve).catch(() => {}); - continue; - } - return reserve; - } - return undefined; - } - - /** - * Ensure a reserve worktree exists for a project. 
- * Creates one in the background if not present. - */ - async ensureReserve(projectId: string, projectPath: string, baseRef?: string): Promise { - const reserveKey = this.getReserveKey(projectId, baseRef); - - // Creation already in progress - if (this.creationInProgress.has(reserveKey)) { - return; - } - - // Check existing reserve - const existing = this.reserves.get(reserveKey); - if (existing) { - if (!this.isReserveStale(existing)) { - return; // Fresh reserve exists - } - // Stale reserve - clean it up and create fresh one - this.reserves.delete(reserveKey); - this.cleanupReserve(existing).catch(() => {}); - } - - // Start background creation - this.creationInProgress.add(reserveKey); - - try { - await this.createReserve(projectId, projectPath, this.normalizeBaseRef(baseRef)); - } catch (error) { - log.warn('WorktreePool: Failed to create reserve', { projectId, baseRef, error }); - } finally { - this.creationInProgress.delete(reserveKey); - } - } - - /** - * Create a reserve worktree for a project - */ - private async createReserve( - projectId: string, - projectPath: string, - baseRef: string - ): Promise { - const hash = this.generateReserveHash(); - const reservePath = this.getReservePath(projectPath, hash); - const reserveBranch = this.getReserveBranch(hash); - - // Ensure worktrees directory exists - const worktreesDir = path.dirname(reservePath); - if (!fs.existsSync(worktreesDir)) { - fs.mkdirSync(worktreesDir, { recursive: true }); - } - - // Keep reserve refs fresh in the background so claim remains instant. - await this.refreshRefsForReserveCreation(projectPath, projectId); - - // Resolve HEAD/local refs to remote tracking refs (freshly fetched) - // so the worktree is created from up-to-date code, not a stale local branch. 
- const resolvedRef = await this.resolveToRemoteRef(projectPath, baseRef); - - // Create the worktree - await execFileAsync('git', ['worktree', 'add', '-b', reserveBranch, reservePath, resolvedRef], { - cwd: projectPath, - }); - - const reserveId = this.stableIdFromPath(reservePath); - const reserve: ReserveWorktree = { - id: reserveId, - path: reservePath, - branch: reserveBranch, - projectId, - projectPath, - baseRef, - createdAt: new Date().toISOString(), - }; - - this.reserves.set(this.getReserveKey(projectId, baseRef), reserve); - } - - /** - * Claim a reserve worktree for a new task. - * Renames the reserve to match the task name and returns it instantly. - */ - async claimReserve( - projectId: string, - projectPath: string, - taskName: string, - requestedBaseRef?: string - ): Promise { - const resolvedBaseRef = this.normalizeBaseRef(requestedBaseRef); - const reserveKey = this.getReserveKey(projectId, resolvedBaseRef); - const reserve = this.reserves.get(reserveKey); - if (!reserve) { - this.replenishReserve(projectId, projectPath, resolvedBaseRef); - return null; - } - - // Check if reserve is stale (too old) - if (this.isReserveStale(reserve)) { - // Remove stale reserve and clean it up in background - this.reserves.delete(reserveKey); - this.cleanupReserve(reserve).catch(() => {}); - // Start creating a fresh reserve for next time - this.replenishReserve(projectId, projectPath, resolvedBaseRef); - return null; // Caller will use fallback (sync creation) - } - - // Remove from pool immediately to prevent double-claims - this.reserves.delete(reserveKey); - - try { - const result = await this.transformReserve(reserve, taskName); - - // Start background replenishment - this.replenishReserve(projectId, projectPath, resolvedBaseRef); - - return result; - } catch (error) { - log.error('WorktreePool: Failed to claim reserve', { projectId, taskName, error }); - // Try to clean up the reserve on failure - this.cleanupReserve(reserve).catch(() => {}); - return null; 
- } - } - - /** - * Transform a reserve worktree into a task worktree - */ - private async transformReserve(reserve: ReserveWorktree, taskName: string): Promise { - const { getAppSettings } = await import('../settings'); - const settings = getAppSettings(); - const prefix = settings?.repository?.branchPrefix || 'emdash'; - - // Generate new names - const sluggedName = this.slugify(taskName); - const hash = this.generateShortHash(); - const newBranch = `${prefix}/${sluggedName}-${hash}`; - const newPath = path.join(reserve.projectPath, '..', `worktrees/${sluggedName}-${hash}`); - const newId = this.stableIdFromPath(newPath); - - // Move the worktree (instant operation) - await execFileAsync('git', ['worktree', 'move', reserve.path, newPath], { - cwd: reserve.projectPath, - }); - - // Update reserve path so cleanup uses correct location if we fail later - reserve.path = newPath; - - // Rename the branch (instant operation) - await execFileAsync('git', ['branch', '-m', reserve.branch, newBranch], { - cwd: newPath, - }); - - // Preserve project-specific gitignored files from project to worktree - try { - await worktreeService.preserveProjectFilesToWorktree(reserve.projectPath, newPath); - } catch (preserveErr) { - log.warn('WorktreePool: Failed to preserve files', { error: preserveErr }); - } - - // Push branch to remote in background (non-blocking) - this.pushBranchAsync(newPath, newBranch, settings); - - const worktree: WorktreeInfo = { - id: newId, - name: taskName, - branch: newBranch, - path: newPath, - projectId: reserve.projectId, - status: 'active', - createdAt: new Date().toISOString(), - }; - - // Register with worktreeService - worktreeService.registerWorktree(worktree); - - return { worktree, needsBaseRefSwitch: false }; - } - - /** Replenish reserve in background after claiming */ - private replenishReserve(projectId: string, projectPath: string, baseRef?: string): void { - // Fire and forget - this.ensureReserve(projectId, projectPath, 
baseRef).catch((error) => { - log.warn('WorktreePool: Failed to replenish reserve', { projectId, error }); - }); - } - - /** Push branch to remote asynchronously */ - private async pushBranchAsync( - worktreePath: string, - branchName: string, - settings: any - ): Promise { - if (settings?.repository?.pushOnCreate === false) { - return; - } - - try { - // Get remote name - const { stdout: remotesOut } = await execFileAsync('git', ['remote'], { - cwd: worktreePath, - }); - const remotes = remotesOut.trim().split('\n').filter(Boolean); - const remote = remotes.includes('origin') ? 'origin' : remotes[0]; - - if (!remote) { - return; - } - - await execFileAsync('git', ['push', '--set-upstream', remote, branchName], { - cwd: worktreePath, - timeout: 60000, - }); - } catch { - // Push failures are non-critical, ignore silently - } - } - - /** Cleanup a reserve worktree */ - private async cleanupReserve(reserve: ReserveWorktree): Promise { - try { - await execFileAsync('git', ['worktree', 'remove', '--force', reserve.path], { - cwd: reserve.projectPath, - }); - } catch { - // Worktree might already be gone; continue and try branch cleanup. 
- } - - try { - // Also delete the branch - await execFileAsync('git', ['branch', '-D', reserve.branch], { - cwd: reserve.projectPath, - }); - } catch { - // Cleanup failures are non-critical - } - } - - /** Remove reserve for a project (e.g., when project is removed) */ - async removeReserve(projectId: string, projectPath?: string): Promise { - const reservesForProject = Array.from(this.reserves.entries()).filter(([key]) => - key.startsWith(`${projectId}::`) - ); - const resolvedProjectPath = projectPath || reservesForProject[0]?.[1].projectPath; - - await Promise.all( - reservesForProject.map(async ([key, reserve]) => { - this.reserves.delete(key); - await this.cleanupReserve(reserve); - }) - ); - - if (!resolvedProjectPath) { - return; - } - - await this.cleanupReserveArtifactsForProject(resolvedProjectPath); - } - - private async cleanupReserveArtifactsForProject(projectPath: string): Promise { - const normalizedProjectPath = path.resolve(projectPath); - const reserveBranches = await this.listReserveBranches(normalizedProjectPath); - const remainingBranches = new Set(reserveBranches); - - for (const reserve of this.findReserveDirectoriesForProject( - normalizedProjectPath, - remainingBranches - )) { - try { - await execFileAsync('git', ['worktree', 'remove', '--force', reserve.path], { - cwd: normalizedProjectPath, - }); - } catch { - // Best effort: if git cleanup fails, remove directory directly. - try { - fs.rmSync(reserve.path, { recursive: true, force: true }); - } catch { - // Ignore secondary cleanup failure. - } - } - - if (reserve.branch) { - await this.deleteBranch(normalizedProjectPath, reserve.branch); - remainingBranches.delete(reserve.branch); - } - } - - // Clean up any remaining reserve branches even if the worktree directory is already gone. 
- for (const branch of remainingBranches) { - await this.deleteBranch(normalizedProjectPath, branch); - } - } - - private findReserveDirectoriesForProject( - projectPath: string, - reserveBranches: Set - ): Array<{ path: string; branch: string | null }> { - const worktreesDir = path.join(projectPath, '..', 'worktrees'); - if (!fs.existsSync(worktreesDir)) { - return []; - } - - const result: Array<{ path: string; branch: string | null }> = []; - try { - const entries = fs.readdirSync(worktreesDir, { withFileTypes: true }); - for (const entry of entries) { - if (!entry.isDirectory() || !entry.name.startsWith(`${this.RESERVE_PREFIX}-`)) { - continue; - } - - const reservePath = path.join(worktreesDir, entry.name); - const ownerPath = this.getMainRepoPathFromWorktree(reservePath); - const branch = this.getReserveBranchFromDirectoryName(entry.name); - const ownsReserve = ownerPath ? path.resolve(ownerPath) === projectPath : false; - const branchBelongsToProject = branch ? reserveBranches.has(branch) : false; - - if (!ownsReserve && !branchBelongsToProject) { - continue; - } - - result.push({ path: reservePath, branch }); - } - } catch { - // Ignore unreadable worktrees directory. 
- } - - return result; - } - - private getReserveBranchFromDirectoryName(name: string): string | null { - const branchMatch = name.match(/^_reserve-(.+)$/); - if (!branchMatch) return null; - return `_reserve/${branchMatch[1]}`; - } - - private async listReserveBranches(projectPath: string): Promise { - try { - const { stdout } = await execFileAsync( - 'git', - ['for-each-ref', '--format=%(refname:short)', `refs/heads/${this.RESERVE_PREFIX}`], - { cwd: projectPath } - ); - return stdout - .trim() - .split('\n') - .map((line) => line.trim()) - .filter((line) => line.startsWith(`${this.RESERVE_PREFIX}/`)); - } catch { - return []; - } - } - - private async deleteBranch(projectPath: string, branchName: string): Promise { - try { - await execFileAsync('git', ['branch', '-D', branchName], { cwd: projectPath }); - } catch { - // Branch may not exist or still be attached to a worktree. - } - } - - private getMainRepoPathFromWorktree(worktreePath: string): string | null { - const gitDirPath = path.join(worktreePath, '.git'); - if (!fs.existsSync(gitDirPath)) { - return null; - } - - try { - const gitDirContent = fs.readFileSync(gitDirPath, 'utf8'); - const match = gitDirContent.match(/gitdir:\s*(.+)/); - if (!match) { - return null; - } - - const gitWorktreePath = match[1].trim(); - const resolvedGitWorktreePath = path.isAbsolute(gitWorktreePath) - ? gitWorktreePath - : path.resolve(worktreePath, gitWorktreePath); - const mainRepoPath = resolvedGitWorktreePath.replace( - /[\\\\/]\.git[\\\\/]worktrees[\\\\/].*$/, - '' - ); - if (mainRepoPath !== resolvedGitWorktreePath) { - return mainRepoPath; - } - - // Fallback for unexpected gitdir layouts. 
- return resolvedGitWorktreePath.replace(/[\\\\/]\.git$/, ''); - } catch { - return null; - } - } - - /** Cleanup all reserves (e.g., on app shutdown) */ - async cleanup(): Promise { - for (const [projectId, reserve] of this.reserves) { - try { - await this.cleanupReserve(reserve); - } catch (error) { - log.warn('WorktreePool: Failed to cleanup reserve on shutdown', { projectId, error }); - } - } - this.reserves.clear(); - } - - /** - * Clean up orphaned reserve worktrees from previous sessions. - * Called on app startup to handle reserves left behind from crashes or forced quits. - * Runs in background and doesn't block app startup. - */ - async cleanupOrphanedReserves(projectPaths: string[] = []): Promise { - // Small delay to not compete with critical startup tasks - await new Promise((resolve) => setTimeout(resolve, 2000)); - - // Find all worktree directories that might contain reserves - const homedir = require('os').homedir(); - const projectWorktreeDirs = projectPaths.map((projectPath) => - path.join(projectPath, '..', 'worktrees') - ); - const possibleWorktreeDirs = [ - ...projectWorktreeDirs, - path.join(homedir, 'cursor', 'worktrees'), - path.join(homedir, 'Documents', 'worktrees'), - path.join(homedir, 'Projects', 'worktrees'), - path.join(homedir, 'code', 'worktrees'), - path.join(homedir, 'dev', 'worktrees'), - ]; - const uniqueWorktreeDirs = [...new Set(possibleWorktreeDirs.map((dir) => path.resolve(dir)))]; - - // Collect all orphaned reserves first (fast sync scan) - const orphanedReserves: { path: string; name: string }[] = []; - for (const worktreesDir of uniqueWorktreeDirs) { - if (!fs.existsSync(worktreesDir)) continue; - try { - const entries = fs.readdirSync(worktreesDir, { withFileTypes: true }); - for (const entry of entries) { - if (entry.isDirectory() && entry.name.startsWith(this.RESERVE_PREFIX)) { - orphanedReserves.push({ - path: path.join(worktreesDir, entry.name), - name: entry.name, - }); - } - } - } catch { - // Ignore unreadable 
directories - } - } - - if (orphanedReserves.length === 0) { - return; - } - - // Clean up all reserves in parallel (silently) - await Promise.allSettled( - orphanedReserves.map((reserve) => this.cleanupOrphanedReserve(reserve.path, reserve.name)) - ); - } - - /** Clean up a single orphaned reserve */ - private async cleanupOrphanedReserve(reservePath: string, name: string): Promise { - try { - // Try to find the parent git repo to properly remove the worktree - const mainRepoPath = this.getMainRepoPathFromWorktree(reservePath); - if (mainRepoPath && fs.existsSync(mainRepoPath)) { - // Remove worktree via git - await execFileAsync('git', ['worktree', 'remove', '--force', reservePath], { - cwd: mainRepoPath, - }); - - // Try to remove the reserve branch - const branchName = this.getReserveBranchFromDirectoryName(name); - if (branchName) { - await this.deleteBranch(mainRepoPath, branchName); - } - - return true; - } - - // Fallback: just remove the directory - fs.rmSync(reservePath, { recursive: true, force: true }); - return true; - } catch { - return false; - } - } - - /** Slugify task name */ - private slugify(name: string): string { - return name - .toLowerCase() - .replace(/[^a-z0-9-]/g, '-') - .replace(/-+/g, '-') - .replace(/^-|-$/g, ''); - } - - /** Generate short hash */ - private generateShortHash(): string { - const bytes = crypto.randomBytes(3); - return bytes.readUIntBE(0, 3).toString(36).slice(0, 3).padStart(3, '0'); - } -} - -export const worktreePoolService = new WorktreePoolService(); diff --git a/src/main/services/WorktreeService.ts b/src/main/services/WorktreeService.ts deleted file mode 100644 index 3ae55fa30..000000000 --- a/src/main/services/WorktreeService.ts +++ /dev/null @@ -1,1252 +0,0 @@ -import { execFile } from 'child_process'; -import { log } from '../lib/logger'; -import { promisify } from 'util'; -import path from 'path'; -import fs from 'fs'; -import crypto from 'crypto'; -import { projectSettingsService } from 
'./ProjectSettingsService'; -import { minimatch } from 'minimatch'; -import { errorTracking } from '../errorTracking'; - -type BaseRefInfo = { remote: string; branch: string; fullRef: string }; - -const execFileAsync = promisify(execFile); - -export interface WorktreeInfo { - id: string; - name: string; - branch: string; - path: string; - projectId: string; - status: 'active' | 'paused' | 'completed' | 'error'; - createdAt: string; - lastActivity?: string; -} - -export interface PreserveResult { - copied: string[]; - skipped: string[]; -} - -/** Default patterns for files to preserve when creating worktrees */ -const DEFAULT_PRESERVE_PATTERNS = [ - '.env', - '.env.keys', - '.env.local', - '.env.*.local', - '.envrc', - 'docker-compose.override.yml', -]; - -/** Default path segments to exclude from preservation */ -const DEFAULT_EXCLUDE_PATTERNS = [ - 'node_modules', - '.git', - 'vendor', - '.cache', - 'dist', - 'build', - '.next', - '.nuxt', - '__pycache__', - '.venv', - 'venv', -]; - -/** Project-level config stored in .emdash.json */ -interface EmdashConfig { - preservePatterns?: string[]; -} - -export class WorktreeService { - private worktrees = new Map(); - - private async cleanupWorktreeDirectory(pathToRemove: string, projectPath: string): Promise { - if (!fs.existsSync(pathToRemove)) { - return; - } - - const normalizedPathToRemove = path.resolve(pathToRemove); - const normalizedProjectPath = path.resolve(projectPath); - - if (normalizedPathToRemove === normalizedProjectPath) { - log.error(`CRITICAL: Prevented filesystem removal of main repository! 
Path: ${pathToRemove}`); - return; - } - - const isLikelyWorktree = - pathToRemove.includes('/worktrees/') || - pathToRemove.includes('\\worktrees\\') || - pathToRemove.includes('/.conductor/') || - pathToRemove.includes('\\.conductor\\') || - pathToRemove.includes('/.cursor/worktrees/') || - pathToRemove.includes('\\.cursor\\worktrees\\'); - - if (!isLikelyWorktree) { - log.warn( - `Path doesn't appear to be a worktree directory, skipping filesystem removal: ${pathToRemove}` - ); - return; - } - - try { - await fs.promises.rm(pathToRemove, { recursive: true, force: true }); - } catch (rmErr: any) { - if (rmErr && (rmErr.code === 'EACCES' || rmErr.code === 'EPERM')) { - try { - if (process.platform === 'win32') { - await execFileAsync('cmd', ['/c', 'attrib', '-R', '/S', '/D', pathToRemove + '\\*']); - } else { - await execFileAsync('chmod', ['-R', 'u+w', pathToRemove]); - } - } catch (permErr) { - log.warn('Failed to adjust permissions for worktree cleanup:', permErr); - } - try { - await fs.promises.rm(pathToRemove, { recursive: true, force: true }); - } catch (retryErr) { - log.warn('Failed to cleanup worktree directory after permission fix:', retryErr); - } - } else { - log.warn('Failed to cleanup worktree directory:', rmErr); - } - } - } - - /** - * Read .emdash.json config from project root - */ - private readProjectConfig(projectPath: string): EmdashConfig | null { - try { - const configPath = path.join(projectPath, '.emdash.json'); - if (!fs.existsSync(configPath)) { - return null; - } - const content = fs.readFileSync(configPath, 'utf8'); - return JSON.parse(content) as EmdashConfig; - } catch { - return null; - } - } - - /** - * Get preserve patterns for a project (config or defaults) - */ - private getPreservePatterns(projectPath: string): string[] { - const config = this.readProjectConfig(projectPath); - if (config?.preservePatterns && Array.isArray(config.preservePatterns)) { - return config.preservePatterns; - } - return DEFAULT_PRESERVE_PATTERNS; - } 
- - /** - * Preserve project files into a worktree using project config (or defaults). - */ - async preserveProjectFilesToWorktree( - projectPath: string, - worktreePath: string - ): Promise { - const patterns = this.getPreservePatterns(projectPath); - return this.preserveFilesToWorktree(projectPath, worktreePath, patterns); - } - - /** Slugify task name to make it shell-safe */ - private slugify(name: string): string { - return name - .toLowerCase() - .replace(/[^a-z0-9-]/g, '-') - .replace(/-+/g, '-') - .replace(/^-|-$/g, ''); - } - - /** Generate a short 3-char alphanumeric hash for branch uniqueness */ - private generateShortHash(): string { - const bytes = crypto.randomBytes(3); - return bytes.readUIntBE(0, 3).toString(36).slice(0, 3).padStart(3, '0'); - } - - /** Generate a stable ID from the absolute worktree path */ - private stableIdFromPath(worktreePath: string): string { - const abs = path.resolve(worktreePath); - const h = crypto.createHash('sha1').update(abs).digest('hex').slice(0, 12); - return `wt-${h}`; - } - - /** - * Create a new Git worktree for an agent task - */ - async createWorktree( - projectPath: string, - taskName: string, - projectId: string, - baseRef?: string - ): Promise { - // Declare variables outside try block for access in catch block - let branchName: string | undefined; - let worktreePath: string | undefined; - const sluggedName = this.slugify(taskName); - const hash = this.generateShortHash(); - - try { - const { getAppSettings } = await import('../settings'); - const settings = getAppSettings(); - const prefix = settings?.repository?.branchPrefix || 'emdash'; - branchName = this.sanitizeBranchName(`${prefix}/${sluggedName}-${hash}`); - worktreePath = path.join(projectPath, '..', `worktrees/${sluggedName}-${hash}`); - const worktreeId = this.stableIdFromPath(worktreePath); - - log.info(`Creating worktree: ${branchName} -> ${worktreePath}`); - - // Check if worktree path already exists - if (fs.existsSync(worktreePath)) { - throw 
new Error(`Worktree directory already exists: ${worktreePath}`); - } - - // Ensure worktrees directory exists - const worktreesDir = path.dirname(worktreePath); - if (!fs.existsSync(worktreesDir)) { - fs.mkdirSync(worktreesDir, { recursive: true }); - } - - // Use provided baseRef override or resolve from project settings - let baseRefInfo: BaseRefInfo; - if (baseRef) { - const parsed = await this.parseBaseRef(baseRef, projectPath); - if (parsed) { - baseRefInfo = parsed; - } else { - // If parsing failed, fall back to project settings - log.warn( - `Failed to parse provided baseRef '${baseRef}', falling back to project settings` - ); - baseRefInfo = await this.resolveProjectBaseRef(projectPath, projectId); - } - } else { - baseRefInfo = await this.resolveProjectBaseRef(projectPath, projectId); - } - const fetchedBaseRef = await this.fetchBaseRefWithFallback( - projectPath, - projectId, - baseRefInfo - ); - - // Create the worktree - const { stdout, stderr } = await execFileAsync( - 'git', - ['worktree', 'add', '-b', branchName, worktreePath, fetchedBaseRef.fullRef], - { cwd: projectPath } - ); - - log.debug('Git worktree stdout:', stdout); - log.debug('Git worktree stderr:', stderr); - - // Verify the worktree was actually created - if (!fs.existsSync(worktreePath)) { - throw new Error(`Worktree directory was not created: ${worktreePath}`); - } - - // Preserve .env and other gitignored config files from source to worktree - try { - await this.preserveProjectFilesToWorktree(projectPath, worktreePath); - } catch (preserveErr) { - log.warn('Failed to preserve files to worktree (continuing):', preserveErr); - } - - await this.logWorktreeSyncStatus(projectPath, worktreePath, fetchedBaseRef); - - const worktreeInfo: WorktreeInfo = { - id: worktreeId, - name: taskName, - branch: branchName, - path: worktreePath, - projectId, - status: 'active', - createdAt: new Date().toISOString(), - }; - - this.worktrees.set(worktreeInfo.id, worktreeInfo); - - log.info(`Created 
worktree: ${taskName} -> ${branchName}`); - - // Push the new branch to origin and set upstream so PRs work out of the box - // Only if a remote exists - if (settings?.repository?.pushOnCreate !== false && fetchedBaseRef.remote) { - try { - await execFileAsync( - 'git', - ['push', '--set-upstream', fetchedBaseRef.remote, branchName], - { - cwd: worktreePath, - } - ); - log.info( - `Pushed branch ${branchName} to ${fetchedBaseRef.remote} with upstream tracking` - ); - } catch (pushErr) { - log.warn('Initial push of worktree branch failed:', pushErr as any); - // Don't fail worktree creation if push fails - user can push manually later - } - } else if (!fetchedBaseRef.remote) { - log.info( - `Skipping push for worktree branch ${branchName} - no remote configured (local-only repo)` - ); - } - - return worktreeInfo; - } catch (error) { - log.error('Failed to create worktree:', error); - const message = error instanceof Error ? error.message : String(error); - - // Track worktree creation errors - await errorTracking.captureWorktreeError(error, 'create', worktreePath, branchName, { - project_id: projectId, - project_path: projectPath, - task_name: taskName, - hash: hash, - }); - - throw new Error(message || 'Failed to create worktree'); - } - } - - async fetchLatestBaseRef(projectPath: string, projectId: string): Promise { - const baseRefInfo = await this.resolveProjectBaseRef(projectPath, projectId); - const fetched = await this.fetchBaseRefWithFallback(projectPath, projectId, baseRefInfo); - return fetched; - } - - /** - * List all worktrees for a project - */ - async listWorktrees(projectPath: string): Promise { - try { - const { stdout } = await execFileAsync('git', ['worktree', 'list'], { - cwd: projectPath, - }); - - const worktrees: WorktreeInfo[] = []; - const lines = stdout.trim().split('\n'); - // Compute managed prefixes based on configured prefix - let managedPrefixes: string[] = ['emdash', 'agent', 'pr', 'orch']; - try { - const { getAppSettings } = await 
import('../settings'); - const settings = getAppSettings(); - const p = settings?.repository?.branchPrefix; - if (p) managedPrefixes = Array.from(new Set([p, ...managedPrefixes])); - } catch {} - - for (const line of lines) { - if (line.includes('[') && line.includes(']')) { - const parts = line.split(/\s+/); - const worktreePath = parts[0]; - const branchMatch = line.match(/\[([^\]]+)\]/); - const branch = branchMatch ? branchMatch[1] : 'unknown'; - - const managedBranch = managedPrefixes.some((pf) => { - return ( - branch.startsWith(pf + '/') || - branch.startsWith(pf + '-') || - branch.startsWith(pf + '_') || - branch.startsWith(pf + '.') || - branch === pf - ); - }); - - if (!managedBranch) { - const tracked = Array.from(this.worktrees.values()).find( - (wt) => wt.path === worktreePath - ); - if (!tracked) continue; - } - - const existing = Array.from(this.worktrees.values()).find( - (wt) => wt.path === worktreePath - ); - - worktrees.push( - existing ?? { - id: this.stableIdFromPath(worktreePath), - name: path.basename(worktreePath), - branch, - path: worktreePath, - projectId: path.basename(projectPath), - status: 'active', - createdAt: new Date().toISOString(), - } - ); - } - } - - return worktrees; - } catch (error) { - log.error('Failed to list worktrees:', error); - return []; - } - } - - /** Sanitize branch name to ensure it's a valid Git ref */ - private sanitizeBranchName(name: string): string { - let n = name - .replace(/\s+/g, '-') - .replace(/[^A-Za-z0-9._\/-]+/g, '-') - .replace(/-+/g, '-') - .replace(/\/+/g, '/'); - n = n.replace(/^[./-]+/, '').replace(/[./-]+$/, ''); - if (!n || n === 'HEAD') { - n = `emdash/${this.slugify('task')}-${this.generateShortHash()}`; - } - return n; - } - - /** Remove a worktree */ - async removeWorktree( - projectPath: string, - worktreeId: string, - worktreePath?: string, - branch?: string - ): Promise { - try { - const worktree = this.worktrees.get(worktreeId); - - const pathToRemove = worktree?.path ?? 
worktreePath; - const branchToDelete = worktree?.branch ?? branch; - - if (!pathToRemove) { - throw new Error('Worktree path not provided'); - } - - // CRITICAL SAFETY CHECK: Prevent removing the main repository - // Check if the path to remove is the same as the project path (main repo) - const normalizedPathToRemove = path.resolve(pathToRemove); - const normalizedProjectPath = path.resolve(projectPath); - - if (normalizedPathToRemove === normalizedProjectPath) { - log.error( - `CRITICAL: Attempted to remove main repository! Path: ${pathToRemove}, Project: ${projectPath}` - ); - throw new Error('Cannot remove main repository - this is not a worktree'); - } - - // Additional safety: Check if this is actually a worktree using git worktree list - try { - const { stdout } = await execFileAsync('git', ['worktree', 'list', '--porcelain'], { - cwd: projectPath, - }); - - // Parse the output to find if pathToRemove is a worktree - const lines = stdout.split('\n'); - let isWorktree = false; - let isMainWorktree = false; - - for (let i = 0; i < lines.length; i++) { - if (lines[i].startsWith('worktree ')) { - const wtPath = lines[i].substring(9); // Remove "worktree " prefix - const normalizedWtPath = path.resolve(wtPath); - - if (normalizedWtPath === normalizedPathToRemove) { - // Check if this is the main worktree (bare repos have no main worktree) - const nextLine = lines[i + 1]; - if (nextLine && nextLine === 'bare') { - isMainWorktree = true; - } else if (i === 0) { - // First worktree in the list is usually the main worktree - isMainWorktree = true; - } - isWorktree = true; - break; - } - } - } - - if (isMainWorktree) { - log.error(`CRITICAL: Attempted to remove main worktree! 
Path: ${pathToRemove}`); - throw new Error('Cannot remove main worktree'); - } - - if (!isWorktree) { - log.warn(`Path is not a git worktree, skipping removal: ${pathToRemove}`); - // Don't throw error, just return - the path might not exist or might be a task without worktree - return; - } - } catch (checkError) { - log.warn('Could not verify worktree status, proceeding with caution:', checkError); - // If we can't verify, at least we've checked it's not the main project path above - } - - // Remove the worktree directory via git first - try { - // Use --force to remove even when there are untracked/modified files - await execFileAsync('git', ['worktree', 'remove', '--force', pathToRemove], { - cwd: projectPath, - }); - } catch (gitError) { - console.warn('git worktree remove failed, attempting filesystem cleanup', gitError); - } - - // Best-effort prune to clear any stale worktree metadata that can keep a branch "checked out" - try { - await execFileAsync('git', ['worktree', 'prune', '--verbose'], { cwd: projectPath }); - } catch (pruneErr) { - console.warn('git worktree prune failed (continuing):', pruneErr); - } - - // Ensure directory is removed even if git command failed - void this.cleanupWorktreeDirectory(pathToRemove, projectPath); - - if (branchToDelete) { - const tryDeleteBranch = async () => - await execFileAsync('git', ['branch', '-D', branchToDelete!], { cwd: projectPath }); - try { - await tryDeleteBranch(); - } catch (branchError: any) { - const msg = String(branchError?.stderr || branchError?.message || branchError); - // If git thinks the branch is still checked out in a (now removed) worktree, - // prune and retry once more. 
- if (/checked out at /.test(msg)) { - try { - await execFileAsync('git', ['worktree', 'prune', '--verbose'], { cwd: projectPath }); - await tryDeleteBranch(); - } catch (retryErr) { - console.warn(`Failed to delete branch ${branchToDelete} after prune:`, retryErr); - } - } else { - console.warn(`Failed to delete branch ${branchToDelete}:`, branchError); - } - } - - // Only try to delete remote branch if a remote exists - const remoteAlias = 'origin'; - const hasRemote = await this.hasRemote(projectPath, remoteAlias); - if (hasRemote) { - let remoteBranchName = branchToDelete; - if (branchToDelete.startsWith('origin/')) { - remoteBranchName = branchToDelete.replace(/^origin\//, ''); - } - try { - await execFileAsync('git', ['push', remoteAlias, '--delete', remoteBranchName], { - cwd: projectPath, - }); - log.info(`Deleted remote branch ${remoteAlias}/${remoteBranchName}`); - } catch (remoteError: any) { - const msg = String(remoteError?.stderr || remoteError?.message || remoteError); - if ( - /remote ref does not exist/i.test(msg) || - /unknown revision/i.test(msg) || - /not found/i.test(msg) - ) { - log.info(`Remote branch ${remoteAlias}/${remoteBranchName} already absent`); - } else { - log.warn( - `Failed to delete remote branch ${remoteAlias}/${remoteBranchName}:`, - remoteError - ); - } - } - } else { - log.info(`Skipping remote branch deletion - no remote configured (local-only repo)`); - } - } - - if (worktree) { - this.worktrees.delete(worktreeId); - log.info(`Removed worktree: ${worktree.name}`); - } else { - log.info(`Removed worktree ${worktreeId}`); - } - } catch (error) { - log.error('Failed to remove worktree:', error); - throw new Error(`Failed to remove worktree: ${error}`); - } - } - - /** - * Get worktree status and changes - */ - async getWorktreeStatus(worktreePath: string): Promise<{ - hasChanges: boolean; - stagedFiles: string[]; - unstagedFiles: string[]; - untrackedFiles: string[]; - }> { - try { - const { stdout: status } = await 
execFileAsync( - 'git', - ['status', '--porcelain', '--untracked-files=all'], - { - cwd: worktreePath, - } - ); - - const stagedFiles: string[] = []; - const unstagedFiles: string[] = []; - const untrackedFiles: string[] = []; - - const lines = status - .trim() - .split('\n') - .filter((line) => line.length > 0); - - for (const line of lines) { - const status = line.substring(0, 2); - const file = line.substring(3); - - if (status.includes('A') || status.includes('M') || status.includes('D')) { - stagedFiles.push(file); - } - if (status.includes('M') || status.includes('D')) { - unstagedFiles.push(file); - } - if (status.includes('??')) { - untrackedFiles.push(file); - } - } - - return { - hasChanges: stagedFiles.length > 0 || unstagedFiles.length > 0 || untrackedFiles.length > 0, - stagedFiles, - unstagedFiles, - untrackedFiles, - }; - } catch (error) { - log.error('Failed to get worktree status:', error); - return { - hasChanges: false, - stagedFiles: [], - unstagedFiles: [], - untrackedFiles: [], - }; - } - } - - /** - * Get the default branch of a repository - */ - private async getDefaultBranch(projectPath: string): Promise { - // Check if origin remote exists first - const hasOrigin = await this.hasRemote(projectPath, 'origin'); - if (!hasOrigin) { - // No remote - try to get current branch - try { - const { stdout } = await execFileAsync('git', ['branch', '--show-current'], { - cwd: projectPath, - }); - const current = stdout.trim(); - if (current) return current; - } catch { - // Fallback to 'main' - } - return 'main'; - } - - // Has remote - try to get its default branch - try { - const { stdout } = await execFileAsync('git', ['remote', 'show', 'origin'], { - cwd: projectPath, - }); - const match = stdout.match(/HEAD branch:\s*(\S+)/); - return match ? 
match[1] : 'main'; - } catch { - return 'main'; - } - } - - private async parseBaseRef( - ref?: string | null, - projectPath?: string - ): Promise { - if (!ref) return null; - const cleaned = ref - .trim() - .replace(/^refs\/remotes\//, '') - .replace(/^remotes\//, ''); - if (!cleaned) return null; - - // Check if this looks like a remote/branch ref - const slashIndex = cleaned.indexOf('/'); - if (slashIndex > 0) { - const potentialRemote = cleaned.substring(0, slashIndex); - const branch = cleaned.substring(slashIndex + 1); - - if (branch) { - // Verify if potentialRemote is actually a git remote - if (projectPath) { - try { - const { stdout } = await execFileAsync('git', ['remote'], { cwd: projectPath }); - const remotes = (stdout || '').trim().split('\n').filter(Boolean); - if (remotes.includes(potentialRemote)) { - return { remote: potentialRemote, branch, fullRef: cleaned }; - } - // Not a valid remote, fall through to treat as local branch - } catch { - // Can't check remotes, assume it's a remote ref - return { remote: potentialRemote, branch, fullRef: cleaned }; - } - } else { - // No projectPath to verify, assume it's a remote ref - return { remote: potentialRemote, branch, fullRef: cleaned }; - } - } - } - - // Treat as a local branch (no remote prefix) - return { remote: '', branch: cleaned, fullRef: cleaned }; - } - - private async resolveProjectBaseRef( - projectPath: string, - projectId: string - ): Promise { - const settings = await projectSettingsService.getProjectSettings(projectId); - if (!settings) { - throw new Error( - 'Project settings not found. Please re-open the project in Emdash and try again.' 
- ); - } - - const parsed = await this.parseBaseRef(settings.baseRef, projectPath); - if (parsed) { - return parsed; - } - - // If parseBaseRef returned null, it might be a local branch name - // Check if the baseRef exists as a local branch - if (settings.baseRef) { - try { - const { stdout } = await execFileAsync( - 'git', - ['rev-parse', '--verify', `refs/heads/${settings.baseRef}`], - { cwd: projectPath } - ); - if (stdout?.trim()) { - // It's a valid local branch - check if we have a remote - const hasOrigin = await this.hasRemote(projectPath, 'origin'); - if (hasOrigin) { - return { - remote: 'origin', - branch: settings.baseRef, - fullRef: `origin/${settings.baseRef}`, - }; - } else { - // Local-only repo - return { - remote: '', - branch: settings.baseRef, - fullRef: settings.baseRef, - }; - } - } - } catch { - // Not a local branch, continue to fallback - } - } - - // Check if we have a remote - const hasOrigin = await this.hasRemote(projectPath, 'origin'); - const fallbackBranch = - settings.gitBranch?.trim() && !settings.gitBranch.includes(' ') - ? 
settings.gitBranch.trim() - : await this.getDefaultBranch(projectPath); - const branch = fallbackBranch || 'main'; - - if (hasOrigin) { - return { - remote: 'origin', - branch, - fullRef: `origin/${branch}`, - }; - } else { - // Local-only repo - return { - remote: '', - branch, - fullRef: branch, - }; - } - } - - private async buildDefaultBaseRef(projectPath: string): Promise { - const hasOrigin = await this.hasRemote(projectPath, 'origin'); - const branch = await this.getDefaultBranch(projectPath); - const cleanBranch = branch?.trim() || 'main'; - - if (hasOrigin) { - return { remote: 'origin', branch: cleanBranch, fullRef: `origin/${cleanBranch}` }; - } else { - // Local-only repo - return { remote: '', branch: cleanBranch, fullRef: cleanBranch }; - } - } - - private extractErrorMessage(error: any): string { - if (!error) return ''; - const parts: Array = []; - if (typeof error.message === 'string') parts.push(error.message); - if (typeof error.stderr === 'string') parts.push(error.stderr); - if (typeof error.stdout === 'string') parts.push(error.stdout); - return parts.filter(Boolean).join(' ').trim(); - } - - private isMissingRemoteRefError(error: any): boolean { - const msg = this.extractErrorMessage(error).toLowerCase(); - if (!msg) return false; - return ( - msg.includes("couldn't find remote ref") || - msg.includes('could not find remote ref') || - msg.includes('remote ref does not exist') || - msg.includes('fatal: the remote end hung up unexpectedly') || - msg.includes('no such ref was fetched') - ); - } - - private async fetchBaseRefWithFallback( - projectPath: string, - projectId: string, - target: BaseRefInfo - ): Promise { - // Check if remote exists - if not, this is a local-only repo - const hasRemote = await this.hasRemote(projectPath, target.remote); - - if (!hasRemote) { - log.info(`No remote '${target.remote}' found, using local branch ${target.branch}`); - // Verify the local branch exists - try { - await execFileAsync('git', ['rev-parse', 
'--verify', target.branch], { - cwd: projectPath, - }); - // Return target with just the branch name (no remote prefix) - return { - remote: '', - branch: target.branch, - fullRef: target.branch, - }; - } catch (error: any) { - if ( - error?.code === 'ENAMETOOLONG' || - error?.code === 'ENOENT' || - error?.code === 'EACCES' - ) { - throw new Error(`Git failed to run (${error.code}). Check app logs for details.`); - } - throw new Error(`Local branch '${target.branch}' does not exist. Please create it first.`); - } - } - - // Remote exists, proceed with fetch - try { - await execFileAsync('git', ['fetch', target.remote, target.branch], { - cwd: projectPath, - }); - log.info(`Fetched latest ${target.fullRef} for worktree creation`); - return target; - } catch (error) { - log.warn(`Failed to fetch ${target.fullRef}`, error); - if (!this.isMissingRemoteRefError(error)) { - const message = this.extractErrorMessage(error) || 'Unknown git fetch error'; - throw new Error(`Failed to fetch ${target.fullRef}: ${message}`); - } - - // Attempt fallback to default branch - const fallback = await this.buildDefaultBaseRef(projectPath); - if (fallback.fullRef === target.fullRef) { - const message = this.extractErrorMessage(error) || 'Unknown git fetch error'; - throw new Error(`Failed to fetch ${target.fullRef}: ${message}`); - } - - // Check if fallback remote exists before fetching - const hasFallbackRemote = await this.hasRemote(projectPath, fallback.remote); - if (!hasFallbackRemote) { - throw new Error( - `Failed to fetch ${target.fullRef} and fallback remote '${fallback.remote}' does not exist` - ); - } - - try { - await execFileAsync('git', ['fetch', fallback.remote, fallback.branch], { - cwd: projectPath, - }); - log.info(`Fetched fallback ${fallback.fullRef} after missing base ref`); - - try { - await projectSettingsService.updateProjectSettings(projectId, { - baseRef: fallback.fullRef, - }); - log.info(`Updated project ${projectId} baseRef to fallback 
${fallback.fullRef}`); - } catch (persistError) { - log.warn('Failed to persist fallback baseRef', persistError); - } - - return fallback; - } catch (fallbackError) { - const msg = this.extractErrorMessage(fallbackError) || 'Unknown git fetch error'; - throw new Error( - `Failed to fetch base branch. Tried ${target.fullRef} and ${fallback.fullRef}. ${msg} Please verify the branch exists on the remote.` - ); - } - } - } - - /** - * Check if a git remote exists in the repository - */ - private async hasRemote(projectPath: string, remoteName: string): Promise { - if (!remoteName) return false; - try { - await execFileAsync('git', ['remote', 'get-url', remoteName], { - cwd: projectPath, - }); - return true; - } catch (error: any) { - if (error?.code === 'ENAMETOOLONG' || error?.code === 'ENOENT' || error?.code === 'EACCES') { - throw error; - } - return false; - } - } - - /** - * Merge worktree changes back to main branch - */ - async mergeWorktreeChanges(projectPath: string, worktreeId: string): Promise { - try { - const worktree = this.worktrees.get(worktreeId); - if (!worktree) { - throw new Error('Worktree not found'); - } - - const defaultBranch = await this.getDefaultBranch(projectPath); - - // Switch to default branch - await execFileAsync('git', ['checkout', defaultBranch], { cwd: projectPath }); - - // Merge the worktree branch - await execFileAsync('git', ['merge', worktree.branch], { cwd: projectPath }); - - // Remove the worktree - await this.removeWorktree(projectPath, worktreeId); - - log.info(`Merged worktree changes: ${worktree.name}`); - } catch (error) { - log.error('Failed to merge worktree changes:', error); - throw new Error(`Failed to merge worktree changes: ${error}`); - } - } - - /** - * Get worktree by ID - */ - getWorktree(worktreeId: string): WorktreeInfo | undefined { - return this.worktrees.get(worktreeId); - } - - /** - * Get all worktrees - */ - getAllWorktrees(): WorktreeInfo[] { - return Array.from(this.worktrees.values()); - } - - /** 
- * Build scoped git pathspecs from preserve patterns. - * We query both the raw pattern and a recursive variant to preserve existing - * basename-matching behavior for nested files. - */ - private buildIgnoredPathspecs(patterns: string[]): string[] { - const pathspecs = new Set(); - - for (const rawPattern of patterns) { - const pattern = rawPattern.trim().replace(/\\/g, '/').replace(/^\.\//, ''); - if (!pattern) { - continue; - } - - pathspecs.add(pattern); - if (!pattern.startsWith('**/')) { - pathspecs.add(`**/${pattern}`); - } - } - - return Array.from(pathspecs); - } - - /** - * Get ignored and non-ignored untracked files that match preserve patterns. - */ - private async getPreserveCandidateFiles(dir: string, patterns: string[]): Promise { - const pathspecs = this.buildIgnoredPathspecs(patterns); - if (pathspecs.length === 0) { - return []; - } - - try { - const [ignoredResult, untrackedResult] = await Promise.all([ - execFileAsync( - 'git', - ['ls-files', '--others', '--ignored', '--exclude-standard', '--', ...pathspecs], - { - cwd: dir, - maxBuffer: 10 * 1024 * 1024, - } - ), - execFileAsync('git', ['ls-files', '--others', '--exclude-standard', '--', ...pathspecs], { - cwd: dir, - maxBuffer: 10 * 1024 * 1024, - }), - ]); - - const ignoredFiles = (ignoredResult.stdout || '') - .trim() - .split('\n') - .filter((line) => line.length > 0); - const untrackedFiles = (untrackedResult.stdout || '') - .trim() - .split('\n') - .filter((line) => line.length > 0); - - return Array.from(new Set([...ignoredFiles, ...untrackedFiles])); - } catch (error) { - log.debug('Failed to list preserve candidate files:', error); - return []; - } - } - - /** - * Check if a file path matches any of the preserve patterns - */ - private matchesPreservePattern(filePath: string, patterns: string[]): boolean { - const fileName = path.basename(filePath); - - for (const pattern of patterns) { - // Match against filename - if (minimatch(fileName, pattern, { dot: true })) { - return true; - } 
- // Match against full path - if (minimatch(filePath, pattern, { dot: true })) { - return true; - } - // Match against full path with ** prefix for nested matches - if (minimatch(filePath, `**/${pattern}`, { dot: true })) { - return true; - } - } - - return false; - } - - /** - * Check if a file path contains any excluded path segments - */ - private isExcludedPath(filePath: string, excludePatterns: string[]): boolean { - if (excludePatterns.length === 0) { - return false; - } - - // git ls-files always returns paths with forward slashes regardless of OS - const parts = filePath.split('/'); - for (const part of parts) { - if (excludePatterns.includes(part)) { - return true; - } - } - - return false; - } - - /** - * Copy a file safely, skipping if destination already exists - */ - private async copyFileExclusive( - sourcePath: string, - destPath: string - ): Promise<'copied' | 'skipped' | 'error'> { - try { - // Check if destination already exists - if (fs.existsSync(destPath)) { - return 'skipped'; - } - - // Ensure destination directory exists - const destDir = path.dirname(destPath); - if (!fs.existsSync(destDir)) { - fs.mkdirSync(destDir, { recursive: true }); - } - - // Copy file preserving mode - const content = fs.readFileSync(sourcePath); - const stat = fs.statSync(sourcePath); - fs.writeFileSync(destPath, content, { mode: stat.mode }); - - return 'copied'; - } catch (error) { - log.debug(`Failed to copy ${sourcePath} to ${destPath}:`, error); - return 'error'; - } - } - - /** - * Preserve local files (typically ignored or untracked) from source to destination worktree. - * Only copies files that match the preserve patterns and don't exist in destination. 
- */ - async preserveFilesToWorktree( - sourceDir: string, - destDir: string, - patterns: string[] = DEFAULT_PRESERVE_PATTERNS, - excludePatterns: string[] = DEFAULT_EXCLUDE_PATTERNS - ): Promise { - const result: PreserveResult = { copied: [], skipped: [] }; - - if (patterns.length === 0) { - return result; - } - - // Get local files matching preserve patterns from source directory - const sourceFiles = await this.getPreserveCandidateFiles(sourceDir, patterns); - - if (sourceFiles.length === 0) { - log.debug('No preserve candidate files found in source directory'); - return result; - } - - // Filter files that match patterns and aren't excluded - const filesToCopy: string[] = []; - for (const file of sourceFiles) { - if (this.isExcludedPath(file, excludePatterns)) { - continue; - } - - if (this.matchesPreservePattern(file, patterns)) { - filesToCopy.push(file); - } - } - - if (filesToCopy.length === 0) { - log.debug('No files matched preserve patterns'); - return result; - } - - log.info(`Preserving ${filesToCopy.length} file(s) to worktree: ${filesToCopy.join(', ')}`); - - // Copy each file - for (const file of filesToCopy) { - const sourcePath = path.join(sourceDir, file); - const destPath = path.join(destDir, file); - - // Verify source file exists - if (!fs.existsSync(sourcePath)) { - log.debug(`Source file does not exist, skipping: ${sourcePath}`); - continue; - } - - const copyResult = await this.copyFileExclusive(sourcePath, destPath); - - if (copyResult === 'copied') { - result.copied.push(file); - log.debug(`Copied: ${file}`); - } else if (copyResult === 'skipped') { - result.skipped.push(file); - log.debug(`Skipped (already exists): ${file}`); - } - } - - if (result.copied.length > 0) { - log.info(`Preserved ${result.copied.length} file(s) to worktree`); - } - - return result; - } - - private async logWorktreeSyncStatus( - projectPath: string, - worktreePath: string, - baseRef: BaseRefInfo - ): Promise { - try { - const [{ stdout: remoteOut }, { stdout: 
worktreeOut }] = await Promise.all([ - execFileAsync('git', ['rev-parse', baseRef.fullRef], { cwd: projectPath }), - execFileAsync('git', ['rev-parse', 'HEAD'], { cwd: worktreePath }), - ]); - const remoteSha = (remoteOut || '').trim(); - const worktreeSha = (worktreeOut || '').trim(); - if (!remoteSha || !worktreeSha) return; - if (remoteSha === worktreeSha) { - log.debug(`Worktree ${worktreePath} matches ${baseRef.fullRef} @ ${remoteSha}`); - } else { - log.warn( - `Worktree ${worktreePath} diverged from ${baseRef.fullRef} immediately after creation`, - { remoteSha, worktreeSha, baseRef: baseRef.fullRef } - ); - } - } catch (error) { - log.debug('Unable to verify worktree head against remote', error); - } - } - - async createWorktreeFromBranch( - projectPath: string, - taskName: string, - branchName: string, - projectId: string, - options?: { worktreePath?: string } - ): Promise { - const normalizedName = taskName || branchName.replace(/\//g, '-'); - const sluggedName = this.slugify(normalizedName) || 'task'; - const targetPath = - options?.worktreePath || - path.join(projectPath, '..', `worktrees/${sluggedName}-${Date.now()}`); - const worktreePath = path.resolve(targetPath); - - if (fs.existsSync(worktreePath)) { - throw new Error(`Worktree directory already exists: ${worktreePath}`); - } - - const worktreesDir = path.dirname(worktreePath); - if (!fs.existsSync(worktreesDir)) { - fs.mkdirSync(worktreesDir, { recursive: true }); - } - - try { - await execFileAsync('git', ['worktree', 'add', worktreePath, branchName], { - cwd: projectPath, - }); - } catch (error) { - throw new Error( - `Failed to create worktree for branch ${branchName}: ${error instanceof Error ? 
error.message : String(error)}` - ); - } - - if (!fs.existsSync(worktreePath)) { - throw new Error(`Worktree directory was not created: ${worktreePath}`); - } - - // Preserve .env and other gitignored config files from source to worktree - try { - await this.preserveProjectFilesToWorktree(projectPath, worktreePath); - } catch (preserveErr) { - log.warn('Failed to preserve files to worktree (continuing):', preserveErr); - } - - const worktreeInfo: WorktreeInfo = { - id: this.stableIdFromPath(worktreePath), - name: normalizedName, - branch: branchName, - path: worktreePath, - projectId, - status: 'active', - createdAt: new Date().toISOString(), - }; - - this.worktrees.set(worktreeInfo.id, worktreeInfo); - - return worktreeInfo; - } - - /** - * Register a worktree created externally (e.g., by WorktreePoolService) - */ - registerWorktree(worktree: WorktreeInfo): void { - this.worktrees.set(worktree.id, worktree); - } -} - -export const worktreeService = new WorktreeService(); diff --git a/src/main/services/__tests__/RemoteGitService.test.ts b/src/main/services/__tests__/RemoteGitService.test.ts deleted file mode 100644 index 7da1aa1f0..000000000 --- a/src/main/services/__tests__/RemoteGitService.test.ts +++ /dev/null @@ -1,1015 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; -import { RemoteGitService, WorktreeInfo, GitStatus } from '../RemoteGitService'; -import { SshService } from '../ssh/SshService'; -import { ExecResult } from '../../../shared/ssh/types'; - -// Mock SshService -const mockExecuteCommand = vi.fn(); -const mockConnect = vi.fn(); -const mockDisconnect = vi.fn(); - -vi.mock('../ssh/SshService', () => ({ - SshService: vi.fn().mockImplementation(() => ({ - executeCommand: mockExecuteCommand, - connect: mockConnect, - disconnect: mockDisconnect, - })), -})); - -describe('RemoteGitService', () => { - let service: RemoteGitService; - let mockSshService: SshService; - - beforeEach(() => { - vi.clearAllMocks(); - 
mockSshService = new SshService(); - service = new RemoteGitService(mockSshService); - }); - - describe('getStatus', () => { - it('should parse clean repository status', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '## main...origin/main\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('main'); - expect(result.isClean).toBe(true); - expect(result.files).toHaveLength(0); - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - 'git status --porcelain -b', - '/home/user/project' - ); - }); - - it('should parse repository with uncommitted changes', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '## feature-branch\n M modified.ts\n?? untracked.txt\nA staged.js', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('feature-branch'); - expect(result.isClean).toBe(false); - expect(result.files).toHaveLength(3); - expect(result.files).toContainEqual({ status: 'M', path: 'modified.ts' }); - expect(result.files).toContainEqual({ status: '??', path: 'untracked.txt' }); - expect(result.files).toContainEqual({ status: 'A', path: 'staged.js' }); - }); - - it('should handle ahead/behind status', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '## main...origin/main [ahead 2, behind 1]\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('main'); - }); - - it('should handle detached HEAD', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '## HEAD (no branch)\n M file.txt', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('HEAD (no branch)'); - }); - - it('should throw error when git 
status fails', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'fatal: not a git repository', - exitCode: 128, - } as ExecResult); - - await expect(service.getStatus('conn-1', '/home/user/project')).rejects.toThrow( - 'Git status failed: fatal: not a git repository' - ); - }); - - it('should handle unknown branch format', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '##\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('unknown'); - }); - }); - - describe('createWorktree', () => { - beforeEach(() => { - vi.useFakeTimers(); - vi.setSystemTime(new Date('2024-01-15T10:30:00Z')); - }); - - afterEach(() => { - vi.useRealTimers(); - }); - - it('should create worktree with default base ref', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: "Preparing worktree (new branch 'task-name-1705314600000')\n", - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.createWorktree('conn-1', '/home/user/project', 'task name'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - 'mkdir -p .emdash/worktrees', - '/home/user/project' - ); - // When no baseRef is provided, getDefaultBranch is called first (git rev-parse), - // then git worktree add is called - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - expect.stringContaining('git worktree add'), - '/home/user/project' - ); - expect(result.branch).toContain('task-name'); - expect(result.isMain).toBe(false); - expect(result.path).toContain('.emdash/worktrees'); - }); - - it('should create worktree with custom base ref', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.createWorktree( - 'conn-1', - '/home/user/project', - 'feature-task', - 'origin/develop' - ); - - 
expect(mockExecuteCommand).toHaveBeenNthCalledWith( - 2, - 'conn-1', - expect.stringContaining('origin/develop'), - '/home/user/project' - ); - }); - - it('should sanitize task name for branch', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.createWorktree( - 'conn-1', - '/home/user/project', - 'task with spaces & symbols!@#' - ); - - expect(result.branch).toMatch(/^task-with-spaces-/); - expect(result.branch).not.toContain(' '); - expect(result.branch).not.toContain('&'); - expect(result.branch).not.toContain('!'); - expect(result.branch).not.toContain('@'); - expect(result.branch).not.toContain('#'); - }); - - it('should throw error when worktree creation fails', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // mkdir succeeds - .mockResolvedValueOnce({ stdout: 'main', stderr: '', exitCode: 0 } as ExecResult) // getDefaultBranch (git rev-parse) - .mockResolvedValueOnce({ - stdout: '', - stderr: 'fatal: A branch named \"test\" already exists', - exitCode: 128, - } as ExecResult); // git worktree add fails - - await expect(service.createWorktree('conn-1', '/home/user/project', 'test')).rejects.toThrow( - 'Failed to create worktree: fatal: A branch named' - ); - }); - - it('should construct correct worktree path', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.createWorktree( - 'conn-1', - '/home/user/repos/myproject', - 'test-task' - ); - - expect(result.path).toContain('/.emdash/worktrees/'); - expect(result.path).toContain('test-task'); - }); - }); - - describe('removeWorktree', () => { - it('should remove worktree successfully', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await service.removeWorktree( - 'conn-1', - 
'/home/user/project', - '/home/user/project/.emdash/worktrees/test-123' - ); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git worktree remove '/home/user/project/.emdash/worktrees/test-123' --force", - '/home/user/project' - ); - }); - - it('should throw error when removal fails', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'fatal: not a valid worktree', - exitCode: 128, - } as ExecResult); - - await expect( - service.removeWorktree('conn-1', '/home/user/project', '/invalid/path') - ).rejects.toThrow('Failed to remove worktree: fatal: not a valid worktree'); - }); - - it('should handle paths with spaces', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await service.removeWorktree( - 'conn-1', - '/home/user/my project', - '/home/user/my project/.emdash/worktrees/test' - ); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git worktree remove '/home/user/my project/.emdash/worktrees/test' --force", - '/home/user/my project' - ); - }); - }); - - describe('getBranchList', () => { - it('should return list of branches', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: 'main\ndevelop\nfeature/new-thing\n* current-branch\n remotes/origin/main\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getBranchList('conn-1', '/home/user/project'); - - expect(result).toHaveLength(5); - expect(result).toContain('main'); - expect(result).toContain('develop'); - expect(result).toContain('feature/new-thing'); - expect(result).toContain('* current-branch'); - expect(result).toContain(' remotes/origin/main'); - }); - - it('should return empty array when git command fails', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'fatal: not a git repository', - exitCode: 128, - } as ExecResult); - - const result = await service.getBranchList('conn-1', 
'/home/user/project'); - - expect(result).toEqual([]); - }); - - it('should filter out empty lines', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: 'main\n\ndevelop\n\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getBranchList('conn-1', '/home/user/project'); - - expect(result).toHaveLength(2); - expect(result).toContain('main'); - expect(result).toContain('develop'); - }); - }); - - describe('commit', () => { - it('should commit with message', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '[main abc1234] Test commit\n 1 file changed, 1 insertion(+)\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.commit('conn-1', '/home/user/project', 'Test commit'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git commit -m 'Test commit'", - '/home/user/project' - ); - expect(result.exitCode).toBe(0); - }); - - it('should stage and commit specific files', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '[main abc1234] Commit specific files\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.commit('conn-1', '/home/user/project', 'Commit specific files', [ - 'file1.ts', - 'file2.ts', - ]); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git add 'file1.ts' 'file2.ts' && git commit -m 'Commit specific files'", - '/home/user/project' - ); - }); - - it('should escape quotes in commit message', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await service.commit('conn-1', '/home/user/project', 'Fix bug in "authentication" module'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - 'git commit -m \'Fix bug in "authentication" module\'', - '/home/user/project' - ); - }); - - it('should handle multiline commit messages', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - 
stderr: '', - exitCode: 0, - } as ExecResult); - - await service.commit('conn-1', '/home/user/project', 'First line\n\nSecond paragraph'); - - // The message should be properly escaped - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - expect.stringContaining('git commit'), - '/home/user/project' - ); - }); - - it('should handle empty files array', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await service.commit('conn-1', '/home/user/project', 'Commit message', []); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git commit -m 'Commit message'", - '/home/user/project' - ); - }); - - it('should handle commit failure', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'nothing to commit, working tree clean', - exitCode: 1, - } as ExecResult); - - const result = await service.commit('conn-1', '/home/user/project', 'Empty commit'); - - expect(result.exitCode).toBe(1); - expect(result.stderr).toBe('nothing to commit, working tree clean'); - }); - - it('should commit files with special characters in names', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await service.commit('conn-1', '/home/user/project', 'Special files', [ - 'file with spaces.ts', - ]); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - expect.stringContaining("git add 'file with spaces.ts'"), - '/home/user/project' - ); - }); - }); - - describe('getStatusDetailed', () => { - it('should return empty array for non-git directory', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'fatal: not a git repository', - exitCode: 128, - } as ExecResult); - - const result = await service.getStatusDetailed('conn-1', '/home/user/project'); - expect(result).toEqual([]); - }); - - it('should return empty array for clean repo', async () => { - mockExecuteCommand 
- .mockResolvedValueOnce({ stdout: 'true', stderr: '', exitCode: 0 } as ExecResult) // rev-parse - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // status - - const result = await service.getStatusDetailed('conn-1', '/home/user/project'); - expect(result).toEqual([]); - }); - - it('should parse status with additions/deletions from numstat', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: 'true', stderr: '', exitCode: 0 } as ExecResult) // rev-parse - .mockResolvedValueOnce({ - stdout: ' M src/app.ts\nA src/new.ts\n?? untracked.txt\n', - stderr: '', - exitCode: 0, - } as ExecResult) // status - .mockResolvedValueOnce({ - stdout: '5\t2\tsrc/new.ts\n', - stderr: '', - exitCode: 0, - } as ExecResult) // numstat --cached - .mockResolvedValueOnce({ - stdout: '10\t3\tsrc/app.ts\n', - stderr: '', - exitCode: 0, - } as ExecResult); // numstat (unstaged) - - const result = await service.getStatusDetailed('conn-1', '/home/user/project'); - - expect(result).toHaveLength(3); - - const appTs = result.find((c) => c.path === 'src/app.ts'); - expect(appTs).toBeDefined(); - expect(appTs!.status).toBe('modified'); - expect(appTs!.additions).toBe(10); - expect(appTs!.deletions).toBe(3); - expect(appTs!.isStaged).toBe(false); - - const newTs = result.find((c) => c.path === 'src/new.ts'); - expect(newTs).toBeDefined(); - expect(newTs!.status).toBe('added'); - expect(newTs!.isStaged).toBe(true); - expect(newTs!.additions).toBe(5); - expect(newTs!.deletions).toBe(2); - - const untracked = result.find((c) => c.path === 'untracked.txt'); - expect(untracked).toBeDefined(); - expect(untracked!.status).toBe('added'); - expect(untracked!.isStaged).toBe(false); - }); - - it('should batch line-count for untracked files', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: 'true', stderr: '', exitCode: 0 } as ExecResult) // rev-parse - .mockResolvedValueOnce({ - stdout: '?? file1.txt\n?? 
file2.txt\n', - stderr: '', - exitCode: 0, - } as ExecResult) // status - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // numstat --cached - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // numstat - .mockResolvedValueOnce({ - stdout: '42\n100\n', - stderr: '', - exitCode: 0, - } as ExecResult); // wc -l batch - - const result = await service.getStatusDetailed('conn-1', '/home/user/project'); - - expect(result).toHaveLength(2); - expect(result[0].additions).toBe(42); - expect(result[1].additions).toBe(100); - }); - - it('should handle renamed files', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: 'true', stderr: '', exitCode: 0 } as ExecResult) - .mockResolvedValueOnce({ - stdout: 'R old.ts -> new.ts\n', - stderr: '', - exitCode: 0, - } as ExecResult) - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - const result = await service.getStatusDetailed('conn-1', '/home/user/project'); - - expect(result).toHaveLength(1); - expect(result[0].path).toBe('new.ts'); - expect(result[0].status).toBe('renamed'); - expect(result[0].isStaged).toBe(true); - }); - }); - - describe('getFileDiff', () => { - it('should parse unified diff output', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: - 'diff --git a/file.ts b/file.ts\nindex abc..def 100644\n--- a/file.ts\n+++ b/file.ts\n@@ -1,3 +1,3 @@\n hello\n-old line\n+new line\n world\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git diff - .mockResolvedValueOnce({ - stdout: 'hello\nold line\nworld\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git show HEAD:file - .mockResolvedValueOnce({ - stdout: 'hello\nnew line\nworld\n', - stderr: '', - exitCode: 0, - } as ExecResult); // cat file - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'file.ts'); - - 
expect(result.lines).toHaveLength(4); - expect(result.lines[0]).toEqual({ left: 'hello', right: 'hello', type: 'context' }); - expect(result.lines[1]).toEqual({ left: 'old line', type: 'del' }); - expect(result.lines[2]).toEqual({ right: 'new line', type: 'add' }); - expect(result.lines[3]).toEqual({ left: 'world', right: 'world', type: 'context' }); - expect(result.originalContent).toBe('hello\nold line\nworld'); - expect(result.modifiedContent).toBe('hello\nnew line\nworld'); - }); - - it('should handle untracked file (no diff, read content)', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) - .mockResolvedValueOnce({ stdout: '', stderr: 'not found', exitCode: 128 } as ExecResult) - .mockResolvedValueOnce({ - stdout: 'line1\nline2\nline3\n', - stderr: '', - exitCode: 0, - } as ExecResult); // cat fallback - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'newfile.txt'); - - expect(result.lines).toHaveLength(3); - expect(result.lines[0]).toEqual({ right: 'line1', type: 'add' }); - expect(result.lines[1]).toEqual({ right: 'line2', type: 'add' }); - expect(result.lines[2]).toEqual({ right: 'line3', type: 'add' }); - expect(result.originalContent).toBeUndefined(); - expect(result.modifiedContent).toBe('line1\nline2\nline3'); - }); - - it('should handle deleted file with realistic diff output', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: - 'diff --git a/deleted.txt b/deleted.txt\ndeleted file mode 100644\nindex abc1234..0000000\n--- a/deleted.txt\n+++ /dev/null\n@@ -1,2 +0,0 @@\n-old content\n-was here\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git diff - .mockResolvedValueOnce({ - stdout: 'old content\nwas here\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git show HEAD:file - .mockResolvedValueOnce({ - stdout: '', - stderr: 'No such file or directory', - exitCode: 1, - } as ExecResult); // cat fails — file not on disk - - 
const result = await service.getFileDiff('conn-1', '/home/user/project', 'deleted.txt'); - - expect(result.lines).toHaveLength(2); - expect(result.lines[0]).toEqual({ left: 'old content', type: 'del' }); - expect(result.lines[1]).toEqual({ left: 'was here', type: 'del' }); - expect(result.originalContent).toBe('old content\nwas here'); - expect(result.modifiedContent).toBeUndefined(); - }); - - it('should return empty lines when all fallbacks fail', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // git diff (parallel) - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 1 } as ExecResult) // git show HEAD:file (parallel) - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 1 } as ExecResult); // cat fallback - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'ghost.txt'); - expect(result.lines).toEqual([]); - expect(result.originalContent).toBeUndefined(); - expect(result.modifiedContent).toBeUndefined(); - }); - - it('should handle staged new file (git show HEAD fails, diff and cat succeed)', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: - 'diff --git a/newfile.ts b/newfile.ts\nnew file mode 100644\nindex 0000000..abc1234\n--- /dev/null\n+++ b/newfile.ts\n@@ -0,0 +1,2 @@\n+line one\n+line two\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git diff - .mockResolvedValueOnce({ - stdout: '', - stderr: 'fatal: Path does not exist', - exitCode: 128, - } as ExecResult) // git show HEAD:file (fails — file not in HEAD) - .mockResolvedValueOnce({ - stdout: 'line one\nline two\n', - stderr: '', - exitCode: 0, - } as ExecResult); // cat file - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'newfile.ts'); - - expect(result.lines).toHaveLength(2); - expect(result.lines[0]).toEqual({ right: 'line one', type: 'add' }); - expect(result.lines[1]).toEqual({ right: 'line two', type: 'add' }); - 
expect(result.originalContent).toBeUndefined(); - expect(result.modifiedContent).toBe('line one\nline two'); - }); - - it('should skip "No newline at end of file" markers', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: - 'diff --git a/file.ts b/file.ts\nindex abc1234..def5678 100644\n--- a/file.ts\n+++ b/file.ts\n@@ -1,2 +1,2 @@\n hello\n-old line\n\\ No newline at end of file\n+new line\n\\ No newline at end of file\n', - stderr: '', - exitCode: 0, - } as ExecResult) // git diff - .mockResolvedValueOnce({ - stdout: 'hello\nold line', - stderr: '', - exitCode: 0, - } as ExecResult) // git show HEAD:file (no trailing newline) - .mockResolvedValueOnce({ - stdout: 'hello\nnew line', - stderr: '', - exitCode: 0, - } as ExecResult); // cat file (no trailing newline) - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'file.ts'); - - expect(result.lines).toHaveLength(3); - expect(result.lines[0]).toEqual({ left: 'hello', right: 'hello', type: 'context' }); - expect(result.lines[1]).toEqual({ left: 'old line', type: 'del' }); - expect(result.lines[2]).toEqual({ right: 'new line', type: 'add' }); - expect(result.originalContent).toBe('hello\nold line'); - expect(result.modifiedContent).toBe('hello\nnew line'); - }); - - it('should detect binary files and return empty lines with isBinary flag', async () => { - mockExecuteCommand.mockResolvedValueOnce({ - stdout: - 'diff --git a/image.png b/image.png\nindex abc1234..def5678 100644\nBinary files a/image.png and b/image.png differ\n', - stderr: '', - exitCode: 0, - } as ExecResult); // git diff only — no content fetch for binary - - const result = await service.getFileDiff('conn-1', '/home/user/project', 'image.png'); - - expect(result.lines).toEqual([]); - expect(result.isBinary).toBe(true); - // Verify no additional SSH calls were made for content - expect(mockExecuteCommand).toHaveBeenCalledTimes(1); - }); - }); - - describe('stageFile', () => { - it('should stage a file 
via git add', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.stageFile('conn-1', '/home/user/project', 'src/app.ts'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git add -- 'src/app.ts'", - '/home/user/project' - ); - }); - - it('should throw on failure', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: 'fatal: pathspec not found', - exitCode: 128, - } as ExecResult); - - await expect( - service.stageFile('conn-1', '/home/user/project', 'nonexistent.ts') - ).rejects.toThrow('Failed to stage file'); - }); - - it('should escape special characters in file path', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.stageFile('conn-1', '/home/user/project', "file with spaces & 'quotes'.ts"); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - expect.stringContaining('git add --'), - '/home/user/project' - ); - }); - }); - - describe('stageAllFiles', () => { - it('should run git add -A', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.stageAllFiles('conn-1', '/home/user/project'); - - expect(mockExecuteCommand).toHaveBeenCalledWith('conn-1', 'git add -A', '/home/user/project'); - }); - }); - - describe('unstageFile', () => { - it('should run git reset HEAD', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.unstageFile('conn-1', '/home/user/project', 'src/app.ts'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git reset HEAD -- 'src/app.ts'", - '/home/user/project' - ); - }); - }); - - describe('revertFile', () => { - it('should delete untracked file when not in HEAD', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: '', - stderr: 'fatal: Not a valid 
object name', - exitCode: 128, - } as ExecResult) // cat-file -e fails - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // rm -f - - const result = await service.revertFile('conn-1', '/home/user/project', 'newfile.txt'); - - expect(result.action).toBe('reverted'); - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "rm -f -- 'newfile.txt'", - '/home/user/project' - ); - }); - - it('should checkout from HEAD for tracked file', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // cat-file -e succeeds - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // checkout HEAD - - const result = await service.revertFile('conn-1', '/home/user/project', 'existing.ts'); - - expect(result.action).toBe('reverted'); - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git checkout HEAD -- 'existing.ts'", - '/home/user/project' - ); - }); - - it('should throw when checkout fails', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // cat-file - .mockResolvedValueOnce({ - stdout: '', - stderr: 'error: pathspec did not match', - exitCode: 1, - } as ExecResult); // checkout fails - - await expect(service.revertFile('conn-1', '/home/user/project', 'broken.ts')).rejects.toThrow( - 'Failed to revert file' - ); - }); - }); - - describe('getCurrentBranch', () => { - it('should return current branch name', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: 'feature/my-branch\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.getCurrentBranch('conn-1', '/home/user/project'); - expect(result).toBe('feature/my-branch'); - }); - - it('should return empty string for detached HEAD', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: '\n', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await 
service.getCurrentBranch('conn-1', '/home/user/project'); - expect(result).toBe(''); - }); - }); - - describe('push', () => { - it('should run git push', async () => { - mockExecuteCommand.mockResolvedValue({ - stdout: 'Everything up-to-date', - stderr: '', - exitCode: 0, - } as ExecResult); - - const result = await service.push('conn-1', '/home/user/project'); - - expect(mockExecuteCommand).toHaveBeenCalledWith('conn-1', 'git push', '/home/user/project'); - expect(result.exitCode).toBe(0); - }); - - it('should set upstream when requested', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.push('conn-1', '/home/user/project', 'feature-branch', true); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git push --set-upstream origin 'feature-branch'", - '/home/user/project' - ); - }); - }); - - describe('getBranchStatus', () => { - it('should return branch status with ahead/behind', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: 'feature-branch\n', - stderr: '', - exitCode: 0, - } as ExecResult) // branch --show-current - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 1 } as ExecResult) // gh fails - .mockResolvedValueOnce({ stdout: 'main\n', stderr: '', exitCode: 0 } as ExecResult) // remote show origin - .mockResolvedValueOnce({ - stdout: '3\t5\n', - stderr: '', - exitCode: 0, - } as ExecResult); // rev-list - - const result = await service.getBranchStatus('conn-1', '/home/user/project'); - - expect(result.branch).toBe('feature-branch'); - expect(result.defaultBranch).toBe('main'); - expect(result.behind).toBe(3); - expect(result.ahead).toBe(5); - }); - }); - - describe('renameBranch', () => { - it('should rename local branch', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 1 } as ExecResult) // no remote tracking - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 1 } as 
ExecResult) // ls-remote empty - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // branch -m - - const result = await service.renameBranch( - 'conn-1', - '/home/user/project', - 'old-name', - 'new-name' - ); - - expect(result.remotePushed).toBe(false); - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - "git branch -m 'old-name' 'new-name'", - '/home/user/project' - ); - }); - - it('should update remote when branch was pushed', async () => { - mockExecuteCommand - .mockResolvedValueOnce({ - stdout: 'origin\n', - stderr: '', - exitCode: 0, - } as ExecResult) // remote tracking - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // branch -m - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // push --delete - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // push -u - - const result = await service.renameBranch( - 'conn-1', - '/home/user/project', - 'old-name', - 'new-name' - ); - - expect(result.remotePushed).toBe(true); - }); - }); - - describe('execGh and execGit', () => { - it('should run gh commands with correct cwd', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '{}', stderr: '', exitCode: 0 } as ExecResult); - - await service.execGh('conn-1', '/home/user/project', 'pr view --json number'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - 'gh pr view --json number', - '/home/user/project' - ); - }); - - it('should run git commands with correct cwd', async () => { - mockExecuteCommand.mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); - - await service.execGit('conn-1', '/home/user/project', 'status -sb'); - - expect(mockExecuteCommand).toHaveBeenCalledWith( - 'conn-1', - 'git status -sb', - '/home/user/project' - ); - }); - }); - - describe('integration scenarios', () => { - it('should handle full workflow: create, check status, commit, remove', async () => { - // Create 
worktree - mockExecuteCommand - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult) // mkdir - .mockResolvedValueOnce({ stdout: '', stderr: '', exitCode: 0 } as ExecResult); // worktree add - - const worktree = await service.createWorktree('conn-1', '/home/user/project', 'feature'); - - // Check status (clean) - mockExecuteCommand.mockResolvedValue({ - stdout: `## ${worktree.branch}\n`, - stderr: '', - exitCode: 0, - } as ExecResult); - - const status = await service.getStatus('conn-1', worktree.path); - expect(status.isClean).toBe(true); - - // Commit - mockExecuteCommand.mockResolvedValue({ - stdout: `[${worktree.branch} abc1234] Initial commit\n`, - stderr: '', - exitCode: 0, - } as ExecResult); - - const commitResult = await service.commit('conn-1', worktree.path, 'Initial commit'); - expect(commitResult.exitCode).toBe(0); - - // Remove worktree - mockExecuteCommand.mockResolvedValue({ - stdout: '', - stderr: '', - exitCode: 0, - } as ExecResult); - - await expect( - service.removeWorktree('conn-1', '/home/user/project', worktree.path) - ).resolves.not.toThrow(); - }); - }); -}); diff --git a/src/main/services/browserViewService.ts b/src/main/services/browserViewService.ts deleted file mode 100644 index 0a5b260ae..000000000 --- a/src/main/services/browserViewService.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { BrowserWindow, WebContentsView } from 'electron'; -import { getMainWindow } from '../app/window'; - -class BrowserViewService { - private view: WebContentsView | null = null; - private visible = false; - private emitToRenderers(evt: any) { - try { - const wins = BrowserWindow.getAllWindows(); - for (const w of wins) { - try { - w.webContents.send('browser:view:event', evt); - } catch {} - } - } catch {} - } - - ensureView(win?: BrowserWindow): WebContentsView | null { - const w = win || getMainWindow() || undefined; - if (!w) return null; - if (!this.view) { - this.view = new WebContentsView({ - webPreferences: { - 
contextIsolation: true, - nodeIntegration: false, - }, - }); - w.contentView.addChildView(this.view); - try { - this.view.webContents.setWindowOpenHandler?.(() => ({ action: 'deny' }) as any); - } catch {} - try { - this.view.webContents.on('did-finish-load', () => - this.emitToRenderers({ type: 'did-finish-load' }) - ); - this.view.webContents.on('did-fail-load', (_ev, errorCode, errorDescription) => - this.emitToRenderers({ type: 'did-fail-load', errorCode, errorDescription }) - ); - this.view.webContents.on('did-start-navigation', (_ev, url) => - this.emitToRenderers({ type: 'did-start-navigation', url }) - ); - } catch {} - this.visible = true; - } - return this.view; - } - - // Clear the current URL when switching worktrees - clear() { - if (!this.view) return; - try { - // Load about:blank to clear the current page - this.view.webContents.loadURL('about:blank'); - } catch {} - } - - private bringToFront(win: BrowserWindow) { - if (!this.view) return; - try { - // Remove and re-add the view to bring it to the front - // In Electron, views added later are rendered on top - win.contentView.removeChildView(this.view); - win.contentView.addChildView(this.view); - } catch {} - } - - show(bounds: Electron.Rectangle, url?: string) { - const win = getMainWindow() || undefined; - if (!win) return; - - const v = this.ensureView(win); - if (!v) return; - - // Ensure bounds are valid (width and height must be > 0) - if (bounds.width <= 0 || bounds.height <= 0) { - return; - } - - // Bring view to front to ensure it renders above other content - this.bringToFront(win); - - // Set bounds first to ensure view is positioned correctly - v.setBounds(bounds); - - try { - // Keep rendering even when not focused/visible previously - v.webContents.setBackgroundThrottling?.(false as any); - } catch {} - - // Load URL immediately when provided - if (url) { - try { - const current = (() => { - try { - return v.webContents.getURL(); - } catch { - return ''; - } - })(); - // Normalize 
URLs for comparison (remove trailing slashes, etc.) - const normalizeUrl = (u: string) => u.replace(/\/$/, '').toLowerCase(); - const normalizedCurrent = current ? normalizeUrl(current) : ''; - const normalizedUrl = normalizeUrl(url); - - if (!current || normalizedCurrent !== normalizedUrl) { - // Load URL immediately - don't delay - try { - v.webContents.loadURL(url); - } catch (e) { - // If immediate load fails, try again after a short delay - setTimeout(() => { - try { - v.webContents.loadURL(url); - } catch {} - }, 50); - } - } - } catch {} - } - - // Ensure view is visible and focused - try { - v.webContents.focus(); - } catch {} - - // Force bounds update after a short delay to ensure view is positioned correctly - // This helps with timing issues where the container might not be fully laid out yet - try { - setTimeout(() => { - try { - const updatedBounds = { ...bounds }; - // Re-validate bounds before setting - if (updatedBounds.width > 0 && updatedBounds.height > 0) { - v.setBounds(updatedBounds); - // Bring to front again after bounds update - this.bringToFront(win); - // Ensure URL is still loaded after bounds update - if (url) { - try { - const current = v.webContents.getURL(); - const normalizeUrl = (u: string) => u.replace(/\/$/, '').toLowerCase(); - if (!current || normalizeUrl(current) !== normalizeUrl(url)) { - v.webContents.loadURL(url); - } - } catch {} - } - // Force focus again after bounds update - v.webContents.focus(); - } - } catch {} - }, 50); - } catch {} - - this.visible = true; - } - - hide() { - if (!this.view) return; - try { - this.view.setBounds({ x: -10000, y: -10000, width: 1, height: 1 }); - } catch {} - this.visible = false; - } - - setBounds(bounds: Electron.Rectangle) { - if (!this.view) return; - try { - this.view.setBounds(bounds); - } catch {} - } - - loadURL(url: string, forceReload = false) { - // Don't load empty or invalid URLs - if (!url || typeof url !== 'string' || url.trim() === '') { - return; - } - - const v = 
this.ensureView(); - if (!v) return; - try { - // Normalize URL for comparison - const normalizeUrl = (u: string) => u.replace(/\/$/, '').toLowerCase(); - const current = (() => { - try { - return v.webContents.getURL(); - } catch { - return ''; - } - })(); - // Load if URL is different or if forceReload is true - if (forceReload || !current || normalizeUrl(current) !== normalizeUrl(url)) { - // Ensure view is visible before loading - const win = getMainWindow(); - if (win && this.visible) { - this.bringToFront(win); - } - v.webContents.loadURL(url); - // Focus after loading to ensure it's active - setTimeout(() => { - try { - v.webContents.focus(); - } catch {} - }, 50); - } - } catch {} - } - - goBack() { - try { - this.view?.webContents.goBack(); - } catch {} - } - goForward() { - try { - this.view?.webContents.goForward(); - } catch {} - } - reload() { - try { - this.view?.webContents.reload(); - } catch {} - } - - openDevTools() { - try { - this.view?.webContents.openDevTools({ mode: 'detach' }); - } catch {} - } - - isVisible(): boolean { - return this.visible; - } -} - -export const browserViewService = new BrowserViewService(); diff --git a/src/main/services/fs/FileSystemFactory.ts b/src/main/services/fs/FileSystemFactory.ts deleted file mode 100644 index 581619a67..000000000 --- a/src/main/services/fs/FileSystemFactory.ts +++ /dev/null @@ -1,101 +0,0 @@ -/** - * FileSystem Factory - * Creates appropriate IFileSystem implementation based on project configuration - */ - -import { IFileSystem } from './types'; -import { LocalFileSystem } from './LocalFileSystem'; -import { RemoteFileSystem } from './RemoteFileSystem'; -import { SshService } from '../ssh/SshService'; -import { ProjectRow } from '../../db/schema'; - -interface FileSystemCache { - [key: string]: IFileSystem; -} - -export class FileSystemFactory { - private static cache: FileSystemCache = {}; - private static sshService: SshService | null = null; - - /** - * Initialize the factory with SSH 
service - */ - static initialize(sshService: SshService): void { - this.sshService = sshService; - } - - /** - * Create filesystem for a project - */ - static create(project: ProjectRow): IFileSystem { - const cacheKey = project.id; - - // Return cached instance if available - if (this.cache[cacheKey]) { - return this.cache[cacheKey]; - } - - let fs: IFileSystem; - - if (project.isRemote && project.sshConnectionId) { - if (!this.sshService) { - throw new Error('SSH service not initialized'); - } - - if (!project.remotePath) { - throw new Error('Remote project missing remotePath'); - } - - fs = new RemoteFileSystem(this.sshService, project.sshConnectionId, project.remotePath); - } else { - fs = new LocalFileSystem(project.path); - } - - // Cache the instance - this.cache[cacheKey] = fs; - return fs; - } - - /** - * Get filesystem for a project (alias for create) - */ - static get(project: ProjectRow): IFileSystem { - return this.create(project); - } - - /** - * Clear cache for a specific project - */ - static clearCache(projectId: string): void { - delete this.cache[projectId]; - } - - /** - * Clear all cached filesystems - */ - static clearAllCache(): void { - this.cache = {}; - } - - /** - * Check if project uses remote filesystem - */ - static isRemote(project: ProjectRow): boolean { - return !!project.isRemote; - } - - /** - * Get connection ID for remote project - */ - static getConnectionId(project: ProjectRow): string | null { - return project.sshConnectionId || null; - } - - /** - * Dispose factory and clear all resources - */ - static dispose(): void { - this.clearAllCache(); - this.sshService = null; - } -} diff --git a/src/main/services/fs/index.ts b/src/main/services/fs/index.ts deleted file mode 100644 index 11ab2db6d..000000000 --- a/src/main/services/fs/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Filesystem Abstraction Layer - * - * Provides unified interface for local and remote (SSH/SFTP) filesystem operations. 
- * This module is part of Wave 1 (interfaces and structure). - * - * Wave 2 will implement: - * - LocalFileSystem: wrapping existing fsIpc functionality - * - RemoteFileSystem: SFTP-based implementation using ssh2 - * - FileSystemFactory: factory pattern for creating appropriate FS instances - * - * Usage: - * import { FileSystemFactory, IFileSystem } from './services/fs'; - * - * const fs: IFileSystem = FileSystemFactory.create(project); - * const result = await fs.read('src/index.ts'); - */ - -// Types and interfaces -export type { - IFileSystem, - FileEntry, - ListOptions, - FileListResult, - ReadResult, - WriteResult, - SearchOptions, - SearchResult, - SearchMatch, -} from './types'; - -export { FileSystemError, FileSystemErrorCodes } from './types'; - -// Implementations (stubs in Wave 1) -export { LocalFileSystem } from './LocalFileSystem'; -export { RemoteFileSystem } from './RemoteFileSystem'; -export { FileSystemFactory } from './FileSystemFactory'; diff --git a/src/main/services/fsIpc.ts b/src/main/services/fsIpc.ts deleted file mode 100644 index 71b26107c..000000000 --- a/src/main/services/fsIpc.ts +++ /dev/null @@ -1,882 +0,0 @@ -import { ipcMain, shell } from 'electron'; -import * as fs from 'fs'; -import * as path from 'path'; -import { Worker } from 'worker_threads'; -import { FsListWorkerResponse } from '../types/fsListWorker'; -import { DEFAULT_IGNORES } from '../utils/fsIgnores'; -import { safeStat } from '../utils/safeStat'; -import { sshService } from './ssh/SshService'; -import { RemoteFileSystem } from './fs/RemoteFileSystem'; -import { GitIgnoreParser } from '../utils/gitIgnore'; - -const DEFAULT_EMDASH_CONFIG = `{ - "preservePatterns": [ - ".env", - ".env.keys", - ".env.local", - ".env.*.local", - ".envrc", - "docker-compose.override.yml" - ], - "scripts": { - "setup": "", - "run": "", - "teardown": "" - } -} -`; - -type RemoteParams = { - connectionId?: string; - remotePath?: string; -}; - -function isRemoteRequest(args: RemoteParams): 
args is { connectionId: string; remotePath: string } { - return Boolean(args.connectionId && args.remotePath); -} - -function createRemoteFs(args: { connectionId: string; remotePath: string }): RemoteFileSystem { - return new RemoteFileSystem(sshService, args.connectionId, args.remotePath); -} - -type ListArgs = { - root: string; - includeDirs?: boolean; - recursive?: boolean; - maxEntries?: number; - timeBudgetMs?: number; -} & RemoteParams; - -type ListWorkerState = { - worker: Worker; - requestId: number; - canceled: boolean; -}; - -const listWorkersBySender = new Map(); -const DEFAULT_TIME_BUDGET_MS = 2000; -const MIN_TIME_BUDGET_MS = 250; -const MAX_TIME_BUDGET_MS = 10000; -const MAX_FILES_TO_SEARCH = 10000; -const DEFAULT_BATCH_SIZE = 250; - -// Centralized configuration/constants for attachments -const ALLOWED_IMAGE_EXTENSIONS = new Set([ - '.png', - '.jpg', - '.jpeg', - '.gif', - '.webp', - '.bmp', - '.svg', -]); -const DEFAULT_ATTACHMENTS_SUBDIR = 'attachments' as const; - -export function registerFsIpc(): void { - function emitPlanEvent(payload: any) { - try { - const { BrowserWindow } = require('electron'); - for (const win of BrowserWindow.getAllWindows()) { - try { - win.webContents.send('plan:event', payload); - } catch {} - } - } catch {} - } - ipcMain.handle('fs:list', async (_event, args: ListArgs) => { - try { - // --- Remote path: delegate to RemoteFileSystem --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - const maxEntries = Math.min(Math.max(args.maxEntries ?? 5000, 100), MAX_FILES_TO_SEARCH); - const result = await rfs.listRecursive({ - includeDirs: args.includeDirs ?? true, - maxEntries, - }); - return { - success: true, - items: result.items, - truncated: result.truncated, - reason: result.truncated ? 
'maxEntries' : undefined, - }; - } catch (error) { - console.error('fs:list remote failed:', error); - return { success: false, error: 'Failed to list remote files' }; - } - } - - // --- Local path --- - const root = args.root; - const includeDirs = args.includeDirs ?? true; - const maxEntries = Math.min(Math.max(args.maxEntries ?? 5000, 100), MAX_FILES_TO_SEARCH); - const timeBudgetMs = Math.min( - Math.max(args.timeBudgetMs ?? DEFAULT_TIME_BUDGET_MS, MIN_TIME_BUDGET_MS), - MAX_TIME_BUDGET_MS - ); - if (!root || !fs.existsSync(root)) { - return { success: false, error: 'Invalid root path' }; - } - - const senderId = _event.sender.id; - const prev = listWorkersBySender.get(senderId); - if (prev) { - prev.canceled = true; - prev.worker.terminate().catch(() => {}); - } - - const requestId = (prev?.requestId ?? 0) + 1; - const workerPath = path.join(__dirname, '..', 'workers', 'fsListWorker.js'); - const worker = new Worker(workerPath); - const state: ListWorkerState = { worker, requestId, canceled: false }; - listWorkersBySender.set(senderId, state); - - const result = await new Promise((resolve, reject) => { - const cleanup = () => { - worker.removeAllListeners('message'); - worker.removeAllListeners('error'); - worker.removeAllListeners('exit'); - }; - - worker.once('message', (message) => { - cleanup(); - worker.terminate().catch(() => {}); - resolve(message as FsListWorkerResponse); - }); - worker.once('error', (error) => { - cleanup(); - reject(error); - }); - worker.once('exit', (code) => { - cleanup(); - if (state.canceled) { - resolve({ taskId: requestId, ok: false, error: 'Canceled' }); - return; - } - if (code === 0) { - resolve({ - taskId: requestId, - ok: false, - error: 'Worker exited before responding', - }); - return; - } - reject(new Error(`fs:list worker exited with code ${code}`)); - }); - - worker.postMessage({ - taskId: requestId, - root, - includeDirs, - recursive: args.recursive !== false, // Default to true if not specified - maxEntries, - 
timeBudgetMs, - batchSize: DEFAULT_BATCH_SIZE, - }); - }); - - const latest = listWorkersBySender.get(senderId); - if (!latest || latest.requestId !== requestId || state.canceled) { - return { success: true, canceled: true }; - } - - listWorkersBySender.delete(senderId); - - if (!result.ok) { - if (result.error === 'Canceled') return { success: true, canceled: true }; - return { success: false, error: result.error }; - } - - return { - success: true, - items: result.items, - truncated: result.truncated, - reason: result.reason, - durationMs: result.durationMs, - }; - } catch (error) { - console.error('fs:list failed:', error); - return { success: false, error: 'Failed to list files' }; - } - }); - - ipcMain.handle( - 'fs:read', - async (_event, args: { root: string; relPath: string; maxBytes?: number } & RemoteParams) => { - try { - // --- Remote path --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - const maxBytes = Math.min(Math.max(args.maxBytes ?? 200 * 1024, 1024), 5 * 1024 * 1024); - const result = await rfs.read(args.relPath, maxBytes); - return { - success: true, - path: args.relPath, - size: result.totalSize, - truncated: result.truncated, - content: result.content, - }; - } catch (error) { - console.error('fs:read remote failed:', error); - return { success: false, error: 'Failed to read remote file' }; - } - } - - // --- Local path --- - const { root, relPath } = args; - const maxBytes = Math.min(Math.max(args.maxBytes ?? 
200 * 1024, 1024), 5 * 1024 * 1024); - if (!root || !fs.existsSync(root)) return { success: false, error: 'Invalid root path' }; - if (!relPath) return { success: false, error: 'Invalid relPath' }; - - // Resolve and ensure within root - const abs = path.resolve(root, relPath); - const normRoot = path.resolve(root) + path.sep; - if (!abs.startsWith(normRoot)) return { success: false, error: 'Path escapes root' }; - - const st = safeStat(abs); - if (!st) return { success: false, error: 'Not found' }; - if (st.isDirectory()) return { success: false, error: 'Is a directory' }; - - const size = st.size; - let truncated = false; - let content: string; - const fd = fs.openSync(abs, 'r'); - try { - const bytesToRead = Math.min(size, maxBytes); - const buf = Buffer.alloc(bytesToRead); - fs.readSync(fd, buf, 0, bytesToRead, 0); - content = buf.toString('utf8'); - truncated = size > bytesToRead; - } finally { - fs.closeSync(fd); - } - - return { success: true, path: relPath, size, truncated, content }; - } catch (error) { - console.error('fs:read failed:', error); - return { success: false, error: 'Failed to read file' }; - } - } - ); - - // Read image file as base64 - ipcMain.handle( - 'fs:read-image', - async (_event, args: { root: string; relPath: string } & RemoteParams) => { - try { - // --- Remote path --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - const result = await rfs.readImage(args.relPath); - return result; - } catch (error) { - console.error('fs:read-image remote failed:', error); - return { success: false, error: 'Failed to read remote image' }; - } - } - - // --- Local path --- - const { root, relPath } = args; - if (!root || !fs.existsSync(root)) return { success: false, error: 'Invalid root path' }; - if (!relPath) return { success: false, error: 'Invalid relPath' }; - - // Resolve and ensure within root - const abs = path.resolve(root, relPath); - const normRoot = path.resolve(root) + path.sep; - if 
(!abs.startsWith(normRoot)) return { success: false, error: 'Path escapes root' }; - - const st = safeStat(abs); - if (!st) return { success: false, error: 'Not found' }; - if (st.isDirectory()) return { success: false, error: 'Is a directory' }; - - // Check if it's an allowed image type - const ext = path.extname(relPath).toLowerCase(); - if (!ALLOWED_IMAGE_EXTENSIONS.has(ext)) { - return { success: false, error: 'Not an image file' }; - } - - // Read file as base64 - const buffer = fs.readFileSync(abs); - const base64 = buffer.toString('base64'); - - // Determine MIME type - let mimeType = 'image/'; - switch (ext) { - case '.svg': - mimeType += 'svg+xml'; - break; - case '.jpg': - case '.jpeg': - mimeType += 'jpeg'; - break; - default: - mimeType += ext.substring(1); // Remove the dot - } - - return { - success: true, - dataUrl: `data:${mimeType};base64,${base64}`, - mimeType, - size: st.size, - }; - } catch (error) { - console.error('fs:read-image failed:', error); - return { success: false, error: 'Failed to read image' }; - } - } - ); - - // Constants for search functionality - const SEARCH_PREVIEW_CONTEXT_LENGTH = 30; - const DEFAULT_MAX_SEARCH_RESULTS = 10000; // Increased for comprehensive search results - const MAX_FILE_SIZE = 2 * 1024 * 1024; // 2MB max file size - const MAX_SEARCH_FILES = 20000; // Increased to search more files - const BINARY_CHECK_BYTES = 512; // Check first 512 bytes for binary content (faster) - - // Extended ignore patterns for performance - const SEARCH_IGNORES = new Set([ - ...DEFAULT_IGNORES, - '.vscode', - '.idea', - 'coverage', - '__pycache__', - '.pytest_cache', - 'venv', - '.venv', - 'target', - '.terraform', - '.serverless', - 'vendor', - 'bower_components', - '.turbo', - 'worktrees', - '.worktrees', - ]); - - // Binary file extensions to skip (blacklist approach) - const BINARY_EXTENSIONS = new Set([ - '.png', - '.jpg', - '.jpeg', - '.gif', - '.bmp', - '.ico', - '.svg', - '.pdf', - '.zip', - '.tar', - '.gz', - '.rar', - 
'.7z', - '.exe', - '.dll', - '.so', - '.dylib', - '.a', - '.o', - '.mp3', - '.mp4', - '.avi', - '.mov', - '.wav', - '.flac', - '.ttf', - '.otf', - '.woff', - '.woff2', - '.eot', - '.pyc', - '.pyo', - '.class', - '.jar', - '.war', - '.node', - '.wasm', - '.map', - '.DS_Store', - '.lock', - ]); - - // Check if file is likely binary - const isBinaryFile = (filePath: string): boolean => { - // First check extension - const ext = path.extname(filePath).toLowerCase(); - if (BINARY_EXTENSIONS.has(ext)) return true; - - try { - const fd = fs.openSync(filePath, 'r'); - const buffer = Buffer.alloc(BINARY_CHECK_BYTES); - const bytesRead = fs.readSync(fd, buffer, 0, BINARY_CHECK_BYTES, 0); - fs.closeSync(fd); - - // Check for null bytes (common in binary files) - for (let i = 0; i < bytesRead; i++) { - if (buffer[i] === 0) return true; - } - - // Check if mostly non-printable characters - let nonPrintable = 0; - for (let i = 0; i < Math.min(bytesRead, 512); i++) { - const byte = buffer[i]; - if (byte < 32 && byte !== 9 && byte !== 10 && byte !== 13) { - nonPrintable++; - } - } - - // If more than 30% non-printable, likely binary - return nonPrintable > bytesRead * 0.3; - } catch { - return false; // Assume text if we can't read it, let the search handle the error - } - }; - - // Search for content in files - OPTIMIZED VERSION - ipcMain.handle( - 'fs:searchContent', - async ( - _event, - args: { - root: string; - query: string; - options?: { caseSensitive?: boolean; maxResults?: number; fileExtensions?: string[] }; - } & RemoteParams - ) => { - try { - // --- Remote path: delegate to RemoteFileSystem.search --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - const { query, options = {} } = args; - const { - caseSensitive = false, - maxResults = DEFAULT_MAX_SEARCH_RESULTS, - fileExtensions = [], - } = options; - - const searchResult = await rfs.search(query, { - caseSensitive, - maxResults, - fileExtensions, - }); - - // Group flat SearchMatch[] by 
file to match the renderer's expected format - const groupedMap = new Map< - string, - Array<{ line: number; column: number; text: string; preview: string }> - >(); - for (const match of searchResult.matches) { - const file = match.filePath; - if (!groupedMap.has(file)) { - groupedMap.set(file, []); - } - groupedMap.get(file)!.push({ - line: match.line, - column: match.column, - text: match.content, - preview: match.preview || match.content, - }); - } - - const results = Array.from(groupedMap.entries()).map(([file, matches]) => ({ - file, - matches, - })); - - return { success: true, results }; - } catch (error) { - console.error('fs:searchContent remote failed:', error); - return { success: false, error: 'Failed to search remote files' }; - } - } - - // --- Local path --- - const { root, query, options = {} } = args; - const { - caseSensitive = false, - maxResults = DEFAULT_MAX_SEARCH_RESULTS, - fileExtensions = [], - } = options; - - if (!root || !fs.existsSync(root)) return { success: false, error: 'Invalid root path' }; - if (!query || query.length < 2) - return { success: false, error: 'Query too short (min 2 chars)' }; - - let gitIgnore: GitIgnoreParser | undefined; - try { - const gitIgnorePath = path.join(root, '.gitignore'); - const content = await fs.promises.readFile(gitIgnorePath, 'utf8'); - gitIgnore = new GitIgnoreParser(content); - } catch { - // Ignore error reading .gitignore - } - - const results: Array<{ - file: string; - matches: Array<{ - line: number; - column: number; - text: string; - preview: string; - }>; - }> = []; - - let totalMatches = 0; - let filesSearched = 0; - const searchQuery = caseSensitive ? 
query : query.toLowerCase(); - - // Helper function to check if file should be searched - const shouldSearchFile = (filePath: string, stat: fs.Stats): boolean => { - // Skip large files - if (stat.size > MAX_FILE_SIZE) return false; - - const ext = path.extname(filePath).toLowerCase(); - - // Skip known binary extensions (but allow files without extensions) - if (ext && BINARY_EXTENSIONS.has(ext)) return false; - - // If user specified extensions, use those - if (fileExtensions.length > 0) { - return fileExtensions.some((e) => { - const normalizedExt = e.toLowerCase().startsWith('.') - ? e.toLowerCase() - : '.' + e.toLowerCase(); - return ext === normalizedExt; - }); - } - - // Otherwise search all non-binary files - return true; - }; - - // Optimized async file search - const searchInFile = async (filePath: string): Promise => { - if (totalMatches >= maxResults || filesSearched >= MAX_SEARCH_FILES) return; - - try { - filesSearched++; - - // Check if binary file first - if (isBinaryFile(filePath)) return; - - // Read file in chunks for better memory usage - const content = await fs.promises.readFile(filePath, 'utf8'); - - // Quick check if query exists at all (much faster) - const contentToSearch = caseSensitive ? content : content.toLowerCase(); - if (!contentToSearch.includes(searchQuery)) return; - - // Only split lines if we found something - const lines = content.split('\n'); - const fileMatches: (typeof results)[0]['matches'] = []; - - for (let lineNum = 0; lineNum < lines.length && totalMatches < maxResults; lineNum++) { - const line = lines[lineNum]; - const searchLine = caseSensitive ? 
line : line.toLowerCase(); - - if (!searchLine.includes(searchQuery)) continue; // Quick skip - - let columnIndex = searchLine.indexOf(searchQuery); - while (columnIndex !== -1 && totalMatches < maxResults) { - // Create preview with context - const previewStart = Math.max(0, columnIndex - SEARCH_PREVIEW_CONTEXT_LENGTH); - const previewEnd = Math.min( - line.length, - columnIndex + query.length + SEARCH_PREVIEW_CONTEXT_LENGTH - ); - let preview = line.substring(previewStart, previewEnd).trim(); - - // Add ellipsis if truncated - if (previewStart > 0) preview = '...' + preview; - if (previewEnd < line.length) preview = preview + '...'; - - fileMatches.push({ - line: lineNum + 1, - column: columnIndex + 1, - text: line.substring(columnIndex, columnIndex + query.length), - preview: preview, - }); - - totalMatches++; - columnIndex = searchLine.indexOf(searchQuery, columnIndex + 1); - } - } - - if (fileMatches.length > 0) { - const relativePath = path.relative(root, filePath); - results.push({ - file: relativePath, - matches: fileMatches, - }); - } - } catch (err) { - // Skip files we can't read - } - }; - - // Collect files first, then search in parallel - const collectFiles = async (dirPath: string, files: string[] = []): Promise => { - if (files.length >= MAX_SEARCH_FILES) return files; - - try { - const entries = await fs.promises.readdir(dirPath, { withFileTypes: true }); - - for (const entry of entries) { - if (files.length >= MAX_SEARCH_FILES) break; - - const fullPath = path.join(dirPath, entry.name); - - if (entry.isDirectory()) { - const relPath = path.relative(root, fullPath); - if (gitIgnore && (gitIgnore.ignores(relPath) || gitIgnore.ignores(relPath + '/'))) { - continue; - } - - if (!SEARCH_IGNORES.has(entry.name)) { - await collectFiles(fullPath, files); - } - } else if (entry.isFile()) { - try { - const stat = await fs.promises.stat(fullPath); - if (shouldSearchFile(fullPath, stat)) { - files.push(fullPath); - } - } catch {} - } - } - } catch {} - - 
return files; - }; - - // Collect files and search them in batches - const files = await collectFiles(root); - - // Process files in parallel batches for speed - const BATCH_SIZE = 10; - for (let i = 0; i < files.length && totalMatches < maxResults; i += BATCH_SIZE) { - const batch = files.slice(i, i + BATCH_SIZE); - await Promise.all(batch.map((file) => searchInFile(file))); - } - - return { success: true, results }; - } catch (error) { - console.error('fs:searchContent failed:', error); - return { success: false, error: 'Failed to search files' }; - } - } - ); - - // Save an attachment (e.g., image) into a task-managed folder - ipcMain.handle( - 'fs:save-attachment', - async (_event, args: { taskPath: string; srcPath: string; subdir?: string }) => { - try { - const { taskPath, srcPath } = args; - if (!taskPath || !fs.existsSync(taskPath)) - return { success: false, error: 'Invalid taskPath' }; - if (!srcPath || !fs.existsSync(srcPath)) - return { success: false, error: 'Invalid srcPath' }; - - const ext = path.extname(srcPath).toLowerCase(); - if (!ALLOWED_IMAGE_EXTENSIONS.has(ext)) { - return { success: false, error: 'Unsupported attachment type' }; - } - - const baseDir = path.join(taskPath, '.emdash', args.subdir || DEFAULT_ATTACHMENTS_SUBDIR); - fs.mkdirSync(baseDir, { recursive: true }); - - const baseName = path.basename(srcPath); - let destName = baseName; - let counter = 1; - let destAbs = path.join(baseDir, destName); - while (fs.existsSync(destAbs)) { - const name = path.basename(baseName, ext); - destName = `${name}-${counter}${ext}`; - destAbs = path.join(baseDir, destName); - counter++; - } - - fs.copyFileSync(srcPath, destAbs); - - const relFromTask = path.relative(taskPath, destAbs); - return { - success: true, - absPath: destAbs, - relPath: relFromTask, - fileName: destName, - }; - } catch (error) { - console.error('fs:save-attachment failed:', error); - return { success: false, error: 'Failed to save attachment' }; - } - } - ); - - // Write a 
file relative to a root (creates parent directories) - ipcMain.handle( - 'fs:write', - async ( - _event, - args: { root: string; relPath: string; content: string; mkdirs?: boolean } & RemoteParams - ) => { - try { - // --- Remote path --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - const result = await rfs.write(args.relPath, args.content); - return { success: result.success }; - } catch (error) { - console.error('fs:write remote failed:', error); - return { success: false, error: 'Failed to write remote file' }; - } - } - - // --- Local path --- - const { root, relPath, content, mkdirs = true } = args; - if (!root || !fs.existsSync(root)) return { success: false, error: 'Invalid root path' }; - if (!relPath) return { success: false, error: 'Invalid relPath' }; - - const abs = path.resolve(root, relPath); - const normRoot = path.resolve(root) + path.sep; - if (!abs.startsWith(normRoot)) return { success: false, error: 'Path escapes root' }; - - const dir = path.dirname(abs); - if (mkdirs) fs.mkdirSync(dir, { recursive: true }); - try { - fs.writeFileSync(abs, content, 'utf8'); - } catch (e: any) { - // Surface permission issues to renderer (Plan Mode lock likely) - if ((e?.code || '').toUpperCase() === 'EACCES') { - emitPlanEvent({ - type: 'write_blocked', - root, - relPath, - code: e?.code, - message: e?.message || String(e), - }); - } - throw e; - } - return { success: true }; - } catch (error) { - console.error('fs:write failed:', error); - return { success: false, error: 'Failed to write file' }; - } - } - ); - - // Remove a file relative to a root - ipcMain.handle( - 'fs:remove', - async (_event, args: { root: string; relPath: string } & RemoteParams) => { - try { - // --- Remote path --- - if (isRemoteRequest(args)) { - try { - const rfs = createRemoteFs(args); - return await rfs.remove(args.relPath); - } catch (error) { - console.error('fs:remove remote failed:', error); - return { success: false, error: 'Failed to remove 
remote file' }; - } - } - - // --- Local path --- - const { root, relPath } = args; - if (!root || !fs.existsSync(root)) return { success: false, error: 'Invalid root path' }; - if (!relPath) return { success: false, error: 'Invalid relPath' }; - const abs = path.resolve(root, relPath); - const normRoot = path.resolve(root) + path.sep; - if (!abs.startsWith(normRoot)) return { success: false, error: 'Path escapes root' }; - if (!fs.existsSync(abs)) return { success: true }; - const st = safeStat(abs); - if (st && st.isDirectory()) return { success: false, error: 'Is a directory' }; - try { - fs.unlinkSync(abs); - } catch (e: any) { - // Try to relax permissions and retry (useful after a plan lock) - try { - const dir = path.dirname(abs); - const dst = safeStat(dir); - if (dst) fs.chmodSync(dir, (dst.mode & 0o7777) | 0o222); - } catch {} - try { - const fst = safeStat(abs); - if (fst) fs.chmodSync(abs, (fst.mode & 0o7777) | 0o222); - } catch {} - try { - fs.unlinkSync(abs); - } catch (e2: any) { - if ((e2?.code || '').toUpperCase() === 'EACCES') { - emitPlanEvent({ - type: 'remove_blocked', - root, - relPath, - code: e2?.code, - message: e2?.message || String(e2), - }); - } - throw e2; - } - } - return { success: true }; - } catch (error) { - console.error('fs:remove failed:', error); - return { success: false, error: 'Failed to remove file' }; - } - } - ); - - // Get .emdash.json config file content (create with defaults if missing) - ipcMain.handle('fs:getProjectConfig', async (_event, args: { projectPath: string }) => { - try { - const { projectPath } = args; - if (!projectPath || !fs.existsSync(projectPath)) { - return { success: false, error: 'Invalid project path' }; - } - - const configPath = path.join(projectPath, '.emdash.json'); - - // Create with defaults if missing - if (!fs.existsSync(configPath)) { - fs.writeFileSync(configPath, DEFAULT_EMDASH_CONFIG, 'utf8'); - } - - const content = fs.readFileSync(configPath, 'utf8'); - return { success: true, path: 
configPath, content }; - } catch (error) { - console.error('fs:getProjectConfig failed:', error); - return { success: false, error: 'Failed to read config file' }; - } - }); - - // Save .emdash.json config file content - ipcMain.handle( - 'fs:saveProjectConfig', - async (_event, args: { projectPath: string; content: string }) => { - try { - const { projectPath, content } = args; - if (!projectPath || !fs.existsSync(projectPath)) { - return { success: false, error: 'Invalid project path' }; - } - - // Validate JSON before saving - try { - JSON.parse(content); - } catch { - return { success: false, error: 'Invalid JSON format' }; - } - - const configPath = path.join(projectPath, '.emdash.json'); - fs.writeFileSync(configPath, content, 'utf8'); - return { success: true, path: configPath }; - } catch (error) { - console.error('fs:saveProjectConfig failed:', error); - return { success: false, error: 'Failed to save config file' }; - } - } - ); -} diff --git a/src/main/services/hostPreviewService.ts b/src/main/services/hostPreviewService.ts deleted file mode 100644 index adf5c9733..000000000 --- a/src/main/services/hostPreviewService.ts +++ /dev/null @@ -1,490 +0,0 @@ -import { EventEmitter } from 'node:events'; -import { spawn, ChildProcessWithoutNullStreams } from 'node:child_process'; -import net from 'node:net'; -import fs from 'node:fs'; -import path from 'node:path'; -import { log } from '../lib/logger'; - -export type HostPreviewEvent = { - type: 'url' | 'setup' | 'exit'; - taskId: string; - url?: string; - status?: 'starting' | 'line' | 'done' | 'error'; - line?: string; -}; - -function detectPackageManager(dir: string): 'pnpm' | 'yarn' | 'npm' { - try { - if (fs.existsSync(path.join(dir, 'pnpm-lock.yaml'))) return 'pnpm'; - if (fs.existsSync(path.join(dir, 'yarn.lock'))) return 'yarn'; - return 'npm'; - } catch { - return 'npm'; - } -} - -function normalizeUrl(u: string): string { - try { - const re = 
/(https?:\/\/(?:localhost|127\.0\.0\.1|0\.0\.0\.0|\[::1\]):\d{2,5}(?:\/\S*)?)/i; - const m = u.match(re); - if (!m) return ''; - const url = new URL(m[1].replace('0.0.0.0', 'localhost')); - url.hostname = 'localhost'; - return url.toString(); - } catch { - return ''; - } -} - -class HostPreviewService extends EventEmitter { - private procs = new Map(); - private procCwds = new Map(); // Track cwd for each taskId - - async setup(taskId: string, taskPath: string): Promise<{ ok: boolean; error?: string }> { - const cwd = path.resolve(taskPath); - const pm = detectPackageManager(cwd); - const cmd = pm; - // Prefer clean install for npm when lockfile exists - const hasPkgLock = fs.existsSync(path.join(cwd, 'package-lock.json')); - const args = pm === 'npm' ? (hasPkgLock ? ['ci'] : ['install']) : ['install']; - try { - const child = spawn(cmd, args, { - cwd, - shell: true, - env: { ...process.env, BROWSER: 'none' }, - }); - this.emit('event', { type: 'setup', taskId, status: 'starting' } as HostPreviewEvent); - const onData = (buf: Buffer) => { - const line = buf.toString(); - this.emit('event', { - type: 'setup', - taskId, - status: 'line', - line, - } as HostPreviewEvent); - }; - child.stdout.on('data', onData); - child.stderr.on('data', onData); - await new Promise((resolve, reject) => { - child.on('exit', (code) => { - if (code === 0) resolve(); - else reject(new Error(`install exited with ${code}`)); - }); - child.on('error', reject); - }); - this.emit('event', { type: 'setup', taskId, status: 'done' } as HostPreviewEvent); - return { ok: true }; - } catch (e: any) { - this.emit('event', { - type: 'setup', - taskId, - status: 'error', - line: e?.message || String(e), - } as HostPreviewEvent); - return { ok: false, error: e?.message || String(e) }; - } - } - - private async pickAvailablePort(preferred: number[], host = '127.0.0.1'): Promise { - const tryPort = (port: number) => - new Promise((resolve) => { - const server = net.createServer(); - server.once('error', 
() => resolve(false)); - server.listen(port, host, () => { - try { - server.close(() => resolve(true)); - } catch { - resolve(false); - } - }); - }); - for (const p of preferred) { - if (await tryPort(p)) return p; - } - const ephemeral = await new Promise((resolve) => { - const server = net.createServer(); - server.listen(0, host, () => { - const addr = server.address(); - const port = typeof addr === 'object' && addr ? addr.port : 0; - try { - server.close(() => resolve(port || 5173)); - } catch { - resolve(5173); - } - }); - server.once('error', () => resolve(5173)); - }); - return ephemeral || 5173; - } - - async start( - taskId: string, - taskPath: string, - opts?: { script?: string; parentProjectPath?: string } - ): Promise<{ ok: boolean; error?: string }> { - const cwd = path.resolve(taskPath); - - // Log the resolved path to help debug worktree issues - log.info?.('[hostPreview] start', { - taskId, - taskPath, - resolvedCwd: cwd, - cwdExists: fs.existsSync(cwd), - hasPackageJson: fs.existsSync(path.join(cwd, 'package.json')), - }); - - // Check if process already exists for this taskId - const existingProc = this.procs.get(taskId); - const existingCwd = this.procCwds.get(taskId); - - // If process exists, verify it's running from the correct directory - if (existingProc) { - // Check if process is still running - try { - // On Unix, signal 0 checks if process exists - existingProc.kill(0); - // Process is still running - check if cwd matches - if (existingCwd && path.resolve(existingCwd) === cwd) { - log.info?.('[hostPreview] reusing existing process', { - taskId, - cwd: existingCwd, - }); - return { ok: true }; - } else { - // Process exists but is running from wrong directory - stop it - log.info?.('[hostPreview] stopping process with wrong cwd', { - taskId, - oldCwd: existingCwd, - newCwd: cwd, - }); - try { - existingProc.kill(); - } catch {} - this.procs.delete(taskId); - this.procCwds.delete(taskId); - } - } catch { - // Process has exited - clean up 
- this.procs.delete(taskId); - this.procCwds.delete(taskId); - } - } - - const pm = detectPackageManager(cwd); - // Preflight: if the task lacks node_modules but the parent has it, try linking - try { - const parent = (opts?.parentProjectPath || '').trim(); - if (parent) { - const wsNm = path.join(cwd, 'node_modules'); - const parentNm = path.join(parent, 'node_modules'); - const wsExists = fs.existsSync(wsNm); - const parentExists = fs.existsSync(parentNm); - if (!wsExists && parentExists) { - try { - const linkType = process.platform === 'win32' ? 'junction' : 'dir'; - fs.symlinkSync(parentNm, wsNm, linkType as any); - log.info?.('[hostPreview] linked node_modules', { - taskId, - wsNm, - parentNm, - linkType, - }); - } catch (e) { - log.warn?.( - '[hostPreview] failed to link node_modules; will rely on install if needed', - e - ); - } - } - } - } catch {} - const pkgPath = path.join(cwd, 'package.json'); - let script = 'dev'; - if (opts?.script && typeof opts.script === 'string' && opts.script.trim()) { - script = opts.script.trim(); - } else { - try { - const raw = fs.readFileSync(pkgPath, 'utf8'); - const pkg = JSON.parse(raw); - const scripts = (pkg && pkg.scripts) || {}; - const prefs = ['dev', 'start', 'serve', 'preview']; - for (const k of prefs) { - if (typeof scripts[k] === 'string') { - script = k; - break; - } - } - } catch {} - } - const cmd = pm; - const args: string[] = pm === 'npm' ? ['run', script] : [script]; - const env = { ...process.env } as Record; - - // Auto-install if package.json exists and node_modules is missing - try { - const hasPkg = fs.existsSync(pkgPath); - const hasNm = fs.existsSync(path.join(cwd, 'node_modules')); - if (hasPkg && !hasNm) { - const hasLock = fs.existsSync(path.join(cwd, 'package-lock.json')); - const installArgs = pm === 'npm' ? (hasLock ? 
['ci'] : ['install']) : ['install']; - const inst = spawn(pm, installArgs, { - cwd, - shell: true, - env: { ...process.env, BROWSER: 'none' }, - }); - this.emit('event', { type: 'setup', taskId, status: 'starting' } as HostPreviewEvent); - const onData = (buf: Buffer) => { - try { - this.emit('event', { - type: 'setup', - taskId, - status: 'line', - line: buf.toString(), - } as HostPreviewEvent); - } catch {} - }; - inst.stdout.on('data', onData); - inst.stderr.on('data', onData); - await new Promise((resolve, reject) => { - inst.on('exit', (code) => { - code === 0 ? resolve() : reject(new Error(`install exited with ${code}`)); - }); - inst.on('error', reject); - }); - this.emit('event', { type: 'setup', taskId, status: 'done' } as HostPreviewEvent); - } - } catch {} - - // Choose a free port (avoid 3000) - const preferred = [5173, 5174, 3001, 3002, 8080, 4200, 5500, 7000]; - let forcedPort = await this.pickAvailablePort(preferred); - if (!env.PORT) env.PORT = String(forcedPort); - if (!env.VITE_PORT) env.VITE_PORT = env.PORT; - if (!env.BROWSER) env.BROWSER = 'none'; - - // Add CLI flags for common frameworks based on scripts and dependencies - try { - const raw = fs.readFileSync(pkgPath, 'utf8'); - const pkg = JSON.parse(raw); - const scripts = (pkg && pkg.scripts) || {}; - const deps = { ...(pkg.dependencies || {}), ...(pkg.devDependencies || {}) } as Record< - string, - string - >; - const scriptCmd = String(scripts[script] || '').toLowerCase(); - const looksLikeNext = scriptCmd.includes('next') || 'next' in deps; - const looksLikeVite = scriptCmd.includes('vite') || 'vite' in deps; - const looksLikeWebpack = - scriptCmd.includes('webpack-dev-server') || 'webpack-dev-server' in deps; - const looksLikeAngular = - /(^|\s)ng(\s|$)/.test(scriptCmd) || scriptCmd.includes('angular') || '@angular/cli' in deps; - const extra: string[] = []; - if (looksLikeNext) extra.push('-p', String(forcedPort)); - else if (looksLikeVite || looksLikeWebpack || looksLikeAngular) - 
extra.push('--port', String(forcedPort)); - if (extra.length) { - if (pm === 'npm') args.push('--', ...extra); - else args.push(...extra); - } - log.info?.('[hostPreview] start', { - taskId, - cwd, - pm, - cmd, - args, - script, - port: forcedPort, - }); - } catch { - log.info?.('[hostPreview] start', { - taskId, - cwd, - pm, - cmd, - args, - script, - port: forcedPort, - }); - } - - const tryStart = async (maxRetries = 3): Promise<{ ok: boolean; error?: string }> => { - try { - const child = spawn(cmd, args, { cwd, env, shell: true }); - this.procs.set(taskId, child); - this.procCwds.set(taskId, cwd); // Store the cwd for this process - - let urlEmitted = false; - let sawAddrInUse = false; - let candidateUrl: string | null = null; - const startedAt = Date.now(); - - const emitSetupLine = (line: string) => { - try { - this.emit('event', { - type: 'setup', - taskId, - status: 'line', - line, - } as HostPreviewEvent); - } catch {} - }; - - // Helper to probe and emit URL only when server is actually reachable - const probeAndEmitUrl = async (urlToProbe: string) => { - if (urlEmitted) return; - try { - const parsed = new URL(urlToProbe); - const host = parsed.hostname || 'localhost'; - const port = Number(parsed.port || 0); - if (!port) return; - - // Quick TCP probe to verify server is ready - const socket = net.createConnection({ host, port }, () => { - try { - socket.destroy(); - } catch {} - if (!urlEmitted) { - urlEmitted = true; - try { - this.emit('event', { - type: 'url', - taskId, - url: urlToProbe, - } as HostPreviewEvent); - } catch {} - } - }); - socket.on('error', () => { - try { - socket.destroy(); - } catch {} - }); - } catch {} - }; - - const onData = (buf: Buffer) => { - const line = buf.toString(); - emitSetupLine(line); - if (/EADDRINUSE|address\s+already\s+in\s+use/i.test(line)) sawAddrInUse = true; - const url = normalizeUrl(line); - if (url && !urlEmitted) { - // Store candidate URL and probe before emitting - candidateUrl = url; - // Probe 
immediately when URL is found in logs - probeAndEmitUrl(url); - } - }; - child.stdout.on('data', onData); - child.stderr.on('data', onData); - - // Probe periodically; if reachable and not emitted from logs, synthesize URL - const host = 'localhost'; - const probeInterval = setInterval(() => { - if (urlEmitted) return; - // If we have a candidate URL from logs, probe that first - if (candidateUrl) { - probeAndEmitUrl(candidateUrl); - return; - } - // Otherwise, probe the expected port - const socket = net.createConnection( - { host, port: Number(env.PORT) || forcedPort }, - () => { - try { - socket.destroy(); - } catch {} - if (!urlEmitted) { - urlEmitted = true; - try { - this.emit('event', { - type: 'url', - taskId, - url: `http://localhost:${Number(env.PORT) || forcedPort}`, - } as HostPreviewEvent); - } catch {} - } - } - ); - socket.on('error', () => { - try { - socket.destroy(); - } catch {} - }); - }, 800); - - child.on('exit', async () => { - clearInterval(probeInterval); - this.procs.delete(taskId); - this.procCwds.delete(taskId); // Clean up cwd tracking - const runtimeMs = Date.now() - startedAt; - const quickFail = runtimeMs < 4000; // exited very quickly - if (!urlEmitted && (sawAddrInUse || quickFail) && maxRetries > 0) { - // pick next free port and retry - const exclude = new Set([Number(env.PORT) || forcedPort]); - const nextList = preferred.filter((p) => !exclude.has(p)); - forcedPort = await this.pickAvailablePort(nextList.length ? 
nextList : preferred); - env.PORT = String(forcedPort); - env.VITE_PORT = env.PORT; - // rewrite CLI flags - const idx = args.lastIndexOf('-p'); - const idxPort = args.lastIndexOf('--port'); - if (idx >= 0 && idx + 1 < args.length) args[idx + 1] = String(forcedPort); - else if (idxPort >= 0 && idxPort + 1 < args.length) - args[idxPort + 1] = String(forcedPort); - else if (pm === 'npm') args.push('--', '-p', String(forcedPort)); - else args.push('-p', String(forcedPort)); - log.info?.('[hostPreview] retry on new port', { - taskId, - port: forcedPort, - retriesLeft: maxRetries - 1, - }); - await tryStart(maxRetries - 1); - return; - } - try { - this.emit('event', { type: 'exit', taskId } as HostPreviewEvent); - } catch {} - }); - return { ok: true }; - } catch (e: any) { - log.error('[hostPreview] failed to start', e); - return { ok: false, error: e?.message || String(e) }; - } - }; - - return await tryStart(3); - } - - stop(taskId: string): { ok: boolean } { - const p = this.procs.get(taskId); - if (!p) return { ok: true }; - try { - p.kill(); - } catch {} - this.procs.delete(taskId); - this.procCwds.delete(taskId); // Clean up cwd tracking - return { ok: true }; - } - - stopAll(exceptId?: string | null): { ok: boolean; stopped: string[] } { - const stopped: string[] = []; - const except = (exceptId || '').trim(); - for (const [id, proc] of this.procs.entries()) { - if (except && id === except) continue; - try { - proc.kill(); - } catch {} - this.procs.delete(id); - this.procCwds.delete(id); // Clean up cwd tracking - stopped.push(id); - } - return { ok: true, stopped }; - } - - onEvent(listener: (evt: HostPreviewEvent) => void): () => void { - this.on('event', listener); - return () => this.off('event', listener); - } -} - -export const hostPreviewService = new HostPreviewService(); diff --git a/src/main/services/iconService.ts b/src/main/services/iconService.ts deleted file mode 100644 index 1c0ebba00..000000000 --- a/src/main/services/iconService.ts +++ /dev/null 
@@ -1,178 +0,0 @@ -import { app } from 'electron'; -import fs from 'node:fs'; -import path from 'node:path'; -import https from 'node:https'; - -function toSlug(name: string): string { - return name - .trim() - .toLowerCase() - .replace(/[^a-z0-9]+/g, '-'); -} - -function bufferToDataUrl(buf: Buffer, contentType: string): string { - const ct = contentType.toLowerCase(); - const mime = ct.startsWith('image/') ? ct : 'image/x-icon'; - return `data:${mime};base64,${buf.toString('base64')}`; -} - -function readFileAsDataUrl(abs: string): string | null { - try { - const data = fs.readFileSync(abs); - const ext = path.extname(abs).toLowerCase(); - const mime = - ext === '.svg' - ? 'image/svg+xml' - : ext === '.png' - ? 'image/png' - : ext === '.jpg' || ext === '.jpeg' - ? 'image/jpeg' - : ext === '.ico' - ? 'image/x-icon' - : 'application/octet-stream'; - return bufferToDataUrl(data, mime); - } catch { - return null; - } -} - -function getKnownDomain(service: string): string | null { - const n = service.trim().toLowerCase(); - const map: Record = { - postgres: 'postgresql.org', - postgresql: 'postgresql.org', - redis: 'redis.io', - minio: 'min.io', - clickhouse: 'clickhouse.com', - nginx: 'nginx.org', - mysql: 'mysql.com', - mariadb: 'mariadb.org', - mongo: 'mongodb.com', - mongodb: 'mongodb.com', - rabbitmq: 'rabbitmq.com', - kafka: 'apache.org', - zookeeper: 'apache.org', - }; - return map[n] ?? 
null; -} - -function allowlisted(domain: string): boolean { - const allow = new Set([ - 'postgresql.org', - 'redis.io', - 'min.io', - 'clickhouse.com', - 'nginx.org', - 'mysql.com', - 'mariadb.org', - 'mongodb.com', - 'rabbitmq.com', - 'apache.org', - ]); - return allow.has(domain); -} - -async function fetchHttps( - url: string, - maxBytes = 200_000 -): Promise<{ data: Buffer; contentType: string } | null> { - return new Promise((resolve) => { - try { - https - .get(url, (res) => { - const status = res.statusCode || 0; - const loc = res.headers.location; - if (status >= 300 && status < 400 && loc && /^https:\/\//i.test(loc)) { - https - .get(loc, (res2) => { - pipeResp(res2); - }) - .on('error', () => resolve(null)); - return; - } - pipeResp(res); - - function pipeResp(r: typeof res) { - const ct = String(r.headers['content-type'] || '').toLowerCase(); - if (!ct.startsWith('image/')) { - resolve(null); - r.resume(); - return; - } - const chunks: Buffer[] = []; - let bytes = 0; - r.on('data', (chunk: Buffer) => { - bytes += chunk.length; - if (bytes > maxBytes) { - resolve(null); - r.destroy(); - return; - } - chunks.push(chunk); - }); - r.on('end', () => { - resolve({ data: Buffer.concat(chunks), contentType: ct }); - }); - r.on('error', () => resolve(null)); - } - }) - .on('error', () => resolve(null)); - } catch { - resolve(null); - } - }); -} - -export async function resolveServiceIcon(opts: { - service: string; - taskPath?: string; - allowNetwork?: boolean; -}): Promise<{ ok: true; dataUrl: string } | { ok: false }> { - const service = opts.service?.trim(); - if (!service) return { ok: false }; - const slug = toSlug(service); - - // 1) Task overrides - if (opts.taskPath) { - const p = path.join(opts.taskPath, '.emdash', 'service-icons'); - const candidates = ['.svg', '.png', '.jpg', '.jpeg', '.ico'].map((ext) => - path.join(p, `${slug}${ext}`) - ); - for (const abs of candidates) { - if (fs.existsSync(abs)) { - const dataUrl = readFileAsDataUrl(abs); - if 
(dataUrl) return { ok: true, dataUrl }; - } - } - } - - // 2) Cache under userData - const cacheDir = path.join(app.getPath('userData'), 'icons'); - try { - fs.mkdirSync(cacheDir, { recursive: true }); - } catch {} - const cacheFile = path.join(cacheDir, `${slug}.ico`); - if (fs.existsSync(cacheFile)) { - const dataUrl = readFileAsDataUrl(cacheFile); - if (dataUrl) return { ok: true, dataUrl }; - } - - // 3) Optional network fetch to allowlisted domains only - if (opts.allowNetwork) { - const domain = getKnownDomain(service); - if (domain && allowlisted(domain)) { - const ddgUrl = `https://icons.duckduckgo.com/ip3/${domain}.ico`; - const directUrl = `https://${domain}/favicon.ico`; - const fetched = (await fetchHttps(ddgUrl)) || (await fetchHttps(directUrl)); - if (fetched) { - try { - fs.writeFileSync(cacheFile, fetched.data); - } catch {} - const dataUrl = bufferToDataUrl(fetched.data, fetched.contentType); - return { ok: true, dataUrl }; - } - } - } - - return { ok: false }; -} diff --git a/src/main/services/lifecycleIpc.ts b/src/main/services/lifecycleIpc.ts deleted file mode 100644 index 5608a3aff..000000000 --- a/src/main/services/lifecycleIpc.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { BrowserWindow, ipcMain } from 'electron'; -import { lifecycleScriptsService } from './LifecycleScriptsService'; -import { log } from '../lib/logger'; -import { LIFECYCLE_EVENT_CHANNEL, LIFECYCLE_PHASES } from '@shared/lifecycle'; -import { taskLifecycleService } from './TaskLifecycleService'; - -export function registerLifecycleIpc(): void { - // Get a specific lifecycle phase script for a project - ipcMain.handle( - 'lifecycle:getScript', - async ( - _event, - args: { - projectPath: string; - phase: string; - } - ) => { - try { - if (!LIFECYCLE_PHASES.includes(args.phase as (typeof LIFECYCLE_PHASES)[number])) { - return { success: false, error: `Invalid lifecycle phase: ${args.phase}` }; - } - const phase = args.phase as (typeof LIFECYCLE_PHASES)[number]; - const script = 
lifecycleScriptsService.getScript(args.projectPath, phase); - return { success: true, script }; - } catch (error) { - log.error('Failed to get lifecycle script:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - ipcMain.handle( - 'lifecycle:setup', - async ( - _event, - args: { - taskId: string; - taskPath: string; - projectPath: string; - taskName?: string; - } - ) => { - try { - const result = await taskLifecycleService.runSetup( - args.taskId, - args.taskPath, - args.projectPath, - args.taskName - ); - return { success: result.ok, ...result }; - } catch (error) { - log.error('Failed to run setup lifecycle phase:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - ipcMain.handle( - 'lifecycle:run:start', - async ( - _event, - args: { - taskId: string; - taskPath: string; - projectPath: string; - taskName?: string; - } - ) => { - try { - const result = await taskLifecycleService.startRun( - args.taskId, - args.taskPath, - args.projectPath, - args.taskName - ); - return { success: result.ok, ...result }; - } catch (error) { - log.error('Failed to start run lifecycle phase:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - ipcMain.handle('lifecycle:run:stop', async (_event, args: { taskId: string }) => { - try { - const result = taskLifecycleService.stopRun(args.taskId); - return { success: result.ok, ...result }; - } catch (error) { - log.error('Failed to stop run lifecycle phase:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle( - 'lifecycle:teardown', - async ( - _event, - args: { - taskId: string; - taskPath: string; - projectPath: string; - taskName?: string; - } - ) => { - try { - const result = await taskLifecycleService.runTeardown( - args.taskId, - args.taskPath, - args.projectPath, - args.taskName - ); - return { success: result.ok, ...result }; - } catch (error) { - log.error('Failed to run 
teardown lifecycle phase:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - ipcMain.handle('lifecycle:getState', async (_event, args: { taskId: string }) => { - try { - const state = taskLifecycleService.getState(args.taskId); - return { success: true, state }; - } catch (error) { - log.error('Failed to get lifecycle state:', error); - return { success: false, error: (error as Error).message }; - } - }); - - ipcMain.handle('lifecycle:clearTask', async (_event, args: { taskId: string }) => { - try { - taskLifecycleService.clearTask(args.taskId); - return { success: true }; - } catch (error) { - log.error('Failed to clear lifecycle state for task:', error); - return { success: false, error: (error as Error).message }; - } - }); - - const forward = (evt: any) => { - const all = BrowserWindow.getAllWindows(); - for (const win of all) { - try { - win.webContents.send(LIFECYCLE_EVENT_CHANNEL, evt); - } catch {} - } - }; - taskLifecycleService.onEvent(forward); -} diff --git a/src/main/services/planLockIpc.ts b/src/main/services/planLockIpc.ts deleted file mode 100644 index 55c2d6b11..000000000 --- a/src/main/services/planLockIpc.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { ipcMain } from 'electron'; -import * as fs from 'fs'; -import * as path from 'path'; - -function isWindows() { - return process.platform === 'win32'; -} - -function isSymlink(p: string) { - try { - const st = fs.lstatSync(p); - return st.isSymbolicLink(); - } catch { - return false; - } -} - -type Entry = { p: string; m: number }; - -function collectPaths(root: string) { - const result: string[] = []; - const stack = ['.']; - while (stack.length) { - const rel = stack.pop()!; - const abs = path.join(root, rel); - if (isSymlink(abs)) continue; - let st: fs.Stats; - try { - st = fs.statSync(abs); - } catch { - continue; - } - if (st.isDirectory()) { - // Skip our internal folder so we can write logs/policies - if (rel === '.emdash' || rel.startsWith('.emdash' + 
path.sep)) continue; - result.push(rel); - let entries: string[] = []; - try { - entries = fs.readdirSync(abs); - } catch { - continue; - } - for (const e of entries) { - const nextRel = rel === '.' ? e : path.join(rel, e); - stack.push(nextRel); - } - } else if (st.isFile()) { - result.push(rel); - } - } - return result; -} - -function chmodNoWrite(mode: number, isDir: boolean): number { - const noWrite = mode & ~0o222; // clear write bits - if (isDir) { - // Ensure traverse bits present - return (noWrite | 0o111) & 0o7777; - } - return noWrite & 0o7777; -} - -function applyLock(root: string): { success: boolean; changed: number; error?: string } { - try { - const entries = collectPaths(root); - const state: Entry[] = []; - let changed = 0; - for (const rel of entries) { - const abs = path.join(root, rel); - let st: fs.Stats; - try { - st = fs.statSync(abs); - } catch { - continue; - } - const isDir = st.isDirectory(); - const prevMode = st.mode & 0o7777; - const nextMode = chmodNoWrite(prevMode, isDir); - if (nextMode !== prevMode) { - try { - fs.chmodSync(abs, nextMode); - state.push({ p: rel, m: prevMode }); - changed++; - } catch {} - } - } - // Persist lock state - const baseDir = path.join(root, '.emdash'); - try { - fs.mkdirSync(baseDir, { recursive: true }); - } catch {} - const statePath = path.join(baseDir, '.planlock.json'); - try { - fs.writeFileSync(statePath, JSON.stringify(state), 'utf8'); - } catch {} - return { success: true, changed }; - } catch (e: any) { - return { success: false, changed: 0, error: e?.message || String(e) }; - } -} - -function releaseLock(root: string): { success: boolean; restored: number; error?: string } { - try { - const statePath = path.join(root, '.emdash', '.planlock.json'); - if (!fs.existsSync(statePath)) return { success: true, restored: 0 }; - let raw = ''; - try { - raw = fs.readFileSync(statePath, 'utf8'); - } catch {} - let entries: Entry[] = []; - try { - entries = JSON.parse(raw || '[]'); - } catch {} - let 
restored = 0; - for (const ent of entries) { - try { - const abs = path.join(root, ent.p); - fs.chmodSync(abs, ent.m); - restored++; - } catch {} - } - // Cleanup state file - try { - fs.unlinkSync(statePath); - } catch {} - return { success: true, restored }; - } catch (e: any) { - return { success: false, restored: 0, error: e?.message || String(e) }; - } -} - -export function registerPlanLockIpc(): void { - ipcMain.handle('plan:lock', async (_e, taskPath: string) => { - if (isWindows()) { - // Best-effort: still attempt chmod; ACL hardening could be added with icacls in a future pass - return applyLock(taskPath); - } - return applyLock(taskPath); - }); - - ipcMain.handle('plan:unlock', async (_e, taskPath: string) => { - if (isWindows()) { - return releaseLock(taskPath); - } - return releaseLock(taskPath); - }); -} diff --git a/src/main/services/providerStatusCache.ts b/src/main/services/providerStatusCache.ts deleted file mode 100644 index 007ee9f27..000000000 --- a/src/main/services/providerStatusCache.ts +++ /dev/null @@ -1,90 +0,0 @@ -import fs from 'fs/promises'; -import path from 'path'; -import { app } from 'electron'; -import { log } from '../lib/logger'; - -export interface ProviderStatus { - installed: boolean; - path?: string | null; - version?: string | null; - lastChecked: number; -} - -type ProviderStatusMap = Record; - -export class ProviderStatusCache { - private cache: ProviderStatusMap = {}; - private filePath: string | null = null; - private persistPromise: Promise | null = null; - private pendingPersist = false; - - constructor() { - // lazily resolved in load/persist to avoid app readiness issues - } - - async load(): Promise { - if (!this.filePath) { - this.filePath = path.join(app.getPath('userData'), 'provider-status-cache.json'); - } - if (!this.filePath) return; - try { - const content = await fs.readFile(this.filePath, 'utf8'); - const parsed = JSON.parse(content); - if (parsed && typeof parsed === 'object') { - this.cache = parsed as 
ProviderStatusMap; - } - } catch { - this.cache = {}; - } - } - - getAll(): ProviderStatusMap { - return { ...this.cache }; - } - - get(providerId: string): ProviderStatus | undefined { - return this.cache[providerId]; - } - - set(providerId: string, status: ProviderStatus): void { - this.cache = { - ...this.cache, - [providerId]: status, - }; - this.persist(); - } - - private persist() { - if (!this.filePath) { - this.filePath = path.join(app.getPath('userData'), 'provider-status-cache.json'); - } - if (!this.filePath) return; - - if (this.persistPromise) { - this.pendingPersist = true; - return; - } - const write = () => { - const payload = JSON.stringify(this.cache, null, 2); - this.persistPromise = fs - .writeFile(this.filePath as string, payload, 'utf8') - .catch((error) => { - log.warn('providerStatusCache:persist failed', { - filePath: this.filePath, - error: error?.message || String(error), - }); - }) - .finally(() => { - this.persistPromise = null; - const shouldRetry = this.pendingPersist; - this.pendingPersist = false; - if (shouldRetry) { - setTimeout(() => this.persist(), 250); - } - }); - }; - write(); - } -} - -export const providerStatusCache = new ProviderStatusCache(); diff --git a/src/main/services/ptyIpc.ts b/src/main/services/ptyIpc.ts deleted file mode 100644 index 70d8d0f72..000000000 --- a/src/main/services/ptyIpc.ts +++ /dev/null @@ -1,1116 +0,0 @@ -import { app, ipcMain, WebContents, BrowserWindow } from 'electron'; -import { - startPty, - writePty, - resizePty, - killPty, - getPty, - getPtyKind, - startDirectPty, - startSshPty, - removePtyRecord, - setOnDirectCliExit, - parseShellArgs, - buildProviderCliArgs, - resolveProviderCommandConfig, - killTmuxSession, - getTmuxSessionName, - getPtyTmuxSessionName, -} from './ptyManager'; -import { log } from '../lib/logger'; -import { terminalSnapshotService } from './TerminalSnapshotService'; -import { errorTracking } from '../errorTracking'; -import type { TerminalSnapshotPayload } from 
'../types/terminalSnapshot'; -import * as telemetry from '../telemetry'; -import { PROVIDER_IDS, getProvider, type ProviderId } from '../../shared/providers/registry'; -import { parsePtyId, isChatPty } from '../../shared/ptyId'; -import { detectAndLoadTerminalConfig } from './TerminalConfigParser'; -import { ClaudeHookService } from './ClaudeHookService'; -import { databaseService } from './DatabaseService'; -import { lifecycleScriptsService } from './LifecycleScriptsService'; -import { maybeAutoTrustForClaude } from './ClaudeConfigService'; -import { getDrizzleClient } from '../db/drizzleClient'; -import { sshConnections as sshConnectionsTable } from '../db/schema'; -import { eq } from 'drizzle-orm'; -import { execFile } from 'child_process'; -import { randomUUID } from 'crypto'; -import path from 'path'; -import { quoteShellArg } from '../utils/shellEscape'; - -const owners = new Map(); -const listeners = new Set(); -const providerPtyTimers = new Map(); -// Map PTY IDs to provider IDs for multi-agent tracking -const ptyProviderMap = new Map(); -// Prevent duplicate finish handling when cleanup and onExit race for the same PTY. 
-const finalizedPtys = new Set(); -// Track WebContents that have a 'destroyed' listener to avoid duplicates -const wcDestroyedListeners = new Set(); -let isAppQuitting = false; - -type FinishCause = 'process_exit' | 'app_quit' | 'owner_destroyed' | 'manual_kill'; - -// Buffer PTY output to reduce IPC overhead (helps SSH feel less laggy) -const ptyDataBuffers = new Map(); -const ptyDataTimers = new Map(); -const PTY_DATA_FLUSH_MS = 16; - -// Guard IPC sends to prevent crashes when WebContents is destroyed -function safeSendToOwner(id: string, channel: string, payload: unknown): boolean { - const wc = owners.get(id); - if (!wc) return false; - try { - if (typeof wc.isDestroyed === 'function' && wc.isDestroyed()) return false; - wc.send(channel, payload); - return true; - } catch (err) { - log.warn('ptyIpc:safeSendFailed', { - id, - channel, - error: String((err as Error)?.message || err), - }); - return false; - } -} - -function flushPtyData(id: string): void { - const buf = ptyDataBuffers.get(id); - if (!buf) return; - ptyDataBuffers.delete(id); - safeSendToOwner(id, `pty:data:${id}`, buf); -} - -function clearPtyData(id: string): void { - const t = ptyDataTimers.get(id); - if (t) { - clearTimeout(t); - ptyDataTimers.delete(id); - } - ptyDataBuffers.delete(id); -} - -function bufferedSendPtyData(id: string, chunk: string): void { - const prev = ptyDataBuffers.get(id) || ''; - ptyDataBuffers.set(id, prev + chunk); - if (ptyDataTimers.has(id)) return; - const t = setTimeout(() => { - ptyDataTimers.delete(id); - flushPtyData(id); - }, PTY_DATA_FLUSH_MS); - ptyDataTimers.set(id, t); -} - -function buildRemoteInitKeystrokes(args: { - cwd?: string; - provider?: { cli: string; cmd: string; installCommand?: string }; - tmux?: { sessionName: string }; -}): string { - const lines: string[] = []; - if (args.cwd) { - // Keep this line shell-agnostic (works in zsh/bash/fish); avoid POSIX `||` which fish doesn't support. 
- // If `cd` fails, the shell will print its own error message. - lines.push(`cd ${quoteShellArg(args.cwd)}`); - } - if (args.provider) { - const cli = args.provider.cli; - const install = args.provider.installCommand ? ` Install: ${args.provider.installCommand}` : ''; - const msg = `emdash: ${cli} not found on remote.${install}`; - // Run the check inside a POSIX shell so it works even if the user's login shell isn't POSIX - // (e.g. fish). PATH/env vars come from the interactive login shell started by `ssh`. - - if (args.tmux) { - // When tmux is enabled, wrap the provider command in a named tmux session. - // tmux new-session -As creates-or-attaches in one command. - // Falls back to running without tmux if tmux isn't installed on the remote. - const tmuxName = quoteShellArg(args.tmux.sessionName); - const shScript = `if command -v ${quoteShellArg(cli)} >/dev/null 2>&1; then if command -v tmux >/dev/null 2>&1; then exec tmux new-session -As ${tmuxName} -- sh -c ${quoteShellArg(args.provider.cmd)}; else printf '%s\\n' 'emdash: tmux not found on remote, running without session persistence'; exec ${args.provider.cmd}; fi; else printf '%s\\n' ${quoteShellArg( - msg - )}; fi`; - lines.push(`sh -c ${quoteShellArg(shScript)}`); - } else { - const shScript = `if command -v ${quoteShellArg(cli)} >/dev/null 2>&1; then exec ${args.provider.cmd}; else printf '%s\\n' ${quoteShellArg( - msg - )}; fi`; - lines.push(`sh -c ${quoteShellArg(shScript)}`); - } - } - - return lines.length ? `${lines.join('\n')}\n` : ''; -} - -async function resolveSshInvocation( - connectionId: string -): Promise<{ target: string; args: string[] }> { - // If created from ssh config selection, prefer using the alias so OpenSSH config - // (ProxyJump, UseKeychain, etc.) is honored by system ssh. - if (connectionId.startsWith('ssh-config:')) { - const raw = connectionId.slice('ssh-config:'.length); - let alias = raw; - try { - // New scheme uses encodeURIComponent. 
- if (/%[0-9A-Fa-f]{2}/.test(raw)) { - alias = decodeURIComponent(raw); - } - } catch { - alias = raw; - } - if (alias) { - return { target: alias, args: [] }; - } - } - - const { db } = await getDrizzleClient(); - const rows = await db - .select({ - id: sshConnectionsTable.id, - host: sshConnectionsTable.host, - port: sshConnectionsTable.port, - username: sshConnectionsTable.username, - privateKeyPath: sshConnectionsTable.privateKeyPath, - }) - .from(sshConnectionsTable) - .where(eq(sshConnectionsTable.id, connectionId)) - .limit(1); - - const row = rows[0]; - if (!row) { - throw new Error(`SSH connection not found: ${connectionId}`); - } - - const args: string[] = []; - if (row.port && row.port !== 22) { - args.push('-p', String(row.port)); - } - if (row.privateKeyPath) { - args.push('-i', row.privateKeyPath); - } - - const target = row.username ? `${row.username}@${row.host}` : row.host; - return { target, args }; -} - -function buildRemoteProviderInvocation(args: { - providerId: string; - autoApprove?: boolean; - initialPrompt?: string; - resume?: boolean; -}): { cli: string; cmd: string; installCommand?: string } { - const { providerId, autoApprove, initialPrompt, resume } = args; - const fallbackProvider = getProvider(providerId as ProviderId); - const resolvedConfig = resolveProviderCommandConfig(providerId); - const provider = resolvedConfig?.provider ?? fallbackProvider; - - const cliCommand = ( - resolvedConfig?.cli || - fallbackProvider?.cli || - providerId.toLowerCase() - ).trim(); - const parsedCliParts = parseShellArgs(cliCommand); - const cliCommandParts = parsedCliParts.length > 0 ? parsedCliParts : [cliCommand]; - const cliCheckCommand = cliCommandParts[0]; - - const cliArgs = buildProviderCliArgs({ - resume, - resumeFlag: resolvedConfig?.resumeFlag ?? fallbackProvider?.resumeFlag, - defaultArgs: resolvedConfig?.defaultArgs ?? 
fallbackProvider?.defaultArgs, - extraArgs: resolvedConfig?.extraArgs, - autoApprove, - autoApproveFlag: resolvedConfig?.autoApproveFlag ?? fallbackProvider?.autoApproveFlag, - initialPrompt, - initialPromptFlag: resolvedConfig?.initialPromptFlag ?? fallbackProvider?.initialPromptFlag, - useKeystrokeInjection: provider?.useKeystrokeInjection, - }); - - const cmdParts = [...cliCommandParts, ...cliArgs]; - const cmd = cmdParts.map(quoteShellArg).join(' '); - - return { cli: cliCheckCommand, cmd, installCommand: provider?.installCommand }; -} - -/** Convert SSH args to SCP-compatible args (e.g. `-p` port → `-P` port). */ -function buildScpArgs(sshArgs: string[]): string[] { - const scpArgs: string[] = []; - for (let i = 0; i < sshArgs.length; i++) { - if (sshArgs[i] === '-p' && i + 1 < sshArgs.length) { - // scp uses -P (uppercase) for port - scpArgs.push('-P', sshArgs[i + 1]); - i++; - } else if ( - (sshArgs[i] === '-i' || sshArgs[i] === '-o' || sshArgs[i] === '-F') && - i + 1 < sshArgs.length - ) { - scpArgs.push(sshArgs[i], sshArgs[i + 1]); - i++; - } - } - return scpArgs; -} - -function execFileAsync(cmd: string, args: string[]): Promise<{ stdout: string; stderr: string }> { - return new Promise((resolve, reject) => { - execFile(cmd, args, { timeout: 30_000 }, (error, stdout, stderr) => { - if (error) { - reject(new Error(`${cmd} failed: ${stderr || error.message}`)); - } else { - resolve({ stdout, stderr }); - } - }); - }); -} - -async function resolveShellSetup(cwd: string): Promise { - // Committed .emdash.json lives in the worktree itself - const fromCwd = lifecycleScriptsService.getShellSetup(cwd); - if (fromCwd) return fromCwd; - // Uncommitted .emdash.json only exists in the project root — look it up via DB - try { - const task = await databaseService.getTaskByPath(cwd); - const project = task ? await databaseService.getProjectById(task.projectId) : null; - if (project?.path) return lifecycleScriptsService.getShellSetup(project.path) ?? 
undefined; - } catch {} - return undefined; -} - -async function resolveTmuxEnabled(cwd: string): Promise { - if (lifecycleScriptsService.getTmuxEnabled(cwd)) return true; - try { - const task = await databaseService.getTaskByPath(cwd); - const project = task ? await databaseService.getProjectById(task.projectId) : null; - if (project?.path) return lifecycleScriptsService.getTmuxEnabled(project.path); - } catch {} - return false; -} - -export function registerPtyIpc(): void { - // When a direct-spawned CLI exits, spawn a shell so user can continue working - setOnDirectCliExit(async (id: string, cwd: string) => { - const wc = owners.get(id); - if (!wc) return; - - try { - // Spawn a shell in the same terminal - const proc = await startPty({ - id, - cwd, - cols: 120, - rows: 32, - }); - - if (!proc) { - log.warn('ptyIpc: Failed to spawn shell after CLI exit', { id }); - killPty(id); // Clean up dead PTY record - return; - } - - // Re-attach listeners for the new shell process - listeners.delete(id); // Clear old listener registration - if (!listeners.has(id)) { - proc.onData((data) => { - bufferedSendPtyData(id, data); - }); - - proc.onExit(({ exitCode, signal }) => { - flushPtyData(id); - clearPtyData(id); - safeSendToOwner(id, `pty:exit:${id}`, { exitCode, signal }); - owners.delete(id); - listeners.delete(id); - removePtyRecord(id); - }); - listeners.add(id); - } - - // Notify renderer that shell is ready (reuse pty:started so existing listener handles it) - if (!wc.isDestroyed()) { - wc.send('pty:started', { id }); - } - } catch (err) { - log.error('ptyIpc: Error spawning shell after CLI exit', { id, error: err }); - killPty(id); // Clean up dead PTY record - } - }); - - ipcMain.handle( - 'pty:start', - async ( - event, - args: { - id: string; - cwd?: string; - remote?: { connectionId: string }; - shell?: string; - env?: Record; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - skipResume?: boolean; - } - ) => { - const 
ptyStartTime = performance.now(); - if (process.env.EMDASH_DISABLE_PTY === '1') { - return { ok: false, error: 'PTY disabled via EMDASH_DISABLE_PTY=1' }; - } - try { - const { id, cwd, remote, shell, env, cols, rows, autoApprove, initialPrompt, skipResume } = - args; - const existing = getPty(id); - - // Remote PTY routing: run an interactive ssh session in a local PTY. - if (remote?.connectionId) { - const wc = event.sender; - owners.set(id, wc); - - if (existing) { - const kind = getPtyKind(id); - if (kind === 'ssh') { - return { ok: true, reused: true }; - } - // Replace an existing local PTY with an SSH-backed PTY. - try { - killPty(id); - } catch {} - listeners.delete(id); - } - - const ssh = await resolveSshInvocation(remote.connectionId); - const proc = startSshPty({ - id, - target: ssh.target, - sshArgs: ssh.args, - cols, - rows, - env, - }); - - if (!listeners.has(id)) { - proc.onData((data) => { - bufferedSendPtyData(id, data); - }); - proc.onExit(({ exitCode, signal }) => { - flushPtyData(id); - clearPtyData(id); - safeSendToOwner(id, `pty:exit:${id}`, { exitCode, signal }); - owners.delete(id); - listeners.delete(id); - removePtyRecord(id); - }); - listeners.add(id); - } - - // Resolve tmux config from local project settings - const remoteTmux = cwd ? await resolveTmuxEnabled(cwd) : false; - const remoteTmuxOpt = remoteTmux ? 
{ sessionName: getTmuxSessionName(id) } : undefined; - - const remoteInit = buildRemoteInitKeystrokes({ cwd, tmux: remoteTmuxOpt }); - if (remoteInit) { - proc.write(remoteInit); - } - - try { - const windows = BrowserWindow.getAllWindows(); - windows.forEach((w: any) => w.webContents.send('pty:started', { id })); - } catch {} - - return { ok: true, tmux: remoteTmux }; - } - - // Determine if we should skip resume - let shouldSkipResume = skipResume; - - // Check if this is an additional (non-main) chat - const isAdditionalChat = isChatPty(id); - - if (isAdditionalChat) { - // Additional chats can resume if the provider supports per-session - // isolation (via sessionIdFlag), since each chat gets its own - // session UUID. Without session isolation, always start fresh to - // avoid all chats sharing the provider's directory-scoped state. - const parsed = parsePtyId(id); - const chatProvider = parsed ? getProvider(parsed.providerId) : null; - if (!chatProvider?.sessionIdFlag) { - shouldSkipResume = true; - } - // Otherwise keep shouldSkipResume from the renderer (undefined or - // explicitly set), which is based on whether a snapshot exists. 
- } else if (shouldSkipResume === undefined) { - // For main chats, check if this is a first-time start - // For Claude and similar providers, check if a session directory exists - if (cwd && shell) { - try { - const fs = require('fs'); - const path = require('path'); - const os = require('os'); - const crypto = require('crypto'); - - // Check if this is Claude by looking at the shell - const isClaudeOrSimilar = shell.includes('claude') || shell.includes('aider'); - - if (isClaudeOrSimilar) { - // Claude stores sessions in ~/.claude/projects/ with various naming schemes - // Check both hash-based and path-based directory names - const cwdHash = crypto.createHash('sha256').update(cwd).digest('hex').slice(0, 16); - const claudeHashDir = path.join(os.homedir(), '.claude', 'projects', cwdHash); - - // Also check for path-based directory name (Claude's actual format) - // Replace path separators with hyphens for the directory name - const pathBasedName = cwd.replace(/\//g, '-'); - const claudePathDir = path.join(os.homedir(), '.claude', 'projects', pathBasedName); - - // Check if any Claude session directory exists for this working directory - const projectsDir = path.join(os.homedir(), '.claude', 'projects'); - let sessionExists = false; - - // Check if the hash-based directory exists - sessionExists = fs.existsSync(claudeHashDir); - - // If not, check for path-based directory - if (!sessionExists) { - sessionExists = fs.existsSync(claudePathDir); - } - - // If still not found, scan the projects directory for any matching directory - if (!sessionExists && fs.existsSync(projectsDir)) { - try { - const dirs = fs.readdirSync(projectsDir); - // Check if any directory contains part of the working directory path - const cwdParts = cwd.split('/').filter((p) => p.length > 0); - const lastParts = cwdParts.slice(-3).join('-'); // Use last 3 parts of path - sessionExists = dirs.some((dir: string) => dir.includes(lastParts)); - } catch { - // Ignore scan errors - } - } - - // Skip 
resume if no session directory exists (new task) - shouldSkipResume = !sessionExists; - } else { - // For other providers, default to not skipping (allow resume if supported) - shouldSkipResume = false; - } - } catch (e) { - // On error, default to not skipping - shouldSkipResume = false; - } - } else { - // If no cwd or shell, default to not skipping - shouldSkipResume = false; - } - } else { - // Use the explicitly provided value - shouldSkipResume = shouldSkipResume || false; - } - - const parsedPty = parsePtyId(id); - if (parsedPty) maybeAutoTrustForClaude(parsedPty.providerId, cwd); - - const shellSetup = cwd ? await resolveShellSetup(cwd) : undefined; - const tmux = cwd ? await resolveTmuxEnabled(cwd) : false; - - const proc = - existing ?? - (await startPty({ - id, - cwd, - shell, - env, - cols, - rows, - autoApprove, - initialPrompt, - skipResume: shouldSkipResume, - shellSetup, - tmux, - })); - const wc = event.sender; - owners.set(id, wc); - - // Attach data/exit listeners once per PTY id - if (!listeners.has(id)) { - proc.onData((data) => { - bufferedSendPtyData(id, data); - }); - - proc.onExit(({ exitCode, signal }) => { - flushPtyData(id); - clearPtyData(id); - // Check if this PTY is still active (not replaced by a newer instance) - if (getPty(id) !== proc) { - return; - } - safeSendToOwner(id, `pty:exit:${id}`, { exitCode, signal }); - maybeMarkProviderFinish( - id, - exitCode, - signal, - isAppQuitting ? 
'app_quit' : 'process_exit' - ); - owners.delete(id); - listeners.delete(id); - removePtyRecord(id); - }); - - listeners.add(id); - } - - // Clean up all PTYs owned by this WebContents when it's destroyed - // Only register once per WebContents to avoid MaxListenersExceededWarning - if (!wcDestroyedListeners.has(wc.id)) { - wcDestroyedListeners.add(wc.id); - wc.once('destroyed', () => { - wcDestroyedListeners.delete(wc.id); - // Clean up all PTYs owned by this WebContents - for (const [ptyId, owner] of owners.entries()) { - if (owner === wc) { - try { - maybeMarkProviderFinish( - ptyId, - null, - undefined, - isAppQuitting ? 'app_quit' : 'owner_destroyed' - ); - killPty(ptyId); - } catch {} - owners.delete(ptyId); - listeners.delete(ptyId); - } - } - }); - } - - // Track agent start even when reusing PTY (happens after shell respawn) - // This ensures subsequent agent runs in the same task are tracked - maybeMarkProviderStart(id); - - // Signal that PTY is ready - try { - const windows = BrowserWindow.getAllWindows(); - windows.forEach((w) => { - try { - if (!w.webContents.isDestroyed()) { - w.webContents.send('pty:started', { id }); - } - } catch {} - }); - } catch {} - - return { ok: true, tmux }; - } catch (err: any) { - log.error('pty:start FAIL', { - id: args.id, - cwd: args.cwd, - shell: args.shell, - error: err?.message || err, - }); - - // Track PTY start errors - const parsed = parseProviderPty(args.id); - await errorTracking.captureAgentSpawnError( - err, - parsed?.providerId || args.shell || 'unknown', - parsed?.taskId || args.id, - { - cwd: args.cwd, - autoApprove: args.autoApprove, - hasInitialPrompt: !!args.initialPrompt, - } - ); - - return { ok: false, error: String(err?.message || err) }; - } - } - ); - - ipcMain.on('pty:input', (_event, args: { id: string; data: string }) => { - try { - writePty(args.id, args.data); - - // Track prompts sent to agents (not shell terminals) - // Only count Enter key presses for known agent PTYs - if (args.data === 
'\r' || args.data === '\n') { - // Check if this PTY is associated with an agent - const providerId = ptyProviderMap.get(args.id) || parseProviderPty(args.id)?.providerId; - - if (providerId) { - // This is an agent terminal, track the prompt - telemetry.capture('agent_prompt_sent', { - provider: providerId, - }); - } - } - } catch (e) { - log.error('pty:input error', { id: args.id, error: e }); - } - }); - - ipcMain.on('pty:resize', (_event, args: { id: string; cols: number; rows: number }) => { - try { - resizePty(args.id, args.cols, args.rows); - } catch (e) { - log.error('pty:resize error', { id: args.id, cols: args.cols, rows: args.rows, error: e }); - } - }); - - ipcMain.on('pty:kill', (_event, args: { id: string }) => { - try { - // Ensure telemetry timers are cleared even on manual kill - maybeMarkProviderFinish(args.id, null, undefined, 'manual_kill'); - // Kill associated tmux session if this PTY was tmux-wrapped - if (getPtyTmuxSessionName(args.id)) { - killTmuxSession(args.id); - } - killPty(args.id); - owners.delete(args.id); - listeners.delete(args.id); - } catch (e) { - log.error('pty:kill error', { id: args.id, error: e }); - } - }); - - // Kill a tmux session by PTY ID (used during task deletion cleanup) - ipcMain.handle('pty:killTmux', async (_event, args: { id: string }) => { - try { - killTmuxSession(args.id); - return { ok: true }; - } catch (e) { - log.error('pty:killTmux error', { id: args.id, error: e }); - return { ok: false, error: String(e) }; - } - }); - - ipcMain.handle('pty:snapshot:get', async (_event, args: { id: string }) => { - try { - const snapshot = await terminalSnapshotService.getSnapshot(args.id); - return { ok: true, snapshot }; - } catch (error: any) { - log.error('pty:snapshot:get failed', { id: args.id, error }); - return { ok: false, error: error?.message || String(error) }; - } - }); - - ipcMain.handle( - 'pty:snapshot:save', - async (_event, args: { id: string; payload: TerminalSnapshotPayload }) => { - const { id, 
payload } = args; - const result = await terminalSnapshotService.saveSnapshot(id, payload); - if (!result.ok) { - log.warn('pty:snapshot:save failed', { id, error: result.error }); - } - return result; - } - ); - - ipcMain.handle('pty:snapshot:clear', async (_event, args: { id: string }) => { - await terminalSnapshotService.deleteSnapshot(args.id); - return { ok: true }; - }); - - ipcMain.handle('terminal:getTheme', async () => { - try { - const config = detectAndLoadTerminalConfig(); - if (config) { - return { ok: true, config }; - } - return { ok: false, error: 'No terminal configuration found' }; - } catch (error: any) { - log.error('terminal:getTheme failed', { error }); - return { ok: false, error: error?.message || String(error) }; - } - }); - - // SCP file transfer to SSH remote (for file drop on SSH terminals) - ipcMain.handle( - 'pty:scp-to-remote', - async ( - _event, - args: { connectionId: string; localPaths: string[] } - ): Promise<{ success: boolean; remotePaths?: string[]; error?: string }> => { - try { - const ssh = await resolveSshInvocation(args.connectionId); - const scpArgs = buildScpArgs(ssh.args); - const remoteDir = '/tmp/emdash-images'; - - // Ensure remote directory exists - await execFileAsync('ssh', [...ssh.args, ssh.target, `mkdir -p ${remoteDir}`]); - - // Transfer each file individually so UUID-prefixed names avoid collisions - // (batching into one scp call would lose uniqueness for same-named files) - const remotePaths: string[] = []; - for (const localPath of args.localPaths) { - const remoteName = `${randomUUID()}-${path.basename(localPath)}`; - const remotePath = `${remoteDir}/${remoteName}`; - await execFileAsync('scp', [...scpArgs, localPath, `${ssh.target}:${remotePath}`]); - remotePaths.push(remotePath); - } - - return { success: true, remotePaths }; - } catch (err: any) { - log.error('pty:scp-to-remote failed', { - connectionId: args.connectionId, - error: err?.message || err, - }); - return { success: false, error: 
String(err?.message || err) }; - } - } - ); - - // Start a PTY by spawning CLI directly (no shell wrapper) - // This is faster but falls back to shell-based spawn if CLI path unknown - ipcMain.handle( - 'pty:startDirect', - async ( - event, - args: { - id: string; - providerId: string; - cwd: string; - remote?: { connectionId: string }; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - env?: Record; - resume?: boolean; - } - ) => { - if (process.env.EMDASH_DISABLE_PTY === '1') { - return { ok: false, error: 'PTY disabled via EMDASH_DISABLE_PTY=1' }; - } - - try { - const { id, providerId, cwd, remote, cols, rows, autoApprove, initialPrompt, env, resume } = - args; - const existing = getPty(id); - - if (remote?.connectionId) { - const wc = event.sender; - owners.set(id, wc); - - if (existing) { - const kind = getPtyKind(id); - if (kind === 'ssh') { - return { ok: true, reused: true }; - } - try { - killPty(id); - } catch {} - listeners.delete(id); - } - - const ssh = await resolveSshInvocation(remote.connectionId); - const remoteProvider = buildRemoteProviderInvocation({ - providerId, - autoApprove, - initialPrompt, - resume, - }); - - const resolvedConfig = resolveProviderCommandConfig(providerId); - const mergedEnv = resolvedConfig?.env ? { ...resolvedConfig.env, ...env } : env; - - const proc = startSshPty({ - id, - target: ssh.target, - sshArgs: ssh.args, - cols, - rows, - env: mergedEnv, - }); - - if (!listeners.has(id)) { - proc.onData((data) => { - bufferedSendPtyData(id, data); - }); - proc.onExit(({ exitCode, signal }) => { - flushPtyData(id); - clearPtyData(id); - safeSendToOwner(id, `pty:exit:${id}`, { exitCode, signal }); - maybeMarkProviderFinish(id, exitCode, signal, 'process_exit'); - owners.delete(id); - listeners.delete(id); - removePtyRecord(id); - }); - listeners.add(id); - } - - // Resolve tmux config from local project settings - const remoteTmux = cwd ? 
await resolveTmuxEnabled(cwd) : false; - const tmuxOpt = remoteTmux ? { sessionName: getTmuxSessionName(id) } : undefined; - - const remoteInit = buildRemoteInitKeystrokes({ - cwd, - provider: remoteProvider, - tmux: tmuxOpt, - }); - if (remoteInit) { - proc.write(remoteInit); - } - - maybeMarkProviderStart(id); - try { - const windows = BrowserWindow.getAllWindows(); - windows.forEach((w: any) => w.webContents.send('pty:started', { id })); - } catch {} - - return { ok: true, tmux: remoteTmux }; - } - - if (existing) { - const wc = event.sender; - owners.set(id, wc); - // Still track agent start even when reusing PTY (happens after shell respawn) - maybeMarkProviderStart(id, providerId as ProviderId); - return { ok: true, reused: true }; - } - - // For additional chats without per-session isolation, never resume — - // they'd share the provider's directory-scoped session with other chats. - let effectiveResume = resume; - if (isChatPty(id)) { - const chatProvider = getProvider(providerId as ProviderId); - if (!chatProvider?.sessionIdFlag) { - effectiveResume = false; - } - } - - maybeAutoTrustForClaude(providerId, cwd); - - const shellSetup = await resolveShellSetup(cwd); - const tmux = await resolveTmuxEnabled(cwd); - - // Write Claude Code hook config so it calls back to Emdash on events - if (providerId === 'claude') { - try { - ClaudeHookService.writeHookConfig(cwd); - } catch (err) { - log.warn('pty:startDirect - failed to write Claude hook config', { - error: String(err), - }); - } - } - - // Try direct spawn first; skip if shellSetup or tmux requires a shell wrapper - const directProc = - shellSetup || tmux - ? 
null - : startDirectPty({ - id, - providerId, - cwd, - cols, - rows, - autoApprove, - initialPrompt, - env, - resume: effectiveResume, - tmux, - }); - - // Fall back to shell-based spawn when direct spawn is unavailable or shellSetup/tmux is set - let usedFallback = false; - let proc: import('node-pty').IPty; - if (directProc) { - proc = directProc; - } else { - const provider = getProvider(providerId as ProviderId); - if (!provider?.cli) { - return { ok: false, error: `CLI path not found for provider: ${providerId}` }; - } - if (!shellSetup && !tmux) - log.info('pty:startDirect - falling back to shell spawn', { id, providerId }); - proc = await startPty({ - id, - cwd, - shell: provider.cli, - cols, - rows, - autoApprove, - initialPrompt, - env, - skipResume: !resume, - shellSetup, - tmux, - }); - usedFallback = true; - } - - const wc = event.sender; - owners.set(id, wc); - - if (!listeners.has(id)) { - proc.onData((data) => { - bufferedSendPtyData(id, data); - }); - - proc.onExit(({ exitCode, signal }) => { - flushPtyData(id); - clearPtyData(id); - maybeMarkProviderFinish( - id, - exitCode, - signal, - isAppQuitting ? 'app_quit' : 'process_exit' - ); - // Direct-spawn CLIs can be replaced immediately by a fallback shell after exit. - // If this PTY has already been replaced, skip cleanup so we don't delete the new PTY record. 
- const current = getPty(id); - if (current && current !== proc) { - return; - } - safeSendToOwner(id, `pty:exit:${id}`, { exitCode, signal }); - // For direct spawn: keep owner (shell respawn reuses it), delete listeners (shell respawn re-adds) - // For fallback: clean up owner since no shell respawn happens - if (usedFallback) { - owners.delete(id); - } - listeners.delete(id); - removePtyRecord(id); - }); - listeners.add(id); - } - - // Clean up all PTYs owned by this WebContents when it's destroyed - // Only register once per WebContents to avoid MaxListenersExceededWarning - if (!wcDestroyedListeners.has(wc.id)) { - wcDestroyedListeners.add(wc.id); - wc.once('destroyed', () => { - wcDestroyedListeners.delete(wc.id); - for (const [ptyId, owner] of owners.entries()) { - if (owner === wc) { - try { - maybeMarkProviderFinish( - ptyId, - null, - undefined, - isAppQuitting ? 'app_quit' : 'owner_destroyed' - ); - killPty(ptyId); - } catch {} - owners.delete(ptyId); - listeners.delete(ptyId); - } - } - }); - } - - maybeMarkProviderStart(id, providerId as ProviderId); - - try { - const windows = BrowserWindow.getAllWindows(); - windows.forEach((w: any) => w.webContents.send('pty:started', { id })); - } catch {} - - return { ok: true, tmux }; - } catch (err: any) { - log.error('pty:startDirect FAIL', { id: args.id, error: err?.message || err }); - return { ok: false, error: String(err?.message || err) }; - } - } - ); -} - -function parseProviderPty(id: string): { - providerId: ProviderId; - taskId: string; -} | null { - const parsed = parsePtyId(id); - if (!parsed) return null; - return { providerId: parsed.providerId, taskId: parsed.suffix }; -} - -function providerRunKey(providerId: ProviderId, taskId: string) { - return `${providerId}:${taskId}`; -} - -function maybeMarkProviderStart(id: string, providerId?: ProviderId) { - finalizedPtys.delete(id); - - // First check if we have a direct provider ID (for multi-agent mode) - if (providerId && 
PROVIDER_IDS.includes(providerId)) { - ptyProviderMap.set(id, providerId); - const key = `${providerId}:${id}`; - if (providerPtyTimers.has(key)) return; - providerPtyTimers.set(key, Date.now()); - telemetry.capture('agent_run_start', { provider: providerId }); - return; - } - - // Check if we have a stored mapping (for subsequent calls) - const storedProvider = ptyProviderMap.get(id); - if (storedProvider) { - const key = `${storedProvider}:${id}`; - if (providerPtyTimers.has(key)) return; - providerPtyTimers.set(key, Date.now()); - telemetry.capture('agent_run_start', { provider: storedProvider }); - return; - } - - // Fall back to parsing the ID (single-agent mode) - const parsed = parseProviderPty(id); - if (!parsed) return; - const key = providerRunKey(parsed.providerId, parsed.taskId); - if (providerPtyTimers.has(key)) return; - providerPtyTimers.set(key, Date.now()); - telemetry.capture('agent_run_start', { provider: parsed.providerId }); -} - -function maybeMarkProviderFinish( - id: string, - exitCode: number | null | undefined, - signal: number | undefined, - cause: FinishCause -) { - if (finalizedPtys.has(id)) return; - finalizedPtys.add(id); - - let providerId: ProviderId | undefined; - let key: string; - - // First check if we have a stored mapping (multi-agent mode) - const storedProvider = ptyProviderMap.get(id); - if (storedProvider) { - providerId = storedProvider; - key = `${storedProvider}:${id}`; - } else { - // Fall back to parsing the ID (single-agent mode) - const parsed = parseProviderPty(id); - if (!parsed) return; - providerId = parsed.providerId; - key = providerRunKey(parsed.providerId, parsed.taskId); - } - - const started = providerPtyTimers.get(key); - providerPtyTimers.delete(key); - - // Clean up the provider mapping - ptyProviderMap.delete(id); - - // No valid exit code means the process was killed during cleanup, not a real completion - if (typeof exitCode !== 'number') return; - - const duration = started ? 
Math.max(0, Date.now() - started) : undefined; - const wasSignaled = signal !== undefined && signal !== null; - const outcome = exitCode !== 0 && !wasSignaled ? 'error' : 'ok'; - - telemetry.capture('agent_run_finish', { - provider: providerId, - outcome, - duration_ms: duration, - }); -} - -// Kill all PTYs on app shutdown to prevent crash loop -try { - app.on('before-quit', () => { - isAppQuitting = true; - for (const id of Array.from(owners.keys())) { - try { - // Ensure telemetry timers are cleared on app quit - maybeMarkProviderFinish(id, null, undefined, 'app_quit'); - killPty(id); - } catch {} - } - owners.clear(); - listeners.clear(); - }); -} catch {} diff --git a/src/main/services/ptyManager.ts b/src/main/services/ptyManager.ts deleted file mode 100644 index edb6fc37f..000000000 --- a/src/main/services/ptyManager.ts +++ /dev/null @@ -1,1370 +0,0 @@ -import os from 'os'; -import fs from 'fs'; -import path from 'path'; -import crypto from 'crypto'; -import type { IPty } from 'node-pty'; -import { log } from '../lib/logger'; -import { PROVIDERS, type ProviderDefinition } from '@shared/providers/registry'; -import { parsePtyId } from '@shared/ptyId'; -import { providerStatusCache } from './providerStatusCache'; -import { errorTracking } from '../errorTracking'; -import { getProviderCustomConfig } from '../settings'; -import { agentEventService } from './AgentEventService'; - -/** - * Environment variables to pass through for agent authentication. - * These are passed to CLI tools during direct spawn (which skips shell config). 
- */ -const AGENT_ENV_VARS = [ - 'AMP_API_KEY', - 'ANTHROPIC_API_KEY', - 'AUTOHAND_API_KEY', - 'AUGMENT_SESSION_AUTH', - 'AWS_ACCESS_KEY_ID', - 'AWS_DEFAULT_REGION', - 'AWS_PROFILE', - 'AWS_REGION', - 'AWS_SECRET_ACCESS_KEY', - 'AWS_SESSION_TOKEN', - 'AZURE_OPENAI_API_ENDPOINT', - 'AZURE_OPENAI_API_KEY', - 'AZURE_OPENAI_KEY', - 'CODEBUFF_API_KEY', - 'COPILOT_CLI_TOKEN', - 'CURSOR_API_KEY', - 'DASHSCOPE_API_KEY', - 'FACTORY_API_KEY', - 'GEMINI_API_KEY', - 'GH_TOKEN', - 'GITHUB_TOKEN', - 'GOOGLE_API_KEY', - 'GOOGLE_APPLICATION_CREDENTIALS', - 'GOOGLE_CLOUD_LOCATION', - 'GOOGLE_CLOUD_PROJECT', - 'HTTP_PROXY', - 'HTTPS_PROXY', - 'KIMI_API_KEY', - 'MISTRAL_API_KEY', - 'MOONSHOT_API_KEY', - 'NO_PROXY', - 'OPENAI_API_KEY', - 'OPENAI_BASE_URL', -]; - -type PtyRecord = { - id: string; - proc: IPty; - cwd?: string; // Working directory (for respawning shell after CLI exit) - isDirectSpawn?: boolean; // Whether this was a direct CLI spawn - kind?: 'local' | 'ssh'; - cols?: number; - rows?: number; - tmuxSessionName?: string; // Set when session is wrapped in tmux -}; - -const ptys = new Map(); -const MIN_PTY_COLS = 2; -const MIN_PTY_ROWS = 1; - -function getWindowsEssentialEnv(): Record { - const home = os.homedir(); - return { - PATH: process.env.PATH || process.env.Path || '', - PATHEXT: process.env.PATHEXT || '.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC', - SystemRoot: process.env.SystemRoot || 'C:\\Windows', - ComSpec: process.env.ComSpec || 'C:\\Windows\\System32\\cmd.exe', - TEMP: process.env.TEMP || process.env.TMP || '', - TMP: process.env.TMP || process.env.TEMP || '', - USERPROFILE: process.env.USERPROFILE || home, - APPDATA: process.env.APPDATA || '', - LOCALAPPDATA: process.env.LOCALAPPDATA || '', - HOMEDRIVE: process.env.HOMEDRIVE || '', - HOMEPATH: process.env.HOMEPATH || '', - USERNAME: process.env.USERNAME || os.userInfo().username, - // Program file paths needed by .NET, NuGet, MSBuild, and other tools - ProgramFiles: process.env.ProgramFiles || 
'C:\\Program Files', - 'ProgramFiles(x86)': process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', - ProgramData: process.env.ProgramData || 'C:\\ProgramData', - CommonProgramFiles: process.env.CommonProgramFiles || 'C:\\Program Files\\Common Files', - 'CommonProgramFiles(x86)': - process.env['CommonProgramFiles(x86)'] || 'C:\\Program Files (x86)\\Common Files', - ProgramW6432: process.env.ProgramW6432 || 'C:\\Program Files', - CommonProgramW6432: process.env.CommonProgramW6432 || 'C:\\Program Files\\Common Files', - }; -} - -// Display/desktop env vars needed for GUI operations from within PTY sessions. -const DISPLAY_ENV_VARS = [ - 'DISPLAY', // X11 display server - 'XAUTHORITY', // X11 auth cookie (often at non-standard path on Wayland+GNOME) - 'WAYLAND_DISPLAY', // Wayland compositor socket - 'XDG_RUNTIME_DIR', // Contains Wayland/D-Bus sockets (e.g. /run/user/1000) - 'XDG_CURRENT_DESKTOP', // Used by xdg-open for DE detection (e.g. "GNOME") - 'XDG_SESSION_TYPE', // Used by browsers/toolkits to select X11 vs Wayland - 'DBUS_SESSION_BUS_ADDRESS', // Needed by gio open and desktop portals -] as const; - -function getDisplayEnv(): Record { - const env: Record = {}; - for (const key of DISPLAY_ENV_VARS) { - if (process.env[key]) { - env[key] = process.env[key] as string; - } - } - return env; -} - -// --- Tmux session helpers --- - -/** - * Derive a deterministic tmux session name from a PTY ID. - * Sanitizes to characters allowed by tmux (alphanumeric, `-`, `_`, `.`). - */ -export function getTmuxSessionName(ptyId: string): string { - // PTY ID format: {providerId}-main-{taskId} or {providerId}-chat-{conversationId} - // Prefix with "emdash-" and sanitize - const sanitized = ptyId.replace(/[^a-zA-Z0-9._-]/g, '-'); - return `emdash-${sanitized}`; -} - -/** - * Kill a tmux session by PTY ID. Fire-and-forget — ignores errors - * for non-existent sessions (e.g., tmux not installed or session already dead). 
- */ -export function killTmuxSession(ptyId: string): void { - const sessionName = getTmuxSessionName(ptyId); - try { - const { execFile } = require('child_process'); - execFile('tmux', ['kill-session', '-t', sessionName], { timeout: 5000 }, (err: any) => { - if (!err) { - log.info('ptyManager:tmux - killed session', { sessionName }); - } - // Ignore errors — session may not exist or tmux not installed - }); - } catch { - // Ignore - } -} - -// TODO: Remote tmux cleanup will be handled by the workspace provider teardown script. -// The PTY record doesn't currently store SSH target/args, so we can't shell out -// `ssh tmux kill-session` from here. When workspace providers land, the -// teardown script is the right place for this. - -function resolveWindowsPtySpawn( - command: string, - args: string[] -): { command: string; args: string[] } { - if (process.platform !== 'win32') return { command, args }; - - const quoteForCmdExe = (input: string): string => { - if (input.length === 0) return '""'; - if (!/[\s"^&|<>()%!]/.test(input)) return input; - return `"${input - .replace(/%/g, '%%') - .replace(/!/g, '^!') - .replace(/(["^&|<>()])/g, '^$1')}"`; - }; - - const ext = path.extname(command).toLowerCase(); - if (ext === '.cmd' || ext === '.bat') { - const comspec = process.env.ComSpec || 'C:\\Windows\\System32\\cmd.exe'; - const fullCommandString = [command, ...args].map(quoteForCmdExe).join(' '); - return { command: comspec, args: ['/d', '/s', '/c', fullCommandString] }; - } - if (ext === '.ps1') { - return { - command: 'powershell.exe', - args: ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', command, ...args], - }; - } - - return { command, args }; -} - -/** - * Generate a deterministic UUID from an arbitrary string. - * Uses SHA-256 and formats 16 bytes as a UUID v4-compatible string - * (with version and variant bits set per RFC 4122). 
- */ -function deterministicUuid(input: string): string { - const hash = crypto.createHash('sha256').update(input).digest(); - // Set version 4 bits - hash[6] = (hash[6] & 0x0f) | 0x40; - // Set variant bits - hash[8] = (hash[8] & 0x3f) | 0x80; - const hex = hash.toString('hex').slice(0, 32); - return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20, 32)}`; -} - -// --------------------------------------------------------------------------- -// Persistent session-ID map -// -// Tracks which PTY IDs have already been started with --session-id so we -// know whether to create a new session or resume an existing one. -// -// First start → no entry → --session-id (create) -// Restart → entry → --resume (resume) -// --------------------------------------------------------------------------- -type SessionEntry = { uuid: string; cwd: string }; - -let _sessionMapPath: string | null = null; -let _sessionMap: Record | null = null; - -/** @internal Exported for testing. Sets session map path and clears the cache. */ -export function _resetSessionMapForTest(mapPath: string): void { - _sessionMapPath = mapPath; - _sessionMap = null; -} - -function sessionMapPath(): string { - if (!_sessionMapPath) { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const { app } = require('electron'); - _sessionMapPath = path.join(app.getPath('userData'), 'pty-session-map.json'); - } - return _sessionMapPath; -} - -function loadSessionMap(): Record { - if (_sessionMap) return _sessionMap; - try { - _sessionMap = JSON.parse(fs.readFileSync(sessionMapPath(), 'utf-8')); - } catch { - _sessionMap = {}; - } - return _sessionMap!; -} - -/** Check if the session map has entries for other chats of the same provider in the same cwd. 
*/ -function hasOtherSameProviderSessions(ptyId: string, providerId: string, cwd: string): boolean { - const map = loadSessionMap(); - const prefix = `${providerId}-`; - return Object.entries(map).some( - ([key, entry]) => key.startsWith(prefix) && key !== ptyId && entry.cwd === cwd - ); -} - -function markSessionCreated(ptyId: string, uuid: string, cwd: string): void { - const map = loadSessionMap(); - map[ptyId] = { uuid, cwd }; - try { - fs.writeFileSync(sessionMapPath(), JSON.stringify(map)); - } catch (e) { - log.warn('ptyManager: failed to persist session map', e); - } -} - -function removeSessionId(ptyId: string): void { - const map = loadSessionMap(); - delete map[ptyId]; - try { - fs.writeFileSync(sessionMapPath(), JSON.stringify(map)); - } catch (e) { - log.warn('ptyManager: failed to persist session map after removal', e); - } -} - -function claudeSessionFileExists(uuid: string, cwd: string): boolean { - try { - const encoded = cwd.replace(/[:\\/]/g, '-'); - const sessionFile = path.join(os.homedir(), '.claude', 'projects', encoded, `${uuid}.jsonl`); - return fs.existsSync(sessionFile); - } catch { - return false; - } -} - -/** - * Discover the existing Claude session ID for a working directory by scanning - * Claude Code's local project storage (~/.claude/projects//). - * - * Claude stores each conversation as a .jsonl file. We pick the most - * recently modified file whose UUID is NOT already claimed by another chat - * in our session map. This lets us seamlessly adopt an existing session - * when transitioning the main chat to session-isolated mode, so no history - * is lost. - */ -function discoverExistingClaudeSession(cwd: string, excludeUuids: Set): string | null { - try { - // Claude encodes project paths by replacing path separators; on Windows also strip ':'. 
- const encoded = cwd.replace(/[:\\/]/g, '-'); - const projectDir = path.join(os.homedir(), '.claude', 'projects', encoded); - - if (!fs.existsSync(projectDir)) return null; - - const entries = fs.readdirSync(projectDir).filter((f) => f.endsWith('.jsonl')); - if (entries.length === 0) return null; - - // Sort by modification time, newest first - const sorted = entries - .map((f) => ({ - uuid: f.replace('.jsonl', ''), - mtime: fs.statSync(path.join(projectDir, f)).mtimeMs, - })) - .sort((a, b) => b.mtime - a.mtime); - - // Return the most recent session not claimed by another chat - for (const entry of sorted) { - if (!excludeUuids.has(entry.uuid)) { - return entry.uuid; - } - } - return null; - } catch (e) { - log.warn('ptyManager: failed to discover existing Claude session', e); - return null; - } -} - -/** Collect all session UUIDs from the map that belong to a given provider in the same cwd, excluding one PTY. */ -function getOtherSessionUuids(ptyId: string, providerId: string, cwd: string): Set { - const map = loadSessionMap(); - const prefix = `${providerId}-`; - const uuids = new Set(); - for (const [key, entry] of Object.entries(map)) { - if (key.startsWith(prefix) && key !== ptyId && entry.cwd === cwd) { - uuids.add(entry.uuid); - } - } - return uuids; -} - -/** - * Build session-isolation CLI args for a provider that supports sessionIdFlag. - * - * Decision tree: - * 1. Known session in map → --resume - * 2. Additional chat (new) → --session-id (create) - * 3. Multi-chat transition → --session-id (adopt existing) - * 4. First-time main chat → --session-id (create, proactive) - * 5. Existing single-chat resume → (no isolation, caller uses generic -c -r) - * - * Returns true if session isolation args were added. 
- */ -export function applySessionIsolation( - cliArgs: string[], - provider: ProviderDefinition, - id: string, - cwd: string, - isResume: boolean -): boolean { - if (!provider.sessionIdFlag) return false; - - const parsed = parsePtyId(id); - if (!parsed) return false; - - const sessionUuid = deterministicUuid(parsed.suffix); - const isAdditionalChat = parsed.kind === 'chat'; - - const entry = loadSessionMap()[id]; - const knownSession = entry?.uuid; - if (knownSession) { - // For Claude, validate the session still exists on disk before resuming. - // Also treat cwd mismatch as stale — the session belongs to a different - // project context and Claude would look in the wrong directory. - if (provider.id === 'claude') { - const isStale = entry.cwd !== cwd || !claudeSessionFileExists(knownSession, cwd); - if (isStale) { - log.warn('ptyManager: stale session detected, creating new session', { - ptyId: id, - staleUuid: knownSession, - }); - removeSessionId(id); - // Fall through — the decision tree below will create a new session - // or the caller will use generic resume flags - } else { - cliArgs.push('--resume', knownSession); - return true; - } - } else { - cliArgs.push('--resume', knownSession); - return true; - } - } - - if (isAdditionalChat) { - cliArgs.push(provider.sessionIdFlag, sessionUuid); - markSessionCreated(id, sessionUuid, cwd); - return true; - } - - if (hasOtherSameProviderSessions(id, parsed.providerId, cwd)) { - // Main chat transitioning to multi-chat mode. Try to discover its - // existing session from Claude's local storage and adopt it. 
- const otherUuids = getOtherSessionUuids(id, parsed.providerId, cwd); - const existingSession = discoverExistingClaudeSession(cwd, otherUuids); - if (existingSession) { - cliArgs.push(provider.sessionIdFlag, existingSession); - markSessionCreated(id, existingSession, cwd); - } else { - cliArgs.push(provider.sessionIdFlag, sessionUuid); - markSessionCreated(id, sessionUuid, cwd); - } - return true; - } - - if (!isResume) { - // First-time creation — proactively assign a session ID so we can - // reliably resume later if more chats of this provider are added. - cliArgs.push(provider.sessionIdFlag, sessionUuid); - markSessionCreated(id, sessionUuid, cwd); - return true; - } - - return false; -} - -/** - * Parse a shell-style argument string into an array of arguments. - * Handles single quotes, double quotes, and escape characters. - * - * Examples: - * '--flag1 --flag2' → ['--flag1', '--flag2'] - * '--message "hello world"' → ['--message', 'hello world'] - * "--path '/my dir/file'" → ['--path', '/my dir/file'] - * '--arg "say \"hi\""' → ['--arg', 'say "hi"'] - */ -export function parseShellArgs(input: string): string[] { - const args: string[] = []; - let current = ''; - let inSingleQuote = false; - let inDoubleQuote = false; - let escape = false; - - for (let i = 0; i < input.length; i++) { - const char = input[i]; - - if (escape) { - // Handle escaped character - current += char; - escape = false; - continue; - } - - if (char === '\\') { - if (process.platform === 'win32') { - // Preserve backslashes for Windows paths. Only treat \" inside double-quotes as an escape. 
- const next = input[i + 1]; - if (inDoubleQuote && next === '"') { - escape = true; - continue; - } - } else if (!inSingleQuote) { - // POSIX-style backslash escapes next character (except inside single quotes) - escape = true; - continue; - } - } - - if (char === "'" && !inDoubleQuote) { - // Toggle single quote mode - inSingleQuote = !inSingleQuote; - continue; - } - - if (char === '"' && !inSingleQuote) { - // Toggle double quote mode - inDoubleQuote = !inDoubleQuote; - continue; - } - - if (char === ' ' && !inSingleQuote && !inDoubleQuote) { - // Space outside quotes - end of argument - if (current.length > 0) { - args.push(current); - current = ''; - } - continue; - } - - current += char; - } - - // Handle trailing backslash: include it literally - if (escape) { - current += '\\'; - } - - // Warn on unclosed quotes (still push what we have) - if (inSingleQuote || inDoubleQuote) { - log.warn('parseShellArgs: unclosed quote in input', { input }); - } - - // Don't forget the last argument - if (current.length > 0) { - args.push(current); - } - - return args; -} - -export type ResolvedProviderCommandConfig = { - provider: ProviderDefinition; - cli: string; - resumeFlag?: string; - defaultArgs?: string[]; - autoApproveFlag?: string; - initialPromptFlag?: string; - extraArgs?: string[]; - env?: Record; -}; - -type ProviderCliArgsOptions = { - resume?: boolean; - resumeFlag?: string; - defaultArgs?: string[]; - extraArgs?: string[]; - autoApprove?: boolean; - autoApproveFlag?: string; - initialPrompt?: string; - initialPromptFlag?: string; - useKeystrokeInjection?: boolean; -}; - -export function resolveProviderCommandConfig( - providerId: string -): ResolvedProviderCommandConfig | null { - const provider = PROVIDERS.find((p) => p.id === providerId); - if (!provider) return null; - - const customConfig = getProviderCustomConfig(provider.id); - - const extraArgs = - customConfig?.extraArgs !== undefined && customConfig.extraArgs.trim() !== '' - ? 
parseShellArgs(customConfig.extraArgs.trim()) - : undefined; - - let env: Record | undefined; - if (customConfig?.env && typeof customConfig.env === 'object') { - env = {}; - for (const [k, v] of Object.entries(customConfig.env)) { - if (typeof v === 'string' && /^[A-Za-z_][A-Za-z0-9_]*$/.test(k)) { - env[k] = v; - } - } - if (Object.keys(env).length === 0) env = undefined; - } - - return { - provider, - cli: - customConfig?.cli !== undefined && customConfig.cli !== '' - ? customConfig.cli - : provider.cli || providerId.toLowerCase(), - resumeFlag: - customConfig?.resumeFlag !== undefined ? customConfig.resumeFlag : provider.resumeFlag, - defaultArgs: - customConfig?.defaultArgs !== undefined - ? parseShellArgs(customConfig.defaultArgs) - : provider.defaultArgs, - autoApproveFlag: - customConfig?.autoApproveFlag !== undefined - ? customConfig.autoApproveFlag - : provider.autoApproveFlag, - initialPromptFlag: - customConfig?.initialPromptFlag !== undefined - ? customConfig.initialPromptFlag - : provider.initialPromptFlag, - extraArgs, - env, - }; -} - -export function buildProviderCliArgs(options: ProviderCliArgsOptions): string[] { - const args: string[] = []; - - if (options.resume && options.resumeFlag) { - args.push(...parseShellArgs(options.resumeFlag)); - } - - if (options.defaultArgs?.length) { - args.push(...options.defaultArgs); - } - - if (options.autoApprove && options.autoApproveFlag) { - args.push(...parseShellArgs(options.autoApproveFlag)); - } - - if ( - options.initialPromptFlag !== undefined && - !options.useKeystrokeInjection && - options.initialPrompt?.trim() - ) { - if (options.initialPromptFlag) { - args.push(...parseShellArgs(options.initialPromptFlag)); - } - args.push(options.initialPrompt.trim()); - } - - if (options.extraArgs?.length) { - args.push(...options.extraArgs); - } - - return args; -} - -const resolvedCommandPathCache = new Map(); - -function resolveCommandPath(command: string): string | null { - const trimmed = command.trim(); - 
if (!trimmed) return null; - - const pathLike = - trimmed.includes('/') || - trimmed.includes('\\') || - trimmed.startsWith('.') || - /^[A-Za-z]:/.test(trimmed); - - const isExecutableFile = (candidate: string): boolean => { - try { - const stat = fs.statSync(candidate); - if (!stat.isFile()) return false; - if (process.platform === 'win32') return true; - fs.accessSync(candidate, fs.constants.X_OK); - return true; - } catch { - return false; - } - }; - - const appendWindowsExecutableExts = (base: string): string[] => { - if (process.platform !== 'win32') return [base]; - - if (path.extname(base)) return [base]; - - const pathExt = process.env.PATHEXT || '.COM;.EXE;.BAT;.CMD'; - const exts = pathExt - .split(';') - .map((ext) => ext.trim()) - .filter(Boolean); - return [base, ...exts.map((ext) => `${base}${ext.toLowerCase()}`)]; - }; - - const resolveFromCandidates = (bases: string[], makeAbsolute: boolean): string | null => { - for (const base of bases) { - const candidates = appendWindowsExecutableExts(base); - for (const candidate of candidates) { - const target = makeAbsolute ? path.resolve(candidate) : candidate; - if (isExecutableFile(target)) { - return target; - } - } - } - return null; - }; - - if (pathLike) { - return resolveFromCandidates([trimmed], true); - } - - const pathEnv = process.env.PATH; - if (!pathEnv) return null; - - const pathDirs = pathEnv.split(path.delimiter).filter(Boolean); - const pathCandidates = pathDirs.map((dir) => path.join(dir, trimmed)); - return resolveFromCandidates(pathCandidates, false); -} - -export function parseCustomCliForDirectSpawn(input: string): string[] { - const trimmed = input.trim(); - if (!trimmed) return []; - - if (process.platform !== 'win32') { - return parseShellArgs(trimmed); - } - - // Preserve backslashes for Windows absolute/UNC paths. - if ((/^[A-Za-z]:\\/.test(trimmed) || /^\\\\/.test(trimmed)) && !/\s/.test(trimmed)) { - return [trimmed]; - } - - // Handle quoted absolute paths with spaces, e.g. 
"C:\Program Files\tool\tool.cmd" - const quotedAbsolutePath = trimmed.match(/^"([A-Za-z]:\\[^"]+)"$/); - if (quotedAbsolutePath) { - return [quotedAbsolutePath[1]]; - } - const singleQuotedAbsolutePath = trimmed.match(/^'([A-Za-z]:\\[^']+)'$/); - if (singleQuotedAbsolutePath) { - return [singleQuotedAbsolutePath[1]]; - } - - return parseShellArgs(trimmed); -} - -function resolveCommandPathCached(command: string): string | null { - if (resolvedCommandPathCache.has(command)) { - return resolvedCommandPathCache.get(command) ?? null; - } - const resolved = resolveCommandPath(command); - resolvedCommandPathCache.set(command, resolved); - return resolved; -} - -function needsShellResolution(command: string): boolean { - return /[|&;<>()$`]/.test(command); -} - -// Callback to spawn shell after direct CLI exits (set by ptyIpc) -let onDirectCliExitCallback: ((id: string, cwd: string) => void) | null = null; - -export function setOnDirectCliExit(callback: (id: string, cwd: string) => void): void { - onDirectCliExitCallback = callback; -} - -function escapeShSingleQuoted(value: string): string { - // Safe for embedding into a single-quoted POSIX shell string. - return `'${value.replace(/'/g, "'\\''")}'`; -} - -/** - * Spawn an interactive SSH session in a PTY. - * - * This uses the system `ssh` binary so user SSH config features (e.g. ProxyJump, - * UseKeychain on macOS) work as expected. 
- */ -export function startSshPty(options: { - id: string; - target: string; // alias or user@host - sshArgs?: string[]; // extra ssh args like -p, -i - remoteInitCommand?: string; // if provided, executed by remote shell - cols?: number; - rows?: number; - env?: Record; -}): IPty { - if (process.env.EMDASH_DISABLE_PTY === '1') { - throw new Error('PTY disabled via EMDASH_DISABLE_PTY=1'); - } - - const { id, target, sshArgs = [], remoteInitCommand, cols = 120, rows = 32, env } = options; - - // Lazy load native module - let pty: typeof import('node-pty'); - try { - pty = require('node-pty'); - } catch (e: any) { - throw new Error(`PTY unavailable: ${e?.message || String(e)}`); - } - - // Build a minimal environment; include SSH_AUTH_SOCK so agent works. - const useEnv: Record = { - TERM: 'xterm-256color', - COLORTERM: 'truecolor', - TERM_PROGRAM: 'emdash', - HOME: process.env.HOME || os.homedir(), - USER: process.env.USER || os.userInfo().username, - PATH: process.env.PATH || process.env.Path || '', - ...(process.env.LANG && { LANG: process.env.LANG }), - ...(process.env.TMPDIR && { TMPDIR: process.env.TMPDIR }), - ...getDisplayEnv(), - ...(process.env.SSH_AUTH_SOCK && { SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK }), - ...(process.platform === 'win32' ? getWindowsEssentialEnv() : {}), - }; - - // Pass through agent authentication env vars (same allowlist as direct spawn) - for (const key of AGENT_ENV_VARS) { - if (process.env[key]) { - useEnv[key] = process.env[key] as string; - } - } - - if (env) { - for (const [key, value] of Object.entries(env)) { - if (!key.startsWith('EMDASH_')) continue; - if (typeof value === 'string') { - useEnv[key] = value; - } - } - } - - const args: string[] = ['-tt', ...sshArgs, target]; - if (typeof remoteInitCommand === 'string' && remoteInitCommand.trim().length > 0) { - // Pass as a single remote command argument; ssh will execute it via the remote user's shell. 
- args.push(remoteInitCommand); - } - - const proc = pty.spawn('ssh', args, { - name: 'xterm-256color', - cols, - rows, - cwd: process.env.HOME || os.homedir(), - env: useEnv, - }); - - ptys.set(id, { id, proc, kind: 'ssh', cols, rows }); - return proc; -} - -/** - * Spawn a CLI directly without a shell wrapper. - * This is faster because it skips shell config loading (oh-my-zsh, nvm, etc.) - * - * Returns null if the CLI path is not known (not in providerStatusCache) - * or when CLI config requires shell parsing. - */ -export function startDirectPty(options: { - id: string; - providerId: string; - cwd: string; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - env?: Record; - resume?: boolean; - tmux?: boolean; -}): IPty | null { - if (process.env.EMDASH_DISABLE_PTY === '1') { - throw new Error('PTY disabled via EMDASH_DISABLE_PTY=1'); - } - - // Tmux wrapping requires a shell — fall back to startPty() which handles tmux. - if (options.tmux) { - log.info('ptyManager:directSpawn - tmux enabled, falling back to shell spawn', { - id: options.id, - }); - return null; - } - - const { - id, - providerId, - cwd, - cols = 120, - rows = 32, - autoApprove, - initialPrompt, - env, - resume, - } = options; - - const resolvedConfig = resolveProviderCommandConfig(providerId); - const provider = resolvedConfig?.provider; - - // Get the CLI path from cache - const status = providerStatusCache.get(providerId); - if (!status?.installed || !status?.path) { - log.warn('ptyManager:directSpawn - CLI path not found', { providerId }); - return null; - } - - let cliPath = status.path; - - // Direct spawn requires an executable path. If custom CLI is an alias or shell - // expression, fall back to shell mode. 
- if (provider && resolvedConfig && resolvedConfig.cli !== provider.cli) { - const cliParts = parseCustomCliForDirectSpawn(resolvedConfig.cli); - if (cliParts.length !== 1) { - log.info('ptyManager:directSpawn - custom CLI needs shell parsing, using fallback', { - providerId, - cli: resolvedConfig.cli, - }); - return null; - } - - const customCommand = cliParts[0]; - if (needsShellResolution(customCommand)) { - log.info('ptyManager:directSpawn - custom CLI requires shell resolution, using fallback', { - providerId, - cli: resolvedConfig.cli, - }); - return null; - } - - const resolvedCustomPath = resolveCommandPathCached(customCommand); - if (!resolvedCustomPath) { - log.info('ptyManager:directSpawn - custom CLI not directly executable, using fallback', { - providerId, - cli: resolvedConfig.cli, - }); - return null; - } - - cliPath = resolvedCustomPath; - } - - // Build CLI arguments - const cliArgs: string[] = []; - - if (provider && resolvedConfig) { - // Session isolation for multi-chat scenarios. - // See applySessionIsolation() for the full decision tree. 
- const usedSessionIsolation = applySessionIsolation(cliArgs, provider, id, cwd, !!resume); - - cliArgs.push( - ...buildProviderCliArgs({ - resume: !usedSessionIsolation && !!resume, - resumeFlag: resolvedConfig.resumeFlag, - defaultArgs: resolvedConfig.defaultArgs, - extraArgs: resolvedConfig.extraArgs, - autoApprove, - autoApproveFlag: resolvedConfig.autoApproveFlag, - initialPrompt, - initialPromptFlag: resolvedConfig.initialPromptFlag, - useKeystrokeInjection: provider.useKeystrokeInjection, - }) - ); - } - - // Build minimal environment - just what the CLI needs - const useEnv: Record = { - TERM: 'xterm-256color', - COLORTERM: 'truecolor', - TERM_PROGRAM: 'emdash', - HOME: process.env.HOME || os.homedir(), - USER: process.env.USER || os.userInfo().username, - // Include PATH so CLI can find its dependencies - PATH: process.env.PATH || process.env.Path || '', - ...(process.env.LANG && { LANG: process.env.LANG }), - ...(process.env.TMPDIR && { TMPDIR: process.env.TMPDIR }), - ...getDisplayEnv(), - ...(process.env.SSH_AUTH_SOCK && { SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK }), - ...(process.platform === 'win32' ? 
getWindowsEssentialEnv() : {}), - }; - - // Pass through agent authentication env vars - for (const key of AGENT_ENV_VARS) { - if (process.env[key]) { - useEnv[key] = process.env[key]; - } - } - - if (resolvedConfig?.env) { - for (const [key, value] of Object.entries(resolvedConfig.env)) { - if (/^[A-Za-z_][A-Za-z0-9_]*$/.test(key) && typeof value === 'string') { - useEnv[key] = value; - } - } - } - - if (env) { - for (const [key, value] of Object.entries(env)) { - if (!key.startsWith('EMDASH_')) continue; - if (typeof value === 'string') { - useEnv[key] = value; - } - } - } - - // Pass agent event hook env vars so CLI hooks can call back to Emdash - const hookPort = agentEventService.getPort(); - if (hookPort > 0) { - useEnv['EMDASH_HOOK_PORT'] = String(hookPort); - useEnv['EMDASH_PTY_ID'] = id; - useEnv['EMDASH_HOOK_TOKEN'] = agentEventService.getToken(); - } - - // Lazy load native module - let pty: typeof import('node-pty'); - try { - pty = require('node-pty'); - } catch (e: any) { - throw new Error(`PTY unavailable: ${e?.message || String(e)}`); - } - - const spawnSpec = resolveWindowsPtySpawn(cliPath, cliArgs); - const proc = pty.spawn(spawnSpec.command, spawnSpec.args, { - name: 'xterm-256color', - cols, - rows, - cwd, - env: useEnv, - }); - - // Store record with cwd for shell respawn after CLI exits - ptys.set(id, { id, proc, cwd, isDirectSpawn: true, kind: 'local', cols, rows }); - - // When CLI exits, spawn a shell so user can continue working - proc.onExit(() => { - const rec = ptys.get(id); - if (rec?.isDirectSpawn && rec.cwd && onDirectCliExitCallback) { - // Spawn shell immediately after CLI exits - onDirectCliExitCallback(id, rec.cwd); - } - }); - - return proc; -} - -function getDefaultShell(): string { - if (process.platform === 'win32') { - // Prefer ComSpec (usually cmd.exe) or fallback to PowerShell - return process.env.ComSpec || 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'; - } - return process.env.SHELL || '/bin/bash'; -} 
- -export async function startPty(options: { - id: string; - cwd?: string; - shell?: string; - env?: NodeJS.ProcessEnv; - cols?: number; - rows?: number; - autoApprove?: boolean; - initialPrompt?: string; - skipResume?: boolean; - shellSetup?: string; - tmux?: boolean; -}): Promise { - if (process.env.EMDASH_DISABLE_PTY === '1') { - throw new Error('PTY disabled via EMDASH_DISABLE_PTY=1'); - } - const { - id, - cwd, - shell, - env, - cols = 80, - rows = 24, - autoApprove, - initialPrompt, - skipResume, - shellSetup, - tmux, - } = options; - - const defaultShell = getDefaultShell(); - let useShell = shell || defaultShell; - const useCwd = cwd || process.cwd() || os.homedir(); - - // Build a clean environment instead of inheriting process.env wholesale. - // - // WHY: When Emdash runs as an AppImage on Linux (or other packaged Electron apps), - // the parent process.env contains packaging artifacts like PYTHONHOME, APPDIR, - // APPIMAGE, etc. These variables can break user tools, especially Python virtual - // environments which fail with "Could not find platform independent libraries" - // when PYTHONHOME points to the AppImage's bundled Python. - // - // SOLUTION: Only pass through essential variables and let login shells (-il) - // rebuild the environment from the user's shell configuration files - // (.profile, .bashrc, .zshrc, etc.). This is how `sudo -i`, `ssh`, and other - // tools create clean user environments. - // - // See: https://github.com/generalaction/emdash/issues/485 - const useEnv: Record = { - TERM: 'xterm-256color', - COLORTERM: 'truecolor', - TERM_PROGRAM: 'emdash', - HOME: process.env.HOME || os.homedir(), - USER: process.env.USER || os.userInfo().username, - SHELL: process.env.SHELL || defaultShell, - ...(process.platform === 'win32' ? 
getWindowsEssentialEnv() : {}), - ...(process.env.LANG && { LANG: process.env.LANG }), - ...(process.env.TMPDIR && { TMPDIR: process.env.TMPDIR }), - ...(process.env.DISPLAY && { DISPLAY: process.env.DISPLAY }), - ...getDisplayEnv(), - ...(process.env.SSH_AUTH_SOCK && { SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK }), - ...(env || {}), - }; - - // Pass agent event hook env vars so CLI hooks can call back to Emdash - const hookPort = agentEventService.getPort(); - if (hookPort > 0) { - useEnv['EMDASH_HOOK_PORT'] = String(hookPort); - useEnv['EMDASH_PTY_ID'] = id; - useEnv['EMDASH_HOOK_TOKEN'] = agentEventService.getToken(); - } - - // On Windows, resolve shell command to full path for node-pty - if (process.platform === 'win32' && shell && !shell.includes('\\') && !shell.includes('/')) { - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const { execSync } = require('child_process'); - - // Try .cmd first (npm globals are typically .cmd files) - let resolved = ''; - try { - resolved = execSync(`where ${shell}.cmd`, { encoding: 'utf8' }) - .trim() - .split('\n')[0] - .replace(/\r/g, '') - .trim(); - } catch { - // If .cmd doesn't exist, try without extension - resolved = execSync(`where ${shell}`, { encoding: 'utf8' }) - .trim() - .split('\n')[0] - .replace(/\r/g, '') - .trim(); - } - - // Ensure we have an executable extension - if (resolved && !resolved.match(/\.(exe|cmd|bat)$/i)) { - // If no executable extension, try appending .cmd - const cmdPath = resolved + '.cmd'; - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const fs = require('fs'); - if (fs.existsSync(cmdPath)) { - resolved = cmdPath; - } - } catch { - // Ignore fs errors - } - } - - if (resolved) { - useShell = resolved; - } - } catch { - // Fall back to original shell name - } - } - - // Lazy load native module at call time to prevent startup crashes - // eslint-disable-next-line @typescript-eslint/no-var-requires - let pty: typeof import('node-pty'); - 
try { - pty = require('node-pty'); - } catch (e: any) { - throw new Error(`PTY unavailable: ${e?.message || String(e)}`); - } - - // Provide sensible defaults for interactive shells so they render prompts. - // For provider CLIs, spawn the user's shell and run the provider command via -c, - // then exec back into the shell to allow users to stay in a normal prompt after exiting the agent. - const args: string[] = []; - if (process.platform !== 'win32') { - try { - const base = String(useShell).split('/').pop() || ''; - const baseLower = base.toLowerCase(); - const provider = PROVIDERS.find((p) => p.cli === baseLower); - - if (provider) { - const resolvedConfig = resolveProviderCommandConfig(provider.id); - const resolvedCli = resolvedConfig?.cli || provider.cli || baseLower; - - // Build the provider command with flags - const cliArgs: string[] = []; - - // Session isolation — see applySessionIsolation() for the full decision tree. - const usedSessionIsolation = applySessionIsolation( - cliArgs, - provider, - id, - useCwd, - !skipResume - ); - - cliArgs.push( - ...buildProviderCliArgs({ - resume: !usedSessionIsolation && !skipResume, - resumeFlag: resolvedConfig?.resumeFlag, - defaultArgs: resolvedConfig?.defaultArgs, - extraArgs: resolvedConfig?.extraArgs, - autoApprove, - autoApproveFlag: resolvedConfig?.autoApproveFlag, - initialPrompt, - initialPromptFlag: resolvedConfig?.initialPromptFlag, - useKeystrokeInjection: provider.useKeystrokeInjection, - }) - ); - - if (resolvedConfig?.env) { - for (const [k, v] of Object.entries(resolvedConfig.env)) { - if (/^[A-Za-z_][A-Za-z0-9_]*$/.test(k) && typeof v === 'string') { - useEnv[k] = v; - } - } - } - - const cliCommand = resolvedCli; - const commandString = - cliArgs.length > 0 - ? `${cliCommand} ${cliArgs - .map((arg) => - /[\s'"\\$`\n\r\t]/.test(arg) ? 
`'${arg.replace(/'/g, "'\\''")}'` : arg - ) - .join(' ')}` - : cliCommand; - - const shellBase = (defaultShell.split('/').pop() || '').toLowerCase(); - - // After the provider exits, exec back into the user's shell (login+interactive) - const resumeShell = - shellBase === 'fish' - ? `'${defaultShell.replace(/'/g, "'\\''")}' -i -l` - : `'${defaultShell.replace(/'/g, "'\\''")}' -il`; - const chainCommand = shellSetup - ? `${shellSetup} && ${commandString}; exec ${resumeShell}` - : `${commandString}; exec ${resumeShell}`; - - // Always use the default shell for the -c command to avoid re-detecting provider CLI - useShell = defaultShell; - if (shellBase === 'zsh') args.push('-lic', chainCommand); - else if (shellBase === 'bash') args.push('-lic', chainCommand); - else if (shellBase === 'fish') args.push('-l', '-i', '-c', chainCommand); - else if (shellBase === 'sh') args.push('-lc', chainCommand); - else args.push('-c', chainCommand); // Fallback for other shells - } else { - // For normal shells, use login + interactive to load user configs - if (shellSetup) { - const resumeShell = - baseLower === 'fish' - ? `'${useShell.replace(/'/g, "'\\''")}' -i -l` - : `'${useShell.replace(/'/g, "'\\''")}' -il`; - if (baseLower === 'fish') { - args.push('-l', '-i', '-c', `${shellSetup}; exec ${resumeShell}`); - } else { - const cFlag = baseLower === 'sh' ? '-lc' : '-lic'; - args.push(cFlag, `${shellSetup}; exec ${resumeShell}`); - } - } else { - if (baseLower === 'fish') { - args.push('-i', '-l'); - } else { - args.push( - baseLower === 'zsh' || baseLower === 'bash' || baseLower === 'sh' ? '-il' : '-i' - ); - } - } - } - } catch {} - } - - // When tmux is enabled, wrap the spawn in a tmux session. - // tmux new-session -As creates or attaches to a named session. - // The inner shell command (with the agent CLI) runs inside tmux. 
- let tmuxSessionName: string | undefined; - let spawnCommand = useShell; - let spawnArgs = args; - - if (tmux && process.platform !== 'win32') { - let tmuxAvailable = false; - try { - const { execFileSync } = require('child_process'); - execFileSync('tmux', ['-V'], { timeout: 3000, stdio: 'ignore' }); - tmuxAvailable = true; - } catch { - log.warn('ptyManager:tmux - tmux not found, falling back to unwrapped spawn', { id }); - } - - if (tmuxAvailable) { - tmuxSessionName = getTmuxSessionName(id); - // Build: tmux new-session -As -- - spawnCommand = 'tmux'; - spawnArgs = ['new-session', '-As', tmuxSessionName, '--', useShell, ...args]; - log.info('ptyManager:tmux - wrapping in tmux session', { id, tmuxSessionName }); - } - } - - let proc: IPty; - try { - const spawnSpec = resolveWindowsPtySpawn(spawnCommand, spawnArgs); - proc = pty.spawn(spawnSpec.command, spawnSpec.args, { - name: 'xterm-256color', - cols, - rows, - cwd: useCwd, - env: useEnv, - }); - } catch (err: any) { - // Track initial spawn error - const provider = args.find((arg) => PROVIDERS.some((p) => p.cli === arg)); - await errorTracking.captureAgentSpawnError(err, shell || 'unknown', id, { - cwd: useCwd, - args: args.join(' '), - provider: provider || undefined, - }); - - try { - const fallbackShell = getDefaultShell(); - proc = pty.spawn(fallbackShell, [], { - name: 'xterm-256color', - cols, - rows, - cwd: useCwd, - env: useEnv, - }); - } catch (err2: any) { - // Track the fallback spawn error as critical - await errorTracking.captureCriticalError(err2, { - operation: 'pty_spawn_fallback', - service: 'ptyManager', - error_type: 'spawn_error', - shell: getDefaultShell(), - original_error: err?.message, - }); - throw new Error(`PTY spawn failed: ${err2?.message || err?.message || String(err2 || err)}`); - } - } - - ptys.set(id, { id, proc, kind: 'local', cols, rows, tmuxSessionName }); - return proc; -} - -export function writePty(id: string, data: string): void { - const rec = ptys.get(id); - if 
(!rec) { - return; - } - rec.proc.write(data); -} - -export function resizePty(id: string, cols: number, rows: number): void { - const rec = ptys.get(id); - if (!rec) { - return; - } - const normalizedCols = Number.isFinite(cols) ? Math.max(MIN_PTY_COLS, Math.floor(cols)) : 0; - const normalizedRows = Number.isFinite(rows) ? Math.max(MIN_PTY_ROWS, Math.floor(rows)) : 0; - if (normalizedCols <= 0 || normalizedRows <= 0) return; - if (rec.cols === normalizedCols && rec.rows === normalizedRows) return; - try { - rec.proc.resize(normalizedCols, normalizedRows); - rec.cols = normalizedCols; - rec.rows = normalizedRows; - } catch (error: any) { - if ( - error && - (error.code === 'EBADF' || - /EBADF/.test(String(error)) || - /Napi::Error/.test(String(error)) || - /ENOTTY/.test(String(error)) || - /ioctl\(2\) failed/.test(String(error)) || - error.message?.includes('not open')) - ) { - // Expected during shutdown - PTY already exited - return; - } - log.error('ptyManager:resizeFailed', { - id, - cols: normalizedCols, - rows: normalizedRows, - error: String(error), - }); - } -} - -export function killPty(id: string): void { - const rec = ptys.get(id); - if (!rec) { - return; - } - try { - rec.proc.kill(); - } finally { - ptys.delete(id); - } -} - -export function removePtyRecord(id: string): void { - ptys.delete(id); -} - -export function hasPty(id: string): boolean { - return ptys.has(id); -} - -export function getPty(id: string): IPty | undefined { - return ptys.get(id)?.proc; -} - -export function getPtyKind(id: string): 'local' | 'ssh' | undefined { - return ptys.get(id)?.kind; -} - -export function getPtyTmuxSessionName(id: string): string | undefined { - return ptys.get(id)?.tmuxSessionName; -} diff --git a/src/main/services/ssh/SshConnectionMonitor.ts b/src/main/services/ssh/SshConnectionMonitor.ts deleted file mode 100644 index 71f680869..000000000 --- a/src/main/services/ssh/SshConnectionMonitor.ts +++ /dev/null @@ -1,392 +0,0 @@ -import { EventEmitter } from 
'events'; -import { ConnectionState, SshConfig } from '../../../shared/ssh/types'; -import { ConnectionMetrics } from './types'; - -/** - * Extended metrics with monitoring state - */ -interface MonitoredConnection { - connectionId: string; - state: ConnectionState; - config: SshConfig; - metrics: ConnectionMetrics; - reconnectAttempts: number; - lastError?: string; -} - -/** - * Callback that checks whether a connection is still alive in the real - * connection pool (e.g. SshService.isConnected). - */ -export type ConnectionChecker = (connectionId: string) => boolean; - -/** - * Events emitted by SshConnectionMonitor: - * - 'stateChange': (connectionId: string, state: ConnectionState, error?: string) => void - * - 'healthCheck': (connectionId: string, isHealthy: boolean, latencyMs: number) => void - * - 'reconnect': (connectionId: string, config: SshConfig, attempt: number) => void - * - 'reconnectFailed': (connectionId: string, error: string) => void - * - 'metrics': (connectionId: string, metrics: ConnectionMetrics) => void - */ - -/** - * Service for monitoring SSH connection health and metrics. - * - * Instead of maintaining its own ping timer (which was never wired up and - * caused phantom reconnect loops), the monitor now delegates liveness - * checks to a `connectionChecker` callback — typically - * `SshService.isConnected()`. ssh2's built-in keepalive - * (`keepaliveInterval` / `keepaliveCountMax`) handles the actual TCP - * liveness; when the connection drops, ssh2 emits `close` which removes - * the connection from the pool and SshService emits `disconnected`. - * - * The monitor reacts to that signal (via `handleDisconnect`) and triggers - * reconnect with exponential backoff. The periodic health check is now a - * safety net that detects pool removal the monitor didn't hear about. 
- */ -export class SshConnectionMonitor extends EventEmitter { - private connections: Map = new Map(); - private checkInterval?: NodeJS.Timeout; - private readonly DEFAULT_INTERVAL_MS = 30000; // 30 seconds - private readonly MAX_RECONNECT_ATTEMPTS = 3; - private readonly RECONNECT_BACKOFF_MS = [1000, 5000, 15000]; // Exponential backoff delays - private connectionChecker: ConnectionChecker; - - constructor(connectionChecker?: ConnectionChecker) { - super(); - // Default: always report alive (no-op). Caller should provide a real checker. - this.connectionChecker = connectionChecker ?? (() => true); - } - - /** - * Starts monitoring a connection. - * @param connectionId - ID of the connection to monitor - * @param config - SSH configuration for potential reconnection - */ - startMonitoring(connectionId: string, config: SshConfig): void { - // Don't duplicate monitoring - if (this.connections.has(connectionId)) { - return; - } - - const now = new Date(); - const monitored: MonitoredConnection = { - connectionId, - state: 'connected', - config, - metrics: { - connectionId, - bytesSent: 0, - bytesReceived: 0, - latencyMs: 0, - lastPingAt: now, - }, - reconnectAttempts: 0, - }; - - this.connections.set(connectionId, monitored); - this.emit('stateChange', connectionId, 'connected'); - - // Start health checks if not already running - if (!this.checkInterval) { - this.startHealthChecks(); - } - } - - /** - * Stops monitoring a connection. 
- * @param connectionId - ID of the connection to stop monitoring - */ - stopMonitoring(connectionId: string): void { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return; - } - - // Emit disconnected state before removing - if (monitored.state !== 'disconnected') { - this.emit('stateChange', connectionId, 'disconnected'); - } - - this.connections.delete(connectionId); - - // Stop health checks if no more connections - if (this.connections.size === 0) { - this.stopHealthChecks(); - } - } - - /** - * Called when SshService reports a real disconnect (ssh2 `close` event). - * Triggers reconnect with backoff if the connection is still being monitored. - */ - handleDisconnect(connectionId: string): void { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return; - } - - // Ignore if already reconnecting or disconnected - if (monitored.state === 'connecting' || monitored.state === 'disconnected') { - return; - } - - this.updateState(connectionId, 'error', 'Connection lost'); - this.attemptReconnect(connectionId); - } - - /** - * Updates the connection state. - * @param connectionId - ID of the connection - * @param state - New connection state - * @param error - Optional error message - */ - updateState(connectionId: string, state: ConnectionState, error?: string): void { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return; - } - - const previousState = monitored.state; - monitored.state = state; - - if (error) { - monitored.lastError = error; - } - - // Reset reconnect attempts on successful connection - if (state === 'connected' && previousState !== 'connected') { - monitored.reconnectAttempts = 0; - monitored.lastError = undefined; - } - - this.emit('stateChange', connectionId, state, error); - } - - /** - * Gets the current connection state. 
- * @param connectionId - ID of the connection - * @returns Current connection state or 'disconnected' if not monitored - */ - getState(connectionId: string): ConnectionState { - const monitored = this.connections.get(connectionId); - return monitored?.state ?? 'disconnected'; - } - - /** - * Updates metrics for a connection. - * @param connectionId - ID of the connection - * @param metrics - Partial metrics to update - */ - updateMetrics(connectionId: string, metrics: Partial): void { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return; - } - - const updatedMetrics = { ...monitored.metrics, ...metrics }; - monitored.metrics = updatedMetrics; - - this.emit('metrics', connectionId, updatedMetrics); - } - - /** - * Gets current metrics for a connection. - * @param connectionId - ID of the connection - * @returns Current metrics or null if not monitoring - */ - getMetrics(connectionId: string): ConnectionMetrics | null { - const monitored = this.connections.get(connectionId); - return monitored?.metrics ?? null; - } - - /** - * Starts periodic health checks for all monitored connections. - * @param intervalMs - Check interval in milliseconds (default: 30 seconds) - */ - startHealthChecks(intervalMs: number = this.DEFAULT_INTERVAL_MS): void { - this.stopHealthChecks(); - this.checkInterval = setInterval(() => { - this.performHealthChecks(); - }, intervalMs); - } - - /** - * Stops periodic health checks. - */ - stopHealthChecks(): void { - if (this.checkInterval) { - clearInterval(this.checkInterval); - this.checkInterval = undefined; - } - } - - /** - * Checks if a connection is healthy by querying the real connection pool. 
- * @param connectionId - ID of the connection - * @returns True if connection is healthy - */ - async isHealthy(connectionId: string): Promise { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return false; - } - - if (monitored.state !== 'connected') { - return false; - } - - return this.connectionChecker(connectionId); - } - - /** - * Gets all monitored connection IDs and their states. - * @returns Array of connection ID and state pairs - */ - getAllStates(): Array<{ connectionId: string; state: ConnectionState }> { - return Array.from(this.connections.entries()).map(([id, monitored]) => ({ - connectionId: id, - state: monitored.state, - })); - } - - /** - * Gets the configuration for a monitored connection. - * Used for reconnection attempts. - * @param connectionId - ID of the connection - * @returns SSH configuration or null if not monitored - */ - getConfig(connectionId: string): SshConfig | null { - const monitored = this.connections.get(connectionId); - return monitored?.config ?? null; - } - - /** - * Gets detailed connection info including metrics and state. - * @param connectionId - ID of the connection - * @returns Detailed connection info or null if not monitored - */ - getConnectionInfo(connectionId: string): { - state: ConnectionState; - metrics: ConnectionMetrics; - reconnectAttempts: number; - lastError?: string; - } | null { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return null; - } - - return { - state: monitored.state, - metrics: { ...monitored.metrics }, - reconnectAttempts: monitored.reconnectAttempts, - lastError: monitored.lastError, - }; - } - - /** - * Disposes of the monitor and cleans up all resources. - * Stops health checks, clears all connections, and removes all listeners. 
- */ - dispose(): void { - this.stopHealthChecks(); - - // Emit disconnected for all monitored connections - for (const [connectionId, monitored] of this.connections) { - if (monitored.state !== 'disconnected') { - this.emit('stateChange', connectionId, 'disconnected', 'Monitor disposed'); - } - } - - this.connections.clear(); - this.removeAllListeners(); - } - - /** - * Performs health checks on all monitored connections. - * Called periodically by the health check interval. - * - * Instead of relying on a lastPingAt timer (which was never updated), - * this now queries the real connection pool via connectionChecker. - * ssh2's keepalive handles TCP liveness; we just verify the connection - * is still in the pool. - */ - private performHealthChecks(): void { - for (const [connectionId, monitored] of this.connections) { - // Skip connections that are already connecting or reconnecting - if (monitored.state === 'connecting') { - continue; - } - - // Skip connections that are already disconnected - if (monitored.state === 'disconnected') { - continue; - } - - const isAlive = this.connectionChecker(connectionId); - this.emit('healthCheck', connectionId, isAlive, monitored.metrics.latencyMs); - - // If the monitor thinks it's connected but the pool says otherwise, - // the connection was dropped (e.g. ssh2 close event happened but - // the monitor's handleDisconnect was somehow missed). Trigger reconnect. - if (!isAlive && monitored.state === 'connected') { - this.updateState(connectionId, 'error', 'Connection no longer in pool'); - this.attemptReconnect(connectionId); - } - } - } - - /** - * Attempts to reconnect a connection with exponential backoff. 
- * @param connectionId - ID of the connection to reconnect - */ - private async attemptReconnect(connectionId: string): Promise { - const monitored = this.connections.get(connectionId); - if (!monitored) { - return; - } - - // Check if we've exceeded max reconnection attempts - if (monitored.reconnectAttempts >= this.MAX_RECONNECT_ATTEMPTS) { - const error = `Max reconnection attempts (${this.MAX_RECONNECT_ATTEMPTS}) reached`; - this.updateState(connectionId, 'disconnected', error); - this.emit('reconnectFailed', connectionId, error); - return; - } - - // Increment attempt counter - monitored.reconnectAttempts++; - - // Calculate backoff delay - const delayIndex = Math.min( - monitored.reconnectAttempts - 1, - this.RECONNECT_BACKOFF_MS.length - 1 - ); - const delay = this.RECONNECT_BACKOFF_MS[delayIndex]; - - // Update state to connecting - this.updateState(connectionId, 'connecting'); - - // Wait for backoff delay before emitting reconnect event - await this.sleep(delay); - - // Re-check that connection is still being monitored and still needs reconnection - const current = this.connections.get(connectionId); - if (!current || current.state !== 'connecting') { - return; - } - - // Emit reconnect event for the service to handle - this.emit('reconnect', connectionId, monitored.config, monitored.reconnectAttempts); - } - - /** - * Utility method for async delay. 
- * @param ms - Milliseconds to sleep - * @returns Promise that resolves after the delay - */ - private sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); - } -} diff --git a/src/main/services/ssh/SshHostKeyService.ts b/src/main/services/ssh/SshHostKeyService.ts deleted file mode 100644 index 036cc24ab..000000000 --- a/src/main/services/ssh/SshHostKeyService.ts +++ /dev/null @@ -1,290 +0,0 @@ -import { createHash } from 'crypto'; -import { readFile, writeFile, appendFile, access } from 'fs/promises'; -import { homedir } from 'os'; -import { join } from 'path'; -import { HostKeyInfo } from '../../../shared/ssh/types'; -import { HostKeyEntry } from './types'; - -const KNOWN_HOSTS_PATH = join(homedir(), '.ssh', 'known_hosts'); - -/** - * Structured representation of a stored host key. - * Preserves the algorithm so we can round-trip to known_hosts without data loss. - */ -interface StoredHostKey { - algorithm: string; - keyBase64: string; -} - -/** - * Service for managing SSH host key verification. - * Stores and validates host fingerprints for security. - */ -export class SshHostKeyService { - private knownHosts: Map = new Map(); - private initialized = false; - - /** - * Initialize by loading known_hosts file - */ - async initialize(): Promise { - if (this.initialized) return; - - try { - await access(KNOWN_HOSTS_PATH); - const content = await readFile(KNOWN_HOSTS_PATH, 'utf-8'); - this.parseKnownHosts(content); - this.initialized = true; - } catch (err) { - // File doesn't exist or can't be read, start with empty - this.initialized = true; - } - } - - /** - * Parse known_hosts content into memory. 
- * Format: - */ - private parseKnownHosts(content: string): void { - const lines = content.split('\n'); - for (const line of lines) { - const trimmed = line.trim(); - if (!trimmed || trimmed.startsWith('#')) continue; - - const parts = trimmed.split(' '); - if (parts.length >= 3) { - const host = parts[0]; - const algorithm = parts[1]; - const keyBase64 = parts.slice(2).join(' '); - this.knownHosts.set(host, { algorithm, keyBase64 }); - } - } - } - - /** - * Get fingerprint for a host key - */ - getFingerprint(key: Buffer): string { - const hash = createHash('sha256').update(key).digest('base64'); - return `SHA256:${hash}`; - } - - /** - * Verifies a host's key against known hosts. - * @param host - Hostname or IP address - * @param port - SSH port - * @param keyType - Type of host key (e.g., 'rsa', 'ed25519') - * @param fingerprint - Host key fingerprint - * @returns Verification result: 'known', 'new', or 'changed' - */ - async verifyHostKey( - host: string, - port: number, - keyType: string, - fingerprint: string - ): Promise<'known' | 'new' | 'changed'> { - await this.initialize(); - - const hostPort = port === 22 ? host : `[${host}]:${port}`; - const stored = this.knownHosts.get(hostPort) || this.knownHosts.get(host); - - if (!stored) { - return 'new'; - } - - // Compare fingerprints instead of raw keys for this interface - const knownFingerprint = this.getFingerprint(Buffer.from(stored.keyBase64, 'base64')); - if (knownFingerprint === fingerprint) { - return 'known'; - } - - return 'changed'; - } - - /** - * Check if host key is known with direct key buffer comparison. - * Returns 'valid' | 'invalid' | 'unknown'. - */ - async verifyHostKeyBuffer( - host: string, - port: number, - key: Buffer - ): Promise<'valid' | 'invalid' | 'unknown'> { - await this.initialize(); - - const hostPort = port === 22 ? 
host : `[${host}]:${port}`; - const stored = this.knownHosts.get(hostPort) || this.knownHosts.get(host); - - if (!stored) { - return 'unknown'; - } - - const keyBase64 = key.toString('base64'); - if (stored.keyBase64 === keyBase64) { - return 'valid'; - } - - return 'invalid'; - } - - /** - * Adds or updates a host key in the known hosts store. - * @param host - Hostname or IP address - * @param port - SSH port - * @param keyType - Type of host key - * @param fingerprint - Host key fingerprint - */ - async addHostKey( - host: string, - port: number, - keyType: string, - fingerprint: string - ): Promise { - await this.initialize(); - - const hostPort = port === 22 ? host : `[${host}]:${port}`; - // Store fingerprint with algorithm so we can persist correctly - this.knownHosts.set(hostPort, { algorithm: keyType || 'ssh-ed25519', keyBase64: fingerprint }); - - // Rewrite entire file to ensure consistency - await this.persistKnownHosts(); - } - - /** - * Add a host to known_hosts with raw key buffer. - * @param host - Hostname or IP address - * @param port - SSH port - * @param key - Raw host key buffer - * @param algorithm - Key algorithm (default: 'ssh-ed25519') - */ - async addKnownHost( - host: string, - port: number, - key: Buffer, - algorithm: string = 'ssh-ed25519' - ): Promise { - await this.initialize(); - - const hostPort = port === 22 ? host : `[${host}]:${port}`; - const keyBase64 = key.toString('base64'); - const entry = `${hostPort} ${algorithm} ${keyBase64}\n`; - - this.knownHosts.set(hostPort, { algorithm, keyBase64 }); - - try { - await appendFile(KNOWN_HOSTS_PATH, entry); - } catch (err) { - throw new Error(`Failed to write to known_hosts: ${err}`); - } - } - - /** - * Removes a host from the known hosts store. - * @param host - Hostname or IP address - * @param port - SSH port - */ - async removeHostKey(host: string, port: number): Promise { - await this.initialize(); - - const hostPort = port === 22 ? 
host : `[${host}]:${port}`; - this.knownHosts.delete(hostPort); - this.knownHosts.delete(host); - - await this.persistKnownHosts(); - } - - /** - * Remove a host from known_hosts (alias for removeHostKey). - * @param host - Hostname or IP address - * @param port - SSH port - */ - async removeKnownHost(host: string, port: number): Promise { - return this.removeHostKey(host, port); - } - - /** - * Gets all known hosts. - * @returns Array of host key entries - */ - async getKnownHosts(): Promise { - await this.initialize(); - - const entries: HostKeyEntry[] = []; - for (const [hostPort, stored] of this.knownHosts) { - // Parse host and port from the key format - let host: string; - let port: number; - - if (hostPort.startsWith('[') && hostPort.includes(']:')) { - const match = hostPort.match(/^\[(.*)\]:(\d+)$/); - if (match) { - host = match[1]; - port = parseInt(match[2], 10); - } else { - host = hostPort; - port = 22; - } - } else { - host = hostPort; - port = 22; - } - - entries.push({ - host, - port, - keyType: stored.algorithm, - fingerprint: this.getFingerprint(Buffer.from(stored.keyBase64, 'base64')), - verifiedAt: new Date(), - }); - } - - return entries; - } - - /** - * Checks if a host is known. - * @param host - Hostname or IP address - * @param port - SSH port - * @returns True if host is known - */ - async isHostKnown(host: string, port: number): Promise { - await this.initialize(); - - const hostPort = port === 22 ? 
host : `[${host}]:${port}`; - return this.knownHosts.has(hostPort) || this.knownHosts.has(host); - } - - /** - * Get host key info for display - * @param host - Hostname or IP address - * @param port - SSH port - * @param key - Raw host key buffer - * @param algorithm - Key algorithm - * @returns HostKeyInfo object - */ - getHostKeyInfo(host: string, port: number, key: Buffer, algorithm: string): HostKeyInfo { - return { - host, - port, - fingerprint: this.getFingerprint(key), - algorithm, - key, - }; - } - - /** - * Persist known hosts to the known_hosts file. - */ - private async persistKnownHosts(): Promise { - const entries: string[] = []; - for (const [hostPort, stored] of this.knownHosts) { - entries.push(`${hostPort} ${stored.algorithm} ${stored.keyBase64}`); - } - - try { - await writeFile(KNOWN_HOSTS_PATH, entries.join('\n') + (entries.length > 0 ? '\n' : '')); - } catch (err) { - throw new Error(`Failed to write to known_hosts: ${err}`); - } - } -} diff --git a/src/main/services/ssh/SshService.ts b/src/main/services/ssh/SshService.ts deleted file mode 100644 index 60ccd1f24..000000000 --- a/src/main/services/ssh/SshService.ts +++ /dev/null @@ -1,419 +0,0 @@ -import { EventEmitter } from 'events'; -import { Client, SFTPWrapper, ConnectConfig } from 'ssh2'; -import { SshConfig, ExecResult } from '../../../shared/ssh/types'; -import { Connection, ConnectionPool } from './types'; -import { SshCredentialService } from './SshCredentialService'; -import { quoteShellArg } from '../../utils/shellEscape'; -import { readFile } from 'fs/promises'; -import { randomUUID } from 'crypto'; -import { homedir } from 'os'; -import { resolveIdentityAgent } from '../../utils/sshConfigParser'; - -/** Maximum number of concurrent SSH connections allowed in the pool. */ -const MAX_CONNECTIONS = 10; - -/** Threshold (fraction of MAX_CONNECTIONS) at which a warning is logged. 
*/ -const POOL_WARNING_THRESHOLD = 0.8; - -/** - * Main SSH service for managing SSH connections, executing commands, - * and handling SFTP operations. - * - * Extends EventEmitter to emit connection events: - * - 'connected': When a connection is successfully established - * - 'error': When a connection error occurs - * - 'disconnected': When a connection is closed - */ -export class SshService extends EventEmitter { - private connections: ConnectionPool = {}; - private pendingConnections: Map> = new Map(); - private credentialService: SshCredentialService; - - constructor(credentialService?: SshCredentialService) { - super(); - this.credentialService = credentialService ?? new SshCredentialService(); - } - - /** - * Establishes a new SSH connection. - * - * Guards against duplicate connections: - * - If a connection with this ID already exists and is alive, returns immediately. - * - If a connection attempt for this ID is already in flight, coalesces onto - * the existing promise instead of opening a second TCP socket. - * - Enforces a global MAX_CONNECTIONS limit to prevent resource exhaustion. - * - * @param config - SSH connection configuration - * @returns Connection ID for future operations - */ - async connect(config: SshConfig): Promise { - const connectionId = config.id ?? randomUUID(); - - // 1. If already connected, reuse the existing connection - if (this.connections[connectionId]) { - return connectionId; - } - - // 2. If a connection attempt is already in flight, coalesce - const pending = this.pendingConnections.get(connectionId); - if (pending) { - return pending; - } - - // 3. Enforce connection pool limit - const poolSize = Object.keys(this.connections).length + this.pendingConnections.size; - if (poolSize >= MAX_CONNECTIONS) { - throw new Error( - `SSH connection pool limit reached (${MAX_CONNECTIONS}). ` + - 'Disconnect unused connections before opening new ones.' 
- ); - } - if (poolSize >= MAX_CONNECTIONS * POOL_WARNING_THRESHOLD) { - console.warn( - `[SshService] Connection pool at ${poolSize}/${MAX_CONNECTIONS} — approaching limit` - ); - } - - // 4. Create the connection and track the in-flight promise - const connectionPromise = this.createConnection(connectionId, config); - this.pendingConnections.set(connectionId, connectionPromise); - - try { - const result = await connectionPromise; - return result; - } finally { - this.pendingConnections.delete(connectionId); - } - } - - /** - * Internal: opens a new SSH connection and registers it in the pool. - */ - private createConnection(connectionId: string, config: SshConfig): Promise { - const client = new Client(); - - return new Promise((resolve, reject) => { - // Handle connection errors - client.on('error', (err: Error) => { - reject(err); - }); - - // Handle connection close - client.on('close', () => { - // Only clean up if this client is still the one stored in the pool. - // A stale client's close event must not remove a newer connection - // that was established under the same connectionId. - if (this.connections[connectionId]?.client === client) { - delete this.connections[connectionId]; - this.emit('disconnected', connectionId); - } - }); - - // Handle successful connection - client.on('ready', () => { - const connection: Connection = { - id: connectionId, - config, - client, - connectedAt: new Date(), - lastActivity: new Date(), - }; - - this.connections[connectionId] = connection; - this.emit('connected', connectionId); - resolve(connectionId); - }); - - // Build connection config - this.buildConnectConfig(connectionId, config) - .then((connectConfig) => { - client.connect(connectConfig); - }) - .catch((err) => { - // Never emit the special EventEmitter 'error' event unless - // someone is explicitly listening; otherwise Node will throw - // ERR_UNHANDLED_ERROR and can abort IPC replies. 
- if (this.listenerCount('error') > 0) { - this.emit('error', connectionId, err); - } - reject(err); - }); - }); - } - - /** - * Builds the ssh2 ConnectConfig from our SshConfig - */ - private async buildConnectConfig( - connectionId: string, - config: SshConfig - ): Promise { - const connectConfig: ConnectConfig = { - host: config.host, - port: config.port, - username: config.username, - readyTimeout: 20000, - keepaliveInterval: 60000, - keepaliveCountMax: 3, - }; - - switch (config.authType) { - case 'password': { - const inlinePassword = (config as any).password as string | undefined; - const password = inlinePassword ?? (await this.credentialService.getPassword(connectionId)); - if (!password) { - throw new Error(`No password found for connection ${connectionId}`); - } - connectConfig.password = password; - break; - } - - case 'key': { - if (!config.privateKeyPath) { - throw new Error('Private key path is required for key authentication'); - } - try { - // Expand ~ to home directory - let keyPath = config.privateKeyPath; - if (keyPath.startsWith('~/')) { - keyPath = keyPath.replace('~', homedir()); - } else if (keyPath === '~') { - keyPath = homedir(); - } - - const privateKey = await readFile(keyPath, 'utf-8'); - connectConfig.privateKey = privateKey; - - // Check for passphrase - const inlinePassphrase = (config as any).passphrase as string | undefined; - const passphrase = - inlinePassphrase ?? (await this.credentialService.getPassphrase(connectionId)); - if (passphrase) { - connectConfig.passphrase = passphrase; - } - } catch (err) { - const message = err instanceof Error ? err.message : String(err); - throw new Error(`Failed to read private key: ${message}`); - } - break; - } - - case 'agent': { - const identityAgent = await resolveIdentityAgent(config.host); - const agentSocket = identityAgent || process.env.SSH_AUTH_SOCK; - if (!agentSocket) { - throw new Error( - 'SSH agent authentication failed: no agent socket found. 
' + - 'This typically happens when:\n' + - '1. The SSH agent is not running (try running "eval $(ssh-agent -s)" in your terminal)\n' + - '2. The app was launched from the GUI (Finder/Dock) instead of a terminal\n' + - '3. The SSH agent socket path could not be auto-detected\n\n' + - 'Workarounds:\n' + - '• Add IdentityAgent to this host in ~/.ssh/config (e.g. for 1Password)\n' + - '• Launch Emdash from your terminal where SSH agent is already configured\n' + - '• Use SSH key authentication instead of agent authentication\n' + - '• Ensure your SSH agent is running and your keys are added (ssh-add -l)' - ); - } - connectConfig.agent = agentSocket; - break; - } - - default: { - throw new Error(`Unsupported authentication type: ${config.authType}`); - } - } - - return connectConfig; - } - - /** - * Disconnects an existing SSH connection. - * @param connectionId - ID of the connection to close - */ - async disconnect(connectionId: string): Promise { - const connection = this.connections[connectionId]; - if (!connection) { - return; // Already disconnected or never existed - } - - // Close SFTP session if open, waiting for close to complete - if (connection.sftp) { - try { - await new Promise((resolve) => { - const sftp = connection.sftp!; - const timeout = setTimeout(() => resolve(), 2000); // 2s safety timeout - sftp.once('close', () => { - clearTimeout(timeout); - resolve(); - }); - sftp.end(); - }); - } catch { - // Ignore errors during SFTP close - } - connection.sftp = undefined; - } - - // Close SSH client - connection.client.end(); - - // Remove from pool - delete this.connections[connectionId]; - - // Emit disconnected event - this.emit('disconnected', connectionId); - } - - /** - * Executes a command on the remote host. 
- * @param connectionId - ID of the active connection - * @param command - Command to execute - * @param cwd - Optional working directory - * @returns Command execution result - */ - async executeCommand(connectionId: string, command: string, cwd?: string): Promise { - const connection = this.connections[connectionId]; - if (!connection) { - throw new Error(`Connection ${connectionId} not found`); - } - - // Update last activity - connection.lastActivity = new Date(); - - // Build the command with optional cwd, wrapped in a login shell so that - // ~/.ssh/config, ~/.gitconfig, and other user-level configuration files - // are available (ssh2's client.exec() uses a non-login shell by default). - const innerCommand = cwd ? `cd ${quoteShellArg(cwd)} && ${command}` : command; - const fullCommand = `bash -l -c ${quoteShellArg(innerCommand)}`; - - return new Promise((resolve, reject) => { - connection.client.exec(fullCommand, (err, stream) => { - if (err) { - reject(err); - return; - } - - let stdout = ''; - let stderr = ''; - stream.on('close', (code: number | null) => { - // ssh2 reports `code` as null when a signal terminates the process. - // Keep ExecResult.exitCode as a number for simpler downstream typing. - const exitCode = code ?? -1; - resolve({ - stdout: stdout.trim(), - stderr: stderr.trim(), - exitCode, - }); - }); - - stream.on('data', (data: Buffer) => { - stdout += data.toString('utf-8'); - }); - - stream.stderr.on('data', (data: Buffer) => { - stderr += data.toString('utf-8'); - }); - - stream.on('error', (streamErr: Error) => { - reject(streamErr); - }); - }); - }); - } - - /** - * Gets an SFTP session for file operations. 
- * @param connectionId - ID of the active connection - * @returns SFTP wrapper instance - */ - async getSftp(connectionId: string): Promise { - const connection = this.connections[connectionId]; - if (!connection) { - throw new Error(`Connection ${connectionId} not found`); - } - - // Return cached SFTP if available - if (connection.sftp) { - connection.lastActivity = new Date(); - return connection.sftp; - } - - // Create new SFTP session - return new Promise((resolve, reject) => { - connection.client.sftp((err, sftp) => { - if (err) { - reject(err); - return; - } - - connection.sftp = sftp; - connection.lastActivity = new Date(); - resolve(sftp); - }); - }); - } - - /** - * Gets connection info for a specific connection. - * @param connectionId - ID of the connection - * @returns Connection object or undefined if not found - */ - getConnection(connectionId: string): Connection | undefined { - return this.connections[connectionId]; - } - - /** - * Gets all active connections. - * @returns Array of connection objects - */ - getAllConnections(): Connection[] { - return Object.values(this.connections); - } - - /** - * Checks if a connection is currently connected. - * @param connectionId - ID of the connection - * @returns True if connected - */ - isConnected(connectionId: string): boolean { - return connectionId in this.connections; - } - - /** - * Lists all active connection IDs. - * @returns Array of connection IDs - */ - listConnections(): string[] { - return Object.keys(this.connections); - } - - /** - * Gets connection info for a specific connection. - * @param connectionId - ID of the connection - */ - getConnectionInfo(connectionId: string): { connectedAt: Date; lastActivity: Date } | null { - const conn = this.connections[connectionId]; - if (!conn) return null; - return { - connectedAt: conn.connectedAt, - lastActivity: conn.lastActivity, - }; - } - - /** - * Disconnects all active connections. - * Useful for cleanup on shutdown. 
- */ - async disconnectAll(): Promise { - const disconnectPromises = Object.keys(this.connections).map((id) => - this.disconnect(id).catch(() => { - // Ignore errors during bulk disconnect - }) - ); - await Promise.all(disconnectPromises); - } -} - -/** Module-level singleton — all main-process code should import this. */ -export const sshService = new SshService(); diff --git a/src/main/services/ssh/__tests__/SshCredentialService.test.ts b/src/main/services/ssh/__tests__/SshCredentialService.test.ts deleted file mode 100644 index 106cb8727..000000000 --- a/src/main/services/ssh/__tests__/SshCredentialService.test.ts +++ /dev/null @@ -1,327 +0,0 @@ -import { describe, it, expect, beforeEach, vi, Mock } from 'vitest'; -import { SshCredentialService } from '../SshCredentialService'; - -// Mock keytar with hoisting-safe pattern -const mockSetPassword = vi.fn().mockResolvedValue(undefined); -const mockGetPassword = vi.fn().mockResolvedValue(null); -const mockDeletePassword = vi.fn().mockResolvedValue(undefined); - -vi.mock('keytar', () => { - return { - setPassword: (...args: any[]) => mockSetPassword(...args), - getPassword: (...args: any[]) => mockGetPassword(...args), - deletePassword: (...args: any[]) => mockDeletePassword(...args), - default: { - setPassword: (...args: any[]) => mockSetPassword(...args), - getPassword: (...args: any[]) => mockGetPassword(...args), - deletePassword: (...args: any[]) => mockDeletePassword(...args), - }, - }; -}); - -describe('SshCredentialService', () => { - let service: SshCredentialService; - - beforeEach(() => { - vi.clearAllMocks(); - // Reset default mock implementations - mockSetPassword.mockResolvedValue(undefined); - mockGetPassword.mockResolvedValue(null); - mockDeletePassword.mockResolvedValue(undefined); - service = new SshCredentialService(); - }); - - describe('password operations', () => { - it('should store password in keychain', async () => { - await service.storePassword('conn-1', 'secretpassword'); - - 
expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:password', - 'secretpassword' - ); - }); - - it('should retrieve password from keychain', async () => { - mockGetPassword.mockResolvedValue('secretpassword'); - - const result = await service.getPassword('conn-1'); - - expect(mockGetPassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:password'); - expect(result).toBe('secretpassword'); - }); - - it('should return null when password not found', async () => { - mockGetPassword.mockResolvedValue(null); - - const result = await service.getPassword('conn-1'); - - expect(result).toBeNull(); - }); - - it('should delete password from keychain', async () => { - await service.deletePassword('conn-1'); - - expect(mockDeletePassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:password'); - }); - - it('should check if password exists', async () => { - mockGetPassword.mockResolvedValue('secretpassword'); - - const result = await service.hasPassword('conn-1'); - - expect(result).toBe(true); - expect(mockGetPassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:password'); - }); - - it('should return false when password does not exist', async () => { - mockGetPassword.mockResolvedValue(null); - - const result = await service.hasPassword('conn-1'); - - expect(result).toBe(false); - }); - - it('should throw error when store password fails', async () => { - mockSetPassword.mockRejectedValue(new Error('Keychain locked')); - - await expect(service.storePassword('conn-1', 'password')).rejects.toThrow( - 'Failed to store password for connection conn-1: Keychain locked' - ); - }); - - it('should throw error when get password fails', async () => { - mockGetPassword.mockRejectedValue(new Error('Access denied')); - - await expect(service.getPassword('conn-1')).rejects.toThrow( - 'Failed to retrieve password for connection conn-1: Access denied' - ); - }); - - it('should throw error when delete password fails', async () => { - mockDeletePassword.mockRejectedValue(new 
Error('Keychain error')); - - await expect(service.deletePassword('conn-1')).rejects.toThrow( - 'Failed to delete password for connection conn-1: Keychain error' - ); - }); - - it('should return false for hasPassword when keytar throws', async () => { - mockGetPassword.mockRejectedValue(new Error('Keychain error')); - - const result = await service.hasPassword('conn-1'); - - expect(result).toBe(false); - }); - }); - - describe('passphrase operations', () => { - it('should store passphrase in keychain', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storePassphrase('conn-1', 'my-passphrase'); - - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:passphrase', - 'my-passphrase' - ); - }); - - it('should retrieve passphrase from keychain', async () => { - mockGetPassword.mockResolvedValue('my-passphrase'); - - const result = await service.getPassphrase('conn-1'); - - expect(mockGetPassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:passphrase'); - expect(result).toBe('my-passphrase'); - }); - - it('should return null when passphrase not found', async () => { - mockGetPassword.mockResolvedValue(null); - - const result = await service.getPassphrase('conn-1'); - - expect(result).toBeNull(); - }); - - it('should delete passphrase from keychain', async () => { - mockDeletePassword.mockResolvedValue(undefined); - - await service.deletePassphrase('conn-1'); - - expect(mockDeletePassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:passphrase'); - }); - - it('should check if passphrase exists', async () => { - mockGetPassword.mockResolvedValue('my-passphrase'); - - const result = await service.hasPassphrase('conn-1'); - - expect(result).toBe(true); - expect(mockGetPassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:passphrase'); - }); - - it('should return false when passphrase does not exist', async () => { - mockGetPassword.mockResolvedValue(null); - - const result = await service.hasPassphrase('conn-1'); - - 
expect(result).toBe(false); - }); - - it('should throw error when store passphrase fails', async () => { - mockSetPassword.mockRejectedValue(new Error('Keychain locked')); - - await expect(service.storePassphrase('conn-1', 'passphrase')).rejects.toThrow( - 'Failed to store passphrase for connection conn-1: Keychain locked' - ); - }); - - it('should throw error when get passphrase fails', async () => { - mockGetPassword.mockRejectedValue(new Error('Access denied')); - - await expect(service.getPassphrase('conn-1')).rejects.toThrow( - 'Failed to retrieve passphrase for connection conn-1: Access denied' - ); - }); - - it('should throw error when delete passphrase fails', async () => { - mockDeletePassword.mockRejectedValue(new Error('Keychain error')); - - await expect(service.deletePassphrase('conn-1')).rejects.toThrow( - 'Failed to delete passphrase for connection conn-1: Keychain error' - ); - }); - - it('should return false for hasPassphrase when keytar throws', async () => { - mockGetPassword.mockRejectedValue(new Error('Keychain error')); - - const result = await service.hasPassphrase('conn-1'); - - expect(result).toBe(false); - }); - }); - - describe('bulk operations', () => { - it('should store both password and passphrase', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storeCredentials('conn-1', { - password: 'secretpassword', - passphrase: 'my-passphrase', - }); - - expect(mockSetPassword).toHaveBeenCalledTimes(2); - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:password', - 'secretpassword' - ); - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:passphrase', - 'my-passphrase' - ); - }); - - it('should store only password when passphrase not provided', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storeCredentials('conn-1', { - password: 'secretpassword', - }); - - expect(mockSetPassword).toHaveBeenCalledTimes(1); - 
expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:password', - 'secretpassword' - ); - }); - - it('should store only passphrase when password not provided', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storeCredentials('conn-1', { - passphrase: 'my-passphrase', - }); - - expect(mockSetPassword).toHaveBeenCalledTimes(1); - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'conn-1:passphrase', - 'my-passphrase' - ); - }); - - it('should do nothing when no credentials provided', async () => { - await service.storeCredentials('conn-1', {}); - - expect(mockSetPassword).not.toHaveBeenCalled(); - }); - - it('should delete all credentials', async () => { - mockDeletePassword.mockResolvedValue(undefined); - - await service.deleteAllCredentials('conn-1'); - - expect(mockDeletePassword).toHaveBeenCalledTimes(2); - expect(mockDeletePassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:password'); - expect(mockDeletePassword).toHaveBeenCalledWith('emdash-ssh', 'conn-1:passphrase'); - }); - - it('should not fail when deleting non-existent credentials', async () => { - mockDeletePassword.mockRejectedValue(new Error('Not found')); - - // Should not throw - await expect(service.deleteAllCredentials('conn-1')).resolves.not.toThrow(); - }); - - it('should continue deleting when one credential fails', async () => { - mockDeletePassword - .mockRejectedValueOnce(new Error('Password not found')) - .mockResolvedValueOnce(undefined); - - await service.deleteAllCredentials('conn-1'); - - expect(mockDeletePassword).toHaveBeenCalledTimes(2); - }); - }); - - describe('service namespacing', () => { - it('should use correct service name for all operations', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storePassword('conn-1', 'pass'); - await service.storePassphrase('conn-1', 'phrase'); - - const calls = mockSetPassword.mock.calls; - expect(calls.every((call) => call[0] === 
'emdash-ssh')).toBe(true); - }); - - it('should use correct key format for password', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storePassword('my-connection', 'password'); - - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'my-connection:password', - 'password' - ); - }); - - it('should use correct key format for passphrase', async () => { - mockSetPassword.mockResolvedValue(undefined); - - await service.storePassphrase('my-connection', 'passphrase'); - - expect(mockSetPassword).toHaveBeenCalledWith( - 'emdash-ssh', - 'my-connection:passphrase', - 'passphrase' - ); - }); - }); -}); diff --git a/src/main/services/ssh/__tests__/SshHostKeyService.test.ts b/src/main/services/ssh/__tests__/SshHostKeyService.test.ts deleted file mode 100644 index 45a0a9de7..000000000 --- a/src/main/services/ssh/__tests__/SshHostKeyService.test.ts +++ /dev/null @@ -1,395 +0,0 @@ -import { describe, it, expect, beforeEach, vi, Mock } from 'vitest'; -import { createHash } from 'crypto'; -import { SshHostKeyService } from '../SshHostKeyService'; - -// Mock fs/promises with hoisting-safe pattern -vi.mock('fs/promises', () => { - return { - readFile: vi.fn(), - writeFile: vi.fn(), - appendFile: vi.fn(), - access: vi.fn(), - }; -}); - -// Mock os -vi.mock('os', () => ({ - homedir: vi.fn().mockReturnValue('/home/testuser'), -})); - -// Import after mocking -import { readFile, writeFile, appendFile, access } from 'fs/promises'; - -describe('SshHostKeyService', () => { - let service: SshHostKeyService; - - beforeEach(() => { - vi.clearAllMocks(); - service = new SshHostKeyService(); - }); - - describe('initialization', () => { - it('should initialize with empty known_hosts if file does not exist', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - await service.initialize(); - const hosts = await service.getKnownHosts(); - - expect(hosts).toEqual([]); - 
expect(access).toHaveBeenCalledWith('/home/testuser/.ssh/known_hosts'); - }); - - it('should parse existing known_hosts file', async () => { - const knownHostsContent = ` -# This is a comment -host1.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDIhz2GK/XCUj4i6Q5yQJNL1MXMY0RxzPV2QrBqfHrDq -[host2.example.com]:2222 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCx - -host3.example.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM1 - `; - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(knownHostsContent); - - await service.initialize(); - const hosts = await service.getKnownHosts(); - - expect(hosts).toHaveLength(3); - expect(hosts.some((h) => h.host === 'host1.example.com')).toBe(true); - expect(hosts.some((h) => h.host === 'host2.example.com' && h.port === 2222)).toBe(true); - }); - - it('should skip re-initialization', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - await service.initialize(); - await service.initialize(); - - expect(access).toHaveBeenCalledTimes(1); - }); - }); - - describe('fingerprint generation', () => { - it('should generate SHA256 fingerprint', () => { - const keyBuffer = Buffer.from('test-key-data'); - const fingerprint = service.getFingerprint(keyBuffer); - - const expectedHash = createHash('sha256').update(keyBuffer).digest('base64'); - expect(fingerprint).toBe(`SHA256:${expectedHash}`); - }); - - it('should generate different fingerprints for different keys', () => { - const key1 = Buffer.from('key-one'); - const key2 = Buffer.from('key-two'); - - const fp1 = service.getFingerprint(key1); - const fp2 = service.getFingerprint(key2); - - expect(fp1).not.toBe(fp2); - }); - - it('should generate consistent fingerprints for same key', () => { - const key = Buffer.from('test-key'); - - const fp1 = service.getFingerprint(key); - const fp2 = service.getFingerprint(key); - - expect(fp1).toBe(fp2); - }); - }); - - describe('host key verification', () 
=> { - it('should return new for unknown host', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - const result = await service.verifyHostKey( - 'unknown.host.com', - 22, - 'ssh-ed25519', - 'SHA256:abc123' - ); - - expect(result).toBe('new'); - }); - - it('should return known for matching fingerprint', async () => { - const keyBuffer = Buffer.from('known-key-data'); - const fingerprint = service.getFingerprint(keyBuffer); - const keyBase64 = keyBuffer.toString('base64'); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(`known.host.com ssh-ed25519 ${keyBase64}`); - - const result = await service.verifyHostKey('known.host.com', 22, 'ssh-ed25519', fingerprint); - - expect(result).toBe('known'); - }); - - it('should return changed for non-matching fingerprint', async () => { - const keyBuffer = Buffer.from('original-key-data'); - const keyBase64 = keyBuffer.toString('base64'); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(`changed.host.com ssh-ed25519 ${keyBase64}`); - - const result = await service.verifyHostKey( - 'changed.host.com', - 22, - 'ssh-ed25519', - 'SHA256:differentfingerprint' - ); - - expect(result).toBe('changed'); - }); - - it('should handle non-standard port format', async () => { - const keyBuffer = Buffer.from('port-key-data'); - const fingerprint = service.getFingerprint(keyBuffer); - const keyBase64 = keyBuffer.toString('base64'); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(`[port.host.com]:2222 ssh-ed25519 ${keyBase64}`); - - const result = await service.verifyHostKey('port.host.com', 2222, 'ssh-ed25519', fingerprint); - - expect(result).toBe('known'); - }); - }); - - describe('verifyHostKeyBuffer', () => { - it('should return unknown for unknown host', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - const keyBuffer = Buffer.from('new-key'); - const 
result = await service.verifyHostKeyBuffer('unknown.host.com', 22, keyBuffer); - - expect(result).toBe('unknown'); - }); - - it('should return valid for matching key buffer', async () => { - const keyBuffer = Buffer.from('matching-key-data'); - const keyBase64 = keyBuffer.toString('base64'); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(`valid.host.com ssh-ed25519 ${keyBase64}`); - - const result = await service.verifyHostKeyBuffer('valid.host.com', 22, keyBuffer); - - expect(result).toBe('valid'); - }); - - it('should return invalid for non-matching key buffer', async () => { - const originalKey = Buffer.from('original-key').toString('base64'); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue(`invalid.host.com ssh-ed25519 ${originalKey}`); - - const differentKey = Buffer.from('different-key'); - const result = await service.verifyHostKeyBuffer('invalid.host.com', 22, differentKey); - - expect(result).toBe('invalid'); - }); - }); - - describe('addHostKey', () => { - it('should add host key with standard port', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - await service.addHostKey('new.host.com', 22, 'ssh-ed25519', 'SHA256:abc123def456'); - - expect(writeFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - 'new.host.com ssh-ed25519 SHA256:abc123def456\n' - ); - }); - - it('should add host key with non-standard port', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - await service.addHostKey('new.host.com', 2222, 'ssh-ed25519', 'SHA256:abc123def456'); - - expect(writeFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - '[new.host.com]:2222 ssh-ed25519 SHA256:abc123def456\n' - ); - }); - - it('should update existing host key', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue('old.host.com ssh-ed25519 oldkey\n'); - - await 
service.addHostKey('old.host.com', 22, 'ssh-ed25519', 'new-fingerprint'); - - expect(writeFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - 'old.host.com ssh-ed25519 new-fingerprint\n' - ); - }); - }); - - describe('addKnownHost', () => { - it('should append host with raw key buffer', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - const keyBuffer = Buffer.from('raw-key-data'); - await service.addKnownHost('raw.host.com', 22, keyBuffer, 'ssh-ed25519'); - - const expectedEntry = 'raw.host.com ssh-ed25519 cmF3LWtleS1kYXRh\n'; - expect(appendFile).toHaveBeenCalledWith('/home/testuser/.ssh/known_hosts', expectedEntry); - }); - - it('should use default algorithm when not specified', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - - const keyBuffer = Buffer.from('key-data'); - await service.addKnownHost('default.algo.com', 22, keyBuffer); - - expect(appendFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - expect.stringContaining('ssh-ed25519') - ); - }); - - it('should throw error when append fails', async () => { - (access as Mock).mockRejectedValue(new Error('File not found')); - (appendFile as Mock).mockRejectedValue(new Error('Permission denied')); - - const keyBuffer = Buffer.from('key-data'); - await expect(service.addKnownHost('fail.host.com', 22, keyBuffer)).rejects.toThrow( - 'Failed to write to known_hosts' - ); - }); - }); - - describe('removeHostKey', () => { - it('should remove host with standard port', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue( - `remove.host.com ssh-ed25519 key1\nother.host.com ssh-ed25519 key2\n` - ); - - await service.removeHostKey('remove.host.com', 22); - - expect(writeFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - 'other.host.com ssh-ed25519 key2\n' - ); - }); - - it('should remove host with non-standard port', async () => { - (access as 
Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue( - `[remove.host.com]:2222 ssh-ed25519 key1\nother.host.com ssh-ed25519 key2\n` - ); - - await service.removeHostKey('remove.host.com', 2222); - - expect(writeFile).toHaveBeenCalledWith( - '/home/testuser/.ssh/known_hosts', - 'other.host.com ssh-ed25519 key2\n' - ); - }); - - it('should remove both host and host:port entries', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue( - `remove.host.com ssh-ed25519 key1\n[remove.host.com]:2222 ssh-ed25519 key2\n` - ); - - await service.removeHostKey('remove.host.com', 2222); - - const writeCall = (writeFile as Mock).mock.calls[0]; - expect(writeCall[1]).not.toContain('remove.host.com'); - }); - }); - - describe('removeKnownHost', () => { - it('should be alias for removeHostKey', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue('alias.host.com ssh-ed25519 key\n'); - - await service.removeKnownHost('alias.host.com', 22); - - expect(writeFile).toHaveBeenCalled(); - }); - }); - - describe('getKnownHosts', () => { - it('should return all known hosts with metadata', async () => { - const keyBuffer = Buffer.from('test-key-data'); - const keyBase64 = keyBuffer.toString('base64'); - const expectedFingerprint = service.getFingerprint(keyBuffer); - - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue( - `host1.example.com ssh-ed25519 ${keyBase64}\n[host2.example.com]:2222 ssh-rsa ${keyBase64}` - ); - - const hosts = await service.getKnownHosts(); - - expect(hosts).toHaveLength(2); - expect(hosts[0]).toMatchObject({ - host: 'host1.example.com', - port: 22, - keyType: 'ssh-ed25519', - fingerprint: expectedFingerprint, - }); - expect(hosts[0].verifiedAt).toBeInstanceOf(Date); - }); - - it('should parse host:port format correctly', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as 
Mock).mockResolvedValue('[complex.host.com]:2222 ssh-ed25519 keydata'); - - const hosts = await service.getKnownHosts(); - - expect(hosts[0]).toMatchObject({ - host: 'complex.host.com', - port: 2222, - }); - }); - }); - - describe('isHostKnown', () => { - it('should return true for known host', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue('known.host.com ssh-ed25519 key\n'); - - const result = await service.isHostKnown('known.host.com', 22); - - expect(result).toBe(true); - }); - - it('should return false for unknown host', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue('known.host.com ssh-ed25519 key\n'); - - const result = await service.isHostKnown('unknown.host.com', 22); - - expect(result).toBe(false); - }); - - it('should check both host and host:port formats', async () => { - (access as Mock).mockResolvedValue(undefined); - (readFile as Mock).mockResolvedValue('[port.host.com]:2222 ssh-ed25519 key\n'); - - expect(await service.isHostKnown('port.host.com', 2222)).toBe(true); - expect(await service.isHostKnown('port.host.com', 22)).toBe(false); - }); - }); - - describe('getHostKeyInfo', () => { - it('should return host key info object', () => { - const keyBuffer = Buffer.from('test-key-data'); - const info = service.getHostKeyInfo('info.host.com', 22, keyBuffer, 'ssh-ed25519'); - - expect(info).toMatchObject({ - host: 'info.host.com', - port: 22, - algorithm: 'ssh-ed25519', - key: keyBuffer, - }); - expect(info.fingerprint).toMatch(/^SHA256:/); - }); - }); -}); diff --git a/src/main/services/ssh/__tests__/SshService.test.ts b/src/main/services/ssh/__tests__/SshService.test.ts deleted file mode 100644 index 4375ed956..000000000 --- a/src/main/services/ssh/__tests__/SshService.test.ts +++ /dev/null @@ -1,491 +0,0 @@ -import { describe, it, expect, beforeEach, vi, Mock } from 'vitest'; -import { SshService } from '../SshService'; -import { 
SshCredentialService } from '../SshCredentialService'; -import { SshConfig } from '../../../../shared/ssh/types'; - -// Mock ssh2 Client -const mockClientInstance = { - on: vi.fn(), - connect: vi.fn(), - end: vi.fn(), - exec: vi.fn(), - sftp: vi.fn(), -}; - -vi.mock('ssh2', () => ({ - Client: vi.fn().mockImplementation(() => mockClientInstance), -})); - -// Mock fs/promises -vi.mock('fs/promises', () => ({ - readFile: vi.fn(), -})); - -// Mock crypto -vi.mock('crypto', () => ({ - randomUUID: vi.fn().mockReturnValue('test-uuid-123'), -})); - -// Prevent keytar/native module loading through SshService's module-level singleton. -vi.mock('../SshCredentialService', () => ({ - SshCredentialService: class MockSshCredentialService { - getPassword = vi.fn(); - getPassphrase = vi.fn(); - storePassword = vi.fn(); - storePassphrase = vi.fn(); - }, -})); - -describe('SshService', () => { - let service: SshService; - let mockCredentialService: { - getPassword: Mock; - getPassphrase: Mock; - storePassword: Mock; - storePassphrase: Mock; - }; - - beforeEach(() => { - vi.clearAllMocks(); - mockCredentialService = { - getPassword: vi.fn(), - getPassphrase: vi.fn(), - storePassword: vi.fn(), - storePassphrase: vi.fn(), - }; - service = new SshService(mockCredentialService as unknown as SshCredentialService); - }); - - describe('buildConnectConfig - via connect method', () => { - it('should build correct config for password authentication', async () => { - const config: SshConfig = { - id: 'conn-1', - name: 'Test Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('testpassword'); - - // Capture the connect config - let capturedConfig: any; - mockClientInstance.connect.mockImplementation((cfg: any) => { - capturedConfig = cfg; - // Simulate successful connection - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if 
(readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - expect(capturedConfig).toMatchObject({ - host: 'example.com', - port: 22, - username: 'testuser', - password: 'testpassword', - readyTimeout: 20000, - keepaliveInterval: 60000, - keepaliveCountMax: 3, - }); - }); - - it('should build correct config for key authentication', async () => { - const { readFile } = await import('fs/promises'); - (readFile as Mock).mockResolvedValue('-----BEGIN OPENSSH PRIVATE KEY-----'); - - const config: SshConfig = { - id: 'conn-2', - name: 'Key Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'key', - privateKeyPath: '/home/user/.ssh/id_rsa', - }; - - mockCredentialService.getPassphrase.mockResolvedValue(null); - - let capturedConfig: any; - mockClientInstance.connect.mockImplementation((cfg: any) => { - capturedConfig = cfg; - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - expect(readFile).toHaveBeenCalledWith('/home/user/.ssh/id_rsa', 'utf-8'); - expect(capturedConfig).toMatchObject({ - host: 'example.com', - privateKey: '-----BEGIN OPENSSH PRIVATE KEY-----', - }); - }); - - it('should include passphrase for encrypted key', async () => { - const { readFile } = await import('fs/promises'); - (readFile as Mock).mockResolvedValue('-----BEGIN OPENSSH PRIVATE KEY-----'); - - const config: SshConfig = { - id: 'conn-3', - name: 'Encrypted Key', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'key', - privateKeyPath: '/home/user/.ssh/id_rsa', - }; - - mockCredentialService.getPassphrase.mockResolvedValue('keypassphrase'); - - let capturedConfig: any; - mockClientInstance.connect.mockImplementation((cfg: any) => { - capturedConfig = cfg; - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => 
call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - expect(capturedConfig).toMatchObject({ - privateKey: '-----BEGIN OPENSSH PRIVATE KEY-----', - passphrase: 'keypassphrase', - }); - }); - - it('should build correct config for agent authentication', async () => { - const originalEnv = process.env.SSH_AUTH_SOCK; - process.env.SSH_AUTH_SOCK = '/tmp/ssh-agent.sock'; - - const config: SshConfig = { - id: 'conn-4', - name: 'Agent Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'agent', - }; - - let capturedConfig: any; - mockClientInstance.connect.mockImplementation((cfg: any) => { - capturedConfig = cfg; - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - expect(capturedConfig).toMatchObject({ - agent: '/tmp/ssh-agent.sock', - }); - - process.env.SSH_AUTH_SOCK = originalEnv; - }); - }); - - describe('authentication error handling', () => { - it('should throw error when agent socket is not set', async () => { - const originalEnv = process.env.SSH_AUTH_SOCK; - delete process.env.SSH_AUTH_SOCK; - - const config: SshConfig = { - id: 'conn-5', - name: 'Agent Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'agent', - }; - - // Suppress error event - service.on('error', () => {}); - - await expect(service.connect(config)).rejects.toThrow(/SSH agent authentication failed/); - - process.env.SSH_AUTH_SOCK = originalEnv; - }); - - it('should throw error when password is not found', async () => { - const config: SshConfig = { - id: 'conn-6', - name: 'Password Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue(null); - service.on('error', () => {}); - - 
await expect(service.connect(config)).rejects.toThrow( - 'No password found for connection conn-6' - ); - }); - - it('should throw error when private key path is missing', async () => { - const config: SshConfig = { - id: 'conn-7', - name: 'Key Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'key', - }; - - service.on('error', () => {}); - - await expect(service.connect(config)).rejects.toThrow( - 'Private key path is required for key authentication' - ); - }); - - it('should throw error when private key file cannot be read', async () => { - const { readFile } = await import('fs/promises'); - (readFile as Mock).mockRejectedValue(new Error('Permission denied')); - - const config: SshConfig = { - id: 'conn-8', - name: 'Key Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'key', - privateKeyPath: '/home/user/.ssh/id_rsa', - }; - - service.on('error', () => {}); - - await expect(service.connect(config)).rejects.toThrow( - 'Failed to read private key: Permission denied' - ); - }); - }); - - describe('connection management', () => { - it('should generate UUID when id is not provided', async () => { - const config: SshConfig = { - name: 'Test Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('testpassword'); - mockClientInstance.connect.mockImplementation((cfg: any) => { - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - const connectionId = await service.connect(config); - - expect(connectionId).toBe('test-uuid-123'); - }); - - it('should track connection state', async () => { - const config: SshConfig = { - id: 'conn-9', - name: 'Test Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - 
mockCredentialService.getPassword.mockResolvedValue('testpassword'); - mockClientInstance.connect.mockImplementation((cfg: any) => { - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - expect(service.isConnected('conn-9')).toBe(false); - await service.connect(config); - expect(service.isConnected('conn-9')).toBe(true); - }); - - it('should list connections', async () => { - const config1: SshConfig = { - id: 'conn-a', - name: 'Connection A', - host: 'host-a.com', - port: 22, - username: 'user-a', - authType: 'password', - }; - - const config2: SshConfig = { - id: 'conn-b', - name: 'Connection B', - host: 'host-b.com', - port: 22, - username: 'user-b', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('testpassword'); - - // Setup mock to capture and trigger ready handlers - const readyHandlers: Array<() => void> = []; - mockClientInstance.on.mockImplementation( - (event: string, handler: (...args: any[]) => void) => { - if (event === 'ready') { - readyHandlers.push(handler as () => void); - } - return mockClientInstance; - } - ); - - mockClientInstance.connect.mockImplementation(() => { - // Trigger the last registered ready handler - const handler = readyHandlers[readyHandlers.length - 1]; - if (handler) { - setTimeout(() => handler(), 0); - } - }); - - await service.connect(config1); - await service.connect(config2); - - const connections = service.listConnections(); - expect(connections).toContain('conn-a'); - expect(connections).toContain('conn-b'); - }); - - it('should get connection info', async () => { - const config: SshConfig = { - id: 'conn-20', - name: 'Test Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('testpassword'); - mockClientInstance.connect.mockImplementation((cfg: any) => 
{ - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - const info = service.getConnectionInfo('conn-20'); - expect(info).not.toBeNull(); - expect(info?.connectedAt).toBeInstanceOf(Date); - expect(info?.lastActivity).toBeInstanceOf(Date); - }); - - it('should return null for non-existent connection info', async () => { - const info = service.getConnectionInfo('non-existent'); - expect(info).toBeNull(); - }); - - it('should get all connections', async () => { - const config: SshConfig = { - id: 'conn-21', - name: 'Test Connection', - host: 'example.com', - port: 22, - username: 'testuser', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('testpassword'); - mockClientInstance.connect.mockImplementation((cfg: any) => { - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - await service.connect(config); - - const connections = service.getAllConnections(); - expect(connections).toHaveLength(1); - expect(connections[0].id).toBe('conn-21'); - }); - - it('should handle disconnect for non-existent connection', async () => { - await service.disconnect('non-existent'); - expect(mockClientInstance.end).not.toHaveBeenCalled(); - }); - }); - - describe('escapeShellArg', () => { - it('should escape single quotes in shell arguments', async () => { - const config: SshConfig = { - id: 'conn-esc', - name: 'Test', - host: 'example.com', - port: 22, - username: 'user', - authType: 'password', - }; - - mockCredentialService.getPassword.mockResolvedValue('password'); - - // Use exec to test escapeShellArg indirectly - const { EventEmitter } = await import('events'); - const mockStream = new EventEmitter(); - (mockStream as any).stderr = new EventEmitter(); - - 
mockClientInstance.connect.mockImplementation((cfg: any) => { - const readyHandler = mockClientInstance.on.mock.calls.find( - (call: any) => call[0] === 'ready' - )?.[1]; - if (readyHandler) { - setTimeout(() => readyHandler(), 0); - } - }); - - mockClientInstance.exec.mockImplementation( - (command: string, callback: (err: Error | null, stream: any) => void) => { - callback(null, mockStream); - setTimeout(() => { - mockStream.emit('close', 0); - }, 0); - } - ); - - await service.connect(config); - await service.executeCommand('conn-esc', 'ls', "/path/with'quotes"); - - // Verify the command was escaped - const execCall = mockClientInstance.exec.mock.calls[0]; - expect(execCall[0]).toContain("'"); - expect(execCall[0]).toContain("'\\''"); - }); - }); -}); diff --git a/src/main/services/ssh/index.ts b/src/main/services/ssh/index.ts deleted file mode 100644 index 5391e8afe..000000000 --- a/src/main/services/ssh/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -// SSH Services - Wave 1 Foundation -// Main exports for SSH functionality - -export { SshService } from './SshService'; -export { SshCredentialService } from './SshCredentialService'; -export { SshHostKeyService } from './SshHostKeyService'; -export { SshConnectionMonitor } from './SshConnectionMonitor'; -export type { ExecResult } from '../../../shared/ssh/types'; -export type { Connection, ConnectionPool, HostKeyEntry, ConnectionMetrics } from './types'; diff --git a/src/main/services/ssh/types.ts b/src/main/services/ssh/types.ts deleted file mode 100644 index 8dd788db1..000000000 --- a/src/main/services/ssh/types.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Client, SFTPWrapper } from 'ssh2'; -import { SshConfig } from '../../../shared/ssh/types'; - -export interface Connection { - id: string; - config: SshConfig; - client: Client; - sftp?: SFTPWrapper; - connectedAt: Date; - lastActivity: Date; -} - -export interface ConnectionPool { - [connectionId: string]: Connection; -} - -export interface HostKeyEntry { - host: 
string; - port: number; - keyType: string; - fingerprint: string; - verifiedAt: Date; -} - -export interface ConnectionMetrics { - connectionId: string; - bytesSent: number; - bytesReceived: number; - latencyMs: number; - lastPingAt?: Date; -} diff --git a/src/main/services/worktreeIpc.ts b/src/main/services/worktreeIpc.ts deleted file mode 100644 index 92fa09edd..000000000 --- a/src/main/services/worktreeIpc.ts +++ /dev/null @@ -1,448 +0,0 @@ -import { ipcMain } from 'electron'; -import { worktreeService } from './WorktreeService'; -import { worktreePoolService } from './WorktreePoolService'; -import { databaseService, type Project } from './DatabaseService'; -import { getDrizzleClient } from '../db/drizzleClient'; -import { projects as projectsTable } from '../db/schema'; -import { eq } from 'drizzle-orm'; -import crypto from 'crypto'; -import { RemoteGitService } from './RemoteGitService'; -import { sshService } from './ssh/SshService'; -import { log } from '../lib/logger'; -import { quoteShellArg } from '../utils/shellEscape'; -import { - isRemoteProject, - resolveRemoteProjectForWorktreePath, -} from '../utils/remoteProjectResolver'; - -const remoteGitService = new RemoteGitService(sshService); - -function stableIdFromRemotePath(worktreePath: string): string { - const h = crypto.createHash('sha1').update(worktreePath).digest('hex').slice(0, 12); - return `wt-${h}`; -} - -async function resolveProjectByIdOrPath(args: { - projectId?: string; - projectPath?: string; -}): Promise { - if (args.projectId) { - return databaseService.getProjectById(args.projectId); - } - if (args.projectPath) { - const { db } = await getDrizzleClient(); - const rows = await db - .select({ id: projectsTable.id }) - .from(projectsTable) - .where(eq(projectsTable.path, args.projectPath)) - .limit(1); - if (rows.length > 0) { - return databaseService.getProjectById(rows[0].id); - } - } - return null; -} - -// isRemoteProject and resolveRemoteProjectForWorktreePath imported from 
../utils/remoteProjectResolver - -export function registerWorktreeIpc(): void { - // Create a new worktree - ipcMain.handle( - 'worktree:create', - async ( - event, - args: { - projectPath: string; - taskName: string; - projectId: string; - baseRef?: string; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ - projectId: args.projectId, - projectPath: args.projectPath, - }); - - if (isRemoteProject(project)) { - const baseRef = args.baseRef ?? project.gitInfo.baseRef; - log.info('worktree:create (remote)', { - projectId: project.id, - remotePath: project.remotePath, - }); - const remote = await remoteGitService.createWorktree( - project.sshConnectionId, - project.remotePath, - args.taskName, - baseRef - ); - const worktree = { - id: stableIdFromRemotePath(remote.path), - name: args.taskName, - branch: remote.branch, - path: remote.path, - projectId: project.id, - status: 'active' as const, - createdAt: new Date().toISOString(), - }; - return { success: true, worktree }; - } - - const worktree = await worktreeService.createWorktree( - args.projectPath, - args.taskName, - args.projectId, - args.baseRef - ); - return { success: true, worktree }; - } catch (error) { - console.error('Failed to create worktree:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // List worktrees for a project - ipcMain.handle('worktree:list', async (event, args: { projectPath: string }) => { - try { - const project = await resolveProjectByIdOrPath({ projectPath: args.projectPath }); - if (isRemoteProject(project)) { - const remoteWorktrees = await remoteGitService.listWorktrees( - project.sshConnectionId, - project.remotePath - ); - const worktrees = remoteWorktrees.map((wt) => { - const name = wt.path.split('/').filter(Boolean).pop() || wt.path; - return { - id: stableIdFromRemotePath(wt.path), - name, - branch: wt.branch, - path: wt.path, - projectId: project.id, - status: 'active' as const, - createdAt: new 
Date().toISOString(), - }; - }); - return { success: true, worktrees }; - } - - const worktrees = await worktreeService.listWorktrees(args.projectPath); - return { success: true, worktrees }; - } catch (error) { - console.error('Failed to list worktrees:', error); - return { success: false, error: (error as Error).message }; - } - }); - - // Remove a worktree - ipcMain.handle( - 'worktree:remove', - async ( - event, - args: { - projectPath: string; - worktreeId: string; - worktreePath?: string; - branch?: string; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ projectPath: args.projectPath }); - if (isRemoteProject(project)) { - const pathToRemove = args.worktreePath; - if (!pathToRemove) { - throw new Error('worktreePath is required for remote worktree removal'); - } - log.info('worktree:remove (remote)', { - projectId: project.id, - remotePath: project.remotePath, - worktreePath: pathToRemove, - }); - await remoteGitService.removeWorktree( - project.sshConnectionId, - project.remotePath, - pathToRemove - ); - // Best-effort prune to clear stale metadata. 
- try { - await sshService.executeCommand( - project.sshConnectionId, - 'git worktree prune --verbose', - project.remotePath - ); - } catch {} - if (args.branch) { - try { - await sshService.executeCommand( - project.sshConnectionId, - `git branch -D ${quoteShellArg(args.branch)}`, - project.remotePath - ); - } catch {} - } - return { success: true }; - } - - await worktreeService.removeWorktree( - args.projectPath, - args.worktreeId, - args.worktreePath, - args.branch - ); - return { success: true }; - } catch (error) { - console.error('Failed to remove worktree:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // Get worktree status - ipcMain.handle('worktree:status', async (event, args: { worktreePath: string }) => { - try { - const remoteProject = await resolveRemoteProjectForWorktreePath(args.worktreePath); - if (remoteProject) { - const status = await remoteGitService.getWorktreeStatus( - remoteProject.sshConnectionId, - args.worktreePath - ); - return { success: true, status }; - } - - const status = await worktreeService.getWorktreeStatus(args.worktreePath); - return { success: true, status }; - } catch (error) { - console.error('Failed to get worktree status:', error); - return { success: false, error: (error as Error).message }; - } - }); - - // Merge worktree changes - ipcMain.handle( - 'worktree:merge', - async ( - event, - args: { - projectPath: string; - worktreeId: string; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ projectPath: args.projectPath }); - if (isRemoteProject(project)) { - return { success: false, error: 'Remote worktree merge is not supported yet' }; - } - await worktreeService.mergeWorktreeChanges(args.projectPath, args.worktreeId); - return { success: true }; - } catch (error) { - console.error('Failed to merge worktree changes:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // Get worktree by ID - 
ipcMain.handle('worktree:get', async (event, args: { worktreeId: string }) => { - try { - const worktree = worktreeService.getWorktree(args.worktreeId); - return { success: true, worktree }; - } catch (error) { - console.error('Failed to get worktree:', error); - return { success: false, error: (error as Error).message }; - } - }); - - // Get all worktrees - ipcMain.handle('worktree:getAll', async () => { - try { - const worktrees = worktreeService.getAllWorktrees(); - return { success: true, worktrees }; - } catch (error) { - console.error('Failed to get all worktrees:', error); - return { success: false, error: (error as Error).message }; - } - }); - - // Ensure a reserve worktree exists for a project (background operation) - ipcMain.handle( - 'worktree:ensureReserve', - async ( - event, - args: { - projectId: string; - projectPath: string; - baseRef?: string; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ - projectId: args.projectId, - projectPath: args.projectPath, - }); - if (isRemoteProject(project)) { - // Remote worktree pooling is not supported (avoid local mkdir on remote paths). 
- return { success: true }; - } - // Fire and forget - don't await, just start the process - worktreePoolService.ensureReserve(args.projectId, args.projectPath, args.baseRef); - return { success: true }; - } catch (error) { - console.error('Failed to ensure reserve:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // Check if a reserve is available for a project - ipcMain.handle('worktree:hasReserve', async (event, args: { projectId: string }) => { - try { - const project = await resolveProjectByIdOrPath({ projectId: args.projectId }); - if (isRemoteProject(project)) { - return { success: true, hasReserve: false }; - } - const hasReserve = worktreePoolService.hasReserve(args.projectId); - return { success: true, hasReserve }; - } catch (error) { - console.error('Failed to check reserve:', error); - return { success: false, error: (error as Error).message }; - } - }); - - // Claim a reserve worktree for a new task (instant operation) - ipcMain.handle( - 'worktree:claimReserve', - async ( - event, - args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ - projectId: args.projectId, - projectPath: args.projectPath, - }); - if (isRemoteProject(project)) { - return { success: false, error: 'Remote worktree pooling is not supported yet' }; - } - const result = await worktreePoolService.claimReserve( - args.projectId, - args.projectPath, - args.taskName, - args.baseRef - ); - if (result) { - return { - success: true, - worktree: result.worktree, - needsBaseRefSwitch: result.needsBaseRefSwitch, - }; - } - return { success: false, error: 'No reserve available' }; - } catch (error) { - console.error('Failed to claim reserve:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // Claim a reserve and persist the task in one IPC round-trip. 
- ipcMain.handle( - 'worktree:claimReserveAndSaveTask', - async ( - event, - args: { - projectId: string; - projectPath: string; - taskName: string; - baseRef?: string; - task: { - projectId: string; - name: string; - status: 'active' | 'idle' | 'running'; - agentId?: string | null; - metadata?: any; - useWorktree?: boolean; - }; - } - ) => { - try { - const project = await resolveProjectByIdOrPath({ - projectId: args.projectId, - projectPath: args.projectPath, - }); - if (isRemoteProject(project)) { - return { success: false, error: 'Remote worktree pooling is not supported yet' }; - } - - const claim = await worktreePoolService.claimReserve( - args.projectId, - args.projectPath, - args.taskName, - args.baseRef - ); - if (!claim) { - return { success: false, error: 'No reserve available' }; - } - - const persistedTask = { - id: claim.worktree.id, - projectId: args.projectId, - name: args.taskName, - branch: claim.worktree.branch, - path: claim.worktree.path, - status: args.task.status, - agentId: args.task.agentId ?? null, - metadata: args.task.metadata ?? 
null, - useWorktree: args.task.useWorktree !== false, - }; - - await databaseService.saveTask(persistedTask); - - return { - success: true, - worktree: claim.worktree, - task: persistedTask, - needsBaseRefSwitch: claim.needsBaseRefSwitch, - }; - } catch (error) { - console.error('Failed to claim reserve and save task:', error); - return { success: false, error: (error as Error).message }; - } - } - ); - - // Remove reserve for a project (cleanup) - ipcMain.handle( - 'worktree:removeReserve', - async (event, args: { projectId: string; projectPath?: string; isRemote?: boolean }) => { - try { - if (args.isRemote) { - return { success: true }; - } - - let projectPath = args.projectPath; - if (!projectPath) { - const project = await resolveProjectByIdOrPath({ projectId: args.projectId }); - if (!project) { - await worktreePoolService.removeReserve(args.projectId); - return { success: true }; - } - if (isRemoteProject(project)) { - return { success: true }; - } - projectPath = project.path; - } - - await worktreePoolService.removeReserve(args.projectId, projectPath); - return { success: true }; - } catch (error) { - console.error('Failed to remove reserve:', error); - return { success: false, error: (error as Error).message }; - } - } - ); -} diff --git a/src/main/settings.ts b/src/main/settings.ts deleted file mode 100644 index b4957b068..000000000 --- a/src/main/settings.ts +++ /dev/null @@ -1,607 +0,0 @@ -import { app } from 'electron'; -import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs'; -import { dirname, join } from 'node:path'; -import { homedir } from 'node:os'; -import type { ProviderId } from '@shared/providers/registry'; -import { isValidProviderId } from '@shared/providers/registry'; -import { isValidOpenInAppId, type OpenInAppId } from '@shared/openInApps'; - -export type DeepPartial = { - [K in keyof T]?: NonNullable extends object ? 
DeepPartial> : T[K]; -}; - -export type AppSettingsUpdate = DeepPartial; - -const DEFAULT_PROVIDER_ID: ProviderId = 'claude'; -const IS_MAC = process.platform === 'darwin'; - -export interface RepositorySettings { - branchPrefix: string; // e.g., 'emdash' - pushOnCreate: boolean; -} - -export type ShortcutModifier = - | 'cmd' - | 'ctrl' - | 'shift' - | 'alt' - | 'option' - | 'cmd+shift' - | 'ctrl+shift'; - -export interface ShortcutBinding { - key: string; - modifier: ShortcutModifier; -} - -export interface KeyboardSettings { - commandPalette?: ShortcutBinding; - settings?: ShortcutBinding; - toggleLeftSidebar?: ShortcutBinding; - toggleRightSidebar?: ShortcutBinding; - toggleTheme?: ShortcutBinding; - toggleKanban?: ShortcutBinding; - toggleEditor?: ShortcutBinding; - closeModal?: ShortcutBinding; - nextProject?: ShortcutBinding; - prevProject?: ShortcutBinding; - newTask?: ShortcutBinding; - nextAgent?: ShortcutBinding; - prevAgent?: ShortcutBinding; -} - -export interface InterfaceSettings { - autoRightSidebarBehavior?: boolean; - theme?: 'light' | 'dark' | 'dark-black' | 'system'; - taskHoverAction?: 'delete' | 'archive'; -} - -/** - * Custom configuration for a CLI provider. - * All fields are optional - if undefined, the default from registry.ts is used. - * If set to empty string, the flag is disabled. 
- */ -export interface ProviderCustomConfig { - cli?: string; - resumeFlag?: string; - defaultArgs?: string; - autoApproveFlag?: string; - initialPromptFlag?: string; - extraArgs?: string; - env?: Record; -} - -export type ProviderCustomConfigs = Record; - -export interface AppSettings { - repository: RepositorySettings; - projectPrep: { - autoInstallOnOpenInEditor: boolean; - }; - browserPreview?: { - enabled: boolean; - engine: 'chromium'; - }; - notifications?: { - enabled: boolean; - sound: boolean; - osNotifications: boolean; - soundFocusMode: 'always' | 'unfocused'; - }; - mcp?: { - context7?: { - enabled: boolean; - installHintsDismissed?: Record; - }; - }; - defaultProvider?: ProviderId; - tasks?: { - autoGenerateName: boolean; - autoApproveByDefault: boolean; - autoTrustWorktrees: boolean; - }; - projects?: { - defaultDirectory: string; - }; - keyboard?: KeyboardSettings; - interface?: InterfaceSettings; - providerConfigs?: ProviderCustomConfigs; - terminal?: { - fontFamily: string; - autoCopyOnSelection: boolean; - }; - defaultOpenInApp?: OpenInAppId; - hiddenOpenInApps?: OpenInAppId[]; -} - -function getPlatformTaskSwitchDefaults(): { next: ShortcutBinding; prev: ShortcutBinding } { - if (IS_MAC) { - return { - next: { key: ']', modifier: 'cmd' }, - prev: { key: '[', modifier: 'cmd' }, - }; - } - - return { - next: { key: 'Tab', modifier: 'ctrl' }, - prev: { key: 'Tab', modifier: 'ctrl+shift' }, - }; -} - -const TASK_SWITCH_DEFAULTS = getPlatformTaskSwitchDefaults(); - -const DEFAULT_SETTINGS: AppSettings = { - repository: { - branchPrefix: 'emdash', - pushOnCreate: true, - }, - projectPrep: { - autoInstallOnOpenInEditor: true, - }, - browserPreview: { - enabled: true, - engine: 'chromium', - }, - notifications: { - enabled: true, - sound: true, - osNotifications: true, - soundFocusMode: 'always', - }, - mcp: { - context7: { - enabled: false, - installHintsDismissed: {}, - }, - }, - defaultProvider: DEFAULT_PROVIDER_ID, - tasks: { - autoGenerateName: 
true, - autoApproveByDefault: false, - autoTrustWorktrees: true, - }, - projects: { - defaultDirectory: join(homedir(), 'emdash-projects'), - }, - keyboard: { - commandPalette: { key: 'k', modifier: 'cmd' }, - settings: { key: ',', modifier: 'cmd' }, - toggleLeftSidebar: { key: 'b', modifier: 'cmd' }, - toggleRightSidebar: { key: '.', modifier: 'cmd' }, - toggleTheme: { key: 't', modifier: 'cmd' }, - toggleKanban: { key: 'p', modifier: 'cmd' }, - toggleEditor: { key: 'e', modifier: 'cmd' }, - nextProject: TASK_SWITCH_DEFAULTS.next, - prevProject: TASK_SWITCH_DEFAULTS.prev, - newTask: { key: 'n', modifier: 'cmd' }, - nextAgent: { key: 'k', modifier: 'cmd+shift' }, - prevAgent: { key: 'j', modifier: 'cmd+shift' }, - }, - interface: { - autoRightSidebarBehavior: false, - theme: 'system', - taskHoverAction: 'delete', - }, - providerConfigs: {}, - terminal: { - fontFamily: '', - autoCopyOnSelection: false, - }, - defaultOpenInApp: 'terminal', - hiddenOpenInApps: [], -}; - -function getSettingsPath(): string { - const dir = app.getPath('userData'); - return join(dir, 'settings.json'); -} - -function deepMerge>(base: T, partial?: Partial): T { - if (!partial) return base; - const out: any = Array.isArray(base) ? [...(base as any)] : { ...base }; - for (const [k, v] of Object.entries(partial)) { - if (v && typeof v === 'object' && !Array.isArray(v)) { - out[k] = deepMerge((base as any)[k] ?? 
{}, v as any); - } else if (v !== undefined) { - out[k] = v; - } - } - return out as T; -} - -let cached: AppSettings | null = null; - -function normalizeShortcutKey(value: unknown): string | null { - if (typeof value !== 'string') return null; - const trimmed = value.trim(); - if (!trimmed) return null; - - const lower = trimmed.toLowerCase(); - if (lower === 'esc' || lower === 'escape') return 'Escape'; - if (lower === 'tab') return 'Tab'; - if (lower === 'arrowleft' || lower === 'left') return 'ArrowLeft'; - if (lower === 'arrowright' || lower === 'right') return 'ArrowRight'; - if (lower === 'arrowup' || lower === 'up') return 'ArrowUp'; - if (lower === 'arrowdown' || lower === 'down') return 'ArrowDown'; - - // Allow single printable, non-whitespace characters. - if (trimmed.length === 1 && /\S/u.test(trimmed)) { - return trimmed.toLowerCase(); - } - - return null; -} - -function normalizeShortcutModifier(value: unknown, fallback: ShortcutModifier): ShortcutModifier { - if (typeof value !== 'string') return fallback; - - const normalized = value.toLowerCase().replace(/\s+/g, ''); - const aliases: Record = { - cmd: 'cmd', - command: 'cmd', - meta: 'cmd', - ctrl: 'ctrl', - control: 'ctrl', - shift: 'shift', - alt: 'alt', - option: 'option', - opt: 'option', - 'cmd+shift': 'cmd+shift', - 'shift+cmd': 'cmd+shift', - 'command+shift': 'cmd+shift', - 'shift+command': 'cmd+shift', - 'meta+shift': 'cmd+shift', - 'shift+meta': 'cmd+shift', - 'ctrl+shift': 'ctrl+shift', - 'shift+ctrl': 'ctrl+shift', - 'control+shift': 'ctrl+shift', - 'shift+control': 'ctrl+shift', - }; - - return aliases[normalized] ?? 
fallback; -} - -function isBinding(binding: ShortcutBinding, modifier: ShortcutModifier, key: string): boolean { - return binding.modifier === modifier && binding.key === key; -} - -function assertNoKeyboardShortcutConflicts(keyboard?: KeyboardSettings): void { - if (!keyboard) return; - - const seen = new Map(); - - for (const [shortcutName, binding] of Object.entries(keyboard)) { - if (!binding?.key || !binding?.modifier) continue; - - const normalizedKey = binding.key.toLowerCase(); - const signature = `${binding.modifier}:${normalizedKey}`; - const conflictWith = seen.get(signature); - - if (conflictWith) { - throw new Error( - `Keyboard shortcut conflict: "${shortcutName}" duplicates "${conflictWith}".` - ); - } - - seen.set(signature, shortcutName); - } -} - -/** - * Load application settings from disk with sane defaults. - */ -export function getAppSettings(): AppSettings { - try { - if (cached) return cached; - const file = getSettingsPath(); - if (existsSync(file)) { - const raw = readFileSync(file, 'utf8'); - const parsed = JSON.parse(raw); - cached = normalizeSettings(deepMerge(DEFAULT_SETTINGS, parsed)); - return cached; - } - } catch { - // ignore read/parse errors, fall through to defaults - } - cached = { ...DEFAULT_SETTINGS }; - return cached; -} - -/** - * Update settings and persist to disk. Partial updates are deeply merged. 
- */ -export function updateAppSettings(partial: AppSettingsUpdate): AppSettings { - const current = getAppSettings(); - const merged = deepMerge(current, partial as Partial); - const next = normalizeSettings(merged); - if (partial.keyboard) { - assertNoKeyboardShortcutConflicts(next.keyboard); - } - persistSettings(next); - cached = next; - return next; -} - -export function persistSettings(settings: AppSettings) { - try { - const file = getSettingsPath(); - const dir = dirname(file); - if (!existsSync(dir)) mkdirSync(dir, { recursive: true }); - writeFileSync(file, JSON.stringify(settings, null, 2), 'utf8'); - } catch {} -} - -/** - * Coerce and validate settings for robustness and forward-compatibility. - */ -export function normalizeSettings(input: AppSettings): AppSettings { - const out: AppSettings = { - repository: { - branchPrefix: DEFAULT_SETTINGS.repository.branchPrefix, - pushOnCreate: DEFAULT_SETTINGS.repository.pushOnCreate, - }, - projectPrep: { - autoInstallOnOpenInEditor: DEFAULT_SETTINGS.projectPrep.autoInstallOnOpenInEditor, - }, - browserPreview: { - enabled: DEFAULT_SETTINGS.browserPreview!.enabled, - engine: DEFAULT_SETTINGS.browserPreview!.engine, - }, - notifications: { - enabled: DEFAULT_SETTINGS.notifications!.enabled, - sound: DEFAULT_SETTINGS.notifications!.sound, - osNotifications: DEFAULT_SETTINGS.notifications!.osNotifications, - soundFocusMode: DEFAULT_SETTINGS.notifications!.soundFocusMode, - }, - mcp: { - context7: { - enabled: DEFAULT_SETTINGS.mcp!.context7!.enabled, - installHintsDismissed: {}, - }, - }, - }; - - // Repository - const repo = input?.repository ?? DEFAULT_SETTINGS.repository; - let prefix = String(repo?.branchPrefix ?? DEFAULT_SETTINGS.repository.branchPrefix); - prefix = prefix.trim().replace(/\/+$/, ''); // remove trailing slashes - if (!prefix) prefix = DEFAULT_SETTINGS.repository.branchPrefix; - if (prefix.length > 50) prefix = prefix.slice(0, 50); - const push = Boolean(repo?.pushOnCreate ?? 
DEFAULT_SETTINGS.repository.pushOnCreate); - - out.repository.branchPrefix = prefix; - out.repository.pushOnCreate = push; - // Project prep - const prep = (input as any)?.projectPrep || {}; - out.projectPrep.autoInstallOnOpenInEditor = Boolean( - prep?.autoInstallOnOpenInEditor ?? DEFAULT_SETTINGS.projectPrep.autoInstallOnOpenInEditor - ); - - const bp = (input as any)?.browserPreview || {}; - out.browserPreview = { - enabled: Boolean(bp?.enabled ?? DEFAULT_SETTINGS.browserPreview!.enabled), - engine: 'chromium', - }; - - const notif = (input as any)?.notifications || {}; - const rawFocusMode = notif?.soundFocusMode; - out.notifications = { - enabled: Boolean(notif?.enabled ?? DEFAULT_SETTINGS.notifications!.enabled), - sound: Boolean(notif?.sound ?? DEFAULT_SETTINGS.notifications!.sound), - osNotifications: Boolean( - notif?.osNotifications ?? DEFAULT_SETTINGS.notifications!.osNotifications - ), - soundFocusMode: - rawFocusMode === 'always' || rawFocusMode === 'unfocused' - ? rawFocusMode - : DEFAULT_SETTINGS.notifications!.soundFocusMode, - }; - - // MCP - const mcp = (input as any)?.mcp || {}; - const c7 = mcp?.context7 || {}; - out.mcp = { - context7: { - enabled: Boolean(c7?.enabled ?? DEFAULT_SETTINGS.mcp!.context7!.enabled), - installHintsDismissed: - c7?.installHintsDismissed && typeof c7.installHintsDismissed === 'object' - ? { ...c7.installHintsDismissed } - : {}, - }, - }; - - // Default provider - const defaultProvider = (input as any)?.defaultProvider; - out.defaultProvider = isValidProviderId(defaultProvider) - ? defaultProvider - : DEFAULT_SETTINGS.defaultProvider!; - - // Tasks - const tasks = (input as any)?.tasks || {}; - out.tasks = { - autoGenerateName: Boolean(tasks?.autoGenerateName ?? DEFAULT_SETTINGS.tasks!.autoGenerateName), - autoApproveByDefault: Boolean( - tasks?.autoApproveByDefault ?? DEFAULT_SETTINGS.tasks!.autoApproveByDefault - ), - autoTrustWorktrees: Boolean( - tasks?.autoTrustWorktrees ?? 
DEFAULT_SETTINGS.tasks!.autoTrustWorktrees - ), - }; - - // Projects - const projects = (input as any)?.projects || {}; - let defaultDir = String( - projects?.defaultDirectory ?? DEFAULT_SETTINGS.projects!.defaultDirectory - ).trim(); - if (!defaultDir) { - defaultDir = DEFAULT_SETTINGS.projects!.defaultDirectory; - } - // Resolve ~ to home directory if present - if (defaultDir.startsWith('~')) { - defaultDir = join(homedir(), defaultDir.slice(1)); - } - out.projects = { - defaultDirectory: defaultDir, - }; - - // Keyboard - const keyboard = (input as any)?.keyboard || {}; - const normalizeBinding = (binding: any, defaultBinding: ShortcutBinding): ShortcutBinding => { - if (!binding || typeof binding !== 'object') return defaultBinding; - const key = normalizeShortcutKey(binding.key) ?? defaultBinding.key; - const modifier = normalizeShortcutModifier(binding.modifier, defaultBinding.modifier); - return { key, modifier }; - }; - out.keyboard = { - commandPalette: normalizeBinding( - keyboard.commandPalette, - DEFAULT_SETTINGS.keyboard!.commandPalette! - ), - settings: normalizeBinding(keyboard.settings, DEFAULT_SETTINGS.keyboard!.settings!), - toggleLeftSidebar: normalizeBinding( - keyboard.toggleLeftSidebar, - DEFAULT_SETTINGS.keyboard!.toggleLeftSidebar! - ), - toggleRightSidebar: normalizeBinding( - keyboard.toggleRightSidebar, - DEFAULT_SETTINGS.keyboard!.toggleRightSidebar! 
- ), - toggleTheme: normalizeBinding(keyboard.toggleTheme, DEFAULT_SETTINGS.keyboard!.toggleTheme!), - toggleKanban: normalizeBinding(keyboard.toggleKanban, DEFAULT_SETTINGS.keyboard!.toggleKanban!), - toggleEditor: normalizeBinding(keyboard.toggleEditor, DEFAULT_SETTINGS.keyboard!.toggleEditor!), - nextProject: normalizeBinding(keyboard.nextProject, DEFAULT_SETTINGS.keyboard!.nextProject!), - prevProject: normalizeBinding(keyboard.prevProject, DEFAULT_SETTINGS.keyboard!.prevProject!), - newTask: normalizeBinding(keyboard.newTask, DEFAULT_SETTINGS.keyboard!.newTask!), - nextAgent: normalizeBinding(keyboard.nextAgent, DEFAULT_SETTINGS.keyboard!.nextAgent!), - prevAgent: normalizeBinding(keyboard.prevAgent, DEFAULT_SETTINGS.keyboard!.prevAgent!), - }; - const platformTaskDefaults = getPlatformTaskSwitchDefaults(); - const isLegacyArrowPair = - isBinding(out.keyboard.nextProject!, 'cmd', 'ArrowRight') && - isBinding(out.keyboard.prevProject!, 'cmd', 'ArrowLeft'); - const isLegacyTabPair = - isBinding(out.keyboard.nextProject!, 'ctrl', 'Tab') && - isBinding(out.keyboard.prevProject!, 'ctrl+shift', 'Tab'); - if (isLegacyArrowPair || (IS_MAC && isLegacyTabPair)) { - out.keyboard.nextProject = platformTaskDefaults.next; - out.keyboard.prevProject = platformTaskDefaults.prev; - } - - // Interface - const iface = (input as any)?.interface || {}; - out.interface = { - autoRightSidebarBehavior: Boolean( - iface?.autoRightSidebarBehavior ?? DEFAULT_SETTINGS.interface!.autoRightSidebarBehavior - ), - theme: ['light', 'dark', 'dark-black', 'system'].includes(iface?.theme) - ? iface.theme - : DEFAULT_SETTINGS.interface!.theme, - taskHoverAction: iface?.taskHoverAction === 'archive' ? 
'archive' : 'delete', - }; - - // Provider custom configs - const providerConfigs = (input as any)?.providerConfigs || {}; - out.providerConfigs = {}; - if (providerConfigs && typeof providerConfigs === 'object') { - for (const [providerId, config] of Object.entries(providerConfigs)) { - if (config && typeof config === 'object') { - const c = config as Record; - let env: Record | undefined; - if (c.env && typeof c.env === 'object') { - env = {}; - for (const [k, v] of Object.entries(c.env)) { - if ( - typeof k === 'string' && - typeof v === 'string' && - /^[A-Za-z_][A-Za-z0-9_]*$/.test(k) - ) { - env[k] = v; - } - } - if (Object.keys(env).length === 0) env = undefined; - } - out.providerConfigs[providerId] = { - ...(typeof c.cli === 'string' ? { cli: c.cli } : {}), - ...(typeof c.resumeFlag === 'string' ? { resumeFlag: c.resumeFlag } : {}), - ...(typeof c.defaultArgs === 'string' ? { defaultArgs: c.defaultArgs } : {}), - ...(typeof c.autoApproveFlag === 'string' ? { autoApproveFlag: c.autoApproveFlag } : {}), - ...(typeof c.initialPromptFlag === 'string' - ? { initialPromptFlag: c.initialPromptFlag } - : {}), - ...(typeof c.extraArgs === 'string' ? { extraArgs: c.extraArgs } : {}), - ...(env ? { env } : {}), - }; - } - } - } - - // Terminal - const term = (input as any)?.terminal || {}; - const fontFamily = String(term?.fontFamily ?? '').trim(); - const autoCopyOnSelection = Boolean(term?.autoCopyOnSelection ?? false); - out.terminal = { fontFamily, autoCopyOnSelection }; - - // Default Open In App - const defaultOpenInApp = (input as any)?.defaultOpenInApp; - out.defaultOpenInApp = isValidOpenInAppId(defaultOpenInApp) - ? 
defaultOpenInApp - : DEFAULT_SETTINGS.defaultOpenInApp!; - - // Hidden Open In Apps - const rawHidden = (input as any)?.hiddenOpenInApps; - if (Array.isArray(rawHidden)) { - const validated = rawHidden.filter(isValidOpenInAppId); - out.hiddenOpenInApps = [...new Set(validated)]; - } else { - out.hiddenOpenInApps = []; - } - - return out; -} - -/** - * Get custom configuration for a specific provider. - * Returns a shallow copy to prevent cache corruption from external mutations. - */ -export function getProviderCustomConfig(providerId: string): ProviderCustomConfig | undefined { - const settings = getAppSettings(); - const config = settings.providerConfigs?.[providerId]; - return config ? { ...config } : undefined; -} - -/** - * Get all provider custom configurations. - * Returns a deep copy to prevent cache corruption from external mutations. - */ -export function getAllProviderCustomConfigs(): ProviderCustomConfigs { - const settings = getAppSettings(); - const configs = settings.providerConfigs ?? {}; - // Return deep copy to prevent cache corruption - return Object.fromEntries(Object.entries(configs).map(([key, value]) => [key, { ...value }])); -} - -/** - * Update custom configuration for a specific provider. - * Pass undefined to remove the custom config and use defaults. - */ -export function updateProviderCustomConfig( - providerId: string, - config: ProviderCustomConfig | undefined -): void { - const settings = getAppSettings(); - const currentConfigs = settings.providerConfigs ?? 
{}; - - if (config === undefined) { - // Remove the config - const { [providerId]: _, ...rest } = currentConfigs; - updateAppSettings({ providerConfigs: rest }); - } else { - // Update/add the config - updateAppSettings({ - providerConfigs: { - ...currentConfigs, - [providerId]: config, - }, - }); - } -} diff --git a/src/main/tsconfig.json b/src/main/tsconfig.json deleted file mode 100644 index ea2fa1def..000000000 --- a/src/main/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../tsconfig.main.json" -} diff --git a/src/main/types/fsListWorker.ts b/src/main/types/fsListWorker.ts deleted file mode 100644 index f7b6ab659..000000000 --- a/src/main/types/fsListWorker.ts +++ /dev/null @@ -1,19 +0,0 @@ -export type FsListItem = { - path: string; - type: 'file' | 'dir'; -}; - -export type FsListWorkerResponse = - | { - taskId: number; - ok: true; - items: FsListItem[]; - truncated: boolean; - reason?: 'maxEntries' | 'timeBudget'; - durationMs: number; - } - | { - taskId: number; - ok: false; - error: string; - }; diff --git a/src/main/types/terminalSnapshot.ts b/src/main/types/terminalSnapshot.ts deleted file mode 100644 index 3458bdf91..000000000 --- a/src/main/types/terminalSnapshot.ts +++ /dev/null @@ -1,10 +0,0 @@ -export interface TerminalSnapshotPayload { - version: 1; - createdAt: string; - cols: number; - rows: number; - data: string; - stats?: Record; -} - -export const TERMINAL_SNAPSHOT_VERSION = 1 as const; diff --git a/src/main/utils/__tests__/childProcessEnv.test.ts b/src/main/utils/__tests__/childProcessEnv.test.ts deleted file mode 100644 index 2e72bd5a2..000000000 --- a/src/main/utils/__tests__/childProcessEnv.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { buildExternalToolEnv } from '../childProcessEnv'; - -describe('buildExternalToolEnv', () => { - it('removes AppImage-only keys and strips mount paths from PATH-like vars', () => { - const env = buildExternalToolEnv({ - APPDIR: 
'/tmp/.mount_emdashAbCd', - APPIMAGE: '/home/user/emdash.AppImage', - ARGV0: 'AppRun', - CHROME_DESKTOP: 'emdash.desktop', - GSETTINGS_SCHEMA_DIR: '/tmp/.mount_emdashAbCd/usr/share/glib-2.0/schemas', - OWD: '/tmp', - PATH: '/usr/local/bin:/tmp/.mount_emdashAbCd/usr/bin:/usr/bin', - LD_LIBRARY_PATH: '/tmp/.mount_emdashAbCd/usr/lib:/usr/local/cuda/lib64', - XDG_DATA_DIRS: '/tmp/.mount_emdashAbCd/usr/share:/usr/share', - HOME: '/home/user', - USER: 'user', - KEEP_ME: 'yes', - }); - - expect(env.APPDIR).toBeUndefined(); - expect(env.APPIMAGE).toBeUndefined(); - expect(env.ARGV0).toBeUndefined(); - expect(env.CHROME_DESKTOP).toBeUndefined(); - expect(env.GSETTINGS_SCHEMA_DIR).toBeUndefined(); - expect(env.OWD).toBeUndefined(); - - expect(env.PATH).toBe('/usr/local/bin:/usr/bin'); - expect(env.LD_LIBRARY_PATH).toBe('/usr/local/cuda/lib64'); - expect(env.XDG_DATA_DIRS).toBe('/usr/share'); - - expect(env.HOME).toBe('/home/user'); - expect(env.USER).toBe('user'); - expect(env.KEEP_ME).toBe('yes'); - }); - - it('removes Python vars only when they point into AppImage mount paths', () => { - const stripped = buildExternalToolEnv({ - APPDIR: '/tmp/.mount_emdashZZ', - PYTHONHOME: '/tmp/.mount_emdashZZ/usr', - PYTHONPATH: '/tmp/.mount_emdashZZ/usr/lib/python3.11', - }); - - expect(stripped.PYTHONHOME).toBeUndefined(); - expect(stripped.PYTHONPATH).toBeUndefined(); - - const kept = buildExternalToolEnv({ - PYTHONHOME: '/opt/python', - PYTHONPATH: '/opt/python/lib', - }); - - expect(kept.PYTHONHOME).toBe('/opt/python'); - expect(kept.PYTHONPATH).toBe('/opt/python/lib'); - }); -}); diff --git a/src/main/utils/__tests__/diffParser.test.ts b/src/main/utils/__tests__/diffParser.test.ts deleted file mode 100644 index 31a0a2d5b..000000000 --- a/src/main/utils/__tests__/diffParser.test.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { - parseDiffLines, - stripTrailingNewline, - MAX_DIFF_CONTENT_BYTES, - MAX_DIFF_OUTPUT_BYTES, -} from 
'../diffParser'; - -describe('parseDiffLines', () => { - it('should parse a standard unified diff', () => { - const stdout = - 'diff --git a/file.ts b/file.ts\n' + - 'index abc..def 100644\n' + - '--- a/file.ts\n' + - '+++ b/file.ts\n' + - '@@ -1,3 +1,3 @@\n' + - ' hello\n' + - '-old line\n' + - '+new line\n' + - ' world\n'; - - const { lines, isBinary } = parseDiffLines(stdout); - - expect(isBinary).toBe(false); - expect(lines).toEqual([ - { left: 'hello', right: 'hello', type: 'context' }, - { left: 'old line', type: 'del' }, - { right: 'new line', type: 'add' }, - { left: 'world', right: 'world', type: 'context' }, - ]); - }); - - it('should skip all extended diff headers', () => { - const stdout = - 'diff --git a/file.ts b/file.ts\n' + - 'new file mode 100644\n' + - 'old file mode 100755\n' + - 'deleted file mode 100644\n' + - 'similarity index 95%\n' + - 'rename from old.ts\n' + - 'rename to new.ts\n' + - 'index abc..def 100644\n' + - '--- a/file.ts\n' + - '+++ b/file.ts\n' + - '@@ -1 +1 @@\n' + - '+content\n'; - - const { lines } = parseDiffLines(stdout); - expect(lines).toEqual([{ right: 'content', type: 'add' }]); - }); - - it('should skip "No newline at end of file" markers', () => { - const stdout = - 'diff --git a/f b/f\n' + - '--- a/f\n' + - '+++ b/f\n' + - '@@ -1 +1 @@\n' + - '-old\n' + - '\\ No newline at end of file\n' + - '+new\n' + - '\\ No newline at end of file\n'; - - const { lines } = parseDiffLines(stdout); - expect(lines).toEqual([ - { left: 'old', type: 'del' }, - { right: 'new', type: 'add' }, - ]); - }); - - it('should detect binary files', () => { - const stdout = - 'diff --git a/img.png b/img.png\n' + - 'index abc..def 100644\n' + - 'Binary files a/img.png and b/img.png differ\n'; - - const { lines, isBinary } = parseDiffLines(stdout); - expect(isBinary).toBe(true); - expect(lines).toEqual([]); - }); - - it('should return empty for empty input', () => { - const { lines, isBinary } = parseDiffLines(''); - expect(lines).toEqual([]); - 
expect(isBinary).toBe(false); - }); - - it('should treat unrecognized prefix lines as context with full line', () => { - const { lines } = parseDiffLines('some unexpected line\n'); - expect(lines).toEqual([ - { left: 'some unexpected line', right: 'some unexpected line', type: 'context' }, - ]); - }); -}); - -describe('stripTrailingNewline', () => { - it('should strip one trailing newline', () => { - expect(stripTrailingNewline('hello\n')).toBe('hello'); - }); - - it('should strip only one trailing newline', () => { - expect(stripTrailingNewline('hello\n\n')).toBe('hello\n'); - }); - - it('should return unchanged if no trailing newline', () => { - expect(stripTrailingNewline('hello')).toBe('hello'); - }); - - it('should handle empty string', () => { - expect(stripTrailingNewline('')).toBe(''); - }); -}); - -describe('MAX_DIFF_CONTENT_BYTES', () => { - it('should be 512KB', () => { - expect(MAX_DIFF_CONTENT_BYTES).toBe(512 * 1024); - }); -}); - -describe('MAX_DIFF_OUTPUT_BYTES', () => { - it('should be 10MB', () => { - expect(MAX_DIFF_OUTPUT_BYTES).toBe(10 * 1024 * 1024); - }); -}); diff --git a/src/main/utils/__tests__/remoteOpenIn.test.ts b/src/main/utils/__tests__/remoteOpenIn.test.ts deleted file mode 100644 index 1bc837437..000000000 --- a/src/main/utils/__tests__/remoteOpenIn.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { - buildGhosttyRemoteExecArgs, - buildRemoteEditorUrl, - buildRemoteSshCommand, - buildRemoteSshAuthority, - buildRemoteTerminalShellCommand, -} from '../remoteOpenIn'; - -describe('buildRemoteSshAuthority', () => { - it('prepends username when host has no user component', () => { - expect(buildRemoteSshAuthority('example.internal', 'azureuser')).toBe( - 'azureuser@example.internal' - ); - }); - - it('preserves host when username is already embedded', () => { - expect(buildRemoteSshAuthority('existing@example.internal', 'azureuser')).toBe( - 'existing@example.internal' - ); - }); -}); - 
-describe('buildRemoteEditorUrl', () => { - it('builds cursor remote URL with encoded user@host authority', () => { - expect( - buildRemoteEditorUrl('cursor', 'example.internal', 'azureuser', '/home/azureuser/src') - ).toBe('cursor://vscode-remote/ssh-remote+azureuser%40example.internal/home/azureuser/src'); - }); - - it('normalizes relative target paths with a leading slash', () => { - expect(buildRemoteEditorUrl('vscode', 'example.internal', 'azureuser', 'workspace')).toBe( - 'vscode://vscode-remote/ssh-remote+azureuser%40example.internal/workspace' - ); - }); -}); - -describe('buildGhosttyRemoteExecArgs', () => { - const expectedRemoteShellCommand = - `cd '/home/azureuser/pro/smv/.emdash/worktrees/task one' && ` + - '(if command -v infocmp >/dev/null 2>&1 && [ -n "${TERM:-}" ] && infocmp "${TERM}" >/dev/null 2>&1; then :; else export TERM=xterm-256color; fi) && ' + - '(exec "${SHELL:-/bin/bash}" || exec /bin/bash || exec /bin/sh)'; - - it('builds shared remote shell bootstrap command', () => { - expect( - buildRemoteTerminalShellCommand('/home/azureuser/pro/smv/.emdash/worktrees/task one') - ).toBe(expectedRemoteShellCommand); - }); - - it('builds ssh argv tokens for Ghostty -e', () => { - expect( - buildGhosttyRemoteExecArgs({ - host: 'example.internal', - username: 'azureuser', - port: 22, - targetPath: '/home/azureuser/pro/smv/.emdash/worktrees/task one', - }) - ).toEqual([ - 'ssh', - 'azureuser@example.internal', - '-o', - 'ControlMaster=no', - '-o', - 'ControlPath=none', - '-p', - '22', - '-t', - expectedRemoteShellCommand, - ]); - }); - - it('preserves existing user@host authority', () => { - expect( - buildGhosttyRemoteExecArgs({ - host: 'ops@example.internal', - username: 'ignored-user', - port: '2202', - targetPath: '/tmp/x', - }) - ).toEqual([ - 'ssh', - 'ops@example.internal', - '-o', - 'ControlMaster=no', - '-o', - 'ControlPath=none', - '-p', - '2202', - '-t', - `cd '/tmp/x' && (if command -v infocmp >/dev/null 2>&1 && [ -n "\${TERM:-}" ] && infocmp 
"\${TERM}" >/dev/null 2>&1; then :; else export TERM=xterm-256color; fi) && (exec "\${SHELL:-/bin/bash}" || exec /bin/bash || exec /bin/sh)`, - ]); - }); - - it('builds quoted ssh command string for shell-based launchers', () => { - expect( - buildRemoteSshCommand({ - host: 'example.internal', - username: 'azureuser', - port: 22, - targetPath: '/home/azureuser/pro/smv/.emdash/worktrees/task one', - }) - ).toBe( - `ssh 'azureuser@example.internal' -o 'ControlMaster=no' -o 'ControlPath=none' -p '22' -t '${expectedRemoteShellCommand.replace(/'/g, `'\\''`)}'` - ); - }); - - it('preserves existing user@host authority in shell command string', () => { - expect( - buildRemoteSshCommand({ - host: 'ops@example.internal', - username: 'ignored-user', - port: 22, - targetPath: '/tmp/x', - }) - ).toContain(`ssh 'ops@example.internal'`); - }); -}); diff --git a/src/main/utils/__tests__/shellEnv.test.ts b/src/main/utils/__tests__/shellEnv.test.ts deleted file mode 100644 index 945d79a08..000000000 --- a/src/main/utils/__tests__/shellEnv.test.ts +++ /dev/null @@ -1,142 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; -import { getShellEnvVar, detectSshAuthSock, initializeShellEnvironment } from '../shellEnv'; - -// Mock child_process -vi.mock('child_process', () => ({ - execSync: vi.fn(), -})); - -// Mock fs -vi.mock('fs', () => ({ - statSync: vi.fn(), - readdirSync: vi.fn(), -})); - -import { execSync } from 'child_process'; -import { statSync, readdirSync } from 'fs'; - -const mockedExecSync = vi.mocked(execSync); -const mockedStatSync = vi.mocked(statSync); -const mockedReaddirSync = vi.mocked(readdirSync); - -describe('shellEnv', () => { - const originalEnv = process.env; - - beforeEach(() => { - // Reset process.env - process.env = { ...originalEnv }; - vi.resetAllMocks(); - }); - - afterEach(() => { - process.env = originalEnv; - }); - - describe('getShellEnvVar', () => { - it('should return environment variable from shell', () => { - 
mockedExecSync.mockReturnValue('/path/to/socket'); - - const result = getShellEnvVar('SSH_AUTH_SOCK'); - - expect(result).toBe('/path/to/socket'); - expect(mockedExecSync).toHaveBeenCalledWith( - expect.stringContaining('printenv SSH_AUTH_SOCK'), - expect.objectContaining({ encoding: 'utf8', timeout: 5000 }) - ); - }); - - it('should return undefined when variable is empty', () => { - mockedExecSync.mockReturnValue(''); - - const result = getShellEnvVar('SSH_AUTH_SOCK'); - - expect(result).toBeUndefined(); - }); - - it('should return undefined when shell command fails', () => { - mockedExecSync.mockImplementation(() => { - throw new Error('Command failed'); - }); - - const result = getShellEnvVar('SSH_AUTH_SOCK'); - - expect(result).toBeUndefined(); - }); - }); - - describe('detectSshAuthSock', () => { - it('should return existing SSH_AUTH_SOCK if already set', () => { - process.env.SSH_AUTH_SOCK = '/existing/socket'; - - const result = detectSshAuthSock(); - - expect(result).toBe('/existing/socket'); - expect(mockedExecSync).not.toHaveBeenCalled(); - }); - - it('should detect SSH_AUTH_SOCK from shell when not set', () => { - delete process.env.SSH_AUTH_SOCK; - mockedExecSync.mockReturnValue('/shell/detected/socket'); - - const result = detectSshAuthSock(); - - expect(result).toBe('/shell/detected/socket'); - }); - - it('should check common locations as fallback', () => { - delete process.env.SSH_AUTH_SOCK; - mockedExecSync.mockImplementation(() => { - throw new Error('Shell detection failed'); - }); - - // Mock readdirSync to simulate finding a socket - mockedReaddirSync.mockImplementation((dirPath) => { - const pathStr = dirPath.toString(); - if (pathStr.includes('com.apple.launchd')) { - return ['Listeners'] as any; - } - return [] as any; - }); - - // Mock statSync to indicate it's a socket - mockedStatSync.mockReturnValue({ isSocket: () => true } as any); - - const result = detectSshAuthSock(); - - // Should find the socket in launchd directory - 
expect(result).toBeTruthy(); - }); - - it('should return undefined when no socket is found', () => { - delete process.env.SSH_AUTH_SOCK; - mockedExecSync.mockImplementation(() => { - throw new Error('Shell detection failed'); - }); - mockedReaddirSync.mockImplementation(() => [] as any); - - const result = detectSshAuthSock(); - - expect(result).toBeUndefined(); - }); - }); - - describe('initializeShellEnvironment', () => { - it('should set process.env.SSH_AUTH_SOCK when socket is detected', () => { - delete process.env.SSH_AUTH_SOCK; - mockedExecSync.mockReturnValue('/detected/socket'); - - initializeShellEnvironment(); - - expect(process.env.SSH_AUTH_SOCK).toBe('/detected/socket'); - }); - - it('should not overwrite existing SSH_AUTH_SOCK', () => { - process.env.SSH_AUTH_SOCK = '/existing/socket'; - - initializeShellEnvironment(); - - expect(process.env.SSH_AUTH_SOCK).toBe('/existing/socket'); - expect(mockedExecSync).not.toHaveBeenCalled(); - }); - }); -}); diff --git a/src/main/utils/dev.ts b/src/main/utils/dev.ts deleted file mode 100644 index 05c5f60b4..000000000 --- a/src/main/utils/dev.ts +++ /dev/null @@ -1 +0,0 @@ -export const isDev = process.env.NODE_ENV === 'development' || process.argv.includes('--dev'); diff --git a/src/main/utils/diffParser.ts b/src/main/utils/diffParser.ts deleted file mode 100644 index 053aae0bc..000000000 --- a/src/main/utils/diffParser.ts +++ /dev/null @@ -1,53 +0,0 @@ -/** Maximum bytes for fetching file content in diffs. */ -export const MAX_DIFF_CONTENT_BYTES = 512 * 1024; - -/** Maximum bytes for `git diff` output (larger than content limit due to headers/context). */ -export const MAX_DIFF_OUTPUT_BYTES = 10 * 1024 * 1024; - -/** Headers emitted by `git diff` that should be skipped when parsing hunks. 
*/ -const DIFF_HEADER_PREFIXES = [ - 'diff ', - 'index ', - '--- ', - '+++ ', - '@@', - 'new file mode', - 'old file mode', - 'deleted file mode', - 'similarity index', - 'rename from', - 'rename to', - 'Binary files', -]; - -export type DiffLine = { left?: string; right?: string; type: 'context' | 'add' | 'del' }; - -export interface DiffResult { - lines: DiffLine[]; - isBinary?: boolean; - originalContent?: string; - modifiedContent?: string; -} - -/** Strip exactly one trailing newline, if present. */ -export function stripTrailingNewline(s: string): string { - return s.endsWith('\n') ? s.slice(0, -1) : s; -} - -/** Parse raw `git diff` output into structured diff lines, skipping headers. */ -export function parseDiffLines(stdout: string): { lines: DiffLine[]; isBinary: boolean } { - const result: DiffLine[] = []; - for (const line of stdout.split('\n')) { - if (!line) continue; - if (DIFF_HEADER_PREFIXES.some((p) => line.startsWith(p))) continue; - const prefix = line[0]; - const content = line.slice(1); - if (prefix === '\\') continue; - if (prefix === ' ') result.push({ left: content, right: content, type: 'context' }); - else if (prefix === '-') result.push({ left: content, type: 'del' }); - else if (prefix === '+') result.push({ right: content, type: 'add' }); - else result.push({ left: line, right: line, type: 'context' }); - } - const isBinary = result.length === 0 && stdout.includes('Binary files'); - return { lines: result, isBinary }; -} diff --git a/src/main/utils/externalLinks.ts b/src/main/utils/externalLinks.ts index 879845d3e..07da2d534 100644 --- a/src/main/utils/externalLinks.ts +++ b/src/main/utils/externalLinks.ts @@ -1,7 +1,7 @@ import { BrowserWindow, shell } from 'electron'; /** - * Ensure any external HTTP(S) links open in the user’s default browser + * Ensure any external HTTP(S) links open in the user's default browser * rather than inside the Electron window. 
Keeps app navigation scoped * to our renderer while preserving expected link behavior. */ @@ -9,7 +9,7 @@ export function registerExternalLinkHandlers(win: BrowserWindow, isDev: boolean) const wc = win.webContents; const isInternalAppUrl = (url: string) => { - if (isDev) return url.startsWith('http://localhost:3000'); + if (isDev) return url.startsWith(process.env.ELECTRON_RENDERER_URL!); return url.startsWith('file://') || /^http:\/\/(127\.0\.0\.1|localhost):\d+(?:\/|$)/i.test(url); }; diff --git a/src/main/utils/fsIgnores.ts b/src/main/utils/fsIgnores.ts deleted file mode 100644 index 3335bd1d1..000000000 --- a/src/main/utils/fsIgnores.ts +++ /dev/null @@ -1,11 +0,0 @@ -export const DEFAULT_IGNORES = new Set([ - '.git', - 'dist', - 'build', - 'out', - '.next', - '.nuxt', - '.cache', - 'coverage', - '.DS_Store', -]); diff --git a/src/main/utils/gitIgnore.ts b/src/main/utils/gitIgnore.ts deleted file mode 100644 index 77d9b6061..000000000 --- a/src/main/utils/gitIgnore.ts +++ /dev/null @@ -1,17 +0,0 @@ -import ignore from 'ignore'; - -export class GitIgnoreParser { - // Use any because strict typing of the ignore package might vary, but logically it's an Ignore object - private ig: any; - - constructor(content: string) { - this.ig = ignore().add(content); - } - - public ignores(path: string): boolean { - // The ignore package uses relative paths. - // If the path ends with / it might treat it as dir, but our path input from fsListWorker typically doesn't have trailing slash. - // 'ignore' usually handles 'node_modules' correctly matching directory. 
- return this.ig.ignores(path); - } -} diff --git a/src/main/utils/remoteProjectResolver.ts b/src/main/utils/remoteProjectResolver.ts deleted file mode 100644 index ec23bd93d..000000000 --- a/src/main/utils/remoteProjectResolver.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { databaseService, type Project } from '../services/DatabaseService'; - -export type RemoteProject = Project & { sshConnectionId: string; remotePath: string }; - -export function isRemoteProject(project: Project | null): project is RemoteProject { - return !!( - project && - project.isRemote && - typeof project.sshConnectionId === 'string' && - project.sshConnectionId.length > 0 && - typeof project.remotePath === 'string' && - project.remotePath.length > 0 - ); -} - -export async function resolveRemoteProjectForWorktreePath( - worktreePath: string -): Promise { - const all = await databaseService.getProjects(); - // Pick the longest matching remotePath prefix. - const candidates = all - .filter((p) => isRemoteProject(p)) - .filter((p) => worktreePath.startsWith(p.remotePath.replace(/\/+$/g, '') + '/')) - .sort((a, b) => b.remotePath.length - a.remotePath.length); - return candidates[0] ?? null; -} diff --git a/src/main/utils/safeStat.ts b/src/main/utils/safeStat.ts deleted file mode 100644 index 0fbc4bb64..000000000 --- a/src/main/utils/safeStat.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as fs from 'fs'; - -export function safeStat(pathname: string): fs.Stats | null { - try { - return fs.statSync(pathname); - } catch { - return null; - } -} diff --git a/src/main/utils/shellEnv.ts b/src/main/utils/shellEnv.ts index c45b9287e..d0549aedb 100644 --- a/src/main/utils/shellEnv.ts +++ b/src/main/utils/shellEnv.ts @@ -5,8 +5,8 @@ import { execSync } from 'child_process'; import * as fs from 'fs'; -import * as path from 'path'; import * as os from 'os'; +import * as path from 'path'; /** * Gets an environment variable from the user's login shell. 
diff --git a/src/main/workers/fsListWorker.ts b/src/main/workers/fsListWorker.ts deleted file mode 100644 index 49f005aa4..000000000 --- a/src/main/workers/fsListWorker.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { parentPort } from 'worker_threads'; -import * as fs from 'fs'; -import * as path from 'path'; -import { FsListItem, FsListWorkerResponse } from '../types/fsListWorker'; -import { DEFAULT_IGNORES } from '../utils/fsIgnores'; -import { safeStat } from '../utils/safeStat'; - -type ListWorkerRequest = { - taskId: number; - root: string; - includeDirs: boolean; - recursive?: boolean; - maxEntries: number; - timeBudgetMs: number; - batchSize: number; -}; - -const yieldImmediate = () => new Promise((resolve) => setImmediate(resolve)); - -async function listFiles(request: ListWorkerRequest): Promise { - const items: FsListItem[] = []; - const stack: string[] = ['.']; - const start = Date.now(); - const deadline = start + request.timeBudgetMs; - let truncated = false; - let reason: 'maxEntries' | 'timeBudget' | undefined; - let visited = 0; - - while (stack.length > 0) { - if (items.length >= request.maxEntries) { - truncated = true; - reason = 'maxEntries'; - break; - } - if (Date.now() >= deadline) { - truncated = true; - reason = 'timeBudget'; - break; - } - - const rel = stack.pop() as string; - const abs = path.join(request.root, rel); - - const stat = safeStat(abs); - if (!stat) continue; - - if (stat.isDirectory()) { - const name = path.basename(abs); - if (rel !== '.' && DEFAULT_IGNORES.has(name)) continue; - - if (rel !== '.' 
&& request.includeDirs) { - items.push({ path: rel, type: 'dir' }); - if (items.length >= request.maxEntries) { - truncated = true; - reason = 'maxEntries'; - break; - } - } - - // If not recursive and we are deeper than root, don't scan children - if (request.recursive === false && rel !== '.') { - continue; - } - - let entries: string[] = []; - try { - entries = fs.readdirSync(abs); - } catch { - continue; - } - - for (let i = entries.length - 1; i >= 0; i--) { - const entry = entries[i]; - if (DEFAULT_IGNORES.has(entry)) continue; - const nextRel = rel === '.' ? entry : path.join(rel, entry); - stack.push(nextRel); - } - } else if (stat.isFile()) { - items.push({ path: rel, type: 'file' }); - if (items.length >= request.maxEntries) { - truncated = true; - reason = 'maxEntries'; - break; - } - } - - visited += 1; - if (visited % request.batchSize === 0) { - await yieldImmediate(); - } - } - - return { - taskId: request.taskId, - ok: true, - items, - truncated, - reason, - durationMs: Date.now() - start, - }; -} - -if (!parentPort) { - throw new Error('fsListWorker must be run as a worker thread'); -} - -parentPort.on('message', async (request: ListWorkerRequest) => { - try { - const result = await listFiles(request); - parentPort?.postMessage(result); - } catch (error) { - const message = error instanceof Error ? 
error.message : 'Unknown error'; - parentPort?.postMessage({ - taskId: request.taskId, - ok: false, - error: message, - }); - } -}); diff --git a/src/preload/index.ts b/src/preload/index.ts new file mode 100644 index 000000000..9fb51bc28 --- /dev/null +++ b/src/preload/index.ts @@ -0,0 +1,14 @@ +import { contextBridge, ipcRenderer } from 'electron'; + +// Expose protected methods that allow the renderer process to use +contextBridge.exposeInMainWorld('electronAPI', { + // Generic invoke for the typed RPC client (createRPCClient) + invoke: (channel: string, ...args: unknown[]) => ipcRenderer.invoke(channel, ...args), + // Generic event bridge for the typesafe event emitter (createEventEmitter) + eventSend: (channel: string, data: unknown) => ipcRenderer.send(channel, data), + eventOn: (channel: string, cb: (data: unknown) => void) => { + const wrapped = (_: Electron.IpcRendererEvent, data: unknown) => cb(data); + ipcRenderer.on(channel, wrapped); + return () => ipcRenderer.removeListener(channel, wrapped); + }, +}); diff --git a/src/renderer/App.tsx b/src/renderer/App.tsx index f7889a59b..e2f80431e 100644 --- a/src/renderer/App.tsx +++ b/src/renderer/App.tsx @@ -1,30 +1,48 @@ -import { ThemeProvider } from './components/ThemeProvider'; -import ErrorBoundary from './components/ErrorBoundary'; -import { WelcomeScreen } from './views/Welcome'; -import { Workspace } from './views/Workspace'; -import { useLocalStorage } from './hooks/useLocalStorage'; -import { FIRST_LAUNCH_KEY } from './constants/layout'; import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; -import { AppSettingsProvider } from './contexts/AppSettingsProvider'; -import { AppContextProvider } from './contexts/AppContextProvider'; -import { GithubContextProvider } from './contexts/GithubContextProvider'; -import { ProjectManagementProvider } from './contexts/ProjectManagementProvider'; -import { TaskManagementProvider } from './contexts/TaskManagementContext'; -import { ModalProvider } 
from './contexts/ModalProvider'; +import { useEffect } from 'react'; +import { PendingProjectsProvider } from './components/add-project-modal/pending-projects-provider'; +import ErrorBoundary from './components/error-boundary'; +import { RightSidebarProvider } from './components/ui/right-sidebar'; +import { TooltipProvider } from './components/ui/tooltip'; +import { AppContextProvider } from './core/app/AppContextProvider'; +import { AppSettingsProvider } from './core/app/AppSettingsProvider'; +import { ThemeProvider } from './core/app/ThemeProvider'; +import { AgentProvider } from './core/conversations/AgentProvider'; +import { ConversationDataProvider } from './core/conversations/conversation-data-provider'; +import { DependenciesProvider } from './core/dependencies-provider'; +import { GithubContextProvider } from './core/github-context-provider'; +import { IntegrationsProvider } from './core/integrations/integrations-provider'; +import { ModalProvider } from './core/modal/modal-provider'; +import { codeEditorPool } from './core/monaco/monaco-code-pool'; +import { diffEditorPool } from './core/monaco/monaco-diff-pool'; +import { ProjectBootstrapProvider } from './core/projects/project-bootstrap-provider'; +import { ProjectsDataProvider } from './core/projects/projects-data-provider'; +import { TerminalPoolProvider } from './core/pty/pty-pool-provider'; +import { SshConnectionProvider } from './core/ssh/ssh-connection-provider'; +import { TaskLifecycleProvider } from './core/tasks/task-lifecycle-provider'; +import { TasksDataProvider } from './core/tasks/tasks-data-provider'; +import { TerminalDataProvider } from './core/terminals/terminal-data-provider'; +import { WorkspaceLayoutContextProvider } from './core/view/layout-provider'; +import { WorkspaceViewProvider } from './core/view/provider'; +import { useLocalStorage } from './hooks/useLocalStorage'; +import { WelcomeScreen } from './views/welcome'; +import { Workspace } from './views/workspace'; + +export 
const FIRST_LAUNCH_KEY = 'emdash:first-launch:v1'; const queryClient = new QueryClient(); export function App() { - const [isFirstLaunch, setIsFirstLaunch] = useLocalStorage( - FIRST_LAUNCH_KEY, - true - ); + const [isFirstLaunch, setIsFirstLaunch] = useLocalStorage(FIRST_LAUNCH_KEY, true); - const renderContent = () => { - // Handle legacy string value '1' from old implementation - const isFirstLaunchBool = isFirstLaunch === true || isFirstLaunch === 1; + // Pre-warm Monaco off the critical path so the first file open is instant. + useEffect(() => { + codeEditorPool.init(0).catch(console.warn); + diffEditorPool.init(3).catch(console.warn); + }, []); - if (isFirstLaunchBool) { + const renderContent = () => { + if (isFirstLaunch) { return setIsFirstLaunch(false)} />; } return ; @@ -32,21 +50,49 @@ export function App() { return ( - - - - - + + + + + - - {renderContent()} - + + + + + + + + + + + + + + + + {renderContent()} + + + + + + + + + + + + + + + - - - - - + + + + + ); } diff --git a/src/renderer/components/AgentDropdown.tsx b/src/renderer/components/AgentDropdown.tsx deleted file mode 100644 index debddbbea..000000000 --- a/src/renderer/components/AgentDropdown.tsx +++ /dev/null @@ -1,57 +0,0 @@ -import React from 'react'; -import { Select, SelectTrigger, SelectValue, SelectContent, SelectItem } from './ui/select'; -import { type Agent } from '../types'; -import { agentConfig } from '../lib/agentConfig'; -import AgentLogo from './AgentLogo'; - -interface AgentDropdownProps { - value: Agent; - onChange: (agent: Agent) => void; - installedAgents: string[]; - disabledAgents?: string[]; - className?: string; -} - -export const AgentDropdown: React.FC = ({ - value, - onChange, - installedAgents, - disabledAgents = [], - className = '', -}) => { - const installedSet = new Set(installedAgents); - return ( - - ); -}; - -export default AgentDropdown; diff --git a/src/renderer/components/AgentInfoCard.tsx b/src/renderer/components/AgentInfoCard.tsx deleted file mode 
100644 index bc2a0ee03..000000000 --- a/src/renderer/components/AgentInfoCard.tsx +++ /dev/null @@ -1,254 +0,0 @@ -import React, { useEffect, useRef, useState } from 'react'; -import { type UiAgent } from '@/providers/meta'; -import { agentAssets } from '@/providers/assets'; -import AgentLogo from './AgentLogo'; -import { ArrowUpRight, Check, Copy } from 'lucide-react'; -import { Button } from './ui/button'; -import { getDocUrlForProvider, getInstallCommandForProvider } from '@shared/providers/registry'; - -export type AgentInfo = { - title: string; - description?: string; - knowledgeCutoff?: string; - hostingNote?: string; - image?: string; - installCommand?: string; -}; - -export const agentInfo: Record = { - codex: { - title: 'Codex', - description: - 'CLI that connects to OpenAI models for project-aware code assistance and terminal workflows.', - }, - claude: { - title: 'Claude Code', - description: - 'CLI that uses Anthropic Claude for code edits, explanations, and structured refactors in the terminal.', - }, - qwen: { - title: 'Qwen Code', - description: - "Command-line interface to Alibaba's Qwen Code models for coding assistance and code completion.", - }, - droid: { - title: 'Droid', - description: "Factory AI's agent CLI for running multi-step coding tasks from the terminal.", - }, - gemini: { - title: 'Gemini', - description: - 'CLI that uses Google Gemini models to assist with coding, reasoning, and command-line tasks.', - }, - cursor: { - title: 'Cursor', - description: - "Cursor's agent CLI; provides editor-style, project-aware assistance from the shell.", - }, - copilot: { - title: 'GitHub Copilot', - description: - 'GitHub Copilot CLI brings Copilot prompts to the terminal for code, shell, and search help.', - }, - amp: { - title: 'Amp', - description: - 'Amp Code CLI for agentic coding sessions against your repository from the terminal.', - }, - opencode: { - title: 'OpenCode', - description: - 'OpenCode CLI that interfaces with models for code 
generation and edits from the shell.', - }, - charm: { - title: 'Charm', - description: 'Charm Crush agent CLI providing terminal-first AI assistance for coding tasks.', - }, - auggie: { - title: 'Auggie', - description: - 'Augment Code CLI to run an agent against your repository for code changes and reviews.', - }, - goose: { - title: 'Goose', - description: 'Goose CLI that routes tasks to tools and models for coding workflows.', - }, - kimi: { - title: 'Kimi', - description: - 'Kimi CLI by Moonshot AI - a shell-like coding agent with raw shell execution, Zsh integration, ACP and MCP support (technical preview).', - hostingNote: 'macOS/Linux only; first run on macOS may take ~10s due to security checks.', - }, - kilocode: { - title: 'Kilocode', - description: - 'Kilo AI coding assistant with multiple modes (architect, code, debug, ask, orchestrator). Supports hundreds of models with bring-your-own-keys for OpenRouter and AI gateways. Features keyboard-first navigation and checkpoint management.', - }, - kiro: { - title: 'Kiro', - description: - 'Kiro CLI by Amazon Web Services - interactive, terminal-first AI development assistant with MCP integrations and workflow automation.', - }, - rovo: { - title: 'Rovo Dev', - description: - "Atlassian's Rovo Dev CLI brings an AI assistant to your terminal, integrated with Jira, Confluence, and Bitbucket via the Atlassian Command Line Interface (ACLI).", - }, - cline: { - title: 'Cline', - description: - 'Cline CLI runs AI coding agents directly in your terminal. Supports multiple model providers, runs multiple instances simultaneously for parallel development, and integrates into existing shell workflows.', - }, - continue: { - title: 'Continue', - description: - 'Continue CLI (cn) is a modular coding agent for the command line. 
Features battle-tested agent loop, customizable models and rules, MCP tool support, and both interactive and headless modes for automation.', - }, - codebuff: { - title: 'Codebuff', - description: - 'Codebuff is an AI coding agent that helps you with coding tasks. Install globally and start using it in your project directory to get AI-powered coding assistance.', - }, - mistral: { - title: 'Mistral Vibe', - description: - 'Mistral AI command-line coding assistant powered by Devstral. Provides conversational interface to your codebase with file manipulation, code search, version control, and execution tools.', - }, - pi: { - title: 'Pi', - description: - 'Minimal terminal coding agent by Mario Zechner. Supports 15+ providers and hundreds of models, tree-structured sessions, and TypeScript extensions for custom tools and sub-agents.', - }, - autohand: { - title: 'Autohand Code', - description: - 'Terminal coding agent with auto-commit, dry-run previews, community skills, and headless CI/CD mode. Supports multiple LLM providers and unrestricted auto-approve for hands-free operation.', - }, -}; - -type Props = { - id: UiAgent; -}; - -export const AgentInfoCard: React.FC = ({ id }) => { - const info = agentInfo[id]; - const asset = agentAssets[id]; - const logo = asset.logo; - const brand = asset.name; - const installCommand = - info.installCommand ?? getInstallCommandForProvider(id) ?? 
'npm install -g @openai/codex'; - const [copied, setCopied] = useState(false); - const copyResetRef = useRef(null); - - useEffect(() => { - return () => { - if (copyResetRef.current !== null) { - window.clearTimeout(copyResetRef.current); - } - }; - }, []); - - const handleCopyClick = async () => { - if (typeof navigator === 'undefined' || !navigator.clipboard) { - return; - } - const { clipboard } = navigator; - if (typeof clipboard.writeText !== 'function') { - return; - } - try { - await clipboard.writeText(installCommand); - setCopied(true); - if (copyResetRef.current !== null) { - window.clearTimeout(copyResetRef.current); - } - copyResetRef.current = window.setTimeout(() => { - setCopied(false); - copyResetRef.current = null; - }, 2000); - } catch (error) { - console.error('Failed to copy install command', error); - setCopied(false); - } - }; - - const CopyIndicatorIcon = copied ? Check : Copy; - return ( -
-
- {logo ? ( - - ) : null} -
- {brand} - / - {info.title} -
-
- {info.description ? ( -

{info.description}

- ) : null} - {getDocUrlForProvider(id) ? ( - - ) : null} - -
- - {installCommand} - - -
- {info.knowledgeCutoff || info.hostingNote ? ( -
- {info.knowledgeCutoff ? ( -
- Knowledge cutoff: {info.knowledgeCutoff} -
- ) : null} - {info.hostingNote ? ( -
{info.hostingNote}
- ) : null} -
- ) : null} -
- ); -}; - -export default AgentInfoCard; diff --git a/src/renderer/components/AgentRow.tsx b/src/renderer/components/AgentRow.tsx deleted file mode 100644 index 78f86c855..000000000 --- a/src/renderer/components/AgentRow.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import React from 'react'; -import type { LucideIcon } from 'lucide-react'; - -interface AgentRowProps { - icon: LucideIcon; - label: string; - detail?: string | null; - middle: React.ReactNode; - right: React.ReactNode; -} - -const AgentRow: React.FC = ({ icon: Icon, label, detail, middle, right }) => { - return ( -
-
- - -
-

{label}

- {detail ?

{detail}

: null} -
-
-
{middle}
-
{right}
-
- ); -}; - -export default AgentRow; diff --git a/src/renderer/components/AgentSelector.tsx b/src/renderer/components/AgentSelector.tsx deleted file mode 100644 index 4aee42ef0..000000000 --- a/src/renderer/components/AgentSelector.tsx +++ /dev/null @@ -1,157 +0,0 @@ -import React, { useState } from 'react'; -import { Select, SelectTrigger, SelectValue, SelectContent, SelectItem } from './ui/select'; -import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from './ui/tooltip'; -import { AgentInfoCard } from './AgentInfoCard'; -import RoutingInfoCard from './RoutingInfoCard'; -import { Workflow } from 'lucide-react'; -import { Badge } from './ui/badge'; -import type { UiAgent } from '@/providers/meta'; -import { type Agent } from '../types'; -import { agentConfig } from '../lib/agentConfig'; -import AgentLogo from './AgentLogo'; - -interface AgentSelectorProps { - value: Agent; - onChange: (agent: Agent) => void; - disabled?: boolean; - className?: string; -} - -export const AgentSelector: React.FC = ({ - value, - onChange, - disabled = false, - className = '', -}) => { - return ( -
- -
- ); -}; - -const TooltipRow: React.FC<{ id: UiAgent; children: React.ReactElement }> = ({ id, children }) => { - const [open, setOpen] = useState(false); - return ( - - - {React.cloneElement(children, { - onMouseEnter: () => setOpen(true), - onMouseLeave: () => setOpen(false), - onPointerEnter: () => setOpen(true), - onPointerLeave: () => setOpen(false), - })} - - setOpen(true)} - onMouseLeave={() => setOpen(false)} - onPointerEnter={() => setOpen(true)} - onPointerLeave={() => setOpen(false)} - > - - - - ); -}; - -export default AgentSelector; - -export const RoutingTooltipRow: React.FC<{ children: React.ReactElement }> = ({ children }) => { - const [open, setOpen] = useState(false); - return ( - - - {React.cloneElement(children, { - onMouseEnter: () => setOpen(true), - onMouseLeave: () => setOpen(false), - onPointerEnter: () => setOpen(true), - onPointerLeave: () => setOpen(false), - })} - - setOpen(true)} - onMouseLeave={() => setOpen(false)} - onPointerEnter={() => setOpen(true)} - onPointerLeave={() => setOpen(false)} - > - - - - ); -}; diff --git a/src/renderer/components/AppKeyboardShortcuts.tsx b/src/renderer/components/AppKeyboardShortcuts.tsx deleted file mode 100644 index c30153f7d..000000000 --- a/src/renderer/components/AppKeyboardShortcuts.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import React from 'react'; -import { useSidebar } from '../components/ui/sidebar'; -import { useRightSidebar } from '../components/ui/right-sidebar'; -import { useTheme } from '../hooks/useTheme'; -import { useKeyboardShortcuts } from '../hooks/useKeyboardShortcuts'; -import { useKeyboardSettings } from '../contexts/KeyboardSettingsContext'; -import { useTaskManagementContext } from '../contexts/TaskManagementContext'; - -export interface AppKeyboardShortcutsProps { - showCommandPalette: boolean; - showSettings: boolean; - handleToggleCommandPalette: () => void; - handleOpenSettings: () => void; - handleCloseCommandPalette: () => void; - handleCloseSettings: () => void; - 
handleToggleKanban: () => void; - handleToggleEditor: () => void; - handleOpenInEditor: () => void; -} - -const AppKeyboardShortcuts: React.FC = ({ - showCommandPalette, - showSettings, - handleToggleCommandPalette, - handleOpenSettings, - handleCloseCommandPalette, - handleCloseSettings, - handleToggleKanban, - handleToggleEditor, - handleOpenInEditor, -}) => { - const { toggle: toggleLeftSidebar } = useSidebar(); - const { toggle: toggleRightSidebar } = useRightSidebar(); - const { toggleTheme } = useTheme(); - const { settings: keyboardSettings } = useKeyboardSettings(); - const { handleNextTask, handlePrevTask, handleNewTask } = useTaskManagementContext(); - - useKeyboardShortcuts({ - onToggleCommandPalette: handleToggleCommandPalette, - onOpenSettings: handleOpenSettings, - onToggleLeftSidebar: toggleLeftSidebar, - onToggleRightSidebar: toggleRightSidebar, - onToggleTheme: toggleTheme, - onToggleKanban: handleToggleKanban, - onToggleEditor: handleToggleEditor, - onNextProject: handleNextTask, - onPrevProject: handlePrevTask, - onNewTask: handleNewTask, - onNextAgent: () => - window.dispatchEvent( - new CustomEvent('emdash:switch-agent', { detail: { direction: 'next' } }) - ), - onPrevAgent: () => - window.dispatchEvent( - new CustomEvent('emdash:switch-agent', { detail: { direction: 'prev' } }) - ), - onOpenInEditor: handleOpenInEditor, - onCloseModal: showCommandPalette - ? handleCloseCommandPalette - : showSettings - ? handleCloseSettings - : undefined, - isCommandPaletteOpen: showCommandPalette, - isSettingsOpen: showSettings, - customKeyboardSettings: keyboardSettings ?? 
undefined, - }); - - return null; -}; - -export default AppKeyboardShortcuts; diff --git a/src/renderer/components/BaseBranchControls.tsx b/src/renderer/components/BaseBranchControls.tsx deleted file mode 100644 index fe7bedecd..000000000 --- a/src/renderer/components/BaseBranchControls.tsx +++ /dev/null @@ -1,56 +0,0 @@ -import React from 'react'; -import BranchSelect, { type BranchOption } from './BranchSelect'; -import { Button } from './ui/button'; -import { GitBranch, Settings2 } from 'lucide-react'; - -interface BaseBranchControlsProps { - baseBranch?: string; - branchOptions: BranchOption[]; - isLoadingBranches: boolean; - isSavingBaseBranch: boolean; - onBaseBranchChange: (value: string) => void; - onOpenConfig?: () => void; -} - -const BaseBranchControls: React.FC = ({ - baseBranch, - branchOptions, - isLoadingBranches, - isSavingBaseBranch, - onBaseBranchChange, - onOpenConfig, -}) => { - const placeholder = isLoadingBranches - ? 'Loading...' - : branchOptions.length === 0 - ? 'No branches found' - : 'Select a base branch'; - - return ( -
- } - /> - -
- ); -}; - -export default BaseBranchControls; diff --git a/src/renderer/components/BranchSelect.tsx b/src/renderer/components/BranchSelect.tsx deleted file mode 100644 index 3a71fa1e1..000000000 --- a/src/renderer/components/BranchSelect.tsx +++ /dev/null @@ -1,174 +0,0 @@ -import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'; -import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from './ui/select'; -import { ScrollArea } from './ui/scroll-area'; -import { Input } from './ui/input'; - -export type BranchOption = { - value: string; - label: string; -}; - -/** - * Pick best default: preferredValue if valid, else origin/main > main > first option. - */ -export function pickDefaultBranch( - options: BranchOption[], - preferredValue?: string -): string | undefined { - if (options.length === 0) return undefined; - - if (preferredValue && options.some((opt) => opt.value === preferredValue)) { - return preferredValue; - } - - const defaults = ['origin/main', 'main', 'origin/master', 'master']; - for (const branch of defaults) { - if (options.some((opt) => opt.value === branch)) return branch; - } - - return options[0].value; -} - -type BranchSelectVariant = 'default' | 'ghost'; - -interface BranchSelectProps { - value?: string; - onValueChange: (value: string) => void; - options: BranchOption[]; - disabled?: boolean; - isLoading?: boolean; - placeholder?: string; - variant?: BranchSelectVariant; - onOpenChange?: (open: boolean) => void; - icon?: React.ReactNode; -} - -const ROW_HEIGHT = 32; -const MAX_LIST_HEIGHT = 256; -const EMPTY_BRANCH_VALUE = '__branch_select_empty__'; - -const BranchSelect: React.FC = ({ - value, - onValueChange, - options, - disabled = false, - isLoading = false, - placeholder, - variant = 'default', - onOpenChange, - icon, -}) => { - const [open, setOpen] = useState(false); - const [searchTerm, setSearchTerm] = useState(''); - const searchInputRef = useRef(null); - - const navigationKeys = useMemo( - 
() => new Set(['ArrowUp', 'ArrowDown', 'PageUp', 'PageDown', 'Home', 'End', 'Enter', 'Escape']), - [] - ); - - const filteredOptions = useMemo(() => { - if (!searchTerm.trim()) return options; - const query = searchTerm.trim().toLowerCase(); - return options.filter((option) => option.label.toLowerCase().includes(query)); - }, [options, searchTerm]); - - const displayedOptions = useMemo(() => { - if (!value) return filteredOptions; - const hasSelection = filteredOptions.some((option) => option.value === value); - if (hasSelection) return filteredOptions; - const selectedOption = options.find((option) => option.value === value); - if (!selectedOption) return filteredOptions; - return [selectedOption, ...filteredOptions]; - }, [filteredOptions, options, value]); - - const estimatedListHeight = Math.min( - MAX_LIST_HEIGHT, - Math.max(displayedOptions.length, 1) * ROW_HEIGHT - ); - - // Focus search input when dropdown opens - useEffect(() => { - if (open) { - requestAnimationFrame(() => searchInputRef.current?.focus()); - } else { - setSearchTerm(''); - } - }, [open]); - - const handleOpenChange = useCallback( - (nextOpen: boolean) => { - setOpen(nextOpen); - onOpenChange?.(nextOpen); - }, - [onOpenChange] - ); - - const defaultPlaceholder = isLoading ? 'Loading...' : 'Select branch'; - const triggerPlaceholder = placeholder ?? defaultPlaceholder; - const hasKnownSelection = Boolean(value && options.some((option) => option.value === value)); - const selectedValue = hasKnownSelection ? (value as string) : EMPTY_BRANCH_VALUE; - - const triggerClassName = - variant === 'ghost' - ? 
'h-auto border-none bg-transparent p-0 text-xs text-muted-foreground shadow-none hover:text-foreground focus:ring-0 [&>svg]:ml-0.5 [&>svg]:h-3 [&>svg]:w-3' - : 'h-8 w-full gap-2 px-3 text-xs font-medium shadow-none sm:w-auto'; - - return ( - setSearchTerm(event.currentTarget.value)} - onKeyDown={(event) => { - if (!navigationKeys.has(event.key)) { - event.stopPropagation(); - } - }} - placeholder="Search branches" - className="bg-popover px-2 py-1 text-sm" - /> - - -
- {displayedOptions.length > 0 ? ( - displayedOptions.map((option) => ( - - {option.label} - - )) - ) : ( -
No matching branches
- )} -
-
- - - ); -}; - -export default BranchSelect; diff --git a/src/renderer/components/BrowserPane.tsx b/src/renderer/components/BrowserPane.tsx deleted file mode 100644 index 0255740aa..000000000 --- a/src/renderer/components/BrowserPane.tsx +++ /dev/null @@ -1,597 +0,0 @@ -import React from 'react'; -import { X, ArrowLeft, ArrowRight, ExternalLink, RotateCw } from 'lucide-react'; -import { useBrowser } from '@/providers/BrowserProvider'; -import { cn } from '@/lib/utils'; -import { Input } from './ui/input'; -import { Spinner } from './ui/spinner'; -import { Button } from './ui/button'; -import { setLastUrl, setRunning } from '@/lib/previewStorage'; -import { PROBE_TIMEOUT_MS, SPINNER_MAX_MS, isAppPort } from '@/lib/previewNetwork'; - -const clamp = (n: number, min: number, max: number) => Math.max(min, Math.min(max, n)); - -const HANDLE_PX = 6; -const BOUNDS_CHANGE_THRESHOLD = 2; -const HIDE_DEBOUNCE_MS = 50; -const URL_LOAD_DELAY_MS = 50; -const BOUNDS_UPDATE_DELAY_MS = 100; -const PROBE_RETRY_DELAY_MS = 500; -const MAX_LOG_LINES = 8; -const WIDTH_PCT_MIN = 5; -const WIDTH_PCT_MAX = 96; -const DEFAULT_PREVIEW_URLS = [ - 'http://localhost:5173', - 'http://localhost:3000', - 'http://localhost:8080', -]; - -const BrowserPane: React.FC<{ - taskId?: string | null; - taskPath?: string | null; - overlayActive?: boolean; -}> = ({ taskId, overlayActive = false }) => { - const { - isOpen, - url, - widthPct, - setWidthPct, - close, - navigate, - clearUrl, - busy, - showSpinner, - hideSpinner, - } = useBrowser(); - const [address, setAddress] = React.useState(''); - const [lines, setLines] = React.useState([]); - const [dragging, setDragging] = React.useState(false); - const widthPctRef = React.useRef(widthPct); - React.useEffect(() => { - widthPctRef.current = widthPct; - }, [widthPct]); - const [failed, setFailed] = React.useState(false); - const [overlayRaised, setOverlayRaised] = React.useState(false); - const paneVisible = isOpen && !overlayActive && !overlayRaised; - - // 
Listen for global overlay events (e.g., feedback modal) and hide preview when active - React.useEffect(() => { - const onOverlay = (e: any) => { - try { - setOverlayRaised(Boolean(e?.detail?.open)); - } catch {} - }; - window.addEventListener('emdash:overlay:changed', onOverlay as any); - return () => window.removeEventListener('emdash:overlay:changed', onOverlay as any); - }, []); - - React.useEffect(() => { - if (typeof url === 'string') setAddress(url); - }, [url]); - - const prevTaskIdRef = React.useRef(null); - const lastTaskUrlRef = React.useRef(null); - React.useEffect(() => { - const prev = prevTaskIdRef.current; - const cur = (taskId || '').trim() || null; - - if (prev && cur && prev !== cur) { - try { - // Clear and hide browser view immediately when switching worktrees - (window as any).electronAPI?.browserClear?.(); - (window as any).electronAPI?.browserHide?.(); - setRunning(prev, false); - // Reset task URL tracking to force reload - lastTaskUrlRef.current = null; - } catch {} - } - - try { - // Stop all other preview servers except the new current (if any) - (window as any).electronAPI?.hostPreviewStopAll?.(cur || ''); - } catch {} - - if (prev !== cur) { - try { - clearUrl(); - hideSpinner(); - setFailed(false); - setLines([]); - } catch {} - } - - prevTaskIdRef.current = cur; - }, [taskId, clearUrl, hideSpinner]); - - React.useEffect(() => { - const off = (window as any).electronAPI?.onHostPreviewEvent?.((data: any) => { - try { - if (!data || !taskId || data.taskId !== taskId) return; - if (data.type === 'setup') { - if (data.status === 'line' && data.line) { - setLines((prev) => { - const next = [...prev, String(data.line).trim()].slice(-MAX_LOG_LINES); - return next; - }); - } - if (data.status === 'error') { - hideSpinner(); - } - } - if (data.type === 'url' && data.url) { - // CRITICAL: Only process URL events for the current taskId - // This ensures we don't load URLs from other worktrees - if (!taskId || data.taskId !== taskId) { - return; - 
} - setFailed(false); - const appPort = Number(window.location.port || 0); - if (isAppPort(String(data.url), appPort)) return; - showSpinner(); - navigate(String(data.url)); - try { - setLastUrl(String(taskId), String(data.url)); - } catch {} - } - if (data.type === 'exit') { - try { - setRunning(String(taskId), false); - } catch {} - hideSpinner(); - } - } catch {} - }); - return () => { - try { - off?.(); - } catch {} - }; - }, [taskId, navigate, showSpinner, hideSpinner]); - - // Verify URL reachability with TCP probe (30s grace window for slow compilers) - React.useEffect(() => { - let cancelled = false; - const urlString = (url || '').trim(); - if (!urlString) { - setFailed(false); - return; - } - (async () => { - try { - const parsed = new URL(urlString); - const host = parsed.hostname || 'localhost'; - const port = Number(parsed.port || 0); - if (!port) { - setFailed(false); - return; - } - const deadline = Date.now() + SPINNER_MAX_MS; - let isReachable = false; - while (!cancelled && Date.now() < deadline) { - try { - const res = await (window as any).electronAPI?.netProbePorts?.( - host, - [port], - PROBE_TIMEOUT_MS - ); - isReachable = !!(res && Array.isArray(res.reachable) && res.reachable.length > 0); - if (isReachable) break; - } catch {} - await new Promise((r) => setTimeout(r, PROBE_RETRY_DELAY_MS)); - } - if (!cancelled) { - if (isReachable) { - hideSpinner(); - } else { - setFailed(true); - } - } - } catch { - if (!cancelled) { - setFailed(true); - } - } - })(); - return () => { - cancelled = true; - }; - }, [url, showSpinner, hideSpinner]); - - const handleRetry = React.useCallback(() => { - if (!url) return; - showSpinner(); - try { - (window as any).electronAPI?.browserReload?.(); - } catch {} - }, [url, showSpinner]); - - // Browser view is managed in main process (WebContentsView) via IPC - // This component reports bounds and coordinates navigation - const containerRef = React.useRef(null); - const computeBounds = React.useCallback(() => { - 
const el = containerRef.current; - if (!el) return null; - const rect = el.getBoundingClientRect(); - const x = Math.round(rect.left + HANDLE_PX); - const y = Math.round(rect.top); - const w = Math.max(1, Math.round(rect.width - HANDLE_PX)); - const h = Math.max(1, Math.round(rect.height)); - return { x, y, width: w, height: h }; - }, []); - - const lastBoundsRef = React.useRef<{ - x: number; - y: number; - width: number; - height: number; - } | null>(null); - - const hasBoundsChanged = React.useCallback( - (newBounds: { x: number; y: number; width: number; height: number }) => { - if (!lastBoundsRef.current) return true; - const old = lastBoundsRef.current; - return ( - Math.abs(old.x - newBounds.x) > BOUNDS_CHANGE_THRESHOLD || - Math.abs(old.y - newBounds.y) > BOUNDS_CHANGE_THRESHOLD || - Math.abs(old.width - newBounds.width) > BOUNDS_CHANGE_THRESHOLD || - Math.abs(old.height - newBounds.height) > BOUNDS_CHANGE_THRESHOLD - ); - }, - [] - ); - - const visibilityTimeoutRef = React.useRef(null); - React.useEffect(() => { - if (visibilityTimeoutRef.current) { - clearTimeout(visibilityTimeoutRef.current); - visibilityTimeoutRef.current = null; - } - - const shouldShow = paneVisible && !!url && !!taskId; - - if (!shouldShow) { - visibilityTimeoutRef.current = setTimeout(() => { - try { - (window as any).electronAPI?.browserHide?.(); - lastBoundsRef.current = null; - } catch {} - visibilityTimeoutRef.current = null; - }, HIDE_DEBOUNCE_MS); - return; - } - - requestAnimationFrame(() => { - const bounds = computeBounds(); - if (bounds && bounds.width > 0 && bounds.height > 0) { - if (hasBoundsChanged(bounds)) { - lastBoundsRef.current = bounds; - try { - (window as any).electronAPI?.browserShow?.(bounds, url || undefined); - setTimeout(() => { - const updatedBounds = computeBounds(); - if (updatedBounds && updatedBounds.width > 0 && updatedBounds.height > 0) { - if (hasBoundsChanged(updatedBounds)) { - lastBoundsRef.current = updatedBounds; - try { - (window as 
any).electronAPI?.browserSetBounds?.(updatedBounds); - } catch {} - } - } - }, BOUNDS_UPDATE_DELAY_MS); - } catch {} - } - } - }); - - const onResize = () => { - const bounds = computeBounds(); - if (bounds && shouldShow && bounds.width > 0 && bounds.height > 0) { - if (hasBoundsChanged(bounds)) { - lastBoundsRef.current = bounds; - try { - (window as any).electronAPI?.browserSetBounds?.(bounds); - } catch {} - } - } - }; - window.addEventListener('resize', onResize); - const ResizeObserverClass = (window as any).ResizeObserver; - const resizeObserver = ResizeObserverClass ? new ResizeObserverClass(() => onResize()) : null; - if (resizeObserver && containerRef.current) resizeObserver.observe(containerRef.current); - - return () => { - if (visibilityTimeoutRef.current) { - clearTimeout(visibilityTimeoutRef.current); - visibilityTimeoutRef.current = null; - } - try { - (window as any).electronAPI?.browserHide?.(); - } catch {} - window.removeEventListener('resize', onResize); - try { - resizeObserver?.disconnect?.(); - } catch {} - }; - }, [paneVisible, url, computeBounds, hasBoundsChanged, taskId]); - - React.useEffect(() => { - if (isOpen && !url) setAddress(''); - }, [isOpen, url]); - - const lastUrlRef = React.useRef(null); - const lastTaskIdRef2 = React.useRef(null); - React.useEffect(() => { - if (taskId !== lastTaskIdRef2.current) { - lastUrlRef.current = null; - lastTaskIdRef2.current = taskId || null; - lastTaskUrlRef.current = null; - } - - if (paneVisible && url && taskId) { - const taskUrlKey = `${taskId}:${url}`; - // Force reload if task changed or URL changed - const isTaskChange = lastTaskUrlRef.current === null; - if (lastTaskUrlRef.current !== taskUrlKey || lastUrlRef.current !== url) { - lastUrlRef.current = url; - lastTaskUrlRef.current = taskUrlKey; - - try { - (window as any).electronAPI?.browserClear?.(); - } catch {} - - const timeoutId = setTimeout(() => { - try { - // Force reload when task changes to ensure fresh content - (window as 
any).electronAPI?.browserLoadURL?.(url, isTaskChange); - } catch {} - }, URL_LOAD_DELAY_MS); - return () => clearTimeout(timeoutId); - } - } - }, [paneVisible, url, taskId]); - - React.useEffect(() => { - let dragging = false; - let pointerId: number | null = null; - let startX = 0; - let startPct = widthPctRef.current; - const handle = document.getElementById('emdash-browser-drag'); - if (!handle) return; - - const onPointerDown = (e: PointerEvent) => { - dragging = true; - pointerId = e.pointerId; - try { - (e.target as Element).setPointerCapture?.(e.pointerId); - } catch {} - setDragging(true); - startX = e.clientX; - startPct = widthPctRef.current; - document.body.style.cursor = 'col-resize'; - e.preventDefault(); - }; - const onPointerMove = (e: PointerEvent) => { - if (!dragging) return; - const dx = startX - e.clientX; - const viewportWidth = Math.max(1, window.innerWidth); - const deltaPct = (dx / viewportWidth) * 100; - setWidthPct(clamp(startPct + deltaPct, WIDTH_PCT_MIN, WIDTH_PCT_MAX)); - e.preventDefault(); - }; - const onPointerUp = (e: PointerEvent) => { - if (!dragging) return; - dragging = false; - try { - if (pointerId != null) handle.releasePointerCapture?.(pointerId); - } catch {} - pointerId = null; - setDragging(false); - document.body.style.cursor = ''; - e.preventDefault(); - }; - - handle.addEventListener('pointerdown', onPointerDown); - window.addEventListener('pointermove', onPointerMove, { passive: false }); - window.addEventListener('pointerup', onPointerUp, { passive: false }); - return () => { - handle.removeEventListener('pointerdown', onPointerDown); - window.removeEventListener('pointermove', onPointerMove as any); - window.removeEventListener('pointerup', onPointerUp as any); - setDragging(false); - document.body.style.cursor = ''; - }; - }, [setWidthPct]); - - const { goBack, goForward } = useBrowser(); - - const handleRefresh = React.useCallback(() => { - if (!url) return; - try { - // Clear and reload to force fresh content - 
(window as any).electronAPI?.browserClear?.(); - setTimeout(() => { - try { - (window as any).electronAPI?.browserLoadURL?.(url, true); - } catch {} - }, 100); - } catch {} - }, [url]); - - const handleClose = React.useCallback(async () => { - void import('../lib/telemetryClient').then(({ captureTelemetry }) => { - captureTelemetry('browser_preview_closed'); - }); - try { - const id = (taskId || '').trim(); - if (id) (window as any).electronAPI?.hostPreviewStop?.(id); - } catch {} - try { - (window as any).electronAPI?.browserHide?.(); - } catch {} - try { - clearUrl(); - } catch {} - setFailed(false); - close(); - }, [taskId, clearUrl, close]); - - return ( -
-
-
- - - {url && ( - - )} -
{ - e.preventDefault(); - let next = address.trim(); - if (!/^https?:\/\//i.test(next)) next = `http://${next}`; - navigate(next); - }} - > - setAddress(e.target.value)} - placeholder="Enter URL (e.g. http://localhost:5173)" - /> -
- {!url ? ( -
- {DEFAULT_PREVIEW_URLS.map((previewUrl) => ( - - ))} -
- ) : null} - - -
- {!busy && url && lines.length > 0 && ( -
- Task Preview -
- {lines.length ? ( - {lines[lines.length - 1]} - ) : null} -
-
- )} - -
-
-
- {dragging ? ( -
- ) : null} - {busy && !url ? ( -
-
- -
-
Loading preview…
-
Starting dev server
-
-
-
- ) : null} - {url && failed && !busy ? ( -
-
-
-
Preview unavailable
-
- Server at {url} is not reachable -
-
- -
-
- ) : null} -
-
-
- ); -}; - -export default BrowserPane; diff --git a/src/renderer/components/BrowserPreviewSettingsCard.tsx b/src/renderer/components/BrowserPreviewSettingsCard.tsx deleted file mode 100644 index 218884a03..000000000 --- a/src/renderer/components/BrowserPreviewSettingsCard.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import React from 'react'; -import { Switch } from './ui/switch'; -import { useAppSettings } from '@/contexts/AppSettingsProvider'; - -export default function BrowserPreviewSettingsCard() { - const { settings, updateSettings, isLoading, isSaving } = useAppSettings(); - - return ( -
-
- Show localhost links in browser - - Preview UI changes using the built-in browser view. - -
- updateSettings({ browserPreview: { enabled: next } })} - /> -
- ); -} diff --git a/src/renderer/components/ChatInterface.tsx b/src/renderer/components/ChatInterface.tsx deleted file mode 100644 index 1a13bccf6..000000000 --- a/src/renderer/components/ChatInterface.tsx +++ /dev/null @@ -1,1160 +0,0 @@ -import React, { useEffect, useState, useMemo, useCallback, useRef } from 'react'; -import { Plus, X } from 'lucide-react'; -import { useToast } from '../hooks/use-toast'; -import { useTheme } from '../hooks/useTheme'; -import { TerminalPane } from './TerminalPane'; -import InstallBanner from './InstallBanner'; -import { cn } from '@/lib/utils'; -import { agentMeta } from '../providers/meta'; -import { agentConfig } from '../lib/agentConfig'; -import AgentLogo from './AgentLogo'; -import TaskContextBadges from './TaskContextBadges'; -import { Spinner } from './ui/spinner'; -import { useInitialPromptInjection } from '../hooks/useInitialPromptInjection'; -import { useTaskComments } from '../hooks/useLineComments'; -import { type Agent } from '../types'; -import { Task } from '../types/chat'; -import { useTaskTerminals } from '@/lib/taskTerminalsStore'; -import { activityStore } from '@/lib/activityStore'; -import { rpc } from '@/lib/rpc'; -import { getInstallCommandForProvider } from '@shared/providers/registry'; -import { useAutoScrollOnTaskSwitch } from '@/hooks/useAutoScrollOnTaskSwitch'; -import { TaskScopeProvider } from './TaskScopeContext'; -import { CreateChatModal } from './CreateChatModal'; -import { type Conversation } from '../../main/services/DatabaseService'; -import { terminalSessionRegistry } from '../terminal/SessionRegistry'; -import { getTaskEnvVars } from '@shared/task/envVars'; -import { makePtyId } from '@shared/ptyId'; -import { generateTaskName } from '../lib/branchNameGenerator'; -import { ensureUniqueTaskName } from '../lib/taskNames'; -import type { Project } from '../types/app'; - -declare const window: Window & { - electronAPI: { - saveMessage: (message: any) => Promise<{ success: boolean; error?: 
string }>; - }; -}; - -interface Props { - task: Task; - project?: Project | null; - projectName: string; - projectPath?: string | null; - projectRemoteConnectionId?: string | null; - projectRemotePath?: string | null; - defaultBranch?: string | null; - className?: string; - initialAgent?: Agent; - onTaskInterfaceReady?: () => void; - onRenameTask?: (project: Project, task: Task, newName: string) => Promise; -} - -const ChatInterface: React.FC = ({ - task, - project, - projectName: _projectName, - projectPath, - projectRemoteConnectionId, - projectRemotePath: _projectRemotePath, - defaultBranch, - className, - initialAgent, - onTaskInterfaceReady, - onRenameTask, -}) => { - const { effectiveTheme } = useTheme(); - const { toast } = useToast(); - const [isAgentInstalled, setIsAgentInstalled] = useState(null); - const [agentStatuses, setAgentStatuses] = useState< - Record - >({}); - const [agent, setAgent] = useState(initialAgent || 'claude'); - const currentAgentStatus = agentStatuses[agent]; - const [cliStartError, setCliStartError] = useState(null); - - // Multi-chat state - const [conversations, setConversations] = useState([]); - const [activeConversationId, setActiveConversationId] = useState(null); - const [conversationsLoaded, setConversationsLoaded] = useState(false); - const [showCreateChatModal, setShowCreateChatModal] = useState(false); - const [busyByConversationId, setBusyByConversationId] = useState>({}); - const tabsContainerRef = useRef(null); - const [tabsOverflow, setTabsOverflow] = useState(false); - - const mainConversationId = useMemo( - () => conversations.find((c) => c.isMain)?.id ?? 
null, - [conversations] - ); - - // Update terminal ID to include conversation ID and agent - unique per conversation - const terminalId = useMemo(() => { - // Find the active conversation to check if it's the main one - const activeConversation = conversations.find((c) => c.id === activeConversationId); - - if (activeConversation?.isMain) { - // Main conversations use task-based ID for backward compatibility - // This ensures terminal sessions persist correctly - return makePtyId(agent, 'main', task.id); - } else if (activeConversationId) { - // Additional conversations use conversation-specific ID - return makePtyId(agent, 'chat', activeConversationId); - } - // Fallback to main format if no active conversation - return makePtyId(agent, 'main', task.id); - }, [activeConversationId, agent, task.id, conversations]); - - // Claude needs consistent working directory to maintain session state - const terminalCwd = useMemo(() => { - return task.path; - }, [task.path]); - - const taskEnv = useMemo(() => { - if (!projectPath) return undefined; - return getTaskEnvVars({ - taskId: task.id, - taskName: task.name, - taskPath: task.path, - projectPath, - defaultBranch: defaultBranch || undefined, - }); - }, [task.id, task.name, task.path, projectPath, defaultBranch]); - - const installedAgents = useMemo( - () => - Object.entries(agentStatuses) - .filter(([, status]) => status.installed === true) - .map(([id]) => id), - [agentStatuses] - ); - const sortedConversations = useMemo( - () => - [...conversations].sort((a, b) => { - // Sort by display order or creation time to maintain consistent order - if (a.displayOrder !== undefined && b.displayOrder !== undefined) { - return a.displayOrder - b.displayOrder; - } - return new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime(); - }), - [conversations] - ); - - const { activeTerminalId } = useTaskTerminals(task.id, task.path); - - // Line comments for agent context injection - const { formatted: commentsContext } = 
useTaskComments(task.id); - - // Auto-scroll to bottom when this task becomes active - useAutoScrollOnTaskSwitch(true, task.id); - - const readySignaledTaskIdRef = useRef(null); - useEffect(() => { - const el = tabsContainerRef.current; - if (!el) return; - const check = () => setTabsOverflow(el.scrollWidth > el.clientWidth); - check(); - const ro = new ResizeObserver(check); - ro.observe(el); - return () => ro.disconnect(); - }, [conversations.length]); - - useEffect(() => { - if (!onTaskInterfaceReady) return; - if (readySignaledTaskIdRef.current === task.id) return; - readySignaledTaskIdRef.current = task.id; - onTaskInterfaceReady(); - }, [task.id, onTaskInterfaceReady]); - - // Load conversations when task changes - useEffect(() => { - const loadConversations = async () => { - setConversationsLoaded(false); - const loadedConversations = await rpc.db.getConversations(task.id); - - if (loadedConversations.length > 0) { - setConversations(loadedConversations); - - // Set active conversation - const active = loadedConversations.find((c: Conversation) => c.isActive); - if (active) { - setActiveConversationId(active.id); - // Update agent to match the active conversation - if (active.provider) { - setAgent(active.provider as Agent); - } - } else { - // Fallback to first conversation - const firstConv = loadedConversations[0]; - setActiveConversationId(firstConv.id); - // Update agent to match the first conversation - if (firstConv.provider) { - setAgent(firstConv.provider as Agent); - } - await rpc.db.setActiveConversation({ - taskId: task.id, - conversationId: firstConv.id, - }); - } - setConversationsLoaded(true); - } else { - // No conversations exist - create default for backward compatibility - // This ensures existing tasks always have at least one conversation - // (preserves pre-multi-chat behavior) - const defaultConversation = await rpc.db.getOrCreateDefaultConversation(task.id); - if (defaultConversation) { - // For backward compatibility: use 
task.agentId if available, otherwise use current agent - // This preserves the original agent choice for tasks created before multi-chat - const taskAgent = task.agentId || agent; - const conversationWithAgent = { - ...defaultConversation, - provider: taskAgent, - isMain: true, - isActive: true, - }; - setConversations([conversationWithAgent]); - setActiveConversationId(defaultConversation.id); - - // Update the agent state to match - setAgent(taskAgent as Agent); - - // Save the agent to the conversation - await rpc.db.saveConversation(conversationWithAgent); - setConversationsLoaded(true); - } - } - }; - - loadConversations(); - }, [task.id, task.agentId]); // provider is intentionally not included as a dependency - - // Activity indicators per conversation tab (main PTY uses `task.id`, chat PTYs use `conversation.id`). - useEffect(() => { - let cancelled = false; - const unsubs: Array<() => void> = []; - const conversationIds = new Set(conversations.map((c) => c.id)); - - setBusyByConversationId((prev) => { - const next: Record = {}; - for (const id of conversationIds) next[id] = prev[id] ?? 
false; - return next; - }); - - if (mainConversationId) { - unsubs.push( - activityStore.subscribe(task.id, (busy) => { - if (cancelled) return; - setBusyByConversationId((prev) => { - if (prev[mainConversationId] === busy) return prev; - return { ...prev, [mainConversationId]: busy }; - }); - }) - ); - } - - for (const conv of conversations) { - if (conv.isMain) continue; - const conversationId = conv.id; - unsubs.push( - activityStore.subscribe( - conversationId, - (busy) => { - if (cancelled) return; - setBusyByConversationId((prev) => { - if (prev[conversationId] === busy) return prev; - return { ...prev, [conversationId]: busy }; - }); - }, - { kinds: ['chat'] } - ) - ); - } - - return () => { - cancelled = true; - try { - for (const off of unsubs) off?.(); - } catch {} - }; - }, [task.id, conversations, mainConversationId]); - - // Ref to control terminal focus imperatively if needed - const terminalRef = useRef<{ focus: () => void }>(null); - - // Auto-focus terminal when switching to this task - useEffect(() => { - if (!conversationsLoaded) return; - // Small delay to ensure terminal is mounted and attached - const timer = setTimeout(() => { - const session = terminalSessionRegistry.getSession(terminalId); - if (session) { - session.focus(); - } - }, 100); - return () => clearTimeout(timer); - }, [task.id, terminalId, conversationsLoaded]); - - // Focus terminal when this task becomes active (for already-mounted terminals) - useEffect(() => { - // Small delay to ensure terminal is visible after tab switch - const timer = setTimeout(() => { - terminalRef.current?.focus(); - }, 50); - return () => clearTimeout(timer); - }, [task.id]); - - useEffect(() => { - let mounted = true; - let timer: ReturnType | null = null; - const handleWindowFocus = () => { - timer = setTimeout(() => { - timer = null; - if (!mounted) return; - const session = terminalSessionRegistry.getSession(terminalId); - if (session) session.focus(); - }, 0); - }; - 
window.addEventListener('focus', handleWindowFocus); - return () => { - mounted = false; - if (timer !== null) clearTimeout(timer); - window.removeEventListener('focus', handleWindowFocus); - }; - }, [terminalId]); - - useEffect(() => { - const meta = agentMeta[agent]; - if (!meta?.terminalOnly || !meta.autoStartCommand) return; - - const onceKey = `cli:autoStart:${terminalId}`; - try { - if (localStorage.getItem(onceKey) === '1') return; - } catch {} - - const send = () => { - try { - (window as any).electronAPI?.ptyInput?.({ - id: terminalId, - data: `${meta.autoStartCommand}\n`, - }); - try { - localStorage.setItem(onceKey, '1'); - } catch {} - } catch {} - }; - - const api: any = (window as any).electronAPI; - let off: (() => void) | null = null; - try { - off = api?.onPtyStarted?.((info: { id: string }) => { - if (info?.id === terminalId) send(); - }); - } catch {} - - const t = setTimeout(send, 1200); - - return () => { - try { - off?.(); - } catch {} - clearTimeout(t); - }; - }, [agent, terminalId]); - - useEffect(() => { - setCliStartError(null); - }, [task.id]); - - const runInstallCommand = useCallback( - (cmd: string) => { - const api: any = (window as any).electronAPI; - const targetId = activeTerminalId; - if (!targetId) return; - - const send = () => { - try { - api?.ptyInput?.({ id: targetId, data: `${cmd}\n` }); - return true; - } catch (error) { - console.error('Failed to run install command', error); - return false; - } - }; - - // Best effort immediate send - const ok = send(); - - // Listen for PTY start in case the terminal was still spinning up - const off = api?.onPtyStarted?.((info: { id: string }) => { - if (info?.id !== targetId) return; - send(); - try { - off?.(); - } catch {} - }); - - // If immediate send worked, remove listener - if (ok) { - try { - off?.(); - } catch {} - } - }, - [activeTerminalId] - ); - - // On task change, restore last-selected agent (including Droid). 
- // If a locked agent exists (including Droid), prefer locked. - useEffect(() => { - try { - const lastKey = `agent:last:${task.id}`; - const last = window.localStorage.getItem(lastKey) as Agent | null; - - if (initialAgent) { - setAgent(initialAgent); - } else { - const validAgents: Agent[] = [ - 'codex', - 'claude', - 'qwen', - 'droid', - 'gemini', - 'cursor', - 'copilot', - 'amp', - 'opencode', - 'charm', - 'auggie', - 'goose', - 'kimi', - 'kilocode', - 'kiro', - 'rovo', - 'cline', - 'continue', - 'codebuff', - 'mistral', - ]; - if (last && (validAgents as string[]).includes(last)) { - setAgent(last as Agent); - } else { - setAgent('codex'); - } - } - } catch { - setAgent(initialAgent || 'codex'); - } - }, [task.id, initialAgent]); - - // Chat management handlers - const handleCreateChat = useCallback( - async (title: string, newAgent: string) => { - try { - // Don't dispose the current terminal - each chat has its own independent session - - const newConversation = await rpc.db.createConversation({ - taskId: task.id, - title, - provider: newAgent, - isMain: false, // Additional chats are never main - }); - - // Reload conversations from DB - const dbConversations = await rpc.db.getConversations(task.id); - const dbIds = new Set(dbConversations.map((c: Conversation) => c.id)); - const missingFromDb = conversations.filter((c) => !dbIds.has(c.id)); - if (missingFromDb.length > 0) { - // Re-persist conversations that only existed in React state - for (const missing of missingFromDb) { - await rpc.db.saveConversation({ ...missing, isActive: false }); - } - const retryConversations = await rpc.db.getConversations(task.id); - setConversations(retryConversations); - } else { - setConversations(dbConversations); - } - setActiveConversationId(newConversation.id); - setAgent(newAgent as Agent); - try { - window.dispatchEvent( - new CustomEvent('emdash:conversations-changed', { detail: { taskId: task.id } }) - ); - } catch {} - } catch (error) { - console.error('Exception 
creating conversation:', error); - toast({ - title: 'Error', - description: error instanceof Error ? error.message : 'Failed to create chat', - variant: 'destructive', - }); - } - }, - [task.id, toast, conversations] - ); - - const handleCreateNewChat = useCallback(() => { - setShowCreateChatModal(true); - }, []); - - const handleSwitchChat = useCallback( - async (conversationId: string) => { - // Don't dispose terminals - just switch between them - // Each chat maintains its own persistent terminal session - - await rpc.db.setActiveConversation({ - taskId: task.id, - conversationId, - }); - setActiveConversationId(conversationId); - - // Update provider based on conversation - const conv = conversations.find((c) => c.id === conversationId); - if (conv?.provider) { - setAgent(conv.provider as Agent); - } - }, - [task.id, conversations] - ); - - const handleCloseChat = useCallback( - async (conversationId: string) => { - if (conversations.length <= 1) { - toast({ - title: 'Cannot Close', - description: 'Cannot close the last chat', - variant: 'destructive', - }); - return; - } - - // Dispose the terminal for this chat - const convToDelete = conversations.find((c) => c.id === conversationId); - const convAgent = (convToDelete?.provider || agent) as Agent; - const terminalToDispose = makePtyId(convAgent, 'chat', conversationId); - terminalSessionRegistry.dispose(terminalToDispose); - - await rpc.db.deleteConversation(conversationId); - - // Reload conversations - const updatedConversations = await rpc.db.getConversations(task.id); - setConversations(updatedConversations); - // Switch to another chat if we deleted the active one - if (conversationId === activeConversationId && updatedConversations.length > 0) { - const newActive = updatedConversations[0]; - await rpc.db.setActiveConversation({ - taskId: task.id, - conversationId: newActive.id, - }); - setActiveConversationId(newActive.id); - // Update provider if needed - if (newActive.provider) { - 
setAgent(newActive.provider as Agent); - } - } - - try { - window.dispatchEvent( - new CustomEvent('emdash:conversations-changed', { detail: { taskId: task.id } }) - ); - } catch {} - }, - [conversations, agent, task.id, activeConversationId, toast] - ); - - // Persist last-selected agent per task (including Droid) - useEffect(() => { - try { - window.localStorage.setItem(`agent:last:${task.id}`, agent); - } catch {} - }, [agent, task.id]); - - // Track agent switching - const prevAgentRef = React.useRef(null); - useEffect(() => { - if (prevAgentRef.current && prevAgentRef.current !== agent) { - void (async () => { - const { captureTelemetry } = await import('../lib/telemetryClient'); - captureTelemetry('task_agent_switched', { agent }); - })(); - } - prevAgentRef.current = agent; - }, [agent]); - - useEffect(() => { - const installed = currentAgentStatus?.installed === true; - setIsAgentInstalled(installed); - }, [agent, currentAgentStatus]); - - useEffect(() => { - let cancelled = false; - let refreshCheckRequested = false; - const api: any = (window as any).electronAPI; - - const applyStatuses = (statuses: Record | undefined | null) => { - if (!statuses) return; - setAgentStatuses(statuses); - if (cancelled) return; - const installed = statuses?.[agent]?.installed === true; - setIsAgentInstalled(installed); - }; - - const maybeRefreshAgentStatus = async (statuses?: Record | undefined | null) => { - if (cancelled || refreshCheckRequested) return; - if (!api?.getProviderStatuses) return; - - const status = statuses?.[agent]; - const hasEntry = Boolean(status); - const isInstalled = status?.installed === true; - const lastChecked = - typeof status?.lastChecked === 'number' && Number.isFinite(status.lastChecked) - ? 
status.lastChecked - : 0; - const isStale = !lastChecked || Date.now() - lastChecked > 5 * 60 * 1000; - - if (hasEntry && isInstalled && !isStale) return; - - refreshCheckRequested = true; - try { - const refreshed = await api.getProviderStatuses({ refresh: true, providers: [agent] }); - if (cancelled) return; - if (refreshed?.success) { - applyStatuses(refreshed.statuses ?? {}); - } - } catch (error) { - console.error('Agent status refresh failed', error); - } - }; - - const load = async () => { - if (!api?.getProviderStatuses) { - setIsAgentInstalled(false); - return; - } - try { - const res = await api.getProviderStatuses(); - if (cancelled) return; - if (res?.success) { - applyStatuses(res.statuses ?? {}); - void maybeRefreshAgentStatus(res.statuses); - } else { - setIsAgentInstalled(false); - } - } catch (error) { - if (!cancelled) setIsAgentInstalled(false); - console.error('Agent status load failed', error); - } - }; - - const off = - api?.onProviderStatusUpdated?.((payload: { providerId: string; status: any }) => { - if (!payload?.providerId) return; - setAgentStatuses((prev) => { - const next = { ...prev, [payload.providerId]: payload.status }; - return next; - }); - if (payload.providerId === agent) { - setIsAgentInstalled(payload.status?.installed === true); - } - }) || null; - - void load(); - - return () => { - cancelled = true; - off?.(); - }; - }, [agent]); - - // Switch active chat/agent via global shortcuts (Cmd+Shift+J/K) - useEffect(() => { - const handleAgentSwitch = (event: Event) => { - const customEvent = event as CustomEvent<{ direction: 'next' | 'prev' }>; - if (conversations.length <= 1) return; - const direction = customEvent.detail?.direction; - if (!direction) return; - - const currentIndex = conversations.findIndex((c) => c.id === activeConversationId); - if (currentIndex === -1) return; - - let newIndex: number; - if (direction === 'prev') { - newIndex = currentIndex <= 0 ? 
conversations.length - 1 : currentIndex - 1; - } else { - newIndex = (currentIndex + 1) % conversations.length; - } - - const newConversation = conversations[newIndex]; - if (newConversation) { - handleSwitchChat(newConversation.id); - } - }; - - window.addEventListener('emdash:switch-agent', handleAgentSwitch); - return () => { - window.removeEventListener('emdash:switch-agent', handleAgentSwitch); - }; - }, [conversations, activeConversationId, handleSwitchChat]); - - // Close active chat tab on Cmd+W - useEffect(() => { - const handleCloseActiveChat = () => { - if (activeConversationId) { - handleCloseChat(activeConversationId); - } - }; - window.addEventListener('emdash:close-active-chat', handleCloseActiveChat); - return () => window.removeEventListener('emdash:close-active-chat', handleCloseActiveChat); - }, [activeConversationId, handleCloseChat]); - - const isTerminal = agentMeta[agent]?.terminalOnly === true; - const autoApproveEnabled = - Boolean(task.metadata?.autoApprove) && Boolean(agentMeta[agent]?.autoApproveFlag); - - const isMainConversation = activeConversationId === mainConversationId; - - const initialInjection = useMemo(() => { - if (!isTerminal) return null; - // Only inject into the main conversation — secondary chats should not - // receive the task's initial prompt or linked issue context. - if (!isMainConversation) return null; - const md = task.metadata || null; - const p = (md?.initialPrompt || '').trim(); - if (p) return p; - const issue = md?.linearIssue; - if (issue) { - const parts: string[] = []; - const line1 = `Linked Linear issue: ${issue.identifier}${issue.title ? 
` — ${issue.title}` : ''}`; - parts.push(line1); - const details: string[] = []; - if (issue.state?.name) details.push(`State: ${issue.state.name}`); - if (issue.assignee?.displayName || issue.assignee?.name) - details.push(`Assignee: ${issue.assignee?.displayName || issue.assignee?.name}`); - if (issue.team?.key) details.push(`Team: ${issue.team.key}`); - if (issue.project?.name) details.push(`Project: ${issue.project.name}`); - if (details.length) parts.push(`Details: ${details.join(' • ')}`); - if (issue.url) parts.push(`URL: ${issue.url}`); - const desc = (issue as any)?.description; - if (typeof desc === 'string' && desc.trim()) { - const trimmed = desc.trim(); - const max = 1500; - const body = trimmed.length > max ? trimmed.slice(0, max) + '\n…' : trimmed; - parts.push('', 'Issue Description:', body); - } - const linearContent = parts.join('\n'); - // Prepend comments if any - if (commentsContext) { - return `The user has left the following comments on the code changes:\n\n${commentsContext}\n\n${linearContent}`; - } - return linearContent; - } - - const gh = (md as any)?.githubIssue as - | { - number: number; - title?: string; - url?: string; - state?: string; - assignees?: any[]; - labels?: any[]; - body?: string; - } - | undefined; - if (gh) { - const parts: string[] = []; - const line1 = `Linked GitHub issue: #${gh.number}${gh.title ? ` — ${gh.title}` : ''}`; - parts.push(line1); - const details: string[] = []; - if (gh.state) details.push(`State: ${gh.state}`); - try { - const as = Array.isArray(gh.assignees) - ? gh.assignees - .map((a: any) => a?.name || a?.login) - .filter(Boolean) - .join(', ') - : ''; - if (as) details.push(`Assignees: ${as}`); - } catch {} - try { - const ls = Array.isArray(gh.labels) - ? 
gh.labels - .map((l: any) => l?.name) - .filter(Boolean) - .join(', ') - : ''; - if (ls) details.push(`Labels: ${ls}`); - } catch {} - if (details.length) parts.push(`Details: ${details.join(' • ')}`); - if (gh.url) parts.push(`URL: ${gh.url}`); - const body = typeof gh.body === 'string' ? gh.body.trim() : ''; - if (body) { - const max = 1500; - const clipped = body.length > max ? body.slice(0, max) + '\n…' : body; - parts.push('', 'Issue Description:', clipped); - } - const ghContent = parts.join('\n'); - // Prepend comments if any - if (commentsContext) { - return `The user has left the following comments on the code changes:\n\n${commentsContext}\n\n${ghContent}`; - } - return ghContent; - } - - const j = md?.jiraIssue as any; - if (j) { - const lines: string[] = []; - const l1 = `Linked Jira issue: ${j.key}${j.summary ? ` — ${j.summary}` : ''}`; - lines.push(l1); - const details: string[] = []; - if (j.status?.name) details.push(`Status: ${j.status.name}`); - if (j.assignee?.displayName || j.assignee?.name) - details.push(`Assignee: ${j.assignee?.displayName || j.assignee?.name}`); - if (j.project?.key) details.push(`Project: ${j.project.key}`); - if (details.length) lines.push(`Details: ${details.join(' • ')}`); - if (j.url) lines.push(`URL: ${j.url}`); - const desc = typeof j.description === 'string' ? j.description.trim() : ''; - if (desc) { - const max = 1500; - const clipped = desc.length > max ? 
desc.slice(0, max) + '\n…' : desc; - lines.push('', 'Issue Description:', clipped); - } - const jiraContent = lines.join('\n'); - // Prepend comments if any - if (commentsContext) { - return `The user has left the following comments on the code changes:\n\n${commentsContext}\n\n${jiraContent}`; - } - return jiraContent; - } - - // If we have comments but no other context, return just the comments - if (commentsContext) { - return `The user has left the following comments on the code changes:\n\n${commentsContext}`; - } - - return null; - }, [isTerminal, isMainConversation, task.metadata, commentsContext]); - - // Only use keystroke injection for agents WITHOUT CLI flag support, - // or agents that explicitly opt into it (useKeystrokeInjection: true). - // Agents with initialPromptFlag use CLI arg injection via TerminalPane instead. - useInitialPromptInjection({ - taskId: task.id, - providerId: agent, - prompt: initialInjection, - enabled: - isTerminal && - (agentMeta[agent]?.initialPromptFlag === undefined || - agentMeta[agent]?.useKeystrokeInjection === true), - }); - - // Ensure an agent is stored for this task so fallbacks can subscribe immediately - useEffect(() => { - try { - localStorage.setItem(`taskAgent:${task.id}`, agent); - } catch {} - }, [agent, task.id]); - - // Auto-rename task from first terminal message (only if name was auto-generated) - const handleFirstMessage = useCallback( - (message: string) => { - if (!project || !onRenameTask) return; - // Only rename if this task's name was auto-generated - if (!task.metadata?.nameGenerated) return; - // Skip multi-agent tasks - if (task.metadata?.multiAgent?.enabled) return; - - const generated = generateTaskName(message); - if (!generated) return; - - const existingNames = (project.tasks || []).map((t) => t.name); - const uniqueName = ensureUniqueTaskName(generated, existingNames); - void onRenameTask(project, task, uniqueName); - }, - [project, task, onRenameTask] - ); - - // Whether to enable 
first-message capture for this task - const shouldCaptureFirstMessage = !!( - task.metadata?.nameGenerated && - !task.metadata?.multiAgent?.enabled && - project && - onRenameTask - ); - - if (!isTerminal) { - return null; - } - - return ( - -
- setShowCreateChatModal(false)} - onCreateChat={handleCreateChat} - installedAgents={installedAgents} - /> - -
-
-
-
-
- {sortedConversations.map((conv, index) => { - const isActive = conv.id === activeConversationId; - const convAgent = conv.provider || agent; - const config = agentConfig[convAgent as Agent]; - const agentName = config?.name || convAgent; - const isBusy = busyByConversationId[conv.id] === true; - - // Count how many chats use the same agent up to this point - const sameAgentCount = sortedConversations - .slice(0, index + 1) - .filter((c) => (c.provider || agent) === convAgent).length; - const showNumber = - sortedConversations.filter((c) => (c.provider || agent) === convAgent) - .length > 1; - - return ( - - ); - })} -
- -
- {(task.metadata?.linearIssue || - task.metadata?.githubIssue || - task.metadata?.jiraIssue) && ( - - )} - {autoApproveEnabled && ( - - - Auto-approve - - )} -
-
- {(() => { - if (isAgentInstalled === false) { - return ( - window.electronAPI.openExternal(url)} - mode="missing" - /> - ); - } - if (cliStartError) { - return ( - window.electronAPI.openExternal(url)} - mode="start_failed" - details={cliStartError} - /> - ); - } - return null; - })()} -
-
-
-
- {/* Wait for conversations to load to ensure stable terminalId */} - {conversationsLoaded && ( - { - try { - window.localStorage.setItem(`agent:locked:${task.id}`, agent); - } catch {} - }} - onStartError={(message) => { - setCliStartError(message); - }} - onStartSuccess={() => { - setCliStartError(null); - // Mark initial injection as sent so it won't re-run on restart - if (initialInjection && !task.metadata?.initialInjectionSent) { - void rpc.db.saveTask({ - ...task, - metadata: { - ...task.metadata, - initialInjectionSent: true, - }, - }); - } - }} - variant={ - effectiveTheme === 'dark' || effectiveTheme === 'dark-black' ? 'dark' : 'light' - } - themeOverride={ - agent === 'charm' - ? { - background: - effectiveTheme === 'dark-black' - ? '#0a0a0a' - : effectiveTheme === 'dark' - ? '#1f2937' - : '#ffffff', - selectionBackground: 'rgba(96, 165, 250, 0.35)', - selectionForeground: effectiveTheme === 'light' ? '#0f172a' : '#f9fafb', - } - : agent === 'mistral' - ? { - background: - effectiveTheme === 'dark-black' - ? '#141820' - : effectiveTheme === 'dark' - ? '#202938' - : '#ffffff', - selectionBackground: 'rgba(96, 165, 250, 0.35)', - selectionForeground: effectiveTheme === 'light' ? '#0f172a' : '#f9fafb', - } - : effectiveTheme === 'dark-black' - ? { - background: '#000000', - selectionBackground: 'rgba(96, 165, 250, 0.35)', - selectionForeground: '#f9fafb', - } - : undefined - } - contentFilter={ - agent === 'charm' && - effectiveTheme !== 'dark' && - effectiveTheme !== 'dark-black' - ? 'invert(1) hue-rotate(180deg) brightness(1.1) contrast(1.05)' - : undefined - } - initialPrompt={ - agentMeta[agent]?.initialPromptFlag !== undefined && - !agentMeta[agent]?.useKeystrokeInjection && - !task.metadata?.initialInjectionSent - ? (initialInjection ?? undefined) - : undefined - } - onFirstMessage={shouldCaptureFirstMessage ? handleFirstMessage : undefined} - className="h-full w-full" - /> - )} -
-
-
-
-
- ); -}; - -export default ChatInterface; diff --git a/src/renderer/components/ChatTabs.tsx b/src/renderer/components/ChatTabs.tsx deleted file mode 100644 index f62681a47..000000000 --- a/src/renderer/components/ChatTabs.tsx +++ /dev/null @@ -1,122 +0,0 @@ -import React, { useState } from 'react'; -import { X, Edit2 } from 'lucide-react'; -import { cn } from '@/lib/utils'; -import { agentConfig } from '../lib/agentConfig'; -import AgentLogo from './AgentLogo'; -import type { Agent } from '../types'; - -interface ChatTab { - id: string; - title: string; - provider?: string | null; - isActive: boolean; - messageCount?: number; -} - -interface ChatTabsProps { - tabs: ChatTab[]; - activeTabId: string | null; - onTabClick: (tabId: string) => void; - onCloseTab: (tabId: string) => void; - onRenameTab: (tabId: string, newTitle: string) => void; - onDuplicateTab?: (tabId: string) => void; -} - -export function ChatTabs({ - tabs, - activeTabId, - onTabClick, - onCloseTab, - onRenameTab, -}: ChatTabsProps) { - const [draggedTab, setDraggedTab] = useState(null); - - const handleDragStart = (e: React.DragEvent, tabId: string) => { - setDraggedTab(tabId); - e.dataTransfer.effectAllowed = 'move'; - }; - - const handleDragOver = (e: React.DragEvent) => { - e.preventDefault(); - e.dataTransfer.dropEffect = 'move'; - }; - - const handleDrop = (e: React.DragEvent, targetTabId: string) => { - e.preventDefault(); - if (draggedTab && draggedTab !== targetTabId) { - // TODO: Implement reordering logic - // Will reorder ${draggedTab} to position of ${targetTabId} - } - setDraggedTab(null); - }; - - const handleRename = (tabId: string, currentTitle: string) => { - const newTitle = prompt('Rename chat:', currentTitle); - if (newTitle && newTitle.trim() && newTitle !== currentTitle) { - onRenameTab(tabId, newTitle.trim()); - } - }; - - return ( -
- {tabs.map((tab) => { - const config = tab.provider ? agentConfig[tab.provider as Agent] : null; - return ( -
handleDragStart(e, tab.id)} - onDragOver={handleDragOver} - onDrop={(e) => handleDrop(e, tab.id)} - className={cn( - 'group flex cursor-pointer items-center gap-2 rounded-md px-3 py-1.5', - 'min-w-[120px] max-w-[200px] flex-shrink-0 transition-colors', - 'hover:bg-muted/80', - tab.id === activeTabId && 'bg-primary text-primary-foreground hover:bg-primary/90' - )} - onClick={() => onTabClick(tab.id)} - > - {config && ( - - )} - - {tab.title} - - -
- - - {tabs.length > 1 && ( - - )} -
-
- ); - })} -
- ); -} diff --git a/src/renderer/components/CheckRunsList.tsx b/src/renderer/components/CheckRunsList.tsx deleted file mode 100644 index 1917eb323..000000000 --- a/src/renderer/components/CheckRunsList.tsx +++ /dev/null @@ -1,120 +0,0 @@ -import { CheckCircle2, XCircle, Loader2, MinusCircle, ExternalLink } from 'lucide-react'; -import githubIcon from '../../assets/images/github.png'; -import type { CheckRunsStatus, CheckRun, CheckRunBucket } from '../lib/checkRunStatus'; -import { formatCheckDuration } from '../lib/checkRunStatus'; -import { Badge } from './ui/badge'; - -function BucketIcon({ bucket }: { bucket: CheckRunBucket }) { - switch (bucket) { - case 'pass': - return ; - case 'fail': - return ; - case 'pending': - return ; - case 'skipping': - case 'cancel': - return ; - } -} - -function CheckRunItem({ check }: { check: CheckRun }) { - const duration = formatCheckDuration(check.startedAt, check.completedAt); - - return ( -
- - - - -
-
{check.name}
- {check.workflow && ( -
{check.workflow}
- )} -
-
- {duration && {duration}} - {check.link && ( - - )} -
-
- ); -} - -interface ChecksPanelProps { - status: CheckRunsStatus | null; - isLoading: boolean; - hasPr: boolean; - hideSummary?: boolean; -} - -export function ChecksPanel({ status, isLoading, hasPr, hideSummary }: ChecksPanelProps) { - if (!hasPr) { - return ( -
-

No PR exists for this branch.

-
- ); - } - - if (isLoading && !status) { - return ( -
- -
- ); - } - - if (!status || !status.checks || status.checks.length === 0) { - return ( -
-
-

No CI checks found for this repository

-
-
- ); - } - - const { summary } = status; - - return ( -
- {!hideSummary && ( -
- {summary.passed > 0 && ( - - - {summary.passed} passed - - )} - {summary.failed > 0 && ( - - - {summary.failed} failed - - )} - {summary.pending > 0 && ( - - - {summary.pending} pending - - )} -
- )} -
- {status.checks.map((check, i) => ( - - ))} -
-
- ); -} diff --git a/src/renderer/components/CloneFromUrlModal.tsx b/src/renderer/components/CloneFromUrlModal.tsx deleted file mode 100644 index 7e619c0e1..000000000 --- a/src/renderer/components/CloneFromUrlModal.tsx +++ /dev/null @@ -1,268 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Button } from './ui/button'; -import { DialogContent, DialogHeader, DialogTitle } from './ui/dialog'; -import { Input } from './ui/input'; -import { Label } from './ui/label'; -import { Spinner } from './ui/spinner'; -import { Separator } from './ui/separator'; -import { rpc } from '@/lib/rpc'; - -interface CloneFromUrlModalProps { - onClose: () => void; - onSuccess: (projectPath: string) => void; -} - -export const CloneFromUrlModal: React.FC = ({ onClose, onSuccess }) => { - const [repoUrl, setRepoUrl] = useState(''); - const [directoryName, setDirectoryName] = useState(''); - const [isCloning, setIsCloning] = useState(false); - const [error, setError] = useState(null); - const [progress, setProgress] = useState(''); - const [touched, setTouched] = useState(false); - - // Clean URL by removing hash, query params, and trailing slashes - const cleanUrl = useCallback((url: string): string => { - return url - .trim() - .replace(/#.*$/, '') // Remove hash/fragment - .replace(/\?.*$/, '') // Remove query parameters - .replace(/\/+$/, ''); // Remove trailing slashes - }, []); - - // Parse repo name from URL for directory name suggestion - useEffect(() => { - if (!repoUrl.trim()) { - setDirectoryName(''); - return; - } - - try { - const cleanedUrl = cleanUrl(repoUrl); - // Try to extract repo name from various URL formats - let repoName = ''; - - // Handle https://github.com/owner/repo.git or https://github.com/owner/repo - const httpsMatch = cleanedUrl.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?\/?$/i); - if (httpsMatch) { - repoName = httpsMatch[2]; - } else { - // Handle git@github.com:owner/repo.git - const sshMatch = 
cleanedUrl.match(/:([^/]+)\/([^/]+?)(?:\.git)?$/); - if (sshMatch) { - repoName = sshMatch[2]; - } else { - // Handle ssh://git@host/path/to/repo.git - const sshUrlMatch = cleanedUrl.match(/\/([^/]+?)(?:\.git)?\/?$/); - if (sshUrlMatch) { - repoName = sshUrlMatch[1]; - } else { - // Fallback: take last segment after splitting by / or : - const segments = cleanedUrl.split(/[/:]/).filter(Boolean); - if (segments.length > 0) { - repoName = segments[segments.length - 1].replace(/\.git$/, ''); - } - } - } - } - - if (repoName && !directoryName) { - setDirectoryName(repoName); - } - } catch (e) { - // Ignore parsing errors - } - }, [repoUrl, directoryName, cleanUrl]); - - // Reset form on mount - useEffect(() => { - setRepoUrl(''); - setDirectoryName(''); - setError(null); - setProgress(''); - setTouched(false); - }, []); - - const validateUrl = (url: string): { valid: boolean; error?: string } => { - const cleaned = cleanUrl(url); - if (!cleaned) { - return { valid: false, error: 'Repository URL is required' }; - } - - const trimmed = cleaned; - - // Check for common Git URL patterns - const patterns = [ - /^https?:\/\/.+/i, // https:// or http:// - /^git@.+:.+/i, // git@host:path - /^ssh:\/\/.+/i, // ssh:// - ]; - - const isValid = patterns.some((pattern) => pattern.test(trimmed)); - if (!isValid) { - return { - valid: false, - error: 'Please enter a valid Git URL (https://, git@, or ssh://)', - }; - } - - return { valid: true }; - }; - - const handleSubmit = useCallback( - async (e: React.FormEvent) => { - e.preventDefault(); - setTouched(true); - setError(null); - - const cleanedUrl = cleanUrl(repoUrl); - const validation = validateUrl(cleanedUrl); - if (!validation.valid) { - setError(validation.error || 'Invalid URL'); - return; - } - - if (!directoryName.trim()) { - setError('Directory name is required'); - return; - } - - setIsCloning(true); - setProgress('Cloning repository...'); - - try { - // Get default directory from settings - const settings = await 
rpc.appSettings.get(); - const defaultDir = settings?.projects?.defaultDirectory - ? settings.projects.defaultDirectory - : '~/emdash-projects'; - const localPath = `${defaultDir}/${directoryName.trim()}`; - - setProgress(`Cloning to ${localPath}...`); - - const cloneResult = await window.electronAPI.githubCloneRepository(cleanedUrl, localPath); - - if (!cloneResult.success) { - throw new Error(cloneResult.error || 'Failed to clone repository'); - } - - setProgress('Repository cloned successfully'); - await new Promise((resolve) => setTimeout(resolve, 500)); // Brief pause for UX - - onSuccess(localPath); - onClose(); - } catch (err) { - const errorMessage = err instanceof Error ? err.message : 'Failed to clone repository'; - setError(errorMessage); - setProgress(''); - } finally { - setIsCloning(false); - } - }, - [repoUrl, directoryName, onSuccess, onClose] - ); - - return ( - { - if (isCloning) e.preventDefault(); - }} - onEscapeKeyDown={(e) => { - if (isCloning) e.preventDefault(); - }} - > - - Clone from URL - - - - - {isCloning && progress ? ( -
-
- -
-

{progress}

-

This may take a few moments...

-
-
-
- ) : ( -
-
- - setRepoUrl(e.target.value)} - onBlur={() => setTouched(true)} - placeholder="https://github.com/owner/repo.git" - className={`w-full ${ - touched && error - ? 'border-destructive focus-visible:border-destructive focus-visible:ring-destructive' - : '' - }`} - aria-invalid={touched && !!error} - disabled={isCloning} - autoFocus - /> - {touched && error && !repoUrl.trim() && ( -

{error}

- )} -
- -
- - setDirectoryName(e.target.value)} - placeholder="my-project" - disabled={isCloning} - className="w-full" - /> -

- Local directory name (auto-detected from URL) -

-
- - {error && repoUrl.trim() && ( -
- {error.split('\n').map((line, i) => ( -

{line}

- ))} -
- )} - -
- - -
-
- )} -
- ); -}; diff --git a/src/renderer/components/CommandPaletteWrapper.tsx b/src/renderer/components/CommandPaletteWrapper.tsx deleted file mode 100644 index 104cee213..000000000 --- a/src/renderer/components/CommandPaletteWrapper.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import React from 'react'; -import CommandPalette from '../components/CommandPalette'; -import { useSidebar } from '../components/ui/sidebar'; -import { useRightSidebar } from '../components/ui/right-sidebar'; -import { useTheme } from '../hooks/useTheme'; -import type { Task } from '../types/app'; -import { useProjectManagementContext } from '../contexts/ProjectManagementProvider'; -import { useTaskManagementContext } from '../contexts/TaskManagementContext'; - -export interface CommandPaletteWrapperProps { - isOpen: boolean; - onClose: () => void; - handleGoHome: () => void; - handleOpenSettings: () => void; - handleOpenKeyboardShortcuts: () => void; -} - -const CommandPaletteWrapper: React.FC = ({ - isOpen, - onClose, - handleGoHome, - handleOpenSettings, - handleOpenKeyboardShortcuts, -}) => { - const { toggle: toggleLeftSidebar } = useSidebar(); - const { toggle: toggleRightSidebar } = useRightSidebar(); - const { toggleTheme } = useTheme(); - const { projects, handleSelectProject, handleOpenProject } = useProjectManagementContext(); - const { handleSelectTask } = useTaskManagementContext(); - - return ( - { - const project = projects.find((p) => p.id === projectId); - if (project) handleSelectProject(project); - }} - onSelectTask={(projectId, taskId) => { - const project = projects.find((p) => p.id === projectId); - const task = project?.tasks?.find((w: Task) => w.id === taskId); - if (project && task) { - handleSelectProject(project); - handleSelectTask(task); - } - }} - onOpenSettings={handleOpenSettings} - onOpenKeyboardShortcuts={handleOpenKeyboardShortcuts} - onToggleLeftSidebar={toggleLeftSidebar} - onToggleRightSidebar={toggleRightSidebar} - onToggleTheme={toggleTheme} - 
onGoHome={handleGoHome} - onOpenProject={handleOpenProject} - /> - ); -}; - -export default CommandPaletteWrapper; diff --git a/src/renderer/components/CommentsPopover.tsx b/src/renderer/components/CommentsPopover.tsx deleted file mode 100644 index ca84face8..000000000 --- a/src/renderer/components/CommentsPopover.tsx +++ /dev/null @@ -1,230 +0,0 @@ -import React, { useMemo, useRef, useState } from 'react'; -import { Popover, PopoverTrigger, PopoverContent } from './ui/popover'; -import { Button } from './ui/button'; -import { Checkbox } from './ui/checkbox'; -import { ScrollArea } from './ui/scroll-area'; -import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from './ui/tooltip'; -import { useTaskScope } from './TaskScopeContext'; -import { usePendingInjection } from '../hooks/usePendingInjection'; -import { useTaskComments } from '../hooks/useLineComments'; -import { formatCommentsForAgent } from '../lib/formatCommentsForAgent'; -import type { LineComment } from '../types/electron-api'; - -interface CommentsPopoverProps { - taskId?: string; - children: React.ReactNode; - tooltipContent?: string; - tooltipDelay?: number; - onOpenChange?: (open: boolean) => void; - onSelectedCountChange?: (count: number) => void; -} - -export function CommentsPopover({ - taskId, - children, - tooltipContent, - tooltipDelay = 300, - onOpenChange, - onSelectedCountChange, -}: CommentsPopoverProps) { - const { taskId: scopedTaskId } = useTaskScope(); - const resolvedTaskId = taskId ?? scopedTaskId ?? 
''; - const { unsentComments, markSent, refresh } = useTaskComments(resolvedTaskId); - const { setPending, clear, onInjectionUsed } = usePendingInjection(); - const [open, setOpen] = useState(false); - const [selectedIds, setSelectedIds] = useState>(new Set()); - const pendingIdsRef = useRef([]); - const hasCustomSelectionRef = useRef(false); - const lastUnsentIdsRef = useRef>(new Set()); - - const emitSelectedCount = React.useCallback( - (next: Set) => { - onSelectedCountChange?.(next.size); - }, - [onSelectedCountChange] - ); - - React.useEffect(() => { - hasCustomSelectionRef.current = false; - lastUnsentIdsRef.current = new Set(); - pendingIdsRef.current = []; - setSelectedIds(new Set()); - setOpen(false); - clear(); - onSelectedCountChange?.(0); - }, [clear, onSelectedCountChange, resolvedTaskId]); - - const handleOpenChange = (nextOpen: boolean) => { - setOpen(nextOpen); - if (nextOpen && resolvedTaskId) { - void refresh(); - } - onOpenChange?.(nextOpen); - }; - - React.useEffect(() => { - if (!resolvedTaskId) return; - const nextIds = new Set(unsentComments.map((comment) => comment.id)); - setSelectedIds((prev) => { - if (!hasCustomSelectionRef.current) { - const next = new Set(nextIds); - emitSelectedCount(next); - return next; - } - const next = new Set(Array.from(prev).filter((id) => nextIds.has(id))); - for (const id of nextIds) { - if (!lastUnsentIdsRef.current.has(id)) { - next.add(id); - } - } - emitSelectedCount(next); - return next; - }); - lastUnsentIdsRef.current = nextIds; - }, [emitSelectedCount, resolvedTaskId, unsentComments]); - - const handleSelectedChange = React.useCallback( - (next: Set) => { - hasCustomSelectionRef.current = true; - setSelectedIds(next); - emitSelectedCount(next); - }, - [emitSelectedCount] - ); - - React.useEffect(() => { - if (!resolvedTaskId) return; - const selectedComments = unsentComments.filter((comment) => selectedIds.has(comment.id)); - - if (selectedComments.length === 0) { - pendingIdsRef.current = []; - 
clear(); - return; - } - - const formatted = formatCommentsForAgent(selectedComments, { - includeIntro: false, - leadingNewline: true, - }); - if (!formatted) { - pendingIdsRef.current = []; - clear(); - return; - } - - pendingIdsRef.current = selectedComments.map((comment) => comment.id); - setPending(formatted); - }, [clear, selectedIds, setPending, unsentComments, resolvedTaskId]); - - React.useEffect(() => { - return onInjectionUsed(() => { - const sentIds = pendingIdsRef.current; - if (sentIds.length === 0) return; - pendingIdsRef.current = []; - void markSent(sentIds); - }); - }, [markSent, onInjectionUsed]); - - const groupedComments = useMemo(() => { - const groups = new Map(); - for (const c of unsentComments) { - const existing = groups.get(c.filePath) ?? []; - existing.push(c); - groups.set(c.filePath, existing); - } - return groups; - }, [unsentComments]); - - const allSelected = unsentComments.length > 0 && selectedIds.size === unsentComments.length; - const toggleSelectAll = () => { - if (allSelected) { - handleSelectedChange(new Set()); - } else { - handleSelectedChange(new Set(unsentComments.map((c) => c.id))); - } - }; - - return ( - - {tooltipContent ? ( - - - - {children} - - - {tooltipContent} - - - - ) : ( - {children} - )} - -
-
- Review comments - - {unsentComments.length} unsent • {selectedIds.size} selected - -
-
- -
-
- - -
- {Array.from(groupedComments.entries()).map(([filePath, fileComments]) => ( -
-
- {filePath} -
-
- {fileComments.map((comment) => ( - - ))} -
-
- ))} - {unsentComments.length === 0 && ( -
- No unsent comments. -
- )} -
-
-
-
- ); -} diff --git a/src/renderer/components/ConfigEditorModal.tsx b/src/renderer/components/ConfigEditorModal.tsx deleted file mode 100644 index 57689546b..000000000 --- a/src/renderer/components/ConfigEditorModal.tsx +++ /dev/null @@ -1,449 +0,0 @@ -import React, { useCallback, useEffect, useMemo, useState } from 'react'; -import { Button } from './ui/button'; -import { Dialog, DialogContent, DialogHeader, DialogTitle } from './ui/dialog'; -import { Input } from './ui/input'; -import { Label } from './ui/label'; -import { Spinner } from './ui/spinner'; -import { Switch } from './ui/switch'; -import { Textarea } from './ui/textarea'; - -type LifecycleScripts = { - setup: string; - run: string; - teardown: string; -}; - -type ConfigShape = Record & { - preservePatterns?: string[]; - scripts?: Partial; - shellSetup?: string; - tmux?: boolean; -}; - -interface ConfigEditorModalProps { - isOpen: boolean; - onClose: () => void; - projectPath: string; - isRemote?: boolean; - sshConnectionId?: string | null; -} - -const EMPTY_SCRIPTS: LifecycleScripts = { - setup: '', - run: '', - teardown: '', -}; -const PROJECT_CONFIG_DOCS_URL = 'https://docs.emdash.sh/project-config'; - -function ensureConfigObject(raw: unknown): ConfigShape { - return raw && typeof raw === 'object' && !Array.isArray(raw) ? (raw as ConfigShape) : {}; -} - -function scriptsFromConfig(config: ConfigShape): LifecycleScripts { - const scripts = config.scripts; - if (!scripts || typeof scripts !== 'object' || Array.isArray(scripts)) { - return { ...EMPTY_SCRIPTS }; - } - - const obj = scripts as Record; - return { - setup: typeof obj.setup === 'string' ? obj.setup : '', - run: typeof obj.run === 'string' ? obj.run : '', - teardown: typeof obj.teardown === 'string' ? obj.teardown : '', - }; -} - -function applyScripts(config: ConfigShape, scripts: LifecycleScripts): ConfigShape { - const existingScripts = - config.scripts && typeof config.scripts === 'object' && !Array.isArray(config.scripts) - ? 
(config.scripts as Record) - : {}; - - const cleanScripts: Record = { ...existingScripts }; - if (scripts.setup.trim()) cleanScripts.setup = scripts.setup; - else delete cleanScripts.setup; - if (scripts.run.trim()) cleanScripts.run = scripts.run; - else delete cleanScripts.run; - if (scripts.teardown.trim()) cleanScripts.teardown = scripts.teardown; - else delete cleanScripts.teardown; - - const { scripts: _scripts, ...rest } = config; - if (Object.keys(cleanScripts).length === 0) { - return rest; - } - return { - ...rest, - scripts: cleanScripts, - }; -} - -function preservePatternsFromConfig(config: ConfigShape): string[] { - const patterns = config.preservePatterns; - if (!Array.isArray(patterns)) return []; - return patterns.filter((value): value is string => typeof value === 'string'); -} - -function applyPreservePatterns(config: ConfigShape, patterns: string[]): ConfigShape { - const { preservePatterns: _preservePatterns, ...rest } = config; - if (patterns.length === 0) { - return rest; - } - return { - ...rest, - preservePatterns: patterns, - }; -} - -function applyShellSetup(config: ConfigShape, shellSetup: string): ConfigShape { - const { shellSetup: _shellSetup, ...rest } = config; - const trimmed = shellSetup.trim(); - if (!trimmed) return rest; - return { ...rest, shellSetup: trimmed }; -} - -function applyTmux(config: ConfigShape, tmux: boolean): ConfigShape { - const { tmux: _tmux, ...rest } = config; - if (!tmux) return rest; - return { ...rest, tmux: true }; -} - -export const ConfigEditorModal: React.FC = ({ - isOpen, - onClose, - projectPath, - isRemote, - sshConnectionId, -}) => { - const [config, setConfig] = useState({}); - const [scripts, setScripts] = useState({ ...EMPTY_SCRIPTS }); - const [originalScripts, setOriginalScripts] = useState({ ...EMPTY_SCRIPTS }); - const [preservePatternsInput, setPreservePatternsInput] = useState(''); - const [originalPreservePatternsInput, setOriginalPreservePatternsInput] = useState(''); - const 
[shellSetup, setShellSetup] = useState(''); - const [originalShellSetup, setOriginalShellSetup] = useState(''); - const [tmux, setTmux] = useState(false); - const [originalTmux, setOriginalTmux] = useState(false); - const [isLoading, setIsLoading] = useState(false); - const [isSaving, setIsSaving] = useState(false); - const [error, setError] = useState(null); - const [loadFailed, setLoadFailed] = useState(false); - - const preservePatterns = useMemo( - () => - preservePatternsInput - .split('\n') - .map((line) => line.trim()) - .filter((line) => line.length > 0), - [preservePatternsInput] - ); - - const normalizedConfigContent = useMemo(() => { - const withPatterns = applyPreservePatterns(config, preservePatterns); - const withShellSetup = applyShellSetup(withPatterns, shellSetup); - const withTmux = applyTmux(withShellSetup, tmux); - const withScripts = applyScripts(withTmux, scripts); - return `${JSON.stringify(withScripts, null, 2)}\n`; - }, [config, preservePatterns, shellSetup, tmux, scripts]); - - const scriptsDirty = useMemo( - () => - scripts.setup !== originalScripts.setup || - scripts.run !== originalScripts.run || - scripts.teardown !== originalScripts.teardown || - preservePatternsInput !== originalPreservePatternsInput || - shellSetup !== originalShellSetup || - tmux !== originalTmux, - [ - originalShellSetup, - originalPreservePatternsInput, - originalScripts.run, - originalScripts.setup, - originalScripts.teardown, - originalTmux, - shellSetup, - preservePatternsInput, - scripts.run, - scripts.setup, - scripts.teardown, - tmux, - ] - ); - - const hasChanges = scriptsDirty; - - const loadConfig = useCallback(async () => { - setIsLoading(true); - setError(null); - setLoadFailed(false); - try { - let content: string; - - if (isRemote && sshConnectionId) { - const configPath = `${projectPath}/.emdash.json`; - try { - content = await window.electronAPI.sshReadFile(sshConnectionId, configPath); - } catch { - // File doesn't exist yet on remote — treat as 
empty config - content = '{}'; - } - } else { - const result = await window.electronAPI.getProjectConfig(projectPath); - if (!result.success || !result.content) { - throw new Error(result.error || 'Failed to load config'); - } - content = result.content; - } - - const parsed = ensureConfigObject(JSON.parse(content)); - const nextScripts = scriptsFromConfig(parsed); - const nextPreservePatterns = preservePatternsFromConfig(parsed); - const nextShellSetup = typeof parsed.shellSetup === 'string' ? parsed.shellSetup : ''; - const nextTmux = parsed.tmux === true; - setConfig(parsed); - setScripts(nextScripts); - setOriginalScripts(nextScripts); - setPreservePatternsInput(nextPreservePatterns.join('\n')); - setOriginalPreservePatternsInput(nextPreservePatterns.join('\n')); - setShellSetup(nextShellSetup); - setOriginalShellSetup(nextShellSetup); - setTmux(nextTmux); - setOriginalTmux(nextTmux); - } catch (err) { - setConfig({}); - setScripts({ ...EMPTY_SCRIPTS }); - setOriginalScripts({ ...EMPTY_SCRIPTS }); - setPreservePatternsInput(''); - setOriginalPreservePatternsInput(''); - setShellSetup(''); - setOriginalShellSetup(''); - setTmux(false); - setOriginalTmux(false); - setError(err instanceof Error ? 
err.message : 'Failed to load config'); - setLoadFailed(true); - } finally { - setIsLoading(false); - } - }, [projectPath, isRemote, sshConnectionId]); - - useEffect(() => { - if (!isOpen || !projectPath) return; - void loadConfig(); - }, [isOpen, loadConfig, projectPath]); - - const handleOpenChange = (open: boolean) => { - if (!open && isSaving) return; - if (!open) onClose(); - }; - - const handleScriptChange = - (key: keyof LifecycleScripts) => (event: React.ChangeEvent) => { - const value = event.target.value; - setScripts((prev) => ({ ...prev, [key]: value })); - setError(null); - }; - - const handleSave = useCallback(async () => { - setIsSaving(true); - setError(null); - try { - if (isRemote && sshConnectionId) { - const configPath = `${projectPath}/.emdash.json`; - await window.electronAPI.sshWriteFile(sshConnectionId, configPath, normalizedConfigContent); - } else { - const result = await window.electronAPI.saveProjectConfig( - projectPath, - normalizedConfigContent - ); - if (!result.success) { - throw new Error(result.error || 'Failed to save config'); - } - } - - const nextConfig = applyScripts( - applyTmux( - applyShellSetup(applyPreservePatterns(config, preservePatterns), shellSetup), - tmux - ), - scripts - ); - setConfig(nextConfig); - setOriginalScripts(scripts); - setOriginalPreservePatternsInput(preservePatternsInput); - setOriginalShellSetup(shellSetup); - setOriginalTmux(tmux); - onClose(); - } catch (err) { - setError(err instanceof Error ? err.message : 'Failed to save config'); - } finally { - setIsSaving(false); - } - }, [ - config, - isRemote, - normalizedConfigContent, - onClose, - shellSetup, - sshConnectionId, - preservePatternsInput, - preservePatterns, - projectPath, - scripts, - tmux, - ]); - - return ( - - - - Project config - - - {isLoading ? ( -
- -
- ) : loadFailed ? ( -
{error}
- ) : ( - <> - {error ? ( -
- {error} -
- ) : null} - -
-
- -