Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ use the tools — ingest meetings, answer queries, maintain the brain, enrich fr

- `src/core/engine.ts` — Pluggable engine interface (BrainEngine)
- `src/core/postgres-engine.ts` — Postgres + pgvector implementation
- `src/core/sqlite-engine.ts` — SQLite + FTS5 + vec0 implementation
- `src/sqlite-schema.sql` — SQLite DDL (FTS5 triggers, vec0 virtual table)
- `src/core/db.ts` — Connection management, schema initialization
- `src/core/import-file.ts` — Shared single-file import (used by import + sync)
- `src/core/sync.ts` — Pure sync functions (manifest parsing, filtering, slug conversion)
Expand Down
30 changes: 30 additions & 0 deletions Dockerfile.sqlite
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# GBrain SQLite engine with vec0 extension pre-installed.
# Usage:
# docker build -f Dockerfile.sqlite -t gbrain-sqlite .
# docker run -v ~/.gbrain:/root/.gbrain gbrain-sqlite <command>
#
# Examples:
# docker run -v ~/.gbrain:/root/.gbrain gbrain-sqlite init --sqlite
# docker run -v ~/.gbrain:/root/.gbrain -v /path/to/notes:/notes gbrain-sqlite import /notes
#
# For vector search with vec0, the extension is pre-installed at /usr/lib/vec0.so.
# Without Docker, download the binary for your platform from:
# https://github.com/asg017/sqlite-vec/releases

FROM oven/bun:1 AS base

# Install vec0 extension for vector search
# Note: update the URL to the latest release for your target platform
RUN apt-get update && apt-get install -y wget ca-certificates && \
wget -qO /usr/lib/vec0.so \
https://github.com/asg017/sqlite-vec/releases/latest/download/vec0-linux-x86_64.so && \
chmod +x /usr/lib/vec0.so && \
apt-get clean && rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY package.json bun.lock* ./
RUN bun install --frozen-lockfile

COPY . .

ENTRYPOINT ["bun", "run", "src/cli.ts"]
2 changes: 1 addition & 1 deletion docs/SQLITE_ENGINE.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# SQLite Engine Design

## Status: Designed, not built. Community PRs welcome.
## Status: Built. See `src/core/sqlite-engine.ts`.

The pluggable engine interface (`docs/ENGINES.md`) means anyone can add a SQLite backend without touching the CLI, MCP server, or skills. This document is the full plan.

Expand Down
12 changes: 8 additions & 4 deletions src/cli.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
#!/usr/bin/env bun

import { PostgresEngine } from './core/postgres-engine.ts';
import { SQLiteEngine } from './core/sqlite-engine.ts';
import { loadConfig, toEngineConfig } from './core/config.ts';
import type { BrainEngine } from './core/engine.ts';
import { VERSION } from './version.ts';

const COMMAND_HELP: Record<string, string> = {
init: 'Usage: gbrain init [--supabase|--url <conn>]\n\nCreate brain (guided wizard).',
init: 'Usage: gbrain init [--supabase|--sqlite|--url <conn>] [--path <db-file>]\n\nCreate brain (guided wizard).',
upgrade: 'Usage: gbrain upgrade\n\nSelf-update the CLI.\n\nDetects install method (bun, binary, clawhub) and runs the appropriate update.',
get: 'Usage: gbrain get <slug>\n\nRead a page by slug (supports fuzzy matching).',
put: 'Usage: gbrain put <slug> [< file.md]\n\nWrite or update a page from stdin.',
Expand Down Expand Up @@ -236,11 +237,14 @@ async function main() {
async function connectEngine(): Promise<BrainEngine> {
const config = loadConfig();
if (!config) {
console.error('No brain configured. Run: gbrain init --supabase');
console.error('No brain configured. Run: gbrain init --supabase or gbrain init --sqlite');
process.exit(1);
}

const engine = new PostgresEngine();
const engine = config.engine === 'sqlite'
? new SQLiteEngine()
: new PostgresEngine();

await engine.connect(toEngineConfig(config));
return engine;
}
Expand All @@ -252,7 +256,7 @@ USAGE
gbrain <command> [options]

SETUP
init [--supabase|--url <conn>] Create brain (guided wizard)
init [--supabase|--sqlite|--url] Create brain (guided wizard)
upgrade Self-update

PAGES
Expand Down
101 changes: 36 additions & 65 deletions src/commands/files.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,7 @@ import { readFileSync, readdirSync, statSync, existsSync } from 'fs';
import { join, relative, extname, basename } from 'path';
import { createHash } from 'crypto';
import type { BrainEngine } from '../core/engine.ts';
import * as db from '../core/db.ts';

interface FileRecord {
id: number;
page_slug: string | null;
filename: string;
storage_path: string;
storage_url: string;
mime_type: string | null;
size_bytes: number;
content_hash: string;
metadata: Record<string, unknown>;
created_at: string;
}
import type { FileInput } from '../core/types.ts';

const MIME_TYPES: Record<string, string> = {
'.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png',
Expand All @@ -42,16 +29,16 @@ export async function runFiles(engine: BrainEngine, args: string[]) {

switch (subcommand) {
case 'list':
await listFiles(args[1]);
await listFiles(engine, args[1]);
break;
case 'upload':
await uploadFile(args.slice(1));
await uploadFile(engine, args.slice(1));
break;
case 'sync':
await syncFiles(args[1]);
await syncFiles(engine, args[1]);
break;
case 'verify':
await verifyFiles();
await verifyFiles(engine);
break;
default:
console.error(`Usage: gbrain files <list|upload|sync|verify> [args]`);
Expand All @@ -63,14 +50,8 @@ export async function runFiles(engine: BrainEngine, args: string[]) {
}
}

async function listFiles(slug?: string) {
const sql = db.getConnection();
let rows;
if (slug) {
rows = await sql`SELECT * FROM files WHERE page_slug = ${slug} ORDER BY filename`;
} else {
rows = await sql`SELECT * FROM files ORDER BY page_slug, filename LIMIT 100`;
}
async function listFiles(engine: BrainEngine, slug?: string) {
const rows = await engine.getFiles(slug);

if (rows.length === 0) {
console.log(slug ? `No files for page: ${slug}` : 'No files stored.');
Expand All @@ -84,7 +65,7 @@ async function listFiles(slug?: string) {
}
}

async function uploadFile(args: string[]) {
async function uploadFile(engine: BrainEngine, args: string[]) {
const filePath = args.find(a => !a.startsWith('--'));
const pageSlug = args.find((a, i) => args[i - 1] === '--page') || null;

Expand All @@ -99,32 +80,30 @@ async function uploadFile(args: string[]) {
const storagePath = pageSlug ? `${pageSlug}/${filename}` : `unsorted/${hash.slice(0, 8)}-${filename}`;
const mimeType = getMimeType(filePath);

const sql = db.getConnection();

// Check for existing file by hash
const existing = await sql`SELECT id FROM files WHERE content_hash = ${hash} AND storage_path = ${storagePath}`;
if (existing.length > 0) {
const existing = await engine.findFileByHash(hash, storagePath);
if (existing) {
console.log(`File already uploaded (hash match): ${storagePath}`);
return;
}

// TODO: actual Supabase Storage upload goes here
// For now, record metadata in Postgres
const storageUrl = `https://storage.supabase.co/brain-files/${storagePath}`;

await sql`
INSERT INTO files (page_slug, filename, storage_path, storage_url, mime_type, size_bytes, content_hash, metadata)
VALUES (${pageSlug}, ${filename}, ${storagePath}, ${storageUrl}, ${mimeType}, ${stat.size}, ${hash}, ${'{}'}::jsonb)
ON CONFLICT (storage_path) DO UPDATE SET
content_hash = EXCLUDED.content_hash,
size_bytes = EXCLUDED.size_bytes,
mime_type = EXCLUDED.mime_type
`;
const file: FileInput = {
page_slug: pageSlug,
filename,
storage_path: storagePath,
storage_url: storageUrl,
mime_type: mimeType,
size_bytes: stat.size,
content_hash: hash,
};
await engine.upsertFile(file);

console.log(`Uploaded: ${storagePath} (${Math.round(stat.size / 1024)}KB)`);
}

async function syncFiles(dir?: string) {
async function syncFiles(engine: BrainEngine, dir?: string) {
if (!dir || !existsSync(dir)) {
console.error('Usage: gbrain files sync <directory>');
process.exit(1);
Expand All @@ -150,37 +129,34 @@ async function syncFiles(dir?: string) {
const mimeType = getMimeType(filePath);
const stat = statSync(filePath);

const sql = db.getConnection();
const existing = await sql`SELECT id FROM files WHERE content_hash = ${hash} AND storage_path = ${storagePath}`;
if (existing.length > 0) {
const existing = await engine.findFileByHash(hash, storagePath);
if (existing) {
skipped++;
continue;
}

// Infer page slug from directory structure
const pathParts = relativePath.split('/');
const pageSlug = pathParts.length > 1 ? pathParts.slice(0, -1).join('/') : null;

const storageUrl = `https://storage.supabase.co/brain-files/${storagePath}`;

await sql`
INSERT INTO files (page_slug, filename, storage_path, storage_url, mime_type, size_bytes, content_hash, metadata)
VALUES (${pageSlug}, ${filename}, ${storagePath}, ${storageUrl}, ${mimeType}, ${stat.size}, ${hash}, ${'{}'}::jsonb)
ON CONFLICT (storage_path) DO UPDATE SET
content_hash = EXCLUDED.content_hash,
size_bytes = EXCLUDED.size_bytes,
mime_type = EXCLUDED.mime_type
`;
await engine.upsertFile({
page_slug: pageSlug,
filename,
storage_path: storagePath,
storage_url: storageUrl,
mime_type: mimeType,
size_bytes: stat.size,
content_hash: hash,
});

uploaded++;
}

console.log(`\n\nFiles sync complete: ${uploaded} uploaded, ${skipped} skipped (unchanged)`);
}

async function verifyFiles() {
const sql = db.getConnection();
const rows = await sql`SELECT * FROM files ORDER BY storage_path`;
async function verifyFiles(engine: BrainEngine) {
const rows = await engine.getFiles();

if (rows.length === 0) {
console.log('No files to verify.');
Expand All @@ -189,11 +165,8 @@ async function verifyFiles() {

let verified = 0;
let mismatches = 0;
let missing = 0;

for (const row of rows) {
// Note: full verification would check Supabase Storage hash
// For now, verify the DB record exists and has valid data
if (!row.content_hash || !row.storage_path) {
mismatches++;
console.error(` MISMATCH: ${row.storage_path} (missing hash or path)`);
Expand All @@ -202,11 +175,10 @@ async function verifyFiles() {
}
}

if (mismatches === 0 && missing === 0) {
if (mismatches === 0) {
console.log(`${verified} files verified, 0 mismatches, 0 missing`);
} else {
console.error(`VERIFY FAILED: ${mismatches} mismatches, ${missing} missing.`);
console.error(`Run: gbrain files sync --retry-failed`);
console.error(`VERIFY FAILED: ${mismatches} mismatches.`);
process.exit(1);
}
}
Expand All @@ -224,7 +196,6 @@ function collectFiles(dir: string): string[] {
if (stat.isDirectory()) {
walk(full);
} else if (!entry.endsWith('.md')) {
// Non-markdown files are candidates for storage
files.push(full);
}
}
Expand Down
48 changes: 38 additions & 10 deletions src/commands/init.ts
Original file line number Diff line number Diff line change
@@ -1,40 +1,70 @@
import { execSync } from 'child_process';
import { PostgresEngine } from '../core/postgres-engine.ts';
import { saveConfig, type GBrainConfig } from '../core/config.ts';
import { SQLiteEngine } from '../core/sqlite-engine.ts';
import { saveConfig, getConfigDir, type GBrainConfig } from '../core/config.ts';
import { join } from 'path';

export async function runInit(args: string[]) {
const isSupabase = args.includes('--supabase');
const isSqlite = args.includes('--sqlite');
const urlIndex = args.indexOf('--url');
const manualUrl = urlIndex !== -1 ? args[urlIndex + 1] : null;
const pathIndex = args.indexOf('--path');
const manualPath = pathIndex !== -1 ? args[pathIndex + 1] : null;

if (isSqlite) {
await initSqlite(manualPath);
} else {
await initPostgres(manualUrl);
}
}

async function initSqlite(dbPath: string | null) {
const resolvedPath = dbPath || join(getConfigDir(), 'brain.db');
console.log(`Creating SQLite brain at ${resolvedPath}...`);

const engine = new SQLiteEngine();
await engine.connect({ engine: 'sqlite', database_path: resolvedPath });

console.log('Running schema migration...');
await engine.initSchema();

const config: GBrainConfig = {
engine: 'sqlite',
database_path: resolvedPath,
};
saveConfig(config);
console.log('Config saved to ~/.gbrain/config.json');

const stats = await engine.getStats();
await engine.disconnect();

console.log(`\nBrain ready. ${stats.page_count} pages.`);
console.log('Next: gbrain import <dir> to add your markdown.');
}

async function initPostgres(manualUrl: string | null) {
let databaseUrl: string;

if (manualUrl) {
databaseUrl = manualUrl;
} else if (isSupabase) {
databaseUrl = await supabaseWizard();
} else {
// Default to supabase wizard
databaseUrl = await supabaseWizard();
}

// Connect and init schema
console.log('Connecting to database...');
const engine = new PostgresEngine();
await engine.connect({ database_url: databaseUrl });

console.log('Running schema migration...');
await engine.initSchema();

// Save config
const config: GBrainConfig = {
engine: 'postgres',
database_url: databaseUrl,
};
saveConfig(config);
console.log('Config saved to ~/.gbrain/config.json');

// Verify
const stats = await engine.getStats();
await engine.disconnect();

Expand All @@ -43,7 +73,6 @@ export async function runInit(args: string[]) {
}

async function supabaseWizard(): Promise<string> {
// Try Supabase CLI auto-provision
try {
execSync('bunx supabase --version', { stdio: 'pipe' });
console.log('Supabase CLI detected.');
Expand All @@ -55,7 +84,6 @@ async function supabaseWizard(): Promise<string> {
console.log('Or provide a connection URL directly.');
}

// Fallback to manual URL
console.log('\nEnter your Supabase/Postgres connection URL:');
console.log(' Format: postgresql://user:password@host:port/database');
console.log(' Find it: Supabase Dashboard > Settings > Database > Connection string\n');
Expand Down
Loading