diff --git a/backend/.env.example b/backend/.env.example
index 7023d97..94fa9e9 100644
--- a/backend/.env.example
+++ b/backend/.env.example
@@ -1,9 +1,17 @@
 # Database Configuration
 DB_HOST=localhost
-DB_PORT=3306
-DB_NAME=paycrypt
-DB_USER=root
-DB_PASSWORD=your_password
+DB_PORT=5432
+DB_NAME=taggedpay
+DB_USER=taggedpay_user
+DB_PASSWORD=your_secure_password
+
+# Automated Backup Configuration
+BACKUP_DIR=./backups
+BACKUP_FILE_PREFIX=taggedpay
+BACKUP_RETENTION_DAYS=7
+BACKUP_SCHEDULE_CRON=0 2 * * *
+PG_DUMP_PATH=pg_dump
+PG_RESTORE_PATH=pg_restore
 
 # JWT Configuration
 JWT_SECRET=your_super_secret_jwt_key_here
@@ -18,4 +26,4 @@
 FRONTEND_URL=pay-crypt-v2.vercel.app
 RPC_URL=https://sepolia.infura.io/v3/YOUR_PROJECT_ID
 PRIVATE_KEY=your_private_key
-STARKNET_CONTRACT_ADDRESS=0x028add5d29f4aa3e4144ba1a85d509de6719e58cabe42cc72f58f46c6a84a785
\ No newline at end of file
+STARKNET_CONTRACT_ADDRESS=0x028add5d29f4aa3e4144ba1a85d509de6719e58cabe42cc72f58f46c6a84a785
diff --git a/backend/DATABASE_SETUP.md b/backend/DATABASE_SETUP.md
index 2f51120..81e5034 100644
--- a/backend/DATABASE_SETUP.md
+++ b/backend/DATABASE_SETUP.md
@@ -177,16 +177,44 @@
 psql -U taggedpay_user -d taggedpay < backup.sql
 
 ### Automated Backups
 
-Set up a cron job for daily backups:
+The repository now includes a production-oriented backup script at
+[`scripts/backup.js`](./scripts/backup.js) that uses PostgreSQL's native
+`pg_dump` custom format and verifies the resulting archive with `pg_restore`.
+
+Configure the backup environment variables in `.env`:
+
+```env
+BACKUP_DIR=./backups
+BACKUP_FILE_PREFIX=taggedpay
+BACKUP_RETENTION_DAYS=7
+BACKUP_SCHEDULE_CRON=0 2 * * *
+PG_DUMP_PATH=pg_dump
+PG_RESTORE_PATH=pg_restore
+```
+
+Run a manual backup:
+
+```bash
+npm run backup:db
+```
+
+Set up a cron job for daily backups on the host:
 ```bash
 # Edit crontab
 crontab -e
 
 # Add daily backup at 2 AM
-0 2 * * * pg_dump -U taggedpay_user -d taggedpay > /backups/taggedpay_$(date +\%Y\%m\%d).sql
+0 2 * * * cd /path/to/payCrypt_v2/backend && npm run backup:db >> /var/log/taggedpay-backup.log 2>&1
 ```
 
+Behavior:
+
+- Backups are written as timestamped `.dump` files.
+- New backups are verified before older backups are pruned.
+- Only backups older than `BACKUP_RETENTION_DAYS` are deleted.
+- Backup files are written with restrictive permissions where the host allows it.
+
 ## Monitoring
 
 ### Check Database Health
diff --git a/backend/package.json b/backend/package.json
index 6fe61ff..fff4c70 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -7,6 +7,7 @@
   "scripts": {
     "dev": "nodemon server.js",
     "start": "node server.js",
+    "backup:db": "node scripts/backup.js",
     "migrate": "knex migrate:latest && knex seed:run",
     "migrate:rollback": "knex migrate:rollback",
     "migrate:rollback:all": "knex migrate:rollback --all",
@@ -62,4 +63,4 @@
     "nodemon": "^3.0.1",
     "supertest": "^7.0.0"
   }
-}
\ No newline at end of file
+}
diff --git a/backend/scripts/backup.js b/backend/scripts/backup.js
new file mode 100644
index 0000000..cf450f8
--- /dev/null
+++ b/backend/scripts/backup.js
@@ -0,0 +1,261 @@
+#!/usr/bin/env node
+
+import { promises as fs } from "node:fs";
+import path from "node:path";
+import { fileURLToPath } from "node:url";
+import { execFile } from "node:child_process";
+import dotenv from "dotenv";
+
+const DUMP_HEADER = "PGDMP";
+const DEFAULT_BACKUP_PREFIX = "taggedpay";
+const DEFAULT_RETENTION_DAYS = 7;
+const DEFAULT_BACKUP_DIR = "backups";
+const DEFAULT_PG_DUMP_PATH = "pg_dump";
+const DEFAULT_PG_RESTORE_PATH = "pg_restore";
+
+dotenv.config();
+
+const __filename = fileURLToPath(import.meta.url);
+
+// Format a Date as a UTC, filename-safe timestamp: YYYYMMDDTHHMMSSZ.
+export function formatTimestamp(date = new Date()) {
+  const year = date.getUTCFullYear();
+  const month = String(date.getUTCMonth() + 1).padStart(2, "0");
+  const day = String(date.getUTCDate()).padStart(2, "0");
+  const hours = String(date.getUTCHours()).padStart(2, "0");
+  const minutes = String(date.getUTCMinutes()).padStart(2, "0");
+  const seconds = String(date.getUTCSeconds()).padStart(2, "0");
+  return `${year}${month}${day}T${hours}${minutes}${seconds}Z`;
+}
+
+export function buildBackupFilename(prefix, timestamp) {
+  return `${prefix}_${timestamp}.dump`;
+}
+
+export function isBackupFilename(filename, prefix) {
+  const escapedPrefix = prefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+  return new RegExp(`^${escapedPrefix}_(\\d{8}T\\d{6}Z)\\.dump$`).test(filename);
+}
+
+export function extractTimestampFromFilename(filename, prefix) {
+  const escapedPrefix = prefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+  const match = filename.match(
+    new RegExp(`^${escapedPrefix}_(\\d{8}T\\d{6}Z)\\.dump$`)
+  );
+  return match ? match[1] : null;
+}
+
+export function parsePositiveInteger(value, fallback) {
+  const parsed = Number.parseInt(value ?? "", 10);
+  return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
+}
+
+// Build the backup configuration from environment variables; throws when a
+// required database variable is absent. DB_PASSWORD may be empty (peer auth).
+export function resolveBackupConfig(env = process.env, cwd = process.cwd()) {
+  const missingVars = ["DB_HOST", "DB_PORT", "DB_NAME", "DB_USER"].filter(
+    (key) => !env[key]
+  );
+
+  if (missingVars.length > 0) {
+    throw new Error(
+      `Missing required database environment variables: ${missingVars.join(", ")}`
+    );
+  }
+
+  return {
+    dbHost: env.DB_HOST,
+    dbPort: String(env.DB_PORT),
+    dbName: env.DB_NAME,
+    dbUser: env.DB_USER,
+    dbPassword: env.DB_PASSWORD || "",
+    backupDir: path.resolve(cwd, env.BACKUP_DIR || DEFAULT_BACKUP_DIR),
+    backupPrefix: env.BACKUP_FILE_PREFIX || DEFAULT_BACKUP_PREFIX,
+    retentionDays: parsePositiveInteger(
+      env.BACKUP_RETENTION_DAYS,
+      DEFAULT_RETENTION_DAYS
+    ),
+    pgDumpPath: env.PG_DUMP_PATH || DEFAULT_PG_DUMP_PATH,
+    pgRestorePath: env.PG_RESTORE_PATH || DEFAULT_PG_RESTORE_PATH,
+    scheduleCron: env.BACKUP_SCHEDULE_CRON || "0 2 * * *",
+  };
+}
+
+export async function ensureBackupDirectory(backupDir) {
+  await fs.mkdir(backupDir, { recursive: true, mode: 0o700 });
+  await fs.chmod(backupDir, 0o700);
+}
+
+// Promise wrapper around child_process.execFile that attaches stdout/stderr
+// to the error object for diagnostics.
+export function execFileAsync(command, args, options = {}) {
+  return new Promise((resolve, reject) => {
+    execFile(command, args, options, (error, stdout, stderr) => {
+      if (error) {
+        error.stdout = stdout;
+        error.stderr = stderr;
+        reject(error);
+        return;
+      }
+
+      resolve({ stdout, stderr });
+    });
+  });
+}
+
+// Run pg_dump (custom format) into a timestamped file with restrictive
+// permissions. The password is passed via PGPASSWORD, never on the CLI.
+export async function createBackup(config, options = {}) {
+  const {
+    execFileImpl = execFileAsync,
+    currentDate = new Date(),
+    outputDirMode = 0o700,
+    outputFileMode = 0o600,
+  } = options;
+
+  const timestamp = formatTimestamp(currentDate);
+  const filename = buildBackupFilename(config.backupPrefix, timestamp);
+  const filePath = path.join(config.backupDir, filename);
+
+  await fs.mkdir(config.backupDir, { recursive: true, mode: outputDirMode });
+  await fs.chmod(config.backupDir, outputDirMode);
+
+  const args = [
+    "--format=custom",
+    "--compress=6",
+    "--no-owner",
+    "--no-privileges",
+    "--no-password",
+    `--file=${filePath}`,
+    `--host=${config.dbHost}`,
+    `--port=${config.dbPort}`,
+    `--username=${config.dbUser}`,
+    config.dbName,
+  ];
+
+  await execFileImpl(config.pgDumpPath, args, {
+    env: {
+      ...process.env,
+      PGPASSWORD: config.dbPassword,
+    },
+  });
+
+  await fs.chmod(filePath, outputFileMode);
+
+  return {
+    filename,
+    filePath,
+    timestamp,
+  };
+}
+
+// Sanity-check a dump: non-empty regular file, "PGDMP" magic header, and a
+// successful `pg_restore --list` pass over the archive.
+export async function verifyBackupFile(filePath, options = {}) {
+  const { execFileImpl = execFileAsync, pgRestorePath = DEFAULT_PG_RESTORE_PATH } =
+    options;
+
+  const stat = await fs.stat(filePath);
+  if (!stat.isFile()) {
+    throw new Error(`Backup path is not a file: ${filePath}`);
+  }
+
+  if (stat.size === 0) {
+    throw new Error(`Backup file is empty: ${filePath}`);
+  }
+
+  const handle = await fs.open(filePath, "r");
+  try {
+    const buffer = Buffer.alloc(DUMP_HEADER.length);
+    await handle.read(buffer, 0, DUMP_HEADER.length, 0);
+
+    if (buffer.toString("utf8") !== DUMP_HEADER) {
+      throw new Error(`Backup file does not look like a PostgreSQL custom dump: ${filePath}`);
+    }
+  } finally {
+    await handle.close();
+  }
+
+  await execFileImpl(pgRestorePath, ["--list", filePath], {
+    env: process.env,
+  });
+
+  return true;
+}
+
+// Delete backups whose embedded timestamp is older than the retention window;
+// non-backup files and unparsable names are left untouched.
+export async function pruneBackups(config, options = {}) {
+  const { currentDate = new Date() } = options;
+  const entries = await fs.readdir(config.backupDir, { withFileTypes: true });
+  const cutoff = currentDate.getTime() - config.retentionDays * 24 * 60 * 60 * 1000;
+
+  const backups = entries
+    .filter((entry) => entry.isFile() && isBackupFilename(entry.name, config.backupPrefix))
+    .map((entry) => {
+      const timestamp = extractTimestampFromFilename(entry.name, config.backupPrefix);
+      const parsed = timestamp
+        ? Date.parse(
+            `${timestamp.slice(0, 4)}-${timestamp.slice(4, 6)}-${timestamp.slice(6, 8)}T${timestamp.slice(9, 11)}:${timestamp.slice(11, 13)}:${timestamp.slice(13, 15)}Z`
+          )
+        : Number.NaN;
+
+      return {
+        name: entry.name,
+        filePath: path.join(config.backupDir, entry.name),
+        createdAtMs: parsed,
+      };
+    })
+    .filter((backup) => Number.isFinite(backup.createdAtMs))
+    .sort((left, right) => left.createdAtMs - right.createdAtMs);
+
+  const deleted = [];
+  for (const backup of backups) {
+    if (backup.createdAtMs >= cutoff) {
+      continue;
+    }
+
+    await fs.unlink(backup.filePath);
+    deleted.push(backup.name);
+  }
+
+  return deleted;
+}
+
+// End-to-end backup run: create, verify (deleting the file on failure), then
+// prune expired archives. Verification happens BEFORE pruning so old backups
+// are never removed in favour of a corrupt new one.
+export async function runBackup(env = process.env) {
+  const config = resolveBackupConfig(env);
+
+  console.log(
+    `Starting database backup for ${config.dbName} to ${config.backupDir} (retention ${config.retentionDays} day(s))`
+  );
+
+  await ensureBackupDirectory(config.backupDir);
+
+  const backup = await createBackup(config);
+
+  try {
+    await verifyBackupFile(backup.filePath, {
+      pgRestorePath: config.pgRestorePath,
+    });
+  } catch (error) {
+    await fs.rm(backup.filePath, { force: true });
+    throw new Error(`Backup verification failed: ${error.message}`);
+  }
+
+  const deletedBackups = await pruneBackups(config);
+
+  console.log(`Backup created successfully: ${backup.filename}`);
+  if (deletedBackups.length > 0) {
+    console.log(`Pruned ${deletedBackups.length} expired backup(s)`);
+  } else {
+    console.log("No expired backups to prune");
+  }
+  console.log(
+    `Daily scheduling support: ${config.scheduleCron} (configure this in your host cron)`
+  );
+
+  return {
+    ...backup,
+    deletedBackups,
+  };
+}
+
+// CLI entry point: only run when executed directly (not when imported).
+if (process.argv[1] && path.resolve(process.argv[1]) === __filename) {
+  runBackup()
+    .then(() => {
+      process.exitCode = 0;
+    })
+    .catch((error) => {
+      console.error(`Database backup failed: ${error.message}`);
+      process.exitCode = 1;
+    });
+}
diff --git a/backend/tests/backup.test.js b/backend/tests/backup.test.js
new file mode 100644
index 0000000..cadb2e5
--- /dev/null
+++ b/backend/tests/backup.test.js
@@ -0,0 +1,189 @@
+import os from "node:os";
+import path from "node:path";
+import { promises as fs } from "node:fs";
+import { describe, it, expect, beforeEach, afterEach, jest } from "@jest/globals";
+
+import {
+  formatTimestamp,
+  buildBackupFilename,
+  resolveBackupConfig,
+  verifyBackupFile,
+  pruneBackups,
+  createBackup,
+} from "../scripts/backup.js";
+
+describe("backup helpers", () => {
+  let tempDir;
+
+  beforeEach(async () => {
+    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "paycrypt-backup-test-"));
+  });
+
+  afterEach(async () => {
+    if (tempDir) {
+      await fs.rm(tempDir, { recursive: true, force: true });
+    }
+  });
+
+  it("formats timestamps in UTC filename-safe format", () => {
+    expect(formatTimestamp(new Date("2026-03-24T10:11:12.000Z"))).toBe(
+      "20260324T101112Z"
+    );
+  });
+
+  it("builds timestamped backup filenames", () => {
+    expect(buildBackupFilename("taggedpay", "20260324T101112Z")).toBe(
+      "taggedpay_20260324T101112Z.dump"
+    );
+  });
+
+  it("resolves backup config from environment", () => {
+    const config = resolveBackupConfig(
+      {
+        DB_HOST: "localhost",
+        DB_PORT: "5432",
+        DB_NAME: "taggedpay",
+        DB_USER: "taggedpay_user",
+        DB_PASSWORD: "secret",
+        BACKUP_DIR: "./custom-backups",
+        BACKUP_RETENTION_DAYS: "14",
+        BACKUP_FILE_PREFIX: "prod",
+        PG_DUMP_PATH: "/usr/bin/pg_dump",
+        PG_RESTORE_PATH: "/usr/bin/pg_restore",
+      },
+      tempDir
+    );
+
+    expect(config.backupDir).toBe(path.join(tempDir, "custom-backups"));
+    expect(config.retentionDays).toBe(14);
+    expect(config.backupPrefix).toBe("prod");
+    expect(config.pgDumpPath).toBe("/usr/bin/pg_dump");
+    expect(config.pgRestorePath).toBe("/usr/bin/pg_restore");
+  });
+
+  it("rejects missing required database config", () => {
+    expect(() =>
+      resolveBackupConfig({
+        DB_HOST: "localhost",
+        DB_PORT: "5432",
+      })
+    ).toThrow("Missing required database environment variables");
+  });
+
+  it("verifies a valid custom-format backup file", async () => {
+    const backupPath = path.join(tempDir, "valid.dump");
+    await fs.writeFile(backupPath, Buffer.from("PGDMPmock-backup-data"));
+
+    const execFileImpl = jest.fn().mockResolvedValue({
+      stdout: "archive contents",
+      stderr: "",
+    });
+
+    await expect(
+      verifyBackupFile(backupPath, {
+        execFileImpl,
+        pgRestorePath: "pg_restore",
+      })
+    ).resolves.toBe(true);
+
+    expect(execFileImpl).toHaveBeenCalledWith(
+      "pg_restore",
+      ["--list", backupPath],
+      expect.objectContaining({
+        env: process.env,
+      })
+    );
+  });
+
+  it("rejects empty backup files", async () => {
+    const backupPath = path.join(tempDir, "empty.dump");
+    await fs.writeFile(backupPath, "");
+
+    await expect(verifyBackupFile(backupPath)).rejects.toThrow("Backup file is empty");
+  });
+
+  it("rejects invalid backup headers", async () => {
+    const backupPath = path.join(tempDir, "invalid.dump");
+    await fs.writeFile(backupPath, "not-a-postgres-backup");
+
+    await expect(verifyBackupFile(backupPath)).rejects.toThrow(
+      "does not look like a PostgreSQL custom dump"
+    );
+  });
+
+  it("prunes only backups older than retention policy", async () => {
+    const recentName = "taggedpay_20260323T000000Z.dump";
+    const expiredName = "taggedpay_20260310T000000Z.dump";
+    const ignoredName = "notes.txt";
+
+    await fs.writeFile(path.join(tempDir, recentName), "recent");
+    await fs.writeFile(path.join(tempDir, expiredName), "expired");
+    await fs.writeFile(path.join(tempDir, ignoredName), "ignore me");
+
+    const deleted = await pruneBackups(
+      {
+        backupDir: tempDir,
+        backupPrefix: "taggedpay",
+        retentionDays: 7,
+      },
+      {
+        currentDate: new Date("2026-03-24T00:00:00.000Z"),
+      }
+    );
+
+    expect(deleted).toEqual([expiredName]);
+    await expect(fs.access(path.join(tempDir, recentName))).resolves.toBeUndefined();
+    await expect(fs.access(path.join(tempDir, ignoredName))).resolves.toBeUndefined();
+    await expect(fs.access(path.join(tempDir, expiredName))).rejects.toThrow();
+  });
+
+  it("creates a backup file with a timestamped filename and secure permissions", async () => {
+    const config = {
+      dbHost: "localhost",
+      dbPort: "5432",
+      dbName: "taggedpay",
+      dbUser: "taggedpay_user",
+      dbPassword: "secret",
+      backupDir: tempDir,
+      backupPrefix: "taggedpay",
+      retentionDays: 7,
+      pgDumpPath: "pg_dump",
+    };
+
+    const execFileImpl = jest.fn(async (command, args) => {
+      const fileArg = args.find((arg) => arg.startsWith("--file="));
+      const outputPath = fileArg.replace("--file=", "");
+      await fs.writeFile(outputPath, Buffer.from("PGDMPmock-backup-data"));
+      return { stdout: "", stderr: "" };
+    });
+
+    const backup = await createBackup(config, {
+      execFileImpl,
+      currentDate: new Date("2026-03-24T10:11:12.000Z"),
+    });
+
+    expect(backup.filename).toBe("taggedpay_20260324T101112Z.dump");
+    expect(execFileImpl).toHaveBeenCalledWith(
+      "pg_dump",
+      expect.arrayContaining([
+        "--format=custom",
+        "--compress=6",
+        "--no-owner",
+        "--no-privileges",
+        "--no-password",
+        "--host=localhost",
+        "--port=5432",
+        "--username=taggedpay_user",
+        "taggedpay",
+      ]),
+      expect.objectContaining({
+        env: expect.objectContaining({
+          PGPASSWORD: "secret",
+        }),
+      })
+    );
+
+    const stat = await fs.stat(backup.filePath);
+    expect(stat.size).toBeGreaterThan(0);
+  });
+});