Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 13 additions & 5 deletions backend/.env.example
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
# Database Configuration
DB_HOST=localhost
DB_PORT=3306
DB_NAME=paycrypt
DB_USER=root
DB_PASSWORD=your_password
DB_PORT=5432
DB_NAME=taggedpay
DB_USER=taggedpay_user
DB_PASSWORD=your_secure_password

# Automated Backup Configuration
BACKUP_DIR=./backups
BACKUP_FILE_PREFIX=taggedpay
BACKUP_RETENTION_DAYS=7
BACKUP_SCHEDULE_CRON=0 2 * * *
PG_DUMP_PATH=pg_dump
PG_RESTORE_PATH=pg_restore

# JWT Configuration
JWT_SECRET=your_super_secret_jwt_key_here
Expand All @@ -18,4 +26,4 @@ FRONTEND_URL=pay-crypt-v2.vercel.app

RPC_URL=https://sepolia.infura.io/v3/YOUR_PROJECT_ID
PRIVATE_KEY=your_private_key
STARKNET_CONTRACT_ADDRESS=0x028add5d29f4aa3e4144ba1a85d509de6719e58cabe42cc72f58f46c6a84a785
STARKNET_CONTRACT_ADDRESS=0x028add5d29f4aa3e4144ba1a85d509de6719e58cabe42cc72f58f46c6a84a785
32 changes: 30 additions & 2 deletions backend/DATABASE_SETUP.md
Original file line number Diff line number Diff line change
Expand Up @@ -177,16 +177,44 @@ psql -U taggedpay_user -d taggedpay < backup.sql

### Automated Backups

Set up a cron job for daily backups:
The repository includes a production-oriented backup script at
[`scripts/backup.js`](./scripts/backup.js) that uses PostgreSQL's native
`pg_dump` custom format and verifies the resulting archive with `pg_restore`.

Configure the backup environment variables in `.env`:

```env
BACKUP_DIR=./backups
BACKUP_FILE_PREFIX=taggedpay
BACKUP_RETENTION_DAYS=7
BACKUP_SCHEDULE_CRON=0 2 * * *
PG_DUMP_PATH=pg_dump
PG_RESTORE_PATH=pg_restore
```

Run a manual backup:

```bash
npm run backup:db
```

Set up a cron job for daily backups on the host:

```bash
# Edit crontab
crontab -e

# Add daily backup at 2 AM
0 2 * * * pg_dump -U taggedpay_user -d taggedpay > /backups/taggedpay_$(date +\%Y\%m\%d).sql
0 2 * * * cd /path/to/payCrypt_v2/backend && npm run backup:db >> /var/log/taggedpay-backup.log 2>&1
```

Behavior:

- Backups are written as timestamped `.dump` files.
- New backups are verified before older backups are pruned.
- Only backups older than `BACKUP_RETENTION_DAYS` are deleted.
- Backup files are written with restrictive permissions where the host allows it.

## Monitoring

### Check Database Health
Expand Down
3 changes: 2 additions & 1 deletion backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
"scripts": {
"dev": "nodemon server.js",
"start": "node server.js",
"backup:db": "node scripts/backup.js",
"migrate": "knex migrate:latest && knex seed:run",
"migrate:rollback": "knex migrate:rollback",
"migrate:rollback:all": "knex migrate:rollback --all",
Expand Down Expand Up @@ -62,4 +63,4 @@
"nodemon": "^3.0.1",
"supertest": "^7.0.0"
}
}
}
261 changes: 261 additions & 0 deletions backend/scripts/backup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,261 @@
#!/usr/bin/env node

import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { execFile } from "node:child_process";
import dotenv from "dotenv";

// Magic bytes at the start of a PostgreSQL custom-format dump archive.
const DUMP_HEADER = "PGDMP";
// Fallbacks used when the corresponding BACKUP_* / PG_* env vars are unset.
const DEFAULT_BACKUP_PREFIX = "taggedpay";
const DEFAULT_RETENTION_DAYS = 7;
const DEFAULT_BACKUP_DIR = "backups";
const DEFAULT_PG_DUMP_PATH = "pg_dump";
const DEFAULT_PG_RESTORE_PATH = "pg_restore";

// Load .env so the DB_* and BACKUP_* variables are available on process.env.
dotenv.config();

// Absolute path of this module; used at the bottom of the file to detect
// direct CLI invocation (node scripts/backup.js) vs. being imported.
const __filename = fileURLToPath(import.meta.url);

/**
 * Format a Date as a compact UTC timestamp: YYYYMMDDTHHMMSSZ.
 *
 * @param {Date} [date] - Moment to format; defaults to now.
 * @returns {string} e.g. "20240131T020304Z"
 */
export function formatTimestamp(date = new Date()) {
  const pad = (value) => String(value).padStart(2, "0");
  return [
    date.getUTCFullYear(),
    pad(date.getUTCMonth() + 1),
    pad(date.getUTCDate()),
    "T",
    pad(date.getUTCHours()),
    pad(date.getUTCMinutes()),
    pad(date.getUTCSeconds()),
    "Z",
  ].join("");
}

/**
 * Compose the archive filename for a backup: `<prefix>_<timestamp>.dump`.
 *
 * @param {string} prefix - Configured backup file prefix.
 * @param {string} timestamp - Compact UTC timestamp (YYYYMMDDTHHMMSSZ).
 * @returns {string} the backup filename.
 */
export function buildBackupFilename(prefix, timestamp) {
  return [prefix, "_", timestamp, ".dump"].join("");
}

/**
 * Check whether a filename matches the backup naming pattern
 * `<prefix>_YYYYMMDDTHHMMSSZ.dump` for the given prefix.
 *
 * @param {string} filename - Candidate filename (basename only).
 * @param {string} prefix - Backup file prefix; regex metacharacters are escaped.
 * @returns {boolean} true when the name is a backup produced by this script.
 */
export function isBackupFilename(filename, prefix) {
  const escaped = prefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`^${escaped}_\\d{8}T\\d{6}Z\\.dump$`);
  return pattern.test(filename);
}

/**
 * Pull the compact UTC timestamp out of a backup filename.
 *
 * @param {string} filename - Candidate filename (basename only).
 * @param {string} prefix - Backup file prefix; regex metacharacters are escaped.
 * @returns {string|null} the YYYYMMDDTHHMMSSZ stamp, or null when the name
 *   does not match the backup pattern.
 */
export function extractTimestampFromFilename(filename, prefix) {
  const escaped = prefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`^${escaped}_(\\d{8}T\\d{6}Z)\\.dump$`);
  const match = pattern.exec(filename);
  return match === null ? null : match[1];
}

/**
 * Parse a string as a strictly positive base-10 integer.
 *
 * @param {string|undefined} value - Raw value (typically from the environment).
 * @param {number} fallback - Returned when value is missing or not a positive int.
 * @returns {number} the parsed integer, or the fallback.
 */
export function parsePositiveInteger(value, fallback) {
  const parsed = Number.parseInt(value ?? "", 10);
  if (Number.isInteger(parsed) && parsed > 0) {
    return parsed;
  }
  return fallback;
}

/**
 * Build the backup runtime configuration from environment variables.
 *
 * DB_HOST, DB_PORT, DB_NAME and DB_USER are required; everything else
 * falls back to a module default.
 *
 * @param {NodeJS.ProcessEnv} [env] - Environment to read; defaults to process.env.
 * @param {string} [cwd] - Base directory used to resolve a relative BACKUP_DIR.
 * @returns {object} resolved configuration consumed by the other backup helpers.
 * @throws {Error} when any required database variable is missing.
 */
export function resolveBackupConfig(env = process.env, cwd = process.cwd()) {
  const missingVars = [];
  for (const name of ["DB_HOST", "DB_PORT", "DB_NAME", "DB_USER"]) {
    if (!env[name]) {
      missingVars.push(name);
    }
  }

  if (missingVars.length > 0) {
    throw new Error(
      `Missing required database environment variables: ${missingVars.join(", ")}`
    );
  }

  // Retention must be a strictly positive integer; anything else falls back.
  const parsedRetention = Number.parseInt(env.BACKUP_RETENTION_DAYS ?? "", 10);
  const retentionDays =
    Number.isInteger(parsedRetention) && parsedRetention > 0
      ? parsedRetention
      : DEFAULT_RETENTION_DAYS;

  return {
    dbHost: env.DB_HOST,
    dbPort: String(env.DB_PORT),
    dbName: env.DB_NAME,
    dbUser: env.DB_USER,
    dbPassword: env.DB_PASSWORD || "",
    backupDir: path.resolve(cwd, env.BACKUP_DIR || DEFAULT_BACKUP_DIR),
    backupPrefix: env.BACKUP_FILE_PREFIX || DEFAULT_BACKUP_PREFIX,
    retentionDays,
    pgDumpPath: env.PG_DUMP_PATH || DEFAULT_PG_DUMP_PATH,
    pgRestorePath: env.PG_RESTORE_PATH || DEFAULT_PG_RESTORE_PATH,
    scheduleCron: env.BACKUP_SCHEDULE_CRON || "0 2 * * *",
  };
}

/**
 * Create the backup directory if needed and restrict it to the owner (0700).
 * The explicit chmod covers pre-existing directories, since mkdir's mode
 * only applies on creation.
 *
 * @param {string} backupDir - Absolute path of the directory to prepare.
 */
export async function ensureBackupDirectory(backupDir) {
  await fs.mkdir(backupDir, { mode: 0o700, recursive: true });
  await fs.chmod(backupDir, 0o700);
}

/**
 * Promise wrapper around child_process.execFile.
 *
 * Resolves with { stdout, stderr }; on failure the rejection error carries
 * the child's stdout/stderr so callers can log diagnostics.
 *
 * @param {string} command - Executable to run.
 * @param {string[]} args - Argument vector (not shell-interpreted).
 * @param {object} [options] - Options forwarded to execFile.
 * @returns {Promise<{stdout: string, stderr: string}>}
 */
export function execFileAsync(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    const onDone = (error, stdout, stderr) => {
      if (error) {
        error.stdout = stdout;
        error.stderr = stderr;
        reject(error);
      } else {
        resolve({ stdout, stderr });
      }
    };
    execFile(command, args, options, onDone);
  });
}

/**
 * Run pg_dump to produce a timestamped custom-format archive in the
 * configured backup directory.
 *
 * @param {object} config - Resolved backup configuration (see resolveBackupConfig).
 * @param {object} [options]
 * @param {Function} [options.execFileImpl] - Injectable command runner (tests).
 * @param {Date} [options.currentDate] - Timestamp source; defaults to now.
 * @param {number} [options.outputDirMode] - Mode applied to the backup directory.
 * @param {number} [options.outputFileMode] - Mode applied to the finished archive.
 * @returns {Promise<{filename: string, filePath: string, timestamp: string}>}
 */
export async function createBackup(config, options = {}) {
  const {
    execFileImpl: runCommand = execFileAsync,
    currentDate: backupDate = new Date(),
    outputDirMode: dirMode = 0o700,
    outputFileMode: fileMode = 0o600,
  } = options;

  const stamp = formatTimestamp(backupDate);
  const filename = buildBackupFilename(config.backupPrefix, stamp);
  const filePath = path.join(config.backupDir, filename);

  // Make sure the destination directory exists and is owner-only.
  await fs.mkdir(config.backupDir, { recursive: true, mode: dirMode });
  await fs.chmod(config.backupDir, dirMode);

  const dumpArgs = [
    "--format=custom",
    "--compress=6",
    "--no-owner",
    "--no-privileges",
    "--no-password",
    `--file=${filePath}`,
    `--host=${config.dbHost}`,
    `--port=${config.dbPort}`,
    `--username=${config.dbUser}`,
    config.dbName,
  ];

  // The password travels via PGPASSWORD so it never appears in argv.
  const childEnv = { ...process.env, PGPASSWORD: config.dbPassword };
  await runCommand(config.pgDumpPath, dumpArgs, { env: childEnv });

  // Tighten permissions on the finished archive.
  await fs.chmod(filePath, fileMode);

  return { filename, filePath, timestamp: stamp };
}

/**
 * Sanity-check a freshly written dump: it must be a non-empty regular file,
 * start with the "PGDMP" magic bytes, and survive a `pg_restore --list`
 * pass (which only succeeds on a readable custom-format archive).
 *
 * @param {string} filePath - Path of the dump to verify.
 * @param {object} [options]
 * @param {Function} [options.execFileImpl] - Injectable command runner (tests).
 * @param {string} [options.pgRestorePath] - pg_restore binary to invoke.
 * @returns {Promise<true>} resolves true when every check passes.
 * @throws {Error} when any check fails.
 */
export async function verifyBackupFile(filePath, options = {}) {
  const {
    execFileImpl: runCommand = execFileAsync,
    pgRestorePath = DEFAULT_PG_RESTORE_PATH,
  } = options;

  const info = await fs.stat(filePath);
  if (!info.isFile()) {
    throw new Error(`Backup path is not a file: ${filePath}`);
  }
  if (info.size === 0) {
    throw new Error(`Backup file is empty: ${filePath}`);
  }

  // Compare the first bytes against the custom-format magic header.
  const handle = await fs.open(filePath, "r");
  try {
    const header = Buffer.alloc(DUMP_HEADER.length);
    await handle.read(header, 0, DUMP_HEADER.length, 0);
    if (header.toString("utf8") !== DUMP_HEADER) {
      throw new Error(`Backup file does not look like a PostgreSQL custom dump: ${filePath}`);
    }
  } finally {
    await handle.close();
  }

  // Listing the archive's table of contents exercises pg_restore's parser.
  await runCommand(pgRestorePath, ["--list", filePath], {
    env: process.env,
  });

  return true;
}

/**
 * Delete backup archives older than the configured retention window.
 *
 * Only regular files matching the backup naming pattern are considered;
 * anything else in the directory is left untouched. Deletions happen in
 * oldest-first order.
 *
 * @param {object} config - Resolved backup configuration.
 * @param {object} [options]
 * @param {Date} [options.currentDate] - "Now" for the retention cutoff (tests).
 * @returns {Promise<string[]>} names of the files that were removed.
 */
export async function pruneBackups(config, options = {}) {
  const { currentDate = new Date() } = options;
  const retentionMs = config.retentionDays * 24 * 60 * 60 * 1000;
  const cutoffMs = currentDate.getTime() - retentionMs;

  const dirEntries = await fs.readdir(config.backupDir, { withFileTypes: true });

  const candidates = [];
  for (const entry of dirEntries) {
    if (!entry.isFile() || !isBackupFilename(entry.name, config.backupPrefix)) {
      continue;
    }
    const stamp = extractTimestampFromFilename(entry.name, config.backupPrefix);
    if (!stamp) {
      continue;
    }
    // Rebuild an ISO-8601 string from the compact YYYYMMDDTHHMMSSZ stamp.
    const iso = `${stamp.slice(0, 4)}-${stamp.slice(4, 6)}-${stamp.slice(6, 8)}T${stamp.slice(9, 11)}:${stamp.slice(11, 13)}:${stamp.slice(13, 15)}Z`;
    const createdAtMs = Date.parse(iso);
    if (Number.isFinite(createdAtMs)) {
      candidates.push({
        name: entry.name,
        filePath: path.join(config.backupDir, entry.name),
        createdAtMs,
      });
    }
  }

  candidates.sort((a, b) => a.createdAtMs - b.createdAtMs);

  const deleted = [];
  for (const candidate of candidates) {
    if (candidate.createdAtMs < cutoffMs) {
      await fs.unlink(candidate.filePath);
      deleted.push(candidate.name);
    }
  }

  return deleted;
}

/**
 * End-to-end backup run: resolve configuration, dump the database, verify
 * the archive, then prune expired backups. A dump that fails verification
 * is deleted before the error is rethrown, so an unverified archive is
 * never left on disk.
 *
 * @param {NodeJS.ProcessEnv} [env] - Environment to resolve config from.
 * @returns {Promise<object>} the created backup's metadata plus the list of
 *   pruned filenames under `deletedBackups`.
 * @throws {Error} when config is incomplete, pg_dump fails, or verification fails.
 */
export async function runBackup(env = process.env) {
  const config = resolveBackupConfig(env);

  console.log(
    `Starting database backup for ${config.dbName} to ${config.backupDir} (retention ${config.retentionDays} day(s))`
  );

  await ensureBackupDirectory(config.backupDir);
  const backup = await createBackup(config);

  try {
    await verifyBackupFile(backup.filePath, {
      pgRestorePath: config.pgRestorePath,
    });
  } catch (error) {
    // Never keep an archive we could not verify.
    await fs.rm(backup.filePath, { force: true });
    throw new Error(`Backup verification failed: ${error.message}`);
  }

  const deletedBackups = await pruneBackups(config);

  console.log(`Backup created successfully: ${backup.filename}`);
  const pruneMessage =
    deletedBackups.length > 0
      ? `Pruned ${deletedBackups.length} expired backup(s)`
      : "No expired backups to prune";
  console.log(pruneMessage);
  console.log(
    `Daily scheduling support: ${config.scheduleCron} (configure this in your host cron)`
  );

  return { ...backup, deletedBackups };
}

// Run immediately when invoked as a CLI (node scripts/backup.js), but not
// when this module is imported (e.g. by tests).
const invokedDirectly =
  process.argv[1] && path.resolve(process.argv[1]) === __filename;

if (invokedDirectly) {
  runBackup()
    .then(() => {
      process.exitCode = 0;
    })
    .catch((error) => {
      console.error(`Database backup failed: ${error.message}`);
      process.exitCode = 1;
    });
}
Loading