Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 82 additions & 0 deletions config/rclone/rclone.conf.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# =============================================================================
# Rclone Configuration Example
# =============================================================================
# This is an example rclone configuration file.
# Copy this to config/rclone/rclone.conf on your host and customize it.
#
# Generate a new config with: rclone config
# Docs: https://rclone.org/docs/
# =============================================================================

# -----------------------------------------------------------------------------
# S3-Compatible Storage (AWS S3, MinIO, Backblaze B2, etc.)
# -----------------------------------------------------------------------------
[s3-backup]
type = s3
provider = AWS
access_key_id = YOUR_AWS_ACCESS_KEY
secret_access_key = YOUR_AWS_SECRET_KEY
region = us-east-1
acl = private
# NOTE: the s3 backend has no "bucket" option — the bucket is part of the
# remote path instead, e.g.: rclone sync /data s3-backup:your-backup-bucket
# For MinIO or other S3-compatible services:
# provider = Other
# endpoint = https://your-minio-server:9000
# access_key_id = YOUR_MINIO_ACCESS_KEY
# secret_access_key = YOUR_MINIO_SECRET_KEY

# -----------------------------------------------------------------------------
# Google Drive
# -----------------------------------------------------------------------------
[gdrive-backup]
type = drive
client_id = YOUR_GOOGLE_CLIENT_ID
client_secret = YOUR_GOOGLE_CLIENT_SECRET
# "token" is written by "rclone config" after the OAuth flow — do not hand-edit.
token = {"access_token":"YOUR_ACCESS_TOKEN","token_type":"Bearer","refresh_token":"YOUR_REFRESH_TOKEN","expiry":"YOUR_EXPIRY_TIME"}
root_folder_id = YOUR_ROOT_FOLDER_ID
# NOTE: "drive_id" is not a drive-backend option (that key belongs to the
# onedrive backend). For a shared drive / team drive use:
# team_drive = YOUR_TEAM_DRIVE_ID

# -----------------------------------------------------------------------------
# OneDrive
# -----------------------------------------------------------------------------
[onedrive-backup]
type = onedrive
client_id = YOUR_ONEDRIVE_CLIENT_ID
client_secret = YOUR_ONEDRIVE_CLIENT_SECRET
# NOTE(review): a working onedrive remote also needs "token" and "drive_id",
# which "rclone config" fills in after the OAuth flow — confirm against the
# rclone onedrive docs; also verify "tenant" is a supported key.
tenant = common
drive_type = personal
# For OneDrive for Business:
# drive_type = business

# -----------------------------------------------------------------------------
# Backblaze B2
# -----------------------------------------------------------------------------
[b2-backup]
type = b2
account = YOUR_B2_ACCOUNT_ID
key = YOUR_B2_APPLICATION_KEY
# NOTE: the b2 backend has no "bucket" option — the bucket is part of the
# remote path instead, e.g.: rclone sync /data b2-backup:your-b2-bucket
# NOTE: "encryption = AES-256" is not an rclone option; for client-side
# encryption wrap this remote in a "crypt" remote (rclone config).

# -----------------------------------------------------------------------------
# Mega.nz
# -----------------------------------------------------------------------------
[mega-backup]
type = mega
user = your@mega.email
# NOTE: rclone expects an OBSCURED password here, not plaintext — generate it
# with: rclone obscure 'YOUR_MEGA_PASSWORD'
password = YOUR_MEGA_PASSWORD

# -----------------------------------------------------------------------------
# SFTP/SSH
# -----------------------------------------------------------------------------
[sftp-backup]
type = sftp
host = your-sftp-server.com
user = your-username
port = 22
key_file = /path/to/private/key
# NOTE: key_file_pass must be an OBSCURED value ("rclone obscure ..."),
# not the plaintext passphrase.
# key_file_pass = YOUR_KEY_PASSWORD
known_hosts_file = ~/.ssh/known_hosts
231 changes: 231 additions & 0 deletions scripts/backup-to-cloud.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
#!/bin/bash
# =============================================================================
# HomeLab Stack — Backup to Cloud Script
# =============================================================================
# Syncs local backups from /opt/homelab-backups to cloud storage via Rclone
#
# Usage:
# ./scripts/backup-to-cloud.sh # Interactive mode
# ./scripts/backup-to-cloud.sh --dry-run # Test without making changes
# ./scripts/backup-to-cloud.sh s3-backup # Sync to specific remote
# ./scripts/backup-to-cloud.sh gdrive-backup # Sync to Google Drive
#
# Cron example (daily at 2 AM):
# 0 2 * * * /opt/homelab-stack/scripts/backup-to-cloud.sh >> /var/log/backup-to-cloud.log 2>&1
#
# Payment: https://lll.io/bounty/0xaae0101ac77a2e4e0ea826eb4d309374f029b0a6
# NOTE(review): unexplained third-party payment/bounty link in a backup
# script header — verify provenance before merging.
# =============================================================================

# Abort on any error, on use of an unset variable, and on a failure
# anywhere inside a pipeline.
set -euo pipefail

# Configuration
# Each setting can be overridden from the environment (VAR=... ./script).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repository root (parent directory of scripts/).
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
# Directory whose contents are synced to the cloud remote.
BACKUP_SOURCE="${BACKUP_SOURCE:-/opt/homelab-backups}"
RCLONE_CONFIG="${RCLONE_CONFIG:-$ROOT_DIR/config/rclone/rclone.conf}"
# NOTE(review): default lives under /var/log — tee in the log_* helpers will
# fail for non-root users; presumably this is meant to run as root (cron).
LOG_FILE="${LOG_FILE:-/var/log/backup-to-cloud.log}"

# Colors
# ANSI escape sequences used by the log_* helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Logging functions
# Each prints a colored, tagged message and appends it to $LOG_FILE.
log_info() {
    # "|| true" keeps an unwritable LOG_FILE from aborting the whole script:
    # under "set -eo pipefail" a failed tee would otherwise kill the run.
    echo -e "${BLUE}[INFO]${NC} $1" | tee -a "$LOG_FILE" || true
}

# Success message (green) to stdout and the log file; never aborts the script
# even if LOG_FILE is unwritable (set -eo pipefail would otherwise exit).
log_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1" | tee -a "$LOG_FILE" || true
}

# Warning message (yellow) to stdout and the log file; never aborts the script
# even if LOG_FILE is unwritable (set -eo pipefail would otherwise exit).
log_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1" | tee -a "$LOG_FILE" || true
}

# Error message (red) to stderr and the log file; never aborts the script
# even if LOG_FILE is unwritable (set -eo pipefail would otherwise exit).
log_error() {
    echo -e "${RED}[ERROR]${NC} $1" | tee -a "$LOG_FILE" >&2 || true
}

# Show usage
# Print the help text to stdout; $(basename "$0") resolves to the script name.
usage() {
    cat << EOF
Usage: $(basename "$0") [OPTIONS] [REMOTE]

Sync local backups to cloud storage using Rclone.

Arguments:
  REMOTE           Rclone remote name to sync to (e.g., s3-backup, gdrive-backup)
                   If not provided, lists available remotes.

Options:
  --dry-run        Show what would be transferred without making changes
  --verbose        Show detailed output
  --help           Show this help message

Examples:
  $(basename "$0")                        # List available remotes
  $(basename "$0") --dry-run s3-backup    # Test sync to S3
  $(basename "$0") gdrive-backup          # Sync to Google Drive
  $(basename "$0") b2-backup              # Sync to Backblaze B2

Environment Variables:
  BACKUP_SOURCE    Source directory (default: /opt/homelab-backups)
  RCLONE_CONFIG    Path to rclone.conf (default: ./config/rclone/rclone.conf)
  LOG_FILE         Log file path (default: /var/log/backup-to-cloud.log)

EOF
}

# Check prerequisites
# Verify rclone is installed, its config file exists, and the backup source
# directory is present (created on the fly if missing).
check_prereqs() {
    command -v rclone &> /dev/null || {
        log_error "rclone not found. Please install rclone first."
        log_info "Install: https://rclone.org/install/"
        exit 1
    }

    [ -f "$RCLONE_CONFIG" ] || {
        log_error "Rclone config not found at: $RCLONE_CONFIG"
        log_info "Please copy config/rclone/rclone.conf.example to config/rclone/rclone.conf"
        exit 1
    }

    [ -d "$BACKUP_SOURCE" ] || {
        log_warning "Backup source directory does not exist: $BACKUP_SOURCE"
        log_info "Creating directory..."
        mkdir -p "$BACKUP_SOURCE"
    }
}

# List available remotes
# Print each remote defined in RCLONE_CONFIG, one per line, as " - name".
list_remotes() {
    log_info "Available Rclone remotes:"
    # rclone listremotes emits one "name:" per line; strip the trailing colon.
    # BUG FIX: the original stripped "/" (${remote%/}), which listremotes
    # never emits, so names were printed with a dangling colon.
    rclone listremotes --config "$RCLONE_CONFIG" 2>/dev/null | while read -r remote; do
        echo " - ${remote%:}"
    done
}

# List available remotes and exit
# Convenience path for invocation with no REMOTE argument: validate the
# environment, print the configured targets plus a usage hint, then exit 0.
show_remotes_and_exit() {
    check_prereqs
    printf '\n'
    log_info "Available cloud backup targets:"
    list_remotes
    printf '\n'
    log_info "Usage: $(basename "$0") [remote-name]"
    exit 0
}

# Sync backup to remote
# $1 — rclone remote name (without trailing colon). Honors DRY_RUN/VERBOSE
# set by main(). Exits 1 if the remote is unknown or the sync fails, 0 if
# the source directory is empty (nothing to do).
sync_to_remote() {
    local remote="$1"
    local dry_run="${DRY_RUN:-}"
    local verbose="${VERBOSE:-}"

    # Validate remote exists
    if ! rclone listremotes --config "$RCLONE_CONFIG" | grep -q "^${remote}:$"; then
        log_error "Remote '$remote' not found in rclone config."
        log_info "Available remotes:"
        list_remotes
        exit 1
    fi

    # Check source has content
    if [ -z "$(ls -A "$BACKUP_SOURCE" 2>/dev/null)" ]; then
        log_warning "Backup source directory is empty: $BACKUP_SOURCE"
        log_info "Nothing to sync."
        exit 0
    fi

    # Build the rclone command as an argv array instead of an eval'd string:
    # immune to word-splitting/quoting bugs in paths containing spaces.
    local -a cmd=(rclone sync "$BACKUP_SOURCE" "${remote}:" --config "$RCLONE_CONFIG")

    [ -n "$dry_run" ] && cmd+=(--dry-run)
    # BUG FIX: the original appended both --verbose and -v (the same flag
    # twice, silently bumping rclone to -vv); pass it exactly once.
    [ -n "$verbose" ] && cmd+=(--verbose)

    cmd+=(--log-file "$LOG_FILE")
    cmd+=(--log-level INFO)
    cmd+=(--stats 1s)
    cmd+=(--stats-one-line)
    cmd+=(--transfers 4)
    cmd+=(--checkers 8)
    cmd+=(--bwlimit 10M)
    cmd+=(--exclude "*.tmp")
    cmd+=(--exclude "*.part")
    cmd+=(--exclude ".*") # Exclude hidden files

    echo ""
    log_info "Starting backup sync to ${remote}:"
    log_info " Source: $BACKUP_SOURCE"
    log_info " Remote: ${remote}:"
    log_info " Config: $RCLONE_CONFIG"
    [ -n "$dry_run" ] && log_warning "DRY-RUN MODE - No changes will be made"
    echo ""

    # Execute sync
    if "${cmd[@]}"; then
        log_success "Backup sync completed successfully!"

        # Show remote storage usage (best-effort: requires jq, and not every
        # backend implements "rclone about").
        if [ -z "$dry_run" ]; then
            echo ""
            log_info "Sync summary:"
            rclone about "${remote}:" --json 2>/dev/null | jq -r '.used, .free, .total' 2>/dev/null || true
        fi
    else
        log_error "Backup sync failed!"
        exit 1
    fi
}

# Main script
# Parse flags, verify the environment, then either list remotes (no REMOTE
# argument) or sync to the requested one.
main() {
    # Flag state consumed by sync_to_remote (intentionally not local).
    DRY_RUN=""
    VERBOSE=""
    REMOTE=""

    # Flags may appear in any order; the last bare argument wins as REMOTE.
    while [ $# -gt 0 ]; do
        case "$1" in
            --dry-run)
                DRY_RUN="1"
                ;;
            --verbose|-v)
                VERBOSE="1"
                ;;
            --help|-h)
                usage
                exit 0
                ;;
            --*)
                log_error "Unknown option: $1"
                usage
                exit 1
                ;;
            *)
                REMOTE="$1"
                ;;
        esac
        shift
    done

    # Check prerequisites
    check_prereqs

    # With no remote given, just show what is configured and stop.
    if [ -z "$REMOTE" ]; then
        show_remotes_and_exit
    fi

    # Sync to specified remote
    sync_to_remote "$REMOTE"
}

# Run main function
main "$@"
41 changes: 41 additions & 0 deletions stacks/backup/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# =============================================================================
# HomeLab Stack — Backup Stack Environment Configuration
# Copy this file to .env in the stacks/backup directory
# =============================================================================
# NOTE: comments are kept on their own lines — "KEY=value  # comment" is
# parsed inconsistently across dotenv readers (some keep "# comment" as
# part of the value).

# -----------------------------------------------------------------------------
# GENERAL
# -----------------------------------------------------------------------------
TZ=Asia/Shanghai
PUID=1000
PGID=1000
# Your base domain (e.g. home.example.com)
DOMAIN=yourdomain.com

# -----------------------------------------------------------------------------
# RESTIC REST SERVER
# -----------------------------------------------------------------------------
# REQUIRED: Password for restic repository
# Generate with: openssl rand -base64 32
RESTIC_PASSWORD=

# -----------------------------------------------------------------------------
# RCLONE (Cloud Backup Targets)
# -----------------------------------------------------------------------------
# Create rclone.conf with your cloud storage configs:
#   [s3-backup]
#   type = s3
#   provider = AWS
#   access_key_id = YOUR_AWS_KEY
#   secret_access_key = YOUR_AWS_SECRET
#   region = us-east-1
#
#   [gdrive-backup]
#   type = drive
#   client_id = YOUR_GOOGLE_CLIENT_ID
#   client_secret = YOUR_GOOGLE_CLIENT_SECRET
#   token = {"access_token":"..."}
#   root_folder_id = YOUR_FOLDER_ID
#
# (The s3 bucket is given in the remote path — "s3-backup:bucket" — not as a
# config key; "drive_id" is not a drive-backend option.)
#
# NOTE: Mount your rclone.conf at stacks/backup/config/rclone/rclone.conf
# and ensure it has proper permissions: chmod 600 rclone.conf
Loading