diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..759e41b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,16 @@
+name: Lint
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Lint
+        run: make lint
diff --git a/Dockerfile b/Dockerfile
index d78749e..e33e166 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -19,6 +19,7 @@ ENV S3_PATH 'backup'
 ENV S3_ENDPOINT **None**
 ENV S3_S3V4 no
 ENV SCHEDULE **None**
+ENV SUCCESS_WEBHOOK **None**
 
 ADD entrypoint.sh .
 ADD backup.sh .
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..d0d204d
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,4 @@
+SHELL_FILES := $(wildcard *.sh)
+
+lint:
+	@shellcheck --enable=require-variable-braces $(SHELL_FILES) && echo "ShellCheck passed"
diff --git a/README.md b/README.md
index b0ea5a0..57f7d1f 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ This is a fork of [karser/postgres-backup-s3](https://github.com/karser/docker-i
 Docker:
 
 ```sh
-$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost f213/postgres-backup-s3
+$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost -e SCHEDULE="@daily" f213/postgres-backup-s3
 ```
 
 Docker Compose:
@@ -28,7 +28,7 @@ postgres-backup:
   test: curl http://localhost:1880
   environment:
-    SCHEDULE: 0 30 */2 * * * # every 2 hours at HH:30
+    SCHEDULE: 0 30 */2 * * * * # every 2 hours at HH:30
     S3_REGION: region
     S3_ACCESS_KEY_ID: key
     S3_SECRET_ACCESS_KEY: secret
@@ -42,8 +42,6 @@ postgres-backup:
     SUCCESS_WEBHOOK: https://sb-ping.ru/8pp9RGwDDPzTL2R8MRb8Ae
 ```
 
-### Automatic Periodic Backups
+### Crontab format
 
-You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="@daily"` to run the backup automatically.
-
-More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules).
+The schedule format supports years. More information about scheduling can be found [here](https://github.com/aptible/supercronic/tree/master?tab=readme-ov-file#crontab-format).
diff --git a/backup.sh b/backup.sh
index 4928409..6b05931 100644
--- a/backup.sh
+++ b/backup.sh
@@ -1,5 +1,8 @@
 #! /bin/sh
 
+# shellcheck disable=SC3040 # expecting the 'pipefail' option to be available in the shell
+# shellcheck disable=SC2086 # POSTGRES_HOST_OPTS and AWS_ARGS are intentionally split on spaces
+
 set -e
 set -o pipefail
 
@@ -25,8 +28,8 @@ fi
 
 if [ "${POSTGRES_HOST}" = "**None**" ]; then
   if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
-    POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
-    POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
+    POSTGRES_HOST="${POSTGRES_PORT_5432_TCP_ADDR}"
+    POSTGRES_PORT="${POSTGRES_PORT_5432_TCP_PORT}"
   else
     echo "You need to set the POSTGRES_HOST environment variable."
     exit 1
@@ -43,33 +46,33 @@ if [ "${POSTGRES_PASSWORD}" = "**None**" ]; then
   exit 1
 fi
 
-if [ "${S3_ENDPOINT}" == "**None**" ]; then
+if [ "${S3_ENDPOINT}" = "**None**" ]; then
   AWS_ARGS=""
 else
   AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
 fi
 
 # env vars needed for aws tools
-export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
-export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
-export AWS_DEFAULT_REGION=$S3_REGION
+export AWS_ACCESS_KEY_ID="${S3_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${S3_SECRET_ACCESS_KEY}"
+export AWS_DEFAULT_REGION="${S3_REGION}"
 
-export PGPASSWORD=$POSTGRES_PASSWORD
-POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS"
+export PGPASSWORD="${POSTGRES_PASSWORD}"
+POSTGRES_HOST_OPTS="-h ${POSTGRES_HOST} -p ${POSTGRES_PORT} -U ${POSTGRES_USER} ${POSTGRES_EXTRA_OPTS}"
 
 echo "Creating dump of ${POSTGRES_DATABASE} database from ${POSTGRES_HOST}..."
 
-pg_dump -Fc $POSTGRES_HOST_OPTS $POSTGRES_DATABASE > db.dump
+pg_dump -Fc ${POSTGRES_HOST_OPTS} "${POSTGRES_DATABASE}" > db.dump
 
-echo "Uploading dump to $S3_BUCKET"
+echo "Uploading dump to ${S3_BUCKET}"
 
-cat db.dump | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").dump || exit 2
+aws ${AWS_ARGS} s3 cp db.dump "s3://${S3_BUCKET}/${S3_PREFIX}/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").dump" || exit 2
 
 echo "DB backup uploaded successfully"
 
 rm db.dump
 
-if [ -n $SUCCESS_WEBHOOK ]; then
-  echo "Notifying $SUCCESS_WEBHOOK"
-  curl -m 10 --retry 5 $SUCCESS_WEBHOOK
+if [ ! "${SUCCESS_WEBHOOK}" = "**None**" ]; then
+  echo "Notifying ${SUCCESS_WEBHOOK}"
+  curl -m 10 --retry 5 "${SUCCESS_WEBHOOK}"
 fi
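One behavioral note that is easy to miss in the last hunk: the webhook guard rewrite is a bug fix, not just a style change. In a POSIX shell an unquoted empty variable vanishes during expansion, so the old `[ -n $SUCCESS_WEBHOOK ]` collapsed to `[ -n ]`, a one-argument test that is always true, and the script tried to notify even when no webhook was configured. The new test instead compares against the `**None**` sentinel that the Dockerfile now sets as the default, matching how the other optional variables are handled. A quick demonstration of the pitfall:

```sh
# The one-argument test pitfall fixed in backup.sh, runnable in any POSIX shell.
unset WEBHOOK
[ -n $WEBHOOK ] && echo "unquoted: branch taken"    # expands to [ -n ] -> always true
[ -n "$WEBHOOK" ] && echo "quoted: branch taken"    # expands to [ -n "" ] -> false, prints nothing
```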
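The README diff only shows `SUCCESS_WEBHOOK` in the Compose example; for completeness, a sketch of the equivalent `docker run` invocation (the webhook URL is a placeholder, every other flag mirrors the README example):

```sh
# Same flags as the README one-liner, plus the new SUCCESS_WEBHOOK variable.
# https://example.com/ping is a hypothetical endpoint; backup.sh simply runs
# `curl -m 10 --retry 5 "${SUCCESS_WEBHOOK}"` against it after a successful upload.
$ docker run \
    -e S3_ACCESS_KEY_ID=key \
    -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket \
    -e S3_PREFIX=backup \
    -e POSTGRES_DATABASE=dbname \
    -e POSTGRES_USER=user \
    -e POSTGRES_PASSWORD=password \
    -e POSTGRES_HOST=localhost \
    -e SCHEDULE="@daily" \
    -e SUCCESS_WEBHOOK="https://example.com/ping" \
    f213/postgres-backup-s3
```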
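Reviewers can also reproduce the new CI job locally before pushing; the `lint` target only needs `shellcheck` on the PATH (install commands vary by platform, and the `require-variable-braces` optional check assumes a reasonably recent ShellCheck release):

```sh
# Install ShellCheck, then run the same target the workflow runs.
$ apt-get install shellcheck    # Debian/Ubuntu; on macOS: brew install shellcheck
$ make lint
ShellCheck passed
```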