File tree Expand file tree Collapse file tree 4 files changed +18
-6
lines changed Expand file tree Collapse file tree 4 files changed +18
-6
lines changed Original file line number Diff line number Diff line change @@ -24,9 +24,13 @@ Service to backup and/or restore a PostgreSQL database to/from S3
2424
2525`DB_USER` user that accesses the database (PostgreSQL "role")
2626
27- `AWS_ACCESS_KEY` used for S3 interactions
27+ `AWS_ACCESS_KEY_ID` used for S3 interactions
2828
29- `AWS_SECRET_KEY` used for S3 interactions
29+ `AWS_SECRET_ACCESS_KEY` used for S3 interactions
30+
31+ `AWS_ACCESS_KEY` used for S3 interactions (Deprecated)
32+
33+ `AWS_SECRET_KEY` used for S3 interactions (Deprecated)
3034
3135`S3_BUCKET` e.g., _s3://database-backups_ **NOTE: no trailing slash**
3236
Original file line number Diff line number Diff line change @@ -10,7 +10,7 @@ log() {
1010# Function to remove sensitive values from sentry Event
1111filter_sensitive_values () {
1212 local msg="$1"
13- for var in AWS_ACCESS_KEY AWS_SECRET_KEY B2_APPLICATION_KEY B2_APPLICATION_KEY_ID DB_ROOTPASSWORD DB_USERPASSWORD; do
13+ for var in AWS_ACCESS_KEY AWS_SECRET_KEY AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY B2_APPLICATION_KEY B2_APPLICATION_KEY_ID DB_ROOTPASSWORD DB_USERPASSWORD; do
1414 val="${!var}"
1515 if [ -n "$val" ]; then
1616 msg="${msg//$val/[FILTERED]}"
@@ -63,6 +63,10 @@ start=$(date +%s);
6363$(PGPASSWORD=${DB_USERPASSWORD} pg_dump --host=${DB_HOST} --username=${DB_USER} --create --clean ${DB_OPTIONS} --dbname=${DB_NAME} > /tmp/${DB_NAME}.sql) || STATUS=$?;
6464end=$(date +%s);
6565
66+ # maintain backward compatibility with key variables accepted by s3cmd
67+ export AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID:-$AWS_ACCESS_KEY}"
68+ export AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY:-$AWS_SECRET_KEY}"
69+
6670if [ $STATUS -ne 0 ]; then
6771 error_message="${MYNAME}: FATAL: Backup of ${DB_NAME} returned non-zero status ($STATUS) in $(expr ${end} - ${start}) seconds.";
6872 log "ERROR" "${error_message}";
Original file line number Diff line number Diff line change 99
1010filter_sensitive_values () {
1111 local msg="$1"
12- for var in AWS_ACCESS_KEY AWS_SECRET_KEY B2_APPLICATION_KEY B2_APPLICATION_KEY_ID DB_ROOTPASSWORD DB_USERPASSWORD; do
12+ for var in AWS_ACCESS_KEY AWS_SECRET_KEY AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY B2_APPLICATION_KEY B2_APPLICATION_KEY_ID DB_ROOTPASSWORD DB_USERPASSWORD; do
1313 val="${!var}"
1414 if [ -n "$val" ]; then
1515 msg="${msg//$val/[FILTERED]}"
9494log " INFO" " ${MYNAME} : copying database ${DB_NAME} backup and checksum from ${S3_BUCKET} "
9595start=$( date +%s)
9696
97+ # maintain backward compatibility with key variables accepted by s3cmd
98+ export AWS_ACCESS_KEY_ID=" ${AWS_ACCESS_KEY_ID:- $AWS_ACCESS_KEY } "
99+ export AWS_SECRET_ACCESS_KEY=" ${AWS_SECRET_ACCESS_KEY:- $AWS_SECRET_KEY } "
100+
97101# Download database backup
98102aws s3 cp " ${S3_BUCKET} /${DB_NAME} .sql.gz" " /tmp/${DB_NAME} .sql.gz" || STATUS=$?
99103if [ $STATUS -ne 0 ]; then
Original file line number Diff line number Diff line change 1- AWS_ACCESS_KEY=
2- AWS_SECRET_KEY=
1+ AWS_ACCESS_KEY_ID=
2+ AWS_SECRET_ACCESS_KEY=
33S3_BUCKET=
44
55# BackBlaze variables
You can’t perform that action at this time.
0 commit comments