ansible/roles/postgresql/templates/backup.sh in subspace-0.1.3 vs ansible/roles/postgresql/templates/backup.sh in subspace-0.2.1

- old
+ new

@@ -4,10 +4,12 @@
 # Backup rotation
 # Storage folder where to move backup files
 # Must contain backup.monthly backup.weekly backup.daily folders
 storage=/u/apps/{{project_name}}/shared/db/backups
+bucket="{{s3_db_backup_bucket}}"
+prefix="{{s3_db_backup_prefix}}"
 # Destination file names
 date_hourly=`date +"%Y%m%d%H%M"`
 filename="$date_hourly.dump"
 #date_weekly=`date +"%V sav. %m-%Y"`
@@ -34,11 +36,24 @@
 else
   # Every other hour just put it in the hourly
   destination=backup.hourly/$filename
 fi
-#destination is set so actually do the backup
-$(PGPASSWORD={{database_password}} pg_dump --verbose -Fc --host=localhost --username={{database_user}} --file $storage/$destination {{database_name}})
+if [ "$bucket" != "disabled" ] ; then
+  if [ -f "$storage/latest.dump" ] ; then
+    if [ -f "$storage/latest-old.dump" ] ; then
+      rm "$storage/latest-old.dump"
+    fi
+    mv "$storage/latest.dump" "$storage/latest-old.dump"
+  fi
+
+  $(PGPASSWORD={{database_password}} pg_dump --verbose -Fc --host=localhost --username={{database_user}} --file $storage/latest.dump {{database_name}})
+
+  curl -XPUT -T "$storage/latest.dump" -H "Host: $bucket.s3.amazonaws.com" https://$bucket.s3.amazonaws.com/$prefix/$destination
+else
+  #destination is set so actually do the backup
+  $(PGPASSWORD={{database_password}} pg_dump --verbose -Fc --host=localhost --username={{database_user}} --file $storage/$destination {{database_name}})
+fi
 # then clean old ones
 # hourly - for 48 hours
 find $storage/backup.hourly/ -mmin +2880 -exec rm -rv {} \;
 # daily - keep for 14 days