@diegofcornejo
Created May 13, 2024 15:03
Backup Docker volumes to Cloudflare R2 using AWS CLI
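The script below archives every local Docker volume with busybox tar and uploads each archive to a Cloudflare R2 bucket through R2's S3-compatible API, running the AWS CLI from the amazon/aws-cli Docker image. It expects an AWS CLI profile on the host under /tools/aws holding R2 access keys; a minimal sketch of that configuration (the profile name and keys are placeholders, and R2 accepts region = auto):

/tools/aws/credentials
  [<YOUR_AWS_PROFILE>]
  aws_access_key_id = <R2_ACCESS_KEY_ID>
  aws_secret_access_key = <R2_SECRET_ACCESS_KEY>

/tools/aws/config
  [profile <YOUR_AWS_PROFILE>]
  region = auto
  output = json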
#!/bin/bash
# Constants
S3_BUCKET="<YOUR_BUCKET_NAME>"
HOST_NAME="$(hostname)"
DATE=$(date +%F-%H%M%S) # Format as 'YYYY-MM-DD-HHMMSS'
MAX_BACKUPS=5
AWS_PROFILE="<YOUR_AWS_PROFILE>"
R2_ACCOUNT_ID="<YOUR_R2_ACCOUNT_ID>"
# AWS CLI command setup for Cloudflare R2
AWS_CLI="docker run --rm -v /tools/aws:/root/.aws -v $(pwd):/aws -e AWS_PROFILE=${AWS_PROFILE} amazon/aws-cli"
R2_ENDPOINT="https://${R2_ACCOUNT_ID}.r2.cloudflarestorage.com"
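# Optional sanity check (a sketch, not part of the original script): confirm the
# profile and endpoint can reach the bucket before archiving anything, e.g.:
#   $AWS_CLI s3api head-bucket --bucket "${S3_BUCKET}" --endpoint-url "${R2_ENDPOINT}"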
# Function to upload to S3 and manage retention
upload_and_cleanup() {
  volume_name=$1
  archive_file=$2

  # Define the S3 object key
  # s3_path="s3://${S3_BUCKET}/${HOST_NAME}/${DATE}/${volume_name}/${archive_file}"
  s3_key="${HOST_NAME}/docker/${volume_name}/${archive_file}"

  # Upload to R2 via the S3-compatible API
  echo "Uploading ${archive_file} to S3..."
  $AWS_CLI s3api put-object --bucket "${S3_BUCKET}" --key "${s3_key}" --body "${archive_file}" --endpoint-url "${R2_ENDPOINT}"

  # Cleanup local file
  rm -f "${archive_file}"

  # List and remove old backups, keeping only the latest ${MAX_BACKUPS}
  # Note: disabled for now; delete-object is not implemented for R2
  # echo "Managing backups for volume: ${volume_name}..."
  # backup_files=$($AWS_CLI s3api list-objects --bucket "${S3_BUCKET}" --prefix "${HOST_NAME}/docker/${volume_name}/" --endpoint-url "${R2_ENDPOINT}" --query "Contents[].Key" --output text | awk -F/ '{print $NF}' | sort -r)
  # count=0
  # for file in $backup_files; do
  #   ((count++))
  #   if [ $count -le $MAX_BACKUPS ]; then
  #     continue
  #   fi
  #   echo "Deleting old backup: $file"
  #   $AWS_CLI s3api delete-object --bucket "${S3_BUCKET}" --key "${HOST_NAME}/docker/${volume_name}/${file}" --endpoint-url "${R2_ENDPOINT}"
  # done
}
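# Note (an assumption, not stated in the original gist): put-object sends the archive
# in a single request, so very large volume archives could hit the single-PUT size limit.
# Switching the upload line to "$AWS_CLI s3 cp" would use multipart uploads automatically.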
# Listing all Docker volumes
volumes=$(docker volume ls -q)

for volume in $volumes; do
  echo "Processing volume: ${volume}"

  # Create a temporary directory for the backup
  temp_dir=$(mktemp -d)
  docker run --rm -v "${volume}":/volume -v "${temp_dir}":/backup busybox tar czvf "/backup/${volume}-${DATE}.tar.gz" -C /volume ./

  # Fix permissions for the tar file
  docker run --rm -v "${temp_dir}":/backup busybox chown "$(id -u):$(id -g)" "/backup/${volume}-${DATE}.tar.gz"

  # Move the backup to the current directory and cleanup temp directory
  mv "${temp_dir}/${volume}-${DATE}.tar.gz" .
  rmdir "${temp_dir}"

  # Upload and manage backups on S3
  upload_and_cleanup "${volume}" "${volume}-${DATE}.tar.gz"
done
echo "Backup process completed."