Files
ha-addon/amazon-s3-backup/run.sh
2024-04-03 14:22:04 +02:00

86 lines
3.7 KiB
Bash
Executable File

#!/usr/bin/with-contenv bashio
# ==============================================================================
# Home Assistant Community Add-on: S3 Backup
# ==============================================================================
# NOTE(review): log level is hard-coded to "debug"; consider reading it from
# the add-on configuration (e.g. bashio::config 'log_level') instead.
bashio::log.level "debug"
# script global shortcuts
# Timestamped name used both for the local Supervisor backup and as the
# S3 object key ("<BACKUP_NAME>.tar").
declare -r BACKUP_NAME="ha-backup-$(date +'%Y-%m-%d-%H-%M')"
# SSH config directory shortcut; not referenced in this chunk — presumably
# used elsewhere in the file. TODO confirm it is still needed.
declare -r SSH_HOME="${HOME}/.ssh"
# call Home Assistant to create a local backup
# function fails in case local backup is not created
#######################################
# Create a full (non-encrypted) backup via the Supervisor API.
# Globals:   BACKUP_NAME (read), SLUG (written - slug of the new backup,
#            consumed later by the upload/tagging steps)
# Returns:   __BASHIO_EXIT_OK on success, __BASHIO_EXIT_NOK on failure
#######################################
function create-local-backup {
    # NOTE(review): removed unused local 'base_folders' (partial backups are
    # not implemented; the API call below always requests a full backup).
    local -r payload="{\"name\":\"${BACKUP_NAME}\"}"
    local -r bak_type="non-encrypted"
    bashio::log.info "Creating ${bak_type} full backup: \"${BACKUP_NAME}\""
    # SLUG is intentionally NOT local: the main script needs it afterwards.
    if ! SLUG=$(bashio::api.supervisor POST /backups/new/full "${payload}" .slug); then
        bashio::log.fatal "Error creating ${bak_type} full backup!"
        return "${__BASHIO_EXIT_NOK}"
    fi
    bashio::log.info "Backup created: ${SLUG}"
    return "${__BASHIO_EXIT_OK}"
}
#######################################
# Delete the S3 object whose "slug" tag matches the given backup slug.
# Globals:   bucket_name, custom_endpoint, bucket_region (read)
# Arguments: $1 - slug of the old backup whose uploaded object to remove
#######################################
function delete-s3-backup {
    # BUG FIX: the original assigned $1 to delete_slug but then compared each
    # object's tag against the global SLUG (the backup just created), so it
    # deleted the freshly uploaded object instead of the old one.
    local -r delete_slug=$1
    local object tag
    # NOTE(review): word-splitting the key list breaks on keys containing
    # whitespace; keys produced by this add-on ("<name>.tar") are safe.
    for object in $(aws s3api list-objects-v2 --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --query 'Contents[*].Key' --output text); do
        bashio::log.debug "Checking object: $object"
        tag=$(aws s3api get-object-tagging --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --key "$object" --output text --query "TagSet[?Key=='slug'].Value")
        bashio::log.debug "Slug for object $object: $tag"
        if [ "$tag" = "$delete_slug" ]; then
            bashio::log.debug "Deleting object: $object"
            aws s3api delete-object --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --key "$object" --output text
        fi
    done
}
bashio::log.info "Starting S3 Backup..."
# 'die' is used here in the original; it is presumably provided by bashio or
# defined outside this chunk — TODO confirm.
create-local-backup || die "Local backup process failed! See log for details."

# Add-on configuration.
custom_endpoint="$(bashio::config 'custom_endpoint')"
bucket_name="$(bashio::config 'bucket_name')"
bucket_region="$(bashio::config 'bucket_region' 'minio')"
delete_local_backups="$(bashio::config 'delete_local_backups' 'true')"
local_backups_to_keep="$(bashio::config 'local_backups_to_keep' '3')"
monitor_path="/backup"
# Selects the slugs of all but the newest $local_backups_to_keep backups
# (sorted by date, newest first, then sliced off the tail).
jq_filter=".backups|=sort_by(.date)|.backups|reverse|.[$local_backups_to_keep:]|.[].slug"

# AWS CLI reads credentials and region from the environment.
export AWS_ACCESS_KEY_ID="$(bashio::config 'aws_access_key')"
export AWS_SECRET_ACCESS_KEY="$(bashio::config 'aws_secret_access_key')"
export AWS_REGION="$bucket_region"

bashio::log.debug "Using AWS CLI version: '$(aws --version)'"
bashio::log.debug "Command: 'aws s3 sync $monitor_path s3://$bucket_name/ --no-progress --region $bucket_region'"
bashio::log.debug "SLUG: $SLUG $BACKUP_NAME"
bashio::log.debug "{\"TagSet\": [{ \"Key\": \"slug\", \"Value\": \"${SLUG}\" }]}"

# Upload the new backup and tag it with its Supervisor slug so that
# delete-s3-backup can locate it later. BUG FIX: abort before pruning local
# backups if either step fails — otherwise a failed upload would still delete
# the only remaining copies.
aws s3 cp "/backup/${SLUG}.tar" "s3://${bucket_name}/${BACKUP_NAME}.tar" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --no-progress \
    || die "Upload of backup '${SLUG}' to S3 failed! Local backups were not deleted."
aws s3api put-object-tagging --bucket "${bucket_name}" --key "${BACKUP_NAME}.tar" --tagging "{\"TagSet\": [{ \"Key\": \"slug\", \"Value\": \"${SLUG}\" }]}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" \
    || die "Tagging uploaded backup '${BACKUP_NAME}.tar' failed! Local backups were not deleted."

if bashio::var.true "${delete_local_backups}"; then
    bashio::log.info "Will delete all the oldest local backups except the '${local_backups_to_keep}' newest ones."
    # Quote the jq filter so it reaches the supervisor helper as one argument.
    backup_slugs="$(bashio::api.supervisor "GET" "/backups" "false" "$jq_filter")"
    bashio::log.debug "Backups to delete: '$backup_slugs'"
    for s in $backup_slugs; do
        # Remove the matching S3 object first, then the local backup.
        delete-s3-backup "$s"
        bashio::log.info "Deleting Backup: '$s'"
        bashio::api.supervisor "DELETE" "/backups/$s"
    done
else
    bashio::log.info "Will not delete any local backups since 'delete_local_backups' is set to 'false'"
fi
bashio::log.info "Finished S3 Backup."