From b0c10539bb45ec1637ebe2c45290971385554985 Mon Sep 17 00:00:00 2001
From: Alain Stucki
Date: Wed, 3 Apr 2024 14:40:20 +0200
Subject: [PATCH] rename

---
 amazon-s3-backup/DOCS.md     | 120 -----------------------------------
 amazon-s3-backup/Dockerfile  |  41 ------------
 amazon-s3-backup/config.json |  33 ----------
 amazon-s3-backup/icon.png    | Bin 6106 -> 0 bytes
 amazon-s3-backup/run.sh      |  87 -------------------------
 5 files changed, 281 deletions(-)
 delete mode 100755 amazon-s3-backup/DOCS.md
 delete mode 100755 amazon-s3-backup/Dockerfile
 delete mode 100755 amazon-s3-backup/config.json
 delete mode 100644 amazon-s3-backup/icon.png
 delete mode 100755 amazon-s3-backup/run.sh

diff --git a/amazon-s3-backup/DOCS.md b/amazon-s3-backup/DOCS.md
deleted file mode 100755
index 96848d1..0000000
--- a/amazon-s3-backup/DOCS.md
+++ /dev/null
@@ -1,120 +0,0 @@
# Home Assistant Add-on: S3 Backup

## Installation

Follow these steps to install the add-on on your system:

1. Enable **Advanced Mode** in your Home Assistant user profile.
2. Navigate in your Home Assistant frontend to **Supervisor** -> **Add-on Store**.
3. Search for the "Amazon S3 Backup" add-on and click on it.
4. Click the "INSTALL" button.

## How to use

1. Set the `aws_access_key`, `aws_secret_access_key`, and `bucket_name` options.
2. Optionally, adjust the `bucket_region`, `storage_class`, `delete_local_backups`, and `local_backups_to_keep` options.
3. Start the add-on to sync the `/backup/` directory to the configured `bucket_name` on Amazon S3. You can also automate this, as shown in the example below.

## Automation

To automate creating backups and syncing them to Amazon S3, add these two automations to Home Assistant's `configuration.yaml` and adjust them to your needs:

```
automation:
  # create a full backup
  - id: backup_create_full_backup
    alias: Create a full backup every day at 4am
    trigger:
      platform: time
      at: "04:00:00"
    action:
      service: hassio.backup_full
      data:
        # uses the 'now' object of the trigger to create a more user-friendly name (e.g. '202101010400_automated-backup')
        name: "{{as_timestamp(trigger.now)|timestamp_custom('%Y%m%d%H%M', true)}}_automated-backup"

  # starts the add-on 15 minutes past every hour, so that all backups, including manual ones, are synced as soon as possible
  - id: backup_upload_to_s3
    alias: Upload to S3
    trigger:
      platform: time_pattern
      # matches 15 minutes past every hour
      minutes: 15
    action:
      service: hassio.addon_start
      data:
        addon: XXXXX_amazon-s3-backup
```

The automation above creates a full backup at 4:00 am; at 4:15 am the add-on syncs it to Amazon S3 and, if configured, deletes local backups according to your settings.

## Configuration

Example add-on configuration:

```
aws_access_key: AKXXXXXXXXXXXXXXXX
aws_secret_access_key: XXXXXXXXXXXXXXXX
bucket_name: my-bucket
bucket_region: eu-central-1
storage_class: STANDARD
delete_local_backups: true
local_backups_to_keep: 3
```

### Option: `aws_access_key` (required)
AWS IAM access key used to access the S3 bucket.

### Option: `aws_secret_access_key` (required)
AWS IAM secret access key used to access the S3 bucket.

### Option: `bucket_name` (required)
Amazon S3 bucket used to store backups.

### Option: `bucket_region` (optional, Default: eu-central-1)
AWS region where the S3 bucket was created. See https://aws.amazon.com/about-aws/global-infrastructure/ for all available regions.

### Option: `storage_class` (optional, Default: STANDARD)
Amazon S3 storage class used for the synced objects when uploading files to S3. One of STANDARD, REDUCED_REDUNDANCY, STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER, DEEP_ARCHIVE. For more information, see https://aws.amazon.com/s3/storage-classes/.

### Option: `delete_local_backups` (optional, Default: true)
Whether the add-on should remove the oldest local backups after syncing to your Amazon S3 bucket. You can configure how many local backups to keep with the `local_backups_to_keep` option. The oldest backups are deleted first.

### Option: `local_backups_to_keep` (optional, Default: 3)
How many backups to keep locally. If you want to disable automatic local cleanup, set `delete_local_backups` to false.

If you also want to automatically delete backups to keep your Amazon S3 bucket clean, or move backups to a cheaper storage class to save some money, take a look at S3 Lifecycle rules (https://docs.aws.amazon.com/AmazonS3/latest/userguide/how-to-set-lifecycle-configuration-intro.html).
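For example, a lifecycle configuration along these lines (a sketch only; the rule ID, day counts, and storage class are placeholders to adapt) would move backups to GLACIER after 30 days and delete them after 180 days. It can be applied with `aws s3api put-bucket-lifecycle-configuration --bucket my-bucket --lifecycle-configuration file://lifecycle.json`:

```
{
  "Rules": [
    {
      "ID": "manage-ha-backups",
      "Filter": { "Prefix": "" },
      "Status": "Enabled",
      "Transitions": [
        { "Days": 30, "StorageClass": "GLACIER" }
      ],
      "Expiration": { "Days": 180 }
    }
  ]
}
```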

## Security
I recommend creating a new IAM user which:
- cannot log in to the AWS Console
- can only access AWS programmatically
- is used by this add-on only
- uses a least-privilege IAM policy such as this one:

```
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Sid": "AllowAWSS3Sync",
      "Effect": "Allow",
      "Action": [
        "s3:PutObject",
        "s3:ListBucket"
      ],
      "Resource": [
        "arn:aws:s3:::YOUR-S3-BUCKET-NAME/*",
        "arn:aws:s3:::YOUR-S3-BUCKET-NAME"
      ]
    }
  ]
}
```

## Support

Using the add-on requires knowledge of Amazon S3 and AWS IAM.
Under the hood it uses the AWS CLI version 1, specifically the `aws s3 sync` command.
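With the example configuration above, the underlying call looks roughly like this sketch (bucket and region are substituted from your options; see the debug output in run.sh below):

```
aws s3 sync /backup s3://my-bucket/ --no-progress --region eu-central-1
```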

## Thanks
This add-on is heavily inspired by https://github.com/gdrapp/hass-addons and https://github.com/rrostt/hassio-backup-s3

diff --git a/amazon-s3-backup/Dockerfile b/amazon-s3-backup/Dockerfile
deleted file mode 100755
index af0eca3..0000000
--- a/amazon-s3-backup/Dockerfile
+++ /dev/null
@@ -1,41 +0,0 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

ENV LANG C.UTF-8

COPY run.sh /
RUN chmod a+x /run.sh

# add aws-cli and deps
RUN apk add -v --update --no-cache \
    python3 \
    py3-pip \
    groff \
    less \
    jq \
    aws-cli
#RUN pip3 install --upgrade awscli

CMD [ "/run.sh" ]

# Build arguments
ARG BUILD_ARCH
ARG BUILD_DATE
ARG BUILD_REF
ARG BUILD_VERSION

# Labels
LABEL \
    io.hass.name="bitlab S3 backup" \
    io.hass.description="Automatically create and transfer HA backups" \
    io.hass.arch="${BUILD_ARCH}" \
    io.hass.type="addon" \
    io.hass.version=${BUILD_VERSION} \
    maintainer="Alain Stucki"

diff --git a/amazon-s3-backup/config.json b/amazon-s3-backup/config.json
deleted file mode 100755
(33-line file; contents not recoverable from this excerpt)
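Purely as a hypothetical illustration (every field and value below is assumed, based on the options documented in DOCS.md and the keys run.sh reads, not on the deleted file itself), a legacy Home Assistant add-on `config.json` for this add-on might look roughly like:

```
{
  "name": "Amazon S3 Backup",
  "version": "1.0.0",
  "slug": "amazon-s3-backup",
  "description": "Automatically create and transfer HA backups to Amazon S3",
  "arch": ["aarch64", "amd64", "armhf", "armv7", "i386"],
  "startup": "once",
  "boot": "manual",
  "map": ["backup:rw"],
  "hassio_api": true,
  "options": {
    "aws_access_key": "",
    "aws_secret_access_key": "",
    "bucket_name": "",
    "bucket_region": "eu-central-1",
    "storage_class": "STANDARD",
    "delete_local_backups": true,
    "local_backups_to_keep": 3
  },
  "schema": {
    "aws_access_key": "str",
    "aws_secret_access_key": "password",
    "bucket_name": "str",
    "bucket_region": "str?",
    "custom_endpoint": "str?",
    "storage_class": "list(STANDARD|REDUCED_REDUNDANCY|STANDARD_IA|ONEZONE_IA|INTELLIGENT_TIERING|GLACIER|DEEP_ARCHIVE)?",
    "delete_local_backups": "bool?",
    "local_backups_to_keep": "int?"
  }
}
```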
diff --git a/amazon-s3-backup/icon.png b/amazon-s3-backup/icon.png
deleted file mode 100644
(binary patch omitted: icon.png, 6106 -> 0 bytes)

diff --git a/amazon-s3-backup/run.sh b/amazon-s3-backup/run.sh
deleted file mode 100755
index f839a36..0000000
--- a/amazon-s3-backup/run.sh
+++ /dev/null
@@ -1,87 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Home Assistant Community Add-on: S3 Backup
# ==============================================================================
bashio::log.level "info"

# script global shortcuts
declare -r BACKUP_NAME="ha-backup-$(date +'%Y-%m-%d-%H-%M')"
declare -r SSH_HOME="${HOME}/.ssh"

# call Home Assistant to create a local backup
# the function fails in case the local backup is not created
function create-local-backup {
    local -r base_folders="addons/local homeassistant media share ssl"
    local data="{\"name\":\"${BACKUP_NAME}\"}"
    local bak_type="non-encrypted"

    bashio::log.info "Creating ${bak_type} full backup: \"${BACKUP_NAME}\""

    if ! SLUG=$(bashio::api.supervisor POST /backups/new/full "${data}" .slug); then
        bashio::log.fatal "Error creating ${bak_type} full backup!"
        return "${__BASHIO_EXIT_NOK}"
    fi

    bashio::log.info "Backup created: ${SLUG}"
    return "${__BASHIO_EXIT_OK}"
}
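
# Note on the cleanup design: every uploaded backup is tagged on S3 with its
# Supervisor slug (see the put-object-tagging call below), and
# delete-s3-backup scans the bucket for objects carrying a matching 'slug'
# tag and deletes them, so the bucket stays in sync when old local backups
# are pruned.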
function delete-s3-backup {
    local delete_slug=$1
    local object tag
    bashio::log.info "Searching for slug: ${delete_slug}"
    # iterate over all objects in the bucket and delete those whose 'slug'
    # tag matches the given backup slug
    for object in $(aws s3api list-objects-v2 --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --query 'Contents[*].Key' --output text); do
        bashio::log.info "Checking object: ${object}"
        tag=$(aws s3api get-object-tagging --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --key "${object}" --output text --query "TagSet[?Key=='slug'].Value")
        bashio::log.info "Slug for object ${object}: ${tag}"
        if [ "${tag}" = "${delete_slug}" ]; then
            bashio::log.info "Deleting object: ${object} tag=${tag} delete_slug=${delete_slug}"
            aws s3api delete-object --bucket "${bucket_name}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --key "${object}" --output text
        fi
    done
}

bashio::log.info "Starting S3 Backup..."

create-local-backup || bashio::exit.nok "Local backup process failed! See log for details."

custom_endpoint="$(bashio::config 'custom_endpoint')"
bucket_name="$(bashio::config 'bucket_name')"
bucket_region="$(bashio::config 'bucket_region' 'minio')"
delete_local_backups="$(bashio::config 'delete_local_backups' 'true')"
local_backups_to_keep="$(bashio::config 'local_backups_to_keep' '3')"
monitor_path="/backup"
# sort backups by date, newest first, and select the slugs of everything
# beyond the newest $local_backups_to_keep entries
jq_filter=".backups|=sort_by(.date)|.backups|reverse|.[$local_backups_to_keep:]|.[].slug"

export AWS_ACCESS_KEY_ID="$(bashio::config 'aws_access_key')"
export AWS_SECRET_ACCESS_KEY="$(bashio::config 'aws_secret_access_key')"
export AWS_REGION="$bucket_region"

bashio::log.debug "Using AWS CLI version: '$(aws --version)'"
bashio::log.debug "Command: 'aws s3 sync $monitor_path s3://$bucket_name/ --no-progress --region $bucket_region'"
bashio::log.debug "SLUG: $SLUG $BACKUP_NAME"
bashio::log.debug "{\"TagSet\": [{ \"Key\": \"slug\", \"Value\": \"${SLUG}\" }]}"

# upload the new backup and tag the S3 object with its Supervisor slug
aws s3 cp "/backup/${SLUG}.tar" "s3://${bucket_name}/${BACKUP_NAME}.tar" --endpoint-url "${custom_endpoint}" --region "${bucket_region}" --no-progress
aws s3api put-object-tagging --bucket "${bucket_name}" --key "${BACKUP_NAME}.tar" --tagging "{\"TagSet\": [{ \"Key\": \"slug\", \"Value\": \"${SLUG}\" }]}" --endpoint-url "${custom_endpoint}" --region "${bucket_region}"

if bashio::var.true "${delete_local_backups}"; then
    bashio::log.info "Deleting the oldest local backups, keeping the '${local_backups_to_keep}' newest ones."
    backup_slugs="$(bashio::api.supervisor "GET" "/backups" "false" $jq_filter)"
    bashio::log.debug "Backups to delete: '$backup_slugs'"

    for s in $backup_slugs; do
        bashio::log.info "Deleting backup: '$s'"
        delete-s3-backup "$s"
        bashio::api.supervisor "DELETE" "/backups/$s"
    done
else
    bashio::log.info "Will not delete any local backups since 'delete_local_backups' is set to 'false'"
fi

bashio::log.info "Finished S3 Backup."