#!/usr/bin/env bash
#
# Restores all files/folders inside a particular bucket path for the next 7
# days. This uses the bulk retrieval tier:
#
#   Bulk retrievals are the lowest-cost retrieval option when restoring objects
#   from S3 Glacier Deep Archive. They typically finish within 48 hours for
#   objects stored in the S3 Glacier Deep Archive storage class or S3
#   Intelligent-Tiering Deep Archive tier.
#
# If you need faster access then use the `Expedited` or `Standard` tiers.
#
# Example usage:
#
#   aws-restore-deep-glacier-folder my-deep-glacier-bucket path/to/images restored_images
#
# This will create a run.sh script in a folder called "restored_images". Run
# that to restore all files inside the `path/to/images` folder inside the
# my-deep-glacier-bucket bucket.
#
# After you run the generated script, you have to wait for AWS to make the
# files available for download. You can check the status of a file with:
#
#   aws s3api head-object --bucket my-deep-glacier-bucket --key path/to/images/photo1.jpg
#
# (obviously change the bucket and path to suit your needs).
#
# Once the files are restored you can download them on the S3 website or
# better yet use RcloneBrowser. I'm sure there's also a way to do it over cli
# too, I just haven't checked.
#
# You'll need the aws cli tools for this script. Download them from
# https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html
# Once installed, open a new shell and verify that you can run the `aws`
# command.
#
# If you see an error along the lines of "'charmap' codec can't encode
# character '\u200e' in position 42: character maps to <undefined>" then that
# means a filename has a Unicode codepoint and the dumb aws Python code is
# trying to read it using your system's locale, which is very likely not set to
# use the Windows UTF-8 beta feature. This is an ongoing issue in this tool
# that goes back to 2013!!! There's no way to fix it using environment
# variables, at least nothing worked for me.
# The fix provided by the devs is heavy handed: you change your system locale
# to use UTF-8... This has consequences though like breaking legacy apps that
# don't have Unicode support and I'm sure other weird things will happen, such
# as file corruption. Anyway, if you're getting this charmap error then I
# suggest changing your system locale, run this again, then switch back to
# your previous locale. If you don't get the canonical file name then you
# won't be able to restore it.
#
# You can enable the UTF-8 locale with:
#
#   win+r -> intl.cpl -> Administrative tab -> Change system locale ->
#   Beta: Use Unicode UTF-8 box.

# Detect terminal color support so the output can be highlighted. All color
# variables fall back to empty strings when stdout is not a capable terminal.
if command -v tput >/dev/null 2>&1; then
  ncolors=$(tput colors)
fi

if [ -t 1 ] && [ -n "$ncolors" ] && [ "$ncolors" -ge 8 ]; then
  RED="$(tput setaf 1)"
  GREEN="$(tput setaf 2)"
  YELLOW="$(tput setaf 3)"
  BLUE="$(tput setaf 4)"
  MAGENTA="$(tput setaf 5)"
  CYAN="$(tput setaf 6)"
  BOLD="$(tput bold)"
  NORMAL="$(tput sgr0)"
else
  RED=""
  GREEN=""
  YELLOW=""
  BLUE=""
  MAGENTA=""
  CYAN=""
  BOLD=""
  NORMAL=""
fi

# Print a bold red error message to stdout. %b keeps embedded \n escape
# sequences working while keeping the message out of the format string.
error() {
  printf '%s%b%s\n' "${BOLD}${RED}" "$1" "${NORMAL}"
}

# Print an abort notice and exit non-zero.
abort() {
  error "\nAborting...\n"
  exit 1
}

set -e

bucket="$1"
path="$2"
temp_dir="$3"

# NOTE(review): number_of_objects_per_file is never referenced in this
# script — presumably left over from a batching feature; confirm before
# removing.
number_of_objects_per_file=100
days_available=7
restore_tier="Bulk" # Can also be "Standard" or "Expedited"

if [[ -z "$bucket" || -z "$path" || -z "$temp_dir" ]]; then
  error "Usage: aws-restore-deep-glacier-folder <bucket> <path> <temp_dir>"
  exit 1
fi

printf "Restoring ${BOLD}${GREEN}%s${NORMAL} with local temp folder ${BOLD}${GREEN}%s${NORMAL}\n" \
  "$bucket:$path" "$temp_dir"

mkdir -p "$temp_dir"
pushd "$temp_dir" &>/dev/null

# List only the objects that are actually in the DEEP_ARCHIVE storage class.
# Under `set -e` a failing command substitution aborts the script before any
# later `$?` check could run, so test the command directly instead.
if ! items="$(aws s3api list-objects-v2 --bucket "$bucket" --prefix "$path" \
  --query "Contents[?StorageClass=='DEEP_ARCHIVE']" --output text)"; then
  error "Error: failed to run the aws command. Aborting."
  exit 1
fi

# With --output text an empty query result is rendered as the string "None".
if [[ "$items" == "None" ]]; then
  error "Didn't find any files. Check that your bucket name and path is correct."
  exit 1
fi

# Format the items list.
# Reduce each listing row to just the object key: the first awk strips the
# leading ETag column, the second drops the trailing LastModified, Size and
# StorageClass columns. LC_ALL=C keeps awk byte-oriented for odd filenames.
# NOTE(review): `NF-=3` makes awk rebuild $0 with single-space separators, so
# keys containing runs of multiple spaces would be mangled — confirm whether
# any such keys exist in the bucket.
output="$(echo "$items" | LC_ALL=C awk '{print substr($0, index($0, $2))}' | awk '{NF-=3};3')"

mapfile -t lines_array <<< "$output"
num_items="${#lines_array[@]}"

printf "Number of items to restore: ${BOLD}${YELLOW}%s${NORMAL}\n" "$num_items"
printf "${BOLD}${RED}Proceed?\n> ${NORMAL}"
read -e -r proceed

if [[ "$proceed" == "1" || "$proceed" == "y" || "$proceed" == "Y" || "$proceed" == "yes" || "$proceed" == "YES" ]]; then
  echo "$output" > all_objects_list.txt

  # Generate the script that kicks off the restoration. The unquoted heredoc
  # expands $bucket, $days_available and $restore_tier now, at generation
  # time; \$x is escaped so run.sh expands it per object at run time, and \\
  # at end of line emits a literal line-continuation backslash into run.sh.
  # Each restore command is echoed before it runs so progress is visible.
  cat > run.sh <<EOF
while read -r x; do
  printf 'aws s3api restore-object --restore-request %s --bucket %s --key "%s"\n' \\
    '{"Days":$days_available,"GlacierJobParameters":{"Tier":"$restore_tier"}}' '$bucket' "\$x"
  aws s3api restore-object \\
    --restore-request '{"Days":$days_available,"GlacierJobParameters":{"Tier":"$restore_tier"}}' \\
    --bucket '$bucket' --key "\$x"
done < all_objects_list.txt
printf '\nDone! You can now delete this folder.\nYour files are currently being restored. The time it takes to restore can be found in the AWS docs - just look for the $restore_tier restore tier, which is what you used.\nOnce restored, download the files from the S3 site or better yet use RCloneBrowser.\n'
EOF

  chmod +x run.sh

  printf "${BOLD}You can now run ${GREEN}%s${NORMAL}${BOLD} to start the restoration process.${NORMAL}\n" \
    "$temp_dir/run.sh"
else
  echo "Aborting."
fi

popd &>/dev/null