|
|
|
|
@@ -1,10 +1,49 @@
|
|
|
|
|
#!/usr/bin/env bash
|
|
|
|
|
|
|
|
|
|
#
|
|
|
|
|
# Restores all files/folders inside a particular bucket path,
|
|
|
|
|
# e.g. aws-restore-deep-glacier-folder mcampagnaro-deep-glacier images restored_images
|
|
|
|
|
# will restore all files inside the images folder of the mcampagnaro-deep-glacier bucket, saving
|
|
|
|
|
# temp restoration data in the local "restored_images" directory.
|
|
|
|
|
# Restores all files/folders inside a particular bucket path for the next 7 days. This uses the bulk retrieval tier:
|
|
|
|
|
#
|
|
|
|
|
# Bulk retrievals are the lowest-cost retrieval option when restoring objects
|
|
|
|
|
# from S3 Glacier Deep Archive. They typically finish within 48 hours for
|
|
|
|
|
# objects stored in the S3 Glacier Deep Archive storage class or S3
|
|
|
|
|
# Intelligent-Tiering Deep Archive tier.
|
|
|
|
|
#
|
|
|
|
|
# If you need faster access then use the `Expedited` or `Standard` tiers.
|
|
|
|
|
#
|
|
|
|
|
# Example usage:
|
|
|
|
|
#
|
|
|
|
|
# aws-restore-deep-glacier-folder my-deep-glacier-bucket path/to/images restored_images
|
|
|
|
|
#
|
|
|
|
|
# This will create a run.sh script in a folder called "restored_images". Run that to restore all files inside the `path/to/images` folder inside the my-deep-glacier bucket.
|
|
|
|
|
#
|
|
|
|
|
# After you run the generated script, you have to wait for AWS to make the files available for download. You can check the status of a file with:
|
|
|
|
|
#
|
|
|
|
|
# aws s3api head-object --bucket my-deep-glacier --key path/to/images/photo1.jpg
|
|
|
|
|
#
|
|
|
|
|
# (obviously change the bucket and path to suit your needs).
|
|
|
|
|
#
|
|
|
|
|
# Once the files are restored you can download them on the S3 website or better yet use RcloneBrowser. I'm sure there's also a way to do it over cli too, I just haven't checked.
|
|
|
|
|
#
|
|
|
|
|
# You'll need the aws cli tools for this script. Download them from https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html
|
|
|
|
|
# Once installed, open a new shell and verify that you can run the `aws` command.
|
|
|
|
|
#
|
|
|
|
|
# If you see an error along the lines of "'charmap' codec can't encode
|
|
|
|
|
# character '\u200e' in position 42: character maps to <undefined>" then that
|
|
|
|
|
# means a filename has a Unicode codepoint and the dumb aws Python code is
|
|
|
|
|
# trying to read it using your system's locale, which is very likely not set to
|
|
|
|
|
# use the Windows UTF-8 beta feature. This is an ongoing issue in this tool
|
|
|
|
|
# that goes back to 2013!!! There's no way to fix it using environment
|
|
|
|
|
# variables, at least nothing worked for me. The fix provided by the devs is
|
|
|
|
|
# heavy handed: you change your system locale to use UTF-8... This has
|
|
|
|
|
# consequences though like breaking legacy apps that don't have Unicode support
|
|
|
|
|
# and I'm sure other weird things will happen, such as file corruption. Anyway,
|
|
|
|
|
# if you're getting this charmap error then I suggest changing your system
|
|
|
|
|
# locale, run this again, then switch back to your previous locale. If you
|
|
|
|
|
# don't get the canonical file name then you won't be able to restore it.
|
|
|
|
|
#
|
|
|
|
|
# You can enable the UTF-8 locale with:
|
|
|
|
|
#
|
|
|
|
|
# win+r -> intl.cpl -> Administrative tab -> Change system locale -> Beta: Use Unicode UTF-8 box.
|
|
|
|
|
#
|
|
|
|
|
|
|
|
|
|
if which tput >/dev/null 2>&1; then
|
|
|
|
|
@@ -46,7 +85,7 @@ path="$2"
|
|
|
|
|
temp_dir="$3"
|
|
|
|
|
number_of_objects_per_file=100
|
|
|
|
|
days_available=7
|
|
|
|
|
restore_tier="Bulk" # Can also be "Standard"
|
|
|
|
|
restore_tier="Bulk" # Can also be "Standard" or "Expedited"
|
|
|
|
|
|
|
|
|
|
if [[ $bucket == "" || $path == "" || $temp_dir == "" ]]; then
|
|
|
|
|
error "Usage: aws-restore-deep-glacier-folder <bucket-name> <path-in-bucket> <local-temp-dir>"
|
|
|
|
|
@@ -61,7 +100,7 @@ pushd "$temp_dir" &>/dev/null
|
|
|
|
|
aws s3api list-objects-v2 --bucket $bucket --prefix $path --query "Contents[?StorageClass=='DEEP_ARCHIVE']" --output text | LC_ALL=C awk '{print substr($0, index($0, $2))}' | awk '{NF-=3};3' > all_objects_list.txt
|
|
|
|
|
|
|
|
|
|
# Generate the main script that will kick off the restoration.
|
|
|
|
|
printf "while read x; do\n printf \"aws s3api restore-object --restore-request '{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}' --bucket $bucket --key \\\\\"\$x\\\\\"\\\\n\"\n aws s3api restore-object --restore-request \"{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}\" --bucket $bucket --key \"\$x\"\ndone < all_objects_list.txt\nprintf \"\\\\nDone! You can now delete this folder.\\\\n\"\n" > run.sh
|
|
|
|
|
printf "while read x; do\n printf \"aws s3api restore-object --restore-request '{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}' --bucket $bucket --key \\\\\"\$x\\\\\"\\\\n\"\n aws s3api restore-object --restore-request \"{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}\" --bucket $bucket --key \"\$x\"\ndone < all_objects_list.txt\nprintf \"\\\\nDone! You can now delete this folder.\\\\nYour files are currently being restored. The time it takes to restore can be found in the AWS docs - just look for the $restore_tier restore tier, which is what you used.\\\\nOnce restored, download the files from the S3 site or better yet use RCloneBrowser.\\\\n\"\n" > run.sh
|
|
|
|
|
chmod +x run.sh
|
|
|
|
|
|
|
|
|
|
printf "${BOLD}You can now run ${GREEN}$temp_dir/run.sh${NORMAL}${BOLD} to start the restoration process.\n"
|
|
|
|
|
|