Compare commits

..

2 Commits

Author SHA1 Message Date
9fb52326f9 Update aliases 2025-12-30 19:30:20 -05:00
56e4ad3386 Improve the aws deep glacier restore scripts 2025-12-30 19:30:20 -05:00
3 changed files with 52 additions and 37 deletions

View File

@@ -277,7 +277,7 @@ dos2unix_all() {
}
alias d2u='dos2unix_all'
alias e='open_filepilot'
alias e='open_explorer'
alias f='fg'
alias hist='history'
alias histroy='history'

View File

@@ -52,11 +52,6 @@ error() {
printf "${BOLD}${RED}$1${NORMAL}\n"
}
abort() {
error "\nAborting...\n"
exit 1
}
set -e
bucket="$1"
@@ -70,9 +65,22 @@ fi
# .Key gives us just the object paths. If you want the other metadata then remove that from the query.
item_count=$(aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "length(Contents[?StorageClass=='DEEP_ARCHIVE'].Key)")
items="$(aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "Contents[?StorageClass=='DEEP_ARCHIVE'].Key" --output text | tr '\t' '\n' | tr -d '\r')"
error=$?
if [[ ! $error -eq 0 ]]; then
error "Error: failed to run the aws command. Aborting."
exit 1
fi
if [[ $items == "None" ]]; then
error "Didn't find any files. Check that your bucket name and path is correct."
exit 1
fi
mapfile -t lines_array <<< "$items"
item_count="${#lines_array[@]}"
echo "$items" > "$output_file"
printf "Number of items: ${BOLD}${YELLOW}$item_count${NORMAL}\n"
aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "Contents[?StorageClass=='DEEP_ARCHIVE'].Key" --output text | tr '\t' '\n' | tr -d '\r' > "$output_file"
printf "Wrote file list to ${BOLD}${YELLOW}$output_file${NORMAL}\n"

View File

@@ -187,40 +187,29 @@ mkdir -p "$temp_dir"
pushd "$temp_dir" &>/dev/null
# .Key gives us just the object paths. If you want the other metadata then remove that from the query.
item_count=$(aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "length(Contents[?StorageClass=='DEEP_ARCHIVE'].Key)")
items="$(aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "Contents[?StorageClass=='DEEP_ARCHIVE'].Key" --output text | tr '\t' '\n' | tr -d '\r')"
error=$?
if [[ ! $error -eq 0 ]]; then
error "Error: failed to run the aws command. Aborting."
exit 1
fi
if [[ $item_count == 0 ]]; then
if [[ $items == "None" ]]; then
error "Didn't find any files. Check that your bucket name and path is correct."
exit 1
fi
mapfile -t lines_array <<< "$items"
item_count="${#lines_array[@]}"
# Generate the main script that will kick off the restoration.
printf "Number of items to restore: ${BOLD}${YELLOW}$item_count${NORMAL}\n"
printf "${BOLD}${RED}Create the restore script?\n> ${NORMAL}"
read -e proceed
if [[ $proceed == "1" || $proceed == "y" || $proceed == "Y" || $proceed == "yes" || $proceed == "YES" ]]; then
items="$(aws s3api list-objects-v2 --bucket $bucket --prefix "$path" --query "Contents[?StorageClass=='DEEP_ARCHIVE'].Key" --output text | tr '\t' '\n' | tr -d '\r')"
error=$?
if [[ ! $error -eq 0 ]]; then
error "Error: failed to run the aws command. Aborting."
exit 1
fi
if [[ $items == "None" ]]; then
error "Didn't find any files. Check that your bucket name and path is correct."
exit 1
fi
echo "$items" > all_objects_list.txt
# Generate the main script that will kick off the restoration.
RUN_TEMPLATE=$(cat <<EOF
if which tput >/dev/null 2>&1; then
ncolors=\$(tput colors)
@@ -245,16 +234,26 @@ else
NORMAL=""
fi
failed=()
# Open an output file.
exec 3>>output.txt
fail_count=0
failed_filename="failed_keys_\$(printf '%%04x' \$((RANDOM * RANDOM))).txt"
before_sleep_count=0
sleep_every_n_requests=25
sleep_duration=0.2
printf "Files are being restored for $days_available days using the $restore_tier tier\\\n\\\n"
printf "Files are being restored for $days_available days using the $restore_tier tier\\\n\\\n" >&3
printf "\${BOLD}NOTE: Request failures will be saved to \${YELLOW}\$failed_filename\${NORMAL}\${BOLD} as they happen. If this script terminates prematurely then check this file for failures.\\\n\\\n"
printf "NOTE: Request failures will be saved to \$failed_filename as they happen. If this script terminates prematurely then check this file for failures.\\\n\\\n" >&3
index=1
while read key; do
printf "* [\$index/$item_count] \${BOLD}\$key\${NORMAL}\\\n"
printf "* [\$index/$item_count] \$key\\\n" >&3
err=\$(
aws s3api restore-object \\
--bucket mcampagnaro-deep-glacier \\
@@ -270,22 +269,31 @@ while read key; do
if [[ \$err != "" ]]; then
if ! grep -qE 'RestoreAlreadyInProgress|ObjectAlreadyInActiveTierError' <<<"\$err"; then
failed+=("\$key")
printf "\${BOLD}\${RED}FAILED! \$err\${NORMAL}"
printf "FAILED! \$err\" >&3
# Save the failure to a file now in case the script exits prematurely.
fail_count=\$((fail_count + 1))
printf "%%s\\\n" "\$key" >> \$failed_filename
else
if grep -qE 'RestoreAlreadyInProgress' <<<"\$err"; then
printf "\${BOLD}\${YELLOW}SKIPPING! File restore is already in progress.\${NORMAL}"
printf "SKIPPING! File restore is already in progress." >&3
else
printf \${BOLD}"\${YELLOW}SKIPPING! File is already restored. You can now download it.\${NORMAL}"
printf "\${BOLD}\${YELLOW}SKIPPING! File is already restored. You can now download it.\${NORMAL}"
printf "SKIPPING! File is already restored. You can now download it." >&3
fi
fi
else
printf "\${BOLD}\${GREEN}SUCCESS!\${NORMAL}"
printf "SUCCESS!" >&3
fi
printf "\\\n\\\n"
printf "\\\n\\\n" >&3
if [[ \$before_sleep_count -eq sleep_every_n_requests ]]; then
printf "SLEEPING...\\\n\\\n"
printf "SLEEPING...\\\n\\\n" >&3
sleep \$sleep_duration
before_sleep_count=0
fi
@@ -293,22 +301,21 @@ while read key; do
done < all_objects_list.txt
printf "\${BOLD}\${GREEN}Done!\${NORMAL}\\\n\\\n"
fail_count=\${#failed[@]}
printf "Done!\\\n\\\n" >&3
if [[ \$fail_count > 0 ]]; then
rand=\$(printf '%%04x' \$((RANDOM * RANDOM)))
filename="failed_keys_\$rand.txt"
printf "\${BOLD}\${RED}There were \$fail_count failures!\\\nSee \${NORMAL}\${BOLD}\$filename\${RED} for the list. You can replace the contents of \${NORMAL}\${BOLD}all_objects_list.txt\${RED} with the list of failures and re-run this script to process them.\${NORMAL}\\\n\\\n"
printf "%%s\\\n" "\${failed[@]}" > \$filename
printf "There were \$fail_count failures!\\\nSee \$filename for the list. You can replace the contents of all_objects_list.txt with the list of failures and re-run this script to process them.\\\n\\\n" >&3
else
printf "There were no failures. All the files are being restored. You can now delete this folder.\\\n\\\n"
printf "There were no failures. All the files are being restored. You can now delete this folder.\\\n\\\n" >&3
fi
printf "(Note: the time it takes to restore an object can be found in the AWS docs - just look for the $restore_tier restore tier, which is what you used.\\\nOnce restored, download the files from the S3 site or better yet use RCloneBrowser.\\\n"
printf "You can check the status of a file using the aws-see-restore-status script)\\\n"
exec 3>&-
EOF
)