Compare commits
No commits in common. "a8d9e18c366f2588d244fc2b03ce8880308aaf71" and "9f5cfd946988e94aec0a1ac22798a44b12a94591" have entirely different histories.
a8d9e18c36 ... 9f5cfd9469

.aliases (18 changes)
@@ -1276,19 +1276,6 @@ git_create_stash_patch() {
     printf "${BOLD}${YELLOW}Created $file for stash@{$stashNum}.${NORMAL}\n"
 }
 
-git_print_tracked_file_sizes() {
-    git ls-tree -r -l HEAD | sort -k 4 -nr | awk '{
-        sha = substr($3, 1, 7); # Truncate the commit SHA to 7 characters
-        if ($4 >= 1024 * 1024) {
-            printf "%s sha:%s %06.2f MB %s\n", $2, sha, $4 / 1024 / 1024, $5
-        } else if ($4 >= 1024) {
-            printf "%s sha:%s %06.2f KB %s\n", $2, sha, $4 / 1024, $5
-        } else {
-            printf "%s sha:%s %04d B %s\n", $2, sha, $4, $5
-        }
-    }'
-}
-
 alias am='git commit --amend'
 alias amno='git_amend_nocheckin'
 alias ama='git commit --amend -C head --author'
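For context, the removed git_print_tracked_file_sizes helper (and its gsize alias, dropped further down) pretty-printed `git ls-tree -r -l HEAD`, whose columns are mode, type, object SHA, size in bytes, and path. A minimal stand-alone sketch of the same idea, assuming you just want the largest tracked files at HEAD (the 20-entry cap and KB-only formatting are illustrative choices, not the dotfiles' code):

    # List the 20 largest tracked files at HEAD, biggest first.
    # Columns from `git ls-tree -r -l`: mode, type, sha, size, path.
    # Note: like the original helper, this truncates paths containing spaces.
    git ls-tree -r -l HEAD \
      | sort -k 4 -n -r \
      | head -n 20 \
      | awk '{ printf "%10.1f KB  %s\n", $4 / 1024, $5 }'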
@@ -1351,8 +1338,8 @@ alias gmffm='git merge --ff-only master'
 alias gmffs='git merge --ff-only --squash'
 alias gmtheirs='git merge -Xtheirs'
 alias gp='git push'
-alias gpa='echo "pushing all branches..." && git push --all && echo "pushing tags..." && git push --tags'
-alias gpaf='echo "force pushing all branches..." && git push --all -f && echo "force pushing tags..." && git push --tags -f'
+alias gpa='git push --all && echo "pushing tags..." && git push --tags'
+alias gpaf='git push --all -f && echo "pushing tags..." && git push --tags -f'
 alias gpf='git push -f'
 alias gpff='git pull --ff-only'
 alias gplu='git pull --set-upstream origin HEAD'
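Usage note: the shorter gpa variant expands to `git push --all && echo "pushing tags..." && git push --tags`, so branches are pushed first and tags second. Assuming everything is already up to date on the remote, a run looks roughly like:

    $ gpa
    Everything up-to-date
    pushing tags...
    Everything up-to-date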
@@ -1415,7 +1402,6 @@ alias gx='git reset --hard'
 alias gxx='git reset --hard HEAD~1'
 alias gxom='git reset --hard origin/master'
 alias gstats='echo "Total commits: $(git rev-list HEAD --count)"; echo "\nAuthor breakdown:"; git shortlog | grep -E "^[^ ]"'
-alias gsize='git_print_tracked_file_sizes'
 alias gwip="git add . && git commit -m \"WIP\""
 
 ####################################################################################################
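Side note on the unchanged gstats alias: `git rev-list HEAD --count` prints the total number of commits, and `git shortlog | grep -E "^[^ ]"` keeps only the per-author summary lines while dropping the indented commit subjects. If you prefer built-in flags over the grep, an equivalent sketch is:

    # Per-author commit counts, sorted by count.
    git shortlog -s -n
    # Total number of commits reachable from HEAD.
    git rev-list --count HEAD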
@@ -92,43 +92,17 @@ if [[ $bucket == "" || $path == "" || $temp_dir == "" ]]; then
     exit 1
 fi
 
-printf "Restoring ${BOLD}${GREEN}$bucket:$path${NORMAL} with local temp folder ${BOLD}${GREEN}$temp_dir${NORMAL}\n"
+printf "${BOLD}Restoring ${GREEN}$bucket:$path${NORMAL}${BOLD} with local temp folder ${GREEN}$temp_dir${NORMAL}\n"
 
 mkdir -p "$temp_dir"
 pushd "$temp_dir" &>/dev/null
 
-items="$(aws s3api list-objects-v2 --bucket $bucket --prefix $path --query "Contents[?StorageClass=='DEEP_ARCHIVE']" --output text)"
-
-error=$?
-if [[ ! $error -eq 0 ]]; then
-    error "Error: failed to run the aws command. Aborting."
-    exit 1
-fi
-if [[ $items == "None" ]]; then
-    error "Didn't find any files. Check that your bucket name and path is correct."
-    exit 1
-fi
-
-# Format the items list.
-output="$(echo "$items" | LC_ALL=C awk '{print substr($0, index($0, $2))}' | awk '{NF-=3};3')"
-
-mapfile -t lines_array <<< "$output"
-num_items="${#lines_array[@]}"
-
-printf "Number of items to restore: ${BOLD}${YELLOW}$num_items${NORMAL}\n"
-printf "${BOLD}${RED}Proceed?\n> ${NORMAL}"
-read -e proceed
-if [[ $proceed == "1" || $proceed == "y" || $proceed == "Y" || $proceed == "yes" || $proceed == "YES" ]]; then
-    echo "$output" > all_objects_list.txt
-
-    # Generate the main script that will kick off the restoration.
-    printf "while read x; do\n printf \"aws s3api restore-object --restore-request '{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}' --bucket $bucket --key \\\\\"\$x\\\\\"\\\\n\"\n aws s3api restore-object --restore-request \"{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}\" --bucket $bucket --key \"\$x\"\ndone < all_objects_list.txt\nprintf \"\\\\nDone! You can now delete this folder.\\\\nYour files are currently being restored. The time it takes to restore can be found in the AWS docs - just look for the $restore_tier restore tier, which is what you used.\\\\nOnce restored, download the files from the S3 site or better yet use RCloneBrowser.\\\\n\"\n" > run.sh
-    chmod +x run.sh
-
-    printf "${BOLD}You can now run ${GREEN}$temp_dir/run.sh${NORMAL}${BOLD} to start the restoration process.\n"
-else
-    echo Aborting.
-fi
+aws s3api list-objects-v2 --bucket $bucket --prefix $path --query "Contents[?StorageClass=='DEEP_ARCHIVE']" --output text | LC_ALL=C awk '{print substr($0, index($0, $2))}' | awk '{NF-=3};3' > all_objects_list.txt
+# Generate the main script that will kick off the restoration.
+printf "while read x; do\n printf \"aws s3api restore-object --restore-request '{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}' --bucket $bucket --key \\\\\"\$x\\\\\"\\\\n\"\n aws s3api restore-object --restore-request \"{\\\\\"Days\\\\\":$days_available,\\\\\"GlacierJobParameters\\\\\":{\\\\\"Tier\\\\\":\\\\\"$restore_tier\\\\\"}}\" --bucket $bucket --key \"\$x\"\ndone < all_objects_list.txt\nprintf \"\\\\nDone! You can now delete this folder.\\\\nYour files are currently being restored. The time it takes to restore can be found in the AWS docs - just look for the $restore_tier restore tier, which is what you used.\\\\nOnce restored, download the files from the S3 site or better yet use RCloneBrowser.\\\\n\"\n" > run.sh
+chmod +x run.sh
+
+printf "${BOLD}You can now run ${GREEN}$temp_dir/run.sh${NORMAL}${BOLD} to start the restoration process.\n"
 
 popd &>/dev/null
 
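For reference, each iteration of the generated run.sh issues one S3 restore request per archived key. Stripped of the printf escaping, the loop it writes out amounts to roughly the following (the bucket, days, and tier values come from the script's variables; this is a readable sketch, not the verbatim generated file):

    # One Glacier restore request per key listed in all_objects_list.txt.
    while read -r key; do
        aws s3api restore-object \
            --bucket "$bucket" \
            --key "$key" \
            --restore-request "{\"Days\":$days_available,\"GlacierJobParameters\":{\"Tier\":\"$restore_tier\"}}"
    done < all_objects_list.txt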