#!/bin/bash
# This makes the index.html pages in S3 so your non-techies can have access.
# I know it'd be faster to make them at upload time, but there are multiple
# programs doing the uploads, and keeping a local statefile is exactly how I
# got so annoyed with Terraform in the first place.
# TODO: exec time is 19s for each index.html because I'm using bash
# out of stubbornness.
# -- config --
# config settings S3BUCKET and S3PREFIX are stored in
# a config file that is NOT in the git repo, obviously.
# Looks in ~/.config/s3-backup.cfg, ~/.s3-backup.cfg, ./s3-backup.cfg,
# or s3-backup.cfg next to this script.
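# For illustration, a hypothetical s3-backup.cfg (it is just sourced shell;
# the HTML_HEADER/HTML_FOOTER variables below can be overridden here too):
#   S3BUCKET="example-backup-bucket"
#   S3PREFIX="myhost"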
HTML_START='<!doctype html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<meta name=viewport content="width=device-width, initial-scale=1">
<title>Backups</title>
<style type="text/css">body{margin:2.5em auto;max-width:40em;line-height:1.5;font-size:18px;color:#444;padding:0 0.5em}h1,h2,h3{line-height:1.2}</style>
<style type="text/css">th:nth-of-type(2){width:7em;}td:nth-of-type(2){text-align:right;}th:nth-of-type(3){width:10em;}td:nth-of-type(3){text-align:right;}#footer{font-style:italic;}</style>
</head>
'
# visible text at the top of each page, after the title
# you can override this in s3-backup.cfg
HTML_HEADER="<div>Remember: to restore a backup, first download and unpack the closest previous weekly/monthly backup, then fetch and unpack the daily backup over it.</div>"
# visible text at the bottom of each page
# you can override this in s3-backup.cfg
HTML_FOOTER="<hr/>
<div id=footer>Last updated: $(date -I)</div>
"
# --- init ---
whisper(){ [[ -t 0 ]] && echo "$*"; } # be quiet in cronjobs
die(){ echo >&2 "$1"; exit "${2:-1}"; }
get_top_s3url(){
  if [[ -n "$S3PREFIX" ]]; then
    echo "s3://${S3BUCKET}/${S3PREFIX%/}"
  else
    echo "s3://${S3BUCKET}"
  fi
}
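# e.g. (made-up names) S3BUCKET=example-bucket S3PREFIX=myhost/ yields
# "s3://example-bucket/myhost"; with no S3PREFIX, just "s3://example-bucket"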
# where does today's index.html object go, if the user didn't say?
get_todays_s3url(){
  local wday mday today snardate s3path
  # weekday (0 == Sun), monthday, today
  wday=$(( $(printf '%(%u)T') % 7 ))
  mday=$(printf '%(%d)T')
  today=$(printf '%(%F)T') # yyyy-mm-dd
  # snapshot day is last sunday unless today is sunday
  snardate=$(printf '%(%F)T' $(( $(printf '%(%s)T') - ( wday * 86400 ) )) )
  s3path="backups_daily/$today"
  if [[ "$snardate" == "$today" ]]; then
    s3path="backups_weekly/$today"
    mday=${mday#0} # strip the leading zero: 08 and 09 would be invalid octal in the next line
    [[ $mday -lt 8 ]] && s3path="backups_monthly/$today"
  fi
  echo "$(get_top_s3url)/$s3path"
}
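# Worked example (dates are illustrative): on Wednesday 2024-03-06, wday=3 and
# snardate=2024-03-03 != today, giving .../backups_daily/2024-03-06; on Sunday
# 2024-03-10 it gives .../backups_weekly/2024-03-10; on Sunday 2024-03-03
# (mday 3 < 8, i.e. the month's first Sunday) .../backups_monthly/2024-03-03.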
# output index file for the top of the tree
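# (For reference: "aws s3 ls" prints sub-prefixes as lines containing " PRE name/"
# and objects roughly as "YYYY-MM-DD HH:MM:SS  SIZE name"; the loops below key off
# the " PRE " marker and read the date/size/name fields from the object lines.)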
spew_top_index(){
  local prefix1=$1 prefix2 i items
  prefix1="${prefix1%/}"
  echo "$HTML_START"
  echo "<body>"
  echo "<header><h2>$prefix1</h2>$HTML_HEADER</header>"
  echo "<table><thead><tr><th>Name</th><th>Size</th><th>Date</th></tr></thead>"
  echo "<tbody>"
  for prefix2 in backups_monthly backups_weekly backups_daily; do
    prefix2="${prefix2%/}"
    mapfile -t items < <(aws s3 ls "$prefix1/$prefix2/")
    for i in "${items[@]}"; do
      if [[ $i =~ index\.html ]]; then
        continue
      elif [[ $i =~ " PRE " ]]; then
        read -r _ name <<<"$i"
        name=${name%/}
        name="$prefix2/$name"
        echo "<tr><td><a href=\"$name/index.html\">$name</a></td><td> </td><td> </td></tr>"
      else
        read -r date1 _ size name <<<"$i"
        name="$prefix2/$name"
        size=$(numfmt "$size" --to=iec)
        echo "<tr><td><a href=\"$name\">$name</a></td><td>$size</td><td>$date1</td></tr>"
      fi
    done
  done
  echo "</tbody></table>"
  echo "$HTML_FOOTER"
  echo "</body>"
}
# output index file for one backup
spew_leaf_index(){
  local prefix=$1 items i
  prefix="${prefix%/}"
  echo "$HTML_START"
  echo "<body>"
  echo "<header><h2>$prefix</h2>$HTML_HEADER</header>"
  echo "<table><thead><tr><th>Name</th><th>Size</th><th>Date</th></tr></thead>"
  echo "<tbody>"
  mapfile -t items < <(aws s3 ls "$prefix/")
  for i in "${items[@]}"; do
    if [[ $i =~ index\.html ]]; then
      continue
    elif [[ $i =~ " PRE " ]]; then
      read -r _ name <<<"$i"
      echo "<tr><td><a href=\"$name\">$name</a></td><td> </td><td> </td></tr>"
    else
      read -r date1 _ size name <<<"$i"
      size=$(numfmt "$size" --to=iec)
      echo "<tr><td><a href=\"$name\">$name</a></td><td>$size</td><td>$date1</td></tr>"
    fi
  done
  echo "</tbody></table>"
  echo "$HTML_FOOTER"
  echo "</body>"
}
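# (Illustrative manual check, bucket/prefix names made up:
#   spew_leaf_index "s3://example-bucket/myhost/backups_daily/2024-03-06" >/tmp/index.html
# then eyeball the file in a browser before cron uploads the real thing.)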
# --- main() ---
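# Usage (illustrative): with no arguments, e.g. from a nightly cron entry like
#   30 3 * * * /path/to/backup_index_in_s3
# it rebuilds today's leaf index plus the top-level index. Or pass one or more
# explicit s3:// prefixes, e.g.
#   ./backup_index_in_s3 s3://example-bucket/myhost/backups_weekly/2024-03-10
# (bucket and prefix names above are made up).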
umask 077
# check requirements
if ! command -v "aws" >/dev/null; then
  die "ERROR: need the aws program; aborting."
fi
# load config to get S3BUCKET and S3PREFIX
declare S3BUCKET S3PREFIX
workdir="$(dirname "$(readlink -f -- "$0")")"
for f in "$HOME/.config/s3-backup.cfg" "$HOME/.s3-backup.cfg" "./s3-backup.cfg" "$workdir/s3-backup.cfg" ; do
  # shellcheck disable=SC1090
  [[ -e "$f" ]] && source "$f" && break
done
if [[ -z "$S3BUCKET" ]]; then
  die "no S3BUCKET; is the config file missing?"
fi
TMPFILE=$(mktemp)
# shellcheck disable=SC2064
trap "rm $TMPFILE" EXIT
if [[ "$#" -eq 0 ]]; then
# assume it's just for today and rebuild the top
s3url=$(get_todays_s3url)
whisper "(building $s3url/index.html)"
spew_leaf_index "$s3url" >"$TMPFILE" && \
aws s3 cp --quiet --content-type text/html "$TMPFILE" "${s3url}/index.html"
s3url=$(get_top_s3url)
whisper "(building $s3url/index.html)"
spew_top_index "$s3url" >"$TMPFILE" && \
aws s3 cp --quiet --content-type text/html "$TMPFILE" "${s3url}/index.html"
else
topurl=$(get_top_s3url)
for s3url in "$@"; do
if [[ ! $s3url =~ ^s3:// ]]; then
echo >&2 "$s3url is not an s3:// url; skipping"
continue
fi
if [[ "$s3url" == "$topurl" ]]; then
whisper "(building $s3url/index.html)"
spew_top_index "$s3url" >"$TMPFILE" && \
aws s3 cp --quiet --content-type text/html "$TMPFILE" "${s3url}/index.html"
else
whisper "(building $s3url/index.html)"
spew_leaf_index "$s3url" >"$TMPFILE" && \
aws s3 cp --quiet --content-type text/html "$TMPFILE" "${s3url}/index.html"
fi
done
fi