-
Notifications
You must be signed in to change notification settings - Fork 1
/
backup.sh
204 lines (171 loc) · 5.07 KB
/
backup.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
#!/bin/bash
# source: https://github.com/JurajMlich/ultimate-bash-backup-script-with-rotation
# author: Juraj Mlich <[email protected]>
# usage:
# backup.sh /home/WHAT_TO_BACKUP /home/WHERE_TO_BACKUP 2x10h 10x5m 2dx4 1mx5
# in the second directory, subdirectories will be made for each period + temp directory
toExclude=()
# --------------
# PARSE PARAMS
# --------------
# Walk the argument list once: every `--exclude VALUE` pair is turned into a
# ready-made tar flag (--exclude=VALUE); everything else is preserved, in
# order, as a positional argument.
POSITIONAL=()
while [[ $# -gt 0 ]]
do
    case "$1" in
        --exclude)
            toExclude+=("--exclude=$2")
            shift # drop the option name
            shift # drop the option value (no-op if the value was omitted)
            ;;
        *)
            # not an option we know -- keep it for later
            POSITIONAL+=("$1")
            shift
            ;;
    esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
# --------------
# --------------
# BASE CONFIG
# -------------
dirToBackup=$(readlink -f "$1")
backupDir=$(readlink -f "$2")
date=$(date '+%Y-%m-%d')
hour=$(date '+%H-%M-%S')
# logFile=/home/juraj/Data/log/backup.log # absolute path

# Print a message to stdout; when $logFile is configured, also append a
# timestamped copy of the same message to that file.
function log(){
    echo "$1"
    if [[ -n "$logFile" ]]
    then
        echo "$(date "+%Y-%m-%d %H:%M:%S") - $1" >> "$logFile"
    fi
}
# log rotating: once the log grows past 300 lines, keep only the newest 300
if [[ -n "$logFile" && -f "$logFile" && $(wc -l < "$logFile") -gt 300 ]]
then
    echo "$(tail -n 300 "$logFile")" > "$logFile"
fi

# refuse to run when the destination directory is missing
if [[ ! -d "$backupDir" ]]
then
    log "The backup directory \`$backupDir\` unavailable."
    exit 1
fi

# scratch area for in-progress archives; created once, reused afterwards
tempDir="$backupDir/temp"
[[ -d "$tempDir" ]] || mkdir "$tempDir"
# LOCKING
# ----------------------------------
# Single-instance guard built on flock(1): the script keeps file descriptor
# 99 open on a lock file inside the backup directory for its whole lifetime,
# and flock operates on that descriptor rather than on a path.
LOCKFILE="$backupDir/backup.lock"
LOCKFD=99
# PRIVATE
# _lock MODE: run flock with the given single-letter mode on the lock FD
# (x = exclusive, s = shared, u = unlock, n = non-blocking).
_lock() { flock -$1 $LOCKFD; }
# EXIT handler: release our lock, then try to grab an exclusive non-blocking
# lock -- success means nobody else holds it, so the lock file can be removed.
_no_more_locking() { _lock u; _lock xn && rm -f $LOCKFILE; }
# Open FD 99 on the lock file (eval is needed so $LOCKFD expands inside the
# exec redirection) and install the cleanup trap.
_prepare_locking() { eval "exec $LOCKFD>\"$LOCKFILE\""; trap _no_more_locking EXIT; }
# ON START
_prepare_locking
# PUBLIC
exlock_now() { _lock xn; } # obtain an exclusive lock immediately or fail
exlock() { _lock x; } # obtain an exclusive lock
shlock() { _lock s; } # obtain a shared lock
unlock() { _lock u; } # drop a lock
# Avoid running more instances of the script
if ! exlock_now
then
echo "The script is already executing."
exit 1
fi
# --------------------------------------
# Purge leftovers from any previous (possibly interrupted) run.
# BUG FIX: the original `rm -rf "$tempDir/*"` quoted the asterisk, so the
# glob never expanded and a literal file named '*' was targeted -- nothing
# was ever purged. The glob must sit outside the quotes. ${tempDir:?}
# additionally aborts rather than ever expanding to a bare "/*".
if [[ -n "$tempDir" ]]
then
    rm -rf -- "${tempDir:?}"/*
fi
# backupTo DEST
# Produce one backup archive of $dirToBackup at the path DEST.
# Globals read: dirToBackup, tempDir, toExclude, backupFile
# Globals written: backupFile (path of the first archive built this run)
# Exits the script on compression or copy failure.
function backupTo() {
local to="$1"
log "Making backup of \`$dirToBackup\` to \`$to\`."
SECONDS=0
# since we do not want to compress the backup more times than needed
# we store the path to the already compressed archive in $backupFile
# and in case it is available, we only copy the file
if [[ ! -z "$backupFile" ]]
then
# BUG FIX: the copy result used to be ignored; a failed cp silently
# produced a missing/partial backup
if ! cp "$backupFile" "$to"
then
log "Error copying cached backup to \`$to\`."
exit 1
fi
else
# make the archive in temp directory so that if the script is cancelled in
# the middle, the next execution of script does not think that the backup was
# completed (we check when the last backup was done by finding a backup file
# that was modified in less than x minutes)
tar -cpvzf "$tempDir/archive.tar.gz" "${toExclude[@]}" -C "$dirToBackup" . > "$tempDir/tar.log"
code=$?
# 1 is thrown if some files were changed as they were being compressed;
# that is acceptable, anything else is a hard error
if [[ ! $code -eq 0 ]] && [[ ! $code -eq 1 ]]
then
log "Error during compressing backup. Error code: $code";
exit $code
fi
# remove the temp file that can be used to watch progress
rm "$tempDir/tar.log"
# move it to the right location
mv "$tempDir/archive.tar.gz" "$to"
backupFile="$to"
fi
# BUG FIX: the completion message used to live inside the else-branch, so
# backups created via the cached-copy path were never logged
log "Backup of \`$dirToBackup\` has been saved to \`$to\`. Took $(($SECONDS / 60))m and $(($SECONDS % 60))s."
}
# Main loop. The first two parameters are paths; every remaining parameter is
# a rotation period of the form COUNTxINTERVAL (e.g. 2x10h, 10x5m): keep COUNT
# backups, one per INTERVAL. Each period gets its own subdirectory under
# $backupDir.
for period in "${@:3}"
do
# split COUNTxINTERVAL on the 'x' separator
IFS='x' read -r -a periodSplit <<< "$period"
# how many of previous backups should be kept
toKeepAmount=${periodSplit[0]}
# interval itself (e.g. 2h)
interval=${periodSplit[1]}
# numeric part of interval (e.g. 2)
intervalNumeric=$(echo "$interval" | tr -dc '0-9')
# path where to store the backup (one subdirectory per period spec)
path="$backupDir/$period"
backupName="$date.tar.gz"
# Translate the interval suffix into minutes. Note the multipliers: 'h' is
# hours, 'd' days, 'w' weeks, 'm' MONTHS (x60x24x30), and a bare number is
# taken as minutes. Sub-daily periods embed the time in the file name so
# several backups per day can coexist.
if [[ $interval == *"h" ]]
then
backupName="$date $hour.tar.gz"
intervalInMins=$(($intervalNumeric * 60))
else
if [[ $interval == *"d" ]]
then
intervalInMins=$(($intervalNumeric * 60 * 24))
elif [[ $interval == *"w" ]]
then
intervalInMins=$(($intervalNumeric * 60 * 24 * 7))
elif [[ $interval == *"m" ]]
then
intervalInMins=$(($intervalNumeric * 60 * 24 * 30))
else
# no recognized suffix: treat the number as minutes
backupName="$date $hour.tar.gz"
intervalInMins=$intervalNumeric
fi
fi
# check if backup is not already done and do not continue if it is
if [[ ! -d "$path" ]]
then
# if the directory does not exist, let the flow continue as the
# the backup obviously does not exist
mkdir "$path";
else
# "already done" = some file in $path modified within the last interval
# (minus one minute of slack so cron drift does not skip a slot)
created=$(find "$path" -maxdepth 1 -mmin -$((intervalInMins - 1)) -type f | wc -l)
# if the backup exists, proceed to the next backup period
if [[ $created -gt 0 ]]
then
continue;
fi
fi
# remove old backups: drop the oldest entries so that after the new backup
# is added, at most $toKeepAmount remain (ls -t sorts newest first, so the
# tail of the listing is the oldest; NOTE(review): this parses ls output --
# safe for space-containing names read line by line, but relies on names
# having no newlines)
count=$(ls "$path" -Aq | wc -l)
if [[ $count -gt $((toKeepAmount - 1)) ]]
then
toRemoveFileNames=$(ls "$path" -t -1 | tail -n -$(($count - $toKeepAmount + 1)));
while read -r line; do
log "Removing $path/$line"
rm -rf "$path/$line"
done <<< "$toRemoveFileNames"
fi
# make new backup
backupTo "$path/$backupName"
done
# all periods handled; drop the scratch directory
rm -rf "$tempDir"