-
Notifications
You must be signed in to change notification settings - Fork 2
/
backup.sh
executable file
·168 lines (140 loc) · 5.79 KB
/
backup.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
#!/usr/bin/env bash
#---------------------------------------------------------------------
# Print the help text to stdout, then exit.
# Arguments: $1 - optional exit status (defaults to 0).
usage () {
cat <<EOT
${0##*/}
Backup a directory, MySQL, PostgreSQL, MongoDB and SQLite databases
using a rotating scheme of daily, weekly and monthly backups.
To use, move the config.sh.default included in the repo to config.sh
and edit the various settings as needed.
When it runs for each feature turned on:
* Creates the backup
* Moves to the BACKUP_LOCAL_PATH folder
* Names the backup as daily_db_backup_${TOOL_BEING_BACKED_UP}....
* If it is 1st day of the week, copies it to
weekly_${TOOL_BEING_BACKED_UP}....
* If it is 1st day of the month, copies it to
monthly_${TOOL_BEING_BACKED_UP}.....
* Daily backups that are older than a week are deleted
* Weekly backups that are older than 28 days (4 weeks) are deleted
* Monthly backups that are older than 365 days (1 year) are deleted
* If the AMAZON_S3_UPLOAD_ACTIVE is set to true, the BACKUP_LOCAL_PATH
folder is synced with a S3 Bucket
Requires:
S3 Command Line Tools installed: http://s3tools.org/s3cmd
Usage:
${0##*/}
Bash Variables Set From "config.sh":
# MySQL Settings
MYSQL_DB_USERNAME=ThisIsYourMySQLUsername
MYSQL_DB_PASSWORD=ThisIsYourMySQLRootPassword
# PostgreSQL Settings
POSTGRES_DB_USERNAME=ThisIsYourPostgresUsername
# MongoDB Settings
MONGO_DB_USERNAME=ThisIsYourMongoDBUsername
MONGO_DB_PASSWORD=ThisIsYourMongoDBPassword
# SQLite Settings
SQLITE_PATH=/Users/jtyost2/Sites/jtyost2/S3_Backup/testing
# Webroot Backup Settings
WEBROOT_LOCAL_PATH=/Users/jtyost2/Desktop
BACKUP_LOCAL_PATH=/Users/jtyost2/Sites/jtyost2/testing_dump/
AMAZON_S3_PATH=s3://BucketName/folder_in_bucket/
# Turn on or off features
MYSQL_DUMP_ACTIVE=false
POSTGRESQL_DUMP_ACTIVE=false
MONGODB_DUMP_ACTIVE=false
SQLITE_DUMP_ACTIVE=true
AMAZON_S3_UPLOAD_ACTIVE=false
EOT
exit "${1:-0}" # Exit with code 0 unless an arg is passed to the method.
}
# Show the help text and exit when invoked with -h.
if [ "$1" = '-h' ]; then
usage
fi
# Import settings from ./config.sh, resolved relative to this script so the
# backup can be launched from any working directory (e.g. cron).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "${DIR}/config.sh"
# Timestamp shared by every backup file created on this run.
THEDATE=$(date +%Y_%m_%d_%H_%M)
#Dump Databases for Daily DB Backups
# Each *_DUMP_ACTIVE flag defaults to false: an unset variable would otherwise
# expand to an empty command, which succeeds, silently ENABLING the branch
# whenever config.sh omits the setting.
if ${MYSQL_DUMP_ACTIVE:-false};
then
mysqldump --add-drop-table --all-databases --user="${MYSQL_DB_USERNAME}" --password="${MYSQL_DB_PASSWORD}" | gzip -9 > "${BACKUP_LOCAL_PATH}daily_db_backup_mysql_${THEDATE}.sql.gz"
fi
if ${POSTGRESQL_DUMP_ACTIVE:-false};
then
pg_dumpall --username="${POSTGRES_DB_USERNAME}" | gzip -9 > "${BACKUP_LOCAL_PATH}daily_db_backup_postgresql_${THEDATE}.sql.gz"
fi
if ${MONGODB_DUMP_ACTIVE:-false};
then
# --archive makes mongodump stream a single dump to stdout; without it
# mongodump writes a dump/ directory and the pipe captures nothing.
# NOTE(review): the archive is BSON, not SQL — the .sql.gz name is kept
# only so the weekly/monthly rotation below keeps matching it.
mongodump --host localhost --username "${MONGO_DB_USERNAME}" --password "${MONGO_DB_PASSWORD}" --archive | gzip -9 > "${BACKUP_LOCAL_PATH}daily_db_backup_mongodb_${THEDATE}.sql.gz"
fi
if ${SQLITE_DUMP_ACTIVE:-false};
then
# A SQLite database is a single file; compress it directly.
gzip -9 < "${SQLITE_PATH}" > "${BACKUP_LOCAL_PATH}daily_db_backup_sqlite_${THEDATE}.gz"
fi
#Tar and Gzip WWW Folder for Daily Backup
tar -cf "${BACKUP_LOCAL_PATH}daily_site_backup_${THEDATE}.tar" -X "${DIR}/tar_exclude.txt" "${WEBROOT_LOCAL_PATH}"
gzip -9 "${BACKUP_LOCAL_PATH}daily_site_backup_${THEDATE}.tar"
#Find and delete old daily site backups that are over a week old.
# Matching with -name (instead of a shell glob in the path argument) avoids
# find erroring on the unexpanded literal pattern when no backups exist yet.
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'daily_site_backup_*' -mtime +7 -exec rm -f -- {} \;
#Find and delete old daily database backups that are over a week old
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'daily_db_backup_*' -mtime +7 -exec rm -f -- {} \;
# On Mondays (ISO day-of-week 1) keep a weekly copy of today's backups.
if [ "$(date +%u)" = 1 ]
then
#Copy Daily Database Backup for Weekly DB Backups
# Flags default to false so an incomplete config.sh cannot enable a branch.
if ${MYSQL_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_mysql_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}weekly_db_backup_mysql_${THEDATE}.sql.gz"
fi
if ${POSTGRESQL_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_postgresql_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}weekly_db_backup_postgresql_${THEDATE}.sql.gz"
fi
if ${MONGODB_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_mongodb_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}weekly_db_backup_mongodb_${THEDATE}.sql.gz"
fi
if ${SQLITE_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_sqlite_${THEDATE}.gz" "${BACKUP_LOCAL_PATH}weekly_db_backup_sqlite_${THEDATE}.gz"
fi
#Copy Daily Webroot Backup for Weekly Backup
cp "${BACKUP_LOCAL_PATH}daily_site_backup_${THEDATE}.tar.gz" "${BACKUP_LOCAL_PATH}weekly_site_backup_${THEDATE}.tar.gz"
#Find and delete weekly site backups that are over 4 weeks old
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'weekly_site_backup_*' -mtime +28 -exec rm -f -- {} \;
#Find and delete weekly database backups that are over 4 weeks old
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'weekly_db_backup_*' -mtime +28 -exec rm -f -- {} \;
fi
# On the 1st of the month keep a monthly copy of today's backups.
if [ "$(date +%d)" = 01 ]
then
#Copy Daily Database Backup for Monthly DB Backups
# Flags default to false so an incomplete config.sh cannot enable a branch.
if ${MYSQL_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_mysql_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}monthly_db_backup_mysql_${THEDATE}.sql.gz"
fi
if ${POSTGRESQL_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_postgresql_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}monthly_db_backup_postgresql_${THEDATE}.sql.gz"
fi
if ${MONGODB_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_mongodb_${THEDATE}.sql.gz" "${BACKUP_LOCAL_PATH}monthly_db_backup_mongodb_${THEDATE}.sql.gz"
fi
if ${SQLITE_DUMP_ACTIVE:-false};
then
cp "${BACKUP_LOCAL_PATH}daily_db_backup_sqlite_${THEDATE}.gz" "${BACKUP_LOCAL_PATH}monthly_db_backup_sqlite_${THEDATE}.gz"
fi
#Copy Daily Webroot Backup for Monthly Backup
cp "${BACKUP_LOCAL_PATH}daily_site_backup_${THEDATE}.tar.gz" "${BACKUP_LOCAL_PATH}monthly_site_backup_${THEDATE}.tar.gz"
#Find and delete old monthly site backups that are over 1 year old
# (was -mtime +356: an off-by-nine typo for the documented 365 days)
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'monthly_site_backup_*' -mtime +365 -exec rm -f -- {} \;
#Find and delete old monthly database backups that are over 1 year old
find "${BACKUP_LOCAL_PATH}" -maxdepth 1 -name 'monthly_db_backup_*' -mtime +365 -exec rm -f -- {} \;
fi
#S3 Sync
# Defaults to false so an incomplete config.sh cannot trigger an upload
# (an unset flag would expand to an empty command, which succeeds).
if ${AMAZON_S3_UPLOAD_ACTIVE:-false};
then
s3cmd sync --delete-removed "${BACKUP_LOCAL_PATH}" "${AMAZON_S3_PATH}"
fi
# Completion notice goes to stderr so cron mail captures it even when
# stdout is redirected away.
echo "S3Backup Completed For: ${THEDATE}" >&2