-
Notifications
You must be signed in to change notification settings - Fork 1
/
run.py
executable file
·78 lines (61 loc) · 2.33 KB
/
run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
#!/usr/local/bin/python
import logging
import os
import schedule
import subprocess
import tempfile
import time
from google.cloud import storage
# Set up logging: timestamped messages at INFO level to stderr.
logging.basicConfig(format = '%(asctime)s %(message)s', level = logging.INFO)
# Backups are uploaded to Google Cloud Storage.
# Authentication is done via a service account keyfile. Default location is
# /etc/creds.json; customise via the GOOGLE_APPLICATION_CREDENTIALS environment
# variable (read implicitly by storage.Client()).
# BUCKET: name of the GCS bucket to store backups in (required; GCS.__init__
# aborts the process if it does not exist).
BUCKET = os.environ.get('BUCKET')
# KEEP: how many most-recent backups to retain per database (default 5).
KEEP = int(os.environ.get('KEEP', 5))
class GCS:
    """Thin wrapper around a Google Cloud Storage bucket used to store
    MySQL backups and prune old ones.

    Credentials come from the service-account keyfile referenced by the
    GOOGLE_APPLICATION_CREDENTIALS environment variable (picked up
    implicitly by ``storage.Client()``).
    """

    def __init__(self, bucket, keep):
        # keep: number of most-recent backups to retain per prefix.
        self.keep = keep
        self.storage = storage.Client()
        # Fail fast at startup if the bucket is missing, rather than
        # erroring on the first upload attempt at backup time.
        if not self.storage.lookup_bucket(bucket):
            # Lazy %-args: logging formats only if the record is emitted.
            logging.critical("ERROR: bucket does not exist [%s]", bucket)
            # FIX: raise SystemExit instead of the site-provided exit()
            # builtin, which is not guaranteed to be available.
            raise SystemExit(1)
        self.bucket = self.storage.get_bucket(bucket)

    def upload(self, f, upload_name):
        """Upload the (already-written) file object *f* as *upload_name*."""
        blob = storage.Blob(upload_name, self.bucket)
        # FIX: original message ran the bucket repr and blob name together
        # with no separator; log a readable bucket/name pair instead.
        logging.info("Uploading backup to %s/%s", self.bucket.name, upload_name)
        # rewind=True: f was just written, so seek back to offset 0 before
        # streaming its contents to GCS.
        blob.upload_from_file(f, rewind=True)

    def cleanup(self, prefix = ""):
        """Delete all but the newest ``self.keep`` blobs under *prefix*.

        NOTE(review): relies on list_blobs returning names in lexicographic
        order, which matches chronological order for the zero-padded
        timestamped backup names produced by backup().
        """
        backups = list(self.bucket.list_blobs(prefix = prefix))
        # Everything before the last `keep` entries is stale.
        to_delete = backups[:max(0, len(backups) - self.keep)]
        for backup in to_delete:
            backup.delete()
# How often to run the backup job.
EVERY_N_DAYS = int(os.environ.get('EVERY_N_DAYS', 1)) # default to once a day
AT_TIME = os.environ.get('AT_TIME', "00:00") # at midnight (HH:MM, local time)
# DB connection options for mysqldump, all taken from the environment.
# MYSQL_USER, MYSQL_PASSWORD and MYSQL_DATABASE have no defaults and are
# effectively required for a usable backup.
HOST = os.environ.get('MYSQL_HOST', 'localhost')
PORT = os.environ.get('MYSQL_PORT', '3306')
USERNAME = os.environ.get('MYSQL_USER')
PASSWORD = os.environ.get('MYSQL_PASSWORD')
DB = os.environ.get('MYSQL_DATABASE')
# Construct the GCS client at import time; aborts the process if BUCKET
# does not exist, so misconfiguration is caught before the first backup.
cloud = GCS(BUCKET, KEEP)
def backup():
    """Dump the configured MySQL database, upload it to GCS, and prune
    old backups, keeping the most recent KEEP copies.

    Backups are named <DB>/<DB>-<timestamp>.sql so that cleanup() can
    prune per-database via the <DB>/ prefix.
    """
    backup_name = "%s/%s-%s.sql" % (DB, DB, time.strftime("%Y-%m-%d-%H%M%S"))
    logging.info("Running backup %s", backup_name)
    # The temp file is deleted automatically when the `with` block exits.
    with tempfile.NamedTemporaryFile() as f:
        # NOTE(review): passing --password on the command line exposes the
        # secret in the process list; consider the MYSQL_PWD env var or a
        # defaults file instead.
        result = subprocess.run([
            "mysqldump",
            "--host=%s" % HOST,
            "--port=%s" % PORT,
            "--user=%s" % USERNAME,
            "--password=%s" % PASSWORD,
            DB], stdout = f)
        # FIX: the original ignored mysqldump failures, uploading a
        # truncated/empty dump and then deleting good old backups in
        # cleanup(). Skip both on failure so existing backups survive.
        if result.returncode != 0:
            logging.error(
                "mysqldump failed with exit code %d; skipping upload",
                result.returncode)
            return
        cloud.upload(f, backup_name)
        cloud.cleanup(DB)
# Register the backup job: every EVERY_N_DAYS days at AT_TIME (local time).
schedule.every(EVERY_N_DAYS).days.at(AT_TIME).do(backup)
# Poll forever; `schedule` only fires jobs when run_pending() is called,
# so a 30-second sleep bounds how late a job can start.
while True:
    schedule.run_pending()
    time.sleep(30)