# storelet.py
import os
import logging
from tempfile import mkstemp, mkdtemp
from shutil import rmtree
from zipfile import ZipFile, ZIP_DEFLATED
from datetime import datetime
from boto.s3.connection import S3Connection
from boto.s3.key import Key
__version__ = "0.1.8"
__author__ = "Mark Embling"
__email__ = "[email protected]"

# Module-level logger; the NullHandler prevents "No handlers could be
# found" warnings for library users who never configure logging.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())

# Defaults applied by setup_logging() unless overridden by the caller.
LOGGING_DEFAULTS = {"level": logging.INFO,
                    "format": "%(asctime)s [%(levelname)s]: %(message)s"}

def setup_logging(**kwargs):
    """Convenience function for setting up some sane logging defaults.

    Any keyword arguments override the matching entries in
    ``LOGGING_DEFAULTS`` and are passed straight to ``logging.basicConfig``.
    """
    # dict(defaults, **overrides) works on both Python 2 and 3; the former
    # ``dict(LOGGING_DEFAULTS.items() + kwargs.items())`` relied on list
    # concatenation of .items() and raises TypeError on Python 3, where
    # .items() returns a view object that does not support ``+``.
    opts = dict(LOGGING_DEFAULTS, **kwargs)
    logging.basicConfig(**opts)
class ZipBackup(object):
    """
    A compressed ZIP file backup.

    Note: large inclusion operations can sometimes take time as files
    are compressed on the fly. This prevents all the files being copied
    to a temporary location (and using unnecessary extra space) and
    storing up the need for a potentially large compression at the end.
    """

    def __init__(self, name):
        """Create an empty backup backed by a temporary file.

        :param name: logical backup name, used as the S3 key prefix
        """
        self.name = name
        handle, self._path = mkstemp()
        # mkstemp() returns an *open* OS-level descriptor; the original
        # code discarded it, leaking one fd per backup. The archive is
        # only ever accessed by path, so close the descriptor right away.
        os.close(handle)
        logger.debug("Created temporary file %s" % self._path)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        """Delete the temporary archive file."""
        os.remove(self._path)
        logger.debug("Removed temporary file %s" % self._path)

    def include_directory(self, path, preserve_paths=False, name=None):
        """Add the contents of a directory to the backup.

        :param path: directory to walk recursively and add
        :param preserve_paths: if True, keep each file's full path inside
            the archive instead of making it relative to ``path``
        :param name: optional top-level folder name inside the archive
        """
        path = os.path.abspath(path)
        logger.debug("Adding directory %s" % path)
        with ZipFile(self._path, 'a', ZIP_DEFLATED, allowZip64=True) as zipfile:
            for base, dirs, files in os.walk(path):
                logger.debug("Walking directory %s" % path)
                for file in files:
                    filename = os.path.join(base, file)
                    try:
                        zipfile.write(filename,
                            self._get_filename_for_archive(
                                path, filename, preserve_paths, name))
                        logger.info("Added file %s" % filename)
                    except Exception:
                        # Best-effort semantics: skip unreadable files but
                        # never abort the whole backup. ``warning`` replaces
                        # the deprecated ``warn`` alias; the bare ``except:``
                        # also caught KeyboardInterrupt/SystemExit.
                        logger.warning("Could not add file %s" % file,
                                       exc_info=True)
        logger.debug("Finished directory %s" % path)

    def save_to_s3(self, bucket, access_key, secret_key, **kwargs):
        """Save the backup to Amazon S3.

        :param bucket: S3 bucket name to upload into
        :param access_key: AWS access key id
        :param secret_key: AWS secret access key
        :param kwargs: passed through to ``S3Connection``
        """
        logger.info("Saving to S3 in '%s' bucket" % bucket)
        conn = S3Connection(access_key, secret_key, **kwargs)
        bucket = conn.get_bucket(bucket)
        key = Key(bucket)
        # Key name is "<backup name>_<timestamp>.zip" so repeated saves
        # never overwrite each other.
        key.key = '%s_%s.zip' % \
            (self.name, datetime.now().strftime("%Y%m%d%H%M%S"))
        key.set_contents_from_filename(self._path)
        logger.info("Saving to S3 done %s" % key.key)

    def include_new_dir(self, name):
        """Add a new empty directory to the backup; returns a context
        manager whose contents are zipped in when it exits."""
        return BackupIncludedDirectory(name, self)

    def _get_filename_for_archive(self, directory, filename,
                                  preserve_paths, name):
        """Compute the archive entry name for ``filename``."""
        if not preserve_paths:
            # os.path.relpath strips only the leading ``directory`` prefix.
            # The previous ``str.replace(directory, "")`` removed *every*
            # occurrence of the directory string anywhere in the path and
            # left a leading separator, yielding entries like "name//file"
            # when ``name`` was supplied.
            filename = os.path.relpath(filename, directory)
        if name is not None:
            filename = name + os.sep + filename
        return filename
class BackupIncludedDirectory(object):
    """A new directory which is subsequently added to the backup"""

    def __init__(self, name, owner):
        # Scratch directory on disk; its contents are folded into the
        # owning backup when the context manager exits.
        self.name = name
        self._owner = owner
        self.path = mkdtemp()
        logger.debug("Created temporary directory %s" % self.path)

    def __enter__(self):
        # Hand the wrapper itself back so callers can use ``.path``.
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Zip the accumulated files into the owning backup under
        # ``self.name``, then discard the scratch directory.
        self._owner.include_directory(self.path, preserve_paths=False,
                                      name=self.name)
        rmtree(self.path)
        logger.debug("Removed temporary directory %s" % self.path)

    def __str__(self):
        # The string form is simply the on-disk scratch path.
        return self.path