#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# backup-zfs -- push GPG-signed, encrypted ZFS snapshot streams to an SFTP
# backup host.

from __future__ import print_function

import hashlib
import os
import subprocess
import sys
import time

import paramiko

SFTP_HOST = 'botero.siccegge.de'
SFTP_DIR = '/srv/backup/mitoraj'
# Placeholder values -- adjust these three to the local setup:
SFTP_USER = 'backup'       # placeholder: remote user to authenticate as
ZPOOL = 'base'             # placeholder: local pool holding the datasets
BUFFER = 256 * 1024        # placeholder: chunk size (bytes) for reads/writes
GPGUSER = '9FED5C6CE206B70A585770CA965522B9D49AE731'

# ANSI colour escape sequences used for terminal output
term = {
    'red': "\033[0;31m",
    'green': "\033[0;32m",
    'yellow': "\033[0;33m",
    'purple': "\033[0;35m",
    'none': "\033[0m",
}

def print_colored(data, color):
    sys.stdout.write(term[color])
    sys.stdout.write(data)
    sys.stdout.write(term['none'])
    sys.stdout.write('\n')

def postprocess_datasets(datasets):
    """Group 'device@snapshot' names into a dict mapping each device to a
    sorted list of its snapshot names."""
    devices = set([entry.split('@')[0] for entry in datasets])

    result = dict()
    for device in devices:
        result[device] = sorted([entry.split('@')[1] for entry in datasets
                                 if entry.startswith(device)])
    return result
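
# For illustration (hypothetical snapshot names), the grouping above behaves
# like this:
#   postprocess_datasets(['root@2014-01-01', 'root@2014-02-01', 'home@2014-01-01'])
#   -> {'home': ['2014-01-01'], 'root': ['2014-01-01', '2014-02-01']}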

paramiko.util.log_to_file("/tmp/paramiko.log")

host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
hostkeytype = host_keys[SFTP_HOST].keys()[0]
hostkey = host_keys[SFTP_HOST][hostkeytype]

agent = paramiko.Agent()
transport = paramiko.Transport((SFTP_HOST, 22),
                               default_window_size=1024*BUFFER,
                               default_max_packet_size=64*BUFFER)
# transport.max_packet_size = BUFFER
# transport.window_size = BUFFER * 64
transport.connect(hostkey=hostkey)

# Try every key the SSH agent offers until one of them authenticates
for key in agent.get_keys():
    try:
        transport.auth_publickey(SFTP_USER, key)
        break
    except paramiko.SSHException:
        continue

sftp = transport.open_sftp_client()
sftp.chdir(SFTP_DIR)

def sftp_send(dataset, reference=None):
    """Stream one snapshot through gpg to the SFTP server.

    Without a reference snapshot a full 'zfs send' stream is uploaded,
    otherwise an incremental stream relative to 'reference'."""
    if reference is None:
        filename = '%s.full.zfs.gpg' % dataset
    else:
        filename = '%s.from.%s.zfs.gpg' % (dataset, reference)

    zfscommand = ['sudo', 'zfs', 'send', '-D', '%s/%s' % (ZPOOL, dataset)]
    if reference is not None:
        zfscommand = zfscommand + ['-i', reference]

    zfs = subprocess.Popen(zfscommand, stdout=subprocess.PIPE, bufsize=2*BUFFER)

    gpgcommand = ['gpg', '--batch', '--compress-algo', 'ZLIB',
                  '--sign', '--encrypt', '--recipient', GPGUSER]
    gpg = subprocess.Popen(gpgcommand, bufsize=2*BUFFER,
                           stdin=zfs.stdout,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)

    junk = gpg.stdout.read(BUFFER)

    gpg.poll()  # update returncode so an early gpg failure is caught here
    if gpg.returncode not in [None, 0]:
        print_colored("Error:\n\n" + gpg.stderr.read(), 'red')
        return

    with open('/tmp/SHA256SUM', 'a') as digestfile:
        with sftp.open(filename, 'xw', BUFFER) as remotefile:
            digest = hashlib.sha256()
            sys.stdout.write(term['purple'])
            done = 0
            startt = time.time()
            while len(junk) > 0:
                #if len(junk) < BUFFER:
                #    print_colored("short read: %d" % len(junk), 'yellow')
                done = done + len(junk)
                # sys.stdout.write('#')
                digest.update(junk)
                sys.stdout.write("\r%s %.3f GB (%.3f MB/s) "
                                 % (term['green'], (1.0 * done) / (1024 ** 3),
                                    (done / (1024 ** 2 * (time.time() - startt)))))
                remotefile.write(junk)
                junk = gpg.stdout.read(BUFFER)
            sys.stdout.write('\r')
            print_colored(" %.3f GB DONE (%.3f MB/s)"
                          % ((1.0 * done) / (1024 ** 3),
                             (done / (1024 ** 2 * (time.time() - startt)))), 'green')
            digestfile.write("%s %s\n" % (digest.hexdigest(), filename))
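
# The pipeline built above corresponds roughly to this shell command (a sketch
# only; dataset and snapshot names are placeholders, and the script streams
# the result to the SFTP server instead of a local file):
#   sudo zfs send -D base/root@2014-01-01 \
#     | gpg --batch --compress-algo ZLIB --sign --encrypt --recipient <GPGUSER> \
#     > root@2014-01-01.full.zfs.gpg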

def syncronize(local_datasets, remote_datasets):
    """Upload every local snapshot that is not yet present on the remote side."""
    for device in local_datasets.keys():
        last = None
        for dataset in local_datasets[device]:
            # Snapshots already on the backup server are skipped but remembered
            # as potential reference points for incremental streams.
            if device in remote_datasets:
                if dataset in remote_datasets[device]:
                    print_colored("%s@%s -- found on remote server" % (device, dataset), 'yellow')
                    last = dataset
                    continue

            if last is None:
                print_colored("Initial syncronization for device %s" % device, 'green')
                sftp_send("%s@%s" % (device, dataset))
                last = dataset
                continue

            # Snapshots sharing the same 7-character prefix (e.g. 'YYYY-MM' for
            # date-based names) are sent incrementally against the previous one;
            # otherwise a fresh full stream is uploaded.
            if last[:7] == dataset[:7]:
                print_colored("%s@%s -- incremental backup (reference: %s)" %
                              (device, dataset, last), 'green')
                sftp_send("%s@%s" % (device, dataset), last)
            else:
                print_colored("%s@%s -- full backup" % (device, dataset), 'green')
                sftp_send("%s@%s" % (device, dataset))
            #print_colored("%s@%s -- doing incremental backup" % (device, dataset), 'green')
            #sftp_send("%s@%s" % (device, dataset), lastmonth)
            last = dataset

def get_remote_datasets():
    datasets = sftp.listdir()
    datasets = filter(lambda x: '@' in x, datasets)

    # Strip the '.full.zfs.gpg' / '.from.<reference>.zfs.gpg' suffix, keeping
    # only the 'device@snapshot' part.
    datasets = [entry.split('.')[0] for entry in datasets]

    return postprocess_datasets(datasets)

def get_local_datasets():
    datasets = subprocess.check_output(['sudo', 'zfs', 'list', '-t', 'snapshot',
                                        '-H', '-o', 'name'])
    datasets = datasets.strip().split('\n')

    # Drop the leading '<pool>/' prefix; the slice assumes a four-character
    # pool name such as the placeholder ZPOOL above.
    datasets = [entry[5:] for entry in datasets]

    return postprocess_datasets(datasets)

def main():
    syncronize(get_local_datasets(), get_remote_datasets())

if __name__ == '__main__':
    main()
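
# Restoring is not handled by this script. A minimal sketch of the inverse
# pipeline, assuming the naming scheme above (dataset, snapshot and pool names
# are placeholders):
#   gpg --decrypt root@2014-01-01.full.zfs.gpg | sudo zfs receive base/root
# Incremental '*.from.<reference>.zfs.gpg' streams must be received on top of
# the snapshot they were generated against.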