lp:~ed.so/duplicity/reuse-passphrase-for-signing-fix

« back to all changes in this revision

Viewing changes to duplicity-bin

  • Committer: bescoto
  • Date: 2002-10-29 01:49:46 UTC
  • Revision ID: vcs-imports@canonical.com-20021029014946-3m4rmm5plom7pl6q
Initial checkin

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
#!/usr/bin/env python
 
2
# duplicity -- Encrypted bandwidth efficient backup
 
3
# Version $version released September 29, 2002
 
4
#
 
5
# Copyright (C) 2002 Ben Escoto <bescoto@stanford.edu>
 
6
#
 
7
# This program is licensed under the GNU General Public License (GPL).
 
8
# you can redistribute it and/or modify it under the terms of the GNU
 
9
# General Public License as published by the Free Software Foundation,
 
10
# Inc., 675 Mass Ave, Cambridge MA 02139, USA; either version 2 of the
 
11
# License, or (at your option) any later version.  Distributions of
 
12
# duplicity should include a copy of the GPL in a file called
 
13
# COPYING.  The GPL is also available online at
 
14
# http://www.gnu.org/copyleft/gpl.html.
 
15
#
 
16
# See http://rdiff-backup.stanford.edu/duplicity for more information.
 
17
# Please send mail to me or the mailing list if you find bugs or have
 
18
# any suggestions.
 
19
 
 
20
from __future__ import generators
 
21
import getpass, gzip, os, sys, time, types
 
22
from duplicity import collections, commandline, diffdir, dup_temp, \
 
23
         dup_time, file_naming, globals, gpg, log, manifest, patchdir, \
 
24
         path, robust
 
25
 
 
26
def get_passphrase():
        """Return the GnuPG passphrase, from $PASSPHRASE or by prompting the user"""
        passphrase = os.environ.get('PASSPHRASE')
        if passphrase is None:
                # Not in the environment (an empty string still counts as set)
                log.Log("PASSPHRASE variable not set, asking user.", 5)
                passphrase = getpass.getpass("GnuPG passphrase: ")
        return passphrase
 
32
 
 
33
def write_multivol(backup_type, tarblock_iter, backend):
        """Encrypt volumes of tarblock_iter and write them to backend

        backup_type should be "inc" or "full"; here it only affects the
        generated filenames.  A manifest describing all written volumes
        is produced as well (see write_manifest).

        """
        mf = manifest.Manifest().set_dirinfo()
        volume_number = 1
        while tarblock_iter.peek():
                # Encrypt one volume's worth of blocks into a temp file
                first_index = tarblock_iter.peek().index
                volume_filename = file_naming.get(backup_type, volume_number,
                                                  encrypted = 1)
                tdp = dup_temp.new_tempduppath(file_naming.parse(volume_filename))
                gpg.GPGWriteFile(tarblock_iter, tdp.name, globals.gpg_profile)
                tdp.setdata()
                last_index = tarblock_iter.get_previous_index()

                # Record this volume (index range + hash) in the manifest
                vol_info = manifest.VolumeInfo()
                vol_info.set_info(volume_number, first_index, last_index)
                vol_info.set_hash("SHA1", gpg.get_hash("SHA1", tdp))
                mf.add_volume_info(vol_info)

                # Upload, then discard the local temp copy
                backend.put(tdp, volume_filename)
                tdp.delete()
                volume_number = volume_number + 1

        write_manifest(mf, backup_type, backend)
 
63
 
 
64
def write_manifest(mf, backup_type, backend):
        """Save manifest locally in archive_dir (if set) and encrypted on backend"""
        manifest_text = mf.to_string()
        if globals.archive_dir:
                # Plain local copy in the archive directory
                local_name = file_naming.get(backup_type, manifest = 1)
                local_fp = dup_temp.get_fileobj_duppath(globals.archive_dir,
                                                        local_name)
                local_fp.write(manifest_text)
                local_fp.close()

        # Encrypted copy always goes to the backend
        remote_name = file_naming.get(backup_type, manifest = 1, encrypted = 1)
        remote_fp = backend.get_fileobj_write(remote_name)
        remote_fp.write(manifest_text)
        remote_fp.close()
 
77
 
 
78
def get_sig_fileobj(sig_type):
        """Return a fileobj opened for writing signature data

        When globals.archive_dir is configured, signatures are stored
        there gzipped; otherwise they are written encrypted to the
        backend.

        """
        assert sig_type in ("full-sig", "new-sig")
        if not globals.archive_dir:
                sig_filename = file_naming.get(sig_type, encrypted = 1)
                return globals.backend.get_fileobj_write(sig_filename)
        sig_filename = file_naming.get(sig_type, gzipped = 1)
        return dup_temp.get_fileobj_duppath(globals.archive_dir, sig_filename)
 
92
 
 
93
def full_backup(col_stats):
        """Run a full backup of the selection to the backend"""
        sig_fileobj = get_sig_fileobj("full-sig")
        blockiter = diffdir.DirFull_WriteSig(globals.select, sig_fileobj)
        write_multivol("full", blockiter, globals.backend)
        sig_fileobj.close()
        # Old signature chains are now superseded; clean them up
        col_stats.set_values(sig_chain_warning = None).cleanup_signatures()
 
100
        
 
101
def check_sig_chain(col_stats):
        """Return last signature chain for inc backup, or None if none available"""
        if col_stats.matched_chain_pair:
                return col_stats.matched_chain_pair[0]
        # No usable signatures: fatal if the user demanded an incremental,
        # otherwise fall back to a full backup.
        if globals.incremental:
                log.FatalError(
"""Fatal Error: Unable to start incremental backup.  Old signatures
not found and --incremental specified""")
        else: log.Warn("No signatures found, switching to full backup.")
        return None
 
111
 
 
112
def incremental_backup(sig_chain):
        """Run an incremental backup (against sig_chain) to the backend"""
        dup_time.setprevtime(sig_chain.end_time)
        sig_fileobj = get_sig_fileobj("new-sig")
        blockiter = diffdir.DirDelta_WriteSig(
                globals.select, sig_chain.get_fileobjs(), sig_fileobj)
        write_multivol("inc", blockiter, globals.backend)
        sig_fileobj.close()
 
120
 
 
121
def restore(col_stats):
        """Restore archive in globals.backend to globals.local_path

        Patches target from each backup set (full + incrementals) in the
        chain covering the requested restore time.

        """
        if globals.restore_dir: index = tuple(globals.restore_dir.split("/"))
        else: index = ()
        target = globals.local_path
        # Renamed from "time" so the time module imported at the top of
        # the file is no longer shadowed inside this function.
        restore_time = globals.restore_time or globals.current_time # default now
        backup_chain = col_stats.get_backup_chain_at_time(restore_time)
        assert backup_chain, col_stats.all_backup_chains
        backup_setlist = backup_chain.get_sets_at_time(restore_time)

        for backup_set in backup_setlist:
                log.Log("Patching from backup set at time " +
                                backup_set.get_timestr(), 4)
                patchdir.Patch_from_iter(target,
                                                                 restore_fileobj_iter(backup_set, index),
                                                                 index)
 
137
 
 
138
def restore_fileobj_iter(backup_set, index = ()):
        """Yield plaintext fileobjs for backup_set volumes containing index"""
        # Local renamed from "manifest" so it no longer shadows the
        # manifest module imported at the top of the file.
        mf = backup_set.get_manifest()
        for vol_num in mf.get_containing_volumes(index):
                yield restore_get_enc_fileobj(backup_set.backend,
                                                                          backup_set.volume_name_dict[vol_num],
                                                                          mf.volume_info_dict[vol_num])
 
145
 
 
146
def restore_get_enc_fileobj(backend, filename, volume_info):
        """Fetch filename from backend and return a plaintext fileobj

        The downloaded file's hash is verified against volume_info (when
        a hash is available).  If the file is encrypted and
        globals.gpg_profile.sign_key is set, a signature check is hooked
        onto the returned fileobj and a fatal error is raised on close
        if the volume was not signed by that key.

        """
        pr = file_naming.parse(filename)
        tdp = dup_temp.new_tempduppath(pr)
        backend.get(filename, tdp)
        restore_check_hash(volume_info, tdp)

        fileobj = tdp.filtered_open_with_delete("rb")
        if pr.encrypted and globals.gpg_profile.sign_key:
                restore_add_sig_check(fileobj)
        return fileobj
 
163
 
 
164
def restore_check_hash(volume_info, vol_path):
        """Verify vol_path's hash against the one recorded in volume_info"""
        hash_pair = volume_info.get_best_hash()
        if not hash_pair: return          # no hash recorded; nothing to check
        algorithm, expected = hash_pair
        computed = gpg.get_hash(algorithm, vol_path)
        if computed != expected:
                log.FatalError("Invalid data - %s hash mismatch:\n"
                                           "Calculated hash: %s\n"
                                           "Manifest hash: %s\n" %
                                           (algorithm, computed, expected))
 
174
 
 
175
def restore_add_sig_check(fileobj):
        """Hook a signature check onto fileobj, run when it is closed"""
        assert (isinstance(fileobj, dup_temp.FileobjHooked) and
                        isinstance(fileobj.fileobj, gpg.GPGFile)), fileobj
        def check_signature():
                """Fatal error unless volume was signed by the configured key"""
                found_sig = fileobj.fileobj.get_signature()
                wanted_sig = globals.gpg_profile.sign_key
                if found_sig != wanted_sig:
                        log.FatalError("Volume was not signed by key %s, not %s" %
                                                   (found_sig, wanted_sig))
        fileobj.addhook(check_signature)
 
186
 
 
187
def check_last_manifest(col_stats):
        """Check consistency and hostname/directory of the newest manifest"""
        if not col_stats.all_backup_chains: return
        newest_set = col_stats.all_backup_chains[-1].get_last()
        newest_set.check_manifests()
 
192
 
 
193
def main():
 
194
        """Start/end here"""
 
195
        action = commandline.ProcessCommandLine(sys.argv[1:])
 
196
        dup_time.setcurtime(globals.current_time)
 
197
        col_stats = collections.CollectionsStatus(globals.backend,
 
198
                                                                                          globals.archive_dir).set_values()
 
199
        log.Log("Collection Status\n-----------------\n" + str(col_stats), 8)
 
200
 
 
201
        os.umask(077)
 
202
        globals.gpg_profile.passphrase = get_passphrase()
 
203
        if action == "restore": restore(col_stats)
 
204
        else:
 
205
                assert action == "inc" or action == "full", action
 
206
                check_last_manifest(col_stats)
 
207
                if action == "full": full_backup(col_stats)
 
208
                else:
 
209
                        sig_chain = check_sig_chain(col_stats)
 
210
                        if not sig_chain: full_backup(col_stats)
 
211
                        else: incremental_backup(sig_chain)
 
212
        dup_temp.cleanup()
 
213
 
 
214
 
 
215
if __name__ == "__main__":
        main()