~ed.so/duplicity/reuse-passphrase-for-signing-fix


Viewing changes to duplicity-bin

  • Committer: loafman
  • Date: 2008-12-22 17:22:44 UTC
  • Revision ID: vcs-imports@canonical.com-20081222172244-cjurdc0mt5d41n6d
patch #6700: Make duplicity translatable
https://savannah.nongnu.org/patch/?6700
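The hunks below wrap duplicity's user-facing strings in gettext calls so they can be extracted and translated. As a rough standalone sketch of the mechanism the patch relies on (illustrative only, not code from the patch): gettext.install('duplicity') binds _() into Python's builtins so every module can mark strings without extra imports, and gettext.ngettext() picks singular or plural wording from a count.

# Illustrative sketch of the i18n pattern used in the diff below; the
# example function, strings, and count are hypothetical.
import gettext

# Install _() as a builtin bound to the 'duplicity' message catalog.
# With no compiled .mo catalog present it falls back to returning the
# string unchanged.
gettext.install('duplicity')

def demo(num_files):
    # _() marks the string for extraction and returns its translation at runtime.
    print _("No signatures found, switching to full backup.")
    # ngettext() chooses the singular or plural form based on num_files.
    print gettext.ngettext("%d file compared",
                           "%d files compared", num_files) % num_files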

@@ -37,6 +37,9 @@
      dup_time, file_naming, globals, gpg, log, manifest, patchdir, \
      path, robust, tempdir, asyncscheduler
 
+import gettext
+gettext.install('duplicity')
+
 # If exit_val is not None, exit with given value at end.
 exit_val = None
 
@@ -238,11 +241,11 @@
     """Get last signature chain for inc backup, or None if none available"""
     if not col_stats.matched_chain_pair:
         if globals.incremental:
-            log.FatalError("Fatal Error: Unable to start incremental backup.  "
-                           "Old signatures not found and incremental specified",
+            log.FatalError(_("Fatal Error: Unable to start incremental backup.  "
+                             "Old signatures not found and incremental specified"),
                            log.ErrorCode.inc_without_sigs)
         else:
-            log.Warn("No signatures found, switching to full backup.")
+            log.Warn(_("No signatures found, switching to full backup."))
         return None
     return col_stats.matched_chain_pair[0]
 
@@ -251,7 +254,7 @@
     """If globals.print_statistics, print stats after adding bytes_written"""
     if globals.print_statistics:
         diffdir.stats.TotalDestinationSizeChange = bytes_written
-        print diffdir.stats.get_stats_logstring("Backup Statistics")
+        print diffdir.stats.get_stats_logstring(_("Backup Statistics"))
 
 
 def incremental_backup(sig_chain):
@@ -274,7 +277,7 @@
     """List the files current in the archive (examining signature only)"""
     sig_chain = check_sig_chain(col_stats)
     if not sig_chain:
-        log.FatalError("No signature data found, unable to list files.",
+        log.FatalError(_("No signature data found, unable to list files."),
                        log.ErrorCode.no_sigs)
     path_iter = diffdir.get_combined_path_iter(sig_chain.get_fileobjs())
     for path in path_iter:
@@ -290,11 +293,11 @@
     if not patchdir.Write_ROPaths(globals.local_path,
                                   restore_get_patched_rop_iter(col_stats)):
         if globals.restore_dir:
-            log.FatalError("%s not found in archive, no files restored."
+            log.FatalError(_("%s not found in archive, no files restored.")
                            % (globals.restore_dir,),
                            log.ErrorCode.restore_dir_not_found)
         else:
-            log.FatalError("No files found in archive - nothing restored.",
+            log.FatalError(_("No files found in archive - nothing restored."),
                            log.ErrorCode.no_restore_files)
 
 
@@ -318,7 +321,7 @@
             yield restore_get_enc_fileobj(backup_set.backend,
                                           backup_set.volume_name_dict[vol_num],
                                           manifest.volume_info_dict[vol_num])
-            log.Progress('Processed volume %d of %d' % (vol_num, numvols),
+            log.Progress(_('Processed volume %d of %d') % (vol_num, numvols),
                          vol_num, numvols)
 
     fileobj_iters = map(get_fileobj_iter, backup_setlist)
@@ -351,10 +354,10 @@
     if hash_pair:
         calculated_hash = gpg.get_hash(hash_pair[0], vol_path)
         if calculated_hash != hash_pair[1]:
-            log.FatalError("Invalid data - %s hash mismatch:\n"
-                           "Calculated hash: %s\n"
-                           "Manifest hash: %s\n" %
-                           (hash_pair[0], calculated_hash, hash_pair[1]),
+            log.FatalError("%s\n%s\n%s\n" %
+                           (_("Invalid data - %s hash mismatch:") % hash_pair[0],
+                            _("Calculated hash: %s") % calculated_hash,
+                            _("Manifest hash: %s") % hash_pair[1]),
                            log.ErrorCode.mismatched_hash)
 
 
@@ -366,7 +369,7 @@
         """Thunk run when closing volume file"""
         actual_sig = fileobj.fileobj.get_signature()
         if actual_sig != globals.gpg_profile.sign_key:
-            log.FatalError("Volume was not signed by key %s, not %s" %
+            log.FatalError(_("Volume was signed by key %s, not %s") %
                            (actual_sig, globals.gpg_profile.sign_key),
                            log.ErrorCode.unsigned_volume)
     fileobj.addhook(check_signature)
@@ -386,9 +389,14 @@
         if not backup_ropath.compare_verbose(current_path):
             diff_count += 1
         total_count += 1
-    log.Log("Verify complete: %s %s compared, %s %s found." %
-            (total_count, total_count == 1 and "file" or "files",
-             diff_count, diff_count == 1 and "difference" or "differences"), 3)
+    # Unfortunately, ngettext doesn't handle multiple number variables, so we
+    # split up the string.
+    log.Log(_("Verify complete: %s, %s.") %
+            (gettext.ngettext("%d file compared",
+                              "%d files compared", total_count) % total_count,
+             gettext.ngettext("%d difference found",
+                              "%d differences found", diff_count) % diff_count),
+            3)
     if diff_count >= 1:
         exit_val = 1
 
@@ -397,24 +405,24 @@
     """Delete the extraneous files in the current backend"""
     extraneous = col_stats.get_extraneous()
     if not extraneous:
-        log.Warn("No extraneous files found, nothing deleted in cleanup.")
+        log.Warn(_("No extraneous files found, nothing deleted in cleanup."))
         return
 
     filestr = "\n".join(extraneous)
     if globals.force:
-        if len(extraneous) > 1:
-            log.Log("Deleting these files from backend:\n"+filestr, 3)
-        else:
-            log.Log("Deleting this file from backend:\n"+filestr, 3)
+        Log.Log(gettext.ngettext("Deleting this file from backend:",
+                                 "Deleting these files from backend:",
+                                 len(extraneous))+"\n"+filestr, 3)
         if not globals.dry_run:
             col_stats.backend.delete(extraneous)
     else:
-        if len(extraneous) > 1:
-            log.Warn("Found the following files to delete:")
-        else:
-            log.Warn("Found the following file to delete:")
-        log.Warn(filestr + "\nRun duplicity again with the --force "
-                 "option to actually delete.")
+        Log.Log("%s\n%s\n%s" %
+                (gettext.ngettext("Found the following file to delete:",
+                                  "Found the following files to delete:",
+                                  len(extraneous)),
+                 filestr,
+                 _("Run duplicity again with the --force option to actually delete.")),
+                3)
 
 
 def remove_all_but_n_full(col_stats):
@@ -436,27 +444,26 @@
 
     req_list = col_stats.get_older_than_required(globals.remove_time)
     if req_list:
-        log.Warn("There are backup set(s) at time(s):\n%s\nWhich can't be "
-                 "deleted because newer sets depend on them." %
-                 set_times_str(req_list))
+        log.Warn("%s\n%s\n%s" %
+                 (_("There are backup set(s) at time(s):"),
+                  set_times_str(req_list),
+                  _("Which can't be deleted because newer sets depend on them.")))
 
     if (col_stats.matched_chain_pair and
         col_stats.matched_chain_pair[1].end_time < globals.remove_time):
-        log.Warn("Current active backup chain is older than specified time.\n"
-             "However, it will not be deleted.  To remove all your backups,\n"
-                 "manually purge the repository.")
+        log.Warn(_("Current active backup chain is older than specified time.  "
+                   "However, it will not be deleted.  To remove all your backups, "
+                   "manually purge the repository."))
 
     setlist = col_stats.get_older_than(globals.remove_time)
     if not setlist:
-        log.Warn("No old backup sets found, nothing deleted.")
+        log.Warn(_("No old backup sets found, nothing deleted."))
         return
     if globals.force:
-        if len(setlist) > 1:
-            log.Log("Deleting backup sets at times:\n" +
-                    set_times_str(setlist), 3)
-        else:
-            log.Log("Deleting backup set at times:\n" +
-                    set_times_str(setlist), 3)
+        log.Log(gettext.ngettext("Deleting backup set at time:",
+                                 "Deleting backup sets at times:",
+                                 len(setlist)) +
+                "\n" + set_times_str(setlist), 3)
         if globals.dry_run:
             col_stats.set_values(sig_chain_warning = None)
         else:
@@ -465,12 +472,11 @@
                 set.delete()
             col_stats.set_values(sig_chain_warning = None).cleanup_signatures()
     else:
-        if len(setlist) > 1:
-            log.Warn("Found old backup sets at the following times:")
-        else:
-            log.Warn("Found old backup set at the following time:")
-        log.Warn(set_times_str(setlist) +
-                 "\nRerun command with --force option to actually delete.")
+        log.Warn(gettext.ngettext("Found old backup set at the following time:",
+                                  "Found old backup sets at the following times:",
+                                  len(setlist)) +
+                 "\n" + set_times_str(setlist) + "\n" +
+                 _("Rerun command with --force option to actually delete."), 3)
 
 
 def check_last_manifest(col_stats):
@@ -492,11 +498,11 @@
 
     last_full_time = col_stats.get_last_full_backup_time()
     if last_full_time > 0:
-        log.Log("Last full backup date: " + dup_time.timetopretty(last_full_time), 4)
+        log.Log(_("Last full backup date:") + " " + dup_time.timetopretty(last_full_time), 4)
     else:
-        log.Log("Last full backup date: none", 4)
+        log.Log(_("Last full backup date: none"), 4)
     if action == "inc" and last_full_time < globals.full_force_time:
-        log.Log("Last full backup is too old, forcing full backup", 3)
+        log.Log(_("Last full backup is too old, forcing full backup"), 3)
         action = "full"
 
     os.umask(077)
@@ -568,8 +574,8 @@
     except duplicity.errors.UserError, e:
         # For user errors, don't show an ugly stack trace by
         # default. But do with sufficient verbosity.
-        log.Info("User error detail: %s"
-             "" % (''.join(traceback.format_exception(*sys.exc_info()))))
+        log.Info(_("User error detail: %s")
+                 % (''.join(traceback.format_exception(*sys.exc_info()))))
         log.FatalError("%s: %s" % (e.__class__.__name__,
                        str(e)), log.ErrorCode.user_error,
                        e.__class__.__name__)
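The comment added in the verify hunk notes that ngettext() cannot pluralize two counts inside one string, so the message is assembled from two independently pluralized fragments. A standalone sketch of that workaround, using hypothetical counts:

# Illustrative only: reproduces the split-string workaround from the verify
# hunk above with made-up counts.
import gettext
gettext.install('duplicity')   # makes _() available as a builtin

total_count, diff_count = 3, 1
# Each count gets its own ngettext() call; the pluralized fragments are then
# substituted into a single translatable frame string.
msg = _("Verify complete: %s, %s.") % (
    gettext.ngettext("%d file compared",
                     "%d files compared", total_count) % total_count,
    gettext.ngettext("%d difference found",
                     "%d differences found", diff_count) % diff_count)
print msg   # -> Verify complete: 3 files compared, 1 difference found.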