diff --git a/rdiff-backup-1.2.8/CHANGELOG b/rdiff-backup-1.2.8/CHANGELOG
index 3408ab4..9e5829a 100644
--- a/rdiff-backup-1.2.8/CHANGELOG
+++ b/rdiff-backup-1.2.8/CHANGELOG
@@ -1,3 +1,12 @@
+New since v1.2.8 (unreleased, local modifications)
+--------------------------------------------------
+
+Added --verify-full, which verifies the integrity of an entire repository,
+including all increments and metadata, and --verify-full-since=<time>, which
+restricts the increment checks to backups made since the given time. This
+should be much faster than --verify-at-time=<date in the past>, and is more
+comprehensive in what it verifies.
+
+
 New in v1.2.8 (2009/03/16)
 ---------------------------
 
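The integrity data files introduced below are plain line-oriented listings of
the form "<path relative to rdiff-backup-data> <sha1-hex-or-marker>". As a
rough standalone sketch of checking one such line (stdlib only; check_line and
its argument names are illustrative, and the marker handling covers only the
values documented in this patch):

    import hashlib, os

    def check_line(data_dir, line):
        # data_dir is the rdiff-backup-data directory; line is one record
        relpath, sig = line.rstrip("\n").rsplit(" ", 1)
        path = os.path.join(data_dir, relpath)
        if sig == "<dir>":
            return os.path.isdir(path)
        if sig == "0":
            return os.path.isfile(path) and os.path.getsize(path) == 0
        digest = hashlib.sha1()
        fp = open(path, "rb")
        try:
            while 1:
                chunk = fp.read(4096)
                if not chunk:
                    break
                digest.update(chunk)
        finally:
            fp.close()
        return digest.hexdigest() == sig
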
diff --git a/rdiff-backup-1.2.8/misc/integrify.py b/rdiff-backup-1.2.8/misc/integrify.py
new file mode 100644
index 0000000..dee519a
--- /dev/null
+++ b/rdiff-backup-1.2.8/misc/integrify.py
@@ -0,0 +1,186 @@
+#! /usr/bin/env python
+"""rdiff-backup integrity data builder
+
+run this script on your repository to generate integrity data for
+rdiff-backup --verify-full
+
+WARNING this will not detect if your repository has already been corrupted.
+It will create integrity signatures with the files as they exist when you
+run the script. Any corruption that happened before that point can only
+be detected with --verify-at-time. However, after you run this script you
+can use --verify-full to verify that nothing has changed since you ran this
+script.
+
+Usage:
+./integrify.py [options] /path/to/your/repository
+
+Options:
+    -v : verbose output
+    -vv : very verbose output
+    --use-compatible-timestamps : see rdiff-backup manual
+    --null-separator : see rdiff-backup manual
+    --no-compression : see rdiff-backup manual
+    --force : ignore existing integrity data files
+
+"""
+import glob
+import hashlib
+import logging
+import os
+import sys
+import time
+from gzip import GzipFile
+
+log = logging.getLogger()
+use_compatible_timestamps = False
+
+def main():
+    if "-vv" in sys.argv:
+        level = logging.DEBUG
+    elif "-v" in sys.argv:
+        level = logging.INFO
+    else:
+        level = logging.WARN
+    if "--use-compatible-timestamps" in sys.argv:
+        global use_compatible_timestamps # HACK!!
+        use_compatible_timestamps = True
+    logging.basicConfig(
+        format="%(asctime)s %(levelname)s %(message)s",
+        level=level,
+    )
+    repo = os.path.abspath(sys.argv[-1])
+    assert os.path.exists(repo), "not found: %s" % repo
+    rbdir = os.path.join(repo, "rdiff-backup-data")
+    assert os.path.exists(rbdir), "not found: %s" % rbdir
+    if "--force" not in sys.argv:
+        for item in glob.iglob(os.path.join(rbdir, "integrity.*")):
+            print "integrity data exists (use --force to generate anyway)"
+            print item
+            sys.exit()
+    data = IntegrityData(rbdir,
+        "--no-compression" not in sys.argv,
+        "--null-separator" in sys.argv and "\0" or "\n",
+    )
+    data.procdir(rbdir)
+    data.close()
+
+
+def timetostring(timeinseconds):
+    """Return w3 datetime compliant listing of timeinseconds, or one in
+    which :'s have been replaced with -'s.
+
+    Shamelessly ripped from the rdiff-backup Time module.
+    """
+    if not use_compatible_timestamps:
+        format_string = "%Y-%m-%dT%H:%M:%S"
+    else:
+        format_string = "%Y-%m-%dT%H-%M-%S"
+    s = time.strftime(format_string, time.localtime(timeinseconds))
+    return s + gettzd(timeinseconds)
+
+def gettzd(timeinseconds=None):
+    """Return w3's timezone identification string.
+
+    Expressed as [+/-]hh:mm. For instance, PDT is -07:00 during daylight
+    saving time and -08:00 otherwise. The zone coincides with what
+    localtime(), etc., use. If no argument is given, use the current time.
+
+    Shamelessly ripped from the rdiff-backup Time module.
+    """
+    if timeinseconds is None: timeinseconds = time.time()
+    dst_in_effect = time.daylight and time.localtime(timeinseconds)[8]
+    if dst_in_effect: offset = -time.altzone/60
+    else: offset = -time.timezone/60
+    if offset > 0: prefix = "+"
+    elif offset < 0: prefix = "-"
+    else: return "Z" # time is already in UTC
+
+    if use_compatible_timestamps: time_separator = '-'
+    else: time_separator = ':'
+    # take abs() before divmod so negative half-hour offsets (e.g. -03:30)
+    # don't round the hour the wrong way
+    hours, minutes = divmod(abs(offset), 60)
+    assert 0 <= hours <= 23
+    assert 0 <= minutes <= 59
+    return "%s%02d%s%02d" % (prefix, hours, time_separator, minutes)
+
+
+class IntegrityData(object):
+
+    def __init__(self, rbdir, compress, linesep):
+        timestr = timetostring(time.time())
+        gz = ".gz" if compress else ""
+        self.filename = "integrity.%s.data%s" % (timestr, gz)
+        self.linesep = linesep
+        path = os.path.join(rbdir, self.filename)
+        self.rbdir = rbdir
+        self.file = open(path, "wb")
+        if compress:
+            self.fileobj = GzipFile(path, "wb", fileobj=self.file)
+        else:
+            self.fileobj = self.file
+        self.buffer = [
+            "# Format of each line in integrity data file:",
+            "# Filename SHA1Digest",
+        ]
+
+    def procdir(self, basepath, rel=(), bufsize=512):
+        for name in os.listdir(basepath):
+            path = os.path.join(basepath, name)
+            index = rel + (name,)
+            relpath = "/".join(index)
+            if name == self.filename:
+                continue
+            if relpath == "backup.log":
+                log.debug("skipping: %s", relpath)
+                continue
+            log.info("processing: %s", relpath)
+            if os.path.isdir(path):
+                self.update(relpath, "<dir>")
+                self.procdir(path, index)
+                continue
+            try:
+                fp = open(path, "rb")
+                try:
+                    sig = hashlib.sha1()
+                    data = fp.read(bufsize)
+                    if data:
+                        while data:
+                            sig.update(data)
+                            data = fp.read(bufsize)
+                        sig = sig.hexdigest()
+                    else:
+                        sig = "0"
+                finally:
+                    fp.close()
+                self.update(relpath, sig)
+            except Exception:
+                log.error("error reading %s", relpath, exc_info=True)
+                self.update(relpath, "<unknown signature>")
+
+    def update(self, relpath, sig):
+        self.buffer.append(" ".join([relpath, sig]))
+        if len(self.buffer) >= 100:
+            self.write_buffer()
+
+    def write_buffer(self):
+        log.debug("writing buffer...")
+        self.fileobj.write(self.linesep.join(self.buffer + [""]))
+        self.buffer = []
+
+    def close(self):
+        if self.buffer:
+            self.write_buffer()
+        self.fileobj.close()
+        self.file.close()
+
+
+if __name__ == "__main__":
+    main()
+
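Since integrify.py honors --no-compression, anything that reads these files
back must handle both the gzipped and the plain form. A small helper in the
spirit of what --verify-full does via rp.open("rb", rp.path.endswith(".gz"))
(open_integrity_file is an illustrative name, not part of the patch):

    import gzip

    def open_integrity_file(path):
        # integrify.py names its output integrity.<timestamp>.data or
        # integrity.<timestamp>.data.gz; pick the opener by suffix
        if path.endswith(".gz"):
            return gzip.open(path, "rb")
        return open(path, "rb")
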
diff --git a/rdiff-backup-1.2.8/rdiff_backup/FilenameMapping.py b/rdiff-backup-1.2.8/rdiff_backup/FilenameMapping.py
index 8bb3951..f86cd8f 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/FilenameMapping.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/FilenameMapping.py
@@ -132,12 +132,14 @@ class QuotedRPath(rpath.RPath):
 	the index is quoted, not the base.
 
 	"""
-	def __init__(self, connection, base, index = (), data = None):
+	def __init__(self, connection, base, index = (), data = None,
+			calc_sha1_on_write=0):
 		"""Make new QuotedRPath"""
 		self.quoted_index = tuple(map(quote, index))
 		self.conn = connection
 		self.index = index
 		self.base = base
+		self.calc_sha1_on_write = calc_sha1_on_write
 		if base is not None:
 			if base == "/": self.path = "/" + "/".join(self.quoted_index)
 			else: self.path = "/".join((base,) + self.quoted_index)
diff --git a/rdiff-backup-1.2.8/rdiff_backup/Main.py b/rdiff-backup-1.2.8/rdiff_backup/Main.py
index bba2afa..04d4bbe 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/Main.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/Main.py
@@ -86,7 +86,7 @@ def parse_cmdlineoptions(arglist):
 		  "restrict-read-only=", "restrict-update-only=", "server",
 		  "ssh-no-compression", "tempdir=", "terminal-verbosity=",
 		  "test-server", "user-mapping-file=", "verbosity=", "verify",
-		  "verify-at-time=", "version"])
+		  "verify-at-time=", "verify-full", "verify-full-since=", "version"])
 	except getopt.error, e:
 		commandline_error("Bad commandline options: " + str(e))
 
@@ -203,6 +203,10 @@ def parse_cmdlineoptions(arglist):
 		elif opt == "-v" or opt == "--verbosity": Log.setverbosity(arg)
 		elif opt == "--verify": action, restore_timestr = "verify", "now"
 		elif opt == "--verify-at-time": action, restore_timestr = "verify", arg
+		elif opt == "--verify-full":
+			action, restore_timestr = "verify-full", None
+		elif opt == "--verify-full-since":
+			action, restore_timestr = "verify-full", arg
 		elif opt == "-V" or opt == "--version":
 			print "rdiff-backup " + Globals.version
 			sys.exit(0)
@@ -216,7 +220,7 @@ def check_action():
 					   1: ['list-increments', 'list-increment-sizes',
 						   'remove-older-than', 'list-at-time',
 						   'list-changed-since', 'check-destination-dir',
-						   'verify'],
+						   'verify', 'verify-full'],
 					   2: ['backup', 'restore', 'restore-as-of',
 						   'compare', 'compare-hash', 'compare-full']}
 	l = len(args)
@@ -290,12 +294,12 @@ def take_action(rps):
 	elif action == "restore-as-of": Restore(rps[0], rps[1], 1)
 	elif action == "test-server": SetConnections.TestConnections()
 	elif action == "verify": Verify(rps[0])
+	elif action == "verify-full": Verify(rps[0], full=1)
 	else: raise AssertionError("Unknown action " + action)
 
 def cleanup():
 	"""Do any last minute cleaning before exiting"""
 	Log("Cleaning up", 6)
-	if ErrorLog.isopen(): ErrorLog.close()
 	Log.close_logfile()
 	if not Globals.server: SetConnections.CloseConnections()
 
@@ -331,6 +335,7 @@ def Backup(rpin, rpout):
 	SetConnections.BackupInitConnections(rpin.conn, rpout.conn)
 	backup_check_dirs(rpin, rpout)
 	backup_set_rbdir(rpin, rpout)
+	rpout.conn.integrity.IntegrityData.init()
 	rpout.conn.fs_abilities.backup_set_globals(rpin, force)
 	if Globals.chars_to_quote: rpout = backup_quoted_rpaths(rpout)
 	init_user_group_mapping(rpout.conn)
@@ -343,8 +348,10 @@ def Backup(rpin, rpout):
 		backup.Mirror_and_increment(rpin, rpout, incdir)
 		rpout.conn.Main.backup_remove_curmirror_local()
 	else:
-		backup.Mirror(rpin, rpout)
+		backup.Mirror(rpin, rpout, incdir)
 		rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
+	if ErrorLog.isopen(): ErrorLog.close()
+	rpout.conn.integrity.IntegrityData.close()
 
 def backup_quoted_rpaths(rpout):
 	"""Get QuotedRPath versions of important RPaths.  Return rpout"""
@@ -846,17 +853,26 @@ def Compare(compare_type, src_rp, dest_rp, compare_time = None):
 		compare_func = compare.Compare_full
 	return_val = compare_func(src_rp, mirror_rp, inc_rp, compare_time)
 
-def Verify(dest_rp, verify_time = None):
-	"""Check the hashes of the regular files against mirror_metadata"""
+def Verify(dest_rp, verify_time = None, full = 0):
+	"""Check the hashes of the regular files against mirror_metadata
+
+	If full, also verify the rdiff-backup-data files against the integrity data
+
+	"""
 	global return_val
 	dest_rp = require_root_set(dest_rp, 1)
-	if not verify_time:
+	if not verify_time and not (full and restore_timestr is None):
 		try: verify_time = Time.genstrtotime(restore_timestr)
 		except Time.TimeException, exc: Log.FatalError(str(exc))
 
 	mirror_rp = restore_root.new_index(restore_index)
 	inc_rp = Globals.rbdir.append_path("increments", restore_index)
-	return_val = dest_rp.conn.compare.Verify(mirror_rp, inc_rp, verify_time)
+	if full:
+		return_val = dest_rp.conn.compare.VerifyFull(
+			mirror_rp, inc_rp, verify_time)
+	else:
+		return_val = dest_rp.conn.compare.Verify(
+			mirror_rp, inc_rp, verify_time)
 
 
 def CheckDest(dest_rp):
@@ -873,9 +889,11 @@ def CheckDest(dest_rp):
 
 def checkdest_need_check(dest_rp):
 	"""Return None if no dest dir found, 1 if dest dir needs check, 0 o/w"""
+	import integrity
 	if not dest_rp.isdir() or not Globals.rbdir.isdir(): return None
 	for filename in Globals.rbdir.listdir():
-		if filename not in ['chars_to_quote', 'backup.log']: break
+		if not (filename in ['chars_to_quote', 'backup.log']
+			or filename.startswith(integrity.filename_prefix)): break
 	else: # This may happen the first backup just after we test for quoting
 		return None
 	curmirroot = Globals.rbdir.append("current_mirror")
diff --git a/rdiff-backup-1.2.8/rdiff_backup/Security.py b/rdiff-backup-1.2.8/rdiff_backup/Security.py
index a259221..53b21a8 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/Security.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/Security.py
@@ -116,7 +116,7 @@ def set_security_level(action, cmdpairs):
 	elif action in ["test-server", "list-increments", 'list-increment-sizes',
 					"list-at-time", "list-changed-since",
 					"calculate-average", "remove-older-than", "compare",
-					"compare-hash", "compare-full", "verify"]:
+					"compare-hash", "compare-full", "verify", "verify-full"]:
 		sec_level = "minimal"
 		rdir = tempfile.gettempdir()
 	else: assert 0, "Unknown action %s" % action
diff --git a/rdiff-backup-1.2.8/rdiff_backup/backup.py b/rdiff-backup-1.2.8/rdiff_backup/backup.py
index 5dbd0a7..6e1f0d2 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/backup.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/backup.py
@@ -17,7 +17,11 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 # USA
 
-"""High level functions for mirroring and mirror+incrementing"""
+"""High level functions for mirroring and mirror+incrementing
+
+TODO refactor Mirror, Mirror_and_increment, DestinationStruct.patch and
+DestinationStruct.patch_and_increment (there's a LOT of duplication there)
+"""
 
 from __future__ import generators
 import errno
@@ -25,7 +29,7 @@ import Globals, metadata, rorpiter, TempFile, Hardlink, robust, increment, \
 	   rpath, static, log, selection, Time, Rdiff, statistics, iterfile, \
 	   hash, longname
 
-def Mirror(src_rpath, dest_rpath):
+def Mirror(src_rpath, dest_rpath, inc_rpath):
 	"""Turn dest_rpath into a copy of src_rpath"""
 	log.Log("Starting mirror %s to %s" % (src_rpath.path, dest_rpath.path), 4)
 	SourceS = src_rpath.conn.backup.SourceStruct
@@ -35,7 +39,7 @@ def Mirror(src_rpath, dest_rpath):
 	DestS.set_rorp_cache(dest_rpath, source_rpiter, 0)
 	dest_sigiter = DestS.get_sigs(dest_rpath)
 	source_diffiter = SourceS.get_diffs(dest_sigiter)
-	DestS.patch(dest_rpath, source_diffiter)
+	DestS.patch(dest_rpath, source_diffiter, inc_rpath)
 
 def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath):
 	"""Mirror + put increments in tree based at inc_rpath"""
@@ -224,9 +228,10 @@ class DestinationStruct:
 			else:
 				raise
 
-	def patch(cls, dest_rpath, source_diffiter, start_index = ()):
+	def patch(cls, dest_rpath, source_diffiter, inc_rpath, start_index = ()):
 		"""Patch dest_rpath with an rorpiter of diffs"""
-		ITR = rorpiter.IterTreeReducer(PatchITRB, [dest_rpath, cls.CCPP])
+		ITR = rorpiter.IterTreeReducer(PatchITRB,
+									   [dest_rpath, inc_rpath, cls.CCPP])
 		for diff in rorpiter.FillInIter(source_diffiter, dest_rpath):
 			log.Log("Processing changed file " + diff.get_indexpath(), 5)
 			ITR(diff.index, diff)
@@ -505,9 +510,10 @@ class PatchITRB(rorpiter.ITRBranch):
 	contents.
 
 	"""
-	def __init__(self, basis_root_rp, CCPP):
+	def __init__(self, basis_root_rp, inc_root_rp, CCPP):
 		"""Set basis_root_rp, the base of the tree to be incremented"""
 		self.basis_root_rp = basis_root_rp
+		self.inc_root_rp = inc_root_rp
 		assert basis_root_rp.conn is Globals.local_connection
 		self.statfileobj = (statistics.get_active_statfileobj() or
 							statistics.StatFileObj())
@@ -523,7 +529,7 @@ class PatchITRB(rorpiter.ITRBranch):
 	def fast_process(self, index, diff_rorp):
 		"""Patch base_rp with diff_rorp (case where neither is directory)"""
 		mirror_rp, discard = longname.get_mirror_inc_rps(
-			self.CCPP.get_rorps(index), self.basis_root_rp)
+			self.CCPP.get_rorps(index), self.basis_root_rp, self.inc_root_rp)
 		assert not mirror_rp.isdir(), mirror_rp
 		tf = TempFile.new(mirror_rp)
 		if self.patch_to_temp(mirror_rp, diff_rorp, tf):
@@ -622,7 +628,7 @@ class PatchITRB(rorpiter.ITRBranch):
 	def start_process(self, index, diff_rorp):
 		"""Start processing directory - record information for later"""
 		self.base_rp, discard = longname.get_mirror_inc_rps(
-			self.CCPP.get_rorps(index), self.basis_root_rp)
+			self.CCPP.get_rorps(index), self.basis_root_rp, self.inc_root_rp)
 		if diff_rorp.isdir(): self.prepare_dir(diff_rorp, self.base_rp)
 		elif self.set_dir_replacement(diff_rorp, self.base_rp):
 			if diff_rorp.lstat(): self.CCPP.flag_success(index)
@@ -680,10 +686,6 @@ class IncrementITRB(PatchITRB):
 	Like PatchITRB, but this time also write increments.
 
 	"""
-	def __init__(self, basis_root_rp, inc_root_rp, rorp_cache):
-		self.inc_root_rp = inc_root_rp
-		PatchITRB.__init__(self, basis_root_rp, rorp_cache)
-
 	def fast_process(self, index, diff_rorp):
 		"""Patch base_rp with diff_rorp and write increment (neither is dir)"""
 		mirror_rp, inc_prefix = longname.get_mirror_inc_rps(
diff --git a/rdiff-backup-1.2.8/rdiff_backup/compare.py b/rdiff-backup-1.2.8/rdiff_backup/compare.py
index c499e5c..0b872c9 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/compare.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/compare.py
@@ -71,12 +71,13 @@ def Compare_full(src_rp, mirror_rp, inc_rp, compare_time):
 	repo_side.close_rf_cache()
 	return return_val
 
-def Verify(mirror_rp, inc_rp, verify_time):
+def Verify(mirror_rp, inc_rp, verify_time, return_verified_count = 0):
 	"""Compute SHA1 sums of repository files and check against metadata"""
 	assert mirror_rp.conn is Globals.local_connection
 	repo_iter = RepoSide.init_and_get_iter(mirror_rp, inc_rp, verify_time)
 	base_index = RepoSide.mirror_base.index
 
+	verified = 0
 	bad_files = 0
 	for repo_rorp in repo_iter:
 		if not repo_rorp.isreg(): continue
@@ -85,6 +86,7 @@ def Verify(mirror_rp, inc_rp, verify_time):
 					"perhaps because this feature was added in v1.1.1"
 					% (repo_rorp.get_indexpath(),), 2)
 			continue
+		verified += 1
 		fp = RepoSide.rf_cache.get_fp(base_index + repo_rorp.index, repo_rorp)
 		computed_hash = hash.compute_sha1_fp(fp)
 		if computed_hash == repo_rorp.get_sha1():
@@ -97,9 +99,63 @@ def Verify(mirror_rp, inc_rp, verify_time):
 					(repo_rorp.get_indexpath(), computed_hash,
 					 repo_rorp.get_sha1()), 2)
 	RepoSide.close_rf_cache()
-	if not bad_files: log.Log("Every file verified successfully.", 3)
+	if return_verified_count: return verified, bad_files
+	elif not bad_files: log.Log("Every file verified successfully.", 3)
 	return bad_files
 
+def VerifyFull(mirror_rp, inc_rp, verify_time):
+	"""Verify the integrity of all files in the repository
+
+	Verifies all files in the current mirror plus all differential data and
+	metadata written since verify_time. The verify is done by computing the
+	SHA1 sum of each file and comparing that to the corresponding sum stored
+	in the repository.
+
+	This should be faster and more comprehensive than Verify(...) with the
+	same arguments.
+
+	"""
+	import errno, os, Time, integrity
+	assert inc_rp.conn is Globals.local_connection
+	def warn(message, *args):
+		log.Log("Warning: " + (message % args), 2)
+	now = Time.genstrtotime("now")
+	verified, bad = Verify(mirror_rp, inc_rp, now, return_verified_count=1)
+	integrity_iter = integrity.IterIncrementFiles(inc_rp, verify_time)
+	for path, indexpath, sig in integrity_iter:
+		log.Log("Verifying: %s" % indexpath, 5)
+		verified += 1
+		if sig.startswith("<"):
+			if sig == "<dir>":
+				if not os.path.isdir(path):
+					warn("%s should be a directory", indexpath)
+					bad += 1
+			else:
+				warn("%s not verified: unhandled file type %s", indexpath, sig)
+				bad += 1
+		elif sig == "0":
+			if not os.path.exists(path):
+				warn("file not found: %s", indexpath)
+				bad += 1
+			elif os.stat(path)[6] != 0:
+				warn("%s should be a zero-length file", indexpath)
+				bad += 1
+		else:
+			try:
+				computed_hash = hash.compute_sha1_fp(open(path, "rb"))
+			except IOError, err:
+				if err.errno == errno.ENOENT:
+					warn("file not found: %s", indexpath)
+				else:
+					warn("cannot verify %s: %s", indexpath, err)
+				bad += 1
+			else:
+				if computed_hash != sig:
+					warn("Computed SHA1 digest of %s\n   %s\n"
+						"doesn't match recorded digest of\n   %s",
+						indexpath, computed_hash, sig)
+					bad += 1
+	if bad: log.Log("Your backup repository may be corrupted!", 2)
+	else: log.Log("Successfully verified %s files" % verified, 3)
+	return bad
+
 def print_reports(report_iter):
 	"""Given an iter of CompareReport objects, print them to screen"""
 	assert not Globals.server
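VerifyFull above leans on hash.compute_sha1_fp for both branches. A minimal
stand-in, assuming (as the usage here suggests) that the real function in
rdiff_backup/hash.py simply streams the open file in blocks and returns the
hex digest:

    import hashlib

    def compute_sha1_fp(fp, blocksize=32 * 1024):
        # read in blocks so large increments use constant memory
        fingerprint = hashlib.sha1()
        buf = fp.read(blocksize)
        while buf:
            fingerprint.update(buf)
            buf = fp.read(blocksize)
        return fingerprint.hexdigest()
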
diff --git a/rdiff-backup-1.2.8/rdiff_backup/connection.py b/rdiff-backup-1.2.8/rdiff_backup/connection.py
index 7799585..69e2ccd 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/connection.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/connection.py
@@ -540,7 +540,7 @@ import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
 	   Main, rorpiter, selection, increment, statistics, manage, lazy, \
 	   iterfile, rpath, robust, restore, manage, backup, connection, \
 	   TempFile, SetConnections, librsync, log, regress, fs_abilities, \
-	   eas_acls, user_group, compare
+	   eas_acls, user_group, compare, integrity
 
 try: import win_acls
 except ImportError: pass
diff --git a/rdiff-backup-1.2.8/rdiff_backup/fs_abilities.py b/rdiff-backup-1.2.8/rdiff_backup/fs_abilities.py
index 1e22318..6b82028 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/fs_abilities.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/fs_abilities.py
@@ -149,6 +149,7 @@ class FSAbilities:
 		self.root_rp = rbdir
 		self.read_only = 0
 		subdir = TempFile.new_in_dir(rbdir)
+		subdir.calc_sha1_on_write = 0 # don't track this dir for verify-full
 		subdir.mkdir()
 
 		self.set_extended_filenames(subdir)
diff --git a/rdiff-backup-1.2.8/rdiff_backup/increment.py b/rdiff-backup-1.2.8/rdiff_backup/increment.py
index afab0d2..1b7f2d7 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/increment.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/increment.py
@@ -115,11 +115,10 @@ def get_inc(rp, typestr, time = None):
 	if time is None: time = Time.prevtime
 	addtostr = lambda s: "%s.%s.%s" % (s, Time.timetostring(time), typestr)
 	if rp.index:
-		incrp = rp.__class__(rp.conn, rp.base, rp.index[:-1] +
-							 (addtostr(rp.index[-1]),))
+		incrp = rp.newpath(rp.base, rp.index[:-1] + (addtostr(rp.index[-1]),))
 	else:
 		dirname, basename = rp.dirsplit()
-		incrp = rp.__class__(rp.conn, dirname, (addtostr(basename),))
+		incrp = rp.newpath(dirname, (addtostr(basename),))
 	assert not incrp.lstat(), incrp
 	return incrp
 
diff --git a/rdiff-backup-1.2.8/rdiff_backup/integrity.py b/rdiff-backup-1.2.8/rdiff_backup/integrity.py
new file mode 100755
index 0000000..11ae035
--- /dev/null
+++ b/rdiff-backup-1.2.8/rdiff_backup/integrity.py
@@ -0,0 +1,238 @@
+# Copyright 2010 Daniel Miller
+#
+# This file is part of rdiff-backup.
+#
+# rdiff-backup is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# rdiff-backup is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rdiff-backup; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+# USA
+
+"""Provides functions and classes for full-repository integrity tracking"""
+
+import Globals, log, Main, metadata, Time, sha, increment, rpath
+
+
+class BufferedDataWriter:
+	"""Buffered data file writer abstract base class
+
+	Subclasses must implement docstring_lines() and may call write(line)
+	as needed.
+
+	"""
+	_fileobj, _rp = None, None
+	_line_sep = None
+	def init(self, name):
+		"""Open file stats object and prepare to write"""
+		assert not (self._fileobj or self._rp), (self._fileobj, self._rp)
+		rpbase = Globals.rbdir.append(name)
+		suffix = Globals.compression and 'data.gz' or 'data'
+		self._rp = increment.get_inc(rpbase, suffix, Time.curtime)
+		assert not self._rp.lstat()
+		self._fileobj = self._rp.open("wb", compress = Globals.compression)
+
+		self._line_sep = Globals.null_separator and '\0' or '\n'
+		self.line_buffer = list(self.docstring_lines())
+
+	def docstring_lines(self):
+		"""Documentation lines for the data file
+
+		Must return a list or yield lines to be written to the file.
+		The lines will be written once at the beginning of the file.
+
+		"""
+		raise NotImplementedError("abstract method")
+
+	def write(self, line):
+		"""Write one line to the file"""
+		self.line_buffer.append(line)
+		if len(self.line_buffer) >= 100: self.write_buffer()
+
+	def write_buffer(self):
+		"""Write buffer to file because buffer is full
+
+		The buffer part is necessary because the GzipFile.write()
+		method seems fairly slow.
+
+		"""
+		assert self.line_buffer and self._fileobj
+		self._fileobj.write(self._line_sep.join(self.line_buffer + [""]))
+		self.line_buffer = []
+
+	def close(self):
+		"""Close file stats file"""
+		assert self._fileobj, self._fileobj
+		if self.line_buffer: self.write_buffer()
+		assert not self._fileobj.close()
+		self._fileobj = self._rp = None
+
+
+class _IntegrityData(BufferedDataWriter):
+	"""Record and write out integrity hashes"""
+	basename = "integrity"
+	def init(self):
+		"""Open integrity data object and prepare to write"""
+		assert Globals.rbdir.conn is Globals.local_connection
+		Globals.rbdir.calc_sha1_on_write = 1
+		Main.incdir.calc_sha1_on_write = 1
+		BufferedDataWriter.init(self, self.basename)
+		return self
+
+	def docstring_lines(self):
+		"""Documentation string for the file"""
+		yield "# Format of each line in integrity data file:"
+		yield "# Filename SHA1Digest"
+
+	def update(self, rp, sig):
+		filename = metadata.quote_path(rp.get_indexpath())
+		if hasattr(rp, "base") and \
+			rp.base.endswith("rdiff-backup-data/increments"):
+			filename = "increments/" + filename
+		if filename.endswith("/."):
+			filename = filename[:-2]
+		if filename.endswith("/"):
+			filename = filename.rstrip("/")
+		self.write(" ".join([filename, sig]))
+
+IntegrityData = _IntegrityData()
+filename_prefix = _IntegrityData.basename + "."
+
+
+class FileWrapper:
+	"""Wrapper around a file-like object
+
+	Only use this with files that will be written in a single
+	pass and then closed.  (There is no seek().)  When the wrapper is
+	closed, the file's signature is recorded via IntegrityData.update().
+
+	"""
+	def __init__(self, fileobj, rp):
+		self.rp = rp
+		self.fileobj = fileobj
+		self.sig = sha.new()
+		self.written = 0
+		self.closed = 0
+
+	def write(self, data):
+		assert not self.closed
+		if data != "":
+			self.written = 1
+			self.sig.update(data)
+			self.fileobj.write(data)
+
+	def close(self):
+		if self.written: sig = self.sig.hexdigest()
+		else: sig = "0"
+		IntegrityData.update(self.rp, sig)
+		return self.fileobj.close()
+
+
+def makeFileWrapper(rp):
+	def IntegrityFileWrapper(fileobj):
+		return FileWrapper(fileobj, rp)
+	return IntegrityFileWrapper
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+def IterIncrementFiles(inc_rp, verify_time):
+	"""Iterate (path, indexpath, signature) tuples from integrity files
+
+	Tuples are generated from the most recent integrity data file in the
+	repository first, working backward in time.
+
+	verify_time identifies the oldest backup increment for which integrity
+	information will be yielded. If None, all integrity information in the
+	repository is yielded.
+
+	Generated tuples consist of three strings:
+	path is the absolute path of the file on the local filesystem.
+	indexpath is relative to the repository root.
+	signature is the SHA1 digest of the file. This may also be one of
+		several special values: 0=zero-length file, <dir>=directory, etc.
+		See the rpath.RPath filesystem-mutating methods for the full set.
+
+	"""
+	import os, restore
+	assert inc_rp.conn is Globals.local_connection
+	sep = Globals.null_separator and "\0" or "\n"
+	rbdir = Globals.rbdir
+	root = rbdir.path
+	indexpath_prefix = "rdiff-backup-data/"
+	incs = restore.get_inclist(rbdir.append(IntegrityData.basename))
+	incs.sort(key=lambda rp:-rp.getinctime())
+	first = 1
+	for rp in incs:
+		if verify_time is not None and rp.getinctime() < verify_time:
+			break
+		fileobj = rp.open("rb", rp.path.endswith(".gz"))
+		for line in iterlines(fileobj, sep):
+			if not line.strip() or line.strip().startswith("#"):
+				if "\0" in line[:-1]:
+					log.Log("Warning: %s appears to have null delimiters "
+						"(hint: use --null-separator)" % rp.path, 2)
+				continue
+			filepath, sig = line.rstrip(sep).rsplit(" ", 1)
+			path = os.path.join(root, filepath)
+			if not first:
+				if filepath.startswith("mirror_metadata.") \
+					and ".snapshot" in filepath \
+					and not os.path.exists(path) and \
+					os.path.exists(path.replace(".snapshot", ".diff")):
+					# this is an expected mirror_metadata name change
+					# the .diff file was already verified
+					continue
+				elif filepath.startswith("current_mirror.") \
+					and not os.path.exists(path):
+					continue
+			yield path, indexpath_prefix + filepath, sig
+		fileobj.close()
+		first = 0
+
+def iterlines(fileobj, linesep, chunksize=512):
+	"""Iterate 'lines' in the given file
+
+	fileobj - a file-like object (must have a 'read(size)' method)
+	linesep - line separator character (must be a single, 1-byte character)
+	chunksize - the number of bytes to get on each fileobj.read() call
+
+	This function returns an iterator that yields lines from the given file.
+	The line ending character (linesep) is not removed from the end of each
+	line before it is yielded.
+
+	"""
+	assert len(linesep) == 1, "unsupported line separator: %r" % linesep
+	if linesep == "\n":
+		return fileobj
+	def _iterlines(fileobj, linesep, chunksize):
+		leftover = []
+		while True:
+			chunk = fileobj.read(chunksize)
+			if not chunk:
+				break
+			while True:
+				i = chunk.find(linesep) + 1
+				if i < 1:
+					if chunk:
+						leftover.append(chunk)
+					break
+				if leftover:
+					yield "".join(leftover) + chunk[:i]
+					leftover = []
+				else:
+					yield chunk[:i]
+				chunk = chunk[i:]
+		if leftover:
+			yield "".join(leftover)
+	return _iterlines(fileobj, linesep, chunksize)
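A quick illustration of iterlines() with a null separator, as used when
--null-separator is in effect (the data is made up, and iterlines is assumed
to be imported from the rdiff_backup.integrity module above):

    from StringIO import StringIO

    fake = StringIO("# header\0file1 0\0file2 <dir>\0")
    for line in iterlines(fake, "\0"):
        print repr(line)  # each line keeps its trailing "\0"
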
diff --git a/rdiff-backup-1.2.8/rdiff_backup/rpath.py b/rdiff-backup-1.2.8/rdiff_backup/rpath.py
index a22fb9f..c92a527 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/rpath.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/rpath.py
@@ -859,7 +859,8 @@ class RPath(RORPath):
 	"""
 	regex_chars_to_quote = re.compile("[\\\\\\\"\\$`]")
 
-	def __init__(self, connection, base, index = (), data = None):
+	def __init__(self, connection, base, index = (), data = None,
+			calc_sha1_on_write=0):
 		"""RPath constructor
 
 		connection = self.conn is the Connection the RPath will use to
@@ -876,6 +877,7 @@ class RPath(RORPath):
 		self.conn = connection
 		self.index = index
 		self.base = base
+		self.calc_sha1_on_write = calc_sha1_on_write
 		if base is not None:
 			if base == "/": self.path = "/" + "/".join(index)
 			else: self.path = "/".join((base,) + index)
@@ -982,11 +984,15 @@ class RPath(RORPath):
 		log.Log("Making directory " + self.path, 6)
 		self.conn.os.mkdir(self.path)
 		self.setdata()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<dir>")
 
 	def makedirs(self):
 		log.Log("Making directory path " + self.path, 6)
 		self.conn.os.makedirs(self.path)
 		self.setdata()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<dir>")
 
 	def rmdir(self):
 		log.Log("Removing directory " + self.path, 6)
@@ -1002,24 +1008,32 @@ class RPath(RORPath):
 		self.conn.os.symlink(linktext, self.path)
 		self.setdata()
 		assert self.issym()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<sym>")
 
 	def hardlink(self, linkpath):
 		"""Make self into a hardlink joined to linkpath"""
 		log.Log("Hard linking %s to %s" % (self.path, linkpath), 6)
 		self.conn.os.link(linkpath, self.path)
 		self.setdata()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<hardlink>")
 
 	def mkfifo(self):
 		"""Make a fifo at self.path"""
 		self.conn.os.mkfifo(self.path)
 		self.setdata()
 		assert self.isfifo()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<fifo>")
 
 	def mksock(self):
 		"""Make a socket at self.path"""
 		self.conn.rpath.make_socket_local(self)
 		self.setdata()
 		assert self.issock()
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "<sock>")
 
 	def touch(self):
 		"""Make sure file at self.path exists"""
@@ -1027,6 +1041,8 @@ class RPath(RORPath):
 		self.conn.open(self.path, "w").close()
 		self.setdata()
 		assert self.isreg(), self.path
+		if self.calc_sha1_on_write:
+			self.conn.integrity.IntegrityData.update(self, "0")
 
 	def hasfullperms(self):
 		"""Return true if current process has full permissions on the file"""
@@ -1135,30 +1151,38 @@ class RPath(RORPath):
 	def get_parent_rp(self):
 		"""Return new RPath of directory self is in"""
 		if self.index:
-			return self.__class__(self.conn, self.base, self.index[:-1])
+			return self.__class__(self.conn, self.base, self.index[:-1],
+				calc_sha1_on_write=self.calc_sha1_on_write)
 		dirname = self.dirsplit()[0]
-		if dirname: return self.__class__(self.conn, dirname)
-		else: return self.__class__(self.conn, "/")
+		if dirname: return self.__class__(self.conn, dirname,
+			calc_sha1_on_write=self.calc_sha1_on_write)
+		else: return self.__class__(self.conn, "/",
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def newpath(self, newpath, index = ()):
 		"""Return new RPath with the same connection but different path"""
-		return self.__class__(self.conn, newpath, index)
+		return self.__class__(self.conn, newpath, index,
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def append(self, ext):
 		"""Return new RPath with same connection by adjoing ext"""
-		return self.__class__(self.conn, self.base, self.index + (ext,))
+		return self.__class__(self.conn, self.base, self.index + (ext,),
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def append_path(self, ext, new_index = ()):
 		"""Like append, but add ext to path instead of to index"""
-		return self.__class__(self.conn, "/".join((self.base, ext)), new_index)
+		return self.__class__(self.conn, "/".join((self.base, ext)), new_index,
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def new_index(self, index):
 		"""Return similar RPath but with new index"""
-		return self.__class__(self.conn, self.base, index)
+		return self.__class__(self.conn, self.base, index,
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def new_index_empty(self, index):
 		"""Return similar RPath with given index, but initialize to empty"""
-		return self.__class__(self.conn, self.base, index, {'type': None})
+		return self.__class__(self.conn, self.base, index, {'type': None},
+			calc_sha1_on_write=self.calc_sha1_on_write)
 
 	def open(self, mode, compress = None):
 		"""Return open file.  Supports modes "w" and "r".
@@ -1170,8 +1194,15 @@ class RPath(RORPath):
 
 		"""
 		if self.conn is Globals.local_connection:
-			if compress: return GzipFile(self.path, mode)
-			else: return open(self.path, mode)
+			if self.calc_sha1_on_write and mode in ("w", "wb"):
+				assert Globals.isbackup_writer, self.path
+				# wrapper records the signature on write and reports it
+				# to integrity.IntegrityData on close
+				import integrity
+				wrapper = integrity.makeFileWrapper(self)
+			else: wrapper = lambda f: f
+			if compress: return GzipFile(self.path, mode, wrapper)
+			else: return wrapper(open(self.path, mode))
 
 		if compress:
 			if mode == "r" or mode == "rb":
@@ -1429,6 +1460,21 @@ class GzipFile(gzip.GzipFile):
 	messages.  Use this class instead to clean those up.
 
 	"""
+	def __init__(self, filename=None, mode=None, wrapper=None):
+		if mode and 'b' not in mode: mode += 'b' # gzip data must be binary
+		fileobj = open(filename, mode or 'rb')
+		if wrapper is not None:
+			fileobj = wrapper(fileobj)
+		self._rdiff_fileobj = fileobj
+		gzip.GzipFile.__init__(self, filename, mode=mode, fileobj=fileobj)
+	def close(self):
+		"""Close the GzipFile in addition to wrapped fileobj
+
+		gzip.GzipFile.close() does not close the fileobj, but the wrapper
+		may need to be closed (e.g. to record the integrity signature)
+
+		"""
+		gzip.GzipFile.close(self)
+		self._rdiff_fileobj.close()
 	def __del__(self): pass
 	def __getattr__(self, name):
 		if name == 'fileno': return self.fileobj.fileno
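The FileWrapper/GzipFile plumbing above reduces to one pattern: hash bytes as
they pass through write(), and report the digest at close time. That is also
why the GzipFile override closes itself before the wrapped fileobj, so the
final flushed compressed data and trailer get hashed too. In miniature
(HashingWriter is illustrative, not part of the patch):

    import hashlib

    class HashingWriter(object):
        def __init__(self, fileobj):
            self.fileobj = fileobj
            self.sig = hashlib.sha1()

        def write(self, data):
            self.sig.update(data)  # hash exactly the bytes written
            self.fileobj.write(data)

        def close(self):
            self.fileobj.close()
            return self.sig.hexdigest()
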
diff --git a/rdiff-backup-1.2.8/rdiff_backup/statistics.py b/rdiff-backup-1.2.8/rdiff_backup/statistics.py
index b16fb12..5b8bf7f 100644
--- a/rdiff-backup-1.2.8/rdiff_backup/statistics.py
+++ b/rdiff-backup-1.2.8/rdiff_backup/statistics.py
@@ -353,64 +353,32 @@ def print_active_stats():
 	Globals.client_conn.sys.stdout.write(statmsg)
 
 
-class FileStats:
+from integrity import BufferedDataWriter
+
+class _FileStats(BufferedDataWriter):
 	"""Keep track of less detailed stats on file-by-file basis"""
-	_fileobj, _rp = None, None
-	_line_sep = None
-	def init(cls):
+	def init(self):
 		"""Open file stats object and prepare to write"""
-		assert not (cls._fileobj or cls._rp), (cls._fileobj, cls._rp)
-		rpbase = Globals.rbdir.append("file_statistics")
-		suffix = Globals.compression and 'data.gz' or 'data'
-		cls._rp = increment.get_inc(rpbase, suffix, Time.curtime)
-		assert not cls._rp.lstat()
-		cls._fileobj = cls._rp.open("wb", compress = Globals.compression)
-
-		cls._line_sep = Globals.null_separator and '\0' or '\n'
-		cls.write_docstring()
-		cls.line_buffer = []
-
-	def write_docstring(cls):
-		"""Write the first line (a documentation string) into file"""
-		cls._fileobj.write("# Format of each line in file statistics file:")
-		cls._fileobj.write(cls._line_sep)
-		cls._fileobj.write("# Filename Changed SourceSize MirrorSize "
-						   "IncrementSize" + cls._line_sep)
-
-	def update(cls, source_rorp, dest_rorp, changed, inc):
+		BufferedDataWriter.init(self, "file_statistics")
+
+	def docstring_lines(self):
+		"""Documentation string for the file"""
+		yield "# Format of each line in file statistics file:"
+		yield "# Filename Changed SourceSize MirrorSize IncrementSize"
+
+	def update(self, source_rorp, dest_rorp, changed, inc):
 		"""Update file stats with given information"""
 		if source_rorp: filename = source_rorp.get_indexpath()
 		else: filename = dest_rorp.get_indexpath()
 		filename = metadata.quote_path(filename)
 
-		size_list = map(cls.get_size, [source_rorp, dest_rorp, inc])
-		line = " ".join([filename, str(changed)] + size_list)
-		cls.line_buffer.append(line)
-		if len(cls.line_buffer) >= 100: cls.write_buffer()
+		size_list = map(self.get_size, [source_rorp, dest_rorp, inc])
+		self.write(" ".join([filename, str(changed)] + size_list))
 
-	def get_size(cls, rorp):
+	def get_size(self, rorp):
 		"""Return the size of rorp as string, or "NA" if not a regular file"""
 		if not rorp: return "NA"
 		if rorp.isreg(): return str(rorp.getsize())
 		else: return "0"
 
-	def write_buffer(cls):
-		"""Write buffer to file because buffer is full
-
-		The buffer part is necessary because the GzipFile.write()
-		method seems fairly slow.
-
-		"""
-		assert cls.line_buffer and cls._fileobj
-		cls.line_buffer.append('') # have join add _line_sep to end also
-		cls._fileobj.write(cls._line_sep.join(cls.line_buffer))
-		cls.line_buffer = []
-
-	def close(cls):
-		"""Close file stats file"""
-		assert cls._fileobj, cls._fileobj
-		if cls.line_buffer: cls.write_buffer()
-		assert not cls._fileobj.close()
-		cls._fileobj = cls._rp = None
-
-static.MakeClass(FileStats)
+FileStats = _FileStats()
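With FileStats rebased onto integrity.BufferedDataWriter, adding another
buffered data file to rdiff-backup-data only takes a small subclass plus a
module-level singleton. A sketch (_ExampleStats is hypothetical;
BufferedDataWriter is imported as above):

    class _ExampleStats(BufferedDataWriter):
        def init(self):
            # writes example_statistics.<time>.data or .data.gz
            BufferedDataWriter.init(self, "example_statistics")
            return self

        def docstring_lines(self):
            yield "# Format of each line in example statistics file:"
            yield "# Filename Marker"

        def update(self, filename, marker):
            self.write(" ".join([filename, marker]))

    ExampleStats = _ExampleStats()  # shared instance, like FileStats above
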
diff --git a/rdiff-backup-1.2.8/testing/verifyfulltest.py b/rdiff-backup-1.2.8/testing/verifyfulltest.py
new file mode 100644
index 0000000..aa5d51c
--- /dev/null
+++ b/rdiff-backup-1.2.8/testing/verifyfulltest.py
@@ -0,0 +1,165 @@
+import glob
+import os
+import shutil
+import time
+import unittest
+from os.path import join, dirname
+from subprocess import Popen, PIPE
+
+def remove(dirname):
+	assert dirname.startswith(basedir), \
+		"remove outside of testfiles: " + dirname
+	if os.path.exists(dirname):
+		shutil.rmtree(dirname)
+
+def touch(path, filename=None, data="touched\n"):
+	assert path.startswith(basedir), "touching outside of testfiles: " + path
+	if not os.path.exists(path) or os.path.isfile(path):
+		assert filename is None, filename
+		with open(path, "a") as fp:
+			fp.write(data)
+	else:
+		assert os.path.isdir(path), path
+		assert filename is not None, "filename required to touch inside a dir"
+		touch(join(path, str(filename)), data=data)
+
+def checkoutput(output, checks, name):
+	if isinstance(checks, basestring):
+		checks = [checks]
+	if not checks:
+		assert not output, "unexpected %s:\n%s" % (name, output)
+	else:
+		errors = []
+		for check in checks:
+			if check not in output:
+				errors.append("%s not found: %r" % (name, check))
+		if errors:
+			raise AssertionError(output + "\n\n" + "\n".join(errors))
+
+def docmd(*cmd, **kw):
+	output = kw.pop("output", [])
+	errors = kw.pop("errors", [])
+	assert not kw, "unexpected kwargs: %r" % (kw,)
+	def shorten(cmd):
+		for item in cmd:
+			yield item.rsplit("/", 1)[-1]
+	#print " ".join(shorten(cmd))
+	proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
+	stdout, stderr = proc.communicate()
+	checkoutput(stdout, output, "output")
+	checkoutput(stderr, errors, "error")
+
+def backup(*options):
+	docmd(RBBin, *(options + (src, bak)))
+
+def backup_twice():
+	backup()
+	touch(src_dir, "newfile.txt")
+	touch(src_dir_file)
+	touch(src_file)
+	time.sleep(1) # can't backup more than once per second
+	backup()
+
+testdir = dirname(os.path.abspath(__file__))
+RBBin = join(dirname(testdir), "rdiff-backup")
+basedir = join(testdir, "testfiles")
+src = join(basedir, "verify-full-test-src")
+src_dir = join(src, "dir")
+src_dir_file = join(src_dir, "file.txt")
+src_file = join(src, "file.txt")
+bak = join(basedir, "verify-full-test-bak")
+bak_dir_file = join(bak, "dir", "file.txt")
+datadir = join(bak, "rdiff-backup-data")
+
+class VerifyFullTest(unittest.TestCase):
+	"""Test verify-full"""
+	def setUp(self):
+		"""Start server"""
+		#Log.setverbosity(5)
+		assert os.path.exists(basedir), basedir
+		assert not os.path.exists(src), src
+		assert not os.path.exists(bak), bak
+		os.mkdir(src)
+		os.mkdir(src_dir)
+		touch(src_dir_file)
+		touch(src_file)
+
+	def tearDown(self):
+		remove(src)
+		remove(bak)
+
+	def testVerifyFull1(self):
+		"""verify-full after initial backup"""
+		backup()
+		integs = glob.glob(join(datadir, "integrity.*"))
+		assert len(integs) == 1, integs
+		docmd(RBBin, "--verify-full", bak, output="Successfully verified")
+
+	def testVerifyFull2(self):
+		"""verify-full after multiple backups"""
+		backup_twice()
+		integs = glob.glob(join(datadir, "integrity.*"))
+		assert len(integs) == 2, integs
+		docmd(RBBin, "--verify-full", bak, output="Successfully verified")
+
+	def testVerifyFullNullSepFail(self):
+		"""verify-full after initial backup with --null-separator (fail)"""
+		backup("--null-separator")
+		docmd(RBBin, "--verify-full", bak, output="Successfully verified",
+			errors="appears to have null delimiters")
+
+	def testVerifyFullNullSepPass(self):
+		"""verify-full after initial backup with --null-separator (pass)"""
+		backup("--null-separator")
+		docmd(RBBin, "--null-separator", "--verify-full", bak,
+			output="Successfully verified")
+
+	def testVerifyFullWithCorruption(self):
+		"""verify-full on corrupted repository"""
+		backup_twice()
+		errors = []
+		ignores = ["backup.log", "chars_to_quote", "special_escapes"]
+		for dirpath, dirnames, filenames in os.walk(datadir):
+			for name in filenames:
+				if name in ignores or name.startswith("integrity.") \
+					or name.startswith("mirror_metadata."):
+					continue
+				path = join(dirpath, name)
+				short = path[len(bak) + 1:]
+				if os.stat(path)[6] == 0:
+					errors.append(short + " should be a zero-length file")
+				else:
+					errors.append("Computed SHA1 digest of " + short)
+				touch(path)
+		assert os.path.exists(bak_dir_file), bak_dir_file
+		touch(bak_dir_file)
+		errors.append("Computed SHA1 digest of dir/file.txt")
+		docmd(RBBin, "--verify-full", bak, errors=errors)
+
+	def testVerifyFullWithIntegrityDataCorruption1(self):
+		"""verify-full with corrupted integrity data (compressed)"""
+		backup()
+		errors = ["Not a gzipped file"]
+		ignores = ["backup.log", "chars_to_quote", "special_escapes"]
+		for path in glob.glob(join(datadir, "integrity.*")):
+			touch(path)
+		docmd(RBBin, "--verify-full", bak, errors=errors)
+
+	def testVerifyFullWithIntegrityDataCorruption2(self):
+		"""verify-full with corrupted integrity data (no compression)"""
+		backup("--no-compression")
+		errors = ["Warning: file not found: rdiff-backup-data/file"]
+		ignores = ["backup.log", "chars_to_quote", "special_escapes"]
+		for path in glob.glob(join(datadir, "integrity.*")):
+			touch(path, data="file corruption!")
+		docmd(RBBin, "--verify-full", bak, errors=errors)
+
+if __name__ == "__main__": unittest.main()
