From: Philipp Gesang
Date: Fri, 11 Aug 2017 14:41:51 +0000 (+0200)
Subject: use random data in multivol tests
X-Git-Tag: v2.2~7^2~75
X-Git-Url: http://developer.intra2net.com/git/?a=commitdiff_plain;h=85e7013f04ed55a033530888e9c3947565b522a7;p=python-delta-tar

use random data in multivol tests

Brute-force incompressibility to prevent gzip from invalidating our
multivolume tests.
---

diff --git a/testing/__init__.py b/testing/__init__.py
index 2fd7d52..ebaf6c8 100644
--- a/testing/__init__.py
+++ b/testing/__init__.py
@@ -22,6 +22,8 @@ from deltatar import crypto
 
 import sys
 
+BLOCK_SIZE = 8096
+
 def new_volume_handler(tarobj, base_name, volume_number, encryption=None):
     '''
     Handles the new volumes
@@ -56,7 +58,7 @@ class BaseTest(unittest.TestCase):
        '''
        os.system("rm -rf big big2 small small2 sample.* pdtcrypt-object-*.bin")

-    def create_file(self, path, length):
+    def create_file_low_entropy(self, path, length):
        '''
        Creates a file with some gibberish inside, returning the md5sum of that
        file. File path and length are specified as function arguments.
@@ -72,6 +74,35 @@
            write_handle.write(data[:remainder])
        return self.md5sum(path)

+
+    def create_file_high_entropy(self, path, length):
+        """
+        Create a file with quality random content to counteract compression.
+        """
+        fd = os.open (path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
+        try:
+            p = 0
+            while p < length:
+                todo = min (length - p, BLOCK_SIZE)
+                os.write (fd, os.urandom (todo))
+                p += todo
+        finally:
+            os.close (fd)
+        assert p == length
+        return self.md5sum (path)
+
+
+    def create_file(self, path, length, random=False):
+        '''
+        Creates a file with some gibberish inside, returning the md5sum of that
+        file. File path and length are specified as function arguments.
+        '''
+        if random is True:
+            return self.create_file_high_entropy (path, length)
+
+        return self.create_file_low_entropy (path, length)
+
+
    def create_symlink(self, linkname, path):
        os.symlink(linkname, path)
        return self.md5sum(path, linkname=linkname)
diff --git a/testing/test_recover.py b/testing/test_recover.py
index edee250..327d0c5 100644
--- a/testing/test_recover.py
+++ b/testing/test_recover.py
@@ -9,8 +9,10 @@ import deltatar.tarfile as tarfile
 from . import BaseTest
 
 TEST_PASSWORD = "test1234"
-TEST_VOLSIZ = 3 # MB
+TEST_VOLSIZ = 2 # MB
 TEST_FILESPERVOL = 3
+VOLUME_OVERHEAD = 1.4 # account for tar overhead when fitting files into
+                      # volumes; this is black magic
 
 ###############################################################################
 ## helpers                                                                   ##
@@ -160,7 +162,7 @@ class RecoverTest (BaseTest):
        os.makedirs (self.src_path)

        for i in range (5):
-            f = "dummy_%rd" % i
+            f = "dummy_%d" % i
            self.hash [f] = self.create_file ("%s/%s" % (self.src_path, f),
                                              5 + i)

@@ -197,15 +199,19 @@
            index_file = "%s.%s" % (index_file , deltatar.PDTCRYPT_EXTENSION)

        if self.VOLUMES > 1:
-            # add n files for one nth the volume size each
-            fsiz = int (TEST_VOLSIZ / TEST_FILESPERVOL * 1024 * 1024)
-            fcnt = self.VOLUMES * TEST_FILESPERVOL
+            # add n files for one nth the volume size each, corrected
+            # for metadata and tar block overhead
+            fsiz = int (  (  TEST_VOLSIZ
+                           / (TEST_FILESPERVOL * VOLUME_OVERHEAD))
+                        * 1024 * 1024)
+            fcnt = (self.VOLUMES - 1) * TEST_FILESPERVOL
            for i in range (fcnt):
                nvol, invol = divmod(i, TEST_FILESPERVOL)
                f = "dummy_vol_%d_n_%0.2d" % (nvol, invol)
                self.hash [f] = self.create_file ("%s/%s" % (self.src_path, f),
-                                                  fsiz)
+                                                  fsiz,
+                                                  random=True)

        def vname (_x, _y, n, *a, **kwa):
            return backup_file % n
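
The rationale behind the patch is easy to check outside the test suite. Below is a standalone sketch (not part of the patch) using Python 3's gzip and os modules: repetitive filler of the kind the old create_file() helper wrote compresses to a small fraction of its size, so a compressed volume never fills up and the multivolume split under test never happens, whereas os.urandom() data, as now requested via create_file(path, length, random=True), stays essentially incompressible. The 1 MiB payload size is an arbitrary choice for illustration.

    # Not part of the patch: compare gzip's effect on low- vs. high-entropy data.
    import gzip
    import os

    LENGTH = 1024 * 1024  # 1 MiB of payload for the comparison

    # Repetitive, low-entropy filler comparable to the gibberish helper output.
    low_entropy = b"0123456789abcdef" * (LENGTH // 16)
    # High-entropy data of the kind create_file_high_entropy() writes.
    high_entropy = os.urandom(LENGTH)

    print("low entropy : %d -> %d bytes" % (LENGTH, len(gzip.compress(low_entropy))))
    print("high entropy: %d -> %d bytes" % (LENGTH, len(gzip.compress(high_entropy))))

On a typical run the repetitive buffer shrinks to a tiny fraction of its original size while the random buffer stays at roughly the same size, which is why the multivolume tests now request random content for their dummy files.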