[python-delta-tar] / testing / __init__.py
# Copyright (C) 2013 Intra2net AG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see
# <http://www.gnu.org/licenses/lgpl-3.0.html>


import os, unittest, hashlib, string
import random

from deltatar import crypto

import sys

BLOCK_SIZE = 8096

def new_volume_handler(tarobj, base_name, volume_number, encryption=None):
    '''
    Handle a new volume: derive the path of the next volume from the base
    name and volume number, then open it on ``tarobj``.
    '''
    volume_path = "%s.%d" % (base_name, volume_number)
    tarobj.open_volume(volume_path, encryption=encryption)

def make_new_encryption_volume_handler(encryption):
    '''
    Return a new-volume handler bound to the given crypto context.
    '''
    return lambda tarobj, base_name, volume_number: \
        new_volume_handler (tarobj, base_name, volume_number,
                            encryption=encryption)

def closing_new_volume_handler(tarobj, base_name, volume_number):
    '''
    Variant of new_volume_handler that explicitly closes the current file
    object before opening the next volume.
    '''
    volume_path = "%s.%d" % (base_name, volume_number)
    tarobj.fileobj.close()
    tarobj.open_volume(volume_path)

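# The handlers above are meant to be passed to deltatar's multivolume TarFile
# through its ``new_volume_handler`` argument.  The sketch below is purely
# illustrative and not used by the tests; the ``mode`` string,
# ``max_volume_size`` and ``new_volume_handler`` keywords are assumed from
# python-delta-tar's multivolume interface and may need adjusting to the
# actual TarFile API.
def _example_multivolume_write(base_name="sample.tar.gz"):
    from deltatar.tarfile import TarFile
    tarobj = TarFile.open(base_name,
                          mode="w#gz",                  # concat-compressed gzip (assumed)
                          max_volume_size=1024 * 1024,  # illustrative 1 MiB volume cap
                          new_volume_handler=new_volume_handler)
    tarobj.add("big")                                   # "big" is created by the tests
    tarobj.close()
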
class BaseTest(unittest.TestCase):
    """
    Test concatenated compression in tarfiles
    """

    def tearDown(self):
        '''
        Remove temporary files created by the unit tests
        '''
        os.system("rm -rf big big2 small small2 sample.* pdtcrypt-object-*.bin")

    def create_file_low_entropy(self, path, length):
        '''
        Creates a file with some repetitive, easily compressible gibberish
        inside, returning the md5sum of that file. File path and length are
        specified as function arguments.
        '''
        data = string.ascii_lowercase + string.digits + "\n"

        # determine how many whole copies of ``data`` are needed and how many
        # bytes of a final partial copy are left over to fill the file up to
        # exactly ``length``
        n_blocks, remainder = divmod(length, len(data))
        with open(path, 'w') as write_handle:
            for _ in range(n_blocks):
                write_handle.write(data)
            write_handle.write(data[:remainder])
        return self.md5sum(path)


    def create_file_high_entropy(self, path, length):
        """
        Create a file with quality random content to counteract compression.
        """
        fd = os.open (path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
        try:
            p = 0
            while p < length:
                todo = min (length - p, BLOCK_SIZE)
                # os.write() may write fewer bytes than requested, so advance
                # by the number of bytes actually written
                p += os.write (fd, os.urandom (todo))
        finally:
            os.close (fd)
        assert p == length
        return self.md5sum (path)

    def create_file(self, path, length, random=False):
        '''
        Creates a file of ``length`` bytes at ``path`` and returns its md5sum.
        With ``random=True`` the content is high-entropy random data that
        resists compression; otherwise it is repetitive and compresses well.
        '''
        if random is True:
            return self.create_file_high_entropy (path, length)

        return self.create_file_low_entropy (path, length)

    def create_symlink(self, linkname, path):
        '''
        Create a symlink at ``path`` pointing to ``linkname`` and return the
        hash of both paths (see md5sum below).
        '''
        os.symlink(linkname, path)
        return self.md5sum(path, linkname=linkname)

    def md5sum(self, filename, linkname=None):
        '''
        Returns the md5sum of a file specified by its filename/path or, if
        ``linkname`` is specified, the hash of both paths (for symlinks).
        '''
        md5 = hashlib.md5()
        if linkname is None:
            with open(filename, 'rb') as f:
                for chunk in iter(lambda: f.read(128 * md5.block_size), b''):
                    md5.update(chunk)
        else: # symlink; hash paths
            md5.update(filename.encode("UTF-8"))
            md5.update(linkname.encode("UTF-8"))
        return md5.hexdigest()
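
    # A minimal usage sketch (illustrative only, not a test method): tests
    # built on these helpers typically record the checksum when a file is
    # created and compare it again after the archive has been extracted.
    # The file name "big" and the size below are arbitrary examples.
    def _example_roundtrip_check(self, path="big", length=50000):
        hash_before = self.create_file(path, length, random=True)
        # ... a real test would archive ``path`` and re-extract it here ...
        assert hash_before == self.md5sum(path)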