estimate_post = tarobj.fileobj.estim_file_size()
actual_size = os.stat(tar_file_name).st_size
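# err measures how far the pre-close size estimate is from the real on-disk
# size; err_post does the same for the estimate taken after the archive was
# finalized, which should be exact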
err = abs(actual_size - estimate)
err_post = abs(actual_size - estimate_post)
- print('mode {:>11s}, {:2} files (up to size {:9}): '
- 'estim={:9}, true={:9}, post={:9}, err={:5}'
- .format(mode, size_number, sizes[size_number],
- estimate, actual_size, estimate_post, err))
+ #print('mode {:>11s}, {:2} files (up to size {:9}): '
+ # 'estim={:9}, true={:9}, post={:9}, err={:5}'
+ # .format(mode, size_number, sizes[size_number],
+ # estimate, actual_size, estimate_post, err))
os.unlink(tar_file_name)
if err > max_err:
max_err = err
if err_post > max_err_post:
max_err_post = err_post
- print('max err is {}, post={}'.format(max_err, max_err_post))
+ #print('max err is {}, post={}'.format(max_err, max_err_post))
assert max_err < 13*1024
assert max_err_post == 0
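# "big" should not exist yet; unpacking below has to recreate it with the
# original checksum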
assert not os.path.exists("big")
# extract with normal tar and check output
- print('unpacking:')
+ #print('unpacking:')
import subprocess
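# tar auto-detects the gzip compression when reading the archive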
output = subprocess.check_output(
"tar xvf sample.tar.gz".split(),
universal_newlines=True)
- for line in output.splitlines():
- print(line.rstrip())
+ #for line in output.splitlines():
+ # print(line.rstrip())
assert os.path.exists("big")
assert hash == self.md5sum("big")
# extract with shell tools; slightly complicated because Linux tar/gunzip
# cannot handle gzipped multi-volume archives directly
- print('unpacking:')
+ #print('unpacking:')
import subprocess
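# gunzip each volume separately, then extract the multi-volume archive,
# handing the second volume to tar via --file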
for cmd in 'gunzip -v sample.tar.gz', 'gunzip -v sample.1.tar.gz', \
'tar xvfM sample.tar --file=sample.1.tar':
- print(cmd)
+ #print(cmd)
output = subprocess.check_output(cmd.split(),
universal_newlines=True)
- for line in output.splitlines():
- print(line.rstrip())
+ #for line in output.splitlines():
+ # print(line.rstrip())
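# the reassembled file must exist and match the original checksum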
assert os.path.exists(filename)
assert hash == self.md5sum(filename)