From: Philipp Gesang Date: Wed, 29 Jan 2020 15:57:57 +0000 (+0100) Subject: fix resource leaks building recovery index X-Git-Tag: v2.2~7^2~4 X-Git-Url: http://developer.intra2net.com/git/?a=commitdiff_plain;h=bcc8b174f2e09799390802d29c32a6c5b88567a6;p=python-delta-tar fix resource leaks building recovery index Python 3.7 now emits warnings about possible resource leaks of which deltatar provokes plenty. The main culprit here is manual resource management of file handles in the face of early returns by exception. --- diff --git a/deltatar/tarfile.py b/deltatar/tarfile.py index 21db045..edfe584 100644 --- a/deltatar/tarfile.py +++ b/deltatar/tarfile.py @@ -3046,7 +3046,6 @@ class TarFile(object): source.close() # only if we are extracting a multivolume this can be treated if not self.new_volume_handler: - target.close() raise Exception("We need to read a new volume and you" " didn't supply a new_volume_handler") @@ -3058,7 +3057,8 @@ class TarFile(object): tarinfo = self.firstmember source = self.fileobj iterate = True - target.close() + finally: + if iterate is False: target.close() def makeunknown(self, tarinfo, targetpath): @@ -3808,6 +3808,8 @@ def gen_rescue_index (gen_volume_name, mode, maxvol=None, password=None, key=Non return acc infos += functools.reduce (aux, offsets, []) + fileobj.close() + nvol += 1 def aux (o, nvol, ti): diff --git a/testing/__init__.py b/testing/__init__.py index ebaf6c8..8f3bbbe 100644 --- a/testing/__init__.py +++ b/testing/__init__.py @@ -29,6 +29,7 @@ def new_volume_handler(tarobj, base_name, volume_number, encryption=None): Handles the new volumes ''' volume_path = "%s.%d" % (base_name, volume_number) + tarobj.close() tarobj.open_volume(volume_path, encryption=encryption) def make_new_encryption_volume_handler(encryption): diff --git a/testing/test_concat_compress.py b/testing/test_concat_compress.py index 84f47ba..b2539b9 100644 --- a/testing/test_concat_compress.py +++ b/testing/test_concat_compress.py @@ -90,11 +90,12 @@ class 
ConcatCompressTest(BaseTest): tarobj.close() os.unlink("big") - fo = open("sample.tar.gz", 'rb') + fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close() fo.seek(pos) tarobj = TarFile.open(mode="r#gz", fileobj=fo) tarobj.extract(tarobj.next()) tarobj.close() + fo.close() assert os.path.exists("big") assert hash == self.md5sum("big") @@ -126,11 +127,12 @@ class ConcatCompressTest(BaseTest): os.unlink("small2") # extract only the "small" file - fo = open("sample.tar.gz", 'rb') + fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close() fo.seek(pos) tarobj = TarFile.open(mode="r#gz", fileobj=fo) tarobj.extract(tarobj.next()) tarobj.close() + fo.close() assert os.path.exists("small") assert hash['small'] == self.md5sum("small") @@ -174,12 +176,13 @@ class ConcatCompressTest(BaseTest): tarobj.open_volume(volume_path) # extract only the "small" file - fo = open("sample.tar.gz", 'rb') + fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close() fo.seek(pos) tarobj = TarFile.open(mode="r#gz", fileobj=fo, new_volume_handler=new_volume_handler_fo) tarobj.extract(tarobj.next()) tarobj.close() + fo.close() assert os.path.exists("big") assert hash['big'] == self.md5sum("big")