fix resource leaks when building the recovery index
authorPhilipp Gesang <philipp.gesang@intra2net.com>
Wed, 29 Jan 2020 15:57:57 +0000 (16:57 +0100)
committerThomas Jarosch <thomas.jarosch@intra2net.com>
Sat, 1 Feb 2020 13:42:43 +0000 (14:42 +0100)
Python 3.7 now emits warnings about possible resource leaks,
of which deltatar provokes plenty. The main culprit here is
the manual resource management of file handles in the face of
early returns caused by exceptions.

deltatar/tarfile.py
testing/__init__.py
testing/test_concat_compress.py

index 21db045..edfe584 100644 (file)
@@ -3046,7 +3046,6 @@ class TarFile(object):
                 source.close()
                 # only if we are extracting a multivolume this can be treated
                 if not self.new_volume_handler:
-                    target.close()
                     raise Exception("We need to read a new volume and you"
                         " didn't supply a new_volume_handler")
 
@@ -3058,7 +3057,8 @@ class TarFile(object):
                 tarinfo = self.firstmember
                 source = self.fileobj
                 iterate = True
-        target.close()
+            finally:
+                if iterate is False: target.close()
 
 
     def makeunknown(self, tarinfo, targetpath):
@@ -3808,6 +3808,8 @@ def gen_rescue_index (gen_volume_name, mode, maxvol=None, password=None, key=Non
             return acc
         infos += functools.reduce (aux, offsets, [])
 
+        fileobj.close()
+
         nvol += 1
 
     def aux (o, nvol, ti):
index ebaf6c8..8f3bbbe 100644 (file)
@@ -29,6 +29,7 @@ def new_volume_handler(tarobj, base_name, volume_number, encryption=None):
     Handles the new volumes
     '''
     volume_path = "%s.%d" % (base_name, volume_number)
+    tarobj.close()
     tarobj.open_volume(volume_path, encryption=encryption)
 
 def make_new_encryption_volume_handler(encryption):
index 84f47ba..b2539b9 100644 (file)
@@ -90,11 +90,12 @@ class ConcatCompressTest(BaseTest):
         tarobj.close()
         os.unlink("big")
 
-        fo = open("sample.tar.gz", 'rb')
+        fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close()
         fo.seek(pos)
         tarobj = TarFile.open(mode="r#gz", fileobj=fo)
         tarobj.extract(tarobj.next())
         tarobj.close()
+        fo.close()
         assert os.path.exists("big")
         assert hash == self.md5sum("big")
 
@@ -126,11 +127,12 @@ class ConcatCompressTest(BaseTest):
         os.unlink("small2")
 
         # extract only the "small" file
-        fo = open("sample.tar.gz", 'rb')
+        fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close()
         fo.seek(pos)
         tarobj = TarFile.open(mode="r#gz", fileobj=fo)
         tarobj.extract(tarobj.next())
         tarobj.close()
+        fo.close()
         assert os.path.exists("small")
         assert hash['small'] == self.md5sum("small")
 
@@ -174,12 +176,13 @@ class ConcatCompressTest(BaseTest):
             tarobj.open_volume(volume_path)
 
         # extract only the "small" file
-        fo = open("sample.tar.gz", 'rb')
+        fo = open("sample.tar.gz", 'rb') # will not be released on tarfile.close()
         fo.seek(pos)
         tarobj = TarFile.open(mode="r#gz", fileobj=fo,
                               new_volume_handler=new_volume_handler_fo)
         tarobj.extract(tarobj.next())
         tarobj.close()
+        fo.close()
         assert os.path.exists("big")
         assert hash['big'] == self.md5sum("big")