     # * 1 block used to store the header information of the stored file
     # * 2 blocks used to mark the end of the tar file
     tarfile_overhead = 3*BLOCKSIZE
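+    # every additional file stored adds 1 more block of overhead (its header)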
+    file_overhead = 1*BLOCKSIZE
 
     # overhead size used to calculate the exact maximum size of a tar volume,
     # corresponding with a multivolume tar file storing a single file. In the
         assert os.path.exists("big")
         assert hash == self.md5sum("big")
 
-
     def test_corner_case_split_size2(self):
         '''
         Creates a tar file with a single file inside that contains the maximum
         assert os.path.exists("big")
         assert hash == self.md5sum("big")
 
+    def test_corner_case_split_size3(self):
+        '''
+        Creates a tar file with a single file inside whose size is an exact
+        multiple of the maximum volume size; because the volume size leaves no
+        room for the overhead, an extra volume is needed.
+        '''
+        hash = self.create_file("big", 4*1024*1024)
+
+        # create the tar file with volumes
+        tarobj = TarFile.open("sample.tar",
+                              mode="w",
+                              format=self.tarfile_format,
+                              max_volume_size=2*1024*1024,
+                              new_volume_handler=new_volume_handler)
+        tarobj.add("big")
+        tarobj.close()
+
+        # check that the tar volumes were correctly created
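+        # each 2 MiB volume also has to hold some overhead, so the 4 MiB file
+        # does not fit in two volumes and a third one is needed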
+        assert os.path.exists("sample.tar")
+        assert os.path.exists("sample.tar.1")
+        assert os.path.exists("sample.tar.2")
+        assert not os.path.exists("sample.tar.3")
+
+        os.unlink("big")
+        assert not os.path.exists("big")
+
+        # extract and check output
+        tarobj = TarFile.open("sample.tar",
+                              mode="r",
+                              new_volume_handler=new_volume_handler)
+        tarobj.extractall()
+        tarobj.close()
+        assert os.path.exists("big")
+        assert hash == self.md5sum("big")
+
+    def test_corner_case_split_size4(self):
+        '''
+        Creates a tar file with multiple files inside whose combined size,
+        plus the overhead, exactly fills one volume.
+        '''
+        hash = dict()
+        hash['big'] = self.create_file("big", 3*1024*1024)
+        hash['small'] = self.create_file("small", 1*1024*1024)
+
+        # create the tar file with volumes
+        tarobj = TarFile.open("sample.tar",
+                              mode="w",
+                              format=self.tarfile_format,
+                              max_volume_size=(4*1024*1024 +
+                                               self.tarfile_overhead +
+                                               self.file_overhead),
+                              new_volume_handler=new_volume_handler)
+        tarobj.add("big")
+        tarobj.add("small")
+        tarobj.close()
+
+        # check that the tar volumes were correctly created
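+        # max_volume_size covers the 4 MiB of data plus the headers of both
+        # files and the end-of-archive marker, so a single volume suffices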
+        assert os.path.exists("sample.tar")
+        assert not os.path.exists("sample.tar.1")
+
+        for key, value in hash.iteritems():
+            os.unlink(key)
+            assert not os.path.exists(key)
+
+        # extract and check output
+        tarobj = TarFile.open("sample.tar",
+                              mode="r",
+                              new_volume_handler=new_volume_handler)
+        tarobj.extractall()
+        tarobj.close()
+
+        for key, value in hash.iteritems():
+            assert os.path.exists(key)
+            assert value == self.md5sum(key)
+
+    def test_corner_case_split_size5(self):
+        '''
+        Creates a tar file with multiple files inside whose combined size
+        exceeds the maximum volume size, so the archive spans two volumes.
+        '''
+        hash = dict()
+        hash['big'] = self.create_file("big", 3*1024*1024)
+        hash['small'] = self.create_file("small", 1*1024*1024)
+
+        # create the tar file with volumes
+        tarobj = TarFile.open("sample.tar",
+                              mode="w",
+                              format=self.tarfile_format,
+                              max_volume_size=(2*1024*1024 +
+                                               self.tarfile_overhead +
+                                               self.file_overhead),
+                              new_volume_handler=new_volume_handler)
+        tarobj.add("big")
+        tarobj.add("small")
+        tarobj.close()
+
+        # check that the tar volumes were correctly created
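+        # the volume size only leaves room for about 2 MiB of data, so "big"
+        # spills over into a second volume, which also holds "small"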
+        assert os.path.exists("sample.tar")
+        assert os.path.exists("sample.tar.1")
+        assert not os.path.exists("sample.tar.2")
+
+        for key, value in hash.iteritems():
+            os.unlink(key)
+            assert not os.path.exists(key)
+
+        # extract and check output
+        tarobj = TarFile.open("sample.tar",
+                              mode="r",
+                              new_volume_handler=new_volume_handler)
+        tarobj.extractall()
+        tarobj.close()
+
+        for key, value in hash.iteritems():
+            assert os.path.exists(key)
+            assert value == self.md5sum(key)
+
     def test_volume_not_found(self):
         '''
         Create a tar file with multiple volumes and one file and extract it, but
     # * 1 block used to store the pax header
     # * 2 blocks used to mark the end of the tar file
     tarfile_overhead = 5*BLOCKSIZE
-
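+    # every additional file stored adds 3 more blocks of overhead: the pax
+    # extended header, its data block and the regular header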
+    file_overhead = 3*BLOCKSIZE
 
     # overhead size used to calculate the exact maximum size of a tar volume,
     # corresponding with a multivolume tar file storing a single file. In the