def restore_backup(self, target_path, backup_indexes_paths=[],
backup_tar_path=None, restore_callback=None,
- disaster=False):
+ disaster=tarfile.TOLERANCE_STRICT):
'''
Restores a backup.
helper = RestoreHelper(self, cwd, backup_path=backup_tar_path,
tarobj=index_it.tar_obj)
elif mode == "diff":
- helper = RestoreHelper(self, cwd, backup_indexes_paths, disaster=disaster)
+ helper = RestoreHelper(self, cwd, backup_indexes_paths,
+ disaster=disaster)
try:
# get iterator from newest index at _data[0]
index1 = helper._data[0]["path"]
iipath = ipath.get ("path", "")
self.logger.error("FAILED to restore: {} ({})"
.format(iipath, e))
- if disaster is True:
+ if disaster != tarfile.TOLERANCE_STRICT:
failed.append ((iipath, e))
continue
try:
helper.restore(ipath, l_no, restore_callback)
except Exception as e:
- if disaster is False:
+ if disaster == tarfile.TOLERANCE_STRICT:
raise
failed.append ((ipath.get ("path", ""), e))
continue
"""
return self.restore_backup(target_path,
backup_indexes_paths=backup_indexes_paths,
- disaster=True)
+ disaster=tarfile.TOLERANCE_RECOVER)
+
+
+ def rescue_backup(self, target_path, backup_indexes_paths=[],
+ restore_callback=None):
+ """
+ A more aggressive “unfsck” mode: do not rely on the index data, as the
+ files may be corrupt; instead, skim the files for header-like information
+ and attempt to retrieve the data.
+ """
+ return self.restore_backup(target_path,
+ backup_indexes_paths=backup_indexes_paths,
+ disaster=tarfile.TOLERANCE_RESCUE)
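
With this change the three public entry points differ only in the tolerance
level they pass down. A minimal usage sketch, not part of the patch: `dtar`
stands for an existing DeltaTar instance, the target directory and index path
are placeholders, and `recover_backup` is assumed to be the name of the
wrapper above that passes TOLERANCE_RECOVER.

# Hedged usage sketch -- dtar, the paths and the recover_backup name are
# assumptions, not part of this patch.
dtar.restore_backup("/tmp/restore",            # strict: the first error raises
                    backup_indexes_paths=["/backup/index.0"])

dtar.recover_backup("/tmp/restore",            # TOLERANCE_RECOVER: trust the index,
                    backup_indexes_paths=["/backup/index.0"])   # collect failures

dtar.rescue_backup("/tmp/restore",             # TOLERANCE_RESCUE: don't trust the index,
                   backup_indexes_paths=["/backup/index.0"])    # skim the files for headers
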
def _parse_json_line(self, f, l_no):
# tarfile.extractall for details.
_directories = []
- _disaster = False
+ _disaster = tarfile.TOLERANCE_STRICT
def __init__(self, deltatar, cwd, index_list=None, backup_path=False,
- tarobj=None, disaster=False):
+ tarobj=None, disaster=tarfile.TOLERANCE_STRICT):
'''
Constructor opens the tars and init the data structures.
encryption=index_data["decryptor"],
new_volume_handler=index_data['new_volume_handler'],
save_to_members=False,
- tolerant=self._disaster)
+ tolerance=self._disaster)
member = index_data['tarobj'].__iter__().__next__()
GZ_MAGIC_DEFLATE = struct.pack ("<BBB", GZ_MAGIC [0], GZ_MAGIC [1],
GZ_METHOD_DEFLATE)
+TOLERANCE_STRICT = 0 # abort on the first error
+TOLERANCE_RECOVER = 1 # rely on offsets in index
+TOLERANCE_RESCUE = 2 # deduce metadata from archive contents
+
#---------------------------------------------------------
# archive handling mode
#---------------------------------------------------------
"""
remainder = -1 # track size in encrypted entries
- tolerant = False
+ tolerance = TOLERANCE_STRICT
def __init__(self, name, mode, comptype, fileobj, bufsize,
concat=False, encryption=None, enccounter=None,
- compresslevel=9, tolerant=False):
+ compresslevel=9, tolerance=TOLERANCE_STRICT):
"""Construct a _Stream object.
"""
self.arcmode = arcmode_set (concat, encryption, comptype)
- self.tolerant = tolerant
+ self.tolerance = tolerance
self._extfileobj = True
if fileobj is None:
try:
return self.encryption.process (buf)
except RuntimeError as exn:
- if self.tolerant is True:
+ if self.tolerance != TOLERANCE_STRICT:
raise DecryptionError (exn)
raise
try:
self._init_read_gz()
except DecryptionError:
- if self.tolerant is True:
+ if self.tolerance != TOLERANCE_STRICT:
# return whatever data was processed successfully
if len (buf) > 0:
t.append (buf)
else:
self.remainder -= todo
except DecryptionError:
- if self.tolerant is False:
+ if self.tolerance == TOLERANCE_STRICT:
raise
self.encryption.drop ()
if good_crypto == 0:
@classmethod
def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE,
- encryption=None, compresslevel=9, tolerant=False, **kwargs):
+ encryption=None, compresslevel=9, tolerance=TOLERANCE_STRICT,
+ **kwargs):
"""Open a tar archive for reading, writing or appending. Return
an appropriate TarFile class.
stream = _Stream(name, filemode, comptype, fileobj, bufsize,
concat=True, encryption=encryption,
- compresslevel=compresslevel, tolerant=tolerant)
+ compresslevel=compresslevel, tolerance=tolerance)
kwargs ["concat"] = True
try:
t = cls(name, filemode, stream, **kwargs)
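
For reference, a hedged sketch of how a caller reaches this code path; the
archive name, the "r#gz" concatenated-gzip read mode and the import line are
assumptions not shown in this hunk, and only the tolerance keyword comes from
the patch.

# Hedged sketch -- assumes `tarfile` is deltatar's tarfile module, as in the
# deltatar.py hunks above.
from deltatar import tarfile

tar = tarfile.TarFile.open("backup.tar.gz", mode="r#gz",
                           tolerance=tarfile.TOLERANCE_RECOVER)
for member in tar:        # with a non-strict tolerance, damaged entries are
    tar.extract(member)   # truncated or skipped instead of aborting the run
tar.close()
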