Since the same decryption context is carried over between the Tar
volumes of one backup set, the built-in IV uniqueness checks
suffice. Between multiple backup sets, the salt and IV fixed
parts change, so there is no occasion for conflict. The IVs of
auxiliary files are unique anyway.
index_it.release()
os.chdir(cwd)
helper.cleanup()
- helper.validate()
def _parse_json_line(self, f, l_no):
'''
self._data.append(s)
def validate(self):
    """If encryption was used, verify post-conditions.

    Checks that no IV was consumed by more than one of the decryptors
    used for this backup set.  Returns ``None`` when there is nothing
    to verify (no decryptors) or when all IV sets are pairwise
    disjoint.

    Raises:
        Exception: if any IV appears in more than one decryptor's
            ``used_ivs`` set; the message carries the overlap count.
    """
    if not self._decryptors:
        return  # no decryption happened -> nothing to verify
    seen = set()
    for dec in self._decryptors:
        used_ivs = dec.used_ivs
        # Any IV already accounted for by an earlier decryptor is a
        # uniqueness violation.
        intersect = used_ivs & seen
        if intersect:
            raise Exception("ERROR: %d duplicate IVs found during "
                            "decryption" % len(intersect))
        seen |= used_ivs
-
def cleanup(self):
'''
Closes all open files