dir_it = self._recursive_walk_dir('.')
dir_path_it = self.jsonize_path_iterator(dir_it)
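+ # collate_iterators appears to merge the two listings by path, so both
+ # are sorted first; sorted() returns lists, hence the iter() wrappers below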
+ index_it = sorted(index_it, key=lambda x: x[0]['path'])
+ dir_path_it = sorted(dir_path_it, key=lambda x: x['path'])
+
# for each file to be in the backup, do:
- for ipath, dpath, l_no in self.collate_iterators(index_it, dir_path_it):
+ for ipath, dpath, l_no in self.collate_iterators(iter(index_it), iter(dir_path_it)):
action = None
# if file is not in the index, it means it's a new file, so we have
# to take a snapshot
dir_it = self._recursive_walk_dir('.')
dir_path_it = self.jsonize_path_iterator(dir_it)
+ index_it = sorted(index_it, key=lambda x: x[0]['path'])
+ dir_path_it = sorted(dir_path_it, key=lambda x: x['path'])
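+ # point the helper's own index iterator at the freshly sorted data as
+ # well (assumption: _data[0] holds the main backup index)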
+ helper._data[0]['iterator'] = iter(index_it)
+
# for each file to be in the backup, do:
- for ipath, dpath, l_no in self.collate_iterators(index_it, dir_path_it):
+ for ipath, dpath, l_no in self.collate_iterators(iter(index_it), iter(dir_path_it)):
if not ipath:
upath = dpath['path']
op_type = dpath['type']
# we have to restore the file, but first we need to delete the
# currently existing one
helper.delete(upath)
- helper.restore(ipath, l_no)
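+ # only restore when the file actually has an index entry; paths that
+ # exist only on disk have nothing to restore from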
+ if ipath:
+ helper.restore(ipath, l_no)
helper.restore_directories_permissions()
os.chdir(cwd)
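+ # walk the remaining data sources (assumption: slot 0 is the backup
+ # being restored and later slots are the parents it was built on)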
cur_index = 1
while cur_index < len(self._data):
data = self._data[cur_index]
- it = data['iterator']
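+ # drain the iterator into a list so it can be scanned here while the
+ # copy stored back in data['iterator'] can still be re-read later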
+ listit = list(data['iterator'])
+ it = iter(listit)
+ data['iterator'] = iter(listit)
# find the path in the index
d = None
# seek tarfile if needed
offset = file_data.get('offset', -1)
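+ # fall back to -1 when the index entry carries no stored offset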
if index_data['tarobj']:
- member = index_data['tarobj'].next()
- if member.path != index_data['tarobj']:
+ # TODO: review this; member.path was being compared against the tarobj
+ # itself, which looks wrong
+ #member = index_data['tarobj'].next()
+ #if member.path != index_data['tarobj']:
# force a seek and reopen
- index_data['tarobj'].close()
- index_data['tarobj'] = None
+ index_data['tarobj'].close()
+ index_data['tarobj'] = None
# open the tarfile if needed
if not index_data['tarobj']: