u'size': stinfo.st_size
}
-    def _equal_stat_dicts(self, d1, d2):
+    def _equal_stat_dicts(self, d1, d2, listsnapshot_equal=False):
         '''
         Return if the dicts are equal in the stat keys
+
+        listsnapshot_equal: if True, a "list://" prefixed path is considered
+        equal to the same path prefixed with "snapshot://" (see prefixed())
         '''
-        keys = [u'gid', u'type', u'mode', u'mtime', u'size', u'inode',
-                u'ctime', u'uid']
+        keys = [u'gid', u'type', u'mode', u'size', u'uid',
+                # TODO: check how to restore this correctly if possible
+                #u'mtime', u'ctime', u'inode'
+                ]
         if (not d1 and d2 != None) or (d1 != None and not d2):
             return False
-        if self.prefixed(d1.get('path', -1)) != self.prefixed(d2.get('path', -2)):
-            return False
+        # normalize both paths with the same listsnapshot_equal setting so a
+        # "list://" entry can match its "snapshot://" counterpart when asked;
+        # without forwarding the flag here, listsnapshot_equal would be a no-op
+        if self.prefixed(d1.get('path', -1), listsnapshot_equal) !=\
+                self.prefixed(d2.get('path', -2), listsnapshot_equal):
+            return False
         for key in keys:
             if d1.get(key, -1) != d2.get(key, -2):
                 return False
         return True
-    def prefixed(self, path):
+    def prefixed(self, path, listsnapshot_equal=False):
         '''
         if a path is not prefixed, return it prefixed
+
+        listsnapshot_equal: if True, a path carrying the u'list://' prefix is
+        rewritten with the u'snapshot://' prefix instead, so entries from a
+        list index and a snapshot index normalize to the same form
         '''
         for prefix in self.__path_prefix_list:
             if path.startswith(prefix):
+                # strip the matched prefix and re-prefix as snapshot://
+                if listsnapshot_equal and prefix == u'list://':
+                    return u'snapshot://' + path[len(prefix):]
                 return path
         return u'snapshot://' + path
# open
f = open(index_path, 'r')
# check index header
- j, l_no = self._parse_json_line(f, -1)
+ j, l_no = self._parse_json_line(f, 0)
if j.get("type", '') != 'python-delta-tar-index' or\
j.get('version', -1) != 1:
raise Exception("invalid index file format: %s" % json.dumps(j))
# find BEGIN-FILE-LIST, ignore other headers
while True:
- j, l_no = self._parse_json_line(f, -1)
+ j, l_no = self._parse_json_line(f, l_no)
if j.get('type', '') == 'BEGIN-FILE-LIST':
break
# read each file in the index and process it to do the retore
while True:
try:
- j, l_no = self._parse_json_line(f, -1)
+ j, l_no = self._parse_json_line(f, l_no)
except Exception, e:
f.close()
raise e
yield j, l_no
-    def jsonize_path_iterator(self, iter):
+    def jsonize_path_iterator(self, iter, strip=0):
         '''
         converts the yielded items of an iterator into json path lines.
+
+        strip: Strip the smallest prefix containing num leading slashes from
+        the file path.
         '''
         while True:
             try:
                 path = iter.next()
-                yield self._stat_dict(path)
+                if strip == 0:
+                    yield self._stat_dict(path)
+                else:
+                    # drop the first `strip` path components so walks rooted
+                    # at different directories yield comparable paths
+                    st = self._stat_dict(path)
+                    st['path'] = "/".join(path.split("/")[strip:])
+                    yield st
             except StopIteration:
                 break
while True:
if not elem1:
try:
- elem1 = it1.next()
- l_no += 1
- if isinstance(elem1, tuple):
- elem1 = elem1[0]
+ elem1, l_no = it1.next()
except StopIteration:
if elem2:
yield (None, elem2, l_no)
except StopIteration:
if elem1:
yield (elem1, None, l_no)
- for elem1 in it1:
- if isinstance(elem1, tuple):
- elem1 = elem1[0]
+ for elem1, l_no in it1:
yield (elem1, None, l_no)
break
index2 = self.unprefixed(elem2['path'])
continue
# if both files are equal, we have nothing to restore
- if self._equal_stat_dicts(ipath, dpath):
+ if self._equal_stat_dicts(ipath, dpath, listsnapshot_equal=True):
continue
# we have to restore the file, but first we need to delete the
# current existing file
- helper.delete(self.unprefixed(ipath['path']))
+ helper.delete(upath)
helper.restore(ipath, l_no)
os.chdir(cwd)
'''
Delete a file
'''
+ if not os.path.exists(path):
+ return
+
if os.path.isdir(path):
shutil.rmtree(path)
else:
'''
data = self._data[0]
path = itpath['path']
+ upath = self._deltatar.unprefixed(path)
# if path is found in the first index as to be deleted or snapshotted,
# deal with it and finish
except StopIteration:
break
- dpath = self._data.unprefixed(d.get('path', ''))
+ dpath = self._deltatar.unprefixed(d.get('path', ''))
- if path == dpath:
+ if upath == dpath:
break
if not d:
'''
Create base test data
'''
- os.system('rm -rf source_dir source_dir2 backup_dir backup_dir? huge')
+ os.system('rm -rf target_dir source_dir source_dir2 backup_dir backup_dir2 huge')
os.makedirs('source_dir/test/test2')
self.hash = dict()
self.hash["source_dir/test/test2"] = ''
'''
Remove temporal files created by unit tests
'''
- os.system("rm -rf source_dir source_dir2 backup_dir backup_dir? huge")
+ os.system("rm -rf source_dir target_dir source_dir2 backup_dir backup_dir2 huge")
def test_restore_simple_full_backup(self):
'''
backup_tar_path=tar_path)
assert os.listdir("source_dir") == ['zzzz', 'bigdir']
+    def test_restore_from_index_diff_backup(self):
+        '''
+        Creates a full backup, modifies some files, creates a diff backup,
+        then restores the diff backup from zero.
+        '''
+        # this test only works for uncompressed or concat compressed modes
+        if self.MODE.startswith(':') or self.MODE.startswith('|'):
+            return
+
+        deltatar = DeltaTar(mode=self.MODE, password=self.PASSWORD,
+                            logger=self.consoleLogger)
+
+        # create first backup
+        deltatar.create_full_backup(
+            source_path="source_dir",
+            backup_path="backup_dir")
+
+        prev_index_filename = deltatar.index_name_func(is_full=True)
+        prev_index_path = os.path.join("backup_dir", prev_index_filename)
+
+        # add some new files and directories
+        os.makedirs('source_dir/bigdir')
+        self.hash["source_dir/bigdir"] = ""
+        os.unlink("source_dir/small")
+        self.hash["source_dir/bigdir/a"] = self.create_file("source_dir/bigdir/a", 100)
+        self.hash["source_dir/bigdir/b"] = self.create_file("source_dir/bigdir/b", 500)
+        self.hash["source_dir/zzzz"] = self.create_file("source_dir/zzzz", 100)
+
+        deltatar.create_diff_backup("source_dir", "backup_dir2",
+                                    prev_index_path)
+
+        # apply diff backup in target_dir
+        # NOTE(review): is_full=True for the diff backup's index name looks
+        # suspicious — confirm index_name_func semantics for diff backups
+        index_filename = deltatar.index_name_func(is_full=True)
+        index_path = os.path.join("backup_dir2", index_filename)
+        deltatar.restore_backup("target_dir",
+            backup_indexes_paths=[index_path, prev_index_path])
+
+        # then compare the two directories source_dir and target_dir and check
+        # they are the same
+        source_it = deltatar._recursive_walk_dir('source_dir')
+        source_it = deltatar.jsonize_path_iterator(source_it, strip=1)
+        target_it = deltatar._recursive_walk_dir('target_dir')
+        target_it = deltatar.jsonize_path_iterator(target_it, strip=1)
+        while True:
+            # advance both iterators in lockstep; the comparison must fail if
+            # either directory runs out of entries before the other does
+            try:
+                sitem = source_it.next()
+            except StopIteration:
+                sitem = None
+            try:
+                titem = target_it.next()
+            except StopIteration:
+                titem = None
+            if sitem is None and titem is None:
+                break
+            if sitem is None or titem is None:
+                # symmetric check: also catches target_dir missing files,
+                # which the previous version silently accepted
+                raise Exception("iterators do not stop at the same time")
+            assert deltatar._equal_stat_dicts(sitem, titem)
+
+    def test_restore_from_index_diff_backup2(self):
+        '''
+        Creates a full backup, modifies some files, creates a diff backup,
+        then restores the diff backup with the full backup as a starting point.
+        '''
+        # this test only works for uncompressed or concat compressed modes
+        if self.MODE.startswith(':') or self.MODE.startswith('|'):
+            return
+
+        deltatar = DeltaTar(mode=self.MODE, password=self.PASSWORD,
+                            logger=self.consoleLogger)
+
+        # create first backup
+        deltatar.create_full_backup(
+            source_path="source_dir",
+            backup_path="backup_dir")
+
+        prev_index_filename = deltatar.index_name_func(is_full=True)
+        prev_index_path = os.path.join("backup_dir", prev_index_filename)
+
+        # add some new files and directories
+        os.makedirs('source_dir/bigdir')
+        self.hash["source_dir/bigdir"] = ""
+        os.unlink("source_dir/small")
+        self.hash["source_dir/bigdir/a"] = self.create_file("source_dir/bigdir/a", 100)
+        self.hash["source_dir/bigdir/b"] = self.create_file("source_dir/bigdir/b", 500)
+        self.hash["source_dir/zzzz"] = self.create_file("source_dir/zzzz", 100)
+
+        deltatar.create_diff_backup("source_dir", "backup_dir2",
+                                    prev_index_path)
+
+        # first restore initial backup in target_dir
+        tar_filename = deltatar.volume_name_func('backup_dir', True, 0)
+        tar_path = os.path.join("backup_dir", tar_filename)
+        deltatar.restore_backup("target_dir", backup_tar_path=tar_path)
+
+        # then apply diff backup in target_dir
+        # NOTE(review): is_full=True for the diff backup's index name looks
+        # suspicious — confirm index_name_func semantics for diff backups
+        index_filename = deltatar.index_name_func(is_full=True)
+        index_path = os.path.join("backup_dir2", index_filename)
+        deltatar.restore_backup("target_dir",
+            backup_indexes_paths=[index_path, prev_index_path])
+
+        # then compare the two directories source_dir and target_dir and check
+        # they are the same
+        source_it = deltatar._recursive_walk_dir('source_dir')
+        source_it = deltatar.jsonize_path_iterator(source_it, strip=1)
+        target_it = deltatar._recursive_walk_dir('target_dir')
+        target_it = deltatar.jsonize_path_iterator(target_it, strip=1)
+        while True:
+            # advance both iterators in lockstep; the comparison must fail if
+            # either directory runs out of entries before the other does
+            try:
+                sitem = source_it.next()
+            except StopIteration:
+                sitem = None
+            try:
+                titem = target_it.next()
+            except StopIteration:
+                titem = None
+            if sitem is None and titem is None:
+                break
+            if sitem is None or titem is None:
+                # symmetric check: also catches target_dir missing files,
+                # which the previous version silently accepted
+                raise Exception("iterators do not stop at the same time")
+            assert deltatar._equal_stat_dicts(sitem, titem)
+
class DeltaTar2Test(DeltaTarTest):
'''