Update afstest.py

tmpz23 2022-02-06 21:38:13 +01:00 committed by GitHub
parent 1d0905ba96
commit bf779237eb

@@ -1,11 +1,14 @@
 #!/usr/bin/env python3
-from afstool import AfsInvalidFileLenError, Afs, FilenameResolver
+import afstool
+from configparser import ConfigParser
+import copy
 import os
 from pathlib import Path
 import shutil
 from time import time
+from datetime import datetime

-__version__ = "0.0.5"
+__version__ = "0.0.6"
 __author__ = "rigodron, algoflash, GGLinnk"
 __license__ = "MIT"
 __status__ = "developpement"
@@ -33,12 +36,12 @@ def test_storage():

 # Need to know offsets of TOC to get the max length of files
 # and unpacked names of files when duplicated
-class AfsTest(Afs):
+class AfsTest(afstool.Afs):
     # return a list of tuples with (offset, resolved filename)
     def get_range(self, folder_path:Path):
         sys_path = folder_path / "sys"
         self._Afs__loadsys_from_folder(sys_path)
-        resolver = FilenameResolver(sys_path)
+        resolver = afstool.FilenameResolver(sys_path)

         offsets_names_map = [(0, "SYS TOC")]
         for i in range(0, self._Afs__file_count):
@@ -82,7 +85,7 @@ def compare_files(file1_path:Path, file2_path:Path):

 # compare two folder
 # -> raise an exception if there is a difference in paths or in file content
-def compare_folders(folder1: Path, folder2: Path, compare_mtime=False):
+def compare_folders(folder1: Path, folder2: Path, compare_mtime:bool = False):
     folder1_tmp_paths = list(folder1.glob("*"))
     folder1_file_count = len(folder1_tmp_paths)
     print(f"compare \"{folder1}\" - \"{folder2}\" ({folder1_file_count} files)")
@@ -121,7 +124,7 @@ def compare_unpacked_AFS(folder1: Path, folder2: Path):

 def patch_all_bytes(file_path:Path, max_len:int = None):
     file_data = bytearray(file_path.read_bytes())
-    if max_len == None:
+    if max_len is None:
         max_len = len(file_data)
     elif max_len < len(file_data):
         file_data = file_data[:max_len]
@@ -151,7 +154,7 @@ def patch_unpackedfiles_in_folder(folder_path:Path, bool_len:bool = False):
            max_len += 1
        # else there is no limit because last file
        else:
-            max_len = file_path.stat().st_size + Afs.ALIGN
+            max_len = file_path.stat().st_size + afstool.Afs.ALIGN
        break
    patch_all_bytes(file_path, max_len)
    if bool_len:
@@ -180,6 +183,72 @@ def repack_unpack2_compare():
     shutil.rmtree(unpack2_path)

+# generate an unpacked AFS filesys for testing
+# files are filled with 0xff
+def mk_rebuild_filesys(unpacked_path:Path, files:list, afs_rebuild_conf:dict, afs_rebuild_csv:str = ""):
+    sys_path = unpacked_path / "sys"
+    root_path = unpacked_path / "root"
+    sys_path.mkdir(parents=True)
+    root_path.mkdir()
+    # create files
+    for file_tuple in files:
+        (root_path / file_tuple[0]).write_bytes(b"\xff" * file_tuple[1])
+    # create afs_config.conf
+    conf_txt = f"[Default]\n"\
+        f"AFS_MAGIC = {afs_rebuild_conf['Default']['AFS_MAGIC']}\n"\
+        f"files_rebuild_strategy = {afs_rebuild_conf['Default']['files_rebuild_strategy']}\n"\
+        f"filename_directory = {afs_rebuild_conf['Default']['filename_directory']}\n\n"
+    if afs_rebuild_conf["Default"]["filename_directory"] == "True":
+        conf_txt += f"[FilenameDirectory]\n\n"\
+            f"toc_offset_of_fd_offset = {afs_rebuild_conf['FilenameDirectory']['toc_offset_of_fd_offset']}\n"\
+            f"fd_offset = {afs_rebuild_conf['FilenameDirectory']['fd_offset']}\n"\
+            f"fd_last_attribute_type = {afs_rebuild_conf['FilenameDirectory']['fd_last_attribute_type']}\n"
+    (sys_path / "afs_rebuild.conf").write_text(conf_txt)
+    if len(afs_rebuild_csv) > 0:
+        (sys_path / "afs_rebuild.csv").write_text(afs_rebuild_csv)
+
+
+def test_except(afs_rebuild_conf:dict, exception, rebuild_csv_data=""):
+    global i
+    i += 1
+    rebuild_path = unpack_path / f"rebuild_{i:02}"
+    mk_rebuild_filesys(rebuild_path, [("a.bin", 0x500),("b.bin", 0x600),("c.bin", 0x700)], afs_rebuild_conf, rebuild_csv_data)
+    a = afstool.Afs()
+    try:
+        a.rebuild(rebuild_path)
+        raise Exception(f"Error while rebuilding {rebuild_path}.")
+    except exception:
+        print(f"Valid {exception.__name__} check.")
+
+
+def test_rebuild_repack(afs_rebuild_conf:dict, files:list, raw_data:bytes, rebuild_csv_data:str = "", raw_fd_data:bytes = None):
+    global i
+    i += 1
+    rebuild_path = unpack_path / f"rebuild_{i:02}"
+    mk_rebuild_filesys(rebuild_path, files, afs_rebuild_conf, rebuild_csv_data)
+    rebuilded_repack_path = repack_path / Path(rebuild_path.stem).with_suffix(".afs")
+    a = afstool.Afs()
+    a.rebuild(rebuild_path)
+    # Retrieve FD dates for each files
+    if afs_rebuild_conf["Default"]["filename_directory"] == "True":
+        raw_fd_data = bytearray(raw_fd_data)
+        for j in range(0, len(raw_fd_data), 48):
+            mtime = datetime.fromtimestamp(round((rebuild_path / "root" / (raw_fd_data[j:j+32]).split(b"\x00")[0].decode("utf-8")).stat().st_mtime))
+            raw_fd_data[j+32:j+32+12] = mtime.year.to_bytes(2,"little")+mtime.month.to_bytes(2,"little")+mtime.day.to_bytes(2,"little")+\
+                mtime.hour.to_bytes(2,"little")+mtime.minute.to_bytes(2,"little")+mtime.second.to_bytes(2,"little")
+        raw_data += raw_fd_data.ljust(0x800, b"\x00")
+    a.pack(rebuild_path, rebuilded_repack_path)
+    if rebuilded_repack_path.read_bytes() != raw_data:
+        raise Exception(f"Error - Not the expected repack {rebuilded_repack_path}.")
+    print(f"Success - {rebuild_path}.")
+
+
+def list_bytes(l:list): return b"".join(list(map(lambda x: x.to_bytes(4,"little"), l)))
+
+
 ##################################################
 # afstool.py commands wrappers
 ##################################################
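Editorial note (not part of the commit): test_rebuild_repack above patches the filename directory (FD) in 48-byte steps — a 32-byte null-padded filename followed by six 2-byte little-endian fields for the file's mtime (year, month, day, hour, minute, second); the last 4 bytes of each entry are left untouched, and they are zero in these fixtures, which use fd_last_attribute_type = "unknown". A minimal standalone sketch of one such entry; fd_entry is a hypothetical helper, not part of afstool:

# Hypothetical illustration of one 48-byte FD entry as patched by test_rebuild_repack.
from datetime import datetime

def fd_entry(filename: str, mtime: datetime) -> bytes:
    name = filename.encode("utf-8").ljust(32, b"\x00")        # bytes 0-31: null-padded name
    date = b"".join(v.to_bytes(2, "little") for v in (
        mtime.year, mtime.month, mtime.day,
        mtime.hour, mtime.minute, mtime.second))              # bytes 32-43: mtime fields
    return name + date + b"\x00" * 4                          # bytes 44-47: last attribute (zero here)

assert len(fd_entry("a.bin", datetime(2022, 2, 6, 21, 38, 13))) == 0x30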
@@ -196,9 +265,12 @@ def afstool_unpack(afs_path:Path, folder_path:Path):
 def afstool_stats(path:Path):
     if os.system(f"python afstool.py -s \"{path}\" > NUL") != 0:
         raise Exception("Error while getting stats.")
+def afstool_rebuild(folder_path:Path):
+    if os.system(f"python afstool.py -r \"{folder_path}\"") != 0:
+        raise Exception("Error while rebuilding.")

-TEST_COUNT = 7
+TEST_COUNT = 10
 start = time()

 print("###############################################################################")
@@ -251,7 +323,7 @@ repack_path.mkdir()
 for folder_path in unpack_path.glob("*"):
     afstool_pack(folder_path, repack_path / Path(folder_path.stem).with_suffix('.afs'))

-# compare repack_path afss_path
+# compare afss_path repack_path
 compare_folders(afss_path, repack_path)

 shutil.rmtree(repack_path)
@@ -289,10 +361,10 @@ repack_path.mkdir()
 # repack unpack_path repack_path
 for folder_path in unpack_path.glob("*"):
     try:
-        afs = Afs()
+        afs = afstool.Afs()
         afs.pack(folder_path, repack_path / Path(folder_path.stem).with_suffix('.afs'))
         raise Exception(f"Error - Invalid file len check. Must raise an exception.")
-    except AfsInvalidFileLenError:
+    except afstool.AfsInvalidFileLenError:
         print(f"Correct AfsInvalidFileLenError - {folder_path}")

 shutil.rmtree(repack_path)
@@ -302,16 +374,224 @@ print(f"# TEST 7/{TEST_COUNT}")
 print("# Comparing [unpack_path]->patch(blocks - 1)->pack->unpack->[unpack2_path].")
 print("###############################################################################")
 # Patch unpack files with 1 block less
-for file_path in unpack_path.glob("*/root/*"):
-    patch_all_bytes(file_path, file_path.stat().st_size - Afs.ALIGN)
+for folder_path in unpack_path.glob("*"):
+    print(f"Patching {folder_path}...")
+    for file_path in folder_path.glob("root/*"):
+        patch_all_bytes(file_path, file_path.stat().st_size - afstool.Afs.ALIGN)
 repack_unpack2_compare()
+shutil.rmtree(unpack_path)
+
+print("###############################################################################")
+print(f"# TEST 8/{TEST_COUNT}")
+print("# Comparing [afss_path]->unpack->rebuild->pack->[repack_path].")
+print("###############################################################################")
+unpack_path.mkdir()
+repack_path.mkdir()
+# unpack afss_path unpack_path
+for afs_path in afss_path.glob("*"):
+    afstool_unpack(afs_path, unpack_path / afs_path.stem)
+# rebuild unpack_path
+for folder_path in unpack_path.glob("*"):
+    afstool_rebuild(folder_path)
+
+config = ConfigParser()
+# pack unpack_path repack_path
+for folder_path in unpack_path.glob("*"):
+    config.read(folder_path / "sys" / "afs_rebuild.conf")
+    if config["Default"]["filename_directory"] == "True":
+        if config["FilenameDirectory"]["fd_last_attribute_type"] == "unknown":
+            continue
+    afstool_pack(folder_path, repack_path / Path(folder_path.stem).with_suffix(".afs"))
+# compare afss_path repack_path when fd_last_attribute_type != unknown
+for file_path in repack_path.glob("*"):
+    afs_path = afss_path / file_path.name
+    if not compare_files(file_path, afs_path):
+        raise Exception(f"Error - \"{file_path}\" and \"{afs_path}\" are different.")
+shutil.rmtree(repack_path)
+shutil.rmtree(unpack_path)
+
+print("###############################################################################")
+print(f"# TEST 9/{TEST_COUNT}")
+print("# Testing exceptions - (afs_rebuild.conf & afs_rebuild.csv).")
+print("###############################################################################")
+unpack_path.mkdir()
+repack_path.mkdir()
+# Here we have to test by limits rebuild command with every params
+afs_rebuild_conf1 = {
+    "Default": {
+        "AFS_MAGIC": "0x41465300",
+        "files_rebuild_strategy": "auto",
+        "filename_directory": "False",
+    }
+}
+afs_rebuild_conf2 = {
+    "Default": {
+        "AFS_MAGIC": "0x41465300",
+        "files_rebuild_strategy": "auto",
+        "filename_directory": "True",
+    },
+    "FilenameDirectory": {
+        "toc_offset_of_fd_offset": "auto",
+        "fd_offset": "auto",
+        "fd_last_attribute_type": "unknown"
+    }
+}
+
+i = -1
+afs_rebuild_conf1["Default"]["filename_directory"] = "abcd"
+test_except(afs_rebuild_conf1, afstool.AfsFilenameDirectoryValueError, "b.bin/0x1/0x1000/b.bin")
+afs_rebuild_conf1["Default"]["filename_directory"] = "False"
+
+for afs_rebuild_conf in [afs_rebuild_conf1, afs_rebuild_conf2]:
+    afs_rebuild_conf["Default"]["AFS_MAGIC"] = "1234"
+    test_except(afs_rebuild_conf, afstool.AfsInvalidMagicNumberError)
+    afs_rebuild_conf["Default"]["AFS_MAGIC"] = "0x41465300";
+    afs_rebuild_conf["Default"]["files_rebuild_strategy"] = "abcd"
+    test_except(afs_rebuild_conf, afstool.AfsInvalidFilesRebuildStrategy)
+    afs_rebuild_conf["Default"]["files_rebuild_strategy"] = "auto"
+    test_except(afs_rebuild_conf, afstool.AfsInvalidFilePathError, "d.bin/0x1/0x1000/d.bin")
+    test_except(afs_rebuild_conf, afstool.AfsInvalidFieldsCountError, "b.bin/0x1/0x1000/b.bin/d")
+    for tmp_conf in ["index", "mixed"]:
+        afs_rebuild_conf["Default"]["files_rebuild_strategy"] = tmp_conf
+        test_except(afs_rebuild_conf, afstool.AfsIndexValueError, "b.bin/123/0x1000/b.bin")
+        test_except(afs_rebuild_conf, afstool.AfsIndexOverflowError, "b.bin/0x3/0x1000/b.bin")
+        test_except(afs_rebuild_conf, afstool.AfsIndexCollisionError, "b.bin/0x1/0x1000/b.bin\nc.bin/0x1/0x2000/c.bin")
+    for tmp_conf in ["offset", "mixed"]:
+        afs_rebuild_conf["Default"]["files_rebuild_strategy"] = tmp_conf
+        test_except(afs_rebuild_conf, afstool.AfsOffsetValueError, "b.bin/0x1/123/b.bin")
+        test_except(afs_rebuild_conf, afstool.AfsOffsetAlignError, "b.bin/0x1/0x555/b.bin")
+        test_except(afs_rebuild_conf, afstool.AfsOffsetCollisionError, "b.bin/0x1/0x8000/b.bin\nc.bin/0x2/0x8000/c.bin")
+afs_rebuild_conf1["Default"]["files_rebuild_strategy"] = "auto"
+afs_rebuild_conf2["Default"]["files_rebuild_strategy"] = "auto"
+
+afs_rebuild_conf = afs_rebuild_conf2
+afs_rebuild_conf["FilenameDirectory"]["toc_offset_of_fd_offset"] = "abcd"
+test_except(afs_rebuild_conf, afstool.AfsFdOffsetOffsetValueError)
+afs_rebuild_conf["FilenameDirectory"]["toc_offset_of_fd_offset"] = "auto"
+afs_rebuild_conf["FilenameDirectory"]["fd_offset"] = "abcd"
+test_except(afs_rebuild_conf, afstool.AfsFdOffsetValueError)
+afs_rebuild_conf["FilenameDirectory"]["fd_offset"] = "auto"
+afs_rebuild_conf["FilenameDirectory"]["fd_last_attribute_type"] = "abcd"
+test_except(afs_rebuild_conf, afstool.AfsFdLastAttributeTypeValueError)
+afs_rebuild_conf["FilenameDirectory"]["fd_last_attribute_type"] = "unknown"
+afs_rebuild_conf["FilenameDirectory"]["fd_offset"] = "0x1000"
+test_except(afs_rebuild_conf, afstool.AfsFdOffsetCollisionError, "a.bin/auto/0x1000/a.bin")
+afs_rebuild_conf["FilenameDirectory"]["fd_offset"] = "auto"
print("###############################################################################")
print(f"# TEST 10/{TEST_COUNT}")
print("# Testing rebuild - (afs_rebuild.conf & afs_rebuild.csv).")
print("###############################################################################")
tmp_count = 9
raw_data = tmp_count * [None]
raw_header_data = tmp_count * [None]
raw_fd_header = tmp_count * [None]
raw_files_data = tmp_count * [None]
raw_fd_data = tmp_count * [None]
# toc: 00000000
raw_header_data[0] = b"\x41\x46\x53\x20"+list_bytes([0x1, 0x800, 0x800])
raw_fd_header[0] = list_bytes([0x1000, 0x30])
raw_files_data[0] = b"\xff"*0x800
raw_fd_data[0] = b"00000000".ljust(0x30, b"\x00")
# toc: 00000000
raw_header_data[1] = b"\x41\x46\x53\x00"+list_bytes([0x1, 0x800, 0x800])
raw_fd_header[1] = raw_fd_header[0]
raw_files_data[1] = raw_files_data[0]
raw_fd_data[1] = raw_fd_data[0]
# toc: bac content: bac
raw_header_data[2] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x800, 0x600, 0x1000, 0x500, 0x1800, 0x700])
raw_fd_header[2] = list_bytes([0x2000, 0x90])
raw_files_data[2] = (b"\xff"*0x600).ljust(0x800, b"\x00") + (b"\xff"*0x500).ljust(0x800, b"\x00") + (b"\xff"*0x700).ljust(0x800, b"\x00")
raw_fd_data[2] = b"b.bin".ljust(0x30, b"\x00")+b"a.bin".ljust(0x30, b"\x00")+b"c.bin".ljust(0x30, b"\x00")
# toc: abc content: acb
raw_header_data[3] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x800, 0x500, 0x8000, 0x600, 0x1000, 0x700])
raw_fd_header[3] = list_bytes([0x8800, 0x90])
raw_files_data[3] = ((b"\xff"*0x500).ljust(0x800, b"\x00") + b"\xff"*0x700).ljust(0x7800, b"\x00") + (b"\xff"*0x600).ljust(0x800, b"\x00")
raw_fd_data[3] = b"a.bin".ljust(0x30, b"\x00")+b"b.bin".ljust(0x30, b"\x00")+b"c.bin".ljust(0x30, b"\x00")
# toc: abc content: cba - free(0x800-0x1000) b->0x1000 a-len=0x1000
raw_header_data[4] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x1800, 0x900, 0x1000, 0x600, 0x800, 0x700])
raw_fd_header[4] = list_bytes([0x2800, 0x90])
raw_files_data[4] = (b"\xff"*0x700).ljust(0x800, b"\x00") + (b"\xff"*0x600).ljust(0x800, b"\x00") + (b"\xff"*0x900).ljust(0x1000, b"\x00")
raw_fd_data[4] = raw_fd_data[3]
# a=auto/3000 b=2000/500 c=auto/1000 d=auto/2000
# toc: abcd content: cdba
raw_header_data[5] = b"\x41\x46\x53\x00"+list_bytes([0x4, 0x4800, 0x2901, 0x2800, 0x1902, 0x800, 0x903, 0x1800, 0x904])
raw_fd_header[5] = list_bytes([0x7800, 0xc0])
raw_files_data[5] = (b"\xff"*0x903).ljust(0x1000, b"\x00") + (b"\xff"*0x904).ljust(0x1000, b"\x00") + (b"\xff"*0x1902).ljust(0x2000, b"\x00") + (b"\xff"*0x2901).ljust(0x3000, b"\x00")
raw_fd_data[5] = b"a.bin".ljust(0x30, b"\x00")+b"b.bin".ljust(0x30, b"\x00")+b"c.bin".ljust(0x30, b"\x00")+b"d.bin".ljust(0x30, b"\x00")
# toc: bac content: acb
raw_header_data[6] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x8000, 0x600, 0x800, 0x500, 0x1000, 0x700])
raw_fd_header[6] = list_bytes([0x8800, 0x90])
raw_files_data[6] = ((b"\xff"*0x500).ljust(0x800, b"\x00") + b"\xff"*0x700).ljust(0x7800, b"\x00") + (b"\xff"*0x600).ljust(0x800, b"\x00")
raw_fd_data[6] = raw_fd_data[2]
# test mixed with (c-0-off=0x3800;b-1-l=0x2000;a-2-l=0x1000)
# toc: cba - content:bac sort filename then index : abc cba then allocate offset: b=800&len=2000 a=2800
raw_header_data[7] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x3800, 0x700, 0x800, 0x1901, 0x2800, 0x902])
raw_fd_header[7] = list_bytes([0x4000, 0x90])
raw_files_data[7] = (b"\xff"*0x1901).ljust(0x2000, b"\x00") + (b"\xff"*0x902).ljust(0x1000, b"\x00") + (b"\xff"*0x700).ljust(0x800, b"\x00")
raw_fd_data[7] = b"c.bin".ljust(0x30, b"\x00")+b"b.bin".ljust(0x30, b"\x00")+b"a.bin".ljust(0x30, b"\x00")
# toc: bac - content: abc
raw_header_data[8] = b"\x41\x46\x53\x00"+list_bytes([0x3, 0x1000, 0x601, 0x800, 0x702, 0x1800, 0x500])
raw_fd_header[8] = list_bytes([0x2000, 0x90])
raw_files_data[8] = (b"\xff"*0x702).ljust(0x800, b"\x00") + (b"\xff"*0x601).ljust(0x800, b"\x00") + (b"\xff"*0x500).ljust(0x800, b"\x00")
raw_fd_data[8] = raw_fd_data[2]
afs_rebuild_conf3 = copy.deepcopy(afs_rebuild_conf2)
afs_rebuild_conf3["FilenameDirectory"]["toc_offset_of_fd_offset"] = "0x500"
afs_rebuild_conf4 = copy.deepcopy(afs_rebuild_conf2)
afs_rebuild_conf4["FilenameDirectory"]["toc_offset_of_fd_offset"] = "0x7f8"
for afs_rebuild_conf in [afs_rebuild_conf1, afs_rebuild_conf2, afs_rebuild_conf3, afs_rebuild_conf4]:
for j in range(0, len(raw_data)):
raw_data[j] = raw_header_data[j]
if afs_rebuild_conf["Default"]["filename_directory"] == "True":
pad_len = int(afs_rebuild_conf["FilenameDirectory"]["toc_offset_of_fd_offset"][2:], 16) if afs_rebuild_conf["FilenameDirectory"]["toc_offset_of_fd_offset"] != "auto" else 0
raw_data[j] = raw_data[j].ljust(pad_len, b"\x00")+raw_fd_header[j]
raw_data[j] = (raw_data[j]).ljust(0x800, b"\x00") + raw_files_data[j]
afs_rebuild_conf["Default"]["AFS_MAGIC"] = "0x41465320"
test_rebuild_repack(afs_rebuild_conf, [("00000000", 0x800)], raw_data[0], raw_fd_data=raw_fd_data[0])
afs_rebuild_conf["Default"]["AFS_MAGIC"] = "0x41465300"
test_rebuild_repack(afs_rebuild_conf, [("00000000", 0x800)], raw_data[1], raw_fd_data=raw_fd_data[1])
for tmp_conf in ["mixed", "index"]:
afs_rebuild_conf["Default"]["files_rebuild_strategy"] = tmp_conf
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x500),("b.bin", 0x600),("c.bin", 0x700)], raw_data[2], "b.bin/0x0/auto/b.bin", raw_fd_data=raw_fd_data[2])
for tmp_conf in ["offset", "mixed"]:
afs_rebuild_conf["Default"]["files_rebuild_strategy"] = tmp_conf # sort files by offset
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x500),("b.bin", 0x600),("c.bin", 0x700)], raw_data[3], "b.bin/auto/0x8000/b.bin", raw_fd_data=raw_fd_data[3])
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x900),("b.bin", 0x600),("c.bin", 0x700)], raw_data[4], "b.bin/auto/0x1000/b.bin", raw_fd_data=raw_fd_data[4])
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x2901),("b.bin", 0x1902),("c.bin", 0x903),("d.bin", 0x904)], raw_data[5], "a.bin/auto/auto/a.bin\nb.bin/auto/0x2800/b.bin\nc.bin/auto/auto/c.bin\nd.bin/auto/auto/d.bin", raw_fd_data=raw_fd_data[5])
afs_rebuild_conf["Default"]["files_rebuild_strategy"] = "mixed"
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x500),("b.bin", 0x600),("c.bin", 0x700)], raw_data[6], "b.bin/0x0/0x8000/b.bin", raw_fd_data=raw_fd_data[6])
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x902),("b.bin", 0x1901),("c.bin", 0x700)], raw_data[7], "c.bin/0x0/0x3800/c.bin\nb.bin/0x1/auto/b.bin\na.bin/0x2/auto/a.bin", raw_fd_data=raw_fd_data[7])
test_rebuild_repack(afs_rebuild_conf, [("a.bin", 0x702),("b.bin", 0x601),("c.bin", 0x500)], raw_data[8], "c.bin/auto/0x1800/c.bin\nb.bin/auto/0x1000/b.bin\na.bin/0x1/0x800/a.bin", raw_fd_data=raw_fd_data[8])
print("###############################################################################") print("###############################################################################")
print("# Cleaning test folders.") print("# Cleaning test folders.")
print("###############################################################################") print("###############################################################################")
# Remove tests folders # Remove tests folders
shutil.rmtree(unpack_path) shutil.rmtree(unpack_path)
shutil.rmtree(repack_path)
end = time() end = time()
print("###############################################################################") print("###############################################################################")