# NeoGF/pzztest.py

import argparse
import hashlib
import os
from pathlib import Path
import pzztool
import shutil
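
# Note: pzztool.py must be importable from this folder and is also invoked as a CLI
# (the batch tests shell out to "python pzztool.py ..." via os.system).
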
TPL_MAGIC_NUMBER = b"\x00\x20\xAF\x30" # http://virtualre.rf.gd/index.php/TPL_(Format_de_fichier)
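# TPL_MAGIC_NUMBER is the 4-byte magic found at the start of GameCube TPL texture files (see the -tctplh option)
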
unpack_path = Path("unpack")
repack_path = Path("repack")
afsdump_path = Path("afs_data/root")
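# The batch tests delete and recreate unpack_path / repack_path;
# afsdump_path is expected to contain the extracted root of afs_data.afs (used by -tcp)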


# Compare the sha256 of every pzz file in folder1 with the file of the same name in folder2
# -> print the file name when the hashes differ
def verify_sha256(folder1: Path, folder2: Path):
    invalid_files_count = 0

    for pzz_path in folder1.glob("*.pzz"):
        if hashlib.sha256(pzz_path.read_bytes()).hexdigest() != hashlib.sha256((folder2 / pzz_path.name).read_bytes()).hexdigest():
            print(f"ERROR - INVALID FILE : {pzz_path.name}")
            invalid_files_count += 1
    print(f"Invalid files : {invalid_files_count}/{len(list(folder1.glob('*.pzz')))}")


# Compare the sha256 of two files
# -> return True when both hashes match
def verify_sha256_2(file1: Path, file2: Path):
    return hashlib.sha256(file1.read_bytes()).hexdigest() == hashlib.sha256(file2.read_bytes()).hexdigest()


def get_argparser():
    parser = argparse.ArgumentParser(description='TEST TOOL')
    parser.add_argument('input_path', metavar='INPUT', help='input path (pzz folder, or subfile index for -tcp)')
    parser.add_argument('output_path', metavar='OUTPUT', help='output path (for -tcp : file name to compare inside afs_data)', nargs='?', default="")

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-tdc', '--test-decompress-compress', action='store_true', help="decompress then recompress every file of INPUT and compare the sha256 with the original")
    group.add_argument('-tbup', '--test-batch-unpack-pack', action='store_true', help="""
        -tbup source_pzz_folder
        source_pzz_folder : put all pzz in this folder
        unpack_path : will be created with all unpacked pzz from the pzz folder
        repack_path : will be created with all packed pzz from the unpack_path folder
        print file_name when the sha256 differs between source_pzz_folder and the repack_path folder""")
    group.add_argument('-tbunpzzpzz', '--test-batch-unpzz-pzz', action='store_true', help="""
        -tbunpzzpzz source_pzz_folder
        source_pzz_folder : put all pzz in this folder
        unpack_path : will be created with all unpzz pzz from the pzz folder
        repack_path : will be created with all pzz (pzz_folder) from the unpack_path folder
        print file_name when the sha256 differs between source_pzz_folder and the repack_path folder""")
    group.add_argument('-tctplh', '--test-check-tpl-headers', action='store_true', help="-tctplh afs_data_folder : check all file headers in the afs_data and print the ones that start with the TPL magic number")
    group.add_argument('-tcd', '--test-check-decompress', action='store_true', help="""
        pzz : put all pzz in this folder
        then type "pzztest.py -tcd pzz"
        The script will then check that the TPLs are correctly decompressed using their specific characteristics""")
    group.add_argument('-tcp', '--test-compare-position', action='store_true', help="compare plxxxx.pzz subfiles with plxxxx files inside afs_data.afs")
    return parser


if __name__ == '__main__':
    args = get_argparser().parse_args()

    p_input = Path(args.input_path)
    p_output = Path(args.output_path)

    if args.test_decompress_compress:
        print("# TEST : DECOMPRESS COMPRESS")
        for pzzp_path in p_input.glob('*'):
            original_bytes = pzzp_path.read_bytes()
            decomp_bytes = pzztool.pzz_decompress(original_bytes)
            recomp_bytes = pzztool.pzz_compress(decomp_bytes)

            original_digest = hashlib.sha256(original_bytes).hexdigest()
            recomp_digest = hashlib.sha256(recomp_bytes).hexdigest()
            if original_digest != recomp_digest:
                print(f"Invalid sha256 for {pzzp_path} : ({original_digest}) ({recomp_digest})")
    elif args.test_batch_unpack_pack:
        print("# TEST : BATCH UNPACK PACK")
        # Remove unpack_path and repack_path
        if unpack_path.is_dir():
            shutil.rmtree(unpack_path)
        if repack_path.is_dir():
            shutil.rmtree(repack_path)

        if os.system(f"python pzztool.py -bu {p_input} {unpack_path}") != 0:
            raise Exception("Error while batch unpack.")
        if os.system(f"python pzztool.py -bp {unpack_path} {repack_path}") != 0:
            raise Exception("Error while batch pack.")
        verify_sha256(p_input, repack_path)
    elif args.test_batch_unpzz_pzz:
        # Remove unpack_path and repack_path
        if unpack_path.is_dir():
            shutil.rmtree(unpack_path)
        if repack_path.is_dir():
            shutil.rmtree(repack_path)

        if os.system(f"python pzztool.py -bunpzz {p_input} {unpack_path}") != 0:
            raise Exception("Error while batch unpzz.")
        if os.system(f"python pzztool.py -bpzz {unpack_path} {repack_path}") != 0:
            raise Exception("Error while batch pzz.")
        verify_sha256(p_input, repack_path)

        """
        if pzz : U -> decomp / already tested because unpzz leaves it decompressed by default
        if pzz : U -> comp / has to be tested
        if pzz : C -> decomp / already tested because unpzz decompresses by default
        if pzz : C -> comp / has to be tested
        """
        # Remove repack_path
        shutil.rmtree(repack_path)

        # Compress every subfile of unpack_path (if U -> comp ; if C -> comp)
        for pzzpart_path in unpack_path.glob('*/*'):
            # create a new compressed file without removing the original file
            if os.system(f"python pzztool.py -c {pzzpart_path}") != 0:
                raise Exception("Error while compress.")
            # then remove the original file
            os.remove(pzzpart_path)

        if os.system(f"python pzztool.py -bpzz {unpack_path} {repack_path}") != 0:
            raise Exception("Error while batch pzz.")
        verify_sha256(p_input, repack_path)
    elif args.test_check_decompress:
        print("# TEST : CHECK DECOMPRESS")
        if os.system(f"python pzztool.py -bunpzz {p_input} {unpack_path}") != 0:
            raise Exception("Error while batch unpzz.")

        invalid_files_count = 0
        total = 0
        # check that the length of every TPL is a multiple of 32
        for tpl_path in unpack_path.glob("**/*.tpl"):
            if tpl_path.is_file():
                total += 1
                if (tpl_path.stat().st_size % 32) != 0:
                    print(f"Invalid TPL file length modulo 32 ({tpl_path.stat().st_size % 32}) - {tpl_path}")
                    invalid_files_count += 1
        print(f"Invalid files : {invalid_files_count}/{total}")
    elif args.test_compare_position:
        # afsdump_path contains all the unpacked files from afs_data.afs (root of the AFS dump)
        # unpack_path contains the result of pzztool.py -bunpzz on all the pzz files
        # Comparisons to run (they prove that the subfiles of the borgs (plxxxx.pzz) are positional
        # and identical to the pl files at the root of afs_data) :
        #     pzztest.py -tcp 0 data.bin
        #     some afs_data files are named data2 or data3 and they are sometimes absent
        #     pzztest.py -tcp 2 hit.bin
        #     pzztest.py -tcp 3 mot.bin
        #     pzztest.py -tcp 4 _mdl.arc
        #     pzztest.py -tcp 5 b_mdl.arc
        #     pzztest.py -tcp 6 g_mdl.arc
        #     pzztest.py -tcp 7 s_mdl.arc
        #     pzztest.py -tcp 8 c_mdl.arc
        #     pzztest.py -tcp 9 k_mdl.arc
        for pzzpart_path in unpack_path.glob(f"**/00{p_input}*"):
            file_path = afsdump_path / pzzpart_path.parent.name / p_output

            if pzzpart_path.parent.name[:2] == "pl":
                if not file_path.is_file():
                    print(f"File doesn't exist : {file_path}")
                elif pzzpart_path.stat().st_size == 0:
                    print(f"File is empty : {pzzpart_path}")
                elif not verify_sha256_2(pzzpart_path, file_path):
                    print(f"DIFFERENCE : {pzzpart_path} - {file_path}")