From 4b5b5b602651d6beebaff52a015bfb5eaf892c9f Mon Sep 17 00:00:00 2001
From: tmpz23 <28760271+tmpz23@users.noreply.github.com>
Date: Sun, 16 Jan 2022 17:38:39 +0100
Subject: [PATCH] Update pzztest.py

---
 pzztest.py | 139 +++++++++++++++++++++++++++--------------------------
 1 file changed, 72 insertions(+), 67 deletions(-)

diff --git a/pzztest.py b/pzztest.py
index 14cff10..3a2e7b8 100644
--- a/pzztest.py
+++ b/pzztest.py
@@ -8,25 +8,25 @@ import shutil
 
 TPL_MAGIC_NUMBER = b"\x00\x20\xAF\x30" # http://virtualre.rf.gd/index.php/TPL_(Format_de_fichier)
 
+unpack_path = Path("unpack")
+repack_path = Path("repack")
+afsdump_path = Path("afs_data/root")
 
-# compare le sha256 de chaque PZZ des dossiers passés en argument
-# -> affiche le nom de fichier en cas de différence
+# compare the sha256 of every pzz in folder1 and folder2
+# -> print the filename if there is a difference
 def verify_sha256(folder1: Path, folder2: Path):
     invalid_files_count = 0
-    for pzz_file_name in os.listdir(folder1):
-        with (folder1 / pzz_file_name).open("rb") as f1, (folder2 / pzz_file_name).open("rb") as f2:
-            if hashlib.sha256( f1.read() ).hexdigest() != hashlib.sha256( f2.read() ).hexdigest() :
-                print(f"ERROR - INVALID FILE : {pzz_file_name}")
-                invalid_files_count +=1
-    print(f"Invalid files : {invalid_files_count}/{len(os.listdir(folder1))}")
+    for pzz_path in folder1.glob("*.pzz"):
+        if hashlib.sha256( pzz_path.read_bytes() ).hexdigest() != hashlib.sha256( (folder2 / pzz_path.name).read_bytes() ).hexdigest() :
+            print(f"ERROR - INVALID FILE : {pzz_path.name}")
+            invalid_files_count +=1
+    print(f"Invalid files : {invalid_files_count}/{len(list(folder1.glob('*')))}")
 
-# compare le sha256 des deux fichiers passés en argument
-# -> affiche le nom de fichier en cas de différence
+
+# compare the sha256 of two files
+# -> return True if both files have the same hash
 def verify_sha256_2(file1: Path, file2: Path):
-    with file1.open("rb") as f1, file2.open("rb") as f2:
-        if hashlib.sha256( f1.read() ).hexdigest() != hashlib.sha256( f2.read() ).hexdigest() :
-            return False
-    return True
+    return hashlib.sha256( file1.read_bytes() ).hexdigest() == hashlib.sha256( file2.read_bytes() ).hexdigest()
 
 
 def get_argparser():
@@ -39,15 +39,15 @@ def get_argparser():
     group.add_argument('-tbup', '--test-batch-unpack-pack', action='store_true', help="""
         -tbup source_pzz_folder
         source_pzz_folder : put all pzz in this folder
-        pzzu : will be created with all unpacked pzz from pzz folder
-        pzz2 : will be created with all packed pzz from pzzu folder
-        print file_name when sha256 is different between source_pzz_folder and pzz2 folder""")
+        unpack_path : will be created with all unpacked pzz from pzz folder
+        repack_path : will be created with all packed pzz from unpack_path folder
+        print file_name when sha256 is different between source_pzz_folder and repack_path folder""")
     group.add_argument('-tbunpzzpzz', '--test-batch-unpzz-pzz', action='store_true', help="""
         -tbunpzzpzz source_pzz_folder
        source_pzz_folder : put all pzz in this folder
-        pzzu : will be created with all unpzz pzz from pzz folder
-        pzz2 : will be created with all pzz(pzz_folder) from pzzu folder
-        print file_name when sha256 is different between source_pzz_folder and pzz2 folder""")
+        unpack_path : will be created with all unpzz pzz from pzz folder
+        repack_path : will be created with all pzz(pzz_folder) from unpack_path folder
+        print file_name when sha256 is different between source_pzz_folder and repack_path folder""")
     group.add_argument('-tctplh', '--test-check-tpl-headers', action='store_true', help="-tctplh afs_data_folder : check all files headers in the afs_data and print those who have the tpl magicfile")
     group.add_argument('-tcd', '--test-check-decompress', action='store_true', help="""
         pzz : put all pzz in this folder
@@ -66,9 +66,8 @@ if __name__ == '__main__':
 
         print("# TEST : DECOMPRESS COMPRESS")
         listofinvalid = []
-        for filename in os.listdir(args.input_path):
-            file = open(os.path.join(args.input_path, filename), 'rb')
-            original_bytes = file.read()
+        for pzzp_path in p_input.glob('*'):
+            original_bytes = pzzp_path.read_bytes()
 
             decomp_bytes = pzztool.pzz_decompress(original_bytes)
             recomp_bytes = pzztool.pzz_compress(decomp_bytes)
@@ -76,68 +75,74 @@ if __name__ == '__main__':
             recomp_digest = hashlib.sha256(recomp_bytes).hexdigest()
 
             if original_digest != recomp_digest:
-                listofinvalid.append(f"{filename} : ({original_digest}) ({recomp_digest})")
-            file.close()
-
-        for invalid in listofinvalid:
-            print(invalid)
+                print(f"Invalid sha256 for {pzzp_path} : ({original_digest}) ({recomp_digest})")
     elif args.test_batch_unpack_pack:
         print("# TEST : BATCH UNPACK PACK")
+        # Remove unpack_path and repack_path
+        if unpack_path.is_dir():
+            shutil.rmtree(unpack_path)
+        if repack_path.is_dir():
+            shutil.rmtree(repack_path)
 
-        os.system(f"python pzztool.py -bu {args.input_path} pzzu")
-        os.system("python pzztool.py -bp pzzu pzz2")
-        verify_sha256(p_input, Path("pzz2"))
+        if os.system(f"python pzztool.py -bu {p_input} {unpack_path}") != 0:
+            raise Exception("Error while batch unpack.")
+        if os.system(f"python pzztool.py -bp {unpack_path} {repack_path}") != 0:
+            raise Exception("Error while batch pack.")
+        verify_sha256(p_input, repack_path)
     elif args.test_batch_unpzz_pzz:
-        os.system(f"python pzztool.py -bunpzz {args.input_path} pzzu")
-        os.system("python pzztool.py -bpzz pzzu pzz2")
-        verify_sha256(p_input, Path("pzz2"))
+        # Remove unpack_path and repack_path
+        if unpack_path.is_dir():
+            shutil.rmtree(unpack_path)
+        if repack_path.is_dir():
+            shutil.rmtree(repack_path)
 
-        # Clean du dossier pzz2 généré par le script
-        shutil.rmtree("pzz2")
+        if os.system(f"python pzztool.py -bunpzz {p_input} {unpack_path}") != 0:
+            raise Exception("Error while batch unpzz.")
+        if os.system(f"python pzztool.py -bpzz {unpack_path} {repack_path}") != 0:
+            raise Exception("Error while batch pzz.")
+        verify_sha256(p_input, repack_path)
 
         """
-            si pzz : U -> decomp / testé sur les fichiers car l'unpzz décompresse par défaut
-            si pzz : U -> comp / à tester
-            si pzz : C -> decomp / testé sur les fichiers car l'unpzz décompresse par défaut
-            si pzz : C -> comp / à tester
+            if pzz : U -> decomp / already tested because unpzz decompresses by default
+            if pzz : U -> comp / has to be tested
+            if pzz : C -> decomp / already tested because unpzz decompresses by default
+            if pzz : C -> comp / has to be tested
        """
-        # On parcours tous les dossiers : si U -> comp ; si C -> comp : compression de tous les fichiers peu importe le type
-        for pzz_folder in os.listdir("pzzu"):
-            for pzz_file_part_name in os.listdir("pzzu/"+pzz_folder):
-                # créé un nouveau fichier compressé, à côté de l'original
-                os.system(f"python pzztool.py -c pzzu/{pzz_folder}/{pzz_file_part_name}")
-                # supprime l'original
-                os.remove(f"pzzu/{pzz_folder}/{pzz_file_part_name}")
-        os.system("python pzztool.py -bpzz pzzu pzz2")
-        verify_sha256(p_input, Path("pzz2"))
-    elif args.test_check_tpl_headers:
-        # Démontre que SEUL les TPLs ont ce magicnumber
-        # TEST OK
-        print("# TEST : CHECK TPLs HEADERS")
-        for afs_data_filename in os.listdir(p_input):
-            with open(p_input / afs_data_filename, "rb") as afs_data_file:
-                if TPL_MAGIC_NUMBER == afs_data_file.read(4) and Path(afs_data_filename).suffix != ".tpl":
-                    print(f"TPL magicfile found : afs_data.afs/{afs_data_filename}")
+        # Remove repack_path
+        shutil.rmtree(repack_path)
+
+        # Compress every file in every unpack_path folder, whatever its type (if U -> comp ; if C -> comp)
+        for pzzpart_path in unpack_path.glob('*/*'):
+            # create a new compressed file next to the original one
+            if os.system(f"python pzztool.py -c {pzzpart_path}") != 0:
+                raise Exception("Error while compress.")
+            # remove the original
+            os.remove(f"{pzzpart_path}")
+
+        if os.system(f"python pzztool.py -bpzz {unpack_path} {repack_path}") != 0:
+            raise Exception("Error while batch pzz.")
+        verify_sha256(p_input, repack_path)
     elif args.test_check_decompress:
         print("# TEST : CHECK DECOMPRESS")
-        os.system(f"python pzztool.py -bunpzz {args.input_path} pzzu")
+        if os.system(f"python pzztool.py -bunpzz {p_input} {unpack_path}") != 0:
+            raise Exception("Error while batch unpzz.")
 
         invalid_files_count = 0
         total = 0
         # check that all TPLs length is a multiple of 32
-        for p in Path("pzzu").glob("**/*.tpl"):
-            if p.is_file():
-                #print(Path(p).stat().st_size, end=' ')
+        for tpl_path in unpack_path.glob("**/*.tpl"):
+            if tpl_path.is_file():
                 total+=1
-                if (Path(p).stat().st_size % 32) != 0:
-                    print(f"Invalid TPL file length modulo 32 ({Path(p).stat().st_size % 32}) - {p}")
+                if (tpl_path.stat().st_size % 32) != 0:
+                    print(f"Invalid TPL file length modulo 32 ({tpl_path.stat().st_size % 32}) - {tpl_path}")
                     invalid_files_count += 1
         print(f"Invalid files : {invalid_files_count}/{total}")
     elif args.test_compare_position:
-        # FULL_AFS_FILE_DUMP contient tous les fichiers de l'afs_data.afs et pzzu le résultat de pzztool.py -bunpzz sur l'ensemble des pzz
-        # Comparaisons à effectuer :
+        # FULL_AFS_FILE_DUMP contains all unpacked files from afs_data.afs
+        # unpack_path contains the result of pzztool.py -bunpzz on all pzz files
+        # Comparisons to run (prove that the borg files (plxxxx.pzz) are positional and identical to the pl files at the root of afs_data) :
         # pzztest.py -tcp 0 data.bin
-        # Les fichiers de l'afs_data sont parfois data2 / data3 ou absents
+        # Some afs_data files are named data2 or data3, and some are absent
         # pzztest.py -tcp 2 hit.bin
         # pzztest.py -tcp 3 mot.bin
         # pzztest.py -tcp 4 _mdl.arc
@@ -147,8 +152,8 @@ if __name__ == '__main__':
         # pzztest.py -tcp 8 c_mdl.arc
         # pzztest.py -tcp 9 k_mdl.arc
 
-        for pzzpart_path in Path("pzzu").glob("**/00"+args.input_path+"*"):
-            file_path = Path("FULL_AFS_FILE_DUMP/"+pzzpart_path.parent.name+args.output_path)
+        for pzzpart_path in unpack_path.glob(f"**/00{p_input}*"):
+            file_path = afsdump_path / pzzpart_path.parent.name / p_output
             if pzzpart_path.parent.name[:2] == "pl":
                 if not file_path.is_file():