#!/usr/bin/env python3
from math import ceil
from pathlib import Path
import shutil
from struct import unpack
from os import listdir
import logging

__version__ = "1.4.2"
__author__ = "rigodron, algoflash, GGLinnk"
__OriginalAuthor__ = "infval"
__license__ = "MIT"
__status__ = "development"


# For more information about the PZZ format:
# http://virtualre.rf.gd/index.php/PZZ_(Gotcha_Force)
BIT_COMPRESSION_FLAG = 0x40000000
FILE_LENGTH_MASK = 0x3FFFFFFF
CHUNK_SIZE = 0x800
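
# PZZ container layout as read/written by pzz_unpack() and pzz_pack() below
# (a summary of this script's behaviour, not an authoritative format specification):
#   0x000              : uint32 big-endian file count
#   0x004              : one uint32 big-endian descriptor per file
#                        - bit 30 (BIT_COMPRESSION_FLAG): set if the entry is compressed
#                        - bits 0-29 (FILE_LENGTH_MASK): stored size in CHUNK_SIZE units
#   0x800 (CHUNK_SIZE) : entry data, one entry after another, each one occupying
#                        descriptor_size * CHUNK_SIZE bytes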

TPL_MAGIC_NUMBER = b"\x00\x20\xAF\x30"  # http://virtualre.rf.gd/index.php/TPL_(Format_de_fichier)
CHD_MAGIC_NUMBER = b"Head"
BIN_HITS_MAGICNUMBER = b"STIH"
TSB_MAGIC_NUMBER = b"TSBD"


def get_file_path(file_content: bytes, path: Path):
    if path.name[5:7] == "pl":  # if this is a plxxxx file
        if path.name[0:3] == "000":
            return path.with_name(path.name + "data").with_suffix(".bin")
        if path.name[0:3] == "002":
            return path.with_name(path.name + "hit").with_suffix(".bin")
        if path.name[0:3] == "003":
            return path.with_name(path.name + "mot").with_suffix(".bin")
    if file_content.startswith(TPL_MAGIC_NUMBER):
        return path.with_suffix(".tpl")
    if file_content.startswith(CHD_MAGIC_NUMBER):
        return path.with_suffix(".chd")
    if file_content.startswith(TSB_MAGIC_NUMBER):
        return path.with_suffix(".tsb")
    if file_content.startswith(BIN_HITS_MAGICNUMBER):
        return path.with_suffix(".bin")
    # Default
    return path.with_suffix(".dat")
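
# For example (file names here are hypothetical, following the "{index:03}{C|U}_{stem}"
# pattern produced by pzz_unpack() below): an entry named "000U_pl0001" from a plxxxx
# archive resolves to "000U_pl0001data.bin", while a non-plxxxx entry whose content
# starts with TPL_MAGIC_NUMBER gets the ".tpl" suffix.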


# Not implemented: would remove the padding at the end of unpacked files.
# Files without padding may themselves end with b"\x00" bytes, so the file
# format would have to be known in order to implement this function.
def remove_padding(file_content: bytearray):
    return file_content
    # return file_content.rstrip(b'\x00')


def bytes_align(bout: bytes):
    # As shown by the file pl080d/006C_pl080d.pzzp, a full extra 0x800-byte chunk
    # is appended when the length is already a multiple of CHUNK_SIZE.
    if len(bout) % CHUNK_SIZE == 0:
        return bout.ljust(CHUNK_SIZE * (len(bout) // CHUNK_SIZE + 1), b"\x00")
    return bout.ljust(CHUNK_SIZE * ceil(len(bout) / CHUNK_SIZE), b"\x00")
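
# For example: a 0x7FF-byte buffer is padded to 0x800 bytes, a 0x801-byte buffer to
# 0x1000 bytes, and a buffer of exactly 0x800 bytes grows to 0x1000 bytes because of
# the extra chunk appended above.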


def pzz_decompress(compressed_bytes: bytes):
    uncompressed_bytes = bytearray()
    compressed_bytes_size = len(compressed_bytes) // 2 * 2

    cb = 0  # control bytes
    cb_bit = -1  # compression flag bit index, rotating from 15 down to 0
    i = 0
    while i < compressed_bytes_size:
        if cb_bit < 0:  # all 16 flag bits have been consumed: read the next control word
            cb = compressed_bytes[i + 1]
            cb |= compressed_bytes[i + 0] << 8
            cb_bit = 15
            i += 2
            continue
        compress_flag = cb & (1 << cb_bit)
        cb_bit -= 1
        if compress_flag:
            c = compressed_bytes[i + 1]
            c |= compressed_bytes[i + 0] << 8

            offset = (c & 0x7FF) * 2
            if offset == 0:
                break  # end of the compressed data
            count = (c >> 11) * 2
            if count == 0:
                i += 2
                c = compressed_bytes[i + 1]
                c |= compressed_bytes[i + 0] << 8
                count = c * 2

            index = len(uncompressed_bytes) - offset
            for j in range(count):
                uncompressed_bytes.append(uncompressed_bytes[index + j])
        else:
            uncompressed_bytes += compressed_bytes[i:i + 2]
        i += 2
    return uncompressed_bytes
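
# A worked example of the scheme above (derived from the code, not from an external
# specification): each big-endian 16-bit control word provides 16 flags, consumed from
# bit 15 down to bit 0. Flag 0 means the next 2 bytes are copied verbatim; flag 1 means
# the next 16-bit word is a back-reference, e.g. 0x1004 -> copy (0x1004 >> 11) * 2 = 4
# bytes starting (0x1004 & 0x7FF) * 2 = 8 bytes back. A count field of 0 means the real
# count follows in an extra 16-bit word, and an offset of 0 ends the stream.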


def pzz_compress(uncompressed_bytes: bytes):
    compressed_bytes = bytearray(2)
    size_uncompressed_bytes = len(uncompressed_bytes) // 2 * 2

    cb = 0  # control bytes
    cb_bit = 15  # compression flag bit index, rotating from 15 down to 0
    cb_pos = 0
    i = 0
    while i < size_uncompressed_bytes:
        start = max(i - 4094, 0)  # e.g. start = 2 when i = 4096 (0x800 * 2)
        count_r = 0
        max_i = -1
        #######################################################
        # Search for the longest 2-byte-aligned match of the data
        # at i within the previous 4094 bytes.
        #######################################################
        while True:
            # start = index of the first occurrence of uncompressed_bytes[i:i+2]
            # between start and i+1; the search only covers the last 4094 bytes.
            start = uncompressed_bytes.find(uncompressed_bytes[i:i + 2], start, i + 1)
            # The two studied bytes do not appear in the last 4094 bytes.
            if start == -1:
                break
            # If the first occurrence is not at an index that is a multiple of 2, ignore it.
            if start % 2 != 0:
                start += 1
                continue
            count = 2
            while i < size_uncompressed_bytes - count and \
                    count < 0xFFFF * 2 and \
                    uncompressed_bytes[start + count] == uncompressed_bytes[i + count] and \
                    uncompressed_bytes[start + count + 1] == uncompressed_bytes[i + count + 1]:
                count += 2
            if count_r < count:
                count_r = count
                max_i = start
            start += 2
        start = max_i
        #######################################################
        # Emit either a back-reference (match of at least 4 bytes)
        # or a literal 2-byte pair.
        #######################################################
        compress_flag = 0
        if count_r >= 4:
            compress_flag = 1
            offset = (i - start) // 2
            count_r //= 2
            c = offset
            if count_r <= 0x1F:
                c |= count_r << 11
                compressed_bytes += c.to_bytes(2, "big")
            else:
                compressed_bytes += c.to_bytes(2, "big") + count_r.to_bytes(2, "big")
            i += count_r * 2
        else:
            compressed_bytes += uncompressed_bytes[i:i + 2]
            i += 2
        cb |= (compress_flag << cb_bit)
        cb_bit -= 1
        if cb_bit < 0:
            compressed_bytes[cb_pos:cb_pos + 2] = cb.to_bytes(2, "big")
            cb = 0
            cb_bit = 15
            cb_pos = len(compressed_bytes)
            compressed_bytes += b"\x00\x00"

    cb |= (1 << cb_bit)
    compressed_bytes[cb_pos:cb_pos + 2] = cb.to_bytes(2, "big")
    compressed_bytes += b"\x00\x00"

    return bytes_align(compressed_bytes)
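

# Minimal sanity-check sketch (not part of the original tool, and the helper name is
# hypothetical): reports whether a buffer survives a pzz_compress() / pzz_decompress()
# round trip. pzz_compress() only processes an even number of bytes, so only even-length
# buffers are expected to come back byte for byte.
def _roundtrip_ok(data: bytes) -> bool:
    return bytes(pzz_decompress(pzz_compress(data))) == bytes(data)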


def pzz_unpack(pzz_path: Path, dest_folder: Path, auto_decompress: bool = False):
    if pzz_path.suffix != ".pzz":
        logging.warning(f"Invalid file format '{pzz_path.suffix}'; it should be .pzz file format")

    if dest_folder != Path('.'):
        unpacked_pzz_path = dest_folder
    else:
        unpacked_pzz_path = pzz_path.parent / pzz_path.stem

    if auto_decompress:
        logging.info(f"unpzz({pzz_path}) in folder {unpacked_pzz_path}")
    else:
        logging.info(f"unpacking {pzz_path} in folder {unpacked_pzz_path}")
    unpacked_pzz_path.mkdir(exist_ok=True)

    with pzz_path.open("rb") as pzz_file:
        # file_count receives the number of files present in the PZZ:
        # read the first 4 bytes (big-endian uint32).
        file_count, = unpack(">I", pzz_file.read(4))

        # files_descriptors receives a tuple with all the file descriptors (big-endian uint32 each).
        files_descriptors = unpack(f">{file_count}I", pzz_file.read(file_count * 4))

        logging.debug(f"-> File count: {file_count}")

        offset = CHUNK_SIZE
        # Iterate over the tuple of file descriptors.
        for index, file_descriptor in enumerate(files_descriptors):

            # Bit 30 is the compression flag (bits numbered from 0 to 31).
            is_compressed = (file_descriptor & BIT_COMPRESSION_FLAG) != 0
            if not is_compressed:  # if the file is not compressed, 'U' follows the index
                compression_status = 'U'
            else:  # if the file is compressed, 'C' follows the index and the ".pzzp" extension is used
                compression_status = 'C'

            # file_descriptor now keeps only the lower 30 bits: (size / CHUNK_SIZE).
            file_descriptor &= FILE_LENGTH_MASK

            # file_len receives the size of the file in bytes.
            # The size is a multiple of CHUNK_SIZE; the data is padded with 0x00 up to the next file.
            file_len = file_descriptor * CHUNK_SIZE

            # Build the name of the new file that is about to be extracted.
            filename = f"{index:03}{compression_status}_{pzz_path.stem}"
            file_path = unpacked_pzz_path / filename

            logging.debug(f"-> Offset: {offset:010} - {file_path}")

            # If the size is zero, create an empty file and move on to the next file descriptor.
            if file_len == 0:
                file_path.with_suffix(".dat").touch()
                continue

            # Seek to the beginning of the file inside the archive.
            pzz_file.seek(offset)

            # Extract the file, decompressing it if requested.
            if compression_status == 'C' and auto_decompress:
                file_content = pzz_decompress(pzz_file.read(file_len))
            else:
                file_content = pzz_file.read(file_len)
            file_content = remove_padding(bytearray(file_content))

            if not auto_decompress and compression_status != 'U':
                file_path = file_path.with_suffix(".pzzp")
            else:
                file_path = get_file_path(file_content, file_path)

            file_path.write_bytes(file_content)

            # Finally, add the file size so the offset points to the next file.
            # The size being a multiple of CHUNK_SIZE, the final 2048-byte chunk has been padded with 0x00.
            offset += file_len
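
# For example, reusing the archive name mentioned in bytes_align(): pzz_unpack(Path("pl080d.pzz"), Path('.'))
# creates a "pl080d" folder next to the archive and writes one file per descriptor, named
# "{index:03}{C|U}_pl080d" plus either the extension chosen by get_file_path() or ".pzzp"
# for entries that are kept compressed.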


def pzz_pack(src_path: Path, dest_file: Path, auto_compress: bool = False):
    if dest_file == Path('.'):
        dest_file = src_path.with_suffix(".pzz")
    if dest_file.suffix != ".pzz":
        logging.warning("Invalid file format: dest must be a pzz")

    # Get the files of the folder that is about to be packed.
    src_files = listdir(src_path)

    if auto_compress:
        logging.info(f"pzz({src_path}) in pzz {dest_file}")
    else:
        logging.info(f"packing {src_path} in pzz {dest_file}")
    logging.debug(f"-> {len(src_files)} files to pack")

    with dest_file.open("wb") as pzz_file:
        # Seek to the end of the PZZ header.
        pzz_file.seek(CHUNK_SIZE)

        # Get the total file count to put it at the beginning of the header.
        header_bytes = len(src_files).to_bytes(4, byteorder='big')

        # Write all the files right after the header.
        for src_file_name in src_files:
            is_compressed = Path(src_file_name).suffix == ".pzzp"
            compression_status = src_file_name[3:4]

            src_file = (src_path / src_file_name).read_bytes()

            # The file has to be compressed before being packed.
            if compression_status == 'C' and not is_compressed and auto_compress:
                src_file = pzz_compress(src_file)
            # The file has to be decompressed before being packed.
            elif compression_status == 'U' and is_compressed and auto_compress:
                src_file = pzz_decompress(src_file)  # padding still has to be handled

            """
            # Add the padding needed to reach a multiple of CHUNK_SIZE.
            if compression_status == 'U':
                if (len(src_file) % CHUNK_SIZE) > 0:
                    src_file.extend(b"\x00" * (CHUNK_SIZE - (len(src_file) % CHUNK_SIZE)))
            """

            # file_descriptor = size / CHUNK_SIZE, rounded up.
            file_descriptor = ceil(len(src_file) / CHUNK_SIZE)

            # Add the compression flag to the file_descriptor.
            if compression_status == 'C':
                file_descriptor |= BIT_COMPRESSION_FLAG

            header_bytes += file_descriptor.to_bytes(4, byteorder='big')
            pzz_file.write(src_file)

        pzz_file.seek(0)
        # Write the header.
        pzz_file.write(header_bytes)
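
# Descriptor arithmetic, for example (the sizes are hypothetical): a 0x1234-byte entry
# occupies ceil(0x1234 / 0x800) = 3 chunks, so its descriptor is 0x00000003; if the entry
# is compressed, BIT_COMPRESSION_FLAG is set and the descriptor becomes 0x40000003.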


def unpzz(src_path: Path, dest_file: Path):
    pzz_unpack(src_path, dest_file, auto_decompress=True)


def pzz(src_path: Path, dest_file: Path):
    pzz_pack(src_path, dest_file, auto_compress=True)
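

# Example invocations (based on the argparser defined below; the script file name
# "pzztool.py" is an assumption):
#   python pzztool.py -unpzz pl080d.pzz          # unpack + auto-decompress into a pl080d folder
#   python pzztool.py -pzz pl080d out.pzz        # auto-compress + pack the folder into out.pzz
#   python pzztool.py -bd compressed_folder out  # batch-decompress a folder of .pzzp files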


def get_argparser():
    import argparse
    parser = argparse.ArgumentParser(description='PZZ (de)compressor & unpacker - [GameCube] Gotcha Force v' + __version__)
    parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
    parser.add_argument('-v', '--verbose', action='store_true', help='verbose mode')
    parser.add_argument('-di', '--disable-ignore', action='store_true', help="Disable .pzzp or .pzz file extension verification.")
    parser.add_argument('input_path', metavar='INPUT', help='')
    parser.add_argument('output_path', metavar='OUTPUT', help='', nargs='?', default="")

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-pzz', '--pzz', action='store_true', help="-pzz source_folder (dest_file.pzz) : pzz source_folder in new file source_folder.pzz or dest_file if specified")
    group.add_argument('-unpzz', '--unpzz', action='store_true', help="-unpzz source_folder.pzz (dest_folder) : unpzz the pzz in new folder source_folder or dest_folder if specified")
    group.add_argument('-bpzz', '--batch-pzz', action='store_true', help='-bpzz source_folder (dest_folder) : Batch pzz (auto compress) all pzz_folder from source_folder into source_folder or dest_folder if specified')
    group.add_argument('-bunpzz', '--batch-unpzz', action='store_true', help='-bunpzz source_folder (dest_folder) : Batch unpzz (auto decompress) all pzz from source_folder into source_folder or dest_folder if specified')
    group.add_argument('-p', '--pack', action='store_true', help="-p source_folder (dest_file.pzz) : Pack source_folder in new file source_folder.pzz or dest_file if specified")
    group.add_argument('-u', '--unpack', action='store_true', help='-u source_folder.pzz (dest_folder) : Unpack the pzz in new folder source_folder or dest_folder if specified')
    group.add_argument('-bp', '--batch-pack', action='store_true', help='-bp source_folder (dest_folder) : Batch pack all pzz_folder from source_folder into source_folder or dest_folder if specified')
    group.add_argument('-bu', '--batch-unpack', action='store_true', help='-bu source_folder (dest_folder) : Batch unpack all pzz from source_folder into source_folder or dest_folder if specified')
    group.add_argument('-c', '--compress', action='store_true', help='-c source_file (dest_file) : compress source_file in source_file.pzzp or dest_file if specified')
    group.add_argument('-d', '--decompress', action='store_true', help='-d source_file.pzzp (dest_file) : decompress source_file.pzzp in source_file or dest_file if specified')
    group.add_argument('-bc', '--batch-compress', action='store_true', help='-bc source_folder dest_folder : compress all files from source_folder into dest_folder')
    group.add_argument('-bd', '--batch-decompress', action='store_true', help='-bd source_folder dest_folder : decompress all files from source_folder into dest_folder')
    return parser


if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
    args = get_argparser().parse_args()

    p_input = Path(args.input_path)
    p_output = Path(args.output_path)

    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    if args.compress:
        logging.info("### Compress")
        if p_output == Path("."):
            p_output = Path(p_input.with_suffix(".pzzp"))
        # If the extension is not the right one, the file is not compressed.
        if not args.disable_ignore and p_output.suffix != ".pzzp":
            logging.warning(f"Ignored - {p_output} - bad extension - must be a pzzp")
        else:
            logging.info(f"Compressing {p_input} in {p_output}")
            p_output.write_bytes(pzz_compress(p_input.read_bytes()))
    elif args.decompress:
        logging.info("### Decompress")
        if p_output == Path("."):
            p_output = p_input.parent / p_input.stem
        # If the extension is not the right one, the file is not decompressed.
        if not args.disable_ignore and p_input.suffix != ".pzzp":
            logging.warning(f"Ignored - {p_input} - bad extension - must be a pzzp")
        else:
            output_file_content = pzz_decompress(p_input.read_bytes())
            p_output = get_file_path(output_file_content, p_output)
            logging.info(f"Decompressing {p_input} in {p_output}")
            p_output.write_bytes(output_file_content)
    elif args.batch_compress:
        logging.info("### Batch Compress")
        p_output.mkdir(exist_ok=True)
        for filename in listdir(p_input):
            # If the extension is not the right one, the file is not compressed.
            if not args.disable_ignore and Path(filename).suffix == ".pzzp":
                logging.warning(f"Ignored - {filename} - bad extension - mustn't be a pzzp")
                shutil.copy(p_input / filename, p_output / filename)
                continue
            logging.info(f"Compressing {filename}")
            (p_output / (Path(filename).stem + ".pzzp")).write_bytes(pzz_compress((p_input / filename).read_bytes()))
    elif args.batch_decompress:
        logging.info("### Batch Decompress")
        p_output.mkdir(exist_ok=True)
        for filename in listdir(p_input):
            if not args.disable_ignore and Path(filename).suffix != ".pzzp":
                logging.warning(f"Ignored - {filename} - bad extension - must be a pzzp")
                shutil.copy(p_input / filename, p_output / filename)
                continue
            logging.info(f"Decompressing {filename}")
            uncompressed_content = pzz_decompress((p_input / filename).read_bytes())
            uncompressed_path = get_file_path(uncompressed_content, p_output / Path(filename))
            uncompressed_path.write_bytes(uncompressed_content)
    elif args.pack:
        logging.info("### Pack")
        pzz_pack(p_input, p_output)
    elif args.unpack:
        logging.info("### Unpack")
        pzz_unpack(p_input, p_output)
    elif args.pzz:
        logging.info("### PZZ")
        pzz(p_input, p_output)
    elif args.unpzz:
        logging.info("### UNPZZ")
        unpzz(p_input, p_output)
    elif args.batch_pack:
        logging.info("### Batch Pack")
        p_output.mkdir(exist_ok=True)
        if p_output == Path('.'):
            p_output = p_input
        for folder in listdir(p_input):
            pzz_pack(p_input / folder, p_output / Path(folder).with_suffix(".pzz"))
    elif args.batch_unpack:
        logging.info("### Batch Unpack")
        p_output.mkdir(exist_ok=True)
        if p_output == Path('.'):
            p_output = p_input
        for filename in listdir(p_input):
            pzz_unpack(p_input / filename, p_output / Path(filename).stem)
    elif args.batch_pzz:
        logging.info("### Batch PZZ")
        p_output.mkdir(exist_ok=True)
        if p_output == Path('.'):
            p_output = p_input
        for folder in listdir(p_input):
            pzz(p_input / folder, p_output / Path(folder).with_suffix(".pzz"))
    elif args.batch_unpzz:
        logging.info("### Batch UNPZZ")
        p_output.mkdir(exist_ok=True)
        if p_output == Path('.'):
            p_output = p_input
        for filename in listdir(p_input):
            unpzz(p_input / filename, p_output / Path(filename).stem)