PzzTool fix, changes & improvements

- Fix byte alignment (see the sketch below).
- PEP8: set the maximum line length to 200.
- Batch compress/decompress rewritten and re-enabled.
- Add '--disable-ignore' argument: disables the filename check.
- Add '--verbose' argument: enables verbose output.
- Remove unused imports.
Add GitHub's Python .gitignore.
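
Below is a minimal illustration of the alignment fix (a sketch only, not code from the commit; CHUNK_SIZE mirrors the constant defined in the script, and the 0x800 value is just a placeholder for this example):

# Sketch of the padding change in bytes_align (illustrative, not the script's code).
CHUNK_SIZE = 0x800  # placeholder value; the real constant is defined in the script

def bytes_align_old(bout: bytearray) -> bytearray:
    # Old check: stop only when hex(len(bout)) ends in "000", i.e. pad up to
    # the next 0x1000 boundary regardless of CHUNK_SIZE.
    while not hex(len(bout)).endswith("000"):
        bout.extend(b"\x00")
    return bout

def bytes_align_new(bout: bytearray) -> bytearray:
    # New check: pad only up to the next multiple of CHUNK_SIZE.
    while len(bout) % CHUNK_SIZE > 0:
        bout.extend(b"\x00")
    return bout

print(len(bytes_align_old(bytearray(10))))  # 4096 (0x1000)
print(len(bytes_align_new(bytearray(10))))  # 2048 (0x800)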
Gabriel GRONDIN 2021-11-18 11:54:44 +01:00
parent 59df762bcf
commit e135a54c93
Signed by: GGLinnk
GPG Key ID: 22C286336131C900
3 changed files with 192 additions and 43 deletions

.gitignore (new file, 139 lines)

@@ -0,0 +1,139 @@
### GitHub Python .gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

.vscode/settings.json (new file, 5 lines)

@@ -0,0 +1,5 @@
{
"python.formatting.autopep8Args": [
"--max-line-length=200"
]
}
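
With this workspace setting, the VS Code Python extension passes --max-line-length=200 to autopep8 when formatting, matching the PEP8 note in the commit message; running autopep8 --max-line-length=200 --in-place <file> from a shell should produce the same result.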


@@ -1,9 +1,9 @@
 #!/usr/bin/env python3
 from math import ceil
-from struct import unpack, pack
+from struct import unpack
 from pathlib import Path
-from os import listdir
+from os import listdir, path
-__version__ = "1.3.1"
+__version__ = "1.3.8"
 __author__ = "rigodron, algoflash, GGLinnk"
 __OriginalAutor__ = "infval"
 __license__ = "MIT"
@@ -58,7 +58,7 @@ def pzz_decompress(compressed_bytes: bytes):
 def bytes_align(bout: bytes):
-    while not hex(len(bout)).endswith("000"):
+    while len(bout) % CHUNK_SIZE > 0:
         bout.extend(b"\x00")
@@ -267,31 +267,25 @@ def pzz_pack(src_path, dest_file):
             b"\x00" * (CHUNK_SIZE - (pzz_file.tell() % CHUNK_SIZE)))


 def get_argparser():
     import argparse
-    parser = argparse.ArgumentParser(
-        description='PZZ (de)compressor & unpacker - [GameCube] Gotcha Force v' + __version__)
-    parser.add_argument('--version', action='version',
-                        version='%(prog)s ' + __version__)
+    parser = argparse.ArgumentParser(description='PZZ (de)compressor & unpacker - [GameCube] Gotcha Force v' + __version__)
+    parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
+    parser.add_argument('-v', '--verbose', action='store_true', help='verbose mode')
     parser.add_argument('input_path', metavar='INPUT', help='')
-    parser.add_argument('output_path', metavar='OUTPUT',
-                        help='', nargs='?', default="")
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-p', '--pack', action='store_true',
-                       help="-p source_folder dest_file.pzz(optionnal) : Pack source_folder in new file source_folder.pzz")
-    group.add_argument('-u', '--unpack', action='store_true',
-                       help='-u source_folder.pzz dest_folder(optionnal) : Unpack the pzz in new folder source_folder')
-    group.add_argument('-bp', '--batch-pack', action='store_true',
-                       help='-bp source_folder dest_folder(optionnal - if not specified it will pack in source_folder)')
-    group.add_argument('-bu', '--batch-unpack', action='store_true',
-                       help='-bu source_folder dest_folder(optionnal - if not specified it will unpack in source_folder)')
-    group.add_argument('-a', '-aa',action='store_true', help='sha256')
-    # group.add_argument('-c', '--compress', action='store_true', help='')
-    # group.add_argument('-d', '--decompress', action='store_true', help='Unpacked files from PZZ')
-    # group.add_argument('-bc', '--batch-compress', action='store_true', help='INPUT relative pattern; e.g. AFS_DATA\\*.bin')
-    # group.add_argument('-bd', '--batch-decompress', action='store_true', help='INPUT relative pattern; e.g. AFS_DATA\\*_compressed.dat')
+    parser.add_argument('output_path', metavar='OUTPUT', help='', nargs='?', default="")
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-p', '--pack', action='store_true', help="-p source_folder dest_file.pzz(optionnal) : Pack source_folder in new file source_folder.pzz")
+    group.add_argument('-u', '--unpack', action='store_true', help='-u source_folder.pzz dest_folder(optionnal) : Unpack the pzz in new folder source_folder')
+    group.add_argument('-bp', '--batch-pack', action='store_true', help='-bp source_folder dest_folder(optionnal - if not specified it will pack in source_folder)')
+    group.add_argument('-bu', '--batch-unpack', action='store_true', help='-bu source_folder dest_folder(optionnal - if not specified it will unpack in source_folder)')
+    # group.add_argument('-a', '-aa', action='store_true', help='sha256')
+    group.add_argument('-c', '--compress', action='store_true', help='')
+    group.add_argument('-d', '--decompress', action='store_true', help='Unpacked files from PZZ')
+    group.add_argument('-bc', '--batch-compress', action='store_true', help='INPUT relative pattern; e.g. AFS_DATA\\*.bin')
+    group.add_argument('-bd', '--batch-decompress', action='store_true', help='INPUT relative pattern; e.g. AFS_DATA\\*_compressed.dat')
+    group.add_argument('-di', '--disable-ignore', action='store_true', help="Disable filename ignore")

     return parser
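
A standalone sketch of how a subset of the new options parse with argparse; it reproduces only a few of the flags shown above and is not the script's actual get_argparser:

import argparse

# Stand-in parser with a subset of the options added in this commit.
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('input_path', metavar='INPUT')
parser.add_argument('output_path', metavar='OUTPUT', nargs='?', default="")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-bc', '--batch-compress', action='store_true')
group.add_argument('-bd', '--batch-decompress', action='store_true')
group.add_argument('-di', '--disable-ignore', action='store_true')

# 'input_folder' and 'output_folder' are placeholder paths for illustration.
args = parser.parse_args(['-bc', 'input_folder', 'output_folder', '-v'])
print(args.batch_compress, args.verbose, args.disable_ignore)  # True True False
# Note: since --disable-ignore is added to the mutually exclusive group,
# argparse rejects it when combined with -bc or -bd in this form.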
@@ -300,7 +294,6 @@ if __name__ == '__main__':
     p_input = Path(args.input_path)
     p_output = Path(args.output_path)

-    """
     if args.compress:
         print("### Compress")
         p_output.write_bytes(pzz_compress(p_input.read_bytes()))
@@ -311,26 +304,38 @@ if __name__ == '__main__':
         print("### Batch Compress")
         p_output.mkdir(exist_ok=True)
-        p = Path('.')
-        for filename in p.glob(args.input_path):
-            print(filename)
-            b = filename.read_bytes()
-            (p_output / filename.name).with_suffix(".dat").write_bytes(pzz_compress(b))
+        for filename in listdir(p_input):
+            if (not args.ignore_filename) and ("_uncompressed" in filename):
+                if args.verbose:
+                    print(f"Compressing {filename}")
+                recomp_filename = filename.replace(
+                    "_uncompressed", "_recompressed")
+
+                uncompressed = open(path.join(p_input, filename), 'rb')
+                recompressed = open(path.join(p_output, filename), 'wb')
+                recompressed.write(pzz_compress(uncompressed.read()))
+                recompressed.close()
+                uncompressed.close()
+            else:
+                print(f"Ignored: {filename}")
     elif args.batch_decompress:
         print("### Batch Decompress")
         p_output.mkdir(exist_ok=True)
-        p = Path('.')
-        for filename in p.glob(args.input_path):
-            print(filename)
-            try:
-                b = filename.read_bytes()
-                (p_output / filename.name).with_suffix(".bin").write_bytes(pzz_decompress(b))
-            except IndexError:
-                print("! Wrong PZZ file")
-    el
-    """
-    if args.pack:
+        for filename in listdir(p_input):
+            if (not args.ignore_filename) and ("_compressed" in filename):
+                print(f"Decompressing {filename}")
+                uncomp_filename = filename.replace(
+                    "_compressed", "_uncompressed")
+
+                compressed = open(path.join(p_input, filename), 'rb')
+                uncompressed = open(path.join(p_output, uncomp_filename), 'wb')
+                uncompressed.write(pzz_decompress(compressed.read()))
+                uncompressed.close()
+                compressed.close()
+            else:
+                print(f"Ignored: {filename}")
+    elif args.pack:
         print("### Pack")
         pzz_pack(p_input, p_output)
     elif args.unpack: