FD paths with folders handling.

Fixed paths with folders handling and restricted unpack in root folder.
tmpz23 2022-08-19 16:06:06 +02:00 committed by GitHub
parent 432c23a341
commit 245aef8b5a


@@ -9,7 +9,7 @@ import re
 import time
-__version__ = "0.1.4"
+__version__ = "0.2.0"
 __author__ = "rigodron, algoflash, GGLinnk"
 __license__ = "MIT"
 __status__ = "developpement"
@@ -45,73 +45,97 @@ class AfsEmptyBlockValueError(Exception): pass
 class AfsEmptyBlockAlignError(Exception): pass
-#########################################################################
-# class: FilenameResolver
-# Constructor: system path of the unpack folder
-# DESCRIPTION
-# Use sys/filename_resolver.csv to resolve filename to their index
-# in the TOC. Allow also to rename files since the FD and the TOC
-# are not rebuild during pack.
-# The resolver is necessary in multiple cases:
-# * When multiple packed files have the same name in the FD
-# * When there is no FD
-# * When names contains invalid path operator (not implemented yet)
-#########################################################################
+def normalize_parent(path_str:str):
+    "Normalize the parent of a path to avoid out of extract folder access."
+    if Path(path_str).parent == Path("."): return path_str
+    parent_str = str(Path(path_str).parent).replace(".", "")
+    while parent_str[0] == "/" or parent_str[0] == "\\":
+        parent_str = parent_str[1:]
+    return parent_str + "/" + Path(path_str).name
 class FilenameResolver:
+    """
+    Constructor: system path of the unpack folder
+    DESCRIPTION
+    Use sys/filename_resolver.csv to resolve filename to their index
+    in the TOC. Allow also to rename files since the FD and the TOC
+    are not rebuild during pack.
+    The resolver is necessary in multiple cases:
+    * When multiple packed files have the same name in the FD
+    * When there is no FD
+    * When names contains invalid path operator (not implemented yet)
+    """
     __sys_path = None
     # names_dict: {unpacked_filename: toc_index, ... }
     __names_dict = None
     __resolve_buffer = ""
-    __separator = '/'
+    __separator = '?'
     def __init__(self, sys_path:Path):
         self.__sys_path = sys_path
         self.__names_dict = {}
         self.__load()
-    # Load names_dict if there is a csv
     def __load(self):
+        "Load names_dict if there is a csv"
         if (self.__sys_path / "filename_resolver.csv").is_file():
             self.__resolve_buffer = (self.__sys_path / "filename_resolver.csv").read_text()
             for line in self.__resolve_buffer.split('\n'):
                 name_tuple = line.split(self.__separator)
                 self.__names_dict[name_tuple[1]] = int(name_tuple[0])
-    # Save the resolve_buffer containing formated names_dict to the csv if not empty
     def save(self):
+        "Save the resolve_buffer containing formated names_dict to the csv if not empty"
         if len(self.__resolve_buffer) > 0:
             logging.info(f"Writting {Path('sys/filename_resolver.csv')}")
             (self.__sys_path / "filename_resolver.csv").write_text(self.__resolve_buffer[:-1])
-    # Resolve generate a unique filename when unpacking
-    # return the filename or new generated filename if duplicated
-    def resolve_new(self, fileindex:int, filename:str):
+    def resolve_new(self, file_index:int, filename:str):
+        """
+        Resolve generate a unique filename when unpacking
+        input: file_index = int
+        input: filename = string
+        return the filename or new generated filename if duplicated
+        """
+        normalized_str = normalize_parent(filename)
+        if filename != normalized_str:
+            filename = normalized_str
+            if filename not in self.__names_dict:
+                self.__names_dict[filename] = file_index
+                self.__resolve_buffer += f"{file_index}{self.__separator}{filename}\n"
+                return filename
         if filename in self.__names_dict:
             i = 1
-            new_filename = f"{Path(filename).stem} ({i}){Path(filename).suffix}"
+            new_filename = f"{Path(filename).parent / Path(filename).stem} ({i}){Path(filename).suffix}"
             while new_filename in self.__names_dict:
                 i+=1
-                new_filename = f"{Path(filename).stem} ({i}){Path(filename).suffix}"
-            self.__names_dict[new_filename] = fileindex
-            self.__resolve_buffer += f"{fileindex}{self.__separator}{new_filename}\n"
+                new_filename = f"{Path(filename).parent / Path(filename).stem} ({i}){Path(filename).suffix}"
+            self.__names_dict[new_filename] = file_index
+            self.__resolve_buffer += f"{file_index}{self.__separator}{new_filename}\n"
             return new_filename
-        self.__names_dict[filename] = fileindex
+        self.__names_dict[filename] = file_index
         return filename
-    # Add new entry forcing the unpacked_filename
-    def add(self, fileindex:int, unpacked_filename:str):
-        self.__names_dict[unpacked_filename] = fileindex
-        self.__resolve_buffer += f"{fileindex}{self.__separator}{unpacked_filename}\n"
-    # return previously generated filename using the index of the file in the TOC
-    # else return filename
-    def resolve_from_index(self, fileindex:int, filename:str):
+    def add(self, file_index:int, unpacked_filename:str):
+        "Add new entry forcing the unpacked_filename"
+        self.__names_dict[unpacked_filename] = file_index
+        self.__resolve_buffer += f"{file_index}{self.__separator}{unpacked_filename}\n"
+    def resolve_from_index(self, file_index:int, filename:str):
+        """
+        input: file_index = int
+        input: filename = str
+        return previously generated filename using the index of the file in the TOC
+        else return filename
+        """
         for filename_key, fileindex_value in self.__names_dict.items():
-            if fileindex_value == fileindex:
+            if fileindex_value == file_index:
                 return filename_key
         return filename
-# http://wiki.xentax.com/index.php/GRAF:AFS_AFS
-#########################################################################
-# class: Afs
-# DESCRIPTION Afs handle all operations needed by the command parser
-#########################################################################
 class Afs:
+    """
+    DESCRIPTION Afs handle all operations needed by the command parser
+    http://wiki.xentax.com/index.php/GRAF:AFS_AFS
+    """
     MAGIC_00 = b"AFS\x00"
     MAGIC_20 = b"AFS\x20"
     # The header and each files are aligned to 0x800
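
The new normalize_parent is the core of the "restricted unpack in root folder" fix: runs of "." and leading slashes are stripped from the parent part, so a crafted filename coming from the FD cannot climb out of the extraction folder. A minimal sketch of the expected behavior, reusing the function body from the hunk above (the sample names are hypothetical):

    from pathlib import Path

    def normalize_parent(path_str:str):
        "Normalize the parent of a path to avoid out of extract folder access."
        if Path(path_str).parent == Path("."): return path_str
        parent_str = str(Path(path_str).parent).replace(".", "")
        while parent_str[0] == "/" or parent_str[0] == "\\":
            parent_str = parent_str[1:]
        return parent_str + "/" + Path(path_str).name

    assert normalize_parent("file.bin") == "file.bin"             # no parent: kept as-is
    assert normalize_parent("data/file.bin") == "data/file.bin"   # plain sub-folder: kept
    assert normalize_parent("../../etc/passwd") == "etc/passwd"   # ".." components defused
    assert normalize_parent("/abs/file.bin") == "abs/file.bin"    # absolute path made relative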
@@ -164,7 +188,8 @@ class Afs:
             mtime.hour.to_bytes(2,"little")+ \
             mtime.minute.to_bytes(2,"little")+\
             mtime.second.to_bytes(2,"little")
-    def __patch_fdlasts(self, fileindex:int, fd_last_attribute_type): # Patch FD last attributes according to the type
+    def __patch_fdlasts(self, fileindex:int, fd_last_attribute_type):
+        "Patch FD last attributes according to the type"
         if type(fd_last_attribute_type) == int: # every entry has the same const value
             self.__filenamedirectory[fileindex*Afs.FILENAMEDIRECTORY_ENTRY_LEN+44:fileindex*Afs.FILENAMEDIRECTORY_ENTRY_LEN+48] = fd_last_attribute_type.to_bytes(4, "little")
         elif fd_last_attribute_type == "length": #
@@ -176,25 +201,29 @@ class Afs:
             if updated_fdlast_index < self.__file_count:
                 self.__filenamedirectory[updated_fdlast_index*Afs.FILENAMEDIRECTORY_ENTRY_LEN+44:updated_fdlast_index*Afs.FILENAMEDIRECTORY_ENTRY_LEN+48] = self.__get_file_len(fileindex).to_bytes(4, "little")
         # fd_last_attribute_type == unknown
-    # Add padding to align datas to next block
     def __pad(self, data:bytes):
+        "Add padding to align datas to next block"
         if len(data) % Afs.ALIGN != 0:
             data += b"\x00" * (Afs.ALIGN - (len(data) % Afs.ALIGN))
         return data
-    # We can't know if there is a FD without searching and loading data for it
-    # So we have to clean loaded data if values are invalid
     def __clean_filenamedirectory(self):
+        """
+        We can't know if there is a FD without searching and loading data for it
+        So we have to clean loaded data if values are invalid
+        """
         self.__filenamedirectory = None
         self.__filenamedirectory_offset = None
         self.__filenamedirectory_len = None
-    # Load the TOC and the FD from an AFS file
-    # this operation is difficult because there are many cases possible:
-    # is there or not a FD?
-    # is there padding at the end of files offset/length list in the TOC?
-    # So we have to search and control values and test it for errors
-    # If there is no FD self.__filename_directory is None
-    # return True if there is a FD else None
     def __loadsys_from_afs(self, afs_file, afs_len:int):
+        """
+        Load the TOC and the FD from an AFS file
+        this operation is difficult because there are many cases possible:
+        is there or not a FD?
+        is there padding at the end of files offset/length list in the TOC?
+        So we have to search and control values and test it for errors
+        If there is no FD self.__filename_directory is None
+        return True if there is a FD else None
+        """
         self.__tableofcontent = afs_file.read(Afs.HEADER_LEN)
         if self.__get_magic() not in [Afs.MAGIC_00, Afs.MAGIC_20]:
             raise AfsInvalidMagicNumberError("Error - Invalid AFS magic number.")
@@ -256,8 +285,8 @@ class Afs:
                 # Here FD is valid and we read it's length
                 self.__tableofcontent += afs_file.read(self.__filenamedirectory_offset_offset+8 - tableofcontent_len)
                 return True
-    # Load the TOC and FD from an unpacked afs. This time it's easier
     def __loadsys_from_folder(self, sys_path:Path):
+        "Load the TOC and FD from an unpacked afs. This time it's easier"
         self.__tableofcontent = bytearray( (sys_path / "tableofcontent.bin").read_bytes() )
         self.__file_count = self.__get_file_count()
@@ -269,8 +298,8 @@ class Afs:
             self.__filenamedirectory_len = self.__get_filenamedirectory_len()
             if self.__filenamedirectory_len != len(self.__filenamedirectory):
                 raise AfsInvalidFilenameDirectoryLengthError("Error - Tableofcontent filenamedirectory length does not match real filenamedirectory length.")
-    # Print is used for stats
     def __print(self, title:str, lines_tuples, columns:list = list(range(7)), infos:str = ""):
+        "Print is used for stats"
         stats_buffer = "#"*100+f"\n# {title}\n"+"#"*100+f"\n{infos}|"+"-"*99+"\n"
         if 0 in columns: stats_buffer += "| Index ";
         if 1 in columns: stats_buffer += "| b offset ";
@@ -283,10 +312,12 @@ class Afs:
         for line in lines_tuples:
             stats_buffer += line if type(line) == str else "| "+" | ".join(line)+"\n"
         print(stats_buffer, end='')
-    # This method is used to check the next file offset and control if there is overlapping during pack
-    # end offset not included (0,1) -> len=1
-    # return a list of offsets where files and sys files begin
     def __get_offsets_map(self):
+        """
+        This method is used to check the next file offset and control if there is overlapping during pack
+        end offset not included (0,1) -> len=1
+        return a list of offsets where files and sys files begin
+        """
         # offsets_map is used to check next used offset when updating files
         # we also check if there is intersect between files
         offsets_map = [(0, len(self.__tableofcontent))]
@@ -306,9 +337,11 @@ class Afs:
                 last_tuple = offsets_tuple
             offsets_map[i] = offsets_tuple[0]
         return offsets_map
-    # This method is used for stats command
-    # end offset not included (0,1) -> len=1
     def __get_formated_map(self):
+        """
+        This method is used for stats command
+        end offset not included (0,1) -> len=1
+        """
         files_map = [("SYS TOC ", "00000000", f"{len(self.__tableofcontent):08x}", f"{len(self.__tableofcontent):08x}", "SYS TOC"+' '*12, "SYS TOC ", "SYS TOC")]
         for i in range(self.__file_count):
@@ -324,14 +357,16 @@ class Afs:
             f"{self.__filenamedirectory_offset + len(self.__filenamedirectory):08x}", \
             f"{len(self.__filenamedirectory):08x}", "SYS FD"+' '*13, "SYS FD ", "SYS FD"))
         return files_map
-    # At the end of the FD there is 4 bytes used for different purposes
-    # To keep data we search what kind of data it is:
-    # return one of this values:
-    # * length
-    # * offset-length
-    # * 0x123 # (hex constant)
-    # * unknwon
     def __get_fdlast_type(self):
+        """
+        At the end of the FD there is 4 bytes used for different purposes
+        To keep data we search what kind of data it is:
+        return one of this values:
+        * length
+        * offset-length
+        * 0x123 # (hex constant)
+        * unknwon
+        """
         # Try to get the type of FD last attribute
         length_type = True
         offset_length_type = True
@@ -350,12 +385,14 @@ class Afs:
         if constant_type: return f"0x{constant_type:x}"
         logging.info("Unknown FD last attribute type.")
         return "unknown"
-    # At the end of unpack we use this function to write the 2 files:
-    # * "sys/afs_rebuild.conf"
-    # * "sys/afs_rebuild.csv"
-    # this file will contains every parameters of the AFS to allow exact pack copy when possible (fd_last_atribute != unknown)
-    # see documentation for further informations
     def __write_rebuild_config(self, sys_path:Path, resolver:FilenameResolver):
+        """
+        At the end of unpack we use this function to write the 2 files:
+        * "sys/afs_rebuild.conf"
+        * "sys/afs_rebuild.csv"
+        this file will contains every parameters of the AFS to allow exact pack copy when possible (fd_last_atribute != unknown)
+        see documentation for further informations
+        """
         config = ConfigParser(allow_no_value=True) # allow_no_value to allow adding comments
         config.optionxform = str # makes options case sensitive
         config.add_section("Default")
@@ -375,11 +412,11 @@ class Afs:
         for i in range(self.__file_count):
             filename = self.__get_file_name(i) if self.__filenamedirectory else f"{i:08}"
             unpacked_filename = resolver.resolve_from_index(i, filename) if self.__filenamedirectory else f"{i:08}"
-            rebuild_csv += f"{unpacked_filename}/0x{i:x}/0x{self.__get_file_offset(i):x}/{filename}\n"
+            rebuild_csv += f"{unpacked_filename}?0x{i:x}?0x{self.__get_file_offset(i):x}?{filename}\n"
         if len(rebuild_csv) > 0:
             (sys_path / "afs_rebuild.csv").write_text(rebuild_csv[:-1])
-    # Method used to unpack an AFS inside a folder
     def unpack(self, afs_path:Path, folder_path:Path):
+        "Method used to unpack an AFS inside a folder"
         sys_path = folder_path / "sys"
         root_path = folder_path / "root"
         sys_path.mkdir(parents=True)
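
The separator change from '/' to '?' in this hunk (and in the FilenameResolver above) follows directly from the folders fix: unpacked names may now legitimately contain '/', so the CSV needed a delimiter that cannot occur in a path. One afs_rebuild.csv entry under the new format would look like this (all values made up for illustration):

    # Field layout: unpacked_filename?0xindex?0xoffset?packed_filename
    unpacked_filename, i, offset, filename = "voice/boss (1).adx", 18, 0x82800, "boss.adx"
    line = f"{unpacked_filename}?0x{i:x}?0x{offset:x}?{filename}"
    assert line == "voice/boss (1).adx?0x12?0x82800?boss.adx"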
@@ -391,11 +428,11 @@ class Afs:
             logging.info("There is no filename directory. Creating new names and dates for files.")
         else:
             logging.debug(f"filenamedirectory_offset:0x{self.__filenamedirectory_offset:x}, filenamedirectory_len:0x{self.__filenamedirectory_len:x}.")
-            logging.info(f"Writting {Path('sys/filenamedirectory.bin')}")
+            logging.info("Writting sys/filenamedirectory.bin")
             (sys_path / "filenamedirectory.bin").write_bytes(self.__filenamedirectory)
             resolver = FilenameResolver(sys_path)
-        logging.info(f"Writting {Path('sys/tableofcontent.bin')}")
+        logging.info("Writting sys/tableofcontent.bin")
         (sys_path / "tableofcontent.bin").write_bytes(self.__tableofcontent)
         logging.info(f"Extracting {self.__file_count} files.")
@@ -404,6 +441,9 @@ class Afs:
                 file_len = self.__get_file_len(i)
                 filename = resolver.resolve_new(i, self.__get_file_name(i)) if self.__filenamedirectory else f"{i:08}"
+                if Path(filename).parent != Path("."):
+                    (root_path / Path(filename).parent).mkdir(parents=True, exist_ok=True)
                 logging.debug(f"Writting {root_path / filename} 0x{file_offset:x}:0x{file_offset + file_len:x}")
                 afs_file.seek(file_offset)
                 (root_path / filename).write_bytes(afs_file.read(file_len))
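
Because resolved filenames can now carry sub-folders, unpack has to materialize the parent directory before write_bytes, which fails on a missing folder. A standalone sketch of that write path (the paths are hypothetical):

    from pathlib import Path

    root_path = Path("unpacked/root")
    filename = "stage/bg/model.bin"
    if Path(filename).parent != Path("."):                 # file lives in a sub-folder
        (root_path / Path(filename).parent).mkdir(parents=True, exist_ok=True)
    (root_path / filename).write_bytes(b"\x00" * 0x10)     # placeholder content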
@@ -415,10 +455,12 @@ class Afs:
         if self.__filenamedirectory:
             resolver.save()
         self.__write_rebuild_config(sys_path, resolver)
-    # Methood used to pack un unpacked folder inside a new AFS file
-    # for a file pack will use the next file offset as max file length an raise an exception if the length overflow
-    # pack keep FD and TOC inchanged except for file length, FD dates, fd_last_attribute updates
     def pack(self, folder_path:Path, afs_path:Path = None):
+        """
+        Methood used to pack un unpacked folder inside a new AFS file
+        for a file pack will use the next file offset as max file length an raise an exception if the length overflow
+        pack keep FD and TOC inchanged except for file length, FD dates, fd_last_attribute updates
+        """
         if afs_path is None:
             afs_path = folder_path / Path(folder_path.name).with_suffix(".afs")
         elif afs_path.suffix != ".afs":
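
For context, a hypothetical round trip with the two public methods documented above (paths are made up; the default output name follows the folder_path / Path(folder_path.name).with_suffix(".afs") rule visible in this hunk):

    from pathlib import Path

    afs = Afs()
    afs.unpack(Path("game.afs"), Path("game_unpacked"))
    afs.pack(Path("game_unpacked"))   # default target: game_unpacked/game_unpacked.afs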
@@ -436,6 +478,7 @@ class Afs:
             if fd_last_attribute_type[:2] == "0x":
                 fd_last_attribute_type = int(fd_last_attribute_type, 16)
+        try:
             with afs_path.open("wb") as afs_file:
                 # We update files
                 for i in range(self.__file_count):
@@ -468,11 +511,16 @@ class Afs:
             logging.debug(f"Packing {sys_path / 'tableofcontent.bin'} at the beginning of the AFS.")
             afs_file.seek(0)
             afs_file.write(self.__tableofcontent)
+        except AfsInvalidFileLenError:
+            afs_path.unlink()
+            raise
-    # Rebuild will use following config files:
-    # * "sys/afs_rebuild.conf"
-    # * "sys/afs_rebuild.csv"
-    # It will rebuild the unpacked AFS sys files (TOC and FD) in the sys folder
     def rebuild(self, folder_path:Path):
+        """
+        Rebuild will use following config files:
+        * "sys/afs_rebuild.conf"
+        * "sys/afs_rebuild.csv"
+        It will rebuild the unpacked AFS sys files (TOC and FD) in the sys folder
+        """
         config = ConfigParser()
         root_path = folder_path / "root"
         sys_path = folder_path / "sys"
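
The new try/except around the write loop means a file that overflows its slot no longer leaves a truncated .afs on disk: the partial archive is unlinked before the error propagates. The same pattern in isolation (the exception class name comes from this module; the rest is a made-up demo):

    from pathlib import Path

    class AfsInvalidFileLenError(Exception): pass

    def pack_demo(afs_path: Path):
        try:
            with afs_path.open("wb") as afs_file:
                afs_file.write(b"AFS\x00")
                # ... file data would be written here; an oversized file raises:
                raise AfsInvalidFileLenError("file longer than its slot")
        except AfsInvalidFileLenError:
            afs_path.unlink()   # don't leave a truncated archive behind
            raise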
@@ -489,7 +537,7 @@ class Afs:
                 logging.info(f"Removing {path}.")
                 path.unlink()
-        files_paths = list(root_path.glob("*"))
+        files_paths = [path for path in root_path.glob("**/*") if path.is_file()]
         self.__file_count = len(files_paths)
         max_offset = None
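
Since unpack can now create sub-folders under root/, rebuild switches from a flat listing to a recursive one restricted to regular files; a contrast sketch (the root path is hypothetical):

    from pathlib import Path

    root_path = Path("unpacked/root")
    flat = list(root_path.glob("*"))                # old: top level only, folders included
    files_paths = [path for path in root_path.glob("**/*") if path.is_file()]  # new: every file, any depth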
@@ -524,7 +572,7 @@ class Afs:
         # We parse the file csv and verify entries retrieving length for files
         if (sys_path / "afs_rebuild.csv").is_file():
             for line in (sys_path / "afs_rebuild.csv").read_text().split('\n'):
-                line_splited = line.split('/')
+                line_splited = line.split('?')
                 if len(line_splited) == 4:
                     unpacked_filename = line_splited[0]
                     index = None
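
The read side mirrors the write side shown earlier: a line is only accepted when it splits into exactly four '?'-separated fields. Parsing one hypothetical entry:

    line = "voice/boss (1).adx?0x12?0x82800?boss.adx"
    line_splited = line.split('?')
    if len(line_splited) == 4:
        unpacked_filename = line_splited[0]
        index = int(line_splited[1], 16)     # 18
        offset = int(line_splited[2], 16)    # 0x82800
        packed_filename = line_splited[3]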
@@ -667,11 +715,13 @@ class Afs:
             (sys_path / "filenamedirectory.bin").write_bytes(self.__filenamedirectory)
         logging.info(f"Writting {Path('sys/tableofcontent.bin')}")
         (sys_path / "tableofcontent.bin").write_bytes(self.__tableofcontent)
-    # Stats will print the AFS stats:
-    # Get full informations about header, TOC, FD, full memory mapping
-    # sorted by offsets (files and sys files), addresses space informations,
-    # and duplicated filenames grouped by filenames.
     def stats(self, path:Path):
+        """
+        Stats will print the AFS stats:
+        Get full informations about header, TOC, FD, full memory mapping
+        sorted by offsets (files and sys files), addresses space informations,
+        and duplicated filenames grouped by filenames.
+        """
         if path.is_file():
             with path.open("rb") as afs_file:
                 self.__loadsys_from_afs(afs_file, path.stat().st_size)