
Commit 64662eb

fix: Have multiple record types for the comments section.
hutcheb committed Jun 19, 2024
1 parent 9780ebe commit 64662eb
Showing 12 changed files with 248 additions and 99 deletions.
1 change: 0 additions & 1 deletion acd/__init__.py
@@ -1,2 +1 @@

from .api import *
6 changes: 4 additions & 2 deletions acd/api.py
@@ -1,6 +1,7 @@
from dataclasses import dataclass
from os import PathLike

from acd.database.acd_database import AcdDatabase
from acd.l5x.elements import Controller

from acd.export_l5x import ExportL5x
@@ -98,8 +99,9 @@ def compress(self):
class ExtractAcdDatabaseRecordsToFiles(ExportProject):
"""Export a Controller to a raw database record tree"""
filename: PathLike
output_directory: PathLike

def extract(self):
# Implement the extraction of an ACD file
unzip = Unzip(self.filename)
unzip.write_files(self.output_directory)
database = AcdDatabase(self.filename, self.output_directory)
database.extract_to_file()
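
The new ExtractAcdDatabaseRecordsToFiles entry point combines the existing Unzip step with the new AcdDatabase dump. A minimal usage sketch, assuming the class accepts its two declared fields as keyword arguments (both paths below are placeholders):

from acd.api import ExtractAcdDatabaseRecordsToFiles

# Placeholder paths: an ACD project file and a writable output directory.
extractor = ExtractAcdDatabaseRecordsToFiles(
    filename="project.ACD", output_directory="build/records"
)
extractor.extract()  # unzips the ACD and dumps every database record to a file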
16 changes: 4 additions & 12 deletions acd/comments.py
@@ -20,21 +20,13 @@ def __post_init__(self):
return

query: str = "INSERT INTO comments VALUES (?, ?, ?, ?, ?, ?)"
if r.lookup_id == 1:
text = r.body.record_string_utf16.value

elif r.lookup_id == 0:
text = r.body.record_string_utf8
else:
text = ""

entry: tuple = (
r.header.seq_number,
r.header.string_length,
r.lookup_id,
text,
r.header.sub_record_length,
r.body.object_id,
r.body.record_string,
r.header.record_type,
r.sub_record_type)
r.header.parent)
self._cur.execute(query, entry)

def replace_tag_references(self, sb_rec):
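
The rewritten insert stores one row per comment sub-record, lining the tuple up with the reworked comments table created in export_l5x.py below (seq_number, sub_record_length, object_id, record_string, record_type, parent). A self-contained sqlite3 sketch of that row shape, with placeholder values:

import sqlite3

# In-memory stand-in for the exporter's database; the inserted values are placeholders.
con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute(
    "CREATE TABLE comments(seq_number int, sub_record_length int, object_id int, "
    "record_string text, record_type int, parent int)"
)
cur.execute(
    "INSERT INTO comments VALUES (?, ?, ?, ?, ?, ?)",
    (1, 64, 0x2F01, "Pump start interlock", 14, 0x2E00),
)
con.commit()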
63 changes: 63 additions & 0 deletions acd/database/acd_database.py
@@ -0,0 +1,63 @@
import os
from dataclasses import dataclass
from loguru import logger as log


from acd.dbextract import DbExtract
from acd.unzip import Unzip


@dataclass
class AcdDatabase:
input_filename: os.PathLike
output_filename: str
_temp_dir: str = "build" # tempfile.mkdtemp()

def __post_init__(self):
if not os.path.exists(os.path.join(self._temp_dir)):
os.makedirs(self._temp_dir)

log.info("Extracting ACD database file")
unzip = Unzip(self.input_filename)
unzip.write_files(self._temp_dir)

log.info("Getting records from ACD Comps file and storing in sqllite database")
self.comps_db = DbExtract(os.path.join(self._temp_dir, "Comps.Dat")).read()

log.info("Getting records from ACD SbRegion file and storing in sqllite database")
self.sb_region_db = DbExtract(os.path.join(self._temp_dir, "SbRegion.Dat")).read()

log.info("Getting records from ACD Comments file and storing in sqllite database")
self.comments_db = DbExtract(os.path.join(self._temp_dir, "Comments.Dat")).read()

log.info("Getting records from ACD Nameless file and storing in sqllite database")
self.nameless_db = DbExtract(os.path.join(self._temp_dir, "Nameless.Dat")).read()

def extract_to_file(self):
directory = os.path.join(self._temp_dir, "comps_db")
if not os.path.exists(os.path.join(directory)):
os.makedirs(directory)
for count, record in enumerate(self.comps_db.records.record):
with open(os.path.join(directory, str(count)), "wb") as out_file:
out_file.write(record.record.record_buffer)

directory = os.path.join(self._temp_dir, "sb_region_db")
if not os.path.exists(os.path.join(directory)):
os.makedirs(directory)
for count, record in enumerate(self.sb_region_db.records.record):
with open(os.path.join(directory, str(count)), "wb") as out_file:
out_file.write(record.record.record_buffer)

directory = os.path.join(self._temp_dir, "comments_db")
if not os.path.exists(os.path.join(directory)):
os.makedirs(directory)
for count, record in enumerate(self.comments_db.records.record):
with open(os.path.join(directory, str(count)), "wb") as out_file:
out_file.write(record.record.record_buffer)

directory = os.path.join(self._temp_dir, "nameless_db")
if not os.path.exists(os.path.join(directory)):
os.makedirs(directory)
for count, record in enumerate(self.nameless_db.records.record):
with open(os.path.join(directory, str(count)), "wb") as out_file:
out_file.write(record.record.record_buffer)
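
extract_to_file repeats the same dump loop for the Comps, SbRegion, Comments and Nameless databases. A possible consolidation of that loop, shown only as a sketch (dump_records is a hypothetical helper, not part of the commit):

import os

def dump_records(db, directory: str) -> None:
    """Write each record's raw buffer to <directory>/<index>."""
    os.makedirs(directory, exist_ok=True)
    for count, record in enumerate(db.records.record):
        with open(os.path.join(directory, str(count)), "wb") as out_file:
            out_file.write(record.record.record_buffer)

# Hypothetical use inside AcdDatabase.extract_to_file:
#     for name, db in (("comps_db", self.comps_db), ("sb_region_db", self.sb_region_db),
#                      ("comments_db", self.comments_db), ("nameless_db", self.nameless_db)):
#         dump_records(db, os.path.join(self._temp_dir, name))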
4 changes: 2 additions & 2 deletions acd/export_l5x.py
@@ -43,9 +43,9 @@ def __post_init__(self):
self._cur.execute("CREATE TABLE region_map(object_id int, parent_id int, unknown int, seq_no int, record BLOB NOT NULL)")
log.debug("Create Comments table in sqllite db")
self._cur.execute(
"CREATE TABLE comments(seq_number int, string_length int, lookup_id int, comment text, record_type int, sub_record_type int)")
log.debug("Create Nameless table in sqllite db")
"CREATE TABLE comments(seq_number int, sub_record_length int, object_id int, record_string text, record_type int, parent int)")

log.debug("Create Nameless table in sqllite db")
self._cur.execute(
"CREATE TABLE nameless(object_id int, parent_id int, record BLOB NOT NULL)")

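Any code that read the old lookup_id/comment columns now has to select the new names instead. A minimal sqlite3 query sketch against the reworked table (the database path is a placeholder for wherever ExportL5x writes its sqlite file):

import sqlite3

con = sqlite3.connect("build/acd.db")  # placeholder path for the exporter's sqlite db
cur = con.cursor()
# record_type distinguishes the new comment body layouts parsed from Comments.Dat.
for seq_number, record_type, record_string in cur.execute(
    "SELECT seq_number, record_type, record_string FROM comments ORDER BY seq_number"
):
    print(seq_number, record_type, record_string)
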
137 changes: 81 additions & 56 deletions acd/generated/comments/fafa_coments.py
@@ -16,10 +16,58 @@ def __init__(self, _io, _parent=None, _root=None):

def _read(self):
self.record_length = self._io.read_u4le()
self._raw_header = self._io.read_bytes(12)
self._raw_header = self._io.read_bytes(10)
_io__raw_header = KaitaiStream(BytesIO(self._raw_header))
self.header = FafaComents.Header(_io__raw_header, self, self._root)
self.body = FafaComents.Body(self.header.string_start_position, self.lookup_id, self._io, self, self._root)
_on = self.header.record_type
if _on == 14:
self._raw_body = self._io.read_bytes((self.record_length - 10))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = FafaComents.Utf16Record(_io__raw_body, self, self._root)
elif _on == 1:
self._raw_body = self._io.read_bytes((self.record_length - 10))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = FafaComents.AsciiRecord(_io__raw_body, self, self._root)
elif _on == 13:
self._raw_body = self._io.read_bytes((self.record_length - 10))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = FafaComents.Utf16Record(_io__raw_body, self, self._root)
elif _on == 23:
self._raw_body = self._io.read_bytes((self.record_length - 10))
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
self.body = FafaComents.ControllerRecord(_io__raw_body, self, self._root)
else:
self.body = self._io.read_bytes((self.record_length - 10))

class ControllerRecord(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()

def _read(self):
self.unknown_1 = self._io.read_bytes(8)
self.object_id = self._io.read_u4le()
self.unknown_2 = self._io.read_bytes(4)
self.tag_reference = FafaComents.StrzUtf16(self._io, self, self._root)
self.unknown_3 = self._io.read_bytes(12)
self.record_string = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")


class AsciiRecord(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()

def _read(self):
self.unknown_1 = self._io.read_bytes(13)
self.object_id = self._io.read_u4le()
self.unknown_2 = self._io.read_bytes(13)
self.record_string = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")


class Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
@@ -43,76 +91,37 @@ def seq_number(self):
return getattr(self, '_m_seq_number', None)

@property
def string_length(self):
if hasattr(self, '_m_string_length'):
return self._m_string_length
def record_type(self):
if hasattr(self, '_m_record_type'):
return self._m_record_type

_pos = self._io.pos()
self._io.seek(2)
self._m_string_length = self._io.read_u2le()
self._m_record_type = self._io.read_u2le()
self._io.seek(_pos)
return getattr(self, '_m_string_length', None)
return getattr(self, '_m_record_type', None)

@property
def string_start_position(self):
if hasattr(self, '_m_string_start_position'):
return self._m_string_start_position
def sub_record_length(self):
if hasattr(self, '_m_sub_record_length'):
return self._m_sub_record_length

_pos = self._io.pos()
self._io.seek(4)
self._m_string_start_position = self._io.read_u2le()
self._m_sub_record_length = self._io.read_u2le()
self._io.seek(_pos)
return getattr(self, '_m_string_start_position', None)
return getattr(self, '_m_sub_record_length', None)

@property
def record_type(self):
if hasattr(self, '_m_record_type'):
return self._m_record_type
def parent(self):
if hasattr(self, '_m_parent'):
return self._m_parent

_pos = self._io.pos()
self._io.seek(6)
self._m_record_type = self._io.read_u2le()
self._m_parent = self._io.read_u4le()
self._io.seek(_pos)
return getattr(self, '_m_record_type', None)


class Body(KaitaiStruct):
def __init__(self, string_start_position, sub_record_type, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.string_start_position = string_start_position
self.sub_record_type = sub_record_type
self._read()

def _read(self):
pass

@property
def record_string_utf8(self):
if hasattr(self, '_m_record_string_utf8'):
return self._m_record_string_utf8

if self.sub_record_type == 0:
_pos = self._io.pos()
self._io.seek(43)
self._m_record_string_utf8 = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
self._io.seek(_pos)

return getattr(self, '_m_record_string_utf8', None)

@property
def record_string_utf16(self):
if hasattr(self, '_m_record_string_utf16'):
return self._m_record_string_utf16

if self.sub_record_type == 1:
_pos = self._io.pos()
self._io.seek(46)
self._m_record_string_utf16 = FafaComents.StrzUtf16(self._io, self, self._root)
self._io.seek(_pos)

return getattr(self, '_m_record_string_utf16', None)
return getattr(self, '_m_parent', None)


class StrzUtf16(KaitaiStruct):
@@ -147,6 +156,22 @@ def code_units(self):
return getattr(self, '_m_code_units', None)


class Utf16Record(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()

def _read(self):
self.unknown_1 = self._io.read_bytes(8)
self.object_id = self._io.read_u4le()
self.unknown_2 = self._io.read_bytes(6)
self.tag_reference = FafaComents.StrzUtf16(self._io, self, self._root)
self.unknown_3 = self._io.read_bytes(12)
self.record_string = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")


@property
def lookup_id(self):
if hasattr(self, '_m_lookup_id'):
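Rather than lazy UTF-8/UTF-16 properties on a single Body type, the generated parser now switches on header.record_type and exposes record_string on each typed body, falling back to raw bytes for unknown types. A sketch of reading that string back out of a parsed record; comment_text is a hypothetical helper, and from_bytes is the classmethod supplied by the Kaitai Struct Python runtime:

from acd.generated.comments.fafa_coments import FafaComents

def comment_text(raw_record: bytes) -> str:
    """Return the comment string from one FAFA comment record, or '' if the type is unknown."""
    rec = FafaComents.from_bytes(raw_record)
    if isinstance(rec.body, (bytes, bytearray)):  # unmatched record_type: body stays raw
        return ""
    # Utf16Record, AsciiRecord and ControllerRecord all expose record_string.
    return rec.body.record_string
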
26 changes: 21 additions & 5 deletions acd/generated/dat.py
@@ -47,6 +47,18 @@ def _read(self):
self.record_buffer = self._io.read_bytes_full()


class BffbRecord(KaitaiStruct):
def __init__(self, record_length, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.record_length = record_length
self._read()

def _read(self):
self.record_buffer = self._io.read_bytes(self.record_length)


class Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
@@ -96,18 +108,22 @@ def _read(self):
raise kaitaistruct.ValidationNotAnyOfError(self.identifier, self._io, u"/types/record/seq/0")
self.record_length = self._io.read_u4le()
_on = self.identifier
if _on == 64250:
if _on == 65278:
self._raw_record = self._io.read_bytes((self.record_length - 6))
_io__raw_record = KaitaiStream(BytesIO(self._raw_record))
self.record = Dat.FefeRecord(_io__raw_record, self, self._root)
elif _on == 64447:
self._raw_record = self._io.read_bytes((self.record_length - 6))
_io__raw_record = KaitaiStream(BytesIO(self._raw_record))
self.record = Dat.BffbRecord((self.record_length - 6), _io__raw_record, self, self._root)
elif _on == 64250:
self._raw_record = self._io.read_bytes((self.record_length - 6))
_io__raw_record = KaitaiStream(BytesIO(self._raw_record))
self.record = Dat.FafaRecord((self.record_length - 6), _io__raw_record, self, self._root)
elif _on == 65021:
self._raw_record = self._io.read_bytes((self.record_length - 6))
_io__raw_record = KaitaiStream(BytesIO(self._raw_record))
self.record = Dat.FdfdRecord((self.record_length - 6), _io__raw_record, self, self._root)
elif _on == 65278:
self._raw_record = self._io.read_bytes((self.record_length - 6))
_io__raw_record = KaitaiStream(BytesIO(self._raw_record))
self.record = Dat.FefeRecord(_io__raw_record, self, self._root)
else:
self.record = self._io.read_bytes((self.record_length - 6))

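The identifier switch in dat.py now covers four record markers; written in hex they are easier to match against a hex dump of the .dat files. An illustrative lookup (the dict itself is not part of the code; the names mirror the generated classes):

# .dat record identifiers (decimal constants from dat.py) and the record classes they select.
DAT_RECORD_TYPES = {
    0xFAFA: "FafaRecord",  # 64250
    0xFBBF: "BffbRecord",  # 64447, newly handled by this commit
    0xFDFD: "FdfdRecord",  # 65021
    0xFEFE: "FefeRecord",  # 65278
}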