convert cutscenes concurrently
BLooperZ committed Jan 17, 2021
1 parent 4dfe849 commit a2a49b1
Showing 2 changed files with 85 additions and 46 deletions.
130 changes: 84 additions & 46 deletions src/remonstered/core/cutscenes.py
@@ -1,3 +1,5 @@
import concurrent.futures
import functools
import io
import itertools
import os
@@ -15,6 +17,7 @@


UINT32LE = Struct('<I')
G_PAK = None


@contextmanager
@@ -79,55 +82,90 @@ def get_smush_offsets(res):
yield offset + 8


def compress_and_convert_cutscenes(
pak: lpak.LPakArchive, files: Iterable[str] = (), output_dir: str = '.'
):
for fname in files:
basename = os.path.basename(fname)
simplename, ext = os.path.splitext(basename)

videohd = next(pak.iglob(os.path.join('videohd', f'{simplename}.ogv')), None)
if videohd:
# override SAN file with compressed version
with pak.open(fname, 'rb') as res, suppress_stdout():
data = strip_compress_san(res)

directory = os.path.join(
output_dir, os.path.basename(os.path.dirname(fname))
)
os.makedirs(directory, exist_ok=True)
with open(os.path.join(directory, basename), 'wb') as out:
out.write(data)

flubase = f'{simplename}.flu'
flufile = next(
pak.iglob(os.path.join(os.path.dirname(fname), flubase)),
None,
def get_base_size(pak: lpak.LPakArchive, fname: str):
no_file = lpak.LPAKFileEntry(0, 0, 0, 0, 0)
basename = os.path.basename(fname)
simplename, ext = os.path.splitext(basename)
videohd = os.path.join('videohd', f'{simplename}.ogv')
flubase = f'{simplename}.flu'
flufile = os.path.join(os.path.dirname(fname), flubase)
return (
pak.index[fname].decompressed_size
+ pak.index.get(videohd, no_file).decompressed_size
+ pak.index.get(flufile, no_file).decompressed_size
)


def compress_single(pak: lpak.LPakArchive, fname: str, output_dir: str = '.'):
basename = os.path.basename(fname)
simplename, ext = os.path.splitext(basename)

videohd = next(pak.iglob(os.path.join('videohd', f'{simplename}.ogv')), None)
if videohd:
# override SAN file with compressed version
with pak.open(fname, 'rb') as res, suppress_stdout():
data = strip_compress_san(res)

directory = os.path.join(output_dir, os.path.basename(os.path.dirname(fname)))
os.makedirs(directory, exist_ok=True)
with open(os.path.join(directory, basename), 'wb') as out:
out.write(data)

flubase = f'{simplename}.flu'
flufile = next(
pak.iglob(os.path.join(os.path.dirname(fname), flubase)),
None,
)
if flufile:
with pak.open(flufile, 'rb') as res:
flu = res.read(0x324)
flurest = res.read()

with pak.open(fname, 'rb') as res:
raw_content = res.read()
assert flurest == b''.join(
UINT32LE.pack(offset) for offset in get_smush_offsets(raw_content)
)
if flufile:
with pak.open(flufile, 'rb') as res:
flu = res.read(0x324)
flurest = res.read()

with pak.open(fname, 'rb') as res:
raw_content = res.read()
assert flurest == b''.join(
UINT32LE.pack(offset) for offset in get_smush_offsets(raw_content)
)

with open(os.path.join(directory, flubase), 'wb') as out:
out.write(flu)
out.write(
b''.join(
UINT32LE.pack(offset) for offset in get_smush_offsets(data)
)
with open(os.path.join(directory, flubase), 'wb') as out:
out.write(flu)
out.write(
b''.join(
UINT32LE.pack(offset) for offset in get_smush_offsets(data)
)
)

# extract audio stream from HD video
with pak.open(videohd, 'rb') as vid:
stream = vid.read()
extract_ogv_audio(stream, os.path.join(directory, f'{simplename}.ogg'))
return get_base_size(pak, fname)


def init_worker(archive_name: str):
global G_PAK
G_PAK = lpak.LPakArchive(archive_name)


def convert_worker(fname: str, output_dir: str = '.'):
global G_PAK
assert G_PAK is not None
return compress_single(G_PAK, fname, output_dir)

# extract audio stream from HD video
with pak.open(videohd, 'rb') as vid:
stream = vid.read()
extract_ogv_audio(stream, os.path.join(directory, f'{simplename}.ogg'))
yield 1

def compress_and_convert_cutscenes(
pak: lpak.LPakArchive, files: Iterable[str] = (), output_dir: str = '.'
):
worker = functools.partial(convert_worker, output_dir=output_dir)
with concurrent.futures.ProcessPoolExecutor(
initializer=init_worker, initargs=(pak.path,)
) as executor:
try:
results = executor.map(worker, files)
yield from results
except KeyboardInterrupt as kbi:
executor.shutdown(wait=False)
raise kbi


def convert_cutscenes(pak: lpak.LPakArchive, output_dir: str = '.'):
@@ -137,7 +175,7 @@ def convert_cutscenes(pak: lpak.LPakArchive, output_dir: str = '.'):
)
if len(files) > 0:
action = 'Converting cutscenes...'
total_size = len(files)
total_size = sum(get_base_size(pak, fname) for fname in files)
yield action, (
compress_and_convert_cutscenes(pak, files, output_dir),
total_size,
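Note on the cutscenes.py change: the per-file conversion loop is split out into compress_single and handed to a ProcessPoolExecutor. Only the archive path is passed to the pool via initargs; init_worker then reopens the archive once per worker process into the module-level G_PAK, and convert_worker reuses that process-local handle for every task, presumably because an open LPakArchive handle is not something that can be pickled and shipped to workers. Progress is also now measured in decompressed bytes (get_base_size) instead of a flat file count. The snippet below is a minimal standalone sketch of this initializer-plus-global pattern, not the project's code: open_handle, read_chunk, read_all, and the plain open() file object are hypothetical stand-ins for init_worker, compress_single, and LPakArchive.

import concurrent.futures
import functools

# Process-local handle, filled in once per worker by the initializer
# (mirrors G_PAK above; a plain file object stands in for LPakArchive).
G_HANDLE = None


def open_handle(path: str) -> None:
    global G_HANDLE
    G_HANDLE = open(path, 'rb')


def read_chunk(offset: int, size: int = 4096) -> int:
    # Each task reuses the handle opened for its own process.
    assert G_HANDLE is not None
    G_HANDLE.seek(offset)
    return len(G_HANDLE.read(size))


def read_all(path: str, offsets):
    worker = functools.partial(read_chunk, size=4096)
    with concurrent.futures.ProcessPoolExecutor(
        initializer=open_handle, initargs=(path,)
    ) as executor:
        # map() yields results lazily and in input order, so the caller
        # can sum the returned sizes to drive a progress bar.
        yield from executor.map(worker, offsets)


if __name__ == '__main__':
    print(list(read_all(__file__, [0, 64, 128])))
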
1 change: 1 addition & 0 deletions src/remonstered/core/lpak.py
@@ -140,6 +140,7 @@ def __init__(self, filename: str, fileobj: Optional[IO[bytes]] = None) -> None:
tag, version, views = read_header(self._stream)
read_findex = get_findex if version < 1.5 else get_findex_v15
self.index, self._data = read_findex(self._stream, views)
self.path = filename

def __enter__(self) -> 'LPakArchive':
return self
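The one-line lpak.py change stores the archive's filename on the object so that compress_and_convert_cutscenes can pass pak.path through initargs and let each worker process reopen the archive itself. A hypothetical usage sketch follows; the 'game.pak' filename and the remonstered.core import path are assumptions based on the file layout shown above.

from remonstered.core import lpak

with lpak.LPakArchive('game.pak') as pak:   # 'game.pak' is a placeholder archive name
    print(pak.path)                         # -> 'game.pak'
    # Only this string crosses the process boundary; init_worker then calls
    # lpak.LPakArchive(pak.path) in each worker to get its own handle.
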
