# anime-blk-stuff/helpers/index.py
# 2026-01-05 12:33:47 +03:00
#
# 143 lines
# 4 KiB
# Python

# extract and parse asset map
import helpers.utils as utils
from parsers.ai import AssetIndex
import io
import os
import sys
import json
import shutil
import itertools
import subprocess
from rich import print
from datetime import datetime
from rich.markdown import Markdown
from kaitaistruct import KaitaiStream
from rich.progress import Progress, SpinnerColumn, TimeElapsedColumn, Console
def extract_asset_index():
    """Build the JSON asset index for the configured game target.

    Pipeline:
      1. Extract the ``0000006f`` MiHoYoBinData blob from the blk archive
         with the external AssetStudio CLI (``studioPath`` config key).
      2. Parse it with the Kaitai-generated ``AssetIndex`` parser.
      3. Assemble a dict of types, sub-assets, dependencies, preload blocks
         and asset/block offsets, then write it to
         ``configs/indexes/index.<target>.json`` plus a ``.meta.json``
         sidecar (coverage ratio, file size, timestamp).

    Requires the ``gameTarget`` config key; aborts with a message when it is
    unset. Returns None; all results are written to disk.
    """
    game_target = utils.get_config("gameTarget")
    if not game_target:
        print("[red3]No game target !\nDefine it with --set_target first[/]")
        return
    console = Console(record=True)
    print(Markdown(f"## Generating asset index file (target : v{game_target})"))
    with Progress(
        SpinnerColumn(),
        *Progress.get_default_columns(),
        TimeElapsedColumn(),
        console=console,
        transient=False
    ) as progress:
        task = progress.add_task("[cyan]Building asset index...", total=None)
        # extract 0000006f from blk
        blk_file = utils.get_path("blk/31049740.blk")  # TODO: use config for block id
        progress.log("Extracting [b]0000006f[/]...")
        subprocess.call(
            [utils.get_config("studioPath"), blk_file, utils.get_path("bin/"),
             "--game", "GI", "--types", "MiHoYoBinData"],
            stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
        shutil.move(utils.get_path("bin/MiHoYoBinData/0000006f"),
                    utils.get_path("bin/0000006f"))
        shutil.rmtree(utils.get_path("bin/MiHoYoBinData"))
        # parse 0000006f
        progress.log("Parsing...")
        with open(utils.get_path("bin/0000006f"), "rb") as f:
            ai_ref_data = f.read()
        # Keep the data in an in-memory stream so the on-disk copy can be
        # deleted once processing is done.
        stream = KaitaiStream(io.BytesIO(ai_ref_data))
        asset_index_ref = AssetIndex(stream)
        with open(utils.get_path("configs/map_helper.json"), "r", encoding="utf-8") as f:
            asset_map = json.load(f)
        asset_index = {}
        # types
        progress.log(Markdown("Processing __types__..."))
        asset_index["Types"] = {e.name.data: e.mapped_to.data
                                for e in asset_index_ref.type_mapping}
        # sub assets
        progress.log(Markdown("Processing __sub assets__..."))
        sub = {}
        matched = 0  # how many assets we could resolve to a name
        for e in asset_index_ref.assets:
            try:
                # Combined hash is the key into the helper map (string keys).
                name = asset_map[str((e.path_hash_last << 8) | e.path_hash_pre)]
                matched += 1
            except KeyError:  # hash not in the helper map -> unnamed asset
                name = ""
            sub.setdefault(e.sub_asset_id, []).append({
                "Name": name,
                "PathHashPre": e.path_hash_pre,
                "PathHashLast": e.path_hash_last
            })
        # Guard against an empty asset list (would raise ZeroDivisionError).
        coverage = matched / len(asset_index_ref.assets) if asset_index_ref.assets else 0.0
        asset_index["SubAssets"] = sub
        # dependencies
        progress.log(Markdown("Processing __dependencies__..."))
        asset_index["Dependencies"] = {e.asset_id: e.dependencies_list
                                       for e in asset_index_ref.dependencies}
        # preload blocks
        progress.log(Markdown("Processing __preload blocks__..."))
        asset_index["PreloadBlocks"] = asset_index_ref.preload_blocks
        # preload shader blocks
        progress.log(Markdown("Processing __preload shader blocks__..."))
        asset_index["PreloadShaderBlocks"] = asset_index_ref.preload_shader_blocks
        # assets
        progress.log(Markdown("Processing __assets__..."))
        # Map each block to its language group id (0 when ungrouped).
        blocksGroups = {j.block_list: i.group_id
                        for i in asset_index_ref.block_groups
                        for j in i.block_list}
        asset_index["Assets"] = {j.path_hash_pre: {
            "Language": blocksGroups.get(i.block_id, 0),
            "Id": i.block_id,
            "Offset": j.offset
        } for i in asset_index_ref.block_infos for j in i.asset_offsets}
        # write output
        data = json.dumps(asset_index, ensure_ascii=False, sort_keys=True)
        filesize = len(data)
        with open(utils.get_path(f"configs/indexes/index.{game_target}.json"),
                  "w", encoding="utf-8") as f:
            f.write(data)
        metadata = {
            "coverage": coverage,
            "size": filesize,
            "time": datetime.now().timestamp()
        }
        with open(utils.get_path(f"configs/indexes/index.{game_target}.meta.json"),
                  "w", encoding="utf-8") as f:
            f.write(json.dumps(metadata))
        # Drop the large references and the temporary extracted file.
        data = None
        asset_index_ref = None
        os.remove(utils.get_path("bin/0000006f"))
        progress.stop_task(task)
        progress.log(Markdown("## Done !"))
        progress.log(Markdown(
            f"# Coverage : `{coverage:.2%}` | Map size : `{round(filesize / 1048576, 2)} Mb`"))