Add web-based .packed explorer, updated parser and Ghidra utility script

Daniel S. 2023-05-07 21:29:21 +02:00
parent 8e0df74541
commit 58407ecc9f
35 changed files with 3897 additions and 353 deletions

tools/remaster/scrap_parse/.gitignore (vendored, new file, 177 lines)

@@ -0,0 +1,177 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
*.pkl.gz

File diff suppressed because it is too large


@@ -9,13 +9,18 @@ edition = "2021"
anyhow = "1.0.69"
binrw = "0.11.1"
chrono = { version = "0.4.23", features = ["serde"] }
chrono-humanize = "0.2.2"
# chrono-humanize = "0.2.2"
clap = { version = "4.1.6", features = ["derive"] }
configparser = { version = "3.0.2", features = ["indexmap"] }
flate2 = "1.0.25"
fs-err = "2.9.0"
indexmap = { version = "1.9.2", features = ["serde"] }
# memmap2 = "0.5.10"
modular-bitfield = "0.11.2"
rhexdump = "0.1.1"
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.93", features = ["unbounded_depth"] }
serde-pickle = "1.1.1"
serde_json = { version = "1.0.95", features = ["preserve_order", "unbounded_depth"] }
steamlocate = "1.1.0"
walkdir = "2.3.3"
obj = "0.10.2"


@@ -0,0 +1,23 @@
import pickle
import subprocess as SP
from . import packed_browser
from . import level_import

def scrap_bridge(*cmd):
    cmd = ["scrap_parse", *cmd]
    # stderr must be piped, otherwise communicate() returns None for it and the
    # error path below fails; shell=True is dropped so the argument list is
    # passed to the binary unmangled on all platforms
    proc = SP.Popen(cmd, stderr=SP.PIPE, stdin=None, stdout=SP.PIPE, text=False)
    stdout, stderr = proc.communicate()
    code = proc.wait()
    if code:
        raise RuntimeError(str(stderr, "utf8"))
    return pickle.loads(stdout)

def register():
    packed_browser.register()
    level_import.register()

def unregister():
    packed_browser.unregister()
    level_import.unregister()
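
Since the Rust CLI pickles its output by default (see `to_bytes` in the parser diff below), the bridge round-trips nested structures directly. A minimal usage sketch, assuming `scrap_parse` is on `PATH`:

    scrapland_path = scrap_bridge("find-scrapland")
    packed_index = scrap_bridge("parse-packed", scrapland_path)  # maps archive path -> entry list
    for archive, entries in packed_index.items():
        print(archive, "->", len(entries), "files")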


@@ -2,16 +2,16 @@ import bpy
import sys
import os
import re
import json
import gzip
import pickle
import argparse
import shutil
from glob import glob
from mathutils import Vector
from pathlib import Path
import numpy as np
import itertools as ITT
from pprint import pprint
# from .. import scrap_bridge
import bmesh
from bpy.props import StringProperty, BoolProperty
from bpy_extras.io_utils import ImportHelper
@@ -25,12 +25,6 @@ if "--" in sys.argv:
parser.add_argument("file_list", nargs="+")
cmdline = parser.parse_args(args)
def fix_pos(xyz):
x, y, z = xyz
return x, z, y
class ScrapImporter(object):
def __init__(self, options):
self.unhandled = set()
@@ -39,16 +33,22 @@ class ScrapImporter(object):
self.model_scale = 1000.0
self.spawn_pos = {}
self.objects = {}
print("Loading", filepath)
with gzip.open(filepath, "r") as fh:
data = json.load(fh)
# print("Loading", filepath)
# scrapland_path=scrap_bridge("find-scrapland")
# print(scrapland_path)
# packed_data=scrap_bridge("parse-packed",scrapland_path)
# print(packed_data)
# get_output(["scrap_parse","parse-file","--stdout",scrapland_path,"levels/temple"])
# raise NotImplementedError()
with gzip.open(filepath, "rb") as fh:
data = pickle.load(fh)
self.path = data.pop("path")
self.root = data.pop("root")
self.config = data.pop("config")
self.dummies = data.pop("dummies")["DUM"]["dummies"]
self.dummies = data.pop("dummies")["dummies"]
self.moredummies = data.pop("moredummies")
self.emi = data.pop("emi")["EMI"]
self.sm3 = data.pop("sm3")["SM3"]
self.emi = data.pop("emi")
self.sm3 = data.pop("sm3")
def make_empty(self, name, pos, rot=None):
empty = bpy.data.objects.new(name, None)
@@ -119,7 +119,7 @@ class ScrapImporter(object):
bpy.context.scene.collection.objects.link(light)
def create_nodes(self):
for node in self.sm3["scene"]["nodes"]:
for node in self.sm3["scene"].get("nodes",[]):
node_name = node["name"]
node = node.get("content", {})
if not node:
@@ -212,6 +212,8 @@
)
else:
folders = ITT.chain([start_folder], start_folder.parents)
folders=list(folders)
print(f"Looking for {path} in {folders}")
for folder in folders:
for suffix in file_extensions:
for dds in [".", "dds"]:
@@ -227,7 +229,7 @@
return list(filter(lambda i: (i.type, i.name) == (dtype, name), node.inputs))
def build_material(self, mat_key, mat_def):
def build_material(self, mat_key, mat_def, map_def):
mat_props = dict(m.groups() for m in re.finditer(r"\(\+(\w+)(?::(\w*))?\)",mat_key))
for k,v in mat_props.items():
mat_props[k]=v or True
@@ -260,13 +262,13 @@
"Roughness": 0.0,
"Specular": 0.2,
}
tex_slots=[
"Base Color",
"Metallic",
None, # "Clearcoat" ? env map?
"Normal",
"Emission"
]
tex_slot_map={
"base": "Base Color",
"metallic":"Metallic",
"unk_1":None, # "Clearcoat" ? env map?
"bump":"Normal",
"glow":"Emission"
}
mat = bpy.data.materials.new(mat_key)
mat.use_nodes = True
@@ -275,7 +277,13 @@
imgs = {}
animated_textures={}
is_transparent = True
for slot,tex in zip(tex_slots,mat_def["maps"]):
print(map_def)
if map_def[0]:
print("=== MAP[0]:",self.resolve_path(map_def[0]))
if map_def[2]:
print("=== MAP[2]:",self.resolve_path(map_def[2]))
for slot,tex in mat_def["maps"].items():
slot=tex_slot_map.get(slot)
if (slot is None) and tex:
self.unhandled.add(tex["texture"])
print(f"Don't know what to do with {tex}")
@@ -286,9 +294,7 @@
continue
tex_name = os.path.basename(tex_file)
if ".000." in tex_name:
tex_files=glob(tex_file.replace(".000.",".*."))
num_frames=len(tex_files)
animated_textures[slot]=num_frames
animated_textures[slot]=len(glob(tex_file.replace(".000.",".*.")))
mat_props.update(overrides.get(tex_name,{}))
if any(
tex_name.find(fragment) != -1
@@ -297,7 +303,7 @@
continue
else:
is_transparent = False
imgs[slot]=bpy.data.images.load(tex_file)
imgs[slot]=bpy.data.images.load(tex_file,check_existing=True)
for n in nodes:
nodes.remove(n)
out = nodes.new("ShaderNodeOutputMaterial")
@@ -311,7 +317,6 @@
settings.update(glass_settings)
for name, value in settings.items():
shader.inputs[name].default_value = value
sockets_used = set()
for socket,img in imgs.items():
img_node = nodes.new("ShaderNodeTexImage")
img_node.name = img.name
@@ -369,17 +374,20 @@
node_tree.links.new(imgs["Base Color"].outputs["Color"],transp_shader.inputs["Color"])
shader_out=mix_shader.outputs["Shader"]
node_tree.links.new(shader_out, out.inputs["Surface"])
# try:
# bpy.ops.node.button()
# except:
# pass
return mat
def apply_maps(self, ob, m_mat, m_map):
mat_key, m_mat = m_mat
map_key, m_map = m_map # TODO?: MAP
map_key, m_map = m_map
if mat_key == 0:
return
mat_name = m_mat.get("name", f"MAT:{mat_key:08X}")
map_name = f"MAP:{map_key:08X}"
if mat_name not in bpy.data.materials:
ob.active_material = self.build_material(mat_name, m_mat)
ob.active_material = self.build_material(mat_name, m_mat, m_map)
else:
ob.active_material = bpy.data.materials[mat_name]
@@ -424,17 +432,17 @@
ob = bpy.data.objects.new(name, me)
self.apply_maps(ob, m_mat, m_map)
bpy.context.scene.collection.objects.link(ob)
self.objects.setdefault(name, []).append(ob)
self.objects.setdefault(name.split("(")[0], []).append(ob)
return ob
class Scrap_Load(Operator, ImportHelper):
bl_idname = "scrap_utils.import_json"
bl_label = "Import JSON"
bl_idname = "scrap_utils.import_pickle"
bl_label = "Import Pickle"
filename_ext = ".json.gz"
filter_glob: StringProperty(default="*.json.gz", options={"HIDDEN"})
filename_ext = ".pkl.gz"
filter_glob: StringProperty(default="*.pkl.gz", options={"HIDDEN"})
create_dummies: BoolProperty(
name="Import dummies",
@@ -447,25 +455,20 @@
)
create_tracks: BoolProperty(
    name="Create track curves",
    default=True
)
merge_objects: BoolProperty(
    name="Merge objects by name",
    default=False
)
remove_dup_verts: BoolProperty(
    name="Remove overlapping vertices\nfor smoother meshes",
    default=True
)
# remove_dup_verts: BoolProperty(
# name="Remove overlapping vertices for smoother meshes",
# default=False
# )
def execute(self, context):
bpy.ops.preferences.addon_enable(module = "node_arrange")
@@ -488,7 +491,7 @@ def unregister():
if __name__ == "__main__":
if cmdline is None or not cmdline.file_list:
register()
bpy.ops.scrap_utils.import_json("INVOKE_DEFAULT")
bpy.ops.scrap_utils.import_pickle("INVOKE_DEFAULT")
else:
for file in cmdline.file_list:
bpy.context.preferences.view.show_splash = False
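
With the importer switched from gzipped JSON to gzipped pickle, the intended round trip through the Rust CLI looks roughly like this (a sketch, not part of the commit; `scrap_parse` on `PATH`, paths and level name illustrative):

    import subprocess, gzip, pickle
    subprocess.run(["scrap_parse", "parse-file", "/path/to/Scrapland", "levels/temple"], check=True)
    # cmd_parse_file names the dump after the last path component
    with gzip.open("temple.pkl.gz", "rb") as fh:
        level = pickle.load(fh)
    print(level["path"], len(level["sm3"]["scene"].get("nodes", [])))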


@@ -0,0 +1,118 @@
import sys
import bpy
from .. import scrap_bridge
from bpy.props import (StringProperty, BoolProperty, CollectionProperty,
                       IntProperty)

bl_info = {
    "name": "Packed Archive File",
    "blender": (2, 71, 0),
    "location": "File > Import",
    "description": "Import data from Scrapland .packed Archive",
    "category": "Import-Export"}

class ImportFilearchives(bpy.types.Operator):
    """Import whole filearchives directory."""
    bl_idname = "import_scene.packed"
    bl_label = 'Import Scrapland .packed'

    directory = StringProperty(name="'Scrapland' folder",
                               subtype="DIR_PATH", options={'HIDDEN'})
    filter_folder = BoolProperty(default=True, options={'HIDDEN'})
    filter_glob = StringProperty(default="", options={'HIDDEN'})

    def invoke(self, context, event):
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        # TODO: Validate filepath
        bpy.ops.ui.packed_browser('INVOKE_DEFAULT', filepath=self.directory)
        return {'FINISHED'}

class PackedFile(bpy.types.PropertyGroup):
    path = bpy.props.StringProperty()
    packed_file = bpy.props.StringProperty()
    selected = bpy.props.BoolProperty(name="")
    offset = bpy.props.IntProperty()
    size = bpy.props.IntProperty()
    archive = None

class PackedBrowser(bpy.types.Operator):
    bl_idname = "ui.packed_browser"
    bl_label = "Packed Browser"
    bl_options = {'INTERNAL'}

    filepath = StringProperty()
    files = CollectionProperty(type=PackedFile)
    selected_index = IntProperty(default=0)

    def invoke(self, context, event):
        scrapland_path = scrap_bridge("find-scrapland")
        print(scrapland_path)
        packed_data = scrap_bridge("parse-packed", scrapland_path)
        print(packed_data)
        self.packed_data = packed_data
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        if self.selected_index != -1:
            print("new selected_index: " + str(self.selected_index))
            self.files.clear()
            # packed_data maps archive path -> entry list; field names follow the
            # Rust PackedFile struct, assuming serde-pickle's default dict encoding
            for packed_name, files in self.packed_data.items():
                for file in files:
                    entry = self.files.add()
                    entry.packed_file = str(packed_name)
                    entry.path = file["path"]
                    entry.size = file["size"]
                    entry.offset = file["offset"]
            self.selected_index = -1
        self.layout.template_list("PackedDirList", "", self, "files", self, "selected_index")

    def execute(self, context):
        print("execute")
        return {'FINISHED'}

class PackedDirList(bpy.types.UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        operator = data
        packed_entry = item
        if self.layout_type in {'DEFAULT', 'COMPACT'}:
            layout.prop(packed_entry, "path", text="", emboss=False, icon_value=icon)
            layout.prop(packed_entry, "selected")
        elif self.layout_type in {'GRID'}:
            layout.alignment = 'CENTER'
            layout.label(text="", icon_value=icon)

def menu_func_import(self, context):
    self.layout.operator(ImportFilearchives.bl_idname, text="Scrapland .packed")

classes = [
    PackedFile,
    PackedDirList,
    PackedBrowser,
    ImportFilearchives,
]

def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.INFO_MT_file_import.append(menu_func_import)

def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
    bpy.types.INFO_MT_file_import.remove(menu_func_import)

if __name__ == "__main__":
    import imp
    imp.reload(sys.modules[__name__])
    for cls in classes:
        bpy.utils.register_class(cls)


@@ -0,0 +1,66 @@
// Ghidra decompilation: computes the byte size of one vertex for a given
// D3D flexible vertex format (FVF) descriptor
int _D3DXGetFVFVertexSize(uint fvf)
{
    uint uVar1;
    uint uVar2;
    uint uVar3;
    int vert_size;

    uVar1 = fvf & 0xe;                  // position format (D3DFVF_POSITION_MASK)
    vert_size = 0;
    if (uVar1 == 2) {
        vert_size = 0xc;                // XYZ: 3 floats
    }
    else if ((uVar1 == 4) || (uVar1 == 6)) {
        vert_size = 0x10;               // XYZRHW or XYZB1: 4 floats
    }
    else if (uVar1 == 8) {
        vert_size = 0x14;               // XYZB2
    }
    else if (uVar1 == 0xa) {
        vert_size = 0x18;               // XYZB3
    }
    else if (uVar1 == 0xc) {
        vert_size = 0x1c;               // XYZB4
    }
    else if (uVar1 == 0xe) {
        vert_size = 0x20;               // XYZB5
    }
    if ((fvf & 0x10) != 0) {            // D3DFVF_NORMAL
        vert_size += 0xc;
    }
    if ((fvf & 0x20) != 0) {            // D3DFVF_PSIZE
        vert_size += 4;
    }
    if ((fvf & 0x40) != 0) {            // D3DFVF_DIFFUSE
        vert_size += 4;
    }
    if (fvf < '\0') {                   // decompiler artifact: a signed test that
        vert_size += 4;                 // corresponds to (fvf & 0x80), D3DFVF_SPECULAR
    }
    uVar1 = fvf >> 8 & 0xf;             // texture coordinate set count
    uVar3 = fvf >> 16;                  // 2 size bits per texture coordinate set
    if (uVar3 == 0) {
        vert_size += uVar1 * 8;         // default: every set is FLOAT2
    }
    else {
        for (; uVar1 != 0; uVar1 -= 1) {
            uVar2 = uVar3 & 3;
            if (uVar2 == 0) {
                vert_size += 8;         // FLOAT2
            }
            else if (uVar2 == 1) {
                vert_size += 0xc;       // FLOAT3
            }
            else if (uVar2 == 2) {
                vert_size += 0x10;      // FLOAT4
            }
            else if (uVar2 == 3) {
                vert_size += 4;         // FLOAT1
            }
            uVar3 >>= 2;
        }
    }
    return vert_size;
}
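
For cross-checking the parser's `vertex_size_from_id`, the decompiled routine translates almost line-for-line into Python; this sketch reads the `fvf < '\0'` branch as the D3DFVF_SPECULAR (0x80) bit test:

    def fvf_vertex_size(fvf: int) -> int:
        size = {0x2: 12, 0x4: 16, 0x6: 16, 0x8: 20, 0xa: 24, 0xc: 28, 0xe: 32}.get(fvf & 0xe, 0)
        if fvf & 0x10: size += 12          # D3DFVF_NORMAL
        if fvf & 0x20: size += 4           # D3DFVF_PSIZE
        if fvf & 0x40: size += 4           # D3DFVF_DIFFUSE
        if fvf & 0x80: size += 4           # D3DFVF_SPECULAR
        ntex, fmt = (fvf >> 8) & 0xf, fvf >> 16
        if fmt == 0:
            return size + ntex * 8         # all texcoord sets default to FLOAT2
        for _ in range(ntex):
            size += {0: 8, 1: 12, 2: 16, 3: 4}[fmt & 3]
            fmt >>= 2
        return size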


@@ -1,103 +0,0 @@
bl_info = {
"name": "Riot Archive File (RAF)",
"blender": (2, 71, 0),
"location": "File &gt; Import",
"description": "Import LoL data of an Riot Archive File",
"category": "Import-Export"}
import bpy
from io_scene_lolraf import raf_utils
from bpy.props import (StringProperty, BoolProperty, CollectionProperty,
IntProperty)
class ImportFilearchives(bpy.types.Operator):
"""Import whole filearchives directory."""
bl_idname = "import_scene.rafs"
bl_label = 'Import LoL filearchives'
directory = StringProperty(name="'filearchives' folder",
subtype="DIR_PATH", options={'HIDDEN'})
filter_folder = BoolProperty(default=True, options={'HIDDEN'})
filter_glob = StringProperty(default="", options={'HIDDEN'})
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def execute(self, context):
# TODO: Validate filepath
bpy.ops.ui.raf_browser('INVOKE_DEFAULT',filepath=self.directory)
return {'FINISHED'}
class RAFEntry(bpy.types.PropertyGroup):
name = bpy.props.StringProperty()
selected = bpy.props.BoolProperty(name="")
archive = None
class RAFBrowser(bpy.types.Operator):
bl_idname = "ui.raf_browser"
bl_label = "RAF-browser"
bl_options = {'INTERNAL'}
filepath = StringProperty()
current_dir = CollectionProperty(type=RAFEntry)
selected_index = IntProperty(default=0)
def invoke(self, context, event):
global archive
archive = raf_utils.RAFArchive(self.filepath)
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
if self.selected_index != -1:
print("new selected_index: " + str(self.selected_index))
global archive
# TODO: change current directory of archive
self.current_dir.clear()
for dir in archive.current_dir():
entry = self.current_dir.add()
entry.name = dir
self.selected_index = -1
self.layout.template_list("RAFDirList", "", self, "current_dir", self, "selected_index")
def execute(self, context):
print("execute")
return {'FINISHED'}
class RAFDirList(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
operator = data
raf_entry = item
if self.layout_type in {'DEFAULT', 'COMPACT'}:
layout.prop(raf_entry, "name", text="", emboss=False, icon_value=icon)
layout.prop(raf_entry, "selected")
elif self.layout_type in {'GRID'}:
layout.alignment = 'CENTER'
layout.label(text="", icon_value=icon)
def menu_func_import(self, context):
self.layout.operator(ImportFilearchives.bl_idname, text="LoL Filearchives")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_import.remove(menu_func_import)
if __name__ == "__main__":
import imp
imp.reload(raf_utils)
bpy.utils.register_module(__name__)


@@ -0,0 +1,15 @@
use std::path::PathBuf;
use steamlocate::SteamDir;
use anyhow::{bail, Result};

const APP_ID: u32 = 897610;

pub(crate) fn get_executable() -> Result<PathBuf> {
    let Some(mut steam) = SteamDir::locate() else {
        bail!("Failed to find steam folder");
    };
    let Some(app) = steam.app(&APP_ID) else {
        bail!("App {APP_ID} is not installed!");
    };
    Ok(app.path.clone())
}


@@ -5,6 +5,7 @@ use binrw::prelude::*;
use binrw::until_exclusive;
use chrono::{DateTime, NaiveDateTime, Utc};
use clap::Parser;
use clap::Subcommand;
use configparser::ini::Ini;
use flate2::write::GzEncoder;
use flate2::Compression;
@@ -15,14 +16,37 @@
use modular_bitfield::specifiers::B4;
use modular_bitfield::BitfieldSpecifier;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{BufReader, Read, Seek};
use std::io::{BufReader, Cursor, Read, Seek};
use std::path::Path;
use std::path::PathBuf;
use walkdir::WalkDir;
mod find_scrap;
type IniData = IndexMap<String, IndexMap<String, Option<String>>>;
#[binread]
#[derive(Serialize, Debug)]
struct PackedFile{
path: PascalString,
size: u32,
offset: u32
}
#[binread]
#[br(magic = b"BFPK")]
#[derive(Serialize, Debug)]
struct PackedHeader {
#[br(temp,assert(version==0))]
version: u32,
#[br(temp)]
num_files: u32,
#[br(count=num_files)]
files: Vec<PackedFile>,
}
#[binread]
#[derive(Serialize, Debug)]
@@ -141,6 +165,7 @@ struct IniSection {
#[br(magic = b"INI\0")]
#[derive(Debug)]
struct INI {
#[br(temp)]
size: u32,
#[br(temp)]
num_sections: u32,
@@ -153,13 +178,17 @@ impl Serialize for INI {
where
S: serde::Serializer,
{
use serde::ser::Error;
let blocks: Vec<String> = self
.sections
.iter()
.flat_map(|s| s.sections.iter())
.map(|s| s.string.clone())
.collect();
Ini::new().read(blocks.join("\n")).serialize(serializer)
Ini::new()
.read(blocks.join("\n"))
.map_err(Error::custom)?
.serialize(serializer)
}
}
@@ -227,7 +256,7 @@ enum Pos {
#[repr(u32)]
#[derive(Debug, Serialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct FVF {
reserved_1: bool,
reserved: bool,
pos: Pos,
normal: bool,
point_size: bool,
@@ -267,17 +296,17 @@ impl FVF {
}
}
fn num_w(&self) -> usize {
use Pos::*;
match self.pos() {
XYZ | XYZRHW => 0,
XYZB1 => 1,
XYZB2 => 2,
XYZB3 => 3,
XYZB4 => 4,
XYZB5 => 5,
}
}
// fn num_w(&self) -> usize {
// use Pos::*;
// match self.pos() {
// XYZ | XYZRHW => 0,
// XYZB1 => 1,
// XYZB2 => 2,
// XYZB3 => 3,
// XYZB4 => 4,
// XYZB5 => 5,
// }
// }
}
fn vertex_size_from_id(fmt_id: u32) -> Result<u32> {
@@ -361,6 +390,7 @@ struct MD3D {
tris: Vec<[u16; 3]>,
mesh_data: LFVF,
unk_table_1: RawTable<2>,
rest: Unparsed<0x100>
// TODO:
// ==
// unk_t1_count: u32,
@@ -383,7 +413,7 @@ enum NodeData {
#[br(magic = 0x0u32)]
Null,
#[br(magic = 0xa1_00_00_01_u32)]
TriangleMesh, // Empty?
TriangleMesh(Unparsed<0x10>), // TODO: Empty or unused?
#[br(magic = 0xa1_00_00_02_u32)]
Mesh(MD3D),
#[br(magic = 0xa2_00_00_04_u32)]
@@ -393,7 +423,7 @@
#[br(magic = 0xa4_00_00_10_u32)]
Ground(SUEL),
#[br(magic = 0xa5_00_00_20_u32)]
SisPart(Unparsed<0x10>), // TODO: Particles
SistPart(Unparsed<0x10>), // TODO: Particles
#[br(magic = 0xa6_00_00_40_u32)]
Graphic3D(SPR3),
#[br(magic = 0xa6_00_00_80_u32)]
@@ -521,6 +551,16 @@ struct MAP {
unk_3: Option<[u8; 0xc]>,
}
#[binread]
#[derive(Debug, Serialize)]
struct Textures {
base: Optional<MAP>,
metallic: Optional<MAP>,
unk_1: Optional<MAP>,
bump: Optional<MAP>,
glow: Optional<MAP>
}
#[binread]
#[br(magic = b"MAT\0")]
#[derive(Debug, Serialize)]
@@ -532,7 +572,7 @@ struct MAT {
name: Option<PascalString>,
unk_f: [RGBA; 7],
unk_data: [RGBA; 0x18 / 4],
maps: [Optional<MAP>; 5], // Base Color, Metallic?, ???, Normal, Emission
maps: Textures
}
#[binread]
@@ -556,9 +596,9 @@ struct SCN {
#[br(temp,assert(unk_3==1))]
unk_3: u32,
num_nodes: u32,
#[br(count = num_nodes)] // 32
#[br(count = 1)] // 32
nodes: Vec<Node>,
ani: Optional<ANI>, // TODO:?
// ani: Optional<ANI>, // TODO: ?
}
fn convert_timestamp(dt: u32) -> Result<DateTime<Utc>> {
@@ -682,11 +722,11 @@ struct CM3 {
#[binread]
#[derive(Debug, Serialize)]
struct Dummy {
has_next: u32,
name: PascalString,
pos: [f32; 3],
rot: [f32; 3],
info: Optional<INI>,
has_next: u32,
}
#[binread]
@@ -697,7 +737,6 @@ struct DUM {
#[br(assert(version==1, "Invalid DUM version"))]
version: u32,
num_dummies: u32,
unk_1: u32,
#[br(count=num_dummies)]
dummies: Vec<Dummy>,
}
@@ -826,13 +865,6 @@ enum Data {
EMI(EMI),
}
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
root: PathBuf,
path: PathBuf,
}
fn parse_file(path: &PathBuf) -> Result<Data> {
let mut rest_size = 0;
let mut fh = BufReader::new(fs::File::open(path)?);
@@ -842,11 +874,11 @@ fn parse_file(path: &PathBuf) -> Result<Data> {
.unwrap_or(0)
.try_into()
.unwrap_or(u32::MAX);
println!("Read {} bytes from {}", pos, path.display());
eprintln!("Read {} bytes from {}", pos, path.display());
let mut buffer = [0u8; 0x1000];
if let Ok(n) = fh.read(&mut buffer) {
if n != 0 {
println!("Rest:\n{}", rhexdump::hexdump_offset(&buffer[..n], pos));
eprintln!("Rest:\n{}", rhexdump::hexdump_offset(&buffer[..n], pos));
}
};
while let Ok(n) = fh.read(&mut buffer) {
@@ -855,52 +887,182 @@
}
rest_size += n;
}
println!("+{rest_size} unparsed bytes");
eprintln!("+{rest_size} unparsed bytes");
Ok(ret)
}
fn load_ini(path: &PathBuf) -> IndexMap<String, IndexMap<String, Option<String>>> {
fn load_ini(path: &PathBuf) -> IniData {
Ini::new().load(path).unwrap_or_default()
}
fn load_data(root: &Path, path: &Path) -> Result<Value> {
let full_path = &root.join(path);
let emi_path = full_path.join("map").join("map3d.emi");
let sm3_path = emi_path.with_extension("sm3");
let dum_path = emi_path.with_extension("dum");
let config_file = emi_path.with_extension("ini");
let moredummies = emi_path.with_file_name("moredummies").with_extension("ini");
let mut data = serde_json::to_value(HashMap::<(), ()>::default())?;
data["config"] = serde_json::to_value(load_ini(&config_file))?;
data["moredummies"] = serde_json::to_value(load_ini(&moredummies))?;
data["emi"] = serde_json::to_value(parse_file(&emi_path)?)?;
data["sm3"] = serde_json::to_value(parse_file(&sm3_path)?)?;
data["dummies"] = serde_json::to_value(parse_file(&dum_path)?)?;
data["path"] = serde_json::to_value(path)?;
data["root"] = serde_json::to_value(root)?;
Ok(data)
#[derive(Serialize, Debug)]
struct Level {
config: IniData,
moredummies: IniData,
emi: EMI,
sm3: SM3,
dummies: DUM,
path: PathBuf,
root: PathBuf,
}
fn main() -> Result<()> {
let args = Args::try_parse()?;
impl Level {
fn load(root: &Path, path: &Path) -> Result<Self> {
let full_path = &root.join(path);
let emi_path = full_path.join("map").join("map3d.emi");
let sm3_path = emi_path.with_extension("sm3");
let dum_path = emi_path.with_extension("dum");
let config_file = emi_path.with_extension("ini");
let moredummies = emi_path.with_file_name("moredummies").with_extension("ini");
let config = load_ini(&config_file);
let moredummies = load_ini(&moredummies);
let Data::EMI(emi) = parse_file(&emi_path)? else {
bail!("Failed to parse EMI at {emi_path}", emi_path=emi_path.display());
};
let Data::SM3(sm3) = parse_file(&sm3_path)? else {
bail!("Failed to parse SM3 at {sm3_path}", sm3_path=sm3_path.display());
};
let Data::DUM(dummies) = parse_file(&dum_path)? else {
bail!("Failed to parse DUM at {dum_path}", dum_path=dum_path.display());
};
Ok(Level {
config,
moredummies,
emi,
sm3,
dummies,
path: path.into(),
root: root.into(),
})
}
}
#[derive(Subcommand, Debug)]
enum Commands {
FindScrapland,
ParsePacked {
scrap_path: PathBuf,
},
ParseFile {
#[clap(long)]
/// Write to stdout
stdout: bool,
/// Scrapland root path
root: PathBuf,
/// Level to parse and convert
level: PathBuf,
},
}
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Args {
#[arg(long,short)]
/// Write data as JSON
json: bool,
#[command(subcommand)]
command: Commands,
}
fn cmd_parse_packed(root: &Path) -> Result<HashMap<PathBuf, Vec<PackedFile>>> {
let mut packed_map = HashMap::new();
for entry in WalkDir::new(root).into_iter().filter_map(|e| e.ok()) {
let path = entry.path();
if path
.extension()
.map(|e| e.to_str() == Some("packed"))
.unwrap_or(false)
{
let path = entry.path().to_owned();
let header: PackedHeader = BufReader::new(File::open(&path)?).read_le()?;
packed_map.insert(path, header.files);
}
}
Ok(packed_map)
}
fn to_bytes<T>(data: &T, json: bool) -> Result<Vec<u8>> where T: Serialize {
if json {
Ok(serde_json::to_vec_pretty(data)?)
} else {
Ok(serde_pickle::to_vec(data,Default::default())?)
}
}
fn cmd_parse_file(stdout: bool, root: &Path, path: &Path, json: bool) -> Result<()> {
let out_path = PathBuf::from(
args.path
path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy()
.into_owned(),
)
.with_extension("json.gz");
let full_path = &args.root.join(&args.path);
let data = if full_path.is_dir() {
load_data(&args.root, &args.path)?
);
let out_path = if json {
out_path.with_extension("json.gz")
} else {
serde_json::to_value(parse_file(full_path)?)?
out_path.with_extension("pkl.gz")
};
let full_path = &root.join(path);
let data = if full_path.is_dir() {
let level = Level::load(root, path)?;
to_bytes(&level,json)?
} else {
let data = parse_file(full_path)?;
to_bytes(&data,json)?
};
let mut data = Cursor::new(data);
if stdout {
let mut stdout = std::io::stdout().lock();
std::io::copy(&mut data, &mut stdout)?;
} else {
let mut fh = GzEncoder::new(File::create(&out_path)?, Compression::best());
std::io::copy(&mut data, &mut fh)?;
eprintln!("Wrote {path}", path = out_path.display());
};
let mut dumpfile = GzEncoder::new(File::create(&out_path)?, Compression::best());
serde_json::to_writer_pretty(&mut dumpfile, &data)?;
println!("Wrote {path}", path = out_path.display());
Ok(())
}
fn emi_to_obj(emi: EMI) -> ! {
// let mut obj_data = obj::ObjData::default();
// for mesh in emi.tri {
// for vert in mesh.data.verts_1.inner.map(|d| d.data).unwrap_or_default() {
// obj_data.position.push(vert.xyz);
// obj_data.normal.push(vert.normal.unwrap_or_default());
// obj_data.texture.push(vert.tex_1.unwrap_or_default().0.try_into().unwrap());
// }
// for vert in mesh.data.verts_2.inner.map(|d| d.data).unwrap_or_default() {
// obj_data.position.push(vert.xyz);
// obj_data.normal.push(vert.normal.unwrap_or_default());
// }
// }
todo!("EMI to OBJ converter");
}
fn main() -> Result<()> {
let args = Args::try_parse()?;
match args.command {
Commands::FindScrapland => {
let data = to_bytes(&find_scrap::get_executable()?,args.json)?;
let mut stdout = std::io::stdout().lock();
std::io::copy(&mut &data[..], &mut stdout)?;
}
Commands::ParsePacked { scrap_path } => {
let data = to_bytes(&cmd_parse_packed(&scrap_path)?,args.json)?;
let mut stdout = std::io::stdout().lock();
std::io::copy(&mut &data[..], &mut stdout)?;
}
Commands::ParseFile {
stdout,
root,
level,
} => {
cmd_parse_file(stdout, &root, &level, args.json)?;
}
}
Ok(())
}
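
The `PackedHeader`/`PackedFile` structs above pin down the `.packed` container layout: a `BFPK` magic, a `u32` version that must be 0, a `u32` file count, then one record per file, all little-endian. A standalone reader sketch; the 4-byte length prefix assumed for `PascalString` is not shown in this diff, so treat that detail (and the archive name) as illustrative:

    import struct

    def read_packed_index(path):
        with open(path, "rb") as fh:
            magic, version, num_files = struct.unpack("<4sII", fh.read(12))
            assert magic == b"BFPK" and version == 0
            entries = []
            for _ in range(num_files):
                (name_len,) = struct.unpack("<I", fh.read(4))    # assumed PascalString encoding
                name = fh.read(name_len).decode("latin-1")
                size, offset = struct.unpack("<II", fh.read(8))  # field order per PackedFile
                entries.append((name, size, offset))
        return entries

    for name, size, offset in read_packed_index("Data.packed"):  # hypothetical archive name
        print(f"{offset:08x} {size:8d} {name}")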


@@ -1,3 +1,7 @@
// https://learn.microsoft.com/en-us/windows/win32/direct3dhlsl/dx9-graphics-reference-asm-ps-1-x
//
// ################################################
//
// #[derive(Debug)]
// enum VecArg {
// Tex(f32,f32,f32,f32),