# ------------------------------------------------------------------------
# Header
# ------------------------------------------------------------------------

import bpy

import math
import os
import re
import struct
from collections import namedtuple
from collections.abc import Sequence
from copy import deepcopy
from dataclasses import dataclass
from functools import lru_cache
from numbers import Number
from pathlib import Path
from re import findall
from typing import Union

from mathutils import Euler, Matrix, Vector

from ..f3d.f3d_gbi import get_F3D_GBI
from ..f3d.f3d_material import F3DMaterialProperty, RDPSettings, TextureProperty
from ..utility import hexOrDecInt
from ..utility_importer import *

# ------------------------------------------------------------------------
# Classes
# ------------------------------------------------------------------------


class Lights1:
    """Parsed representation of a Lights1 struct initializer.

    `data_str` is the comma separated argument list of the struct literal.
    Ambient and diffuse colors gain a full alpha channel; the light
    direction is taken from the trailing three values.
    """

    def __init__(self, name: str, data_str: str):
        self.name = name
        # NOTE(review): eval of file contents — values are expected to be
        # plain numeric literals from a Lights1 initializer, not arbitrary code
        data = [eval(dat.strip()) for dat in data_str.split(",")]
        self.ambient = [*data[0:3], 0xFF]
        self.diffuse = [*data[3:6], 0xFF]
        # assumes the initializer repeats the color words so the direction
        # starts at index 9 — TODO confirm against the actual struct layout
        self.direction = data[9:12]


class Tile:
    """Mutable holder for one RDP tile descriptor's settings."""

    def __init__(self):
        self.Fmt = "RGBA"
        self.Siz = "16"
        self.Slow = 32
        self.Tlow = 32
        self.Shigh = 32
        self.Thigh = 32
        self.SMask = 5
        self.TMask = 5
        self.SShift = 0
        self.TShift = 0
        self.Sflags = None
        self.Tflags = None
        self.tmem = 0

    def eval_texture_format(self):
        """Collapse the GBI fmt/siz enums into a fast64 format string, e.g. RGBA16."""
        return f"{self.Fmt.replace('G_IM_FMT_','')}{self.Siz.replace('G_IM_SIZ_','').replace('b','')}"


# this will hold texture properties, dataclass props
# are created in order for me to make comparisons in a set
@dataclass(init=True, eq=True, unsafe_hash=True)
class Texture:
    Timg: tuple
    Fmt: str
    Siz: int
    Width: int = 0
    Height: int = 0
    Pal: tuple = None  # palette texture for CI formats, when present

    def size(self):
        """Return (width, height) in texels."""
        return self.Width, self.Height

    def eval_texture_format(self):
        """Collapse the GBI fmt/siz enums into a fast64 format string, e.g. RGBA16."""
        return f"{self.Fmt.replace('G_IM_FMT_','')}{self.Siz.replace('G_IM_SIZ_','').replace('b','')}"


# This is a data storage class and mat to f3dmat converting class
# used when importing for kirby
class Mat:
    """Accumulates RDP/RSP state while walking a display list and converts
    the accumulated state into a fast64 F3D material."""

    def __init__(self):
        self.GeoSet = []
        self.GeoClear = []
        self.tiles = [Tile() for _ in range(8)]
        # dict[tmem offset] = Texture loaded at that offset
        self.tmem = dict()
        self.base_tile = 0
        self.tex0 = None
        self.tex1 = None
        self.other_mode = dict()
        self.num_lights = 1
        self.light_col = {}
        self.ambient_light = tuple()

    # calc the hash for an f3d mat and see if its equal to this mats hash
    def mat_hash_f3d(self, f3d: F3DMaterialProperty):
        """Return True when this material matches an existing f3d material.

        Compared state: texture reference, 1 cycle combiner, render mode,
        geo modes, some other mode settings, tile size (very important in
        kirby64).
        """
        rdp = f3d.rdp_settings
        if f3d.tex0.tex:
            T = f3d.tex0.tex_reference
        else:
            T = ""
        F3Dprops = (
            T,
            f3d.combiner1.A,
            f3d.combiner1.B,
            f3d.combiner1.C,
            f3d.combiner1.D,
            f3d.combiner1.A_alpha,
            f3d.combiner1.B_alpha,
            f3d.combiner1.C_alpha,
            f3d.combiner1.D_alpha,
            rdp.rendermode_preset_cycle_1,
            rdp.rendermode_preset_cycle_2,
            rdp.g_lighting,
            rdp.g_shade,
            rdp.g_shade_smooth,
            rdp.g_zbuffer,
            rdp.g_mdsft_alpha_compare,
            rdp.g_mdsft_zsrcsel,
            rdp.g_mdsft_alpha_dither,
            f3d.tex0.S.high,
            f3d.tex0.T.high,
            f3d.tex0.S.low,
            f3d.tex0.T.low,
        )
        # no combiner was ever parsed, so this mat holds no comparable state
        if not hasattr(self, "Combiner"):
            return False
        MyT = ""
        if hasattr(self.tex0, "Timg"):
            MyT = str(self.tex0.Timg)

        def EvalGeo(self, mode):
            # a set wins over a clear; modes in neither list default to True
            for a in self.GeoSet:
                if mode in a.lower():
                    return True
            for a in self.GeoClear:
                if mode in a.lower():
                    return False
            return True

        # othermode values are recorded in self.other_mode by the DL parser,
        # not as attributes, so look them up there (was x.__dict__.get)
        chkT = lambda x, y, d: x.other_mode.get(y, d)
        rendermode = getattr(self, "RenderMode", ["G_RM_AA_ZB_OPA_SURF", "G_RM_AA_ZB_OPA_SURF2"])
        MyProps = (
            MyT,
            *self.Combiner[0:8],
            *rendermode,
            EvalGeo(self, "g_lighting"),
            EvalGeo(self, "g_shade"),
            EvalGeo(self, "g_shade_smooth"),
            EvalGeo(self, "g_zbuffer"),
            chkT(self, "g_mdsft_alpha_compare", "G_AC_NONE"),
            chkT(self, "g_mdsft_zsrcsel", "G_ZS_PIXEL"),
            chkT(self, "g_mdsft_alpha_dither", "G_AD_NOISE"),
            self.tiles[0].Shigh,
            self.tiles[0].Thigh,
            self.tiles[0].Slow,
            self.tiles[0].Tlow,
        )
        return hash(MyProps) == hash(F3Dprops)

    def mat_hash(self, mat: bpy.types.Material):
        # comparison against non-f3d materials is not supported
        return False

    def convert_color(self, color: Sequence[Number]):
        """Normalize 0-255 channel values to 0.0-1.0 floats."""
        return [int(a) / 255 for a in color]

    def load_texture(self, ForceNewTex: bool, path: Path, tex: Texture):
        """Load (or reuse an already-loaded) png exported for `tex` under `path`.

        Returns None when no png exists for the texture's bank/offset pair.
        """
        png_dir = path / f"bank_{tex.Timg[0]}" / f"{tex.Timg[1]}"
        png = (*png_dir.glob("*.png"),)
        if png:
            image = bpy.data.images.get(str(png[0]))
            if not image or ForceNewTex:
                return bpy.data.images.load(filepath=str(png[0]))
            return image

    def apply_PBSDF_Mat(self, mat: bpy.types.Material, tex_path: Path, tex: Texture):
        """Hook the texture up to a vanilla Principled BSDF material (non-f3d path)."""
        nt = mat.node_tree
        nodes = nt.nodes
        links = nt.links
        pbsdf = nodes.get("Principled BSDF")
        if not pbsdf:
            return
        tex_node = nodes.new("ShaderNodeTexImage")
        links.new(pbsdf.inputs[0], tex_node.outputs[0])  # base color
        # NOTE(review): input index 21 is alpha in current Blender versions;
        # indices shift between releases — confirm against target Blender
        links.new(pbsdf.inputs[21], tex_node.outputs[1])  # alpha color
        # was self.LoadTexture, which does not exist
        image = self.load_texture(0, tex_path, tex)
        if image:
            tex_node.image = image

    def apply_material_settings(self, mat: bpy.types.Material, tex_path: Path):
        """Push all accumulated state into the material's f3d_mat and refresh nodes."""
        f3d = mat.f3d_mat

        self.set_texture_tile_mapping()
        self.set_register_settings(mat, f3d)
        # set_textures requires the texture path (was called without it)
        self.set_textures(f3d, tex_path)

        with bpy.context.temp_override(material=mat):
            bpy.ops.material.update_f3d_nodes()

    def set_register_settings(self, mat: bpy.types.Material, f3d: F3DMaterialProperty):
        self.set_fog(f3d)
        self.set_color_registers(f3d)
        self.set_geo_mode(f3d.rdp_settings, mat)
        self.set_combiner(f3d)
        self.set_rendermode(f3d)
        self.set_othermode(f3d)

    # map tiles to locations in tmem
    # this ignores the application of LoDs for magnification
    # since fast64 uses tile0 as tex0 always, so to get expected
    # results we need to start tex0 at the proper base tile
    def set_texture_tile_mapping(self):
        for index, tile in enumerate(self.tiles):
            tex_index = index - self.base_tile
            if tex_index < 0:
                continue
            tex = self.tmem.get(tile.tmem, None)
            setattr(self, f"tex{tex_index}", tex)

    def set_textures(self, f3d: F3DMaterialProperty, tex_path: Path):
        self.set_tex_scale(f3d)
        if self.tex0 and self.set_tex:
            self.set_tex_settings(
                f3d.tex0, self.load_texture(0, tex_path, self.tex0), self.tiles[0 + self.base_tile], self.tex0.Timg
            )
        if self.tex1 and self.set_tex:
            self.set_tex_settings(
                f3d.tex1, self.load_texture(0, tex_path, self.tex1), self.tiles[1 + self.base_tile], self.tex1.Timg
            )

    def set_fog(self, f3d: F3DMaterialProperty):
        # was hasattr(self, "fog_position") while reading self.fog_pos,
        # which raised AttributeError whenever the guard passed
        if hasattr(self, "fog_pos"):
            f3d.set_fog = True
            f3d.use_global_fog = False
            f3d.fog_position[0] = eval(self.fog_pos[0])
            f3d.fog_position[1] = eval(self.fog_pos[1])
        if hasattr(self, "fog_color"):
            f3d.set_fog = True
            f3d.use_global_fog = False
            f3d.fog_color = self.convert_color(self.fog_color)

    def set_color_registers(self, f3d: F3DMaterialProperty):
        if self.ambient_light:
            f3d.set_ambient_from_light = False
            f3d.ambient_light_color = self.convert_color(self.ambient_light)
        if self.light_col:
            # this is a dict but I'll only use the first color for now
            f3d.set_lights = True
            if self.light_col.get(1):
                f3d.default_light_color = self.convert_color(self.light_col[1])
        if hasattr(self, "blend_color"):
            f3d.set_blend = True
            f3d.blend_color = self.convert_color(self.blend_color)
        if hasattr(self, "env_color"):
            f3d.set_env = True
            f3d.env_color = self.convert_color(self.env_color)
        if hasattr(self, "prim_color"):
            prim = self.prim_color
            f3d.set_prim = True
            f3d.prim_lod_min = int(prim[0])
            f3d.prim_lod_frac = int(prim[1])
            f3d.prim_color = self.convert_color(prim[-4:])

    def set_tex_scale(self, f3d: F3DMaterialProperty):
        if hasattr(self, "set_tex"):
            # not exactly the same but gets the point across maybe?
            f3d.tex0.tex_set = self.set_tex
            f3d.tex1.tex_set = self.set_tex
            # tex scale gets set to 0 when textures are disabled which is automatically done
            # often to save processing power between mats or something, or just adhoc bhv
            # though in fast64, we don't want to ever set it to zero
            if f3d.rdp_settings.g_tex_gen or any(0 < a < 1 for a in self.tex_scale):
                f3d.scale_autoprop = False
                f3d.tex_scale = self.tex_scale

    def set_tex_settings(
        self, tex_prop: TextureProperty, image: bpy.types.Image, tile: Tile, tex_img: Union[Sequence, str]
    ):
        tex_prop.tex_reference = str(tex_img)  # setting prop for hash purposes
        tex_prop.tex_set = True
        tex_prop.tex = image
        tex_prop.tex_format = tile.eval_texture_format()
        s_flags = self.eval_tile_flags(tile.Sflags)
        tex_prop.S.mirror = "mirror" in s_flags
        tex_prop.S.clamp = "clamp" in s_flags
        t_flags = self.eval_tile_flags(tile.Tflags)
        tex_prop.T.mirror = "mirror" in t_flags
        tex_prop.T.clamp = "clamp" in t_flags
        tex_prop.S.low = tile.Slow
        tex_prop.T.low = tile.Tlow
        tex_prop.S.high = tile.Shigh
        tex_prop.T.high = tile.Thigh
        tex_prop.S.mask = tile.SMask
        tex_prop.T.mask = tile.TMask

    # rework with new render mode stuffs
    def set_rendermode(self, f3d: F3DMaterialProperty):
        rdp = f3d.rdp_settings
        if hasattr(self, "RenderMode"):
            rdp.set_rendermode = True
            # if the enum isn't there, then just print an error for now
            try:
                rdp.rendermode_preset_cycle_1 = self.RenderMode[0]
                rdp.rendermode_preset_cycle_2 = self.RenderMode[1]
            except (TypeError, ValueError, IndexError):
                print(f"could not set render modes with render mode {self.RenderMode}")

    def set_othermode(self, f3d: F3DMaterialProperty):
        rdp = f3d.rdp_settings
        for prop, val in self.other_mode.items():
            # unknown/custom enum values should not abort the whole import
            try:
                setattr(rdp, prop, val)
            except (TypeError, ValueError):
                print(f"could not set othermode {prop} to {val}")

    def set_geo_mode(self, rdp: RDPSettings, mat: bpy.types.Material):
        # texture gen has a different name than gbi
        for a in self.GeoSet:
            setattr(rdp, a.replace("G_TEXTURE_GEN", "G_TEX_GEN").lower().strip(), True)
        for a in self.GeoClear:
            setattr(rdp, a.replace("G_TEXTURE_GEN", "G_TEX_GEN").lower().strip(), False)

    # Very lazy for now
    def set_combiner(self, f3d: F3DMaterialProperty):
        f3d.presetName = "Custom"
        if not hasattr(self, "Combiner"):
            # no combiner seen in the DL: fall back to a plain textured default
            f3d.combiner1.A = "TEXEL0"
            f3d.combiner1.A_alpha = "0"
            f3d.combiner1.C = "SHADE"
            f3d.combiner1.C_alpha = "0"
            f3d.combiner1.D = "0"
            f3d.combiner1.D_alpha = "1"
        else:
            f3d.combiner1.A = self.Combiner[0]
            f3d.combiner1.B = self.Combiner[1]
            f3d.combiner1.C = self.Combiner[2]
            f3d.combiner1.D = self.Combiner[3]
            f3d.combiner1.A_alpha = self.Combiner[4]
            f3d.combiner1.B_alpha = self.Combiner[5]
            f3d.combiner1.C_alpha = self.Combiner[6]
            f3d.combiner1.D_alpha = self.Combiner[7]
            f3d.combiner2.A = self.Combiner[8]
            f3d.combiner2.B = self.Combiner[9]
            f3d.combiner2.C = self.Combiner[10]
            f3d.combiner2.D = self.Combiner[11]
            f3d.combiner2.A_alpha = self.Combiner[12]
            f3d.combiner2.B_alpha = self.Combiner[13]
            f3d.combiner2.C_alpha = self.Combiner[14]
            f3d.combiner2.D_alpha = self.Combiner[15]

    def eval_tile_flags(self, flags: str):
        """Translate a GBI clamp/mirror flag expression into ["clamp"|"mirror"] tokens."""
        if not flags:
            return []
        # flag "3" carries both tokens, so values are tuples that get
        # flattened (the old code appended the tuple itself, which broke
        # the "mirror" in flags membership tests downstream)
        GBIflags = {
            "G_TX_NOMIRROR": (),
            "G_TX_WRAP": (),
            "G_TX_MIRROR": ("mirror",),
            "G_TX_CLAMP": ("clamp",),
            "0": (),
            "1": ("mirror",),
            "2": ("clamp",),
            "3": ("clamp", "mirror"),
        }
        x = []
        for f in flags.split("|"):
            x.extend(GBIflags.get(f.strip(), ()))
        return x
processing +# does not deal with flow control or gathering the data containers (VB, Geo cls etc.) +class DL(DataParser): + # the min needed for this class to work for importing + def __init__(self, lastmat=None): + self.Vtx = {} + self.Gfx = {} + self.Light_t = {} + self.Ambient_t = {} + self.Lights1 = {} + self.Textures = {} + self.NewMat = 1 + self.f3d_gbi = get_F3D_GBI() + if not lastmat: + self.LastMat = Mat() + self.LastMat.name = 0 + else: + self.LastMat = lastmat + super().__init__() + + def gsSPEndDisplayList(self, macro: Macro): + return self.break_parse + + def gsSPBranchList(self, macro: Macro): + NewDL = self.Gfx.get(branched_dl := macro.args[0]) + if not NewDL: + raise Exception( + "Could not find DL {} in levels/{}/{}leveldata.inc.c".format( + NewDL, self.scene.level_import.Level, self.scene.level_import.Prefix + ) + ) + self.reset_parser(branched_dl) + self.parse_stream(NewDL, branched_dl) + return self.break_parse + + def gsSPDisplayList(self, macro: Macro): + NewDL = self.Gfx.get(branched_dl := macro.args[0]) + if not NewDL: + raise Exception( + "Could not find DL {} in levels/{}/{}leveldata.inc.c".format( + NewDL, self.scene.level_import.Level, self.scene.level_import.Prefix + ) + ) + self.reset_parser(branched_dl) + self.parse_stream(NewDL, branched_dl) + return self.continue_parse + + def gsSPEndDisplayList(self, macro: Macro): + return self.break_parse + + def gsSPVertex(self, macro: Macro): + # vertex references commonly use pointer arithmatic. 
I will deal with that case here, but not for other things unless it somehow becomes a problem later + if "+" in macro.args[0]: + ref, offset = macro.args[0].split("+") + offset = hexOrDecInt(offset) + else: + ref = macro.args[0] + offset = 0 + VB = self.Vtx.get(ref.strip()) + if not VB: + raise Exception( + "Could not find VB {} in levels/{}/{}leveldata.inc.c".format( + ref, self.scene.level_import.Level, self.scene.level_import.Prefix + ) + ) + vertex_load_start = hexOrDecInt(macro.args[2]) + vertex_load_length = hexOrDecInt(macro.args[1]) + Verts = VB[ + offset : offset + vertex_load_length + ] # If you use array indexing here then you deserve to have this not work + Verts = [self.parse_vert(v) for v in Verts] + for k, i in enumerate(range(vertex_load_start, vertex_load_length, 1)): + self.VertBuff[i] = [Verts[k], vertex_load_start] + # These are all independent data blocks in blender + self.Verts.extend([v[0] for v in Verts]) + self.UVs.extend([v[1] for v in Verts]) + self.VCs.extend([v[2] for v in Verts]) + self.LastLoad = vertex_load_length + return self.continue_parse + + def gsSPModifyVertex(self, macro: Macro): + vtx = self.VertBuff[hexOrDecInt(macro.args[0])] + where = self.eval_modify_vtx(macro.args[1]) + val = hexOrDecInt(macro.args[2]) + # if it is None, something weird, or screenspace I won't edit it + if where == "ST": + uv = (val >> 16) & 0xFFFF, val & 0xFFFF + self.Verts.append(self.Verts[vtx]) + self.UVs.append(uv) + self.VCs.append(self.VCs[vtx]) + self.VertBuff[hexOrDecInt(macro.args[0])] = len(self.Verts) + elif where == "RGBA": + vertex_col = [(val >> 8 * i) & 0xFF for i in range(4)].reverse() + self.Verts.append(self.Verts[vtx]) + self.UVs.append(self.UVs[vtx]) + self.VCs.append(vertex_col) + self.VertBuff[hexOrDecInt(macro.args[0])] = len(self.Verts) + return self.continue_parse + + def gsSP2Triangles(self, macro: Macro): + self.make_new_material() + args = [hexOrDecInt(a) for a in macro.args] + Tri1 = self.parse_tri(args[:3]) + Tri2 = 
self.parse_tri(args[4:7]) + self.Tris.append(Tri1) + self.Tris.append(Tri2) + return self.continue_parse + + def gsSP1Triangle(self, macro: Macro): + self.make_new_material() + args = [hexOrDecInt(a) for a in macro.args] + Tri = self.parse_tri(args[:3]) + self.Tris.append(Tri) + return self.continue_parse + + # materials + # Mats will be placed sequentially. The first item of the list is the triangle number + # The second is the material class + def gsDPSetRenderMode(self, macro: Macro): + self.NewMat = 1 + self.LastMat.RenderMode = [a.strip() for a in macro.args] + return self.continue_parse + + # The highest numbered light is always the ambient light + def gsSPLight(self, macro: Macro): + self.NewMat = 1 + light = re.search("&.+\.", macro.args[0]).group()[1:-1] + light = Lights1(light, self.Lights1.get(light)[0]) + if ".a" in macro.args[0]: + self.LastMat.ambient_light = light.ambient + else: + num = re.search("_\d", macro.args[0]).group()[1] + num = int(num) if num else 1 + self.LastMat.light_col[num] = light.diffuse + return self.continue_parse + + # numlights0 still gives one ambient and diffuse light + def gsSPNumLights(self, macro: Macro): + self.NewMat = 1 + num = re.search("_\d", macro.args[0]).group()[1] + num = int(num) if num else 1 + self.LastMat.num_lights = num + return self.continue_parse + + def gsSPLightColor(self, macro: Macro): + self.NewMat = 1 + num = re.search("_\d", macro.args[0]).group()[1] + num = int(num) if num else 1 + self.LastMat.light_col[num] = eval(macro.args[-1]).to_bytes(4, "big") + return self.continue_parse + + # not finished yet + def gsSPSetLights0(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights1(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights2(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights3(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights4(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights5(self, macro: 
Macro): + return self.continue_parse + + def gsSPSetLights6(self, macro: Macro): + return self.continue_parse + + def gsSPSetLights7(self, macro: Macro): + return self.continue_parse + + def gsSPSetOtherMode(self, macro: Macro): + self.NewMat = 1 + if macro.args[0] == "G_SETOTHERMODE_H": + for i, othermode in enumerate(macro.args[3].split("|")): + # this may cause an issue if someone uses a wacky custom othermode H + mode_h_attr = RDPSettings.other_mode_h_attributes[i][1] + self.LastMat.other_mode[mode_h_attr] = othermode.strip() + else: + if int(macro.args[2]) > 3: + self.LastMat.RenderMode = [] + # top two bits are z src and alpha compare, rest is render mode + for i, othermode in enumerate(macro.args[3].split("|")): + if int(macro.args[2]) > 3 and i > 1: + self.LastMat.RenderMode.append(othermode) + continue + mode_l_attr = RDPSettings.other_mode_l_attributes[i][1] + self.LastMat.other_mode[mode_l_attr] = othermode.strip() + return self.continue_parse + + # some independent other mode settings + def gsDPSetTexturePersp(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_textpersp"] = macro.args[0] + return self.continue_parse + + def gsDPSetDepthSource(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_zsrcsel"] = macro.args[0] + return self.continue_parse + + def gsDPSetColorDither(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_rgb_dither"] = macro.args[0] + return self.continue_parse + + def gsDPSetAlphaDither(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_alpha_dither"] = macro.args[0] + return self.continue_parse + + def gsDPSetCombineKey(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_combkey"] = macro.args[0] + return self.continue_parse + + def gsDPSetTextureConvert(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_textconv"] = macro.args[0] + return self.continue_parse + + def gsDPSetTextureFilter(self, 
macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_text_filt"] = macro.args[0] + return self.continue_parse + + def gsDPSetTextureLOD(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_textlod"] = macro.args[0] + return self.continue_parse + + def gsDPSetTextureDetail(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_textdetail"] = macro.args[0] + return self.continue_parse + + def gsDPSetCycleType(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_cycletype"] = macro.args[0] + return self.continue_parse + + def gsDPSetTextureLUT(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_textlut"] = macro.args[0] + return self.continue_parse + + def gsDPPipelineMode(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_pipeline"] = macro.args[0] + return self.continue_parse + + def gsDPSetAlphaCompare(self, macro: Macro): + self.NewMat = 1 + self.LastMat.other_mode["g_mdsft_alpha_compare"] = macro.args[0] + return self.continue_parse + + def gsSPFogFactor(self, macro: Macro): + return self.continue_parse + + def gsDPSetFogColor(self, macro: Macro): + self.NewMat = 1 + self.LastMat.fog_color = macro.args + return self.continue_parse + + def gsSPFogPosition(self, macro: Macro): + self.NewMat = 1 + self.LastMat.fog_pos = macro.args + return self.continue_parse + + def gsDPSetBlendColor(self, macro: Macro): + self.NewMat = 1 + self.LastMat.blend_color = macro.args + return self.continue_parse + + def gsDPSetPrimColor(self, macro: Macro): + self.NewMat = 1 + self.LastMat.prim_color = macro.args + return self.continue_parse + + def gsDPSetEnvColor(self, macro: Macro): + self.NewMat = 1 + self.LastMat.env_color = macro.args + return self.continue_parse + + # multiple geo modes can happen in a row that contradict each other + # this is mostly due to culling wanting diff geo modes than drawing + # but sometimes using the same vertices + def 
gsSPClearGeometryMode(self, macro: Macro): + self.NewMat = 1 + args = [a.strip() for a in macro.args[0].split("|")] + for a in args: + if a in self.LastMat.GeoSet: + self.LastMat.GeoSet.remove(a) + self.LastMat.GeoClear.extend(args) + return self.continue_parse + + def gsSPSetGeometryMode(self, macro: Macro): + self.NewMat = 1 + args = [a.strip() for a in macro.args[0].split("|")] + for a in args: + if a in self.LastMat.GeoClear: + self.LastMat.GeoClear.remove(a) + self.LastMat.GeoSet.extend(args) + return self.continue_parse + + def gsSPGeometryMode(self, macro: Macro): + self.NewMat = 1 + argsC = [a.strip() for a in macro.args[0].split("|")] + argsS = [a.strip() for a in macro.args[1].split("|")] + for a in argsC: + if a in self.LastMat.GeoSet: + self.LastMat.GeoSet.remove(a) + for a in argsS: + if a in self.LastMat.GeoClear: + self.LastMat.GeoClear.remove(a) + self.LastMat.GeoClear.extend(argsC) + self.LastMat.GeoSet.extend(argsS) + return self.continue_parse + + def gsSPLoadGeometryMode(self, macro: Macro): + self.NewMat = 1 + geo_set = {a.strip().lower() for a in macro.args[0].split("|")} + all_geos = set(RDPSettings.geo_mode_attributes.values()) + self.LastMat.GeoSet = list(geo_set) + self.LastMat.GeoClear = list(all_geos.difference(geo_set)) + return self.continue_parse + + def gsDPSetCombineMode(self, macro: Macro): + self.NewMat = 1 + self.LastMat.Combiner = self.eval_set_combine_macro(macro.args) + return self.continue_parse + + def gsDPSetCombineLERP(self, macro: Macro): + self.NewMat = 1 + self.LastMat.Combiner = macro.args + return self.continue_parse + + # root tile, scale and set tex + def gsSPTexture(self, macro: Macro): + self.NewMat = 1 + macros = { + "G_ON": 2, + "G_OFF": 0, + } + set_tex = macros.get(macro.args[-1]) + if set_tex == None: + set_tex = hexOrDecInt(macro.args[-1]) + self.LastMat.set_tex = set_tex == 2 + self.LastMat.tex_scale = [ + ((0x10000 * (hexOrDecInt(a) < 0)) + hexOrDecInt(a)) / 0xFFFF for a in macro.args[0:2] + ] # signed 
half to unsigned half + self.LastMat.base_tile = self.eval_tile_enum(macro.args[-2]) + return self.continue_parse + + # last tex is a palette + def gsDPLoadTLUTCmd(self, macro: Macro): + if hasattr(self.LastMat, "loadtex"): + tex = self.LastMat.loadtex + tile_index = self.eval_tile_enum(macro.args[0]) + tex.tile = self.LastMat.tiles[tile_index] + self.LastMat.pal = tex + else: + print( + "**--Load block before set t img, DL is partial and missing context" + "likely static file meant to be used as a piece of a realtime system.\n" + "No interpretation on file possible**--" + ) + return None + return self.continue_parse + + def gsDPLoadBlock(self, macro: Macro): + if hasattr(self.LastMat, "loadtex"): + tex = self.LastMat.loadtex + # these values aren't necessary when the texture is already in png format + # tex.dxt = hexOrDecInt(args[4]) + # tex.texels = hexOrDecInt(args[3]) + tile_index = self.eval_tile_enum(macro.args[0]) + tex.tile = self.LastMat.tiles[tile_index] + self.LastMat.tmem[tex.tile.tmem] = tex + else: + print( + "**--Load block before set t img, DL is partial and missing context" + "likely static file meant to be used as a piece of a realtime system.\n" + "No interpretation on file possible**--" + ) + return None + return self.continue_parse + + def gsDPSetTextureImage(self, macro: Macro): + self.NewMat = 1 + Timg = macro.args[3] + Fmt = macro.args[0] + Siz = macro.args[1] + self.LastMat.loadtex = Texture(Timg, Fmt, Siz) + return self.continue_parse + + def gsDPSetTileSize(self, macro: Macro): + self.NewMat = 1 + tile = self.LastMat.tiles[self.eval_tile_enum(macro.args[0])] + tile.Slow = self.eval_image_frac(macro.args[1]) + tile.Tlow = self.eval_image_frac(macro.args[2]) + tile.Shigh = self.eval_image_frac(macro.args[3]) + tile.Thigh = self.eval_image_frac(macro.args[4]) + return self.continue_parse + + def gsDPSetTile(self, macro: Macro): + self.NewMat = 1 + tile = self.LastMat.tiles[self.eval_tile_enum(macro.args[4])] + tile.tmem = 
hexOrDecInt(macro.args[3]) + tile.Fmt = macro.args[0].strip() + tile.Siz = macro.args[1].strip() + tile.Tflags = macro.args[6].strip() + tile.TMask = self.eval_tile_enum(macro.args[7]) + tile.TShift = self.eval_tile_enum(macro.args[8]) + tile.Sflags = macro.args[9].strip() + tile.SMask = self.eval_tile_enum(macro.args[10]) + tile.SShift = self.eval_tile_enum(macro.args[11]) + return self.continue_parse + + # combined macros + def gsDPLoadTLUT(self, macro: Macro): + # count, tmemaddr, tex + args = macro.args + self.gsDPSetTextureImage(macro.partial("G_IM_FMT_RGBA", "G_IM_SIZ_16b", 1, args[2])) + self.gsDPSetTile(macro.partial(0, 0, 0, args[1], 7, 0, 0, 0, 0, 0, 0, 0)) + self.gsDPLoadTLUTCmd(macro.partial(7, args[0])) + return self.continue_parse + + def gsDPLoadTextureBlock(self, macro: Macro): + # 0tex, 1fmt, 2siz, 3height, 4width, 5pal, 6flags, 8masks, 10shifts + args = macro.args + fmt = self.eval_timg_format(args[1]) + siz = self.eval_timg_format(args[2]) + self.gsDPSetTextureImage(macro.partial(fmt, siz, 1, args[0])) + self.gsDPSetTile(macro.partial(fmt, siz, 0, 0, 7, 0, args[7], args[9], args[11], args[6], args[8], args[10])) + # self.gsDPLoadSync(macro) + self.gsDPLoadBlock(macro.partial(7, 0, 0, 0, 0)) # I don't need args + # self.gsDPPipeSync(macro) + self.gsDPSetTile( + macro.partial(fmt, siz, 0, 0, 0, args[5], args[7], args[9], args[11], args[6], args[8], args[10]) + ) + self.gsDPSetTileSize(macro.partial(7, 0, 0, (hexOrDecInt(args[4]) - 1) << 2, (hexOrDecInt(args[3]) - 1) << 2)) + + return self.continue_parse + + def gsDPLoadTextureBlockS(self, macro: Macro): + # only changes dxt and that doesn't matter here + return self.gsDPLoadTextureBlock(macro) + + def _gsDPLoadTextureBlock(self, macro: Macro): + # 0tex, 1tmem, 2fmt, 3siz, 4height, 5width, 6pal, 7flags, 9masks, 11shifts + args = macro.args + fmt = eval_timg_format(args[2]) + siz = eval_timg_format(args[3]) + self.gsDPSetTextureImage(macro.partial(fmt, siz, 1, args[0])) + 
self.gsDPSetTile(macro.partial(fmt, siz, 0, 0, 7, 0, args[8], args[10], args[12], args[7], args[9], args[11])) + # self.gsDPLoadSync(macro) + self.gsDPLoadBlock(macro.partial(7, 0, 0, 0, 0)) + # self.gsDPPipeSync(macro) + self.gsDPSetTile( + macro.partial(fmt, siz, 0, 0, 0, args[5], args[7], args[9], args[11], args[6], args[8], args[10]) + ) + self.gsDPSetTileSize(macro.partial(7, 0, 0, (hexOrDecInt(args[4]) - 1) << 2, (hexOrDecInt(args[3]) - 1) << 2)) + return self.continue_parse + + def gsDPLoadTextureBlock_4b(self, macro: Macro): + # 0tex, 1fmt, 2height, 3width, 4pal, 5flags, 7masks, 9shifts + args = macro.args + fmt = eval_timg_format(args[1]) + self.gsDPSetTextureImage(macro.partial(fmt, "G_IM_SIZ_16b", 1, args[0])) + self.gsDPSetTile( + macro.partial(fmt, "G_IM_SIZ_16b", 0, 0, 7, 0, args[6], args[8], args[10], args[5], args[7], args[9]) + ) + # self.gsDPLoadSync(macro) + self.gsDPLoadBlock(macro.partial(7, 0, 0, 0, 0)) + # self.gsDPPipeSync(macro) + self.gsDPSetTile( + macro.partial(fmt, "G_IM_SIZ_4b", 0, 0, 0, args[4], args[3], args[8], args[10], args[3], args[7], args[9]) + ) + self.gsDPSetTileSize(macro.partial(7, 0, 0, (hexOrDecInt(args[4]) - 1) << 2, (hexOrDecInt(args[3]) - 1) << 2)) + return self.continue_parse + + def gsDPLoadTextureBlock_4bs(self, macro: Macro): + # only changes dxt and that doesn't matter here + return self.gsDPLoadTextureBlock_4b(macro) + + # other stuff that probably doesn't matter since idk who uses these + # if they break make an issue + # _gsDPLoadTextureBlockTile + # gsDPLoadMultiBlock + # gsDPLoadMultiBlockS + + # syncs need no processing + def gsSPCullDisplayList(self, macro: Macro): + return self.continue_parse + + def gsDPPipeSync(self, macro: Macro): + return self.continue_parse + + def gsDPLoadSync(self, macro: Macro): + return self.continue_parse + + def gsDPTileSync(self, macro: Macro): + return self.continue_parse + + def gsDPFullSync(self, macro: Macro): + return self.continue_parse + + def gsDPNoOp(self, macro: 
Macro): + return self.continue_parse + + def make_new_material(self): + if self.NewMat: + self.NewMat = 0 + self.Mats.append([len(self.Tris) - 1, self.LastMat]) + self.LastMat = deepcopy(self.LastMat) # for safety + + def parse_tri(self, Tri: Sequence[int]): + return [self.VertBuff[a] for a in Tri] + + # if someone uses just the int these catch that + def eval_timg_format(self, fmt: str): + GBI_fmt_ints = { + "0": "G_IM_FMT_RGBA", + "1": "G_IM_FMT_YUV", + "2": "G_IM_FMT_CI", + "3": "G_IM_FMT_IA", + "4": "G_IM_FMT_I", + } + return GBI_fmt_ints.get(fmt, fmt) + + def eval_image_frac(self, arg: Union[str, Number]): + if type(arg) == int: + return arg + arg2 = arg.replace("G_TEXTURE_IMAGE_FRAC", "2") + # evals bad probably + return eval(arg2) + + def eval_tile_enum(self, arg: Union[str, Number]): + if type(arg) is str: + # fix later + return getattr(self.f3d_gbi, arg, 0) + else: + return hexOrDecInt(arg) + + def eval_set_combine_macro(self, arg: str): + return getattr(self.f3d_gbi, arg[0], ["TEXEL0", "0", "SHADE", "0", "TEXEL0", "0", "SHADE", "0"]) + getattr( + self.f3d_gbi, arg[1], ["TEXEL0", "0", "SHADE", "0", "TEXEL0", "0", "SHADE", "0"] + ) diff --git a/fast64_internal/sm64/__init__.py b/fast64_internal/sm64/__init__.py index 81fdbf596..325ad29cf 100644 --- a/fast64_internal/sm64/__init__.py +++ b/fast64_internal/sm64/__init__.py @@ -90,6 +90,13 @@ sm64_anim_unregister, ) +from .sm64_level_importer import ( + sm64_import_panel_register, + sm64_import_panel_unregister, + sm64_import_register, + sm64_import_unregister, +) + def sm64_panel_register(): settings_panels_register() @@ -98,6 +105,7 @@ def sm64_panel_register(): sm64_bone_panel_register() sm64_cam_panel_register() sm64_obj_panel_register() + sm64_import_panel_register() sm64_geo_parser_panel_register() sm64_geo_writer_panel_register() sm64_spline_panel_register() @@ -113,6 +121,7 @@ def sm64_panel_unregister(): sm64_bone_panel_unregister() sm64_cam_panel_unregister() sm64_obj_panel_unregister() + 
sm64_import_panel_unregister() sm64_geo_parser_panel_unregister() sm64_geo_writer_panel_unregister() sm64_spline_panel_unregister() @@ -130,6 +139,7 @@ def sm64_register(register_panels: bool): sm64_obj_register() sm64_geo_parser_register() sm64_geo_writer_register() + sm64_import_register() sm64_level_register() sm64_spline_register() sm64_dl_writer_register() @@ -156,6 +166,7 @@ def sm64_unregister(unregister_panels: bool): sm64_dl_parser_unregister() sm64_anim_unregister() settings_props_unregister() + sm64_import_unregister() if unregister_panels: sm64_panel_unregister() diff --git a/fast64_internal/sm64/settings/properties.py b/fast64_internal/sm64/settings/properties.py index 0275fb9ad..680b713b5 100644 --- a/fast64_internal/sm64/settings/properties.py +++ b/fast64_internal/sm64/settings/properties.py @@ -10,6 +10,7 @@ from ...utility import directory_path_checks, directory_ui_warnings, prop_split, set_prop_if_in_data, upgrade_old_prop from ..sm64_constants import defaultExtendSegment4 from ..sm64_objects import SM64_CombinedObjectProperties +from ..sm64_level_importer import SM64_ImportProperties from ..sm64_utility import export_rom_ui_warnings, import_rom_ui_warnings from ..tools import SM64_AddrConvProperties @@ -81,6 +82,8 @@ class SM64_Properties(PropertyGroup): name="Matstack Fix", description="Exports account for matstack fix requirements", ) + # importing, do I need to upgrade props for this? 
+ importer: PointerProperty(type=SM64_ImportProperties) @property def binary_export(self): diff --git a/fast64_internal/sm64/sm64_constants.py b/fast64_internal/sm64/sm64_constants.py index 8d802ef28..628c5b9d2 100644 --- a/fast64_internal/sm64/sm64_constants.py +++ b/fast64_internal/sm64/sm64_constants.py @@ -145,6 +145,40 @@ "wmotr": "LEVEL_WMOTR", } +LEVEL_ID_NUMBERS = { + 4: "bbh", + 5: "ccm", + 6: "castle_inside", + 7: "hmc", + 8: "ssl", + 9: "bob", + 10: "sl", + 11: "wdw", + 12: "jrb", + 13: "thi", + 14: "ttc", + 15: "rr", + 16: "castle_grounds", + 17: "bitdw", + 18: "vcutm", + 19: "bitfs", + 20: "sa", + 21: "bits", + 22: "lll", + 23: "ddd", + 24: "wf", + 25: "ending", + 26: "castle_courtyard", + 27: "pss", + 28: "cotmc", + 29: "totwc", + 30: "bowser_1", + 31: "wmotr", + 33: "bowser_2", + 34: "bowser_3", + 36: "ttm", +} + cameraTriggerNames = { "bbh": "sCamBBH", "bitdw": "sCamBitDW", @@ -1764,6 +1798,12 @@ def __init__(self, geoAddr, level, switchDict): ("macro_yellow_coin", "Yellow Coin", "Yellow Coin"), ("macro_yellow_coin_2", "Yellow Coin 2", "Yellow Coin 2"), ] +enumVersionDefs = [ + ("VERSION_US", "VERSION_US", ""), + ("VERSION_JP", "VERSION_JP", ""), + ("VERSION_EU", "VERSION_EU", ""), + ("VERSION_SH", "VERSION_SH", ""), +] enumBehaviorMacros = [ ("Custom", "Custom", ""), @@ -2142,6 +2182,230 @@ def __init__(self, geoAddr, level, switchDict): ("Custom", "Custom", "Custom"), ] + +# what is in specific groups and the segmented addresses +group_0_geos = [ + ("bubble_geo", "bubble_geo", "0x17000000"), + ("purple_marble_geo", "purple_marble_geo", "0x1700001c"), + ("smoke_geo", "smoke_geo", "0x17000038"), + ("burn_smoke_geo", "burn_smoke_geo", "0x17000084"), + ("small_water_splash_geo", "small_water_splash_geo", "0x1700009c"), + ("idle_water_wave_geo", "idle_water_wave_geo", "0x17000124"), + ("wave_trail_geo", "wave_trail_geo", "0x17000168"), + ("sparkles_geo", "sparkles_geo", "0x170001bc"), + ("water_splash_geo", "water_splash_geo", "0x17000230"), + 
("sparkles_animation_geo", "sparkles_animation_geo", "0x17000284"), + ("mario_geo", "mario_geo", "0x17002dd4"), +] + +group_1_geos = [ + ("yellow_sphere_geo", "yellow_sphere_geo", "0x0c000000"), + ("hoot_geo", "hoot_geo", "0x0c000018"), + ("yoshi_egg_geo", "yoshi_egg_geo", "0x0c0001e4"), + ("thwomp_geo", "thwomp_geo", "0x0c000248"), + ("bullet_bill_geo", "bullet_bill_geo", "0x0c000264"), + ("heave_ho_geo", "heave_ho_geo", "0x0c00028c"), +] + +group_2_geos = [ + ("bully_geo", "bully_geo", "0x0c000000"), + ("bully_boss_geo", "bully_boss_geo", "0x0c000120"), + ("blargg_geo", "blargg_geo", "0x0c000240"), +] + +group_3_geos = [ + ("king_bobomb_geo", "king_bobomb_geo", "0x0c000000"), + ("water_bomb_geo", "water_bomb_geo", "0x0c000308"), + ("water_bomb_shadow_geo", "water_bomb_shadow_geo", "0x0c000328"), +] + +group_4_geos = [ + ("clam_shell_geo", "clam_shell_geo", "0x0c000000"), + ("sushi_geo", "sushi_geo", "0x0c000068"), + ("unagi_geo", "unagi_geo", "0x0c00010c"), +] + +group_5_geos = [ + ("klepto_geo", "klepto_geo", "0x0c000000"), + ("eyerok_left_hand_geo", "eyerok_left_hand_geo", "0x0c0005a8"), + ("eyerok_right_hand_geo", "eyerok_right_hand_geo", "0x0c0005e4"), + ("pokey_head_geo", "pokey_head_geo", "0x0c000610"), + ("pokey_body_part_geo", "pokey_body_part_geo", "0x0c000644"), +] + +group_6_geos = [ + ("monty_mole_geo", "monty_mole_geo", "0x0c000000"), + ("ukiki_geo", "ukiki_geo", "0x0c000110"), + ("fwoosh_geo", "fwoosh_geo", "0x0c00036c"), +] + +group_7_geos = [ + ("spindrift_geo", "spindrift_geo", "0x0c000000"), + ("penguin_geo", "penguin_geo", "0x0c000104"), + ("mr_blizzard_hidden_geo", "mr_blizzard_hidden_geo", "0x0c00021c"), + ("mr_blizzard_geo", "mr_blizzard_geo", "0x0c000348"), +] + +group_8_geos = [ + ("springboard_top_geo", "springboard_top_geo", "0x0c000000"), + ("springboard_spring_geo", "springboard_spring_geo", "0x0c000018"), + ("springboard_bottom_geo", "springboard_bottom_geo", "0x0c000030"), + ("cap_switch_geo", "cap_switch_geo", "0x0c000048"), +] + 
+group_9_geos = [ + ("bookend_part_geo", "bookend_part_geo", "0x0c000000"), + ("bookend_geo", "bookend_geo", "0x0c0000c0"), + ("haunted_chair_geo", "haunted_chair_geo", "0x0c0000d8"), + ("small_key_geo", "small_key_geo", "0x0c000188"), + ("mad_piano_geo", "mad_piano_geo", "0x0c0001b4"), + ("boo_geo", "boo_geo", "0x0c000224"), + ("haunted_cage_geo", "haunted_cage_geo", "0x0c000274"), +] + +group_10_geos = [ + ("birds_geo", "birds_geo", "0x0c000000"), + ("peach_geo", "peach_geo", "0x0c000410"), + ("yoshi_geo", "yoshi_geo", "0x0c000468"), +] + +group_11_geos = [ + ("bubba_geo", "bubba_geo", "0x0c000000"), + ("wiggler_head_geo", "wiggler_head_geo", "0x0c000030"), + ("enemy_lakitu_geo", "enemy_lakitu_geo", "0x0c0001bc"), + ("spiny_ball_geo", "spiny_ball_geo", "0x0c000290"), + ("spiny_geo", "spiny_geo", "0x0c000328"), +] + +group_12_geos = [ + ("bowser_flames_geo", "bowser_flames_geo", "0x0d000000"), + ("invisible_bowser_accessory_geo", "invisible_bowser_accessory_geo", "0x0d000090"), + ("bowser_1_yellow_sphere_geo", "bowser_1_yellow_sphere_geo", "0x0d0000b0"), + ("bowser_shadow_geo", "bowser_shadow_geo", "0x0d000ab8"), + ("bowser_geo", "bowser_geo", "0x0d000ac4"), + ("bowser2_geo", "bowser2_geo", "0x0d000b40"), + ("bowser_bomb_geo", "bowser_bomb_geo", "0x0d000bbc"), + ("bowser_impact_smoke_geo", "bowser_impact_smoke_geo", "0x0d000bfc"), +] + +group_13_geos = [ + ("skeeter_geo", "skeeter_geo", "0x0d000000"), + ("seaweed_geo", "seaweed_geo", "0x0d000284"), + ("water_mine_geo", "water_mine_geo", "0x0d0002f4"), + ("cyan_fish_geo", "cyan_fish_geo", "0x0d000324"), + ("bub_geo", "bub_geo", "0x0d00038c"), + ("water_ring_geo", "water_ring_geo", "0x0d000414"), + ("treasure_chest_base_geo", "treasure_chest_base_geo", "0x0d000450"), + ("treasure_chest_lid_geo", "treasure_chest_lid_geo", "0x0d000468"), +] + +group_14_geos = [ + ("koopa_flag_geo", "koopa_flag_geo", "0x0d000000"), + ("wooden_post_geo", "wooden_post_geo", "0x0d0000b8"), + ("koopa_without_shell_geo", 
"koopa_without_shell_geo", "0x0d0000d0"), + ("koopa_with_shell_geo", "koopa_with_shell_geo", "0x0d000214"), + ("piranha_plant_geo", "piranha_plant_geo", "0x0d000358"), + ("whomp_geo", "whomp_geo", "0x0d000480"), + ("metallic_ball_geo", "metallic_ball_geo", "0x0d0005d0"), + ("chain_chomp_geo", "chain_chomp_geo", "0x0d0005ec"), +] + +group_15_geos = [ + ("lakitu_geo", "lakitu_geo", "0x0d000000"), + ("toad_geo", "toad_geo", "0x0d0003e4"), + ("mips_geo", "mips_geo", "0x0d000448"), + ("boo_castle_geo", "boo_castle_geo", "0x0d0005b0"), +] + +group_16_geos = [ + ("moneybag_geo", "moneybag_geo", "0x0d0000f0"), + ("mr_i_geo", "mr_i_geo", "0x0d000000"), + ("mr_i_iris_geo", "mr_i_iris_geo", "0x0d00001c"), +] + +group_17_geos = [ + ("swoop_geo", "swoop_geo", "0x0d0000dc"), + ("snufit_geo", "snufit_geo", "0x0d0001a0"), + ("dorrie_geo", "dorrie_geo", "0x0d000230"), + ("scuttlebug_geo", "scuttlebug_geo", "0x0d000394"), +] + +common_0_geos = [ + ("blue_coin_switch_geo", "blue_coin_switch_geo", "0x0f000000"), + ("test_platform_geo", "test_platform_geo", "0x0f000020"), + ("dAmpGeo", "amp_geo", "0x0f000028"), + ("cannon_base_geo", "cannon_base_geo", "0x0f0001a8"), + ("cannon_barrel_geo", "cannon_barrel_geo", "0x0f0001c0"), + ("chuckya_geo", "chuckya_geo", "0x0f0001d8"), + ("purple_switch_geo", "purple_switch_geo", "0x0f0004cc"), + ("checkerboard_platform_geo", "checkerboard_platform_geo", "0x0f0004e4"), + ("heart_geo", "heart_geo", "0x0f0004fc"), + ("flyguy_geo", "flyguy_geo", "0x0f000518"), + ("breakable_box_geo", "breakable_box_geo", "0x0f0005d0"), + ("breakable_box_small_geo", "breakable_box_small_geo", "0x0f000610"), + ("bowling_ball_geo", "bowling_ball_geo", "0x0f000640"), + ("bowling_ball_track_geo", "bowling_ball_track_geo", "0x0f00066c"), + ("exclamation_box_geo", "exclamation_box_geo", "0x0f000694"), + ("goomba_geo", "goomba_geo", "0x0f0006e4"), + ("black_bobomb_geo", "black_bobomb_geo", "0x0f0007b8"), + ("bobomb_buddy_geo", "bobomb_buddy_geo", "0x0f0008f4"), + 
("metal_box_geo", "metal_box_geo", "0x0f000a30"), + ("exclamation_box_outline_geo", "exclamation_box_outline_geo", "0x0f000a58"), + ("koopa_shell_geo", "koopa_shell_geo", "0x0f000ab0"), + ("koopa_shell2_geo", "koopa_shell2_geo", "0x0f000adc"), + ("koopa_shell3_geo", "koopa_shell3_geo", "0x0f000b08"), +] + +common_1_geos = [ + ("mist_geo", "mist_geo", "0x16000000"), + ("white_puff_geo", "white_puff_geo", "0x16000020"), + ("explosion_geo", "explosion_geo", "0x16000040"), + ("butterfly_geo", "butterfly_geo", "0x160000a8"), + ("yellow_coin_geo", "yellow_coin_geo", "0x1600013c"), + ("yellow_coin_no_shadow_geo", "yellow_coin_no_shadow_geo", "0x160001a0"), + ("blue_coin_geo", "blue_coin_geo", "0x16000200"), + ("blue_coin_no_shadow_geo", "blue_coin_no_shadow_geo", "0x16000264"), + ("red_coin_geo", "red_coin_geo", "0x160002c4"), + ("red_coin_no_shadow_geo", "red_coin_no_shadow_geo", "0x16000328"), + ("warp_pipe_geo", "warp_pipe_geo", "0x16000388"), + ("castle_door_geo", "castle_door_geo", "0x160003a8"), + ("cabin_door_geo", "cabin_door_geo", "0x1600043c"), + ("wooden_door_geo", "wooden_door_geo", "0x160004d0"), + ("wooden_door2_geo", "wooden_door2_geo", "0x16000564"), + ("metal_door_geo", "metal_door_geo", "0x160005f8"), + ("hazy_maze_door_geo", "hazy_maze_door_geo", "0x1600068c"), + ("haunted_door_geo", "haunted_door_geo", "0x16000720"), + ("castle_door_0_star_geo", "castle_door_0_star_geo", "0x160007b4"), + ("castle_door_1_star_geo", "castle_door_1_star_geo", "0x16000868"), + ("castle_door_3_stars_geo", "castle_door_3_stars_geo", "0x1600091c"), + ("key_door_geo", "key_door_geo", "0x160009d0"), + ("bowser_key_geo", "bowser_key_geo", "0x16000a84"), + ("bowser_key_cutscene_geo", "bowser_key_cutscene_geo", "0x16000ab0"), + ("red_flame_shadow_geo", "red_flame_shadow_geo", "0x16000b10"), + ("red_flame_geo", "red_flame_geo", "0x16000b2c"), + ("blue_flame_geo", "blue_flame_geo", "0x16000b8c"), + ("fish_shadow_geo", "fish_shadow_geo", "0x16000bec"), + ("fish_geo", "fish_geo", 
"0x16000c44"), + ("leaves_geo", "leaves_geo", "0x16000c8c"), + ("marios_cap_geo", "marios_cap_geo", "0x16000ca4"), + ("marios_metal_cap_geo", "marios_metal_cap_geo", "0x16000cf0"), + ("marios_wing_cap_geo", "marios_wing_cap_geo", "0x16000d3c"), + ("marios_winged_metal_cap_geo", "marios_winged_metal_cap_geo", "0x16000da8"), + ("number_geo", "number_geo", "0x16000e14"), + ("mushroom_1up_geo", "mushroom_1up_geo", "0x16000e84"), + ("star_geo", "star_geo", "0x16000ea0"), + ("dirt_animation_geo", "dirt_animation_geo", "0x16000ed4"), + ("cartoon_star_geo", "cartoon_star_geo", "0x16000f24"), + ("transparent_star_geo", "transparent_star_geo", "0x16000f6c"), + ("white_particle_geo", "white_particle_geo", "0x16000f98"), + ("wooden_signpost_geo", "wooden_signpost_geo", "0x16000fb4"), + ("bubbly_tree_geo", "bubbly_tree_geo", "0x16000fe8"), + ("spiky_tree_geo", "spiky_tree_geo", "0x16001000"), + ("snow_tree_geo", "snow_tree_geo", "0x16001018"), + ("spiky_tree1_geo", "spiky_tree1_geo", "0x16001030"), + ("palm_tree_geo", "palm_tree_geo", "0x16001048"), +] + T = TypeVar("T") DictOrVal = T | dict[T] | None ListOrVal = T | list[T] | None diff --git a/fast64_internal/sm64/sm64_geolayout_utility.py b/fast64_internal/sm64/sm64_geolayout_utility.py index f42220558..0d15f174d 100644 --- a/fast64_internal/sm64/sm64_geolayout_utility.py +++ b/fast64_internal/sm64/sm64_geolayout_utility.py @@ -42,6 +42,7 @@ def __init__(self, deform, theme): "StartRenderArea": BoneNodeProperties(True, "THEME13"), # 0x20 "Ignore": BoneNodeProperties(False, "THEME08"), # Used for rigging "SwitchOption": BoneNodeProperties(False, "THEME11"), + "CustomNonAnimated": BoneNodeProperties(False, "THEME11"), } boneLayers = {"anim": 0, "other": 1, "meta": 2, "visual": 3} diff --git a/fast64_internal/sm64/sm64_level_importer.py b/fast64_internal/sm64/sm64_level_importer.py new file mode 100644 index 000000000..9f67fef63 --- /dev/null +++ b/fast64_internal/sm64/sm64_level_importer.py @@ -0,0 +1,2890 @@ +# 
------------------------------------------------------------------------ +# Header +# ------------------------------------------------------------------------ +from __future__ import annotations + + +import bpy +import bmesh + +from bpy.props import ( + StringProperty, + BoolProperty, + IntProperty, + FloatProperty, + FloatVectorProperty, + EnumProperty, + PointerProperty, + CollectionProperty, + IntVectorProperty, + BoolVectorProperty, +) +from bpy.types import ( + Panel, + Menu, + Operator, + PropertyGroup, +) +from bpy.utils import register_class, unregister_class + +import cProfile, pstats, io +from pstats import SortKey + +import os, sys, math, re, typing +from array import array +from struct import * +from shutil import copy +from pathlib import Path +from types import ModuleType +from mathutils import Vector, Euler, Matrix, Quaternion +from copy import deepcopy +from dataclasses import dataclass +from typing import TextIO +from numbers import Number +from collections.abc import Sequence + +# from SM64classes import * + +from ..f3d.f3d_import import * +from ..f3d.f3d_material import update_node_values_of_material +from ..panels import SM64_Panel +from ..utility_importer import * +from ..utility import ( + transform_mtx_blender_to_n64, + rotate_quat_n64_to_blender, + rotate_object, + parentObject, + GetEnums, + prop_split, + create_collection, + read16bitRGBA, + hexOrDecInt, +) +from .sm64_objects import enumEnvFX +from .sm64_constants import ( + enumVersionDefs, + enumLevelNames, + enumSpecialsNames, + LEVEL_ID_NUMBERS, + groups_obj_export, + group_0_geos, + group_1_geos, + group_2_geos, + group_3_geos, + group_4_geos, + group_5_geos, + group_6_geos, + group_7_geos, + group_8_geos, + group_9_geos, + group_10_geos, + group_11_geos, + group_12_geos, + group_13_geos, + group_14_geos, + group_15_geos, + group_16_geos, + group_17_geos, + common_0_geos, + common_1_geos, +) + +# ------------------------------------------------------------------------ +# Data +# 
------------------------------------------------------------------------ + +# do something better than this later +Layers = { + "LAYER_FORCE": "0", + "LAYER_OPAQUE": "1", + "LAYER_OPAQUE_DECAL": "2", + "LAYER_OPAQUE_INTER": "3", + "LAYER_ALPHA": "4", + "LAYER_TRANSPARENT": "5", + "LAYER_TRANSPARENT_DECAL": "6", + "LAYER_TRANSPARENT_INTER": "7", +} + +# ------------------------------------------------------------------------ +# Classes +# ------------------------------------------------------------------------ + + +@dataclass +class Object: + model: str + pos: Vector + angle: Euler + bparam: str + behavior: str + act_mask: int + + +class Area: + def __init__( + self, + root: bpy.types.Object, + geo: str, + levelRoot: bpy.types.Object, + num: int, + scene: bpy.types.Scene, + col: bpy.types.Collection, + ): + self.root = root + self.geo = geo.strip() + self.num = num + self.scene = scene + self.props = scene.fast64.sm64.importer + # Set level root as parent + parentObject(levelRoot, root) + # set default vars + root.sm64_obj_type = "Area Root" + root.areaIndex = num + self.objects = [] + self.placed_special_objects = [] # for linking objects later + self.col = col + + def add_warp(self, args: list[str], type: str): + # set context to the root + bpy.context.view_layer.objects.active = self.root + # call fast64s warp node creation operator + bpy.ops.bone.add_warp_node() + warp = self.root.warpNodes[0] + warp.warpID = args[0] + warp.destNode = args[3] + level = args[1].strip().replace("LEVEL_", "").lower() + if level == "castle": + level = "castle_inside" + if level.isdigit(): + level = LEVEL_ID_NUMBERS.get(eval(level)) + if not level: + level = "bob" + warp.warpType = type + warp.destLevelEnum = level + warp.destArea = args[2] + chkpoint = args[-1].strip() + # Sorry for the hex users here + if "WARP_NO_CHECKPOINT" in chkpoint or int(chkpoint.isdigit() * chkpoint + "0") == 0: + warp.warpFlagEnum = "WARP_NO_CHECKPOINT" + else: + warp.warpFlagEnum = "WARP_CHECKPOINT" + + 
def add_instant_warp(self, args: list[str]): + # set context to the root + bpy.context.view_layer.objects.active = self.root + # call fast64s warp node creation operator + bpy.ops.bone.add_warp_node() + warp = self.root.warpNodes[0] + warp.type = "Instant" + warp.warpID = args[0] + warp.destArea = args[1] + warp.instantOffset = [hexOrDecInt(val) for val in args[2:5]] + + def add_object(self, args: list[str]): + # error prone? do people do math in pos? + pos = ( + Vector(hexOrDecInt(arg) for arg in args[1:4]) / self.scene.fast64.sm64.blender_to_sm64_scale + ) @ transform_mtx_blender_to_n64() + angle = Euler([math.radians(eval(a.strip())) for a in args[4:7]], "ZXY") + angle = rotate_quat_n64_to_blender(angle).to_euler("XYZ") + self.objects.append(Object(args[0], pos, angle, *args[7:])) + + def place_objects(self, col_name: str = None, actor_models: dict[model_name, bpy.Types.Object] = None): + if not col_name: + col = self.col + else: + col = create_collection(self.root.users_collection[0], col_name) + for object in self.objects: + bpy_obj = self.place_object(object, col) + if not actor_models: + continue + model_obj = actor_models.get(object.model, None) + if model_obj is None: + continue + self.link_bpy_obj_to_empty(bpy_obj, model_obj, col) + if not actor_models: + return + for placed_obj in self.placed_special_objects: + if "level_geo" in placed_obj.sm64_special_enum: + level_geo_model_name = self.get_level_geo_from_special(placed_obj.sm64_special_enum) + model_obj = actor_models.get(level_geo_model_name, None) + if model_obj: + self.link_bpy_obj_to_empty(placed_obj, model_obj, col) + + def get_level_geo_from_special(self, special_name: str): + return special_name.replace("special", "MODEL").replace("geo", "GEOMETRY").upper() + + def write_special_objects(self, special_objs: list[str], col: bpy.types.Collection): + special_presets = {enum[0] for enum in enumSpecialsNames} + for special in special_objs: + bpy_obj = bpy.data.objects.new("Empty", None) + 
col.objects.link(bpy_obj) + parentObject(self.root, bpy_obj) + bpy_obj.name = f"Special Object {special[0]}" + bpy_obj.sm64_obj_type = "Special" + if special[0] in special_presets: + bpy_obj.sm64_special_enum = special[0] + else: + bpy_obj.sm64_special_enum = "Custom" + bpy_obj.sm64_obj_preset = special[0] + loc = [eval(a.strip()) / self.scene.fast64.sm64.blender_to_sm64_scale for a in special[1:4]] + # rotate to fit sm64s axis + bpy_obj.location = [loc[0], -loc[2], loc[1]] + bpy_obj.rotation_euler[2] = hexOrDecInt(special[4]) + bpy_obj.sm64_obj_set_yaw = True + if special[5]: + bpy_obj.sm64_obj_set_bparam = True + bpy_obj.fast64.sm64.game_object.use_individual_params = False + bpy_obj.fast64.sm64.game_object.bparams = str(special[5]) + self.placed_special_objects.append(bpy_obj) + + def place_object(self, object: Object, col: bpy.types.Collection): + bpy_obj = bpy.data.objects.new("Empty", None) + col.objects.link(bpy_obj) + parentObject(self.root, bpy_obj) + bpy_obj.name = "Object {} {}".format(object.behavior, object.model) + bpy_obj.sm64_obj_type = "Object" + bpy_obj.sm64_behaviour_enum = "Custom" + bpy_obj.sm64_obj_behaviour = object.behavior.strip() + # change this to look at props version number? 
+ if hasattr(bpy_obj, "sm64_obj_bparam"): + bpy_obj.sm64_obj_bparam = object.bparam + else: + bpy_obj.fast64.sm64.game_object.bparams = object.bparam + bpy_obj.sm64_obj_model = object.model + bpy_obj.location = object.pos + bpy_obj.rotation_euler.rotate(object.angle) + # set act mask, !fix this for hacker versions + mask = object.act_mask + if type(mask) == str and mask.isdigit(): + mask = eval(mask) + form = "sm64_obj_use_act{}" + if mask == 31: + for i in range(1, 7, 1): + setattr(bpy_obj, form.format(i), True) + else: + for i in range(1, 7, 1): + if mask & (1 << (i - 1)): + setattr(bpy_obj, form.format(i), True) + else: + setattr(bpy_obj, form.format(i), False) + return bpy_obj + + def link_bpy_obj_to_empty( + self, bpy_obj: bpy.Types.Object, model_obj: bpy.Types.Collection, col: bpy.Types.Collection + ): + # duplicate, idk why temp override doesn't work + # with bpy.context.temp_override(active_object = model_obj, selected_objects = model_obj.children_recursive): + # bpy.ops.object.duplicate_move_linked() + bpy.ops.object.select_all(action="DESELECT") + for child in model_obj.children_recursive: + child.select_set(True) + model_obj.select_set(True) + bpy.context.view_layer.objects.active = model_obj + bpy.ops.object.duplicate_move() + new_obj = bpy.context.active_object + bpy.ops.object.transform_apply(location=False, rotation=True, scale=True, properties=False) + # unlink from col, add to area col + for obj in (new_obj, *new_obj.children_recursive): + obj.users_collection[0].objects.unlink(obj) + col.objects.link(obj) + new_obj.location = bpy_obj.location + new_obj.rotation_euler = bpy_obj.rotation_euler + # add constraints so obj follows along when you move empty + copy_loc = new_obj.constraints.new("COPY_LOCATION") + copy_loc.target = bpy_obj + copy_rot = new_obj.constraints.new("COPY_ROTATION") + copy_rot.target = bpy_obj + + +class Level(DataParser): + def __init__(self, scripts: dict[str, list[str]], scene: bpy.types.Scene, root: bpy.types.Object): + 
self.scripts = scripts + self.scene = scene + self.props = scene.fast64.sm64.importer + self.areas: dict[area_index:int, Area] = {} + self.cur_area: int = None + self.root = root + self.loaded_geos: dict[model_name:str, geo_name:str] = dict() + self.loaded_dls: dict[model_name:str, dl_name:str] = dict() + super().__init__() + + def parse_level_script(self, entry: str, col: bpy.types.Collection = None): + script_stream = self.scripts[entry] + scale = self.scene.fast64.sm64.blender_to_sm64_scale + if not col: + col = self.scene.collection + self.parse_stream_from_start(script_stream, entry, col) + return self.areas + + def AREA(self, macro: Macro, col: bpy.types.Collection): + area_root = bpy.data.objects.new("Empty", None) + if self.props.use_collection: + area_col = bpy.data.collections.new(f"{self.props.level_name} area {macro.args[0]}") + col.children.link(area_col) + else: + area_col = col + area_col.objects.link(area_root) + area_root.name = f"{self.props.level_name} Area Root {macro.args[0]}" + self.areas[macro.args[0]] = Area(area_root, macro.args[1], self.root, int(macro.args[0]), self.scene, area_col) + self.cur_area = macro.args[0] + return self.continue_parse + + def END_AREA(self, macro: Macro, col: bpy.types.Collection): + self.cur_area = None + return self.continue_parse + + # Jumps are only taken if they're in the script.c file for now + # continues script + def JUMP_LINK(self, macro: Macro, col: bpy.types.Collection): + if self.scripts.get(macro.args[0]): + self.parse_level_script(macro.args[0], col=col) + return self.continue_parse + + # ends script + def JUMP(self, macro: Macro, col: bpy.types.Collection): + new_entry = self.scripts.get(macro.args[-1]) + if new_entry: + self.parse_level_script(macro.args[-1], col=col) + return self.break_parse + + def EXIT(self, macro: Macro, col: bpy.types.Collection): + return self.break_parse + + def RETURN(self, macro: Macro, col: bpy.types.Collection): + return self.break_parse + + # Now deal with data cmds 
rather than flow control ones + def WARP_NODE(self, macro: Macro, col: bpy.types.Collection): + self.areas[self.cur_area].add_warp(macro.args, "Warp") + return self.continue_parse + + def PAINTING_WARP_NODE(self, macro: Macro, col: bpy.types.Collection): + self.areas[self.cur_area].add_warp(macro.args, "Painting") + return self.continue_parse + + def INSTANT_WARP(self, macro: Macro, col: bpy.types.Collection): + self.areas[self.cur_area].add_instant_warp(macro.args) + return self.continue_parse + + def OBJECT_WITH_ACTS(self, macro: Macro, col: bpy.types.Collection): + # convert act mask from ORs of act names to a number + mask = macro.args[-1] + if not mask.isdigit(): + mask = mask.replace("ACT_", "") + mask = mask.split("|") + # Attempt for safety I guess + try: + accumulator = 0 + for m in mask: + accumulator += 1 << int(m) + mask = accumulator + except: + mask = 31 + self.areas[self.cur_area].add_object([*macro.args[:-1], mask]) + return self.continue_parse + + def OBJECT(self, macro: Macro, col: bpy.types.Collection): + # Only difference is act mask, which I set to 31 to mean all acts + self.areas[self.cur_area].add_object([*macro.args, 31]) + return self.continue_parse + + def TERRAIN_TYPE(self, macro: Macro, col: bpy.types.Collection): + if not macro.args[0].isdigit(): + self.areas[self.cur_area].root.terrainEnum = macro.args[0] + else: + terrains = { + 0: "TERRAIN_GRASS", + 1: "TERRAIN_STONE", + 2: "TERRAIN_SNOW", + 3: "TERRAIN_SAND", + 4: "TERRAIN_SPOOKY", + 5: "TERRAIN_WATER", + 6: "TERRAIN_SLIDE", + 7: "TERRAIN_MASK", + } + try: + num = eval(macro.args[0]) + self.areas[self.cur_area].root.terrainEnum = terrains.get(num) + except: + print("could not set terrain") + return self.continue_parse + + def SHOW_DIALOG(self, macro: Macro, col: bpy.types.Collection): + root = self.areas[self.cur_area].root + root.showStartDialog = True + root.startDialog = macro.args[1] + return self.continue_parse + + def TERRAIN(self, macro: Macro, col: bpy.types.Collection): + 
self.areas[self.cur_area].terrain = macro.args[0] + return self.continue_parse + + def SET_BACKGROUND_MUSIC(self, macro: Macro, col: bpy.types.Collection): + return self.generic_music(macro, col) + + def SET_MENU_MUSIC_WITH_REVERB(self, macro: Macro, col: bpy.types.Collection): + return self.generic_music(macro, col) + + def SET_BACKGROUND_MUSIC_WITH_REVERB(self, macro: Macro, col: bpy.types.Collection): + return self.generic_music(macro, col) + + def SET_MENU_MUSIC(self, macro: Macro, col: bpy.types.Collection): + return self.generic_music(macro, col) + + def generic_music(self, macro: Macro, col: bpy.types.Collection): + root = self.areas[self.cur_area].root + root.musicSeqEnum = "Custom" + root.music_seq = macro.args[1] + return self.continue_parse + + # Don't support these for now + def MACRO_OBJECTS(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def WHIRLPOOL(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def SET_ECHO(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def MARIO_POS(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def SET_REG(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def GET_OR_SET(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def CHANGE_AREA_SKYBOX(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + # Don't support for now but maybe later + def JUMP_LINK_PUSH_ARG(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def JUMP_N_TIMES(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def LOOP_BEGIN(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def LOOP_UNTIL(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def JUMP_IF(self, macro: Macro, col: 
bpy.types.Collection): + raise Exception("no support yet woops") + + def JUMP_LINK_IF(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def SKIP_IF(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def SKIP(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def SKIP_NOP(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def LOAD_AREA(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def UNLOAD_AREA(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def UNLOAD_MARIO_AREA(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + def UNLOAD_AREA(self, macro: Macro, col: bpy.types.Collection): + raise Exception("no support yet woops") + + # use group mapping to set groups eventually + def LOAD_MIO0(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_MIO0_TEXTURE(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_TITLE_SCREEN_BG(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_GODDARD(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_BEHAVIOR_DATA(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_COMMON0(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_GROUPB(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_GROUPA(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_EFFECTS(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_SKYBOX(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def 
LOAD_TEXTURE_BIN(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_LEVEL_DATA(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_YAY0(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_YAY0_TEXTURE(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_VANILLA_OBJECTS(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_RAW(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_RAW_WITH_CODE(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_MARIO_HEAD(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def LOAD_MODEL_FROM_GEO(self, macro: Macro, col: bpy.types.Collection): + self.loaded_geos[macro.args[0]] = macro.args[1] + return self.continue_parse + + def LOAD_MODEL_FROM_DL(self, macro: Macro, col: bpy.types.Collection): + self.loaded_dls[macro.args[0]] = macro.args[1] + return self.continue_parse + + # throw exception saying I cannot process + def EXECUTE(self, macro: Macro, col: bpy.types.Collection): + raise Exception("Processing of EXECUTE macro is not currently supported") + + def EXIT_AND_EXECUTE(self, macro: Macro, col: bpy.types.Collection): + raise Exception("Processing of EXIT_AND_EXECUTE macro is not currently supported") + + def EXECUTE_WITH_CODE(self, macro: Macro, col: bpy.types.Collection): + raise Exception("Processing of EXECUTE_WITH_CODE macro is not currently supported") + + def EXIT_AND_EXECUTE_WITH_CODE(self, macro: Macro, col: bpy.types.Collection): + raise Exception("Processing of EXIT_AND_EXECUTE_WITH_CODE macro is not currently supported") + + # not useful for bpy, dummy these script cmds + def CMD3A(self, macro: Macro, col: bpy.types.Collection): + return self.continue_parse + + def STOP_MUSIC(self, macro: Macro, col: bpy.types.Collection): + 
        return self.continue_parse

    # --- level script commands below are parsed but have no Blender
    # representation; each consumes its macro and continues the stream ---

    def GAMMA(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def BLACKOUT(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def TRANSITION(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def NOP(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def CMD23(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def PUSH_POOL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def POP_POOL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def SLEEP(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def ROOMS(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def MARIO(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def INIT_LEVEL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def ALLOC_LEVEL_POOL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def FREE_LEVEL_POOL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def CALL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def CALL_LOOP(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def CLEAR_LEVEL(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse

    def SLEEP_BEFORE_EXIT(self, macro: Macro, col: bpy.types.Collection):
        return self.continue_parse


# a single collision triangle: surface type, three vertex indices,
# and an optional special parameter (force/warp id etc.)
@dataclass
class ColTri:
    type: Any
    verts: list[int]
    special_param: Any = None


# parses a decomp collision.inc.c macro stream into vertices/tris/water boxes
# and can write the result out as a Blender mesh object
class Collision(DataParser):
    def __init__(self, collision: list[str], scale: float):
        self.collision = collision
        self.scale = scale
        self.vertices = []
        # key=type,value=tri data
        self.tris: list[ColTri] = []
        self.type: str = None
        self.special_objects = []
        self.water_boxes = []
        super().__init__()

    def write_water_boxes(
        self, scene: bpy.types.Scene, parent: bpy.types.Object, name: str, col: bpy.types.Collection = None
    ):
        """Create one 'Water Box' empty per parsed COL_WATER_BOX entry, parented under *parent*."""
        for i, w in enumerate(self.water_boxes):
            Obj = bpy.data.objects.new("Empty", None)
            scene.collection.objects.link(Obj)
            parentObject(parent, Obj)
            Obj.name = "WaterBox_{}_{}".format(name, i)
            Obj.sm64_obj_type = "Water Box"
            # w = (id, x1, z1, x2, z2, y) as strings; scale from SM64 units
            x1 = eval(w[1]) / (self.scale)
            x2 = eval(w[3]) / (self.scale)
            z1 = eval(w[2]) / (self.scale)
            z2 = eval(w[4]) / (self.scale)
            y = eval(w[5]) / (self.scale)
            # empties are centered, so use half-extents and the box midpoint
            Xwidth = abs(x2 - x1) / (2)
            Zwidth = abs(z2 - z1) / (2)
            loc = [x2 - Xwidth, -(z2 - Zwidth), y - 1]
            Obj.location = loc
            scale = [Xwidth, Zwidth, 1]
            Obj.scale = scale

    def write_collision(
        self, scene: bpy.types.Scene, name: str, parent: bpy.types.Object, col: bpy.types.Collection = None
    ):
        """Build a mesh from the parsed collision data, creating one F3D material per surface type."""
        if not col:
            col = scene.collection
        self.write_water_boxes(scene, parent, name, col)
        mesh = bpy.data.meshes.new(f"{name} data")
        mesh.from_pydata(self.vertices, [], [tri.verts for tri in self.tris])
        obj = bpy.data.objects.new(f"{name} mesh", mesh)
        col.objects.link(obj)
        obj.ignore_render = True
        if parent:
            parentObject(parent, obj)
        rotate_object(-90, obj, world=1)
        bpy.context.view_layer.objects.active = obj
        # NOTE(review): unused local, and it shadows the builtin `max`
        max = len(obj.data.polygons)
        col_materials: dict[str, "mat_index"] = dict()
        for i, (bpy_tri, col_tri) in enumerate(zip(obj.data.polygons, self.tris)):
            if col_tri.type not in col_materials:
                bpy.ops.object.create_f3d_mat()  # the newest mat should be in slot[-1]
                mat = obj.data.materials[-1]
                col_materials[col_tri.type] = len(obj.data.materials) - 1
                # fix this
                mat.collision_type_simple = "Custom"
                mat.collision_custom = col_tri.type
                mat.name = "Sm64_Col_Mat_{}".format(col_tri.type)
                # Just to give some variety
                mat.f3d_mat.default_light_color = [a / 255 for a in (hash(id(int(i))) & 0xFFFFFFFF).to_bytes(4, "big")]
                if col_tri.special_param is not None:
                    mat.use_collision_param = True
                    mat.collision_param = str(col_tri.special_param)
            # I don't think I care about this. It makes program slow
            # with bpy.context.temp_override(material=mat):
            # bpy.ops.material.update_f3d_nodes()
            bpy_tri.material_index = col_materials[col_tri.type]
        return obj

    def parse_collision(self):
        self.parse_stream(self.collision, 0)

    def COL_VERTEX(self, macro: Macro):
        self.vertices.append([eval(v) / self.scale for v in macro.args])
        return self.continue_parse

    def COL_TRI_INIT(self, macro: Macro):
        # sets the surface type used by all following COL_TRI commands
        self.type = macro.args[0]
        return self.continue_parse

    def COL_TRI(self, macro: Macro):
        self.tris.append(ColTri(self.type, [eval(a) for a in macro.args]))
        return self.continue_parse

    def COL_TRI_SPECIAL(self, macro: Macro):
        self.tris.append(ColTri(self.type, [eval(a) for a in macro.args[0:3]], special_param=eval(macro.args[3])))
        return self.continue_parse

    def COL_WATER_BOX(self, macro: Macro):
        # id, x1, z1, x2, z2, y
        self.water_boxes.append(macro.args)
        return self.continue_parse

    # not written out currently
    def SPECIAL_OBJECT(self, macro: Macro):
        self.special_objects.append((*macro.args, 0, 0))
        return self.continue_parse

    def SPECIAL_OBJECT_WITH_YAW(self, macro: Macro):
        self.special_objects.append((*macro.args, 0))
        return self.continue_parse

    def SPECIAL_OBJECT_WITH_YAW_AND_PARAM(self, macro: Macro):
        self.special_objects.append(macro.args)
        return self.continue_parse

    # don't do anything to bpy
    def COL_WATER_BOX_INIT(self, macro: Macro):
        return self.continue_parse

    def COL_INIT(self, macro: Macro):
        return self.continue_parse

    def COL_VERTEX_INIT(self, macro: Macro):
        return self.continue_parse

    def COL_SPECIAL_INIT(self, macro: Macro):
        return self.continue_parse

    def COL_TRI_STOP(self, macro: Macro):
        return self.continue_parse

    def COL_END(self, macro: Macro):
        return self.continue_parse


class SM64_Material(Mat):
    def load_texture(self,
force_new_tex: bool, textures: dict, path: Path, tex: Texture): + if not tex: + return None + Timg = textures.get(tex.Timg)[0].split("/")[-1] + Timg = Timg.replace("#include ", "").replace('"', "").replace("'", "").replace("inc.c", "png") + image = bpy.data.images.get(Timg) + if not image or force_new_tex: + Timg = textures.get(tex.Timg)[0] + Timg = Timg.replace("#include ", "").replace('"', "").replace("'", "").replace("inc.c", "png") + # deal with duplicate pathing (such as /actors/actors etc.) + Extra = path.relative_to(Path(bpy.path.abspath(bpy.context.scene.fast64.sm64.decomp_path))) + for e in Extra.parts: + Timg = Timg.replace(e + "/", "") + # deal with actor import path not working for shared textures + if "textures" in Timg: + fp = Path(bpy.path.abspath(bpy.context.scene.fast64.sm64.decomp_path)) / Timg + else: + fp = path / Timg + return bpy.data.images.load(filepath=str(fp)) + else: + return image + + def apply_PBSDF_Mat(self, mat: bpy.types.Material, textures: dict, tex_path: Path, layer: int, tex: Texture): + nt = mat.node_tree + nodes = nt.nodes + links = nt.links + pbsdf = nodes.get("Principled BSDF") + if not pbsdf: + return + tex_node = nodes.new("ShaderNodeTexImage") + links.new(pbsdf.inputs[0], tex_node.outputs[0]) # base color + links.new(pbsdf.inputs[21], tex_node.outputs[1]) # alpha color + image = self.load_texture(bpy.context.scene.fast64.sm64.importer.force_new_tex, textures, tex_path, tex) + if image: + tex_node.image = image + if int(layer) > 4: + mat.blend_method == "BLEND" + + def apply_material_settings(self, mat: bpy.types.Material, textures: dict, tex_path: Path, layer: int): + self.set_texture_tile_mapping() + + if bpy.context.scene.fast64.sm64.importer.as_obj: + return self.apply_PBSDF_Mat(mat, textures, tex_path, layer, self.tex0) + + f3d = mat.f3d_mat + + f3d.draw_layer.sm64 = layer + self.set_register_settings(mat, f3d) + self.set_textures(f3d, textures, tex_path) + + # manually call node update for speed + mat.f3d_update_flag = 
True
        update_node_values_of_material(mat, bpy.context)
        mat.f3d_mat.presetName = "Custom"
        mat.f3d_update_flag = False

    def set_textures(self, f3d: F3DMaterialProperty, textures: dict, tex_path: Path):
        """Load and assign tex0/tex1 images and their tile settings onto *f3d*."""
        self.set_tex_scale(f3d)
        if self.tex0 and self.set_tex:
            self.set_tex_settings(
                f3d.tex0,
                self.load_texture(bpy.context.scene.fast64.sm64.importer.force_new_tex, textures, tex_path, self.tex0),
                self.tiles[0 + self.base_tile],
                self.tex0.Timg,
            )
        if self.tex1 and self.set_tex:
            self.set_tex_settings(
                f3d.tex1,
                self.load_texture(bpy.context.scene.fast64.sm64.importer.force_new_tex, textures, tex_path, self.tex1),
                self.tiles[1 + self.base_tile],
                self.tex1.Timg,
            )


# SM64-specific display list parser; builds verts/tris/UVs/vertex colors
# from parsed Gfx data and materializes them as Blender meshes
class SM64_F3D(DL):
    def __init__(self, scene):
        self.scene = scene
        self.props = scene.fast64.sm64.importer
        super().__init__(lastmat=SM64_Material())

    # Textures only contains the texture data found inside the model.inc.c file and the texture.inc.c file
    # this will add all the textures located in the /textures/ folder in decomp
    def get_generic_textures(self, root_path: Path):
        for t in [
            "cave.c",
            "effect.c",
            "fire.c",
            "generic.c",
            "grass.c",
            "inside.c",
            "machine.c",
            "mountain.c",
            "outside.c",
            "sky.c",
            "snow.c",
            "spooky.c",
            "water.c",
        ]:
            t = root_path / "bin" / t
            t = open(t, "r", newline="")
            tex = t
            # For textures, try u8, and s16 aswell
            self.Textures.update(
                get_data_types_from_file(
                    tex,
                    {
                        "Texture": [None, None],
                        "u8": [None, None],
                        "s16": [None, None],
                    },
                )
            )
            t.close()

    # recursively parse the display list in order to return a bunch of model data
    def get_f3d_data_from_model(self, start: str, last_mat: SM64_Material = None):
        DL = self.Gfx.get(start)
        self.VertBuff = [0] * 32  # If you're doing some fucky shit with a larger vert buffer it sucks to suck I guess
        if not DL:
            raise Exception("Could not find DL {}".format(start))
        self.Verts = []
        self.Tris = []
        self.UVs = []
        self.VCs = []
        self.Mats = []
        if last_mat:
            self.LastMat = last_mat
        self.parse_stream(DL, start)
        self.NewMat = 0
        self.StartName = start
        return [self.Verts, self.Tris]

    # turn member of vtx str arr into vtx args
    def parse_vert(self, Vert: str):
        # Vtx layout: {pos[3], flag, uv[2], color/normal[4]}
        v = Vert.replace("{", "").replace("}", "").split(",")
        num = lambda x: [eval(a) for a in x]
        pos = num(v[:3])
        uv = num(v[4:6])
        vc = num(v[6:10])
        return [pos, uv, vc]

    # given tri args in gbi cmd, give appropriate tri indices in vert list
    def parse_tri(self, Tri: list[int]):
        L = len(self.Verts)
        return [a + L - self.LastLoad for a in Tri]

    def apply_mesh_data(self, obj: bpy.types.Object, mesh: bpy.types.Mesh, layer: int, tex_path: Path):
        """Create UV/vertex-color layers on *mesh* and assign per-triangle materials."""
        bpy.context.view_layer.objects.active = obj
        ind = -1
        new = -1
        UVmap = obj.data.uv_layers.new(name="UVMap")
        # I can get the available enums for color attrs with this func
        vcol_enums = GetEnums(bpy.types.FloatColorAttribute, "data_type")
        # enums were changed in a blender version, this should future proof it a little
        if "FLOAT_COLOR" in vcol_enums:
            e = "FLOAT_COLOR"
        else:
            e = "COLOR"
        Vcol = obj.data.color_attributes.get("Col")
        if not Vcol:
            Vcol = obj.data.color_attributes.new(name="Col", type=e, domain="CORNER")
        Valph = obj.data.color_attributes.get("Alpha")
        if not Valph:
            Valph = obj.data.color_attributes.new(name="Alpha", type=e, domain="CORNER")

        b_mesh = bmesh.new()
        b_mesh.from_mesh(mesh)
        tris = b_mesh.faces
        tris.ensure_lookup_table()
        uv_map = b_mesh.loops.layers.uv.active
        v_color = b_mesh.loops.layers.float_color["Col"]
        v_alpha = b_mesh.loops.layers.float_color["Alpha"]

        self.Mats.append([len(tris), 0])
        for i, t in enumerate(tris):
            if i > self.Mats[ind + 1][0]:
                new = self.create_new_f3d_mat(self.Mats[ind + 1][1], mesh)
                ind += 1
                if not new:
                    new = len(mesh.materials) - 1
                    mat = mesh.materials[new]
                    mat.name = "sm64 F3D Mat {} {}".format(obj.name, new)
                    # NOTE(review): indexes self.Mats by the material slot index `new`;
                    # looks like it may be meant to be `self.Mats[ind][1]` — confirm
                    self.Mats[new][1].apply_material_settings(mat,
self.Textures, tex_path, layer) + else: + # I tried to re use mat slots but it is much slower, and not as accurate + # idk if I was just doing it wrong or the search is that much slower, but this is easier + mesh.materials.append(new) + new = len(mesh.materials) - 1 + # if somehow there is no material assigned to the triangle or something is lost + if new != -1: + self.apply_loop_data(new, mesh, t, uv_map, v_color, v_alpha) + b_mesh.to_mesh(mesh) + + def apply_loop_data(self, mat: bpy.Types.Material, mesh: bpy.Types.Mesh, tri, uv_map, v_color, v_alpha): + tri.material_index = mat + # Get texture size or assume 32, 32 otherwise + i = mesh.materials[mat].f3d_mat.tex0.tex + if not i: + WH = (32, 32) + else: + WH = i.size + # Set UV data and Vertex Color Data + for v, l in zip(tri.verts, tri.loops): + uv = self.UVs[v.index] + vcol = self.VCs[v.index] + # scale verts + l[uv_map].uv = [a * (1 / (32 * b)) if b > 0 else a * 0.001 * 32 for a, b in zip(uv, WH)] + # idk why this is necessary. N64 thing or something? 
            l[uv_map].uv[1] = l[uv_map].uv[1] * -1 + 1
            l[v_color] = [a / 255 for a in vcol]

    # create a new f3d_mat given an SM64_Material class but don't create copies with same props
    def create_new_f3d_mat(self, mat: SM64_Material, mesh: bpy.types.Mesh):
        """Return an existing duplicate material if one matches, else append a fresh one and return None."""
        if not self.props.force_new_tex:
            # check if this mat was used already in another mesh (or this mat if DL is garbage or something)
            # even looping n^2 is probably faster than duping 3 mats with blender speed
            for j, F3Dmat in enumerate(bpy.data.materials):
                if F3Dmat.is_f3d:
                    dupe = mat.mat_hash_f3d(F3Dmat.f3d_mat)
                    if dupe:
                        return F3Dmat
        f3d_mat = None
        for mat in bpy.data.materials:
            if mat.is_f3d:
                f3d_mat = mat
        if f3d_mat:
            new_mat = f3d_mat.id_data.copy()  # make a copy of the data block
            # add a mat slot and add mat to it
            mesh.materials.append(new_mat)
        else:
            if self.props.as_obj:
                NewMat = bpy.data.materials.new(f"sm64 {mesh.name.replace('Data', 'material')}")
                mesh.materials.append(NewMat)  # the newest mat should be in slot[-1] for the mesh materials
                NewMat.use_nodes = True
            else:
                bpy.ops.object.create_f3d_mat()  # the newest mat should be in slot[-1] for the mesh materials
        return None


# holds model found by geo
@dataclass
class ModelDat:
    transform: Matrix
    layer: int
    model_name: str
    vertex_group_name: str = None
    switch_index: int = 0
    armature_obj: bpy.types.Object = None
    object: bpy.types.Object = None


# base class for geo layouts and armatures
class GraphNodes(DataParser):
    _skipped_geo_asm_funcs = {
        "geo_movtex_pause_control",
        "geo_movtex_draw_water_regions",
        "geo_cannon_circle_base",
        "geo_envfx_main",
    }

    def __init__(
        self,
        geo_layouts: dict[str, list[str]],
        scene: bpy.types.Scene,
        name: str,
        col: bpy.types.Collection,
        parent_bone: bpy.types.Bone = None,
        geo_parent: "GeoArmature" = None,
        stream: list[Any] = None,
    ):
        self.geo_layouts = geo_layouts
        self.models = []
        self.children = []
        self.scene = scene
        self.props = scene.fast64.sm64.importer
        if not stream:
            stream = list()
        self.stream = stream
        self.parent_transform = transform_mtx_blender_to_n64().inverted()
        self.last_transform = transform_mtx_blender_to_n64().inverted()
        self.name = name
        self.col = col
        super().__init__(parent=geo_parent)

    def parse_layer(self, layer: str):
        """Map a layer macro name to its numeric value; unknown names default to 1."""
        if not layer.isdigit():
            layer = Layers.get(layer)
            if not layer:
                layer = 1
        return layer

    @property
    def ordered_name(self):
        # prefix with the current parser head so sibling nodes sort in parse order
        return f"{self.get_parser(self.stream[-1]).head}_{self.name}"

    @property
    def first_obj(self):
        # first object in this subtree: the root, else the first model object,
        # else recurse into children
        if self.root:
            return self.root
        for model in self.models:
            if model.object:
                return model.object
        for child in self.children:
            if root := child.first_obj:
                return root
        return None

    def get_translation(self, trans_vector: Sequence):
        # N64 (x, y, z) -> Blender (x, -z, y)
        translation = [float(val) for val in trans_vector]
        return [translation[0], -translation[2], translation[1]]

    def get_rotation(self, rot_vector: Sequence):
        # geo layout rotations are ZXY-order degrees; convert into Blender space
        rotation = Euler((math.radians(float(val)) for val in rot_vector), "ZXY")
        return rotate_quat_n64_to_blender(rotation.to_quaternion()).to_euler("XYZ")

    def set_transform(self, geo_obj, translation: Sequence):
        raise Exception("you must call this function from a sublcass")

    def set_geo_type(self, geo_obj: bpy.types.Object, geo_type: str):
        raise Exception("you must call this function from a sublcass")

    def set_draw_layer(self, geo_obj: bpy.types.Object, layer: int):
        raise Exception("you must call this function from a sublcass")

    def make_root(self, name, *args):
        raise Exception("you must call this function from a sublcass")

    def setup_geo_obj(self, *args):
        raise Exception("you must call this function from a sublcass")

    def add_model(self, *args):
        raise Exception("you must call this function from a sublcass")

    def GEO_BRANCH_AND_LINK(self, macro: Macro, depth: int):
        # recurse into the branched layout, then continue this one
        new_geo_layout = self.geo_layouts.get(macro.args[0])
        if new_geo_layout:
            self.stream.append(macro.args[0])
            self.parse_stream_from_start(new_geo_layout, macro.args[0], depth)
        return self.continue_parse

    def GEO_BRANCH(self, macro: Macro, depth: int):
        new_geo_layout = self.geo_layouts.get(macro.args[1])
        if new_geo_layout:
            self.stream.append(macro.args[1])
            self.parse_stream_from_start(new_geo_layout, macro.args[1], depth)
        # arg 0 determines if you return and continue or end after the branch
        if eval(macro.args[0]):
            return self.continue_parse
        else:
            return self.break_parse

    def GEO_END(self, macro: Macro, depth: int):
        self.stream = None
        return self.break_parse

    def GEO_RETURN(self, macro: Macro, depth: int):
        self.stream.pop()
        return self.break_parse

    def GEO_CLOSE_NODE(self, macro: Macro, depth: int):
        return self.break_parse

    def GEO_DISPLAY_LIST(self, macro: Macro, depth: int):
        # translation, rotation, layer, model
        geo_obj = self.add_model(
            ModelDat(self.parent_transform, *macro.args), "display_list", self.display_list, macro.args[0]
        )
        self.set_transform(geo_obj, self.last_transform)
        return self.continue_parse

    def GEO_BILLBOARD_WITH_PARAMS_AND_DL(self, macro: Macro, depth: int):
        transform = Matrix()
        transform.translation = self.get_translation(macro.args[1:4])
        self.last_transform = self.parent_transform @ transform

        model = macro.args[-1]
        if model != "NULL":
            geo_obj = self.add_model(
                ModelDat(self.last_transform, macro.args[0], model), "billboard", self.billboard, macro.args[0]
            )
        else:
            geo_obj = self.setup_geo_obj("billboard", self.billboard, macro.args[0])
        self.set_transform(geo_obj, self.last_transform)
        return self.continue_parse

    def GEO_BILLBOARD_WITH_PARAMS(self, macro: Macro, depth: int):
        transform = Matrix()
        transform.translation = self.get_translation(macro.args[1:4])
        self.last_transform = self.parent_transform @ transform

        geo_obj = self.setup_geo_obj("billboard", self.billboard, macro.args[0])
        self.set_transform(geo_obj, self.last_transform)
        return self.continue_parse

    def GEO_BILLBOARD(self, macro: Macro, depth: int):
        self.setup_geo_obj("billboard", self.billboard, macro.args[0])
        return self.continue_parse

    def GEO_ANIMATED_PART(self, macro: Macro, depth: int):
        # layer, translation, DL
        transform = Matrix()
        transform.translation = self.get_translation(macro.args[1:4])
        self.last_transform = self.parent_transform @ transform
        model = macro.args[-1]

        if model != "NULL":
            geo_obj = self.add_model(
                ModelDat(self.last_transform, macro.args[0], model), "bone", self.animated_part, macro.args[0]
            )
        else:
            geo_obj = self.setup_geo_obj("bone", self.animated_part, macro.args[0])
        self.set_transform(geo_obj, self.last_transform)
        return self.continue_parse

    def GEO_ROTATION_NODE(self, macro: Macro, depth: int):
        geo_obj = self.GEO_ROTATE(macro, depth)
        if geo_obj:
            self.set_geo_type(geo_obj, self.rotate)
        return self.continue_parse

    def GEO_ROTATE(self, macro: Macro, depth: int):
        transform = Matrix.LocRotScale(Vector(), self.get_rotation(macro.args[1:4]), Vector((1, 1, 1)))
        self.last_transform = self.parent_transform @ transform
        return self.setup_geo_obj("rotate", self.translate_rotate, macro.args[0])

    def GEO_ROTATION_NODE_WITH_DL(self, macro: Macro, depth: int):
        geo_obj = self.GEO_ROTATE_WITH_DL(macro, depth)
        return self.continue_parse

    def GEO_ROTATE_WITH_DL(self, macro: Macro, depth: int):
        transform = Matrix.LocRotScale(Vector(), self.get_rotation(macro.args[1:4]), Vector((1, 1, 1)))
        self.last_transform = self.parent_transform @ transform

        model = macro.args[-1]
        if model != "NULL":
            geo_obj = self.add_model(
                ModelDat(self.last_transform, macro.args[0], model), "rotate", self.translate_rotate, macro.args[0]
            )
        else:
            geo_obj = self.setup_geo_obj("rotate", self.translate_rotate, macro.args[0])
        self.set_transform(geo_obj, self.last_transform)
        return geo_obj

    def GEO_TRANSLATE_ROTATE_WITH_DL(self, macro: Macro,
depth: int): + transform = Matrix.LocRotScale( + self.get_translation(macro.args[1:4]), self.get_rotation(macro.args[4:7]), Vector((1, 1, 1)) + ) + self.last_transform = self.parent_transform @ transform + + model = macro.args[-1] + if model != "NULL": + geo_obj = self.add_model( + ModelDat(self.last_transform, macro.args[0], model), + "trans/rotate", + self.translate_rotate, + macro.args[0], + ) + else: + geo_obj = self.setup_geo_obj("trans/rotate", self.translate_rotate, macro.args[0]) + self.set_transform(geo_obj, self.last_transform) + return self.continue_parse + + def GEO_TRANSLATE_ROTATE(self, macro: Macro, depth: int): + transform = Matrix.LocRotScale( + self.get_translation(macro.args[1:4]), self.get_rotation(macro.args[1:4]), Vector((1, 1, 1)) + ) + self.last_transform = self.parent_transform @ transform + + geo_obj = self.setup_geo_obj("trans/rotate", self.translate_rotate, macro.args[0]) + self.set_transform(geo_obj, self.last_transform) + return self.continue_parse + + def GEO_TRANSLATE_WITH_DL(self, macro: Macro, depth: int): + geo_obj = self.GEO_TRANSLATE_NODE_WITH_DL(macro, depth) + if geo_obj: + self.set_geo_type(geo_obj, self.translate_rotate) + return self.continue_parse + + def GEO_TRANSLATE_NODE_WITH_DL(self, macro: Macro, depth: int): + transform = Matrix() + transform.translation = self.get_translation(macro.args[1:4]) + self.last_transform = self.parent_transform @ transform + + model = macro.args[-1] + if model != "NULL": + geo_obj = self.add_model( + ModelDat(self.last_transform, macro.args[0], model), "translate", self.translate, macro.args[0] + ) + else: + geo_obj = self.setup_geo_obj("translate", self.translate, macro.args[0]) + self.set_transform(geo_obj, self.last_transform) + return geo_obj + + def GEO_TRANSLATE(self, macro: Macro, depth: int): + obj = self.GEO_TRANSLATE_NODE(macro, depth) + if obj: + self.set_geo_type(geo_obj, self.translate_rotate) + return self.continue_parse + + def GEO_TRANSLATE_NODE(self, macro: Macro, depth: 
int):
        transform = Matrix()
        transform.translation = self.get_translation(macro.args[1:4])
        self.last_transform = self.parent_transform @ transform

        geo_obj = self.setup_geo_obj("translate", self.translate, macro.args[0])
        self.set_transform(geo_obj, self.last_transform)
        return geo_obj

    def GEO_SCALE_WITH_DL(self, macro: Macro, depth: int):
        # scale is a 16.16 fixed point value
        scale = eval(macro.args[1]) / 0x10000
        self.last_transform = scale * self.last_transform

        model = macro.args[-1]
        geo_obj = self.add_model(ModelDat(self.last_transform, macro.args[0], macro.args[-1]))
        self.set_transform(geo_obj, self.last_transform)
        return self.continue_parse

    # these have no affect on the bpy
    def GEO_NOP_1A(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_NOP_1E(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_NOP_1F(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_NODE_START(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_NODE_SCREEN_AREA(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_ZBUFFER(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_RENDER_OBJ(self, macro: Macro, depth: int):
        return self.continue_parse

    # This should probably do something but I haven't coded it in yet
    def GEO_COPY_VIEW(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_ASSIGN_AS_VIEW(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_UPDATE_NODE_FLAGS(self, macro: Macro, depth: int):
        return self.continue_parse

    def GEO_NODE_ORTHO(self, macro: Macro, depth: int):
        return self.continue_parse

    # These need special bhv for each type
    def GEO_ASM(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_RENDER_RANGE(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_CULLING_RADIUS(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_HELD_OBJECT(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_SCALE(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_SWITCH_CASE(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_SHADOW(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_CAMERA(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_CAMERA_FRUSTRUM(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_CAMERA_FRUSTUM_WITH_FUNC(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_BACKGROUND(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")

    def GEO_BACKGROUND_COLOR(self, macro: Macro, depth: int):
        raise Exception("you must call this function from a sublcass")


# GraphNodes subclass that materializes a geo layout as a hierarchy of empties/objects.
# Class attributes below are the sm64_obj_type enum values per geo command.
class GeoLayout(GraphNodes):
    switch = "Switch"
    # NOTE(review): enum value looks truncated — confirm "Geo Translate/Rotate"
    translate_rotate = "Geo Translate/Rotat"
    translate = "Geo Translate Node"
    rotate = "Geo Rotation Node"
    billboard = "Geo Billboard"
    display_list = "Geo Displaylist"
    shadow = "Custom Geo Command"
    asm = "Geo ASM"
    scale = "Geo Scale"
    animated_part = "Geo Translate Node"
    custom_animated = "Custom Geo Command"
    custom = "Custom Geo Command"

    def __init__(
        self,
        geo_layouts: dict,
        root: bpy.types.Object,
        scene: bpy.types.Scene,
        name: str,
        area_root: bpy.types.Object,
        col: bpy.types.Collection = None,
        geo_parent: "GeoLayout" = None,
        stream: list[Any] = None,
        pass_args: dict = None,
    ):
        self.parent = root
        self.area_root = area_root  # for properties that can only be written to area
        self.root = root
self.obj = None # last object on this layer of the tree, will become parent of next child + # undetermined args to pass on in dict + if pass_args: + self.pass_args = pass_args + else: + self.pass_args = dict() + if not col: + col = area_root.users_collection[0] + else: + col = col + super().__init__(geo_layouts, scene, name, col, geo_parent=geo_parent, stream=stream) + + def set_transform(self, geo_obj: bpy.types.Object, transform: Matrix): + if not geo_obj: + return + geo_obj.matrix_world = ( + geo_obj.matrix_world + @ transform_matrix_to_bpy(transform) + * (1 / self.scene.fast64.sm64.blender_to_sm64_scale) + ) + + def set_geo_type(self, geo_obj: bpy.types.Object, geo_cmd: str): + geo_obj.sm64_obj_type = geo_cmd + + def set_draw_layer(self, geo_obj: bpy.types.Object, layer: int): + geo_obj.draw_layer_static = str(self.parse_layer(layer)) + + # make an empty node to act as the root of this geo layout + # use this to hold a transform, or an actual cmd, otherwise rt is passed + def make_root(self, name: str, parent_obj: bpy.types.Object, mesh: bpy.types.Mesh): + self.obj = bpy.data.objects.new(name, mesh) + self.col.objects.link(self.obj) + # keep? 
I don't like this formulation + if parent_obj: + parentObject(parent_obj, self.obj, keep=0) + return self.obj + + def setup_geo_obj(self, obj_name: str, geo_cmd: str, layer: int = None, mesh: bpy.types.Mesh = None): + geo_obj = self.make_root(f"{self.ordered_name} {obj_name}", self.root, mesh) + if geo_cmd: + self.set_geo_type(geo_obj, geo_cmd) + if layer: + self.set_draw_layer(geo_obj, layer) + return geo_obj + + def add_model(self, model_data: ModelDat, *args): + self.models.append(model_data) + # add placeholder mesh + mesh = bpy.data.meshes.get("sm64_import_placeholder_mesh") + if not mesh: + mesh = bpy.data.meshes.new("sm64_import_placeholder_mesh") + geo_obj = self.setup_geo_obj(model_data.model_name, None, layer=model_data.layer, mesh=mesh) + geo_obj.ignore_collision = True + model_data.object = geo_obj + # check for mesh props + if render_range := self.pass_args.get("render_range", None): + geo_obj.use_render_range = True + geo_obj.render_range = render_range + del self.pass_args["render_range"] + if culling_radius := self.pass_args.get("culling_radius", None): + geo_obj.use_render_area = True + geo_obj.culling_radius = culling_radius + del self.pass_args["culling_radius"] + return geo_obj + + def parse_level_geo(self, start: str): + geo_layout = self.geo_layouts.get(start) + if not geo_layout: + raise Exception( + "Could not find geo layout {} from levels/{}/{}geo.c".format( + start, self.props.level_name, self.props.level_prefix + ), + "pass_linked_export", + ) + self.stream.append(start) + self.parse_stream_from_start(geo_layout, start, 0) + + def GEO_ASM(self, macro: Macro, depth: int): + # envfx goes on the area root + if "geo_envfx_main" in macro.args[1]: + env_fx = macro.args[1] + if any(env_fx is enum_fx[0] for enum_fx in enumEnvFX): + self.area_root.envOption = env_fx + else: + self.area_root.envOption = "Custom" + self.area_root.envType = env_fx + if macro.args[1] in self._skipped_geo_asm_funcs and self.props.export_friendly: + return 
self.continue_parse + geo_obj = self.setup_geo_obj("asm", self.asm) + # probably will need to be overridden by each subclass + asm = geo_obj.fast64.sm64.geo_asm + asm.param = macro.args[0] + asm.func = macro.args[1] + return self.continue_parse + + def GEO_SCALE(self, macro: Macro, depth: int): + scale = eval(macro.args[1]) / 0x10000 + geo_obj = self.setup_geo_obj("scale", self.scale, macro.args[0]) + geo_obj.scale = (scale, scale, scale) + return self.continue_parse + + # shadows aren't naturally supported but we can emulate them with custom geo cmds + # change so this can be applied to mesh on root? + def GEO_SHADOW(self, macro: Macro, depth: int): + geo_obj = self.setup_geo_obj("shadow empty", self.shadow) + geo_obj.customGeoCommand = "GEO_SHADOW" + geo_obj.customGeoCommandArgs = ", ".join(macro.args) + return self.continue_parse + + def GEO_SWITCH_CASE(self, macro: Macro, depth: int): + geo_obj = self.setup_geo_obj("switch", self.switch) + # probably will need to be overridden by each subclass + geo_obj.switchParam = eval(macro.args[0]) + geo_obj.switchFunc = macro.args[1] + return self.continue_parse + + # can only apply type to area root + def GEO_CAMERA(self, macro: Macro, depth: int): + self.area_root.camOption = "Custom" + self.area_root.camType = macro.args[0] + return self.continue_parse + + def GEO_BACKGROUND(self, macro: Macro, depth: int): + level_root = self.area_root.parent + # check if in enum + skybox_name = macro.args[0].replace("BACKGROUND_", "") + bg_enums = {enum.identifier for enum in level_root.bl_rna.properties["background"].enum_items} + if skybox_name in bg_enums: + level_root.background = skybox_name + else: + level_root.background = "CUSTOM" + # this is cringe and should be changed + scene.fast64.sm64.level.backgroundID = macro.args[0] + # I don't have access to the bg segment, that is in level obj + scene.fast64.sm64.level.backgroundSegment = "unavailable srry :(" + + return self.continue_parse + + def GEO_BACKGROUND_COLOR(self, macro: 
Macro, depth: int):
        level_root = self.area_root.parent
        level_root.useBackgroundColor = True
        level_root.backgroundColor = read16bitRGBA(hexOrDecInt(macro.args[0]))
        return self.continue_parse

    # can only apply to meshes
    def GEO_RENDER_RANGE(self, macro: Macro, depth: int):
        # stash until the next add_model call, which owns a mesh object
        self.pass_args["render_range"] = [
            hexOrDecInt(range) / self.scene.fast64.sm64.blender_to_sm64_scale for range in macro.args
        ]
        return self.continue_parse

    def GEO_CULLING_RADIUS(self, macro: Macro, depth: int):
        self.pass_args["culling_radius"] = hexOrDecInt(macro.args[0]) / self.scene.fast64.sm64.blender_to_sm64_scale
        return self.continue_parse

    # make better
    def GEO_CAMERA_FRUSTRUM(self, macro: Macro, depth: int):
        self.area_root.camOption = "Custom"
        self.area_root.camType = macro.args[0]
        return self.continue_parse

    def GEO_CAMERA_FRUSTUM_WITH_FUNC(self, macro: Macro, depth: int):
        self.area_root.camOption = "Custom"
        self.area_root.camType = macro.args[0]
        return self.continue_parse

    def GEO_OPEN_NODE(self, macro: Macro, depth: int):
        # descend one level: the child layout parents to the last created
        # object (self.obj), or to this layout's root when none was made
        if self.obj:
            GeoChild = GeoLayout(
                self.geo_layouts,
                self.obj,
                self.scene,
                self.name,
                self.area_root,
                col=self.col,
                geo_parent=self,
                stream=self.stream,
                pass_args=self.pass_args,
            )
        else:
            GeoChild = GeoLayout(
                self.geo_layouts,
                self.root,
                self.scene,
                self.name,
                self.area_root,
                col=self.col,
                geo_parent=self,
                stream=self.stream,
                pass_args=self.pass_args,
            )
        GeoChild.parent_transform = self.last_transform
        GeoChild.parse_stream(self.geo_layouts.get(self.stream[-1]), self.stream[-1], depth + 1)
        self.children.append(GeoChild)
        return self.continue_parse


# GraphNodes subclass that materializes a geo layout as armature bones.
# Class attributes below are the bone geo_cmd enum values per geo command.
class GeoArmature(GraphNodes):
    switch = "Switch"
    start = "Start"
    translate_rotate = "TranslateRotate"
    translate = "Translate"
    rotate = "Rotate"
    billboard = "Billboard"
    display_list = "DisplayList"
    shadow = "Shadow"
    asm = "Function"
    held_object = "HeldObject"
    scale = "Scale"
    render_area = "StartRenderArea"
    animated_part = "DisplayListWithOffset"
    custom_animated = "CustomAnimated"
    custom = "CustomNonAnimated"

    def __init__(
        self,
        geo_layouts: dict,
        armature_obj: bpy.types.Armature,
        scene: bpy.types.Scene,
        name: str,
        col: bpy.types.Collection,
        is_switch_child: bool = False,
        parent_bone: bpy.types.Bone = None,
        geo_parent: "GeoArmature" = None,
        switch_armatures: dict[int, bpy.types.Object] = None,
        stream: Any = None,
    ):
        self.armature = armature_obj
        self.parent_bone = None if not parent_bone else parent_bone.name
        self.bone = None
        self.is_switch_child = is_switch_child
        self.switch_index = 0
        # parent to this instead of parent bone for brief moment it will exist
        self.switch_option_bone: str = None
        if not switch_armatures:
            self.switch_armatures = dict()
        else:
            self.switch_armatures = switch_armatures
        super().__init__(geo_layouts, scene, name, col, geo_parent=geo_parent, stream=stream)

    def enter_edit_mode(self, geo_armature: bpy.types.Object):
        # bpy.ops.object.mode_set acts on the active object, so select it first
        geo_armature.select_set(True)
        bpy.context.view_layer.objects.active = geo_armature
        bpy.ops.object.mode_set(mode="EDIT", toggle=False)

    def get_or_init_geo_armature(self):
        """Return the armature object to edit: the main armature, or a per-switch-option armature."""
        # if not the first child, make a new armature object and switch option root bone
        if self.switch_index > 0 and not self.switch_armatures.get(self.switch_index, None):
            name = f"{self.ordered_name} switch_option"
            switch_armature = bpy.data.objects.new(name, bpy.data.armatures.new(name))
            self.col.objects.link(switch_armature)
            self.switch_armatures[self.switch_index] = switch_armature

            self.enter_edit_mode(switch_armature)
            edit_bone = switch_armature.data.edit_bones.new(name)
            eb_name = edit_bone.name
            # give it a non zero length
            edit_bone.head = (0, 0, 0)
            edit_bone.tail = (0, 0, 0.1)
            bpy.ops.object.mode_set(mode="OBJECT", toggle=False)
            switch_opt_bone = switch_armature.data.bones[eb_name]
            self.switch_option_bone = eb_name
            self.set_geo_type(switch_opt_bone,
"SwitchOption") + # add switch option and set to mesh override + switch_bone = self.armature.data.bones.get(self.parent_bone, None) + option = switch_bone.switch_options.add() + option.switchType = "Mesh" + option.optionArmature = switch_armature + elif self.switch_armatures: + switch_armature = self.switch_armatures.get(self.switch_index, self.armature) + else: + switch_armature = self.armature + return switch_armature + + def set_transform(self, geo_bone: bpy.types.Bone, transform: Matrix): + # only the position of the head really matters, so the tail + # will take an ad hoc position of 1 above the head + name = geo_bone.name + self.enter_edit_mode(armature_obj := self.get_or_init_geo_armature()) + edit_bone = armature_obj.data.edit_bones.get(name, None) + location = transform_matrix_to_bpy(transform).to_translation() * ( + 1 / self.scene.fast64.sm64.blender_to_sm64_scale + ) + edit_bone.head = location + edit_bone.tail = location + Vector((0, 0, 1)) + bpy.ops.object.mode_set(mode="OBJECT", toggle=False) + # due to blender ptr memes, swapping between edit and obj mode + # will mutate an attr, because the data struct self.bones is rebuilt + # or something idk, and now where the previous bone was is replaced by + # a new one, so I must retrieve it again + self.bone = armature_obj.data.bones[name] + # set the rotation mode + armature_obj.pose.bones[name].rotation_mode = "XYZ" + if self.is_switch_child: + self.switch_index += 1 + + def set_geo_type(self, geo_bone: bpy.types.Bone, geo_cmd: str): + geo_bone.geo_cmd = geo_cmd + + def set_draw_layer(self, geo_bone: bpy.types.Bone, layer: int): + geo_bone.draw_layer = str(self.parse_layer(layer)) + + def make_root(self, name: str): + self.enter_edit_mode(armature_obj := self.get_or_init_geo_armature()) + edit_bone = armature_obj.data.edit_bones.new(name) + eb_name = edit_bone.name + # give it a non zero length + edit_bone.head = (0, 0, 0) + edit_bone.tail = (0, 0, 1) + # use self.switch_option_bone as parent, this does 
not logically follow from sm64 graph + # but is due to fast64 rules, where switch option acts as "virtual" bone in between child and parent + if self.switch_option_bone or self.parent_bone: + edit_bone.parent = armature_obj.data.edit_bones.get(self.switch_option_bone or self.parent_bone) + self.switch_option_bone = None + bpy.ops.object.mode_set(mode="OBJECT", toggle=False) + self.bone = armature_obj.data.bones[eb_name] + return self.bone + + def setup_geo_obj(self, obj_name: str, geo_cmd: str, layer: int = None): + geo_bone = self.make_root(f"{self.ordered_name} {obj_name}") + self.set_geo_type(geo_bone, geo_cmd) + if layer: + self.set_draw_layer(geo_bone, layer) + return geo_bone + + def add_model(self, model_data: ModelDat, obj_name: str, geo_cmd: str, layer: int = None): + ind = self.get_parser(self.stream[-1]).head + self.models.append(model_data) + model_data.vertex_group_name = f"{self.ordered_name} {obj_name} {model_data.model_name}" + model_data.switch_index = self.switch_index + return self.setup_geo_obj(f"{obj_name} {model_data.model_name}", geo_cmd, layer) + + def parse_armature(self, start: str, props: SM64_ImportProperties): + geo_layout = self.geo_layouts.get(start) + if not geo_layout: + raise Exception( + "Could not find geo layout {} from levels/{}/{}geo.c".format( + start, props.level_name, props.level_prefix + ) + ) + bpy.context.view_layer.objects.active = self.get_or_init_geo_armature() + self.stream.append(start) + self.parse_stream_from_start(geo_layout, start, 0) + + def GEO_ASM(self, macro: Macro, depth: int): + geo_obj = self.setup_geo_obj("asm", self.asm) + if not macro.args[0].isdigit(): + print("could not convert geo asm arg") + else: + geo_obj.func_param = int(macro.args[0]) + geo_obj.geo_func = macro.args[1] + return self.continue_parse + + def GEO_SHADOW(self, macro: Macro, depth: int): + geo_bone = self.setup_geo_obj("shadow", self.shadow) + geo_bone.shadow_solidity = hexOrDecInt(macro.args[1]) / 255 + geo_bone.shadow_scale = 
hexOrDecInt(macro.args[2]) + return self.continue_parse + + # cmd not supported in fast64 for some reason? + def GEO_RENDER_RANGE(self, macro: Macro, depth: int): + geo_bone = self.setup_geo_obj("render_range", self.custom) + geo_bone.fast64.sm64.custom_geo_cmd_macro = "GEO_RENDER_RANGE" + geo_bone.fast64.sm64.custom_geo_cmd_args = ",".join(macro.args) + return self.continue_parse + + # can switch children have their own culling radius? does it have to + # be on the root? this currently allows each independent geo to have one + def GEO_CULLING_RADIUS(self, macro: Macro, depth: int): + geo_armature = self.get_or_init_geo_armature() + geo_armature.use_render_area = True # cringe name, it is cull not render area + geo_armature.culling_radius = float(macro.args[0]) + return self.continue_parse + + def GEO_SWITCH_CASE(self, macro: Macro, depth: int): + geo_bone = self.setup_geo_obj("switch", self.switch) + # probably will need to be overridden by each subclass + geo_bone.func_param = eval(macro.args[0]) + geo_bone.geo_func = macro.args[1] + return self.continue_parse + + def GEO_SCALE_WITH_DL(self, macro: Macro, depth: int): + scale = eval(macro.args[1]) / 0x10000 + self.last_transform = [(0, 0, 0), self.last_transform[1]] + + model = macro.args[-1] + geo_obj = self.add_model( + ModelDat((0, 0, 0), (0, 0, 0), macro.args[0], macro.args[-1], scale=scale), + "scale", + self.scale, + macro.args[0], + ) + self.set_transform(geo_obj, self.last_transform) + return self.continue_parse + + def GEO_SCALE(self, macro: Macro, depth: int): + scale = eval(macro.args[1]) / 0x10000 + + geo_bone = self.setup_geo_obj("scale", self.scale, macro.args[0]) + geo_bone.geo_scale = scale + return self.continue_parse + + # can be used as a container for several nodes under a single switch child + def GEO_NODE_START(self, macro: Macro, depth: int): + geo_bone = self.setup_geo_obj("start", self.start, "1") + return self.continue_parse + + # add some stuff here + def GEO_HELD_OBJECT(self, macro: 
Macro, depth: int): + return self.continue_parse + + def GEO_OPEN_NODE(self, macro: Macro, depth: int): + if self.bone: + is_switch_child = self.bone.geo_cmd == self.switch + GeoChild = GeoArmature( + self.geo_layouts, + self.get_or_init_geo_armature(), + self.scene, + self.name, + self.col, + is_switch_child, + parent_bone=self.bone, + geo_parent=self, + stream=self.stream, + switch_armatures=self.switch_armatures if is_switch_child else None, + ) + else: + GeoChild = GeoArmature( + self.geo_layouts, + self.get_or_init_geo_armature(), + self.scene, + self.name, + self.col, + geo_parent=self, + stream=self.stream, + # switch_armatures=self.switch_armatures, # I think double open node won't cause an issue here? + ) + GeoChild.parent_transform = self.last_transform + GeoChild.parse_stream(self.geo_layouts.get(self.stream[-1]), self.stream[-1], depth + 1) + self.children.append(GeoChild) + return self.continue_parse + + +# ------------------------------------------------------------------------ +# Functions +# ------------------------------------------------------------------------ + + +# parse aggregate files, and search for sm64 specific fast64 export name schemes +def get_all_aggregates(aggregate_path: Path, filenames: tuple[callable], root_path: Path) -> list[Path]: + with open(aggregate_path, "r", newline="") as file: + caught_files = parse_aggregate_file(file, filenames, root_path, aggregate_path) + # catch fast64 includes + fast64 = parse_aggregate_file(file, (lambda path: "leveldata.inc.c" in path.name,), root_path, aggregate_path) + if fast64: + with open(fast64[0], "r", newline="") as fast64_dat: + caught_files.extend(parse_aggregate_file(fast64_dat, filenames, root_path, aggregate_path)) + return caught_files + + +# given a path, get a level object by parsing the script.c file +def parse_level_script(script_files: list[Path], scene: bpy.types.Scene, col: bpy.types.Collection = None): + root = bpy.data.objects.new("Empty", None) + if not col: + 
scene.collection.objects.link(root) + else: + col.objects.link(root) + props = scene.fast64.sm64.importer + root.name = f"Level Root {props.level_name}" + root.sm64_obj_type = "Level Root" + # Now parse the script and get data about the level + # Store data in attribute of a level class then assign later and return class + scripts = dict() + for script_file in script_files: + with open(script_file, "r", newline="") as script_file: + scripts.update(get_data_types_from_file(script_file, {"LevelScript": ["(", ")"]})) + lvl = Level(scripts, scene, root) + entry = props.entry.format(props.level_name) + lvl.parse_level_script(entry, col=col) + return lvl + + +# write the objects from a level object +def write_level_objects(lvl: Level, col_name: str = None, actor_models: dict[model_name, bpy.Types.Mesh] = None): + for area in lvl.areas.values(): + area.place_objects(col_name=col_name, actor_models=actor_models) + + +# from a geo layout, create all the mesh's +def write_armature_to_bpy( + geo_armature: GeoArmature, + scene: bpy.types.Scene, + f3d_dat: SM64_F3D, + root_path: Path, + parsed_model_data: dict, + cleanup: bool = True, +): + parsed_model_data = recurse_armature(geo_armature, scene, f3d_dat, root_path, parsed_model_data, cleanup=cleanup) + + objects_by_armature = dict() + for model_dat in parsed_model_data.values(): + if not objects_by_armature.get(model_dat.armature_obj, None): + objects_by_armature[model_dat.armature_obj] = [model_dat.object] + else: + objects_by_armature[model_dat.armature_obj].append(model_dat.object) + + for armature_obj, objects in objects_by_armature.items(): + # I don't really know the specific override needed for this to work + override = {**bpy.context.copy(), "selected_editable_objects": objects, "active_object": objects[0]} + with bpy.context.temp_override(**override): + bpy.ops.object.join() + + obj = objects[0] + parentObject(armature_obj, obj) + obj.scale *= 1 / scene.fast64.sm64.blender_to_sm64_scale + rotate_object(-90, obj) + 
obj.ignore_collision = True + # armature deform + mod = obj.modifiers.new("deform", "ARMATURE") + mod.object = geo_armature.armature + + +def apply_mesh_data( + f3d_dat: SM64_F3D, obj: bpy.types.Object, mesh: bpy.types.Mesh, layer: int, root_path: Path, cleanup: bool = False +): + f3d_dat.apply_mesh_data(obj, mesh, layer, root_path) + if cleanup: + mesh = obj.data + # clean up after applying dat + mesh.validate() + mesh.update(calc_edges=True) + # final operators to clean stuff up + # shade smooth + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + bpy.ops.object.shade_smooth() + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.remove_doubles() + bpy.ops.object.mode_set(mode="OBJECT") + + +def recurse_armature( + geo_armature: GeoArmature, + scene: bpy.types.Scene, + f3d_dat: SM64_F3D, + root_path: Path, + parsed_model_data: dict, + cleanup: bool = True, +): + if geo_armature.models: + # create a mesh for each one + for model_data in geo_armature.models: + name = f"{model_data.model_name} data" + if name in parsed_model_data.keys(): + mesh = parsed_model_data[name].mesh + name = 0 + else: + mesh = bpy.data.meshes.new(name) + model_data.mesh = mesh + parsed_model_data[name] = model_data + [verts, tris] = f3d_dat.get_f3d_data_from_model(model_data.model_name) + mesh.from_pydata(verts, [], tris) + + obj = bpy.data.objects.new(f"{model_data.model_name} obj", mesh) + + obj.matrix_world = transform_matrix_to_bpy(model_data.transform) * ( + 1 / scene.fast64.sm64.blender_to_sm64_scale + ) + + model_data.object = obj + geo_armature.col.objects.link(obj) + # vertex groups are shared with shared mesh data + if model_data.vertex_group_name and name: + vertex_group = obj.vertex_groups.new(name=model_data.vertex_group_name) + vertex_group.add([vert.index for vert in obj.data.vertices], 1, "ADD") + if model_data.switch_index: + model_data.armature_obj = geo_armature.switch_armatures[model_data.switch_index] + else: + model_data.armature_obj = 
geo_armature.armature + + if name: + layer = geo_armature.parse_layer(model_data.layer) + apply_mesh_data(f3d_dat, obj, mesh, str(layer), root_path, cleanup) + + if not geo_armature.children: + return parsed_model_data + for arm in geo_armature.children: + parsed_model_data = recurse_armature(arm, scene, f3d_dat, root_path, parsed_model_data, cleanup=cleanup) + return parsed_model_data + + +# from a geo layout, create all the mesh's +def write_geo_to_bpy( + geo: GeoLayout, + scene: bpy.types.Scene, + f3d_dat: SM64_F3D, + root_path: Path, + meshes: dict[str, bpy.Types.Mesh], + cleanup: bool = True, +) -> dict[str, bpy.Types.Mesh]: + if geo.models: + # create a mesh for each one. + for model_data in geo.models: + name = f"{model_data.model_name} data" + if name in meshes.keys(): + mesh = meshes[name] + name = 0 + else: + mesh = bpy.data.meshes.new(name) + meshes[name] = mesh + [verts, tris] = f3d_dat.get_f3d_data_from_model(model_data.model_name) + mesh.from_pydata(verts, [], tris) + + # swap out placeholder mesh data + model_data.object.data = mesh + + if name: + apply_mesh_data( + f3d_dat, model_data.object, mesh, str(geo.parse_layer(model_data.layer)), root_path, cleanup + ) + if not geo.children: + return meshes + for g in geo.children: + meshes = write_geo_to_bpy(g, scene, f3d_dat, root_path, meshes, cleanup=cleanup) + return meshes + + +# write the gfx for a level given the level data, and f3d data +def write_level_to_bpy(lvl: Level, scene: bpy.types.Scene, root_path: Path, f3d_dat: SM64_F3D, cleanup: bool = False): + for area in lvl.areas.values(): + write_geo_to_bpy(area.geo, scene, f3d_dat, root_path, dict(), cleanup=cleanup) + return lvl + + +# given a geo.c file and a path, return cleaned up geo layouts in a dict +def construct_geo_layouts_from_file(geo_paths: list[Path], root_path: Path) -> dict[geo_name:str, geo_data : list[str]]: + geo_layout_files = [] + for path in geo_paths: + geo_layout_files += get_all_aggregates(path, (lambda path: "geo.inc.c" in 
path.name,), root_path) + if not geo_layout_files: + return + # because of fast64, these can be recursively defined (though I expect only a depth of one) + for file in geo_layout_files: + geo_layout_files.extend(get_all_aggregates(file, (lambda path: "geo.inc.c" in path.name,), root_path)) + geo_layout_data = {} # stores cleaned up geo layout lines + for geo_file in geo_layout_files: + with open(geo_file, "r", newline="") as geo_file: + geo_layout_data.update(get_data_types_from_file(geo_file, {"GeoLayout": ["(", ")"]})) + return geo_layout_data + + +# get all the relevant data types cleaned up and organized for the f3d class +def construct_sm64_f3d_data_from_file(gfx: SM64_F3D, model_file: TextIO) -> SM64_F3D: + gfx_dat = get_data_types_from_file( + model_file, + { + "Vtx": ["{", "}"], + "Gfx": ["(", ")"], + "Light_t": [None, None], + "Ambient_t": [None, None], + "Lights1": [None, None], + }, + collated=True, + ) + for key, value in gfx_dat.items(): + attr = getattr(gfx, key) + attr.update(value) + gfx.Textures.update( + get_data_types_from_file( + model_file, + { + "Texture": [None, None], + "u8": [None, None], + "s16": [None, None], + }, + ) + ) + return gfx + + +# Parse an aggregate group file or level data file for f3d data +def construct_model_data_from_file(aggregates: list[Path], scene: bpy.types.Scene, root_path: Path) -> SM64_F3D: + model_files = [] + texture_files = [] + for dat_file in aggregates: + model_files += get_all_aggregates( + dat_file, + ( + lambda path: "model.inc.c" in path.name, + lambda path: path.match("*[0-9].inc.c"), # deal with 1.inc.c files etc. + lambda path: "painting.inc.c" in path.name, # add way to deal with 1.inc.c filees etc. 
+ ), + root_path, + ) + texture_files += get_all_aggregates( + dat_file, + ( + lambda path: "texture.inc.c" in path.name, + lambda path: "textureNew.inc.c" in path.name, + ), + root_path, + ) + # Get all modeldata in the level + sm64_f3d_data = SM64_F3D(scene) + for model_file in model_files: + model_file = open(model_file, "r", newline="") + construct_sm64_f3d_data_from_file(sm64_f3d_data, model_file) + # Update file to have texture.inc.c textures, deal with included textures in the model.inc.c files aswell + for texture_file in [*texture_files, *model_files]: + with open(texture_file, "r", newline="") as texture_file: + # For textures, try u8, and s16 aswell + sm64_f3d_data.Textures.update( + get_data_types_from_file( + texture_file, + { + "Texture": [None, None], + "u8": [None, None], + "s16": [None, None], + }, + ) + ) + return sm64_f3d_data + + +# Parse an aggregate group file or level data file for geo layouts corresponding to list of model IDs +def find_actor_models_from_model_ids( + geo_paths: list[Path], + model_ids: list[str], + level: Level, + scene: bpy.types.Scene, + root_obj: bpy.types.Object, + root_path: Path, + col: bpy.types.Collection = None, +) -> dict[model_id, GeoLayout]: + geo_layout_dict = construct_geo_layouts_from_file(geo_paths, root_path) + geo_layout_per_model: dict[model_id, GeoLayout] = dict() + for model in model_ids: + layout_name = level.loaded_geos.get(model, None) + if not layout_name: + # create a warning off of this somehow? 
+ print(f"could not find model {model}") + continue + geo_layout = GeoLayout(geo_layout_dict, root_obj, scene, layout_name, root_obj, col=col) + try: + geo_layout.parse_level_geo(layout_name) + geo_layout_per_model[model] = geo_layout + except Exception as exc: + if exc.args[1] == "pass_linked_export": + print(exc) + else: + raise Exception(exc) + return geo_layout_per_model + + +# Parse an aggregate group file or level data file for geo layouts +def find_actor_models_from_geo( + geo_paths: list[Path], + layout_name: str, + scene: bpy.types.Scene, + root_obj: bpy.types.Object, + root_path: Path, + col: bpy.types.Collection = None, +) -> GeoLayout: + geo_layout_dict = construct_geo_layouts_from_file(geo_paths, root_path) + geo_layout = GeoLayout(geo_layout_dict, root_obj, scene, layout_name, root_obj, col=col) + geo_layout.parse_level_geo(layout_name) + return geo_layout + + +def find_armature_models_from_geo( + geo_paths: list[Path], + layout_name: str, + scene: bpy.types.Scene, + armature_obj: bpy.types.Armature, + root_path: Path, + col: bpy.types.Collection, +) -> GeoArmature: + geo_layout_dict = construct_geo_layouts_from_file(geo_paths, root_path) + geo_armature = GeoArmature(geo_layout_dict, armature_obj, scene, "{}".format(layout_name), col) + geo_armature.parse_armature(layout_name, scene.fast64.sm64.importer) + return geo_armature + + +# Find DL references given a level geo file and a path to a level folder +def find_level_models_from_geo( + geo_paths: list[Path], lvl: Level, scene: bpy.types.Scene, root_path: Path, col_name: str = None +) -> Level: + props = scene.fast64.sm64.importer + geo_layout_dict = construct_geo_layouts_from_file(geo_paths, root_path) + for area_index, area in lvl.areas.items(): + if col_name: + col = create_collection(area.root.users_collection[0], col_name) + else: + col = None + geo = GeoLayout( + geo_layout_dict, area.root, scene, f"GeoRoot {props.level_name} {area_index}", area.root, col=col + ) + geo.parse_level_geo(area.geo) 
+ area.geo = geo + return lvl + + +# import level graphics given geo.c file, and a level object +def import_level_graphics( + geo_paths: list[Path], + lvl: Level, + scene: bpy.types.Scene, + root_path: Path, + aggregates: list[Path], + cleanup: bool = False, + col_name: str = None, +) -> Level: + lvl = find_level_models_from_geo(geo_paths, lvl, scene, root_path, col_name=col_name) + models = construct_model_data_from_file(aggregates, scene, root_path) + # just a try, in case you are importing from something other than base decomp repo (like RM2C output folder) + try: + models.get_generic_textures(root_path) + except: + print("could not import genric textures, if this errors later from missing textures this may be why") + lvl = write_level_to_bpy(lvl, scene, root_path, models, cleanup=cleanup) + return lvl + + +# get all the collision data from a certain path +def find_collision_data_from_path(aggregate: Path, lvl: Level, scene: bpy.types.Scene, root_path: Path) -> Level: + collision_files = get_all_aggregates(aggregate, (lambda path: "collision.inc.c" in path.name,), root_path) + col_data = dict() + for col_file in collision_files: + if not os.path.isfile(col_file): + continue + with open(col_file, "r", newline="") as col_file: + col_data.update(get_data_types_from_file(col_file, {"Collision": ["(", ")"]})) + # search for the area terrain from available collision data + for area in lvl.areas.values(): + area.ColFile = col_data.get(area.terrain, None) + if not area.ColFile: + props = scene.fast64.sm64.importer + raise Exception( + f"Collision {area.terrain} not found in levels/{props.level_name}/{props.level_prefix}leveldata.c" + ) + return lvl + + +def write_level_collision_to_bpy( + lvl: Level, + scene: bpy.types.Scene, + cleanup: bool, + col_name: str = None, + actor_models: dict[model_name, bpy.Types.Mesh] = None, +): + for area_index, area in lvl.areas.items(): + if not col_name: + col = area.root.users_collection[0] + else: + col = 
create_collection(area.root.users_collection[0], col_name) + col_parser = Collision(area.ColFile, scene.fast64.sm64.blender_to_sm64_scale) + col_parser.parse_collision() + name = "SM64 {} Area {} Col".format(scene.fast64.sm64.importer.level_name, area_index) + obj = col_parser.write_collision(scene, name, area.root, col) + area.write_special_objects(col_parser.special_objects, col) + # final operators to clean stuff up + if cleanup: + obj.data.validate() + obj.data.update(calc_edges=True) + # shade smooth + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + bpy.ops.object.shade_smooth() + bpy.ops.object.mode_set(mode="EDIT") + bpy.ops.mesh.remove_doubles() + bpy.ops.object.mode_set(mode="OBJECT") + + +# import level collision given a level script +def import_level_collision( + aggregate: Path, lvl: Level, scene: bpy.types.Scene, root_path: Path, cleanup: bool, col_name: str = None +) -> Level: + lvl = find_collision_data_from_path( + aggregate, lvl, scene, root_path + ) # Now Each area has its collision file nicely formatted + write_level_collision_to_bpy(lvl, scene, cleanup, col_name=col_name) + return lvl + + +# ------------------------------------------------------------------------ +# Operators +# ------------------------------------------------------------------------ + + +class SM64_ActImport(Operator): + bl_label = "Import Actor" + bl_idname = "wm.sm64_import_actor" + bl_options = {"REGISTER", "UNDO"} + + cleanup: BoolProperty(name="Cleanup Mesh", default=1) + + def execute(self, context): + scene = context.scene + rt_col = context.collection + props = scene.fast64.sm64.importer + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + + group_prefix = props.group_prefix + level_name = props.level_name + level_prefix = props.level_prefix + geo_paths = ( + decomp_path / "actors" / (group_prefix + "_geo.c"), + decomp_path / "levels" / level_name / (level_prefix + "geo.c"), + ) + model_data_paths = ( + decomp_path / "actors" / 
(group_prefix + ".c"), + decomp_path / "levels" / level_name / (level_prefix + "leveldata.c"), + ) + + geo_layout = find_actor_models_from_geo( + geo_paths, props.geo_layout, scene, None, decomp_path, col=rt_col + ) # return geo layout class and write the geo layout + models = construct_model_data_from_file(model_data_paths, scene, decomp_path) + # just a try, in case you are importing from not the base decomp repo + try: + models.get_generic_textures(decomp_path) + except: + print("could not import genric textures, if this errors later from missing textures this may be why") + write_geo_to_bpy(geo_layout, scene, models, decomp_path, {}, cleanup=self.cleanup) + return {"FINISHED"} + + +class SM64_ArmatureImport(Operator): + bl_label = "Import Armature" + bl_idname = "wm.sm64_import_armature" + bl_options = {"REGISTER", "UNDO"} + + cleanup: BoolProperty(name="Cleanup Mesh", default=1) + + def execute(self, context): + scene = context.scene + rt_col = context.collection + props = scene.fast64.sm64.importer + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + + group_prefix = props.group_prefix + level_name = props.level_name + level_prefix = props.level_prefix + geo_paths = ( + decomp_path / "actors" / (group_prefix + "_geo.c"), + decomp_path / "levels" / level_name / (level_prefix + "geo.c"), + ) + model_data_paths = ( + decomp_path / "actors" / (group_prefix + ".c"), + decomp_path / "levels" / level_name / (level_prefix + "leveldata.c"), + ) + + name = f"Actor {props.geo_layout}" + armature_obj = bpy.data.objects.new(name, bpy.data.armatures.new(name)) + rt_col.objects.link(armature_obj) + + geo_armature = find_armature_models_from_geo( + geo_paths, props.geo_layout, scene, armature_obj, decomp_path, col=rt_col + ) # return geo layout class and write the geo layout + models = construct_model_data_from_file(model_data_paths, scene, decomp_path) + # just a try, in case you are importing from not the base decomp repo + try: + 
models.get_generic_textures(decomp_path) + except: + print("could not import genric textures, if this errors later from missing textures this may be why") + write_armature_to_bpy(geo_armature, scene, models, decomp_path, {}, cleanup=self.cleanup) + return {"FINISHED"} + + +def get_operator_paths(props: SM64_ImportProperties, decomp_path: Path) -> Tuple[leveldat_path, script_path, geo_path]: + level = decomp_path / "levels" / props.level_name + script = level / (props.level_prefix + "script.c") + geo = level / (props.level_prefix + "geo.c") + leveldat = level / (props.level_prefix + "leveldata.c") + return (leveldat, script, geo) + + +class SM64_LvlImport(Operator): + bl_label = "Import Level" + bl_idname = "wm.sm64_import_level" + + cleanup = False + + def execute(self, context): + pr = cProfile.Profile() + pr.enable() + scene = context.scene + props = scene.fast64.sm64.importer + + col = context.collection + if props.use_collection: + obj_col = f"{props.level_name} obj" + gfx_col = f"{props.level_name} gfx" + col_col = f"{props.level_name} col" + else: + obj_col = gfx_col = col_col = None + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + level_data_path, script_path, geo_path = get_operator_paths(props, decomp_path) + + lvl = parse_level_script( + [script_path, decomp_path / "levels" / "scripts.c"], scene, col=col + ) # returns level class + + if props.import_linked_actors: + unique_model_ids = {model for model in lvl.loaded_geos.keys()} + unique_model_ids.update({model for model in lvl.loaded_dls.keys()}) + unique_model_ids.update({object.model for area in lvl.areas.values() for object in area.objects}) + + geo_actor_paths = [ + *( + decomp_path / "actors" / (linked_group.group_prefix + "_geo.c") + for linked_group in props.linked_groups + ), + geo_path, + ] + model_actor_paths = [ + *(decomp_path / "actors" / (linked_group.group_prefix + ".c") for linked_group in props.linked_groups), + level_data_path, + ] + actor_col = 
create_collection(col, "linked actors col") + actor_geo_layouts: dict[model_id, GeoLayout] = find_actor_models_from_model_ids( + geo_actor_paths, unique_model_ids, lvl, scene, None, decomp_path, col=actor_col + ) + model_data = construct_model_data_from_file(model_actor_paths, scene, decomp_path) + # just a try, in case you are importing from not the base decomp repo + try: + model_data.get_generic_textures(decomp_path) + except: + print("could not import genric textures, if this errors later from missing textures this may be why") + meshes = {} + for model, geo_layout in actor_geo_layouts.items(): + meshes = write_geo_to_bpy(geo_layout, scene, model_data, decomp_path, meshes, cleanup=self.cleanup) + # update model to be root obj of geo + actor_geo_layouts[model] = geo_layout.first_obj + + lvl = import_level_collision(level_data_path, lvl, scene, decomp_path, self.cleanup, col_name=col_col) + write_level_objects(lvl, col_name=obj_col, actor_models=actor_geo_layouts) + # actor_col.hide_render = True + # actor_col.hide_viewport = True + else: + write_level_objects(lvl, col_name=obj_col) + lvl = import_level_collision(level_data_path, lvl, scene, decomp_path, self.cleanup, col_name=col_col) + lvl = import_level_graphics( + [geo_path], lvl, scene, decomp_path, [level_data_path], cleanup=self.cleanup, col_name=gfx_col + ) + pr.disable() + s = io.StringIO() + sortby = SortKey.CUMULATIVE + ps = pstats.Stats(pr, stream=s).sort_stats(sortby) + ps.print_stats(20) + print(s.getvalue()) + return {"FINISHED"} + + +class SM64_LvlGfxImport(Operator): + bl_label = "Import Gfx" + bl_idname = "wm.sm64_import_level_gfx" + + cleanup = False + + def execute(self, context): + scene = context.scene + props = scene.fast64.sm64.importer + + col = context.collection + if props.use_collection: + gfx_col = f"{props.level_name} gfx" + else: + gfx_col = None + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + model_data_path, script_path, geo_path = 
get_operator_paths(props, decomp_path) + + lvl = parse_level_script( + [script_path, decomp_path / "levels" / "scripts.c"], scene, col=col + ) # returns level class + lvl = import_level_graphics( + [geo_path], lvl, scene, decomp_path, [model_data_path], cleanup=self.cleanup, col_name=gfx_col + ) + return {"FINISHED"} + + +class SM64_LvlColImport(Operator): + bl_label = "Import Collision" + bl_idname = "wm.sm64_import_level_col" + + cleanup = True + + def execute(self, context): + scene = context.scene + props = scene.fast64.sm64.importer + + col = context.collection + if props.use_collection: + col_col = f"{props.level_name} collision" + else: + col_col = None + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + level_data_path, script_path, _ = get_operator_paths(props, decomp_path) + + lvl = parse_level_script( + [script_path, decomp_path / "levels" / "scripts.c"], scene, col=col + ) # returns level class + lvl = import_level_collision(level_data_path, lvl, scene, decomp_path, self.cleanup, col_name=col_col) + return {"FINISHED"} + + +class SM64_ObjImport(Operator): + bl_label = "Import Objects" + bl_idname = "wm.sm64_import_object" + + def execute(self, context): + scene = context.scene + props = scene.fast64.sm64.importer + + col = context.collection + if props.use_collection: + obj_col = f"{props.level_name} objs" + else: + obj_col = None + + decomp_path = Path(bpy.path.abspath(scene.fast64.sm64.decomp_path)) + _, script_path, _ = get_operator_paths(props, decomp_path) + + lvl = parse_level_script( + [script_path, decomp_path / "levels" / "scripts.c"], scene, col=col + ) # returns level class + write_level_objects(lvl, col_name=obj_col) + return {"FINISHED"} + + +# ------------------------------------------------------------------------ +# Props +# ------------------------------------------------------------------------ + + +class SM64_AddGroup(bpy.types.Operator): + bl_idname = "scene.add_group" + bl_label = "Add Group" + option: 
bpy.props.IntProperty() + + def execute(self, context): + prop = context.scene.fast64.sm64.importer + prop.linked_groups.add() + prop.linked_groups.move(len(prop.linked_groups) - 1, self.option) + self.report({"INFO"}, "Success!") + return {"FINISHED"} + + +class SM64_RemoveGroup(bpy.types.Operator): + bl_idname = "scene.remove_group" + bl_label = "Remove Group" + option: bpy.props.IntProperty() + + def execute(self, context): + prop = context.scene.fast64.sm64.importer + prop.linked_groups.remove(self.option) + self.report({"INFO"}, "Success!") + return {"FINISHED"} + + +class SM64_GroupProperties(PropertyGroup): + """ + properties for selecting a group for importing + specifically made for when importing levels and you + need to define loaded groups for linked objects + """ + + expand: bpy.props.BoolProperty(name="Expand", default=True) + group_preset: EnumProperty( + name="group preset", description="The group you want to load geo from", items=groups_obj_export + ) + group_prefix_custom: StringProperty( + name="Prefix", + description="Prefix before expected aggregator files like script.c, leveldata.c and geo.c. 
Enter group name if not using dropdowns.", + default="", + ) + + @property + def group_prefix(self): + if self.group_preset == "custom": + return self.group_prefix_custom + else: + return self.group_preset + + def draw(self, layout, index): + box = layout.box().column() + box.prop( + self, + "expand", + text=f"Group Load: {self.group_preset}", + icon="TRIA_DOWN" if self.expand else "TRIA_RIGHT", + ) + if self.expand: + prop_split(box, self, "group_preset", "Group Preset") + if self.group_preset == "custom": + prop_split(box, self, "group_prefix_custom", "Custom Group") + + row = box.row() + row.operator("scene.add_group", text="Add Group").option = index + 1 + row.operator("scene.remove_group", text="Remove Group").option = index + + +class SM64_ImportProperties(PropertyGroup): + # actor props + geo_layout_str: StringProperty(name="geo_layout", description="Name of GeoLayout") + + group_preset: EnumProperty( + name="group preset", description="The group you want to load geo from", items=groups_obj_export + ) + group_0_geo_enum: EnumProperty( + name="group 0 geos", + description="preset geos from vanilla in group 0", + items=[*group_0_geos, ("Custom", "Custom", "Custom")], + ) + group_1_geo_enum: EnumProperty( + name="group 1 geos", + description="preset geos from vanilla in group 1", + items=[*group_1_geos, ("Custom", "Custom", "Custom")], + ) + group_2_geo_enum: EnumProperty( + name="group 2 geos", + description="preset geos from vanilla in group 2", + items=[*group_2_geos, ("Custom", "Custom", "Custom")], + ) + group_3_geo_enum: EnumProperty( + name="group 3 geos", + description="preset geos from vanilla in group 3", + items=[*group_3_geos, ("Custom", "Custom", "Custom")], + ) + group_4_geo_enum: EnumProperty( + name="group 4 geos", + description="preset geos from vanilla in group 4", + items=[*group_4_geos, ("Custom", "Custom", "Custom")], + ) + group_5_geo_enum: EnumProperty( + name="group 5 geos", + description="preset geos from vanilla in group 5", + 
items=[*group_5_geos, ("Custom", "Custom", "Custom")], + ) + group_6_geo_enum: EnumProperty( + name="group 6 geos", + description="preset geos from vanilla in group 6", + items=[*group_6_geos, ("Custom", "Custom", "Custom")], + ) + group_7_geo_enum: EnumProperty( + name="group 7 geos", + description="preset geos from vanilla in group 7", + items=[*group_7_geos, ("Custom", "Custom", "Custom")], + ) + group_8_geo_enum: EnumProperty( + name="group 8 geos", + description="preset geos from vanilla in group 8", + items=[*group_8_geos, ("Custom", "Custom", "Custom")], + ) + group_9_geo_enum: EnumProperty( + name="group 9 geos", + description="preset geos from vanilla in group 9", + items=[*group_9_geos, ("Custom", "Custom", "Custom")], + ) + group_10_geo_enum: EnumProperty( + name="group 10 geos", + description="preset geos from vanilla in group 10", + items=[*group_10_geos, ("Custom", "Custom", "Custom")], + ) + group_11_geo_enum: EnumProperty( + name="group 11 geos", + description="preset geos from vanilla in group 11", + items=[*group_11_geos, ("Custom", "Custom", "Custom")], + ) + group_12_geo_enum: EnumProperty( + name="group 12 geos", + description="preset geos from vanilla in group 12", + items=[*group_12_geos, ("Custom", "Custom", "Custom")], + ) + group_13_geo_enum: EnumProperty( + name="group 13 geos", + description="preset geos from vanilla in group 13", + items=[*group_13_geos, ("Custom", "Custom", "Custom")], + ) + group_14_geo_enum: EnumProperty( + name="group 14 geos", + description="preset geos from vanilla in group 14", + items=[*group_14_geos, ("Custom", "Custom", "Custom")], + ) + group_15_geo_enum: EnumProperty( + name="group 15 geos", + description="preset geos from vanilla in group 15", + items=[*group_15_geos, ("Custom", "Custom", "Custom")], + ) + group_16_geo_enum: EnumProperty( + name="group 16 geos", + description="preset geos from vanilla in group 16", + items=[*group_16_geos, ("Custom", "Custom", "Custom")], + ) + group_17_geo_enum: 
EnumProperty( + name="group 17 geos", + description="preset geos from vanilla in group 17", + items=[*group_17_geos, ("Custom", "Custom", "Custom")], + ) + common_0_geo_enum: EnumProperty( + name="common 0 geos", + description="preset geos from vanilla in common 0", + items=[*common_0_geos, ("Custom", "Custom", "Custom")], + ) + common_1_geo_enum: EnumProperty( + name="common 1 geos", + description="preset geos from vanilla in common 1", + items=[*common_1_geos, ("Custom", "Custom", "Custom")], + ) + actor_prefix_custom: StringProperty( + name="Prefix", + description="Prefix before expected aggregator files like script.c, leveldata.c and geo.c. Enter group name if not using dropdowns.", + default="", + ) + version: EnumProperty( + name="Version", + description="Version of the game for any ifdef macros", + items=enumVersionDefs, + ) + target: StringProperty( + name="Target", description="The platform target for any #ifdefs in code", default="TARGET_N64" + ) + + # level props + level_enum: EnumProperty(name="Level", description="Choose a level", items=enumLevelNames, default="bob") + level_custom: StringProperty( + name="Custom Level Name", + description="Custom level name", + default="", + ) + level_prefix: StringProperty( + name="Prefix", + description="Prefix before expected aggregator files like script.c, leveldata.c and geo.c. Leave blank unless using custom files", + default="", + ) + entry: StringProperty( + name="Entrypoint", + description="The name of the level script entry variable. 
Level name is put between braces.", + default="level_{}_entry", + ) + version: EnumProperty( + name="Version", + description="Version of the game for any ifdef macros", + items=enumVersionDefs, + ) + target: StringProperty( + name="Target", description="The platform target for any #ifdefs in code", default="TARGET_N64" + ) + force_new_tex: BoolProperty( + name="force_new_tex", + description="Forcefully load new textures even if duplicate path/name is detected", + default=False, + ) + as_obj: BoolProperty( + name="As OBJ", description="Make new materials as PBSDF so they export to obj format", default=False + ) + use_collection: BoolProperty( + name="use_collection", description="Make new collections to organize content during imports", default=True + ) + export_friendly: BoolProperty( + name="Export Friendly", + description="Format import to be friendly for exporting for hacks rather than importing a 1:1 representation", + default=True, + ) + import_linked_actors: BoolProperty( + name="Import Actors", description="Imports the models of actors. 
Actor models will be duplicates", default=True + ) + linked_groups: CollectionProperty(type=SM64_GroupProperties) + # add collection property for groups to look through + # add method to collect all groups and then turn those into paths + # look through all those path aggregates to get includes for actor importing + + @property + def level_name(self): + if self.level_enum == "Custom": + return self.level_custom + else: + return self.level_enum + + @property + def geo_group_name(self): + if self.group_preset == "Custom": + return None + if self.group_preset == "common0": + return "common_0_geo_enum" + if self.group_preset == "common1": + return "common_1_geo_enum" + else: + return f"group_{self.group_preset.removeprefix('group')}_geo_enum" + + @property + def group_prefix(self): + if self.group_preset == "custom": + return self.actor_prefix_custom + else: + return self.group_preset + + @property + def geo_layout(self): + if self.group_preset == "custom": + return self.geo_layout_str + else: + return getattr(self, self.geo_group_name) + + def draw_actor(self, layout: bpy.types.UILayout): + box = layout.box() + box.label(text="SM64 Actor Importer") + box.prop(self, "group_preset") + if self.group_preset == "Custom": + box.prop(self, "actor_prefix_custom") + box.prop(self, "geo_layout_str") + else: + box.prop(self, self.geo_group_name) + box.prop(self, "version") + box.prop(self, "target") + + def draw_level(self, layout: bpy.types.UILayout): + box = layout.box() + box.label(text="Level Importer") + box.prop(self, "level_enum") + if self.level_enum == "Custom": + box.prop(self, "level_custom") + box.prop(self, "entry") + box.prop(self, "level_prefix") + box.prop(self, "version") + box.prop(self, "target") + row = box.row() + row.prop(self, "force_new_tex") + row.prop(self, "as_obj") + row.prop(self, "export_friendly") + row.prop(self, "import_linked_actors") + row.prop(self, "use_collection") + if self.import_linked_actors: + box = box.box() + 
box.operator("scene.add_group", text="Add Group Load") + for index, group in enumerate(self.linked_groups): + group.draw(box, index) + + +# ------------------------------------------------------------------------ +# Panels +# ------------------------------------------------------------------------ + + +class SM64_ImportPanel(SM64_Panel): + bl_label = "SM64 Importer" + bl_idname = "sm64_PT_importer" + bl_context = "objectmode" + import_panel = True + + def draw(self, context): + layout = self.layout + scene = context.scene + importer_props = scene.fast64.sm64.importer + importer_props.draw_level(layout) + layout.operator("wm.sm64_import_level") + layout.operator("wm.sm64_import_level_gfx") + layout.operator("wm.sm64_import_level_col") + layout.operator("wm.sm64_import_object") + importer_props.draw_actor(layout) + layout.operator("wm.sm64_import_actor") + layout.operator("wm.sm64_import_armature") + + +classes = ( + SM64_AddGroup, + SM64_RemoveGroup, + SM64_GroupProperties, + SM64_ImportProperties, + SM64_LvlImport, + SM64_LvlGfxImport, + SM64_LvlColImport, + SM64_ObjImport, + SM64_ActImport, + SM64_ArmatureImport, +) + + +def sm64_import_panel_register(): + register_class(SM64_ImportPanel) + + +def sm64_import_register(): + for cls in classes: + register_class(cls) + + +def sm64_import_panel_unregister(): + unregister_class(SM64_ImportPanel) + + +def sm64_import_unregister(): + for cls in reversed(classes): + unregister_class(cls) diff --git a/fast64_internal/utility.py b/fast64_internal/utility.py index e5bd48197..28ed0bea7 100644 --- a/fast64_internal/utility.py +++ b/fast64_internal/utility.py @@ -171,13 +171,19 @@ def selectSingleObject(obj: bpy.types.Object): bpy.context.view_layer.objects.active = obj -def parentObject(parent, child): - bpy.ops.object.select_all(action="DESELECT") +def parentObject(parent, child, keep=0): + if not keep: + child.parent = parent + child.matrix_local = child.matrix_parent_inverse + else: + 
bpy.ops.object.select_all(action="DESELECT") - child.select_set(True) - parent.select_set(True) - bpy.context.view_layer.objects.active = parent - bpy.ops.object.parent_set(type="OBJECT", keep_transform=True) + child.select_set(True) + parent.select_set(True) + bpy.context.view_layer.objects.active = parent + bpy.ops.object.parent_set(type="OBJECT", keep_transform=True) + parent.select_set(False) + child.select_set(False) def getFMeshName(vertexGroup, namePrefix, drawLayer, isSkinned): @@ -214,6 +220,13 @@ def getGroupNameFromIndex(obj, index): return None +# creates a new collection and links it to parent +def create_collection(parent: bpy.types.Collection, name: str): + col = bpy.data.collections.new(name) + parent.children.link(col) + return col + + def copyPropertyCollection(oldProp, newProp): newProp.clear() for item in oldProp: @@ -1168,6 +1181,19 @@ def doRotation(angle, axis): bpy.ops.transform.rotate(value=direction * angle, orient_axis=axis, orient_type="GLOBAL") +# consider checking redundancy of this with above functions? 
+def rotate_object(deg: float, obj: bpy.types.Object, world: bool = 0): + deg = Euler((math.radians(-deg), 0, 0)) + deg = deg.to_quaternion().to_matrix().to_4x4() + if world: + obj.matrix_world = obj.matrix_world @ deg + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + bpy.ops.object.transform_apply(rotation=True) + else: + obj.matrix_basis = obj.matrix_basis @ deg + + def getAddressFromRAMAddress(RAMAddress): addr = RAMAddress - 0x80000000 if addr < 0: @@ -1631,6 +1657,18 @@ def rotate_quat_blender_to_n64(rotation: mathutils.Quaternion): return new_rot.to_quaternion() +def rotate_quat_n64_to_blender(rotation: mathutils.Quaternion): + new_rot = transform_mtx_blender_to_n64().inverted() @ rotation.to_matrix().to_4x4() @ transform_mtx_blender_to_n64() + return new_rot.to_quaternion() + + +# this will take a blender property, its enumprop name, and then return a list of the allowed enums +def GetEnums(prop, enum): + enumProp = prop.bl_rna.properties.get(enum) + if enumProp: + return [item.identifier for item in enumProp.enum_items] + + def all_values_equal_x(vals: Iterable, test): return len(set(vals) - set([test])) == 0 diff --git a/fast64_internal/utility_importer.py b/fast64_internal/utility_importer.py new file mode 100644 index 000000000..b535398e9 --- /dev/null +++ b/fast64_internal/utility_importer.py @@ -0,0 +1,233 @@ +from __future__ import annotations + +import re +import bpy +from functools import partial +from dataclasses import dataclass +from pathlib import Path +from typing import TextIO, Any, Union +from numbers import Number +from collections.abc import Sequence +from .utility import transform_mtx_blender_to_n64 + + +@dataclass +class Macro: + cmd: str + args: list[str] + + # strip each arg + def __post_init__(self): + self.args = [arg.strip() if type(arg) is str else arg for arg in self.args] + self.cmd = self.cmd.strip() + + # make new macro that is the indices chosen or supplied args + def partial(self, *new_args: Any): + return 
Macro(self.cmd, (arg for arg in new_args)) + + +@dataclass +class Parser: + cur_stream: Sequence[Any] + head: int = -1 + + def stream(self): + while self.head < len(self.cur_stream) - 1: + self.head += 1 + yield self.cur_stream[self.head] + + +# basic methods and utility to parse scripts or data streams of bytecode +class DataParser: + # parsing flow status codes + continue_parse = 1 + break_parse = 2 + + def __init__(self, parent: DataParser = None): + # for forward referencing scripts, keep track of the stream + if parent: + self.parsed_streams = parent.parsed_streams + else: + self.parsed_streams = dict() + + # for if you're jumping, you start from the beginning, but if you're starting/stopping + # then you want to just pickup from the last spot + def parse_stream_from_start(self, dat_stream: Sequence[Any], entry_id: Any, *args, **kwargs): + self.reset_parser(entry_id) + self.parse_stream(dat_stream, entry_id, *args, **kwargs) + + def parse_stream(self, dat_stream: Sequence[Any], entry_id: Any, *args, **kwargs): + parser = self.parsed_streams.get(entry_id) + if not parser: + self.parsed_streams[entry_id] = (parser := Parser(dat_stream)) + for line in parser.stream(): + cur_macro = self.c_macro_split(line) + func = getattr(self, cur_macro.cmd, None) + if not func: + raise Exception(f"Macro {cur_macro} not found in parser function") + else: + flow_status = func(cur_macro, *args, **kwargs) + if flow_status == self.break_parse: + return + + def reset_parser(self, entry_id: Any): + self.parsed_streams[entry_id] = None + + def get_parser(self, entry_id: Any, relative_offset: int = 0): + parser = self.parsed_streams[entry_id] + parser.head += relative_offset + return parser + + def c_macro_split(self, macro: str) -> list[str]: + args_start = macro.find("(") + return Macro(macro[:args_start], macro[args_start + 1 : macro.rfind(")")].split(",")) + + +def transform_matrix_to_bpy(transform: Matrix) -> Matrix: + return transform_mtx_blender_to_n64().inverted() @ transform @ 
transform_mtx_blender_to_n64() + + +# make something more generic here where user can supply their own function +def evaluate_macro(line: str): + props = bpy.context.scene.fast64.sm64.importer + if props.version in line: + return False + if props.target in line: + return False + return True + + +# gets rid of comments, whitespace and macros in a file +def pre_parse_file(file: TextIO) -> list[str]: + multi_line_comment_regx = "/\*[^*]*\*+(?:[^/*][^*]*\*+)*/" + file = re.sub(multi_line_comment_regx, "", file.read()) + skip_macro = 0 # bool to skip during macros + output_lines = [] + for line in file.splitlines(): + # remove line comment + if (comment := line.rfind("//")) > 0: + line = line[:comment] + # check for macro + if "#if" in line: + skip_macro = evaluate_macro(line) + if "#ifdef" in line: + skip_macro = evaluate_macro(line) + continue + if "#elif" in line: + skip_macro = evaluate_macro(line) + continue + if "#else" in line or "#endif" in line: + skip_macro = 0 + continue + if not skip_macro and line: + output_lines.append(line) + return output_lines + + +# given an aggregate file that imports many files, find files with the name of type +def parse_aggregate_file( + agg_file: TextIO, file_catches: tuple[callable], root_path: Path, aggregate_path: Path +) -> list[Path]: + agg_file.seek(0) # so it may be read multiple times + + file_lines = pre_parse_file(agg_file) + # remove include and quotes + remove = {"#include", '"', "'"} + caught_files = [] + for line in file_lines: + # if not line: + # continue + for r in remove: + line = line.replace(r, "") + line = Path(line.strip()) + for callable in file_catches: + if callable(line): + caught_files.append(line) + break + + # include is relative cur aggregate file or root + def get_file_path(root_path: Path, include: str): + if (include_file := root_path / include).exists(): + return include_file + if (include_file := aggregate_path.parent / include).exists(): + return include_file + raise Exception(f"could not find 
inclusion file {include}") + + if caught_files: + return [get_file_path(root_path, include) for include in caught_files] + else: + return [] + + +# Search through a C file to find data of typeName[] and split it into a list +# of data types with all comments removed +def get_data_types_from_file(file: TextIO, type_dict, collated=False): + # from a raw file, create a dict of types. Types should all be arrays + file_lines = pre_parse_file(file) + array_bounds_regx = "\[[0-9a-fx]*\]" # basically [] with any valid number in it + equality_regx = "\s*=" # finds the first char before the equals sign + output_variables = {type_name: dict() for type_name in type_dict.keys()} + type_found = None + var_dat_buffer = [] + for line in file_lines: + if type_found: + # Check for end of array + if ";" in line: + output_variables[type_found[0]][type_found[1]] = "".join(var_dat_buffer) + type_found = None + var_dat_buffer = [] + else: + var_dat_buffer.append(line) + continue + # name ends at the array bounds, or the equals sign + match = re.search(array_bounds_regx, line, flags=re.IGNORECASE) + if not match: + match = re.search(equality_regx, line, flags=re.IGNORECASE) + type_collisions = [type_name for type_name in type_dict.keys() if type_name in line] + if match and type_collisions: + # there should ideally only be one collision + type_name = type_collisions[0] + variable_name = line[line.find(type_name) + len(type_name) : match.span()[0]].strip() + type_found = (type_name, variable_name) + # Now remove newlines from each line, and then split macro ends + # This makes each member of the array a single macro or array + for data_type, delimiters in type_dict.items(): + for variable, data in output_variables[data_type].items(): + output_variables[data_type][variable] = format_data_arr(data, delimiters) + + # if collated, organize by data type, otherwise just take the various dicts raw + return ( + output_variables + if collated + else {vd_key: vd_value for var_dict in 
output_variables.values() for vd_key, vd_value in var_dict.items()} + ) + + +# takes a raw string representing data and then formats it into an array +def format_data_arr(raw_data: str, delimiters: tuple[str]) -> list[str]: + raw_data = raw_data.replace("\n", "") + arr = [] # arr of data in format + buf = "" # buf to put currently processed data in + pos = 0 # cur position in str + stack = 0 # stack cnt of parenthesis + app = 0 # flag to append data + while pos < len(raw_data): + char = raw_data[pos] + if char == delimiters[0]: + stack += 1 + app = 1 + if char == delimiters[1]: + stack -= 1 + if app == 1 and stack == 0: + app = 0 + buf += raw_data[pos : pos + 2] # get the last parenthesis and comma + arr.append(buf.strip()) + pos += 2 + buf = "" + continue + buf += char + pos += 1 + # for when the delim characters are nothing + if buf: + arr.append(buf) + return arr