diff --git a/README.md b/README.md index a897ec4..5729fc8 100644 --- a/README.md +++ b/README.md @@ -1,36 +1,48 @@ # umodel_tools + ![PyLint](https://github.com/skarndev/umodel_tools/actions/workflows/pylint.yml/badge.svg) ![pycodestyle](https://github.com/skarndev/umodel_tools/actions/workflows/pycodestyle.yml/badge.svg) [![](https://dcbadge.vercel.app/api/server/DJYZwyegSF)](https://discord.gg/DJYZwyegSF) +A fork from [skarndev repo]([https://github.com/skarndev/umodel_tools]) +https://github.com/skarndev/umodel_tools + A [Blender](https://blender.org) add-on featuring import of [Unreal Engine](https://www.unrealengine.com) games static meshes (stored in-game as .uasset) and maps (stored in-game as .umap). It provides basic support for many -UE games, while [some](https://skarndev.github.io/umodel_tools/supported_games.html) have dedicated improved support. +UE games, while some (https://skarndev.github.io/umodel_tools/supported_games.html) have dedicated improved support. ![](/docs/source/images/demo.jpeg?raw=true "Demo") # Features + - Unreal Engine games map (.umap) and (.uasset) import. - Creation of asset library in Blender out of game's assets. - PBR materials. - Lights (experimental support). # Usage + You can learn how to install and use the add-on on the [documentation page](https://skarndev.github.io/umodel_tools/). +Compatible with Blender 4.2 or higher. +Recommend Blender 4.3 + # Support + You can support core feature development and adding more games at my [Boosty page](https://boosty.to/skarn). # Roadmap -- Adding more games. + +- Adding more games. - Improving support for asset management. - Improving map import. - Improving lights import. # Credits + - Gildor, for creating [UEViewer](https://www.gildor.org/en/projects/umodel). - Developers of [FModel](https://fmodel.app). - Developers of these [import scripts](https://github.com/Ganonmaster/Blender-Scripts/tree/master/ue4map-tools). 
@@ -38,5 +50,6 @@ You can support core feature development and adding more games at my - Loveslove for early testing. # Disclaimer + 3D assets and maps used by most of the games are copyrighted property of game's owners. This software does not promote asset piracy and is intended for artistic and research purposes only. diff --git a/umodel_tools/__init__.py b/umodel_tools/__init__.py index dd4821e..ffdb3a2 100644 --- a/umodel_tools/__init__.py +++ b/umodel_tools/__init__.py @@ -16,13 +16,11 @@ # # ##### END GPL LICENSE BLOCK ##### +import bpy import os import sys import traceback -import bpy - - # include custom lib vendoring dir parent_dir = os.path.abspath(os.path.dirname(__file__)) vendor_dir = os.path.join(parent_dir, 'third_party') @@ -31,13 +29,12 @@ from . import auto_load # nopep8 pylint: disable=wrong-import-position - #: Addon description for Blender. Displayed in settings. bl_info = { "name": "UModel Tools", - "author": "Skarn", - "version": (1, 0), - "blender": (3, 40, 0), + "author": "Skarn, KingxJulz", + "version": (1,0,5), + "blender": (4,2,0), "description": "Import Unreal Engine games scenes and assets into Blender.", "category": "Import-Export" } diff --git a/umodel_tools/asset_importer.py b/umodel_tools/asset_importer.py index 284e3b4..1ebeb08 100644 --- a/umodel_tools/asset_importer.py +++ b/umodel_tools/asset_importer.py @@ -5,7 +5,7 @@ import shutil import contextlib -from io_import_scene_unreal_psa_psk_280 import pskimport # pylint: disable=import-error +from .third_party.io_import_scene_unreal_psa_psk_280 import pskimport # pylint: disable=import-error import bpy from . import enums @@ -14,26 +14,98 @@ from . import props_txt_parser from . 
import game_profiles +from bpy.props import BoolProperty, StringProperty, EnumProperty + + +# --- Constants for Nodes, Sockets, and Node Groups --- +ALPHA_MODE_CHANNEL = 'CHANNEL_PACKED' +NODE_FRAME = 'NodeFrame' + +# Nodes Shaders +BSDF_DIFFUSE_NODE = 'ShaderNodeBsdfDiffuse' +BSDF_EMISSION_NODE = 'ShaderNodeEmission' +BSDF_GLOSSY_NODE = 'ShaderNodeBsdfGlossy' +PRINCIPLED_SHADER_NODE = 'ShaderNodeBsdfPrincipled' +BSDF_TRANSPARENT_NODE = 'ShaderNodeBsdfTransparent' +BSDF_GLASS_NODE = 'ShaderNodeBsdfGlass' +SHADER_ADD_NODE = 'ShaderNodeAddShader' +SHADER_MIX_NODE = 'ShaderNodeMixShader' + +# Nodes Color +RGB_MIX_NODE = 'ShaderNodeMix' # +INVERT_NODE = 'ShaderNodeInvert' # + +# Nodes Input +TEXTURE_IMAGE_NODE = 'ShaderNodeTexImage' +ENVIRONMENT_IMAGE_NODE = 'ShaderNodeTexEnvironment' +COORD_NODE = 'ShaderNodeTexCoord' + +# Nodes Outputs +OUTPUT_NODE = 'ShaderNodeOutputMaterial' + +# Nodes Vector +MAPPING_NODE = 'ShaderNodeMapping' +NORMAL_MAP_NODE = 'ShaderNodeNormalMap' # + +# Nodes Convert +SHADER_NODE_MATH = 'ShaderNodeMath' # +SHADER_NODE_CLAMP = 'ShaderNodeClamp' # +RGB_TO_BW_NODE = 'ShaderNodeRGBToBW' +SHADER_NODE_SEPARATE_COLOR = 'ShaderNodeSeparateColor' # +SHADER_NODE_COMBINE_COLOR = 'ShaderNodeCombineColor' # + +# Node Groups +NODE_GROUP = 'ShaderNodeGroup' # +NODE_GROUP_INPUT = 'NodeGroupInput' # +NODE_GROUP_OUTPUT = 'NodeGroupOutput' # +SHADER_NODE_TREE = 'ShaderNodeTree' # + + +# Node Socket Types +COLOR_SOCKET_NODE = 'NodeSocketColor' # +FLOAT_SOCKET_NODE = 'NodeSocketFloat' # +VECTOR_SOCKET_NODE = 'NodeSocketVector' # +BOOL_SOCKET_NODE = 'NodeSocketBool' # + + +# Node Custom Groups +INVERT_CHANNEL_NODE = 'Invert Channel' +MIX_NORMAL_NODE = 'Normal Mix' +NORMAL_MASK_NODE = 'Normal Mask' + +MAIN_SHADER_NODE = "MainShader" # +ORM_EXTRA_SHADER_NODE = "ORMExtraShader" # +ODMASK_SHADER_NODE = "OverlayDiffuseMask" # + class AssetImporter: """Implements functionality of asset import from UModel output. Intended to be inherited a bpy.types.Operator subclass. 
""" - load_pbr_maps: bpy.props.BoolProperty( - name="Load PBR textures", - description="Load normal maps, specular, roughness, etc into materials. Experimental", - default=True + link_to_scene: BoolProperty(name="Link", + description="Link created asset into the current scene. Experimental", default=False + ) + + append_to_scene: BoolProperty(name="Append", + description="Append created asset into the current scene. Experimental", default=True + ) + + overwrite: BoolProperty(name="Overwrite AssetLibrary", + description="Overwrite existing assets within assets directory. Experimental", default=False ) - import_backface_culling: bpy.props.BoolProperty( - name="Use backface culling", + load_pbr_maps: BoolProperty(name="Load PBR textures", + description="Load normal maps, specular, roughness, etc into materials. Experimental", default=True + ) + + import_backface_culling: BoolProperty(name="Use backface culling", description="If this setting is checked, material settings for backface culling will be kept, " "otherwise backface culling is always off", default=False ) - texture_format: bpy.props.EnumProperty( + texture_format: EnumProperty( name="Texture format", description="Format of textures expected to be in the UModel export directory.", items=[ @@ -41,7 +113,17 @@ class AssetImporter: ('.dds', '.dds', '', 1), ('.tga', '.tga', '', 2) ], - default='.png' + default='.dds' + ) + + mesh_format: EnumProperty( + name="Mesh format", + description="Format of mesh exported from Umdoel / Fmodel,\n expected to be in the export directory.", + items=[ + ('.psk', '.psk', '', 0), + ('.uemodel', '.uemodel', '', 1) + ], + default='.psk' ) _unrecognized_texture_types: set[str] = set() @@ -82,6 +164,7 @@ def _print_unrecognized_textures(self) -> None: print(self._unrecognized_texture_types) self._unrecognized_texture_types.clear() + def _load_asset(self, context: bpy.types.Context, asset_dir: str, @@ -108,7 +191,12 @@ def _load_asset(self, asset_path_abs = asset_path_abs_no_ext + 
'.blend' try: - if not os.path.isfile(asset_path_abs): + if self.overwrite or not os.path.isfile(asset_path_abs): + + if self.overwrite and os.path.isfile(asset_path_abs): + print(f"[Overwrite] Removing existing asset: {asset_path_abs}") + os.remove(asset_path_abs) # Remove current asset if overwrite enable. + self._import_asset_to_library(context=context, asset_library_dir=asset_dir, asset_path=asset_path, umodel_export_dir=umodel_export_dir, db=db, game_profile=game_profile) @@ -122,12 +210,116 @@ def _load_asset(self, assert len(data_to.objects) == 1 return data_to.objects[0] + + # if load: + # # Instead of checking for already linked asset, we append the asset. + # with utils.redirect_cstdout(): + # # Use link=False to append rather than link. + # with bpy.data.libraries.load(asset_path_abs, link=False) as (data_from, data_to): + # data_to.objects = list(data_from.objects) + # assert len(data_to.objects) == 1 + # appended_obj = data_to.objects[0] + # # Make sure the appended object is added to the current collection. + # if appended_obj.name not in context.scene.objects: + # context.collection.objects.link(appended_obj) + # return appended_obj return None except (RuntimeError, FileNotFoundError): traceback.print_exc() return None + + def _load_asset_appended(self, + context: bpy.types.Context, + asset_dir: str, + asset_path: str, + umodel_export_dir: str, + game_profile: str, + db: t.Optional[asset_db.AssetDB] = None + ) -> bpy.types.Object | None: + """ + Loads the asset from the asset library by appending it (making it fully local), + rather than linking it as an external reference. 
+ """ + asset_path_abs_no_ext = os.path.join(asset_dir, os.path.splitext(asset_path)[0]) + asset_path_abs = asset_path_abs_no_ext + '.blend' + + try: + if self.overwrite or not os.path.isfile(asset_path_abs): + + if self.overwrite and os.path.isfile(asset_path_abs): + print(f"[Overwrite] Removing existing asset: {asset_path_abs}") + os.remove(asset_path_abs) # Remove current asset if overwrite enable. + + self._import_asset_to_library(context=context, asset_library_dir=asset_dir, asset_path=asset_path, + umodel_export_dir=umodel_export_dir, db=db, game_profile=game_profile) + + with utils.redirect_cstdout(): + # Use link=False to append instead of linking. + with bpy.data.libraries.load(asset_path_abs, link=False) as (data_from, data_to): + data_to.objects = list(data_from.objects) + assert len(data_to.objects) == 1 + appended_obj = data_to.objects[0] + + # Ensure the appended object is added to the current collection. + if appended_obj.name not in context.scene.objects: + context.collection.objects.link(appended_obj) + return appended_obj + + except (RuntimeError, FileNotFoundError): + traceback.print_exc() + return None + + def _load_asset_linked(self, + context: bpy.types.Context, + asset_dir: str, + asset_path: str, + umodel_export_dir: str, + game_profile: str, + db: t.Optional[asset_db.AssetDB] = None + ) -> bpy.types.Object | None: + """ + Loads the asset from the asset library by linking it (as an external reference) + and then ensures the linked asset is added to the current scene. + """ + asset_path_abs_no_ext = os.path.join(asset_dir, os.path.splitext(asset_path)[0]) + asset_path_abs = asset_path_abs_no_ext + '.blend' + + try: + if self.overwrite or not os.path.isfile(asset_path_abs): + + if self.overwrite and os.path.isfile(asset_path_abs): + print(f"[Overwrite] Removing existing asset: {asset_path_abs}") + os.remove(asset_path_abs) # Remove current asset if overwrite enable. 
+ + self._import_asset_to_library(context=context, + asset_library_dir=asset_dir, + asset_path=asset_path, + umodel_export_dir=umodel_export_dir, + db=db, + game_profile=game_profile) + + # Try to find the already linked asset. + linked_obj = utils.linked_libraries_search(asset_path_abs, bpy.types.Object) + if linked_obj is None: + with utils.redirect_cstdout(): + # Use link=True to link the asset from the external .blend. + with bpy.data.libraries.load(asset_path_abs, link=True) as (data_from, data_to): + data_to.objects = list(data_from.objects) + assert len(data_to.objects) == 1 + linked_obj = data_to.objects[0] + + # Ensure the linked object is added to the current scene's collection. + if linked_obj.name not in context.collection.objects: + context.collection.objects.link(linked_obj) + + return linked_obj + + except (RuntimeError, FileNotFoundError): + traceback.print_exc() + return None + def _import_image_to_library(self, tex_path: str, @@ -183,11 +375,35 @@ def _import_material_to_library(self, material_path_local_no_ext = os.path.splitext(os.path.splitext(material_path_local)[0])[0] # remove .props.txt + if not os.path.isfile(os.path.join(umodel_export_dir, material_path_local)): + # Try finding it in all extracted folders + possible_matches = [] + for root, _, files in os.walk(umodel_export_dir): + for f in files: + if f.lower() == os.path.basename(material_path_local).lower(): + possible_matches.append(os.path.join(root, f)) + + if len(possible_matches) == 1: + material_path_local = os.path.relpath(possible_matches[0], umodel_export_dir) + print(f"[Fallback] Using found material path: {material_path_local}") + elif len(possible_matches) > 1: + print(f"[Warning] Multiple .props.txt matches found for {material_name}:") + for match in possible_matches: + print(" ", match) + material_path_local = os.path.relpath(possible_matches[0], umodel_export_dir) + print(f"[Fallback] Using first match: {material_path_local}") + else: + raise FileNotFoundError(f"Could 
not find material: {material_path_local}") + # load texture infos, may throw OSError if file is not found. # pylint: disable=unpacking-non-sequence - desc_ast, texture_infos, base_prop_overrides = props_txt_parser.parse_props_txt(os.path.join(umodel_export_dir, + desc_ast, texture_infos, base_prop_overrides, vec_infos = props_txt_parser.parse_props_txt(os.path.join(umodel_export_dir, material_path_local), mode='MATERIAL') + + print(f"Vector params found: {vec_infos}") # Debug vector param values + print(f"Override params found: {base_prop_overrides}") # Debug override values + new_mat = bpy.data.materials.new(material_name) new_mat.asset_mark() new_mat.asset_data.catalog_id = db.uid_for_entry(material_path_local_no_ext) @@ -241,14 +457,388 @@ def _import_material_to_library(self, ao_mix.inputs[7].default_value = (1, 1, 1, 1) new_mat.node_tree.links.new(ao_mix.outputs[2], bsdf.inputs['Base Color']) + # --- Custom Node Group Creation --- + def create_main_node_group(): + node_group = bpy.data.node_groups.new(MAIN_SHADER_NODE, SHADER_NODE_TREE) + + # Create interface sockets + node_group.interface.new_socket(name="Diffuse Texture",description="Color map texture",in_out='INPUT',socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="AO",description="Packed textures with value in different channel, R-Occlusion, G-Roughness, B-Metallic",in_out='INPUT',socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Gamma Strength",description="Gamma Strength",in_out='INPUT',socket_type=FLOAT_SOCKET_NODE) + + node_group.interface.new_socket( + name="Diffuse + AO", + description="Output Diffuse and Ambient Occlusion multiplied.", + in_out='OUTPUT', + socket_type=COLOR_SOCKET_NODE + ) + + # Create node group input and output nodes + group_input_node = node_group.nodes.new(NODE_GROUP_INPUT) + group_input_node.location = (-900, 0) + + group_output_node = node_group.nodes.new(NODE_GROUP_OUTPUT) + group_output_node.location = (300, 0) + + gamma = 
node_group.nodes.new("ShaderNodeGamma") + gamma.inputs[1].default_value = 0.8 # Factor + gamma.location = (-650, 0) + + # Mix node + mix_node = node_group.nodes.new(RGB_MIX_NODE) + mix_node.data_type = 'RGBA' + mix_node.blend_type = 'MULTIPLY' + mix_node.inputs[0].default_value = 0.5 # Factor + mix_node.inputs[6].default_value = (0.0,0.0,0.0,1.0) # A - Color + mix_node.location = (-450, 0) + + node_group.links.new(group_input_node.outputs[0], gamma.inputs[0]) # Diffuse to Gamma + node_group.links.new(gamma.outputs[0], mix_node.inputs[6]) # Gamma to mix + + node_group.links.new(group_input_node.outputs[1], mix_node.inputs[7]) # Occlusion to mix + node_group.links.new(group_input_node.outputs[2], gamma.inputs[1]) # Gamma strength + + node_group.links.new(mix_node.outputs['Result'], group_output_node.inputs[0]) # mix output + + print(f"Node group '{MAIN_SHADER_NODE}' created successfully.") + + node_group.use_fake_user = True + return node_group + + def get_or_create_main_node_group(): + if MAIN_SHADER_NODE in bpy.data.node_groups: + return bpy.data.node_groups[MAIN_SHADER_NODE] + else: + return create_main_node_group() + + def create_orm_extra_node_group(): + node_group = bpy.data.node_groups.new(ORM_EXTRA_SHADER_NODE, SHADER_NODE_TREE) + + # Create interface sockets + node_group.interface.new_socket( + name="ORM Input", + description="Packed texture map", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Multiply", + description="Multiply ORM factor", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Min", + description="Min value", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Max", + description="Max value", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Red Chann Input", + description="ORM-Red channel", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + 
node_group.interface.new_socket( + name="Red Param Value", + description="ORM-Red channel Parameter value", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Green Chann Input", + description="ORM-Green channel", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Green Param Value", + description="ORM-Green channel Parameter value", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Blue Chann Input", + description="ORM-Blue channel", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Blue Param Value", + description="ORM-Blue channel Parameter value", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="ORM Output", + description="ORM overlay output", + in_out='OUTPUT', + socket_type=FLOAT_SOCKET_NODE + ) + + # Create node group input and output nodes + group_input_node = node_group.nodes.new(NODE_GROUP_INPUT) + group_input_node.location = (-800, 0) + + group_output_node = node_group.nodes.new(NODE_GROUP_OUTPUT) + group_output_node.location = (300, 0) + + # Math node + math_node = node_group.nodes.new(SHADER_NODE_MATH) + math_node.inputs[0].default_value = 0.5 # Mix Factor + math_node.operation = 'MULTIPLY' + math_node.location = (-600, 0) + + # Math node + clamp_node = node_group.nodes.new(SHADER_NODE_CLAMP) + clamp_node.inputs[0].default_value = 0.5 # Mix Factor + clamp_node.location = (-500, -100) + + # Mix node + mix_node = node_group.nodes.new(RGB_MIX_NODE) + mix_node.inputs[0].default_value = 0.5 # Mix Factor + mix_node.location = (-400, -200) + + mix_node1 = node_group.nodes.new(RGB_MIX_NODE) + mix_node1.inputs[0].default_value = 0.5 # Mix Factor + mix_node1.location = (-300, -300) + + mix_node2 = node_group.nodes.new(RGB_MIX_NODE) + mix_node2.inputs[0].default_value = 0.5 # Mix Factor + mix_node2.location = (-200, -400) + + # Link 
nodes + node_group.links.new(group_input_node.outputs[0], math_node.inputs[0]) + node_group.links.new(group_input_node.outputs[1], math_node.inputs[1]) + + node_group.links.new(math_node.outputs[0], clamp_node.inputs[0]) # Math to clamp + node_group.links.new(group_input_node.outputs[2], clamp_node.inputs[1]) + node_group.links.new(group_input_node.outputs[3], clamp_node.inputs[2]) + + node_group.links.new(clamp_node.outputs[0], mix_node.inputs[2]) # Clamp to mix + node_group.links.new(group_input_node.outputs[4], mix_node.inputs[0]) + node_group.links.new(group_input_node.outputs[5], mix_node.inputs[3]) + + node_group.links.new(mix_node.outputs[0], mix_node1.inputs[2]) # Mix to mix1 + node_group.links.new(group_input_node.outputs[6], mix_node1.inputs[0]) + node_group.links.new(group_input_node.outputs[7], mix_node1.inputs[3]) + + node_group.links.new(mix_node1.outputs[0], mix_node2.inputs[2]) # Mix1 to mix2 + node_group.links.new(group_input_node.outputs[8], mix_node2.inputs[0]) + node_group.links.new(group_input_node.outputs[9], mix_node2.inputs[3]) + + node_group.links.new(mix_node2.outputs[0], group_output_node.inputs[0]) # Output + + print(f"Node group '{ORM_EXTRA_SHADER_NODE}' created successfully.") + + node_group.use_fake_user = True + return node_group + + def get_or_create_orm_extra_node_group(): + if ORM_EXTRA_SHADER_NODE in bpy.data.node_groups: + return bpy.data.node_groups[ORM_EXTRA_SHADER_NODE] + else: + return create_orm_extra_node_group() + + def create_od_mask_node_group(): + node_group = bpy.data.node_groups.new(ODMASK_SHADER_NODE, SHADER_NODE_TREE) + + # Create interface sockets + node_group.interface.new_socket( + name="Diffuse Input", + description="Color map", + in_out='INPUT', + socket_type=COLOR_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Simple Tint", + description="Tint mask, texture map.", + in_out='INPUT', + socket_type=COLOR_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Red Chann Input", + description="Dirt 
map, Red Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Mask Value 1", + description="Get multiplied by the Red Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Diffuse Color (Red Channel)", + description="Diffuse texture map - Red Channel.", + in_out='INPUT', + socket_type=COLOR_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Green Chann Input", + description="Dirt map, Green Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Mask Value 2", + description="Get multiplied by the Green Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Diffuse Color (Green Channel)", + description="Diffuse texture map - Green Channel.", + in_out='INPUT', + socket_type=COLOR_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Blue Chann Input", + description="Dirt map, Blue Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Mask Value 3", + description="Get multiplied by the Blue Channel.", + in_out='INPUT', + socket_type=FLOAT_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Diffuse Color (Blue Channel)", + description="Diffuse texture map - Blue Channel.", + in_out='INPUT', + socket_type=COLOR_SOCKET_NODE + ) + node_group.interface.new_socket( + name="Diffuse Out", + description="Dirt Map and Simple Tint overlayed output.", + in_out='OUTPUT', + socket_type=COLOR_SOCKET_NODE + ) + + # Create node group input and output nodes + group_input_node = node_group.nodes.new(NODE_GROUP_INPUT) + group_input_node.location = (-900, 0) + + group_output_node = node_group.nodes.new(NODE_GROUP_OUTPUT) + group_output_node.location = (300, 0) + + # Mix node #1 + mix_node = node_group.nodes.new(RGB_MIX_NODE) + mix_node.inputs[0].default_value = 0.5 # Mix Factor + mix_node.data_type = 
'RGBA' + mix_node.blend_type = 'MULTIPLY' + mix_node.inputs[0].default_value = 1.0 # Factor + mix_node.location = (-750, 0) + + # Mix node #2 + mix_node2 = node_group.nodes.new(RGB_MIX_NODE) + mix_node2.inputs[0].default_value = 0.5 # Mix Factor + mix_node2.data_type = 'RGBA' + mix_node2.blend_type = 'MULTIPLY' + mix_node2.inputs[0].default_value = 1.0 # Factor + mix_node2.location = (-600, 0) + + # Mix node #3 + mix_node3 = node_group.nodes.new(RGB_MIX_NODE) + mix_node3.inputs[0].default_value = 0.5 # Mix Factor + mix_node3.data_type = 'RGBA' + mix_node3.blend_type = 'OVERLAY' + mix_node3.inputs[0].default_value = 1.0 # Factor + mix_node3.location = (-450, 0) + + # Mix node #4 + mix_node4 = node_group.nodes.new(RGB_MIX_NODE) + mix_node4.inputs[0].default_value = 0.5 # Mix Factor + mix_node4.data_type = 'RGBA' + mix_node4.blend_type = 'MULTIPLY' + mix_node4.inputs[0].default_value = 1.0 # Factor + mix_node4.inputs[7].default_value = (0.735,0.735,0.735,1.0) # A - Color + mix_node4.location = (-700, -200) + + # Mix node #5 + mix_node5 = node_group.nodes.new(RGB_MIX_NODE) + mix_node5.inputs[0].default_value = 0.5 # Mix Factor + mix_node5.data_type = 'RGBA' + mix_node5.blend_type = 'MULTIPLY' + mix_node5.inputs[0].default_value = 1.0 # Factor + mix_node5.inputs[7].default_value = (0.735,0.735,0.735,1.0) # A - Color + mix_node5.location = (-600, -200) + + # Mix node #6 + mix_node6 = node_group.nodes.new(RGB_MIX_NODE) + mix_node6.inputs[0].default_value = 0.5 # Mix Factor + mix_node6.data_type = 'RGBA' + mix_node6.blend_type = 'MULTIPLY' + mix_node6.inputs[0].default_value = 1.0 # Factor + mix_node6.inputs[7].default_value = (0.735,0.735,0.735,1.0) # A - Color + mix_node6.location = (-450, -200) + + # Mix node #7 + mix_node7 = node_group.nodes.new(RGB_MIX_NODE) + mix_node7.inputs[0].default_value = 0.5 # Mix Factor + mix_node7.data_type = 'RGBA' + mix_node7.blend_type = 'OVERLAY' + mix_node7.inputs[0].default_value = 1.0 # Factor + mix_node7.location = (-300, -100) + + # 
Link nodes + node_group.links.new(group_input_node.outputs[0], mix_node.inputs[6]) # Simple Tint + node_group.links.new(group_input_node.outputs[1], mix_node.inputs[7]) + + node_group.links.new(group_input_node.outputs[4], mix_node2.inputs[7]) + node_group.links.new(group_input_node.outputs[7], mix_node3.inputs[7]) + node_group.links.new(group_input_node.outputs[10], mix_node7.inputs[7]) + + node_group.links.new(group_input_node.outputs[2], mix_node4.inputs[7]) + node_group.links.new(group_input_node.outputs[3], mix_node4.inputs[0]) + + node_group.links.new(group_input_node.outputs[5], mix_node5.inputs[7]) + node_group.links.new(group_input_node.outputs[6], mix_node5.inputs[0]) + + node_group.links.new(group_input_node.outputs[8], mix_node6.inputs[7]) + node_group.links.new(group_input_node.outputs[9], mix_node6.inputs[0]) + + node_group.links.new(mix_node.outputs['Result'], mix_node2.inputs[6]) + node_group.links.new(mix_node2.outputs['Result'], mix_node3.inputs[6]) + node_group.links.new(mix_node3.outputs['Result'], mix_node7.inputs[6]) + + node_group.links.new(mix_node4.outputs['Result'], mix_node2.inputs[0]) + node_group.links.new(mix_node5.outputs['Result'], mix_node3.inputs[0]) + node_group.links.new(mix_node6.outputs['Result'], mix_node7.inputs[0]) + + node_group.links.new(mix_node7.outputs['Result'], group_output_node.inputs[0]) # Diffuse output + + print(f"Node group '{ODMASK_SHADER_NODE}' created successfully.") + + node_group.use_fake_user = True + return node_group + + def get_or_create_od_mask_node_group(): + if ODMASK_SHADER_NODE in bpy.data.node_groups: + return bpy.data.node_groups[ODMASK_SHADER_NODE] + else: + return create_od_mask_node_group() + # in order to simulate some blending modes special node logic is required match special_blend_mode: case None: new_mat.node_tree.links.new(bsdf.outputs['BSDF'], out.inputs['Surface']) case enums.SpecialBlendingMode.Add: + # mainshader = get_or_create_main_node_group() # Testing + transparent_bsdf = 
new_mat.node_tree.nodes.new('ShaderNodeBsdfTransparent') add_shader = new_mat.node_tree.nodes.new('ShaderNodeAddShader') + # new_mat.node_tree.links.new(img_node.outputs[0], transparent_bsdf.inpputs['Color']) # Testing + new_mat.node_tree.links.new(bsdf.outputs['BSDF'], add_shader.inputs[0]) new_mat.node_tree.links.new(transparent_bsdf.outputs['BSDF'], add_shader.inputs[1]) new_mat.node_tree.links.new(add_shader.outputs[0], out.inputs['Surface']) @@ -270,11 +860,6 @@ def _import_material_to_library(self, if not self.load_pbr_maps and not game_profile_impl.is_diffuse_tex_type(tex_type, tex_short_name): continue - # skip the texture if we don't know what to do with it - if not game_profile_impl.do_process_texture(tex_type, tex_short_name): - self._unrecognized_texture_types.add(tex_type) - continue - # normalize path from config tex_path_no_ext = os.path.normpath(tex_path_no_ext) @@ -316,6 +901,9 @@ def _import_material_to_library(self, tex_type=tex_type, tex_short_name=tex_short_name, img_node=img_node, + main_shader=get_or_create_main_node_group(), + orm_shader=get_or_create_orm_extra_node_group(), + odmask_shader=get_or_create_od_mask_node_group(), ao_mix_node=ao_mix, bsdf_node=bsdf, out_node=out) @@ -333,7 +921,7 @@ def _import_material_to_library(self, material_lib_path = os.path.join(asset_library_dir, material_path_local_no_ext) + '.blend' os.makedirs(os.path.dirname(material_lib_path), exist_ok=True) bpy.data.libraries.write(filepath=material_lib_path, datablocks={new_mat, }, fake_user=True) - bpy.data.materials.remove(new_mat, do_unlink=True) + # bpy.data.materials.remove(new_mat, do_unlink=True) def _import_asset_to_library(self, context: bpy.types.Context, @@ -348,7 +936,7 @@ def _import_asset_to_library(self, :param context: Current Blender context. :param asset_library_dir: Directory to store the asset, and its dependencies in. :param asset_path: Path to the asset in game format. 
- :param umodel_export_dir: UModel output directory to source .psk files from. + :param umodel_export_dir: UModel/Fmodel output directory to source .psk/.uemodel files from. :param game_profile: Game profile to import. :param db: Asset database to operate on. If given, no saving is performed, else the function handles everything by itself. @@ -370,32 +958,71 @@ def _import_asset_to_library(self, os.makedirs(asset_absolute_dir, exist_ok=True) asset_psk_path_noext = os.path.join(umodel_export_dir, asset_path_local_noext) + asset_uemodel_path_noext = os.path.join(umodel_export_dir, asset_path_local_noext) # UEFormat -- - if os.path.isfile(pskx_path := asset_psk_path_noext + '.pskx'): - utils.verbose_print(f"Importing \"{pskx_path}\"") - - with contextlib.redirect_stdout(io.StringIO()): - if not pskimport(filepath=pskx_path, - context=context, - bImportbone=False): - raise RuntimeError(f"Error: Failed importing asset {asset_psk_path_noext + '.pskx'} " - "due to unknown reason.") + # Import psk/pskx files + if self.mesh_format == '.psk': + pskx_path = asset_psk_path_noext + '.pskx' + psk_path = asset_psk_path_noext + '.psk' + found_path = None animated = False - elif os.path.isfile(psk_path := asset_psk_path_noext + '.psk'): - utils.verbose_print(f"Importing \"{psk_path}\"") - with contextlib.redirect_stdout(io.StringIO()): - if not pskimport(filepath=psk_path, - context=context, - bImportbone=False): - raise RuntimeError(f"Error: Failed importing asset {asset_psk_path_noext + '.psk'} " - "due to unknown reason.") - animated = True + # Check original paths first + if os.path.isfile(pskx_path): + found_path = pskx_path + animated = False + elif os.path.isfile(psk_path): + found_path = psk_path + animated = True + else: + # Try fallback paths + if 'Content' in pskx_path or 'Content' in psk_path: + fallback_pskx = pskx_path.replace('Content', 'Game') + fallback_psk = psk_path.replace('Content', 'Game') + + if os.path.isfile(fallback_pskx): + found_path = fallback_pskx + 
animated = False + utils.verbose_print(f"[Fallback] Using .pskx: {fallback_pskx}") + elif os.path.isfile(fallback_psk): + found_path = fallback_psk + animated = True + utils.verbose_print(f"[Fallback] Using .psk: {fallback_psk}") + else: + raise FileNotFoundError( + f"Error: Asset not found in any path:\n" + f"- {pskx_path}\n- {psk_path}\n- {fallback_pskx}\n- {fallback_psk}" + ) + else: + raise FileNotFoundError( + f"Error: Asset not found:\n- {pskx_path}\n- {psk_path}" + ) - else: - raise FileNotFoundError(f"Error: Failed importing asset: {asset_psk_path_noext} was not found " - "(.psk/.pskx).") + # Import found mesh + utils.verbose_print(f"Importing \"{found_path}\"") + with contextlib.redirect_stdout(io.StringIO()): + if not pskimport(filepath=found_path, context=context, bImportbone=False): + raise RuntimeError(f"Error: Failed importing asset {found_path}") + + # Import .uemodel files + elif self.mesh_format == '.uemodel': + if os.path.isfile(uemodel_path := asset_uemodel_path_noext + self.mesh_format): + utils.verbose_print(f"Importing \"{uemodel_path}\"") + full_path = r"E:\Game_Dumps\Atomic Heart\Game\Meshes\FamaleCorpc_NotLifted_08_Idle_Static.uemodel" # example import, need proper implementation. 
+ directory = os.path.dirname(full_path) # "E:\Game_Dumps\Atomic Heart\Game\Meshes" + filename = os.path.basename(full_path) # "FamaleCorpc_NotLifted_08_Idle_Static.uemodel" + result = bpy.ops.uf.import_uemodel( + 'EXEC_DEFAULT', + directory=directory, + files=[{"name": filename}] + ) + print(result) + if 'FINISHED' not in result: + print("Import failed:", result) + animated = True + else: + raise FileNotFoundError(f"Error: Failed importing asset: {asset_uemodel_path_noext} was not found ({self.mesh_format}).") obj = context.object @@ -406,7 +1033,7 @@ def _import_asset_to_library(self, # handle materials new_materials = [] - # - read material descriptor file and identify associated materials + # # - read material descriptor file and identify associated materials try: # pylint: disable=unpacking-non-sequence _, mat_descriptors_paths = props_txt_parser.parse_props_txt(asset_psk_path_noext + '.props.txt', @@ -448,6 +1075,7 @@ def _import_asset_to_library(self, for mat in old_materials: try: bpy.data.materials.remove(mat, do_unlink=True) + print("removed old materials") except ReferenceError: # TODO: figure out why? pass @@ -491,6 +1119,7 @@ def _import_asset_to_library(self, data_to.materials = [data_from.materials[0]] new_mat = data_to.materials[0] + print("Only using one material from library, even though multiple might be present") except FileNotFoundError as e: new_mat = bpy.data.materials.new(f"{material_name}_Placeholder") @@ -509,17 +1138,23 @@ def _import_asset_to_library(self, else: obj.data.materials.append(mat) - # remove original materials - for mat in old_materials: - try: - bpy.data.materials.remove(mat, do_unlink=True) - except ReferenceError: # TODO: figure out why? - pass + # # remove original materials + # for mat in old_materials: + # try: + # bpy.data.materials.remove(mat, do_unlink=True) + # print("removed old materials = 2") + # except ReferenceError: # TODO: figure out why? 
+ # pass # obj.asset_generate_preview() asset_abs_lib_path = os.path.join(asset_library_dir, asset_path_local_noext) + '.blend' os.makedirs(os.path.dirname(asset_abs_lib_path), exist_ok=True) + + if self.overwrite and os.path.isfile(asset_abs_lib_path): + print(f"[Overwrite] Removing existing library asset: {asset_abs_lib_path}") + os.remove(asset_abs_lib_path) + bpy.data.libraries.write(asset_abs_lib_path, {obj, }, fake_user=True) # cleanup @@ -528,10 +1163,13 @@ def _import_asset_to_library(self, bpy.data.meshes.remove(mesh, do_unlink=True) for mat, _ in new_materials: - try: - bpy.data.materials.remove(mat, do_unlink=True) - except ReferenceError: - pass + # Only remove if we're not linking/attaching assets to the scene. + if not (self.link_to_scene or self.append_to_scene): + try: + bpy.data.materials.remove(mat, do_unlink=True) + print("removed new materials") + except ReferenceError: + pass if not has_external_db: db.save_db() diff --git a/umodel_tools/auto_load.py b/umodel_tools/auto_load.py index 3325d8f..64aa1f7 100644 --- a/umodel_tools/auto_load.py +++ b/umodel_tools/auto_load.py @@ -1,12 +1,10 @@ +import bpy import typing import inspect import pkgutil import importlib from pathlib import Path -from ordered_set import OrderedSet - -import bpy - +from .third_party.ordered_set import OrderedSet __all__ = ( "init", diff --git a/umodel_tools/game_profiles/__init__.py b/umodel_tools/game_profiles/__init__.py index 81218ac..9d578aa 100644 --- a/umodel_tools/game_profiles/__init__.py +++ b/umodel_tools/game_profiles/__init__.py @@ -53,7 +53,10 @@ def handle_material_texture_pbr(mat: bpy.types.Material, tex_type: str, tex_short_name: str, img_node: bpy.types.ShaderNodeTexImage, - ao_mix_node: bpy.types.ShaderNodeMix, + main_shader: bpy.types.ShaderNodeTree, + orm_shader: bpy.types.ShaderNodeTree, + odmask_shader: bpy.types.ShaderNodeTree, + ao_mix_node: bpy.types.ShaderNodeAmbientOcclusion, bsdf_node: bpy.types.ShaderNodeBsdfPrincipled, out_node: 
bpy.types.ShaderNodeOutputMaterial) -> None: """Handles adding texture maps to a PBR material. @@ -62,7 +65,7 @@ def handle_material_texture_pbr(mat: bpy.types.Material, :param tex_type: Current texture type. :param tex_short_name: Basename of the texture file without extension. :param img_node: Image node in the material's node tree. - :param ao_mix_node: Ambient Occlusion mixing node in the material's node tree. + :param main_shader: a custom shader node group. :param bsdf_node: PrincipledBSDF node in the material's node tree. :param out_node: Material output node in the material's node tree. """ diff --git a/umodel_tools/game_profiles/atomic_heart.py b/umodel_tools/game_profiles/atomic_heart.py new file mode 100644 index 0000000..48517d7 --- /dev/null +++ b/umodel_tools/game_profiles/atomic_heart.py @@ -0,0 +1,1009 @@ +"""This module implements support for Atomic Heart game. +Known issues: + - Blended materials are not properly supported. Currently the first texture is used. +""" + +import enum +import typing as t +from typing import TypeAlias, Tuple, Dict +import dataclasses + +import bpy +import lark + + +GAME_NAME = "Atomic Heart" +GAME_DESCRIPTION = "Atomic Heart (2023) by Mundfish, Focus Entertainment" + +# --- Constants for Nodes, Sockets, and Node Groups --- +ALPHA_MODE_CHANNEL = 'CHANNEL_PACKED' +NODE_FRAME = 'NodeFrame' + +# Nodes Shaders +BSDF_DIFFUSE_NODE = 'ShaderNodeBsdfDiffuse' +BSDF_EMISSION_NODE = 'ShaderNodeEmission' +BSDF_GLOSSY_NODE = 'ShaderNodeBsdfGlossy' +PRINCIPLED_SHADER_NODE = 'ShaderNodeBsdfPrincipled' +BSDF_TRANSPARENT_NODE = 'ShaderNodeBsdfTransparent' +BSDF_GLASS_NODE = 'ShaderNodeBsdfGlass' +SHADER_ADD_NODE = 'ShaderNodeAddShader' +SHADER_MIX_NODE = 'ShaderNodeMixShader' + +# Nodes Color +RGB_MIX_NODE = 'ShaderNodeMix' # +INVERT_NODE = 'ShaderNodeInvert' # + +# Nodes Input +TEXTURE_IMAGE_NODE = 'ShaderNodeTexImage' +ENVIRONMENT_IMAGE_NODE = 'ShaderNodeTexEnvironment' +COORD_NODE = 'ShaderNodeTexCoord' + +# Nodes Outputs +OUTPUT_NODE 
= 'ShaderNodeOutputMaterial' + +# Nodes Vector +MAPPING_NODE = 'ShaderNodeMapping' +NORMAL_MAP_NODE = 'ShaderNodeNormalMap' # + +# Nodes Convert +SHADER_NODE_MATH = 'ShaderNodeMath' # +SHADER_NODE_CLAMP = 'ShaderNodeClamp' # +RGB_TO_BW_NODE = 'ShaderNodeRGBToBW' +SHADER_NODE_SEPARATE_COLOR = 'ShaderNodeSeparateColor' # +SHADER_NODE_COMBINE_COLOR = 'ShaderNodeCombineColor' # + +# Node Groups +NODE_GROUP = 'ShaderNodeGroup' # +NODE_GROUP_INPUT = 'NodeGroupInput' # +NODE_GROUP_OUTPUT = 'NodeGroupOutput' # +SHADER_NODE_TREE = 'ShaderNodeTree' # + +# Node Socket Types +COLOR_SOCKET_NODE = 'NodeSocketColor' # +FLOAT_SOCKET_NODE = 'NodeSocketFloat' # +VECTOR_SOCKET_NODE = 'NodeSocketVector' # +BOOL_SOCKET_NODE = 'NodeSocketBool' # + +# Node Custom Groups +INVERT_CHANNEL_NODE = 'Invert Channel' +MIX_NORMAL_NODE = 'Normal Mix' +NORMAL_MASK_NODE = 'Normal Mask' +MAIN_SHADER_NODE = "MainShader" # +ORM_EXTRA_SHADER_NODE = "ORMExtraShader" # +ODMASK_SHADER_NODE = "OverlayDiffuseMask" # + +# --- Texture Map Types and Suffix Mapping --- +class TextureMapTypes(enum.Enum): + """All texture map types supported by the material generator.""" + DIFFUSE = enum.auto() + BASECOLOR = enum.auto() + MicroDiffuse = enum.auto() + Normal = enum.auto() + MicroNormal = enum.auto() + ORM = enum.auto() + MRA = enum.auto() + TOEH = enum.auto() + NOISE = enum.auto() + HEIGHT = enum.auto() + ALPHA = enum.auto() + MASK = enum.auto() + MICROMASK = enum.auto() + RGB_TINT = enum.auto() + DIRT = enum.auto() + + +# Suffix mapping (lowercase and uppercase) +SUFFIX_MAP = { + 'a': TextureMapTypes.DIRT, + 'A': TextureMapTypes.DIRT, + + 'd': TextureMapTypes.DIFFUSE, + 'D': TextureMapTypes.DIFFUSE, + 'diffuse': TextureMapTypes.DIFFUSE, + 'Diffuse': TextureMapTypes.DIFFUSE, + + 'bc': TextureMapTypes.BASECOLOR, + 'BC': TextureMapTypes.BASECOLOR, + 'albedo': TextureMapTypes.BASECOLOR, + 'albedo': TextureMapTypes.BASECOLOR, + 'plastic': TextureMapTypes.BASECOLOR, + 'Plastic': TextureMapTypes.BASECOLOR, + + 'n': 
TextureMapTypes.Normal, + 'N': TextureMapTypes.Normal, + 'normal': TextureMapTypes.Normal, + 'Normal': TextureMapTypes.Normal, + + 'orm': TextureMapTypes.ORM, + 'orm1': TextureMapTypes.ORM, + 'ORM': TextureMapTypes.ORM, + 'ORM1': TextureMapTypes.ORM, + 'mra': TextureMapTypes.MRA, + 'mra1': TextureMapTypes.MRA, + 'MRA': TextureMapTypes.MRA, + 'MRA1': TextureMapTypes.MRA, + + 'toe': TextureMapTypes.TOEH, + 'TOE': TextureMapTypes.TOEH, + 'toeh': TextureMapTypes.TOEH, + 'TOEH': TextureMapTypes.TOEH, + 'glass_dirt': TextureMapTypes.TOEH, + 'Glass_Dirt': TextureMapTypes.TOEH, + '2': TextureMapTypes.TOEH, # Overlay_2 + + 'mask': TextureMapTypes.MASK, + 'Mask': TextureMapTypes.MASK, + 'opacity': TextureMapTypes.MASK, + 'Opacity': TextureMapTypes.MASK, + + 'roughness': TextureMapTypes.NOISE, + 'Roughness': TextureMapTypes.NOISE, + 'r': TextureMapTypes.NOISE, + 'R': TextureMapTypes.NOISE, + + 'm': TextureMapTypes.MICROMASK, + 'M': TextureMapTypes.MICROMASK, + + 'BaseColor3': TextureMapTypes.DIFFUSE, + 'Normal3': TextureMapTypes.Normal, + + +} + +@dataclasses.dataclass +class MaterialContext: + bsdf_node: t.Optional[bpy.types.ShaderNodeBsdfPrincipled | bpy.types.ShaderNodeBsdfDiffuse] + desc_ast: lark.Tree + use_pbr: bool + msk_index: int = dataclasses.field(default=0) + diffuse_connected: bool = dataclasses.field(default=False) + linked_maps: set[TextureMapTypes.DIFFUSE] = dataclasses.field(default_factory=set) + +_state_buffer: dict[bpy.types.Material, MaterialContext] = {} + +def process_material(mat: bpy.types.Material, desc_ast: lark.Tree, use_pbr: bool): # pylint: disable=unused-argument + _state_buffer[mat] = MaterialContext(bsdf_node=None, desc_ast=desc_ast, use_pbr=use_pbr) + +def do_process_texture(tex_type: str, tex_short_name: str) -> bool: # pylint: disable=unused-argument + return bool(_short_name_to_tex_type(tex_short_name)) + +def is_diffuse_tex_type(tex_type: str, tex_short_name: str) -> bool: # pylint: disable=unused-argument + return 
_short_name_to_tex_type(tex_short_name) == {TextureMapTypes.DIFFUSE, TextureMapTypes.MASK} + +def handle_material_texture_pbr(mat: bpy.types.Material, + tex_type: str, # unused here + tex_short_name: str, + img_node: bpy.types.ShaderNodeTexImage, + main_shader: bpy.types.ShaderNodeTree, + orm_shader: bpy.types.ShaderNodeTree, + odmask_shader: bpy.types.ShaderNodeTree, + ao_mix_node: bpy.types.ShaderNodeAmbientOcclusion, + bsdf_node: bpy.types.ShaderNodeBsdfPrincipled, + out_node: bpy.types.ShaderNodeOutputMaterial): + # This is not validated. + mat_ctx = _state_buffer[mat] + mat_ctx.bsdf_node = bsdf_node + + main_group = main_shader + orm_group = orm_shader + odm_group = odmask_shader + + bsdf_node.location = (100, 20) + out_node.location = (600, 0) + ao_mix_node.location = (-100, -200) + ao_mix_node.inputs['B'].default_value = (0.0,0.0,0.0,1.0) # Default Black + + # Determine the texture type to process: + bl_tex_type = _short_name_to_tex_type(tex_short_name) + # Avoid processing the same texture type twice + if bl_tex_type in mat_ctx.linked_maps: + return + + def create_mix_node(mat, loc_x, loc_y, color, default_value, blend_type='MIX'): + mix_node = mat.node_tree.nodes.new('ShaderNodeMixRGB') + mix_node.location = (loc_x, loc_y) + + mix_node.data_type = 'RGBA' + mix_node.blend_type = blend_type + + # Extract only the RGB values, discarding the alpha + rgb_color = color[:3] if color is not None else default_value[:3] + + # Ensure the color is a 4-tuple (RGBA) by appending 1.0 as alpha if necessary + rgba_color = color if len(color) == 4 else (color[0], color[1], color[2], 1.0) + + # Assign the RGB values to the inputs[2] of the Mix node + mix_node.inputs[2].default_value = rgba_color + + return mix_node + + def create_rgb_mask_tint_group(): + group_name = "RGBMaskTintGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + node_group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Inputs + 
node_group.interface.new_socket(name="Image", description="Color input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color R", description="Red channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color G", description="Green channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color B", description="Blue channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color A", description="Alpha channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Tint", in_out='INPUT', socket_type=BOOL_SOCKET_NODE) + + # Outputs + node_group.interface.new_socket(name="Color", description="Color output", in_out='OUTPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="R", description="Red channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="G", description="Green channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="B", description="Blue channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="A", description="Alpha channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + + nodes = node_group.nodes + links = node_group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-1000, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (600, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-700, 0) + + # R Mix + r_mix = nodes.new("ShaderNodeMixRGB") + r_mix.blend_type = 'MULTIPLY' + r_mix.inputs['Fac'].default_value = 1.0 + r_mix.label = 'R Mix' + r_mix.location = (-400, 200) + + # G Mix + g_mix = nodes.new("ShaderNodeMixRGB") + g_mix.blend_type = 'MULTIPLY' + 
g_mix.inputs['Fac'].default_value = 1.0 + g_mix.label = 'G Mix' + g_mix.location = (-400, 0) + + # B Mix + b_mix = nodes.new("ShaderNodeMixRGB") + b_mix.blend_type = 'MULTIPLY' + b_mix.inputs['Fac'].default_value = 1.0 + b_mix.label = 'B Mix' + b_mix.location = (-400, -200) + + # Add R + G + add_rg = nodes.new("ShaderNodeMixRGB") + add_rg.blend_type = 'ADD' + add_rg.inputs['Fac'].default_value = 1.0 + add_rg.label = 'Add RG' + add_rg.location = (-100, 100) + + # Add above + B + add_rgb = nodes.new("ShaderNodeMixRGB") + add_rgb.blend_type = 'ADD' + add_rgb.inputs['Fac'].default_value = 1.0 + add_rgb.label = 'Add RGB' + add_rgb.location = (200, 0) + + # Tint switch + switch = nodes.new("ShaderNodeMix") + # switch.blend_type = 'MIX' + switch.label = 'Tint Switch' + switch.location = (400, 100) + + # Link inputs + links.new(input_node.outputs['Image'], separate.inputs['Color']) + links.new(input_node.outputs['Image'], switch.inputs['A']) + + links.new(separate.outputs['Red'], r_mix.inputs['Color1']) + links.new(separate.outputs['Green'], g_mix.inputs['Color1']) + links.new(separate.outputs['Blue'], b_mix.inputs['Color1']) + + links.new(input_node.outputs['Color R'], r_mix.inputs['Color2']) + links.new(input_node.outputs['Color G'], g_mix.inputs['Color2']) + links.new(input_node.outputs['Color B'], b_mix.inputs['Color2']) + + links.new(r_mix.outputs['Color'], add_rg.inputs['Color1']) + links.new(g_mix.outputs['Color'], add_rg.inputs['Color2']) + + links.new(add_rg.outputs['Color'], add_rgb.inputs['Color1']) + links.new(b_mix.outputs['Color'], add_rgb.inputs['Color2']) + links.new(add_rgb.outputs['Color'], switch.inputs['B']) + links.new(input_node.outputs['Tint'], switch.inputs[0]) # Bool switch + + # Output final color + links.new(switch.outputs[0], output_node.inputs['Color']) + + # Optional passthrough channels + links.new(separate.outputs['Red'], output_node.inputs[1]) + links.new(separate.outputs['Green'], output_node.inputs[2]) + 
links.new(separate.outputs['Blue'], output_node.inputs[3]) + + # # Alpha passthrough + # separate_alpha = nodes.new("ShaderNodeSeparateRGBA") + # separate_alpha.location = (-700, -300) + # links.new(input_node.outputs['Mask Image'], separate_alpha.inputs['Image']) + # links.new(separate_alpha.outputs['Alpha'], output_node.inputs[4]) + + links.new(input_node.outputs['Color A'], output_node.inputs['A']) # Temp Connection + + return node_group + + def create_roughness_mask_group(): + group_name = "RoughnessMaskGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + node_group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Inputs + node_group.interface.new_socket(name="ORM Image", description="RGB: Roughness, Metallic, AO", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="AO Scale", description="B channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Roughness Scale", description="R channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Metallic Scale", description="G channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + + # Outputs + node_group.interface.new_socket(name="AO", description="AO output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Roughness", description="Roughness output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Metallic", description="Metallic output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + + nodes = node_group.nodes + links = node_group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-600, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (600, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-300, 0) + + # Multipliers + r_mult = 
nodes.new("ShaderNodeMath") + r_mult.operation = 'MULTIPLY' + r_mult.label = 'AO * Scale' + r_mult.location = (100, 200) + + g_mult = nodes.new("ShaderNodeMath") + g_mult.operation = 'MULTIPLY' + g_mult.label = 'Roughness * Scale' + g_mult.location = (100, 0) + + b_mult = nodes.new("ShaderNodeMath") + b_mult.operation = 'MULTIPLY' + b_mult.label = 'Metallic * Scale' + b_mult.location = (100, -200) + + # Connect image to separate + links.new(input_node.outputs['ORM Image'], separate.inputs['Color']) + + # R channel + links.new(separate.outputs['Red'], r_mult.inputs[0]) + links.new(input_node.outputs['AO Scale'], r_mult.inputs[1]) + links.new(r_mult.outputs[0], output_node.inputs['AO']) + + # G channel + links.new(separate.outputs['Green'], g_mult.inputs[0]) + links.new(input_node.outputs['Roughness Scale'], g_mult.inputs[1]) + links.new(g_mult.outputs[0], output_node.inputs['Roughness']) + + # B channel + links.new(separate.outputs['Blue'], b_mult.inputs[0]) + links.new(input_node.outputs['Metallic Scale'], b_mult.inputs[1]) + links.new(b_mult.outputs[0], output_node.inputs['Metallic']) + return node_group + + def create_directx_to_opengl_normal_group(): + group_name = "DirectXToOpenGLNormal" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Define socket types + BOOL_SOCKET_NODE = 'NodeSocketBool' + + # Inputs + group.interface.new_socket(name="DirectX Normal", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + group.interface.new_socket(name="Flip Y", in_out='INPUT', socket_type=BOOL_SOCKET_NODE) + group.interface.new_socket(name="Strength", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + + # Outputs + group.interface.new_socket(name="OpenGL Normal", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + + nodes = group.nodes + links = group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-800, 0) + + output_node = 
nodes.new("NodeGroupOutput") + output_node.location = (400, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-600, 0) + + # Invert math node: 1 - G + invert = nodes.new("ShaderNodeMath") + invert.operation = 'SUBTRACT' + invert.inputs[0].default_value = 1.0 + invert.location = (-300, 100) + + # Mix: if Flip Y is enabled, use inverted green, otherwise pass original + mix_green = nodes.new("ShaderNodeMix") + mix_green.data_type = 'FLOAT' + mix_green.location = (-100, 100) + + combine = nodes.new("ShaderNodeCombineColor") + combine.location = (100, 0) + + normal_map = nodes.new("ShaderNodeNormalMap") + normal_map.inputs[0].default_value = 1.2 + normal_map.location = (250, 0) + + # Connect DirectX input to Separate Color + links.new(input_node.outputs['DirectX Normal'], separate.inputs['Color']) + links.new(input_node.outputs['Strength'], normal_map.inputs[0]) + + # Invert G + links.new(separate.outputs['Green'], invert.inputs[1]) + links.new(invert.outputs[0], mix_green.inputs[2]) # Inverted G + links.new(separate.outputs['Green'], mix_green.inputs[3]) # Original G + links.new(input_node.outputs['Flip Y'], mix_green.inputs['Factor']) + + # Combine back + links.new(separate.outputs['Red'], combine.inputs['Red']) + links.new(mix_green.outputs[0], combine.inputs['Green']) + links.new(separate.outputs['Blue'], combine.inputs['Blue']) + + # Plug into normal map node + links.new(combine.outputs['Color'], normal_map.inputs['Color']) + + # Final output + links.new(normal_map.outputs['Normal'], output_node.inputs['OpenGL Normal']) + + return group + + def create_mapping_group(): + group_name = "MappingGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + VECTOR_SOCKET_NODE = 'NodeSocketVector' + + # Inputs + group.interface.new_socket(name="Scale", in_out='INPUT', socket_type=VECTOR_SOCKET_NODE) + + # Outputs + 
group.interface.new_socket(name="UV0", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + group.interface.new_socket(name="UV1", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + group.interface.new_socket(name="UV2", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + + nodes = group.nodes + links = group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-800, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (400, 0) + + # Attribute Node (Custom UV Set) + attribute = nodes.new('ShaderNodeAttribute') + attribute.attribute_name = "EXTRAUVS0" + attribute.location = (-600, 0) + + attribute1 = nodes.new('ShaderNodeAttribute') + attribute1.attribute_name = "EXTRAUVS1" + attribute1.location = (-600, -150) + + attribute2 = nodes.new('ShaderNodeAttribute') + attribute2.attribute_name = "EXTRAUVS2" + attribute2.location = (-600, -300) + + # Mapping Node + mapping = nodes.new('ShaderNodeMapping') + mapping.vector_type = 'POINT' + mapping.location = (-200, 0) + + mapping1 = nodes.new('ShaderNodeMapping') + mapping1.vector_type = 'POINT' + mapping1.location = (-100, 0) + + mapping2 = nodes.new('ShaderNodeMapping') + mapping2.vector_type = 'POINT' + mapping2.location = (0, 0) + + # Connect nodes + links.new(input_node.outputs['Scale'], mapping.inputs['Scale']) + links.new(input_node.outputs['Scale'], mapping1.inputs['Scale']) + links.new(input_node.outputs['Scale'], mapping2.inputs['Scale']) + links.new(attribute.outputs['Vector'], mapping.inputs['Vector']) + links.new(attribute1.outputs['Vector'], mapping1.inputs['Vector']) + links.new(attribute2.outputs['Vector'], mapping2.inputs['Vector']) + links.new(mapping.outputs['Vector'], output_node.inputs[0]) # UV0 + links.new(mapping1.outputs['Vector'], output_node.inputs[1]) # UV1 + links.new(mapping2.outputs['Vector'], output_node.inputs[2]) # UV2 + + return group + + # Mark texture as processed + mat_ctx.linked_maps.add(bl_tex_type) + + # Ensure the material uses 
nodes and clear any old nodes: + mat.use_nodes = True + + # --- Reuse or create the custom node group --- + # main shader + if "MainShader" in mat.node_tree.nodes: + main_node = mat.node_tree.nodes["MainShader"] + print(f"'MainShader' already exists in material '{mat.name}'") + else: + main_node = mat.node_tree.nodes.new("ShaderNodeGroup") + main_node.name = "MainShader" + main_node.node_tree = main_shader # your already available node group + main_node.location = (-250, 0) + main_node.node_tree.use_fake_user = True + if main_node.node_tree.interface.items_tree: + main_node.node_tree.interface.active_index = 0 + main_node.node_tree.interface.active = main_node.node_tree.interface.items_tree[0] + main_node.node_tree.interface.active.default_value = (1.0, 1.0, 1.0, 1.0) + # main_node.inputs[3].default_value = (0.735,0.735,0.735,1.0) + print(f"Inserted 'MainShader' into material '{mat.name}'") + + # ORM node group + if "ORMExtraShader" in mat.node_tree.nodes: + orm_node = mat.node_tree.nodes["ORMExtraShader"] + print(f"'ORMExtraShader' already exists in material '{mat.name}'") + else: + orm_node = mat.node_tree.nodes.new("ShaderNodeGroup") + orm_node.name = "ORMExtraShader" + orm_node.node_tree = orm_shader + orm_node.location = (-250, 350) + orm_node.node_tree.use_fake_user = True + # Default values + orm_node.inputs[1].default_value = 8.0 # Multiply + orm_node.inputs[2].default_value = -5.9 # Min + orm_node.inputs[3].default_value = 0.3 # Max + orm_node.inputs[5].default_value = 0.0 # Red Param Value + orm_node.inputs[7].default_value = 0.0 # Green Param Value + orm_node.inputs[9].default_value = 0.0 # Blue Param Value + print(f"Inserted 'ORMExtraShader' into material '{mat.name}'") + + # OD Mask Shader node group + if "OverlayDiffuseMask" in mat.node_tree.nodes: + odm_node = mat.node_tree.nodes["OverlayDiffuseMask"] + print(f"'OverlayDiffuseMask' already exists in material '{mat.name}'") + else: + odm_node = mat.node_tree.nodes.new("ShaderNodeGroup") + 
odm_node.name = "OverlayDiffuseMask" + odm_node.node_tree = odmask_shader + odm_node.location = (-480, 200) + odm_node.node_tree.use_fake_user = True + # Default values + odm_node.inputs[1].default_value = (0.9,0.7,0.6,1.0) # Simple Tint + odm_node.inputs[4].default_value = (0.735,0.735,0.735,1.0) # Diffuse Red + odm_node.inputs[7].default_value = (0.735,0.735,0.735,1.0) # Diffuse Green + odm_node.inputs[10].default_value = (0.735,0.735,0.735,1.0) # Diffuse Blue + print(f"Inserted 'OverlayDiffuseMask' into material '{mat.name}'") + + if "MappingGroup" in mat.node_tree.nodes: + mapping_node = mat.node_tree.nodes["MappingGroup"] + print(f"'Mapping' already exists in material '{mat.name}'") + else: + mapping_node = mat.node_tree.nodes.new("ShaderNodeGroup") + mapping_node.name = "MappingGroup" + mapping_node.node_tree = create_mapping_group() + mapping_node.location = (-1500, -300) + mapping_node.node_tree.use_fake_user = True + print(f"Inserted 'MappingGroup' into material '{mat.name}'") + + # Link custom node groups + mat.node_tree.links.new(ao_mix_node.outputs['Result'], bsdf_node.inputs[2]) # Mix to Roughness + mat.node_tree.links.new(orm_node.outputs[0], ao_mix_node.inputs[0]) # ORM Shader to Mix + mat.node_tree.links.new(odm_node.outputs[0], main_node.inputs[0]) # To Diffuse Texture mainshader + mat.node_tree.links.new(main_node.outputs[0], bsdf_node.inputs[0]) # To Base Color BSDF + + main_node.inputs[0].default_value = (0.0,0.0,0.0,1.0) # color + main_node.inputs[1].default_value = (0.0,0.0,0.0,1.0) # AO + main_node.inputs[2].default_value = 0.8 # Gamma Strength + + mapping_node.inputs[0].default_value[0] = 1 # X + mapping_node.inputs[0].default_value[1] = 1 # Y + mapping_node.inputs[0].default_value[2] = 1 # Z + + # Process the current texture: + match bl_tex_type: + case TextureMapTypes.DIFFUSE: + mat_ctx = _state_buffer[mat] + mask_colors, vector_params = _get_vector_and_mask_colors(ast=mat_ctx.desc_ast) + + # Retrieve colors for each mask channel + 
simpletint = vector_params.get('simple tint', (1, 1, 1, 1)) + watercolor = vector_params.get('water color', (0, 0, 0, 1)) + + odcolor_red = vector_params.get('overlay diffuse color (red channel)', (1, 1, 1, 1)) + odcolor_green = vector_params.get('overlay diffuse color (green channel)', (1, 1, 1, 1)) + odcolor_blue = vector_params.get('overlay diffuse color (blue channel)', (1, 1, 1, 1)) + + tint3 = mask_colors.get('tint 0 color', (simpletint)) or mask_colors.get('tint 0.0 color (Main Layer)', (simpletint)) + tint2 = mask_colors.get('tint 0.33 color', (odcolor_red)) or mask_colors.get('tint 0.33 color (Main Layer)', (odcolor_red)) + tint1 = mask_colors.get('tint 0.66 color', (odcolor_green)) or mask_colors.get('tint 0.66 color (Main Layer)', (odcolor_green)) + tint0 = mask_colors.get('tint 1.0 color', (odcolor_blue)) or mask_colors.get('tint 1.0 color (Main Layer)', (odcolor_blue)) + + # Create the RGB Mask Tint group (or get it if already exists) + tint_group = create_rgb_mask_tint_group() + + # Add node to material + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = tint_group + group_node.location = (-750, 400) + + group_node.inputs['Tint'].default_value = True + + # Connect mask texture to group input + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Image']) + + group_node.inputs['Color R'].default_value = tint3 + group_node.inputs['Color G'].default_value = tint2 + group_node.inputs['Color B'].default_value = tint1 + group_node.inputs['Color A'].default_value = tint0 + + # Connect final tinted color to ODM node + if not mat_ctx.diffuse_connected: + mat.node_tree.links.new(img_node.outputs['Color'], odm_node.inputs[0]) # color + mat.node_tree.links.new(group_node.outputs['Color'], odm_node.inputs[1]) # tinted output + odm_node.inputs[1].default_value = simpletint # Simple tint + odm_node.inputs[4].default_value = odcolor_red # Red overlay + odm_node.inputs[7].default_value = odcolor_green # Green overlay + 
odm_node.inputs[10].default_value = odcolor_blue # Blue overlay + mat.node_tree.links.new(group_node.outputs['R'], odm_node.inputs[3]) + mat.node_tree.links.new(group_node.outputs['G'], odm_node.inputs[6]) + mat.node_tree.links.new(group_node.outputs['B'], odm_node.inputs[9]) + # mat.node_tree.links.new(group_node.outputs['Simple Tint'], odm_node.inputs[1]) + # Optional alpha + # mat.node_tree.links.new(group_node.outputs['A'], bsdf_node.inputs['Alpha']) + + img_node.select = True + mat.node_tree.nodes.active = img_node + img_node.location = (-1050, 150) + + mat_ctx.msk_index += 1 + # case TextureMapTypes.DIRT: + # mat_ctx = _state_buffer[mat] + # mask_colors, vector_params = _get_vector_and_mask_colors(ast=mat_ctx.desc_ast) + + # img_node.image.colorspace_settings.name = 'Non-Color' + + # # Retrieve colors for each mask channel + # tint3 = mask_colors.get('tint 0 color}', (0, 0, 0, 1)) or mask_colors.get('tint 0.0 color (Main Layer)}', (0, 0, 0, 1)) + # tint2 = mask_colors.get('tint 0.33 color}', (0, 0, 0, 1)) or mask_colors.get('tint 0.33 color (Main Layer)}', (0, 0, 0, 1)) + # tint1 = mask_colors.get('tint 0.66 color}', (0, 0, 0, 1)) or mask_colors.get('tint 0.66 color (Main Layer)}', (0, 0, 0, 1)) + # tint0 = mask_colors.get('tint 1.0 color}', (0, 0, 0, 1)) or mask_colors.get('tint 1.0 color (Main Layer)}', (0, 0, 0, 1)) + + # simpletint = vector_params.get('simple tint', (0, 0, 0, 1)) + # watercolor = vector_params.get('water color', (0, 0, 0, 1)) + + # odcolor_red = vector_params.get('overlay diffuse color (red channel)', (0, 0, 0, 1)) + # odcolor_green = vector_params.get('overlay diffuse color (green channel)', (0, 0, 0, 1)) + # odcolor_blue = vector_params.get('overlay diffuse color (blue channel)', (0, 0, 0, 1)) + + # # Create the RGB Mask Tint group (or get it if already exists) + # tint_group = create_rgb_mask_tint_group() + + # # Add node to material + # group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + # group_node.node_tree = tint_group + # 
group_node.location = (-750, 300) + + # # Connect mask texture to group input + # mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Image']) + + # group_node.inputs['Color R'].default_value = tint0 + # group_node.inputs['Color G'].default_value = tint1 + # group_node.inputs['Color B'].default_value = tint2 + # group_node.inputs['Color A'].default_value = tint3 + + # # Connect final tinted color to ODM node + # if not mat_ctx.diffuse_connected: + # mat.node_tree.links.new(img_node.outputs['Color'], odm_node.inputs[0]) # color + # mat.node_tree.links.new(group_node.outputs['Color'], odm_node.inputs[1]) # tinted output + # odm_node.inputs[1].default_value = simpletint # Simple tint + # odm_node.inputs[2].default_value = odcolor_red # Red overlay + # odm_node.inputs[5].default_value = odcolor_green # Green overlay + # odm_node.inputs[8].default_value = odcolor_blue # Blue overlay + # mat.node_tree.links.new(group_node.outputs['R'], odm_node.inputs[4]) + # mat.node_tree.links.new(group_node.outputs['G'], odm_node.inputs[7]) + # mat.node_tree.links.new(group_node.outputs['B'], odm_node.inputs[10]) + # # mat.node_tree.links.new(group_node.outputs['Simple Tint'], odm_node.inputs[1]) + # # Optional alpha + # # mat.node_tree.links.new(group_node.outputs['A'], bsdf_node.inputs['Alpha']) + + # img_node.select = True + # mat.node_tree.nodes.active = img_node + # img_node.location = (-1050, 250) + + # mat_ctx.msk_index += 1 + case TextureMapTypes.BASECOLOR: + mat_ctx = _state_buffer[mat] + mask_colors, vector_params = _get_vector_and_mask_colors(ast=mat_ctx.desc_ast) + + img_node.location = (-750, 150) + img_node.image.colorspace_settings.name = 'sRGB' + + # Retrieve colors for each mask channel + tintfl = vector_params.get('tint (fl)', (1, 1, 1, 1)) + simpletint = vector_params.get('simple tint', (tintfl)) + watercolor = vector_params.get('water color', (simpletint)) + + odcolor_red = vector_params.get('overlay diffuse color (red channel)', (1, 1, 1, 1)) + 
odcolor_green = vector_params.get('overlay diffuse color (green channel)', (1, 1, 1, 1)) + odcolor_blue = vector_params.get('overlay diffuse color (blue channel)', (1, 1, 1, 1)) + + tint3 = mask_colors.get('tint 0 color', (simpletint)) or mask_colors.get('tint 0.0 color (Main Layer)', (simpletint)) + tint2 = mask_colors.get('tint 0.33 color', (odcolor_red)) or mask_colors.get('tint 0.33 color (Main Layer)', (odcolor_red)) + tint1 = mask_colors.get('tint 0.66 color', (odcolor_green)) or mask_colors.get('tint 0.66 color (Main Layer)', (odcolor_green)) + tint0 = mask_colors.get('tint 1.0 color', (odcolor_blue)) or mask_colors.get('tint 1.0 color (Main Layer)', (odcolor_blue)) + + # Create the RGB Mask Tint group (or get it if already exists) + tint_group = create_rgb_mask_tint_group() + + # Add node to material + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = tint_group + group_node.location = (-750, 400) + + # Connect mask texture to group input + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Image']) + + group_node.inputs['Color R'].default_value = tint3 + group_node.inputs['Color G'].default_value = tint2 + group_node.inputs['Color B'].default_value = tint1 + group_node.inputs['Color A'].default_value = tint0 + + mat.node_tree.links.new(mapping_node.outputs['UV1'], img_node.inputs[0]) + + # Connect final tinted color to ODM node + if not mat_ctx.diffuse_connected: + mat.node_tree.links.new(img_node.outputs['Color'], odm_node.inputs[0]) # color + mat.node_tree.links.new(group_node.outputs['Color'], odm_node.inputs[1]) # tinted output + odm_node.inputs[1].default_value = simpletint # Simple tint + odm_node.inputs[4].default_value = odcolor_red # Red overlay + odm_node.inputs[7].default_value = odcolor_green # Green overlay + odm_node.inputs[10].default_value = odcolor_blue # Blue overlay + mat.node_tree.links.new(group_node.outputs['R'], odm_node.inputs[3]) + mat.node_tree.links.new(group_node.outputs['G'], 
odm_node.inputs[6]) + mat.node_tree.links.new(group_node.outputs['B'], odm_node.inputs[9]) + case TextureMapTypes.ORM: + img_node.location = (-750, -150) + img_node.image.colorspace_settings.name = 'Non-Color' + + rough_group = create_roughness_mask_group() + + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = rough_group + group_node.location = (-450, -150) + + # Connect texture + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['ORM Image']) + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs[2]) + mat.node_tree.links.new(img_node.outputs['Color'], orm_node.inputs[0]) + + # Optional scales + group_node.inputs['Roughness Scale'].default_value = 1.0 + group_node.inputs['Metallic Scale'].default_value = 1.0 + group_node.inputs['AO Scale'].default_value = 1.0 + + # Connect to BSDF + mat.node_tree.links.new(group_node.outputs['AO'], main_node.inputs['AO']) + mat.node_tree.links.new(group_node.outputs['Metallic'], bsdf_node.inputs['Metallic']) + mat.node_tree.links.new(group_node.outputs['Roughness'], ao_mix_node.inputs['A']) # Roughness Mask to Mix + case TextureMapTypes.MRA: + img_node.location = (-750, -150) + img_node.image.colorspace_settings.name = 'Non-Color' + + rough_group = create_roughness_mask_group() + + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = rough_group + group_node.location = (-450, -150) + + # Connect texture + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['ORM Image']) + + # Optional scales + group_node.inputs['Roughness Scale'].default_value = 1.0 + group_node.inputs['Metallic Scale'].default_value = 1.0 + group_node.inputs['AO Scale'].default_value = 1.0 + + # Connect to BSDF + # Compare to ORM map + mat.node_tree.links.new(group_node.outputs['AO'], bsdf_node.inputs['Metallic']) + mat.node_tree.links.new(group_node.outputs['Roughness'], bsdf_node.inputs['Roughness']) + 
mat.node_tree.links.new(group_node.outputs['Metallic'], main_node.inputs['AO']) + case TextureMapTypes.Normal: + img_node.location = (-750, -450) + img_node.image.colorspace_settings.name = 'Non-Color' + + normal_map_group = create_directx_to_opengl_normal_group() + node = mat.node_tree.nodes.new("ShaderNodeGroup") + node.node_tree = normal_map_group + node.inputs[2].default_value = 2.0 # Normal Strength + node.location = (-400, -450) + mat.node_tree.links.new(img_node.outputs['Color'], node.inputs['DirectX Normal']) + + # Enable Flip Y + node.inputs['Flip Y'].default_value = False + + # Connect to BSDF + mat.node_tree.links.new(node.outputs['OpenGL Normal'], bsdf_node.inputs['Normal']) + + if TextureMapTypes.Normal == +1: # Check for other normal textures + mat.node_tree.links.new(img_node.outputs['Color'], node.inputs['DirectX Normal']) + case TextureMapTypes.MASK: + img_node.location = (-750, -750) + img_node.image.colorspace_settings.name = 'Non-Color' + + # Create the RGB Mask Tint group + tint_group = create_rgb_mask_tint_group() + + # Add node to material + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = tint_group + group_node.location = (-500, -750) + + # Connect mask texture to group input + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Image']) + + # mat.node_tree.links.new(img_node.outputs['Color'], main_node.inputs[6]) + # mat.node_tree.links.new(mapping.outputs['Vector'], img_node.inputs[0]) + case TextureMapTypes.TOEH: + img_node.location = (-750, -750) + img_node.image.colorspace_settings.name = 'Non-Color' + toeh_split_node = mat.node_tree.nodes.new(SHADER_NODE_SEPARATE_COLOR) + toeh_split_node.location = (-450, -750) + displacement_node = mat.node_tree.nodes.new('ShaderNodeDisplacement') + displacement_node.inputs[1].default_value = 0.0 # Midlevel + displacement_node.inputs[2].default_value = 0.2 # Scale + displacement_node.location = (-250, -750) + 
mat.node_tree.links.new(img_node.outputs['Color'], toeh_split_node.inputs['Color']) + mat.node_tree.links.new(toeh_split_node.outputs['Red'], ao_mix_node.inputs['B']) # Roughness Mask to Mix + # if tex_short_name == 'toeh' or tex_short_name == 'toe': # so overlay_2 dont apply opacity + mat.node_tree.links.new(toeh_split_node.outputs['Green'], bsdf_node.inputs['Alpha']) + mat.node_tree.links.new(toeh_split_node.outputs['Blue'], displacement_node.inputs['Height']) # Displacement + mat.node_tree.links.new(displacement_node.outputs['Displacement'], out_node.inputs['Displacement']) + case TextureMapTypes.DIRT: + img_node.location = (-1350, 200) + img_node.image.colorspace_settings.name = 'Non-Color' + dirt_split_node = mat.node_tree.nodes.new(SHADER_NODE_SEPARATE_COLOR) + dirt_split_node.location = (-1250, 350) + mat.node_tree.links.new(img_node.outputs['Color'], dirt_split_node.inputs['Color']) + # mat.node_tree.links.new(img_node.outputs['Color'], odm_node.inputs[0]) + mat.node_tree.links.new(dirt_split_node.outputs['Red'], orm_node.inputs[4]) + mat.node_tree.links.new(dirt_split_node.outputs['Green'], orm_node.inputs[6]) + mat.node_tree.links.new(dirt_split_node.outputs['Blue'], orm_node.inputs[8]) + mat.node_tree.links.new(mapping_node.outputs['UV0'], img_node.inputs[0]) + case TextureMapTypes.NOISE: + img_node.location = (-1050, 0) + img_node.image.colorspace_settings.name = 'Non-Color' + noise_split_node = mat.node_tree.nodes.new(SHADER_NODE_SEPARATE_COLOR) + noise_split_node.location = (-1050, 200) + mat.node_tree.links.new(img_node.outputs['Color'], noise_split_node.inputs['Color']) + mat.node_tree.links.new(noise_split_node.outputs['Red'], orm_node.inputs[4]) + mat.node_tree.links.new(noise_split_node.outputs['Green'], orm_node.inputs[6]) + mat.node_tree.links.new(noise_split_node.outputs['Green'], ao_mix_node.inputs['A']) # Roughness Mask to Mix + mat.node_tree.links.new(noise_split_node.outputs['Blue'], orm_node.inputs[8]) + 
mat.node_tree.links.new(mapping_node.outputs['UV1'], img_node.inputs[0]) + case TextureMapTypes.MICROMASK: + img_node.location = (-1050, -350) + img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(img_node.outputs['Color'], main_node.inputs[1]) # to AO, Testing + mat.node_tree.links.new(mapping_node.outputs['UV1'], img_node.inputs[0]) + + # case TextureMapTypes.RGB_TINT: + # mat_ctx.msk_index += 1 + +def handle_material_texture_simple(mat: bpy.types.Material, + tex_type: str, # pylint: disable=unused-argument + tex_short_name: str, # pylint: disable=unused-argument + img_node: bpy.types.ShaderNodeTexImage, + bsdf_node: bpy.types.ShaderNodeBsdfDiffuse): + _state_buffer[mat].bsdf_node = bsdf_node + + # Do NOT clear the node tree here. Instead, just link the texture. + mat.node_tree.links.new(img_node.outputs['Color'], bsdf_node.inputs['Color']) + img_node.select = True + mat.node_tree.nodes.active = img_node + print("Linked image color to BSDF color") + +def end_process_material(mat: bpy.types.Material): + mat_ctx = _state_buffer[mat] + + if mat_ctx.use_pbr and mat_ctx.bsdf_node is not None: + # set defaults + mat_ctx.bsdf_node.inputs[1].default_value = 0.0 # Metallic + mat_ctx.bsdf_node.inputs[2].default_value = 0.1 # Roughness + mat_ctx.bsdf_node.inputs[3].default_value = 1.5 # IOR + mat_ctx.bsdf_node.inputs[15].default_value = 0.8 # Specular Anisotropic + mat_ctx.bsdf_node.inputs[24].default_value = 0.1 # Sheen Weight + mat_ctx.bsdf_node.inputs[20].default_value = 0.030 # Clearcoat roughness + mat_ctx.bsdf_node.inputs[21].default_value = 1.5 # Clearcoat IOR + + # del _state_buffer[mat] + +# Non-interface functions below +def _short_name_to_tex_type(tex_short_name: str) -> t.Optional[TextureMapTypes]: + """Convert short texture name to a recognized texture map type if possible. + + :return: TextureMapType or None. 
+ """ + return SUFFIX_MAP.get(tex_short_name.lower().split('_')[-1]) + + +Color: t.TypeAlias = tuple[float, float, float, float] + +def _get_vector_and_mask_colors(ast: lark.Tree) -> tuple[Dict[str, Color], Dict[str, Color]]: + """Separates VectorParameterValues into indexed mask colors and named parameters. + + Returns: + mask_colors: 'color 1', 'color 2', etc. + named_params: 'Simple Tint', 'Water Color', etc. + """ + mask_colors = {} + named_params = {} + + for child in ast.children: + if child.data != 'definition': + continue + def_name, array_qual, value = child.children + + if def_name != 'VectorParameterValues': + continue + if array_qual is None or value.data != 'structured_block': + continue + + for tex_param_def in value.children: + _, _, tex_param = tex_param_def.children + param_info, param_val, _ = tex_param.children # ParameterInfo, ParameterValue, ParameterName + + # Default vector (0,0,0,1) + color = {'r': 0.0, 'g': 0.0, 'b': 0.0, 'a': 1.0} + + if param_val.children[2].data != 'structured_block': + continue + + color_vec = param_val.children[2] + + # Extract color + for channel_def in color_vec.children: + channel_name, _, channel = channel_def.children + cname = channel_name.lower() + if cname in {'r', 'g', 'b', 'a'}: + color[cname] = float(channel.children[0].value) + + # Extract name from ParameterInfo + name_tree = param_info.children[2].children[0].children[2] + param_name = name_tree.children[0].value.strip().lower() + + # Split between mask colors and named ones + if param_name.startswith("color "): + mask_colors[param_name] = (color['r'], color['g'], color['b'], color['a']) + else: + named_params[param_name] = (color['r'], color['g'], color['b'], color['a']) + + return mask_colors, named_params + +def _clamp_color(c: tuple[float, float, float, float]) -> tuple[float, float, float, float]: + return tuple(min(1.0, max(0.0, ch)) for ch in c) + +def get_color(mask_colors, key: str, fallback: Color) -> Color: + return 
_clamp_color(mask_colors.get(key, mask_colors.get(key.replace("color", "colour"), fallback))) + diff --git a/umodel_tools/game_profiles/dead_island_2.py b/umodel_tools/game_profiles/dead_island_2.py new file mode 100644 index 0000000..4cef45b --- /dev/null +++ b/umodel_tools/game_profiles/dead_island_2.py @@ -0,0 +1,845 @@ +"""This module implements support for Dead Island 2 game. +Known issues: + - Blended materials are not properly supported. Currently the first texture is used. +""" + +import enum +import typing as t +from typing import TypeAlias, Tuple, Dict +import dataclasses + +import bpy +import lark + + +GAME_NAME = "Dead Island 2" +GAME_DESCRIPTION = "Dead Island 2 (2023) by Deep Silver" + +# Node Socket Types +COLOR_SOCKET_NODE = 'NodeSocketColor' # +FLOAT_SOCKET_NODE = 'NodeSocketFloat' # +VECTOR_SOCKET_NODE = 'NodeSocketVector' # +BOOL_SOCKET_NODE = 'NodeSocketBool' # + +class TextureMapTypes(enum.Enum): + """All texture map types supported by the material generator. + """ + Diffuse = enum.auto() + Normal = enum.auto() + ATX = enum.auto() + ATR = enum.auto() + AHA = enum.auto() + MRO = enum.auto() + DRO = enum.auto() + EMISSION = enum.auto() + MSK = enum.auto() + WEAR_MSK = enum.auto() + +#: Suffixes of textures for automatic texture purpose guessing (lowercase only) +SUFFIX_MAP = { + 'd': TextureMapTypes.Diffuse, + 'n': TextureMapTypes.Normal, + 'e': TextureMapTypes.EMISSION, + 'mro': TextureMapTypes.MRO, + 'dro': TextureMapTypes.DRO, + 'bm': TextureMapTypes.MSK, + 'atx': TextureMapTypes.ATX, + 'atr': TextureMapTypes.ATR, + 'aha': TextureMapTypes.AHA, + + #: Extra Suffixes (Uppercase) + 'D': TextureMapTypes.Diffuse, + 'N': TextureMapTypes.Normal, + 'E': TextureMapTypes.EMISSION, + 'MRO': TextureMapTypes.MRO, + 'DRO': TextureMapTypes.DRO, + 'BM': TextureMapTypes.MSK, + 'ATX': TextureMapTypes.ATX, + 'ATR': TextureMapTypes.ATR, + 'AHA': TextureMapTypes.AHA, +} + +@dataclasses.dataclass +class MaterialContext: + bsdf_node: 
t.Optional[bpy.types.ShaderNodeBsdfPrincipled | bpy.types.ShaderNodeBsdfDiffuse] + desc_ast: lark.Tree + use_pbr: bool + msk_index: int = dataclasses.field(default=0) + diffuse_connected: bool = dataclasses.field(default=False) + linked_maps: set[TextureMapTypes] = dataclasses.field(default_factory=set) + + +_state_buffer: dict[bpy.types.Material, MaterialContext] = {} + + +def process_material(mat: bpy.types.Material, desc_ast: lark.Tree, use_pbr: bool): # pylint: disable=unused-argument + _state_buffer[mat] = MaterialContext(bsdf_node=None, desc_ast=desc_ast, use_pbr=use_pbr) + + +def do_process_texture(tex_type: str, tex_short_name: str) -> bool: # pylint: disable=unused-argument + return bool(_short_name_to_tex_type(tex_short_name)) + + +def is_diffuse_tex_type(tex_type: str, tex_short_name: str) -> bool: # pylint: disable=unused-argument + return _short_name_to_tex_type(tex_short_name) in {TextureMapTypes.Diffuse, TextureMapTypes.MSK} + + +def handle_material_texture_pbr(mat: bpy.types.Material, + tex_type: str, # pylint: disable=unused-argument + tex_short_name: str, + img_node: bpy.types.ShaderNodeTexImage, + main_shader: bpy.types.ShaderNodeTree, + orm_shader: bpy.types.ShaderNodeTree, + odmask_shader: bpy.types.ShaderNodeTree, + ao_mix_node: bpy.types.ShaderNodeMix, + bsdf_node: bpy.types.ShaderNodeBsdfPrincipled, + out_node: bpy.types.ShaderNodeOutputMaterial): + # Note: we presume AHA and ATX are mutually exclusive and never appear together. + # This is not validated. 
+ mat_ctx = _state_buffer[mat] + mat_ctx.bsdf_node = bsdf_node + + ao_mix_node.location = (-100, 300) + # ao_mix_node.inputs['B'].default_value = (0.0,0.0,0.0,1.0) # Default Black + + bsdf_node.location = (100, 20) + out_node.location = (600, 0) + + bl_tex_type = _short_name_to_tex_type(tex_short_name) + + # do not connect the same texture twice + if bl_tex_type in mat_ctx.linked_maps: + return + + def create_mix_node(mat, loc_x, loc_y, color, default_value): + # Using ShaderNodeMixRGB instead of ShaderNodeMix + mix_node = mat.node_tree.nodes.new('ShaderNodeMixRGB') + mix_node.location = (loc_x, loc_y) + + # mix_node.data_type = 'RGBA' + # mix_node.blend_type = 'MIX' + + # Extract only the RGB values, discarding the alpha + #rgb_color = color[:3] if color is not None else default_value[:3] + + # Ensure the color is a 4-tuple (RGBA) by appending 1.0 as alpha if necessary + rgba_color = color if len(color) == 4 else (color[0], color[1], color[2], 1.0) + + # Assign the RGB values to the inputs[2] of the Mix node + mix_node.inputs[2].default_value = rgba_color + + return mix_node + + def create_rgb_mask_tint_group(): + group_name = "RGBMaskTintGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + node_group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Inputs + node_group.interface.new_socket(name="Image", description="Color input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color R", description="Red channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color G", description="Green channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color B", description="Blue channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="Color A", description="Alpha channel input", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + 
node_group.interface.new_socket(name="Tint", in_out='INPUT', socket_type=BOOL_SOCKET_NODE) + + # Outputs + node_group.interface.new_socket(name="Color", description="Color output", in_out='OUTPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="R", description="Red channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="G", description="Green channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="B", description="Blue channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="A", description="Alpha channel output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + + nodes = node_group.nodes + links = node_group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-1000, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (600, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-700, 0) + + # input_node.inputs['Color A'].default_value = (0.0,0.0,0.0,1.0) # Default Alpha Color + # output_node.outputs['A'].default_value = (0.0,0.0,0.0,1.0) # Default Alpha Color + + # R Mix + r_mix = nodes.new("ShaderNodeMixRGB") + r_mix.blend_type = 'MULTIPLY' + r_mix.inputs['Fac'].default_value = 1.0 + r_mix.label = 'R Mix' + r_mix.location = (-400, 200) + + # G Mix + g_mix = nodes.new("ShaderNodeMixRGB") + g_mix.blend_type = 'MULTIPLY' + g_mix.inputs['Fac'].default_value = 1.0 + g_mix.label = 'G Mix' + g_mix.location = (-400, 0) + + # B Mix + b_mix = nodes.new("ShaderNodeMixRGB") + b_mix.blend_type = 'MULTIPLY' + b_mix.inputs['Fac'].default_value = 1.0 + b_mix.label = 'B Mix' + b_mix.location = (-400, -200) + + # Add R + G + add_rg = nodes.new("ShaderNodeMixRGB") + add_rg.blend_type = 'ADD' + add_rg.inputs['Fac'].default_value = 1.0 + add_rg.label = 'Add RG' + add_rg.location = (-100, 100) + + # Add above + B + add_rgb = 
nodes.new("ShaderNodeMixRGB") + add_rgb.blend_type = 'ADD' + add_rgb.inputs['Fac'].default_value = 1.0 + add_rgb.label = 'Add RGB' + add_rgb.location = (200, 0) + + # Tint switch + switch = nodes.new("ShaderNodeMix") + # switch.blend_type = 'MIX' + switch.label = 'Tint Switch' + switch.location = (400, 100) + + # Link inputs + links.new(input_node.outputs['Image'], separate.inputs['Color']) + links.new(input_node.outputs['Image'], switch.inputs['A']) + # links.new(separate.outputs['Red'], r_mix.inputs['Fac']) + # links.new(separate.outputs['Green'], g_mix.inputs['Fac']) + # links.new(separate.outputs['Blue'], b_mix.inputs['Fac']) + + links.new(separate.outputs['Red'], r_mix.inputs['Color1']) + links.new(separate.outputs['Green'], g_mix.inputs['Color1']) + links.new(separate.outputs['Blue'], b_mix.inputs['Color1']) + + links.new(input_node.outputs['Color R'], r_mix.inputs['Color2']) + links.new(input_node.outputs['Color G'], g_mix.inputs['Color2']) + links.new(input_node.outputs['Color B'], b_mix.inputs['Color2']) + + links.new(r_mix.outputs['Color'], add_rg.inputs['Color1']) + links.new(g_mix.outputs['Color'], add_rg.inputs['Color2']) + + links.new(add_rg.outputs['Color'], add_rgb.inputs['Color1']) + links.new(b_mix.outputs['Color'], add_rgb.inputs['Color2']) + links.new(add_rgb.outputs['Color'], switch.inputs['B']) + links.new(input_node.outputs['Tint'], switch.inputs[0]) # Bool switch + + # Output final color + links.new(switch.outputs[0], output_node.inputs['Color']) + + # Optional passthrough channels + links.new(separate.outputs['Red'], output_node.inputs[1]) + links.new(separate.outputs['Green'], output_node.inputs[2]) + links.new(separate.outputs['Blue'], output_node.inputs[3]) + + # # Alpha passthrough + # separate_alpha = nodes.new("ShaderNodeSeparateRGBA") + # separate_alpha.location = (-700, -300) + # links.new(input_node.outputs['Mask Image'], separate_alpha.inputs['Image']) + # links.new(separate_alpha.outputs['Alpha'], output_node.inputs[4]) + + 
links.new(input_node.outputs['Color A'], output_node.inputs['A']) # Temp Connection + + return node_group + + def create_roughness_mask_group(): + group_name = "RoughnessMaskGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + node_group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Inputs + node_group.interface.new_socket(name="ORM Image", description="RGB: Roughness, Metallic, AO", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + node_group.interface.new_socket(name="AO Scale", description="B channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Roughness Scale", description="R channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Metallic Scale", description="G channel multiplier", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + + # Outputs + node_group.interface.new_socket(name="AO", description="AO output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Roughness", description="Roughness output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + node_group.interface.new_socket(name="Metallic", description="Metallic output", in_out='OUTPUT', socket_type=FLOAT_SOCKET_NODE) + + nodes = node_group.nodes + links = node_group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-600, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (600, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-300, 0) + + # Multipliers + r_mult = nodes.new("ShaderNodeMath") + r_mult.operation = 'MULTIPLY' + r_mult.label = 'AO * Scale' + r_mult.location = (100, 200) + + g_mult = nodes.new("ShaderNodeMath") + g_mult.operation = 'MULTIPLY' + g_mult.label = 'Roughness * Scale' + g_mult.location = (100, 0) + + b_mult = nodes.new("ShaderNodeMath") + b_mult.operation = 'MULTIPLY' + b_mult.label = 
'Metallic * Scale' + b_mult.location = (100, -200) + + # Connect image to separate + links.new(input_node.outputs['ORM Image'], separate.inputs['Color']) + + # R channel + links.new(separate.outputs['Red'], r_mult.inputs[0]) + links.new(input_node.outputs['AO Scale'], r_mult.inputs[1]) + links.new(r_mult.outputs[0], output_node.inputs['AO']) + + # G channel + links.new(separate.outputs['Green'], g_mult.inputs[0]) + links.new(input_node.outputs['Roughness Scale'], g_mult.inputs[1]) + links.new(g_mult.outputs[0], output_node.inputs['Roughness']) + + # B channel + links.new(separate.outputs['Blue'], b_mult.inputs[0]) + links.new(input_node.outputs['Metallic Scale'], b_mult.inputs[1]) + links.new(b_mult.outputs[0], output_node.inputs['Metallic']) + return node_group + + def create_directx_to_opengl_normal_group(): + group_name = "DirectXToOpenGLNormal" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + # Define socket types + BOOL_SOCKET_NODE = 'NodeSocketBool' + + # Inputs + group.interface.new_socket(name="DirectX Normal", in_out='INPUT', socket_type=COLOR_SOCKET_NODE) + group.interface.new_socket(name="Flip Y", in_out='INPUT', socket_type=BOOL_SOCKET_NODE) + group.interface.new_socket(name="Strength", in_out='INPUT', socket_type=FLOAT_SOCKET_NODE) + + # Outputs + group.interface.new_socket(name="OpenGL Normal", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + + nodes = group.nodes + links = group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + input_node.location = (-800, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (400, 0) + + separate = nodes.new("ShaderNodeSeparateColor") + separate.location = (-600, 0) + + # Invert math node: 1 - G + invert = nodes.new("ShaderNodeMath") + invert.operation = 'SUBTRACT' + invert.inputs[0].default_value = 1.0 + invert.location = (-300, 100) + + # Mix: if Flip Y is enabled, use 
inverted green, otherwise pass original + mix_green = nodes.new("ShaderNodeMix") + mix_green.data_type = 'FLOAT' + mix_green.location = (-100, 100) + + combine = nodes.new("ShaderNodeCombineColor") + combine.location = (100, 0) + + normal_map = nodes.new("ShaderNodeNormalMap") + normal_map.inputs[0].default_value = 1.2 + normal_map.location = (250, 0) + + # Connect DirectX input to Separate Color + links.new(input_node.outputs['DirectX Normal'], separate.inputs['Color']) + links.new(input_node.outputs['Strength'], normal_map.inputs[0]) + + # Invert G + links.new(separate.outputs['Green'], invert.inputs[1]) + links.new(invert.outputs[0], mix_green.inputs[2]) # Inverted G + links.new(separate.outputs['Green'], mix_green.inputs[3]) # Original G + links.new(input_node.outputs['Flip Y'], mix_green.inputs['Factor']) + + # Combine back + links.new(separate.outputs['Red'], combine.inputs['Red']) + links.new(mix_green.outputs[0], combine.inputs['Green']) + links.new(separate.outputs['Blue'], combine.inputs['Blue']) + + # Plug into normal map node + links.new(combine.outputs['Color'], normal_map.inputs['Color']) + + # Final output + links.new(normal_map.outputs['Normal'], output_node.inputs['OpenGL Normal']) + + return group + + def create_mapping_group(): + group_name = "MappingGroup" + if group_name in bpy.data.node_groups: + return bpy.data.node_groups[group_name] + + group = bpy.data.node_groups.new(group_name, 'ShaderNodeTree') + + VECTOR_SOCKET_NODE = 'NodeSocketVector' + + # Inputs + group.interface.new_socket(name="Scale", in_out='INPUT', socket_type=VECTOR_SOCKET_NODE) + + # Outputs + group.interface.new_socket(name="UV0", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + group.interface.new_socket(name="UV1", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + group.interface.new_socket(name="UV2", in_out='OUTPUT', socket_type=VECTOR_SOCKET_NODE) + + nodes = group.nodes + links = group.links + nodes.clear() + + input_node = nodes.new("NodeGroupInput") + 
input_node.location = (-800, 0) + + output_node = nodes.new("NodeGroupOutput") + output_node.location = (400, 0) + + # Attribute Node (Custom UV Set) + attribute = nodes.new('ShaderNodeAttribute') + attribute.attribute_name = "EXTRAUVS0" + attribute.location = (-600, 0) + + attribute1 = nodes.new('ShaderNodeAttribute') + attribute1.attribute_name = "EXTRAUVS1" + attribute1.location = (-600, -150) + + attribute2 = nodes.new('ShaderNodeAttribute') + attribute2.attribute_name = "EXTRAUVS2" + attribute2.location = (-600, -300) + + # Mapping Node + mapping = nodes.new('ShaderNodeMapping') + mapping.vector_type = 'POINT' + mapping.location = (-200, 0) + + mapping1 = nodes.new('ShaderNodeMapping') + mapping1.vector_type = 'POINT' + mapping1.location = (-100, 0) + + mapping2 = nodes.new('ShaderNodeMapping') + mapping2.vector_type = 'POINT' + mapping2.location = (0, 0) + + # Connect nodes + links.new(input_node.outputs['Scale'], mapping.inputs['Scale']) + links.new(input_node.outputs['Scale'], mapping1.inputs['Scale']) + links.new(input_node.outputs['Scale'], mapping2.inputs['Scale']) + links.new(attribute.outputs['Vector'], mapping.inputs['Vector']) + links.new(attribute1.outputs['Vector'], mapping1.inputs['Vector']) + links.new(attribute2.outputs['Vector'], mapping2.inputs['Vector']) + links.new(mapping.outputs['Vector'], output_node.inputs[0]) # UV0 + links.new(mapping1.outputs['Vector'], output_node.inputs[1]) # UV1 + links.new(mapping2.outputs['Vector'], output_node.inputs[2]) # UV2 + + return group + + # remember that we processed a texture of that type + mat_ctx.linked_maps.add(bl_tex_type) + + # Ensure the material uses nodes and clear any old nodes: + mat.use_nodes = True + + # --- Reuse or create the custom node group --- + # main shader + if "MainShader" in mat.node_tree.nodes: + main_node = mat.node_tree.nodes["MainShader"] + print(f"'MainShader' already exists in material '{mat.name}'") + else: + main_node = mat.node_tree.nodes.new("ShaderNodeGroup") + 
main_node.name = "MainShader" + main_node.node_tree = main_shader # your already available node group + main_node.location = (-250, 0) + main_node.node_tree.use_fake_user = True + if main_node.node_tree.interface.items_tree: + main_node.node_tree.interface.active_index = 0 + main_node.node_tree.interface.active = main_node.node_tree.interface.items_tree[0] + main_node.node_tree.interface.active.default_value = (1.0, 1.0, 1.0, 1.0) + # main_node.inputs[3].default_value = (0.735,0.735,0.735,1.0) + print(f"Inserted 'MainShader' into material '{mat.name}'") + + # # ORM node group + # if "ORMExtraShader" in mat.node_tree.nodes: + # orm_node = mat.node_tree.nodes["ORMExtraShader"] + # print(f"'ORMExtraShader' already exists in material '{mat.name}'") + # else: + # orm_node = mat.node_tree.nodes.new("ShaderNodeGroup") + # orm_node.name = "ORMExtraShader" + # orm_node.node_tree = orm_shader + # orm_node.location = (-250, 350) + # orm_node.node_tree.use_fake_user = True + # # Default values + # orm_node.inputs[1].default_value = 8.0 # Multiply + # orm_node.inputs[2].default_value = -5.9 # Min + # orm_node.inputs[3].default_value = 0.3 # Max + # orm_node.inputs[5].default_value = 0.0 # Red Param Value + # orm_node.inputs[7].default_value = 0.0 # Green Param Value + # orm_node.inputs[9].default_value = 0.0 # Blue Param Value + # print(f"Inserted 'ORMExtraShader' into material '{mat.name}'") + + # OD Mask Shader node group + if "OverlayDiffuseMask" in mat.node_tree.nodes: + odm_node = mat.node_tree.nodes["OverlayDiffuseMask"] + print(f"'OverlayDiffuseMask' already exists in material '{mat.name}'") + else: + odm_node = mat.node_tree.nodes.new("ShaderNodeGroup") + odm_node.name = "OverlayDiffuseMask" + odm_node.node_tree = odmask_shader + odm_node.location = (-250, 600) + odm_node.node_tree.use_fake_user = True + # Default values + odm_node.inputs[1].default_value = (0.9,0.7,0.6,1.0) # Simple Tint + odm_node.inputs[4].default_value = (0.735,0.735,0.735,1.0) # Diffuse Red + 
odm_node.inputs[7].default_value = (0.735,0.735,0.735,1.0) # Diffuse Green + odm_node.inputs[10].default_value = (0.735,0.735,0.735,1.0) # Diffuse Blue + print(f"Inserted 'OverlayDiffuseMask' into material '{mat.name}'") + + if "MappingGroup" in mat.node_tree.nodes: + mapping_node = mat.node_tree.nodes["MappingGroup"] + print(f"'Mapping' already exists in material '{mat.name}'") + else: + mapping_node = mat.node_tree.nodes.new("ShaderNodeGroup") + mapping_node.name = "MappingGroup" + mapping_node.node_tree = create_mapping_group() + mapping_node.location = (-1500, -300) + mapping_node.node_tree.use_fake_user = True + print(f"Inserted 'MappingGroup' into material '{mat.name}'") + + # Link custom node groups + mat.node_tree.links.new(main_node.outputs[0], bsdf_node.inputs[0]) # To Base Color BSDF + + main_node.inputs[0].default_value = (0.0,0.0,0.0,1.0) # color + main_node.inputs[1].default_value = 0.0 # AO + main_node.inputs[2].default_value = 0.8 # Gamma Strength + + mapping_node.inputs[0].default_value[0] = 1 # X + mapping_node.inputs[0].default_value[1] = 1 # Y + mapping_node.inputs[0].default_value[2] = 1 # Z + + match bl_tex_type: + case None: + ao_mix_node.select = True + bsdf_node.inputs[4].default_value = 0.2 # Alpha + bsdf_node.inputs[2].default_value = 0.0 # Transmission + bsdf_node.inputs[18].default_value = 1.0 # Transmission + case TextureMapTypes.Diffuse: + mat_ctx = _state_buffer[mat] + mask_colors, vector_params = _get_vector_and_mask_colors(ast=mat_ctx.desc_ast) + + img_node.location = (-750, 150) + img_node.image.colorspace_settings.name = 'sRGB' + img_node.select = True + mat.node_tree.nodes.active = img_node + + # Retrieve colors for each mask channel + dtint = vector_params.get('diffuse colour tint', (1, 1, 1, 1)) + sss = vector_params.get('Subsurface Colour', (1, 1, 1, 1)) + + tintr = mask_colors.get('tint 0 color', (dtint)) + tintg = mask_colors.get('tint 0.33 color', (sss)) # Temp Default color + tintb = mask_colors.get('tint 0.66 color', (1, 
1, 1, 1)) + tinta = mask_colors.get('tint 1.0 color', (1, 1, 1, 1)) + + # Create the RGB Mask Tint group (or get it if already exists) + tint_group = create_rgb_mask_tint_group() + + # Add node to material + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = tint_group + group_node.location = (-450, 300) + + group_node.inputs['Tint'].default_value = False + + # Connect mask texture to group input + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Image']) + + group_node.inputs['Color R'].default_value = tintr + group_node.inputs['Color G'].default_value = tintg + group_node.inputs['Color B'].default_value = tintb + group_node.inputs['Color A'].default_value = tinta + + mat.node_tree.links.new(group_node.outputs['Color'], main_node.inputs[0]) # tinted output to main + + mat_ctx.msk_index += 1 + + case TextureMapTypes.MRO: + img_node.location = (-750, -150) + img_node.image.colorspace_settings.name = 'Non-Color' + invert_node = mat.node_tree.nodes.new('ShaderNodeInvert') + invert_node.inputs['Fac'].default_value = 0.3 + invert_node.location = (-250, -150) + + rough_group = create_roughness_mask_group() + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = rough_group + group_node.location = (-450, -150) + + # Connect texture + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['ORM Image']) + + # Optional scales + group_node.inputs['Roughness Scale'].default_value = 1.0 + group_node.inputs['Metallic Scale'].default_value = 1.0 + group_node.inputs['AO Scale'].default_value = 1.0 + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['Roughness Scale']) + + # Connect to BSDF + # Compare to ORM map + mat.node_tree.links.new(group_node.outputs['AO'], bsdf_node.inputs['Metallic']) + mat.node_tree.links.new(group_node.outputs['Roughness'], ao_mix_node.inputs['A']) + mat.node_tree.links.new(group_node.outputs['Roughness'], invert_node.inputs['Color']) + 
mat.node_tree.links.new(invert_node.outputs['Color'], bsdf_node.inputs['Roughness']) + mat.node_tree.links.new(group_node.outputs['Metallic'], main_node.inputs['AO']) + + case TextureMapTypes.DRO: + img_node.location = (-1200, -150) + img_node.image.colorspace_settings.name = 'Non-Color' + # invert_node = mat.node_tree.nodes.new('ShaderNodeInvert') + # invert_node.inputs['Fac'].default_value = 0.3 + # invert_node.location = (-900, -150) + + rough_group = create_roughness_mask_group() + group_node = mat.node_tree.nodes.new('ShaderNodeGroup') + group_node.node_tree = rough_group + group_node.location = (-900, -150) + + # Connect texture + mat.node_tree.links.new(img_node.outputs['Color'], group_node.inputs['ORM Image']) + mat.node_tree.links.new(mapping_node.outputs['UV0'], img_node.inputs['Vector']) + + # Optional scales + group_node.inputs['Roughness Scale'].default_value = 1.0 + group_node.inputs['Metallic Scale'].default_value = 1.0 + group_node.inputs['AO Scale'].default_value = 1.0 + + # Connect to BSDF + # Compare to ORM map + # mat.node_tree.links.new(group_node.outputs['AO'], bsdf_node.inputs['Metallic']) + # mat.node_tree.links.new(group_node.outputs['Roughness'], ao_mix_node.inputs['A']) + # mat.node_tree.links.new(group_node.outputs['Roughness'], invert_node.inputs['Color']) + # mat.node_tree.links.new(invert_node.outputs['Color'], bsdf_node.inputs['Roughness']) + # mat.node_tree.links.new(group_node.outputs['Metallic'], main_node.inputs['AO']) + + case TextureMapTypes.Normal: + img_node.location = (-750, -450) + img_node.image.colorspace_settings.name = 'Non-Color' + + normal_map_group = create_directx_to_opengl_normal_group() + node = mat.node_tree.nodes.new("ShaderNodeGroup") + node.node_tree = normal_map_group + node.inputs[2].default_value = 2.0 # Normal Strength + node.location = (-400, -450) + mat.node_tree.links.new(img_node.outputs['Color'], node.inputs['DirectX Normal']) + + # Enable Flip Y + node.inputs['Flip Y'].default_value = False + + # 
Connect to BSDF +                mat.node_tree.links.new(node.outputs['OpenGL Normal'], bsdf_node.inputs['Normal']) + +                if TextureMapTypes.Normal == +1: # Check for other normal textures +                    mat.node_tree.links.new(img_node.outputs['Color'], node.inputs['DirectX Normal']) + +            case TextureMapTypes.EMISSION: +                img_node.location = (-200, -450) +                bsdf_node.inputs[28].default_value = 1.0 # Emission strength +                mat.node_tree.links.new(img_node.outputs['Color'], bsdf_node.inputs[27]) # Emission color + +            case TextureMapTypes.ATX: +                atx_split_node = mat.node_tree.nodes.new('ShaderNodeSeparateColor') +                atx_split_node.location = (-400, -700) +                img_node.location = (-750, -800) +                img_node.image.colorspace_settings.name = 'Non-Color' +                mat.node_tree.links.new(img_node.outputs['Color'], atx_split_node.inputs['Color']) +                mat.node_tree.links.new(atx_split_node.outputs['Red'], bsdf_node.inputs['Alpha']) +                mat.node_tree.links.new(atx_split_node.outputs['Green'], bsdf_node.inputs[18]) # Transmission weight +                mat.node_tree.links.new(atx_split_node.outputs['Blue'], bsdf_node.inputs['Alpha']) # Roughness + +            case TextureMapTypes.ATR: +                atr_split_node = mat.node_tree.nodes.new('ShaderNodeSeparateColor') +                atr_split_node.location = (-400, -700) +                mix_node = mat.node_tree.nodes.new('ShaderNodeMixRGB') +                mix_node.location = (-200, -700) +                img_node.location = (-750, -1000) +                img_node.image.colorspace_settings.name = 'Non-Color' +                mat.node_tree.links.new(img_node.outputs['Color'], atr_split_node.inputs['Color']) +                mat.node_tree.links.new(atr_split_node.outputs['Red'], bsdf_node.inputs['Alpha']) +                mat.node_tree.links.new(atr_split_node.outputs['Green'], mix_node.inputs['Color1']) +                mat.node_tree.links.new(atr_split_node.outputs['Blue'], mix_node.inputs['Color2']) +                mat.node_tree.links.new(mix_node.outputs['Color'], bsdf_node.inputs['Roughness']) + +            case TextureMapTypes.AHA: +                # Split Channels +                aha_split_node = mat.node_tree.nodes.new('ShaderNodeSeparateColor') +                aha_split_node.location = (-400, -900) + +                # height
component + displacement_node = mat.node_tree.nodes.new('ShaderNodeDisplacement') + displacement_node.location = (400, -500) + img_node.location = (-750, -1100) + img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(displacement_node.outputs['Displacement'], out_node.inputs['Displacement']) + mat.node_tree.links.new(img_node.outputs['Color'], aha_split_node.inputs['Color']) + mat.node_tree.links.new(aha_split_node.outputs['Green'], displacement_node.inputs['Height']) + displacement_node.inputs[2].default_value = 0.1 + + case TextureMapTypes.WEAR_MSK: + mat_ctx.msk_index += 1 + + case TextureMapTypes.MSK: + mat_ctx = _state_buffer[mat] + mask_colors = _get_mask_colors(ast=mat_ctx.desc_ast) + + img_node.location = (-900, 200) + img_node.image.colorspace_settings.name = 'Non-Color' + + # Retrieve colors for each mask channel + color1 = mask_colors.get(f'color {mat_ctx.msk_index + 1}', (0, 0, 1, 1)) + color2 = mask_colors.get(f'color {mat_ctx.msk_index + 2}', (0, 1, 0, 1)) + color3 = mask_colors.get(f'color {mat_ctx.msk_index + 3}', (1, 0, 0, 1)) + color4 = mask_colors.get(f'plastic base colour', (0.067708, 0.066298, 0.066298, 1)) + + # Separate the color channels + msk_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') + msk_split.location = (-700, -200) + + # Create mix nodes for each color + b_mix = create_mix_node(mat, -500, -200, color1, (0, 0, 1, 1)) + g_mix = create_mix_node(mat, -300, -200, color2, (0, 1, 0, 1)) + r_mix = create_mix_node(mat, -100, -200, color3, (1, 0, 0, 1)) + plastic_mix = create_mix_node(mat, 100, -200, color4, (0.067708, 0.066298, 0.066298, 1)) + + # Connect nodes + mat.node_tree.links.new(img_node.outputs['Color'], msk_split.inputs['Color']) + mat.node_tree.links.new(msk_split.outputs['Red'], r_mix.inputs[0]) + mat.node_tree.links.new(msk_split.outputs['Green'], g_mix.inputs[0]) + mat.node_tree.links.new(msk_split.outputs['Blue'], b_mix.inputs[0]) + + # Connect the mix nodes together + 
mat.node_tree.links.new(b_mix.outputs['Color'], g_mix.inputs[1]) + mat.node_tree.links.new(g_mix.outputs['Color'], r_mix.inputs[1]) + mat.node_tree.links.new(r_mix.outputs['Color'], plastic_mix.inputs[1]) + + # Connect the final result to the BSDF node + if not mat_ctx.diffuse_connected: + mat.node_tree.links.new(plastic_mix.outputs['Color'], bsdf_node.inputs[0]) + mat.node_tree.links.new(img_node.outputs['Alpha'], bsdf_node.inputs['Alpha']) + img_node.select = True + mat.node_tree.nodes.active = img_node + img_node.location = (-950, -250) + img_node.image.colorspace_settings.name = 'Non-Color' + + mat_ctx.msk_index += 1 + + print(f"mask_colors: {mask_colors}") + print(f"color1: {color1}, color2: {color2}, color3: {color3}, color4: {color4}") + + +def handle_material_texture_simple(mat: bpy.types.Material, + tex_type: str, # pylint: disable=unused-argument + tex_short_name: str, # pylint: disable=unused-argument + img_node: bpy.types.ShaderNodeTexImage, + bsdf_node: bpy.types.ShaderNodeBsdfDiffuse): + _state_buffer[mat].bsdf_node = bsdf_node + + mat.node_tree.links.new(img_node.outputs['Color'], bsdf_node.inputs['Color']) + img_node.select = True + mat.node_tree.nodes.active = img_node + +def end_process_material(mat: bpy.types.Material): + mat_ctx = _state_buffer[mat] + + if mat_ctx.use_pbr and mat_ctx.bsdf_node is not None: + # set defaults + mat_ctx.bsdf_node.inputs[1].default_value = 0.0 # Metallic + mat_ctx.bsdf_node.inputs[2].default_value = 0.1 # Roughness + mat_ctx.bsdf_node.inputs[3].default_value = 1.5 # IOR + mat_ctx.bsdf_node.inputs[15].default_value = 0.8 # Specular Anisotropic + mat_ctx.bsdf_node.inputs[24].default_value = 0.1 # Sheen Weight + mat_ctx.bsdf_node.inputs[20].default_value = 0.030 # Clearcoat roughness + mat_ctx.bsdf_node.inputs[21].default_value = 1.5 # Clearcoat IOR + + del _state_buffer[mat] + + +# Non-interface functions below +def _short_name_to_tex_type(tex_short_name: str) -> t.Optional[TextureMapTypes]: + """Convert short texture 
name to a recognized texture map type if possible. + + :return: TextureMapType or None. + """ + return SUFFIX_MAP.get(tex_short_name.lower().split('_')[-1]) + + +Color: t.TypeAlias = tuple[float, float, float, float] + +def _get_vector_and_mask_colors(ast: lark.Tree) -> tuple[Dict[str, Color], Dict[str, Color]]: + """Separates VectorParameterValues into indexed mask colors and named parameters. + + Returns: + mask_colors: 'color 1', 'color 2', etc. + named_params: 'Simple Tint', 'Water Color', etc. + """ + mask_colors = {} + named_params = {} + + for child in ast.children: + if child.data != 'definition': + continue + def_name, array_qual, value = child.children + + if def_name != 'VectorParameterValues': + continue + if array_qual is None or value.data != 'structured_block': + continue + + for tex_param_def in value.children: + _, _, tex_param = tex_param_def.children + param_info, param_val, _ = tex_param.children # ParameterInfo, ParameterValue, ParameterName + + # Default vector (0,0,0,1) + color = {'r': 0.0, 'g': 0.0, 'b': 0.0, 'a': 1.0} + + if param_val.children[2].data != 'structured_block': + continue + + color_vec = param_val.children[2] + + # Extract color + for channel_def in color_vec.children: + channel_name, _, channel = channel_def.children + cname = channel_name.lower() + if cname in {'r', 'g', 'b', 'a'}: + color[cname] = float(channel.children[0].value) + + # Extract name from ParameterInfo + name_tree = param_info.children[2].children[0].children[2] + param_name = name_tree.children[0].value.strip().lower() + + # Split between mask colors and named ones + if param_name.startswith("color "): + mask_colors[param_name] = (color['r'], color['g'], color['b'], color['a']) + else: + named_params[param_name] = (color['r'], color['g'], color['b'], color['a']) + + return mask_colors, named_params \ No newline at end of file diff --git a/umodel_tools/game_profiles/hogwarts_legacy.py b/umodel_tools/game_profiles/hogwarts_legacy.py index 9551ee0..c5e772a 
100644 --- a/umodel_tools/game_profiles/hogwarts_legacy.py +++ b/umodel_tools/game_profiles/hogwarts_legacy.py @@ -69,6 +69,14 @@ class TextureMapTypes(enum.Enum): "mroh/sroh a map": TextureMapTypes.MROH, "mro a map": TextureMapTypes.MROH, + "Diffuse Map": TextureMapTypes.Diffuse, + "MRO Map": TextureMapTypes.MRO, + "Normal Map": TextureMapTypes.Normal, + + "D": TextureMapTypes.Diffuse, + "MRO": TextureMapTypes.MRO, + "N": TextureMapTypes.Normal, + # Weird stuff goes here "color glass": TextureMapTypes.Diffuse, "base color": TextureMapTypes.Diffuse, @@ -145,25 +153,28 @@ def handle_material_texture_pbr(mat: bpy.types.Material, mat_ctx.diffuse_connected = True case TextureMapTypes.Normal: + img_node.image.colorspace_settings.name = 'Non-Color' normal_map_node = mat.node_tree.nodes.new('ShaderNodeNormalMap') - mat.node_tree.links.new(normal_map_node.outputs['Normal'], - bsdf_node.inputs['Normal']) - mat.node_tree.links.new(img_node.outputs['Color'], - normal_map_node.inputs['Color']) + mat.node_tree.links.new(normal_map_node.outputs['Normal'],bsdf_node.inputs['Normal']) + mat.node_tree.links.new(img_node.outputs['Color'],normal_map_node.inputs['Color']) + case TextureMapTypes.SRO: + img_node.image.colorspace_settings.name = 'Non-Color' sro_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') mat.node_tree.links.new(sro_split.outputs['Red'], bsdf_node.inputs['Specular']) mat.node_tree.links.new(sro_split.outputs['Green'], bsdf_node.inputs['Roughness']) mat.node_tree.links.new(sro_split.outputs['Blue'], ao_mix_node.inputs[7]) mat.node_tree.links.new(img_node.outputs['Color'], sro_split.inputs['Color']) + case TextureMapTypes.MROH: # MRO components + img_node.image.colorspace_settings.name = 'Non-Color' mroh_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') mat.node_tree.links.new(mroh_split.outputs['Red'], bsdf_node.inputs['Metallic']) mat.node_tree.links.new(mroh_split.outputs['Green'], bsdf_node.inputs['Roughness']) 
mat.node_tree.links.new(mroh_split.outputs['Blue'], ao_mix_node.inputs[7]) mat.node_tree.links.new(img_node.outputs['Color'], mroh_split.inputs['Color']) - + # height component displacement_node = mat.node_tree.nodes.new('ShaderNodeDisplacement') mat.node_tree.links.new(displacement_node.outputs['Displacement'], @@ -171,6 +182,7 @@ def handle_material_texture_pbr(mat: bpy.types.Material, mat.node_tree.links.new(img_node.outputs['Alpha'], displacement_node.inputs['Height']) case TextureMapTypes.MRO: + img_node.image.colorspace_settings.name = 'Non-Color' mro_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') mat.node_tree.links.new(mro_split.outputs['Red'], bsdf_node.inputs['Metallic']) mat.node_tree.links.new(mro_split.outputs['Green'], bsdf_node.inputs['Roughness']) @@ -181,6 +193,7 @@ def handle_material_texture_pbr(mat: bpy.types.Material, mat_ctx.msk_index += 1 case TextureMapTypes.MSK: + img_node.image.colorspace_settings.name = 'Non-Color' mat_ctx = _state_buffer[mat] mask_colors = _get_mask_colors(ast=mat_ctx.desc_ast) @@ -241,11 +254,11 @@ def end_process_material(mat: bpy.types.Material): if mat_ctx.use_pbr and mat_ctx.bsdf_node is not None: # set defaults - mat_ctx.bsdf_node.inputs[4].default_value = 1.01 # Subsurface IOR - mat_ctx.bsdf_node.inputs[7].default_value = 0.0 # Specular - mat_ctx.bsdf_node.inputs[9].default_value = 0.0 # Roughness - mat_ctx.bsdf_node.inputs[13].default_value = 0.0 # Sheen Tint - mat_ctx.bsdf_node.inputs[15].default_value = 0.0 # Clearcoat roughness + mat_ctx.bsdf_node.inputs[2].default_value = 0.0 # Roughness + mat_ctx.bsdf_node.inputs[3].default_value = 1.01 # IOR + # mat_ctx.bsdf_node.inputs[7].default_value = 0.0 # Specular + # mat_ctx.bsdf_node.inputs[24].default_value = 0.0 # Sheen Tint + # mat_ctx.bsdf_node.inputs[20].default_value = 0.0 # Clearcoat roughness del _state_buffer[mat] @@ -254,7 +267,6 @@ def end_process_material(mat: bpy.types.Material): Color: t.TypeAlias = tuple[float, float, float] - def 
_get_mask_colors(ast: lark.Tree) -> dict[str, Color]: """Get MSK colors from texture parameters. diff --git a/umodel_tools/game_profiles/the_callisto_protocol.py b/umodel_tools/game_profiles/the_callisto_protocol.py new file mode 100644 index 0000000..3dfab00 --- /dev/null +++ b/umodel_tools/game_profiles/the_callisto_protocol.py @@ -0,0 +1,353 @@ +"""This module implements support for The Callisto Protocol game. +Known issues: +    - Blended materials are not properly supported. Currently the first texture is used. +""" + +import enum +import typing as t +from typing import TypeAlias, Tuple, Dict +import dataclasses + +import bpy +import lark + + +GAME_NAME = "The Callisto Protocol" +GAME_DESCRIPTION = "The Callisto Protocol (2022) by Striking Distance Studios and Krafton" + +class TextureMapTypes(enum.Enum): +    """All texture map types supported by the material generator. +    """ +    Diffuse = enum.auto() +    Normal = enum.auto() +    ATX = enum.auto() +    AHA = enum.auto() +    MRO = enum.auto() +    ORM = enum.auto() +    ORMS = enum.auto() +    SSS = enum.auto() +    MSK = enum.auto() +    M = enum.auto() +    WEAR_MSK = enum.auto() + +#: Suffixes of textures for automatic texture purpose guessing (lowercase only) +SUFFIX_MAP = { +    'd': TextureMapTypes.Diffuse, +    'n': TextureMapTypes.Normal, +    'mro': TextureMapTypes.MRO, +    'bm': TextureMapTypes.MSK, +    'atx': TextureMapTypes.ATX, +    'aha': TextureMapTypes.AHA, + +    #: Extra Suffixes (Uppercase) +    'D': TextureMapTypes.Diffuse, +    'N': TextureMapTypes.Normal, +    'MRO': TextureMapTypes.MRO, +    'BM': TextureMapTypes.MSK, +    'ATX': TextureMapTypes.ATX, +    'AHA': TextureMapTypes.AHA, + +    "Alpha Mask Texture": TextureMapTypes.M, +    "PM_Diffuse": TextureMapTypes.Diffuse, +    "Diffuse Map": TextureMapTypes.Diffuse, +    "PM_Normals": TextureMapTypes.Normal, +    "Normal Map": TextureMapTypes.Normal, +    "PM_SpecularMasks": TextureMapTypes.ORM, +    "ORM Map": TextureMapTypes.ORM, +    "SSS Map": TextureMapTypes.SSS, + +    "M": TextureMapTypes.M, +    "_M": TextureMapTypes.M, +    "D": 
TextureMapTypes.Diffuse, +    "_D": TextureMapTypes.Diffuse, +    "N": TextureMapTypes.Normal, +    "_N": TextureMapTypes.Normal, +    "ORM": TextureMapTypes.ORM, +    "_ORM": TextureMapTypes.ORM, +    "ORMS": TextureMapTypes.ORM, +    "_ORMS": TextureMapTypes.ORM, +    "SSS": TextureMapTypes.SSS, +    "_SSS": TextureMapTypes.SSS, +} + + +@dataclasses.dataclass +class MaterialContext: +    bsdf_node: t.Optional[bpy.types.ShaderNodeBsdfPrincipled | bpy.types.ShaderNodeBsdfDiffuse] +    desc_ast: lark.Tree +    use_pbr: bool +    msk_index: int = dataclasses.field(default=0) +    diffuse_connected: bool = dataclasses.field(default=False) +    linked_maps: set[TextureMapTypes] = dataclasses.field(default_factory=set) + +_state_buffer: dict[bpy.types.Material, MaterialContext] = {} + + +def process_material(mat: bpy.types.Material, desc_ast: lark.Tree, use_pbr: bool):  # pylint: disable=unused-argument +    _state_buffer[mat] = MaterialContext(bsdf_node=None, desc_ast=desc_ast, use_pbr=use_pbr) + + +def do_process_texture(tex_type: str, tex_short_name: str) -> bool:  # pylint: disable=unused-argument +    return bool(_short_name_to_tex_type(tex_short_name)) + + +def is_diffuse_tex_type(tex_type: str, tex_short_name: str) -> bool:  # pylint: disable=unused-argument +    return _short_name_to_tex_type(tex_short_name) in {TextureMapTypes.Diffuse, TextureMapTypes.MSK} + + +def handle_material_texture_pbr(mat: bpy.types.Material, +                                tex_type: str,  # pylint: disable=unused-argument +                                tex_short_name: str, +                                img_node: bpy.types.ShaderNodeTexImage, +                                ao_mix_node: bpy.types.ShaderNodeMix, +                                bsdf_node: bpy.types.ShaderNodeBsdfPrincipled, +                                out_node: bpy.types.ShaderNodeOutputMaterial): +    # Note: we presume AHA and ATX are mutually exclusive and never appear together. +    # This is not validated. 
+ mat_ctx = _state_buffer[mat] + mat_ctx.bsdf_node = bsdf_node + + # Set the location of nodes in the node editor + ao_node = mat.node_tree.nodes.new('ShaderNodeAmbientOcclusion') + ao_node.location = (-400, 100) + mix_node = mat.node_tree.nodes.new('ShaderNodeMixRGB') + mix_node.location = (-200, 50) + ao_mix_node.location = (-200, 300) + bsdf_node.location = (100, 20) + out_node.location = (600, 0) + + bl_tex_type = _short_name_to_tex_type(tex_short_name) + + # do not connect the same texture twice + if bl_tex_type in mat_ctx.linked_maps: + return + + def create_mix_node(mat, loc_x, loc_y, color, default_value): + # Using ShaderNodeMixRGB instead of ShaderNodeMix + mix_node = mat.node_tree.nodes.new('ShaderNodeMixRGB') + mix_node.location = (loc_x, loc_y) + # mix_node.data_type = 'RGBA' + # mix_node.blend_type = 'MIX' + + # Extract only the RGB values, discarding the alpha + #rgb_color = color[:3] if color is not None else default_value[:3] + + # Ensure the color is a 4-tuple (RGBA) by appending 1.0 as alpha if necessary + rgba_color = color if len(color) == 4 else (color[0], color[1], color[2], 1.0) + + # Assign the RGB values to the inputs[2] of the Mix node + mix_node.inputs[2].default_value = rgba_color + + return mix_node + + # remember that we processed a texture of that type + mat_ctx.linked_maps.add(bl_tex_type) + + match bl_tex_type: + case TextureMapTypes.Diffuse: + img_node.location = (-750, 150) + img_node.image.colorspace_settings.name = 'sRGB' + mat.node_tree.links.new(img_node.outputs['Color'], ao_node.inputs['Color']) + mat.node_tree.links.new(img_node.outputs['Color'], mix_node.inputs[1]) + mat.node_tree.links.new(ao_node.outputs['Color'], mix_node.inputs[2]) + mat.node_tree.links.new(mix_node.outputs['Color'], bsdf_node.inputs['Base Color']) + + img_node.select = True + mat.node_tree.nodes.active = img_node + mat_ctx.diffuse_connected = True + + case TextureMapTypes.Normal: + normal_map_node = mat.node_tree.nodes.new('ShaderNodeNormalMap') + 
normal_map_node.location = (-400, -500) + img_node.location = (-750, -500) + img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(img_node.outputs['Color'], normal_map_node.inputs['Color']) + normal_map_node.inputs[0].default_value = 2 + mat.node_tree.links.new(normal_map_node.outputs['Normal'], bsdf_node.inputs['Normal']) + mat.node_tree.links.new(normal_map_node.outputs['Normal'], ao_node.inputs['Normal']) + + case TextureMapTypes.ATX: + atx_split_node = mat.node_tree.nodes.new('ShaderNodeSeparateColor') + atx_split_node.location = (-400, -700) + img_node.location = (-750, -800) + img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(img_node.outputs['Color'], atx_split_node.inputs['Color']) + mat.node_tree.links.new(atx_split_node.outputs['Red'], bsdf_node.inputs['Alpha']) + + case TextureMapTypes.AHA: + # Split Channels + aha_split_node = mat.node_tree.nodes.new('ShaderNodeSeparateColor') + aha_split_node.location = (-400, -900) + + # height component + displacement_node = mat.node_tree.nodes.new('ShaderNodeDisplacement') + displacement_node.location = (400, -500) + img_node.location = (-750, -1100) + img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(displacement_node.outputs['Displacement'], out_node.inputs['Displacement']) + mat.node_tree.links.new(img_node.outputs['Color'], aha_split_node.inputs['Color']) + mat.node_tree.links.new(aha_split_node.outputs['Green'], displacement_node.inputs['Height']) + displacement_node.inputs[2].default_value = 0.1 + + case TextureMapTypes.ORM: + mix_node.blend_type = 'MULTIPLY' + mro_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') + mro_split.location = (-400, -300) + invert_node = mat.node_tree.nodes.new('ShaderNodeInvert') + invert_node.location = (-200, -350) + invert2_node = mat.node_tree.nodes.new('ShaderNodeInvert') + invert2_node.location = (-200, -200) + img_node.location = (-750, -150) + 
img_node.image.colorspace_settings.name = 'Non-Color' + mat.node_tree.links.new(img_node.outputs['Color'], mro_split.inputs['Color']) + mat.node_tree.links.new(mro_split.outputs['Red'], invert2_node.inputs[0]) + mat.node_tree.links.new(mro_split.outputs['Red'], invert2_node.inputs[1]) + mat.node_tree.links.new(invert2_node.outputs['Color'], bsdf_node.inputs['Metallic']) + mat.node_tree.links.new(mro_split.outputs['Green'], bsdf_node.inputs['Roughness']) + mat.node_tree.links.new(mro_split.outputs['Green'], invert_node.inputs['Color']) + mat.node_tree.links.new(invert_node.outputs['Color'], bsdf_node.inputs[13]) + mat.node_tree.links.new(mro_split.outputs['Blue'], mix_node.inputs[0]) + + case TextureMapTypes.WEAR_MSK: + mat_ctx.msk_index += 1 + + case TextureMapTypes.MSK: + mat_ctx = _state_buffer[mat] + mask_colors = _get_mask_colors(ast=mat_ctx.desc_ast) + + img_node.location = (-900, 200) + img_node.image.colorspace_settings.name = 'Non-Color' + + # Retrieve colors for each mask channel + color1 = mask_colors.get(f'color {mat_ctx.msk_index + 1}', (0, 0, 1, 1)) + color2 = mask_colors.get(f'color {mat_ctx.msk_index + 2}', (0, 1, 0, 1)) + color3 = mask_colors.get(f'color {mat_ctx.msk_index + 3}', (1, 0, 0, 1)) + color4 = mask_colors.get(f'plastic base colour', (0.067708, 0.066298, 0.066298, 1)) + + # Separate the color channels + msk_split = mat.node_tree.nodes.new('ShaderNodeSeparateColor') + msk_split.location = (-700, -200) + + # Create mix nodes for each color + b_mix = create_mix_node(mat, -500, -200, color1, (0, 0, 1, 1)) + g_mix = create_mix_node(mat, -300, -200, color2, (0, 1, 0, 1)) + r_mix = create_mix_node(mat, -100, -200, color3, (1, 0, 0, 1)) + plastic_mix = create_mix_node(mat, 100, -200, color4, (0.067708, 0.066298, 0.066298, 1)) + + # Connect nodes + mat.node_tree.links.new(img_node.outputs['Color'], msk_split.inputs['Color']) + mat.node_tree.links.new(msk_split.outputs['Red'], r_mix.inputs[0]) + mat.node_tree.links.new(msk_split.outputs['Green'], 
g_mix.inputs[0]) + mat.node_tree.links.new(msk_split.outputs['Blue'], b_mix.inputs[0]) + + # Connect the mix nodes together + mat.node_tree.links.new(b_mix.outputs['Color'], g_mix.inputs[1]) + mat.node_tree.links.new(g_mix.outputs['Color'], r_mix.inputs[1]) + mat.node_tree.links.new(r_mix.outputs['Color'], plastic_mix.inputs[1]) + + # Connect the final result to the BSDF node + if not mat_ctx.diffuse_connected: + mat.node_tree.links.new(plastic_mix.outputs['Color'], bsdf_node.inputs[0]) + mat.node_tree.links.new(img_node.outputs['Alpha'], bsdf_node.inputs['Alpha']) + img_node.select = True + mat.node_tree.nodes.active = img_node + img_node.location = (-950, -250) + img_node.image.colorspace_settings.name = 'Non-Color' + + mat_ctx.msk_index += 1 + + print(f"mask_colors: {mask_colors}") + print(f"color1: {color1}, color2: {color2}, color3: {color3}, color4: {color4}") + + +def handle_material_texture_simple(mat: bpy.types.Material, + tex_type: str, # pylint: disable=unused-argument + tex_short_name: str, # pylint: disable=unused-argument + img_node: bpy.types.ShaderNodeTexImage, + bsdf_node: bpy.types.ShaderNodeBsdfDiffuse): + _state_buffer[mat].bsdf_node = bsdf_node + + mat.node_tree.links.new(img_node.outputs['Color'], bsdf_node.inputs['Color']) + img_node.select = True + mat.node_tree.nodes.active = img_node + +def end_process_material(mat: bpy.types.Material): + mat_ctx = _state_buffer[mat] + + if mat_ctx.use_pbr and mat_ctx.bsdf_node is not None: + # set defaults + mat_ctx.bsdf_node.inputs[3].default_value = 1.4 # IOR + mat_ctx.bsdf_node.inputs[1].default_value = 0.3 # Metallic + mat_ctx.bsdf_node.inputs[12].default_value = 1.0 # Specular IOR Level + mat_ctx.bsdf_node.inputs[2].default_value = 0.1 # Roughness + mat_ctx.bsdf_node.inputs[23].default_value = 0.1 # Sheen Weight + mat_ctx.bsdf_node.inputs[19].default_value = 0.030 # Clearcoat roughness + mat_ctx.bsdf_node.inputs[20].default_value = 1.4 # Clearcoat IOR + + del _state_buffer[mat] + + +# Non-interface 
functions below + +def _short_name_to_tex_type(tex_short_name: str) -> t.Optional[TextureMapTypes]: + """Convert short texture name to a recognized texture map type if possible. + + :return: TextureMapType or None. + """ + return SUFFIX_MAP.get(tex_short_name.lower().split('_')[-1]) + + +Color: t.TypeAlias = tuple[float, float, float, float] + +def _get_mask_colors(ast: lark.Tree) -> Dict[str, Color]: + """Get MSK colors from texture parameters. + + :param ast: .props.txt AST + :return: dictionary mapping color names to values. + """ + colors = {} + + for child in ast.children: + assert child.data == 'definition' + def_name, array_qual, value = child.children + + match def_name: + case 'VectorParameterValues': + assert array_qual is not None + assert value.data == 'structured_block' + + for tex_param_def in value.children: + _, _, tex_param = tex_param_def.children + param_info, param_val, _ = tex_param.children # ParameterInfo, ParameterValue, ParameterName + _, _, color_vec = param_val.children + + color_name = param_info.children[2].children[0].children[2].children[0].value.strip() + + # Ignore unused materials + if color_vec.data != 'structured_block': + continue + + color = { + 'r': 0.0, + 'g': 0.0, + 'b': 0.0, + 'a': 1.0 + } + + not_a_color = False + for channel_def in color_vec.children: + channel_name, _, channel = channel_def.children + channel_name = channel_name.lower() + + if channel_name not in {'r', 'g', 'b', 'a'}: + not_a_color = True + continue + + color[channel_name] = float(channel.children[0].value) + + if not_a_color: + continue + + colors[color_name.lower()] = (color['r'], color['g'], color['b'], color['a']) + + return colors \ No newline at end of file diff --git a/umodel_tools/map_importer.py b/umodel_tools/map_importer.py index e36ecd0..19762e3 100644 --- a/umodel_tools/map_importer.py +++ b/umodel_tools/map_importer.py @@ -4,7 +4,7 @@ import typing as t import enum -import mathutils as mu +import mathutils import bpy import tqdm @@ -20,7 
+20,7 @@ def split_object_path(object_path):     path_parts = object_path.split(".")     if len(path_parts) > 1: -        # Usually works, but will fail If the path contains multiple periods. +        # Usually works, but will fail If the path contains multiple periods.         return path_parts[0]     # Nothing to do @@ -51,16 +51,16 @@ def __init__(self,         self.scale = scale     @property -    def matrix_4x4(self) -> mu.Matrix: -        return mu.Matrix.LocRotScale(mu.Vector(self.pos), -                                     mu.Euler(self.rot_euler, 'XYZ'), -                                     mu.Vector(self.scale)) +    def matrix_4x4(self) -> mathutils.Matrix: +        return mathutils.Matrix.LocRotScale(mathutils.Vector(self.pos), +                                     mathutils.Euler(self.rot_euler, 'XYZ'), +                                     mathutils.Vector(self.scale))  def get_parent_transform_matrix(json_obj,                                 obj_type: str,                                 obj_outer: str, -                                obj_name: str) -> mu.Matrix: +                                obj_name: str) -> mathutils.Matrix:     for entity in json_obj:         if (((entity_type := entity.get("Type", None)) is None or entity_type != obj_type) @@ -115,7 +115,7 @@ class StaticMesh:     asset_path: str = ""     transform: InstanceTransform     instance_transforms: list[InstanceTransform] -    parent_mtx: t.Optional[mu.Matrix] = None +    parent_mtx: t.Optional[mathutils.Matrix] = None     # these are just properties to help with debugging     no_entity: bool = False @@ -211,8 +211,8 @@ def __init__(self, json_obj: t.Any, json_entity: t.Any, entity_type: str) -> Non                 trs.pos = (pos.get("X") / 100, pos.get("Y") / -100, pos.get("Z") / 100)             if (rot := trs_data.get("Rotation", None)) is not None: -                rot_quat = mu.Quaternion((rot.get("W"), rot.get("X"), rot.get("Y"), rot.get("Z"))) -                quat_to_euler: mu.Euler = rot_quat.to_euler()  # pylint: disable=no-value-for-parameter +                rot_quat = mathutils.Quaternion((rot.get("W"), rot.get("X"), rot.get("Y"), rot.get("Z"))) +                quat_to_euler: mathutils.Euler = rot_quat.to_euler()  # pylint: disable=no-value-for-parameter                 trs.rot_euler = (-quat_to_euler.x, quat_to_euler.y, -quat_to_euler.z)             if (scale := trs_data.get("Scale3D", None)) is not None: @@ -256,7 +256,7 @@ def 
link_object_instance(self, new_obj.scale = (trs.scale[0], trs.scale[1], trs.scale[2]) new_obj.location = (trs.pos[0], trs.pos[1], trs.pos[2]) new_obj.rotation_mode = 'XYZ' - new_obj.rotation_euler = mu.Euler((trs.rot_euler[0], trs.rot_euler[1], trs.rot_euler[2]), 'XYZ') + new_obj.rotation_euler = mathutils.Euler((trs.rot_euler[0], trs.rot_euler[1], trs.rot_euler[2]), 'XYZ') else: new_obj.matrix_world = self.parent_mtx @ trs.matrix_4x4 @@ -303,7 +303,7 @@ class IntensityUnits(enum.Enum): rot: tuple[float, float, float] = (0.0, 0.0, 0.0) scale: tuple[float, float, float] = (1.0, 1.0, 1.0) color: tuple[float, float, float] = (1.0, 1.0, 1.0) - parent_mtx: t.Optional[mu.Matrix] = None + parent_mtx: t.Optional[mathutils.Matrix] = None intensity: float = math.pi intensity_units: IntensityUnits = IntensityUnits.Unitless cone_angle: float @@ -379,7 +379,7 @@ def temp_to_color(temp: float) -> tuple[float, float, float]: ((b[0] * temp + b[1]) * temp + b[2]) * temp + b[3]) @staticmethod - def quaternion_to_euler(quaternion: mu.Quaternion) -> tuple[float, float, float]: + def quaternion_to_euler(quaternion: mathutils.Quaternion) -> tuple[float, float, float]: w, y, x, z = quaternion roll = math.atan2(2 * (w * x + y * z), 1 - 2 * (x * x + y * y)) pitch = math.asin(max(min(2 * (w * y - z * x), 1), -1)) @@ -398,7 +398,7 @@ def normalize_rotation(x, y, z) -> tuple[float, float, float]: :return: Euler angle as tuple in Blender's coordinate space. 
""" - euler = mu.Euler(( + euler = mathutils.Euler(( math.radians(x), math.radians(y), math.radians(z) @@ -407,7 +407,7 @@ def normalize_rotation(x, y, z) -> tuple[float, float, float]: quat = euler.to_quaternion() # pylint: disable=assignment-from-no-return # swizzle the quaternion - quat = mu.Quaternion([quat.w, quat.x, quat.y, -quat.z]) + quat = mathutils.Quaternion([quat.w, quat.x, quat.y, -quat.z]) x, y, z = GameLight.quaternion_to_euler(quat) @@ -477,7 +477,7 @@ def __init__(self, json_obj, json_entity) -> None: if (temp := props.get("Temperature", None)) is not None: self.color = self.temp_to_color(temp) - # TODO: for now color overrides the temperature based setting if present. Check if they're mutually exclusive. + # TODO: for now color overrides the temperature based setting if present. Check if they're mathutilstually exclusive. if (color := props.get("LightColor", None)) is not None: self.color = self.get_linear_rgb(color) @@ -524,7 +524,7 @@ def import_light(self, collection) -> bool: light_obj.scale = (self.scale[0], self.scale[1], self.scale[2]) light_obj.location = (self.pos[0], self.pos[1], self.pos[2]) light_obj.rotation_mode = 'XYZ' - light_obj.rotation_euler = mu.Euler((self.rot[0], self.rot[1], self.rot[2]), 'XYZ') + light_obj.rotation_euler = mathutils.Euler((self.rot[0], self.rot[1], self.rot[2]), 'XYZ') else: local_mtx = InstanceTransform() local_mtx.pos = self.pos diff --git a/umodel_tools/operators.py b/umodel_tools/operators.py index a3bcb0d..086e87f 100644 --- a/umodel_tools/operators.py +++ b/umodel_tools/operators.py @@ -7,7 +7,7 @@ import tqdm.contrib import bpy import bpy_extras.io_utils -import mathutils as mu +import mathutils from . import utils from . import asset_importer @@ -15,9 +15,11 @@ from . import map_importer from . 
import preferences +from bpy.props import BoolProperty, StringProperty, EnumProperty + def _get_object_aabb_verts(obj: bpy.types.Object) -> list[tuple[float, float, float]]: - return [obj.matrix_world @ mu.Vector(corner) for corner in obj.bound_box] + return [obj.matrix_world @ mathutils.Vector(corner) for corner in obj.bound_box] class UMODELTOOLS_OT_recover_unreal_asset(asset_importer.AssetImporter, bpy.types.Operator): @@ -128,7 +130,7 @@ class UMODELTOOLS_OT_import_unreal_assets(asset_importer.AssetImporter, bpy.type bl_description = "Imports a subdirectory of assets to the specified asset directory" bl_options = {'REGISTER', 'UNDO'} - asset_sub_dir: bpy.props.StringProperty( + asset_sub_dir: StringProperty( name="Asset subdir", description="Path to a subdirectory containing assets" ) @@ -180,7 +182,7 @@ def execute(self, context: bpy.types.Context) -> set[str]: for root, _, files in os.walk(asset_sub_dir_abs): for file in files: _, ext = os.path.splitext(file) - if ext not in {'.psk', '.pskx'}: + if ext not in {'.psk', '.pskx', '.uemodel'}: continue total_models += 1 @@ -192,20 +194,31 @@ def execute(self, context: bpy.types.Context) -> set[str]: for root, _, files in os.walk(asset_sub_dir_abs): for file in files: file_base, ext = os.path.splitext(file) - if ext not in {'.psk', '.pskx'}: + if ext not in {'.psk', '.pskx', '.uemodel'}: continue file_abs = os.path.join(root, file_base) + '.uasset' file_rel = os.path.relpath(file_abs, umodel_export_dir) print(f"\n\nImporting asset {file_rel}...") - self._load_asset(context=context, - asset_dir=asset_dir, - asset_path=file_rel, - umodel_export_dir=umodel_export_dir, - load=False, - db=db, - game_profile=profile.game) + + if self.link_to_scene and self.append_to_scene: + return self._op_message('ERROR', "Come on, 'Link' or 'Append'—you can't have both!") + elif self.link_to_scene: + # Load created assets into current scene by linking + self._load_asset_linked(context=context, asset_dir=asset_dir, 
asset_path=file_rel, + umodel_export_dir=umodel_export_dir, game_profile=profile.game) + elif self.append_to_scene: + # Load created assets into current scene by appending + self._load_asset_appended(context=context, asset_dir=asset_dir, asset_path=file_rel, + umodel_export_dir=umodel_export_dir, game_profile=profile.game) + elif self.overwrite_existed: + self._load_asset(context=context, asset_dir=asset_dir, asset_path=file_rel, + umodel_export_dir=umodel_export_dir, load=True, db=db, game_profile=profile.game) + else: + # Default behavior: asset is imported into the library but not loaded into the scene. + self._load_asset(context=context, asset_dir=asset_dir, asset_path=file_rel, + umodel_export_dir=umodel_export_dir, load=False, db=db, game_profile=profile.game) progress_bar.update(1) @@ -295,7 +308,7 @@ class UMODELTOOLS_OT_realign_asset(bpy.types.Operator): def execute(self, context: bpy.types.Context) -> set[str]: if not len(context.selected_objects) == 2: - self.report({'ERROR'}, "Exactly 2 objects must be selected.") + self.report({'ERROR'}, "Exactly 2 objects mathutilsst be selected.") return {'CANCELLED'} asset_idx = None @@ -305,7 +318,7 @@ def execute(self, context: bpy.types.Context) -> set[str]: break if asset_idx is None: - self.report({'ERROR'}, "One of the objects must be an Unreal asset.") + self.report({'ERROR'}, "One of the objects mathutilsst be an Unreal asset.") return {'CANCELLED'} asset_obj = context.selected_objects[asset_idx] diff --git a/umodel_tools/preferences.py b/umodel_tools/preferences.py index 67dc4ed..de172af 100644 --- a/umodel_tools/preferences.py +++ b/umodel_tools/preferences.py @@ -1,10 +1,12 @@ -import typing as t +import typing as t import bpy from . import PACKAGE_NAME from . import game_profiles +from bpy.props import BoolProperty, StringProperty, EnumProperty, CollectionProperty, IntProperty + def get_addon_preferences() -> 'UMODELTOOLS_AP_addon_preferences': """Returns this addon's preferences. 
@@ -18,25 +20,25 @@ class UMODELTOOLS_PG_game_profile(bpy.types.PropertyGroup): """Game profile settings """ - name: bpy.props.StringProperty( + name: StringProperty( name="Name", description="Name of the profile" ) - game: bpy.props.EnumProperty( + game: EnumProperty( name="Game", description="Game of this profile", items=game_profiles.SUPPORTED_GAMES, default=0 ) - umodel_export_dir: bpy.props.StringProperty( + umodel_export_dir: StringProperty( name="UModel Export Directory", description="Path to the UModel export directory with game assets", subtype='DIR_PATH' ) - asset_dir: bpy.props.StringProperty( + asset_dir: StringProperty( name="Asset Directory", description="Path to the directory where the assets for current project are stored", subtype='DIR_PATH' @@ -67,7 +69,7 @@ class UMODELTOOLS_OT_actions(bpy.types.Operator): bl_description = "Move items up and down, add and remove" bl_options = {'REGISTER', 'INTERNAL', 'UNDO'} - action: bpy.props.EnumProperty( + action: EnumProperty( items=( ('UP', "Up", ""), ('DOWN', "Down", ""), @@ -112,29 +114,29 @@ class UMODELTOOLS_AP_addon_preferences(bpy.types.AddonPreferences): bl_idname = PACKAGE_NAME - profiles: bpy.props.CollectionProperty( + profiles: CollectionProperty( name="Profiles", description="Saved game profiles", type=UMODELTOOLS_PG_game_profile ) - active_profile_index: bpy.props.IntProperty( + active_profile_index: IntProperty( default=0 ) - display_cur_profile: bpy.props.BoolProperty( + display_cur_profile: BoolProperty( name="Display current profile", description="Display current profile on top of Blender's window", default=True ) - verbose: bpy.props.BoolProperty( + verbose: BoolProperty( name="Verbose import", description="Print detailed logging information on import", default=False ) - debug: bpy.props.BoolProperty( + debug: BoolProperty( name="Debug", description="Enables debugging output, intended for developers only", default=False @@ -177,3 +179,55 @@ def draw(self, context: bpy.types.Context): 
layout.prop(game_profile, "game") layout.prop(game_profile, "umodel_export_dir") layout.prop(game_profile, "asset_dir") + + +class UMODELTOOLS_PT_scene_panel(bpy.types.Panel): + """Scene properties panel that references add-on preferences. + """ + bl_label = "Umodel Tools Settings" + bl_idname = "UMODELTOOLS_PT_scene_panel" + bl_space_type = 'PROPERTIES' + bl_region_type = 'WINDOW' + bl_context = 'scene' + + def draw(self, context): + # Retrieve the add-on preferences using the package name as the identifier. + addon_prefs = context.preferences.addons[PACKAGE_NAME].preferences + layout = self.layout + + # Mimic header and display options + layout.label(text="Display Options", icon='SCENE_DATA') + layout.prop(addon_prefs, "display_cur_profile") + layout.prop(addon_prefs, "verbose") + if context.preferences.view.show_developer_ui: + layout.prop(addon_prefs, "debug") + + layout.separator() + + # Mimic the game profiles list layout + layout.label(text="Game Profiles", icon='FILE_FOLDER') + row = layout.row() + row.template_list("UMODELTOOLS_UL_game_profiles", "", + addon_prefs, "profiles", + addon_prefs, "active_profile_index") + + col = row.column(align=True) + col.operator(UMODELTOOLS_OT_actions.bl_idname, icon='ADD', text="").action = 'ADD' + col.operator(UMODELTOOLS_OT_actions.bl_idname, icon='REMOVE', text="").action = 'REMOVE' + col.separator() + col.operator(UMODELTOOLS_OT_actions.bl_idname, icon='TRIA_UP', text="").action = 'UP' + col.operator(UMODELTOOLS_OT_actions.bl_idname, icon='TRIA_DOWN', text="").action = 'DOWN' + + # Display additional settings for the active profile + try: + game_profile = addon_prefs.profiles[addon_prefs.active_profile_index] + except IndexError: + pass + else: + layout.separator() + layout.label(text="Profile Settings", icon='PREFERENCES') + layout.prop(game_profile, "game") + layout.prop(game_profile, "umodel_export_dir") + layout.prop(game_profile, "asset_dir") + layout.separator() + diff --git a/umodel_tools/props_txt_parser.py 
b/umodel_tools/props_txt_parser.py index b79b54f..8c3b830 100644 --- a/umodel_tools/props_txt_parser.py +++ b/umodel_tools/props_txt_parser.py @@ -18,7 +18,7 @@ def parse_props_txt(props_txt_path: str, mode: t.Literal['MESH']) -> tuple[lark. @t.overload def parse_props_txt(props_txt_path: str, mode: t.Literal['MATERIAL'] - ) -> tuple[lark.Tree, dict[str, str], dict[str, str | float | bool]]: + ) -> tuple[lark.Tree, dict[str, str], dict[str, str | float | bool], dict[str, tuple[float, float, float, float]]]: ... @@ -69,9 +69,10 @@ def parse_props_txt(props_txt_path: str, material_paths.append(path_value.children[0].value[1:][:-1]) return ast, material_paths - + case 'MATERIAL': texture_infos = {} + vector_infos = {} base_prop_overrides = None for child in ast.children: @@ -98,6 +99,33 @@ def parse_props_txt(props_txt_path: str, tex_type = param_info.children[2].children[0].children[2].children[0].value.strip() texture_infos[tex_type] = tex_path + + case 'VectorParameterValues': + assert array_qual is not None + assert value.data == 'structured_block' + + for vec_param_def in value.children: + _, _, vec_param = vec_param_def.children + param_info, param_val, _ = vec_param.children + _, _, vec_desc = param_val.children + + if vec_desc.data != 'structured_block': + continue # skip if not a valid vec4 + + param_name = param_info.children[2].children[0].children[2].children[0].value.strip() + + color = {'r': 0.0, 'g': 0.0, 'b': 0.0, 'a': 1.0} + for channel_def in vec_desc.children: + channel_name, _, channel = channel_def.children + channel_name = channel_name.lower() + + if channel_name not in {'r', 'g', 'b', 'a'}: + continue + + color[channel_name] = float(channel.children[0].value) + + vector_infos[param_name] = (color['r'], color['g'], color['b'], color['a']) + case 'BasePropertyOverrides': assert array_qual is None assert value.data == 'structured_block' @@ -120,7 +148,7 @@ def parse_props_txt(props_txt_path: str, base_prop_overrides[prop_name] = prop_value - return 
ast, texture_infos, base_prop_overrides + return ast, texture_infos, base_prop_overrides, vector_infos case _: raise NotImplementedError() diff --git a/umodel_tools/third_party/io_import_scene_unreal_psa_psk_280.py b/umodel_tools/third_party/io_import_scene_unreal_psa_psk_280.py index b2ff31b..6860d0c 100644 --- a/umodel_tools/third_party/io_import_scene_unreal_psa_psk_280.py +++ b/umodel_tools/third_party/io_import_scene_unreal_psa_psk_280.py @@ -66,16 +66,15 @@ """ # https://github.com/gildor2/UModel/blob/master/Exporters/Psk.h - import bpy import re -from mathutils import Vector, Matrix, Quaternion +import mathutils from bpy.props import (FloatProperty, StringProperty, BoolProperty, EnumProperty, PointerProperty ) - + from struct import unpack, unpack_from, Struct import time @@ -83,7 +82,6 @@ # from mathutils import * # from math import * - def util_obj_link(context, obj): # return bpy.context.scene_collection.objects.link(obj) # bpy.context.view_layer.collections[0].collection.objects.link(obj) @@ -108,7 +106,7 @@ def util_get_scene(context): def get_uv_layers(mesh_obj): return mesh_obj.uv_layers - + def obj_select_get(obj): return obj.select_get() @@ -126,22 +124,22 @@ def util_bytes_to_str(in_bytes): class class_psk_bone: name = "" - + parent = None - + bone_index = 0 parent_index = 0 - + # scale = [] - + mat_world = None mat_world_rot = None - + orig_quat = None orig_loc = None - + children = None - + have_weight_data = False # TODO simplify? 
@@ -160,11 +158,11 @@ def util_select_all(select): if bpy.ops.pose.select_all.poll(): bpy.ops.pose.select_all(action = actionString) - + def util_ui_show_msg(msg): bpy.ops.pskpsa.message('INVOKE_DEFAULT', message = msg) - - + + PSKPSA_FILE_HEADER = { 'psk':b'ACTRHEAD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'psa':b'ANIMHEAD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -175,21 +173,21 @@ def util_is_header_valid(filename, file_ext, chunk_id, error_callback): '''Return True if chunk_id is a valid psk/psa (file_ext) 'magick number'.''' if chunk_id != PSKPSA_FILE_HEADER[file_ext]: error_callback( - "File %s is not a %s file. (header mismach)\nExpected: %s \nPresent %s" % ( + "File %s is not a %s file. (header mismach)\nExpected: %s \nPresent %s" % ( filename, file_ext, PSKPSA_FILE_HEADER[file_ext], chunk_id) - ) + ) return False return True - - + + def util_gen_name_part(filepath): '''Return file name without extension''' return re.match(r'.*[/\\]([^/\\]+?)(\..{2,5})?$', filepath).group(1) - - + + def vec_to_axis_vec(vec_in, vec_out): - '''Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)''' + '''Make **vec_out** to be an axis-aligned unit mathutils.Vector that is closest to vec_in. (basis?)''' x, y, z = vec_in if abs(x) > abs(y): if abs(x) > abs(z): @@ -201,21 +199,21 @@ def vec_to_axis_vec(vec_in, vec_out): vec_out.y = 1 if y >= 0 else -1 else: vec_out.z = 1 if z >= 0 else -1 - - + + def calc_bone_rotation(psk_bone, bone_len, bDirectly, avg_bone_len): children = psk_bone.children - vecy = Vector((0.0, 1.0, 0.0)) - quat = Quaternion((1.0, 0.0, 0.0, 0.0)) - axis_vec = Vector() - + vecy = mathutils.Vector((0.0, 1.0, 0.0)) + quat = mathutils.Quaternion((1.0, 0.0, 0.0, 0.0)) + axis_vec = mathutils.Vector() + # bone with 0 children (orphan bone) if len(children) == 0: # Single bone. ALONE. 
if psk_bone.parent == None: return (bone_len, quat) - + elif bDirectly: # @ # axis_vec = psk_bone.orig_quat * psk_bone.orig_loc @@ -233,7 +231,7 @@ def calc_bone_rotation(psk_bone, bone_len, bDirectly, avg_bone_len): # reorient bone to other axis bychanging our base Y vec... # this is not tested well - vecy = Vector((1.0, 0.0, 0.0)) + vecy = mathutils.Vector((1.0, 0.0, 0.0)) else: # @ # vec_to_axis_vec(psk_bone.orig_quat.conjugated() * psk_bone.parent.axis_vec, axis_vec) @@ -242,59 +240,59 @@ def calc_bone_rotation(psk_bone, bone_len, bDirectly, avg_bone_len): vec_to_axis_vec(v, axis_vec) return (bone_len, vecy.rotation_difference(axis_vec)) - + # bone with > 0 children BUT only 1 non orphan bone ( reorient to it! ) if bDirectly and len(children) > 1: - + childs_with_childs = 0 - + for child in filter(lambda c: len(c.children), children): - + childs_with_childs += 1 - + if childs_with_childs > 1: break - + candidate = child - + if childs_with_childs == 1: # print('candidate',psk_bone.name,candidate.name) return (len(candidate.orig_loc), vecy.rotation_difference(candidate.orig_loc)) - + # bone with > 0 children - sumvec = Vector() + sumvec = mathutils.Vector() sumlen = 0 - + for child in children: sumvec += (child.orig_loc) sumlen += child.orig_loc.length sumlen /= len(children) sumlen = max(sumlen, 0.01) - + if bDirectly: return (sumlen, vecy.rotation_difference(sumvec)) - + vec_to_axis_vec(sumvec, axis_vec) psk_bone.axis_vec = axis_vec return (sumlen, vecy.rotation_difference(axis_vec)) - + def __pass(*args,**kwargs): pass def util_check_file_header(file, ftype): header_bytes = file.read(32) - + if len(header_bytes) < 32: return False - + if not header_bytes.startswith( PSKPSA_FILE_HEADER[ftype] ): return False - + return True - - + + def color_linear_to_srgb(c): """ Convert from linear to sRGB color space. 
@@ -304,7 +302,7 @@ def color_linear_to_srgb(c): return 0.0 if c < 0.0 else c * 12.92 else: return 1.055 * pow(c, 1.0 / 2.4) - 0.055 - + def pskimport(filepath, context = None, bImportmesh = True, @@ -317,29 +315,33 @@ def pskimport(filepath, bReorientDirectly = False, bScaleDown = True, bToSRGB = True, + bSmoothShade = True, error_callback = None): ''' Import mesh and skeleton from .psk/.pskx files - + Args: bReorientBones: Axis based bone orientation to children - + error_callback: Called when importing is failed. - + error_callback = lambda msg: print('reason:', msg) - + ''' + + bpy.context.scene.unit_settings.scale_length=0.01 + if not hasattr( error_callback, '__call__'): # error_callback = __pass error_callback = print - + # ref_time = time.process_time() if not bImportbone and not bImportmesh: error_callback("Nothing to do.\nSet something for import.") return False - + print ("-----------------------------------------------") print ("---------EXECUTING PSK PYTHON IMPORTER---------") print ("-----------------------------------------------") @@ -354,7 +356,7 @@ def pskimport(filepath, if not util_check_file_header(file, 'psk'): error_callback('Not psk file:\n "'+filepath+'"') return False - + Vertices = None Wedges = None Faces = None @@ -366,86 +368,86 @@ def pskimport(filepath, Extrauvs = [] Normals = None WedgeIdx_by_faceIdx = None - + if not context: context = bpy.context - #================================================================================================== - # Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle + #================================================================================================== + # Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle # Only Name is usable. 
def read_materials(): - + nonlocal Materials - + Materials = [] - + for counter in range(chunk_datacount): (MaterialNameRaw,) = unpack_from('64s24x', chunk_data, chunk_datasize * counter) - + Materials.append( util_bytes_to_str( MaterialNameRaw ) ) - - - #================================================================================================== + + + #================================================================================================== # Faces WdgIdx1 | WdgIdx2 | WdgIdx3 | MatIdx | AuxMatIdx | SmthGrp def read_faces(): - + if not bImportmesh: return True - + nonlocal Faces, UV_by_face, WedgeIdx_by_faceIdx UV_by_face = [None] * chunk_datacount Faces = [None] * chunk_datacount WedgeIdx_by_faceIdx = [None] * chunk_datacount - + if len(Wedges) > 65536: unpack_format = '=IIIBBI' else: unpack_format = '=HHHBBI' - + unpack_data = Struct(unpack_format).unpack_from - + for counter in range(chunk_datacount): (WdgIdx1, WdgIdx2, WdgIdx3, - MatIndex, + MatIndex, AuxMatIndex, #unused SmoothingGroup # Umodel is not exporting SmoothingGroups ) = unpack_data(chunk_data, counter * chunk_datasize) - + # looks ugly # Wedges is (point_index, u, v, MatIdx) ((vertid0, u0, v0, matid0), (vertid1, u1, v1, matid1), (vertid2, u2, v2, matid2)) = Wedges[WdgIdx1], Wedges[WdgIdx2], Wedges[WdgIdx3] - + # note order: C,B,A # Faces[counter] = (vertid2, vertid1, vertid0) Faces[counter] = (vertid1, vertid0, vertid2) # Faces[counter] = (vertid1, vertid2, vertid0) # Faces[counter] = (vertid0, vertid1, vertid2) - + # uv = ( ( u2, 1.0 - v2 ), ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ) ) uv = ( ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ), ( u2, 1.0 - v2 ) ) - + # Mapping: FaceIndex <=> UV data <=> FaceMatIndex UV_by_face[counter] = (uv, MatIndex, (matid2, matid1, matid0)) - + # We need this for EXTRA UVs WedgeIdx_by_faceIdx[counter] = (WdgIdx3, WdgIdx2, WdgIdx1) - + #================================================================================================== # Vertices X | Y | Z def 
read_vertices(): - + if not bImportmesh: return True - + nonlocal Vertices - + Vertices = [None] * chunk_datacount - + unpack_data = Struct('3f').unpack_from - + if bScaleDown: for counter in range( chunk_datacount ): (vec_x, vec_y, vec_z) = unpack_data(chunk_data, counter * chunk_datasize) @@ -455,92 +457,92 @@ def read_vertices(): else: for counter in range( chunk_datacount ): Vertices[counter] = unpack_data(chunk_data, counter * chunk_datasize) - - - #================================================================================================== - # Wedges (UV) VertexId | U | V | MatIdx + + + #================================================================================================== + # Wedges (UV) VertexId | U | V | MatIdx def read_wedges(): - + if not bImportmesh: return True - + nonlocal Wedges - + Wedges = [None] * chunk_datacount - + unpack_data = Struct('=IffBxxx').unpack_from - + for counter in range( chunk_datacount ): (vertex_id, u, v, material_index) = unpack_data( chunk_data, counter * chunk_datasize ) - + # print(vertex_id, u, v, material_index) # Wedges[counter] = (vertex_id, u, v, material_index) Wedges[counter] = [vertex_id, u, v, material_index] - - #================================================================================================== + + #================================================================================================== # Bones (VBone .. 
VJointPos ) Name|Flgs|NumChld|PrntIdx|Qw|Qx|Qy|Qz|LocX|LocY|LocZ|Lngth|XSize|YSize|ZSize def read_bones(): - + nonlocal Bones, bImportbone - + if chunk_datacount == 0: bImportbone = False - + if bImportbone: # unpack_data = Struct('64s3i11f').unpack_from unpack_data = Struct('64s3i7f16x').unpack_from else: unpack_data = Struct('64s56x').unpack_from - + Bones = [None] * chunk_datacount - + for counter in range( chunk_datacount ): Bones[counter] = unpack_data( chunk_data, chunk_datasize * counter) - - - #================================================================================================== + + + #================================================================================================== # Influences (Bone Weight) (VRawBoneInfluence) ( Weight | PntIdx | BoneIdx) def read_weights(): nonlocal Weights - + if not bImportmesh: return True - + Weights = [None] * chunk_datacount - + unpack_data = Struct('fii').unpack_from - + for counter in range(chunk_datacount): Weights[counter] = unpack_data(chunk_data, chunk_datasize * counter) - - #================================================================================================== + + #================================================================================================== # Vertex colors. R G B A bytes. NOTE: it is Wedge color.(uses Wedges index) def read_vertex_colors(): - + nonlocal VertexColors - + unpack_data = Struct("=4B").unpack_from - + VertexColors = [None] * chunk_datacount - + for counter in range( chunk_datacount ): - VertexColors[counter] = unpack_data(chunk_data, chunk_datasize * counter) - - - #================================================================================================== + VertexColors[counter] = unpack_data(chunk_data, chunk_datasize * counter) + + + #================================================================================================== # Extra UV. 
U | V def read_extrauvs(): unpack_data = Struct("=2f").unpack_from - + uvdata = [None] * chunk_datacount - + for counter in range( chunk_datacount ): - uvdata[counter] = unpack_data(chunk_data, chunk_datasize * counter) - + uvdata[counter] = unpack_data(chunk_data, chunk_datasize * counter) + Extrauvs.append(uvdata) #================================================================================================== @@ -556,8 +558,8 @@ def read_normals(): for counter in range(chunk_datacount): Normals[counter] = unpack_data(chunk_data, counter * chunk_datasize) - - + + CHUNKS_HANDLERS = { 'PNTS0000': read_vertices, 'VTXW0000': read_wedges, @@ -573,100 +575,100 @@ def read_normals(): 'EXTRAUVS': read_extrauvs, 'VTXNORMS': read_normals } - + #=================================================================================================== # File. Read all needed data. # VChunkHeader Struct # ChunkID|TypeFlag|DataSize|DataCount # 0 |1 |2 |3 - + while True: - + header_bytes = file.read(32) - + if len(header_bytes) < 32: - + if len(header_bytes) != 0: error_callback("Unexpected end of file.(%s/32 bytes)" % len(header_bytes)) break - + (chunk_id, chunk_type, chunk_datasize, chunk_datacount) = unpack('20s3i', header_bytes) - + chunk_id_str = util_bytes_to_str(chunk_id) chunk_id_str = chunk_id_str[:8] - + if chunk_id_str in CHUNKS_HANDLERS: - + chunk_data = file.read( chunk_datasize * chunk_datacount) - + if len(chunk_data) < chunk_datasize * chunk_datacount: error_callback('Psk chunk %s is broken.' 
% chunk_id_str) return False - + CHUNKS_HANDLERS[chunk_id_str]() - + else: - + print('Unknown chunk: ', chunk_id_str) file.seek(chunk_datasize * chunk_datacount, 1) - - + + # print(chunk_id_str, chunk_datacount) - + file.close() - + print(" Importing file:", filepath) - + if not bImportmesh and (Bones is None or len(Bones) == 0): error_callback("Psk: no skeleton data.") return False MAX_UVS = 8 NAME_UV_PREFIX = "UV" - + # file name w/out extension gen_name_part = util_gen_name_part(filepath) gen_names = { - 'armature_object': gen_name_part + '.ao', - 'armature_data': gen_name_part + '.ad', - 'mesh_object': gen_name_part + '.mo', - 'mesh_data': gen_name_part + '.md' + 'armature_object': 'Armature', + 'armature_data': gen_name_part, + 'mesh_object': gen_name_part, + 'mesh_data': gen_name_part } - + if bImportmesh: mesh_data = bpy.data.meshes.new(gen_names['mesh_data']) mesh_obj = bpy.data.objects.new(gen_names['mesh_object'], mesh_data) - - + + #================================================================================================== # UV. Prepare if bImportmesh: if bSpltiUVdata: # store how much each "matrial index" have vertices - + uv_mat_ids = {} - + for (_, _, _, material_index) in Wedges: - + if not (material_index in uv_mat_ids): uv_mat_ids[material_index] = 1 else: uv_mat_ids[material_index] += 1 - - + + # if we have more UV material indexes than blender UV maps, then... 
if bSpltiUVdata and len(uv_mat_ids) > MAX_UVS : - + uv_mat_ids_len = len(uv_mat_ids) - + print('UVs: %s out of %s is combined in a first UV map(%s0)' % (uv_mat_ids_len - 8, uv_mat_ids_len, NAME_UV_PREFIX)) - + mat_idx_proxy = [0] * len(uv_mat_ids) - + counts_sorted = sorted(uv_mat_ids.values(), reverse = True) - + new_mat_index = MAX_UVS - 1 - + for c in counts_sorted: for mat_idx, counts in uv_mat_ids.items(): if c == counts: @@ -674,7 +676,7 @@ def read_normals(): if new_mat_index > 0: new_mat_index -= 1 # print('MatIdx remap: %s > %s' % (mat_idx,new_mat_index)) - + for i in range(len(Wedges)): Wedges[i][3] = mat_idx_proxy[Wedges[i][3]] @@ -682,17 +684,17 @@ def read_normals(): # print('uv_mat_ids', uv_mat_ids) # print('uv_mat_ids', uv_mat_ids) # for w in Wedges: - + if bImportmesh: # print("-- Materials -- (index, name, faces)") blen_materials = [] for materialname in Materials: matdata = bpy.data.materials.get(materialname) - + if matdata is None: matdata = bpy.data.materials.new( materialname ) # matdata = bpy.data.materials.new( materialname ) - + blen_materials.append( matdata ) mesh_data.materials.append( matdata ) # print(counter,materialname,TextureIndex) @@ -709,20 +711,20 @@ def init_psk_bone(i, psk_bones, name_raw): return psk_bone psk_bone_name_toolong = False - + # indexed by bone index. array of psk_bone psk_bones = [None] * len(Bones) - + if not bImportbone: #data needed for mesh-only import - + for counter,(name_raw,) in enumerate(Bones): init_psk_bone(counter, psk_bones, name_raw) - + if bImportbone: #else? 
- + # average bone length sum_bone_pos = 0 - + for counter, (name_raw, flags, NumChildren, ParentIndex, #0 1 2 3 quat_x, quat_y, quat_z, quat_w, #4 5 6 7 vec_x, vec_y, vec_z @@ -730,12 +732,12 @@ def init_psk_bone(i, psk_bones, name_raw): # joint_length, #11 # scale_x, scale_y, scale_z ) in enumerate(Bones): - + psk_bone = init_psk_bone(counter, psk_bones, name_raw) - + psk_bone.bone_index = counter psk_bone.parent_index = ParentIndex - + # Tested. 64 is getting cut to 63 if len(psk_bone.name) > 63: psk_bone_name_toolong = True @@ -744,53 +746,53 @@ def init_psk_bone(i, psk_bones, name_raw): # make sure we have valid parent_index if psk_bone.parent_index < 0: psk_bone.parent_index = 0 - + # psk_bone.scale = (scale_x, scale_y, scale_z) # print("%s: %03f %03f | %f" % (psk_bone.name, scale_x, scale_y, joint_length),scale_x) # print("%s:" % (psk_bone.name), vec_x, quat_x) # store bind pose to make it available for psa-import via CustomProperty of the Blender bone - psk_bone.orig_quat = Quaternion((quat_w, quat_x, quat_y, quat_z)) + psk_bone.orig_quat = mathutils.Quaternion((quat_w, quat_x, quat_y, quat_z)) if bScaleDown: - psk_bone.orig_loc = Vector((vec_x * 0.01, vec_y * 0.01, vec_z * 0.01)) + psk_bone.orig_loc = mathutils.Vector((vec_x * 0.01, vec_y * 0.01, vec_z * 0.01)) else: - psk_bone.orig_loc = Vector((vec_x, vec_y, vec_z)) + psk_bone.orig_loc = mathutils.Vector((vec_x, vec_y, vec_z)) # root bone must have parent_index = 0 and selfindex = 0 if psk_bone.parent_index == 0 and psk_bone.bone_index == psk_bone.parent_index: if bDontInvertRoot: - psk_bone.mat_world_rot = psk_bone.orig_quat.to_matrix() + psk_bone.mat_world_rot = psk_bone.orig_quat.to_mathutils.Matrix() else: - psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_matrix() - psk_bone.mat_world = Matrix.Translation(psk_bone.orig_loc) + psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_mathutils.Matrix() + psk_bone.mat_world = mathutils.Matrix.Translation(psk_bone.orig_loc) sum_bone_pos += 
psk_bone.orig_loc.length - - + + #================================================================================================== - # Bones. Calc World-space matrix - + # Bones. Calc World-space mathutils.Matrix + # TODO optimize math. for psk_bone in psk_bones: - + if psk_bone.parent_index == 0: if psk_bone.bone_index == 0: psk_bone.parent = None continue - + parent = psk_bones[psk_bone.parent_index] - + psk_bone.parent = parent - + parent.children.append(psk_bone) - - # mat_world - world space bone matrix WITHOUT own rotation + + # mat_world - world space bone mathutils.Matrix WITHOUT own rotation # mat_world_rot - world space bone rotation WITH own rotation # psk_bone.mat_world = parent.mat_world_rot.to_4x4() # psk_bone.mat_world.translation = parent.mat_world.translation + parent.mat_world_rot * psk_bone.orig_loc - # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix() + # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_mathutils.Matrix() psk_bone.mat_world = parent.mat_world_rot.to_4x4() @@ -799,18 +801,18 @@ def init_psk_bone(i, psk_bones, name_raw): psk_bone.mat_world.translation = parent.mat_world.translation + v - psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_matrix() + psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_mathutils.Matrix() psk_bone.mat_world_rot.rotate( parent.mat_world_rot ) # psk_bone.mat_world = ( parent.mat_world_rot.to_4x4() * psk_bone.trans) # psk_bone.mat_world.translation += parent.mat_world.translation - # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix() - - + # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_mathutils.Matrix() + + #================================================================================================== # Skeleton. Prepare. 
- + armature_data = bpy.data.armatures.new(gen_names['armature_data']) armature_obj = bpy.data.objects.new(gen_names['armature_object'], armature_data) # TODO: options for axes and x_ray? @@ -824,13 +826,13 @@ def init_psk_bone(i, psk_bones, name_raw): util_select_all(False) util_obj_select(context, armature_obj) util_obj_set_active(context, armature_obj) - + utils_set_mode('EDIT') - - + + sum_bone_pos /= len(Bones) # average sum_bone_pos *= fBonesizeRatio # corrected - + # bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize)))) bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize))*100)/100) # bone_size_choosen = max(0.01, min(sum_bone_pos, fBonesize)) @@ -838,7 +840,7 @@ def init_psk_bone(i, psk_bones, name_raw): if not bReorientBones: new_bone_size = bone_size_choosen - + #================================================================================================== # Skeleton. Build. if psk_bone_name_toolong: @@ -875,7 +877,7 @@ def init_psk_bone(i, psk_bones, name_raw): else: if bDontInvertRoot: psk_bone.orig_quat.conjugate() - + if bReorientBones: (new_bone_size, quat_orient_diff) = calc_bone_rotation(psk_bone, bone_size_choosen, bReorientDirectly, sum_bone_pos) # @ @@ -885,40 +887,40 @@ def init_psk_bone(i, psk_bones, name_raw): post_quat.rotate( psk_bone.orig_quat.conjugated() ) else: post_quat = psk_bone.orig_quat.conjugated() - - # only length of this vector is matter? - edit_bone.tail = Vector(( 0.0, new_bone_size, 0.0)) + + # only length of this mathutils.Vector is matter? 
+ edit_bone.tail = mathutils.Vector(( 0.0, new_bone_size, 0.0)) # @ - # edit_bone.matrix = psk_bone.mat_world * post_quat.to_matrix().to_4x4() + # edit_bone.matrix = psk_bone.mat_world * post_quat.to_matrix().to_4x4() m = post_quat.copy() m.rotate( psk_bone.mat_world ) - m = m.to_matrix().to_4x4() + m = m.to_matrix().to_4x4() m.translation = psk_bone.mat_world.translation - edit_bone.matrix = m - - + edit_bone.matrix = m + + # some dev code... #### FINAL # post_quat = psk_bone.orig_quat.conjugated() * quat_diff - # edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4() + # edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4() # edit_bone["post_quat"] = test_quat - #### - - # edit_bone["post_quat"] = Quaternion((1,0,0,0)) - # edit_bone.matrix = psk_bone.mat_world* psk_bone.rot + #### + # edit_bone["post_quat"] = mathutils.Quaternion((1,0,0,0)) + # edit_bone.matrix = psk_bone.mat_world* psk_bone.rot + # if edit_bone.parent: - # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4()) - # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4()) + # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4()) + # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4()) # else: - # edit_bone.matrix = psk_bone.orig_quat.to_matrix().to_4x4() - - + # edit_bone.matrix = psk_bone.orig_quat.to_matrix().to_4x4() + + # save bindPose information for .psa import # dev edit_bone["orig_quat"] = psk_bone.orig_quat @@ -928,41 +930,41 @@ def init_psk_bone(i, psk_bones, name_raw): ''' bone = edit_bone if psk_bone.parent is not None: - orig_loc = bone.matrix.translation - bone.parent.matrix.translation - 
orig_loc.rotate( bone.parent.matrix.to_quaternion().conjugated() ) - + orig_loc = bone.mathutils.Matrix.translation - bone.parent.mathutils.Matrix.translation + orig_loc.rotate( bone.parent.mathutils.Matrix.to_mathutils.Quaternion().conjugated() ) - orig_quat = bone.matrix.to_quaternion() - orig_quat.rotate( bone.parent.matrix.to_quaternion().conjugated() ) + + orig_quat = bone.mathutils.Matrix.to_mathutils.Quaternion() + orig_quat.rotate( bone.parent.mathutils.Matrix.to_mathutils.Quaternion().conjugated() ) orig_quat.conjugate() if orig_quat.dot( psk_bone.orig_quat ) < 0.95: print(bone.name, psk_bone.orig_quat, orig_quat, orig_quat.dot( psk_bone.orig_quat )) - print('parent:', bone.parent.matrix.to_quaternion(), bone.parent.matrix.to_quaternion().rotation_difference(bone.matrix.to_quaternion()) ) + print('parent:', bone.parent.mathutils.Matrix.to_mathutils.Quaternion(), bone.parent.mathutils.Matrix.to_mathutils.Quaternion().rotation_difference(bone.mathutils.Matrix.to_mathutils.Quaternion()) ) if (psk_bone.orig_loc - orig_loc).length > 0.02: print(bone.name, psk_bone.orig_loc, orig_loc, (psk_bone.orig_loc - orig_loc).length) ''' utils_set_mode('OBJECT') - + #================================================================================================== # Weights - if bImportmesh: - + if bImportmesh: + vertices_total = len(Vertices) - + for ( _, PointIndex, BoneIndex ) in Weights: if PointIndex < vertices_total: # can it be not? 
psk_bones[BoneIndex].have_weight_data = True # else: # print(psk_bones[BoneIndex].name, 'for other mesh',PointIndex ,vertices_total) - + #print("weight:", PointIndex, BoneIndex, Weight) # Weights.append(None) # print(Weights.count(None)) - - + + # Original vertex colorization code ''' # Weights.sort( key = lambda wgh: wgh[0]) @@ -992,7 +994,7 @@ def init_psk_bone(i, psk_bones, name_raw): tmpCol[2] -= 60 #Add the material to the mesh VtxCol.append(tmpCol) - + for x in range(len(Tmsh.faces)): for y in range(len(Tmsh.faces[x].v)): #find v in Weights[n][0] @@ -1013,35 +1015,35 @@ def init_psk_bone(i, psk_bones, name_raw): #=================================================================================================== # UV. Setup. - + if bImportmesh: # Trick! Create UV maps BEFORE mesh and get (0,0) coordinates for free! # ...otherwise UV coords will be copied from active, or calculated from mesh... - + if bSpltiUVdata: - + for i in range(len(uv_mat_ids)): get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX + str(i)) - + else: - + get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX+"_SINGLE") - - + + for counter, uv_data in enumerate(Extrauvs): - + if len(mesh_data.uv_layers) < MAX_UVS: - + get_uv_layers(mesh_data).new(name = "EXTRAUVS"+str(counter)) - + else: - + Extrauvs.remove(uv_data) print('Extra UV layer %s is ignored. Re-import without "Split UV data".' % counter) - - #================================================================================================== + + #================================================================================================== # Mesh. Build. 
- + mesh_data.from_pydata(Vertices,[],Faces) #================================================================================================== @@ -1050,56 +1052,56 @@ def init_psk_bone(i, psk_bones, name_raw): if Normals is not None: mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons)) mesh_data.normals_split_custom_set_from_vertices(Normals) - mesh_data.use_auto_smooth = True - + mesh_data.shade_smooth() + #=================================================================================================== # UV. Set. - + if bImportmesh: for face in mesh_data.polygons: face.material_index = UV_by_face[face.index][1] uv_layers = mesh_data.uv_layers - + if not bSpltiUVdata: uvLayer = uv_layers[0] - + # per face # for faceIdx, (faceUVs, faceMatIdx, _, _, wmidx) in enumerate(UV_by_face): for faceIdx, (faceUVs, faceMatIdx, WedgeMatIds) in enumerate(UV_by_face): - + # per vertex for vertN, uv in enumerate(faceUVs): loopId = faceIdx * 3 + vertN - + if bSpltiUVdata: uvLayer = uv_layers[WedgeMatIds[vertN]] - + uvLayer.data[loopId].uv = uv #================================================================================================== # VertexColors - + if VertexColors is not None: - + vtx_color_layer = mesh_data.vertex_colors.new(name = "PSKVTXCOL_0", do_init = False) - + pervertex = [None] * len(Vertices) - + for counter, (vertexid,_,_,_) in enumerate(Wedges): - + # Is it possible ? if (pervertex[vertexid] is not None) and (pervertex[vertexid] != VertexColors[counter]): print('Not equal vertex colors. ', vertexid, pervertex[vertexid], VertexColors[counter]) - + pervertex[vertexid] = VertexColors[counter] - - + + for counter, loop in enumerate(mesh_data.loops): - + color = pervertex[ loop.vertex_index ] - + if color is None: vtx_color_layer.data[ counter ].color = (1.,1.,1.,1.) 
else: @@ -1117,16 +1119,16 @@ def init_psk_bone(i, psk_bones, name_raw): color[2] / 255, color[3] / 255 ) - + #=================================================================================================== # Extra UVs. Set. - + # for counter, uv_data in enumerate(Extrauvs): - + # uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ] - + # for uv_index, uv_coords in enumerate(uv_data): - + # uvLayer.data[uv_index].uv = (uv_coords[0], 1.0 - uv_coords[1]) @@ -1135,7 +1137,7 @@ def init_psk_bone(i, psk_bones, name_raw): uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ] for faceIdx, (WedgeIdx3,WedgeIdx2,WedgeIdx1) in enumerate(WedgeIdx_by_faceIdx): - + # equal to gltf uvLayer.data[faceIdx*3 ].uv = (uv_data[WedgeIdx2][0], 1.0 - uv_data[WedgeIdx2][1]) uvLayer.data[faceIdx*3+1].uv = (uv_data[WedgeIdx1][0], 1.0 - uv_data[WedgeIdx1][1]) @@ -1143,26 +1145,26 @@ def init_psk_bone(i, psk_bones, name_raw): # uvLayer.data[faceIdx*3 ].uv = (uv_data[WedgeIdx3][0], 1.0 - uv_data[WedgeIdx3][1]) # uvLayer.data[faceIdx*3+1].uv = (uv_data[WedgeIdx2][0], 1.0 - uv_data[WedgeIdx2][1]) # uvLayer.data[faceIdx*3+2].uv = (uv_data[WedgeIdx1][0], 1.0 - uv_data[WedgeIdx1][1]) - - + + #=================================================================================================== # Mesh. Vertex Groups. Bone Weights. - + for psk_bone in psk_bones: if psk_bone.have_weight_data: psk_bone.vertex_group = mesh_obj.vertex_groups.new(name = psk_bone.name) # else: # print(psk_bone.name, 'have no influence on this mesh') - + for weight, vertex_id, bone_index_w in filter(None, Weights): psk_bones[bone_index_w].vertex_group.add((vertex_id,), weight, 'ADD') - - + + #=================================================================================================== # Skeleton. Colorize. 
- + if bImportbone: - + bone_group_unused = armature_obj.pose.bone_groups.new(name = "Unused bones") bone_group_unused.color_set = 'THEME14' @@ -1172,62 +1174,67 @@ def init_psk_bone(i, psk_bones, name_raw): armature_data.show_group_colors = True for psk_bone in psk_bones: - + pose_bone = armature_obj.pose.bones[psk_bone.name] - + if psk_bone.have_weight_data: - + if len(psk_bone.children) == 0: pose_bone.bone_group = bone_group_nochild - + else: pose_bone.bone_group = bone_group_unused - - + + #=================================================================================================== # Final - + if bImportmesh: - + util_obj_link(context, mesh_obj) util_select_all(False) - - - if not bImportbone: - + + + if not bImportbone: + util_obj_select(context, mesh_obj) util_obj_set_active(context, mesh_obj) - + else: # select_all(False) util_obj_select(context, armature_obj) - + # parenting mesh to armature object mesh_obj.parent = armature_obj mesh_obj.parent_type = 'OBJECT' - + # add armature modifier blender_modifier = mesh_obj.modifiers.new( armature_obj.data.name, type = 'ARMATURE') blender_modifier.show_expanded = False blender_modifier.use_vertex_groups = True blender_modifier.use_bone_envelopes = False blender_modifier.object = armature_obj - + # utils_set_mode('OBJECT') # select_all(False) util_obj_select(context, armature_obj) util_obj_set_active(context, armature_obj) - + # print("Done: %f sec." 
% (time.process_time() - ref_time)) utils_set_mode('OBJECT') + + # apply smooth shading + if bImportmesh and bSmoothShade: + for f in mesh_data.polygons: + f.use_smooth = True return True class class_psa_bone: name = "" - + parent = None - + fcurve_loc_x = None fcurve_loc_y = None fcurve_loc_z = None @@ -1235,33 +1242,33 @@ class class_psa_bone: fcurve_quat_y = None fcurve_quat_z = None fcurve_quat_w = None - + post_quat = None orig_quat = None orig_loc = None - + def blen_get_armature_from_selection(): armature_obj = None - + for obj in bpy.data.objects: if obj.type == 'ARMATURE' and obj_select_get(obj): armature_obj = obj break - - if armature_obj is None: + + if armature_obj is None: for obj in bpy.data.objects: if obj.type == 'MESH' and obj_select_get(obj): for modifier in obj.modifiers: if modifier.type == 'ARMATURE': armature_obj = modifier.object break - + return armature_obj - - + + def psaimport(filepath, context = None, oArmature = None, @@ -1277,37 +1284,37 @@ def psaimport(filepath, error_callback = print ): """Import animation data from 'filepath' using 'oArmature' - + Args: first_frames: (0 - import all) Import only 'first_frames' from each action - + bActionsToTrack: Put all imported actions in one NLAtrack. 
- + oArmature: Skeleton used to calculate keyframes """ print ("-----------------------------------------------") print ("---------EXECUTING PSA PYTHON IMPORTER---------") print ("-----------------------------------------------") - + file_ext = 'psa' try: psafile = open(filepath, 'rb') except IOError: error_callback('Error while opening file for reading:\n "'+filepath+'"') return False - + print ("Importing file: ", filepath) - - + + if not context: context = bpy.context - + armature_obj = oArmature - - if armature_obj is None: + + if armature_obj is None: armature_obj = blen_get_armature_from_selection() if armature_obj is None: error_callback("No armature selected.") @@ -1327,32 +1334,32 @@ def read_chunk(): (chunk_id, chunk_type, chunk_datasize, chunk_datacount) = unpack('20s3i', psafile.read(32)) - + chunk_data = psafile.read(chunk_datacount * chunk_datasize) - #============================================================================================== + #============================================================================================== # General Header - #============================================================================================== + #============================================================================================== read_chunk() - + if not util_is_header_valid(filepath, file_ext, chunk_id, error_callback): return False - - #============================================================================================== + + #============================================================================================== # Bones (FNamedBoneBinary) - #============================================================================================== + #============================================================================================== read_chunk() - + psa_bones = {} - + def new_psa_bone(bone, pose_bone): psa_bone = class_psa_bone() - + psa_bones[pose_bone.name] = psa_bone - + psa_bone.name = pose_bone.name - + 
psa_bone.pose_bone = pose_bone - + if bone.parent != None: # does needed parent bone was added from psa file if bone.parent.name in psa_bones: @@ -1367,25 +1374,25 @@ def new_psa_bone(bone, pose_bone): if bone.get('orig_quat') is None: if bone.parent != None: + + psa_bone.orig_loc = bone.matrix_local.translation - bone.parent.matrix_local.translation + psa_bone.orig_loc.rotate( bone.parent.matrix_local.to_quaternion().conjugated() ) - psa_bone.orig_loc = bone.matrix_local.translation - bone.parent.matrix_local.translation - psa_bone.orig_loc.rotate( bone.parent.matrix_local.to_quaternion().conjugated() ) - - psa_bone.orig_quat = bone.matrix_local.to_quaternion() - psa_bone.orig_quat.rotate( bone.parent.matrix_local.to_quaternion().conjugated() ) + psa_bone.orig_quat = bone.matrix_local.to_quaternion() + psa_bone.orig_quat.rotate( bone.parent.matrix_local.to_quaternion().conjugated() ) psa_bone.orig_quat.conjugate() else: - psa_bone.orig_loc = bone.matrix_local.translation.copy() - psa_bone.orig_quat = bone.matrix_local.to_quaternion() + psa_bone.orig_loc = bone.matrix_local.translation.copy() + psa_bone.orig_quat = bone.matrix_local.to_quaternion() psa_bone.post_quat = psa_bone.orig_quat.conjugated() else: - psa_bone.orig_quat = Quaternion(bone['orig_quat']) - psa_bone.orig_loc = Vector(bone['orig_loc']) - psa_bone.post_quat = Quaternion(bone['post_quat']) + psa_bone.orig_quat = mathutils.Quaternion(bone['orig_quat']) + psa_bone.orig_loc = mathutils.Vector(bone['orig_loc']) + psa_bone.post_quat = mathutils.Quaternion(bone['post_quat']) return psa_bone - + #Bones Data BoneIndex2Name = [None] * chunk_datacount BoneNotFoundList = [] @@ -1395,28 +1402,28 @@ def new_psa_bone(bone, pose_bone): # printlog("Name\tFlgs\tNumChld\tPrntIdx\tQx\tQy\tQz\tQw\tLocX\tLocY\tLocZ\tLength\tXSize\tYSize\tZSize\n") - + # for case insensetive comparison # key = lowered name # 
value = orignal name skeleton_bones_lowered = {} - + for blender_bone_name in armature_obj.data.bones.keys(): skeleton_bones_lowered[blender_bone_name.lower()] = blender_bone_name - + for counter in range(chunk_datacount): - + # tPrntIdx is -1 for parent; and 0 for other; no more useful data # indata = unpack_from('64s3i11f', chunk_data, chunk_datasize * counter) (indata) = unpack_from('64s56x', chunk_data, chunk_datasize * counter) in_name = util_bytes_to_str(indata[0]) # bonename = util_bytes_to_str(indata[0]).upper() - + in_name_lowered = in_name.lower() if in_name_lowered in skeleton_bones_lowered: orig_name = skeleton_bones_lowered[in_name_lowered] - + count_duplicates = BonePsaImportedNames.count( in_name_lowered ) if count_duplicates > 0: @@ -1433,40 +1440,40 @@ def new_psa_bone(bone, pose_bone): print(" PSK do not have numbered duplicate name(but PSA have!):", duplicate_name_numbered) BonePsaImportedNames.append(in_name_lowered) continue - - - # use a skeleton bone name + + + # use a skeleton bone name BoneIndex2Name[counter] = orig_name - PsaBonesToProcess[counter] = new_psa_bone(armature_obj.data.bones[orig_name], + PsaBonesToProcess[counter] = new_psa_bone(armature_obj.data.bones[orig_name], armature_obj.pose.bones[orig_name]) BonePsaImportedNames.append(in_name_lowered) else: # print("Can't find the bone:", orig_name, in_name_lowered) BoneNotFoundList.append(counter) - - + + if len(psa_bones) == 0: error_callback('No bone was match!\nSkip import!') return False - + # does anyone care? for blender_bone_name in armature_obj.data.bones.keys(): if BoneIndex2Name.count(blender_bone_name) == 0: BonesWithoutAnimation.append(blender_bone_name) - + if len(BoneNotFoundList) > 0: print('PSA have data for more bones: %i.' 
% len(BoneNotFoundList)) - + if len(BonesWithoutAnimation) > 0: print('PSA do not have data for %i bones:\n' % len(BonesWithoutAnimation), ', '.join(BonesWithoutAnimation)) - #============================================================================================== + #============================================================================================== # Animations (AniminfoBinary) - #============================================================================================== + #============================================================================================== read_chunk() Raw_Key_Nums = 0 Action_List = [None] * chunk_datacount - + for counter in range(chunk_datacount): (action_name_raw, #0 group_name_raw, #1 @@ -1481,18 +1488,18 @@ def new_psa_bone(bone, pose_bone): FirstRawFrame, #10 NumRawFrames #11 ) = unpack_from('64s64s4i3f3i', chunk_data, chunk_datasize * counter) - + action_name = util_bytes_to_str( action_name_raw ) group_name = util_bytes_to_str( group_name_raw ) Raw_Key_Nums += Totalbones * NumRawFrames Action_List[counter] = ( action_name, group_name, Totalbones, NumRawFrames) - - #============================================================================================== + + #============================================================================================== # Raw keys (VQuatAnimKey) 3f vec, 4f quat, 1f time - #============================================================================================== + #============================================================================================== read_chunk() - + if(Raw_Key_Nums != chunk_datacount): error_callback( 'Raw_Key_Nums Inconsistent.' 
@@ -1502,35 +1509,35 @@ def new_psa_bone(bone, pose_bone): return False Raw_Key_List = [None] * chunk_datacount - + unpack_data = Struct('3f4f4x').unpack_from - + for counter in range(chunk_datacount): - pos = Vector() - quat = Quaternion() - + pos = mathutils.Vector() + quat = mathutils.Quaternion() + ( pos.x, pos.y, pos.z, quat.x, quat.y, quat.z, quat.w ) = unpack_data( chunk_data, chunk_datasize * counter) - + if bScaleDown: Raw_Key_List[counter] = (pos * 0.01, quat) else: Raw_Key_List[counter] = (pos, quat) - + psafile.close() - + utils_set_mode('OBJECT') # index of current frame in raw input data raw_key_index = 0 - + util_obj_set_active(context, armature_obj) - + gen_name_part = util_gen_name_part(filepath) - + armature_obj.animation_data_create() - + if bActionsToTrack: nla_track = armature_obj.animation_data.nla_tracks.new() nla_track.name = gen_name_part @@ -1544,25 +1551,25 @@ def new_psa_bone(bone, pose_bone): if track.strips[-1].frame_end > nla_track_last_frame: nla_track_last_frame = track.strips[-1].frame_end - + is_first_action = True first_action = None - + for counter, (Name, Group, Totalbones, NumRawFrames) in enumerate(Action_List): ref_time = time.process_time() - + if Group != 'None': Name = "(%s) %s" % (Group,Name) if bFilenameAsPrefix: Name = "(%s) %s" % (gen_name_part, Name) - + action = bpy.data.actions.new(name = Name) - + # force print usefull information to console(due to possible long execution) print("Action {0:>3d}/{1:<3d} frames: {2:>4d} {3}".format( counter+1, len(Action_List), NumRawFrames, Name) ) - + if first_frames > 0: maxframes = first_frames keyframes = min(first_frames, NumRawFrames) @@ -1571,26 +1578,26 @@ def new_psa_bone(bone, pose_bone): else: maxframes = 99999999 keyframes = NumRawFrames - + # create all fcurves(for all bones) for an action # for pose_bone in armature_obj.pose.bones: for psa_bone in PsaBonesToProcess: if psa_bone is None: continue pose_bone = psa_bone.pose_bone - - data_path = 
pose_bone.path_from_id("rotation_quaternion") + + data_path = pose_bone.path_from_id("rotation_quaternion") psa_bone.fcurve_quat_w = action.fcurves.new(data_path, index = 0) psa_bone.fcurve_quat_x = action.fcurves.new(data_path, index = 1) psa_bone.fcurve_quat_y = action.fcurves.new(data_path, index = 2) psa_bone.fcurve_quat_z = action.fcurves.new(data_path, index = 3) - + if not bRotationOnly: data_path = pose_bone.path_from_id("location") psa_bone.fcurve_loc_x = action.fcurves.new(data_path, index = 0) psa_bone.fcurve_loc_y = action.fcurves.new(data_path, index = 1) psa_bone.fcurve_loc_z = action.fcurves.new(data_path, index = 2) - + # 1. Pre-add keyframes! \0/ # 2. Set data: keyframe_points[].co[0..1] # 3. If 2 is not done, do 4: (important!!!) @@ -1603,23 +1610,23 @@ def new_psa_bone(bone, pose_bone): psa_bone.fcurve_quat_z.keyframe_points.add(keyframes) if not bRotationOnly: - psa_bone.fcurve_loc_x.keyframe_points.add(keyframes) - psa_bone.fcurve_loc_y.keyframe_points.add(keyframes) - psa_bone.fcurve_loc_z.keyframe_points.add(keyframes) - + psa_bone.fcurve_loc_x.keyframe_points.add(keyframes) + psa_bone.fcurve_loc_y.keyframe_points.add(keyframes) + psa_bone.fcurve_loc_z.keyframe_points.add(keyframes) + for i in range(0,min(maxframes, NumRawFrames)): # raw_key_index+= Totalbones * 5 #55 for j in range(Totalbones): if j in BoneNotFoundList: raw_key_index += 1 continue - + psa_bone = PsaBonesToProcess[j] # pose_bone = psa_bone.pose_bone - + p_pos = Raw_Key_List[raw_key_index][0] p_quat = Raw_Key_List[raw_key_index][1] - + # @ # if psa_bone.parent: # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat) @@ -1642,17 +1649,17 @@ def new_psa_bone(bone, pose_bone): q.rotate( p_quat ) quat.rotate( q.conjugated() ) - + # @ # loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc - + if not bRotationOnly: loc = (p_pos - psa_bone.orig_loc) # "edit bone" location is in "parent 
space" # but "pose bone" location is in "local space(bone)" # so we need to transform from parent(edit_bone) to local space (pose_bone) loc.rotate( psa_bone.post_quat.conjugated() ) - + # if not bRotationOnly: # loc = (p_pos - psa_bone.orig_loc) # if psa_bone.parent is not None: @@ -1663,48 +1670,48 @@ def new_psa_bone(bone, pose_bone): # loc.rotate( q.conjugated() ) # loc.rotate( q ) # pass - + # quat = p_quat.conjugated() # quat = p_quat # quat.rotate( psa_bone.orig_quat.conjugated() ) - # quat = Quaternion() + # quat = mathutils.Quaternion() # loc = -p_pos # loc = (p_pos - psa_bone.orig_loc) - # loc = Vector() + # loc = mathutils.Vector() # loc.rotate( psa_bone.post_quat.conjugated() ) # Set it? - # pose_bone.rotation_quaternion = quat + # pose_bone.rotation_mathutils.Quaternion = quat # pose_bone.location = loc - # pose_bone.rotation_quaternion = orig_rot.conjugated() - # pose_bone.location = p_pos - (pose_bone.bone.matrix_local.translation - pose_bone.bone.parent.matrix_local.translation) - + # pose_bone.rotation_mathutils.Quaternion = orig_rot.conjugated() + # pose_bone.location = p_pos - (pose_bone.bone.mathutils.Matrix_local.translation - pose_bone.bone.parent.mathutils.Matrix_local.translation) + ##### Works + post_quat (without location works) # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat) # loc = psa_bone.post_quat.conjugated() * (p_pos - psa_bone.orig_loc) - + psa_bone.fcurve_quat_w.keyframe_points[i].co = i, quat.w psa_bone.fcurve_quat_x.keyframe_points[i].co = i, quat.x psa_bone.fcurve_quat_y.keyframe_points[i].co = i, quat.y psa_bone.fcurve_quat_z.keyframe_points[i].co = i, quat.z - + psa_bone.fcurve_quat_w.keyframe_points[i].interpolation = fcurve_interpolation psa_bone.fcurve_quat_x.keyframe_points[i].interpolation = fcurve_interpolation psa_bone.fcurve_quat_y.keyframe_points[i].interpolation = fcurve_interpolation psa_bone.fcurve_quat_z.keyframe_points[i].interpolation = fcurve_interpolation - - + + 
if not bRotationOnly: psa_bone.fcurve_loc_x.keyframe_points[i].co = i, loc.x psa_bone.fcurve_loc_y.keyframe_points[i].co = i, loc.y psa_bone.fcurve_loc_z.keyframe_points[i].co = i, loc.z - + psa_bone.fcurve_loc_x.keyframe_points[i].interpolation = fcurve_interpolation psa_bone.fcurve_loc_y.keyframe_points[i].interpolation = fcurve_interpolation psa_bone.fcurve_loc_z.keyframe_points[i].interpolation = fcurve_interpolation - + # Old path. Slower. # psa_bone.fcurve_quat_w.keyframe_points.insert(i,quat.w,{'NEEDED','FAST'}).interpolation = fcurve_interpolation # psa_bone.fcurve_quat_x.keyframe_points.insert(i,quat.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation @@ -1715,14 +1722,14 @@ def new_psa_bone(bone, pose_bone): # psa_bone.fcurve_loc_y.keyframe_points.insert(i,loc.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation # psa_bone.fcurve_loc_z.keyframe_points.insert(i,loc.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation raw_key_index += 1 - + # on first frame # break raw_key_index += (NumRawFrames-min(maxframes,NumRawFrames)) * Totalbones # Add action to tail of the nla track if bActionsToTrack: - + if len(nla_track.strips) == 0: strip = nla_stripes.new(Name, nla_track_last_frame, action) else: @@ -1736,17 +1743,17 @@ def new_psa_bone(bone, pose_bone): if is_first_action: first_action = action is_first_action = False - + print("Done: %f sec." 
% (time.process_time() - ref_time)) # break on first animation set # break - + scene = util_get_scene(context) if not bActionsToTrack: if not scene.is_nla_tweakmode: armature_obj.animation_data.action = first_action - + if bUpdateTimelineRange: scene.frame_start = 0 @@ -1760,7 +1767,7 @@ def new_psa_bone(bone, pose_bone): util_select_all(False) util_obj_select(context, armature_obj) util_obj_set_active(context, armature_obj) - + # 2.8 crashes # scene.frame_set(0) @@ -1779,27 +1786,27 @@ def execute(self, context): for line in self.lines: if len(line) > maxlen: maxlen = len(line) - + print(self.message) - + self.report({'WARNING'}, self.message) return {'FINISHED'} - + def invoke(self, context, event): self.lines = self.message.split("\n") maxlen = 0 for line in self.lines: if len(line) > maxlen: maxlen = len(line) - + self.line0 = self.lines.pop(0) - + return context.window_manager.invoke_props_dialog(self, width = 100 + 6*maxlen) - + def cancel(self, context): # print('cancel') self.execute(self) - + def draw(self, context): layout = self.layout sub = layout.column() @@ -1807,7 +1814,7 @@ def draw(self, context): for line in self.lines: sub.label(text = line) - + #properties for panels, and Operator. 
class ImportProps(): @@ -1838,7 +1845,7 @@ class ImportProps(): name = "Reorient directly", description = "Directly to children.\n * Axes will not be preserved.\n * Orphan bones - in direction from parent head\n * With only one non-orphan bone - to that one.", default = False, - ) + ) import_mode : EnumProperty( name = "Import mode.", items = (('All','All','Import mesh and skeleton'), @@ -1873,24 +1880,29 @@ class ImportProps(): bScaleDown : BoolProperty( name = "Scale down", description = " * Used by PSK and PSA.\n * Multiply coordinates by 0.01\n * From \"cm.\" to \"m.\"", - default = True, + default = False, ) bToSRGB : BoolProperty( name = "sRGB vertex color", description = "Apply 'linear RGB -> sRGB' conversion over vertex colors", default = True, ) - + bSmoothShade : BoolProperty( + name = "Smooth shading", + description = "Apply smooth shading after importing.", + default = True, + ) + def draw_psk(self, context): props = bpy.context.scene.pskpsa_import layout = self.layout layout.prop(props, 'import_mode', expand = True) layout.prop(props, 'bReorientBones') - + sub = layout.row() sub.prop(props, 'bReorientDirectly') sub.enabled = props.bReorientBones - + # layout.prop(props, 'bDontInvertRoot') layout.prop(props, 'bSpltiUVdata') sub = layout.row() @@ -1898,12 +1910,13 @@ def draw_psk(self, context): sub.prop(props, 'bDontInvertRoot') if not props.bDontInvertRoot: sub.label(text = "", icon = 'ERROR') - + layout.prop(props, 'bScaleDown') layout.prop(props, 'bToSRGB') layout.prop(props, 'fBonesizeRatio') layout.prop(props, 'fBonesize') - + layout.prop(props, 'bSmoothShade') + def draw_psa(self, context): props = context.scene.pskpsa_import layout = self.layout @@ -1913,7 +1926,7 @@ def draw_psa(self, context): layout.prop(props,'bRotationOnly') # layout.prop(props, 'bDontInvertRoot') # layout.separator() - + class PskImportOptions(bpy.types.PropertyGroup, ImportProps): pass @@ -1955,7 +1968,7 @@ def execute(self, context): else: util_ui_show_msg( "Can't find 
any situable Armature modifier for selected mesh.") - + elif context.object.type == 'ARMATURE': for obj in bpy.context.selected_objects: if obj.type == 'MESH': @@ -1965,12 +1978,12 @@ def execute(self, context): blen_hide_unused( context.object, context.object) return {'FINISHED'} - + return {'FINISHED'} - + class IMPORT_OT_psk(bpy.types.Operator, ImportProps): - + bl_idname = "import_scene.psk" bl_label = "Import PSK" bl_space_type = "PROPERTIES" @@ -1986,22 +1999,22 @@ class IMPORT_OT_psk(bpy.types.Operator, ImportProps): ) files : bpy.props.CollectionProperty(type=bpy.types.OperatorFileListElement, options={'HIDDEN', 'SKIP_SAVE'}) directory : bpy.props.StringProperty(subtype='FILE_PATH', options={'HIDDEN', 'SKIP_SAVE'}) - + def draw(self, context): self.draw_psk(context) # self.layout.prop(context.scene.pskpsa_import, 'bDontInvertRoot') # draw = ImportProps.draw_psk - + def execute(self, context): if not self.filepath: raise Exception("filepath not set") - + no_errors = True - + if not self.directory: - # possibly excuting from script, + # possibly excuting from script, # bcs blender will set this value, even for a single file - + keywords = self.as_keywords( ignore=( "import_mode", @@ -2010,11 +2023,11 @@ def execute(self, context): "bActionsToTrack", "bUpdateTimelineRange", "bRotationOnly", - "files", + "files", "directory" ) ) - + if self.import_mode == 'Mesh': bImportmesh = True bImportbone = False @@ -2024,14 +2037,14 @@ def execute(self, context): else: bImportmesh = True bImportbone = True - + # ugly workaround keywords["bImportbone"] = bImportbone keywords["bImportmesh"] = bImportmesh - + no_errors = pskimport( **keywords ) - - else: + + else: props = bpy.context.scene.pskpsa_import if props.import_mode == 'Mesh': bImportmesh = True @@ -2042,11 +2055,11 @@ def execute(self, context): else: bImportmesh = True bImportbone = True - - + + for _, fileListElement in enumerate(self.files): fpath = self.directory + fileListElement.name - + no_errors = no_errors 
and pskimport( fpath, context = context, @@ -2059,20 +2072,21 @@ def execute(self, context): bDontInvertRoot = props.bDontInvertRoot, bScaleDown = props.bScaleDown, bToSRGB = props.bToSRGB, - error_callback = util_ui_show_msg + bSmoothShade = props.bSmoothShade, + error_callback = util_ui_show_msg, ) if not no_errors: return {'CANCELLED'} else: return {'FINISHED'} - + def invoke(self, context, event): wm = context.window_manager wm.fileselect_add(self) return {'RUNNING_MODAL'} - + class IMPORT_OT_psa(bpy.types.Operator, ImportProps): '''Load a skeleton animation from .psa\n * Selected armature will be used.''' bl_idname = "import_scene.psa" @@ -2090,16 +2104,16 @@ class IMPORT_OT_psa(bpy.types.Operator, ImportProps): ) files : bpy.props.CollectionProperty(type=bpy.types.OperatorFileListElement, options={'HIDDEN', 'SKIP_SAVE'}) directory : bpy.props.StringProperty(subtype='FILE_PATH', options={'HIDDEN', 'SKIP_SAVE'}) - + def draw(self, context): self.draw_psa(context) self.layout.prop(context.scene.pskpsa_import, 'bDontInvertRoot') - + def execute(self, context): props = context.scene.pskpsa_import - + if not self.directory: - # possibly excuting from script, + # possibly excuting from script, # bcs blender will set this value, even for a single file psaimport( **(self.as_keywords( ignore=( @@ -2111,19 +2125,19 @@ def execute(self, context): "bReorientDirectly", "bToSRGB", "filter_glob", - "files", + "files", "directory" ) )) ) return {'FINISHED'} - + for _, fileListElement in enumerate(self.files): fpath = self.directory + fileListElement.name psaimport( fpath, context = context, - bFilenameAsPrefix = props.bFilenameAsPrefix, - bActionsToTrack = props.bActionsToTrack, + bFilenameAsPrefix = props.bFilenameAsPrefix, + bActionsToTrack = props.bActionsToTrack, oArmature = blen_get_armature_from_selection(), bDontInvertRoot = props.bDontInvertRoot, bUpdateTimelineRange = props.bUpdateTimelineRange, @@ -2132,7 +2146,7 @@ def execute(self, context): error_callback = 
util_ui_show_msg ) return {'FINISHED'} - + def invoke(self, context, event): if blen_get_armature_from_selection() is None: util_ui_show_msg('Select an armature.') @@ -2147,14 +2161,14 @@ class PSKPSA_PT_import_panel(bpy.types.Panel, ImportProps): bl_space_type = "VIEW_3D" bl_region_type = "UI" bl_category = "PSK / PSA" - + # @classmethod # def poll(cls, context): # print(context.scene.get('pskpsa_import'),'poll') # context.scene.update_tag() # context.scene.update() # return context.scene.get('pskpsa_import') is not None - + def draw(self, context): props = context.scene.pskpsa_import if props is None: @@ -2162,24 +2176,23 @@ def draw(self, context): return # return layout = self.layout - + # layout.label(text = "Mesh and skeleton:") layout.operator(IMPORT_OT_psk.bl_idname, icon = 'MESH_DATA') self.draw_psk(context) # layout.prop(props, 'import_mode',expand = True) - + sub = layout.row() sub.operator(PSKPSA_OT_hide_unused_bones.bl_idname, icon = 'BONE_DATA') sub.enabled = (context.object is not None) and (context.object.type == 'MESH' or context.object.type == 'ARMATURE') - + layout.separator() layout.separator() # layout.label(text = "Animation:", icon = 'ANIM') layout.operator(IMPORT_OT_psa.bl_idname, icon = 'ANIM') self.draw_psa(context) - - + def menu_import_draw(self, context): self.layout.operator(IMPORT_OT_psk.bl_idname, text = "Skeleton Mesh (.psk)") self.layout.operator(IMPORT_OT_psa.bl_idname, text = "Skeleton Anim (.psa)") @@ -2191,29 +2204,28 @@ def menu_import_draw(self, context): PSKPSA_PT_import_panel, PSKPSA_OT_show_message, PSKPSA_OT_hide_unused_bones - ) - - + ) + def register(): from bpy.utils import register_class for cls in classes: register_class(cls) - + bpy.types.TOPBAR_MT_file_import.append(menu_import_draw) bpy.types.Scene.pskpsa_import = PointerProperty(type = PskImportOptions) - + def unregister(): from bpy.utils import unregister_class for cls in classes: unregister_class(cls) - + 
bpy.types.TOPBAR_MT_file_import.remove(menu_import_draw) del bpy.types.Scene.pskpsa_import - + if __name__ == "__main__": register() if __name__ == "io_import_scene_unreal_psa_psk_270_dev": - import pskpsadev \ No newline at end of file + import pskpsadev