Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # bitcpy
- # NOTE
- # Loads a Legend of Grimrock 2 model from either a .model file or a .dat container
- # Loads a Legend of Grimrock 2 animation from either a .animation file or a .dat container
- # Saves a Legend of Grimrock 2 model
- #
- # Names of models and animations in a .dat container are determined by doing an extremely simple scan of compiled Lua files.
- # FIX
- # . Needs a cleanup pass by now
- # . Need to fix quad split so it matches Mikkelsen, that's how Blender does it when baking, IIRC
- # . Fix so animations can be imported even if there are nodes missing
- # . Should we remove nodes that are bone transforms during import, or keep them for completeness sake?
- # . Exporter should create missing nodes for bones, no point in forcing user to place them
- # . Make sure to clean out action sequence of old data when importing new animations
- # . Animation import shouldn't have to flush and update for each bone being posed for every keyframe, kills performance completely.
- # . Add shadow casting depending on material settings
- # . Importing animations from LoG 1 has a different format, should be supported though
- # . germanny experienced distortion when importing 'wizard_check_door' on the model 'wizard', skinning issue perhaps, but shouldn't all animations break if that was the case?
- # . Animation Exporter
# Addon registration metadata read by Blender's addon manager.
bl_info = {
    "name": "Legend of Grimrock 2 Import/Export (model / animation)",
    "author": "",
    "version": (1, 3, 0),
    "blender": (2, 71, 0),  # targets the Blender 2.7x API series
    "api": 36339,
    "location": "File > Import > Legend of Grimrock 2 Model (.model)",
    "description": "Import/Export Legend of Grimrock Models/Anims (.model/.anim)",
    "warning": "",
    "wiki_url": "",
    "tracker_url": "",
    "category": "Import-Export"}
- import os
- import struct
- import sys
- import math
- import bpy
- import bmesh
- import time
- from mathutils import *
- from bpy.props import *
- from bpy_extras.io_utils import ExportHelper, ImportHelper, axis_conversion
- from bpy_extras.image_utils import load_image
# Vertex data types
# Component storage types for a VertexArray (see VertexArray.data_type).
VTX_DATA_BYTE = 0
VTX_DATA_SHORT = 1
VTX_DATA_INT = 2
VTX_DATA_FLOAT = 3
# Vertex array types
# Indices of the 15 fixed attribute slots in MeshData.vertex_arrays.
VTX_ARRAY_POSITION = 0
VTX_ARRAY_NORMAL = 1
VTX_ARRAY_TANGENT = 2
VTX_ARRAY_BITANGENT = 3
VTX_ARRAY_COLOR = 4
VTX_ARRAY_TEXCOORD0 = 5
VTX_ARRAY_TEXCOORD1 = 6
VTX_ARRAY_TEXCOORD2 = 7
VTX_ARRAY_TEXCOORD3 = 8
VTX_ARRAY_TEXCOORD4 = 9
VTX_ARRAY_TEXCOORD5 = 10
VTX_ARRAY_TEXCOORD6 = 11
VTX_ARRAY_TEXCOORD7 = 12
VTX_ARRAY_BONE_INDEX = 13
VTX_ARRAY_BONE_WEIGHT = 14
# write_le_byte
# Serializes one unsigned byte to the file and flushes immediately.
def write_le_byte(file_object, value):
    packed = struct.pack("<B", value)
    file_object.write(packed)
    file_object.flush()
# write_le_short
# Serializes one little-endian signed 16-bit integer and flushes immediately.
def write_le_short(file_object, value):
    packed = struct.pack("<h", value)
    file_object.write(packed)
    file_object.flush()
# write_le_int
# Serializes one little-endian signed 32-bit integer and flushes immediately.
def write_le_int(file_object, value):
    packed = struct.pack("<i", value)
    file_object.write(packed)
    file_object.flush()
# write_le_float
# Serializes one little-endian 32-bit float and flushes immediately.
def write_le_float(file_object, value):
    packed = struct.pack("<f", value)
    file_object.write(packed)
    file_object.flush()
# write_le_string
# Serializes a length-prefixed ASCII string: int32 byte count followed by the
# encoded bytes (non-ASCII characters are dropped by the "ignore" handler).
def write_le_string(file_object, value):
    encoded = value.encode("ascii", "ignore")
    header = struct.pack("<i", len(encoded))
    file_object.write(header + encoded)
    file_object.flush()
# read_len_string
# Reads a length-prefixed string: int32 byte count, then that many bytes.
# NOTE(review): this exact function is defined a second time later in the file;
# the later definition shadows this one at import time. One copy should be
# removed in a cleanup pass.
def read_len_string(file_object, endian = '<'):
    num = read_int(file_object, endian)
    if num == 0:
        return ""
    return read_string(file_object, num, endian)
# read_magic
# Reads a 4-byte FourCC tag from the file and returns it as bytes.
def read_magic(file_object, endian = '<'):
    return struct.unpack(endian + "4s", file_object.read(4))[0]
# read_uint
# Reads one little-endian (by default) unsigned 32-bit integer.
def read_uint(file_object, endian = '<'):
    return struct.unpack(endian + 'I', file_object.read(4))[0]
# read_int
# Reads one little-endian (by default) signed 32-bit integer.
def read_int(file_object, endian = '<'):
    return struct.unpack(endian + 'i', file_object.read(4))[0]
# read_int2
# Reads a pair of signed 32-bit integers; returns them as a 2-tuple.
def read_int2(file_object, endian = '<'):
    return struct.unpack(endian + '2i', file_object.read(8))
# read_int3
# Reads three signed 32-bit integers; returns them as a 3-tuple.
def read_int3(file_object, endian = '<'):
    return struct.unpack(endian + '3i', file_object.read(12))
# read_int4
# Reads four signed 32-bit integers; returns them as a 4-tuple.
def read_int4(file_object, endian = '<'):
    return struct.unpack(endian + '4i', file_object.read(16))
# read_float
# Reads one little-endian (by default) 32-bit float.
def read_float(file_object, endian = '<'):
    return struct.unpack(endian + 'f', file_object.read(4))[0]
# read_float2
# Reads two 32-bit floats; returns them as a 2-tuple.
def read_float2(file_object, endian = '<'):
    return struct.unpack(endian + '2f', file_object.read(8))
# read_float3
# Reads three 32-bit floats; returns them as a 3-tuple.
def read_float3(file_object, endian = '<'):
    return struct.unpack(endian + '3f', file_object.read(12))
# read_float4
# Reads four 32-bit floats; returns them as a 4-tuple.
def read_float4(file_object, endian = '<'):
    return struct.unpack(endian + '4f', file_object.read(16))
# read_matrix4x3
# Reads a 4x3 matrix (four rows of three floats) as a flat 12-tuple.
def read_matrix4x3(file_object, endian = '<'):
    return struct.unpack(endian + '12f', file_object.read(48))
# read_short
# Reads one little-endian (by default) signed 16-bit integer.
def read_short(file_object, endian = '<'):
    return struct.unpack(endian + 'h', file_object.read(2))[0]
# read_short2
# Reads two signed 16-bit integers; returns them as a 2-tuple.
def read_short2(file_object, endian = '<'):
    return struct.unpack(endian + '2h', file_object.read(4))
# read_short3
# Reads three signed 16-bit integers; returns them as a 3-tuple.
def read_short3(file_object, endian = '<'):
    return struct.unpack(endian + '3h', file_object.read(6))
# read_short4
# Reads four signed 16-bit integers; returns them as a 4-tuple.
def read_short4(file_object, endian = '<'):
    return struct.unpack(endian + '4h', file_object.read(8))
# read_byte
# Reads one unsigned byte.
def read_byte(file_object, endian = '<'):
    return struct.unpack(endian + 'B', file_object.read(1))[0]
# read_byte2
# Reads two unsigned bytes; returns them as a 2-tuple.
def read_byte2(file_object, endian = '<'):
    return struct.unpack(endian + '2B', file_object.read(2))
# read_byte3
# Reads three unsigned bytes; returns them as a 3-tuple.
def read_byte3(file_object, endian = '<'):
    return struct.unpack(endian + '3B', file_object.read(3))
# read_byte4
# Reads four unsigned bytes; returns them as a 4-tuple.
def read_byte4(file_object, endian = '<'):
    return struct.unpack(endian + '4B', file_object.read(4))
# read_string
# Reads num bytes from the file and decodes them as UTF-8, dropping any
# byte sequences that fail to decode.
def read_string(file_object, num, endian = '<'):
    raw = struct.unpack(endian + str(num) + 's', file_object.read(num))[0]
    return raw.decode("utf-8", "ignore")
# read_len_string
# Reads a length-prefixed string: int32 byte count, then that many bytes
# decoded via read_string. A zero count yields the empty string.
def read_len_string(file_object, endian = '<'):
    length = read_int(file_object, endian)
    if length == 0:
        return ""
    return read_string(file_object, length, endian)
# decode_string
# Decodes an in-memory bytes buffer as UTF-8, dropping undecodable sequences.
def decode_string(buffer_object, endian = '<'):
    raw = struct.unpack(endian + str(len(buffer_object)) + 's', buffer_object)[0]
    return raw.decode("utf-8", "ignore")
# Vec3
# Minimal 3-component vector used for file (de)serialization.
class Vec3():
    __slots__ = ("x", "y", "z")

    def __init__(self, x = 0.0, y = 0.0, z = 0.0):
        self.x = x
        self.y = y
        self.z = z

    # read
    # Deserializes three consecutive floats (x, y, z) per the Grimrock 2 layout.
    def read(self, file_object):
        self.x = read_float(file_object)
        self.y = read_float(file_object)
        self.z = read_float(file_object)

    # write
    # Serializes the vector as three little-endian floats.
    def write(self, file_object):
        for component in (self.x, self.y, self.z):
            write_le_float(file_object, component)
# Quat
# Minimal quaternion (x, y, z, w) used for file (de)serialization; defaults
# to the identity rotation.
class Quat():
    __slots__ = ("x", "y", "z", "w")

    def __init__(self, x = 0.0, y = 0.0, z = 0.0, w = 1.0):
        self.x = x
        self.y = y
        self.z = z
        self.w = w

    # read
    # Deserializes four consecutive floats (x, y, z, w) per the Grimrock 2 layout.
    def read(self, file_object):
        self.x = read_float(file_object)
        self.y = read_float(file_object)
        self.z = read_float(file_object)
        self.w = read_float(file_object)

    # write
    # Serializes the quaternion as four little-endian floats.
    def write(self, file_object):
        for component in (self.x, self.y, self.z, self.w):
            write_le_float(file_object, component)
# Mat4x3
# A 4x3 matrix stored as four Vec3 rows — the three basis vectors followed
# by the translation — matching the Grimrock 2 on-disk layout.
class Mat4x3():
    # FIX: __slots__ was the parenthesized string ("rows"), not a tuple. That
    # happens to work for a single slot (a bare string is accepted) but is a
    # latent trap when a second name is added; make it a proper 1-tuple.
    __slots__ = ("rows",)

    def __init__(self):
        # Identity basis plus zero translation.
        self.rows = [
            Vec3(x = 1.0, y = 0.0, z = 0.0),
            Vec3(x = 0.0, y = 1.0, z = 0.0),
            Vec3(x = 0.0, y = 0.0, z = 1.0),
            Vec3(x = 0.0, y = 0.0, z = 0.0)
        ]

    # read
    # Reads a 4x3 matrix using Grimrock 2 documentation:
    #   vec3 baseX, vec3 baseY, vec3 baseZ, vec3 translation
    def read(self, file_object):
        for row in self.rows:
            row.read(file_object)

    # write
    # Writes the 4x3 matrix (four Vec3 rows) to disk.
    # FIX: the old comment claimed "4x4"; only the four Vec3 rows are written.
    def write(self, file_object):
        for row in self.rows:
            row.write(file_object)

    # to_matrix
    # Expands to a 4x4 Blender Matrix: the stored rows become columns of the
    # upper 3x4 block and the bottom row is fixed to [0, 0, 0, 1].
    def to_matrix(self):
        r1 = [self.rows[0].x, self.rows[1].x, self.rows[2].x, self.rows[3].x]
        r2 = [self.rows[0].y, self.rows[1].y, self.rows[2].y, self.rows[3].y]
        r3 = [self.rows[0].z, self.rows[1].z, self.rows[2].z, self.rows[3].z]
        r4 = [0.0, 0.0, 0.0, 1.0]
        return Matrix((r1, r2, r3, r4))

    # from_matrix
    # Sets this Mat4x3 from a 4x4 Blender matrix (inverse of to_matrix).
    def from_matrix(self, bl_mat):
        self.rows[0] = Vec3(x = bl_mat[0].x, y = bl_mat[1].x, z = bl_mat[2].x)
        self.rows[1] = Vec3(x = bl_mat[0].y, y = bl_mat[1].y, z = bl_mat[2].y)
        self.rows[2] = Vec3(x = bl_mat[0].z, y = bl_mat[1].z, z = bl_mat[2].z)
        self.rows[3] = Vec3(x = bl_mat[0].w, y = bl_mat[1].w, z = bl_mat[2].w)
# Bone
# Links a skinned mesh to a driving node: node_index selects the node in the
# model's node list, model_to_bone holds the inverse rest-pose matrix.
class Bone():
    __slots__ = (
        "node_index",     # index into Model.nodes of the node this bone drives
        "model_to_bone",  # Mat4x3 inverse rest (model-space -> bone-space) matrix
    )
    def __init__(self):
        self.node_index = -1
        self.model_to_bone = Mat4x3()
    # create_empty
    # Points the bone at node_index with an identity inverse-rest matrix.
    def create_empty(self, node_index):
        self.node_index = node_index
        self.model_to_bone = Mat4x3()
    # read
    # Reads a Bone structure using Grimrock 2 documentation
    # int32 boneNodeIndex
    # Mat4x3 invRestMatrix
    def read(self, file_object):
        self.node_index = read_int(file_object)
        self.model_to_bone.read(file_object)
    # write
    # Writes bone structure to disk (same layout as read).
    def write(self, file_object):
        write_le_int(file_object, self.node_index)
        self.model_to_bone.write(file_object)
# MeshSegment
# One material batch of a mesh: a named material plus the index-buffer range
# (first index and triangle count) it covers.
class MeshSegment():
    __slots__ = ("material", "primitive_type", "index_offset", "num_triangles")

    def __init__(self):
        self.material = None
        self.primitive_type = 0
        self.index_offset = 0
        self.num_triangles = 0

    # create_empty
    # Initializes a valid but empty segment ("none" material, triangle list).
    def create_empty(self):
        self.material = "none"
        self.primitive_type = 2
        self.index_offset = 0
        self.num_triangles = 0

    # read
    # Reads a MeshSegment using Grimrock 2 documentation:
    #   string material, int32 primitiveType, int32 firstIndex, int32 count
    def read(self, file_object):
        self.material = read_len_string(file_object)
        self.primitive_type = read_int(file_object)
        self.index_offset = read_int(file_object)
        self.num_triangles = read_int(file_object)

    # write
    # Serializes the segment in the same layout read() consumes.
    def write(self, file_object):
        write_le_string(file_object, self.material)
        for field in (self.primitive_type, self.index_offset, self.num_triangles):
            write_le_int(file_object, field)
# VertexArray
# One vertex attribute stream (position, normal, UV, ...) of a mesh.
class VertexArray():
    __slots__ = (
        "data_type",  # one of VTX_DATA_BYTE/SHORT/INT/FLOAT
        "dim",        # components per vertex, valid range 1..4
        "stride",     # bytes from vertex to vertex; 0 means the slot is unused
        "data",       # per-vertex tuples (dim > 1), scalars (dim == 1), or raw bytes
    )
    def __init__(self):
        self.data_type = 0
        self.dim = 0
        self.stride = 0
        self.data = None
    # create_empty
    # Creates empty vertex array data buffer
    def create_empty(self):
        self.data_type = 0
        self.dim = 0
        self.stride = 0
        self.data = []
    # is_valid_type
    # True when data_type is one of the four known component types.
    def is_valid_type(self):
        return (self.data_type == VTX_DATA_BYTE or self.data_type == VTX_DATA_SHORT or self.data_type == VTX_DATA_INT or self.data_type == VTX_DATA_FLOAT)
    # is_valid_dim
    # True when dim is in the supported 1..4 range.
    def is_valid_dim(self):
        return (self.dim >= 1 and self.dim <= 4)
    # read_data_type
    # Reads one vertex worth of components (e.g. 3 floats, 4 shorts).
    # Scalars for dim == 1, tuples otherwise.
    def read_data_type(self, file_object):
        if self.data_type == VTX_DATA_BYTE:
            if self.dim == 1:
                return read_byte(file_object)
            elif self.dim == 2:
                return read_byte2(file_object)
            elif self.dim == 3:
                return read_byte3(file_object)
            elif self.dim == 4:
                return read_byte4(file_object)
        elif self.data_type == VTX_DATA_SHORT:
            if self.dim == 1:
                return read_short(file_object)
            elif self.dim == 2:
                return read_short2(file_object)
            elif self.dim == 3:
                return read_short3(file_object)
            elif self.dim == 4:
                return read_short4(file_object)
        elif self.data_type == VTX_DATA_INT:
            if self.dim == 1:
                return read_int(file_object)
            elif self.dim == 2:
                return read_int2(file_object)
            elif self.dim == 3:
                return read_int3(file_object)
            elif self.dim == 4:
                return read_int4(file_object)
        elif self.data_type == VTX_DATA_FLOAT:
            if self.dim == 1:
                return read_float(file_object)
            elif self.dim == 2:
                return read_float2(file_object)
            elif self.dim == 3:
                return read_float3(file_object)
            elif self.dim == 4:
                return read_float4(file_object)
    # read
    # Reads VertexArray data using Grimrock 2 documentation
    # int32 dataType
    # int32 dim
    # int32 stride
    # byte num_vertices*stride
    def read(self, num_vertices, file_object):
        self.data_type = read_int(file_object)
        self.dim = read_int(file_object)
        self.stride = read_int(file_object)
        # A zero stride marks an unused attribute slot: no payload follows.
        if self.stride == 0:
            return
        # Pre-allocate, then decode typed values when the type/dim is known,
        # otherwise keep each vertex's raw bytes so the data is not lost.
        self.data = num_vertices * [None]
        if self.is_valid_type() and self.is_valid_dim():
            for i in range(num_vertices):
                self.data[i] = self.read_data_type(file_object)
        else:
            print("Unknown VertexArray data, type(%d), dimension(%d), stride(%d)" % (self.data_type, self.dim, self.stride))
            for i in range(num_vertices):
                self.data[i] = file_object.read(self.stride)
    # write
    # Writes VertexArray data to disk.
    def write(self, file_object):
        write_le_int(file_object, self.data_type)
        write_le_int(file_object, self.dim)
        write_le_int(file_object, self.stride)
        if self.stride == 0:
            return
        writers = {
            VTX_DATA_BYTE: write_le_byte,
            VTX_DATA_SHORT: write_le_short,
            VTX_DATA_INT: write_le_int,
            VTX_DATA_FLOAT: write_le_float,
        }
        write_value = writers.get(self.data_type)
        if write_value is None:
            # FIX: unknown data types are stored as raw byte strings by read();
            # the old code iterated them and wrote nothing (no matching type
            # branch), silently dropping the payload and corrupting the file.
            # Pass the captured bytes straight through instead.
            for entry in self.data:
                file_object.write(entry)
            return
        for entry in self.data:
            # FIX: dim == 1 arrays hold bare scalars (read_byte/short/int/float
            # return a number, not a tuple); the old code iterated every entry
            # unconditionally and raised TypeError on them. Normalize first.
            values = entry if isinstance(entry, (tuple, list)) else (entry,)
            for value in values:
                write_value(file_object, value)
# MeshData
# Geometry payload of a mesh node: 15 fixed vertex attribute slots, an index
# buffer, per-material segments, and bounding sphere / box information.
class MeshData():
    __slots__ = (
        "magic",          # FourCC; int 0x4853454d from create_empty(), bytes b'MESH' after read()
        "version",        # format version, expected to be 2
        "num_vertices",   # vertex count shared by all attribute arrays
        "vertex_arrays",  # fixed list of 15 slots indexed by VTX_ARRAY_*; unused slots stay None
        "num_indices",    # total index count (3 per triangle)
        "indices",        # flat int list, or None when num_indices == 0
        "num_segments",   # material batch count
        "segments",       # MeshSegment list, or None when num_segments == 0
        "bound_center",   # bounding sphere center (3 floats)
        "bound_radius",   # bounding sphere radius
        "bound_min",      # axis-aligned bounding box minimum corner
        "bound_max",      # axis-aligned bounding box maximum corner
    )
    def __init__(self):
        self.magic = 0
        self.version = 0
        self.num_vertices = 0
        self.vertex_arrays = 15 * [None]
        self.num_indices = 0
        self.indices = None
        self.num_segments = 0
        self.segments = None
        self.bound_center = [0.0, 0.0, 0.0]
        self.bound_radius = 0.0
        self.bound_min = [0.0, 0.0, 0.0]
        self.bound_max = [0.0, 0.0, 0.0]
    # create_empty
    # Creates empty valid mesh data: every attribute slot gets an empty
    # VertexArray, index/segment lists are empty, bounds are zeroed.
    def create_empty(self):
        self.magic = 0x4853454d # 'MESH'
        self.version = 2
        self.num_vertices = 0
        self.vertex_arrays = 15 * [None]
        for i in range(len(self.vertex_arrays)):
            vertex_array = VertexArray()
            vertex_array.create_empty()
            self.vertex_arrays[i] = vertex_array
        self.num_indices = 0
        self.indices = []
        self.num_segments = 0
        self.segments = []
        self.bound_center = [0.0, 0.0, 0.0]
        self.bound_radius = 0.0
        self.bound_min = [0.0, 0.0, 0.0]
        self.bound_max = [0.0, 0.0, 0.0]
    # read
    # Reads MeshData using Grimrock 2 documentation
    # FourCC magic
    # int32 version
    # int32 numVertices
    # VertexArray * 15
    # int32 numIndices
    # int32 * numIndices
    # int32 numSegents
    # MeshSegment * numSegments
    # vec3 boundCenter
    # float boundRadius
    # vec3 boundMin
    # vec3 boundMax
    # Returns False (after logging) on a magic/version mismatch.
    def read(self, file_object):
        # Read MeshData magic, skip if not equal 'MESH'
        self.magic = read_magic(file_object)
        if self.magic != b'MESH':
            print("Invalid MeshData magic '%s', expected 'MESH'" % self.magic)
            return False
        # Read version, skip if version isn't equal to 2
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid MeshData version %d, expected 2" % self.version)
            return False
        # Read number of vertices
        self.num_vertices = read_int(file_object)
        # Read vertex-array data; slots whose stride was 0 leave data None and
        # therefore remain None in vertex_arrays.
        for i in range(15):
            vertex_array = VertexArray()
            vertex_array.read(self.num_vertices, file_object)
            if vertex_array.data != None:
                self.vertex_arrays[i] = vertex_array
        # Read number of indices
        self.num_indices = read_int(file_object)
        # Read index buffer data
        if self.num_indices > 0:
            self.indices = self.num_indices * [0]
            for i in range(self.num_indices):
                self.indices[i] = read_int(file_object)
        # Read number of segments
        self.num_segments = read_int(file_object)
        # Read MeshSegment data
        if self.num_segments > 0:
            self.segments = self.num_segments * [None]
            for i in range(self.num_segments):
                segment = MeshSegment()
                segment.read(file_object)
                self.segments[i] = segment
        # Read bounds information
        self.bound_center = read_float3(file_object)
        self.bound_radius = read_float(file_object)
        self.bound_min = read_float3(file_object)
        self.bound_max = read_float3(file_object)
        return True
    # write
    # Writes mesh structure to disk (mirror of read()).
    # NOTE(review): write_le_int(self.magic) only works when magic is an int
    # (create_empty()); after read() magic is bytes b'MESH' and struct.pack
    # would raise. Also, this loop assumes all 15 vertex_arrays entries are
    # VertexArray objects — after read(), unused slots are None and would
    # raise AttributeError here. Writing is only safe for meshes built via
    # create_empty(); confirm before reusing for read->write round-trips.
    def write(self, file_object):
        write_le_int(file_object, self.magic)
        write_le_int(file_object, self.version)
        write_le_int(file_object, self.num_vertices)
        # NOTE(review): array_index is incremented but never used — leftover
        # debug counter.
        array_index = 0
        for vertex_array in self.vertex_arrays:
            array_index += 1
            vertex_array.write(file_object)
        write_le_int(file_object, self.num_indices)
        for index in self.indices:
            write_le_int(file_object, index)
        write_le_int(file_object, self.num_segments)
        for segment in self.segments:
            segment.write(file_object)
        for comp in self.bound_center:
            write_le_float(file_object, comp)
        write_le_float(file_object, self.bound_radius)
        for comp in self.bound_min:
            write_le_float(file_object, comp)
        for comp in self.bound_max:
            write_le_float(file_object, comp)
# MeshEntity
# A renderable mesh attached to a node: geometry plus skinning bones and
# basic render settings (emissive color, shadow casting).
class MeshEntity():
    __slots__ = (
        "mesh_data",
        "num_bones",
        "bones",
        "emissive_color",
        "cast_shadow",
    )

    def __init__(self):
        self.mesh_data = None
        self.num_bones = 0
        self.bones = None
        self.emissive_color = Vec3()
        self.cast_shadow = False

    # create_empty
    # Builds a valid but empty entity (no bones, shadows enabled).
    def create_empty(self):
        empty_data = MeshData()
        empty_data.create_empty()
        self.mesh_data = empty_data
        self.num_bones = 0
        self.bones = []
        self.emissive_color = Vec3()
        self.cast_shadow = True

    # read
    # Reads MeshEntity using Grimrock 2 documentation:
    #   MeshData, int32 numBones, Bone * numBones, Vec3 emissiveColor,
    #   byte castShadows
    # Returns False when the embedded MeshData fails to parse.
    def read(self, file_object):
        self.mesh_data = MeshData()
        if not self.mesh_data.read(file_object):
            return False
        self.num_bones = read_int(file_object)
        if self.num_bones > 0:
            self.bones = []
            for _ in range(self.num_bones):
                bone = Bone()
                bone.read(file_object)
                self.bones.append(bone)
        self.emissive_color.read(file_object)
        # Any non-zero byte enables shadow casting.
        self.cast_shadow = read_byte(file_object) != 0
        return True

    # write
    # Serializes the entity in the same layout read() consumes.
    def write(self, file_object):
        self.mesh_data.write(file_object)
        write_le_int(file_object, self.num_bones)
        for bone in self.bones:
            bone.write(file_object)
        self.emissive_color.write(file_object)
        write_le_byte(file_object, 1 if self.cast_shadow else 0)
# Node
# One entry in the model's transform hierarchy. Nodes with type 0 carry a
# MeshEntity; parent is an index into the model's node list (-1 for roots).
class Node():
    __slots__ = (
        "name",
        "local_to_parent",
        "parent",
        "type",
        "mesh_entity",
    )

    def __init__(self):
        self.name = ""
        self.local_to_parent = Mat4x3()
        self.parent = -1
        self.type = -1
        self.mesh_entity = None

    # create_empty
    # Initializes a detached node from a name and a Blender 4x4 matrix.
    def create_empty(self, name, local_to_parent):
        self.name = name
        self.local_to_parent.from_matrix(local_to_parent)
        self.parent = -1
        self.type = -1
        self.mesh_entity = None

    # set_mesh_entity
    # Attaches (or detaches, with None) a mesh entity, keeping type in sync.
    def set_mesh_entity(self, mesh_entity):
        self.mesh_entity = mesh_entity
        self.type = -1 if mesh_entity == None else 0

    # read
    # Reads a Node using Grimrock 2 documentation:
    #   string name, Mat4x3 localToParent, int32 parent, int32 type,
    #   MeshEntity (only present when type == 0)
    def read(self, file_object):
        self.name = read_len_string(file_object)
        self.local_to_parent.read(file_object)
        self.parent = read_int(file_object)
        self.type = read_int(file_object)
        if self.type == 0:
            self.mesh_entity = MeshEntity()
            if not self.mesh_entity.read(file_object):
                return False
        return True

    # write
    # Writes the node; the mesh entity follows only when one is attached.
    def write(self, file_object):
        write_le_string(file_object, self.name)
        self.local_to_parent.write(file_object)
        write_le_int(file_object, self.parent)
        write_le_int(file_object, self.type)
        if self.mesh_entity != None:
            self.mesh_entity.write(file_object)
# Model
# In-memory form of a Grimrock 2 .model file: a flat list of hierarchy nodes
# (parent links are indices into that list).
class Model():
    __slots__ = (
        "magic",      # FourCC; int 0x314c444d from create_empty(), bytes b'MDL1' after read()
        "version",    # format version, expected to be 2
        "num_nodes",  # node count, kept in sync by add_node()
        "nodes",      # Node list
    )
    def __init__(self):
        self.magic = 0
        self.version = 0
        self.num_nodes = 0
        self.nodes = None
    # create_empty
    # Creates an empty valid Model structure.
    def create_empty(self):
        self.magic = 0x314c444d # 'MDL1'
        self.version = 2
        self.num_nodes = 0  # FIX: reset alongside nodes so counts stay consistent
        self.nodes = []
    # add_node
    # Appends a node to the model and keeps num_nodes in sync.
    def add_node(self, node):
        self.nodes.append(node)
        self.num_nodes = len(self.nodes)
    # find_node_index
    # Returns the index of the node with the given name, or -1 if not found.
    def find_node_index(self, name):
        for node_index in range(self.num_nodes):
            node = self.nodes[node_index]
            if node.name == name:
                return node_index
        return -1
    # read
    # Reads a ModelFile using Grimrock 2 documentation
    # int32 magic
    # int32 version
    # int32 numNodes
    # Node * numNodes
    # Returns False (after logging) on a magic/version mismatch.
    def read(self, file_object):
        # Read magic, skip if not equal 'MDL1'
        self.magic = read_magic(file_object)
        if self.magic != b'MDL1':
            print("Invalid ModelFile magic '%s', expected 'MDL1'" % self.magic)
            return False
        # Read version, skip if not equal 2
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid ModelFile version %d, expected 2" % self.version)
            return False
        # Read number of nodes
        self.num_nodes = read_int(file_object)
        # Read in nodes
        if self.num_nodes > 0:
            self.nodes = self.num_nodes * [None]
            for i in range(self.num_nodes):
                node = Node()
                node.read(file_object)
                self.nodes[i] = node
        return True
    # write
    # Writes the model structure to disk.
    # FIX: self.magic is an int after create_empty() but a bytes FourCC after
    # read(); the old unconditional write_le_int raised struct.error when
    # writing a freshly-read model. Accept both representations.
    def write(self, file_object):
        if isinstance(self.magic, (bytes, bytearray)):
            file_object.write(struct.pack("<4s", bytes(self.magic)))
        else:
            write_le_int(file_object, self.magic)
        write_le_int(file_object, self.version)
        write_le_int(file_object, self.num_nodes)
        for node in self.nodes:
            node.write(file_object)
        return True
# NodeItem
# Per-node animation track: keyframed positions, rotations and scales for a
# single named node.
class NodeItem():
    __slots__ = (
        "node_name",
        "num_positions",
        "positions",
        "num_rotations",
        "rotations",
        "num_scales",
        "scales",
    )

    def __init__(self):
        self.node_name = ""
        self.num_positions = 0
        self.positions = None
        self.num_rotations = 0
        self.rotations = None
        self.num_scales = 0
        self.scales = None

    # read
    # Reads one animation item per the Grimrock 2 layout: node name, then
    # three counted key lists (Vec3 positions, Quat rotations, Vec3 scales).
    # Lists with a zero count are left as None.
    def read(self, file_object):
        self.node_name = read_len_string(file_object)
        self.num_positions = read_int(file_object)
        if self.num_positions > 0:
            self.positions = []
            for _ in range(self.num_positions):
                key = Vec3()
                key.read(file_object)
                self.positions.append(key)
        self.num_rotations = read_int(file_object)
        if self.num_rotations > 0:
            self.rotations = []
            for _ in range(self.num_rotations):
                key = Quat()
                key.read(file_object)
                self.rotations.append(key)
        self.num_scales = read_int(file_object)
        if self.num_scales > 0:
            self.scales = []
            for _ in range(self.num_scales):
                key = Vec3()
                key.read(file_object)
                self.scales.append(key)
# Animation
# In-memory form of a Grimrock 2 .animation file: playback settings plus one
# NodeItem track per animated node.
class Animation():
    __slots__ = (
        "magic",
        "version",
        "name",
        "frames_per_second",
        "num_frames",
        "num_items",
        "items",
    )

    def __init__(self):
        self.magic = 0
        self.version = 0
        self.name = ""
        self.frames_per_second = 0
        self.num_frames = 0
        self.num_items = 0
        self.items = None

    # read
    # Reads an AnimationFile using Grimrock 2 documentation; returns False
    # (after logging) when the magic or version does not match.
    def read(self, file_object):
        self.magic = read_magic(file_object)
        if self.magic != b'ANIM':
            print("Invalid AnimationFile magic '%s', expected 'ANIM'" % self.magic)
            return False
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid AnimationFile version %d, expected 2" % self.version)
            return False
        self.name = read_len_string(file_object)
        self.frames_per_second = read_float(file_object)
        self.num_frames = read_int(file_object)
        self.num_items = read_int(file_object)
        # Read in animation node items
        if self.num_items > 0:
            self.items = []
            for _ in range(self.num_items):
                track = NodeItem()
                track.read(file_object)
                self.items.append(track)
        return True
# FileEntry
# Metadata for one file stored inside a Grimrock 2 .dat container: name hash,
# payload location/sizes and the (optionally recovered) plain-text name.
class FileEntry(object):
    __slots__ = ("hash_name", "file_offset", "size_compressed",
                 "size_uncompressed", "unknown", "name")

    def __init__(self):
        self.hash_name = 0
        self.file_offset = 0
        self.size_compressed = 0
        self.size_uncompressed = 0
        self.unknown = 0
        # Plain-text name is unknown until recovered by scanning Lua files.
        self.name = None
# ObjectTransform
# Simple helper to store/restore an object matrix
# Temporarily zeroes a Blender object's world transform (e.g. around an
# export) and restores the cached matrix afterwards.
class ObjectTransform():
    __slots__ = (
        "name",          # name of the target object in bpy.data.objects
        "matrix_world",  # cached copy of its world matrix, or None when nothing cached
    )
    def __init__(self, name):
        self.name = name
        self.matrix_world = None
    # clear_transform
    # Caches a transform and clears it on the Blender object
    # No-op (cache stays None) when the object no longer exists.
    def clear_transform(self):
        self.matrix_world = None
        if self.name in bpy.data.objects:
            bl_object = bpy.data.objects[self.name]
            self.matrix_world = bl_object.matrix_world.copy()
            bl_object.matrix_world = Matrix.Identity(4)
            # NOTE(review): Scene.update() is the pre-2.8 Blender API, matching
            # the bl_info "blender": (2, 71, 0) target — would need porting for 2.8+.
            bpy.context.scene.update()
    # restore_transform
    # Restores a cached transform on a Blender object
    # Safe to call when nothing was cached or the object has been removed.
    def restore_transform(self):
        if self.matrix_world != None and self.name in bpy.data.objects:
            bl_object = bpy.data.objects[self.name]
            bl_object.matrix_world = self.matrix_world.copy()
            self.matrix_world = None
            bpy.context.scene.update()
- # ArmatureInfo
- # FIXME: CLEAN !!
- # This is a complete MESS, clean it up and make it more understandable/readable
- # Alot of these things have weird namings and/or doesn't really match what we want
- # There is also alot of unnecesary work done in here
- class ArmatureInfo():
- __slots__ = (
- "model",
- "node",
- "bones",
- "nodes",
- "b2m_transforms",
- "node_to_bone_index",
- "bone_parents",
- "bone_childs",
- "bone_index_order",
- )
- def __init__(self, model, node):
- self.model = model
- self.node = node
- self.bones = node.mesh_entity.bones
- self.nodes = model.nodes
- # Don't change order, dependencies within function calls
- self.create_b2m_transforms()
- self.create_node_to_bone_indices()
- self.create_bone_parents()
- self.create_bone_childs()
- self.create_bone_index_order()
- # create_b2m_transforms
- # Creates bone to model space transforms for each bone
- def create_b2m_transforms(self):
- # Calculate all bone to model transforms
- self.b2m_transforms = []
- for bone in self.bones:
- # Fetch deform node
- deform_node = self.nodes[bone.node_index]
- # Fetch model to bone matrix and invert it to get the bone to model matrix
- m2b = bone.model_to_bone.to_matrix()
- b2m = m2b.inverted()
- # Store calculates bone to model matrix
- self.b2m_transforms.append(b2m)
- # create_node_to_bone_indices
- # Creates a node to bone index mapping
- def create_node_to_bone_indices(self):
- # Create a node to bone mapping
- self.node_to_bone_index = [-1] * len(self.nodes)
- for bone_index in range(len(self.bones)):
- node_index = self.bones[bone_index].node_index
- self.node_to_bone_index[node_index] = bone_index
- # create_bone_parents
- # Generate a list of bone parents for each bone
- def create_bone_parents(self):
- # Figure out all parent bones
- self.bone_parents = []
- for bone in self.bones:
- parent_bone_index = -1
- parent_bone = None
- # Walk the node chain backwards until we find a bone or there are no more parents
- node = self.nodes[bone.node_index]
- while node.parent != -1 and parent_bone_index == -1:
- parent_bone_index = self.node_to_bone_index[node.parent]
- node = self.nodes[node.parent]
- # If we found a parent bone index while walking the chain backwards fetch the bone
- if parent_bone_index != -1:
- parent_bone = self.bones[parent_bone_index]
- # Append either None or a valid parent bone
- self.bone_parents.append(parent_bone)
- # create_bone_childs
- # Generates lists of childrens for each bone
- def create_bone_childs(self):
- # Map childrens for each bone
- self.bone_childs = []
- for parent_bone in self.bones:
- children = []
- for bone in self.bones:
- if bone == parent_bone:
- continue
- # Check against current parent bone and see if we're a child of it
- bone_index = self.node_to_bone_index[bone.node_index]
- if self.bone_parents[bone_index] == parent_bone:
- children.append(bone)
- # Add the list of children to this bone (can be empty)
- self.bone_childs.append(children)
# create_bone_index_order
# Produces a bone index ordering where roots always come first, so bones can
# be parented while they are being created.
def create_bone_index_order(self):
    # Seed the list with the parentless (root) bones.
    order = [i for i in range(len(self.bones)) if self.bone_parents[i] == None]
    # Breadth-first expansion: for each bone already in the list, append all
    # bones whose parent it is. The loop terminates naturally once no more
    # bones get appended.
    cursor = 0
    while cursor < len(order):
        current = order[cursor]
        current_node = self.bones[current].node_index
        for i, parent in enumerate(self.bone_parents):
            if i == current or parent == None:
                continue
            if parent.node_index == current_node:
                order.append(i)
        cursor += 1
    self.bone_index_order = order
# get_bone_node
# Resolves the scene node that the given bone drives.
def get_bone_node(self, bone):
    node_list = self.nodes
    return node_list[bone.node_index]
# get_bone_to_model
# Returns the bone-to-model-space matrix for the given bone.
def get_bone_to_model(self, bone):
    return self.b2m_transforms[self.node_to_bone_index[bone.node_index]]
# get_length
# Returns the distance between the bone's rest origin and `origin`.
def get_length(self, bone, origin):
    transform = self.b2m_transforms[self.node_to_bone_index[bone.node_index]]
    return (origin - transform.to_translation()).length
# get_forward
# CLEANUP
# Extracts the first column of a rest bone matrix's rotation part — the
# bone's forward axis in this rig's convention — as a normalized vector.
def get_forward(self, matrix):
    axis_x = Vector([matrix[0].x, matrix[1].x, matrix[2].x])
    return axis_x.normalized()
# get_bone_origin
# CLEANUP
# Returns the untransformed rest origin of the bone (translation of its
# bone-to-model matrix).
def get_bone_origin(self, bone):
    return self.b2m_transforms[self.node_to_bone_index[bone.node_index]].to_translation()
# get_look_at_child
# CLEANUP
# If the bone aims straight at one or more of its children, returns a point
# on its forward axis midway between the nearest and farthest such child;
# returns None when there are no children or none are aimed at.
def get_look_at_child(self, bone):
    bone_index = self.node_to_bone_index[bone.node_index]
    children = self.bone_childs[bone_index]
    # Nothing to aim at without children.
    if len(children) <= 0:
        return None
    b2m = self.b2m_transforms[bone_index]
    origin = b2m.to_translation()
    forward = self.get_forward(b2m)
    # Track the nearest and farthest child we look straight at, so the
    # returned point ends up midway between them.
    hit_count = 0
    farthest = -sys.float_info.max
    nearest = sys.float_info.max
    for child in children:
        to_child = self.get_bone_origin(child) - origin
        # Cosine of the angle between the forward axis and the direction to
        # the child; a tiny epsilon below 1.0 still counts as "straight at".
        alignment = forward.dot(to_child.normalized())
        if alignment < (1.0 - 0.000001):
            continue
        distance = to_child.length
        farthest = max(farthest, distance)
        nearest = min(nearest, distance)
        hit_count += 1
    # No child on the forward axis at all.
    if hit_count == 0:
        return None
    return origin + forward * (nearest + farthest) * 0.5
# get_child_midpoint
# CLEANUP
# With two or more children, computes a running midpoint of the child
# origins and returns a tail on the bone's forward axis at that distance;
# returns None otherwise.
def get_child_midpoint(self, bone):
    bone_index = self.node_to_bone_index[bone.node_index]
    children = self.bone_childs[bone_index]
    # A midpoint only makes sense with at least two children.
    if len(children) <= 1:
        return None
    b2m = self.b2m_transforms[bone_index]
    origin = b2m.to_translation()
    forward = self.get_forward(b2m)
    # Successively halve the distance toward each following child; note this
    # weights later children more heavily than a true arithmetic mean would.
    midpoint = self.get_bone_origin(children[0])
    for child in children[1:]:
        midpoint += (self.get_bone_origin(child) - midpoint) * 0.5
    # Project the distance to that midpoint along the bone's forward axis.
    return origin + forward * (midpoint - origin).length
# project_vertex_group
# CLEANUP
# Projects the vertices weighted to `bone` onto the bone's forward axis and
# returns a tail position at the farthest contributing vertex.
#
#   bone          -- bone whose influence region is measured
#   vertex_groups -- dict of node name -> [(vertex_index, weight), ...] or None
#
# Returns a model-space point, or None when there is no group / position
# data for this bone, or when no weighted vertex lies ahead of the bone.
#
# FIX: previously, when no vertex qualified, max_length stayed at
# -sys.float_info.max and the function returned a tail at minus-FLT_MAX;
# now it returns None so callers fall through to the next tail strategy.
def project_vertex_group(self, bone, vertex_groups):
    if vertex_groups == None:
        return None
    bone_name = self.nodes[bone.node_index].name
    if bone_name not in vertex_groups.keys():
        return None
    mesh_data = self.node.mesh_entity.mesh_data
    positions = get_vertex_array(mesh_data, VTX_ARRAY_POSITION)
    if positions == None:
        return None
    bone_index = self.node_to_bone_index[bone.node_index]
    bone_b2m = self.b2m_transforms[bone_index]
    bone_origin = bone_b2m.to_translation()
    # Get bone direction as a normalized vector
    bone_forward = self.get_forward(bone_b2m)
    group = vertex_groups[bone_name]
    max_length = -sys.float_info.max
    found = False
    for (vi, w) in group:
        # Ignore negligible weights
        if w < 0.000001:
            continue
        vpos = Vector(positions[vi])
        vec = vpos - bone_origin
        # Only vertices in front of the bone contribute to the tail
        if bone_forward.dot(vec) <= 0.000001:
            continue
        # Scale reach by sqrt(weight) so lightly weighted vertices pull less
        max_length = max(max_length, vec.length * math.sqrt(w))
        found = True
    if not found:
        return None
    return bone_origin + bone_forward * max_length
# FrameObject
# Snapshot of a single object (or bone) while posing an animation frame:
# holds its identity, its current local/world matrices and flags describing
# which animation tracks touched it on the current frame.
class FrameObject():
    __slots__ = (
        "name",
        "parent",
        "matrix_local",
        "matrix_world",
        "has_position_track",
        "has_rotation_track",
        "has_scale_track",
        "has_any_track",
    )
    def __init__(self, bl_object):
        # Mirror the Blender object's identity and transforms.
        self.name = bl_object.name
        self.parent = bl_object.parent
        self.matrix_local = bl_object.matrix_local
        self.matrix_world = bl_object.matrix_world
        # No tracks are assigned until set_tracks() is called.
        self.set_tracks(False, False, False)
    # clear_tracks
    # Marks every track as absent for the current frame.
    def clear_tracks(self):
        self.set_tracks(False, False, False)
    # set_tracks
    # Records which track kinds (position/rotation/scale) are present, and
    # derives the combined has_any_track flag.
    def set_tracks(self, position, rotation, scale):
        self.has_position_track = position
        self.has_rotation_track = rotation
        self.has_scale_track = scale
        self.has_any_track = position or rotation or scale
# ObjectKeyFrame
# Holds one FrameObject per scene object so a whole animation frame can be
# posed and then evaluated in parent-before-child order.
class ObjectKeyframe():
    __slots__ = (
        "frame_objects",
        "name_to_index_map"
    )
    def __init__(self):
        self.frame_objects = []
        self.name_to_index_map = {}
    # add_object
    # Wraps a Blender object in a FrameObject and registers it by name.
    def add_object(self, bl_object):
        wrapped = FrameObject(bl_object)
        self.name_to_index_map[wrapped.name] = len(self.frame_objects)
        self.frame_objects.append(wrapped)
    # get_object
    # Looks up the FrameObject registered under `name`.
    def get_object(self, name):
        return self.frame_objects[self.name_to_index_map[name]]
    # evaluate
    # Recomputes world matrices in insertion order; callers must have added
    # parents before their children for the composite matrix to be valid.
    def evaluate(self):
        for entry in self.frame_objects:
            if entry.parent != None:
                parent_world = self.get_object(entry.parent.name).matrix_world
                entry.matrix_world = parent_world * entry.matrix_local
            else:
                entry.matrix_world = entry.matrix_local
# TriMaterial
# One export material slot: its assigned material index plus the flat list
# of triangle vertex indices that use it.
class TriMaterial():
    __slots__ = (
        "material_index",
        "indices",
    )
    def __init__(self):
        # -1 means no material slot has been assigned yet.
        self.material_index = -1
        self.indices = []
# TriSurface
# Builds a welded, export-ready triangle surface from a Blender mesh:
# unique vertices (position/normal plus optional uv, color, tangent,
# bitangent and skin weights) and one index list per material slot.
class TriSurface():
    __slots__ = (
        "bl_vertex_weights",
        "vertices",
        "colors",
        "normals",
        "uvs",
        "tangents",
        "bitangents",
        "bone_weights",
        "bone_indices",
        "materials",
        "num_indices",
        "vertex_hash",
        "bound_hash",
        "bound_min",
        "bound_max",
        "has_uv",
        "has_color",
        "has_tangents",
        "has_weights",
    )
    def __init__(self):
        self.bl_vertex_weights = []
        self.vertices = []
        self.colors = []
        self.normals = []
        self.uvs = []
        self.tangents = []
        self.bitangents = []
        self.bone_weights = []
        self.bone_indices = []
        self.materials = []
        self.num_indices = 0
        # Spatial hash: 1024 buckets of candidate vertex indices for welding.
        self.vertex_hash = {k: [] for k in range(1024)}
        self.bound_hash = [0.0, 0.0, 0.0]
        self.bound_min = [0.0, 0.0, 0.0]
        self.bound_max = [0.0, 0.0, 0.0]
        self.has_uv = False
        self.has_color = False
        self.has_tangents = False
        self.has_weights = False
    # compare_vec3
    # Component-wise approximate equality of two 3-component vectors.
    @staticmethod
    def compare_vec3(vec1, vec2, epsilon = 0.000001):
        return abs(vec1[0] - vec2[0]) <= epsilon and abs(vec1[1] - vec2[1]) <= epsilon and abs(vec1[2] - vec2[2]) <= epsilon
    # compare_vec2
    # Component-wise approximate equality of two 2-component vectors.
    @staticmethod
    def compare_vec2(vec1, vec2, epsilon = 0.000001):
        return abs(vec1[0] - vec2[0]) <= epsilon and abs(vec1[1] - vec2[1]) <= epsilon
    # get_vertex_hash_key
    # Quantizes a vertex coordinate into one of the 1024 hash buckets.
    def get_vertex_hash_key(self, vertex):
        # Axes with zero extent have bound_hash == 0.0 and contribute nothing.
        x = int((vertex[0] - self.bound_min[0]) * self.bound_hash[0])
        y = int((vertex[1] - self.bound_min[1]) * self.bound_hash[1])
        z = int((vertex[2] - self.bound_min[2]) * self.bound_hash[2])
        return (x + y + z) % 1024
    # add_unique_vertex
    # Vertex welding: position, normal, uv, color, tangent and bitangent must
    # all match an existing vertex for it to be reused; otherwise a new unique
    # vertex is appended. Returns the (existing or new) vertex index.
    def add_unique_vertex(self, bl_mesh, loop_index, poly_loop, uv_loops, color_loops):
        # Fetch vertex coordinate
        vertex = bl_mesh.vertices[poly_loop.vertex_index].co
        # Fetch normal
        normal = poly_loop.normal
        # Fetch color if available
        color = None
        if color_loops != None:
            color = color_loops[loop_index].color
        # Fetch uv, tangent and bitangent if available
        uv = None
        tangent = None
        bitangent = None
        if uv_loops != None:
            uv = uv_loops[loop_index].uv
            tangent = poly_loop.tangent
            bitangent = poly_loop.bitangent
        # Generate hash-key for vertex coordinate
        hash_key = self.get_vertex_hash_key(vertex)
        # Check if vertex already exists or we need to create one
        for vertex_index in self.vertex_hash[hash_key]:
            if not TriSurface.compare_vec3(self.vertices[vertex_index], vertex):
                continue
            if not TriSurface.compare_vec3(self.normals[vertex_index], normal):
                continue
            if uv_loops != None:
                if not TriSurface.compare_vec2(self.uvs[vertex_index], uv):
                    continue
                if not TriSurface.compare_vec3(self.tangents[vertex_index], tangent):
                    continue
                # Maybe check sign only ?
                if not TriSurface.compare_vec3(self.bitangents[vertex_index], bitangent):
                    continue
            if color_loops != None:
                if not TriSurface.compare_vec3(self.colors[vertex_index], color, epsilon = 0.001):
                    continue
            # Everything is a match, return this vertex
            return vertex_index
        # Nothing found, create new unique vertex
        new_vertex_index = len(self.vertices)
        # Add to hash for lookup
        self.vertex_hash[hash_key].append(new_vertex_index)
        # Add vertex coordinate and normal to lists
        self.vertices.append((vertex[0], vertex[1], vertex[2]))
        self.normals.append((normal[0], normal[1], normal[2]))
        # Add color to list, if available
        if color_loops != None:
            self.colors.append((color.r, color.g, color.b))
        # Add uv, tangent and bitangent to lists, if available
        if uv_loops != None:
            self.uvs.append((uv[0], uv[1]))
            self.tangents.append((tangent[0], tangent[1], tangent[2]))
            self.bitangents.append((bitangent[0], bitangent[1], bitangent[2]))
        # Add weights, if available
        if self.has_weights:
            bl_bone_weights, bl_bone_indices = zip(*self.bl_vertex_weights[poly_loop.vertex_index])
            self.bone_weights.append((bl_bone_weights[0], bl_bone_weights[1], bl_bone_weights[2], bl_bone_weights[3]))
            self.bone_indices.append((bl_bone_indices[0], bl_bone_indices[1], bl_bone_indices[2], bl_bone_indices[3]))
        # Return the index of the newly created vertex
        return new_vertex_index
    # create_from_mesh
    # Populates the surface from a Blender mesh.
    #   settings         -- exporter settings (reads option_tangents)
    #   bl_mesh          -- the Blender mesh datablock
    #   bones_name       -- list of bone names for skinning, or None
    #   bl_vertex_groups -- the owning object's vertex groups
    #   bl_bounds        -- the object's 8 bounding-box corners
    def create_from_mesh(self, settings, bl_mesh, bones_name, bl_vertex_groups, bl_bounds):
        # Figure out hash bounds from the 8 bounding-box corners
        bound_min = [bl_bounds[0][0], bl_bounds[0][1], bl_bounds[0][2]]
        bound_max = [bl_bounds[0][0], bl_bounds[0][1], bl_bounds[0][2]]
        for i in range(8):
            bl_bound = bl_bounds[i]
            for j in range(3):
                bound_min[j] = min(bound_min[j], bl_bound[j])
                bound_max[j] = max(bound_max[j], bl_bound[j])
        for i in range(3):
            size = bound_max[i] - bound_min[i]
            # FIX: flat meshes have zero extent on an axis; this previously
            # raised ZeroDivisionError. A zero scale simply drops that axis
            # from the hash key.
            self.bound_hash[i] = (1024.0 / size) if size > 0.0 else 0.0
            self.bound_min[i] = bound_min[i]
            self.bound_max[i] = bound_max[i]
        # Only allow exporting from quads or tris
        # Select invalid polygons so we can show users where problems occoured
        # FIXME: Fix the selection, it could be nice to tell the user it failed and show them why
        #for polygon in bl_mesh.polygons:
        #    if polygon.loop_total > 4:
        #        return None
        # Only calculate tangents if we have a active uv-layer, and we selected tangents in settings
        if bl_mesh.uv_layers.active != None and settings.option_tangents:
            bl_mesh.calc_tangents(uvmap = bl_mesh.uv_layers.active.name)
            self.has_tangents = True
        # Calculate split normals if we never calculated the tangents
        if not self.has_tangents:
            bl_mesh.calc_normals_split()
        # Collect weights for each vertex, make sure each vertex has exactly 4 weights, if bones are available
        if bones_name != None:
            self.has_weights = True
            for vertex in bl_mesh.vertices:
                # Figure out bones and weights for vertex
                bl_vertex_weights = []
                for group in vertex.groups:
                    vertex_group = bl_vertex_groups[group.group]
                    bone_index = bones_name.index(vertex_group.name)
                    bone_weight = vertex_group.weight(vertex.index)
                    bl_vertex_weights.append((bone_weight, bone_index))
                # Ensure exactly 4 weights, always
                num_weights = len(bl_vertex_weights)
                if num_weights > 4:
                    # Keep the four largest weights
                    bl_vertex_weights.sort()
                    bl_vertex_weights = bl_vertex_weights[num_weights - 4:]
                elif num_weights < 4:
                    bl_vertex_weights += [(0.0, 0)] * (4 - num_weights)
                # Add the four weights and indices
                self.bl_vertex_weights.append(bl_vertex_weights)
        # Create material slots
        for bl_material in bl_mesh.materials:
            material = TriMaterial()
            self.materials.append(material)
        # Select active uv- and color layer
        uv_layer = bl_mesh.uv_layers.active
        color_layer = bl_mesh.vertex_colors.active
        # Fetch the uv-loops
        uv_loops = None
        if uv_layer != None:
            self.has_uv = True
            uv_loops = uv_layer.data
        # Fetch the color-loops
        color_loops = None
        if color_layer != None:
            self.has_color = True
            color_loops = color_layer.data
        # Fetch the geometry loops
        poly_loops = bl_mesh.loops
        poly_indices = [None,None,None,None]
        # Create vertices
        for polygon in bl_mesh.polygons:
            is_quad = len(polygon.loop_indices) == 4
            for i, loop_index in enumerate(polygon.loop_indices):
                poly_indices[i] = self.add_unique_vertex(bl_mesh, loop_index, poly_loops[loop_index], uv_loops, color_loops)
            material = self.materials[polygon.material_index]
            material.indices.append(poly_indices[0])
            for i in range(2):
                material.indices.append(poly_indices[1+i])
            if is_quad:
                # FIXME: Need to split quads the same way Mikkelsen does it, otherwise we break the tangent space
                material.indices.append(poly_indices[0])
                for i in range(2):
                    material.indices.append(poly_indices[2+i])
        # Accumulate total number of indices from all materials
        for material in self.materials:
            self.num_indices += len(material.indices)
# create_game_to_blender_matrix
# Builds the left-handed to right-handed conversion combined with an axis
# re-orientation that presents the model in a Blender-friendly direction.
def create_game_to_blender_matrix():
    flip_z = Matrix(([1,0,0,0],[0,1,0,0],[0,0,-1,0],[0,0,0,1]))
    reorient = axis_conversion("Z", "Y", "X", "-Z").to_4x4()
    return flip_z * reorient
# create_bone_space_matrix
# Returns the constant rotation (90 degrees about Z) used to re-orient a
# matrix into Blender's bone-space convention.
def create_bone_space_matrix():
    return Matrix((
        [ 0, 1, 0, 0],
        [-1, 0, 0, 0],
        [ 0, 0, 1, 0],
        [ 0, 0, 0, 1],
    ))
# get_vertex_array
# Returns the raw data list of a vertex array slot, or None when that slot
# is unused.
def get_vertex_array(mesh_data, vtx_array_type):
    vtx_array = mesh_data.vertex_arrays[vtx_array_type]
    return vtx_array.data if vtx_array != None else None
# convert_colors
# Converts an array of byte range [0, 255] colors to float range [0, 1]
# values by multiplying with the reciprocal of 255.
#
# FIX: the previous implementation pre-allocated the output as
# `num * [dim * [0.0]]`, which aliases ONE shared inner list across every
# row — every row ended up holding the last color converted. It also raised
# IndexError on an empty input list.
def convert_colors(byte_colors):
    scale = 1.0 / 255.0
    return [[float(component) * scale for component in color] for color in byte_colors]
# convert_color_float3_to_byte4
# Converts a float range [0, 1] RGB color to a clamped byte range
# (r, g, b, 255) tuple.
#
# FIX: the previous code multiplied by 1/255 (0.003921...) instead of 255,
# so every float color collapsed to (0, 0, 0, 255).
def convert_color_float3_to_byte4(float_color):
    return (
        max(0, min(255, int(float_color[0] * 255.0))),
        max(0, min(255, int(float_color[1] * 255.0))),
        max(0, min(255, int(float_color[2] * 255.0))),
        255
    )
# apply_local_matrix
# Copies a node's local-to-parent transform onto a Blender object's local matrix.
def apply_local_matrix(ob, node):
    local = node.local_to_parent.to_matrix()
    ob.matrix_local = local
# calculate_frame
# Computes a world transform for every node and returns a list of
# [node_index, world_matrix] pairs ordered parents-before-children.
def calculate_frame(nodes, game_matrix):
    num_nodes = len(nodes)
    # Topological order: roots first, then repeatedly append the children of
    # every node already placed. The scan ends naturally once no more nodes
    # get appended.
    ordered = [i for i in range(num_nodes) if nodes[i].parent == -1]
    cursor = 0
    while cursor < len(ordered):
        current = ordered[cursor]
        for i in range(num_nodes):
            if i == current:
                continue
            parent = nodes[i].parent
            if parent == -1:
                continue
            if parent == current:
                ordered.append(i)
        cursor += 1
    # game_matrix sits above the whole hierarchy, so it seeds the root transform.
    base_transform = Matrix.Identity(4) * game_matrix
    # Walk the sorted order so each parent's composite matrix exists before
    # its children need it.
    world = [None] * num_nodes
    frame = [None] * num_nodes
    for slot in range(num_nodes):
        node_index = ordered[slot]
        node = nodes[node_index]
        local_to_parent = node.local_to_parent.to_matrix()
        if node.parent != -1:
            world[node_index] = world[node.parent] * local_to_parent
        else:
            world[node_index] = base_transform * local_to_parent
        frame[slot] = [node_index, world[node_index]]
    return frame
# create_object_frame
# Builds an ObjectKeyframe holding the "game_matrix" object and all of its
# descendants, added breadth-first so parents always precede children.
# Returns None when the scene has no "game_matrix" root.
def create_object_frame():
    if "game_matrix" not in bpy.data.objects.keys():
        return None
    frame = ObjectKeyframe()
    pending = [bpy.data.objects["game_matrix"]]
    cursor = 0
    while cursor < len(pending):
        current = pending[cursor]
        frame.add_object(current)
        pending.extend(current.children)
        cursor += 1
    return frame
# load_binary_model
# Reads a Grimrock 2 binary model from file_object and instantiates it in
# the Blender scene. Returns True on success, False on a read failure.
# Binary layout per the Grimrock 2 documentation:
#   FourCC magic
#   int32 version
#   int32 numNodes
#   (Node) * numNodes
def load_binary_model(file_object, context):
    model = Model()
    if model.read(file_object):
        build_model(model)
        return True
    print("Failed to load ModelFile")
    return False
# save_binary_model
# Serializes the Blender scene into a Grimrock 2 compatible binary model.
def save_binary_model(file_object, settings, context):
    create_model(settings).write(file_object)
    return True
# load_binary_animation
# Reads a Grimrock 2 binary animation from file_object and applies it to the
# given armature. Returns True on success, False on a read failure.
def load_binary_animation(file_object, armature, context):
    anim = Animation()
    if anim.read(file_object):
        # Apply the animation to the armature
        build_animation(armature, anim)
        return True
    print("Failed to load AnimationFile")
    return False
# build_vertex_groups
# Collects per-node skinning weights from the mesh's bone index/weight
# vertex arrays. Returns {node_name: [[vertex_index, weight], ...]} or None
# when the mesh is not skinned or the arrays are missing.
def build_vertex_groups(model, node):
    if node.mesh_entity.num_bones <= 0:
        return None
    mesh_data = node.mesh_entity.mesh_data
    bone_indices = get_vertex_array(mesh_data, VTX_ARRAY_BONE_INDEX)
    bone_weights = get_vertex_array(mesh_data, VTX_ARRAY_BONE_WEIGHT)
    if bone_indices == None or bone_weights == None:
        return None
    vertex_groups = {}
    for vi in range(mesh_data.num_vertices):
        # Each vertex carries parallel lists of bone slots and weights; group
        # them under the name of the node each bone deforms.
        for bone_index, bone_weight in zip(bone_indices[vi], bone_weights[vi]):
            bone = node.mesh_entity.bones[bone_index]
            group_name = model.nodes[bone.node_index].name
            vertex_groups.setdefault(group_name, []).append([vi, bone_weight])
    return vertex_groups
# build_armature
# Builds a Blender armature (rig) for a skinned mesh node of a Grimrock
# model, creates an EditBone per deform bone (roots first so parenting
# works), then poses the bones into the model's initial state.
# Returns the rig object, or None when the node carries no bones.
#   model         -- the loaded Grimrock Model
#   node          -- the mesh-entity node to rig
#   vertex_groups -- skinning groups from build_vertex_groups (may be None)
#   game_matrix   -- game-to-Blender conversion matrix
def build_armature(model, node, vertex_groups, game_matrix):
    if node.mesh_entity.num_bones <= 0:
        return None
    # Create an armature object
    armature = bpy.data.armatures.new(node.name + "_rig")
    armature.draw_type = 'STICK'
    # Create a rig
    rig = bpy.data.objects.new(node.name + "_rig", armature)
    rig.show_x_ray = True
    if node.parent != -1:
        rig.parent = bpy.data.objects[model.nodes[node.parent].name]
    #apply_transform(rig, model.nodes, node, game_matrix)
    apply_local_matrix(rig, node)
    # Update scene and set active rig
    bpy.context.scene.objects.link(rig)
    bpy.context.scene.objects.active = rig
    bpy.context.scene.update()
    # Switch to edit mode and create all bones
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='EDIT')
    # Do some pre-processing to make it easier for us when generating the armature
    info = ArmatureInfo(model, node)
    # Calculate world transforms for all nodes
    frame = calculate_frame(model.nodes, game_matrix)
    # Build bones using our bone index order list (this ensures roots are created before children)
    for bone_index in info.bone_index_order:
        bone = info.bones[bone_index]
        deform_node = info.nodes[bone.node_index]
        b2m = info.b2m_transforms[bone_index]
        # Tail placement strategies, in order of preference:
        # 1) a child the bone aims straight at
        tail_origin = info.get_look_at_child(bone)
        # 2) the midpoint of all children, if any
        if tail_origin == None:
            tail_origin = info.get_child_midpoint(bone)
        # 3) the farthest vertex influenced by the bone
        if tail_origin == None:
            tail_origin = info.project_vertex_group(bone, vertex_groups)
        # 4) fall back to extruding one unit along the bone's forward axis
        if tail_origin == None:
            tail_origin = b2m.to_translation() + info.get_forward(b2m)
        bone_length = info.get_length(bone, tail_origin)
        # Blender rejects zero-length bones, so enforce a tiny minimum
        bone_length = max(bone_length, 0.001)
        # Fetch Blender EditBone parent (roots were created first, so it exists)
        bl_bone_parent = None
        bone_parent = info.bone_parents[bone_index]
        if bone_parent != None:
            parent_node = model.nodes[bone_parent.node_index]
            bl_bone_parent = armature.edit_bones[parent_node.name]
        # Create the bone along +X, then move it into place with the
        # bone-to-model transform
        bl_bone = armature.edit_bones.new(deform_node.name)
        bl_bone.head = Vector([0, 0, 0])
        bl_bone.tail = Vector([bone_length, 0, 0])
        bl_bone.transform(b2m.to_3x3())
        bl_bone.translate(b2m.to_translation())
        bl_bone.parent = bl_bone_parent
    # Switch back to object mode in order to refresh armature
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # Switch to pose mode and pose the model in initial position
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='POSE')
    # pose the bones in initial state
    bl_bone_space = create_bone_space_matrix()
    bpy.context.scene.update()
    for (node_index, node_transform) in frame:
        deform_node = model.nodes[node_index]
        if not deform_node.name in rig.pose.bones.keys():
            continue
        pose_bone = rig.pose.bones[deform_node.name]
        # World transform expressed relative to the rig, in Blender bone space
        pm = rig.matrix_world.inverted() * node_transform * bl_bone_space
        pose_bone.matrix = pm
        # FIXME: hack, flush the animation system in Blender so child pose
        # matrices see their parent's pose (kills performance, see FIX list)
        #bpy.context.scene.frame_set(0)
        bpy.context.scene.update()
        bpy.context.scene.update()
    # Done, switch back to object mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    bpy.context.scene.update()
    return rig
# build_model
# Instantiates a Grimrock Model in the Blender scene: a "game_matrix" root
# empty that converts game space to Blender space, one empty per plain node,
# and one mesh object (plus armature and skinning) per mesh-entity node.
def build_model(model):
    game_matrix = create_game_to_blender_matrix()
    # Calculate world transforms for all nodes (ordered parents first)
    frame = calculate_frame(model.nodes, game_matrix)
    # Before adding any meshes or armatures go into Object mode.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # Build top most game matrix object
    game_matrix_ob = bpy.data.objects.new("game_matrix", None)
    game_matrix_ob.empty_draw_type = 'ARROWS'
    game_matrix_ob.empty_draw_size = 0.25
    game_matrix_ob.matrix_local = game_matrix
    bpy.context.scene.objects.link(game_matrix_ob)
    # Build all nodes but skip mesh entity nodes since we create them in a later loop
    for (node_index, node_transform) in frame:
        node = model.nodes[node_index]
        if node.mesh_entity != None:
            continue
        # Fetch parent object; parentless nodes hang off the game matrix object
        parent_ob = None
        if node.parent != -1:
            parent_name = model.nodes[node.parent].name
            parent_ob = bpy.data.objects[parent_name]
        else:
            parent_ob = game_matrix_ob
        node_ob = bpy.data.objects.new(node.name, None)
        bpy.context.scene.objects.link(node_ob)
        node_ob.empty_draw_type = 'ARROWS'
        node_ob.empty_draw_size = 0.25
        node_ob.parent = parent_ob
        node_ob.matrix_world = node_transform
    # Update the scene after all objects have been created
    bpy.context.scene.update()
    # Now loop through all nodes again, but this time create the actual mesh objects
    for (node_index, node_transform) in frame:
        node = model.nodes[node_index]
        if node.mesh_entity == None:
            continue
        # Build vertex groups for skinning
        vertex_groups = build_vertex_groups(model, node)
        # Build the armature and pose it in initial state
        bl_rig = build_armature(model, node, vertex_groups, game_matrix)
        # Fetch needed data to build
        mesh_data = node.mesh_entity.mesh_data
        positions = get_vertex_array(mesh_data, VTX_ARRAY_POSITION)
        normals = get_vertex_array(mesh_data, VTX_ARRAY_NORMAL)
        colors = get_vertex_array(mesh_data, VTX_ARRAY_COLOR)
        indices = mesh_data.indices
        num_faces = int( mesh_data.num_indices / 3 )
        # Create Mesh to work with
        me = bpy.data.meshes.new(node.name)
        # Auto smooth the mesh
        me.use_auto_smooth = True
        # Add vertices
        # Note: These are in native game format, object hierarchy takes care of the transform
        me.vertices.add(mesh_data.num_vertices)
        for i in range(mesh_data.num_vertices):
            co = Vector(positions[i])
            me.vertices[i].co = (co.x, co.y, co.z)
        # Add normals
        # Note: These are in native game format, object hierarchy takes care of the transform
        if normals != None:
            for i in range(mesh_data.num_vertices):
                normal = Vector(normals[i])
                me.vertices[i].normal = (normal.x, normal.y, normal.z)
        # Add faces
        # Note: No flipping, object hierarchy makes sure this comes out correct
        me.tessfaces.add(num_faces)
        for i in range(num_faces):
            idx = i * 3
            me.tessfaces[i].vertices_raw = (indices[idx+0], indices[idx+1], indices[idx+2], 0)
            me.tessfaces[i].use_smooth = True
        # Add colors
        if colors != None:
            # Create color-set layer
            color_layer = me.tessface_vertex_colors.new("colorset")
            me.tessface_vertex_colors.active = color_layer
            # Convert to float range
            float_colors = convert_colors(colors)
            # Assign colors
            for f in me.tessfaces:
                color_layer.data[f.index].color1 = float_colors[f.vertices[0]][0:3]
                color_layer.data[f.index].color2 = float_colors[f.vertices[1]][0:3]
                color_layer.data[f.index].color3 = float_colors[f.vertices[2]][0:3]
        # Add texture coordinate sets
        # Note: We flip the v-coordinates, so remember to reverse that when exporting out!
        first_uv_layer = None
        for i in range(8):
            tex_coords = get_vertex_array(mesh_data, VTX_ARRAY_TEXCOORD0 + i)
            if tex_coords == None:
                continue
            # Create uv-layer for these texture coordinates
            uv_layer = me.tessface_uv_textures.new("uvset%d" % i)
            me.tessface_uv_textures.active = uv_layer
            if first_uv_layer == None:
                first_uv_layer = uv_layer
            # Assign uv coordinates to layer faces
            for f in me.tessfaces:
                uvco1 = tex_coords[f.vertices[0]][0:2]
                uvco2 = tex_coords[f.vertices[1]][0:2]
                uvco3 = tex_coords[f.vertices[2]][0:2]
                # Flip v coordinates
                uvco1 = (uvco1[0], 1.0 - uvco1[1])
                uvco2 = (uvco2[0], 1.0 - uvco2[1])
                uvco3 = (uvco3[0], 1.0 - uvco3[1])
                uv_layer.data[f.index].uv = (uvco1, uvco2, uvco3)
        # Set first created uv-layer as active layer
        if first_uv_layer != None:
            me.tessface_uv_textures.active = first_uv_layer
        # Create and assign materials
        for segment in mesh_data.segments:
            bl_material = None
            if segment.material not in bpy.data.materials.keys():
                bl_material = bpy.data.materials.new(segment.material)
            else:
                bl_material = bpy.data.materials[segment.material]
            material_index = len(me.materials)
            me.materials.append(bl_material)
            face_offset = int(segment.index_offset / 3)
            # FIX: offset into the segment's own face range; previously every
            # segment stamped its material onto faces starting at index 0,
            # overwriting earlier segments and leaving later faces unassigned.
            for fi in range(segment.num_triangles):
                me.tessfaces[face_offset + fi].material_index = material_index
        # Figure out parent object for this node, if none we automatically add it to the game matrix object
        parent_ob = game_matrix_ob
        if node.parent != -1:
            parent_node = model.nodes[node.parent]
            parent_ob = bpy.data.objects[parent_node.name]
        # Create mesh object and link with scene
        node_ob = bpy.data.objects.new(node.name, me)
        node_ob.parent = parent_ob
        node_ob.matrix_world = node_transform
        # Link with scene
        bpy.context.scene.objects.link(node_ob)
        # Create the skinning groups for this object as well as a modifier for the rig
        if vertex_groups != None:
            for group_name, group in vertex_groups.items():
                bl_group = node_ob.vertex_groups.new(group_name)
                for (v, w) in group:
                    bl_group.add([v], w, 'ADD')
            mod = node_ob.modifiers.new('RigModifier', 'ARMATURE')
            mod.object = bl_rig
            mod.use_bone_envelopes = False
            mod.use_vertex_groups = True
        # Update the mesh
        me.update()
    # Update scene
    bpy.context.scene.update()
# build_animation
# Bakes a loaded animation onto the scene, frame by frame, as keyframes.
def build_animation(armature_object, anim):
    """Bake 'anim' onto the scene, keying pose bones of 'armature_object'.

    For every frame, each animated node's position/rotation/scale tracks are
    applied to a helper "frame" hierarchy, world matrices are evaluated, and
    the result is keyframed either on a pose bone of the armature (when a
    bone with the node's name exists) or directly on the Blender object of
    the same name.  Always returns True.
    """
    #bpy.context.window_manager.progress_begin(0, 9999)
    # This isn't quite correct.
    # fps isn't right since we simply truncate to integer
    fps = int(anim.frames_per_second)
    # Figure out range of animation (Blender frames are 1-based)
    frame_start = 1
    frame_end = 1 + anim.num_frames
    # Update frame settings
    bpy.context.scene.render.fps = fps
    bpy.context.scene.render.fps_base = 1.0
    bpy.context.scene.frame_start = frame_start
    bpy.context.scene.frame_end = frame_end
    bpy.context.scene.update()
    game_matrix = create_game_to_blender_matrix()  # NOTE(review): not referenced below -- TODO confirm it can be dropped
    # Set the armature as the current selected object
    bpy.context.scene.objects.active = armature_object
    # Switch to pose mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='POSE')
    armature = armature_object.data
    pose = armature_object.pose
    frame = create_object_frame()
    bl_bone_space = create_bone_space_matrix()
    for i in range(anim.num_frames):
        # First pass: push this frame's track samples into the frame objects
        for item in anim.items:
            frame_object = frame.get_object(item.node_name)
            # A track may hold fewer samples than the animation has frames
            has_position_track = i < item.num_positions
            has_rotation_track = i < item.num_rotations
            has_scale_track = i < item.num_scales
            # Tell frame object what tracks are valid for this frame
            frame_object.set_tracks(has_position_track, has_rotation_track, has_scale_track)
            # Skip matrix update if there are no tracks available for this object this frame
            if not frame_object.has_any_track:
                continue
            # Fetch current local matrix and decompose it in to location, rotation and scale components
            matrix_local = frame_object.matrix_local
            loc, rot, scl = matrix_local.decompose()
            # Apply animation tracks for valid components
            if has_position_track:
                p = item.positions[i]
                loc = Vector([p.x, p.y, p.z])
            if has_rotation_track:
                q = item.rotations[i]
                rot = Quaternion((q.w, q.x, q.y, q.z))
            if has_scale_track:
                s = item.scales[i]
                scl = Vector([s.x, s.y, s.z])
            # Create matrices out of our frame components
            mat_loc = Matrix.Translation((loc.x, loc.y, loc.z))
            mat_scl = Matrix(([scl.x,0,0,0],[0,scl.y,0,0],[0,0,scl.z,0],[0,0,0,1]))
            mat_rot = rot.to_matrix().to_4x4()
            # Compose the final local matrix (translate * rotate * scale)
            matrix_local = mat_loc * mat_rot * mat_scl
            # Set the matrix in the frame object
            frame_object.matrix_local = matrix_local
        # Calculate world matrices
        frame.evaluate()
        frame_index = i + 1
        # Second pass: copy evaluated matrices onto bones/objects and key them
        for frame_object in frame.frame_objects:
            # Skip object if no tracks assigned this frame
            if not frame_object.has_any_track:
                continue
            # If frame object exists as a pose bone, alter that instead of the actual object
            keyframe_object = None
            if frame_object.name in pose.bones.keys():
                # Find the pose bone and update it
                pose_bone = pose.bones[frame_object.name]
                # World matrix into armature space, then into bone space
                pm = armature_object.matrix_world.inverted() * frame_object.matrix_world * bl_bone_space
                pose_bone.matrix = pm
                keyframe_object = pose_bone
            else:
                # Update actual blender object
                bl_object = bpy.data.objects[frame_object.name]
                bl_object.matrix_world = frame_object.matrix_world
                keyframe_object = bl_object
            # FIXME: hack, flush the animation system in Blender
            # (per-object per-frame flush is the main performance cost noted in the file header)
            #bpy.context.scene.frame_set(frame_index)
            bpy.context.scene.update()
            # Add a keyframe(s) at this location
            if keyframe_object != None:
                if frame_object.has_position_track:
                    keyframe_object.keyframe_insert(data_path="location", frame = frame_index)
                if frame_object.has_rotation_track:
                    keyframe_object.keyframe_insert(data_path="rotation_quaternion", frame = frame_index)
                if frame_object.has_scale_track:
                    keyframe_object.keyframe_insert(data_path="scale", frame = frame_index)
        # Go to next frame
        #progress_value = min(int((frame_index / anim.num_frames) * 99.0), 99) * 100
        #bpy.context.window_manager.progress_update(progress_value)
    #bpy.context.window_manager.progress_end()
    # Switch to object mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    return True
# create_model
# Creates a Model() from current blender scene
def create_model(settings):
    """Build a Model() from the current scene using export 'settings'.

    Walks all root EMPTY/MESH objects and their children, creates one Node
    per object, then fills in vertex data, skinning and bounds for every
    mesh object.  The helper object named "game_matrix" is excluded.
    Returns the populated Model().
    """
    #print("Export skinning: " + str(settings.option_skinning))
    #print("Export tangents: " + str(settings.option_tangents))
    # Switch to object mode before doing anything
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # Create an empty model to work with
    model = Model()
    model.create_empty()
    # Collect all root objects, also attempt to find the game object
    objects = []
    mesh_objects = []
    for obj in bpy.data.objects:
        # Skip non-roots; children are gathered in the pass below
        if obj.parent != None:
            continue
        # Only care about EMPTY and MESH objects
        if obj.type == 'EMPTY' or obj.type == 'MESH':
            objects.append(obj)
    # Append all children. 'objects' grows while we scan it, giving a
    # breadth-first order that guarantees parents precede their children.
    idx = 0
    while idx < len(objects):
        obj = objects[idx]
        for child in obj.children:
            # Only care about EMPTY and MESH objects
            if child.type != 'EMPTY' and child.type != 'MESH':
                continue
            # Add mesh objects to a special list that we want to iterate at a later stage
            if child.type == 'MESH':
                mesh_objects.append(child)
            objects.append(child)
        idx += 1
    # Generate nodes for all collected objects
    for obj in objects:
        # Ignore game_matrix object, it's simply a helper object to transform model into Blender friendly space
        if obj.name == "game_matrix":
            continue
        node = Node()
        node.create_empty(obj.name, obj.matrix_local)
        # Set the parent for the node
        if obj.parent != None and obj.parent.name != "game_matrix":
            node.parent = model.find_node_index(obj.parent.name)
        # Add node to model
        model.add_node(node)
    # Build the mesh entities for mesh objects
    for obj in mesh_objects:
        bl_rig = None
        bl_rig_state = 'REST'
        # Find the armature, cache the rig and the state it's currently in
        armature_ob = obj.find_armature()
        if armature_ob != None:
            bl_rig = armature_ob.data
            bl_rig_state = bl_rig.pose_position
        # Set rig in rest position if we're currently looking at it as posed
        if bl_rig != None and bl_rig_state != settings.option_rig_state:
            bl_rig.pose_position = settings.option_rig_state
            bpy.context.scene.update()
        bones_info = None
        bones_name = None
        if bl_rig != None and settings.option_skinning:
            # Collect bones and parent bone index
            bones_info = []
            for bl_bone in bl_rig.bones:
                parent_bone_index = -1
                if bl_bone.parent != None:
                    # Find parent bone (linear scan by name)
                    for index in range(len(bl_rig.bones)):
                        if bl_rig.bones[index].name == bl_bone.parent.name:
                            parent_bone_index = index
                            break
                bones_info.append([bl_bone, parent_bone_index])
            # Sort by parent index so parents come before their children
            bones_info = sorted(bones_info, key = lambda bone_info: bone_info[1])
            # Create named list of bones info
            bones_name = []
            for bone_info in bones_info:
                bones_name.append(bone_info[0].name)
        # Get the mesh data (modifiers applied with RENDER settings)
        bl_mesh = obj.to_mesh(bpy.context.scene, True, 'RENDER', calc_tessface = False, calc_undeformed = False)
        surface = TriSurface()
        surface.create_from_mesh(settings, bl_mesh, bones_name, obj.vertex_groups, obj.bound_box)
        mesh_entity = MeshEntity()
        mesh_entity.create_empty()
        mesh_data = mesh_entity.mesh_data
        mesh_data.num_vertices = len(surface.vertices)
        # Copy vertex coordinates
        positions = mesh_data.vertex_arrays[VTX_ARRAY_POSITION]
        positions.data_type = VTX_DATA_FLOAT
        positions.dim = 3
        positions.stride = 12
        positions.data = [None] * mesh_data.num_vertices
        for vi, vertex in enumerate(surface.vertices):
            positions.data[vi] = (vertex[0], vertex[1], vertex[2])
        # Copy normals
        normals = mesh_data.vertex_arrays[VTX_ARRAY_NORMAL]
        normals.data_type = VTX_DATA_FLOAT
        normals.dim = 3
        normals.stride = 12
        normals.data = [None] * mesh_data.num_vertices
        for vi, normal in enumerate(surface.normals):
            normals.data[vi] = (normal[0], normal[1], normal[2])
        # Copy uv coordinates (V is flipped to the game's convention)
        if surface.has_uv:
            texcoords = mesh_data.vertex_arrays[VTX_ARRAY_TEXCOORD0]
            texcoords.data_type = VTX_DATA_FLOAT
            texcoords.dim = 2
            texcoords.stride = 8
            texcoords.data = [None] * mesh_data.num_vertices
            for vi, uv in enumerate(surface.uvs):
                texcoords.data[vi] = (uv[0], 1.0 - uv[1])
        # Copy tangents and bitangents
        if surface.has_tangents:
            tangents = mesh_data.vertex_arrays[VTX_ARRAY_TANGENT]
            tangents.data_type = VTX_DATA_FLOAT
            tangents.dim = 3
            tangents.stride = 12
            tangents.data = [None] * mesh_data.num_vertices
            for vi, tangent in enumerate(surface.tangents):
                tangents.data[vi] = (tangent[0], tangent[1], tangent[2])
            bitangents = mesh_data.vertex_arrays[VTX_ARRAY_BITANGENT]
            bitangents.data_type = VTX_DATA_FLOAT
            bitangents.dim = 3
            bitangents.stride = 12
            bitangents.data = [None] * mesh_data.num_vertices
            for vi, bitangent in enumerate(surface.bitangents):
                bitangents.data[vi] = (bitangent[0], bitangent[1], bitangent[2])
        # Copy colors (float3 packed into 4 bytes)
        if surface.has_color:
            colors = mesh_data.vertex_arrays[VTX_ARRAY_COLOR]
            colors.data_type = VTX_DATA_BYTE
            colors.dim = 4
            colors.stride = 4
            colors.data = [None] * mesh_data.num_vertices
            for vi, color in enumerate(surface.colors):
                colors.data[vi] = convert_color_float3_to_byte4(color)
        # Copy vertex bone- weights and indices (4 influences per vertex)
        if surface.has_weights:
            bone_indices = mesh_data.vertex_arrays[VTX_ARRAY_BONE_INDEX]
            bone_indices.data_type = VTX_DATA_INT
            bone_indices.dim = 4
            bone_indices.stride = 16
            bone_indices.data = [None] * mesh_data.num_vertices
            for vi, indices in enumerate(surface.bone_indices):
                bone_indices.data[vi] = (indices[0], indices[1], indices[2], indices[3])
            bone_weights = mesh_data.vertex_arrays[VTX_ARRAY_BONE_WEIGHT]
            bone_weights.data_type = VTX_DATA_FLOAT
            bone_weights.dim = 4
            bone_weights.stride = 16
            bone_weights.data = [None] * mesh_data.num_vertices
            for vi, weights in enumerate(surface.bone_weights):
                bone_weights.data[vi] = (weights[0], weights[1], weights[2], weights[3])
        if surface.has_weights:
            # Set number of bones
            mesh_entity.num_bones = len(bones_info)
            # Add bones to mesh entity
            bl_bone_space = create_bone_space_matrix()
            for bl_bone, parent_index in bones_info:
                node_index = model.find_node_index(bl_bone.name)
                # Model-to-bone matrix: inverse bind pose in bone space
                m2b = bl_bone_space * bl_bone.matrix_local.inverted()
                bone = Bone()
                bone.create_empty(node_index)
                bone.model_to_bone.from_matrix(m2b)
                mesh_entity.bones.append(bone)
        # Copy indices and create segments (one segment per non-empty material)
        for material_index, tri_material in enumerate(surface.materials):
            num_indices = len(tri_material.indices)
            if num_indices <= 0:
                continue
            segment = MeshSegment()
            segment.create_empty()
            segment.material = bl_mesh.materials[material_index].name
            segment.index_offset = len(mesh_data.indices)
            segment.num_triangles = int(num_indices / 3)
            mesh_data.segments.append(segment)
            for index in tri_material.indices:
                mesh_data.indices.append(index)
        mesh_data.num_indices = len(mesh_data.indices)
        mesh_data.num_segments = len(mesh_data.segments)
        # NOTE(review): these two assignments are repeated element-wise in the
        # loop below; harmless but redundant
        mesh_data.bound_min = [surface.bound_min[0], surface.bound_min[1], surface.bound_min[2]]
        mesh_data.bound_max = [surface.bound_max[0], surface.bound_max[1], surface.bound_max[2]]
        # Calculate bounds (center + bounding-sphere radius)
        len_sqr = 0.0
        for i in range(3):
            mesh_data.bound_min[i] = surface.bound_min[i]
            mesh_data.bound_max[i] = surface.bound_max[i]
            mesh_data.bound_center[i] = (surface.bound_min[i] + surface.bound_max[i]) * 0.5
            len_sqr += (surface.bound_max[i] - mesh_data.bound_center[i]) ** 2
        # Only set a radius when the mesh has real extents
        if len_sqr > 0.000001:
            mesh_data.bound_radius = math.sqrt(len_sqr)
        # Set created mesh entity on node
        node_index = model.find_node_index(obj.name)
        node = model.nodes[node_index]
        node.set_mesh_entity(mesh_entity)
        # Put rig back in pose position
        if bl_rig != None and bl_rig_state != settings.option_rig_state:
            bl_rig.pose_position = bl_rig_state
            bpy.context.scene.update()
    return model
# fnv1a
# 32-bit FNV-1a string hash, used to resolve asset names in .dat containers
def fnv1a(string, seed = 0x811C9DC5, prime = 0x01000193):
    """Return the 32-bit FNV-1a hash of 'string'.

    Each character's ordinal is XOR-ed into the running hash, which is then
    multiplied by the FNV prime modulo 2**32.  Defaults are the standard
    32-bit FNV offset basis and prime.
    """
    mask = 0xFFFFFFFF
    hash_value = seed
    for ch in string:
        hash_value = ((hash_value ^ ord(ch)) * prime) & mask
    return hash_value
# load_file_table
# Loads the .dat container file table header
def load_file_table(file_object):
    """Read the GRA2 header and file table from an open .dat container.

    Returns a list of FileEntry on success, or None on a parse error or a
    bad magic (in which case 'file_object' is closed before returning).
    On success the file is left open, positioned after the table.
    """
    file_table = None
    dat_magic = 0
    try:
        dat_magic = read_magic(file_object)
    # FIX: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; narrow to Exception
    except Exception:
        print("Error parsing .dat file header!")
        file_object.close()
        return file_table
    # Figure out if it's a valid dat package
    if dat_magic != b'GRA2':
        print("Not a valid Legend of Grimrock 2 .dat file!")
        file_object.close()
        return file_table
    # Read number of file headers
    num_files = read_int(file_object)
    # Read in the file header table for all entries
    file_table = [None] * num_files
    for i in range(num_files):
        entry = FileEntry()
        entry.hash_name = read_uint(file_object)
        entry.file_offset = read_uint(file_object)
        entry.size_compressed = read_uint(file_object)
        entry.size_uncompressed = read_uint(file_object)
        entry.unknown = read_uint(file_object)
        file_table[i] = entry
    return file_table
# load_animation_table
# Loads a .dat container animation information
def load_animation_table(filename):
    """Scan a .dat container and return a list of FileEntry for animations.

    Compiled lua files inside the container are searched for strings of the
    form 'assets/animations/....fbx' to recover human-readable names; each
    name (with '.animation' appended) is hashed with FNV-1a and matched
    against the hashed names in the file table.  Unresolved entries get a
    synthetic 'hash_XXXXXXXX' name.  Returns [] when the container is
    invalid or empty.
    """
    anim_table = []
    file_object = open(filename, 'rb')
    file_table = load_file_table(file_object)
    if file_table == None or len(file_table) <= 0:
        # FIX: load_file_table() closes the file itself on parse errors, but
        # a valid-yet-empty table used to leak the open handle here
        if not file_object.closed:
            file_object.close()
        return anim_table
    # Only care about lua script files and animation files
    # We use the lua to scan for animation asset names so we can name the animation table properly
    magic_lua = 0x014a4c1b
    magic_anim = 0x4d494e41 #b'ANIM'
    # Create search strings to use when scanning the lua files
    anim_search = b'assets/animations/'
    fbx_search = b'.fbx'
    # Seek to all file entries, read the magic and place table indices for different file types
    anim_names = {}
    try:
        for entry in file_table:
            # Haven't come across any, think I recall them being zlib compressed ?
            # Skip them for now
            if entry.size_compressed != 0:
                print("Compressed file in .dat package, skipping entry")
                continue
            # Haven't come across any, I have no idea what this might be
            # Skip these entries for now
            if entry.unknown != 0:
                print("Found unknown data in .dat package, skipping entry")
                continue
            # Seek to start of internal file and read the first four bytes and treat them as
            # a 'type' magic of what that file entry actually is.
            # This is of course not correct, but for the sake of finding animations and lua files it works just fine.
            file_object.seek(entry.file_offset, os.SEEK_SET)
            file_magic = read_uint(file_object)
            # Handle lua file entries
            if file_magic == magic_lua:
                # Read out the entire contents of the file into a bytearray
                lua_buffer = bytearray(file_object.read(entry.size_uncompressed))
                # Search the bytearray for 'assets/animations/' until no more could be found
                buffer_index = lua_buffer.find(anim_search, 0)
                while buffer_index >= 0:
                    # Lookup .fbx ending past the search prefix.
                    # FIX: the old hard-coded '+ 14' was the length of the
                    # model prefix, not of 'assets/animations/' (18); use
                    # len() so the two loaders stay consistent.
                    # It's a bit dangerous to assume it always ends with .fbx, but it works for now
                    end_index = lua_buffer.find(fbx_search, buffer_index + len(anim_search))
                    # If we didn't find an .fbx ending, abort now
                    if end_index < 0:
                        break
                    # Decode a string from our search indices and append a .animation ending
                    asset_name = decode_string(lua_buffer[buffer_index:end_index]) + ".animation"
                    # Use FNV1a to hash the asset name
                    hash_name = fnv1a(asset_name)
                    # If hash name isn't already in list, append a new entry pointing to the real asset name
                    # so we can 'decode' file names later
                    if hash_name not in anim_names:
                        anim_names[hash_name] = asset_name
                    # Restart the search from the end of the last search result
                    buffer_index = lua_buffer.find(anim_search, end_index + len(fbx_search))
            # Handle animation file entries, this simply adds the table entry to the animation table
            if file_magic == magic_anim:
                anim_table.append(entry)
    finally:
        # We're done with the file, close it even if a read failed
        file_object.close()
    # Go through our animation table and attempt to resolve all the asset names
    # If we couldn't find one, simply build a name using the hash name
    for entry in anim_table:
        if entry.hash_name in anim_names:
            entry.name = anim_names[entry.hash_name]
        else:
            entry.name = "hash_%8x" % entry.hash_name
    return anim_table
# load_model_table
# Loads .dat container model information
def load_model_table(filename):
    """Scan a .dat container and return a list of FileEntry for models.

    Compiled lua files inside the container are searched for strings of the
    form 'assets/models/....fbx' to recover human-readable names; each name
    (with '.model' appended) is hashed with FNV-1a and matched against the
    hashed names in the file table.  Unresolved entries get a synthetic
    'hash_XXXXXXXX' name.  Returns [] when the container is invalid or empty.
    """
    model_table = []
    file_object = open(filename, 'rb')
    file_table = load_file_table(file_object)
    if file_table == None or len(file_table) <= 0:
        # FIX: load_file_table() closes the file itself on parse errors, but
        # a valid-yet-empty table used to leak the open handle here
        if not file_object.closed:
            file_object.close()
        return model_table
    # Only care about lua script files and model files
    # We use the lua to scan for model asset names so we can name the model table properly
    magic_lua = 0x014a4c1b
    magic_model = 0x314c444d #b'MDL1'
    # Create search strings to use when scanning the lua files
    model_search = b'assets/models/'
    fbx_search = b'.fbx'
    # Seek to all file entries, read the magic and place table indices for different file types
    model_names = {}
    try:
        for entry in file_table:
            # Haven't come across any, think I recall them being zlib compressed ?
            # Skip them for now
            if entry.size_compressed != 0:
                print("Compressed file in .dat package, skipping entry")
                continue
            # Haven't come across any, I have no idea what this might be
            # Skip these entries for now
            if entry.unknown != 0:
                print("Found unknown data in .dat package, skipping entry")
                continue
            # Seek to start of internal file and read the first four bytes and treat them as
            # a 'type' magic of what that file entry actually is.
            # This is of course not correct, but for the sake of finding models and lua files it works just fine.
            file_object.seek(entry.file_offset, os.SEEK_SET)
            file_magic = read_uint(file_object)
            # Handle lua file entries
            if file_magic == magic_lua:
                # Read out the entire contents of the file into a bytearray
                lua_buffer = bytearray(file_object.read(entry.size_uncompressed))
                # Search the bytearray for 'assets/models/' until no more could be found
                buffer_index = lua_buffer.find(model_search, 0)
                while buffer_index >= 0:
                    # Lookup .fbx ending past the search prefix.
                    # It's a bit dangerous to assume the path always ends with
                    # .fbx, but it works for now
                    end_index = lua_buffer.find(fbx_search, buffer_index + len(model_search))
                    # If we didn't find an .fbx ending, abort now
                    if end_index < 0:
                        break
                    # Decode a string from our search indices and append a .model ending
                    asset_name = decode_string(lua_buffer[buffer_index:end_index]) + ".model"
                    # Use FNV1a to hash the asset name
                    hash_name = fnv1a(asset_name)
                    # If hash name isn't already in list, append a new entry pointing to the real asset name
                    # so we can 'decode' file names later
                    if hash_name not in model_names:
                        model_names[hash_name] = asset_name
                    # Restart the search from the end of the last search result
                    buffer_index = lua_buffer.find(model_search, end_index + len(fbx_search))
            # Handle model file entries, this simply adds the table entry to the model table
            if file_magic == magic_model:
                model_table.append(entry)
    finally:
        # We're done with the file, close it even if a read failed
        file_object.close()
    # Go through our model table and attempt to resolve all the asset names
    # If we couldn't find one, simply build a name using the hash name
    for entry in model_table:
        if entry.hash_name in model_names:
            entry.name = model_names[entry.hash_name]
        else:
            entry.name = "hash_%8x" % entry.hash_name
    return model_table
# load_model
# Loads a model from a binary file, uses a file offset if we're reading directly from a .dat container
def load_model(filename, file_offset, context):
    """Load a binary model from 'filename' into the Blender scene.

    'file_offset' > 0 seeks into a .dat container before parsing; pass 0 for
    a stand-alone .model file.  Returns True.
    FIX: the file is now closed in a 'finally' block, so it no longer leaks
    when load_binary_model() raises.
    """
    file_object = open(filename, 'rb')
    try:
        # Seek to offset in file, this is only used if loading from .dat containers
        if file_offset > 0:
            file_object.seek(file_offset, os.SEEK_SET)
        # Load the binary model data
        load_binary_model(file_object, context)
    finally:
        # Close file if load_binary_model hasn't already done so
        if not file_object.closed:
            file_object.close()
    return True
# save_model
# Saves a scene to binary model file
def save_model(settings, context):
    """Write the current scene as a binary model to settings.filename.

    The "game_matrix" helper transform is cleared for the duration of the
    export and restored afterwards, whether or not the export succeeds.
    Returns True; exceptions from opening/writing propagate to the caller.
    FIX: previously, if open() itself raised, the error handler referenced
    the unbound 'file_object' name and masked the real exception with a
    NameError.  A try/finally with a None guard handles every path.
    """
    # Clear the game-matrix transform
    game_matrix = ObjectTransform("game_matrix")
    game_matrix.clear_transform()
    file_object = None
    try:
        file_object = open(settings.filename, 'wb')
        save_binary_model(file_object, settings, context)
    finally:
        # Restore any game-matrix changes and close the file (if it was
        # opened and save_binary_model hasn't already closed it), on both
        # the success and the error path
        game_matrix.restore_transform()
        if file_object is not None and not file_object.closed:
            file_object.close()
    return True
# load_animation
# Loads an animation from a binary file, uses a file offset if we're reading directly from a .dat container
def load_animation(filename, file_offset, armature, context):
    """Load a binary animation from 'filename' and apply it to 'armature'.

    'file_offset' > 0 seeks into a .dat container before parsing; pass 0 for
    a stand-alone .animation file.  Returns True.
    FIX: the file is now closed in a 'finally' block, so it no longer leaks
    when load_binary_animation() raises.
    """
    file_object = open(filename, 'rb')
    try:
        # Seek to offset in file, this is only used if loading from .dat containers
        if file_offset > 0:
            file_object.seek(file_offset, os.SEEK_SET)
        # Load the binary animation data
        load_binary_animation(file_object, armature, context)
    finally:
        # Close file if load_binary_animation hasn't already done so
        if not file_object.closed:
            file_object.close()
    return True
# IMPORT_OT_model
# Blender UI and base for importing models
class IMPORT_OT_model(bpy.types.Operator, ImportHelper):
    # Import Model Operator.
    bl_idname = "import_scene.model"
    bl_label = "Import Model"
    bl_description = "Import a Legend of Grimrock 2 model"
    bl_options = { 'REGISTER', 'UNDO' }
    # File selection UI property
    filepath = StringProperty(name="File Path", description="Filepath used for importing the model file.", maxlen=1024, default="")
    # File list UI properties for .dat containers
    file_list = CollectionProperty(type=bpy.types.PropertyGroup)
    file_list_index = IntProperty()
    # Holds information if .dat container is being imported with specific model selection
    # (plain class attributes, i.e. state shared across operator instances)
    dat_file = ""
    model_table = []
    # execute
    # Entry point once the file dialog is confirmed
    def execute(self, context):
        file_offset = 0
        # Dig out file offset if loading from .dat container
        if self.dat_file == self.filepath:
            file_offset = self.model_table[self.file_list_index].file_offset
        # Load from binary file
        load_model(self.filepath, file_offset, context)
        return {'FINISHED'}
    # clear_file_list
    # Clears the file_list UI property of all entries and resets the dat_file cached value
    def clear_file_list(self):
        self.dat_file = ""
        num = len(self.file_list)
        while num > 0:
            self.file_list.remove(num-1)
            num -= 1
    # build_file_list
    # Updates the file_list UI property from selected .dat file, or cleans it out if needed
    def build_file_list(self):
        # Figure out if we selected a .dat file or if we selected a different .dat file
        name, ext = os.path.splitext(os.path.basename(self.filepath))
        if ext.lower() != ".dat":
            self.clear_file_list()
            return
        # Cached dat_file is still up to date, simply ignore any updates
        if self.filepath == self.dat_file:
            return
        # Clean out any previous entries in the UI file list
        self.clear_file_list()
        # Load package header and extract model information
        self.dat_file = self.filepath
        self.model_table = load_model_table(self.filepath)
        # Add all the model table entries to the UI file list
        for entry in self.model_table:
            item = self.file_list.add()
            item.name = entry.name
    # draw
    # Draws the file-dialog side panel (.dat file list)
    def draw(self, context):
        layout = self.layout
        # Update the file_list UI property if needed
        self.build_file_list()
        row = layout.row(True)
        row.label("Legend of Grimrock 2 .dat container")
        layout.template_list("UI_UL_list", "OpenFileDAT", self, "file_list", self, "file_list_index", rows=15)
    # invoke
    # Opens the file selection dialog
    def invoke(self, context, event):
        wm = context.window_manager
        wm.fileselect_add(self)
        return {'RUNNING_MODAL'}
# IMPORT_OT_anim
# Blender UI and base for importing animations
class IMPORT_OT_anim(bpy.types.Operator, ImportHelper):
    # Import Animation Operator.
    bl_idname = "import_scene.animation"
    bl_label = "Import Animation"
    bl_description = "Import a Legend of Grimrock 2 animation"
    bl_options = { 'REGISTER', 'UNDO' }
    # File selection UI property
    filepath = StringProperty(name="File Path", description="Filepath used for importing the animation file.", maxlen=1024, default="")
    # Armature selection UI property
    # NOTE(review): the picker searches bpy.data "armatures" (armature *data*
    # names) while execute() looks the name up among scene *objects*; these
    # only agree when object and data share a name -- verify
    armature_name = StringProperty(name="Armature", description="Armature to apply animation data on.", maxlen=1024, default="")
    # File list UI properties for .dat containers
    file_list = CollectionProperty(type=bpy.types.PropertyGroup)
    file_list_index = IntProperty()
    # Holds information if .dat container is being imported with specific model selection
    # (plain class attributes, i.e. state shared across operator instances)
    dat_file = ""
    animation_table = []
    # execute
    # Entry point once the file dialog is confirmed
    def execute(self, context):
        file_offset = 0
        # Dig out file offset if loading from .dat container
        if self.dat_file == self.filepath:
            file_offset = self.animation_table[self.file_list_index].file_offset
        # FIX: was 'not context.scene.objects[self.armature_name]', which
        # raised KeyError for a missing name before the guard could fail;
        # test membership instead so we fail gracefully with the message
        if len(self.armature_name) <= 0 or self.armature_name not in context.scene.objects:
            print("Can't load animation, couldn't find selected armature.")
            return {'FINISHED'}
        armature = context.scene.objects[self.armature_name]
        if armature.type != 'ARMATURE':
            print("Can't load animation, object to apply on is not of type ARMATURE.")
            return {'FINISHED'}
        # Load from binary file
        load_animation(self.filepath, file_offset, armature, context)
        return {'FINISHED'}
    # clear_file_list
    # Clears the file_list UI property of all entries and resets the dat_file cached value
    def clear_file_list(self):
        self.dat_file = ""
        num = len(self.file_list)
        while num > 0:
            self.file_list.remove(num-1)
            num -= 1
    # build_file_list
    # Updates the file_list UI property from selected .dat file, or cleans it out if needed
    def build_file_list(self):
        # Figure out if we selected a .dat file or if we selected a different .dat file
        name, ext = os.path.splitext(os.path.basename(self.filepath))
        if ext.lower() != ".dat":
            self.clear_file_list()
            return
        # Cached dat_file is still up to date, simply ignore any updates
        if self.filepath == self.dat_file:
            return
        # Clean out any previous entries in the UI file list
        self.clear_file_list()
        # Load package header and extract animation information
        self.dat_file = self.filepath
        self.animation_table = load_animation_table(self.filepath)
        # Add all the animation table entries to the UI file list
        for entry in self.animation_table:
            item = self.file_list.add()
            item.name = entry.name
    # draw
    # Draws the file-dialog side panel (armature picker + .dat file list)
    def draw(self, context):
        layout = self.layout
        # Update the file_list UI property if needed
        self.build_file_list()
        row = layout.row(True)
        row.prop_search(self, "armature_name", bpy.data, "armatures")
        row = layout.row(True)
        row.label("Legend of Grimrock 2 .dat container")
        layout.template_list("UI_UL_list", "OpenFileDAT", self, "file_list", self, "file_list_index", rows=15)
    # invoke
    # Opens the file selection dialog
    def invoke(self, context, event):
        wm = context.window_manager
        wm.fileselect_add(self)
        return {'RUNNING_MODAL'}
# ExportSettings
# Plain value object carrying the user-selected options for save_model()
class ExportSettings():
    # __slots__ keeps the option set closed: typos on option names raise
    # AttributeError instead of silently creating new attributes.
    __slots__ = (
        "filename",           # output .model path
        "option_skinning",    # export bones and skin weights
        "option_tangents",    # export tangents/bitangents
        "option_rig_state",   # 'REST' or 'POSE' armature state during export
    )

    def __init__(self):
        # Defaults mirror the exporter UI defaults
        self.filename = ""
        self.option_skinning = True
        self.option_tangents = True
        self.option_rig_state = 'REST'
# EXPORT_OT_model
# Blender UI and base for exporting models
class EXPORT_OT_model(bpy.types.Operator, ExportHelper):
    # Export Model Operator.
    bl_idname = "export_scene.model"
    bl_label = "Export Model"
    bl_description = "Export to a Legend of Grimrock 2 model"
    bl_options = { 'REGISTER', 'UNDO' }
    filename_ext = ".model"
    filter_glob = StringProperty( default="*.model", options={'HIDDEN'} )
    # Export option toggles shown in the file dialog side panel
    option_skinning = BoolProperty(name="Skinning", description="If true, bones and skinning weights is exported.", default=True)
    option_tangents = BoolProperty(name="Tangents", description="If true, tangents and bi-tangents are calculated and exported.", default=True)
    # Whether to export meshes in the armature's rest or current pose state
    options_posing = EnumProperty(items=[
        ('POSE', 'Pose', "Exports the mesh in a posed state rather than the default rest state.", 'POSE_DATA', 1),
        ('REST', 'Rest', "Exports the mesh in unposed rest state.", 'ARMATURE_DATA', 2) #OUTLINER_OB_ARMATURE
        ], name = "", description="Mesh deformation setting", default='REST')
    # File selection UI property
    filepath = StringProperty(name="File Path", description="Filepath used for exporting the model file.", maxlen=1024, default="")
    # execute
    # Entry point once the file dialog is confirmed: packs the UI options
    # into an ExportSettings and hands off to save_model()
    def execute(self, context):
        # Create settings to send to binary model export
        settings = ExportSettings()
        settings.filename = self.filepath
        settings.option_rig_state = self.options_posing
        settings.option_skinning = self.option_skinning
        settings.option_tangents = self.option_tangents
        # Save to binary file
        save_model(settings, context)
        return {'FINISHED'}
    # draw
    # Draws the export options in the file dialog side panel
    def draw(self, context):
        layout = self.layout
        row = layout.row(True)
        row.label(text="", icon = 'MOD_ARMATURE')
        row.prop(self, "option_skinning")
        row = layout.row(True)
        row.label(text="", icon = 'MANIPUL')
        row.prop(self, "option_tangents")
        row = layout.row(True)
        row.label(text="Armature State")
        row = layout.row(True)
        row.prop(self, "options_posing")
    # invoke
    # Opens the file selection dialog
    def invoke(self, context, event):
        wm = context.window_manager
        wm.fileselect_add(self)
        return {'RUNNING_MODAL'}
# menu_func_import_model
# Blender menu operator to invoke model importer
def menu_func_import_model(self, context):
    # Adds the model importer entry to File > Import
    self.layout.operator(IMPORT_OT_model.bl_idname, text="Legend of Grimrock 2 Model (.model)")
# menu_func_export_model
# Blender menu operator to invoke model exporter
def menu_func_export_model(self, context):
    # Adds the model exporter entry to File > Export
    self.layout.operator(EXPORT_OT_model.bl_idname, text="Legend of Grimrock 2 Model (.model)")
# menu_func_import_anim
# Blender menu operator to invoke animation importer
def menu_func_import_anim(self, context):
    # Adds the animation importer entry to File > Import
    self.layout.operator(IMPORT_OT_anim.bl_idname, text="Legend of Grimrock 2 Animation (.animation)")
# register
# Registers this module's classes with Blender and hooks the menu entries
def register():
    bpy.utils.register_module(__name__)
    # Append each menu hook to its File menu, table-driven
    hooks = (
        (bpy.types.INFO_MT_file_import, menu_func_import_model),
        (bpy.types.INFO_MT_file_import, menu_func_import_anim),
        (bpy.types.INFO_MT_file_export, menu_func_export_model),
    )
    for menu, hook in hooks:
        menu.append(hook)
# unregister
# Unregisters this module's classes and removes the menu entries
def unregister():
    bpy.utils.unregister_module(__name__)
    # Remove each menu hook from its File menu, table-driven
    hooks = (
        (bpy.types.INFO_MT_file_import, menu_func_import_model),
        (bpy.types.INFO_MT_file_import, menu_func_import_anim),
        (bpy.types.INFO_MT_file_export, menu_func_export_model),
    )
    for menu, hook in hooks:
        menu.remove(hook)
# Main function
# Allows the addon to be (re)registered by running this script directly from
# Blender's text editor, in addition to normal addon installation.
if __name__ == "__main__":
    register()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement