Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # bitcpy
- # Loads a Legend of Grimrock 2 model from either a .model file or a .dat container
- # Names of models and animations in .dat container are determined by doing an extremly simple scan of lua compiled files.
# Blender add-on registration metadata; Blender reads this dict to list the
# importer in the add-on manager and the File > Import menu.
bl_info = {
    "name": "Legend of Grimrock 2 Model Format (.model)",
    "author": "",
    "version": (1, 1, 0),    # add-on version
    "blender": (2, 71, 0),   # minimum supported Blender version (pre-2.80 API)
    "api": 36339,
    "location": "File > Import > Legend of Grimrock 2 Model (.model)",
    "description": "Import Legend of Grimrock Models (.model)",
    "warning": "",
    "wiki_url": "",
    "tracker_url": "",
    "category": "Import-Export"}
- import os
- import struct
- import sys
- import math
- import bpy
- import bmesh
- from mathutils import *
- from bpy.props import *
- from bpy_extras.io_utils import ExportHelper, ImportHelper, axis_conversion
- from bpy_extras.image_utils import load_image
# Vertex data types: element type stored in a VertexArray stream.
VTX_DATA_BYTE = 0
VTX_DATA_SHORT = 1
VTX_DATA_INT = 2
VTX_DATA_FLOAT = 3
# Vertex array types: which of the 15 fixed attribute slots a stream occupies
# (see MeshData.vertex_arrays).
VTX_ARRAY_POSITION = 0
VTX_ARRAY_NORMAL = 1
VTX_ARRAY_TANGENT = 2
VTX_ARRAY_BITANGENT = 3
VTX_ARRAY_COLOR = 4
VTX_ARRAY_TEXCOORD0 = 5
VTX_ARRAY_TEXCOORD1 = 6
VTX_ARRAY_TEXCOORD2 = 7
VTX_ARRAY_TEXCOORD3 = 8
VTX_ARRAY_TEXCOORD4 = 9
VTX_ARRAY_TEXCOORD5 = 10
VTX_ARRAY_TEXCOORD6 = 11
VTX_ARRAY_TEXCOORD7 = 12
VTX_ARRAY_BONE_INDEX = 13
VTX_ARRAY_BONE_WEIGHT = 14
def read_magic(file_object, endian='<'):
    """Read a 4-byte FourCC magic tag from the file and return it as bytes.

    Fix: dropped the stray trailing semicolon and the needless temporary.
    """
    return struct.unpack(endian + "4s", file_object.read(4))[0]
def read_uint(file_object, endian='<'):
    """Read one unsigned 32-bit integer (little-endian by default)."""
    return struct.unpack(endian + 'I', file_object.read(4))[0]
def read_int(file_object, endian='<'):
    """Read one signed 32-bit integer (little-endian by default)."""
    return struct.unpack(endian + 'i', file_object.read(4))[0]
def read_int2(file_object, endian='<'):
    """Read two signed 32-bit integers; returns a 2-tuple."""
    return struct.unpack(endian + 'ii', file_object.read(8))
def read_int3(file_object, endian='<'):
    """Read three signed 32-bit integers; returns a 3-tuple."""
    return struct.unpack(endian + 'iii', file_object.read(12))
def read_int4(file_object, endian='<'):
    """Read four signed 32-bit integers; returns a 4-tuple."""
    return struct.unpack(endian + 'iiii', file_object.read(16))
def read_float(file_object, endian='<'):
    """Read one 32-bit float (little-endian by default)."""
    return struct.unpack(endian + 'f', file_object.read(4))[0]
def read_float2(file_object, endian='<'):
    """Read two 32-bit floats; returns a 2-tuple."""
    return struct.unpack(endian + 'ff', file_object.read(8))
def read_float3(file_object, endian='<'):
    """Read three 32-bit floats; returns a 3-tuple."""
    return struct.unpack(endian + 'fff', file_object.read(12))
def read_float4(file_object, endian='<'):
    """Read four 32-bit floats; returns a 4-tuple."""
    return struct.unpack(endian + 'ffff', file_object.read(16))
def read_matrix4x3(file_object, endian='<'):
    """Read a 4x3 matrix (four rows of three floats) as a flat 12-tuple."""
    return struct.unpack(endian + '12f', file_object.read(48))
def read_short(file_object, endian='<'):
    """Read one signed 16-bit integer (little-endian by default)."""
    return struct.unpack(endian + 'h', file_object.read(2))[0]
def read_short2(file_object, endian='<'):
    """Read two signed 16-bit integers; returns a 2-tuple."""
    return struct.unpack(endian + 'hh', file_object.read(4))
def read_short3(file_object, endian='<'):
    """Read three signed 16-bit integers; returns a 3-tuple."""
    return struct.unpack(endian + 'hhh', file_object.read(6))
def read_short4(file_object, endian='<'):
    """Read four signed 16-bit integers; returns a 4-tuple."""
    return struct.unpack(endian + 'hhhh', file_object.read(8))
def read_byte(file_object, endian='<'):
    """Read one unsigned byte."""
    return struct.unpack(endian + 'B', file_object.read(1))[0]
def read_byte2(file_object, endian='<'):
    """Read two unsigned bytes; returns a 2-tuple."""
    return struct.unpack(endian + 'BB', file_object.read(2))
def read_byte3(file_object, endian='<'):
    """Read three unsigned bytes; returns a 3-tuple."""
    return struct.unpack(endian + 'BBB', file_object.read(3))
def read_byte4(file_object, endian='<'):
    """Read four unsigned bytes; returns a 4-tuple."""
    return struct.unpack(endian + 'BBBB', file_object.read(4))
def read_string(file_object, num, endian='<'):
    """Read `num` raw bytes and decode them as UTF-8, ignoring bad sequences."""
    raw = struct.unpack(endian + str(num) + 's', file_object.read(num))[0]
    return raw.decode("utf-8", "ignore")
def read_len_string(file_object, endian='<'):
    """Read an int32 length prefix, then that many bytes as a UTF-8 string.

    A zero length yields the empty string without touching the file further.
    """
    length = read_int(file_object, endian)
    return read_string(file_object, length, endian) if length != 0 else ""
def decode_string(buffer_object, endian='<'):
    """Decode a whole bytes buffer as UTF-8, ignoring undecodable sequences."""
    fmt = endian + str(len(buffer_object)) + 's'
    raw = struct.unpack(fmt, buffer_object)[0]
    return raw.decode("utf-8", "ignore")
class Vec3():
    """Mutable 3-component vector matching the Grimrock 2 vec3 file layout."""
    __slots__ = ("x", "y", "z")

    def __init__(self, x=0.0, y=0.0, z=0.0):
        self.x = x
        self.y = y
        self.z = z

    def read(self, file_object):
        """Fill this vector from three consecutive floats: x, then y, then z."""
        for attr in ("x", "y", "z"):
            setattr(self, attr, read_float(file_object))
class Quat():
    """Mutable quaternion (x, y, z, w) matching the Grimrock 2 file layout."""
    __slots__ = ("x", "y", "z", "w")

    def __init__(self, x=0.0, y=0.0, z=0.0, w=1.0):
        self.x = x
        self.y = y
        self.z = z
        self.w = w

    def read(self, file_object):
        """Fill this quaternion from four consecutive floats: x, y, z, w."""
        for attr in ("x", "y", "z", "w"):
            setattr(self, attr, read_float(file_object))
class Mat4x3():
    """4x3 transform as stored on disk: three basis rows plus a translation row.

    Defaults to identity basis with zero translation.
    """
    # Fix: the original `__slots__ = ("rows")` was a plain string (missing
    # trailing comma), which only worked by accident because a single string
    # is accepted as a one-slot declaration. Use a real tuple, matching the
    # other classes in this file.
    __slots__ = ("rows",)

    def __init__(self):
        self.rows = [
            Vec3(x=1.0, y=0.0, z=0.0),
            Vec3(x=0.0, y=1.0, z=0.0),
            Vec3(x=0.0, y=0.0, z=1.0),
            Vec3(x=0.0, y=0.0, z=0.0),
        ]

    def read(self, file_object):
        """Read four vec3 rows in order: baseX, baseY, baseZ, translation."""
        for row in self.rows:
            row.read(file_object)

    def to_matrix(self):
        """Build a 4x4 mathutils Matrix; the stored rows become the columns
        of the upper 3x4 block, with (0,0,0,1) as the final row."""
        r1 = [self.rows[0].x, self.rows[1].x, self.rows[2].x, self.rows[3].x]
        r2 = [self.rows[0].y, self.rows[1].y, self.rows[2].y, self.rows[3].y]
        r3 = [self.rows[0].z, self.rows[1].z, self.rows[2].z, self.rows[3].z]
        r4 = [0.0, 0.0, 0.0, 1.0]
        return Matrix((r1, r2, r3, r4))
class Bone():
    """Links a skeleton bone to its scene node and its inverse rest matrix."""
    __slots__ = ("node_index", "model_to_bone")

    def __init__(self):
        self.node_index = -1           # index into Model.nodes; -1 until read
        self.model_to_bone = Mat4x3()  # invRestMatrix (model-space -> bone-space)

    def read(self, file_object):
        """Read int32 boneNodeIndex followed by the Mat4x3 invRestMatrix."""
        self.node_index = read_int(file_object)
        self.model_to_bone.read(file_object)
class MeshSegment():
    """A contiguous range of triangles drawn with a single material."""
    __slots__ = ("material", "primitive_type", "index_offset", "num_triangles")

    def __init__(self):
        self.material = None     # material name, None until read
        self.primitive_type = 0
        self.index_offset = 0    # first index of the segment in the index buffer
        self.num_triangles = 0

    def read(self, file_object):
        """Read: string material, int32 primitiveType, int32 firstIndex, int32 count."""
        self.material = read_len_string(file_object)
        self.primitive_type = read_int(file_object)
        self.index_offset = read_int(file_object)
        self.num_triangles = read_int(file_object)
class VertexArray():
    """One vertex attribute stream (positions, normals, UVs, ...) of a mesh."""
    __slots__ = ("data_type", "dim", "stride", "data")

    def __init__(self):
        self.data_type = 0  # one of the VTX_DATA_* constants
        self.dim = 0        # components per vertex (1..4)
        self.stride = 0     # bytes from one vertex's data to the next; 0 = unused
        self.data = None    # per-vertex values, or None when the stream is unused

    def is_valid_type(self):
        """True when data_type is one of the four known element types."""
        return self.data_type in (VTX_DATA_BYTE, VTX_DATA_SHORT, VTX_DATA_INT, VTX_DATA_FLOAT)

    def is_valid_dim(self):
        """True when the per-vertex component count is in the supported 1..4 range."""
        return 1 <= self.dim <= 4

    def read_data_type(self, file_object):
        """Read one vertex worth of data: `dim` components of `data_type`.

        Returns None when the type/dimension combination is not supported.
        """
        if not (self.is_valid_type() and self.is_valid_dim()):
            return None
        # Dispatch table indexed by [data_type][dim - 1]; relies on the
        # VTX_DATA_* constants being 0..3.
        readers = (
            (read_byte, read_byte2, read_byte3, read_byte4),
            (read_short, read_short2, read_short3, read_short4),
            (read_int, read_int2, read_int3, read_int4),
            (read_float, read_float2, read_float3, read_float4),
        )
        return readers[self.data_type][self.dim - 1](file_object)

    def read(self, num_vertices, file_object):
        """Read the stream header and, when stride is non-zero, all vertex data.

        On-disk layout: int32 dataType, int32 dim, int32 stride,
        then num_vertices * stride bytes of payload.
        """
        self.data_type = read_int(file_object)
        self.dim = read_int(file_object)
        self.stride = read_int(file_object)
        # A zero stride marks an unused stream: no payload follows.
        if self.stride == 0:
            return
        if self.is_valid_type() and self.is_valid_dim():
            self.data = [self.read_data_type(file_object) for _ in range(num_vertices)]
        else:
            # Unknown layout: keep the raw bytes so the file offset stays correct.
            print("Unknown VertexArray data, type(%d), dimension(%d), stride(%d)" % (self.data_type, self.dim, self.stride))
            self.data = [file_object.read(self.stride) for _ in range(num_vertices)]
class MeshData():
    """Geometry payload of a mesh: vertex streams, index buffer, segments, bounds."""
    __slots__ = (
        "magic",
        "version",
        "num_vertices",
        "vertex_arrays",
        "num_indices",
        "indices",
        "num_segments",
        "segments",
        "bound_center",
        "bound_radius",
        "bound_min",
        "bound_max",
    )

    def __init__(self):
        self.magic = 0
        self.version = 0
        self.num_vertices = 0
        self.vertex_arrays = 15 * [None]  # one slot per VTX_ARRAY_* stream
        self.num_indices = 0
        self.indices = None               # flat triangle index list
        self.num_segments = 0
        self.segments = None              # MeshSegment list
        self.bound_center = [0.0, 0.0, 0.0]
        self.bound_radius = 0.0
        self.bound_min = [0.0, 0.0, 0.0]
        self.bound_max = [0.0, 0.0, 0.0]

    def read(self, file_object):
        """Read a 'MESH' version-2 chunk; returns False on magic/version mismatch."""
        self.magic = read_magic(file_object)
        if self.magic != b'MESH':
            print("Invalid MeshData magic '%s', expected 'MESH'" % self.magic)
            return False
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid MeshData version %d, expected 2" % self.version)
            return False
        self.num_vertices = read_int(file_object)
        # All 15 vertex streams are present in the file; keep only those that
        # actually carried data (non-zero stride).
        for slot in range(15):
            stream = VertexArray()
            stream.read(self.num_vertices, file_object)
            if stream.data is not None:
                self.vertex_arrays[slot] = stream
        self.num_indices = read_int(file_object)
        if self.num_indices > 0:
            self.indices = [read_int(file_object) for _ in range(self.num_indices)]
        self.num_segments = read_int(file_object)
        if self.num_segments > 0:
            self.segments = []
            for _ in range(self.num_segments):
                segment = MeshSegment()
                segment.read(file_object)
                self.segments.append(segment)
        # Bounding volume: sphere (center + radius) and axis-aligned box.
        self.bound_center = read_float3(file_object)
        self.bound_radius = read_float(file_object)
        self.bound_min = read_float3(file_object)
        self.bound_max = read_float3(file_object)
        return True
class MeshEntity():
    """A drawable mesh attached to a node: geometry, skinning bones, shading flags."""
    __slots__ = (
        "mesh_data",
        "num_bones",
        "bones",
        "emissive_color",
        "cast_shadow",
    )

    def __init__(self):
        self.mesh_data = None
        self.num_bones = 0
        self.bones = None               # Bone list; None for unskinned meshes
        self.emissive_color = Vec3()
        self.cast_shadow = False

    def read(self, file_object):
        """Read MeshData, the bone list, emissive color and shadow flag.

        Returns False when the embedded MeshData fails to parse.
        """
        self.mesh_data = MeshData()
        if not self.mesh_data.read(file_object):
            return False
        self.num_bones = read_int(file_object)
        if self.num_bones > 0:
            self.bones = []
            for _ in range(self.num_bones):
                bone = Bone()
                bone.read(file_object)
                self.bones.append(bone)
        self.emissive_color.read(file_object)
        # A non-zero byte means the mesh casts shadows.
        self.cast_shadow = read_byte(file_object) != 0
        return True
class Node():
    """Scene-graph node: name, local transform, parent link and optional mesh."""
    __slots__ = ("name", "local_to_parent", "parent", "type", "mesh_entity")

    def __init__(self):
        self.name = ""
        self.local_to_parent = Mat4x3()
        self.parent = -1        # index of the parent node; -1 for roots
        self.type = -1          # node type id; 0 means a mesh is attached
        self.mesh_entity = None

    def read(self, file_object):
        """Read name, localToParent, parent index and type; a type of 0 is
        followed by an embedded MeshEntity. Returns False on a bad mesh."""
        self.name = read_len_string(file_object)
        self.local_to_parent.read(file_object)
        self.parent = read_int(file_object)
        self.type = read_int(file_object)
        if self.type == 0:
            self.mesh_entity = MeshEntity()
            if not self.mesh_entity.read(file_object):
                return False
        return True
class Model():
    """Top-level 'MDL1' model file: a flat list of scene-graph nodes."""
    __slots__ = ("magic", "version", "num_nodes", "nodes")

    def __init__(self):
        self.magic = 0
        self.version = 0
        self.num_nodes = 0
        self.nodes = None

    def read(self, file_object):
        """Read the MDL1 version-2 header and every node.

        Returns False on magic/version mismatch, True otherwise.
        """
        self.magic = read_magic(file_object)
        if self.magic != b'MDL1':
            print("Invalid ModelFile magic '%s', expected 'MDL1'" % self.magic)
            return False
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid ModelFile version %d, expected 2" % self.version)
            return False
        self.num_nodes = read_int(file_object)
        if self.num_nodes > 0:
            self.nodes = []
            for _ in range(self.num_nodes):
                node = Node()
                node.read(file_object)
                self.nodes.append(node)
        return True
class NodeItem():
    """Per-node animation tracks: position, rotation and scale key frames."""
    __slots__ = (
        "node_name",
        "num_positions",
        "positions",
        "num_rotations",
        "rotations",
        "num_scales",
        "scales",
    )

    def __init__(self):
        self.node_name = ""
        self.num_positions = 0
        self.positions = None  # Vec3 keys, one per animated frame
        self.num_rotations = 0
        self.rotations = None  # Quat keys
        self.num_scales = 0
        self.scales = None     # Vec3 keys

    def read(self, file_object):
        """Read the target node name and its three key-frame tracks.

        Each track is an int32 count followed by that many keys; empty
        tracks leave the corresponding list as None.
        """
        self.node_name = read_len_string(file_object)
        self.num_positions = read_int(file_object)
        if self.num_positions > 0:
            self.positions = []
            for _ in range(self.num_positions):
                key = Vec3()
                key.read(file_object)
                self.positions.append(key)
        self.num_rotations = read_int(file_object)
        if self.num_rotations > 0:
            self.rotations = []
            for _ in range(self.num_rotations):
                key = Quat()
                key.read(file_object)
                self.rotations.append(key)
        self.num_scales = read_int(file_object)
        if self.num_scales > 0:
            self.scales = []
            for _ in range(self.num_scales):
                key = Vec3()
                key.read(file_object)
                self.scales.append(key)
class Animation():
    """An 'ANIM' chunk: a named clip with per-node key-frame tracks."""
    __slots__ = (
        "magic",
        "version",
        "name",
        "frames_per_second",
        "num_frames",
        "num_items",
        "items",
    )

    def __init__(self):
        self.magic = 0
        self.version = 0
        self.name = ""
        self.frames_per_second = 0
        self.num_frames = 0
        self.num_items = 0
        self.items = None  # NodeItem list, one per animated node

    def read(self, file_object):
        """Read an ANIM version-2 chunk; returns False on magic/version mismatch."""
        self.magic = read_magic(file_object)
        if self.magic != b'ANIM':
            print("Invalid AnimationFile magic '%s', expected 'ANIM'" % self.magic)
            return False
        self.version = read_int(file_object)
        if self.version != 2:
            print("Invalid AnimationFile version %d, expected 2" % self.version)
            return False
        self.name = read_len_string(file_object)
        self.frames_per_second = read_float(file_object)
        self.num_frames = read_int(file_object)
        self.num_items = read_int(file_object)
        # One NodeItem per animated node.
        if self.num_items > 0:
            self.items = []
            for _ in range(self.num_items):
                item = NodeItem()
                item.read(file_object)
                self.items.append(item)
        return True
# FileEntry
# File entry information in .dat container
class FileEntry(object):
    """One file record inside a Grimrock 2 .dat container."""
    __slots__ = (
        "hash_name",
        "file_offset",
        "size_compressed",
        "size_uncompressed",
        "unknown",
        "name",
    )
    def __init__(self):
        self.hash_name = 0          # hashed file name used for lookups in the container
        self.file_offset = 0        # byte offset of this entry's payload
        self.size_compressed = 0    # stored payload size
        self.size_uncompressed = 0  # payload size after decompression
        self.unknown = 0            # meaning not evident from this file — TODO confirm
        self.name = None            # plain-text name, presumably recovered by the lua scan
# ArmatureInfo
# This is a complete MESS, clean it up and make it more understandable/readable
# Alot of these things have weird namings and/or doesn't really match what we want
# There is also alot of unnecesary work done in here
class ArmatureInfo():
    """Precomputed skeleton helper data for one skinned mesh node.

    Built from a Model and the Node carrying the MeshEntity; derives
    bone-to-model transforms, node<->bone index mappings, parent/child bone
    relations and a root-first bone creation order.
    """
    __slots__ = (
        "model",
        "node",
        "bones",
        "nodes",
        "b2m_transforms",
        "node_to_bone_index",
        "bone_parents",
        "bone_childs",
        "bone_index_order",
    )
    def __init__(self, model, node):
        self.model = model
        self.node = node
        self.bones = node.mesh_entity.bones
        self.nodes = model.nodes
        # Don't change order, dependencies within function calls
        self.create_b2m_transforms()
        self.create_node_to_bone_indices()
        self.create_bone_parents()
        self.create_bone_childs()
        self.create_bone_index_order()
    # create_b2m_transforms
    # Creates bone to model space transforms for each bone
    def create_b2m_transforms(self):
        """Invert each bone's model-to-bone rest matrix into bone-to-model."""
        # Calculate all bone to model transforms
        self.b2m_transforms = []
        for bone in self.bones:
            # Fetch deform node
            # NOTE(review): deform_node is never used below — dead local.
            deform_node = self.nodes[bone.node_index]
            # Fetch model to bone matrix and invert it to get the bone to model matrix
            m2b = bone.model_to_bone.to_matrix()
            b2m = m2b.inverted()
            # Store calculated bone to model matrix
            self.b2m_transforms.append(b2m)
    # create_node_to_bone_indices
    # Creates a node to bone index mapping
    def create_node_to_bone_indices(self):
        """Build node_to_bone_index: node index -> bone index, -1 for non-bones."""
        # Create a node to bone mapping
        self.node_to_bone_index = [-1] * len(self.nodes)
        for bone_index in range(len(self.bones)):
            node_index = self.bones[bone_index].node_index
            self.node_to_bone_index[node_index] = bone_index
    # create_bone_parents
    # Generate a list of bone parents for each bone
    def create_bone_parents(self):
        """For each bone, find the nearest ancestor node that is also a bone.

        bone_parents[i] is that Bone, or None for root bones.
        """
        # Figure out all parent bones
        self.bone_parents = []
        for bone in self.bones:
            parent_bone_index = -1
            parent_bone = None
            # Walk the node chain backwards until we find a bone or there are no more parents
            node = self.nodes[bone.node_index]
            while node.parent != -1 and parent_bone_index == -1:
                parent_bone_index = self.node_to_bone_index[node.parent]
                node = self.nodes[node.parent]
            # If we found a parent bone index while walking the chain backwards fetch the bone
            if parent_bone_index != -1:
                parent_bone = self.bones[parent_bone_index]
            # Append either None or a valid parent bone
            self.bone_parents.append(parent_bone)
    # create_bone_childs
    # Generates lists of childrens for each bone
    def create_bone_childs(self):
        """Invert bone_parents: bone_childs[i] lists the direct child Bones."""
        # Map childrens for each bone
        self.bone_childs = []
        for parent_bone in self.bones:
            children = []
            for bone in self.bones:
                if bone == parent_bone:
                    continue
                # Check against current parent bone and see if we're a child of it
                bone_index = self.node_to_bone_index[bone.node_index]
                if self.bone_parents[bone_index] == parent_bone:
                    children.append(bone)
            # Add the list of children to this bone (can be empty)
            self.bone_childs.append(children)
    # create_bone_index_order
    # Creates a index list ordered such as root always comes first
    def create_bone_index_order(self):
        """Order bone indices so parents always precede their children."""
        # Figure out the order in which we want to create the bones, since we want to start
        # with bones not having any parents and walking down the chain so we can parent them while
        # we are building them
        self.bone_index_order = []
        # Start by adding bones without parents
        for bone_index in range(len(self.bones)):
            if self.bone_parents[bone_index] == None:
                self.bone_index_order.append(bone_index)
        # Start from the beginning of the bone index list and run a pass until index reaches the end.
        # This will happen naturally after bones stops being added to the list
        idx = 0
        while idx < len(self.bone_index_order):
            find_bone_index = self.bone_index_order[idx]
            find_node_index = self.bones[find_bone_index].node_index
            # Go through all bone parents and add them if a bone is parented to the current bone we are
            # scanning for. Also make sure we're not scanning ourselves
            for bone_index in range(len(self.bone_parents)):
                if bone_index == find_bone_index:
                    continue
                parent_bone = self.bone_parents[bone_index]
                if parent_bone == None:
                    continue
                if parent_bone.node_index == find_node_index:
                    self.bone_index_order.append(bone_index)
            idx += 1
    # get_bone_node
    # Returns node that bone drives
    def get_bone_node(self, bone):
        """Return the scene Node that this bone deforms."""
        return self.nodes[bone.node_index]
    # get_bone_to_model
    # Returns the bone to model-space matrix
    def get_bone_to_model(self, bone):
        """Return the precomputed bone-to-model matrix for `bone`."""
        bone_index = self.node_to_bone_index[bone.node_index]
        return self.b2m_transforms[bone_index]
    # get_length
    # Returns the length between bone and location
    def get_length(self, bone, origin):
        """Return the distance from `bone`'s rest origin to `origin`."""
        bone_index = self.node_to_bone_index[bone.node_index]
        bone_b2m = self.b2m_transforms[bone_index]
        bone_origin = bone_b2m.to_translation()
        vec = origin - bone_origin
        return vec.length
    # get_forward
    # CLEANUP
    # Not quite, used to fetch the 'forward' orientation of a rest bone matrix
    def get_forward(self, matrix):
        """Return the normalized first column of `matrix` as the bone's forward axis."""
        return Vector([matrix[0].x, matrix[1].x, matrix[2].x]).normalized()
    # get_bone_origin
    # CLEANUP
    # Incorrect, simply fetches an untransformed rest bone origin
    def get_bone_origin(self, bone):
        """Return the rest-pose origin (translation) of `bone` in model space."""
        bone_index = self.node_to_bone_index[bone.node_index]
        bone_b2m = self.b2m_transforms[bone_index]
        return bone_b2m.to_translation()
    # get_look_at_child
    # CLEANUP
    # NOTE(review): despite the name/comment, this returns a POINT (Vector)
    # midway along the bone's forward axis between the children it aims at,
    # or None — it does not return a child bone.
    def get_look_at_child(self, bone):
        """Return a tail point along the bone's forward axis toward the
        child(ren) it looks straight at, or None if it looks at none."""
        # Don't do anything if we don't have children
        bone_index = self.node_to_bone_index[bone.node_index]
        if len(self.bone_childs[bone_index]) <= 0:
            return None
        # bone transform and origin
        bone_b2m = self.b2m_transforms[bone_index]
        bone_origin = bone_b2m.to_translation()
        # Get bone direction as a normalized vector
        bone_forward = self.get_forward(bone_b2m)
        # Don't break on first child, check if we have a child closer to ourselves
        num_look = 0
        max_length = -sys.float_info.max
        min_length = sys.float_info.max
        for child in self.bone_childs[bone_index]:
            child_origin = self.get_bone_origin(child)
            # Create a vector pointing towards the child and check the angle between them
            # Subtract epsilon from 1 to allow some small slack to determine if point is
            # looking straight at child
            vec = (child_origin - bone_origin)
            cosv = bone_forward.dot(vec.normalized())
            if cosv >= (1.0 - 0.000001):
                vec_len = vec.length
                max_length = max( max_length, vec_len )
                min_length = min( min_length, vec_len )
                num_look += 1
        # If we didn't look at any point, return 'nothing'
        if num_look == 0:
            return None
        # Return a point inbetween all points we where looking straight at
        return bone_origin + bone_forward * ( min_length + max_length ) * 0.5
    # get_child_midpoint
    # CLEANUP
    # Sort of does what it says, it takes all childs of a bone and calculates a tail
    # that lies inbetween the mean average of all child bones.
    def get_child_midpoint(self, bone):
        """Return a tail point along the bone's forward axis at the distance of
        the (iterated) midpoint of its children, or None with fewer than 2 children."""
        # Don't do anything if we don't have children
        bone_index = self.node_to_bone_index[bone.node_index]
        if len(self.bone_childs[bone_index]) <= 1:
            return None
        # bone transform and origin
        bone_b2m = self.b2m_transforms[bone_index]
        bone_origin = bone_b2m.to_translation()
        # Get bone direction as a normalized vector
        bone_forward = self.get_forward(bone_b2m)
        children = self.bone_childs[bone_index]
        # NOTE(review): this is a running halfway blend, not a true mean average.
        mid_origin = self.get_bone_origin(children[0])
        for i in range(len(children)-1):
            mid_origin += (self.get_bone_origin(children[i+1]) - mid_origin) * 0.5
        return bone_origin + bone_forward * (mid_origin - bone_origin).length
    # project_vertex_group
    # CLEANUP
    # It does NOT project a vertex group against anything.
    # Projects untransformed vertices onto untransformed rest bone and extrudes the
    # tail to most extending vertex
    def project_vertex_group(self, bone, vertex_groups):
        """Return a tail point along the bone's forward axis reaching the
        farthest weighted vertex of the bone's vertex group, or None."""
        if vertex_groups == None:
            return None
        bone_name = self.nodes[bone.node_index].name
        if bone_name not in vertex_groups.keys():
            return None
        mesh_data = self.node.mesh_entity.mesh_data
        positions = get_vertex_array(mesh_data, VTX_ARRAY_POSITION)
        if positions == None:
            return None
        bone_index = self.node_to_bone_index[bone.node_index]
        bone_b2m = self.b2m_transforms[bone_index]
        bone_origin = bone_b2m.to_translation()
        # Get bone direction as a normalized vector
        bone_forward = self.get_forward(bone_b2m)
        group = vertex_groups[bone_name]
        max_length = -sys.float_info.max
        for (vi, w) in group:
            if w < 0.000001:
                continue
            vpos = Vector(positions[vi])
            vec = vpos - bone_origin
            if bone_forward.dot(vec) <= 0.000001:
                continue
            max_length = max(max_length, vec.length * math.sqrt(w))
        # NOTE(review): if no vertex qualifies, max_length stays at
        # -sys.float_info.max and the returned point is far off — confirm
        # callers tolerate this.
        return bone_origin + bone_forward * max_length
class FrameObject():
    """Snapshot of one object's transforms plus which tracks animate it this frame."""
    __slots__ = (
        "name",
        "parent",
        "matrix_local",
        "matrix_world",
        "has_position_track",
        "has_rotation_track",
        "has_scale_track",
        "has_any_track",
    )

    def __init__(self, bl_object):
        self.name = bl_object.name
        self.parent = bl_object.parent
        self.matrix_local = bl_object.matrix_local
        self.matrix_world = bl_object.matrix_world
        # Start with no tracks flagged for this frame.
        self.clear_tracks()

    def clear_tracks(self):
        """Mark all animation tracks as unused for this frame."""
        self.has_position_track = False
        self.has_rotation_track = False
        self.has_scale_track = False
        self.has_any_track = False

    def set_tracks(self, position, rotation, scale):
        """Record which track types drive this object in the current frame."""
        self.has_position_track = position
        self.has_rotation_track = rotation
        self.has_scale_track = scale
        self.has_any_track = position or rotation or scale
class ObjectKeyframe():
    """Object hierarchy snapshot used to bake one animation frame."""
    __slots__ = ("frame_objects", "name_to_index_map")

    def __init__(self):
        self.frame_objects = []      # insertion order: parents before children
        self.name_to_index_map = {}  # object name -> index in frame_objects

    def add_object(self, bl_object):
        """Wrap a Blender object in a FrameObject and register it by name."""
        wrapped = FrameObject(bl_object)
        self.name_to_index_map[wrapped.name] = len(self.frame_objects)
        self.frame_objects.append(wrapped)

    def get_object(self, name):
        """Look up a previously added frame object by its name."""
        return self.frame_objects[self.name_to_index_map[name]]

    def evaluate(self):
        """Recompute world matrices in list order; because parents were added
        before their children, each parent's world matrix is ready in time."""
        for frame_object in self.frame_objects:
            if frame_object.parent != None:
                parent = self.get_object(frame_object.parent.name)
                frame_object.matrix_world = parent.matrix_world * frame_object.matrix_local
            else:
                frame_object.matrix_world = frame_object.matrix_local
def create_game_to_blender_matrix():
    """Build the left-handed-to-right-handed conversion matrix that also
    reorients imported models into a Blender-friendly direction."""
    flip_z = Matrix(([1,0,0,0],[0,1,0,0],[0,0,-1,0],[0,0,0,1]))
    reorient = axis_conversion("Z", "Y", "X", "-Z").to_4x4()
    return flip_z * reorient
def create_bone_space_matrix():
    """Return the constant rotation that maps a node-space matrix into
    Blender's bone-space orientation."""
    rows = ([ 0, 1, 0, 0],[-1, 0, 0, 0],[ 0, 0, 1, 0],[ 0, 0, 0, 1])
    return Matrix(rows)
def get_vertex_array(mesh_data, vtx_array_type):
    """Return the data list of the given VTX_ARRAY_* slot, or None when the
    stream is absent.

    Fix: use `is not None` (identity) instead of `!= None`, the correct
    Python idiom for None checks.
    """
    stream = mesh_data.vertex_arrays[vtx_array_type]
    if stream is not None:
        return stream.data
    return None
def convert_colors(byte_colors):
    """Convert byte-range [0, 255] color rows to float-range [0, 1] rows.

    Bug fix: the original preallocated `num * [dim * [0.0]]`, which makes
    every row alias the SAME inner list — writing row i overwrote all rows,
    so the result was the last row repeated. Building each row independently
    fixes that. Also handles an empty input (the original crashed indexing
    byte_colors[0]).
    """
    scale = 1.0 / 255.0
    return [[float(component) * scale for component in color] for color in byte_colors]
# apply_local_matrix
# Applies a nodes local to parent matrix to a Blender objects local matrix
def apply_local_matrix(ob, node):
    """Set the Blender object's local matrix from the node's local-to-parent transform."""
    ob.matrix_local = node.local_to_parent.to_matrix()
# calculate_frame
# Creates world transforms for all nodes
def calculate_frame(nodes, game_matrix):
    """Compute a world-space transform for every node.

    Nodes are first ordered parents-before-children, then each world matrix
    is composed from the parent's world matrix and the node's local-to-parent
    matrix. `game_matrix` is pre-multiplied onto root nodes only, since it is
    not part of the node hierarchy itself.

    Returns a list (in sorted order) of [node_index, world_matrix] pairs.
    """
    num_nodes = len(nodes)
    unsorted_node_indices = range(num_nodes)
    sorted_node_indices = []
    # Collect the root nodes only
    for node_index in unsorted_node_indices:
        node = nodes[node_index]
        if node.parent == -1:
            sorted_node_indices.append(node_index)
    # Run a pass and add nodes until all unsorted nodes have been added to the sorted list
    idx = 0
    while idx < len(sorted_node_indices):
        # Get the node that we should check parents against
        find_node_index = sorted_node_indices[idx]
        # For each node in the unsorted list, check if it's parented to the current sorted node we're looking at
        for node_index in unsorted_node_indices:
            # Skip ourselves
            if find_node_index == node_index:
                continue
            if nodes[node_index].parent == -1:
                continue
            # Append if unsorted parent node is the same as current sorted node
            if nodes[node_index].parent == find_node_index:
                sorted_node_indices.append(node_index)
        idx += 1
    # Create the base transform, we add the game_matrix here since it's not part of the node hierarchy
    base_transform = Matrix.Identity(4) * game_matrix
    # Transform all nodes into world space using sorted list so we are guaranteed to have a composite matrix
    # for parents that should use it
    unsorted_node_transforms = [None] * num_nodes
    frame = [None] * num_nodes
    for i in unsorted_node_indices:
        # i walks the sorted order; node_index is the original node slot.
        node_index = sorted_node_indices[i]
        node = nodes[node_index]
        local_to_parent = node.local_to_parent.to_matrix()
        if node.parent != -1:
            # Parent world matrix is guaranteed filled in thanks to the sort above.
            unsorted_node_transforms[node_index] = unsorted_node_transforms[node.parent] * local_to_parent
        else:
            unsorted_node_transforms[node_index] = base_transform * local_to_parent
        frame[i] = [node_index, unsorted_node_transforms[node_index]]
    # Return frame
    return frame
# create_object_frame
# Create a frame structure for animation purposes out of object heirarcy in blender scene
def create_object_frame():
    """Walk the Blender object hierarchy rooted at the "game_matrix" empty
    breadth-first and register every visited object in an ObjectKeyframe.

    Returns the populated frame, or None when no "game_matrix" object
    exists in the scene."""
    if "game_matrix" not in bpy.data.objects.keys():
        return None
    frame = ObjectKeyframe()
    # Simple queue-based traversal; cursor marks the next unvisited object
    queue = [bpy.data.objects["game_matrix"]]
    cursor = 0
    while cursor < len(queue):
        current = queue[cursor]
        frame.add_object(current)
        queue.extend(current.children)
        cursor += 1
    return frame
# load_binary_model
# Loads a binary model using Grimrock 2 documentation
# FourCC magic
# int32 version
# int32 numNodes
# (Node) * numNodes
def load_binary_model(file_object, context):
    """Parse a Model from file_object and build the Blender scene objects
    for it.  Returns True on success, False when parsing failed."""
    model = Model()
    if model.read(file_object):
        build_model(model)
        return True
    print("Failed to load ModelFile")
    return False
# load_binary_animation
# Loads a binary animation using Grimrock 2 documentation
def load_binary_animation(file_object, armature, context):
    """Parse an Animation from file_object and apply it to the given
    armature object.  Returns True on success, False when parsing failed."""
    anim = Animation()
    if anim.read(file_object):
        # Apply the animation to the armature
        build_animation(armature, anim)
        return True
    print("Failed to load AnimationFile")
    return False
# build_vertex_groups
# Builds vertex groups for skinning
def build_vertex_groups(model, node):
    """Collect skinning data for a mesh node.

    Returns a dict mapping deform-node name -> list of [vertex_index, weight]
    pairs, or None when the mesh has no bones or no bone index/weight
    vertex arrays."""
    mesh_entity = node.mesh_entity
    if mesh_entity.num_bones <= 0:
        return None
    bone_indices = get_vertex_array(mesh_entity.mesh_data, VTX_ARRAY_BONE_INDEX)
    bone_weights = get_vertex_array(mesh_entity.mesh_data, VTX_ARRAY_BONE_WEIGHT)
    if bone_indices == None or bone_weights == None:
        return None
    vertex_groups = {}
    for vtx in range(mesh_entity.mesh_data.num_vertices):
        vtx_indices = bone_indices[vtx]
        vtx_weights = bone_weights[vtx]
        for j, bone_idx in enumerate(vtx_indices):
            weight = vtx_weights[j]
            # Bones reference nodes; the node name is the group key
            bone = mesh_entity.bones[bone_idx]
            group_name = model.nodes[bone.node_index].name
            vertex_groups.setdefault(group_name, []).append([vtx, weight])
    return vertex_groups
# build_armature
# Builds a blender armature from Grimrock Model description
def build_armature(model, node, vertex_groups, game_matrix):
    """Create an armature + rig object for a skinned mesh node, build its
    edit bones, then pose the bones in the model's initial state.

    model         -- the loaded Model (node list)
    node          -- the mesh node carrying a mesh_entity with bones
    vertex_groups -- dict from build_vertex_groups (used for tail projection)
    game_matrix   -- game->Blender axis conversion matrix

    Returns the rig object, or None when the mesh has no bones.
    Side effects: links objects into the scene, switches the active object
    and editor modes (EDIT -> OBJECT -> POSE -> OBJECT).
    Matrices are composed with the pre-2.80 '*' operator.
    """
    if node.mesh_entity.num_bones <= 0:
        return None
    # Create an armature object
    armature = bpy.data.armatures.new(node.name + "_rig")
    armature.draw_type = 'STICK'
    # Create a rig
    rig = bpy.data.objects.new(node.name + "_rig", armature)
    rig.show_x_ray = True
    if node.parent != -1:
        # Parent objects were created earlier by build_model, looked up by name
        rig.parent = bpy.data.objects[model.nodes[node.parent].name]
    #apply_transform(rig, model.nodes, node, game_matrix)
    apply_local_matrix(rig, node)
    # Update scene and set active rig
    bpy.context.scene.objects.link(rig)
    bpy.context.scene.objects.active = rig
    bpy.context.scene.update()
    # Switch to edit mode and create all bones
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='EDIT')
    # Do some pre-processing to make it easier for us when generating the armature
    info = ArmatureInfo(model, node)
    # Calculate world transforms for all nodes
    frame = calculate_frame(model.nodes, game_matrix)
    # Build bones using our bone index order list (this ensures roots are created before children)
    for bone_index in info.bone_index_order:
        bone = info.bones[bone_index]
        deform_node = info.nodes[bone.node_index]
        b2m = info.b2m_transforms[bone_index]
        # Heuristics for the bone tail position, tried in order:
        # Start by checking if we're looking at any particular child (crossing their head positions, if any)
        tail_origin = info.get_look_at_child(bone)
        # If we're not crossing any child bones, fetch the middle point of all childs (if any)
        if tail_origin == None:
            tail_origin = info.get_child_midpoint(bone)
        # Try projecting it towards vertices influenced by bone
        if tail_origin == None:
            tail_origin = info.project_vertex_group(bone, vertex_groups)
        # If no tail origin has been found, just extrude the bone in it's forward direction
        if tail_origin == None:
            tail_origin = b2m.to_translation() + info.get_forward(b2m)
        bone_length = info.get_length(bone, tail_origin)
        # Blender refuses zero-length bones; clamp to a small epsilon
        bone_length = max(bone_length, 0.001)
        # Fetch Blender EditBone parent
        bl_bone_parent = None
        bone_parent = info.bone_parents[bone_index]
        if bone_parent != None:
            parent_node = model.nodes[bone_parent.node_index]
            bl_bone_parent = armature.edit_bones[parent_node.name]
        # Create the bone along +X, then rotate/translate it into place
        bl_bone = armature.edit_bones.new(deform_node.name)
        bl_bone.head = Vector([0, 0, 0])
        bl_bone.tail = Vector([bone_length, 0, 0])
        bl_bone.transform(b2m.to_3x3())
        bl_bone.translate(b2m.to_translation())
        bl_bone.parent = bl_bone_parent
    # Switch back to object mode in order to refresh armature
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # Switch to pose mode and pose the model in initial position
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='POSE')
    # pose the bones in initial state
    bl_bone_space = create_bone_space_matrix()
    for (node_index, node_transform) in frame:
        deform_node = model.nodes[node_index]
        # Only nodes that actually became bones are posed
        if not deform_node.name in rig.pose.bones.keys():
            continue
        pose_bone = rig.pose.bones[deform_node.name]
        # World -> rig-local, then into Blender bone space
        pm = rig.matrix_world.inverted() * node_transform * bl_bone_space
        pose_bone.matrix = pm
        bpy.context.scene.update()
    # Done, switch back to object mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    bpy.context.scene.update()
    return rig
# build_model
# Builds Blender objects from Grimrock Model
def build_model(model):
    """Create the full Blender object hierarchy for a loaded Model.

    Two passes over the node list: first all plain (non-mesh) nodes become
    empties, then mesh nodes become mesh objects with optional armatures,
    vertex colors, uv sets and skinning groups.  Everything hangs under a
    single "game_matrix" empty that carries the game->Blender conversion.
    Uses the pre-2.80 tessface API and '*' matrix multiplication.
    """
    game_matrix = create_game_to_blender_matrix()
    # Calculate world transforms for all nodes
    frame = calculate_frame(model.nodes, game_matrix)
    # Before adding any meshes or armatures go into Object mode.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # Build top most game matrix object
    game_matrix_ob = bpy.data.objects.new("game_matrix", None)
    game_matrix_ob.empty_draw_type = 'ARROWS'
    game_matrix_ob.empty_draw_size = 0.25
    game_matrix_ob.matrix_local = game_matrix
    bpy.context.scene.objects.link(game_matrix_ob)
    # Build all nodes but skip mesh entity nodes since we create them in a later loop
    for (node_index, node_transform) in frame:
        node = model.nodes[node_index]
        if node.mesh_entity != None:
            continue
        # Fetch parent object, if there is no parent for the node we use the
        # top most game matrix conversion object
        parent_ob = None
        if node.parent != -1:
            parent_name = model.nodes[node.parent].name
            parent_ob = bpy.data.objects[parent_name]
        else:
            parent_ob = game_matrix_ob
        node_ob = bpy.data.objects.new(node.name, None)
        bpy.context.scene.objects.link(node_ob)
        node_ob.empty_draw_type = 'ARROWS'
        node_ob.empty_draw_size = 0.25
        node_ob.parent = parent_ob
        node_ob.matrix_world = node_transform
    # Update the scene after all objects have been created
    bpy.context.scene.update()
    # Now loop through all nodes again, but this time create the actuall mesh objects
    for (node_index, node_transform) in frame:
        node = model.nodes[node_index]
        if node.mesh_entity == None:
            continue
        # Build vertex groups for skinning
        vertex_groups = build_vertex_groups(model, node)
        # Build the armature and pose it in inital state (None if no bones)
        bl_rig = build_armature(model, node, vertex_groups, game_matrix)
        # Fetch needed data to build
        mesh_data = node.mesh_entity.mesh_data
        positions = get_vertex_array(mesh_data, VTX_ARRAY_POSITION)
        normals = get_vertex_array(mesh_data, VTX_ARRAY_NORMAL)
        colors = get_vertex_array(mesh_data, VTX_ARRAY_COLOR)
        indices = mesh_data.indices
        # Indices form triangles, three per face
        num_faces = int( mesh_data.num_indices / 3 )
        # Create Mesh to work with
        me = bpy.data.meshes.new(node.name)
        # Add vertices
        # Note: These are in native game format, object hierarchy takes care of the transform
        me.vertices.add(mesh_data.num_vertices)
        for i in range(mesh_data.num_vertices):
            co = Vector(positions[i])
            me.vertices[i].co = (co.x, co.y, co.z)
        # Add normals
        # Note: These are in native game format, object hierarchy takes care of the transform
        if normals != None:
            for i in range(mesh_data.num_vertices):
                normal = Vector(normals[i])
                me.vertices[i].normal = (normal.x, normal.y, normal.z)
        # Add faces
        # Note: No flipping, object hierarchy makes sure this comes out correct
        me.tessfaces.add(num_faces)
        for i in range(num_faces):
            idx = i * 3
            me.tessfaces[i].vertices_raw = (indices[idx+0], indices[idx+1], indices[idx+2], 0)
        # Add colors
        if colors != None:
            # Create color-set layer
            color_layer = me.tessface_vertex_colors.new("colorset")
            me.tessface_vertex_colors.active = color_layer
            # Convert to float range
            float_colors = convert_colors(colors)
            # Assign colors (RGB only, alpha dropped)
            for f in me.tessfaces:
                color_layer.data[f.index].color1 = float_colors[f.vertices[0]][0:3]
                color_layer.data[f.index].color2 = float_colors[f.vertices[1]][0:3]
                color_layer.data[f.index].color3 = float_colors[f.vertices[2]][0:3]
        # Add texture coordinate sets
        # Note: We flip the v-coordinates, so remember to reverse that when exporting out!
        first_uv_layer = None
        for i in range(8):
            tex_coords = get_vertex_array(mesh_data, VTX_ARRAY_TEXCOORD0 + i)
            if tex_coords == None:
                continue
            # Create uv-layer for these texture coordinates
            uv_layer = me.tessface_uv_textures.new("uvset%d" % i)
            me.tessface_uv_textures.active = uv_layer
            if first_uv_layer == None:
                first_uv_layer = uv_layer
            # Assign uv coordinates to layer faces
            for f in me.tessfaces:
                uvco1 = tex_coords[f.vertices[0]][0:2]
                uvco2 = tex_coords[f.vertices[1]][0:2]
                uvco3 = tex_coords[f.vertices[2]][0:2]
                # Flip v coordinates
                uvco1 = (uvco1[0], 1.0 - uvco1[1])
                uvco2 = (uvco2[0], 1.0 - uvco2[1])
                uvco3 = (uvco3[0], 1.0 - uvco3[1])
                uv_layer.data[f.index].uv = (uvco1, uvco2, uvco3)
        # Set first created uv-layer as active layer
        if first_uv_layer != None:
            me.tessface_uv_textures.active = first_uv_layer
        # Figure out parent object for this node, if none we automatically add it to the game matrix object
        parent_ob = game_matrix_ob
        if node.parent != -1:
            parent_node = model.nodes[node.parent]
            parent_ob = bpy.data.objects[parent_node.name]
        # Create mesh object and link with scene
        node_ob = bpy.data.objects.new(node.name, me)
        node_ob.parent = parent_ob
        node_ob.matrix_world = node_transform
        # Link with scene
        bpy.context.scene.objects.link(node_ob)
        # Create the skinning groups for this object as well as a modifier for the rig
        if vertex_groups != None:
            for group_name, group in vertex_groups.items():
                bl_group = node_ob.vertex_groups.new(group_name)
                for (v, w) in group:
                    bl_group.add([v], w, 'ADD')
            mod = node_ob.modifiers.new('RigModifier', 'ARMATURE')
            mod.object = bl_rig
            mod.use_bone_envelopes = False
            mod.use_vertex_groups = True
        # Update the mesh
        me.update()
    # Update scene
    bpy.context.scene.update()
# build_animation
# Applies a loaded Animation to an armature object, inserting keyframes
def build_animation(armature_object, anim):
    """Apply animation tracks to the scene and insert keyframes.

    For every animation frame: update each animated object's local matrix
    from its position/rotation/scale tracks, evaluate world matrices via
    the object frame, then write the result into pose bones (when the name
    matches a bone) or plain objects, keyframing only the tracks present.
    Returns True.  Switches editor mode POSE -> OBJECT as a side effect.
    """
    # This isn't quite correct.
    # fps isn't right since we simply truncate to integer
    fps = int(anim.frames_per_second)
    # Figure out range of animation (Blender frames are 1-based)
    frame_start = 1
    frame_end = 1 + anim.num_frames
    # Update frame settings
    bpy.context.scene.render.fps = fps
    bpy.context.scene.render.fps_base = 1.0
    bpy.context.scene.frame_start = frame_start
    bpy.context.scene.frame_end = frame_end
    bpy.context.scene.update()
    game_matrix = create_game_to_blender_matrix()
    # Set the armature as the current selected object
    bpy.context.scene.objects.active = armature_object
    # Switch to pose mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='POSE')
    # NOTE(review): 'armature' below is assigned but never used
    armature = armature_object.data
    pose = armature_object.pose
    frame = create_object_frame()
    bl_bone_space = create_bone_space_matrix()
    for i in range(anim.num_frames):
        for item in anim.items:
            frame_object = frame.get_object(item.node_name)
            # A track is valid this frame only if it has enough samples
            has_position_track = i < item.num_positions
            has_rotation_track = i < item.num_rotations
            has_scale_track = i < item.num_scales
            # Tell frame object what tracks are valid for this frame
            frame_object.set_tracks(has_position_track, has_rotation_track, has_scale_track)
            # Skip matrix update if there are no tracks available for this object this frame
            if not frame_object.has_any_track:
                continue
            # Fetch current local matrix and decompose it in to location, rotation and scale components
            matrix_local = frame_object.matrix_local
            loc, rot, scl = matrix_local.decompose()
            # Apply animation tracks for valid components; the rest keep
            # their decomposed values
            if has_position_track:
                p = item.positions[i]
                loc = Vector([p.x, p.y, p.z])
            if has_rotation_track:
                q = item.rotations[i]
                rot = Quaternion((q.w, q.x, q.y, q.z))
            if has_scale_track:
                s = item.scales[i]
                scl = Vector([s.x, s.y, s.z])
            # Create matrices out of our frame components
            mat_loc = Matrix.Translation((loc.x, loc.y, loc.z))
            mat_scl = Matrix(([scl.x,0,0,0],[0,scl.y,0,0],[0,0,scl.z,0],[0,0,0,1]))
            mat_rot = rot.to_matrix().to_4x4()
            # Compose the final local matrix (T * R * S, pre-2.80 operator)
            matrix_local = mat_loc * mat_rot * mat_scl
            # Set the matrix in the frame object
            frame_object.matrix_local = matrix_local
        # Calculate world matrices
        frame.evaluate()
        frame_index = i + 1
        for frame_object in frame.frame_objects:
            # Skip object if no tracks assigned this frame
            if not frame_object.has_any_track:
                continue
            # If frame object exists as a pose bone, alter that instead of the actual object
            keyframe_object = None
            if frame_object.name in pose.bones.keys():
                # Find the pose bone and update it
                pose_bone = pose.bones[frame_object.name]
                pm = armature_object.matrix_world.inverted() * frame_object.matrix_world * bl_bone_space
                pose_bone.matrix = pm
                keyframe_object = pose_bone
            else:
                # Update actual blender object
                bl_object = bpy.data.objects[frame_object.name]
                bl_object.matrix_world = frame_object.matrix_world
                keyframe_object = bl_object
            bpy.context.scene.update()
            # Add a keyframe(s) at this location, one channel per valid track
            if keyframe_object != None:
                if frame_object.has_position_track:
                    keyframe_object.keyframe_insert(data_path="location", frame = frame_index)
                if frame_object.has_rotation_track:
                    keyframe_object.keyframe_insert(data_path="rotation_quaternion", frame = frame_index)
                if frame_object.has_scale_track:
                    keyframe_object.keyframe_insert(data_path="scale", frame = frame_index)
        # Go to next frame
    # Switch to object mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    return True
# fnv1a
# Hash a string using fnv1-a
def fnv1a(string, seed = 0x811C9DC5, prime = 0x01000193):
    """Return the 32-bit FNV-1a hash of *string*.

    Per character: xor the code point into the hash, multiply by the FNV
    prime, and truncate to 32 bits."""
    hash_value = seed
    for ch in string:
        hash_value = ((hash_value ^ ord(ch)) * prime) & 0xFFFFFFFF
    return hash_value
# load_file_table
# Loads the .dat container file table header
def load_file_table(file_object):
    """Read the GRA2 .dat container header from file_object.

    Returns a list of FileEntry records, or None when the header is
    missing or malformed.  On failure the file object is closed before
    returning; on success it is left open, positioned just past the table.
    """
    # Only catch genuine read/parse failures here; the previous bare
    # 'except:' also swallowed KeyboardInterrupt and programming errors.
    try:
        dat_magic = read_magic(file_object)
    except (struct.error, OSError):
        print("Error parsing .dat file header!")
        file_object.close()
        return None
    # Figure out if it's a valid dat package
    if dat_magic != b'GRA2':
        print("Not a valid Legend of Grimrock 2 .dat file!")
        file_object.close()
        return None
    # Read number of file headers
    num_files = read_int(file_object)
    # Read in the file header table for all entries
    file_table = []
    for _ in range(num_files):
        entry = FileEntry()
        entry.hash_name = read_uint(file_object)
        entry.file_offset = read_uint(file_object)
        entry.size_compressed = read_uint(file_object)
        entry.size_uncompressed = read_uint(file_object)
        entry.unknown = read_uint(file_object)
        file_table.append(entry)
    return file_table
# load_animation_table
# Loads a .dat container animation information
def load_animation_table(filename):
    """Scan a Grimrock 2 .dat container and return a list of FileEntry
    records for every animation file inside it.

    Compiled lua chunks in the container are scanned for
    'assets/animations/...fbx' strings; each found asset name is hashed
    with FNV-1a so animation entries (which only store a name hash) can be
    given readable names.  Unresolved entries get a "hash_XXXXXXXX" name.
    """
    anim_table = []
    file_object = open(filename, 'rb')
    file_table = load_file_table(file_object)
    if file_table == None or len(file_table) <= 0:
        # Fix: close the container on this early-out too; load_file_table
        # only closes it when the header itself failed to parse, so a
        # valid-but-empty table used to leak the file handle.
        if not file_object.closed:
            file_object.close()
        return anim_table
    # Only care about lua script files and animation files
    # We use the lua to scan for animation asset names so we can name the animation table properly
    magic_lua = 0x014a4c1b
    magic_anim = 0x4d494e41 #b'ANIM'
    # Create search strings to use when scaning the lua files
    anim_search = b'assets/animations/'
    fbx_search = b'.fbx'
    # Seek to all file entries, read the magic and place table indices for different file types
    anim_names = {}
    for entry in file_table:
        # Haven't come across any, think I recall them being zlib compressed ?
        # Skip them for now
        if entry.size_compressed != 0:
            print("Compressed file in .dat package, skipping entry")
            continue
        # Haven't come across any, I have no idea what this might be
        # Skip these entries for now
        if entry.unknown != 0:
            print("Found unknown data in .dat package, skipping entry")
            continue
        # Seek to start of internal file and read the first four bytes and treat them as
        # a 'type' magic of what that file entry actually is.
        # This is of course not correct, but for the sake of finding animations and lua files it works just fine.
        file_object.seek(entry.file_offset, os.SEEK_SET)
        file_magic = read_uint(file_object)
        # Handle lua file entries
        if file_magic == magic_lua:
            # Read out the entire contents of the file into a bytearray
            lua_buffer = bytearray(file_object.read(entry.size_uncompressed))
            # Search the bytearray for 'assets/animations/' until no more could be found
            buffer_index = lua_buffer.find(anim_search, 0)
            while buffer_index >= 0:
                # Look up the '.fbx' ending.  Assuming every asset path ends
                # with .fbx is a bit dangerous, but it works for now.
                # NOTE(review): the offset 14 is len(b'assets/models/'), not
                # len(anim_search) (18); harmless since '.fbx' cannot occur
                # inside the prefix itself, but worth confirming.
                end_index = lua_buffer.find(fbx_search, buffer_index + 14)
                # If we didn't find an .fbx ending, abort now
                if end_index < 0:
                    break
                # Decode a string from our search indices and append a .animation ending
                asset_name = decode_string(lua_buffer[buffer_index:end_index]) + ".animation"
                # Use FNV1a to hash the asset name
                hash_name = fnv1a(asset_name)
                # Remember the readable name for this hash so entries can be
                # 'decoded' to real file names later
                if hash_name not in anim_names:
                    anim_names[hash_name] = asset_name
                # Restart the search from the end of the last search result
                buffer_index = lua_buffer.find(anim_search, end_index + 4)
        # Handle animation file entries, this simply adds the table entry to the animation table
        if file_magic == magic_anim:
            anim_table.append(entry)
    # We're done with the file for now, close it
    file_object.close()
    # Go through our animation table and attempt to resolve all the asset names
    # If we couldn't find one, simply build a name using the hash name
    for entry in anim_table:
        if entry.hash_name in anim_names:
            entry.name = anim_names[entry.hash_name]
        else:
            entry.name = "hash_%8x" % entry.hash_name
    return anim_table
# load_model_table
# Loads .dat container model information
def load_model_table(filename):
    """Scan a Grimrock 2 .dat container and return a list of FileEntry
    records for every model file inside it.

    Compiled lua chunks in the container are scanned for
    'assets/models/...fbx' strings; each found asset name is hashed with
    FNV-1a so model entries (which only store a name hash) can be given
    readable names.  Unresolved entries get a "hash_XXXXXXXX" name.
    """
    model_table = []
    file_object = open(filename, 'rb')
    file_table = load_file_table(file_object)
    if file_table == None or len(file_table) <= 0:
        # Fix: close the container on this early-out too; load_file_table
        # only closes it when the header itself failed to parse, so a
        # valid-but-empty table used to leak the file handle.
        if not file_object.closed:
            file_object.close()
        return model_table
    # Only care about lua script files and model files
    # We use the lua to scan for model asset names so we can name the model table properly
    magic_lua = 0x014a4c1b
    magic_model = 0x314c444d #b'MDL1'
    # Create search strings to use when scaning the lua files
    model_search = b'assets/models/'
    fbx_search = b'.fbx'
    # Seek to all file entries, read the magic and place table indices for different file types
    model_names = {}
    for entry in file_table:
        # Haven't come across any, think I recall them being zlib compressed ?
        # Skip them for now
        if entry.size_compressed != 0:
            print("Compressed file in .dat package, skipping entry")
            continue
        # Haven't come across any, I have no idea what this might be
        # Skip these entries for now
        if entry.unknown != 0:
            print("Found unknown data in .dat package, skipping entry")
            continue
        # Seek to start of internal file and read the first four bytes and treat them as
        # a 'type' magic of what that file entry actually is.
        # This is of course not correct, but for the sake of finding models and lua files it works just fine.
        file_object.seek(entry.file_offset, os.SEEK_SET)
        file_magic = read_uint(file_object)
        # Handle lua file entries
        if file_magic == magic_lua:
            # Read out the entire contents of the file into a bytearray
            lua_buffer = bytearray(file_object.read(entry.size_uncompressed))
            # Search the bytearray for 'assets/models/' until no more could be found
            buffer_index = lua_buffer.find(model_search, 0)
            while buffer_index >= 0:
                # Look up the '.fbx' ending (14 == len(model_search)).
                # Assuming every asset path ends with .fbx is a bit
                # dangerous, but it works for now.
                end_index = lua_buffer.find(fbx_search, buffer_index + 14)
                # If we didn't find an .fbx ending, abort now
                if end_index < 0:
                    break
                # Decode a string from our search indices and append a .model ending
                asset_name = decode_string(lua_buffer[buffer_index:end_index]) + ".model"
                # Use FNV1a to hash the asset name
                hash_name = fnv1a(asset_name)
                # Remember the readable name for this hash so entries can be
                # 'decoded' to real file names later
                if hash_name not in model_names:
                    model_names[hash_name] = asset_name
                # Restart the search from the end of the last search result
                buffer_index = lua_buffer.find(model_search, end_index + 4)
        # Handle model file entries, this simply adds the table entry to the model table
        if file_magic == magic_model:
            model_table.append(entry)
    # We're done with the file for now, close it
    file_object.close()
    # Go through our model table and attempt to resolve all the asset names
    # If we couldn't find one, simply build a name using the hash name
    for entry in model_table:
        if entry.hash_name in model_names:
            entry.name = model_names[entry.hash_name]
        else:
            entry.name = "hash_%8x" % entry.hash_name
    return model_table
# load_model
# Loads a model from a binary file, uses a file offset if we're reading directly from a .dat container
def load_model(filename, file_offset, context):
    """Open *filename*, optionally seek to *file_offset* (non-zero when
    reading straight out of a .dat container), and load a binary model.

    Always returns True; parse failures are reported by load_binary_model.
    Changes: removed the unused splitext locals and replaced the manual
    close with a 'with' block so the handle is released even if parsing
    raises (closing an already-closed file is a no-op).
    """
    with open(filename, 'rb') as file_object:
        # Seek to offset in file, this is only used if loading from .dat containers
        if file_offset > 0:
            file_object.seek(file_offset, os.SEEK_SET)
        # Load the binary model data
        load_binary_model(file_object, context)
    return True
# load_animation
# Loads an animation from a binary file, uses a file offset if we're reading directly from a .dat container
def load_animation(filename, file_offset, armature, context):
    """Open *filename*, optionally seek to *file_offset* (non-zero when
    reading straight out of a .dat container), and load a binary animation
    onto *armature*.

    Always returns True; parse failures are reported by
    load_binary_animation.  Changes: removed the unused splitext locals and
    replaced the manual close with a 'with' block so the handle is released
    even if parsing raises (closing an already-closed file is a no-op).
    """
    with open(filename, 'rb') as file_object:
        # Seek to offset in file, this is only used if loading from .dat containers
        if file_offset > 0:
            file_object.seek(file_offset, os.SEEK_SET)
        # Load the binary animation data
        load_binary_animation(file_object, armature, context)
    return True
# IMPORT_OT_model
# Blender UI and base for importing models
class IMPORT_OT_model(bpy.types.Operator, ImportHelper):
    # Import Model Operator.
    # Accepts either a standalone .model file or a .dat container; for
    # containers a file list is shown so one model can be picked.
    bl_idname = "import_scene.model"
    bl_label = "Import Model"
    bl_description = "Import a Legend of Grimrock 2 model"
    bl_options = { 'REGISTER', 'UNDO' }
    # File selection UI property
    filepath = StringProperty(name="File Path", description="Filepath used for importing the mesh file.", maxlen=1024, default="")
    # File list UI properties for .dat containers
    file_list = CollectionProperty(type=bpy.types.PropertyGroup)
    file_list_index = IntProperty()
    # Holds information if .dat container is being imported with specific model selection
    # NOTE(review): plain class attributes, shared by all operator instances;
    # appears intentional here as a cache keyed on dat_file.
    dat_file = ""
    model_table = []
    # execute
    # Performs the actual import once the file selector confirms
    def execute(self, context):
        file_offset = 0
        # Dig out file offset if loading from .dat container
        if self.dat_file == self.filepath:
            file_offset = self.model_table[self.file_list_index].file_offset
        # Load from binary file
        load_model(self.filepath, file_offset, context)
        return {'FINISHED'}
    # clear_file_list
    # Clears the file_list UI property of all entries and resets the dat_file cached value
    def clear_file_list(self):
        self.dat_file = ""
        num = len(self.file_list)
        # Remove from the back to keep indices valid
        while num > 0:
            self.file_list.remove(num-1)
            num -= 1
    # build_file_list
    # Updates the file_list UI property from selected .dat file, or cleans it out if needed
    def build_file_list(self):
        # Figure out if we selected a .dat file or if we selected a different .dat file
        name, ext = os.path.splitext(os.path.basename(self.filepath))
        if ext.lower() != ".dat":
            self.clear_file_list()
            return
        # Cached dat_file is still up to date, simply ignore any updates
        if self.filepath == self.dat_file:
            return
        # Clean out any previous entries in the UI file list
        self.clear_file_list()
        # Load package header and extract model information
        self.dat_file = self.filepath
        self.model_table = load_model_table(self.filepath)
        # Add all the model table entries to the UI file list
        for entry in self.model_table:
            item = self.file_list.add()
            item.name = entry.name
    # draw
    # Draws the side panel of the file selector (container content list)
    def draw(self, context):
        layout = self.layout
        # Update the file_list UI property if needed
        self.build_file_list()
        row = layout.row(True)
        row.label("Legend of Grimrock 2 .dat container")
        layout.template_list("UI_UL_list", "OpenFileDAT", self, "file_list", self, "file_list_index", rows=15)
    # invoke
    # Opens the file selector
    def invoke(self, context, event):
        wm = context.window_manager
        wm.fileselect_add(self)
        return {'RUNNING_MODAL'}
# IMPORT_OT_anim
# Blender UI and base for importing animations
class IMPORT_OT_anim(bpy.types.Operator, ImportHelper):
    # Import Animation Operator.
    # Accepts either a standalone .animation file or a .dat container; the
    # user also picks which armature the animation should be applied to.
    bl_idname = "import_scene.animation"
    bl_label = "Import Animation"
    bl_description = "Import a Legend of Grimrock 2 animation"
    bl_options = { 'REGISTER', 'UNDO' }
    # File selection UI property
    filepath = StringProperty(name="File Path", description="Filepath used for importing the mesh file.", maxlen=1024, default="")
    # Armature selection UI property
    armature_name = StringProperty(name="Armature", description="Armature to apply animation data on.", maxlen=1024, default="")
    # File list UI properties for .dat containers
    file_list = CollectionProperty(type=bpy.types.PropertyGroup)
    file_list_index = IntProperty()
    # Holds information if .dat container is being imported with specific model selection
    # NOTE(review): plain class attributes, shared by all operator instances;
    # appears intentional here as a cache keyed on dat_file.
    dat_file = ""
    animation_table = []
    # execute
    # Performs the actual import once the file selector confirms
    def execute(self, context):
        file_offset = 0
        # Dig out file offset if loading from .dat container
        if self.dat_file == self.filepath:
            file_offset = self.animation_table[self.file_list_index].file_offset
        # Validate the target armature before loading anything
        if len(self.armature_name) <= 0 or not context.scene.objects[self.armature_name]:
            print("Can't load animation, couldn't find selected armature.")
            return {'FINISHED'}
        armature = context.scene.objects[self.armature_name]
        if armature.type != 'ARMATURE':
            print("Can't load animation, object to apply on is not of type ARMATURE.")
            return {'FINISHED'}
        # Load from binary file
        load_animation(self.filepath, file_offset, armature, context)
        return {'FINISHED'}
    # clear_file_list
    # Clears the file_list UI property of all entries and resets the dat_file cached value
    def clear_file_list(self):
        self.dat_file = ""
        num = len(self.file_list)
        # Remove from the back to keep indices valid
        while num > 0:
            self.file_list.remove(num-1)
            num -= 1
    # build_file_list
    # Updates the file_list UI property from selected .dat file, or cleans it out if needed
    def build_file_list(self):
        # Figure out if we selected a .dat file or if we selected a different .dat file
        name, ext = os.path.splitext(os.path.basename(self.filepath))
        if ext.lower() != ".dat":
            self.clear_file_list()
            return
        # Cached dat_file is still up to date, simply ignore any updates
        if self.filepath == self.dat_file:
            return
        # Clean out any previous entries in the UI file list
        self.clear_file_list()
        # Load package header and extract animation information
        self.dat_file = self.filepath
        self.animation_table = load_animation_table(self.filepath)
        # Add all the animation table entries to the UI file list
        for entry in self.animation_table:
            item = self.file_list.add()
            item.name = entry.name
    # draw
    # Draws the side panel of the file selector (armature picker + list)
    def draw(self, context):
        layout = self.layout
        # Update the file_list UI property if needed
        self.build_file_list()
        row = layout.row(True)
        row.prop_search(self, "armature_name", bpy.data, "armatures")
        row = layout.row(True)
        row.label("Legend of Grimrock 2 .dat container")
        layout.template_list("UI_UL_list", "OpenFileDAT", self, "file_list", self, "file_list_index", rows=15)
    # invoke
    # Opens the file selector
    def invoke(self, context, event):
        wm = context.window_manager
        wm.fileselect_add(self)
        return {'RUNNING_MODAL'}
# menu_func_import_model
# Blender menu operator to invoke model importer
def menu_func_import_model(self, context):
    """File > Import menu entry that launches the model import operator."""
    entry_label = "Legend of Grimrock 2 Model (.model)"
    self.layout.operator(IMPORT_OT_model.bl_idname, text=entry_label)
# menu_func_import_anim
# Blender menu operator to invoke animation importer
# (header comment previously said menu_func_import_model - copy/paste typo)
def menu_func_import_anim(self, context):
    # File > Import menu entry that launches the animation import operator
    self.layout.operator(IMPORT_OT_anim.bl_idname, text="Legend of Grimrock 2 Animation (.animation)")
# register
# Registers menu functions
def register():
    # Register every class in this module with Blender (2.7x add-on API),
    # then hook our two entries into the File > Import menu.
    bpy.utils.register_module(__name__)
    bpy.types.INFO_MT_file_import.append(menu_func_import_model)
    bpy.types.INFO_MT_file_import.append(menu_func_import_anim)
# unregister
# Unregisters menu functions
def unregister():
    # Mirror of register(): drop module classes, then remove menu entries.
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_import.remove(menu_func_import_model)
    bpy.types.INFO_MT_file_import.remove(menu_func_import_anim)
# Main function
# Allows running the script directly from Blender's text editor; normal
# add-on installation invokes register() through Blender's add-on machinery.
if __name__ == "__main__":
    register()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement