{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_270.py","language":"python","identifier":"util_is_header_valid","parameters":"(filename, file_ext, chunk_id, error_callback)","argument_list":"","return_statement":"return True","docstring":"Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.","docstring_summary":"Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.","docstring_tokens":["Return","True","if","chunk_id","is","a","valid","psk","\/","psa","(","file_ext",")","magick","number","."],"function":"def util_is_header_valid(filename, file_ext, chunk_id, error_callback):\n '''Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.'''\n if chunk_id != PSKPSA_FILE_HEADER[file_ext]:\n error_callback(\n \"File %s is not a %s file. (header mismach)\\nExpected: %s \\nPresent %s\" % ( \n filename, file_ext,\n PSKPSA_FILE_HEADER[file_ext], chunk_id)\n ) \n return False\n return True","function_tokens":["def","util_is_header_valid","(","filename",",","file_ext",",","chunk_id",",","error_callback",")",":","if","chunk_id","!=","PSKPSA_FILE_HEADER","[","file_ext","]",":","error_callback","(","\"File %s is not a %s file. (header mismach)\\nExpected: %s \\nPresent %s\"","%","(","filename",",","file_ext",",","PSKPSA_FILE_HEADER","[","file_ext","]",",","chunk_id",")",")","return","False","return","True"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_270.py#L162-L171"} |
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_270.py","language":"python","identifier":"util_gen_name_part","parameters":"(filepath)","argument_list":"","return_statement":"return re.match(r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$', filepath).group(1)","docstring":"Return file name without extension","docstring_summary":"Return file name without extension","docstring_tokens":["Return","file","name","without","extension"],"function":"def util_gen_name_part(filepath):\n '''Return file name without extension'''\n return re.match(r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$', filepath).group(1)","function_tokens":["def","util_gen_name_part","(","filepath",")",":","return","re",".","match","(","r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$'",",","filepath",")",".","group","(","1",")"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_270.py#L174-L176"} |
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_270.py","language":"python","identifier":"vec_to_axis_vec","parameters":"(vec_in, vec_out)","argument_list":"","return_statement":"","docstring":"Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)","docstring_summary":"Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)","docstring_tokens":["Make","**","vec_out","**","to","be","an","axis","-","aligned","unit","vector","that","is","closest","to","vec_in",".","(","basis?",")"],"function":"def vec_to_axis_vec(vec_in, vec_out):\n '''Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)'''\n x, y, z = vec_in\n if abs(x) > abs(y):\n if abs(x) > abs(z):\n vec_out.x = 1 if x >= 0 else -1\n else:\n vec_out.z = 1 if z >= 0 else -1\n else:\n if abs(y) > abs(z):\n vec_out.y = 1 if y >= 0 else -1\n else:\n vec_out.z = 1 if z >= 0 else -1","function_tokens":["def","vec_to_axis_vec","(","vec_in",",","vec_out",")",":","x",",","y",",","z","=","vec_in","if","abs","(","x",")",">","abs","(","y",")",":","if","abs","(","x",")",">","abs","(","z",")",":","vec_out",".","x","=","1","if","x",">=","0","else","-","1","else",":","vec_out",".","z","=","1","if","z",">=","0","else","-","1","else",":","if","abs","(","y",")",">","abs","(","z",")",":","vec_out",".","y","=","1","if","y",">=","0","else","-","1","else",":","vec_out",".","z","=","1","if","z",">=","0","else","-","1"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_270.py#L179-L191"} |
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_270.py","language":"python","identifier":"pskimport","parameters":"(filepath,\n context = bpy.context,\n bImportmesh = True,\n bImportbone = True,\n bSpltiUVdata = False,\n fBonesize = 5.0,\n fBonesizeRatio = 0.6,\n bDontInvertRoot = False,\n bReorientBones = False,\n bReorientDirectly = False,\n error_callback = None)","argument_list":"","return_statement":"return True","docstring":"Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n __name__('?', error_callback = lambda msg: print('reason:',msg)","docstring_summary":"Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n __name__('?', error_callback = lambda msg: print('reason:',msg)","docstring_tokens":["Import","mesh","and","skeleton","from",".","psk","\/",".","pskx","files","Args",":","bReorientBones",":","Axis","based","bone","orientation","to","children","error_callback",":","Called","when","importing","is","failed",".","__name__","(","?","error_callback","=","lambda","msg",":","print","(","reason",":","msg",")"],"function":"def pskimport(filepath,\n context = bpy.context,\n bImportmesh = True,\n bImportbone = True,\n bSpltiUVdata = False,\n fBonesize = 5.0,\n fBonesizeRatio = 0.6,\n bDontInvertRoot = False,\n bReorientBones = False,\n bReorientDirectly = False,\n error_callback = None):\n '''\n Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n __name__('?', error_callback = lambda msg: print('reason:',msg)\n \n '''\n if not hasattr( error_callback, '__call__'):\n error_callback = __pass\n \n # ref_time = time.process_time()\n if not bImportbone and not bImportmesh:\n error_callback(\"Nothing to do.\\nSet something for import.\")\n return False\n file_ext = 'psk'\n \n print (\"-----------------------------------------------\")\n print (\"---------EXECUTING PSK PYTHON IMPORTER---------\")\n print (\"-----------------------------------------------\")\n\n #file may not exist\n try:\n file = open(filepath,'rb')\n except IOError:\n error_callback('Error while opening file for reading:\\n \"'+filepath+'\"')\n return False\n\n if not util_check_file_header(file, 'psk'):\n error_callback('Not psk file:\\n \"'+filepath+'\"')\n return False\n \n Vertices = None\n Wedges = None\n Faces = None\n UV_by_face = None\n Materials = None\n Bones = None\n Weights = None\n Extrauvs = []\n Normals = None\n \n #================================================================================================== \n # Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle \n # Only Name is usable.\n def read_materials():\n \n nonlocal Materials\n \n Materials = []\n \n for counter in range(chunk_datacount):\n\n (MaterialNameRaw,) = unpack_from('64s24x', chunk_data, chunk_datasize * counter)\n \n Materials.append( util_bytes_to_str( MaterialNameRaw ) )\n \n \n #================================================================================================== \n # Faces WdgIdx1 | WdgIdx2 | WdgIdx3 | MatIdx | AuxMatIdx | SmthGrp\n def read_faces():\n nonlocal Faces, UV_by_face\n \n if not bImportmesh:\n 
return True\n \n UV_by_face = [None] * chunk_datacount\n Faces = [None] * chunk_datacount\n \n if len(Wedges) > 65536:\n unpack_format = '=IIIBBI'\n else:\n unpack_format = '=HHHBBI'\n \n unpack_data = Struct(unpack_format).unpack_from\n \n for counter in range(chunk_datacount):\n (WdgIdx1, WdgIdx2, WdgIdx3,\n MatIndex, \n AuxMatIndex, #unused\n SmoothingGroup # Umodel is not exporting SmoothingGroups\n ) = unpack_data(chunk_data, counter * chunk_datasize)\n \n # looks ugly\n # Wedges is (point_index, u, v, MatIdx)\n ((vertid0, u0, v0, matid0), (vertid1, u1, v1, matid1), (vertid2, u2, v2, matid2)) = Wedges[WdgIdx1], Wedges[WdgIdx2], Wedges[WdgIdx3]\n \n # note order: C,B,A\n Faces[counter] = (vertid2, vertid1, vertid0)\n \n uv = ( ( u2, 1.0 - v2 ), ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ) )\n \n # Mapping: FaceIndex <=> UV data <=> FaceMatIndex\n UV_by_face[counter] = (uv, MatIndex, (matid2, matid1, matid0))\n \n \n #==================================================================================================\n # Vertices X | Y | Z\n def read_vertices():\n nonlocal bImportbone, bImportmesh\n \n if not bImportmesh:\n return True\n \n nonlocal Vertices\n \n Vertices = [None] * chunk_datacount\n \n unpack_data = Struct('3f').unpack_from\n \n for counter in range( chunk_datacount ):\n (vec_x, vec_y, vec_z) = unpack_data(chunk_data, counter * chunk_datasize)\n Vertices[counter] = (vec_x, vec_y, vec_z)\n \n \n #================================================================================================== \n # Wedges (UV) VertexId | U | V | MatIdx \n def read_wedges():\n \n nonlocal bImportbone, bImportmesh\n if not bImportmesh:\n return True\n \n nonlocal Wedges\n \n Wedges = [None] * chunk_datacount\n \n unpack_data = Struct('=IffBxxx').unpack_from\n \n for counter in range( chunk_datacount ):\n (vertex_id,\n u, v,\n material_index) = unpack_data( chunk_data, counter * chunk_datasize )\n \n # print(vertex_id, u, v, material_index)\n # Wedges[counter] = (vertex_id, u, v, material_index)\n Wedges[counter] = [vertex_id, u, v, material_index]\n \n #================================================================================================== \n # Bones (VBone .. VJointPos ) Name|Flgs|NumChld|PrntIdx|Qw|Qx|Qy|Qz|LocX|LocY|LocZ|Lngth|XSize|YSize|ZSize\n def read_bones():\n \n nonlocal Bones, bImportbone\n \n if chunk_datacount == 0:\n bImportbone = False\n \n if bImportbone:\n unpack_data = Struct('64s3i11f').unpack_from\n else:\n unpack_data = Struct('64s56x').unpack_from\n \n Bones = [None] * chunk_datacount\n \n for counter in range( chunk_datacount ):\n Bones[counter] = unpack_data( chunk_data, chunk_datasize * counter)\n \n \n #================================================================================================== \n # Influences (Bone Weight) (VRawBoneInfluence) ( Weight | PntIdx | BoneIdx)\n def read_weights():\n # nonlocal Weights, bImportmesh\n nonlocal Weights\n \n if not bImportmesh:\n return True\n \n Weights = [None] * chunk_datacount\n \n unpack_data = Struct('fii').unpack_from\n \n for counter in range(chunk_datacount):\n Weights[counter] = unpack_data(chunk_data, chunk_datasize * counter)\n \n \n #================================================================================================== \n # Extra UV. 
U | V\n def read_extrauvs():\n unpack_data = Struct(\"=2f\").unpack_from\n \n uvdata = [None] * chunk_datacount\n \n for counter in range( chunk_datacount ):\n uvdata[counter] = unpack_data(chunk_data, chunk_datasize * counter) \n \n Extrauvs.append(uvdata)\n\n #==================================================================================================\n # Vertex Normals NX | NY | NZ\n def read_normals():\n if not bImportmesh:\n return True\n\n nonlocal Normals\n Normals = [None] * chunk_datacount\n\n unpack_data = Struct('3f').unpack_from\n\n for counter in range(chunk_datacount):\n Normals[counter] = unpack_data(chunk_data, counter * chunk_datasize)\n \n \n CHUNKS_HANDLERS = {\n 'PNTS0000': read_vertices,\n 'VTXW0000': read_wedges,\n 'VTXW3200': read_wedges,#?\n 'FACE0000': read_faces,\n 'FACE3200': read_faces,\n 'MATT0000': read_materials,\n 'REFSKELT': read_bones,\n 'REFSKEL0': read_bones, #?\n 'RAWW0000': read_weights,\n 'RAWWEIGH': read_weights,\n 'EXTRAUVS': read_extrauvs,\n 'VTXNORMS': read_normals\n }\n \n #===================================================================================================\n # File. Read all needed data.\n # VChunkHeader Struct\n # ChunkID|TypeFlag|DataSize|DataCount\n # 0 |1 |2 |3\n \n while True:\n \n header_bytes = file.read(32)\n \n if len(header_bytes) < 32:\n \n if len(header_bytes) != 0:\n error_callback(\"Unexpected end of file.(%s\/32 bytes)\" % len(header_bytes))\n break\n \n (chunk_id, chunk_type, chunk_datasize, chunk_datacount) = unpack('20s3i', header_bytes)\n \n chunk_id_str = util_bytes_to_str(chunk_id)\n chunk_id_str = chunk_id_str[:8]\n \n if chunk_id_str in CHUNKS_HANDLERS:\n \n chunk_data = file.read( chunk_datasize * chunk_datacount)\n \n if len(chunk_data) < chunk_datasize * chunk_datacount:\n error_callback('Psk chunk %s is broken.' % chunk_id_str)\n return False\n \n CHUNKS_HANDLERS[chunk_id_str]()\n \n else:\n \n print('Unknown chunk: ', chunk_id_str)\n file.seek(chunk_datasize * chunk_datacount, 1)\n \n \n # print(chunk_id_str, chunk_datacount)\n \n file.close()\n \n print(\" Importing file:\", filepath)\n \n if not bImportmesh and (Bones is None or len(Bones) == 0):\n error_callback(\"Psk: no skeleton data.\")\n return False\n\n MAX_UVS = 8\n NAME_UV_PREFIX = \"UV\"\n \n # file name w\/out extension\n gen_name_part = util_gen_name_part(filepath)\n gen_names = {\n 'armature_object': gen_name_part + '.ao',\n 'armature_data': gen_name_part + '.ad',\n 'mesh_object': gen_name_part + '.mo',\n 'mesh_data': gen_name_part + '.md'\n }\n \n if bImportmesh:\n mesh_data = bpy.data.meshes.new(gen_names['mesh_data'])\n mesh_obj = bpy.data.objects.new(gen_names['mesh_object'], mesh_data)\n \n \n #==================================================================================================\n # UV. 
Prepare\n if bImportmesh:\n if bSpltiUVdata:\n # store how much each \"matrial index\" have vertices\n \n uv_mat_ids = {}\n \n for (_, _, _, material_index) in Wedges:\n \n if not (material_index in uv_mat_ids):\n uv_mat_ids[material_index] = 1\n else:\n uv_mat_ids[material_index] += 1\n \n \n # if we have more UV material indexes than blender UV maps, then...\n if bSpltiUVdata and len(uv_mat_ids) > MAX_UVS :\n \n uv_mat_ids_len = len(uv_mat_ids)\n \n print('UVs: %s out of %s is combined in a first UV map(%s0)' % (uv_mat_ids_len - 8, uv_mat_ids_len, NAME_UV_PREFIX))\n \n mat_idx_proxy = [0] * len(uv_mat_ids)\n \n counts_sorted = sorted(uv_mat_ids.values(), reverse = True)\n \n new_mat_index = MAX_UVS - 1\n \n for c in counts_sorted:\n for mat_idx, counts in uv_mat_ids.items():\n if c == counts:\n mat_idx_proxy[mat_idx] = new_mat_index\n if new_mat_index > 0:\n new_mat_index -= 1\n # print('MatIdx remap: %s > %s' % (mat_idx,new_mat_index))\n \n for i in range(len(Wedges)):\n Wedges[i][3] = mat_idx_proxy[Wedges[i][3]]\n\n # print('Wedges:', chunk_datacount)\n # print('uv_mat_ids', uv_mat_ids)\n # print('uv_mat_ids', uv_mat_ids)\n # for w in Wedges:\n \n if bImportmesh:\n # print(\"-- Materials -- (index, name, faces)\")\n blen_materials = []\n for materialname in Materials:\n matdata = bpy.data.materials.get(materialname)\n \n if matdata is None:\n matdata = bpy.data.materials.new( materialname )\n # matdata = bpy.data.materials.new( materialname )\n \n blen_materials.append( matdata )\n mesh_data.materials.append( matdata )\n # print(counter,materialname,TextureIndex)\n # if mat_groups.get(counter) is not None:\n # print(\"%i: %s\" % (counter, materialname), len(mat_groups[counter]))\n\n #==================================================================================================\n # Prepare bone data\n psk_bone_name_toolong = False\n\n def init_psk_bone(i, psk_bones, name_raw):\n psk_bone = class_psk_bone()\n psk_bone.children = []\n psk_bone.name = util_bytes_to_str(name_raw)\n psk_bones[i] = psk_bone\n return psk_bone\n \n # indexed by bone index. 
array of psk_bone\n psk_bones = [None] * len(Bones)\n \n if not bImportbone: #data needed for mesh-only import\n \n for counter,(name_raw,) in enumerate(Bones):\n init_psk_bone(counter, psk_bones, name_raw)\n \n if bImportbone: #else?\n \n # average bone length\n sum_bone_pos = 0\n \n for counter, (name_raw, flags, NumChildren, ParentIndex, #0 1 2 3\n quat_x, quat_y, quat_z, quat_w, #4 5 6 7\n vec_x, vec_y, vec_z, #8 9 10\n joint_length, #11\n scale_x, scale_y, scale_z) in enumerate(Bones):\n \n psk_bone = init_psk_bone(counter, psk_bones, name_raw)\n \n psk_bone.bone_index = counter\n psk_bone.parent_index = ParentIndex\n\n if len(psk_bone.name) > 60:\n psk_bone_name_toolong = True\n\n # print(psk_bone.bone_index, psk_bone.parent_index, psk_bone.name) \n # make sure we have valid parent_index\n if psk_bone.parent_index < 0:\n psk_bone.parent_index = 0\n\n # psk_bone.scale = (scale_x, scale_y, scale_z)\n\n # store bind pose to make it available for psa-import via CustomProperty of the Blender bone\n psk_bone.orig_quat = Quaternion((quat_w, quat_x, quat_y, quat_z))\n psk_bone.orig_loc = Vector((vec_x, vec_y, vec_z))\n\n # root bone must have parent_index = 0 and selfindex = 0\n if psk_bone.parent_index == 0 and psk_bone.bone_index == psk_bone.parent_index:\n if bDontInvertRoot:\n psk_bone.mat_world_rot = psk_bone.orig_quat.to_matrix()\n else:\n psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_matrix()\n psk_bone.mat_world = Matrix.Translation(psk_bone.orig_loc)\n\n sum_bone_pos += psk_bone.orig_loc.length\n \n \n #==================================================================================================\n # Bones. Calc World-space matrix\n \n # TODO optimize math.\n for psk_bone in psk_bones:\n \n if psk_bone.parent_index == 0:\n if psk_bone.bone_index == 0:\n psk_bone.parent = None\n continue\n \n parent = psk_bones[psk_bone.parent_index]\n \n psk_bone.parent = parent\n \n parent.children.append(psk_bone)\n \n # mat_world - world space bone matrix WITHOUT own rotation\n # mat_world_rot - world space bone rotation WITH own rotation\n psk_bone.mat_world = parent.mat_world_rot.to_4x4()\n psk_bone.mat_world.translation = parent.mat_world.translation + parent.mat_world_rot * psk_bone.orig_loc\n psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()\n \n # psk_bone.mat_world = ( parent.mat_world_rot.to_4x4() * psk_bone.trans)\n # psk_bone.mat_world.translation += parent.mat_world.translation\n # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()\n \n \n #==================================================================================================\n # Skeleton. Prepare.\n \n armature_data = bpy.data.armatures.new(gen_names['armature_data'])\n armature_obj = bpy.data.objects.new(gen_names['armature_object'], armature_data)\n # TODO: options for axes and x_ray?\n armature_data.show_axes = False\n armature_data.draw_type = 'STICK'\n armature_obj.show_x_ray = True\n\n util_obj_link(context, armature_obj)\n\n util_select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)\n \n utils_set_mode('EDIT')\n \n \n sum_bone_pos \/= len(Bones) # average\n sum_bone_pos *= fBonesizeRatio # corrected\n \n bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize))))\n\n if not bReorientBones:\n new_bone_size = bone_size_choosen\n #==================================================================================================\n # Skeleton. 
Build.\n if psk_bone_name_toolong:\n for psk_bone in psk_bones:\n\n\n # TODO too long name cutting options?\n long_name = psk_bone.name\n psk_bone.name = psk_bone.name[-60:]\n\n edit_bone = armature_obj.data.edit_bones.new(psk_bone.name)\n edit_bone[\"long_name\"] = long_name\n\n psk_bone.name = edit_bone.name\n\n # print(psk_bone.name)\n # print(edit_bone.name)\n else:\n for psk_bone in psk_bones:\n edit_bone = armature_obj.data.edit_bones.new(psk_bone.name)\n psk_bone.name = edit_bone.name\n\n\n for psk_bone in psk_bones:\n edit_bone = armature_obj.data.edit_bones[psk_bone.name]\n\n armature_obj.data.edit_bones.active = edit_bone\n\n if psk_bone.parent is not None:\n edit_bone.parent = armature_obj.data.edit_bones[psk_bone.parent.name]\n else:\n if bDontInvertRoot:\n psk_bone.orig_quat.conjugate()\n \n if bReorientBones:\n (new_bone_size, quat_orient_diff) = calc_bone_rotation(psk_bone, bone_size_choosen, bReorientDirectly, sum_bone_pos)\n post_quat = psk_bone.orig_quat.conjugated() * quat_orient_diff\n else:\n post_quat = psk_bone.orig_quat.conjugated()\n \n # only length of this vector is matter?\n edit_bone.tail = Vector(( 0.0, new_bone_size, 0.0))\n # edit_bone.tail = Vector((0.0, 0.0, new_bone_size))\n \n # edit_bone.matrix = psk_bone.mat_world * quat_diff.to_matrix().to_4x4()\n edit_bone.matrix = psk_bone.mat_world * post_quat.to_matrix().to_4x4()\n \n \n # some dev code...\n #### FINAL\n # post_quat = psk_bone.orig_quat.conjugated() * quat_diff\n # edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4()\n # edit_bone[\"post_quat\"] = test_quat\n #### \n\n # edit_bone[\"post_quat\"] = Quaternion((1,0,0,0))\n # edit_bone.matrix = psk_bone.mat_world* psk_bone.rot\n \n\n # if edit_bone.parent:\n # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4())\n # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4())\n # else:\n # edit_bone.matrix = psk_bone.orig_quat.to_matrix().to_4x4()\n \n \n # save bindPose information for .psa import\n edit_bone[\"orig_quat\"] = psk_bone.orig_quat\n edit_bone[\"orig_loc\"] = psk_bone.orig_loc\n edit_bone[\"post_quat\"] = post_quat\n \n utils_set_mode('OBJECT')\n \n #==================================================================================================\n # Weights\n if bImportmesh: \n \n vertices_total = len(Vertices)\n \n for ( _, PointIndex, BoneIndex ) in Weights:\n if PointIndex < vertices_total: # can it be not?\n psk_bones[BoneIndex].have_weight_data = True\n # else:\n # print(psk_bones[BoneIndex].name, 'for other mesh',PointIndex ,vertices_total)\n \n #print(\"weight:\", PointIndex, BoneIndex, Weight)\n # Weights.append(None)\n # print(Weights.count(None))\n \n \n # Original vertex colorization code\n '''\n # Weights.sort( key = lambda wgh: wgh[0])\n if bImportmesh:\n VtxCol = []\n bones_count = len(psk_bones)\n for x in range(bones_count):\n #change the overall darkness of each material in a range between 0.1 and 0.9\n tmpVal = ((float(x) + 1.0) \/ bones_count * 0.7) + 0.1\n tmpVal = int(tmpVal * 256)\n tmpCol = [tmpVal, tmpVal, tmpVal, 0]\n #Change the color of each material slightly\n if x % 3 == 0:\n if tmpCol[0] < 128:\n tmpCol[0] += 60\n else:\n tmpCol[0] -= 60\n if x % 3 == 1:\n if tmpCol[1] < 128:\n tmpCol[1] += 60\n else:\n tmpCol[1] -= 60\n if x % 3 == 2:\n if tmpCol[2] < 128:\n tmpCol[2] += 60\n else:\n tmpCol[2] -= 60\n #Add the material to the mesh\n VtxCol.append(tmpCol)\n \n for x in 
range(len(Tmsh.faces)):\n for y in range(len(Tmsh.faces[x].v)):\n #find v in Weights[n][0]\n findVal = Tmsh.faces[x].v[y].index\n n = 0\n while findVal != Weights[n][0]:\n n = n + 1\n TmpCol = VtxCol[Weights[n][1]]\n #check if a vertex has more than one influence\n if n != len(Weights) - 1:\n if Weights[n][0] == Weights[n + 1][0]:\n #if there is more than one influence, use the one with the greater influence\n #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small\n if Weights[n][2] < Weights[n + 1][2]:\n TmpCol = VtxCol[Weights[n + 1][1]]\n Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0], TmpCol[1], TmpCol[2], 0))\n '''\n\n #===================================================================================================\n # UV. Setup.\n \n if bImportmesh:\n # Trick! Create UV maps BEFORE mesh and get (0,0) coordinates for free!\n # ...otherwise UV coords will be copied from active, or calculated from mesh...\n \n if bSpltiUVdata:\n \n for i in range(len(uv_mat_ids)):\n get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX + str(i))\n \n else:\n \n get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX+\"_SINGLE\")\n \n \n for counter, uv_data in enumerate(Extrauvs):\n \n if len(mesh_data.uv_layers) < MAX_UVS:\n \n get_uv_layers(mesh_data).new(name = \"EXTRAUVS\"+str(counter))\n \n else:\n \n Extrauvs.remove(uv_data)\n print('Extra UV layer %s is ignored. Re-import without \"Split UV data\".' % counter)\n \n #================================================================================================== \n # Mesh. Build.\n \n mesh_data.from_pydata(Vertices,[],Faces)\n\n #==================================================================================================\n # Vertex Normal. Set.\n\n if Normals is not None:\n mesh_data.polygons.foreach_set(\"use_smooth\", [True] * len(mesh_data.polygons))\n mesh_data.normals_split_custom_set_from_vertices(Normals)\n mesh_data.use_auto_smooth = True\n \n #===================================================================================================\n # UV. Set.\n \n if bImportmesh:\n\n for face in mesh_data.polygons:\n face.material_index = UV_by_face[face.index][1]\n\n uv_layers = mesh_data.uv_layers\n \n if not bSpltiUVdata:\n uvLayer = uv_layers[0]\n \n # per face\n # for faceIdx, (faceUVs, faceMatIdx, _, _, wmidx) in enumerate(UV_by_face):\n for faceIdx, (faceUVs, faceMatIdx, WedgeMatIds) in enumerate(UV_by_face):\n \n # per vertex\n for vertN, uv in enumerate(faceUVs):\n loopId = faceIdx * 3 + vertN\n \n if bSpltiUVdata:\n uvLayer = uv_layers[WedgeMatIds[vertN]]\n \n uvLayer.data[loopId].uv = uv\n\n #===================================================================================================\n # Extra UVs. Set.\n \n for counter, uv_data in enumerate(Extrauvs):\n \n uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ]\n \n for uv_index, uv_coords in enumerate(uv_data):\n \n uvLayer.data[uv_index].uv = (uv_coords[0], 1.0 - uv_coords[1])\n \n #===================================================================================================\n # Mesh. Vertex Groups. 
Bone Weights.\n \n for psk_bone in psk_bones:\n if psk_bone.have_weight_data:\n psk_bone.vertex_group = mesh_obj.vertex_groups.new(psk_bone.name)\n # else:\n # print(psk_bone.name, 'have no influence on this mesh')\n \n for weight, vertex_id, bone_index_w in filter(None, Weights):\n psk_bones[bone_index_w].vertex_group.add((vertex_id,), weight, 'ADD')\n \n \n #===================================================================================================\n # Skeleton. Colorize.\n \n if bImportbone:\n \n bone_group_unused = armature_obj.pose.bone_groups.new(\"Unused bones\")\n bone_group_unused.color_set = 'THEME14'\n\n bone_group_nochild = armature_obj.pose.bone_groups.new(\"No children\")\n bone_group_nochild.color_set = 'THEME03'\n\n armature_data.show_group_colors = True\n\n for psk_bone in psk_bones:\n \n pose_bone = armature_obj.pose.bones[psk_bone.name]\n \n if psk_bone.have_weight_data:\n \n if len(psk_bone.children) == 0:\n pose_bone.bone_group = bone_group_nochild\n \n else:\n pose_bone.bone_group = bone_group_unused\n \n \n #===================================================================================================\n # Final\n \n if bImportmesh:\n \n util_obj_link(context, mesh_obj)\n util_select_all(False)\n \n \n if not bImportbone: \n \n util_obj_select(context, mesh_obj)\n util_obj_set_active(context, mesh_obj)\n \n else:\n # select_all(False)\n util_obj_select(context, armature_obj)\n \n # parenting mesh to armature object\n mesh_obj.parent = armature_obj\n mesh_obj.parent_type = 'OBJECT'\n \n # add armature modifier\n blender_modifier = mesh_obj.modifiers.new( armature_obj.data.name, type = 'ARMATURE')\n blender_modifier.show_expanded = False\n blender_modifier.use_vertex_groups = True\n blender_modifier.use_bone_envelopes = False\n blender_modifier.object = armature_obj\n \n # utils_set_mode('OBJECT')\n # select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)\n \n # print(\"Done: %f sec.\" % (time.process_time() - ref_time))\n utils_set_mode('OBJECT')\n return True","function_tokens":["def","pskimport","(","filepath",",","context","=","bpy",".","context",",","bImportmesh","=","True",",","bImportbone","=","True",",","bSpltiUVdata","=","False",",","fBonesize","=","5.0",",","fBonesizeRatio","=","0.6",",","bDontInvertRoot","=","False",",","bReorientBones","=","False",",","bReorientDirectly","=","False",",","error_callback","=","None",")",":","if","not","hasattr","(","error_callback",",","'__call__'",")",":","error_callback","=","__pass","# ref_time = time.process_time()","if","not","bImportbone","and","not","bImportmesh",":","error_callback","(","\"Nothing to do.\\nSet something for import.\"",")","return","False","file_ext","=","'psk'","print","(","\"-----------------------------------------------\"",")","print","(","\"---------EXECUTING PSK PYTHON IMPORTER---------\"",")","print","(","\"-----------------------------------------------\"",")","#file may not exist","try",":","file","=","open","(","filepath",",","'rb'",")","except","IOError",":","error_callback","(","'Error while opening file for reading:\\n \"'","+","filepath","+","'\"'",")","return","False","if","not","util_check_file_header","(","file",",","'psk'",")",":","error_callback","(","'Not psk file:\\n 
\"'","+","filepath","+","'\"'",")","return","False","Vertices","=","None","Wedges","=","None","Faces","=","None","UV_by_face","=","None","Materials","=","None","Bones","=","None","Weights","=","None","Extrauvs","=","[","]","Normals","=","None","#================================================================================================== ","# Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle ","# Only Name is usable.","def","read_materials","(",")",":","nonlocal","Materials","Materials","=","[","]","for","counter","in","range","(","chunk_datacount",")",":","(","MaterialNameRaw",",",")","=","unpack_from","(","'64s24x'",",","chunk_data",",","chunk_datasize","*","counter",")","Materials",".","append","(","util_bytes_to_str","(","MaterialNameRaw",")",")","#================================================================================================== ","# Faces WdgIdx1 | WdgIdx2 | WdgIdx3 | MatIdx | AuxMatIdx | SmthGrp","def","read_faces","(",")",":","nonlocal","Faces",",","UV_by_face","if","not","bImportmesh",":","return","True","UV_by_face","=","[","None","]","*","chunk_datacount","Faces","=","[","None","]","*","chunk_datacount","if","len","(","Wedges",")",">","65536",":","unpack_format","=","'=IIIBBI'","else",":","unpack_format","=","'=HHHBBI'","unpack_data","=","Struct","(","unpack_format",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","(","WdgIdx1",",","WdgIdx2",",","WdgIdx3",",","MatIndex",",","AuxMatIndex",",","#unused","SmoothingGroup","# Umodel is not exporting SmoothingGroups",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","# looks ugly","# Wedges is (point_index, u, v, MatIdx)","(","(","vertid0",",","u0",",","v0",",","matid0",")",",","(","vertid1",",","u1",",","v1",",","matid1",")",",","(","vertid2",",","u2",",","v2",",","matid2",")",")","=","Wedges","[","WdgIdx1","]",",","Wedges","[","WdgIdx2","]",",","Wedges","[","WdgIdx3","]","# note order: C,B,A","Faces","[","counter","]","=","(","vertid2",",","vertid1",",","vertid0",")","uv","=","(","(","u2",",","1.0","-","v2",")",",","(","u1",",","1.0","-","v1",")",",","(","u0",",","1.0","-","v0",")",")","# Mapping: FaceIndex <=> UV data <=> FaceMatIndex","UV_by_face","[","counter","]","=","(","uv",",","MatIndex",",","(","matid2",",","matid1",",","matid0",")",")","#==================================================================================================","# Vertices X | Y | Z","def","read_vertices","(",")",":","nonlocal","bImportbone",",","bImportmesh","if","not","bImportmesh",":","return","True","nonlocal","Vertices","Vertices","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","(","vec_x",",","vec_y",",","vec_z",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","Vertices","[","counter","]","=","(","vec_x",",","vec_y",",","vec_z",")","#================================================================================================== ","# Wedges (UV) VertexId | U | V | MatIdx 
","def","read_wedges","(",")",":","nonlocal","bImportbone",",","bImportmesh","if","not","bImportmesh",":","return","True","nonlocal","Wedges","Wedges","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'=IffBxxx'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","(","vertex_id",",","u",",","v",",","material_index",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","# print(vertex_id, u, v, material_index)","# Wedges[counter] = (vertex_id, u, v, material_index)","Wedges","[","counter","]","=","[","vertex_id",",","u",",","v",",","material_index","]","#================================================================================================== ","# Bones (VBone .. VJointPos ) Name|Flgs|NumChld|PrntIdx|Qw|Qx|Qy|Qz|LocX|LocY|LocZ|Lngth|XSize|YSize|ZSize","def","read_bones","(",")",":","nonlocal","Bones",",","bImportbone","if","chunk_datacount","==","0",":","bImportbone","=","False","if","bImportbone",":","unpack_data","=","Struct","(","'64s3i11f'",")",".","unpack_from","else",":","unpack_data","=","Struct","(","'64s56x'",")",".","unpack_from","Bones","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","Bones","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","#================================================================================================== ","# Influences (Bone Weight) (VRawBoneInfluence) ( Weight | PntIdx | BoneIdx)","def","read_weights","(",")",":","# nonlocal Weights, bImportmesh","nonlocal","Weights","if","not","bImportmesh",":","return","True","Weights","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'fii'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","Weights","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","#================================================================================================== ","# Extra UV. U | V","def","read_extrauvs","(",")",":","unpack_data","=","Struct","(","\"=2f\"",")",".","unpack_from","uvdata","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","uvdata","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","Extrauvs",".","append","(","uvdata",")","#==================================================================================================","# Vertex Normals NX | NY | NZ","def","read_normals","(",")",":","if","not","bImportmesh",":","return","True","nonlocal","Normals","Normals","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","Normals","[","counter","]","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","CHUNKS_HANDLERS","=","{","'PNTS0000'",":","read_vertices",",","'VTXW0000'",":","read_wedges",",","'VTXW3200'",":","read_wedges",",","#?","'FACE0000'",":","read_faces",",","'FACE3200'",":","read_faces",",","'MATT0000'",":","read_materials",",","'REFSKELT'",":","read_bones",",","'REFSKEL0'",":","read_bones",",","#?","'RAWW0000'",":","read_weights",",","'RAWWEIGH'",":","read_weights",",","'EXTRAUVS'",":","read_extrauvs",",","'VTXNORMS'",":","read_normals","}","#===================================================================================================","# File. 
Read all needed data.","# VChunkHeader Struct","# ChunkID|TypeFlag|DataSize|DataCount","# 0 |1 |2 |3","while","True",":","header_bytes","=","file",".","read","(","32",")","if","len","(","header_bytes",")","<","32",":","if","len","(","header_bytes",")","!=","0",":","error_callback","(","\"Unexpected end of file.(%s\/32 bytes)\"","%","len","(","header_bytes",")",")","break","(","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",")","=","unpack","(","'20s3i'",",","header_bytes",")","chunk_id_str","=","util_bytes_to_str","(","chunk_id",")","chunk_id_str","=","chunk_id_str","[",":","8","]","if","chunk_id_str","in","CHUNKS_HANDLERS",":","chunk_data","=","file",".","read","(","chunk_datasize","*","chunk_datacount",")","if","len","(","chunk_data",")","<","chunk_datasize","*","chunk_datacount",":","error_callback","(","'Psk chunk %s is broken.'","%","chunk_id_str",")","return","False","CHUNKS_HANDLERS","[","chunk_id_str","]","(",")","else",":","print","(","'Unknown chunk: '",",","chunk_id_str",")","file",".","seek","(","chunk_datasize","*","chunk_datacount",",","1",")","# print(chunk_id_str, chunk_datacount)","file",".","close","(",")","print","(","\" Importing file:\"",",","filepath",")","if","not","bImportmesh","and","(","Bones","is","None","or","len","(","Bones",")","==","0",")",":","error_callback","(","\"Psk: no skeleton data.\"",")","return","False","MAX_UVS","=","8","NAME_UV_PREFIX","=","\"UV\"","# file name w\/out extension","gen_name_part","=","util_gen_name_part","(","filepath",")","gen_names","=","{","'armature_object'",":","gen_name_part","+","'.ao'",",","'armature_data'",":","gen_name_part","+","'.ad'",",","'mesh_object'",":","gen_name_part","+","'.mo'",",","'mesh_data'",":","gen_name_part","+","'.md'","}","if","bImportmesh",":","mesh_data","=","bpy",".","data",".","meshes",".","new","(","gen_names","[","'mesh_data'","]",")","mesh_obj","=","bpy",".","data",".","objects",".","new","(","gen_names","[","'mesh_object'","]",",","mesh_data",")","#==================================================================================================","# UV. 
Prepare","if","bImportmesh",":","if","bSpltiUVdata",":","# store how much each \"matrial index\" have vertices","uv_mat_ids","=","{","}","for","(","_",",","_",",","_",",","material_index",")","in","Wedges",":","if","not","(","material_index","in","uv_mat_ids",")",":","uv_mat_ids","[","material_index","]","=","1","else",":","uv_mat_ids","[","material_index","]","+=","1","# if we have more UV material indexes than blender UV maps, then...","if","bSpltiUVdata","and","len","(","uv_mat_ids",")",">","MAX_UVS",":","uv_mat_ids_len","=","len","(","uv_mat_ids",")","print","(","'UVs: %s out of %s is combined in a first UV map(%s0)'","%","(","uv_mat_ids_len","-","8",",","uv_mat_ids_len",",","NAME_UV_PREFIX",")",")","mat_idx_proxy","=","[","0","]","*","len","(","uv_mat_ids",")","counts_sorted","=","sorted","(","uv_mat_ids",".","values","(",")",",","reverse","=","True",")","new_mat_index","=","MAX_UVS","-","1","for","c","in","counts_sorted",":","for","mat_idx",",","counts","in","uv_mat_ids",".","items","(",")",":","if","c","==","counts",":","mat_idx_proxy","[","mat_idx","]","=","new_mat_index","if","new_mat_index",">","0",":","new_mat_index","-=","1","# print('MatIdx remap: %s > %s' % (mat_idx,new_mat_index))","for","i","in","range","(","len","(","Wedges",")",")",":","Wedges","[","i","]","[","3","]","=","mat_idx_proxy","[","Wedges","[","i","]","[","3","]","]","# print('Wedges:', chunk_datacount)","# print('uv_mat_ids', uv_mat_ids)","# print('uv_mat_ids', uv_mat_ids)","# for w in Wedges:","if","bImportmesh",":","# print(\"-- Materials -- (index, name, faces)\")","blen_materials","=","[","]","for","materialname","in","Materials",":","matdata","=","bpy",".","data",".","materials",".","get","(","materialname",")","if","matdata","is","None",":","matdata","=","bpy",".","data",".","materials",".","new","(","materialname",")","# matdata = bpy.data.materials.new( materialname )","blen_materials",".","append","(","matdata",")","mesh_data",".","materials",".","append","(","matdata",")","# print(counter,materialname,TextureIndex)","# if mat_groups.get(counter) is not None:","# print(\"%i: %s\" % (counter, materialname), len(mat_groups[counter]))","#==================================================================================================","# Prepare bone data","psk_bone_name_toolong","=","False","def","init_psk_bone","(","i",",","psk_bones",",","name_raw",")",":","psk_bone","=","class_psk_bone","(",")","psk_bone",".","children","=","[","]","psk_bone",".","name","=","util_bytes_to_str","(","name_raw",")","psk_bones","[","i","]","=","psk_bone","return","psk_bone","# indexed by bone index. 
array of psk_bone","psk_bones","=","[","None","]","*","len","(","Bones",")","if","not","bImportbone",":","#data needed for mesh-only import","for","counter",",","(","name_raw",",",")","in","enumerate","(","Bones",")",":","init_psk_bone","(","counter",",","psk_bones",",","name_raw",")","if","bImportbone",":","#else?","# average bone length","sum_bone_pos","=","0","for","counter",",","(","name_raw",",","flags",",","NumChildren",",","ParentIndex",",","#0 1 2 3","quat_x",",","quat_y",",","quat_z",",","quat_w",",","#4 5 6 7","vec_x",",","vec_y",",","vec_z",",","#8 9 10","joint_length",",","#11","scale_x",",","scale_y",",","scale_z",")","in","enumerate","(","Bones",")",":","psk_bone","=","init_psk_bone","(","counter",",","psk_bones",",","name_raw",")","psk_bone",".","bone_index","=","counter","psk_bone",".","parent_index","=","ParentIndex","if","len","(","psk_bone",".","name",")",">","60",":","psk_bone_name_toolong","=","True","# print(psk_bone.bone_index, psk_bone.parent_index, psk_bone.name) ","# make sure we have valid parent_index","if","psk_bone",".","parent_index","<","0",":","psk_bone",".","parent_index","=","0","# psk_bone.scale = (scale_x, scale_y, scale_z)","# store bind pose to make it available for psa-import via CustomProperty of the Blender bone","psk_bone",".","orig_quat","=","Quaternion","(","(","quat_w",",","quat_x",",","quat_y",",","quat_z",")",")","psk_bone",".","orig_loc","=","Vector","(","(","vec_x",",","vec_y",",","vec_z",")",")","# root bone must have parent_index = 0 and selfindex = 0","if","psk_bone",".","parent_index","==","0","and","psk_bone",".","bone_index","==","psk_bone",".","parent_index",":","if","bDontInvertRoot",":","psk_bone",".","mat_world_rot","=","psk_bone",".","orig_quat",".","to_matrix","(",")","else",":","psk_bone",".","mat_world_rot","=","psk_bone",".","orig_quat",".","conjugated","(",")",".","to_matrix","(",")","psk_bone",".","mat_world","=","Matrix",".","Translation","(","psk_bone",".","orig_loc",")","sum_bone_pos","+=","psk_bone",".","orig_loc",".","length","#==================================================================================================","# Bones. Calc World-space matrix","# TODO optimize math.","for","psk_bone","in","psk_bones",":","if","psk_bone",".","parent_index","==","0",":","if","psk_bone",".","bone_index","==","0",":","psk_bone",".","parent","=","None","continue","parent","=","psk_bones","[","psk_bone",".","parent_index","]","psk_bone",".","parent","=","parent","parent",".","children",".","append","(","psk_bone",")","# mat_world - world space bone matrix WITHOUT own rotation","# mat_world_rot - world space bone rotation WITH own rotation","psk_bone",".","mat_world","=","parent",".","mat_world_rot",".","to_4x4","(",")","psk_bone",".","mat_world",".","translation","=","parent",".","mat_world",".","translation","+","parent",".","mat_world_rot","*","psk_bone",".","orig_loc","psk_bone",".","mat_world_rot","=","parent",".","mat_world_rot","*","psk_bone",".","orig_quat",".","conjugated","(",")",".","to_matrix","(",")","# psk_bone.mat_world = ( parent.mat_world_rot.to_4x4() * psk_bone.trans)","# psk_bone.mat_world.translation += parent.mat_world.translation","# psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()","#==================================================================================================","# Skeleton. 
Prepare.","armature_data","=","bpy",".","data",".","armatures",".","new","(","gen_names","[","'armature_data'","]",")","armature_obj","=","bpy",".","data",".","objects",".","new","(","gen_names","[","'armature_object'","]",",","armature_data",")","# TODO: options for axes and x_ray?","armature_data",".","show_axes","=","False","armature_data",".","draw_type","=","'STICK'","armature_obj",".","show_x_ray","=","True","util_obj_link","(","context",",","armature_obj",")","util_select_all","(","False",")","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")","utils_set_mode","(","'EDIT'",")","sum_bone_pos","\/=","len","(","Bones",")","# average","sum_bone_pos","*=","fBonesizeRatio","# corrected","bone_size_choosen","=","max","(","0.01",",","round","(","(","min","(","sum_bone_pos",",","fBonesize",")",")",")",")","if","not","bReorientBones",":","new_bone_size","=","bone_size_choosen","#==================================================================================================","# Skeleton. Build.","if","psk_bone_name_toolong",":","for","psk_bone","in","psk_bones",":","# TODO too long name cutting options?","long_name","=","psk_bone",".","name","psk_bone",".","name","=","psk_bone",".","name","[","-","60",":","]","edit_bone","=","armature_obj",".","data",".","edit_bones",".","new","(","psk_bone",".","name",")","edit_bone","[","\"long_name\"","]","=","long_name","psk_bone",".","name","=","edit_bone",".","name","# print(psk_bone.name)","# print(edit_bone.name)","else",":","for","psk_bone","in","psk_bones",":","edit_bone","=","armature_obj",".","data",".","edit_bones",".","new","(","psk_bone",".","name",")","psk_bone",".","name","=","edit_bone",".","name","for","psk_bone","in","psk_bones",":","edit_bone","=","armature_obj",".","data",".","edit_bones","[","psk_bone",".","name","]","armature_obj",".","data",".","edit_bones",".","active","=","edit_bone","if","psk_bone",".","parent","is","not","None",":","edit_bone",".","parent","=","armature_obj",".","data",".","edit_bones","[","psk_bone",".","parent",".","name","]","else",":","if","bDontInvertRoot",":","psk_bone",".","orig_quat",".","conjugate","(",")","if","bReorientBones",":","(","new_bone_size",",","quat_orient_diff",")","=","calc_bone_rotation","(","psk_bone",",","bone_size_choosen",",","bReorientDirectly",",","sum_bone_pos",")","post_quat","=","psk_bone",".","orig_quat",".","conjugated","(",")","*","quat_orient_diff","else",":","post_quat","=","psk_bone",".","orig_quat",".","conjugated","(",")","# only length of this vector is matter?","edit_bone",".","tail","=","Vector","(","(","0.0",",","new_bone_size",",","0.0",")",")","# edit_bone.tail = Vector((0.0, 0.0, new_bone_size))","# edit_bone.matrix = psk_bone.mat_world * quat_diff.to_matrix().to_4x4()","edit_bone",".","matrix","=","psk_bone",".","mat_world","*","post_quat",".","to_matrix","(",")",".","to_4x4","(",")","# some dev code...","#### FINAL","# post_quat = psk_bone.orig_quat.conjugated() * quat_diff","# edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4()","# edit_bone[\"post_quat\"] = test_quat","#### ","# edit_bone[\"post_quat\"] = Quaternion((1,0,0,0))","# edit_bone.matrix = psk_bone.mat_world* psk_bone.rot","# if edit_bone.parent:","# edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4())","# edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4())","# else:","# edit_bone.matrix = 
psk_bone.orig_quat.to_matrix().to_4x4()","# save bindPose information for .psa import","edit_bone","[","\"orig_quat\"","]","=","psk_bone",".","orig_quat","edit_bone","[","\"orig_loc\"","]","=","psk_bone",".","orig_loc","edit_bone","[","\"post_quat\"","]","=","post_quat","utils_set_mode","(","'OBJECT'",")","#==================================================================================================","# Weights","if","bImportmesh",":","vertices_total","=","len","(","Vertices",")","for","(","_",",","PointIndex",",","BoneIndex",")","in","Weights",":","if","PointIndex","<","vertices_total",":","# can it be not?","psk_bones","[","BoneIndex","]",".","have_weight_data","=","True","# else:","# print(psk_bones[BoneIndex].name, 'for other mesh',PointIndex ,vertices_total)","#print(\"weight:\", PointIndex, BoneIndex, Weight)","# Weights.append(None)","# print(Weights.count(None))","# Original vertex colorization code","'''\n # Weights.sort( key = lambda wgh: wgh[0])\n if bImportmesh:\n VtxCol = []\n bones_count = len(psk_bones)\n for x in range(bones_count):\n #change the overall darkness of each material in a range between 0.1 and 0.9\n tmpVal = ((float(x) + 1.0) \/ bones_count * 0.7) + 0.1\n tmpVal = int(tmpVal * 256)\n tmpCol = [tmpVal, tmpVal, tmpVal, 0]\n #Change the color of each material slightly\n if x % 3 == 0:\n if tmpCol[0] < 128:\n tmpCol[0] += 60\n else:\n tmpCol[0] -= 60\n if x % 3 == 1:\n if tmpCol[1] < 128:\n tmpCol[1] += 60\n else:\n tmpCol[1] -= 60\n if x % 3 == 2:\n if tmpCol[2] < 128:\n tmpCol[2] += 60\n else:\n tmpCol[2] -= 60\n #Add the material to the mesh\n VtxCol.append(tmpCol)\n \n for x in range(len(Tmsh.faces)):\n for y in range(len(Tmsh.faces[x].v)):\n #find v in Weights[n][0]\n findVal = Tmsh.faces[x].v[y].index\n n = 0\n while findVal != Weights[n][0]:\n n = n + 1\n TmpCol = VtxCol[Weights[n][1]]\n #check if a vertex has more than one influence\n if n != len(Weights) - 1:\n if Weights[n][0] == Weights[n + 1][0]:\n #if there is more than one influence, use the one with the greater influence\n #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small\n if Weights[n][2] < Weights[n + 1][2]:\n TmpCol = VtxCol[Weights[n + 1][1]]\n Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0], TmpCol[1], TmpCol[2], 0))\n '''","#===================================================================================================","# UV. Setup.","if","bImportmesh",":","# Trick! Create UV maps BEFORE mesh and get (0,0) coordinates for free!","# ...otherwise UV coords will be copied from active, or calculated from mesh...","if","bSpltiUVdata",":","for","i","in","range","(","len","(","uv_mat_ids",")",")",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","NAME_UV_PREFIX","+","str","(","i",")",")","else",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","NAME_UV_PREFIX","+","\"_SINGLE\"",")","for","counter",",","uv_data","in","enumerate","(","Extrauvs",")",":","if","len","(","mesh_data",".","uv_layers",")","<","MAX_UVS",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","\"EXTRAUVS\"","+","str","(","counter",")",")","else",":","Extrauvs",".","remove","(","uv_data",")","print","(","'Extra UV layer %s is ignored. Re-import without \"Split UV data\".'","%","counter",")","#================================================================================================== ","# Mesh. 
Build.","mesh_data",".","from_pydata","(","Vertices",",","[","]",",","Faces",")","#==================================================================================================","# Vertex Normal. Set.","if","Normals","is","not","None",":","mesh_data",".","polygons",".","foreach_set","(","\"use_smooth\"",",","[","True","]","*","len","(","mesh_data",".","polygons",")",")","mesh_data",".","normals_split_custom_set_from_vertices","(","Normals",")","mesh_data",".","use_auto_smooth","=","True","#===================================================================================================","# UV. Set.","if","bImportmesh",":","for","face","in","mesh_data",".","polygons",":","face",".","material_index","=","UV_by_face","[","face",".","index","]","[","1","]","uv_layers","=","mesh_data",".","uv_layers","if","not","bSpltiUVdata",":","uvLayer","=","uv_layers","[","0","]","# per face","# for faceIdx, (faceUVs, faceMatIdx, _, _, wmidx) in enumerate(UV_by_face):","for","faceIdx",",","(","faceUVs",",","faceMatIdx",",","WedgeMatIds",")","in","enumerate","(","UV_by_face",")",":","# per vertex","for","vertN",",","uv","in","enumerate","(","faceUVs",")",":","loopId","=","faceIdx","*","3","+","vertN","if","bSpltiUVdata",":","uvLayer","=","uv_layers","[","WedgeMatIds","[","vertN","]","]","uvLayer",".","data","[","loopId","]",".","uv","=","uv","#===================================================================================================","# Extra UVs. Set.","for","counter",",","uv_data","in","enumerate","(","Extrauvs",")",":","uvLayer","=","mesh_data",".","uv_layers","[","counter","-","len","(","Extrauvs",")","]","for","uv_index",",","uv_coords","in","enumerate","(","uv_data",")",":","uvLayer",".","data","[","uv_index","]",".","uv","=","(","uv_coords","[","0","]",",","1.0","-","uv_coords","[","1","]",")","#===================================================================================================","# Mesh. Vertex Groups. Bone Weights.","for","psk_bone","in","psk_bones",":","if","psk_bone",".","have_weight_data",":","psk_bone",".","vertex_group","=","mesh_obj",".","vertex_groups",".","new","(","psk_bone",".","name",")","# else:","# print(psk_bone.name, 'have no influence on this mesh')","for","weight",",","vertex_id",",","bone_index_w","in","filter","(","None",",","Weights",")",":","psk_bones","[","bone_index_w","]",".","vertex_group",".","add","(","(","vertex_id",",",")",",","weight",",","'ADD'",")","#===================================================================================================","# Skeleton. 
Colorize.","if","bImportbone",":","bone_group_unused","=","armature_obj",".","pose",".","bone_groups",".","new","(","\"Unused bones\"",")","bone_group_unused",".","color_set","=","'THEME14'","bone_group_nochild","=","armature_obj",".","pose",".","bone_groups",".","new","(","\"No children\"",")","bone_group_nochild",".","color_set","=","'THEME03'","armature_data",".","show_group_colors","=","True","for","psk_bone","in","psk_bones",":","pose_bone","=","armature_obj",".","pose",".","bones","[","psk_bone",".","name","]","if","psk_bone",".","have_weight_data",":","if","len","(","psk_bone",".","children",")","==","0",":","pose_bone",".","bone_group","=","bone_group_nochild","else",":","pose_bone",".","bone_group","=","bone_group_unused","#===================================================================================================","# Final","if","bImportmesh",":","util_obj_link","(","context",",","mesh_obj",")","util_select_all","(","False",")","if","not","bImportbone",":","util_obj_select","(","context",",","mesh_obj",")","util_obj_set_active","(","context",",","mesh_obj",")","else",":","# select_all(False)","util_obj_select","(","context",",","armature_obj",")","# parenting mesh to armature object","mesh_obj",".","parent","=","armature_obj","mesh_obj",".","parent_type","=","'OBJECT'","# add armature modifier","blender_modifier","=","mesh_obj",".","modifiers",".","new","(","armature_obj",".","data",".","name",",","type","=","'ARMATURE'",")","blender_modifier",".","show_expanded","=","False","blender_modifier",".","use_vertex_groups","=","True","blender_modifier",".","use_bone_envelopes","=","False","blender_modifier",".","object","=","armature_obj","# utils_set_mode('OBJECT')","# select_all(False)","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")","# print(\"Done: %f sec.\" % (time.process_time() - ref_time))","utils_set_mode","(","'OBJECT'",")","return","True"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_270.py#L273-L1036"} |
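A hedged end-to-end usage sketch for pskimport above; the module import path is taken from the record's "path" field, and the file path and option values are placeholders, not part of the record.

    import bpy
    from io_import_scene_unreal_psa_psk_270 import pskimport  # assumed importable as a module

    ok = pskimport(
        r'C:\exports\SK_Mannequin.psk',   # hypothetical .psk file
        context=bpy.context,
        bImportmesh=True,
        bImportbone=True,
        bSpltiUVdata=False,               # parameter name as spelled in the addon
        fBonesize=5.0,
        fBonesizeRatio=0.6,
        bDontInvertRoot=False,
        bReorientBones=False,
        error_callback=lambda msg: print('reason:', msg),
    )
    print('imported' if ok else 'import failed')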
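The core of pskimport's reader loop can also be illustrated standalone: each section begins with a 32-byte VChunkHeader (ChunkID 20s | TypeFlag | DataSize | DataCount) followed by DataSize * DataCount payload bytes, and dispatch happens on the first 8 characters of the chunk name. A sketch under those assumptions; util_bytes_to_str is approximated here by splitting on the NUL byte.

    from struct import unpack

    def walk_chunks(path):
        '''Yield (chunk_name, data_count, payload) for every chunk in a .psk/.psa file.'''
        with open(path, 'rb') as f:
            while True:
                header = f.read(32)
                if len(header) < 32:      # clean EOF (or a truncated trailing header)
                    break
                chunk_id, _type_flag, datasize, datacount = unpack('20s3i', header)
                name = chunk_id.split(b'\x00')[0].decode('ascii', 'ignore')[:8]
                payload = f.read(datasize * datacount)
                yield name, datacount, payload

    # e.g. print the chunk layout of a hypothetical file:
    # for name, count, _ in walk_chunks('SK_Mannequin.psk'):
    #     print(name, count)   # PNTS0000, VTXW0000, FACE0000, MATT0000, REFSKELT, ...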
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_270.py","language":"python","identifier":"psaimport","parameters":"(filepath,\n context = bpy.context,\n oArmature = None,\n bFilenameAsPrefix = False,\n bActionsToTrack = False,\n first_frames = 0,\n bDontInvertRoot = False,\n bUpdateTimelineRange = False,\n fcurve_interpolation = 'LINEAR',\n error_callback = __pass\n )","argument_list":"","return_statement":"","docstring":"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes","docstring_summary":"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes","docstring_tokens":["Import","animation","data","from","filepath","using","oArmature","Args",":","first_frames",":","(","0","-","import","all",")","Import","only","first_frames","from","each","action","bActionsToTrack",":","Put","all","imported","actions","in","one","NLAtrack",".","oArmature",":","Skeleton","used","to","calculate","keyframes"],"function":"def psaimport(filepath,\n context = bpy.context,\n oArmature = None,\n bFilenameAsPrefix = False,\n bActionsToTrack = False,\n first_frames = 0,\n bDontInvertRoot = False,\n bUpdateTimelineRange = False,\n fcurve_interpolation = 'LINEAR',\n error_callback = __pass\n ):\n \"\"\"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes\n \"\"\"\n print (\"-----------------------------------------------\")\n print (\"---------EXECUTING PSA PYTHON IMPORTER---------\")\n print (\"-----------------------------------------------\")\n \n file_ext = 'psa'\n try:\n psafile = open(filepath, 'rb')\n except IOError:\n error_callback('Error while opening file for reading:\\n \"'+filepath+'\"')\n return False\n \n print (\"Importing file: \", filepath)\n \n armature_obj = oArmature\n \n if armature_obj is None: \n armature_obj = blen_get_armature_from_selection()\n if armature_obj is None:\n error_callback(\"No armature selected.\")\n return False\n\n\n chunk_id = None\n chunk_type = None\n chunk_datasize = None\n chunk_datacount = None\n chunk_data = None\n\n def read_chunk():\n nonlocal chunk_id, chunk_type,\\\n chunk_datasize, chunk_datacount,\\\n chunk_data\n\n (chunk_id, chunk_type,\n chunk_datasize, chunk_datacount) = unpack('20s3i', psafile.read(32))\n \n chunk_data = psafile.read(chunk_datacount * chunk_datasize)\n #============================================================================================== \n # General Header\n #============================================================================================== \n read_chunk()\n \n if not util_is_header_valid(filepath, file_ext, chunk_id, error_callback):\n return False\n \n #============================================================================================== \n # Bones (FNamedBoneBinary)\n #============================================================================================== \n read_chunk()\n \n psa_bones 
= {}\n \n def new_psa_bone(bone, pose_bone):\n psa_bone = class_psa_bone()\n \n psa_bones[pose_bone.name] = psa_bone\n \n psa_bone.name = pose_bone.name\n \n psa_bone.pose_bone = pose_bone\n \n if bone.parent != None:\n # does needed parent bone was added from psa file\n if bone.parent.name in psa_bones:\n psa_bone.parent = psa_bones[bone.parent.name]\n # no. armature doesnt match\n else:\n psa_bone.parent = None\n # else:\n # psa_bone.parent = None\n \n psa_bone.orig_quat = Quaternion(bone['orig_quat'])\n psa_bone.orig_loc = Vector(bone['orig_loc'])\n psa_bone.post_quat = Quaternion(bone['post_quat'])\n return psa_bone\n \n #Bones Data\n BoneIndex2Name = [None] * chunk_datacount\n BoneNotFoundList = []\n BonesWithoutAnimation = []\n PsaBonesToProcess = [None] * chunk_datacount\n\n # printlog(\"Name\\tFlgs\\tNumChld\\tPrntIdx\\tQx\\tQy\\tQz\\tQw\\tLocX\\tLocY\\tLocZ\\tLength\\tXSize\\tYSize\\tZSize\\n\")\n\n \n # for case insensetive comparison\n # key = lowered name\n # value = orignal name\n skeleton_bones_lowered = {}\n \n for blender_bone_name in armature_obj.data.bones.keys():\n skeleton_bones_lowered[blender_bone_name.lower()] = blender_bone_name\n\n \n for counter in range(chunk_datacount):\n \n # tPrntIdx is -1 for parent; and 0 for other; no more useful data\n # indata = unpack_from('64s3i11f', chunk_data, chunk_datasize * counter)\n (indata) = unpack_from('64s56x', chunk_data, chunk_datasize * counter)\n in_name = util_bytes_to_str(indata[0])\n # bonename = util_bytes_to_str(indata[0]).upper()\n \n in_name_lowered = in_name.lower()\n if in_name_lowered in skeleton_bones_lowered:\n orig_name = skeleton_bones_lowered[in_name_lowered]\n \n # use a skeleton bone name \n BoneIndex2Name[counter] = orig_name\n PsaBonesToProcess[counter] = new_psa_bone(armature_obj.data.bones[orig_name], \n armature_obj.pose.bones[orig_name])\n else:\n # print(\"Can't find the bone:\", bonename)\n BoneNotFoundList.append(counter)\n \n \n if len(psa_bones) == 0:\n error_callback('No bone was match!\\nSkip import!')\n return False\n \n # does anyone care?\n for blender_bone_name in armature_obj.data.bones.keys():\n if BoneIndex2Name.count(blender_bone_name) == 0:\n BonesWithoutAnimation.append(blender_bone_name)\n \n if len(BoneNotFoundList) > 0:\n print('Not found bones: %i.' 
% len(BoneNotFoundList));\n \n if len(BonesWithoutAnimation) > 0:\n print('Bones(%i) without animation data:\\n' % len(BonesWithoutAnimation), ', '.join(BonesWithoutAnimation))\n #============================================================================================== \n # Animations (AniminfoBinary)\n #============================================================================================== \n read_chunk()\n\n Raw_Key_Nums = 0\n Action_List = [None] * chunk_datacount\n \n for counter in range(chunk_datacount):\n (action_name_raw, #0\n group_name_raw, #1\n Totalbones, #2\n RootInclude, #3\n KeyCompressionStyle, #4\n KeyQuotum, #5\n KeyReduction, #6\n TrackTime, #7\n AnimRate, #8\n StartBone, #9\n FirstRawFrame, #10\n NumRawFrames #11\n ) = unpack_from('64s64s4i3f3i', chunk_data, chunk_datasize * counter)\n \n action_name = util_bytes_to_str( action_name_raw )\n group_name = util_bytes_to_str( group_name_raw )\n\n Raw_Key_Nums += Totalbones * NumRawFrames\n Action_List[counter] = ( action_name, group_name, Totalbones, NumRawFrames)\n \n #============================================================================================== \n # Raw keys (VQuatAnimKey) 3f vec, 4f quat, 1f time\n #============================================================================================== \n read_chunk()\n \n if(Raw_Key_Nums != chunk_datacount):\n error_callback(\n 'Raw_Key_Nums Inconsistent.'\n '\\nData count found: '+chunk_datacount+\n '\\nRaw_Key_Nums:' + Raw_Key_Nums\n )\n return False\n\n Raw_Key_List = [None] * chunk_datacount\n \n unpack_data = Struct('3f4f4x').unpack_from\n \n for counter in range(chunk_datacount):\n pos = Vector()\n quat = Quaternion()\n \n ( pos.x, pos.y, pos.z,\n quat.x, quat.y, quat.z, quat.w\n ) = unpack_data( chunk_data, chunk_datasize * counter)\n \n Raw_Key_List[counter] = (pos, quat)\n \n psafile.close()\n \n utils_set_mode('OBJECT')\n\n # index of current frame in raw input data\n raw_key_index = 0\n \n util_obj_set_active(context, armature_obj)\n \n gen_name_part = util_gen_name_part(filepath)\n \n armature_obj.animation_data_create()\n \n if bActionsToTrack:\n nla_track = armature_obj.animation_data.nla_tracks.new()\n nla_track.name = gen_name_part\n nla_stripes = nla_track.strips\n nla_track_last_frame = 0\n else:\n is_first_action = True\n first_action = None\n \n for counter, (Name, Group, Totalbones, NumRawFrames) in enumerate(Action_List):\n ref_time = time.process_time()\n \n if Group != 'None':\n Name = \"(%s) %s\" % (Group,Name)\n if bFilenameAsPrefix:\n Name = \"(%s) %s\" % (gen_name_part, Name)\n \n action = bpy.data.actions.new(name = Name)\n \n # force print usefull information to console(due to possible long execution)\n print(\"Action {0:>3d}\/{1:<3d} frames: {2:>4d} {3}\".format(\n counter+1, len(Action_List), NumRawFrames, Name)\n )\n \n if first_frames > 0:\n maxframes = first_frames\n keyframes = min(first_frames, NumRawFrames)\n #dev\n # keyframes += 1\n else:\n maxframes = 99999999\n keyframes = NumRawFrames\n \n # create all fcurves(for all bones) for an action\n # for pose_bone in armature_obj.pose.bones:\n for psa_bone in PsaBonesToProcess:\n if psa_bone is None:\n continue\n pose_bone = psa_bone.pose_bone\n \n data_path = pose_bone.path_from_id(\"rotation_quaternion\")\n psa_bone.fcurve_quat_w = action.fcurves.new(data_path, index = 0)\n psa_bone.fcurve_quat_x = action.fcurves.new(data_path, index = 1)\n psa_bone.fcurve_quat_y = action.fcurves.new(data_path, index = 2)\n psa_bone.fcurve_quat_z = action.fcurves.new(data_path, 
index = 3)\n \n data_path = pose_bone.path_from_id(\"location\")\n psa_bone.fcurve_loc_x = action.fcurves.new(data_path, index = 0)\n psa_bone.fcurve_loc_y = action.fcurves.new(data_path, index = 1)\n psa_bone.fcurve_loc_z = action.fcurves.new(data_path, index = 2)\n \n # 1. Pre-add keyframes! \\0\/\n # 2. Set data: keyframe_points[].co[0..1]\n # 3. If 2 is not done, do 4: (important!!!)\n # 4. \"Apply\" data: fcurve.update()\n # added keyframes points by default is breaking fcurve somehow\n # bcs they are all at the same position?\n psa_bone.fcurve_quat_w.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_x.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_y.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_z.keyframe_points.add(keyframes)\n\n psa_bone.fcurve_loc_x.keyframe_points.add(keyframes) \n psa_bone.fcurve_loc_y.keyframe_points.add(keyframes) \n psa_bone.fcurve_loc_z.keyframe_points.add(keyframes) \n \n for i in range(0,min(maxframes, NumRawFrames)):\n # raw_key_index+= Totalbones * 5 #55\n for j in range(Totalbones):\n if j in BoneNotFoundList:\n raw_key_index += 1\n continue\n \n psa_bone = PsaBonesToProcess[j]\n pose_bone = psa_bone.pose_bone\n \n p_pos = Raw_Key_List[raw_key_index][0]\n p_quat = Raw_Key_List[raw_key_index][1]\n \n ##### Worked with no bone rotation\n # quat = p_quat.conjugated() * psa_bone.orig_quat\n # loc = p_pos - psa_bone.orig_loc\n #####\n \n\n if psa_bone.parent:\n ##### Correct\n # orig_prot = pose_bone.bone.parent.matrix_local.to_3x3().to_quaternion()\n # orig_rot = pose_bone.bone.matrix_local.to_3x3().to_quaternion()\n # orig_rot = (orig_prot.conjugated() * orig_rot)\n ######\n\n #### FINAL\n quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n # loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc\n ####\n else:\n if bDontInvertRoot:\n quat = (p_quat.conjugated() * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n else:\n quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n \n loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc\n \n pose_bone.rotation_quaternion = quat\n pose_bone.location = loc\n # pose_bone.rotation_quaternion = orig_rot.conjugated()\n # pose_bone.location = p_pos - (pose_bone.bone.matrix_local.translation - pose_bone.bone.parent.matrix_local.translation)\n \n ##### Works + post_quat (without location works)\n # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n # loc = psa_bone.post_quat.conjugated() * (p_pos - psa_bone.orig_loc)\n\n \n psa_bone.fcurve_quat_w.keyframe_points[i].co = i, quat.w\n psa_bone.fcurve_quat_x.keyframe_points[i].co = i, quat.x\n psa_bone.fcurve_quat_y.keyframe_points[i].co = i, quat.y\n psa_bone.fcurve_quat_z.keyframe_points[i].co = i, quat.z\n \n psa_bone.fcurve_quat_w.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_x.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_y.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_z.keyframe_points[i].interpolation = fcurve_interpolation\n \n psa_bone.fcurve_loc_x.keyframe_points[i].co = i, loc.x\n psa_bone.fcurve_loc_y.keyframe_points[i].co = i, loc.y\n psa_bone.fcurve_loc_z.keyframe_points[i].co = i, loc.z\n \n psa_bone.fcurve_loc_x.keyframe_points[i].interpolation = fcurve_interpolation\n 
psa_bone.fcurve_loc_y.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_loc_z.keyframe_points[i].interpolation = fcurve_interpolation\n \n # Old path. Slower.\n # psa_bone.fcurve_quat_w.keyframe_points.insert(i,quat.w,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_x.keyframe_points.insert(i,quat.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_y.keyframe_points.insert(i,quat.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_z.keyframe_points.insert(i,quat.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n\n # psa_bone.fcurve_loc_x.keyframe_points.insert(i,loc.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_loc_y.keyframe_points.insert(i,loc.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_loc_z.keyframe_points.insert(i,loc.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n raw_key_index += 1\n \n # on first frame\n # break\n raw_key_index += (NumRawFrames-min(maxframes,NumRawFrames)) * Totalbones\n\n # Add action to tail of the nla track\n if bActionsToTrack:\n if nla_track_last_frame == 0:\n nla_stripes.new(Name, 0, action)\n else:\n nla_stripes.new(Name, nla_stripes[-1].frame_end, action)\n\n nla_track_last_frame += NumRawFrames\n elif is_first_action:\n first_action = action\n is_first_action = False\n \n print(\"Done: %f sec.\" % (time.process_time() - ref_time))\n # break on first animation set\n # break\n \n scene = util_get_scene(context)\n \n if not bActionsToTrack:\n if not scene.is_nla_tweakmode:\n armature_obj.animation_data.action = first_action\n \n if bUpdateTimelineRange:\n\n scene.frame_start = 0\n\n if bActionsToTrack:\n scene.frame_end = sum(frames for _, _, _, frames in Action_List) - 1\n else:\n scene.frame_end = max(frames for _, _, _, frames in Action_List) - 1\n\n\n util_select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)\n \n # 2.8 crashes\n if not is_blen_280:\n scene.frame_set(0)","function_tokens":["def","psaimport","(","filepath",",","context","=","bpy",".","context",",","oArmature","=","None",",","bFilenameAsPrefix","=","False",",","bActionsToTrack","=","False",",","first_frames","=","0",",","bDontInvertRoot","=","False",",","bUpdateTimelineRange","=","False",",","fcurve_interpolation","=","'LINEAR'",",","error_callback","=","__pass",")",":","print","(","\"-----------------------------------------------\"",")","print","(","\"---------EXECUTING PSA PYTHON IMPORTER---------\"",")","print","(","\"-----------------------------------------------\"",")","file_ext","=","'psa'","try",":","psafile","=","open","(","filepath",",","'rb'",")","except","IOError",":","error_callback","(","'Error while opening file for reading:\\n \"'","+","filepath","+","'\"'",")","return","False","print","(","\"Importing file: \"",",","filepath",")","armature_obj","=","oArmature","if","armature_obj","is","None",":","armature_obj","=","blen_get_armature_from_selection","(",")","if","armature_obj","is","None",":","error_callback","(","\"No armature 
selected.\"",")","return","False","chunk_id","=","None","chunk_type","=","None","chunk_datasize","=","None","chunk_datacount","=","None","chunk_data","=","None","def","read_chunk","(",")",":","nonlocal","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",",","chunk_data","(","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",")","=","unpack","(","'20s3i'",",","psafile",".","read","(","32",")",")","chunk_data","=","psafile",".","read","(","chunk_datacount","*","chunk_datasize",")","#============================================================================================== ","# General Header","#============================================================================================== ","read_chunk","(",")","if","not","util_is_header_valid","(","filepath",",","file_ext",",","chunk_id",",","error_callback",")",":","return","False","#============================================================================================== ","# Bones (FNamedBoneBinary)","#============================================================================================== ","read_chunk","(",")","psa_bones","=","{","}","def","new_psa_bone","(","bone",",","pose_bone",")",":","psa_bone","=","class_psa_bone","(",")","psa_bones","[","pose_bone",".","name","]","=","psa_bone","psa_bone",".","name","=","pose_bone",".","name","psa_bone",".","pose_bone","=","pose_bone","if","bone",".","parent","!=","None",":","# does needed parent bone was added from psa file","if","bone",".","parent",".","name","in","psa_bones",":","psa_bone",".","parent","=","psa_bones","[","bone",".","parent",".","name","]","# no. armature doesnt match","else",":","psa_bone",".","parent","=","None","# else:","# psa_bone.parent = None","psa_bone",".","orig_quat","=","Quaternion","(","bone","[","'orig_quat'","]",")","psa_bone",".","orig_loc","=","Vector","(","bone","[","'orig_loc'","]",")","psa_bone",".","post_quat","=","Quaternion","(","bone","[","'post_quat'","]",")","return","psa_bone","#Bones Data","BoneIndex2Name","=","[","None","]","*","chunk_datacount","BoneNotFoundList","=","[","]","BonesWithoutAnimation","=","[","]","PsaBonesToProcess","=","[","None","]","*","chunk_datacount","# printlog(\"Name\\tFlgs\\tNumChld\\tPrntIdx\\tQx\\tQy\\tQz\\tQw\\tLocX\\tLocY\\tLocZ\\tLength\\tXSize\\tYSize\\tZSize\\n\")","# for case insensetive comparison","# key = lowered name","# value = orignal name","skeleton_bones_lowered","=","{","}","for","blender_bone_name","in","armature_obj",".","data",".","bones",".","keys","(",")",":","skeleton_bones_lowered","[","blender_bone_name",".","lower","(",")","]","=","blender_bone_name","for","counter","in","range","(","chunk_datacount",")",":","# tPrntIdx is -1 for parent; and 0 for other; no more useful data","# indata = unpack_from('64s3i11f', chunk_data, chunk_datasize * counter)","(","indata",")","=","unpack_from","(","'64s56x'",",","chunk_data",",","chunk_datasize","*","counter",")","in_name","=","util_bytes_to_str","(","indata","[","0","]",")","# bonename = util_bytes_to_str(indata[0]).upper()","in_name_lowered","=","in_name",".","lower","(",")","if","in_name_lowered","in","skeleton_bones_lowered",":","orig_name","=","skeleton_bones_lowered","[","in_name_lowered","]","# use a skeleton bone name ","BoneIndex2Name","[","counter","]","=","orig_name","PsaBonesToProcess","[","counter","]","=","new_psa_bone","(","armature_obj",".","data",".","bones","[","orig_name","]",",","armature_obj",".","pose",".","bones","[","orig_name","]",")","else",":","# print(\"Can't find the bone:\", 
bonename)","BoneNotFoundList",".","append","(","counter",")","if","len","(","psa_bones",")","==","0",":","error_callback","(","'No bone was match!\\nSkip import!'",")","return","False","# does anyone care?","for","blender_bone_name","in","armature_obj",".","data",".","bones",".","keys","(",")",":","if","BoneIndex2Name",".","count","(","blender_bone_name",")","==","0",":","BonesWithoutAnimation",".","append","(","blender_bone_name",")","if","len","(","BoneNotFoundList",")",">","0",":","print","(","'Not found bones: %i.'","%","len","(","BoneNotFoundList",")",")","if","len","(","BonesWithoutAnimation",")",">","0",":","print","(","'Bones(%i) without animation data:\\n'","%","len","(","BonesWithoutAnimation",")",",","', '",".","join","(","BonesWithoutAnimation",")",")","#============================================================================================== ","# Animations (AniminfoBinary)","#============================================================================================== ","read_chunk","(",")","Raw_Key_Nums","=","0","Action_List","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","(","action_name_raw",",","#0","group_name_raw",",","#1","Totalbones",",","#2","RootInclude",",","#3","KeyCompressionStyle",",","#4","KeyQuotum",",","#5","KeyReduction",",","#6","TrackTime",",","#7","AnimRate",",","#8","StartBone",",","#9","FirstRawFrame",",","#10","NumRawFrames","#11",")","=","unpack_from","(","'64s64s4i3f3i'",",","chunk_data",",","chunk_datasize","*","counter",")","action_name","=","util_bytes_to_str","(","action_name_raw",")","group_name","=","util_bytes_to_str","(","group_name_raw",")","Raw_Key_Nums","+=","Totalbones","*","NumRawFrames","Action_List","[","counter","]","=","(","action_name",",","group_name",",","Totalbones",",","NumRawFrames",")","#============================================================================================== ","# Raw keys (VQuatAnimKey) 3f vec, 4f quat, 1f time","#============================================================================================== ","read_chunk","(",")","if","(","Raw_Key_Nums","!=","chunk_datacount",")",":","error_callback","(","'Raw_Key_Nums Inconsistent.'","'\\nData count found: '","+","chunk_datacount","+","'\\nRaw_Key_Nums:'","+","Raw_Key_Nums",")","return","False","Raw_Key_List","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f4f4x'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","pos","=","Vector","(",")","quat","=","Quaternion","(",")","(","pos",".","x",",","pos",".","y",",","pos",".","z",",","quat",".","x",",","quat",".","y",",","quat",".","z",",","quat",".","w",")","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","Raw_Key_List","[","counter","]","=","(","pos",",","quat",")","psafile",".","close","(",")","utils_set_mode","(","'OBJECT'",")","# index of current frame in raw input 
data","raw_key_index","=","0","util_obj_set_active","(","context",",","armature_obj",")","gen_name_part","=","util_gen_name_part","(","filepath",")","armature_obj",".","animation_data_create","(",")","if","bActionsToTrack",":","nla_track","=","armature_obj",".","animation_data",".","nla_tracks",".","new","(",")","nla_track",".","name","=","gen_name_part","nla_stripes","=","nla_track",".","strips","nla_track_last_frame","=","0","else",":","is_first_action","=","True","first_action","=","None","for","counter",",","(","Name",",","Group",",","Totalbones",",","NumRawFrames",")","in","enumerate","(","Action_List",")",":","ref_time","=","time",".","process_time","(",")","if","Group","!=","'None'",":","Name","=","\"(%s) %s\"","%","(","Group",",","Name",")","if","bFilenameAsPrefix",":","Name","=","\"(%s) %s\"","%","(","gen_name_part",",","Name",")","action","=","bpy",".","data",".","actions",".","new","(","name","=","Name",")","# force print usefull information to console(due to possible long execution)","print","(","\"Action {0:>3d}\/{1:<3d} frames: {2:>4d} {3}\"",".","format","(","counter","+","1",",","len","(","Action_List",")",",","NumRawFrames",",","Name",")",")","if","first_frames",">","0",":","maxframes","=","first_frames","keyframes","=","min","(","first_frames",",","NumRawFrames",")","#dev","# keyframes += 1","else",":","maxframes","=","99999999","keyframes","=","NumRawFrames","# create all fcurves(for all bones) for an action","# for pose_bone in armature_obj.pose.bones:","for","psa_bone","in","PsaBonesToProcess",":","if","psa_bone","is","None",":","continue","pose_bone","=","psa_bone",".","pose_bone","data_path","=","pose_bone",".","path_from_id","(","\"rotation_quaternion\"",")","psa_bone",".","fcurve_quat_w","=","action",".","fcurves",".","new","(","data_path",",","index","=","0",")","psa_bone",".","fcurve_quat_x","=","action",".","fcurves",".","new","(","data_path",",","index","=","1",")","psa_bone",".","fcurve_quat_y","=","action",".","fcurves",".","new","(","data_path",",","index","=","2",")","psa_bone",".","fcurve_quat_z","=","action",".","fcurves",".","new","(","data_path",",","index","=","3",")","data_path","=","pose_bone",".","path_from_id","(","\"location\"",")","psa_bone",".","fcurve_loc_x","=","action",".","fcurves",".","new","(","data_path",",","index","=","0",")","psa_bone",".","fcurve_loc_y","=","action",".","fcurves",".","new","(","data_path",",","index","=","1",")","psa_bone",".","fcurve_loc_z","=","action",".","fcurves",".","new","(","data_path",",","index","=","2",")","# 1. Pre-add keyframes! \\0\/","# 2. Set data: keyframe_points[].co[0..1]","# 3. If 2 is not done, do 4: (important!!!)","# 4. 
\"Apply\" data: fcurve.update()","# added keyframes points by default is breaking fcurve somehow","# bcs they are all at the same position?","psa_bone",".","fcurve_quat_w",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_x",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_y",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_z",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_loc_x",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_loc_y",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_loc_z",".","keyframe_points",".","add","(","keyframes",")","for","i","in","range","(","0",",","min","(","maxframes",",","NumRawFrames",")",")",":","# raw_key_index+= Totalbones * 5 #55","for","j","in","range","(","Totalbones",")",":","if","j","in","BoneNotFoundList",":","raw_key_index","+=","1","continue","psa_bone","=","PsaBonesToProcess","[","j","]","pose_bone","=","psa_bone",".","pose_bone","p_pos","=","Raw_Key_List","[","raw_key_index","]","[","0","]","p_quat","=","Raw_Key_List","[","raw_key_index","]","[","1","]","##### Worked with no bone rotation","# quat = p_quat.conjugated() * psa_bone.orig_quat","# loc = p_pos - psa_bone.orig_loc","#####","if","psa_bone",".","parent",":","##### Correct","# orig_prot = pose_bone.bone.parent.matrix_local.to_3x3().to_quaternion()","# orig_rot = pose_bone.bone.matrix_local.to_3x3().to_quaternion()","# orig_rot = (orig_prot.conjugated() * orig_rot)","######","#### FINAL","quat","=","(","p_quat","*","psa_bone",".","post_quat",")",".","conjugated","(",")","*","(","psa_bone",".","orig_quat","*","psa_bone",".","post_quat",")","# loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc","####","else",":","if","bDontInvertRoot",":","quat","=","(","p_quat",".","conjugated","(",")","*","psa_bone",".","post_quat",")",".","conjugated","(",")","*","(","psa_bone",".","orig_quat","*","psa_bone",".","post_quat",")","else",":","quat","=","(","p_quat","*","psa_bone",".","post_quat",")",".","conjugated","(",")","*","(","psa_bone",".","orig_quat","*","psa_bone",".","post_quat",")","loc","=","psa_bone",".","post_quat",".","conjugated","(",")","*","p_pos","-","psa_bone",".","post_quat",".","conjugated","(",")","*","psa_bone",".","orig_loc","pose_bone",".","rotation_quaternion","=","quat","pose_bone",".","location","=","loc","# pose_bone.rotation_quaternion = orig_rot.conjugated()","# pose_bone.location = p_pos - (pose_bone.bone.matrix_local.translation - pose_bone.bone.parent.matrix_local.translation)","##### Works + post_quat (without location works)","# quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)","# loc = psa_bone.post_quat.conjugated() * (p_pos - 
psa_bone.orig_loc)","psa_bone",".","fcurve_quat_w",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","w","psa_bone",".","fcurve_quat_x",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","x","psa_bone",".","fcurve_quat_y",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","y","psa_bone",".","fcurve_quat_z",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","z","psa_bone",".","fcurve_quat_w",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_x",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_y",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_z",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_loc_x",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","x","psa_bone",".","fcurve_loc_y",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","y","psa_bone",".","fcurve_loc_z",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","z","psa_bone",".","fcurve_loc_x",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_loc_y",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_loc_z",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","# Old path. Slower.","# psa_bone.fcurve_quat_w.keyframe_points.insert(i,quat.w,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_x.keyframe_points.insert(i,quat.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_y.keyframe_points.insert(i,quat.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_z.keyframe_points.insert(i,quat.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_x.keyframe_points.insert(i,loc.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_y.keyframe_points.insert(i,loc.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_z.keyframe_points.insert(i,loc.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","raw_key_index","+=","1","# on first frame","# break","raw_key_index","+=","(","NumRawFrames","-","min","(","maxframes",",","NumRawFrames",")",")","*","Totalbones","# Add action to tail of the nla track","if","bActionsToTrack",":","if","nla_track_last_frame","==","0",":","nla_stripes",".","new","(","Name",",","0",",","action",")","else",":","nla_stripes",".","new","(","Name",",","nla_stripes","[","-","1","]",".","frame_end",",","action",")","nla_track_last_frame","+=","NumRawFrames","elif","is_first_action",":","first_action","=","action","is_first_action","=","False","print","(","\"Done: %f sec.\"","%","(","time",".","process_time","(",")","-","ref_time",")",")","# break on first animation set","# 
break","scene","=","util_get_scene","(","context",")","if","not","bActionsToTrack",":","if","not","scene",".","is_nla_tweakmode",":","armature_obj",".","animation_data",".","action","=","first_action","if","bUpdateTimelineRange",":","scene",".","frame_start","=","0","if","bActionsToTrack",":","scene",".","frame_end","=","sum","(","frames","for","_",",","_",",","_",",","frames","in","Action_List",")","-","1","else",":","scene",".","frame_end","=","max","(","frames","for","_",",","_",",","_",",","frames","in","Action_List",")","-","1","util_select_all","(","False",")","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")","# 2.8 crashes","if","not","is_blen_280",":","scene",".","frame_set","(","0",")"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_270.py#L1078-L1484"} |
|
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"util_is_header_valid","parameters":"(filename, file_ext, chunk_id, error_callback)","argument_list":"","return_statement":"return True","docstring":"Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.","docstring_summary":"Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.","docstring_tokens":["Return","True","if","chunk_id","is","a","valid","psk","\/","psa","(","file_ext",")","magick","number","."],"function":"def util_is_header_valid(filename, file_ext, chunk_id, error_callback):\n '''Return True if chunk_id is a valid psk\/psa (file_ext) 'magick number'.'''\n if chunk_id != PSKPSA_FILE_HEADER[file_ext]:\n error_callback(\n \"File %s is not a %s file. (header mismach)\\nExpected: %s \\nPresent %s\" % ( \n filename, file_ext,\n PSKPSA_FILE_HEADER[file_ext], chunk_id)\n ) \n return False\n return True","function_tokens":["def","util_is_header_valid","(","filename",",","file_ext",",","chunk_id",",","error_callback",")",":","if","chunk_id","!=","PSKPSA_FILE_HEADER","[","file_ext","]",":","error_callback","(","\"File %s is not a %s file. (header mismach)\\nExpected: %s \\nPresent %s\"","%","(","filename",",","file_ext",",","PSKPSA_FILE_HEADER","[","file_ext","]",",","chunk_id",")",")","return","False","return","True"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L171-L180"} |
|
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"util_gen_name_part","parameters":"(filepath)","argument_list":"","return_statement":"return re.match(r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$', filepath).group(1)","docstring":"Return file name without extension","docstring_summary":"Return file name without extension","docstring_tokens":["Return","file","name","without","extension"],"function":"def util_gen_name_part(filepath):\n '''Return file name without extension'''\n return re.match(r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$', filepath).group(1)","function_tokens":["def","util_gen_name_part","(","filepath",")",":","return","re",".","match","(","r'.*[\/\\\\]([^\/\\\\]+?)(\\..{2,5})?$'",",","filepath",")",".","group","(","1",")"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L183-L185"} |
|
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"vec_to_axis_vec","parameters":"(vec_in, vec_out)","argument_list":"","return_statement":"","docstring":"Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)","docstring_summary":"Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)","docstring_tokens":["Make","**","vec_out","**","to","be","an","axis","-","aligned","unit","vector","that","is","closest","to","vec_in",".","(","basis?",")"],"function":"def vec_to_axis_vec(vec_in, vec_out):\n '''Make **vec_out** to be an axis-aligned unit vector that is closest to vec_in. (basis?)'''\n x, y, z = vec_in\n if abs(x) > abs(y):\n if abs(x) > abs(z):\n vec_out.x = 1 if x >= 0 else -1\n else:\n vec_out.z = 1 if z >= 0 else -1\n else:\n if abs(y) > abs(z):\n vec_out.y = 1 if y >= 0 else -1\n else:\n vec_out.z = 1 if z >= 0 else -1","function_tokens":["def","vec_to_axis_vec","(","vec_in",",","vec_out",")",":","x",",","y",",","z","=","vec_in","if","abs","(","x",")",">","abs","(","y",")",":","if","abs","(","x",")",">","abs","(","z",")",":","vec_out",".","x","=","1","if","x",">=","0","else","-","1","else",":","vec_out",".","z","=","1","if","z",">=","0","else","-","1","else",":","if","abs","(","y",")",">","abs","(","z",")",":","vec_out",".","y","=","1","if","y",">=","0","else","-","1","else",":","vec_out",".","z","=","1","if","z",">=","0","else","-","1"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L188-L200"} |
|
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"color_linear_to_srgb","parameters":"(c)","argument_list":"","return_statement":"","docstring":"Convert from linear to sRGB color space.\n Source: Cycles addon implementation, node_color.h.","docstring_summary":"Convert from linear to sRGB color space.\n Source: Cycles addon implementation, node_color.h.","docstring_tokens":["Convert","from","linear","to","sRGB","color","space",".","Source",":","Cycles","addon","implementation","node_color",".","h","."],"function":"def color_linear_to_srgb(c):\n \"\"\"\n Convert from linear to sRGB color space.\n Source: Cycles addon implementation, node_color.h.\n \"\"\"\n if c < 0.0031308:\n return 0.0 if c < 0.0 else c * 12.92\n else:\n return 1.055 * pow(c, 1.0 \/ 2.4) - 0.055","function_tokens":["def","color_linear_to_srgb","(","c",")",":","if","c","<","0.0031308",":","return","0.0","if","c","<","0.0","else","c","*","12.92","else",":","return","1.055","*","pow","(","c",",","1.0","\/","2.4",")","-","0.055"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L295-L303"} |
|
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"pskimport","parameters":"(filepath,\n context = None,\n bImportmesh = True,\n bImportbone = True,\n bSpltiUVdata = False,\n fBonesize = 5.0,\n fBonesizeRatio = 0.6,\n bDontInvertRoot = True,\n bReorientBones = False,\n bReorientDirectly = False,\n bScaleDown = True,\n bToSRGB = True,\n error_callback = None)","argument_list":"","return_statement":"return True","docstring":"Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n error_callback = lambda msg: print('reason:', msg)","docstring_summary":"Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n error_callback = lambda msg: print('reason:', msg)","docstring_tokens":["Import","mesh","and","skeleton","from",".","psk","\/",".","pskx","files","Args",":","bReorientBones",":","Axis","based","bone","orientation","to","children","error_callback",":","Called","when","importing","is","failed",".","error_callback","=","lambda","msg",":","print","(","reason",":","msg",")"],"function":"def pskimport(filepath,\n context = None,\n bImportmesh = True,\n bImportbone = True,\n bSpltiUVdata = False,\n fBonesize = 5.0,\n fBonesizeRatio = 0.6,\n bDontInvertRoot = True,\n bReorientBones = False,\n bReorientDirectly = False,\n bScaleDown = True,\n bToSRGB = True,\n error_callback = None):\n '''\n Import mesh and skeleton from .psk\/.pskx files\n \n Args:\n bReorientBones:\n Axis based bone orientation to children\n \n error_callback:\n Called when importing is failed.\n \n error_callback = lambda msg: print('reason:', msg)\n \n '''\n if not hasattr( error_callback, '__call__'):\n # error_callback = __pass\n error_callback = print\n \n # ref_time = time.process_time()\n if not bImportbone and not bImportmesh:\n error_callback(\"Nothing to do.\\nSet something for import.\")\n return False\n \n print (\"-----------------------------------------------\")\n print (\"---------EXECUTING PSK PYTHON IMPORTER---------\")\n print (\"-----------------------------------------------\")\n\n #file may not exist\n try:\n file = open(filepath,'rb')\n except IOError:\n error_callback('Error while opening file for reading:\\n \"'+filepath+'\"')\n return False\n\n if not util_check_file_header(file, 'psk'):\n error_callback('Not psk file:\\n \"'+filepath+'\"')\n return False\n \n Vertices = None\n Wedges = None\n Faces = None\n UV_by_face = None\n Materials = None\n Bones = None\n Weights = None\n VertexColors = None\n Extrauvs = []\n Normals = None\n WedgeIdx_by_faceIdx = None\n \n if not context:\n context = bpy.context\n #================================================================================================== \n # Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle \n # Only Name is usable.\n def read_materials():\n \n nonlocal Materials\n \n Materials = []\n \n for counter in range(chunk_datacount):\n\n (MaterialNameRaw,) = unpack_from('64s24x', chunk_data, chunk_datasize * counter)\n \n Materials.append( util_bytes_to_str( MaterialNameRaw ) )\n \n \n #================================================================================================== \n # Faces WdgIdx1 | WdgIdx2 | WdgIdx3 | 
MatIdx | AuxMatIdx | SmthGrp\n def read_faces():\n \n if not bImportmesh:\n return True\n \n nonlocal Faces, UV_by_face, WedgeIdx_by_faceIdx\n\n UV_by_face = [None] * chunk_datacount\n Faces = [None] * chunk_datacount\n WedgeIdx_by_faceIdx = [None] * chunk_datacount\n \n if len(Wedges) > 65536:\n unpack_format = '=IIIBBI'\n else:\n unpack_format = '=HHHBBI'\n \n unpack_data = Struct(unpack_format).unpack_from\n \n for counter in range(chunk_datacount):\n (WdgIdx1, WdgIdx2, WdgIdx3,\n MatIndex, \n AuxMatIndex, #unused\n SmoothingGroup # Umodel is not exporting SmoothingGroups\n ) = unpack_data(chunk_data, counter * chunk_datasize)\n \n # looks ugly\n # Wedges is (point_index, u, v, MatIdx)\n ((vertid0, u0, v0, matid0), (vertid1, u1, v1, matid1), (vertid2, u2, v2, matid2)) = Wedges[WdgIdx1], Wedges[WdgIdx2], Wedges[WdgIdx3]\n \n # note order: C,B,A\n # Faces[counter] = (vertid2, vertid1, vertid0)\n\n Faces[counter] = (vertid1, vertid0, vertid2)\n # Faces[counter] = (vertid1, vertid2, vertid0)\n # Faces[counter] = (vertid0, vertid1, vertid2)\n \n # uv = ( ( u2, 1.0 - v2 ), ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ) )\n uv = ( ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ), ( u2, 1.0 - v2 ) )\n \n # Mapping: FaceIndex <=> UV data <=> FaceMatIndex\n UV_by_face[counter] = (uv, MatIndex, (matid2, matid1, matid0))\n \n # We need this for EXTRA UVs\n WedgeIdx_by_faceIdx[counter] = (WdgIdx3, WdgIdx2, WdgIdx1)\n\n \n #==================================================================================================\n # Vertices X | Y | Z\n def read_vertices():\n \n if not bImportmesh:\n return True\n \n nonlocal Vertices\n \n Vertices = [None] * chunk_datacount\n \n unpack_data = Struct('3f').unpack_from\n \n if bScaleDown:\n for counter in range( chunk_datacount ):\n (vec_x, vec_y, vec_z) = unpack_data(chunk_data, counter * chunk_datasize)\n Vertices[counter] = (vec_x*0.01, vec_y*0.01, vec_z*0.01)\n # equal to gltf\n # Vertices[counter] = (vec_x*0.01, vec_z*0.01, -vec_y*0.01)\n else:\n for counter in range( chunk_datacount ):\n Vertices[counter] = unpack_data(chunk_data, counter * chunk_datasize)\n \n \n #================================================================================================== \n # Wedges (UV) VertexId | U | V | MatIdx \n def read_wedges():\n \n if not bImportmesh:\n return True\n \n nonlocal Wedges\n \n Wedges = [None] * chunk_datacount\n \n unpack_data = Struct('=IffBxxx').unpack_from\n \n for counter in range( chunk_datacount ):\n (vertex_id,\n u, v,\n material_index) = unpack_data( chunk_data, counter * chunk_datasize )\n \n # print(vertex_id, u, v, material_index)\n # Wedges[counter] = (vertex_id, u, v, material_index)\n Wedges[counter] = [vertex_id, u, v, material_index]\n \n #================================================================================================== \n # Bones (VBone .. 
VJointPos ) Name|Flgs|NumChld|PrntIdx|Qw|Qx|Qy|Qz|LocX|LocY|LocZ|Lngth|XSize|YSize|ZSize\n def read_bones():\n \n nonlocal Bones, bImportbone\n \n if chunk_datacount == 0:\n bImportbone = False\n \n if bImportbone:\n # unpack_data = Struct('64s3i11f').unpack_from\n unpack_data = Struct('64s3i7f16x').unpack_from\n else:\n unpack_data = Struct('64s56x').unpack_from\n \n Bones = [None] * chunk_datacount\n \n for counter in range( chunk_datacount ):\n Bones[counter] = unpack_data( chunk_data, chunk_datasize * counter)\n \n \n #================================================================================================== \n # Influences (Bone Weight) (VRawBoneInfluence) ( Weight | PntIdx | BoneIdx)\n def read_weights():\n\n nonlocal Weights\n \n if not bImportmesh:\n return True\n \n Weights = [None] * chunk_datacount\n \n unpack_data = Struct('fii').unpack_from\n \n for counter in range(chunk_datacount):\n Weights[counter] = unpack_data(chunk_data, chunk_datasize * counter)\n \n #================================================================================================== \n # Vertex colors. R G B A bytes. NOTE: it is Wedge color.(uses Wedges index)\n def read_vertex_colors():\n \n nonlocal VertexColors\n \n unpack_data = Struct(\"=4B\").unpack_from\n \n VertexColors = [None] * chunk_datacount\n \n for counter in range( chunk_datacount ):\n VertexColors[counter] = unpack_data(chunk_data, chunk_datasize * counter) \n \n \n #================================================================================================== \n # Extra UV. U | V\n def read_extrauvs():\n\n unpack_data = Struct(\"=2f\").unpack_from\n \n uvdata = [None] * chunk_datacount\n \n for counter in range( chunk_datacount ):\n uvdata[counter] = unpack_data(chunk_data, chunk_datasize * counter) \n \n Extrauvs.append(uvdata)\n\n #==================================================================================================\n # Vertex Normals NX | NY | NZ\n def read_normals():\n if not bImportmesh:\n return True\n\n nonlocal Normals\n Normals = [None] * chunk_datacount\n\n unpack_data = Struct('3f').unpack_from\n\n for counter in range(chunk_datacount):\n Normals[counter] = unpack_data(chunk_data, counter * chunk_datasize)\n \n \n CHUNKS_HANDLERS = {\n 'PNTS0000': read_vertices,\n 'VTXW0000': read_wedges,\n 'VTXW3200': read_wedges,#?\n 'FACE0000': read_faces,\n 'FACE3200': read_faces,\n 'MATT0000': read_materials,\n 'REFSKELT': read_bones,\n 'REFSKEL0': read_bones, #?\n 'RAWW0000': read_weights,\n 'RAWWEIGH': read_weights,\n 'VERTEXCO': read_vertex_colors, # VERTEXCOLOR\n 'EXTRAUVS': read_extrauvs,\n 'VTXNORMS': read_normals\n }\n \n #===================================================================================================\n # File. Read all needed data.\n # VChunkHeader Struct\n # ChunkID|TypeFlag|DataSize|DataCount\n # 0 |1 |2 |3\n \n while True:\n \n header_bytes = file.read(32)\n \n if len(header_bytes) < 32:\n \n if len(header_bytes) != 0:\n error_callback(\"Unexpected end of file.(%s\/32 bytes)\" % len(header_bytes))\n break\n \n (chunk_id, chunk_type, chunk_datasize, chunk_datacount) = unpack('20s3i', header_bytes)\n \n chunk_id_str = util_bytes_to_str(chunk_id)\n chunk_id_str = chunk_id_str[:8]\n \n if chunk_id_str in CHUNKS_HANDLERS:\n \n chunk_data = file.read( chunk_datasize * chunk_datacount)\n \n if len(chunk_data) < chunk_datasize * chunk_datacount:\n error_callback('Psk chunk %s is broken.' 
% chunk_id_str)\n return False\n \n CHUNKS_HANDLERS[chunk_id_str]()\n \n else:\n \n print('Unknown chunk: ', chunk_id_str)\n file.seek(chunk_datasize * chunk_datacount, 1)\n \n \n # print(chunk_id_str, chunk_datacount)\n \n file.close()\n \n print(\" Importing file:\", filepath)\n \n if not bImportmesh and (Bones is None or len(Bones) == 0):\n error_callback(\"Psk: no skeleton data.\")\n return False\n\n MAX_UVS = 8\n NAME_UV_PREFIX = \"UV\"\n \n # file name w\/out extension\n gen_name_part = util_gen_name_part(filepath)\n gen_names = {\n 'armature_object': gen_name_part + '.ao',\n 'armature_data': gen_name_part + '.ad',\n 'mesh_object': gen_name_part + '.mo',\n 'mesh_data': gen_name_part + '.md'\n }\n \n if bImportmesh:\n mesh_data = bpy.data.meshes.new(gen_names['mesh_data'])\n mesh_obj = bpy.data.objects.new(gen_names['mesh_object'], mesh_data)\n \n \n #==================================================================================================\n # UV. Prepare\n if bImportmesh:\n if bSpltiUVdata:\n # store how much each \"matrial index\" have vertices\n \n uv_mat_ids = {}\n \n for (_, _, _, material_index) in Wedges:\n \n if not (material_index in uv_mat_ids):\n uv_mat_ids[material_index] = 1\n else:\n uv_mat_ids[material_index] += 1\n \n \n # if we have more UV material indexes than blender UV maps, then...\n if bSpltiUVdata and len(uv_mat_ids) > MAX_UVS :\n \n uv_mat_ids_len = len(uv_mat_ids)\n \n print('UVs: %s out of %s is combined in a first UV map(%s0)' % (uv_mat_ids_len - 8, uv_mat_ids_len, NAME_UV_PREFIX))\n \n mat_idx_proxy = [0] * len(uv_mat_ids)\n \n counts_sorted = sorted(uv_mat_ids.values(), reverse = True)\n \n new_mat_index = MAX_UVS - 1\n \n for c in counts_sorted:\n for mat_idx, counts in uv_mat_ids.items():\n if c == counts:\n mat_idx_proxy[mat_idx] = new_mat_index\n if new_mat_index > 0:\n new_mat_index -= 1\n # print('MatIdx remap: %s > %s' % (mat_idx,new_mat_index))\n \n for i in range(len(Wedges)):\n Wedges[i][3] = mat_idx_proxy[Wedges[i][3]]\n\n # print('Wedges:', chunk_datacount)\n # print('uv_mat_ids', uv_mat_ids)\n # print('uv_mat_ids', uv_mat_ids)\n # for w in Wedges:\n \n if bImportmesh:\n # print(\"-- Materials -- (index, name, faces)\")\n blen_materials = []\n for materialname in Materials:\n matdata = bpy.data.materials.get(materialname)\n \n if matdata is None:\n matdata = bpy.data.materials.new( materialname )\n # matdata = bpy.data.materials.new( materialname )\n \n blen_materials.append( matdata )\n mesh_data.materials.append( matdata )\n # print(counter,materialname,TextureIndex)\n # if mat_groups.get(counter) is not None:\n # print(\"%i: %s\" % (counter, materialname), len(mat_groups[counter]))\n\n #==================================================================================================\n # Prepare bone data\n def init_psk_bone(i, psk_bones, name_raw):\n psk_bone = class_psk_bone()\n psk_bone.children = []\n psk_bone.name = util_bytes_to_str(name_raw)\n psk_bones[i] = psk_bone\n return psk_bone\n\n psk_bone_name_toolong = False\n \n # indexed by bone index. 
array of psk_bone\n psk_bones = [None] * len(Bones)\n \n if not bImportbone: #data needed for mesh-only import\n \n for counter,(name_raw,) in enumerate(Bones):\n init_psk_bone(counter, psk_bones, name_raw)\n \n if bImportbone: #else?\n \n # average bone length\n sum_bone_pos = 0\n \n for counter, (name_raw, flags, NumChildren, ParentIndex, #0 1 2 3\n quat_x, quat_y, quat_z, quat_w, #4 5 6 7\n vec_x, vec_y, vec_z\n # , #8 9 10\n # joint_length, #11\n # scale_x, scale_y, scale_z\n ) in enumerate(Bones):\n \n psk_bone = init_psk_bone(counter, psk_bones, name_raw)\n \n psk_bone.bone_index = counter\n psk_bone.parent_index = ParentIndex\n \n # Tested. 64 is getting cut to 63\n if len(psk_bone.name) > 63:\n psk_bone_name_toolong = True\n # print('Warning. Bone name is too long:', psk_bone.name)\n\n # make sure we have valid parent_index\n if psk_bone.parent_index < 0:\n psk_bone.parent_index = 0\n \n # psk_bone.scale = (scale_x, scale_y, scale_z)\n # print(\"%s: %03f %03f | %f\" % (psk_bone.name, scale_x, scale_y, joint_length),scale_x)\n # print(\"%s:\" % (psk_bone.name), vec_x, quat_x)\n\n # store bind pose to make it available for psa-import via CustomProperty of the Blender bone\n psk_bone.orig_quat = Quaternion((quat_w, quat_x, quat_y, quat_z))\n\n if bScaleDown:\n psk_bone.orig_loc = Vector((vec_x * 0.01, vec_y * 0.01, vec_z * 0.01))\n else:\n psk_bone.orig_loc = Vector((vec_x, vec_y, vec_z))\n\n # root bone must have parent_index = 0 and selfindex = 0\n if psk_bone.parent_index == 0 and psk_bone.bone_index == psk_bone.parent_index:\n if bDontInvertRoot:\n psk_bone.mat_world_rot = psk_bone.orig_quat.to_matrix()\n else:\n psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_matrix()\n psk_bone.mat_world = Matrix.Translation(psk_bone.orig_loc)\n\n sum_bone_pos += psk_bone.orig_loc.length\n \n \n #==================================================================================================\n # Bones. Calc World-space matrix\n \n # TODO optimize math.\n for psk_bone in psk_bones:\n \n if psk_bone.parent_index == 0:\n if psk_bone.bone_index == 0:\n psk_bone.parent = None\n continue\n \n parent = psk_bones[psk_bone.parent_index]\n \n psk_bone.parent = parent\n \n parent.children.append(psk_bone)\n \n # mat_world - world space bone matrix WITHOUT own rotation\n # mat_world_rot - world space bone rotation WITH own rotation\n\n # psk_bone.mat_world = parent.mat_world_rot.to_4x4()\n # psk_bone.mat_world.translation = parent.mat_world.translation + parent.mat_world_rot * psk_bone.orig_loc\n # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()\n\n psk_bone.mat_world = parent.mat_world_rot.to_4x4()\n\n v = psk_bone.orig_loc.copy()\n v.rotate( parent.mat_world_rot )\n psk_bone.mat_world.translation = parent.mat_world.translation + v\n\n\n psk_bone.mat_world_rot = psk_bone.orig_quat.conjugated().to_matrix()\n psk_bone.mat_world_rot.rotate( parent.mat_world_rot )\n\n\n # psk_bone.mat_world = ( parent.mat_world_rot.to_4x4() * psk_bone.trans)\n # psk_bone.mat_world.translation += parent.mat_world.translation\n # psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()\n \n \n #==================================================================================================\n # Skeleton. 
Prepare.\n \n armature_data = bpy.data.armatures.new(gen_names['armature_data'])\n armature_obj = bpy.data.objects.new(gen_names['armature_object'], armature_data)\n # TODO: options for axes and x_ray?\n armature_data.show_axes = False\n\n armature_data.display_type = 'STICK'\n armature_obj.show_in_front = True\n\n util_obj_link(context, armature_obj)\n\n util_select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)\n \n utils_set_mode('EDIT')\n \n \n sum_bone_pos \/= len(Bones) # average\n sum_bone_pos *= fBonesizeRatio # corrected\n \n # bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize))))\n bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize))*100)\/100)\n # bone_size_choosen = max(0.01, min(sum_bone_pos, fBonesize))\n # print(\"Bonesize %f | old: %f round: %f\" % (bone_size_choosen, max(0.01, min(sum_bone_pos, fBonesize)),max(0.01, round((min(sum_bone_pos, fBonesize))*100)\/100)))\n\n if not bReorientBones:\n new_bone_size = bone_size_choosen\n \n #==================================================================================================\n # Skeleton. Build.\n if psk_bone_name_toolong:\n print('Warning. Some bones will be renamed(names are too long). Animation import may be broken.')\n for psk_bone in psk_bones:\n\n # TODO too long name cutting options?\n orig_long_name = psk_bone.name\n\n # Blender will cut the name here (>63 chars)\n edit_bone = armature_obj.data.edit_bones.new(psk_bone.name)\n edit_bone[\"orig_long_name\"] = orig_long_name\n\n # if orig_long_name != edit_bone.name:\n # print('--')\n # print(len(orig_long_name),orig_long_name)\n # print(len(edit_bone.name),edit_bone.name)\n\n # Use the bone name made by blender (.001 , .002 etc.)\n psk_bone.name = edit_bone.name\n\n else:\n for psk_bone in psk_bones:\n edit_bone = armature_obj.data.edit_bones.new(psk_bone.name)\n psk_bone.name = edit_bone.name\n\n for psk_bone in psk_bones:\n edit_bone = armature_obj.data.edit_bones[psk_bone.name]\n\n armature_obj.data.edit_bones.active = edit_bone\n\n if psk_bone.parent is not None:\n edit_bone.parent = armature_obj.data.edit_bones[psk_bone.parent.name]\n else:\n if bDontInvertRoot:\n psk_bone.orig_quat.conjugate()\n \n if bReorientBones:\n (new_bone_size, quat_orient_diff) = calc_bone_rotation(psk_bone, bone_size_choosen, bReorientDirectly, sum_bone_pos)\n # @\n # post_quat = psk_bone.orig_quat.conjugated() * quat_orient_diff\n\n post_quat = quat_orient_diff\n post_quat.rotate( psk_bone.orig_quat.conjugated() )\n else:\n post_quat = psk_bone.orig_quat.conjugated()\n \n # only length of this vector is matter?\n edit_bone.tail = Vector(( 0.0, new_bone_size, 0.0))\n\n # @\n # edit_bone.matrix = psk_bone.mat_world * post_quat.to_matrix().to_4x4()\n\n m = post_quat.copy()\n m.rotate( psk_bone.mat_world )\n\n m = m.to_matrix().to_4x4()\n m.translation = psk_bone.mat_world.translation\n\n edit_bone.matrix = m\n \n \n # some dev code...\n #### FINAL\n # post_quat = psk_bone.orig_quat.conjugated() * quat_diff\n # edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4()\n # edit_bone[\"post_quat\"] = test_quat\n #### \n\n # edit_bone[\"post_quat\"] = Quaternion((1,0,0,0))\n # edit_bone.matrix = psk_bone.mat_world* psk_bone.rot\n \n\n # if edit_bone.parent:\n # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4())\n # edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4())\n # else:\n # 
edit_bone.matrix = psk_bone.orig_quat.to_matrix().to_4x4()\n \n \n # save bindPose information for .psa import\n # dev\n edit_bone[\"orig_quat\"] = psk_bone.orig_quat\n edit_bone[\"orig_loc\"] = psk_bone.orig_loc\n edit_bone[\"post_quat\"] = post_quat\n\n '''\n bone = edit_bone\n if psk_bone.parent is not None:\n orig_loc = bone.matrix.translation - bone.parent.matrix.translation\n orig_loc.rotate( bone.parent.matrix.to_quaternion().conjugated() )\n\n \n orig_quat = bone.matrix.to_quaternion()\n orig_quat.rotate( bone.parent.matrix.to_quaternion().conjugated() )\n orig_quat.conjugate()\n\n if orig_quat.dot( psk_bone.orig_quat ) < 0.95:\n print(bone.name, psk_bone.orig_quat, orig_quat, orig_quat.dot( psk_bone.orig_quat ))\n print('parent:', bone.parent.matrix.to_quaternion(), bone.parent.matrix.to_quaternion().rotation_difference(bone.matrix.to_quaternion()) )\n\n\n if (psk_bone.orig_loc - orig_loc).length > 0.02:\n print(bone.name, psk_bone.orig_loc, orig_loc, (psk_bone.orig_loc - orig_loc).length)\n '''\n utils_set_mode('OBJECT')\n \n #==================================================================================================\n # Weights\n if bImportmesh: \n \n vertices_total = len(Vertices)\n \n for ( _, PointIndex, BoneIndex ) in Weights:\n if PointIndex < vertices_total: # can it be not?\n psk_bones[BoneIndex].have_weight_data = True\n # else:\n # print(psk_bones[BoneIndex].name, 'for other mesh',PointIndex ,vertices_total)\n \n #print(\"weight:\", PointIndex, BoneIndex, Weight)\n # Weights.append(None)\n # print(Weights.count(None))\n \n \n # Original vertex colorization code\n '''\n # Weights.sort( key = lambda wgh: wgh[0])\n if bImportmesh:\n VtxCol = []\n bones_count = len(psk_bones)\n for x in range(bones_count):\n #change the overall darkness of each material in a range between 0.1 and 0.9\n tmpVal = ((float(x) + 1.0) \/ bones_count * 0.7) + 0.1\n tmpVal = int(tmpVal * 256)\n tmpCol = [tmpVal, tmpVal, tmpVal, 0]\n #Change the color of each material slightly\n if x % 3 == 0:\n if tmpCol[0] < 128:\n tmpCol[0] += 60\n else:\n tmpCol[0] -= 60\n if x % 3 == 1:\n if tmpCol[1] < 128:\n tmpCol[1] += 60\n else:\n tmpCol[1] -= 60\n if x % 3 == 2:\n if tmpCol[2] < 128:\n tmpCol[2] += 60\n else:\n tmpCol[2] -= 60\n #Add the material to the mesh\n VtxCol.append(tmpCol)\n \n for x in range(len(Tmsh.faces)):\n for y in range(len(Tmsh.faces[x].v)):\n #find v in Weights[n][0]\n findVal = Tmsh.faces[x].v[y].index\n n = 0\n while findVal != Weights[n][0]:\n n = n + 1\n TmpCol = VtxCol[Weights[n][1]]\n #check if a vertex has more than one influence\n if n != len(Weights) - 1:\n if Weights[n][0] == Weights[n + 1][0]:\n #if there is more than one influence, use the one with the greater influence\n #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small\n if Weights[n][2] < Weights[n + 1][2]:\n TmpCol = VtxCol[Weights[n + 1][1]]\n Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0], TmpCol[1], TmpCol[2], 0))\n '''\n\n #===================================================================================================\n # UV. Setup.\n \n if bImportmesh:\n # Trick! 
Create UV maps BEFORE mesh and get (0,0) coordinates for free!\n # ...otherwise UV coords will be copied from active, or calculated from mesh...\n \n if bSpltiUVdata:\n \n for i in range(len(uv_mat_ids)):\n get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX + str(i))\n \n else:\n \n get_uv_layers(mesh_data).new(name = NAME_UV_PREFIX+\"_SINGLE\")\n \n \n for counter, uv_data in enumerate(Extrauvs):\n \n if len(mesh_data.uv_layers) < MAX_UVS:\n \n get_uv_layers(mesh_data).new(name = \"EXTRAUVS\"+str(counter))\n \n else:\n \n Extrauvs.remove(uv_data)\n print('Extra UV layer %s is ignored. Re-import without \"Split UV data\".' % counter)\n \n #================================================================================================== \n # Mesh. Build.\n \n mesh_data.from_pydata(Vertices,[],Faces)\n\n #==================================================================================================\n # Vertex Normal. Set.\n\n if Normals is not None:\n mesh_data.polygons.foreach_set(\"use_smooth\", [True] * len(mesh_data.polygons))\n mesh_data.normals_split_custom_set_from_vertices(Normals)\n mesh_data.use_auto_smooth = True\n \n #===================================================================================================\n # UV. Set.\n \n if bImportmesh:\n\n for face in mesh_data.polygons:\n face.material_index = UV_by_face[face.index][1]\n\n uv_layers = mesh_data.uv_layers\n \n if not bSpltiUVdata:\n uvLayer = uv_layers[0]\n \n # per face\n # for faceIdx, (faceUVs, faceMatIdx, _, _, wmidx) in enumerate(UV_by_face):\n for faceIdx, (faceUVs, faceMatIdx, WedgeMatIds) in enumerate(UV_by_face):\n \n # per vertex\n for vertN, uv in enumerate(faceUVs):\n loopId = faceIdx * 3 + vertN\n \n if bSpltiUVdata:\n uvLayer = uv_layers[WedgeMatIds[vertN]]\n \n uvLayer.data[loopId].uv = uv\n\n #==================================================================================================\n # VertexColors\n \n if VertexColors is not None:\n \n vtx_color_layer = mesh_data.vertex_colors.new(name = \"PSKVTXCOL_0\", do_init = False)\n \n pervertex = [None] * len(Vertices)\n \n for counter, (vertexid,_,_,_) in enumerate(Wedges):\n \n # Is it possible ?\n if (pervertex[vertexid] is not None) and (pervertex[vertexid] != VertexColors[counter]):\n print('Not equal vertex colors. ', vertexid, pervertex[vertexid], VertexColors[counter])\n \n pervertex[vertexid] = VertexColors[counter]\n \n \n for counter, loop in enumerate(mesh_data.loops):\n \n color = pervertex[ loop.vertex_index ]\n \n if color is None:\n vtx_color_layer.data[ counter ].color = (1.,1.,1.,1.)\n else:\n if bToSRGB:\n vtx_color_layer.data[ counter ].color = (\n color_linear_to_srgb(color[0] \/ 255),\n color_linear_to_srgb(color[1] \/ 255),\n color_linear_to_srgb(color[2] \/ 255),\n color[3] \/ 255\n )\n else:\n vtx_color_layer.data[ counter ].color = (\n color[0] \/ 255,\n color[1] \/ 255,\n color[2] \/ 255,\n color[3] \/ 255\n )\n \n #===================================================================================================\n # Extra UVs. 
Set.\n \n # for counter, uv_data in enumerate(Extrauvs):\n \n # uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ]\n \n # for uv_index, uv_coords in enumerate(uv_data):\n \n # uvLayer.data[uv_index].uv = (uv_coords[0], 1.0 - uv_coords[1])\n\n\n for counter, uv_data in enumerate(Extrauvs):\n\n uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ]\n\n for faceIdx, (WedgeIdx3,WedgeIdx2,WedgeIdx1) in enumerate(WedgeIdx_by_faceIdx):\n \n # equal to gltf\n uvLayer.data[faceIdx*3 ].uv = (uv_data[WedgeIdx2][0], 1.0 - uv_data[WedgeIdx2][1])\n uvLayer.data[faceIdx*3+1].uv = (uv_data[WedgeIdx1][0], 1.0 - uv_data[WedgeIdx1][1])\n uvLayer.data[faceIdx*3+2].uv = (uv_data[WedgeIdx3][0], 1.0 - uv_data[WedgeIdx3][1])\n # uvLayer.data[faceIdx*3 ].uv = (uv_data[WedgeIdx3][0], 1.0 - uv_data[WedgeIdx3][1])\n # uvLayer.data[faceIdx*3+1].uv = (uv_data[WedgeIdx2][0], 1.0 - uv_data[WedgeIdx2][1])\n # uvLayer.data[faceIdx*3+2].uv = (uv_data[WedgeIdx1][0], 1.0 - uv_data[WedgeIdx1][1])\n \n \n #===================================================================================================\n # Mesh. Vertex Groups. Bone Weights.\n \n for psk_bone in psk_bones:\n if psk_bone.have_weight_data:\n psk_bone.vertex_group = mesh_obj.vertex_groups.new(name = psk_bone.name)\n # else:\n # print(psk_bone.name, 'have no influence on this mesh')\n \n for weight, vertex_id, bone_index_w in filter(None, Weights):\n psk_bones[bone_index_w].vertex_group.add((vertex_id,), weight, 'ADD')\n \n \n #===================================================================================================\n # Skeleton. Colorize.\n \n if bImportbone:\n \n bone_group_unused = armature_obj.pose.bone_groups.new(name = \"Unused bones\")\n bone_group_unused.color_set = 'THEME14'\n\n bone_group_nochild = armature_obj.pose.bone_groups.new(name = \"No children\")\n bone_group_nochild.color_set = 'THEME03'\n\n armature_data.show_group_colors = True\n\n for psk_bone in psk_bones:\n \n pose_bone = armature_obj.pose.bones[psk_bone.name]\n \n if psk_bone.have_weight_data:\n \n if len(psk_bone.children) == 0:\n pose_bone.bone_group = bone_group_nochild\n \n else:\n pose_bone.bone_group = bone_group_unused\n \n \n #===================================================================================================\n # Final\n \n if bImportmesh:\n \n util_obj_link(context, mesh_obj)\n util_select_all(False)\n \n \n if not bImportbone: \n \n util_obj_select(context, mesh_obj)\n util_obj_set_active(context, mesh_obj)\n \n else:\n # select_all(False)\n util_obj_select(context, armature_obj)\n \n # parenting mesh to armature object\n mesh_obj.parent = armature_obj\n mesh_obj.parent_type = 'OBJECT'\n \n # add armature modifier\n blender_modifier = mesh_obj.modifiers.new( armature_obj.data.name, type = 'ARMATURE')\n blender_modifier.show_expanded = False\n blender_modifier.use_vertex_groups = True\n blender_modifier.use_bone_envelopes = False\n blender_modifier.object = armature_obj\n \n # utils_set_mode('OBJECT')\n # select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)\n \n # print(\"Done: %f sec.\" % (time.process_time() - ref_time))\n utils_set_mode('OBJECT')\n return 
True","function_tokens":["def","pskimport","(","filepath",",","context","=","None",",","bImportmesh","=","True",",","bImportbone","=","True",",","bSpltiUVdata","=","False",",","fBonesize","=","5.0",",","fBonesizeRatio","=","0.6",",","bDontInvertRoot","=","True",",","bReorientBones","=","False",",","bReorientDirectly","=","False",",","bScaleDown","=","True",",","bToSRGB","=","True",",","error_callback","=","None",")",":","if","not","hasattr","(","error_callback",",","'__call__'",")",":","# error_callback = __pass","error_callback","=","print","# ref_time = time.process_time()","if","not","bImportbone","and","not","bImportmesh",":","error_callback","(","\"Nothing to do.\\nSet something for import.\"",")","return","False","print","(","\"-----------------------------------------------\"",")","print","(","\"---------EXECUTING PSK PYTHON IMPORTER---------\"",")","print","(","\"-----------------------------------------------\"",")","#file may not exist","try",":","file","=","open","(","filepath",",","'rb'",")","except","IOError",":","error_callback","(","'Error while opening file for reading:\\n \"'","+","filepath","+","'\"'",")","return","False","if","not","util_check_file_header","(","file",",","'psk'",")",":","error_callback","(","'Not psk file:\\n \"'","+","filepath","+","'\"'",")","return","False","Vertices","=","None","Wedges","=","None","Faces","=","None","UV_by_face","=","None","Materials","=","None","Bones","=","None","Weights","=","None","VertexColors","=","None","Extrauvs","=","[","]","Normals","=","None","WedgeIdx_by_faceIdx","=","None","if","not","context",":","context","=","bpy",".","context","#================================================================================================== ","# Materials MaterialNameRaw | TextureIndex | PolyFlags | AuxMaterial | AuxFlags | LodBias | LodStyle ","# Only Name is usable.","def","read_materials","(",")",":","nonlocal","Materials","Materials","=","[","]","for","counter","in","range","(","chunk_datacount",")",":","(","MaterialNameRaw",",",")","=","unpack_from","(","'64s24x'",",","chunk_data",",","chunk_datasize","*","counter",")","Materials",".","append","(","util_bytes_to_str","(","MaterialNameRaw",")",")","#================================================================================================== ","# Faces WdgIdx1 | WdgIdx2 | WdgIdx3 | MatIdx | AuxMatIdx | SmthGrp","def","read_faces","(",")",":","if","not","bImportmesh",":","return","True","nonlocal","Faces",",","UV_by_face",",","WedgeIdx_by_faceIdx","UV_by_face","=","[","None","]","*","chunk_datacount","Faces","=","[","None","]","*","chunk_datacount","WedgeIdx_by_faceIdx","=","[","None","]","*","chunk_datacount","if","len","(","Wedges",")",">","65536",":","unpack_format","=","'=IIIBBI'","else",":","unpack_format","=","'=HHHBBI'","unpack_data","=","Struct","(","unpack_format",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","(","WdgIdx1",",","WdgIdx2",",","WdgIdx3",",","MatIndex",",","AuxMatIndex",",","#unused","SmoothingGroup","# Umodel is not exporting SmoothingGroups",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","# looks ugly","# Wedges is (point_index, u, v, MatIdx)","(","(","vertid0",",","u0",",","v0",",","matid0",")",",","(","vertid1",",","u1",",","v1",",","matid1",")",",","(","vertid2",",","u2",",","v2",",","matid2",")",")","=","Wedges","[","WdgIdx1","]",",","Wedges","[","WdgIdx2","]",",","Wedges","[","WdgIdx3","]","# note order: C,B,A","# Faces[counter] = (vertid2, vertid1, 
vertid0)","Faces","[","counter","]","=","(","vertid1",",","vertid0",",","vertid2",")","# Faces[counter] = (vertid1, vertid2, vertid0)","# Faces[counter] = (vertid0, vertid1, vertid2)","# uv = ( ( u2, 1.0 - v2 ), ( u1, 1.0 - v1 ), ( u0, 1.0 - v0 ) )","uv","=","(","(","u1",",","1.0","-","v1",")",",","(","u0",",","1.0","-","v0",")",",","(","u2",",","1.0","-","v2",")",")","# Mapping: FaceIndex <=> UV data <=> FaceMatIndex","UV_by_face","[","counter","]","=","(","uv",",","MatIndex",",","(","matid2",",","matid1",",","matid0",")",")","# We need this for EXTRA UVs","WedgeIdx_by_faceIdx","[","counter","]","=","(","WdgIdx3",",","WdgIdx2",",","WdgIdx1",")","#==================================================================================================","# Vertices X | Y | Z","def","read_vertices","(",")",":","if","not","bImportmesh",":","return","True","nonlocal","Vertices","Vertices","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f'",")",".","unpack_from","if","bScaleDown",":","for","counter","in","range","(","chunk_datacount",")",":","(","vec_x",",","vec_y",",","vec_z",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","Vertices","[","counter","]","=","(","vec_x","*","0.01",",","vec_y","*","0.01",",","vec_z","*","0.01",")","# equal to gltf","# Vertices[counter] = (vec_x*0.01, vec_z*0.01, -vec_y*0.01)","else",":","for","counter","in","range","(","chunk_datacount",")",":","Vertices","[","counter","]","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","#================================================================================================== ","# Wedges (UV) VertexId | U | V | MatIdx ","def","read_wedges","(",")",":","if","not","bImportmesh",":","return","True","nonlocal","Wedges","Wedges","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'=IffBxxx'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","(","vertex_id",",","u",",","v",",","material_index",")","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","# print(vertex_id, u, v, material_index)","# Wedges[counter] = (vertex_id, u, v, material_index)","Wedges","[","counter","]","=","[","vertex_id",",","u",",","v",",","material_index","]","#================================================================================================== ","# Bones (VBone .. 
VJointPos ) Name|Flgs|NumChld|PrntIdx|Qw|Qx|Qy|Qz|LocX|LocY|LocZ|Lngth|XSize|YSize|ZSize","def","read_bones","(",")",":","nonlocal","Bones",",","bImportbone","if","chunk_datacount","==","0",":","bImportbone","=","False","if","bImportbone",":","# unpack_data = Struct('64s3i11f').unpack_from","unpack_data","=","Struct","(","'64s3i7f16x'",")",".","unpack_from","else",":","unpack_data","=","Struct","(","'64s56x'",")",".","unpack_from","Bones","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","Bones","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","#================================================================================================== ","# Influences (Bone Weight) (VRawBoneInfluence) ( Weight | PntIdx | BoneIdx)","def","read_weights","(",")",":","nonlocal","Weights","if","not","bImportmesh",":","return","True","Weights","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'fii'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","Weights","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","#================================================================================================== ","# Vertex colors. R G B A bytes. NOTE: it is Wedge color.(uses Wedges index)","def","read_vertex_colors","(",")",":","nonlocal","VertexColors","unpack_data","=","Struct","(","\"=4B\"",")",".","unpack_from","VertexColors","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","VertexColors","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","#================================================================================================== ","# Extra UV. U | V","def","read_extrauvs","(",")",":","unpack_data","=","Struct","(","\"=2f\"",")",".","unpack_from","uvdata","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","uvdata","[","counter","]","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","Extrauvs",".","append","(","uvdata",")","#==================================================================================================","# Vertex Normals NX | NY | NZ","def","read_normals","(",")",":","if","not","bImportmesh",":","return","True","nonlocal","Normals","Normals","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","Normals","[","counter","]","=","unpack_data","(","chunk_data",",","counter","*","chunk_datasize",")","CHUNKS_HANDLERS","=","{","'PNTS0000'",":","read_vertices",",","'VTXW0000'",":","read_wedges",",","'VTXW3200'",":","read_wedges",",","#?","'FACE0000'",":","read_faces",",","'FACE3200'",":","read_faces",",","'MATT0000'",":","read_materials",",","'REFSKELT'",":","read_bones",",","'REFSKEL0'",":","read_bones",",","#?","'RAWW0000'",":","read_weights",",","'RAWWEIGH'",":","read_weights",",","'VERTEXCO'",":","read_vertex_colors",",","# VERTEXCOLOR","'EXTRAUVS'",":","read_extrauvs",",","'VTXNORMS'",":","read_normals","}","#===================================================================================================","# File. 
Read all needed data.","# VChunkHeader Struct","# ChunkID|TypeFlag|DataSize|DataCount","# 0 |1 |2 |3","while","True",":","header_bytes","=","file",".","read","(","32",")","if","len","(","header_bytes",")","<","32",":","if","len","(","header_bytes",")","!=","0",":","error_callback","(","\"Unexpected end of file.(%s\/32 bytes)\"","%","len","(","header_bytes",")",")","break","(","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",")","=","unpack","(","'20s3i'",",","header_bytes",")","chunk_id_str","=","util_bytes_to_str","(","chunk_id",")","chunk_id_str","=","chunk_id_str","[",":","8","]","if","chunk_id_str","in","CHUNKS_HANDLERS",":","chunk_data","=","file",".","read","(","chunk_datasize","*","chunk_datacount",")","if","len","(","chunk_data",")","<","chunk_datasize","*","chunk_datacount",":","error_callback","(","'Psk chunk %s is broken.'","%","chunk_id_str",")","return","False","CHUNKS_HANDLERS","[","chunk_id_str","]","(",")","else",":","print","(","'Unknown chunk: '",",","chunk_id_str",")","file",".","seek","(","chunk_datasize","*","chunk_datacount",",","1",")","# print(chunk_id_str, chunk_datacount)","file",".","close","(",")","print","(","\" Importing file:\"",",","filepath",")","if","not","bImportmesh","and","(","Bones","is","None","or","len","(","Bones",")","==","0",")",":","error_callback","(","\"Psk: no skeleton data.\"",")","return","False","MAX_UVS","=","8","NAME_UV_PREFIX","=","\"UV\"","# file name w\/out extension","gen_name_part","=","util_gen_name_part","(","filepath",")","gen_names","=","{","'armature_object'",":","gen_name_part","+","'.ao'",",","'armature_data'",":","gen_name_part","+","'.ad'",",","'mesh_object'",":","gen_name_part","+","'.mo'",",","'mesh_data'",":","gen_name_part","+","'.md'","}","if","bImportmesh",":","mesh_data","=","bpy",".","data",".","meshes",".","new","(","gen_names","[","'mesh_data'","]",")","mesh_obj","=","bpy",".","data",".","objects",".","new","(","gen_names","[","'mesh_object'","]",",","mesh_data",")","#==================================================================================================","# UV. 
Prepare","if","bImportmesh",":","if","bSpltiUVdata",":","# store how much each \"matrial index\" have vertices","uv_mat_ids","=","{","}","for","(","_",",","_",",","_",",","material_index",")","in","Wedges",":","if","not","(","material_index","in","uv_mat_ids",")",":","uv_mat_ids","[","material_index","]","=","1","else",":","uv_mat_ids","[","material_index","]","+=","1","# if we have more UV material indexes than blender UV maps, then...","if","bSpltiUVdata","and","len","(","uv_mat_ids",")",">","MAX_UVS",":","uv_mat_ids_len","=","len","(","uv_mat_ids",")","print","(","'UVs: %s out of %s is combined in a first UV map(%s0)'","%","(","uv_mat_ids_len","-","8",",","uv_mat_ids_len",",","NAME_UV_PREFIX",")",")","mat_idx_proxy","=","[","0","]","*","len","(","uv_mat_ids",")","counts_sorted","=","sorted","(","uv_mat_ids",".","values","(",")",",","reverse","=","True",")","new_mat_index","=","MAX_UVS","-","1","for","c","in","counts_sorted",":","for","mat_idx",",","counts","in","uv_mat_ids",".","items","(",")",":","if","c","==","counts",":","mat_idx_proxy","[","mat_idx","]","=","new_mat_index","if","new_mat_index",">","0",":","new_mat_index","-=","1","# print('MatIdx remap: %s > %s' % (mat_idx,new_mat_index))","for","i","in","range","(","len","(","Wedges",")",")",":","Wedges","[","i","]","[","3","]","=","mat_idx_proxy","[","Wedges","[","i","]","[","3","]","]","# print('Wedges:', chunk_datacount)","# print('uv_mat_ids', uv_mat_ids)","# print('uv_mat_ids', uv_mat_ids)","# for w in Wedges:","if","bImportmesh",":","# print(\"-- Materials -- (index, name, faces)\")","blen_materials","=","[","]","for","materialname","in","Materials",":","matdata","=","bpy",".","data",".","materials",".","get","(","materialname",")","if","matdata","is","None",":","matdata","=","bpy",".","data",".","materials",".","new","(","materialname",")","# matdata = bpy.data.materials.new( materialname )","blen_materials",".","append","(","matdata",")","mesh_data",".","materials",".","append","(","matdata",")","# print(counter,materialname,TextureIndex)","# if mat_groups.get(counter) is not None:","# print(\"%i: %s\" % (counter, materialname), len(mat_groups[counter]))","#==================================================================================================","# Prepare bone data","def","init_psk_bone","(","i",",","psk_bones",",","name_raw",")",":","psk_bone","=","class_psk_bone","(",")","psk_bone",".","children","=","[","]","psk_bone",".","name","=","util_bytes_to_str","(","name_raw",")","psk_bones","[","i","]","=","psk_bone","return","psk_bone","psk_bone_name_toolong","=","False","# indexed by bone index. array of psk_bone","psk_bones","=","[","None","]","*","len","(","Bones",")","if","not","bImportbone",":","#data needed for mesh-only import","for","counter",",","(","name_raw",",",")","in","enumerate","(","Bones",")",":","init_psk_bone","(","counter",",","psk_bones",",","name_raw",")","if","bImportbone",":","#else?","# average bone length","sum_bone_pos","=","0","for","counter",",","(","name_raw",",","flags",",","NumChildren",",","ParentIndex",",","#0 1 2 3","quat_x",",","quat_y",",","quat_z",",","quat_w",",","#4 5 6 7","vec_x",",","vec_y",",","vec_z","# , #8 9 10","# joint_length, #11","# scale_x, scale_y, scale_z",")","in","enumerate","(","Bones",")",":","psk_bone","=","init_psk_bone","(","counter",",","psk_bones",",","name_raw",")","psk_bone",".","bone_index","=","counter","psk_bone",".","parent_index","=","ParentIndex","# Tested. 
64 is getting cut to 63","if","len","(","psk_bone",".","name",")",">","63",":","psk_bone_name_toolong","=","True","# print('Warning. Bone name is too long:', psk_bone.name)","# make sure we have valid parent_index","if","psk_bone",".","parent_index","<","0",":","psk_bone",".","parent_index","=","0","# psk_bone.scale = (scale_x, scale_y, scale_z)","# print(\"%s: %03f %03f | %f\" % (psk_bone.name, scale_x, scale_y, joint_length),scale_x)","# print(\"%s:\" % (psk_bone.name), vec_x, quat_x)","# store bind pose to make it available for psa-import via CustomProperty of the Blender bone","psk_bone",".","orig_quat","=","Quaternion","(","(","quat_w",",","quat_x",",","quat_y",",","quat_z",")",")","if","bScaleDown",":","psk_bone",".","orig_loc","=","Vector","(","(","vec_x","*","0.01",",","vec_y","*","0.01",",","vec_z","*","0.01",")",")","else",":","psk_bone",".","orig_loc","=","Vector","(","(","vec_x",",","vec_y",",","vec_z",")",")","# root bone must have parent_index = 0 and selfindex = 0","if","psk_bone",".","parent_index","==","0","and","psk_bone",".","bone_index","==","psk_bone",".","parent_index",":","if","bDontInvertRoot",":","psk_bone",".","mat_world_rot","=","psk_bone",".","orig_quat",".","to_matrix","(",")","else",":","psk_bone",".","mat_world_rot","=","psk_bone",".","orig_quat",".","conjugated","(",")",".","to_matrix","(",")","psk_bone",".","mat_world","=","Matrix",".","Translation","(","psk_bone",".","orig_loc",")","sum_bone_pos","+=","psk_bone",".","orig_loc",".","length","#==================================================================================================","# Bones. Calc World-space matrix","# TODO optimize math.","for","psk_bone","in","psk_bones",":","if","psk_bone",".","parent_index","==","0",":","if","psk_bone",".","bone_index","==","0",":","psk_bone",".","parent","=","None","continue","parent","=","psk_bones","[","psk_bone",".","parent_index","]","psk_bone",".","parent","=","parent","parent",".","children",".","append","(","psk_bone",")","# mat_world - world space bone matrix WITHOUT own rotation","# mat_world_rot - world space bone rotation WITH own rotation","# psk_bone.mat_world = parent.mat_world_rot.to_4x4()","# psk_bone.mat_world.translation = parent.mat_world.translation + parent.mat_world_rot * psk_bone.orig_loc","# psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()","psk_bone",".","mat_world","=","parent",".","mat_world_rot",".","to_4x4","(",")","v","=","psk_bone",".","orig_loc",".","copy","(",")","v",".","rotate","(","parent",".","mat_world_rot",")","psk_bone",".","mat_world",".","translation","=","parent",".","mat_world",".","translation","+","v","psk_bone",".","mat_world_rot","=","psk_bone",".","orig_quat",".","conjugated","(",")",".","to_matrix","(",")","psk_bone",".","mat_world_rot",".","rotate","(","parent",".","mat_world_rot",")","# psk_bone.mat_world = ( parent.mat_world_rot.to_4x4() * psk_bone.trans)","# psk_bone.mat_world.translation += parent.mat_world.translation","# psk_bone.mat_world_rot = parent.mat_world_rot * psk_bone.orig_quat.conjugated().to_matrix()","#==================================================================================================","# Skeleton. 
Prepare.","armature_data","=","bpy",".","data",".","armatures",".","new","(","gen_names","[","'armature_data'","]",")","armature_obj","=","bpy",".","data",".","objects",".","new","(","gen_names","[","'armature_object'","]",",","armature_data",")","# TODO: options for axes and x_ray?","armature_data",".","show_axes","=","False","armature_data",".","display_type","=","'STICK'","armature_obj",".","show_in_front","=","True","util_obj_link","(","context",",","armature_obj",")","util_select_all","(","False",")","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")","utils_set_mode","(","'EDIT'",")","sum_bone_pos","\/=","len","(","Bones",")","# average","sum_bone_pos","*=","fBonesizeRatio","# corrected","# bone_size_choosen = max(0.01, round((min(sum_bone_pos, fBonesize))))","bone_size_choosen","=","max","(","0.01",",","round","(","(","min","(","sum_bone_pos",",","fBonesize",")",")","*","100",")","\/","100",")","# bone_size_choosen = max(0.01, min(sum_bone_pos, fBonesize))","# print(\"Bonesize %f | old: %f round: %f\" % (bone_size_choosen, max(0.01, min(sum_bone_pos, fBonesize)),max(0.01, round((min(sum_bone_pos, fBonesize))*100)\/100)))","if","not","bReorientBones",":","new_bone_size","=","bone_size_choosen","#==================================================================================================","# Skeleton. Build.","if","psk_bone_name_toolong",":","print","(","'Warning. Some bones will be renamed(names are too long). Animation import may be broken.'",")","for","psk_bone","in","psk_bones",":","# TODO too long name cutting options?","orig_long_name","=","psk_bone",".","name","# Blender will cut the name here (>63 chars)","edit_bone","=","armature_obj",".","data",".","edit_bones",".","new","(","psk_bone",".","name",")","edit_bone","[","\"orig_long_name\"","]","=","orig_long_name","# if orig_long_name != edit_bone.name:","# print('--')","# print(len(orig_long_name),orig_long_name)","# print(len(edit_bone.name),edit_bone.name)","# Use the bone name made by blender (.001 , .002 etc.)","psk_bone",".","name","=","edit_bone",".","name","else",":","for","psk_bone","in","psk_bones",":","edit_bone","=","armature_obj",".","data",".","edit_bones",".","new","(","psk_bone",".","name",")","psk_bone",".","name","=","edit_bone",".","name","for","psk_bone","in","psk_bones",":","edit_bone","=","armature_obj",".","data",".","edit_bones","[","psk_bone",".","name","]","armature_obj",".","data",".","edit_bones",".","active","=","edit_bone","if","psk_bone",".","parent","is","not","None",":","edit_bone",".","parent","=","armature_obj",".","data",".","edit_bones","[","psk_bone",".","parent",".","name","]","else",":","if","bDontInvertRoot",":","psk_bone",".","orig_quat",".","conjugate","(",")","if","bReorientBones",":","(","new_bone_size",",","quat_orient_diff",")","=","calc_bone_rotation","(","psk_bone",",","bone_size_choosen",",","bReorientDirectly",",","sum_bone_pos",")","# @","# post_quat = psk_bone.orig_quat.conjugated() * quat_orient_diff","post_quat","=","quat_orient_diff","post_quat",".","rotate","(","psk_bone",".","orig_quat",".","conjugated","(",")",")","else",":","post_quat","=","psk_bone",".","orig_quat",".","conjugated","(",")","# only length of this vector is matter?","edit_bone",".","tail","=","Vector","(","(","0.0",",","new_bone_size",",","0.0",")",")","# @","# edit_bone.matrix = psk_bone.mat_world * 
post_quat.to_matrix().to_4x4()","m","=","post_quat",".","copy","(",")","m",".","rotate","(","psk_bone",".","mat_world",")","m","=","m",".","to_matrix","(",")",".","to_4x4","(",")","m",".","translation","=","psk_bone",".","mat_world",".","translation","edit_bone",".","matrix","=","m","# some dev code...","#### FINAL","# post_quat = psk_bone.orig_quat.conjugated() * quat_diff","# edit_bone.matrix = psk_bone.mat_world * test_quat.to_matrix().to_4x4()","# edit_bone[\"post_quat\"] = test_quat","#### ","# edit_bone[\"post_quat\"] = Quaternion((1,0,0,0))","# edit_bone.matrix = psk_bone.mat_world* psk_bone.rot","# if edit_bone.parent:","# edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (psk_bone.orig_quat.conjugated().to_matrix().to_4x4())","# edit_bone.matrix = edit_bone.parent.matrix * psk_bone.trans * (test_quat.to_matrix().to_4x4())","# else:","# edit_bone.matrix = psk_bone.orig_quat.to_matrix().to_4x4()","# save bindPose information for .psa import","# dev","edit_bone","[","\"orig_quat\"","]","=","psk_bone",".","orig_quat","edit_bone","[","\"orig_loc\"","]","=","psk_bone",".","orig_loc","edit_bone","[","\"post_quat\"","]","=","post_quat","'''\n bone = edit_bone\n if psk_bone.parent is not None:\n orig_loc = bone.matrix.translation - bone.parent.matrix.translation\n orig_loc.rotate( bone.parent.matrix.to_quaternion().conjugated() )\n\n \n orig_quat = bone.matrix.to_quaternion()\n orig_quat.rotate( bone.parent.matrix.to_quaternion().conjugated() )\n orig_quat.conjugate()\n\n if orig_quat.dot( psk_bone.orig_quat ) < 0.95:\n print(bone.name, psk_bone.orig_quat, orig_quat, orig_quat.dot( psk_bone.orig_quat ))\n print('parent:', bone.parent.matrix.to_quaternion(), bone.parent.matrix.to_quaternion().rotation_difference(bone.matrix.to_quaternion()) )\n\n\n if (psk_bone.orig_loc - orig_loc).length > 0.02:\n print(bone.name, psk_bone.orig_loc, orig_loc, (psk_bone.orig_loc - orig_loc).length)\n '''","utils_set_mode","(","'OBJECT'",")","#==================================================================================================","# Weights","if","bImportmesh",":","vertices_total","=","len","(","Vertices",")","for","(","_",",","PointIndex",",","BoneIndex",")","in","Weights",":","if","PointIndex","<","vertices_total",":","# can it be not?","psk_bones","[","BoneIndex","]",".","have_weight_data","=","True","# else:","# print(psk_bones[BoneIndex].name, 'for other mesh',PointIndex ,vertices_total)","#print(\"weight:\", PointIndex, BoneIndex, Weight)","# Weights.append(None)","# print(Weights.count(None))","# Original vertex colorization code","'''\n # Weights.sort( key = lambda wgh: wgh[0])\n if bImportmesh:\n VtxCol = []\n bones_count = len(psk_bones)\n for x in range(bones_count):\n #change the overall darkness of each material in a range between 0.1 and 0.9\n tmpVal = ((float(x) + 1.0) \/ bones_count * 0.7) + 0.1\n tmpVal = int(tmpVal * 256)\n tmpCol = [tmpVal, tmpVal, tmpVal, 0]\n #Change the color of each material slightly\n if x % 3 == 0:\n if tmpCol[0] < 128:\n tmpCol[0] += 60\n else:\n tmpCol[0] -= 60\n if x % 3 == 1:\n if tmpCol[1] < 128:\n tmpCol[1] += 60\n else:\n tmpCol[1] -= 60\n if x % 3 == 2:\n if tmpCol[2] < 128:\n tmpCol[2] += 60\n else:\n tmpCol[2] -= 60\n #Add the material to the mesh\n VtxCol.append(tmpCol)\n \n for x in range(len(Tmsh.faces)):\n for y in range(len(Tmsh.faces[x].v)):\n #find v in Weights[n][0]\n findVal = Tmsh.faces[x].v[y].index\n n = 0\n while findVal != Weights[n][0]:\n n = n + 1\n TmpCol = VtxCol[Weights[n][1]]\n #check if a vertex has more than one 
influence\n if n != len(Weights) - 1:\n if Weights[n][0] == Weights[n + 1][0]:\n #if there is more than one influence, use the one with the greater influence\n #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small\n if Weights[n][2] < Weights[n + 1][2]:\n TmpCol = VtxCol[Weights[n + 1][1]]\n Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0], TmpCol[1], TmpCol[2], 0))\n '''","#===================================================================================================","# UV. Setup.","if","bImportmesh",":","# Trick! Create UV maps BEFORE mesh and get (0,0) coordinates for free!","# ...otherwise UV coords will be copied from active, or calculated from mesh...","if","bSpltiUVdata",":","for","i","in","range","(","len","(","uv_mat_ids",")",")",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","NAME_UV_PREFIX","+","str","(","i",")",")","else",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","NAME_UV_PREFIX","+","\"_SINGLE\"",")","for","counter",",","uv_data","in","enumerate","(","Extrauvs",")",":","if","len","(","mesh_data",".","uv_layers",")","<","MAX_UVS",":","get_uv_layers","(","mesh_data",")",".","new","(","name","=","\"EXTRAUVS\"","+","str","(","counter",")",")","else",":","Extrauvs",".","remove","(","uv_data",")","print","(","'Extra UV layer %s is ignored. Re-import without \"Split UV data\".'","%","counter",")","#================================================================================================== ","# Mesh. Build.","mesh_data",".","from_pydata","(","Vertices",",","[","]",",","Faces",")","#==================================================================================================","# Vertex Normal. Set.","if","Normals","is","not","None",":","mesh_data",".","polygons",".","foreach_set","(","\"use_smooth\"",",","[","True","]","*","len","(","mesh_data",".","polygons",")",")","mesh_data",".","normals_split_custom_set_from_vertices","(","Normals",")","mesh_data",".","use_auto_smooth","=","True","#===================================================================================================","# UV. Set.","if","bImportmesh",":","for","face","in","mesh_data",".","polygons",":","face",".","material_index","=","UV_by_face","[","face",".","index","]","[","1","]","uv_layers","=","mesh_data",".","uv_layers","if","not","bSpltiUVdata",":","uvLayer","=","uv_layers","[","0","]","# per face","# for faceIdx, (faceUVs, faceMatIdx, _, _, wmidx) in enumerate(UV_by_face):","for","faceIdx",",","(","faceUVs",",","faceMatIdx",",","WedgeMatIds",")","in","enumerate","(","UV_by_face",")",":","# per vertex","for","vertN",",","uv","in","enumerate","(","faceUVs",")",":","loopId","=","faceIdx","*","3","+","vertN","if","bSpltiUVdata",":","uvLayer","=","uv_layers","[","WedgeMatIds","[","vertN","]","]","uvLayer",".","data","[","loopId","]",".","uv","=","uv","#==================================================================================================","# VertexColors","if","VertexColors","is","not","None",":","vtx_color_layer","=","mesh_data",".","vertex_colors",".","new","(","name","=","\"PSKVTXCOL_0\"",",","do_init","=","False",")","pervertex","=","[","None","]","*","len","(","Vertices",")","for","counter",",","(","vertexid",",","_",",","_",",","_",")","in","enumerate","(","Wedges",")",":","# Is it possible ?","if","(","pervertex","[","vertexid","]","is","not","None",")","and","(","pervertex","[","vertexid","]","!=","VertexColors","[","counter","]",")",":","print","(","'Not equal vertex colors. 
'",",","vertexid",",","pervertex","[","vertexid","]",",","VertexColors","[","counter","]",")","pervertex","[","vertexid","]","=","VertexColors","[","counter","]","for","counter",",","loop","in","enumerate","(","mesh_data",".","loops",")",":","color","=","pervertex","[","loop",".","vertex_index","]","if","color","is","None",":","vtx_color_layer",".","data","[","counter","]",".","color","=","(","1.",",","1.",",","1.",",","1.",")","else",":","if","bToSRGB",":","vtx_color_layer",".","data","[","counter","]",".","color","=","(","color_linear_to_srgb","(","color","[","0","]","\/","255",")",",","color_linear_to_srgb","(","color","[","1","]","\/","255",")",",","color_linear_to_srgb","(","color","[","2","]","\/","255",")",",","color","[","3","]","\/","255",")","else",":","vtx_color_layer",".","data","[","counter","]",".","color","=","(","color","[","0","]","\/","255",",","color","[","1","]","\/","255",",","color","[","2","]","\/","255",",","color","[","3","]","\/","255",")","#===================================================================================================","# Extra UVs. Set.","# for counter, uv_data in enumerate(Extrauvs):","# uvLayer = mesh_data.uv_layers[ counter - len(Extrauvs) ]","# for uv_index, uv_coords in enumerate(uv_data):","# uvLayer.data[uv_index].uv = (uv_coords[0], 1.0 - uv_coords[1])","for","counter",",","uv_data","in","enumerate","(","Extrauvs",")",":","uvLayer","=","mesh_data",".","uv_layers","[","counter","-","len","(","Extrauvs",")","]","for","faceIdx",",","(","WedgeIdx3",",","WedgeIdx2",",","WedgeIdx1",")","in","enumerate","(","WedgeIdx_by_faceIdx",")",":","# equal to gltf","uvLayer",".","data","[","faceIdx","*","3","]",".","uv","=","(","uv_data","[","WedgeIdx2","]","[","0","]",",","1.0","-","uv_data","[","WedgeIdx2","]","[","1","]",")","uvLayer",".","data","[","faceIdx","*","3","+","1","]",".","uv","=","(","uv_data","[","WedgeIdx1","]","[","0","]",",","1.0","-","uv_data","[","WedgeIdx1","]","[","1","]",")","uvLayer",".","data","[","faceIdx","*","3","+","2","]",".","uv","=","(","uv_data","[","WedgeIdx3","]","[","0","]",",","1.0","-","uv_data","[","WedgeIdx3","]","[","1","]",")","# uvLayer.data[faceIdx*3 ].uv = (uv_data[WedgeIdx3][0], 1.0 - uv_data[WedgeIdx3][1])","# uvLayer.data[faceIdx*3+1].uv = (uv_data[WedgeIdx2][0], 1.0 - uv_data[WedgeIdx2][1])","# uvLayer.data[faceIdx*3+2].uv = (uv_data[WedgeIdx1][0], 1.0 - uv_data[WedgeIdx1][1])","#===================================================================================================","# Mesh. Vertex Groups. Bone Weights.","for","psk_bone","in","psk_bones",":","if","psk_bone",".","have_weight_data",":","psk_bone",".","vertex_group","=","mesh_obj",".","vertex_groups",".","new","(","name","=","psk_bone",".","name",")","# else:","# print(psk_bone.name, 'have no influence on this mesh')","for","weight",",","vertex_id",",","bone_index_w","in","filter","(","None",",","Weights",")",":","psk_bones","[","bone_index_w","]",".","vertex_group",".","add","(","(","vertex_id",",",")",",","weight",",","'ADD'",")","#===================================================================================================","# Skeleton. 
Colorize.","if","bImportbone",":","bone_group_unused","=","armature_obj",".","pose",".","bone_groups",".","new","(","name","=","\"Unused bones\"",")","bone_group_unused",".","color_set","=","'THEME14'","bone_group_nochild","=","armature_obj",".","pose",".","bone_groups",".","new","(","name","=","\"No children\"",")","bone_group_nochild",".","color_set","=","'THEME03'","armature_data",".","show_group_colors","=","True","for","psk_bone","in","psk_bones",":","pose_bone","=","armature_obj",".","pose",".","bones","[","psk_bone",".","name","]","if","psk_bone",".","have_weight_data",":","if","len","(","psk_bone",".","children",")","==","0",":","pose_bone",".","bone_group","=","bone_group_nochild","else",":","pose_bone",".","bone_group","=","bone_group_unused","#===================================================================================================","# Final","if","bImportmesh",":","util_obj_link","(","context",",","mesh_obj",")","util_select_all","(","False",")","if","not","bImportbone",":","util_obj_select","(","context",",","mesh_obj",")","util_obj_set_active","(","context",",","mesh_obj",")","else",":","# select_all(False)","util_obj_select","(","context",",","armature_obj",")","# parenting mesh to armature object","mesh_obj",".","parent","=","armature_obj","mesh_obj",".","parent_type","=","'OBJECT'","# add armature modifier","blender_modifier","=","mesh_obj",".","modifiers",".","new","(","armature_obj",".","data",".","name",",","type","=","'ARMATURE'",")","blender_modifier",".","show_expanded","=","False","blender_modifier",".","use_vertex_groups","=","True","blender_modifier",".","use_bone_envelopes","=","False","blender_modifier",".","object","=","armature_obj","# utils_set_mode('OBJECT')","# select_all(False)","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")","# print(\"Done: %f sec.\" % (time.process_time() - ref_time))","utils_set_mode","(","'OBJECT'",")","return","True"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L305-L1220"} |
|
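The psaimport record that follows matches PSA bone tracks against the armature's bones case-insensitively, resolves repeated PSA names with Blender-style '.001'/'.002' suffixes, and skips any track it cannot map. Below is a minimal pure-Python sketch of that matching step under the same rules; match_psa_bones and the sample bone names are illustrative and not identifiers from the add-on.

def match_psa_bones(psa_names, armature_bone_names):
    # key = lowered name, value = original Blender bone name (case-insensitive lookup)
    lowered = {name.lower(): name for name in armature_bone_names}
    imported = []          # lowered PSA names already processed (drives the duplicate counter)
    matched, missing = {}, []
    for index, psa_name in enumerate(psa_names):
        key = psa_name.lower()
        if key not in lowered:
            missing.append(index)   # PSA has data for a bone the armature lacks
            continue
        orig = lowered[key]
        duplicates = imported.count(key)
        if duplicates > 0:
            # A repeated PSA name is only usable if the skeleton carries the numbered copy too.
            if key + ('.%03d' % duplicates) not in lowered:
                imported.append(key)
                continue
            orig = orig + ('.%03d' % duplicates)
        matched[index] = orig
        imported.append(key)
    return matched, missing

# match_psa_bones(['Bone', 'bone', 'Tail'], ['Bone', 'Bone.001', 'Root'])
# -> ({0: 'Bone', 1: 'Bone.001'}, [2])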
{"nwo":"Befzz\/blender3d_import_psk_psa","sha":"47f1418aef7642f300e0fccbe3c96654ab275a52","path":"addons\/io_import_scene_unreal_psa_psk_280.py","language":"python","identifier":"psaimport","parameters":"(filepath,\n context = None,\n oArmature = None,\n bFilenameAsPrefix = False,\n bActionsToTrack = False,\n first_frames = 0,\n bDontInvertRoot = True,\n bUpdateTimelineRange = False,\n bRotationOnly = False,\n bScaleDown = True,\n fcurve_interpolation = 'LINEAR',\n # error_callback = __pass\n error_callback = print\n )","argument_list":"","return_statement":"","docstring":"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes","docstring_summary":"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes","docstring_tokens":["Import","animation","data","from","filepath","using","oArmature","Args",":","first_frames",":","(","0","-","import","all",")","Import","only","first_frames","from","each","action","bActionsToTrack",":","Put","all","imported","actions","in","one","NLAtrack",".","oArmature",":","Skeleton","used","to","calculate","keyframes"],"function":"def psaimport(filepath,\n context = None,\n oArmature = None,\n bFilenameAsPrefix = False,\n bActionsToTrack = False,\n first_frames = 0,\n bDontInvertRoot = True,\n bUpdateTimelineRange = False,\n bRotationOnly = False,\n bScaleDown = True,\n fcurve_interpolation = 'LINEAR',\n # error_callback = __pass\n error_callback = print\n ):\n \"\"\"Import animation data from 'filepath' using 'oArmature'\n \n Args:\n first_frames: (0 - import all)\n Import only 'first_frames' from each action\n \n bActionsToTrack:\n Put all imported actions in one NLAtrack.\n \n oArmature:\n Skeleton used to calculate keyframes\n \"\"\"\n print (\"-----------------------------------------------\")\n print (\"---------EXECUTING PSA PYTHON IMPORTER---------\")\n print (\"-----------------------------------------------\")\n \n file_ext = 'psa'\n try:\n psafile = open(filepath, 'rb')\n except IOError:\n error_callback('Error while opening file for reading:\\n \"'+filepath+'\"')\n return False\n \n print (\"Importing file: \", filepath)\n \n \n if not context:\n context = bpy.context\n \n armature_obj = oArmature\n \n if armature_obj is None: \n armature_obj = blen_get_armature_from_selection()\n if armature_obj is None:\n error_callback(\"No armature selected.\")\n return False\n\n\n chunk_id = None\n chunk_type = None\n chunk_datasize = None\n chunk_datacount = None\n chunk_data = None\n\n def read_chunk():\n nonlocal chunk_id, chunk_type,\\\n chunk_datasize, chunk_datacount,\\\n chunk_data\n\n (chunk_id, chunk_type,\n chunk_datasize, chunk_datacount) = unpack('20s3i', psafile.read(32))\n \n chunk_data = psafile.read(chunk_datacount * chunk_datasize)\n #============================================================================================== \n # General Header\n #============================================================================================== \n read_chunk()\n \n if not util_is_header_valid(filepath, file_ext, chunk_id, error_callback):\n return False\n \n 
#============================================================================================== \n # Bones (FNamedBoneBinary)\n #============================================================================================== \n read_chunk()\n \n psa_bones = {}\n \n def new_psa_bone(bone, pose_bone):\n psa_bone = class_psa_bone()\n \n psa_bones[pose_bone.name] = psa_bone\n \n psa_bone.name = pose_bone.name\n \n psa_bone.pose_bone = pose_bone\n \n if bone.parent != None:\n # does needed parent bone was added from psa file\n if bone.parent.name in psa_bones:\n psa_bone.parent = psa_bones[bone.parent.name]\n # no. armature doesnt match\n else:\n psa_bone.parent = None\n # else:\n # psa_bone.parent = None\n\n # brute fix for non psk skeletons\n if bone.get('orig_quat') is None:\n\n if bone.parent != None:\n \n psa_bone.orig_loc = bone.matrix_local.translation - bone.parent.matrix_local.translation\n psa_bone.orig_loc.rotate( bone.parent.matrix_local.to_quaternion().conjugated() )\n\n psa_bone.orig_quat = bone.matrix_local.to_quaternion()\n psa_bone.orig_quat.rotate( bone.parent.matrix_local.to_quaternion().conjugated() )\n psa_bone.orig_quat.conjugate()\n else:\n psa_bone.orig_loc = bone.matrix_local.translation.copy()\n psa_bone.orig_quat = bone.matrix_local.to_quaternion()\n\n psa_bone.post_quat = psa_bone.orig_quat.conjugated()\n else:\n psa_bone.orig_quat = Quaternion(bone['orig_quat'])\n psa_bone.orig_loc = Vector(bone['orig_loc'])\n psa_bone.post_quat = Quaternion(bone['post_quat'])\n\n return psa_bone\n \n #Bones Data\n BoneIndex2Name = [None] * chunk_datacount\n BoneNotFoundList = []\n BonesWithoutAnimation = []\n PsaBonesToProcess = [None] * chunk_datacount\n BonePsaImportedNames = []\n\n # printlog(\"Name\\tFlgs\\tNumChld\\tPrntIdx\\tQx\\tQy\\tQz\\tQw\\tLocX\\tLocY\\tLocZ\\tLength\\tXSize\\tYSize\\tZSize\\n\")\n\n \n # for case insensetive comparison\n # key = lowered name\n # value = orignal name\n skeleton_bones_lowered = {}\n \n for blender_bone_name in armature_obj.data.bones.keys():\n skeleton_bones_lowered[blender_bone_name.lower()] = blender_bone_name\n\n \n for counter in range(chunk_datacount):\n \n # tPrntIdx is -1 for parent; and 0 for other; no more useful data\n # indata = unpack_from('64s3i11f', chunk_data, chunk_datasize * counter)\n (indata) = unpack_from('64s56x', chunk_data, chunk_datasize * counter)\n in_name = util_bytes_to_str(indata[0])\n # bonename = util_bytes_to_str(indata[0]).upper()\n \n in_name_lowered = in_name.lower()\n if in_name_lowered in skeleton_bones_lowered:\n orig_name = skeleton_bones_lowered[in_name_lowered]\n \n count_duplicates = BonePsaImportedNames.count( in_name_lowered )\n\n if count_duplicates > 0:\n\n duplicate_name_numbered = in_name_lowered + ('.%03d' % count_duplicates)\n\n # print('Dup:', in_name_lowered, '~',duplicate_name_numbered)\n\n # Skeleton have duplicate name too?\n if duplicate_name_numbered in skeleton_bones_lowered:\n orig_name = orig_name + ('.%03d' % count_duplicates)\n else:\n # Skip animation import for that bone\n print(\" PSK do not have numbered duplicate name(but PSA have!):\", duplicate_name_numbered)\n BonePsaImportedNames.append(in_name_lowered)\n continue\n \n \n # use a skeleton bone name \n BoneIndex2Name[counter] = orig_name\n PsaBonesToProcess[counter] = new_psa_bone(armature_obj.data.bones[orig_name], \n armature_obj.pose.bones[orig_name])\n BonePsaImportedNames.append(in_name_lowered)\n else:\n # print(\"Can't find the bone:\", orig_name, in_name_lowered)\n BoneNotFoundList.append(counter)\n \n \n if 
len(psa_bones) == 0:\n error_callback('No bone was match!\\nSkip import!')\n return False\n \n # does anyone care?\n for blender_bone_name in armature_obj.data.bones.keys():\n if BoneIndex2Name.count(blender_bone_name) == 0:\n BonesWithoutAnimation.append(blender_bone_name)\n \n if len(BoneNotFoundList) > 0:\n print('PSA have data for more bones: %i.' % len(BoneNotFoundList))\n \n if len(BonesWithoutAnimation) > 0:\n print('PSA do not have data for %i bones:\\n' % len(BonesWithoutAnimation), ', '.join(BonesWithoutAnimation))\n #============================================================================================== \n # Animations (AniminfoBinary)\n #============================================================================================== \n read_chunk()\n\n Raw_Key_Nums = 0\n Action_List = [None] * chunk_datacount\n \n for counter in range(chunk_datacount):\n (action_name_raw, #0\n group_name_raw, #1\n Totalbones, #2\n RootInclude, #3\n KeyCompressionStyle, #4\n KeyQuotum, #5\n KeyReduction, #6\n TrackTime, #7\n AnimRate, #8\n StartBone, #9\n FirstRawFrame, #10\n NumRawFrames #11\n ) = unpack_from('64s64s4i3f3i', chunk_data, chunk_datasize * counter)\n \n action_name = util_bytes_to_str( action_name_raw )\n group_name = util_bytes_to_str( group_name_raw )\n\n Raw_Key_Nums += Totalbones * NumRawFrames\n Action_List[counter] = ( action_name, group_name, Totalbones, NumRawFrames)\n \n #============================================================================================== \n # Raw keys (VQuatAnimKey) 3f vec, 4f quat, 1f time\n #============================================================================================== \n read_chunk()\n \n if(Raw_Key_Nums != chunk_datacount):\n error_callback(\n 'Raw_Key_Nums Inconsistent.'\n '\\nData count found: '+chunk_datacount+\n '\\nRaw_Key_Nums:' + Raw_Key_Nums\n )\n return False\n\n Raw_Key_List = [None] * chunk_datacount\n \n unpack_data = Struct('3f4f4x').unpack_from\n \n for counter in range(chunk_datacount):\n pos = Vector()\n quat = Quaternion()\n \n ( pos.x, pos.y, pos.z,\n quat.x, quat.y, quat.z, quat.w\n ) = unpack_data( chunk_data, chunk_datasize * counter)\n \n if bScaleDown:\n Raw_Key_List[counter] = (pos * 0.01, quat)\n else:\n Raw_Key_List[counter] = (pos, quat)\n \n psafile.close()\n \n utils_set_mode('OBJECT')\n\n # index of current frame in raw input data\n raw_key_index = 0\n \n util_obj_set_active(context, armature_obj)\n \n gen_name_part = util_gen_name_part(filepath)\n \n armature_obj.animation_data_create()\n \n if bActionsToTrack:\n nla_track = armature_obj.animation_data.nla_tracks.new()\n nla_track.name = gen_name_part\n nla_stripes = nla_track.strips\n\n nla_track_last_frame = 0\n\n if len(armature_obj.animation_data.nla_tracks) > 0:\n for track in armature_obj.animation_data.nla_tracks:\n if len(track.strips) > 0:\n if track.strips[-1].frame_end > nla_track_last_frame:\n nla_track_last_frame = track.strips[-1].frame_end\n\n \n is_first_action = True\n first_action = None\n \n for counter, (Name, Group, Totalbones, NumRawFrames) in enumerate(Action_List):\n ref_time = time.process_time()\n \n if Group != 'None':\n Name = \"(%s) %s\" % (Group,Name)\n if bFilenameAsPrefix:\n Name = \"(%s) %s\" % (gen_name_part, Name)\n \n action = bpy.data.actions.new(name = Name)\n \n # force print usefull information to console(due to possible long execution)\n print(\"Action {0:>3d}\/{1:<3d} frames: {2:>4d} {3}\".format(\n counter+1, len(Action_List), NumRawFrames, Name)\n )\n \n if first_frames > 0:\n maxframes = 
first_frames\n keyframes = min(first_frames, NumRawFrames)\n #dev\n # keyframes += 1\n else:\n maxframes = 99999999\n keyframes = NumRawFrames\n \n # create all fcurves(for all bones) for an action\n # for pose_bone in armature_obj.pose.bones:\n for psa_bone in PsaBonesToProcess:\n if psa_bone is None:\n continue\n pose_bone = psa_bone.pose_bone\n \n data_path = pose_bone.path_from_id(\"rotation_quaternion\")\n psa_bone.fcurve_quat_w = action.fcurves.new(data_path, index = 0)\n psa_bone.fcurve_quat_x = action.fcurves.new(data_path, index = 1)\n psa_bone.fcurve_quat_y = action.fcurves.new(data_path, index = 2)\n psa_bone.fcurve_quat_z = action.fcurves.new(data_path, index = 3)\n \n if not bRotationOnly:\n data_path = pose_bone.path_from_id(\"location\")\n psa_bone.fcurve_loc_x = action.fcurves.new(data_path, index = 0)\n psa_bone.fcurve_loc_y = action.fcurves.new(data_path, index = 1)\n psa_bone.fcurve_loc_z = action.fcurves.new(data_path, index = 2)\n \n # 1. Pre-add keyframes! \\0\/\n # 2. Set data: keyframe_points[].co[0..1]\n # 3. If 2 is not done, do 4: (important!!!)\n # 4. \"Apply\" data: fcurve.update()\n # # added keyframes points by default is breaking fcurve somehow\n # # bcs they are all at the same position?\n psa_bone.fcurve_quat_w.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_x.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_y.keyframe_points.add(keyframes)\n psa_bone.fcurve_quat_z.keyframe_points.add(keyframes)\n\n if not bRotationOnly:\n psa_bone.fcurve_loc_x.keyframe_points.add(keyframes) \n psa_bone.fcurve_loc_y.keyframe_points.add(keyframes) \n psa_bone.fcurve_loc_z.keyframe_points.add(keyframes) \n \n for i in range(0,min(maxframes, NumRawFrames)):\n # raw_key_index+= Totalbones * 5 #55\n for j in range(Totalbones):\n if j in BoneNotFoundList:\n raw_key_index += 1\n continue\n \n psa_bone = PsaBonesToProcess[j]\n # pose_bone = psa_bone.pose_bone\n \n p_pos = Raw_Key_List[raw_key_index][0]\n p_quat = Raw_Key_List[raw_key_index][1]\n \n # @\n # if psa_bone.parent:\n # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n # else:\n # if bDontInvertRoot:\n # quat = (p_quat.conjugated() * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n # else:\n # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n\n q = psa_bone.post_quat.copy()\n q.rotate( psa_bone.orig_quat )\n\n quat = q\n\n q = psa_bone.post_quat.copy()\n\n if psa_bone.parent == None and bDontInvertRoot:\n q.rotate( p_quat.conjugated() )\n else:\n q.rotate( p_quat )\n\n quat.rotate( q.conjugated() )\n \n # @\n # loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc\n \n if not bRotationOnly:\n loc = (p_pos - psa_bone.orig_loc)\n # \"edit bone\" location is in \"parent space\"\n # but \"pose bone\" location is in \"local space(bone)\"\n # so we need to transform from parent(edit_bone) to local space (pose_bone)\n loc.rotate( psa_bone.post_quat.conjugated() )\n \n # if not bRotationOnly:\n # loc = (p_pos - psa_bone.orig_loc)\n # if psa_bone.parent is not None:\n # q = psa_bone.parent.post_quat.copy()\n # q.rotate( psa_bone.parent.orig_quat )\n # print(q)\n # loc.rotate( psa_bone.parent.post_quat.conjugated() )\n # loc.rotate( q.conjugated() )\n # loc.rotate( q )\n # pass\n \n # quat = p_quat.conjugated()\n # quat = p_quat\n # quat.rotate( psa_bone.orig_quat.conjugated() )\n # quat = Quaternion()\n # loc = -p_pos\n # loc = (p_pos - 
psa_bone.orig_loc)\n # loc = Vector()\n # loc.rotate( psa_bone.post_quat.conjugated() )\n\n # Set it?\n # pose_bone.rotation_quaternion = quat\n # pose_bone.location = loc\n\n # pose_bone.rotation_quaternion = orig_rot.conjugated()\n # pose_bone.location = p_pos - (pose_bone.bone.matrix_local.translation - pose_bone.bone.parent.matrix_local.translation)\n \n ##### Works + post_quat (without location works)\n # quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)\n # loc = psa_bone.post_quat.conjugated() * (p_pos - psa_bone.orig_loc)\n\n \n psa_bone.fcurve_quat_w.keyframe_points[i].co = i, quat.w\n psa_bone.fcurve_quat_x.keyframe_points[i].co = i, quat.x\n psa_bone.fcurve_quat_y.keyframe_points[i].co = i, quat.y\n psa_bone.fcurve_quat_z.keyframe_points[i].co = i, quat.z\n \n psa_bone.fcurve_quat_w.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_x.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_y.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_quat_z.keyframe_points[i].interpolation = fcurve_interpolation\n \n \n if not bRotationOnly:\n psa_bone.fcurve_loc_x.keyframe_points[i].co = i, loc.x\n psa_bone.fcurve_loc_y.keyframe_points[i].co = i, loc.y\n psa_bone.fcurve_loc_z.keyframe_points[i].co = i, loc.z\n \n psa_bone.fcurve_loc_x.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_loc_y.keyframe_points[i].interpolation = fcurve_interpolation\n psa_bone.fcurve_loc_z.keyframe_points[i].interpolation = fcurve_interpolation\n \n # Old path. Slower.\n # psa_bone.fcurve_quat_w.keyframe_points.insert(i,quat.w,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_x.keyframe_points.insert(i,quat.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_y.keyframe_points.insert(i,quat.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_quat_z.keyframe_points.insert(i,quat.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n\n # psa_bone.fcurve_loc_x.keyframe_points.insert(i,loc.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_loc_y.keyframe_points.insert(i,loc.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n # psa_bone.fcurve_loc_z.keyframe_points.insert(i,loc.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation\n raw_key_index += 1\n \n # on first frame\n # break\n raw_key_index += (NumRawFrames-min(maxframes,NumRawFrames)) * Totalbones\n\n # Add action to tail of the nla track\n if bActionsToTrack:\n \n if len(nla_track.strips) == 0:\n strip = nla_stripes.new(Name, nla_track_last_frame, action)\n else:\n strip = nla_stripes.new(Name, nla_stripes[-1].frame_end, action)\n\n # Do not pollute track. 
Makes other tracks 'visible' through 'empty space'.\n strip.extrapolation = 'NOTHING'\n\n nla_track_last_frame += NumRawFrames\n\n if is_first_action:\n first_action = action\n is_first_action = False\n \n print(\"Done: %f sec.\" % (time.process_time() - ref_time))\n # break on first animation set\n # break\n \n scene = util_get_scene(context)\n\n if not bActionsToTrack:\n if not scene.is_nla_tweakmode:\n armature_obj.animation_data.action = first_action\n \n if bUpdateTimelineRange:\n\n scene.frame_start = 0\n\n if bActionsToTrack:\n scene.frame_end = sum(frames for _, _, _, frames in Action_List) - 1\n else:\n scene.frame_end = max(frames for _, _, _, frames in Action_List) - 1\n\n\n util_select_all(False)\n util_obj_select(context, armature_obj)\n util_obj_set_active(context, armature_obj)","function_tokens":["def","psaimport","(","filepath",",","context","=","None",",","oArmature","=","None",",","bFilenameAsPrefix","=","False",",","bActionsToTrack","=","False",",","first_frames","=","0",",","bDontInvertRoot","=","True",",","bUpdateTimelineRange","=","False",",","bRotationOnly","=","False",",","bScaleDown","=","True",",","fcurve_interpolation","=","'LINEAR'",",","# error_callback = __pass","error_callback","=","print",")",":","print","(","\"-----------------------------------------------\"",")","print","(","\"---------EXECUTING PSA PYTHON IMPORTER---------\"",")","print","(","\"-----------------------------------------------\"",")","file_ext","=","'psa'","try",":","psafile","=","open","(","filepath",",","'rb'",")","except","IOError",":","error_callback","(","'Error while opening file for reading:\\n \"'","+","filepath","+","'\"'",")","return","False","print","(","\"Importing file: \"",",","filepath",")","if","not","context",":","context","=","bpy",".","context","armature_obj","=","oArmature","if","armature_obj","is","None",":","armature_obj","=","blen_get_armature_from_selection","(",")","if","armature_obj","is","None",":","error_callback","(","\"No armature selected.\"",")","return","False","chunk_id","=","None","chunk_type","=","None","chunk_datasize","=","None","chunk_datacount","=","None","chunk_data","=","None","def","read_chunk","(",")",":","nonlocal","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",",","chunk_data","(","chunk_id",",","chunk_type",",","chunk_datasize",",","chunk_datacount",")","=","unpack","(","'20s3i'",",","psafile",".","read","(","32",")",")","chunk_data","=","psafile",".","read","(","chunk_datacount","*","chunk_datasize",")","#============================================================================================== ","# General Header","#============================================================================================== ","read_chunk","(",")","if","not","util_is_header_valid","(","filepath",",","file_ext",",","chunk_id",",","error_callback",")",":","return","False","#============================================================================================== ","# Bones (FNamedBoneBinary)","#============================================================================================== ","read_chunk","(",")","psa_bones","=","{","}","def","new_psa_bone","(","bone",",","pose_bone",")",":","psa_bone","=","class_psa_bone","(",")","psa_bones","[","pose_bone",".","name","]","=","psa_bone","psa_bone",".","name","=","pose_bone",".","name","psa_bone",".","pose_bone","=","pose_bone","if","bone",".","parent","!=","None",":","# does needed parent bone was added from psa 
file","if","bone",".","parent",".","name","in","psa_bones",":","psa_bone",".","parent","=","psa_bones","[","bone",".","parent",".","name","]","# no. armature doesnt match","else",":","psa_bone",".","parent","=","None","# else:","# psa_bone.parent = None","# brute fix for non psk skeletons","if","bone",".","get","(","'orig_quat'",")","is","None",":","if","bone",".","parent","!=","None",":","psa_bone",".","orig_loc","=","bone",".","matrix_local",".","translation","-","bone",".","parent",".","matrix_local",".","translation","psa_bone",".","orig_loc",".","rotate","(","bone",".","parent",".","matrix_local",".","to_quaternion","(",")",".","conjugated","(",")",")","psa_bone",".","orig_quat","=","bone",".","matrix_local",".","to_quaternion","(",")","psa_bone",".","orig_quat",".","rotate","(","bone",".","parent",".","matrix_local",".","to_quaternion","(",")",".","conjugated","(",")",")","psa_bone",".","orig_quat",".","conjugate","(",")","else",":","psa_bone",".","orig_loc","=","bone",".","matrix_local",".","translation",".","copy","(",")","psa_bone",".","orig_quat","=","bone",".","matrix_local",".","to_quaternion","(",")","psa_bone",".","post_quat","=","psa_bone",".","orig_quat",".","conjugated","(",")","else",":","psa_bone",".","orig_quat","=","Quaternion","(","bone","[","'orig_quat'","]",")","psa_bone",".","orig_loc","=","Vector","(","bone","[","'orig_loc'","]",")","psa_bone",".","post_quat","=","Quaternion","(","bone","[","'post_quat'","]",")","return","psa_bone","#Bones Data","BoneIndex2Name","=","[","None","]","*","chunk_datacount","BoneNotFoundList","=","[","]","BonesWithoutAnimation","=","[","]","PsaBonesToProcess","=","[","None","]","*","chunk_datacount","BonePsaImportedNames","=","[","]","# printlog(\"Name\\tFlgs\\tNumChld\\tPrntIdx\\tQx\\tQy\\tQz\\tQw\\tLocX\\tLocY\\tLocZ\\tLength\\tXSize\\tYSize\\tZSize\\n\")","# for case insensetive comparison","# key = lowered name","# value = orignal name","skeleton_bones_lowered","=","{","}","for","blender_bone_name","in","armature_obj",".","data",".","bones",".","keys","(",")",":","skeleton_bones_lowered","[","blender_bone_name",".","lower","(",")","]","=","blender_bone_name","for","counter","in","range","(","chunk_datacount",")",":","# tPrntIdx is -1 for parent; and 0 for other; no more useful data","# indata = unpack_from('64s3i11f', chunk_data, chunk_datasize * counter)","(","indata",")","=","unpack_from","(","'64s56x'",",","chunk_data",",","chunk_datasize","*","counter",")","in_name","=","util_bytes_to_str","(","indata","[","0","]",")","# bonename = util_bytes_to_str(indata[0]).upper()","in_name_lowered","=","in_name",".","lower","(",")","if","in_name_lowered","in","skeleton_bones_lowered",":","orig_name","=","skeleton_bones_lowered","[","in_name_lowered","]","count_duplicates","=","BonePsaImportedNames",".","count","(","in_name_lowered",")","if","count_duplicates",">","0",":","duplicate_name_numbered","=","in_name_lowered","+","(","'.%03d'","%","count_duplicates",")","# print('Dup:', in_name_lowered, '~',duplicate_name_numbered)","# Skeleton have duplicate name too?","if","duplicate_name_numbered","in","skeleton_bones_lowered",":","orig_name","=","orig_name","+","(","'.%03d'","%","count_duplicates",")","else",":","# Skip animation import for that bone","print","(","\" PSK do not have numbered duplicate name(but PSA have!):\"",",","duplicate_name_numbered",")","BonePsaImportedNames",".","append","(","in_name_lowered",")","continue","# use a skeleton bone name 
","BoneIndex2Name","[","counter","]","=","orig_name","PsaBonesToProcess","[","counter","]","=","new_psa_bone","(","armature_obj",".","data",".","bones","[","orig_name","]",",","armature_obj",".","pose",".","bones","[","orig_name","]",")","BonePsaImportedNames",".","append","(","in_name_lowered",")","else",":","# print(\"Can't find the bone:\", orig_name, in_name_lowered)","BoneNotFoundList",".","append","(","counter",")","if","len","(","psa_bones",")","==","0",":","error_callback","(","'No bone was match!\\nSkip import!'",")","return","False","# does anyone care?","for","blender_bone_name","in","armature_obj",".","data",".","bones",".","keys","(",")",":","if","BoneIndex2Name",".","count","(","blender_bone_name",")","==","0",":","BonesWithoutAnimation",".","append","(","blender_bone_name",")","if","len","(","BoneNotFoundList",")",">","0",":","print","(","'PSA have data for more bones: %i.'","%","len","(","BoneNotFoundList",")",")","if","len","(","BonesWithoutAnimation",")",">","0",":","print","(","'PSA do not have data for %i bones:\\n'","%","len","(","BonesWithoutAnimation",")",",","', '",".","join","(","BonesWithoutAnimation",")",")","#============================================================================================== ","# Animations (AniminfoBinary)","#============================================================================================== ","read_chunk","(",")","Raw_Key_Nums","=","0","Action_List","=","[","None","]","*","chunk_datacount","for","counter","in","range","(","chunk_datacount",")",":","(","action_name_raw",",","#0","group_name_raw",",","#1","Totalbones",",","#2","RootInclude",",","#3","KeyCompressionStyle",",","#4","KeyQuotum",",","#5","KeyReduction",",","#6","TrackTime",",","#7","AnimRate",",","#8","StartBone",",","#9","FirstRawFrame",",","#10","NumRawFrames","#11",")","=","unpack_from","(","'64s64s4i3f3i'",",","chunk_data",",","chunk_datasize","*","counter",")","action_name","=","util_bytes_to_str","(","action_name_raw",")","group_name","=","util_bytes_to_str","(","group_name_raw",")","Raw_Key_Nums","+=","Totalbones","*","NumRawFrames","Action_List","[","counter","]","=","(","action_name",",","group_name",",","Totalbones",",","NumRawFrames",")","#============================================================================================== ","# Raw keys (VQuatAnimKey) 3f vec, 4f quat, 1f time","#============================================================================================== ","read_chunk","(",")","if","(","Raw_Key_Nums","!=","chunk_datacount",")",":","error_callback","(","'Raw_Key_Nums Inconsistent.'","'\\nData count found: '","+","chunk_datacount","+","'\\nRaw_Key_Nums:'","+","Raw_Key_Nums",")","return","False","Raw_Key_List","=","[","None","]","*","chunk_datacount","unpack_data","=","Struct","(","'3f4f4x'",")",".","unpack_from","for","counter","in","range","(","chunk_datacount",")",":","pos","=","Vector","(",")","quat","=","Quaternion","(",")","(","pos",".","x",",","pos",".","y",",","pos",".","z",",","quat",".","x",",","quat",".","y",",","quat",".","z",",","quat",".","w",")","=","unpack_data","(","chunk_data",",","chunk_datasize","*","counter",")","if","bScaleDown",":","Raw_Key_List","[","counter","]","=","(","pos","*","0.01",",","quat",")","else",":","Raw_Key_List","[","counter","]","=","(","pos",",","quat",")","psafile",".","close","(",")","utils_set_mode","(","'OBJECT'",")","# index of current frame in raw input 
data","raw_key_index","=","0","util_obj_set_active","(","context",",","armature_obj",")","gen_name_part","=","util_gen_name_part","(","filepath",")","armature_obj",".","animation_data_create","(",")","if","bActionsToTrack",":","nla_track","=","armature_obj",".","animation_data",".","nla_tracks",".","new","(",")","nla_track",".","name","=","gen_name_part","nla_stripes","=","nla_track",".","strips","nla_track_last_frame","=","0","if","len","(","armature_obj",".","animation_data",".","nla_tracks",")",">","0",":","for","track","in","armature_obj",".","animation_data",".","nla_tracks",":","if","len","(","track",".","strips",")",">","0",":","if","track",".","strips","[","-","1","]",".","frame_end",">","nla_track_last_frame",":","nla_track_last_frame","=","track",".","strips","[","-","1","]",".","frame_end","is_first_action","=","True","first_action","=","None","for","counter",",","(","Name",",","Group",",","Totalbones",",","NumRawFrames",")","in","enumerate","(","Action_List",")",":","ref_time","=","time",".","process_time","(",")","if","Group","!=","'None'",":","Name","=","\"(%s) %s\"","%","(","Group",",","Name",")","if","bFilenameAsPrefix",":","Name","=","\"(%s) %s\"","%","(","gen_name_part",",","Name",")","action","=","bpy",".","data",".","actions",".","new","(","name","=","Name",")","# force print usefull information to console(due to possible long execution)","print","(","\"Action {0:>3d}\/{1:<3d} frames: {2:>4d} {3}\"",".","format","(","counter","+","1",",","len","(","Action_List",")",",","NumRawFrames",",","Name",")",")","if","first_frames",">","0",":","maxframes","=","first_frames","keyframes","=","min","(","first_frames",",","NumRawFrames",")","#dev","# keyframes += 1","else",":","maxframes","=","99999999","keyframes","=","NumRawFrames","# create all fcurves(for all bones) for an action","# for pose_bone in armature_obj.pose.bones:","for","psa_bone","in","PsaBonesToProcess",":","if","psa_bone","is","None",":","continue","pose_bone","=","psa_bone",".","pose_bone","data_path","=","pose_bone",".","path_from_id","(","\"rotation_quaternion\"",")","psa_bone",".","fcurve_quat_w","=","action",".","fcurves",".","new","(","data_path",",","index","=","0",")","psa_bone",".","fcurve_quat_x","=","action",".","fcurves",".","new","(","data_path",",","index","=","1",")","psa_bone",".","fcurve_quat_y","=","action",".","fcurves",".","new","(","data_path",",","index","=","2",")","psa_bone",".","fcurve_quat_z","=","action",".","fcurves",".","new","(","data_path",",","index","=","3",")","if","not","bRotationOnly",":","data_path","=","pose_bone",".","path_from_id","(","\"location\"",")","psa_bone",".","fcurve_loc_x","=","action",".","fcurves",".","new","(","data_path",",","index","=","0",")","psa_bone",".","fcurve_loc_y","=","action",".","fcurves",".","new","(","data_path",",","index","=","1",")","psa_bone",".","fcurve_loc_z","=","action",".","fcurves",".","new","(","data_path",",","index","=","2",")","# 1. Pre-add keyframes! \\0\/","# 2. Set data: keyframe_points[].co[0..1]","# 3. If 2 is not done, do 4: (important!!!)","# 4. 
\"Apply\" data: fcurve.update()","# # added keyframes points by default is breaking fcurve somehow","# # bcs they are all at the same position?","psa_bone",".","fcurve_quat_w",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_x",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_y",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_quat_z",".","keyframe_points",".","add","(","keyframes",")","if","not","bRotationOnly",":","psa_bone",".","fcurve_loc_x",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_loc_y",".","keyframe_points",".","add","(","keyframes",")","psa_bone",".","fcurve_loc_z",".","keyframe_points",".","add","(","keyframes",")","for","i","in","range","(","0",",","min","(","maxframes",",","NumRawFrames",")",")",":","# raw_key_index+= Totalbones * 5 #55","for","j","in","range","(","Totalbones",")",":","if","j","in","BoneNotFoundList",":","raw_key_index","+=","1","continue","psa_bone","=","PsaBonesToProcess","[","j","]","# pose_bone = psa_bone.pose_bone","p_pos","=","Raw_Key_List","[","raw_key_index","]","[","0","]","p_quat","=","Raw_Key_List","[","raw_key_index","]","[","1","]","# @","# if psa_bone.parent:","# quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)","# else:","# if bDontInvertRoot:","# quat = (p_quat.conjugated() * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)","# else:","# quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)","q","=","psa_bone",".","post_quat",".","copy","(",")","q",".","rotate","(","psa_bone",".","orig_quat",")","quat","=","q","q","=","psa_bone",".","post_quat",".","copy","(",")","if","psa_bone",".","parent","==","None","and","bDontInvertRoot",":","q",".","rotate","(","p_quat",".","conjugated","(",")",")","else",":","q",".","rotate","(","p_quat",")","quat",".","rotate","(","q",".","conjugated","(",")",")","# @","# loc = psa_bone.post_quat.conjugated() * p_pos - psa_bone.post_quat.conjugated() * psa_bone.orig_loc","if","not","bRotationOnly",":","loc","=","(","p_pos","-","psa_bone",".","orig_loc",")","# \"edit bone\" location is in \"parent space\"","# but \"pose bone\" location is in \"local space(bone)\"","# so we need to transform from parent(edit_bone) to local space (pose_bone)","loc",".","rotate","(","psa_bone",".","post_quat",".","conjugated","(",")",")","# if not bRotationOnly:","# loc = (p_pos - psa_bone.orig_loc)","# if psa_bone.parent is not None:","# q = psa_bone.parent.post_quat.copy()","# q.rotate( psa_bone.parent.orig_quat )","# print(q)","# loc.rotate( psa_bone.parent.post_quat.conjugated() )","# loc.rotate( q.conjugated() )","# loc.rotate( q )","# pass","# quat = p_quat.conjugated()","# quat = p_quat","# quat.rotate( psa_bone.orig_quat.conjugated() )","# quat = Quaternion()","# loc = -p_pos","# loc = (p_pos - psa_bone.orig_loc)","# loc = Vector()","# loc.rotate( psa_bone.post_quat.conjugated() )","# Set it?","# pose_bone.rotation_quaternion = quat","# pose_bone.location = loc","# pose_bone.rotation_quaternion = orig_rot.conjugated()","# pose_bone.location = p_pos - (pose_bone.bone.matrix_local.translation - pose_bone.bone.parent.matrix_local.translation)","##### Works + post_quat (without location works)","# quat = (p_quat * psa_bone.post_quat).conjugated() * (psa_bone.orig_quat * psa_bone.post_quat)","# loc = psa_bone.post_quat.conjugated() * (p_pos - 
psa_bone.orig_loc)","psa_bone",".","fcurve_quat_w",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","w","psa_bone",".","fcurve_quat_x",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","x","psa_bone",".","fcurve_quat_y",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","y","psa_bone",".","fcurve_quat_z",".","keyframe_points","[","i","]",".","co","=","i",",","quat",".","z","psa_bone",".","fcurve_quat_w",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_x",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_y",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_quat_z",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","if","not","bRotationOnly",":","psa_bone",".","fcurve_loc_x",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","x","psa_bone",".","fcurve_loc_y",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","y","psa_bone",".","fcurve_loc_z",".","keyframe_points","[","i","]",".","co","=","i",",","loc",".","z","psa_bone",".","fcurve_loc_x",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_loc_y",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","psa_bone",".","fcurve_loc_z",".","keyframe_points","[","i","]",".","interpolation","=","fcurve_interpolation","# Old path. Slower.","# psa_bone.fcurve_quat_w.keyframe_points.insert(i,quat.w,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_x.keyframe_points.insert(i,quat.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_y.keyframe_points.insert(i,quat.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_quat_z.keyframe_points.insert(i,quat.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_x.keyframe_points.insert(i,loc.x,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_y.keyframe_points.insert(i,loc.y,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","# psa_bone.fcurve_loc_z.keyframe_points.insert(i,loc.z,{'NEEDED','FAST'}).interpolation = fcurve_interpolation","raw_key_index","+=","1","# on first frame","# break","raw_key_index","+=","(","NumRawFrames","-","min","(","maxframes",",","NumRawFrames",")",")","*","Totalbones","# Add action to tail of the nla track","if","bActionsToTrack",":","if","len","(","nla_track",".","strips",")","==","0",":","strip","=","nla_stripes",".","new","(","Name",",","nla_track_last_frame",",","action",")","else",":","strip","=","nla_stripes",".","new","(","Name",",","nla_stripes","[","-","1","]",".","frame_end",",","action",")","# Do not pollute track. 
Makes other tracks 'visible' through 'empty space'.","strip",".","extrapolation","=","'NOTHING'","nla_track_last_frame","+=","NumRawFrames","if","is_first_action",":","first_action","=","action","is_first_action","=","False","print","(","\"Done: %f sec.\"","%","(","time",".","process_time","(",")","-","ref_time",")",")","# break on first animation set","# break","scene","=","util_get_scene","(","context",")","if","not","bActionsToTrack",":","if","not","scene",".","is_nla_tweakmode",":","armature_obj",".","animation_data",".","action","=","first_action","if","bUpdateTimelineRange",":","scene",".","frame_start","=","0","if","bActionsToTrack",":","scene",".","frame_end","=","sum","(","frames","for","_",",","_",",","_",",","frames","in","Action_List",")","-","1","else",":","scene",".","frame_end","=","max","(","frames","for","_",",","_",",","_",",","frames","in","Action_List",")","-","1","util_select_all","(","False",")","util_obj_select","(","context",",","armature_obj",")","util_obj_set_active","(","context",",","armature_obj",")"],"url":"https:\/\/github.com\/Befzz\/blender3d_import_psk_psa\/blob\/47f1418aef7642f300e0fccbe3c96654ab275a52\/addons\/io_import_scene_unreal_psa_psk_280.py#L1262-L1759"} |
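The `psaimport` routine recorded above reads each PSA section as a fixed-size chunk: a 32-byte header unpacked with `'20s3i'` (20-byte magic, then type, data size, and data count), followed by `datacount * datasize` bytes of payload, and it parses every raw key with `Struct('3f4f4x')` (position, quaternion, padded time field). A minimal stdlib-only sketch of that parsing pattern; the helper names here are illustrative and the optional 0.01 scale mirrors the `bScaleDown` branch:

    from struct import unpack, Struct

    def demo_read_chunk(f):
        """Read one PSA/PSK chunk: 32-byte header, then datacount * datasize bytes of payload."""
        chunk_id, chunk_type, datasize, datacount = unpack('20s3i', f.read(32))
        return chunk_id, chunk_type, datasize, datacount, f.read(datacount * datasize)

    _raw_key = Struct('3f4f4x')   # VQuatAnimKey: 3f position, 4f quaternion (x, y, z, w), time skipped

    def demo_read_raw_keys(data, datasize, datacount, scale_down=True):
        scale = 0.01 if scale_down else 1.0
        keys = []
        for i in range(datacount):
            px, py, pz, qx, qy, qz, qw = _raw_key.unpack_from(data, datasize * i)
            keys.append(((px * scale, py * scale, pz * scale), (qx, qy, qz, qw)))
        return keys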
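PSA bone names are matched to the armature case-insensitively, and a PSA name that repeats is resolved to Blender's `.001`-style numbered duplicate when the skeleton actually has one; indices with no match are remembered so their raw keys can be skipped during keyframe filling. A rough sketch of that matching logic in plain Python (the per-bone rest-pose state kept by the record is omitted, and the sample names are made up):

    def demo_match_bones(psa_names, armature_names):
        lowered = {name.lower(): name for name in armature_names}
        seen, matched, missing = [], {}, []
        for i, name in enumerate(psa_names):
            key = name.lower()
            dupes = seen.count(key)
            seen.append(key)
            if key not in lowered:
                missing.append(i)                    # no such bone: skip its keys later
                continue
            if dupes:
                numbered = key + '.%03d' % dupes     # Blender-style duplicate suffix
                if numbered not in lowered:
                    continue                         # PSA duplicate the skeleton lacks
                matched[i] = lowered[numbered]
            else:
                matched[i] = lowered[key]
        return matched, missing

    # demo_match_bones(['Root', 'arm', 'ARM'], ['root', 'Arm', 'Arm.001'])
    # -> ({0: 'root', 1: 'Arm', 2: 'Arm.001'}, [])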
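Per-key `keyframe_points.insert()` is noted in the record as the slower path; instead, the importer creates one F-curve per channel, pre-adds all keyframe points with `keyframe_points.add()`, and writes `co` and `interpolation` directly. When points are written that way, calling `fcurve.update()` afterwards is the usual way to make sure the curve is re-sorted and its handles recalculated. A condensed sketch of the pattern, assuming Blender's `bpy` (the helper name is hypothetical):

    import bpy

    def demo_fill_fcurve(action, data_path, array_index, values, interpolation='LINEAR'):
        """Create one F-curve and fill it by pre-adding all keyframe points at once."""
        fcu = action.fcurves.new(data_path, index=array_index)
        fcu.keyframe_points.add(len(values))          # 1. pre-allocate every point
        for frame, value in enumerate(values):
            point = fcu.keyframe_points[frame]
            point.co = frame, value                   # 2. write (frame, value) directly
            point.interpolation = interpolation
        fcu.update()                                  # 3. rebuild the curve after direct writes
        return fcu

    # e.g. for a pose bone's quaternion W channel:
    # demo_fill_fcurve(action, pose_bone.path_from_id("rotation_quaternion"), 0, [1.0, 0.9, 0.8])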
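Each raw key is then converted from the PSA's parent-relative bone space into Blender pose-bone space: the rotation composes the rest orientation (`orig_quat`) and the key quaternion through `post_quat`, with the root key conjugated when `bDontInvertRoot` is set, and the location is the offset from `orig_loc` rotated by `post_quat.conjugated()` because edit-bone translations live in parent space while pose-bone locations are local to the bone. The same arithmetic as a standalone sketch using `mathutils` (Blender's math module; the sample arguments in the comment are arbitrary):

    from mathutils import Quaternion, Vector

    def demo_key_to_pose_space(p_quat, p_pos, orig_quat, orig_loc, post_quat,
                               is_root=False, dont_invert_root=True):
        """Return (rotation_quaternion, location) for a pose bone from one raw PSA key."""
        quat = post_quat.copy()
        quat.rotate(orig_quat)                        # rest orientation seen through post_quat
        q = post_quat.copy()
        if is_root and dont_invert_root:
            q.rotate(p_quat.conjugated())             # optionally un-invert the root key
        else:
            q.rotate(p_quat)
        quat.rotate(q.conjugated())
        loc = p_pos - orig_loc                        # delta from the rest position...
        loc.rotate(post_quat.conjugated())            # ...moved into pose-bone local space
        return quat, loc

    # demo_key_to_pose_space(Quaternion((1, 0, 0, 0)), Vector((0.0, 0.1, 0.0)),
    #                        Quaternion((1, 0, 0, 0)), Vector((0.0, 0.0, 0.0)),
    #                        Quaternion((1, 0, 0, 0)))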
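With `bActionsToTrack` enabled, every imported action becomes a strip appended to the tail of a new NLA track, starting after the last strip found on any existing track, and each strip gets `extrapolation = 'NOTHING'` so it does not blank out other tracks beyond its own range. A sketch of that placement, assuming Blender's `bpy` (the function and track names are hypothetical):

    import bpy

    def demo_append_actions_to_track(armature_obj, actions, track_name):
        anim = armature_obj.animation_data or armature_obj.animation_data_create()
        # start after the last strip already present on any existing track
        start = max((t.strips[-1].frame_end for t in anim.nla_tracks if t.strips), default=0)
        track = anim.nla_tracks.new()
        track.name = track_name
        for action in actions:
            strip = track.strips.new(action.name, int(start), action)
            strip.extrapolation = 'NOTHING'   # keep other tracks visible outside this strip
            start = strip.frame_end
        return track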
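The optional timeline update sets `scene.frame_start` to 0 and derives `scene.frame_end` from the action list: the summed frame counts minus one when the actions are laid end-to-end on an NLA track, or the longest single action minus one otherwise. For example (the frame counts below are made up):

    def demo_timeline_end(frame_counts, actions_on_one_track):
        """Last timeline frame: strips laid end-to-end need the sum, otherwise the longest action."""
        total = sum(frame_counts) if actions_on_one_track else max(frame_counts)
        return total - 1

    # demo_timeline_end([30, 61, 45], True)  -> 135
    # demo_timeline_end([30, 61, 45], False) -> 60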