[fbx] bugfixes skinning, materials and debugging info, merry xmas 🎄

Better materials:
- default values will no longer incorrectly trigger things like emission or
clear coat being enabled; importers mostly misunderstand
this value — a value of 1 does not actually enable emission, there must be a
non-zero setting other than the emission value for the emission flags to
actually be enabled correctly in our engine #42386
- Lambert materials are strongly warned against;
do not use Lambert in Godot, use a PBR material like Ai Standard Surface
 (it's like going from low quality to high definition in terms of output
 in scenes and on assets)
- roughness values are calculated correctly in this version

Fixes for the normals array - some normals from some files were dropped and
regenerated; this is now fixed.
- FBX indexing for items with a (-1) index for normal data is supported;
this is super helpful at increasing our range of compatibility with
FBX meshes.

Fix bone rest data **no longer requiring go to bind pose in Maya and various applications**
- Partial fix for #43688

Validation tools added for validating large projects

Provide package name, vendor and FBX vendor in the log.

Implemented metadata properties so we can read extra document
info required for bug reporting

**FBX 2011 (version 7200)** is unsupported now,
you must re-export your file in Maya or Max to upgrade the file format.

Fixes skin bind poses being generated based on node xforms in the scene.
Fixes duplicate bones being added to the skin, and prevents add_bones from
registering skeleton bones — it now registers skin bind poses instead,
but this should really be documented in the engine correctly; right now
it doesn't tell you this is the case.
This commit is contained in:
Gordon MacPherson 2020-12-02 15:38:11 +00:00 committed by RevoluPowered
parent 0b5a9bf716
commit de61cfe7c5
20 changed files with 513 additions and 252 deletions

View file

@ -33,8 +33,8 @@
#include "fbx_node.h"
#include "import_state.h"
Ref<FBXNode> FBXBone::get_link(const ImportState &state) const {
print_verbose("bone name: " + bone_name);
Ref<FBXNode> FBXSkinDeformer::get_link(const ImportState &state) const {
print_verbose("bone name: " + bone->bone_name);
// safe for when deformers must be polyfilled when skin has different count of binds to bones in the scene ;)
if (!cluster) {
@ -54,53 +54,3 @@ Ref<FBXNode> FBXBone::get_link(const ImportState &state) const {
// the node in space this is for, like if it's FOR a target.
return link_node;
}
/* right now we just get single skin working and we can patch in the multiple tomorrow - per skin not per bone. */
// this will work for multiple meshes :) awesomeness.
// okay so these formula's are complex and need proper understanding of
// shear, pivots, geometric pivots, pre rotation and post rotation
// additionally DO NOT EDIT THIS if your blender file isn't working.
// Contact RevoluPowered Gordon MacPherson if you are contemplating making edits to this.
Transform FBXBone::get_vertex_skin_xform(const ImportState &state, Transform mesh_global_position, bool &r_valid_pose) {
r_valid_pose = false;
print_verbose("get_vertex_skin_xform: " + bone_name);
// safe to do, this means we have 'remove unused deformer' checked.
if (!cluster) {
print_verbose("bone [" + itos(bone_id) + "] " + bone_name + ": has no skin offset poly-filling the skin to make rasterizer happy with unused deformers not being skinned");
r_valid_pose = true;
return Transform();
}
ERR_FAIL_COND_V_MSG(cluster == nullptr, Transform(), "[serious] unable to resolve the fbx cluster for this bone " + bone_name);
// these methods will ONLY work for Maya.
if (cluster->TransformAssociateModelValid()) {
//print_error("additive skinning in use");
Transform associate_global_init_position = cluster->TransformAssociateModel();
Transform associate_global_current_position = Transform();
Transform reference_global_init_position = cluster->GetTransform();
Transform cluster_global_init_position = cluster->TransformLink();
Ref<FBXNode> link_node = get_link(state);
ERR_FAIL_COND_V_MSG(link_node.is_null(), Transform(), "invalid link corrupt file detected");
r_valid_pose = true;
Transform cluster_global_current_position = link_node.is_valid() && link_node->pivot_transform.is_valid() ? link_node->pivot_transform->GlobalTransform : Transform();
vertex_transform_matrix = reference_global_init_position.affine_inverse() * associate_global_init_position * associate_global_current_position.affine_inverse() *
cluster_global_current_position * cluster_global_init_position.affine_inverse() * reference_global_init_position;
} else {
//print_error("non additive skinning is in use");
Transform reference_global_position = cluster->GetTransform();
Transform reference_global_current_position = mesh_global_position;
//Transform geometric_pivot = Transform(); // we do not use this - 3ds max only
Transform global_init_position = cluster->TransformLink();
if (global_init_position.basis.determinant() == 0) {
global_init_position = Transform(Basis(), global_init_position.origin);
}
Transform cluster_relative_init_position = global_init_position.affine_inverse() * reference_global_position;
Transform cluster_relative_position_inverse = reference_global_current_position.affine_inverse() * global_init_position;
vertex_transform_matrix = cluster_relative_position_inverse * cluster_relative_init_position;
r_valid_pose = true;
}
return vertex_transform_matrix;
}

View file

@ -49,9 +49,6 @@ struct FBXBone : public Reference {
return !valid_parent;
}
uint64_t target_node_id; // the node target id for the skeleton element
bool valid_target = false; // only applies to bones with a mesh / in the skin.
// Godot specific data
int godot_bone_id = -2; // godot internal bone id assigned after import
@ -60,36 +57,34 @@ struct FBXBone : public Reference {
bool valid_armature_id = false;
uint64_t armature_id = 0;
// Vertex Weight information
Transform transform_link; // todo remove
Transform transform_matrix; // todo remove
/* get associate model - the model can be invalid sometimes */
Ref<FBXBone> get_associate_model() const {
return parent_bone;
}
/* link node is the parent bone */
Ref<FBXNode> get_link(const ImportState &state) const;
Transform get_vertex_skin_xform(const ImportState &state, Transform mesh_global_position, bool &valid);
Transform vertex_transform_matrix;
Transform local_cluster_matrix; // set_bone_pose
mutable const FBXDocParser::Deformer *skin = nullptr;
mutable const FBXDocParser::Cluster *cluster = nullptr;
mutable const FBXDocParser::Geometry *geometry = nullptr;
mutable const FBXDocParser::ModelLimbNode *limb_node = nullptr;
void set_pivot_xform(Ref<PivotTransform> p_pivot_xform) {
pivot_xform = p_pivot_xform;
void set_node(Ref<FBXNode> p_node) {
node = p_node;
}
// pose node / if assigned
Transform pose_node = Transform();
bool assigned_pose_node = false;
Ref<FBXBone> parent_bone = Ref<FBXBone>();
Ref<PivotTransform> pivot_xform = Ref<PivotTransform>();
Ref<FBXSkeleton> fbx_skeleton = Ref<FBXSkeleton>();
// Stores the pivot xform for this bone
Ref<FBXNode> node = nullptr;
Ref<FBXBone> parent_bone = nullptr;
Ref<FBXSkeleton> fbx_skeleton = nullptr;
};
struct FBXSkinDeformer {
FBXSkinDeformer(Ref<FBXBone> p_bone, const FBXDocParser::Cluster *p_cluster) :
cluster(p_cluster), bone(p_bone) {}
~FBXSkinDeformer() {}
const FBXDocParser::Cluster *cluster;
Ref<FBXBone> bone;
/* get associate model - the model can be invalid sometimes */
Ref<FBXBone> get_associate_model() const {
return bone->parent_bone;
}
Ref<FBXNode> get_link(const ImportState &state) const;
};
#endif // FBX_BONE_H

View file

@ -29,9 +29,9 @@
/*************************************************************************/
#include "fbx_material.h"
#include "scene/resources/material.h"
#include "scene/resources/texture.h"
#include "tools/validation_tools.h"
String FBXMaterial::get_material_name() const {
return material_name;
@ -286,10 +286,16 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
}
if (desc == PROPERTY_DESC_IGNORE) {
WARN_PRINT("[Ignored] The FBX material parameter: `" + String(name.c_str()) + "` is ignored.");
//WARN_PRINT("[Ignored] The FBX material parameter: `" + String(name.c_str()) + "` is ignored.");
continue;
} else {
print_verbose("FBX Material parameter: " + String(name.c_str()));
// Check for Diffuse material system / lambert materials / legacy basically
if (name == "Diffuse" && !warning_non_pbr_material) {
ValidationTracker::get_singleton()->add_validation_error(state.path, "Invalid material settings change to Ai Standard Surface shader, mat name: " + material_name.c_escape());
warning_non_pbr_material = true;
}
}
// DISABLE when adding support for all weird and wonderful material formats
@ -313,20 +319,41 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
const FBXDocParser::TypedProperty<Vector3> *vector_value = dynamic_cast<const FBXDocParser::TypedProperty<Vector3> *>(prop);
if (!real_value && !vector_value) {
WARN_PRINT("unsupported datatype in property: " + String(name.c_str()));
//WARN_PRINT("unsupported datatype in property: " + String(name.c_str()));
continue;
}
//
// Zero / default value properties
// TODO: implement fields correctly tomorrow so we check 'has x mapping' before 'read x mapping' etc.
// if(real_value)
// {
// if(real_value->Value() == 0 && !vector_value)
// {
// continue;
// }
// }
if (vector_value && !real_value) {
if (vector_value->Value() == Vector3(0, 0, 0) && !real_value) {
continue;
}
}
switch (desc) {
case PROPERTY_DESC_ALBEDO_COLOR: {
if (vector_value) {
const Vector3 &color = vector_value->Value();
// Make sure to not lost any eventual opacity.
Color c = spatial_material->get_albedo();
c[0] = color[0];
c[1] = color[1];
c[2] = color[2];
spatial_material->set_albedo(c);
if (color != Vector3(0, 0, 0)) {
Color c = Color();
c[0] = color[0];
c[1] = color[1];
c[2] = color[2];
spatial_material->set_albedo(c);
}
} else if (real_value) {
print_error("albedo is unsupported format?");
}
@ -336,7 +363,7 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
const real_t opacity = real_value->Value();
if (opacity < (1.0 - CMP_EPSILON)) {
Color c = spatial_material->get_albedo();
c[3] = opacity;
c.a = opacity;
spatial_material->set_albedo(c);
material_info.features.push_back(SpatialMaterial::Feature::FEATURE_TRANSPARENT);
spatial_material->set_depth_draw_mode(SpatialMaterial::DEPTH_DRAW_ALPHA_OPAQUE_PREPASS);
@ -355,16 +382,7 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
print_error("unsupported specular vector value: " + vector_value->Value());
}
} break;
case PROPERTY_DESC_SPECULAR_ROUGHNESS: {
if (real_value) {
print_verbose("specular roughness value:" + rtos(real_value->Value()));
spatial_material->set_roughness(MIN(1.0f, real_value->Value()));
}
if (vector_value) {
print_error("unsupported specular roughness color: " + vector_value->Value());
}
} break;
case PROPERTY_DESC_SPECULAR_COLOR: {
if (vector_value) {
print_error("unsupported specular color: " + vector_value->Value());
@ -401,7 +419,8 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
}
} break;
case PROPERTY_DESC_COAT_ROUGHNESS: {
if (real_value) {
// meaning is that approx equal to zero is disabled not actually zero. ;)
if (real_value && Math::is_equal_approx(real_value->Value(), 0.0f)) {
print_verbose("clearcoat real value: " + rtos(real_value->Value()));
spatial_material->set_clearcoat_gloss(1.0 - real_value->Value());
@ -411,11 +430,11 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
}
} break;
case PROPERTY_DESC_EMISSIVE: {
if (real_value) {
if (real_value && Math::is_equal_approx(real_value->Value(), 0.0f)) {
print_verbose("Emissive real value: " + rtos(real_value->Value()));
spatial_material->set_emission_energy(real_value->Value());
material_info.features.push_back(SpatialMaterial::Feature::FEATURE_EMISSION);
} else {
} else if (vector_value && !vector_value->Value().is_equal_approx(Vector3(0, 0, 0))) {
const Vector3 &color = vector_value->Value();
Color c;
c[0] = color[0];
@ -426,7 +445,7 @@ Ref<SpatialMaterial> FBXMaterial::import_material(ImportState &state) {
}
} break;
case PROPERTY_DESC_EMISSIVE_COLOR: {
if (vector_value) {
if (vector_value && !vector_value->Value().is_equal_approx(Vector3(0, 0, 0))) {
const Vector3 &color = vector_value->Value();
Color c;
c[0] = color[0];

View file

@ -38,6 +38,7 @@
struct FBXMaterial : public Reference {
String material_name = String();
bool warning_non_pbr_material = false;
FBXDocParser::Material *material = nullptr;
/* Godot materials
@ -149,6 +150,8 @@ struct FBXMaterial : public Reference {
{ "Maya|SpecularTexture|file", SpatialMaterial::TextureParam::TEXTURE_METALLIC },
/* Roughness */
// Arnold Roughness Map
{ "Maya|specularRoughness", SpatialMaterial::TextureParam::TEXTURE_ROUGHNESS },
{ "3dsMax|Parameters|roughness_map", SpatialMaterial::TextureParam::TEXTURE_ROUGHNESS },
{ "Maya|TEX_roughness_map", SpatialMaterial::TextureParam::TEXTURE_ROUGHNESS },
@ -169,7 +172,6 @@ struct FBXMaterial : public Reference {
//{ "Maya|diffuseRoughness", SpatialMaterial::TextureParam::UNSUPPORTED },
//{ "Maya|diffuseRoughness|file", SpatialMaterial::TextureParam::UNSUPPORTED },
//{ "Maya|specularRoughness", SpatialMaterial::TextureParam::UNSUPPORTED },
//{ "ShininessExponent", SpatialMaterial::TextureParam::UNSUPPORTED },
//{ "ReflectionFactor", SpatialMaterial::TextureParam::UNSUPPORTED },
//{ "TransparentColor",SpatialMaterial::TextureParam::TEXTURE_CHANNEL_ALPHA },
@ -185,7 +187,6 @@ struct FBXMaterial : public Reference {
PROPERTY_DESC_ROUGHNESS,
PROPERTY_DESC_SPECULAR,
PROPERTY_DESC_SPECULAR_COLOR,
PROPERTY_DESC_SPECULAR_ROUGHNESS,
PROPERTY_DESC_SHINYNESS,
PROPERTY_DESC_COAT,
PROPERTY_DESC_COAT_ROUGHNESS,
@ -203,8 +204,8 @@ struct FBXMaterial : public Reference {
{ "Maya|specular", PROPERTY_DESC_SPECULAR },
{ "Maya|specularColor", PROPERTY_DESC_SPECULAR_COLOR },
/* Specular roughness */
{ "Maya|specularRoughness", PROPERTY_DESC_SPECULAR_ROUGHNESS },
/* Specular roughness - arnold roughness map */
{ "Maya|specularRoughness", PROPERTY_DESC_ROUGHNESS },
/* Transparent */
{ "Opacity", PROPERTY_DESC_TRANSPARENT },
@ -221,10 +222,10 @@ struct FBXMaterial : public Reference {
{ "Maya|roughness", PROPERTY_DESC_ROUGHNESS },
/* Coat */
{ "Maya|coat", PROPERTY_DESC_COAT },
//{ "Maya|coat", PROPERTY_DESC_COAT },
/* Coat roughness */
{ "Maya|coatRoughness", PROPERTY_DESC_COAT_ROUGHNESS },
//{ "Maya|coatRoughness", PROPERTY_DESC_COAT_ROUGHNESS },
/* Emissive */
{ "Maya|emission", PROPERTY_DESC_EMISSIVE },

View file

@ -115,8 +115,9 @@ struct SurfaceData {
Array morphs;
};
MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
mesh_geometry = p_mesh_geometry;
// todo: make this just use a uint64_t FBX ID this is a copy of our original materials unfortunately.
const std::vector<const FBXDocParser::Material *> &material_lookup = model->GetMaterials();
@ -195,7 +196,7 @@ MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDo
// TODO please add skinning.
//mesh_id = mesh_geometry->ID();
sanitize_vertex_weights();
sanitize_vertex_weights(state);
// Re organize polygon vertices to to correctly take into account strange
// UVs.
@ -214,7 +215,6 @@ MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDo
// Make sure that from this moment on the mesh_geometry is no used anymore.
// This is a safety step, because the mesh_geometry data are no more valid
// at this point.
mesh_geometry = nullptr;
const int vertex_count = vertices.size();
@ -305,7 +305,7 @@ MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDo
// This must be done before add_vertex because the surface tool is
// expecting this before the st->add_vertex() call
add_vertex(
add_vertex(state,
surface->surface_tool,
state.scale,
vertex,
@ -350,6 +350,7 @@ MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDo
for (unsigned int vi = 0; vi < surface->vertices_map.size(); vi += 1) {
const Vertex vertex = surface->vertices_map[vi];
add_vertex(
state,
morph_st,
state.scale,
vertex,
@ -405,8 +406,26 @@ MeshInstance *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDo
return godot_mesh;
}
void FBXMeshData::sanitize_vertex_weights() {
const int max_bones = VS::ARRAY_WEIGHTS_SIZE;
void FBXMeshData::sanitize_vertex_weights(const ImportState &state) {
const int max_vertex_influence_count = VS::ARRAY_WEIGHTS_SIZE;
Map<int, int> skeleton_to_skin_bind_id;
// TODO: error's need added
const FBXDocParser::Skin *fbx_skin = mesh_geometry->DeformerSkin();
if (fbx_skin == nullptr || fbx_skin->Clusters().size() == 0) {
return; // do nothing
}
//
// Precalculate the skin cluster mapping
//
int bind_id = 0;
for (const FBXDocParser::Cluster *cluster : fbx_skin->Clusters()) {
Ref<FBXBone> bone = state.fbx_bone_map[cluster->TargetNode()->ID()];
skeleton_to_skin_bind_id.insert(bone->godot_bone_id, bind_id);
bind_id++;
}
for (const Vertex *v = vertex_weights.next(nullptr); v != nullptr; v = vertex_weights.next(v)) {
VertexWeightMapping *vm = vertex_weights.getptr(*v);
@ -414,7 +433,6 @@ void FBXMeshData::sanitize_vertex_weights() {
ERR_CONTINUE(vm->bones_ref.size() != vm->weights.size()); // No message, already checked.
const int initial_size = vm->weights.size();
{
// Init bone id
int *bones_ptr = vm->bones.ptrw();
@ -423,7 +441,7 @@ void FBXMeshData::sanitize_vertex_weights() {
for (int i = 0; i < vm->weights.size(); i += 1) {
// At this point this is not possible because the skeleton is already initialized.
CRASH_COND(bones_ref_ptr[i]->godot_bone_id == -2);
bones_ptr[i] = bones_ref_ptr[i]->godot_bone_id;
bones_ptr[i] = skeleton_to_skin_bind_id[bones_ref_ptr[i]->godot_bone_id];
}
// From this point on the data is no more valid.
@ -446,18 +464,18 @@ void FBXMeshData::sanitize_vertex_weights() {
{
// Resize
vm->weights.resize(max_bones);
vm->bones.resize(max_bones);
vm->weights.resize(max_vertex_influence_count);
vm->bones.resize(max_vertex_influence_count);
real_t *weights_ptr = vm->weights.ptrw();
int *bones_ptr = vm->bones.ptrw();
for (int i = initial_size; i < max_bones; i += 1) {
for (int i = initial_size; i < max_vertex_influence_count; i += 1) {
weights_ptr[i] = 0.0;
bones_ptr[i] = 0;
}
// Normalize
real_t sum = 0.0;
for (int i = 0; i < max_bones; i += 1) {
for (int i = 0; i < max_vertex_influence_count; i += 1) {
sum += weights_ptr[i];
}
if (sum > 0.0) {
@ -499,9 +517,22 @@ void FBXMeshData::reorganize_vertices(
// Take the normal and see if we need to duplicate this polygon.
if (r_normals_raw.has(index)) {
const HashMap<PolygonId, Vector3> *nrml_arr = r_normals_raw.getptr(index);
if (nrml_arr->has(polygon_index)) {
this_vert_poly_normal = nrml_arr->get(polygon_index);
} else if (nrml_arr->has(-1)) {
this_vert_poly_normal = nrml_arr->get(-1);
} else {
print_error("invalid normal detected: " + itos(index) + " polygon index: " + itos(polygon_index));
for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
print_verbose("debug contents key: " + itos(*pid));
if (nrml_arr->has(*pid)) {
print_verbose("contents valid: " + nrml_arr->get(*pid));
}
}
}
// Now, check if we need to duplicate it.
for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
if (*pid == polygon_index) {
@ -683,6 +714,7 @@ void FBXMeshData::reorganize_vertices(
}
void FBXMeshData::add_vertex(
const ImportState &state,
Ref<SurfaceTool> p_surface_tool,
real_t p_scale,
Vertex p_vertex,
@ -719,7 +751,22 @@ void FBXMeshData::add_vertex(
// TODO what about binormals?
// TODO there is other?
gen_weight_info(p_surface_tool, p_vertex);
if (vertex_weights.has(p_vertex)) {
// Let's extract the weight info.
const VertexWeightMapping *vm = vertex_weights.getptr(p_vertex);
const Vector<int> &bones = vm->bones;
// the bug is that the bone idx is wrong because it is not ref'ing the skin.
if (bones.size() > VS::ARRAY_WEIGHTS_SIZE) {
print_error("[weight overflow detected]");
}
p_surface_tool->add_weights(vm->weights);
// 0 1 2 3 4 5 6 7 < local skeleton / skin for mesh
// 0 1 2 3 4 5 6 7 8 9 10 < actual skeleton with all joints
p_surface_tool->add_bones(bones);
}
// The surface tool want the vertex position as last thing.
p_surface_tool->add_vertex((p_vertices_position[p_vertex] + p_morph_value) * p_scale);
@ -895,14 +942,7 @@ void FBXMeshData::gen_weight_info(Ref<SurfaceTool> st, Vertex vertex_id) const {
const VertexWeightMapping *vm = vertex_weights.getptr(vertex_id);
st->add_weights(vm->weights);
st->add_bones(vm->bones);
print_verbose("[doc] Triangle added weights to mesh for bones");
} else {
// This vertex doesn't have any bone info, while the model is using the
// bones.
// So nothing more to do.
}
print_verbose("[doc] Triangle added weights to mesh for bones");
}
int FBXMeshData::get_vertex_from_polygon_vertex(const std::vector<int> &p_polygon_indices, int p_index) const {

View file

@ -40,6 +40,7 @@
#include "import_state.h"
#include "tools/import_utils.h"
struct FBXNode;
struct FBXMeshData;
struct FBXBone;
struct ImportState;
@ -68,6 +69,10 @@ struct FBXMeshData : Reference {
Vector<Vector3> normals;
};
// FIXME: remove this is a hack for testing only
mutable const FBXDocParser::MeshGeometry *mesh_geometry = nullptr;
Ref<FBXNode> mesh_node = nullptr;
/// vertex id, Weight Info
/// later: perf we can use array here
HashMap<int, VertexWeightMapping> vertex_weights;
@ -75,7 +80,7 @@ struct FBXMeshData : Reference {
// translate fbx mesh data from document context to FBX Mesh Geometry Context
bool valid_weight_indexes = false;
MeshInstance *create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *mesh_geometry, const FBXDocParser::Model *model, bool use_compression);
MeshInstance *create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression);
void gen_weight_info(Ref<SurfaceTool> st, int vertex_id) const;
@ -87,7 +92,7 @@ struct FBXMeshData : Reference {
MeshInstance *godot_mesh_instance = nullptr;
private:
void sanitize_vertex_weights();
void sanitize_vertex_weights(const ImportState &state);
/// Make sure to reorganize the vertices so that the correct UV is taken.
/// This step is needed because differently from the normal, that can be
@ -107,6 +112,7 @@ private:
HashMap<int, HashMap<int, Vector2> > &r_uv_2_raw);
void add_vertex(
const ImportState &state,
Ref<SurfaceTool> p_surface_tool,
real_t p_scale,
int p_vertex,
@ -135,17 +141,17 @@ private:
/// Returns -1 if `p_index` is invalid.
int get_vertex_from_polygon_vertex(const std::vector<int> &p_face_indices, int p_index) const;
/// Retuns true if this polygon_vertex_index is the end of a new polygon.
/// Returns true if this polygon_vertex_index is the end of a new polygon.
bool is_end_of_polygon(const std::vector<int> &p_face_indices, int p_index) const;
/// Retuns true if this polygon_vertex_index is the begin of a new polygon.
/// Returns true if this polygon_vertex_index is the begin of a new polygon.
bool is_start_of_polygon(const std::vector<int> &p_face_indices, int p_index) const;
/// Returns the number of polygons.
int count_polygons(const std::vector<int> &p_face_indices) const;
/// Used to extract data from the `MappingData` alligned with vertex.
/// Useful to extract normal/uvs/colors/tangets/etc...
/// Used to extract data from the `MappingData` aligned with vertex.
/// Useful to extract normal/uvs/colors/tangents/etc...
/// If the function fails somehow, it returns an hollow vector and print an error.
template <class R, class T>
HashMap<int, R> extract_per_vertex_data(
@ -157,7 +163,7 @@ private:
R p_fall_back) const;
/// Used to extract data from the `MappingData` organized per polygon.
/// Useful to extract the materila
/// Useful to extract the material
/// If the function fails somehow, it returns an hollow vector and print an error.
template <class T>
HashMap<int, T> extract_per_polygon(

View file

@ -40,7 +40,6 @@ void FBXSkeleton::init_skeleton(const ImportState &state) {
if (skeleton == nullptr && skeleton_bone_count > 0) {
skeleton = memnew(Skeleton);
Ref<FBXNode> skeleton_parent_node;
if (fbx_node.is_valid()) {
// cache skeleton attachment for later during node creation
// can't be done until after node hierarchy is built
@ -100,11 +99,11 @@ void FBXSkeleton::init_skeleton(const ImportState &state) {
ERR_FAIL_COND_MSG(skeleton->get_bone_count() != bone_count, "Not all bones got added, is the file corrupted?");
for (Map<int, Ref<FBXBone> >::Element *bone_element = bone_map.front(); bone_element; bone_element = bone_element->next()) {
Ref<FBXBone> bone = bone_element->value();
const Ref<FBXBone> bone = bone_element->value();
int bone_index = bone_element->key();
print_verbose("working on bone: " + itos(bone_index) + " bone name:" + bone->bone_name);
skeleton->set_bone_rest(bone->godot_bone_id, get_unscaled_transform(bone->pivot_xform->LocalTransform, state.scale));
skeleton->set_bone_rest(bone->godot_bone_id, get_unscaled_transform(bone->node->pivot_transform->LocalTransform, state.scale));
// lookup parent ID
if (bone->valid_parent && state.fbx_bone_map.has(bone->parent_bone_id)) {

View file

@ -42,7 +42,7 @@ struct FBXNode;
struct ImportState;
struct FBXBone;
struct FBXSkeleton : Reference, ModelAbstraction {
struct FBXSkeleton : Reference {
Ref<FBXNode> fbx_node = Ref<FBXNode>();
Vector<Ref<FBXBone> > skeleton_bones = Vector<Ref<FBXBone> >();
Skeleton *skeleton = nullptr;

View file

@ -32,7 +32,9 @@
#define IMPORT_STATE_H
#include "fbx_mesh_data.h"
#include "modules/fbx/tools/import_utils.h"
#include "tools/import_utils.h"
#include "tools/validation_tools.h"
#include "pivot_transform.h"
#include "core/bind/core_bind.h"

View file

@ -80,14 +80,37 @@ void PivotTransform::ReadTransformChain() {
const Vector3 &GeometricScaling = ImportUtils::safe_import_vector3(FBXDocParser::PropertyGet<Vector3>(props, "GeometricScaling", ok));
if (ok) {
geometric_scaling = GeometricScaling;
} else {
geometric_scaling = Vector3(0, 0, 0);
}
const Vector3 &GeometricRotation = ImportUtils::safe_import_vector3(FBXDocParser::PropertyGet<Vector3>(props, "GeometricRotation", ok));
if (ok) {
geometric_rotation = ImportUtils::EulerToQuaternion(rot, ImportUtils::deg2rad(GeometricRotation));
} else {
geometric_rotation = Quat();
}
const Vector3 &GeometricTranslation = ImportUtils::safe_import_vector3(FBXDocParser::PropertyGet<Vector3>(props, "GeometricTranslation", ok));
if (ok) {
geometric_translation = ImportUtils::FixAxisConversions(GeometricTranslation);
} else {
geometric_translation = Vector3(0, 0, 0);
}
if (geometric_rotation != Quat()) {
print_error("geometric rotation is unsupported!");
//CRASH_COND(true);
}
if (!geometric_scaling.is_equal_approx(Vector3(1, 1, 1))) {
print_error("geometric scaling is unsupported!");
//CRASH_COND(true);
}
if (!geometric_translation.is_equal_approx(Vector3(0, 0, 0))) {
print_error("geometric translation is unsupported.");
//CRASH_COND(true);
}
}
@ -114,6 +137,20 @@ Transform PivotTransform::ComputeLocalTransform(Vector3 p_translation, Quat p_ro
return T * Roff * Rp * Rpre * R * Rpost.affine_inverse() * Rp.affine_inverse() * Soff * Sp * S * Sp.affine_inverse();
}
Transform PivotTransform::ComputeGlobalTransform(Transform t) const {
Vector3 pos = t.origin;
Vector3 scale = t.basis.get_scale();
Quat rot = t.basis.get_rotation_quat();
return ComputeGlobalTransform(pos, rot, scale);
}
Transform PivotTransform::ComputeLocalTransform(Transform t) const {
Vector3 pos = t.origin;
Vector3 scale = t.basis.get_scale();
Quat rot = t.basis.get_rotation_quat();
return ComputeLocalTransform(pos, rot, scale);
}
Transform PivotTransform::ComputeGlobalTransform(Vector3 p_translation, Quat p_rotation, Vector3 p_scaling) const {
Transform T, Roff, Rp, Soff, Sp, S;

View file

@ -31,6 +31,7 @@
#ifndef PIVOT_TRANSFORM_H
#define PIVOT_TRANSFORM_H
#include "core/math/transform.h"
#include "core/reference.h"
#include "model_abstraction.h"
@ -84,6 +85,9 @@ struct PivotTransform : Reference, ModelAbstraction {
print_verbose("raw pre_rotation " + raw_pre_rotation * (180 / Math_PI));
print_verbose("raw post_rotation " + raw_post_rotation * (180 / Math_PI));
}
Transform ComputeGlobalTransform(Transform t) const;
Transform ComputeLocalTransform(Transform t) const;
Transform ComputeGlobalTransform(Vector3 p_translation, Quat p_rotation, Vector3 p_scaling) const;
Transform ComputeLocalTransform(Vector3 p_translation, Quat p_rotation, Vector3 p_scaling) const;

View file

@ -151,17 +151,51 @@ Node *EditorSceneImporterFBX::import_scene(const String &p_path, uint32_t p_flag
// safety for version handling
if (doc.IsSafeToImport()) {
Spatial *spatial = _generate_scene(p_path, &doc, p_flags, p_bake_fps, 8);
// todo: move to document shutdown (will need to be validated after moving; this code has been validated already)
for (FBXDocParser::TokenPtr token : tokens) {
if (token) {
delete token;
token = nullptr;
bool is_blender_fbx = false;
//const FBXDocParser::PropertyPtr app_vendor = p_document->GlobalSettingsPtr()->Props()
// p_document->Creator()
const FBXDocParser::PropertyTable *import_props = doc.GetMetadataProperties();
const FBXDocParser::PropertyPtr app_name = import_props->Get("Original|ApplicationName");
const FBXDocParser::PropertyPtr app_vendor = import_props->Get("Original|ApplicationVendor");
const FBXDocParser::PropertyPtr app_version = import_props->Get("Original|ApplicationVersion");
//
if (app_name) {
const FBXDocParser::TypedProperty<std::string> *app_name_string = dynamic_cast<const FBXDocParser::TypedProperty<std::string> *>(app_name);
if (app_name_string) {
print_verbose("FBX App Name: " + String(app_name_string->Value().c_str()));
}
}
return spatial;
if (app_vendor) {
const FBXDocParser::TypedProperty<std::string> *app_vendor_string = dynamic_cast<const FBXDocParser::TypedProperty<std::string> *>(app_vendor);
if (app_vendor_string) {
print_verbose("FBX App Vendor: " + String(app_vendor_string->Value().c_str()));
is_blender_fbx = app_vendor_string->Value().find("Blender") != std::string::npos;
}
}
if (app_version) {
const FBXDocParser::TypedProperty<std::string> *app_version_string = dynamic_cast<const FBXDocParser::TypedProperty<std::string> *>(app_version);
if (app_version_string) {
print_verbose("FBX App Version: " + String(app_version_string->Value().c_str()));
}
}
if (!is_blender_fbx) {
Spatial *spatial = _generate_scene(p_path, &doc, p_flags, p_bake_fps, 8);
// todo: move to document shutdown (will need to be validated after moving; this code has been validated already)
for (FBXDocParser::TokenPtr token : tokens) {
if (token) {
delete token;
token = nullptr;
}
}
return spatial;
} else {
print_error("We can't import blender FBX files, they're not implemented correctly at export time and would require several hacks in the FBX importer which could break Maya imports.");
}
} else {
print_error("Cannot import file: " + p_path + " version of file is unsupported, please re-export in your modelling package file version is: " + itos(doc.FBXVersion()));
}
@ -356,6 +390,7 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// Size relative to cm.
const real_t fbx_unit_scale = p_document->GlobalSettingsPtr()->UnitScaleFactor();
print_verbose("FBX unit scale import value: " + rtos(fbx_unit_scale));
// Set FBX file scale is relative to CM must be converted to M
state.scale = fbx_unit_scale / 100.0;
print_verbose("FBX unit scale is: " + rtos(state.scale));
@ -366,6 +401,11 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
state.enable_animation_import = true;
Ref<FBXNode> root_node;
root_node.instance();
// make sure fake node always has a transform too ;)
Ref<PivotTransform> pivot_transform;
pivot_transform.instance();
root_node->pivot_transform = pivot_transform;
root_node->node_name = "root node";
root_node->current_node_id = 0;
root_node->godot_node = state.root;
@ -376,9 +416,16 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// cache basic node information from FBX document
// grabs all FBX bones
BuildDocumentBones(Ref<FBXBone>(), state, p_document, 0L);
BuildDocumentNodes(nullptr, state, p_document, 0L, nullptr);
BuildDocumentNodes(Ref<PivotTransform>(), state, p_document, 0L, nullptr);
// Build document skinning information
// Algorithm is this:
// Get Deformer: object with "Skin" class.
// Deformer:: has link to Geometry:: (correct mesh for skin)
// Deformer:: has Source which is the SubDeformer:: (e.g. the Cluster)
// Notes at the end it configures the vertex weight mapping.
for (uint64_t skin_id : p_document->GetSkinIDs()) {
// Validate the parser
FBXDocParser::LazyObject *lazy_skin = p_document->GetObject(skin_id);
@ -389,7 +436,6 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
ERR_CONTINUE_MSG(skin == nullptr, "invalid skin added to skin list [parser bug]");
const std::vector<const FBXDocParser::Connection *> source_to_destination = p_document->GetConnectionsBySourceSequenced(skin_id);
const std::vector<const FBXDocParser::Connection *> destination_to_source = p_document->GetConnectionsByDestinationSequenced(skin_id);
FBXDocParser::MeshGeometry *mesh = nullptr;
uint64_t mesh_id = 0;
@ -409,21 +455,13 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// Validate the mesh exists and was retrieved
ERR_CONTINUE_MSG(mesh_id == 0, "mesh id is invalid");
const std::vector<const FBXDocParser::Cluster *> clusters = skin->Clusters();
// NOTE: this will ONLY work on skinned bones (it is by design.)
// A cluster is a skinned bone so SKINS won't contain unskinned bones so we need to pre-add all bones and parent them in a step beforehand.
for (const FBXDocParser::Connection *con : destination_to_source) {
FBXDocParser::Object *ob = con->SourceObject();
//
// Read the FBX Document bone information
//
// Get bone weight data
const FBXDocParser::Cluster *deformer = dynamic_cast<const FBXDocParser::Cluster *>(ob);
ERR_CONTINUE_MSG(deformer == nullptr, "invalid bone cluster");
const uint64_t deformer_id = deformer->ID();
for (const FBXDocParser::Cluster *cluster : clusters) {
ERR_CONTINUE_MSG(cluster == nullptr, "invalid bone cluster");
const uint64_t deformer_id = cluster->ID();
std::vector<const FBXDocParser::Connection *> connections = p_document->GetConnectionsByDestinationSequenced(deformer_id);
// Weight data always has a node in the scene lets grab the limb's node in the scene :) (reverse set to true since it's the opposite way around)
@ -444,8 +482,8 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
//
// Cache Weight Information into bone for later usage if you want the raw data.
const std::vector<unsigned int> &indexes = deformer->GetIndices();
const std::vector<float> &weights = deformer->GetWeights();
const std::vector<unsigned int> &indexes = cluster->GetIndices();
const std::vector<float> &weights = cluster->GetWeights();
Ref<FBXMeshData> mesh_vertex_data;
// this data will pre-exist if vertex weight information is found
@ -468,7 +506,7 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
VertexWeightMapping &vm = mesh_vertex_data->vertex_weights[vertex_index];
vm.weights.push_back(influence_weight);
vm.bones.push_back(0);
vm.bones.push_back(0); // bone id is pushed on here during sanitization phase
vm.bones_ref.push_back(bone_element);
}
@ -531,31 +569,6 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// we opted to merge the entire scene onto one skeleton for now
// if we need to change this we have an archive of the old code.
const std::vector<uint64_t> &bind_pose_ids = p_document->GetBindPoseIDs();
for (uint64_t skin_id : bind_pose_ids) {
FBXDocParser::LazyObject *lazy_skin = p_document->GetObject(skin_id);
const FBXDocParser::FbxPose *active_skin = lazy_skin->Get<FBXDocParser::FbxPose>();
if (active_skin) {
const std::vector<FBXDocParser::FbxPoseNode *> &bind_poses = active_skin->GetBindPoses();
for (FBXDocParser::FbxPoseNode *pose_node : bind_poses) {
Transform t = pose_node->GetBindPose();
uint64_t fbx_node_id = pose_node->GetNodeID();
if (state.fbx_bone_map.has(fbx_node_id)) {
Ref<FBXBone> bone = state.fbx_bone_map[fbx_node_id];
if (bone.is_valid()) {
print_verbose("assigned skin pose from the file for bone " + bone->bone_name + ", transform: " + t);
bone->pose_node = t;
bone->assigned_pose_node = true;
}
}
}
}
}
// bind pose normally only has 1 per mesh but can have more than one
// this is the point of skins
// in FBX first bind pose is the master for the first skin
@ -586,15 +599,22 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// now populate bone on the armature node list
fbx_skeleton_inst->skeleton_bones.push_back(bone);
CRASH_COND_MSG(!state.fbx_target_map.has(armature_id), "invalid armature [serious]");
Ref<FBXNode> node = state.fbx_target_map[armature_id];
CRASH_COND_MSG(node.is_null(), "invalid node [serious]");
CRASH_COND_MSG(node->pivot_transform.is_null(), "invalid pivot transform [serious]");
fbx_skeleton_inst->fbx_node = node;
ERR_CONTINUE_MSG(fbx_skeleton_inst->fbx_node.is_null(), "invalid skeleton node [serious]");
// we need to have a valid armature id and the model configured for the bone to be assigned fully.
// happens once per skeleton
if (state.fbx_target_map.has(armature_id) && !fbx_skeleton_inst->has_model()) {
Ref<FBXNode> node = state.fbx_target_map[armature_id];
fbx_skeleton_inst->set_model(node->get_model());
fbx_skeleton_inst->fbx_node = node;
print_verbose("allocated fbx skeleton primary / armature node for the level: " + node->node_name);
} else if (!state.fbx_target_map.has(armature_id) && !fbx_skeleton_inst->has_model()) {
if (state.fbx_target_map.has(armature_id) && !fbx_skeleton_inst->fbx_node->has_model()) {
print_verbose("allocated fbx skeleton primary / armature node for the level: " + fbx_skeleton_inst->fbx_node->node_name);
} else if (!state.fbx_target_map.has(armature_id) && !fbx_skeleton_inst->fbx_node->has_model()) {
print_error("bones are not mapped to an armature node for armature id: " + itos(armature_id) + " bone: " + bone->bone_name);
// this means bone will be removed and not used, which is safe actually and no skeleton will be created.
}
@ -602,7 +622,14 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
// setup skeleton instances if required :)
for (Map<uint64_t, Ref<FBXSkeleton> >::Element *skeleton_node = state.skeleton_map.front(); skeleton_node; skeleton_node = skeleton_node->next()) {
skeleton_node->value()->init_skeleton(state);
Ref<FBXSkeleton> &skeleton = skeleton_node->value();
skeleton->init_skeleton(state);
ERR_CONTINUE_MSG(skeleton->fbx_node.is_null(), "invalid fbx target map, missing skeleton");
}
// This list is not populated
for (Map<uint64_t, Ref<FBXNode> >::Element *skin_mesh = state.MeshNodes.front(); skin_mesh; skin_mesh = skin_mesh->next()) {
}
}
@ -638,6 +665,8 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
state.renderer_mesh_data.insert(mesh_id, mesh_data_precached);
}
mesh_data_precached->mesh_node = fbx_node;
// mesh node, mesh id
mesh_node = mesh_data_precached->create_fbx_mesh(state, mesh_geometry, fbx_node->fbx_model, (p_flags & IMPORT_USE_COMPRESSION) != 0);
if (!state.MeshNodes.has(mesh_id)) {
@ -681,33 +710,69 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
}
}
for (Map<uint64_t, Ref<FBXNode> >::Element *skin_mesh = state.MeshNodes.front(); skin_mesh; skin_mesh = skin_mesh->next()) {
const uint64_t mesh_id = skin_mesh->key();
Ref<FBXNode> fbx_node = skin_mesh->value();
for (Map<uint64_t, Ref<FBXMeshData> >::Element *mesh_data = state.renderer_mesh_data.front(); mesh_data; mesh_data = mesh_data->next()) {
const uint64_t mesh_id = mesh_data->key();
Ref<FBXMeshData> mesh = mesh_data->value();
ERR_CONTINUE_MSG(state.MeshSkins.has(skin_mesh->key()), "invalid skin already exists for this mesh?");
print_verbose("[doc] caching skin for " + itos(mesh_id) + ", mesh node name: " + fbx_node->node_name);
Ref<Skin> skin;
skin.instance();
const FBXDocParser::MeshGeometry *mesh_geometry = p_document->GetObject(mesh_id)->Get<FBXDocParser::MeshGeometry>();
for (Map<uint64_t, Ref<FBXBone> >::Element *elem = state.fbx_bone_map.front(); elem; elem = elem->next()) {
Ref<FBXBone> bone = elem->value();
Transform ignore_t;
Ref<FBXSkeleton> skeleton = bone->fbx_skeleton;
// grab the skin bind
bool valid_bind = false;
Transform bind = bone->get_vertex_skin_xform(state, fbx_node->pivot_transform->GlobalTransform, valid_bind);
ERR_CONTINUE_MSG(mesh->mesh_node.is_null(), "invalid mesh allocation");
ERR_CONTINUE_MSG(!valid_bind, "invalid bind");
const FBXDocParser::Skin *mesh_skin = mesh_geometry->DeformerSkin();
if (bind.basis.determinant() == 0) {
bind = Transform(Basis(), bind.origin);
}
skin->add_named_bind(bone->bone_name, get_unscaled_transform(bind, state.scale));
if (!mesh_skin) {
continue; // safe to continue
}
state.MeshSkins.insert(mesh_id, skin);
//
// Skin bone configuration
//
//
// Get Mesh Node Xform only
//
// ERR_CONTINUE_MSG(!state.fbx_target_map.has(mesh_id), "invalid xform for the skin pose: " + itos(mesh_id));
// Ref<FBXNode> mesh_node_xform_data = state.fbx_target_map[mesh_id];
if (!mesh_skin) {
continue; // not a deformer.
}
if (mesh_skin->Clusters().size() == 0) {
continue; // possibly buggy mesh
}
// Lookup skin or create it if it's not found.
Ref<Skin> skin;
if (!state.MeshSkins.has(mesh_id)) {
print_verbose("Created new skin");
skin.instance();
state.MeshSkins.insert(mesh_id, skin);
} else {
print_verbose("Grabbed skin");
skin = state.MeshSkins[mesh_id];
}
for (const FBXDocParser::Cluster *cluster : mesh_skin->Clusters()) {
// node or bone this cluster targets (in theory will only be a bone target)
uint64_t skin_target_id = cluster->TargetNode()->ID();
print_verbose("adding cluster [" + itos(cluster->ID()) + "] " + String(cluster->Name().c_str()) + " for target: [" + itos(skin_target_id) + "] " + String(cluster->TargetNode()->Name().c_str()));
ERR_CONTINUE_MSG(!state.fbx_bone_map.has(skin_target_id), "no bone found by that ID? locator");
const Ref<FBXBone> bone = state.fbx_bone_map[skin_target_id];
const Ref<FBXSkeleton> skeleton = bone->fbx_skeleton;
const Ref<FBXNode> skeleton_node = skeleton->fbx_node;
skin->add_named_bind(
bone->bone_name,
get_unscaled_transform(
skeleton_node->pivot_transform->GlobalTransform.affine_inverse() * cluster->TransformLink().affine_inverse(), state.scale));
}
print_verbose("cluster name / id: " + String(mesh_skin->Name().c_str()) + " [" + itos(mesh_skin->ID()) + "]");
print_verbose("skeleton has " + itos(state.fbx_bone_map.size()) + " binds");
print_verbose("fbx skin has " + itos(mesh_skin->Clusters().size()) + " binds");
}
// mesh data iteration for populating skeleton mapping
@ -1211,7 +1276,7 @@ Spatial *EditorSceneImporterFBX::_generate_scene(
for (Map<uint64_t, Ref<FBXBone> >::Element *element = state.fbx_bone_map.front(); element; element = element->next()) {
Ref<FBXBone> bone = element->value();
bone->parent_bone.unref();
bone->pivot_xform.unref();
bone->node.unref();
bone->fbx_skeleton.unref();
}
@ -1303,27 +1368,10 @@ void EditorSceneImporterFBX::BuildDocumentBones(Ref<FBXBone> p_parent_bone,
}
uint64_t limb_id = limb_node->ID();
const FBXDocParser::Cluster *deformer = ProcessDOMConnection<FBXDocParser::Cluster>(p_doc, limb_id);
bone_element->bone_id = limb_id;
bone_element->bone_name = ImportUtils::FBXNodeToName(model->Name());
bone_element->parent_bone = p_parent_bone;
if (deformer != nullptr) {
print_verbose("[doc] Mesh Cluster: " + String(deformer->Name().c_str()) + ", " + deformer->TransformLink());
print_verbose("fbx node: debug name: " + String(model->Name().c_str()) + "bone name: " + String(deformer->Name().c_str()));
// assign FBX animation bind pose compensation data;
bone_element->transform_link = deformer->TransformLink();
bone_element->transform_matrix = deformer->GetTransform();
bone_element->cluster = deformer;
// skin configures target node ID.
bone_element->target_node_id = deformer->TargetNode()->ID();
bone_element->valid_target = true;
bone_element->bone_id = limb_id;
}
// insert limb by ID into list.
state.fbx_bone_map.insert(limb_node->ID(), bone_element);
}
@ -1389,7 +1437,7 @@ void EditorSceneImporterFBX::BuildDocumentNodes(
if (state.fbx_bone_map.has(current_node_id)) {
Ref<FBXBone> bone = state.fbx_bone_map[current_node_id];
if (bone.is_valid()) {
bone->set_pivot_xform(fbx_transform);
bone->set_node(new_node);
print_verbose("allocated bone data: " + bone->bone_name);
}
}
@ -1403,11 +1451,14 @@ void EditorSceneImporterFBX::BuildDocumentNodes(
new_node->set_parent(state.fbx_root_node);
}
CRASH_COND_MSG(new_node->pivot_transform.is_null(), "invalid fbx target map pivot transform [serious]");
// populate lookup tables with references
// [fbx_node_id, fbx_node]
state.fbx_node_list.push_back(new_node);
if (!state.fbx_target_map.has(new_node->current_node_id)) {
state.fbx_target_map[new_node->current_node_id] = new_node;
}

View file

@ -37,7 +37,10 @@
#include "tools/import_utils.h"
#include "core/bind/core_bind.h"
#include "core/dictionary.h"
#include "core/io/resource_importer.h"
#include "core/local_vector.h"
#include "core/ustring.h"
#include "core/vector.h"
#include "editor/import/resource_importer_scene.h"
#include "editor/project_settings_editor.h"

View file

@ -184,7 +184,6 @@ ObjectPtr LazyObject::LoadObject() {
if (!strcmp(classtag.c_str(), "Cluster")) {
object.reset(new Cluster(id, element, doc, name));
} else if (!strcmp(classtag.c_str(), "Skin")) {
object.reset(new Skin(id, element, doc, name));
} else if (!strcmp(classtag.c_str(), "BlendShape")) {
object.reset(new BlendShape(id, element, doc, name));
@ -288,12 +287,15 @@ Document::~Document() {
delete v.second;
}
if (metadata_properties != nullptr) {
delete metadata_properties;
}
// clear globals import pointer
globals.reset();
}
// ------------------------------------------------------------------------------------------------
static const unsigned int LowerSupportedVersion = 7100;
static const unsigned int LowerSupportedVersion = 7300;
static const unsigned int UpperSupportedVersion = 7700;
bool Document::ReadHeader() {
@ -310,11 +312,11 @@ bool Document::ReadHeader() {
// While we may have some success with newer files, we don't support
// the older 6.n fbx format
if (fbxVersion < LowerSupportedVersion) {
DOMWarning("unsupported, old format version, supported are only FBX 2011, FBX 2012 and FBX 2013, you can re-export using Maya, 3DS, blender or Autodesk FBX tool");
DOMWarning("unsupported, old format version, FBX 2015-2020, you must re-export in a more modern version of your original modelling application");
return false;
}
if (fbxVersion > UpperSupportedVersion) {
DOMWarning("unsupported, newer format version, supported are only FBX 2011, up to FBX 2020"
DOMWarning("unsupported, newer format version, supported are only FBX 2015, up to FBX 2020"
" trying to read it nevertheless");
}
@ -323,6 +325,20 @@ bool Document::ReadHeader() {
creator = ParseTokenAsString(GetRequiredToken(ecreator, 0));
}
//
// Scene Info
//
const ElementPtr scene_info = shead->GetElement("SceneInfo");
if (scene_info) {
PropertyTable *fileExportProps = const_cast<PropertyTable *>(GetPropertyTable(*this, "", scene_info, scene_info->Compound(), true));
if (fileExportProps) {
metadata_properties = fileExportProps;
}
}
const ElementPtr etimestamp = shead->GetElement("CreationTimeStamp");
if (etimestamp && etimestamp->Compound()) {
const ScopePtr stimestamp = etimestamp->Compound();

View file

@ -1196,6 +1196,10 @@ public:
return globals.get();
}
const PropertyTable *GetMetadataProperties() const {
return metadata_properties;
}
const PropertyTemplateMap &Templates() const {
return templates;
}
@ -1289,6 +1293,7 @@ private:
std::vector<uint64_t> materials;
std::vector<uint64_t> skins;
mutable std::vector<const AnimationStack *> animationStacksResolved;
PropertyTable *metadata_properties = nullptr;
std::shared_ptr<FileGlobalSettings> globals = nullptr;
};

View file

@ -106,7 +106,7 @@ public:
}
private:
const Skin *skin;
const Skin *skin = nullptr;
std::vector<const BlendShape *> blendShapes;
};

View file

@ -168,10 +168,10 @@ char EncodeBase64(char byte) {
}
/** Encodes a block of 4 bytes to base64 encoding
*
* @param bytes Bytes to encode.
* @param out_string String to write encoded values to.
* @param string_pos Position in out_string.*/
* @param string_pos Position in out_string.
*/
void EncodeByteBlock(const char *bytes, std::string &out_string, size_t string_pos) {
char b0 = (bytes[0] & 0xFC) >> 2;
char b1 = (bytes[0] & 0x03) << 4 | ((bytes[1] & 0xF0) >> 4);

View file

@ -83,14 +83,6 @@ namespace FBXDocParser {
namespace Util {
/** helper for std::for_each to delete all heap-allocated items in a container */
template <typename T>
struct delete_fun {
void operator()(const volatile T *del) {
delete del;
}
};
/** Get a string representation for a #TokenType. */
const char *TokenTypeString(TokenType t);

View file

@ -0,0 +1,48 @@
/*************************************************************************/
/* validation_tools.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "validation_tools.h"
#ifdef TOOLS_ENABLED
#include "core/print_string.h"
#include "core/ustring.h"
ValidationTracker::Entries *ValidationTracker::entries_singleton = memnew(ValidationTracker::Entries);
// for printing our CSV to dump validation problems of files
// later we can make some agnostic tooling for this but this is fine for the time being.
// Record a validation problem for an asset: echo it to the error log immediately
// and store it under the asset's path so print_to_csv() can dump it later.
void ValidationTracker::Entries::add_validation_error(String asset_path, String message) {
	print_error(message);
	// note: entries live on the static singleton's map, keyed by asset path,
	// so errors from multiple imports of the same file accumulate together
	validation_entries[asset_path].push_back(message);
}
#endif // TOOLS_ENABLED

View file

@ -0,0 +1,93 @@
/*************************************************************************/
/* validation_tools.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef FBX_VALIDATION_TOOLS_H
#define FBX_VALIDATION_TOOLS_H
#ifdef TOOLS_ENABLED
#include "core/local_vector.h"
#include "core/map.h"
#include "core/ustring.h"
#include <core/io/json.h>
#include <core/os/file_access.h>
#include <scene/3d/path.h>
class ValidationTracker {
protected:
struct Entries {
Map<String, LocalVector<String> > validation_entries = Map<String, LocalVector<String> >();
// for printing our CSV to dump validation problems of files
// later we can make some agnostic tooling for this but this is fine for the time being.
void add_validation_error(String asset_path, String message);
void print_to_csv() {
print_verbose("Exporting assset validation log please wait");
String massive_log_file;
String csv_header = "file_path, error message, extra data\n";
massive_log_file += csv_header;
for (Map<String, LocalVector<String> >::Element *element = validation_entries.front(); element; element = element->next()) {
for (unsigned int x = 0; x < element->value().size(); x++) {
const String &line_entry = element->key() + ", " + element->value()[x].c_escape() + "\n";
massive_log_file += line_entry;
}
}
String path = "asset_validation_errors.csv";
Error err;
FileAccess *file = FileAccess::open(path, FileAccess::WRITE, &err);
if (!file || err) {
if (file)
memdelete(file);
print_error("ValidationTracker Error - failed to create file - path: %s\n" + path);
return;
}
file->store_string(massive_log_file);
if (file->get_error() != OK && file->get_error() != ERR_FILE_EOF) {
print_error("ValidationTracker Error - failed to write to file - path: %s\n" + path);
}
file->close();
memdelete(file);
}
};
// asset path, error messages
static Entries *entries_singleton;
public:
static Entries *get_singleton() {
return entries_singleton;
}
};
#endif // TOOLS_ENABLED
#endif // FBX_VALIDATION_TOOLS_H