Merge branch 'master' into master

commit 89c4640744

@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
@@ -43,7 +43,7 @@ jobs:
 toolchain: ninja-vs-win64-cxx17

 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3

 - uses: lukka/get-cmake@latest

@@ -64,21 +64,21 @@ jobs:

 - name: Checkout Hunter toolchains
 if: endsWith(matrix.name, 'hunter')
-uses: actions/checkout@v2
+uses: actions/checkout@v3
 with:
 repository: cpp-pm/polly
 path: cmake/polly

 - name: Remove contrib directory for Hunter builds
 if: contains(matrix.name, 'hunter')
-uses: JesseTG/rm@v1.0.2
+uses: JesseTG/rm@v1.0.3
 with:
 path: contrib

 - name: Cache DX SDK
 id: dxcache
 if: contains(matrix.name, 'windows')
-uses: actions/cache@v2
+uses: actions/cache@v3
 with:
 path: '${{ github.workspace }}/DX_SDK'
 key: ${{ runner.os }}-DX_SDK
@@ -110,7 +110,7 @@ jobs:
 cmakeListsOrSettingsJson: CMakeListsTxtAdvanced
 cmakeListsTxtPath: '${{ github.workspace }}/CMakeLists.txt'
 cmakeAppendedArgs: '-GNinja -DCMAKE_BUILD_TYPE=Release ${{ steps.windows_extra_cmake_args.outputs.args }} ${{ steps.hunter_extra_cmake_args.outputs.args }}'
-buildWithCMakeArgs: '-- -v'
+buildWithCMakeArgs: '-- -j 24 -v'
 buildDirectory: '${{ github.workspace }}/build/'

 - name: Exclude certain tests in Hunter specific builds
@@ -122,7 +122,7 @@ jobs:
 run: cd build/bin && ./unit ${{ steps.hunter_extra_test_args.outputs.args }}
 shell: bash

-- uses: actions/upload-artifact@v2
+- uses: actions/upload-artifact@v3
 if: matrix.name == 'windows-msvc'
 with:
 name: 'assimp-bins-${{ matrix.name }}-${{ github.sha }}'
@@ -11,7 +11,7 @@ jobs:
 name: adress-sanitizer
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: lukka/get-cmake@latest
 - uses: lukka/set-shell-env@v1
 with:
@@ -35,7 +35,7 @@ jobs:
 name: undefined-behavior-sanitizer
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: lukka/get-cmake@latest
 - uses: lukka/set-shell-env@v1
 with:

Build.md
@@ -14,7 +14,8 @@ The assimp port in vcpkg is kept up to date by Microsoft team members and commun
 ## Install on Ubuntu
 You can install the Asset-Importer-Lib via apt:
 ```
-sudo apt-get install assimp
+sudo apt-get update
+sudo apt-get install libassimp-dev
 ```

 ## Install pyassimp
@@ -84,23 +85,28 @@ Besides the toolchain, compilation should be the same as for Linux / Unix.

 ### CMake build options
 The cmake-build-environment provides options to configure the build. The following options can be used:
 - **ASSIMP_HUNTER_ENABLED (default OFF)**: Enable Hunter package manager support.
 - **BUILD_SHARED_LIBS (default ON)**: Generation of shared libs (dll for windows, so for Linux). Set this to OFF to get a static lib.
-- **BUILD_FRAMEWORK ( default OFF, MacOnly)**: Build package as Mac OS X Framework bundle
+- **ASSIMP_BUILD_FRAMEWORK (default OFF, MacOnly)**: Build package as Mac OS X Framework bundle.
 - **ASSIMP_DOUBLE_PRECISION (default OFF)**: All data will be stored as double values.
-- **ASSIMP_OPT_BUILD_PACKAGES ( default OFF)**: Set to ON to generate CPack configuration files and packaging targets
-- **ASSIMP_ANDROID_JNIIOSYSTEM ( default OFF )**: Android JNI IOSystem support is active
-- **ASSIMP_NO_EXPORT ( default OFF )**: Disable Assimp's export functionality
-- **ASSIMP_BUILD_ZLIB ( default OFF )**: Build your own zlib
+- **ASSIMP_OPT_BUILD_PACKAGES (default OFF)**: Set to ON to generate CPack configuration files and packaging targets.
+- **ASSIMP_ANDROID_JNIIOSYSTEM (default OFF)**: Android JNI IOSystem support is active.
+- **ASSIMP_NO_EXPORT (default OFF)**: Disable Assimp's export functionality.
+- **ASSIMP_BUILD_ZLIB (default OFF)**: Build our own zlib.
+- **ASSIMP_BUILD_ALL_EXPORTERS_BY_DEFAULT (default ON)**: Build Assimp with all exporters enabled.
+- **ASSIMP_BUILD_ALL_IMPORTERS_BY_DEFAULT (default ON)**: Build Assimp with all importers enabled.
 - **ASSIMP_BUILD_ASSIMP_TOOLS (default ON)**: If the supplementary tools for Assimp are built in addition to the library.
 - **ASSIMP_BUILD_SAMPLES (default OFF)**: If the official samples are built as well (needs Glut).
 - **ASSIMP_BUILD_TESTS (default ON)**: If the test suite for Assimp is built in addition to the library.
 - **ASSIMP_COVERALLS (default OFF)**: Enable this to measure test coverage.
-- **ASSIMP_ERROR_MAX( default OFF)**: Enable all warnings.
-- **ASSIMP_WERROR( default OFF )**: Treat warnings as errors.
 - **ASSIMP_INSTALL (default ON)**: Install Assimp library. Disable this if you want to use Assimp as a submodule.
+- **ASSIMP_WARNINGS_AS_ERRORS (default ON)**: Treat all warnings as errors.
 - **ASSIMP_ASAN (default OFF)**: Enable AddressSanitizer.
+- **ASSIMP_UBSAN (default OFF)**: Enable Undefined Behavior sanitizer.
-- **SYSTEM_IRRXML ( default OFF )**: Use system installed Irrlicht/IrrXML library.
-- **BUILD_DOCS ( default OFF )**: Build documentation using Doxygen.
-- **INJECT_DEBUG_POSTFIX( default ON )**: Inject debug postfix in .a/.so lib names
-- **IGNORE_GIT_HASH ( default OFF )**: Don't call git to get the hash.
+- **ASSIMP_BUILD_DOCS (default OFF)**: Build documentation using Doxygen. OBSOLETE, see https://github.com/assimp/assimp-docs
+- **ASSIMP_INJECT_DEBUG_POSTFIX (default ON)**: Inject debug postfix in .a/.so/.lib/.dll lib names
+- **ASSIMP_IGNORE_GIT_HASH (default OFF)**: Don't call git to get the hash.
+- **ASSIMP_INSTALL_PDB (default ON)**: Install MSVC debug files.
+- **USE_STATIC_CRT (default OFF)**: Link against the static MSVC runtime libraries.
+- **ASSIMP_BUILD_DRACO (default OFF)**: Build Draco libraries. Primarily for glTF.
+- **ASSIMP_BUILD_ASSIMP_VIEW (default ON, if DirectX found, OFF otherwise)**: Build Assimp view tool (requires DirectX).

@@ -56,7 +56,7 @@ IF(ASSIMP_HUNTER_ENABLED)
 add_definitions(-DASSIMP_USE_HUNTER)
 ENDIF()

-PROJECT(Assimp VERSION 5.2.0)
+PROJECT(Assimp VERSION 5.2.4)

 # All supported options ###############################################

@@ -90,7 +90,7 @@ OPTION( ASSIMP_BUILD_ZLIB
 )
 OPTION( ASSIMP_BUILD_ASSIMP_TOOLS
 "If the supplementary tools for Assimp are built in addition to the library."
-ON
+OFF
 )
 OPTION ( ASSIMP_BUILD_SAMPLES
 "If the official samples are built as well (needs Glut)."
@@ -108,9 +108,9 @@ OPTION( ASSIMP_INSTALL
 "Disable this if you want to use assimp as a submodule."
 ON
 )
-OPTION ( ASSIMP_ERROR_MAX
-"Enable all warnings."
-OFF
+OPTION ( ASSIMP_WARNINGS_AS_ERRORS
+"Treat all warnings as errors."
+ON
 )
 OPTION ( ASSIMP_ASAN
 "Enable AddressSanitizer."
@@ -139,10 +139,6 @@ IF (WIN32)
 ADD_DEFINITIONS( -DWIN32_LEAN_AND_MEAN )

 IF(MSVC)
-OPTION (ASSIMP_BUILD_ASSIMP_VIEW
-"If the Assimp view tool is built. (requires DirectX)"
-OFF )
-
 OPTION( ASSIMP_INSTALL_PDB
 "Install MSVC debug files."
 ON )
@@ -164,12 +160,6 @@ IF (WIN32)
 $<$<CONFIG:Debug>:/MTd>
 $<$<CONFIG:Release>:/MT>
 )
-else()
-add_compile_options(
-$<$<CONFIG:>:/MD>
-$<$<CONFIG:Debug>:/MDd>
-$<$<CONFIG:Release>:/MD>
-)
 endif()
 ENDIF()
 ENDIF()
@@ -190,6 +180,7 @@ ENDIF()
 IF(NOT BUILD_SHARED_LIBS)
 MESSAGE(STATUS "Shared libraries disabled")
 SET(LINK_SEARCH_START_STATIC TRUE)
 SET(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX} ${CMAKE_FIND_LIBRARY_SUFFIXES})
 ELSE()
 MESSAGE(STATUS "Shared libraries enabled")
 ENDIF()
@@ -203,8 +194,8 @@ SET (ASSIMP_SOVERSION 5)

 SET( ASSIMP_PACKAGE_VERSION "0" CACHE STRING "the package-specific version used for uploading the sources" )
 if(NOT ASSIMP_HUNTER_ENABLED)
-# Enable C++11 support globally
-set(CMAKE_CXX_STANDARD 11)
+# Enable C++17 support globally
+set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 set(CMAKE_C_STANDARD 99)
 endif()
@@ -237,16 +228,6 @@ IF(ASSIMP_DOUBLE_PRECISION)
 ADD_DEFINITIONS(-DASSIMP_DOUBLE_PRECISION)
 ENDIF()

-CONFIGURE_FILE(
-${CMAKE_CURRENT_LIST_DIR}/revision.h.in
-${CMAKE_CURRENT_BINARY_DIR}/revision.h
-)
-
-CONFIGURE_FILE(
-${CMAKE_CURRENT_LIST_DIR}/include/assimp/config.h.in
-${CMAKE_CURRENT_BINARY_DIR}/include/assimp/config.h
-)
-
 INCLUDE_DIRECTORIES( BEFORE
 ./
 code/
@@ -276,7 +257,7 @@ ENDIF()
 # Grouped compiler settings ########################################
 IF ((CMAKE_C_COMPILER_ID MATCHES "GNU") AND NOT MINGW)
 IF(NOT ASSIMP_HUNTER_ENABLED)
-SET(CMAKE_CXX_STANDARD 11)
+SET(CMAKE_CXX_STANDARD 17)
 SET(CMAKE_POSITION_INDEPENDENT_CODE ON)
 ENDIF()
 # hide all not-exported symbols
@@ -306,7 +287,7 @@ ELSEIF(MSVC)
 SET(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /DEBUG:FULL /PDBALTPATH:%_PDB% /OPT:REF /OPT:ICF")
 ELSEIF (CMAKE_CXX_COMPILER_ID MATCHES "Clang" )
 IF(NOT ASSIMP_HUNTER_ENABLED)
-SET(CMAKE_CXX_STANDARD 11)
+SET(CMAKE_CXX_STANDARD 17)
 SET(CMAKE_POSITION_INDEPENDENT_CODE ON)
 ENDIF()
 SET(CMAKE_CXX_FLAGS "-fvisibility=hidden -fno-strict-aliasing -Wall -Wno-long-long ${CMAKE_CXX_FLAGS}" )
@@ -318,10 +299,14 @@ ELSEIF( MINGW )
 message(WARNING "MinGW is old, if you experience errors, update MinGW.")
 ENDIF()
 IF(NOT ASSIMP_HUNTER_ENABLED)
-SET(CMAKE_CXX_FLAGS "-std=gnu++11 ${CMAKE_CXX_FLAGS}")
+SET(CMAKE_CXX_FLAGS "-std=gnu++17 ${CMAKE_CXX_FLAGS}")
 SET(CMAKE_C_FLAGS "-fPIC ${CMAKE_C_FLAGS}")
 ENDIF()
+IF (CMAKE_BUILD_TYPE STREQUAL "Debug")
+SET(CMAKE_CXX_FLAGS "-fvisibility=hidden -fno-strict-aliasing -Wall -Wno-long-long -Wa,-mbig-obj -g ${CMAKE_CXX_FLAGS}")
+ELSE()
 SET(CMAKE_CXX_FLAGS "-fvisibility=hidden -fno-strict-aliasing -Wall -Wno-long-long -Wa,-mbig-obj -O3 ${CMAKE_CXX_FLAGS}")
+ENDIF()
 SET(CMAKE_C_FLAGS "-fno-strict-aliasing ${CMAKE_C_FLAGS}")
 ENDIF()

@@ -343,16 +328,6 @@ IF (ASSIMP_COVERALLS)
 SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
 ENDIF()

-IF (ASSIMP_ERROR_MAX)
-MESSAGE(STATUS "Turning on all warnings")
-IF (MSVC)
-ADD_COMPILE_OPTIONS(/W4) # NB: there is a /Wall option, pedantic mode
-ELSE()
-SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
-SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall")
-ENDIF()
-ENDIF()
-
 IF (ASSIMP_ASAN)
 MESSAGE(STATUS "AddressSanitizer enabled")
 SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
@@ -713,10 +688,12 @@ ENDIF()

 # Main assimp code
 ADD_SUBDIRECTORY( code/ )

 IF ( ASSIMP_BUILD_ASSIMP_TOOLS )
 # The viewer for windows only
 IF (WIN32)
-OPTION ( ASSIMP_BUILD_ASSIMP_VIEW "If the Assimp view tool is built. (requires DirectX)" OFF )
+FIND_PACKAGE(DirectX)
+OPTION ( ASSIMP_BUILD_ASSIMP_VIEW "If the Assimp view tool is built. (requires DirectX)" ${DirectX_FOUND} )
 IF ( ASSIMP_BUILD_ASSIMP_VIEW )
 ADD_SUBDIRECTORY( tools/assimp_view/ )
 ENDIF ()
@@ -741,12 +718,22 @@ IF ( ASSIMP_BUILD_TESTS )
 ADD_SUBDIRECTORY( test/ )
 ENDIF ()

-# Generate a pkg-config .pc for the Assimp library.
+# Generate a pkg-config .pc, revision.h, and config.h for the Assimp library.
 CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/assimp.pc.in" "${PROJECT_BINARY_DIR}/assimp.pc" @ONLY )
 IF ( ASSIMP_INSTALL )
 INSTALL( FILES "${PROJECT_BINARY_DIR}/assimp.pc" DESTINATION ${ASSIMP_LIB_INSTALL_DIR}/pkgconfig/ COMPONENT ${LIBASSIMP-DEV_COMPONENT})
 ENDIF()
+
+CONFIGURE_FILE(
+${CMAKE_CURRENT_LIST_DIR}/revision.h.in
+${CMAKE_CURRENT_BINARY_DIR}/revision.h
+)
+
+CONFIGURE_FILE(
+${CMAKE_CURRENT_LIST_DIR}/include/assimp/config.h.in
+${CMAKE_CURRENT_BINARY_DIR}/include/assimp/config.h
+)

 IF ( ASSIMP_INSTALL )
 IF(CMAKE_CPACK_COMMAND AND UNIX AND ASSIMP_OPT_BUILD_PACKAGES)
 # Packing information

INSTALL
@@ -14,4 +14,4 @@ https://assimp-docs.readthedocs.io/en/latest/
 Building Assimp
 ------------------------------

-Just check the build-instaructions which you can find here: https://github.com/assimp/assimp/blob/master/Build.md
+Just check the build-instructions which you can find here: https://github.com/assimp/assimp/blob/master/Build.md
@@ -9,9 +9,11 @@ A library to import and export various 3d-model-formats including scene-post-pro
 src="https://scan.coverity.com/projects/5607/badge.svg"/>
 </a>
+[![Codacy Badge](https://app.codacy.com/project/badge/Grade/9973693b7bdd4543b07084d5d9cf4745)](https://www.codacy.com/gh/assimp/assimp/dashboard?utm_source=github.com&utm_medium=referral&utm_content=assimp/assimp&utm_campaign=Badge_Grade)
+
 [![Coverage Status](https://coveralls.io/repos/github/assimp/assimp/badge.svg?branch=master)](https://coveralls.io/github/assimp/assimp?branch=master)
 [![Join the chat at https://gitter.im/assimp/assimp](https://badges.gitter.im/assimp/assimp.svg)](https://gitter.im/assimp/assimp?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/assimp/assimp.svg)](http://isitmaintained.com/project/assimp/assimp "Average time to resolve an issue")
 [![Percentage of issues still open](http://isitmaintained.com/badge/open/assimp/assimp.svg)](http://isitmaintained.com/project/assimp/assimp "Percentage of issues still open")
 [![Total alerts](https://img.shields.io/lgtm/alerts/g/assimp/assimp.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/assimp/assimp/alerts/)
 <br>

@@ -58,6 +60,7 @@ Open Asset Import Library is implemented in C++. The directory structure looks l
 /code Source code
 /contrib Third-party libraries
 /doc Documentation (doxysource and pre-compiled docs)
+/fuzz Contains the test-code for the Google-Fuzzer project
 /include Public header C and C++ header files
 /scripts Scripts used to generate the loading code for some formats
 /port Ports to other languages and scripts to maintain those.
@@ -0,0 +1,16 @@
+# Security Policy
+
+## Supported Versions
+
+Use this section to tell people about which versions of your project are
+currently being supported with security updates.
+
+| Version | Supported |
+| ------- | ------------------ |
+| 5.2.4 | :white_check_mark: |
+
+## Reporting a Vulnerability
+
+If you have found any security vulnerability you can contact us via
+kim.kulling@googlemail.com
+
@@ -55,7 +55,7 @@ if(WIN32) # The only platform it makes sense to check for DirectX SDK
 endif(CMAKE_CL_64)
 find_library(DirectX_LIBRARY NAMES d3d9 HINTS ${DirectX_LIB_SEARCH_PATH} PATH_SUFFIXES ${DirectX_LIBPATH_SUFFIX})
 find_library(DirectX_D3DX9_LIBRARY NAMES d3dx9 HINTS ${DirectX_LIB_SEARCH_PATH} PATH_SUFFIXES ${DirectX_LIBPATH_SUFFIX})
-find_library(DirectX_DXERR_LIBRARY NAMES DxErr HINTS ${DirectX_LIB_SEARCH_PATH} PATH_SUFFIXES ${DirectX_LIBPATH_SUFFIX})
+find_library(DirectX_DXERR_LIBRARY NAMES DxErr DxErr9 HINTS ${DirectX_LIB_SEARCH_PATH} PATH_SUFFIXES ${DirectX_LIBPATH_SUFFIX})
 find_library(DirectX_DXGUID_LIBRARY NAMES dxguid HINTS ${DirectX_LIB_SEARCH_PATH} PATH_SUFFIXES ${DirectX_LIBPATH_SUFFIX})

@@ -54,7 +54,7 @@ macro(clear_if_changed TESTVAR)
 set(${var} "NOTFOUND" CACHE STRING "x" FORCE)
 endforeach(var)
 endif ()
-set(${TESTVAR}_INT_CHECK ${${TESTVAR}} CACHE INTERNAL "x" FORCE)
+set(${TESTVAR}_INT_CHECK "${${TESTVAR}}" CACHE INTERNAL "x" FORCE)
 endmacro(clear_if_changed)

 # Try to get some hints from pkg-config, if available
@@ -262,6 +262,7 @@ void Discreet3DSImporter::ConvertMaterial(D3DS::Material &oldMat,
 unsigned int iWire = 1;
 mat.AddProperty<int>((int *)&iWire, 1, AI_MATKEY_ENABLE_WIREFRAME);
 }
+[[fallthrough]];

 case D3DS::Discreet3DS::Gouraud:
 eShading = aiShadingMode_Gouraud;
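For readers unfamiliar with the attribute added above, here is a minimal, standalone C++17 sketch (not assimp code; the function and values are invented for illustration) of how `[[fallthrough]]` documents an intentional fall-through so diagnostics such as `-Wimplicit-fallthrough` stay quiet:

```cpp
// Illustrative only: a deliberate switch fall-through annotated with [[fallthrough]].
int classify(int mode) {
    int result = 0;
    switch (mode) {
    case 0:
        result += 1;
        [[fallthrough]]; // intentional: mode 0 also receives the handling for mode 1
    case 1:
        result += 2;
        break;
    default:
        break;
    }
    return result; // classify(0) == 3, classify(1) == 2
}
```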
@@ -1284,7 +1284,7 @@ void Discreet3DSImporter::ParseColorChunk(aiColor3D *out, bool acceptPercent) {
 switch (chunk.Flag) {
 case Discreet3DS::CHUNK_LINRGBF:
 bGamma = true;
-
+// fallthrough
 case Discreet3DS::CHUNK_RGBF:
 if (sizeof(float) * 3 > diff) {
 *out = clrError;
@@ -1297,6 +1297,7 @@ void Discreet3DSImporter::ParseColorChunk(aiColor3D *out, bool acceptPercent) {

 case Discreet3DS::CHUNK_LINRGBB:
 bGamma = true;
+// fallthrough
 case Discreet3DS::CHUNK_RGBB: {
 if (sizeof(char) * 3 > diff) {
 *out = clrError;
@@ -74,6 +74,8 @@ namespace XmlTag {
 const char* const pid = "pid";
 const char* const pindex = "pindex";
 const char* const p1 = "p1";
+const char *const p2 = "p2";
+const char *const p3 = "p3";
 const char* const name = "name";
 const char* const type = "type";
 const char* const build = "build";
@@ -186,6 +186,9 @@ D3MFOpcPackage::D3MFOpcPackage(IOSystem *pIOHandler, const std::string &rFile) :
 D3MFOpcPackage::~D3MFOpcPackage() {
 mZipArchive->Close(mRootStream);
 delete mZipArchive;
+for (auto tex : mEmbeddedTextures) {
+delete tex;
+}
 }

 IOStream *D3MFOpcPackage::RootStream() const {
@@ -64,7 +64,7 @@ bool validateColorString(const char *color) {
 return true;
 }

-aiFace ReadTriangle(XmlNode &node) {
+aiFace ReadTriangle(XmlNode &node, int &texId0, int &texId1, int &texId2) {
 aiFace face;

 face.mNumIndices = 3;
@@ -73,6 +73,11 @@ aiFace ReadTriangle(XmlNode &node) {
 face.mIndices[1] = static_cast<unsigned int>(std::atoi(node.attribute(XmlTag::v2).as_string()));
 face.mIndices[2] = static_cast<unsigned int>(std::atoi(node.attribute(XmlTag::v3).as_string()));

+texId0 = texId1 = texId2 = -1;
+XmlParser::getIntAttribute(node, XmlTag::p1, texId0);
+XmlParser::getIntAttribute(node, XmlTag::p2, texId1);
+XmlParser::getIntAttribute(node, XmlTag::p3, texId2);
+
 return face;
 }

@@ -106,7 +111,7 @@ bool getNodeAttribute(const XmlNode &node, const std::string &attribute, int &va
 return false;
 }

-aiMatrix4x4 parseTransformMatrix(std::string matrixStr) {
+aiMatrix4x4 parseTransformMatrix(const std::string& matrixStr) {
 // split the string
 std::vector<float> numbers;
 std::string currentNumber;
@@ -412,6 +417,9 @@ void XmlSerializer::ImportTriangles(XmlNode &node, aiMesh *mesh) {
 bool hasPid = getNodeAttribute(currentNode, D3MF::XmlTag::pid, pid);
 bool hasP1 = getNodeAttribute(currentNode, D3MF::XmlTag::p1, p1);

+int texId[3];
+Texture2DGroup *group = nullptr;
+aiFace face = ReadTriangle(currentNode, texId[0], texId[1], texId[2]);
 if (hasPid && hasP1) {
 auto it = mResourcesDictionnary.find(pid);
 if (it != mResourcesDictionnary.end()) {
@@ -420,23 +428,34 @@ void XmlSerializer::ImportTriangles(XmlNode &node, aiMesh *mesh) {
 mesh->mMaterialIndex = baseMaterials->mMaterialIndex[p1];
 } else if (it->second->getType() == ResourceType::RT_Texture2DGroup) {
 if (mesh->mTextureCoords[0] == nullptr) {
-Texture2DGroup *group = static_cast<Texture2DGroup *>(it->second);
 mesh->mNumUVComponents[0] = 2;
 for (unsigned int i = 1; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
 mesh->mNumUVComponents[i] = 0;
 }

+group = static_cast<Texture2DGroup *>(it->second);
 const std::string name = ai_to_string(group->mTexId);
 for (size_t i = 0; i < mMaterials.size(); ++i) {
 if (name == mMaterials[i]->GetName().C_Str()) {
 mesh->mMaterialIndex = static_cast<unsigned int>(i);
 }
 }
-mesh->mTextureCoords[0] = new aiVector3D[group->mTex2dCoords.size()];
-for (unsigned int i = 0; i < group->mTex2dCoords.size(); ++i) {
-mesh->mTextureCoords[0][i] = aiVector3D(group->mTex2dCoords[i].x, group->mTex2dCoords[i].y, 0);
-}
+mesh->mTextureCoords[0] = new aiVector3D[mesh->mNumVertices];
 }
 }
 }
 }

-aiFace face = ReadTriangle(currentNode);
+// Load texture coordinates into mesh, when any
+if (group != nullptr) {
+size_t i0 = face.mIndices[0];
+size_t i1 = face.mIndices[1];
+size_t i2 = face.mIndices[2];
+mesh->mTextureCoords[0][i0] = aiVector3D(group->mTex2dCoords[texId[0]].x, group->mTex2dCoords[texId[0]].y, 0.0f);
+mesh->mTextureCoords[0][i1] = aiVector3D(group->mTex2dCoords[texId[1]].x, group->mTex2dCoords[texId[1]].y, 0.0f);
+mesh->mTextureCoords[0][i2] = aiVector3D(group->mTex2dCoords[texId[2]].x, group->mTex2dCoords[texId[2]].y, 0.0f);
+}

 faces.push_back(face);
 }
 }
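A hedged sketch of the mapping the new 3MF code above performs (a simplified, hypothetical helper; the real logic is the diff itself): the per-triangle p1/p2/p3 indices read by ReadTriangle() select entries of a texture-2D-group's coordinate table, and those UVs are written into the triangle's three vertex slots of mesh->mTextureCoords[0].

```cpp
#include <assimp/vector3.h>
#include <cstddef>
#include <vector>

// Hypothetical helper, assuming the caller already allocated
// mesh->mTextureCoords[0] with mNumVertices entries, as the diff above does.
struct Uv { float x, y; };

void assignTriangleUVs(aiVector3D *textureCoords,          // mesh->mTextureCoords[0]
                       const unsigned int indices[3],       // face.mIndices
                       const std::vector<Uv> &tex2dCoords,  // group->mTex2dCoords
                       const int texId[3]) {                // p1/p2/p3 per corner, -1 if absent
    for (int corner = 0; corner < 3; ++corner) {
        if (texId[corner] < 0) {
            continue; // this corner carried no p1/p2/p3 attribute
        }
        const Uv &uv = tex2dCoords[static_cast<std::size_t>(texId[corner])];
        textureCoords[indices[corner]] = aiVector3D(uv.x, uv.y, 0.0f);
    }
}
```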
@@ -578,11 +597,15 @@ aiMaterial *XmlSerializer::readMaterialDef(XmlNode &node, unsigned int basemater
 }

 void XmlSerializer::StoreMaterialsInScene(aiScene *scene) {
-if (nullptr == scene || mMaterials.empty()) {
+if (nullptr == scene) {
 return;
 }

 scene->mNumMaterials = static_cast<unsigned int>(mMaterials.size());
+if (scene->mNumMaterials == 0) {
+return;
+}
+
 scene->mMaterials = new aiMaterial *[scene->mNumMaterials];
 for (size_t i = 0; i < mMaterials.size(); ++i) {
 scene->mMaterials[i] = mMaterials[i];
|
|||
unsigned int iWire = 1;
|
||||
mat.pcInstance->AddProperty<int>((int *)&iWire, 1, AI_MATKEY_ENABLE_WIREFRAME);
|
||||
}
|
||||
// fallthrough
|
||||
case D3DS::Discreet3DS::Gouraud:
|
||||
eShading = aiShadingMode_Gouraud;
|
||||
break;
|
||||
|
|
|
@@ -74,7 +74,7 @@ using namespace Assimp::ASE;
 return; \
 } \
 } \
-else if ('\0' == *filePtr) { \
+if ('\0' == *filePtr) { \
 return; \
 } \
 if (IsLineEnd(*filePtr) && !bLastWasEndLine) { \
@@ -420,6 +420,8 @@ void Parser::ParseLV1SoftSkinBlock() {
 }
 }
 }
+if (*filePtr == '\0')
+return;
 ++filePtr;
 SkipSpacesAndLineEnd(&filePtr);
 }
@@ -646,10 +648,13 @@ void Parser::ParseLV2MaterialBlock(ASE::Material &mat) {
 }

 // get a reference to the material
+if (iIndex < mat.avSubMaterials.size()) {
 Material &sMat = mat.avSubMaterials[iIndex];

 // parse the material block
 ParseLV2MaterialBlock(sMat);
+}

 continue;
 }
 }
@@ -365,7 +365,7 @@ static void WriteDump(const char *pFile, const char *cmd, const aiScene *scene,

 ioprintf(io, "\t\t\t<MatProperty key=\"%s\" \n\t\t\ttype=\"%s\" tex_usage=\"%s\" tex_index=\"%u\"",
 prop->mKey.data, sz,
-::TextureTypeToString((aiTextureType)prop->mSemantic), prop->mIndex);
+::aiTextureTypeToString((aiTextureType)prop->mSemantic), prop->mIndex);

 if (prop->mType == aiPTI_Float) {
 ioprintf(io, " size=\"%i\">\n\t\t\t\t",
@@ -325,10 +325,10 @@ void SectionParser ::Next() {
 stream.SetCurrentPos(current.start + current.size);

 const char tmp[] = {
-(const char)stream.GetI1(),
-(const char)stream.GetI1(),
-(const char)stream.GetI1(),
-(const char)stream.GetI1()
+(char)stream.GetI1(),
+(char)stream.GetI1(),
+(char)stream.GetI1(),
+(char)stream.GetI1()
 };
 current.id = std::string(tmp, tmp[3] ? 4 : tmp[2] ? 3 : tmp[1] ? 2 : 1);

@@ -281,7 +281,7 @@ void BlenderImporter::ExtractScene(Scene &out, const FileDatabase &file) {
 }

 // ------------------------------------------------------------------------------------------------
-void BlenderImporter::ParseSubCollection(const Blender::Scene &in, aiNode *root, std::shared_ptr<Collection> collection, ConversionData &conv_data) {
+void BlenderImporter::ParseSubCollection(const Blender::Scene &in, aiNode *root, const std::shared_ptr<Collection>& collection, ConversionData &conv_data) {

 std::deque<Object *> root_objects;
 // Count number of objects
@@ -986,7 +986,7 @@ void BlenderImporter::ConvertMesh(const Scene & /*in*/, const Object * /*obj*/,
 // key is material number, value is the TextureUVMapping for the material
 typedef std::map<uint32_t, TextureUVMapping> MaterialTextureUVMappings;
 MaterialTextureUVMappings matTexUvMappings;
-const uint32_t maxMat = static_cast<const uint32_t>(mesh->mat.size());
+const uint32_t maxMat = static_cast<uint32_t>(mesh->mat.size());
 for (uint32_t m = 0; m < maxMat; ++m) {
 // get material by index
 const std::shared_ptr<Material> pMat = mesh->mat[m];
@@ -117,7 +117,7 @@ protected:
 void InternReadFile(const std::string &pFile, aiScene *pScene, IOSystem *pIOHandler) override;
 void ParseBlendFile(Blender::FileDatabase &out, std::shared_ptr<IOStream> stream);
 void ExtractScene(Blender::Scene &out, const Blender::FileDatabase &file);
-void ParseSubCollection(const Blender::Scene &in, aiNode *root, std::shared_ptr<Blender::Collection> collection, Blender::ConversionData &conv_data);
+void ParseSubCollection(const Blender::Scene &in, aiNode *root, const std::shared_ptr<Blender::Collection>& collection, Blender::ConversionData &conv_data);
 void ConvertBlendFile(aiScene *out, const Blender::Scene &in, const Blender::FileDatabase &file);

 private:
@@ -621,6 +621,11 @@ struct Animation {

 for (std::vector<Animation *>::iterator it = pParent->mSubAnims.begin(); it != pParent->mSubAnims.end();) {
 Animation *anim = *it;
+// Assign the first animation name to the parent if empty.
+// This prevents the animation name from being lost when animations are combined
+if (mName.empty()) {
+mName = anim->mName;
+}
 CombineSingleChannelAnimationsRecursively(anim);

 if (childrenAnimationsHaveDifferentChannels && anim->mChannels.size() == 1 &&
@@ -102,6 +102,7 @@ ColladaLoader::ColladaLoader() :
 mTextures(),
 mAnims(),
 noSkeletonMesh(false),
+removeEmptyBones(false),
 ignoreUpDirection(false),
 useColladaName(false),
 mNodeNameCounter(0) {
@@ -130,6 +131,7 @@ bool ColladaLoader::CanRead(const std::string &pFile, IOSystem *pIOHandler, bool
 // ------------------------------------------------------------------------------------------------
 void ColladaLoader::SetupProperties(const Importer *pImp) {
 noSkeletonMesh = pImp->GetPropertyInteger(AI_CONFIG_IMPORT_NO_SKELETON_MESHES, 0) != 0;
+removeEmptyBones = pImp->GetPropertyInteger(AI_CONFIG_IMPORT_REMOVE_EMPTY_BONES, true) != 0;
 ignoreUpDirection = pImp->GetPropertyInteger(AI_CONFIG_IMPORT_COLLADA_IGNORE_UP_DIRECTION, 0) != 0;
 useColladaName = pImp->GetPropertyInteger(AI_CONFIG_IMPORT_COLLADA_USE_COLLADA_NAMES, 0) != 0;
 }
@@ -798,9 +800,10 @@ aiMesh *ColladaLoader::CreateMesh(const ColladaParser &pParser, const Mesh *pSrc
 // count the number of bones which influence vertices of the current submesh
 size_t numRemainingBones = 0;
 for (const auto & dstBone : dstBones) {
-if (!dstBone.empty()) {
-++numRemainingBones;
+if (dstBone.empty() && removeEmptyBones) {
+continue;
 }
+++numRemainingBones;
 }

 // create bone array and copy bone weights one by one
@@ -809,7 +812,7 @@ aiMesh *ColladaLoader::CreateMesh(const ColladaParser &pParser, const Mesh *pSrc
 size_t boneCount = 0;
 for (size_t a = 0; a < numBones; ++a) {
 // omit bones without weights
-if (dstBones[a].empty()) {
+if (dstBones[a].empty() && removeEmptyBones) {
 continue;
 }

@@ -237,6 +237,7 @@ protected:
 std::vector<aiAnimation *> mAnims;

 bool noSkeletonMesh;
+bool removeEmptyBones;
 bool ignoreUpDirection;
 bool useColladaName;

@@ -1616,6 +1616,7 @@ void ColladaParser::ReadIndexData(XmlNode &node, Mesh &pMesh) {
 XmlParser::getValueAsString(currentNode, v);
 const char *content = v.c_str();
+vcount.reserve(numPrimitives);
 SkipSpacesAndLineEnd(&content);
 for (unsigned int a = 0; a < numPrimitives; a++) {
 if (*content == 0) {
 throw DeadlyImportError("Expected more values while reading <vcount> contents.");
@@ -2057,7 +2058,7 @@ void ColladaParser::ReadSceneNode(XmlNode &node, Node *pNode) {
 XmlParser::getStdStrAttribute(currentNode, "id", child->mID);
 }
 if (XmlParser::hasAttribute(currentNode, "sid")) {
-XmlParser::getStdStrAttribute(currentNode, "id", child->mSID);
+XmlParser::getStdStrAttribute(currentNode, "sid", child->mSID);
 }
 if (XmlParser::hasAttribute(currentNode, "name")) {
 XmlParser::getStdStrAttribute(currentNode, "name", child->mName);
@@ -368,7 +368,9 @@ void DXFImporter::ExpandBlockReferences(DXF::Block& bl,const DXF::BlockMap& bloc
 // XXX this would be the place to implement recursive expansion if needed.
 const DXF::Block& bl_src = *(*it).second;

-for (std::shared_ptr<const DXF::PolyLine> pl_in : bl_src.lines) {
+const size_t size = bl_src.lines.size(); // the size may increase in the loop
+for (size_t i = 0; i < size; ++i) {
+std::shared_ptr<const DXF::PolyLine> pl_in = bl_src.lines[i];
 if (!pl_in) {
 ASSIMP_LOG_ERROR("DXF: PolyLine instance is nullptr, skipping.");
 continue;
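A standalone illustration (not assimp code) of why the loop above was rewritten: appending to a std::vector while range-iterating it can invalidate the iterators the range-for holds, whereas indexing against a size captured up front only visits the original elements and stays valid across reallocation.

```cpp
#include <cstddef>
#include <vector>

int main() {
    std::vector<int> lines = {1, 2, 3};

    // Unsafe pattern (what the old code resembled): appending inside a range-for
    // over the same vector may invalidate the iterators driving the loop.
    // for (int value : lines) { lines.push_back(value * 10); } // undefined behavior

    // Safe pattern (what the new code does): capture the size first and index.
    const std::size_t size = lines.size(); // the size may increase in the loop
    for (std::size_t i = 0; i < size; ++i) {
        lines.push_back(lines[i] * 10);    // indexing stays valid after reallocation
    }
    return 0; // lines now holds {1, 2, 3, 10, 20, 30}
}
```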
@@ -4,7 +4,6 @@ Open Asset Import Library (assimp)

 Copyright (c) 2006-2022, assimp team

-
 All rights reserved.

 Redistribution and use of this software in source and binary forms,
@@ -87,11 +86,6 @@ AnimationCurve::AnimationCurve(uint64_t id, const Element &element, const std::s
 }
 }

-// ------------------------------------------------------------------------------------------------
-AnimationCurve::~AnimationCurve() {
-// empty
-}
-
 // ------------------------------------------------------------------------------------------------
 AnimationCurveNode::AnimationCurveNode(uint64_t id, const Element &element, const std::string &name,
 const Document &doc, const char *const *target_prop_whitelist /*= nullptr*/,
@@ -147,11 +141,6 @@ AnimationCurveNode::AnimationCurveNode(uint64_t id, const Element &element, cons
 props = GetPropertyTable(doc, "AnimationCurveNode.FbxAnimCurveNode", element, sc, false);
 }

-// ------------------------------------------------------------------------------------------------
-AnimationCurveNode::~AnimationCurveNode() {
-// empty
-}
-
 // ------------------------------------------------------------------------------------------------
 const AnimationCurveMap &AnimationCurveNode::Curves() const {
 if (curves.empty()) {
@@ -193,11 +182,6 @@ AnimationLayer::AnimationLayer(uint64_t id, const Element &element, const std::s
 props = GetPropertyTable(doc, "AnimationLayer.FbxAnimLayer", element, sc, true);
 }

-// ------------------------------------------------------------------------------------------------
-AnimationLayer::~AnimationLayer() {
-// empty
-}
-
 // ------------------------------------------------------------------------------------------------
 AnimationCurveNodeList AnimationLayer::Nodes(const char *const *target_prop_whitelist /*= nullptr*/,
 size_t whitelist_size /*= 0*/) const {
@@ -279,11 +263,6 @@ AnimationStack::AnimationStack(uint64_t id, const Element &element, const std::s
 }
 }

-// ------------------------------------------------------------------------------------------------
-AnimationStack::~AnimationStack() {
-// empty
-}
-
 } // namespace FBX
 } // namespace Assimp

@@ -472,7 +472,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
 }
 catch (const DeadlyImportError& e)
 {
-if (!is64bits && (length > std::numeric_limits<std::uint32_t>::max())) {
+if (!is64bits && (length > std::numeric_limits<uint32_t>::max())) {
 throw DeadlyImportError("The FBX file is invalid. This may be because the content is too big for this older version (", ai_to_string(version), ") of the FBX format. (", e.what(), ")");
 }
 throw;
@@ -50,7 +50,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 namespace Assimp {
 namespace FBX {

-const std::string NULL_RECORD = { // 25 null bytes in 64-bit and 13 null bytes in 32-bit
+static constexpr size_t NumNullRecords = 25;
+const char NULL_RECORD[NumNullRecords] = { // 25 null bytes in 64-bit and 13 null bytes in 32-bit
 '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0',
 '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0'
 }; // who knows why, it looks like two integers 32/64 bit (compressed and uncompressed sizes?) + 1 byte (might be compression type?)
@@ -4,7 +4,6 @@ Open Asset Import Library (assimp)

 Copyright (c) 2006-2022, assimp team

-
 All rights reserved.

 Redistribution and use of this software in source and binary forms,
@@ -65,12 +65,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <stdlib.h>
 #include <cstdint>
 #include <iomanip>
-#include <iostream>
-#include <iterator>
 #include <memory>
 #include <sstream>
-#include <tuple>
 #include <vector>

 namespace Assimp {
 namespace FBX {
@@ -187,8 +184,7 @@ std::string FBXConverter::MakeUniqueNodeName(const Model *const model, const aiN

 /// This struct manages nodes which may or may not end up in the node hierarchy.
 /// When a node becomes a child of another node, that node becomes its owner and mOwnership should be released.
-struct FBXConverter::PotentialNode
-{
+struct FBXConverter::PotentialNode {
 PotentialNode() : mOwnership(new aiNode), mNode(mOwnership.get()) {}
 PotentialNode(const std::string& name) : mOwnership(new aiNode(name)), mNode(mOwnership.get()) {}
 aiNode* operator->() { return mNode; }
@@ -231,7 +227,6 @@ void FBXConverter::ConvertNodes(uint64_t id, aiNode *parent, aiNode *root_node)
 if (nullptr != model) {
 nodes_chain.clear();
 post_nodes_chain.clear();
-
 aiMatrix4x4 new_abs_transform = parent->mTransformation;
 std::string node_name = FixNodeName(model->Name());
 // even though there is only a single input node, the design of
@@ -266,8 +261,6 @@ void FBXConverter::ConvertNodes(uint64_t id, aiNode *parent, aiNode *root_node)

 child->mParent = last_parent;
 last_parent = child.mNode;
-
-new_abs_transform *= child->mTransformation;
 }

 // attach geometry
@@ -290,8 +283,6 @@ void FBXConverter::ConvertNodes(uint64_t id, aiNode *parent, aiNode *root_node)

 postnode->mParent = last_parent;
 last_parent = postnode.mNode;
-
-new_abs_transform *= postnode->mTransformation;
 }
 } else {
 // free the nodes we allocated as we don't need them
@@ -452,7 +443,7 @@ void FBXConverter::GetUniqueName(const std::string &name, std::string &uniqueNam
 auto it_pair = mNodeNames.insert({ name, 0 }); // duplicate node name instance count
 unsigned int &i = it_pair.first->second;
 while (!it_pair.second) {
-i++;
+++i;
 std::ostringstream ext;
 ext << name << std::setfill('0') << std::setw(3) << i;
 uniqueName = ext.str();
@@ -651,9 +642,8 @@ void FBXConverter::GetRotationMatrix(Model::RotOrder mode, const aiVector3D &rot

 bool FBXConverter::NeedsComplexTransformationChain(const Model &model) {
 const PropertyTable &props = model.Props();
-bool ok;

-const float zero_epsilon = ai_epsilon;
+const auto zero_epsilon = ai_epsilon;
 const aiVector3D all_ones(1.0f, 1.0f, 1.0f);
 for (size_t i = 0; i < TransformationComp_MAXIMUM; ++i) {
 const TransformationComp comp = static_cast<TransformationComp>(i);
@@ -665,6 +655,7 @@ bool FBXConverter::NeedsComplexTransformationChain(const Model &model) {

 bool scale_compare = (comp == TransformationComp_GeometricScaling || comp == TransformationComp_Scaling);

+bool ok = true;
 const aiVector3D &v = PropertyGet<aiVector3D>(props, NameTransformationCompProperty(comp), ok);
 if (ok && scale_compare) {
 if ((v - all_ones).SquareLength() > zero_epsilon) {
@@ -899,20 +890,17 @@ void FBXConverter::SetupNodeMetadata(const Model &model, aiNode &nd) {
 }
 }

-void FBXConverter::ConvertModel(const Model &model, aiNode *parent, aiNode *root_node,
-const aiMatrix4x4 &absolute_transform) {
+void FBXConverter::ConvertModel(const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform) {
 const std::vector<const Geometry *> &geos = model.GetGeometry();

 std::vector<unsigned int> meshes;
 meshes.reserve(geos.size());

 for (const Geometry *geo : geos) {
-
 const MeshGeometry *const mesh = dynamic_cast<const MeshGeometry *>(geo);
 const LineGeometry *const line = dynamic_cast<const LineGeometry *>(geo);
 if (mesh) {
-const std::vector<unsigned int> &indices = ConvertMesh(*mesh, model, parent, root_node,
-absolute_transform);
+const std::vector<unsigned int> &indices = ConvertMesh(*mesh, model, parent, root_node, absolute_transform);
 std::copy(indices.begin(), indices.end(), std::back_inserter(meshes));
 } else if (line) {
 const std::vector<unsigned int> &indices = ConvertLine(*line, root_node);
@@ -933,8 +921,7 @@ void FBXConverter::ConvertModel(const Model &model, aiNode *parent, aiNode *root
 }

 std::vector<unsigned int>
-FBXConverter::ConvertMesh(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node,
-const aiMatrix4x4 &absolute_transform) {
+FBXConverter::ConvertMesh(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform) {
 std::vector<unsigned int> temp;

 MeshMap::const_iterator it = meshes_converted.find(&mesh);
|
|||
const MatIndexArray::value_type base = mindices[0];
|
||||
for (MatIndexArray::value_type index : mindices) {
|
||||
if (index != base) {
|
||||
return ConvertMeshMultiMaterial(mesh, model, parent, root_node, absolute_transform);
|
||||
return ConvertMeshMultiMaterial(mesh, model, absolute_transform, parent, root_node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1031,9 +1018,36 @@ aiMesh *FBXConverter::SetupEmptyMesh(const Geometry &mesh, aiNode *parent) {
|
|||
return out_mesh;
|
||||
}
|
||||
|
||||
unsigned int FBXConverter::ConvertMeshSingleMaterial(const MeshGeometry &mesh, const Model &model,
|
||||
const aiMatrix4x4 &absolute_transform, aiNode *parent,
|
||||
aiNode *) {
|
||||
static aiSkeleton *createAiSkeleton(SkeletonBoneContainer &sbc) {
|
||||
if (sbc.MeshArray.empty() || sbc.SkeletonBoneToMeshLookup.empty()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
aiSkeleton *skeleton = new aiSkeleton;
|
||||
for (auto *mesh : sbc.MeshArray) {
|
||||
auto it = sbc.SkeletonBoneToMeshLookup.find(mesh);
|
||||
if (it == sbc.SkeletonBoneToMeshLookup.end()) {
|
||||
continue;
|
||||
}
|
||||
SkeletonBoneArray *ba = it->second;
|
||||
if (ba == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
skeleton->mNumBones = static_cast<unsigned int>(ba->size());
|
||||
skeleton->mBones = new aiSkeletonBone*[skeleton->mNumBones];
|
||||
size_t index = 0;
|
||||
for (auto bone : (* ba)) {
|
||||
skeleton->mBones[index] = bone;
|
||||
++index;
|
||||
}
|
||||
}
|
||||
|
||||
return skeleton;
|
||||
}
|
||||
|
||||
unsigned int FBXConverter::ConvertMeshSingleMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform,
|
||||
aiNode *parent, aiNode *) {
|
||||
const MatIndexArray &mindices = mesh.GetMaterialIndices();
|
||||
aiMesh *const out_mesh = SetupEmptyMesh(mesh, parent);
|
||||
|
||||
|
@ -1151,8 +1165,15 @@ unsigned int FBXConverter::ConvertMeshSingleMaterial(const MeshGeometry &mesh, c
|
|||
ConvertMaterialForMesh(out_mesh, model, mesh, mindices[0]);
|
||||
}
|
||||
|
||||
if (doc.Settings().readWeights && mesh.DeformerSkin() != nullptr) {
|
||||
if (doc.Settings().readWeights && mesh.DeformerSkin() != nullptr && !doc.Settings().useSkeleton) {
|
||||
ConvertWeights(out_mesh, mesh, absolute_transform, parent, NO_MATERIAL_SEPARATION, nullptr);
|
||||
} else if (doc.Settings().readWeights && mesh.DeformerSkin() != nullptr && doc.Settings().useSkeleton) {
|
||||
SkeletonBoneContainer sbc;
|
||||
ConvertWeightsToSkeleton(out_mesh, mesh, absolute_transform, parent, NO_MATERIAL_SEPARATION, nullptr, sbc);
|
||||
aiSkeleton *skeleton = createAiSkeleton(sbc);
|
||||
if (skeleton != nullptr) {
|
||||
mSkeletons.emplace_back(skeleton);
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<aiAnimMesh *> animMeshes;
|
||||
|
@ -1199,9 +1220,8 @@ unsigned int FBXConverter::ConvertMeshSingleMaterial(const MeshGeometry &mesh, c
|
|||
}
|
||||
|
||||
std::vector<unsigned int>
|
||||
FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, aiNode *parent,
|
||||
aiNode *root_node,
|
||||
const aiMatrix4x4 &absolute_transform) {
|
||||
FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, aiNode *parent,
|
||||
aiNode *root_node) {
|
||||
const MatIndexArray &mindices = mesh.GetMaterialIndices();
|
||||
ai_assert(mindices.size());
|
||||
|
||||
|
@ -1211,7 +1231,7 @@ FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &mo
|
|||
for (MatIndexArray::value_type index : mindices) {
|
||||
if (had.find(index) == had.end()) {
|
||||
|
||||
indices.push_back(ConvertMeshMultiMaterial(mesh, model, index, parent, root_node, absolute_transform));
|
||||
indices.push_back(ConvertMeshMultiMaterial(mesh, model, absolute_transform, index, parent, root_node));
|
||||
had.insert(index);
|
||||
}
|
||||
}
|
||||
|
@ -1219,10 +1239,8 @@ FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &mo
|
|||
return indices;
|
||||
}
|
||||
|
||||
unsigned int FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model,
|
||||
MatIndexArray::value_type index,
|
||||
aiNode *parent, aiNode *,
|
||||
const aiMatrix4x4 &absolute_transform) {
|
||||
unsigned int FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform,
|
||||
MatIndexArray::value_type index, aiNode *parent, aiNode *) {
|
||||
aiMesh *const out_mesh = SetupEmptyMesh(mesh, parent);
|
||||
|
||||
const MatIndexArray &mindices = mesh.GetMaterialIndices();
|
||||
|
@ -1435,20 +1453,47 @@ unsigned int FBXConverter::ConvertMeshMultiMaterial(const MeshGeometry &mesh, co
|
|||
return static_cast<unsigned int>(mMeshes.size() - 1);
|
||||
}
|
||||
|
||||
void FBXConverter::ConvertWeights(aiMesh *out, const MeshGeometry &geo,
|
||||
const aiMatrix4x4 &absolute_transform,
|
||||
static void copyBoneToSkeletonBone(aiMesh *mesh, aiBone *bone, aiSkeletonBone *skeletonBone ) {
|
||||
skeletonBone->mNumnWeights = bone->mNumWeights;
|
||||
skeletonBone->mWeights = bone->mWeights;
|
||||
skeletonBone->mOffsetMatrix = bone->mOffsetMatrix;
|
||||
skeletonBone->mMeshId = mesh;
|
||||
skeletonBone->mNode = bone->mNode;
|
||||
skeletonBone->mParent = -1;
|
||||
}
|
||||
|
||||
void FBXConverter::ConvertWeightsToSkeleton(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform, aiNode *parent, unsigned int materialIndex,
|
||||
std::vector<unsigned int> *outputVertStartIndices, SkeletonBoneContainer &skeletonContainer) {
|
||||
|
||||
if (skeletonContainer.SkeletonBoneToMeshLookup.find(out) != skeletonContainer.SkeletonBoneToMeshLookup.end()) {
|
||||
return;
|
||||
}
|
||||
|
||||
ConvertWeights(out, geo, absolute_transform, parent, materialIndex, outputVertStartIndices);
|
||||
skeletonContainer.MeshArray.emplace_back(out);
|
||||
SkeletonBoneArray *ba = new SkeletonBoneArray;
|
||||
for (size_t i = 0; i < out->mNumBones; ++i) {
|
||||
aiBone *bone = out->mBones[i];
|
||||
if (bone == nullptr) {
|
||||
continue;
|
||||
}
|
||||
aiSkeletonBone *skeletonBone = new aiSkeletonBone;
|
||||
copyBoneToSkeletonBone(out, bone, skeletonBone);
|
||||
ba->emplace_back(skeletonBone);
|
||||
}
|
||||
skeletonContainer.SkeletonBoneToMeshLookup[out] = ba;
|
||||
}
|
||||
|
||||
void FBXConverter::ConvertWeights(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform,
|
||||
aiNode *parent, unsigned int materialIndex,
|
||||
std::vector<unsigned int> *outputVertStartIndices) {
|
||||
ai_assert(geo.DeformerSkin());
|
||||
|
||||
std::vector<size_t> out_indices;
|
||||
std::vector<size_t> index_out_indices;
|
||||
std::vector<size_t> count_out_indices;
|
||||
std::vector<size_t> out_indices, index_out_indices, count_out_indices;
|
||||
|
||||
const Skin &sk = *geo.DeformerSkin();
|
||||
|
||||
std::vector<aiBone*> bones;
|
||||
|
||||
const bool no_mat_check = materialIndex == NO_MATERIAL_SEPARATION;
|
||||
ai_assert(no_mat_check || outputVertStartIndices);
|
||||
|
||||
|
@@ -1521,26 +1566,20 @@ void FBXConverter::ConvertWeights(aiMesh *out, const MeshGeometry &geo,
 out->mBones = nullptr;
 out->mNumBones = 0;
 return;
-} else {
+}
+
 out->mBones = new aiBone *[bones.size()]();
 out->mNumBones = static_cast<unsigned int>(bones.size());

 std::swap_ranges(bones.begin(), bones.end(), out->mBones);
-}
 }

-const aiNode *GetNodeByName(aiNode *current_node) {
-aiNode *iter = current_node;
-//printf("Child count: %d", iter->mNumChildren);
-return iter;
-}
-
-void FBXConverter::ConvertCluster(std::vector<aiBone *> &local_mesh_bones, const Cluster *cl,
+void FBXConverter::ConvertCluster(std::vector<aiBone*> &local_mesh_bones, const Cluster *cluster,
 std::vector<size_t> &out_indices, std::vector<size_t> &index_out_indices,
-std::vector<size_t> &count_out_indices, const aiMatrix4x4 &absolute_transform,
+std::vector<size_t> &count_out_indices, const aiMatrix4x4 & /* absolute_transform*/,
 aiNode *) {
-ai_assert(cl); // make sure cluster valid
-std::string deformer_name = cl->TargetNode()->Name();
+ai_assert(cluster != nullptr); // make sure cluster valid
+
+std::string deformer_name = cluster->TargetNode()->Name();
 aiString bone_name = aiString(FixNodeName(deformer_name));

 aiBone *bone = nullptr;
@@ -1553,14 +1592,16 @@ void FBXConverter::ConvertCluster(std::vector<aiBone *> &local_mesh_bones, const
 bone = new aiBone();
 bone->mName = bone_name;

+bone->mOffsetMatrix = cluster->Transform();
-// store local transform link for post processing
-bone->mOffsetMatrix = cl->TransformLink();
+/*
+bone->mOffsetMatrix = cluster->TransformLink();
 bone->mOffsetMatrix.Inverse();

 aiMatrix4x4 matrix = (aiMatrix4x4)absolute_transform;

 bone->mOffsetMatrix = bone->mOffsetMatrix * matrix; // * mesh_offset

+*/
 //
 // Now calculate the aiVertexWeights
 //
|
@@ -1571,7 +1612,7 @@ void FBXConverter::ConvertCluster(std::vector<aiBone *> &local_mesh_bones, const
 cursor = bone->mWeights = new aiVertexWeight[out_indices.size()];

 const size_t no_index_sentinel = std::numeric_limits<size_t>::max();
-const WeightArray &weights = cl->GetWeights();
+const WeightArray &weights = cluster->GetWeights();

 const size_t c = index_out_indices.size();
 for (size_t i = 0; i < c; ++i) {
|
@@ -2161,6 +2202,9 @@ void FBXConverter::SetShadingPropertiesCommon(aiMaterial *out_mat, const Propert
 const float ShininessExponent = PropertyGet<float>(props, "ShininessExponent", ok);
 if (ok) {
 out_mat->AddProperty(&ShininessExponent, 1, AI_MATKEY_SHININESS);
+// Match Blender behavior to extract roughness when only shininess is present
+const float roughness = 1.0f - (sqrt(ShininessExponent) / 10.0f);
+out_mat->AddProperty(&roughness, 1, AI_MATKEY_ROUGHNESS_FACTOR);
 }

 // TransparentColor / TransparencyFactor... gee thanks FBX :rolleyes:
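A quick standalone restatement of the shininess-to-roughness mapping added above (illustration only; the constant 10 and the formula are taken directly from the diff, the helper name is made up):

```cpp
#include <cassert>
#include <cmath>

// roughness = 1 - sqrt(shininess exponent) / 10, as in the FBX converter change above.
static float roughnessFromShininess(float shininessExponent) {
    return 1.0f - (std::sqrt(shininessExponent) / 10.0f);
}

int main() {
    assert(std::fabs(roughnessFromShininess(25.0f) - 0.5f) < 1e-6f);  // sqrt(25)/10  = 0.5
    assert(std::fabs(roughnessFromShininess(100.0f) - 0.0f) < 1e-6f); // sqrt(100)/10 = 1.0
    return 0;
}
```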
|
@@ -2613,7 +2657,7 @@ void FBXConverter::ConvertAnimationStack(const AnimationStack &st) {
 meshMorphAnim->mNumKeys = numKeys;
 meshMorphAnim->mKeys = new aiMeshMorphKey[numKeys];
 unsigned int j = 0;
-for (auto animIt : *animData) {
+for (auto &animIt : *animData) {
 morphKeyData *keyData = animIt.second;
 unsigned int numValuesAndWeights = static_cast<unsigned int>(keyData->values.size());
 meshMorphAnim->mKeys[j].mNumValuesAndWeights = numValuesAndWeights;
|
@@ -3188,7 +3232,7 @@ aiNodeAnim* FBXConverter::GenerateSimpleNodeAnim(const std::string& name,

 bool ok = false;

-const float zero_epsilon = ai_epsilon;
+const auto zero_epsilon = ai_epsilon;

 const aiVector3D& preRotation = PropertyGet<aiVector3D>(props, "PreRotation", ok);
 if (ok && preRotation.SquareLength() > zero_epsilon) {
|
@@ -3326,13 +3370,17 @@ FBXConverter::KeyFrameListList FBXConverter::GetRotationKeyframeList(const std::
 float vc = curve->GetValues().at(1);
 for (size_t n = 1; n < count; n++) {
 while (std::abs(vc - vp) >= 180.0f) {
-float step = std::floor(float(tc - tp) / (vc - vp) * 179.0f);
+double step = std::floor(double(tc - tp) / std::abs(vc - vp) * 179.0f);
 int64_t tnew = tp + int64_t(step);
-float vnew = vp + (vc - vp) * step / float(tc - tp);
+float vnew = vp + (vc - vp) * float(step / (tc - tp));
 if (tnew >= adj_start && tnew <= adj_stop) {
 Keys->push_back(tnew);
 Values->push_back(vnew);
 }
+else {
+// Something broke
+break;
+}
 tp = tnew;
 vp = vnew;
 }
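A small standalone check of the stepping fix above (the numbers are invented for illustration): with the old formula a negative value delta produced a negative time step, pushing the inserted key outside [tp, tc]; taking std::abs(vc - vp) keeps it inside.

```cpp
#include <cassert>
#include <cmath>
#include <cstdint>

int main() {
    const int64_t tp = 0, tc = 1000;     // two neighbouring key times (illustrative)
    const float vp = 350.0f, vc = 0.0f;  // Euler values jumping by -350 degrees

    const double oldStep = std::floor(double(tc - tp) / (vc - vp) * 179.0f);         // negative
    const double newStep = std::floor(double(tc - tp) / std::abs(vc - vp) * 179.0f); // positive

    assert(tp + int64_t(oldStep) < tp);                               // old: key lands before tp
    assert(tp + int64_t(newStep) > tp && tp + int64_t(newStep) < tc); // new: key stays inside the span
    return 0;
}
```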
|
@@ -3633,6 +3681,12 @@ void FBXConverter::TransferDataToScene() {

 std::swap_ranges(textures.begin(), textures.end(), mSceneOut->mTextures);
 }
+
+if (!mSkeletons.empty()) {
+mSceneOut->mSkeletons = new aiSkeleton *[mSkeletons.size()];
+mSceneOut->mNumSkeletons = static_cast<unsigned int>(mSkeletons.size());
+std::swap_ranges(mSkeletons.begin(), mSkeletons.end(), mSceneOut->mSkeletons);
+}
 }

 void FBXConverter::ConvertOrphanedEmbeddedTextures() {
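A hedged consumer-side sketch of what the mSkeletons plumbing above exposes (assumptions: an application links against this assimp build and has the FBX useSkeleton import setting enabled; the file path and function name are hypothetical). It only touches fields visible in the diff or the public aiScene/aiSkeleton API.

```cpp
#include <assimp/Importer.hpp>
#include <assimp/postprocess.h>
#include <assimp/scene.h>
#include <cstdio>

// Walk the skeletons that TransferDataToScene() attaches to the imported scene.
void printSkeletons(const char *path) {
    Assimp::Importer importer;
    const aiScene *scene = importer.ReadFile(path, aiProcess_Triangulate);
    if (scene == nullptr) {
        std::printf("import failed: %s\n", importer.GetErrorString());
        return;
    }
    for (unsigned int s = 0; s < scene->mNumSkeletons; ++s) {
        const aiSkeleton *skeleton = scene->mSkeletons[s];
        std::printf("skeleton %u: %u bones\n", s, skeleton->mNumBones);
        for (unsigned int b = 0; b < skeleton->mNumBones; ++b) {
            const aiSkeletonBone *bone = skeleton->mBones[b];
            // mNumnWeights mirrors aiBone::mNumWeights (field name as copied in the diff above)
            std::printf("  bone %u: %u weights\n", b, bone->mNumnWeights);
        }
    }
}
```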
|
@@ -75,7 +75,18 @@ typedef std::map<int64_t, morphKeyData*> morphAnimData;
 namespace Assimp {
 namespace FBX {

+class MeshGeometry;
+
+using SkeletonBoneArray = std::vector<aiSkeletonBone *>;
+using SkeletonBoneToMesh = std::map<aiMesh*, SkeletonBoneArray*>;
+
+struct SkeletonBoneContainer {
+std::vector<aiMesh *> MeshArray;
+SkeletonBoneToMesh SkeletonBoneToMeshLookup;
+};
+
 class Document;

 /**
 * Convert a FBX #Document to #aiScene
 * @param out Empty scene to be populated
|
@@ -180,14 +191,12 @@ private:
 void SetupNodeMetadata(const Model& model, aiNode& nd);

 // ------------------------------------------------------------------------------------------------
-void ConvertModel(const Model &model, aiNode *parent, aiNode *root_node,
-const aiMatrix4x4 &absolute_transform);
+void ConvertModel(const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform);

 // ------------------------------------------------------------------------------------------------
 // MeshGeometry -> aiMesh, return mesh index + 1 or 0 if the conversion failed
 std::vector<unsigned int>
-ConvertMesh(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node,
-const aiMatrix4x4 &absolute_transform);
+ConvertMesh(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform);

 // ------------------------------------------------------------------------------------------------
 std::vector<unsigned int> ConvertLine(const LineGeometry& line, aiNode *root_node);
|
@@ -196,18 +205,16 @@ private:
 aiMesh* SetupEmptyMesh(const Geometry& mesh, aiNode *parent);

 // ------------------------------------------------------------------------------------------------
-unsigned int ConvertMeshSingleMaterial(const MeshGeometry &mesh, const Model &model,
-const aiMatrix4x4 &absolute_transform, aiNode *parent,
-aiNode *root_node);
+unsigned int ConvertMeshSingleMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform,
+aiNode *parent, aiNode *root_node);

 // ------------------------------------------------------------------------------------------------
 std::vector<unsigned int>
-ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, aiNode *parent, aiNode *root_node,
-const aiMatrix4x4 &absolute_transform);
+ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, aiNode *parent, aiNode *root_node);

 // ------------------------------------------------------------------------------------------------
-unsigned int ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, MatIndexArray::value_type index,
-aiNode *parent, aiNode *root_node, const aiMatrix4x4 &absolute_transform);
+unsigned int ConvertMeshMultiMaterial(const MeshGeometry &mesh, const Model &model, const aiMatrix4x4 &absolute_transform, MatIndexArray::value_type index,
+aiNode *parent, aiNode *root_node);

 // ------------------------------------------------------------------------------------------------
 static const unsigned int NO_MATERIAL_SEPARATION = /* std::numeric_limits<unsigned int>::max() */
|
@ -220,15 +227,19 @@ private:
|
|||
* - outputVertStartIndices is only used when a material index is specified, it gives for
|
||||
* each output vertex the DOM index it maps to.
|
||||
*/
|
||||
void ConvertWeights(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform,
|
||||
aiNode *parent = nullptr, unsigned int materialIndex = NO_MATERIAL_SEPARATION,
|
||||
void ConvertWeights(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform, aiNode *parent = nullptr,
|
||||
unsigned int materialIndex = NO_MATERIAL_SEPARATION,
|
||||
std::vector<unsigned int> *outputVertStartIndices = nullptr);
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void ConvertWeightsToSkeleton(aiMesh *out, const MeshGeometry &geo, const aiMatrix4x4 &absolute_transform,
|
||||
aiNode *parent, unsigned int materialIndex, std::vector<unsigned int> *outputVertStartIndices,
|
||||
SkeletonBoneContainer &skeletonContainer);
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void ConvertCluster(std::vector<aiBone *> &local_mesh_bones, const Cluster *cl,
|
||||
std::vector<size_t> &out_indices, std::vector<size_t> &index_out_indices,
|
||||
std::vector<size_t> &count_out_indices, const aiMatrix4x4 &absolute_transform,
|
||||
aiNode *parent );
|
||||
std::vector<size_t> &count_out_indices, const aiMatrix4x4 &absolute_transform, aiNode *parent);
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void ConvertMaterialForMesh(aiMesh* out, const Model& model, const MeshGeometry& geo,
|
||||
|
@ -301,7 +312,8 @@ private:
|
|||
void ConvertAnimationStack(const AnimationStack& st);
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void ProcessMorphAnimDatas(std::map<std::string, morphAnimData*>* morphAnimDatas, const BlendShapeChannel* bsc, const AnimationCurveNode* node);
|
||||
void ProcessMorphAnimDatas(std::map<std::string, morphAnimData*>* morphAnimDatas,
|
||||
const BlendShapeChannel* bsc, const AnimationCurveNode* node);
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
void GenerateNodeAnimations(std::vector<aiNodeAnim*>& node_anims,
|
||||
|
@ -450,6 +462,7 @@ private:
|
|||
|
||||
double anim_fps;
|
||||
|
||||
std::vector<aiSkeleton *> mSkeletons;
|
||||
aiScene* const mSceneOut;
|
||||
const FBX::Document& doc;
|
||||
bool mRemoveEmptyBones;
|
||||
|
|
|
@@ -4,7 +4,6 @@ Open Asset Import Library (assimp)

Copyright (c) 2006-2022, assimp team


All rights reserved.

Redistribution and use of this software in source and binary forms,
@@ -58,16 +57,14 @@ namespace FBX {
using namespace Util;

// ------------------------------------------------------------------------------------------------
Deformer::Deformer(uint64_t id, const Element& element, const Document& doc, const std::string& name)
: Object(id,element,name)
{
Deformer::Deformer(uint64_t id, const Element& element, const Document& doc, const std::string& name) :
Object(id,element,name) {
const Scope& sc = GetRequiredScope(element);

const std::string& classname = ParseTokenAsString(GetRequiredToken(element,2));
props = GetPropertyTable(doc,"Deformer.Fbx" + classname,element,sc,true);
}


// ------------------------------------------------------------------------------------------------
Deformer::~Deformer()
{

@@ -544,7 +544,7 @@ std::vector<const Connection*> Document::GetConnectionsSequenced(uint64_t id, bo
ai_assert( count != 0 );
ai_assert( count <= MAX_CLASSNAMES);

size_t lengths[MAX_CLASSNAMES];
size_t lengths[MAX_CLASSNAMES] = {};

const size_t c = count;
for (size_t i = 0; i < c; ++i) {

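The only functional change in the hunk above is the empty brace initializer, which value-initializes every element of the local array instead of leaving it indeterminate. A tiny standalone illustration of the difference, generic C++ rather than Assimp code:

```cpp
#include <cstddef>
#include <cstdio>

int main() {
    const std::size_t MAX_CLASSNAMES = 6;
    std::size_t uninitialized[MAX_CLASSNAMES];   // indeterminate values; reading them is undefined behavior
    std::size_t zeroed[MAX_CLASSNAMES] = {};     // every element is value-initialized to 0
    (void)uninitialized;                         // never read the uninitialized array
    for (std::size_t i = 0; i < MAX_CLASSNAMES; ++i) {
        std::printf("%zu ", zeroed[i]);          // prints "0 0 0 0 0 0"
    }
    std::printf("\n");
    return 0;
}
```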
@ -164,7 +164,7 @@ class NodeAttribute : public Object {
|
|||
public:
|
||||
NodeAttribute(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
|
||||
virtual ~NodeAttribute();
|
||||
virtual ~NodeAttribute() = default;
|
||||
|
||||
const PropertyTable& Props() const {
|
||||
ai_assert(props.get());
|
||||
|
@ -180,7 +180,7 @@ class CameraSwitcher : public NodeAttribute {
|
|||
public:
|
||||
CameraSwitcher(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
|
||||
virtual ~CameraSwitcher();
|
||||
virtual ~CameraSwitcher() = default;
|
||||
|
||||
int CameraID() const {
|
||||
return cameraId;
|
||||
|
@ -225,7 +225,7 @@ class Camera : public NodeAttribute {
|
|||
public:
|
||||
Camera(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
|
||||
virtual ~Camera();
|
||||
virtual ~Camera() = default;
|
||||
|
||||
fbx_simple_property(Position, aiVector3D, aiVector3D(0,0,0))
|
||||
fbx_simple_property(UpVector, aiVector3D, aiVector3D(0,1,0))
|
||||
|
@ -250,21 +250,21 @@ public:
|
|||
class Null : public NodeAttribute {
|
||||
public:
|
||||
Null(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
virtual ~Null();
|
||||
virtual ~Null() = default;
|
||||
};
|
||||
|
||||
/** DOM base class for FBX limb node markers attached to a node */
|
||||
class LimbNode : public NodeAttribute {
|
||||
public:
|
||||
LimbNode(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
virtual ~LimbNode();
|
||||
virtual ~LimbNode() = default;
|
||||
};
|
||||
|
||||
/** DOM base class for FBX lights attached to a node */
|
||||
class Light : public NodeAttribute {
|
||||
public:
|
||||
Light(uint64_t id, const Element& element, const Document& doc, const std::string& name);
|
||||
virtual ~Light();
|
||||
virtual ~Light() = default;
|
||||
|
||||
enum Type {
|
||||
Type_Point,
|
||||
|
@ -690,7 +690,7 @@ using KeyValueList = std::vector<float>;
|
|||
class AnimationCurve : public Object {
|
||||
public:
|
||||
AnimationCurve(uint64_t id, const Element& element, const std::string& name, const Document& doc);
|
||||
virtual ~AnimationCurve();
|
||||
virtual ~AnimationCurve() = default;
|
||||
|
||||
/** get list of keyframe positions (time).
|
||||
* Invariant: |GetKeys()| > 0 */
|
||||
|
@ -731,7 +731,7 @@ public:
|
|||
AnimationCurveNode(uint64_t id, const Element& element, const std::string& name, const Document& doc,
|
||||
const char *const *target_prop_whitelist = nullptr, size_t whitelist_size = 0);
|
||||
|
||||
virtual ~AnimationCurveNode();
|
||||
virtual ~AnimationCurveNode() = default;
|
||||
|
||||
const PropertyTable& Props() const {
|
||||
ai_assert(props.get());
|
||||
|
@ -776,7 +776,7 @@ using AnimationCurveNodeList = std::vector<const AnimationCurveNode*>;
|
|||
class AnimationLayer : public Object {
|
||||
public:
|
||||
AnimationLayer(uint64_t id, const Element& element, const std::string& name, const Document& doc);
|
||||
virtual ~AnimationLayer();
|
||||
virtual ~AnimationLayer() = default;
|
||||
|
||||
const PropertyTable& Props() const {
|
||||
ai_assert(props.get());
|
||||
|
@ -799,7 +799,7 @@ using AnimationLayerList = std::vector<const AnimationLayer*>;
|
|||
class AnimationStack : public Object {
|
||||
public:
|
||||
AnimationStack(uint64_t id, const Element& element, const std::string& name, const Document& doc);
|
||||
virtual ~AnimationStack();
|
||||
virtual ~AnimationStack() = default;
|
||||
|
||||
fbx_simple_property(LocalStart, int64_t, 0L)
|
||||
fbx_simple_property(LocalStop, int64_t, 0L)
|
||||
|
|
|
@@ -59,14 +59,12 @@ namespace Util {

// ------------------------------------------------------------------------------------------------
// signal DOM construction error, this is always unrecoverable. Throws DeadlyImportError.
void DOMError(const std::string& message, const Token& token)
{
void DOMError(const std::string& message, const Token& token) {
throw DeadlyImportError("FBX-DOM", Util::GetTokenText(&token), message);
}

// ------------------------------------------------------------------------------------------------
void DOMError(const std::string& message, const Element* element /*= nullptr*/)
{
void DOMError(const std::string& message, const Element* element /*= nullptr*/) {
if(element) {
DOMError(message,element->KeyToken());
}
@@ -76,8 +74,7 @@ void DOMError(const std::string& message, const Element* element /*= nullptr*/)

// ------------------------------------------------------------------------------------------------
// print warning, do return
void DOMWarning(const std::string& message, const Token& token)
{
void DOMWarning(const std::string& message, const Token& token) {
if(DefaultLogger::get()) {
ASSIMP_LOG_WARN("FBX-DOM", Util::GetTokenText(&token), message);
}

@@ -74,13 +74,11 @@ std::shared_ptr<const PropertyTable> GetPropertyTable(const Document& doc,

// ------------------------------------------------------------------------------------------------
template <typename T>
inline
const T* ProcessSimpleConnection(const Connection& con,
inline const T* ProcessSimpleConnection(const Connection& con,
bool is_object_property_conn,
const char* name,
const Element& element,
const char** propNameOut = nullptr)
{
const char** propNameOut = nullptr) {
if (is_object_property_conn && !con.PropertyName().length()) {
DOMWarning("expected incoming " + std::string(name) +
" link to be an object-object connection, ignoring",

@@ -255,7 +255,7 @@ void FBXExporter::WriteBinaryHeader()

void FBXExporter::WriteBinaryFooter()
{
outfile->Write(NULL_RECORD.c_str(), NULL_RECORD.size(), 1);
outfile->Write(NULL_RECORD, NumNullRecords, 1);

outfile->Write(GENERIC_FOOTID.c_str(), GENERIC_FOOTID.size(), 1);

@@ -60,6 +60,7 @@ struct ImportSettings {
readLights(true),
readAnimations(true),
readWeights(true),
useSkeleton(false),
preservePivots(true),
optimizeEmptyAnimationCurves(true),
useLegacyEmbeddedTextureNaming(false),
@@ -112,6 +113,11 @@ struct ImportSettings {
* Default value is true. */
bool readWeights;

/** will convert all animation data into a skeleton (experimental)
* Default value is false.
*/
bool useSkeleton;

/** preserve transformation pivots and offsets. Since these can
* not directly be represented in assimp, additional dummy
* nodes will be generated. Note that settings this to false

@@ -90,12 +90,9 @@ static const aiImporterDesc desc = {

// ------------------------------------------------------------------------------------------------
// Constructor to be privately used by #Importer
FBXImporter::FBXImporter() {
}

// ------------------------------------------------------------------------------------------------
// Destructor, private as well
FBXImporter::~FBXImporter() {
FBXImporter::FBXImporter() :
mSettings() {
// empty
}

// ------------------------------------------------------------------------------------------------
@@ -115,20 +112,21 @@ const aiImporterDesc *FBXImporter::GetInfo() const {
// ------------------------------------------------------------------------------------------------
// Setup configuration properties for the loader
void FBXImporter::SetupProperties(const Importer *pImp) {
settings.readAllLayers = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ALL_GEOMETRY_LAYERS, true);
settings.readAllMaterials = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ALL_MATERIALS, false);
settings.readMaterials = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_MATERIALS, true);
settings.readTextures = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_TEXTURES, true);
settings.readCameras = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_CAMERAS, true);
settings.readLights = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_LIGHTS, true);
settings.readAnimations = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ANIMATIONS, true);
settings.readWeights = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_WEIGHTS, true);
settings.strictMode = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_STRICT_MODE, false);
settings.preservePivots = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS, true);
settings.optimizeEmptyAnimationCurves = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES, true);
settings.useLegacyEmbeddedTextureNaming = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_EMBEDDED_TEXTURES_LEGACY_NAMING, false);
settings.removeEmptyBones = pImp->GetPropertyBool(AI_CONFIG_IMPORT_REMOVE_EMPTY_BONES, true);
settings.convertToMeters = pImp->GetPropertyBool(AI_CONFIG_FBX_CONVERT_TO_M, false);
mSettings.readAllLayers = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ALL_GEOMETRY_LAYERS, true);
mSettings.readAllMaterials = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ALL_MATERIALS, false);
mSettings.readMaterials = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_MATERIALS, true);
mSettings.readTextures = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_TEXTURES, true);
mSettings.readCameras = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_CAMERAS, true);
mSettings.readLights = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_LIGHTS, true);
mSettings.readAnimations = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_ANIMATIONS, true);
mSettings.readWeights = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_READ_WEIGHTS, true);
mSettings.strictMode = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_STRICT_MODE, false);
mSettings.preservePivots = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS, true);
mSettings.optimizeEmptyAnimationCurves = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES, true);
mSettings.useLegacyEmbeddedTextureNaming = pImp->GetPropertyBool(AI_CONFIG_IMPORT_FBX_EMBEDDED_TEXTURES_LEGACY_NAMING, false);
mSettings.removeEmptyBones = pImp->GetPropertyBool(AI_CONFIG_IMPORT_REMOVE_EMPTY_BONES, true);
mSettings.convertToMeters = pImp->GetPropertyBool(AI_CONFIG_FBX_CONVERT_TO_M, false);
mSettings.useSkeleton = pImp->GetPropertyBool(AI_CONFIG_FBX_USE_SKELETON_BONE_CONTAINER, false);
}

// ------------------------------------------------------------------------------------------------
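SetupProperties now routes everything through the renamed mSettings member and reads the new AI_CONFIG_FBX_USE_SKELETON_BONE_CONTAINER switch. On the application side these are ordinary importer properties; a minimal sketch of enabling the experimental skeleton path for one import is shown below (the loadFbxWithSkeleton wrapper and its checks are illustrative, not Assimp API):

```cpp
#include <assimp/Importer.hpp>
#include <assimp/config.h>
#include <assimp/postprocess.h>
#include <assimp/scene.h>

// Hypothetical helper: import one FBX file with the experimental skeleton container enabled.
bool loadFbxWithSkeleton(const char *path) {
    Assimp::Importer importer;
    // Boolean config keys consumed by FBXImporter::SetupProperties().
    importer.SetPropertyBool(AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS, true);
    importer.SetPropertyBool(AI_CONFIG_FBX_CONVERT_TO_M, false);
    importer.SetPropertyBool(AI_CONFIG_FBX_USE_SKELETON_BONE_CONTAINER, true); // new in this change, experimental
    const aiScene *scene = importer.ReadFile(path, aiProcess_Triangulate);
    // The scene is owned by the Importer instance, so only inspect it here.
    return scene != nullptr && scene->mNumMeshes > 0;
}
```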
@ -155,7 +153,7 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
|
|||
contents[contents.size() - 1] = 0;
|
||||
const char *const begin = &*contents.begin();
|
||||
|
||||
// broadphase tokenizing pass in which we identify the core
|
||||
// broad-phase tokenized pass in which we identify the core
|
||||
// syntax elements of FBX (brackets, commas, key:value mappings)
|
||||
TokenList tokens;
|
||||
try {
|
||||
|
@ -173,15 +171,14 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
|
|||
Parser parser(tokens, is_binary);
|
||||
|
||||
// take the raw parse-tree and convert it to a FBX DOM
|
||||
Document doc(parser, settings);
|
||||
Document doc(parser, mSettings);
|
||||
|
||||
// convert the FBX DOM to aiScene
|
||||
ConvertToAssimpScene(pScene, doc, settings.removeEmptyBones);
|
||||
ConvertToAssimpScene(pScene, doc, mSettings.removeEmptyBones);
|
||||
|
||||
// size relative to cm
|
||||
float size_relative_to_cm = doc.GlobalSettings().UnitScaleFactor();
|
||||
if (size_relative_to_cm == 0.0)
|
||||
{
|
||||
if (size_relative_to_cm == 0.0) {
|
||||
// BaseImporter later asserts that fileScale is non-zero.
|
||||
ThrowException("The UnitScaleFactor must be non-zero");
|
||||
}
|
||||
|
|
|
@ -69,13 +69,14 @@ typedef class basic_formatter<char, std::char_traits<char>, std::allocator<char>
|
|||
// -------------------------------------------------------------------------------------------
|
||||
class FBXImporter : public BaseImporter, public LogFunctions<FBXImporter> {
|
||||
public:
|
||||
/// @brief The class constructor.
|
||||
FBXImporter();
|
||||
~FBXImporter() override;
|
||||
|
||||
// --------------------
|
||||
bool CanRead(const std::string &pFile,
|
||||
IOSystem *pIOHandler,
|
||||
bool checkSig) const override;
|
||||
/// @brief The class destructor, default implementation.
|
||||
~FBXImporter() override = default;
|
||||
|
||||
/// @brief Will check the file for readability.
|
||||
bool CanRead(const std::string &pFile, IOSystem *pIOHandler, bool checkSig) const override;
|
||||
|
||||
protected:
|
||||
// --------------------
|
||||
|
@ -90,7 +91,7 @@ protected:
|
|||
IOSystem *pIOHandler) override;
|
||||
|
||||
private:
|
||||
FBX::ImportSettings settings;
|
||||
FBX::ImportSettings mSettings;
|
||||
}; // !class FBXImporter
|
||||
|
||||
} // end of namespace Assimp
|
||||
|
|
|
@ -4,7 +4,6 @@ Open Asset Import Library (assimp)
|
|||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
|
@ -54,17 +53,14 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include "FBXImportSettings.h"
|
||||
#include "FBXDocumentUtil.h"
|
||||
|
||||
|
||||
namespace Assimp {
|
||||
namespace FBX {
|
||||
|
||||
using namespace Util;
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Geometry::Geometry(uint64_t id, const Element& element, const std::string& name, const Document& doc)
|
||||
: Object(id, element, name)
|
||||
, skin()
|
||||
{
|
||||
Geometry::Geometry(uint64_t id, const Element& element, const std::string& name, const Document& doc) :
|
||||
Object(id, element, name), skin() {
|
||||
const std::vector<const Connection*> &conns = doc.GetConnectionsByDestinationSequenced(ID(),"Deformer");
|
||||
for(const Connection* con : conns) {
|
||||
const Skin* const sk = ProcessSimpleConnection<Skin>(*con, false, "Skin -> Geometry", element);
|
||||
|
@ -78,12 +74,6 @@ Geometry::Geometry(uint64_t id, const Element& element, const std::string& name,
|
|||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Geometry::~Geometry()
|
||||
{
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
const std::vector<const BlendShape*>& Geometry::GetBlendShapes() const {
|
||||
return blendShapes;
|
||||
|
@ -183,18 +173,12 @@ MeshGeometry::MeshGeometry(uint64_t id, const Element& element, const std::strin
|
|||
if(doc.Settings().readAllLayers || index == 0) {
|
||||
const Scope& layer = GetRequiredScope(*(*it).second);
|
||||
ReadLayer(layer);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
FBXImporter::LogWarn("ignoring additional geometry layers");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
MeshGeometry::~MeshGeometry() {
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
const std::vector<aiVector3D>& MeshGeometry::GetVertices() const {
|
||||
return m_vertices;
|
||||
|
|
|
@ -55,22 +55,25 @@ namespace FBX {
|
|||
/**
|
||||
* DOM base class for all kinds of FBX geometry
|
||||
*/
|
||||
class Geometry : public Object
|
||||
{
|
||||
class Geometry : public Object {
|
||||
public:
|
||||
/// @brief The class constructor with all parameters.
|
||||
/// @param id The id.
|
||||
/// @param element
|
||||
/// @param name
|
||||
/// @param doc
|
||||
Geometry( uint64_t id, const Element& element, const std::string& name, const Document& doc );
|
||||
virtual ~Geometry();
|
||||
virtual ~Geometry() = default;
|
||||
|
||||
/** Get the Skin attached to this geometry or nullptr */
|
||||
/// Get the Skin attached to this geometry or nullptr
|
||||
const Skin* DeformerSkin() const;
|
||||
|
||||
/** Get the BlendShape attached to this geometry or nullptr */
|
||||
/// Get the BlendShape attached to this geometry or nullptr
|
||||
const std::vector<const BlendShape*>& GetBlendShapes() const;
|
||||
|
||||
private:
|
||||
const Skin* skin;
|
||||
std::vector<const BlendShape*> blendShapes;
|
||||
|
||||
};
|
||||
|
||||
typedef std::vector<int> MatIndexArray;
|
||||
|
@ -79,14 +82,13 @@ typedef std::vector<int> MatIndexArray;
|
|||
/**
|
||||
* DOM class for FBX geometry of type "Mesh"
|
||||
*/
|
||||
class MeshGeometry : public Geometry
|
||||
{
|
||||
class MeshGeometry : public Geometry {
|
||||
public:
|
||||
/** The class constructor */
|
||||
MeshGeometry( uint64_t id, const Element& element, const std::string& name, const Document& doc );
|
||||
|
||||
/** The class destructor */
|
||||
virtual ~MeshGeometry();
|
||||
virtual ~MeshGeometry() = default;
|
||||
|
||||
/** Get a list of all vertex points, non-unique*/
|
||||
const std::vector<aiVector3D>& GetVertices() const;
|
||||
|
@ -130,6 +132,7 @@ public:
|
|||
/** Determine the face to which a particular output vertex index belongs.
|
||||
* This mapping is always unique. */
|
||||
unsigned int FaceForVertexIndex( unsigned int in_index ) const;
|
||||
|
||||
private:
|
||||
void ReadLayer( const Scope& layer );
|
||||
void ReadLayerElement( const Scope& layerElement );
|
||||
|
|
|
@ -57,10 +57,8 @@ namespace FBX {
|
|||
using namespace Util;
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
NodeAttribute::NodeAttribute(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: Object(id,element,name)
|
||||
, props()
|
||||
{
|
||||
NodeAttribute::NodeAttribute(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
Object(id, element, name), props() {
|
||||
const Scope &sc = GetRequiredScope(element);
|
||||
|
||||
const std::string &classname = ParseTokenAsString(GetRequiredToken(element, 2));
|
||||
|
@ -72,18 +70,9 @@ NodeAttribute::NodeAttribute(uint64_t id, const Element& element, const Document
|
|||
props = GetPropertyTable(doc, "NodeAttribute.Fbx" + classname, element, sc, is_null_or_limb);
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
NodeAttribute::~NodeAttribute()
|
||||
{
|
||||
// empty
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
CameraSwitcher::CameraSwitcher(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: NodeAttribute(id,element,doc,name)
|
||||
{
|
||||
CameraSwitcher::CameraSwitcher(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
NodeAttribute(id, element, doc, name) {
|
||||
const Scope &sc = GetRequiredScope(element);
|
||||
const Element *const CameraId = sc["CameraId"];
|
||||
const Element *const CameraName = sc["CameraName"];
|
||||
|
@ -103,68 +92,30 @@ CameraSwitcher::CameraSwitcher(uint64_t id, const Element& element, const Docume
|
|||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
CameraSwitcher::~CameraSwitcher()
|
||||
{
|
||||
Camera::Camera(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
NodeAttribute(id, element, doc, name) {
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Camera::Camera(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: NodeAttribute(id,element,doc,name)
|
||||
{
|
||||
Light::Light(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
NodeAttribute(id, element, doc, name) {
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Camera::~Camera()
|
||||
{
|
||||
Null::Null(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
NodeAttribute(id, element, doc, name) {
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Light::Light(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: NodeAttribute(id,element,doc,name)
|
||||
{
|
||||
LimbNode::LimbNode(uint64_t id, const Element &element, const Document &doc, const std::string &name) :
|
||||
NodeAttribute(id, element, doc, name) {
|
||||
// empty
|
||||
}
|
||||
|
||||
} // namespace FBX
|
||||
} // namespace Assimp
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Light::~Light()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Null::Null(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: NodeAttribute(id,element,doc,name)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Null::~Null()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
LimbNode::LimbNode(uint64_t id, const Element& element, const Document& doc, const std::string& name)
|
||||
: NodeAttribute(id,element,doc,name)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
LimbNode::~LimbNode()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
#endif // ASSIMP_BUILD_NO_FBX_IMPORTER
|
||||
|
|
|
@ -162,12 +162,6 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
|
|||
while(n->Type() != TokenType_KEY && n->Type() != TokenType_CLOSE_BRACKET);
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Element::~Element()
|
||||
{
|
||||
// no need to delete tokens, they are owned by the parser
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Scope::Scope(Parser& parser,bool topLevel)
|
||||
{
|
||||
|
@ -226,12 +220,6 @@ Parser::Parser (const TokenList& tokens, bool is_binary)
|
|||
root.reset(new Scope(*this,true));
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
Parser::~Parser()
|
||||
{
|
||||
// empty
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
TokenPtr Parser::AdvanceToNextToken()
|
||||
{
|
||||
|
@ -961,8 +949,7 @@ void ParseVectorDataArray(std::vector<float>& out, const Element& el)
|
|||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// read an array of uints
|
||||
void ParseVectorDataArray(std::vector<unsigned int>& out, const Element& el)
|
||||
{
|
||||
void ParseVectorDataArray(std::vector<unsigned int>& out, const Element& el) {
|
||||
out.resize( 0 );
|
||||
const TokenList& tok = el.Tokens();
|
||||
if(tok.empty()) {
|
||||
|
@ -1186,7 +1173,6 @@ aiMatrix4x4 ReadMatrix(const Element& element)
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// wrapper around ParseTokenAsString() with ParseError handling
|
||||
std::string ParseTokenAsString(const Token& t)
|
||||
|
|
|
@ -87,7 +87,7 @@ class Element
|
|||
{
|
||||
public:
|
||||
Element(const Token& key_token, Parser& parser);
|
||||
~Element();
|
||||
~Element() = default;
|
||||
|
||||
const Scope* Compound() const {
|
||||
return compound.get();
|
||||
|
@ -160,7 +160,7 @@ public:
|
|||
/** Parse given a token list. Does not take ownership of the tokens -
|
||||
* the objects must persist during the entire parser lifetime */
|
||||
Parser (const TokenList& tokens,bool is_binary);
|
||||
~Parser();
|
||||
~Parser() = default;
|
||||
|
||||
const Scope& GetRootScope() const {
|
||||
return *root.get();
|
||||
|
|
|
@ -4,7 +4,6 @@ Open Asset Import Library (assimp)
|
|||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
|
@ -51,7 +50,6 @@ namespace Assimp {
|
|||
namespace IFC {
|
||||
namespace {
|
||||
|
||||
|
||||
// --------------------------------------------------------------------------------
|
||||
// Conic is the base class for Circle and Ellipse
|
||||
// --------------------------------------------------------------------------------
|
||||
|
@@ -546,8 +544,10 @@ IfcFloat RecursiveSearch(const Curve* cv, const IfcVector3& val, IfcFloat a, Ifc
}
}

#ifndef __INTEL_LLVM_COMPILER
ai_assert( min_diff[ 0 ] != inf );
ai_assert( min_diff[ 1 ] != inf );
#endif // __INTEL_LLVM_COMPILER
if ( std::fabs(a-min_point[0]) < threshold || recurse >= max_recurse) {
return min_point[0];
}
@@ -606,8 +606,10 @@ bool BoundedCurve::IsClosed() const {
// ------------------------------------------------------------------------------------------------
void BoundedCurve::SampleDiscrete(TempMesh& out) const {
const ParamRange& range = GetParametricRange();
#ifndef __INTEL_LLVM_COMPILER
ai_assert( range.first != std::numeric_limits<IfcFloat>::infinity() );
ai_assert( range.second != std::numeric_limits<IfcFloat>::infinity() );
#endif // __INTEL_LLVM_COMPILER

return SampleDiscrete(out,range.first,range.second);
}

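Both hunks above disable the infinity asserts when building with the Intel LLVM compiler, presumably because its default floating-point optimizations make direct comparisons against infinity unreliable. Where such a check needs to stay active, testing with std::isinf is a common way to state the intent; this is a generic sketch of that alternative, not what the patch itself does:

```cpp
#include <cassert>
#include <cmath>

// Generic illustration: assert that a parametric value is finite without
// writing the comparison against the infinity literal directly.
void checkFinite(double value) {
    assert(!std::isinf(value) && "parametric range must be finite");
    (void)value;
}
```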
@@ -1428,7 +1428,7 @@ bool GenerateOpenings(std::vector<TempOpening>& openings,
return true;
}

std::vector<IfcVector2> GetContourInPlane2D(std::shared_ptr<TempMesh> mesh,IfcMatrix3 planeSpace,
std::vector<IfcVector2> GetContourInPlane2D(const std::shared_ptr<TempMesh>& mesh,IfcMatrix3 planeSpace,
IfcVector3 planeNor,IfcFloat planeOffset,
IfcVector3 extrusionDir,IfcVector3& wall_extrusion,bool& first,bool& ok) {
std::vector<IfcVector2> contour;
@@ -1476,7 +1476,7 @@ std::vector<IfcVector2> GetContourInPlane2D(std::shared_ptr<TempMesh> mesh,IfcMa
return contour;
}

const float close{ ai_epsilon };
const ai_real close{ ai_epsilon };

static bool isClose(IfcVector2 first,IfcVector2 second) {
auto diff = (second - first);
@@ -1491,7 +1491,7 @@ static void logSegment(std::pair<IfcVector2,IfcVector2> segment) {
IFCImporter::LogInfo(msg2.str().c_str());
}

std::vector<std::vector<IfcVector2>> GetContoursInPlane3D(std::shared_ptr<TempMesh> mesh,IfcMatrix3 planeSpace,
std::vector<std::vector<IfcVector2>> GetContoursInPlane3D(const std::shared_ptr<TempMesh>& mesh,IfcMatrix3 planeSpace,
IfcFloat planeOffset) {

{
@@ -1676,7 +1676,7 @@ std::vector<std::vector<IfcVector2>> GetContoursInPlane3D(std::shared_ptr<TempMe
std::stringstream msg;
msg << "GetContoursInPlane3D: found " << contours.size() << " contours:\n";

for(auto c : contours) {
for(const auto& c : contours) {
msg << "  Contour: \n";
for(auto p : c) {
msg << "    " << p.x << " " << p.y << " \n";
@@ -1690,7 +1690,7 @@ std::vector<std::vector<IfcVector2>> GetContoursInPlane3D(std::shared_ptr<TempMe
return contours;
}

std::vector<std::vector<IfcVector2>> GetContoursInPlane(std::shared_ptr<TempMesh> mesh,IfcMatrix3 planeSpace,
std::vector<std::vector<IfcVector2>> GetContoursInPlane(const std::shared_ptr<TempMesh>& mesh,IfcMatrix3 planeSpace,
IfcVector3 planeNor,IfcFloat planeOffset,
IfcVector3 extrusionDir,IfcVector3& wall_extrusion,bool& first) {

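The GetContour* helpers above now take the mesh by const std::shared_ptr& instead of by value, and the inner loop binds each contour by const reference, so neither the reference count nor the contour vectors are copied per call. A self-contained illustration of the first change, generic C++ rather than IFC code:

```cpp
#include <memory>
#include <vector>

struct TempMeshLike {
    std::vector<double> verts;
};

// By value: copies the shared_ptr, i.e. an atomic reference-count increment
// and decrement on every call.
std::size_t countByValue(std::shared_ptr<TempMeshLike> mesh) {
    return mesh->verts.size();
}

// By const reference: no ref-count traffic; the caller's shared_ptr keeps the
// object alive for the duration of the call.
std::size_t countByRef(const std::shared_ptr<TempMeshLike> &mesh) {
    return mesh->verts.size();
}
```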
@@ -287,7 +287,7 @@ void LWOImporter::InternReadFile(const std::string &pFile,
if (UINT_MAX == iDefaultSurface) {
pSorted.erase(pSorted.end() - 1);
}
for (unsigned int p = 0, j = 0; j < mSurfaces->size(); ++j) {
for (unsigned int j = 0; j < mSurfaces->size(); ++j) {
SortedRep &sorted = pSorted[j];
if (sorted.empty())
continue;
@@ -425,7 +425,6 @@ void LWOImporter::InternReadFile(const std::string &pFile,
} else {
ASSIMP_LOG_VERBOSE_DEBUG("LWO2: No need to compute normals, they're already there");
}
++p;
}
}

@@ -1541,6 +1540,7 @@ void LWOImporter::LoadLWO2File() {
break;
}
// --- intentionally no break here
// fallthrough
case AI_LWO_VMAP: {
if (skip)
break;

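The LWO loader replaces the old "intentionally no break here" note with a plain "fallthrough" comment. Since C++17 the same intent can be spelled as an attribute that compilers verify; a generic sketch of that alternative (not what this patch uses):

```cpp
#include <cstdio>

void handleChunk(int chunkId, bool skip) {
    switch (chunkId) {
    case 1:
        if (skip) {
            break;
        }
        [[fallthrough]]; // documents the intent and silences -Wimplicit-fallthrough
    case 2:
        std::printf("shared handling for chunk %d\n", chunkId);
        break;
    default:
        break;
    }
}
```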
@@ -313,6 +313,9 @@ void LWSImporter::SetupNodeName(aiNode *nd, LWS::NodeDesc &src) {
std::string::size_type t = src.path.substr(s).find_last_of('.');

nd->mName.length = ::ai_snprintf(nd->mName.data, MAXLEN, "%s_(%08X)", src.path.substr(s).substr(0, t).c_str(), combined);
if (nd->mName.length > MAXLEN) {
nd->mName.length = MAXLEN;
}
return;
}
}

@@ -57,9 +57,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Assimp specific M3D configuration. Comment out these defines to remove functionality
//#define ASSIMP_USE_M3D_READFILECB

// Share stb_image's PNG loader with other importers/exporters instead of bringing our own copy.
#define STBI_ONLY_PNG
#include <stb/stb_image.h>
#include "Common/StbCommon.h"

#include "m3d.h"

@@ -449,6 +449,9 @@ void MDLImporter::ParseSkinLump_3DGS_MDL7(
unsigned int iWidth,
unsigned int iHeight) {
std::unique_ptr<aiTexture> pcNew;
if (szCurrent == nullptr) {
return;
}

// get the type of the skin
unsigned int iMasked = (unsigned int)(iType & 0xF);

@@ -52,9 +52,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/scene.h>
#include <assimp/Importer.hpp>

#include <fstream>

#include <iomanip>
#include <memory>
#include <sstream>

static const aiImporterDesc desc = { "MMD Importer",
"",
@@ -102,26 +103,32 @@ const aiImporterDesc *MMDImporter::GetInfo() const {
// ------------------------------------------------------------------------------------------------
// MMD import implementation
void MMDImporter::InternReadFile(const std::string &file, aiScene *pScene,
IOSystem * /*pIOHandler*/) {
// Read file by istream
std::filebuf fb;
if (!fb.open(file, std::ios::in | std::ios::binary)) {
IOSystem* pIOHandler) {

auto streamCloser = [&](IOStream *pStream) {
pIOHandler->Close(pStream);
};

static const std::string mode = "rb";
const std::unique_ptr<IOStream, decltype(streamCloser)> fileStream(pIOHandler->Open(file, mode), streamCloser);

if (fileStream == nullptr) {
throw DeadlyImportError("Failed to open file ", file, ".");
}

std::istream fileStream(&fb);

// Get the file-size and validate it, throwing an exception when fails
fileStream.seekg(0, fileStream.end);
size_t fileSize = static_cast<size_t>(fileStream.tellg());
fileStream.seekg(0, fileStream.beg);

if (fileSize < sizeof(pmx::PmxModel)) {
const size_t fileSize = fileStream->FileSize();
if (fileSize < sizeof(pmx::PmxModel))
{
throw DeadlyImportError(file, " is too small.");
}

std::vector<char> contents(fileStream->FileSize());
fileStream->Read(contents.data(), 1, contents.size());

std::istringstream iss(std::string(contents.begin(), contents.end()));

pmx::PmxModel model;
model.Read(&fileStream);
model.Read(&iss);

CreateDataFromImport(&model, pScene);
}

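The MMD importer now opens the file through the supplied IOSystem and wires the returned IOStream into a std::unique_ptr with a closing lambda, so the stream is handed back to the IOSystem on every exit path, including the DeadlyImportError throws. The same RAII pattern in isolation, using a stand-in FILE handle instead of Assimp's IOStream:

```cpp
#include <cstdio>
#include <memory>
#include <stdexcept>
#include <vector>

// Read a whole file into memory; the custom deleter guarantees fclose() runs
// even when an exception is thrown after the file has been opened.
std::vector<char> readAll(const char *path) {
    auto closer = [](std::FILE *f) { if (f) std::fclose(f); };
    std::unique_ptr<std::FILE, decltype(closer)> file(std::fopen(path, "rb"), closer);
    if (!file) {
        throw std::runtime_error("failed to open file");
    }
    std::fseek(file.get(), 0, SEEK_END);
    const long size = std::ftell(file.get());
    std::fseek(file.get(), 0, SEEK_SET);
    std::vector<char> contents(size > 0 ? static_cast<std::size_t>(size) : 0);
    if (!contents.empty() && std::fread(contents.data(), 1, contents.size(), file.get()) != contents.size()) {
        throw std::runtime_error("short read");
    }
    return contents; // the deleter closes the file here
}
```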
@@ -357,6 +357,8 @@ namespace pmx
{
public:
void virtual Read(std::istream *stream, PmxSetting *setting) = 0;

virtual ~PmxMorphOffset() = default;
};

class PmxMorphVertexOffset : public PmxMorphOffset

@@ -136,7 +136,9 @@ void NDOImporter::InternReadFile( const std::string& pFile,
ASSIMP_LOG_INFO("NDO file format is 1.2");
}
else {
ASSIMP_LOG_WARN( "Unrecognized nendo file format version, continuing happily ... :", (head+6));
char buff[4] = {0};
memcpy(buff, head+6, 3);
ASSIMP_LOG_WARN( "Unrecognized nendo file format version, continuing happily ... :", buff);
}

reader.IncPtr(2); /* skip flags */

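The old code passed head+6 straight to the logger, which reads until a terminating zero and could therefore run past the three version bytes; the fix copies them into a zero-filled four-byte buffer first. The same bounded-copy idiom in isolation:

```cpp
#include <cstdio>
#include <cstring>

// Log exactly three version characters taken from an unterminated header buffer.
void logVersionTag(const char *head) {
    char buff[4] = {0};              // the fourth byte stays 0 and terminates the string
    std::memcpy(buff, head + 6, 3);  // copy only the three version bytes
    std::printf("Unrecognized nendo file format version: %s\n", buff);
}
```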
@ -2,7 +2,7 @@
|
|||
Open Asset Import Library (assimp)
|
||||
----------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2020, assimp team
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
All rights reserved.
|
||||
|
||||
|
@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <assimp/types.h>
|
||||
#include <map>
|
||||
#include <vector>
|
||||
#include "Common/Maybe.h"
|
||||
|
||||
namespace Assimp {
|
||||
namespace ObjFile {
|
||||
|
@ -63,7 +64,7 @@ struct Face {
|
|||
using IndexArray = std::vector<unsigned int>;
|
||||
|
||||
//! Primitive type
|
||||
aiPrimitiveType m_PrimitiveType;
|
||||
aiPrimitiveType mPrimitiveType;
|
||||
//! Vertex indices
|
||||
IndexArray m_vertices;
|
||||
//! Normal indices
|
||||
|
@ -75,14 +76,12 @@ struct Face {
|
|||
|
||||
//! \brief Default constructor
|
||||
Face(aiPrimitiveType pt = aiPrimitiveType_POLYGON) :
|
||||
m_PrimitiveType(pt), m_vertices(), m_normals(), m_texturCoords(), m_pMaterial(0L) {
|
||||
mPrimitiveType(pt), m_vertices(), m_normals(), m_texturCoords(), m_pMaterial(nullptr) {
|
||||
// empty
|
||||
}
|
||||
|
||||
//! \brief Destructor
|
||||
~Face() {
|
||||
// empty
|
||||
}
|
||||
~Face() = default;
|
||||
};
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
@ -183,15 +182,15 @@ struct Material {
|
|||
aiColor3D transparent;
|
||||
|
||||
//! PBR Roughness
|
||||
ai_real roughness;
|
||||
Maybe<ai_real> roughness;
|
||||
//! PBR Metallic
|
||||
ai_real metallic;
|
||||
Maybe<ai_real> metallic;
|
||||
//! PBR Metallic
|
||||
aiColor3D sheen;
|
||||
Maybe<aiColor3D> sheen;
|
||||
//! PBR Clearcoat Thickness
|
||||
ai_real clearcoat_thickness;
|
||||
Maybe<ai_real> clearcoat_thickness;
|
||||
//! PBR Clearcoat Rougness
|
||||
ai_real clearcoat_roughness;
|
||||
Maybe<ai_real> clearcoat_roughness;
|
||||
//! PBR Anisotropy
|
||||
ai_real anisotropy;
|
||||
|
||||
|
@ -206,11 +205,11 @@ struct Material {
|
|||
illumination_model(1),
|
||||
ior(ai_real(1.0)),
|
||||
transparent(ai_real(1.0), ai_real(1.0), ai_real(1.0)),
|
||||
roughness(ai_real(1.0)),
|
||||
metallic(ai_real(0.0)),
|
||||
sheen(ai_real(1.0), ai_real(1.0), ai_real(1.0)),
|
||||
clearcoat_thickness(ai_real(0.0)),
|
||||
clearcoat_roughness(ai_real(0.0)),
|
||||
roughness(),
|
||||
metallic(),
|
||||
sheen(),
|
||||
clearcoat_thickness(),
|
||||
clearcoat_roughness(),
|
||||
anisotropy(ai_real(0.0)),
|
||||
bump_multiplier(ai_real(1.0)) {
|
||||
std::fill_n(clamp, static_cast<unsigned int>(TextureTypeCount), false);
|
||||
|
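The PBR fields switch from plain ai_real values with made-up defaults to Maybe<ai_real>, so a factor is only exported when the .mtl file actually contained it (see the conditional AddProperty calls later in this change). The real wrapper lives in Common/Maybe.h, which is not part of this diff; as a rough idea of the shape such a wrapper needs in order to support the `if (material.roughness)` and `.Get()` usage seen here, a minimal hypothetical sketch:

```cpp
// Hypothetical stand-in for Assimp's Maybe<T>; the actual header is Common/Maybe.h.
template <typename T>
class Maybe {
public:
    Maybe() : mValue(), mSet(false) {}
    explicit Maybe(const T &value) : mValue(value), mSet(true) {}

    explicit operator bool() const { return mSet; } // enables `if (material.roughness)`
    const T &Get() const { return mValue; }          // only meaningful when a value was set

private:
    T mValue;
    bool mSet;
};
```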
@ -272,65 +271,65 @@ struct Model {
|
|||
using ConstGroupMapIt = std::map<std::string, std::vector<unsigned int> *>::const_iterator;
|
||||
|
||||
//! Model name
|
||||
std::string m_ModelName;
|
||||
std::string mModelName;
|
||||
//! List ob assigned objects
|
||||
std::vector<Object *> m_Objects;
|
||||
std::vector<Object *> mObjects;
|
||||
//! Pointer to current object
|
||||
ObjFile::Object *m_pCurrent;
|
||||
ObjFile::Object *mCurrentObject;
|
||||
//! Pointer to current material
|
||||
ObjFile::Material *m_pCurrentMaterial;
|
||||
ObjFile::Material *mCurrentMaterial;
|
||||
//! Pointer to default material
|
||||
ObjFile::Material *m_pDefaultMaterial;
|
||||
ObjFile::Material *mDefaultMaterial;
|
||||
//! Vector with all generated materials
|
||||
std::vector<std::string> m_MaterialLib;
|
||||
std::vector<std::string> mMaterialLib;
|
||||
//! Vector with all generated vertices
|
||||
std::vector<aiVector3D> m_Vertices;
|
||||
std::vector<aiVector3D> mVertices;
|
||||
//! vector with all generated normals
|
||||
std::vector<aiVector3D> m_Normals;
|
||||
std::vector<aiVector3D> mNormals;
|
||||
//! vector with all vertex colors
|
||||
std::vector<aiVector3D> m_VertexColors;
|
||||
std::vector<aiVector3D> mVertexColors;
|
||||
//! Group map
|
||||
GroupMap m_Groups;
|
||||
GroupMap mGroups;
|
||||
//! Group to face id assignment
|
||||
std::vector<unsigned int> *m_pGroupFaceIDs;
|
||||
std::vector<unsigned int> *mGroupFaceIDs;
|
||||
//! Active group
|
||||
std::string m_strActiveGroup;
|
||||
std::string mActiveGroup;
|
||||
//! Vector with generated texture coordinates
|
||||
std::vector<aiVector3D> m_TextureCoord;
|
||||
std::vector<aiVector3D> mTextureCoord;
|
||||
//! Maximum dimension of texture coordinates
|
||||
unsigned int m_TextureCoordDim;
|
||||
unsigned int mTextureCoordDim;
|
||||
//! Current mesh instance
|
||||
Mesh *m_pCurrentMesh;
|
||||
Mesh *mCurrentMesh;
|
||||
//! Vector with stored meshes
|
||||
std::vector<Mesh *> m_Meshes;
|
||||
std::vector<Mesh *> mMeshes;
|
||||
//! Material map
|
||||
std::map<std::string, Material *> m_MaterialMap;
|
||||
std::map<std::string, Material*> mMaterialMap;
|
||||
|
||||
//! \brief The default class constructor
|
||||
Model() :
|
||||
m_ModelName(),
|
||||
m_pCurrent(nullptr),
|
||||
m_pCurrentMaterial(nullptr),
|
||||
m_pDefaultMaterial(nullptr),
|
||||
m_pGroupFaceIDs(nullptr),
|
||||
m_strActiveGroup(),
|
||||
m_TextureCoordDim(0),
|
||||
m_pCurrentMesh(nullptr) {
|
||||
mModelName(),
|
||||
mCurrentObject(nullptr),
|
||||
mCurrentMaterial(nullptr),
|
||||
mDefaultMaterial(nullptr),
|
||||
mGroupFaceIDs(nullptr),
|
||||
mActiveGroup(),
|
||||
mTextureCoordDim(0),
|
||||
mCurrentMesh(nullptr) {
|
||||
// empty
|
||||
}
|
||||
|
||||
//! \brief The class destructor
|
||||
~Model() {
|
||||
for (auto & it : m_Objects) {
|
||||
for (auto & it : mObjects) {
|
||||
delete it;
|
||||
}
|
||||
for (auto & Meshe : m_Meshes) {
|
||||
for (auto & Meshe : mMeshes) {
|
||||
delete Meshe;
|
||||
}
|
||||
for (auto & Group : m_Groups) {
|
||||
for (auto & Group : mGroups) {
|
||||
delete Group.second;
|
||||
}
|
||||
for (auto & it : m_MaterialMap) {
|
||||
for (auto & it : mMaterialMap) {
|
||||
delete it.second;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -163,20 +163,20 @@ void ObjFileImporter::CreateDataFromImport(const ObjFile::Model *pModel, aiScene
|
|||
|
||||
// Create the root node of the scene
|
||||
pScene->mRootNode = new aiNode;
|
||||
if (!pModel->m_ModelName.empty()) {
|
||||
if (!pModel->mModelName.empty()) {
|
||||
// Set the name of the scene
|
||||
pScene->mRootNode->mName.Set(pModel->m_ModelName);
|
||||
pScene->mRootNode->mName.Set(pModel->mModelName);
|
||||
} else {
|
||||
// This is a fatal error, so break down the application
|
||||
ai_assert(false);
|
||||
}
|
||||
|
||||
if (!pModel->m_Objects.empty()) {
|
||||
if (!pModel->mObjects.empty()) {
|
||||
|
||||
unsigned int meshCount = 0;
|
||||
unsigned int childCount = 0;
|
||||
|
||||
for (auto object : pModel->m_Objects) {
|
||||
for (auto object : pModel->mObjects) {
|
||||
if (object) {
|
||||
++childCount;
|
||||
meshCount += (unsigned int)object->m_Meshes.size();
|
||||
|
@ -189,8 +189,8 @@ void ObjFileImporter::CreateDataFromImport(const ObjFile::Model *pModel, aiScene
|
|||
// Create nodes for the whole scene
|
||||
std::vector<aiMesh *> MeshArray;
|
||||
MeshArray.reserve(meshCount);
|
||||
for (size_t index = 0; index < pModel->m_Objects.size(); ++index) {
|
||||
createNodes(pModel, pModel->m_Objects[index], pScene->mRootNode, pScene, MeshArray);
|
||||
for (size_t index = 0; index < pModel->mObjects.size(); ++index) {
|
||||
createNodes(pModel, pModel->mObjects[index], pScene->mRootNode, pScene, MeshArray);
|
||||
}
|
||||
|
||||
ai_assert(pScene->mRootNode->mNumChildren == childCount);
|
||||
|
@ -206,31 +206,31 @@ void ObjFileImporter::CreateDataFromImport(const ObjFile::Model *pModel, aiScene
|
|||
// Create all materials
|
||||
createMaterials(pModel, pScene);
|
||||
} else {
|
||||
if (pModel->m_Vertices.empty()) {
|
||||
if (pModel->mVertices.empty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
std::unique_ptr<aiMesh> mesh(new aiMesh);
|
||||
mesh->mPrimitiveTypes = aiPrimitiveType_POINT;
|
||||
unsigned int n = (unsigned int)pModel->m_Vertices.size();
|
||||
unsigned int n = (unsigned int)pModel->mVertices.size();
|
||||
mesh->mNumVertices = n;
|
||||
|
||||
mesh->mVertices = new aiVector3D[n];
|
||||
memcpy(mesh->mVertices, pModel->m_Vertices.data(), n * sizeof(aiVector3D));
|
||||
memcpy(mesh->mVertices, pModel->mVertices.data(), n * sizeof(aiVector3D));
|
||||
|
||||
if (!pModel->m_Normals.empty()) {
|
||||
if (!pModel->mNormals.empty()) {
|
||||
mesh->mNormals = new aiVector3D[n];
|
||||
if (pModel->m_Normals.size() < n) {
|
||||
if (pModel->mNormals.size() < n) {
|
||||
throw DeadlyImportError("OBJ: vertex normal index out of range");
|
||||
}
|
||||
memcpy(mesh->mNormals, pModel->m_Normals.data(), n * sizeof(aiVector3D));
|
||||
memcpy(mesh->mNormals, pModel->mNormals.data(), n * sizeof(aiVector3D));
|
||||
}
|
||||
|
||||
if (!pModel->m_VertexColors.empty()) {
|
||||
if (!pModel->mVertexColors.empty()) {
|
||||
mesh->mColors[0] = new aiColor4D[mesh->mNumVertices];
|
||||
for (unsigned int i = 0; i < n; ++i) {
|
||||
if (i < pModel->m_VertexColors.size()) {
|
||||
const aiVector3D &color = pModel->m_VertexColors[i];
|
||||
if (i < pModel->mVertexColors.size()) {
|
||||
const aiVector3D &color = pModel->mVertexColors[i];
|
||||
mesh->mColors[0][i] = aiColor4D(color.x, color.y, color.z, 1.0);
|
||||
} else {
|
||||
throw DeadlyImportError("OBJ: vertex color index out of range");
|
||||
|
@ -315,7 +315,7 @@ aiMesh *ObjFileImporter::createTopology(const ObjFile::Model *pModel, const ObjF
|
|||
}
|
||||
|
||||
// Create faces
|
||||
ObjFile::Mesh *pObjMesh = pModel->m_Meshes[meshIndex];
|
||||
ObjFile::Mesh *pObjMesh = pModel->mMeshes[meshIndex];
|
||||
if (!pObjMesh) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -330,13 +330,13 @@ aiMesh *ObjFileImporter::createTopology(const ObjFile::Model *pModel, const ObjF
|
|||
}
|
||||
|
||||
for (size_t index = 0; index < pObjMesh->m_Faces.size(); index++) {
|
||||
ObjFile::Face *const inp = pObjMesh->m_Faces[index];
|
||||
ai_assert(nullptr != inp);
|
||||
const ObjFile::Face *inp = pObjMesh->m_Faces[index];
|
||||
//ai_assert(nullptr != inp);
|
||||
|
||||
if (inp->m_PrimitiveType == aiPrimitiveType_LINE) {
|
||||
if (inp->mPrimitiveType == aiPrimitiveType_LINE) {
|
||||
pMesh->mNumFaces += static_cast<unsigned int>(inp->m_vertices.size() - 1);
|
||||
pMesh->mPrimitiveTypes |= aiPrimitiveType_LINE;
|
||||
} else if (inp->m_PrimitiveType == aiPrimitiveType_POINT) {
|
||||
} else if (inp->mPrimitiveType == aiPrimitiveType_POINT) {
|
||||
pMesh->mNumFaces += static_cast<unsigned int>(inp->m_vertices.size());
|
||||
pMesh->mPrimitiveTypes |= aiPrimitiveType_POINT;
|
||||
} else {
|
||||
|
@ -360,15 +360,15 @@ aiMesh *ObjFileImporter::createTopology(const ObjFile::Model *pModel, const ObjF
|
|||
|
||||
// Copy all data from all stored meshes
|
||||
for (auto &face : pObjMesh->m_Faces) {
|
||||
ObjFile::Face *const inp = face;
|
||||
if (inp->m_PrimitiveType == aiPrimitiveType_LINE) {
|
||||
const ObjFile::Face *inp = face;
|
||||
if (inp->mPrimitiveType == aiPrimitiveType_LINE) {
|
||||
for (size_t i = 0; i < inp->m_vertices.size() - 1; ++i) {
|
||||
aiFace &f = pMesh->mFaces[outIndex++];
|
||||
uiIdxCount += f.mNumIndices = 2;
|
||||
f.mIndices = new unsigned int[2];
|
||||
}
|
||||
continue;
|
||||
} else if (inp->m_PrimitiveType == aiPrimitiveType_POINT) {
|
||||
} else if (inp->mPrimitiveType == aiPrimitiveType_POINT) {
|
||||
for (size_t i = 0; i < inp->m_vertices.size(); ++i) {
|
||||
aiFace &f = pMesh->mFaces[outIndex++];
|
||||
uiIdxCount += f.mNumIndices = 1;
|
||||
|
@ -407,7 +407,7 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
return;
|
||||
|
||||
// Get current mesh
|
||||
ObjFile::Mesh *pObjMesh = pModel->m_Meshes[uiMeshIndex];
|
||||
ObjFile::Mesh *pObjMesh = pModel->mMeshes[uiMeshIndex];
|
||||
if (nullptr == pObjMesh || pObjMesh->m_uiNumIndices < 1) {
|
||||
return;
|
||||
}
|
||||
|
@ -422,16 +422,16 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
pMesh->mVertices = new aiVector3D[pMesh->mNumVertices];
|
||||
|
||||
// Allocate buffer for normal vectors
|
||||
if (!pModel->m_Normals.empty() && pObjMesh->m_hasNormals)
|
||||
if (!pModel->mNormals.empty() && pObjMesh->m_hasNormals)
|
||||
pMesh->mNormals = new aiVector3D[pMesh->mNumVertices];
|
||||
|
||||
// Allocate buffer for vertex-color vectors
|
||||
if (!pModel->m_VertexColors.empty())
|
||||
if (!pModel->mVertexColors.empty())
|
||||
pMesh->mColors[0] = new aiColor4D[pMesh->mNumVertices];
|
||||
|
||||
// Allocate buffer for texture coordinates
|
||||
if (!pModel->m_TextureCoord.empty() && pObjMesh->m_uiUVCoordinates[0]) {
|
||||
pMesh->mNumUVComponents[0] = pModel->m_TextureCoordDim;
|
||||
if (!pModel->mTextureCoord.empty() && pObjMesh->m_uiUVCoordinates[0]) {
|
||||
pMesh->mNumUVComponents[0] = pModel->mTextureCoordDim;
|
||||
pMesh->mTextureCoords[0] = new aiVector3D[pMesh->mNumVertices];
|
||||
}
|
||||
|
||||
|
@ -442,7 +442,7 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
// Copy all index arrays
|
||||
for (size_t vertexIndex = 0, outVertexIndex = 0; vertexIndex < sourceFace->m_vertices.size(); vertexIndex++) {
|
||||
const unsigned int vertex = sourceFace->m_vertices.at(vertexIndex);
|
||||
if (vertex >= pModel->m_Vertices.size()) {
|
||||
if (vertex >= pModel->mVertices.size()) {
|
||||
throw DeadlyImportError("OBJ: vertex index out of range");
|
||||
}
|
||||
|
||||
|
@ -450,32 +450,32 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
throw DeadlyImportError("OBJ: bad vertex index");
|
||||
}
|
||||
|
||||
pMesh->mVertices[newIndex] = pModel->m_Vertices[vertex];
|
||||
pMesh->mVertices[newIndex] = pModel->mVertices[vertex];
|
||||
|
||||
// Copy all normals
|
||||
if (normalsok && !pModel->m_Normals.empty() && vertexIndex < sourceFace->m_normals.size()) {
|
||||
if (normalsok && !pModel->mNormals.empty() && vertexIndex < sourceFace->m_normals.size()) {
|
||||
const unsigned int normal = sourceFace->m_normals.at(vertexIndex);
|
||||
if (normal >= pModel->m_Normals.size()) {
|
||||
if (normal >= pModel->mNormals.size()) {
|
||||
normalsok = false;
|
||||
} else {
|
||||
pMesh->mNormals[newIndex] = pModel->m_Normals[normal];
|
||||
pMesh->mNormals[newIndex] = pModel->mNormals[normal];
|
||||
}
|
||||
}
|
||||
|
||||
// Copy all vertex colors
|
||||
if (vertex < pModel->m_VertexColors.size()) {
|
||||
const aiVector3D &color = pModel->m_VertexColors[vertex];
|
||||
if (vertex < pModel->mVertexColors.size()) {
|
||||
const aiVector3D &color = pModel->mVertexColors[vertex];
|
||||
pMesh->mColors[0][newIndex] = aiColor4D(color.x, color.y, color.z, 1.0);
|
||||
}
|
||||
|
||||
// Copy all texture coordinates
|
||||
if (uvok && !pModel->m_TextureCoord.empty() && vertexIndex < sourceFace->m_texturCoords.size()) {
|
||||
if (uvok && !pModel->mTextureCoord.empty() && vertexIndex < sourceFace->m_texturCoords.size()) {
|
||||
const unsigned int tex = sourceFace->m_texturCoords.at(vertexIndex);
|
||||
|
||||
if (tex >= pModel->m_TextureCoord.size()) {
|
||||
if (tex >= pModel->mTextureCoord.size()) {
|
||||
uvok = false;
|
||||
} else {
|
||||
const aiVector3D &coord3d = pModel->m_TextureCoord[tex];
|
||||
const aiVector3D &coord3d = pModel->mTextureCoord[tex];
|
||||
pMesh->mTextureCoords[0][newIndex] = aiVector3D(coord3d.x, coord3d.y, coord3d.z);
|
||||
}
|
||||
}
|
||||
|
@ -484,15 +484,15 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
aiFace *pDestFace = &pMesh->mFaces[outIndex];
|
||||
|
||||
const bool last = (vertexIndex == sourceFace->m_vertices.size() - 1);
|
||||
if (sourceFace->m_PrimitiveType != aiPrimitiveType_LINE || !last) {
|
||||
if (sourceFace->mPrimitiveType != aiPrimitiveType_LINE || !last) {
|
||||
pDestFace->mIndices[outVertexIndex] = newIndex;
|
||||
outVertexIndex++;
|
||||
}
|
||||
|
||||
if (sourceFace->m_PrimitiveType == aiPrimitiveType_POINT) {
|
||||
if (sourceFace->mPrimitiveType == aiPrimitiveType_POINT) {
|
||||
outIndex++;
|
||||
outVertexIndex = 0;
|
||||
} else if (sourceFace->m_PrimitiveType == aiPrimitiveType_LINE) {
|
||||
} else if (sourceFace->mPrimitiveType == aiPrimitiveType_LINE) {
|
||||
outVertexIndex = 0;
|
||||
|
||||
if (!last)
|
||||
|
@ -501,10 +501,10 @@ void ObjFileImporter::createVertexArray(const ObjFile::Model *pModel,
|
|||
if (vertexIndex) {
|
||||
if (!last) {
|
||||
pMesh->mVertices[newIndex + 1] = pMesh->mVertices[newIndex];
|
||||
if (!sourceFace->m_normals.empty() && !pModel->m_Normals.empty()) {
|
||||
if (!sourceFace->m_normals.empty() && !pModel->mNormals.empty()) {
|
||||
pMesh->mNormals[newIndex + 1] = pMesh->mNormals[newIndex];
|
||||
}
|
||||
if (!pModel->m_TextureCoord.empty()) {
|
||||
if (!pModel->mTextureCoord.empty()) {
|
||||
for (size_t i = 0; i < pMesh->GetNumUVChannels(); i++) {
|
||||
pMesh->mTextureCoords[i][newIndex + 1] = pMesh->mTextureCoords[i][newIndex];
|
||||
}
|
||||
|
@ -565,9 +565,9 @@ void ObjFileImporter::createMaterials(const ObjFile::Model *pModel, aiScene *pSc
|
|||
return;
|
||||
}
|
||||
|
||||
const unsigned int numMaterials = (unsigned int)pModel->m_MaterialLib.size();
|
||||
const unsigned int numMaterials = (unsigned int)pModel->mMaterialLib.size();
|
||||
pScene->mNumMaterials = 0;
|
||||
if (pModel->m_MaterialLib.empty()) {
|
||||
if (pModel->mMaterialLib.empty()) {
|
||||
ASSIMP_LOG_DEBUG("OBJ: no materials specified");
|
||||
return;
|
||||
}
|
||||
|
@ -576,10 +576,10 @@ void ObjFileImporter::createMaterials(const ObjFile::Model *pModel, aiScene *pSc
|
|||
for (unsigned int matIndex = 0; matIndex < numMaterials; matIndex++) {
|
||||
// Store material name
|
||||
std::map<std::string, ObjFile::Material *>::const_iterator it;
|
||||
it = pModel->m_MaterialMap.find(pModel->m_MaterialLib[matIndex]);
|
||||
it = pModel->mMaterialMap.find(pModel->mMaterialLib[matIndex]);
|
||||
|
||||
// No material found, use the default material
|
||||
if (pModel->m_MaterialMap.end() == it)
|
||||
if (pModel->mMaterialMap.end() == it)
|
||||
continue;
|
||||
|
||||
aiMaterial *mat = new aiMaterial;
|
||||
|
@ -616,11 +616,16 @@ void ObjFileImporter::createMaterials(const ObjFile::Model *pModel, aiScene *pSc
|
|||
mat->AddProperty(&pCurrentMaterial->shineness, 1, AI_MATKEY_SHININESS);
|
||||
mat->AddProperty(&pCurrentMaterial->alpha, 1, AI_MATKEY_OPACITY);
|
||||
mat->AddProperty(&pCurrentMaterial->transparent, 1, AI_MATKEY_COLOR_TRANSPARENT);
|
||||
mat->AddProperty(&pCurrentMaterial->roughness, 1, AI_MATKEY_ROUGHNESS_FACTOR);
|
||||
mat->AddProperty(&pCurrentMaterial->metallic, 1, AI_MATKEY_METALLIC_FACTOR);
|
||||
mat->AddProperty(&pCurrentMaterial->sheen, 1, AI_MATKEY_SHEEN_COLOR_FACTOR);
|
||||
mat->AddProperty(&pCurrentMaterial->clearcoat_thickness, 1, AI_MATKEY_CLEARCOAT_FACTOR);
|
||||
mat->AddProperty(&pCurrentMaterial->clearcoat_roughness, 1, AI_MATKEY_CLEARCOAT_ROUGHNESS_FACTOR);
|
||||
if (pCurrentMaterial->roughness)
|
||||
mat->AddProperty(&pCurrentMaterial->roughness.Get(), 1, AI_MATKEY_ROUGHNESS_FACTOR);
|
||||
if (pCurrentMaterial->metallic)
|
||||
mat->AddProperty(&pCurrentMaterial->metallic.Get(), 1, AI_MATKEY_METALLIC_FACTOR);
|
||||
if (pCurrentMaterial->sheen)
|
||||
mat->AddProperty(&pCurrentMaterial->sheen.Get(), 1, AI_MATKEY_SHEEN_COLOR_FACTOR);
|
||||
if (pCurrentMaterial->clearcoat_thickness)
|
||||
mat->AddProperty(&pCurrentMaterial->clearcoat_thickness.Get(), 1, AI_MATKEY_CLEARCOAT_FACTOR);
|
||||
if (pCurrentMaterial->clearcoat_roughness)
|
||||
mat->AddProperty(&pCurrentMaterial->clearcoat_roughness.Get(), 1, AI_MATKEY_CLEARCOAT_ROUGHNESS_FACTOR);
|
||||
mat->AddProperty(&pCurrentMaterial->anisotropy, 1, AI_MATKEY_ANISOTROPY_FACTOR);
|
||||
|
||||
// Adding refraction index
|
||||
|
|
|
@ -99,9 +99,9 @@ ObjFileMtlImporter::ObjFileMtlImporter(std::vector<char> &buffer,
|
|||
ai_assert(nullptr != m_pModel);
|
||||
m_buffer.resize(BUFFERSIZE);
|
||||
std::fill(m_buffer.begin(), m_buffer.end(), '\0');
|
||||
if (nullptr == m_pModel->m_pDefaultMaterial) {
|
||||
m_pModel->m_pDefaultMaterial = new ObjFile::Material;
|
||||
m_pModel->m_pDefaultMaterial->MaterialName.Set("default");
|
||||
if (nullptr == m_pModel->mDefaultMaterial) {
|
||||
m_pModel->mDefaultMaterial = new ObjFile::Material;
|
||||
m_pModel->mDefaultMaterial->MaterialName.Set("default");
|
||||
}
|
||||
load();
|
||||
}
|
||||
|
@ -126,17 +126,21 @@ void ObjFileMtlImporter::load() {
|
|||
if (*m_DataIt == 'a') // Ambient color
|
||||
{
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->ambient);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(&m_pModel->mCurrentMaterial->ambient);
|
||||
} else if (*m_DataIt == 'd') {
|
||||
// Diffuse color
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->diffuse);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(&m_pModel->mCurrentMaterial->diffuse);
|
||||
} else if (*m_DataIt == 's') {
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->specular);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(&m_pModel->mCurrentMaterial->specular);
|
||||
} else if (*m_DataIt == 'e') {
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->emissive);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(&m_pModel->mCurrentMaterial->emissive);
|
||||
}
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
} break;
|
||||
|
@ -145,13 +149,15 @@ void ObjFileMtlImporter::load() {
|
|||
// Material transmission color
|
||||
if (*m_DataIt == 'f') {
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->transparent);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(&m_pModel->mCurrentMaterial->transparent);
|
||||
} else if (*m_DataIt == 'r') {
|
||||
// Material transmission alpha value
|
||||
++m_DataIt;
|
||||
ai_real d;
|
||||
getFloatValue(d);
|
||||
m_pModel->m_pCurrentMaterial->alpha = static_cast<ai_real>(1.0) - d;
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
m_pModel->mCurrentMaterial->alpha = static_cast<ai_real>(1.0) - d;
|
||||
}
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
} break;
|
||||
|
@ -162,7 +168,8 @@ void ObjFileMtlImporter::load() {
|
|||
} else {
|
||||
// Alpha value
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->alpha);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->alpha);
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
}
|
||||
} break;
|
||||
|
@ -173,11 +180,13 @@ void ObjFileMtlImporter::load() {
|
|||
switch (*m_DataIt) {
|
||||
case 's': // Specular exponent
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->shineness);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->shineness);
|
||||
break;
|
||||
case 'i': // Index Of refraction
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->ior);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->ior);
|
||||
break;
|
||||
case 'e': // New material
|
||||
createMaterial();
|
||||
|
@ -197,23 +206,28 @@ void ObjFileMtlImporter::load() {
|
|||
{
|
||||
case 'r':
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->roughness);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->roughness);
|
||||
break;
|
||||
case 'm':
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->metallic);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->metallic);
|
||||
break;
|
||||
case 's':
|
||||
++m_DataIt;
|
||||
getColorRGBA(&m_pModel->m_pCurrentMaterial->sheen);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getColorRGBA(m_pModel->mCurrentMaterial->sheen);
|
||||
break;
|
||||
case 'c':
|
||||
++m_DataIt;
|
||||
if (*m_DataIt == 'r') {
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->clearcoat_roughness);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->clearcoat_roughness);
|
||||
} else {
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->clearcoat_thickness);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getFloatValue(m_pModel->mCurrentMaterial->clearcoat_thickness);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -232,14 +246,16 @@ void ObjFileMtlImporter::load() {
|
|||
case 'i': // Illumination model
|
||||
{
|
||||
m_DataIt = getNextToken<DataArrayIt>(m_DataIt, m_DataItEnd);
|
||||
getIlluminationModel(m_pModel->m_pCurrentMaterial->illumination_model);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
getIlluminationModel(m_pModel->mCurrentMaterial->illumination_model);
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
} break;
|
||||
|
||||
case 'a': // Anisotropy
|
||||
{
|
||||
++m_DataIt;
|
||||
getFloatValue(m_pModel->m_pCurrentMaterial->anisotropy);
|
||||
getFloatValue(m_pModel->mCurrentMaterial->anisotropy);
|
||||
if (m_pModel->mCurrentMaterial != nullptr)
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
} break;
|
||||
|
||||
|
@ -268,6 +284,12 @@ void ObjFileMtlImporter::getColorRGBA(aiColor3D *pColor) {
pColor->b = b;
}

void ObjFileMtlImporter::getColorRGBA(Maybe<aiColor3D> &value) {
aiColor3D v;
getColorRGBA(&v);
value = Maybe<aiColor3D>(v);
}

// -------------------------------------------------------------------
// Loads the kind of illumination model.
void ObjFileMtlImporter::getIlluminationModel(int &illum_model) {

@ -275,6 +297,7 @@ void ObjFileMtlImporter::getIlluminationModel(int &illum_model) {
illum_model = atoi(&m_buffer[0]);
}

// -------------------------------------------------------------------
// Loads a single float value.
void ObjFileMtlImporter::getFloatValue(ai_real &value) {

@ -288,6 +311,15 @@ void ObjFileMtlImporter::getFloatValue(ai_real &value) {
value = (ai_real)fast_atof(&m_buffer[0]);
}

void ObjFileMtlImporter::getFloatValue(Maybe<ai_real> &value) {
m_DataIt = CopyNextWord<DataArrayIt>(m_DataIt, m_DataItEnd, &m_buffer[0], BUFFERSIZE);
size_t len = std::strlen(&m_buffer[0]);
if (len)
value = Maybe<ai_real>(fast_atof(&m_buffer[0]));
else
value = Maybe<ai_real>();
}
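The `Maybe`-returning overloads above let the MTL loader record whether an optional PBR attribute was actually present instead of silently defaulting it to zero; the importer side then emits the matching `AI_MATKEY_*` property only when the value is set. The wrapper itself comes from `Common/Maybe.h`. As a rough, hypothetical sketch only (not the actual assimp header), something along these lines is all that the `operator bool()` / `Get()` usage in `ObjFileImporter::createMaterials` requires:

```
#include <cassert>

// Hypothetical stand-in for code/Common/Maybe.h: an optional-style value holder.
template <typename T>
class Maybe {
public:
    Maybe() : mValue(), mSet(false) {}                            // no value parsed
    explicit Maybe(const T &value) : mValue(value), mSet(true) {} // value parsed from the MTL file

    // True only when a value was actually read.
    explicit operator bool() const { return mSet; }

    // Only meaningful after checking operator bool().
    const T &Get() const {
        assert(mSet);
        return mValue;
    }

private:
    T mValue;
    bool mSet;
};
```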

// -------------------------------------------------------------------
// Creates a material from loaded data.
void ObjFileMtlImporter::createMaterial() {
@ -313,20 +345,20 @@ void ObjFileMtlImporter::createMaterial() {
|
|||
|
||||
name = trim_whitespaces(name);
|
||||
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->m_MaterialMap.find(name);
|
||||
if (m_pModel->m_MaterialMap.end() == it) {
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->mMaterialMap.find(name);
|
||||
if (m_pModel->mMaterialMap.end() == it) {
|
||||
// New Material created
|
||||
m_pModel->m_pCurrentMaterial = new ObjFile::Material();
|
||||
m_pModel->m_pCurrentMaterial->MaterialName.Set(name);
|
||||
m_pModel->m_MaterialLib.push_back(name);
|
||||
m_pModel->m_MaterialMap[name] = m_pModel->m_pCurrentMaterial;
|
||||
m_pModel->mCurrentMaterial = new ObjFile::Material();
|
||||
m_pModel->mCurrentMaterial->MaterialName.Set(name);
|
||||
m_pModel->mMaterialLib.push_back(name);
|
||||
m_pModel->mMaterialMap[name] = m_pModel->mCurrentMaterial;
|
||||
|
||||
if (m_pModel->m_pCurrentMesh) {
|
||||
m_pModel->m_pCurrentMesh->m_uiMaterialIndex = static_cast<unsigned int>(m_pModel->m_MaterialLib.size() - 1);
|
||||
if (m_pModel->mCurrentMesh) {
|
||||
m_pModel->mCurrentMesh->m_uiMaterialIndex = static_cast<unsigned int>(m_pModel->mMaterialLib.size() - 1);
|
||||
}
|
||||
} else {
|
||||
// Use older material
|
||||
m_pModel->m_pCurrentMaterial = (*it).second;
|
||||
m_pModel->mCurrentMaterial = (*it).second;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -339,38 +371,38 @@ void ObjFileMtlImporter::getTexture() {
|
|||
const char *pPtr(&(*m_DataIt));
|
||||
if (!ASSIMP_strincmp(pPtr, DiffuseTexture.c_str(), static_cast<unsigned int>(DiffuseTexture.size()))) {
|
||||
// Diffuse texture
|
||||
out = &m_pModel->m_pCurrentMaterial->texture;
|
||||
out = &m_pModel->mCurrentMaterial->texture;
|
||||
clampIndex = ObjFile::Material::TextureDiffuseType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, AmbientTexture.c_str(), static_cast<unsigned int>(AmbientTexture.size()))) {
|
||||
// Ambient texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureAmbient;
|
||||
out = &m_pModel->mCurrentMaterial->textureAmbient;
|
||||
clampIndex = ObjFile::Material::TextureAmbientType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, SpecularTexture.c_str(), static_cast<unsigned int>(SpecularTexture.size()))) {
|
||||
// Specular texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureSpecular;
|
||||
out = &m_pModel->mCurrentMaterial->textureSpecular;
|
||||
clampIndex = ObjFile::Material::TextureSpecularType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, DisplacementTexture1.c_str(), static_cast<unsigned int>(DisplacementTexture1.size())) ||
|
||||
!ASSIMP_strincmp(pPtr, DisplacementTexture2.c_str(), static_cast<unsigned int>(DisplacementTexture2.size()))) {
|
||||
// Displacement texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureDisp;
|
||||
out = &m_pModel->mCurrentMaterial->textureDisp;
|
||||
clampIndex = ObjFile::Material::TextureDispType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, OpacityTexture.c_str(), static_cast<unsigned int>(OpacityTexture.size()))) {
|
||||
// Opacity texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureOpacity;
|
||||
out = &m_pModel->mCurrentMaterial->textureOpacity;
|
||||
clampIndex = ObjFile::Material::TextureOpacityType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, EmissiveTexture1.c_str(), static_cast<unsigned int>(EmissiveTexture1.size())) ||
|
||||
!ASSIMP_strincmp(pPtr, EmissiveTexture2.c_str(), static_cast<unsigned int>(EmissiveTexture2.size()))) {
|
||||
// Emissive texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureEmissive;
|
||||
out = &m_pModel->mCurrentMaterial->textureEmissive;
|
||||
clampIndex = ObjFile::Material::TextureEmissiveType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, BumpTexture1.c_str(), static_cast<unsigned int>(BumpTexture1.size())) ||
|
||||
!ASSIMP_strincmp(pPtr, BumpTexture2.c_str(), static_cast<unsigned int>(BumpTexture2.size()))) {
|
||||
// Bump texture
|
||||
out = &m_pModel->m_pCurrentMaterial->textureBump;
|
||||
out = &m_pModel->mCurrentMaterial->textureBump;
|
||||
clampIndex = ObjFile::Material::TextureBumpType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, NormalTextureV1.c_str(), static_cast<unsigned int>(NormalTextureV1.size())) || !ASSIMP_strincmp(pPtr, NormalTextureV2.c_str(), static_cast<unsigned int>(NormalTextureV2.size()))) {
|
||||
// Normal map
|
||||
out = &m_pModel->m_pCurrentMaterial->textureNormal;
|
||||
out = &m_pModel->mCurrentMaterial->textureNormal;
|
||||
clampIndex = ObjFile::Material::TextureNormalType;
|
||||
} else if (!ASSIMP_strincmp(pPtr, ReflectionTexture.c_str(), static_cast<unsigned int>(ReflectionTexture.size()))) {
|
||||
// Reflection texture(s)
|
||||
|
@ -378,23 +410,23 @@ void ObjFileMtlImporter::getTexture() {
|
|||
return;
|
||||
} else if (!ASSIMP_strincmp(pPtr, SpecularityTexture.c_str(), static_cast<unsigned int>(SpecularityTexture.size()))) {
|
||||
// Specularity scaling (glossiness)
|
||||
out = &m_pModel->m_pCurrentMaterial->textureSpecularity;
|
||||
out = &m_pModel->mCurrentMaterial->textureSpecularity;
|
||||
clampIndex = ObjFile::Material::TextureSpecularityType;
|
||||
} else if ( !ASSIMP_strincmp( pPtr, RoughnessTexture.c_str(), static_cast<unsigned int>(RoughnessTexture.size()))) {
|
||||
// PBR Roughness texture
|
||||
out = & m_pModel->m_pCurrentMaterial->textureRoughness;
|
||||
out = & m_pModel->mCurrentMaterial->textureRoughness;
|
||||
clampIndex = ObjFile::Material::TextureRoughnessType;
|
||||
} else if ( !ASSIMP_strincmp( pPtr, MetallicTexture.c_str(), static_cast<unsigned int>(MetallicTexture.size()))) {
|
||||
// PBR Metallic texture
|
||||
out = & m_pModel->m_pCurrentMaterial->textureMetallic;
|
||||
out = & m_pModel->mCurrentMaterial->textureMetallic;
|
||||
clampIndex = ObjFile::Material::TextureMetallicType;
|
||||
} else if (!ASSIMP_strincmp( pPtr, SheenTexture.c_str(), static_cast<unsigned int>(SheenTexture.size()))) {
|
||||
// PBR Sheen (reflectance) texture
|
||||
out = & m_pModel->m_pCurrentMaterial->textureSheen;
|
||||
out = & m_pModel->mCurrentMaterial->textureSheen;
|
||||
clampIndex = ObjFile::Material::TextureSheenType;
|
||||
} else if (!ASSIMP_strincmp( pPtr, RMATexture.c_str(), static_cast<unsigned int>(RMATexture.size()))) {
|
||||
// PBR Rough/Metal/AO texture
|
||||
out = & m_pModel->m_pCurrentMaterial->textureRMA;
|
||||
out = & m_pModel->mCurrentMaterial->textureRMA;
|
||||
clampIndex = ObjFile::Material::TextureRMAType;
|
||||
} else {
|
||||
ASSIMP_LOG_ERROR("OBJ/MTL: Encountered unknown texture type");
|
||||
|
@ -403,7 +435,7 @@ void ObjFileMtlImporter::getTexture() {
|
|||
|
||||
bool clamp = false;
|
||||
getTextureOption(clamp, clampIndex, out);
|
||||
m_pModel->m_pCurrentMaterial->clamp[clampIndex] = clamp;
|
||||
m_pModel->mCurrentMaterial->clamp[clampIndex] = clamp;
|
||||
|
||||
std::string texture;
|
||||
m_DataIt = getName<DataArrayIt>(m_DataIt, m_DataItEnd, texture);
|
||||
|
@ -451,31 +483,31 @@ void ObjFileMtlImporter::getTextureOption(bool &clamp, int &clampIndex, aiString
|
|||
CopyNextWord(it, m_DataItEnd, value, sizeof(value) / sizeof(*value));
|
||||
if (!ASSIMP_strincmp(value, "cube_top", 8)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeTopType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[0];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[0];
|
||||
} else if (!ASSIMP_strincmp(value, "cube_bottom", 11)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeBottomType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[1];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[1];
|
||||
} else if (!ASSIMP_strincmp(value, "cube_front", 10)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeFrontType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[2];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[2];
|
||||
} else if (!ASSIMP_strincmp(value, "cube_back", 9)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeBackType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[3];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[3];
|
||||
} else if (!ASSIMP_strincmp(value, "cube_left", 9)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeLeftType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[4];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[4];
|
||||
} else if (!ASSIMP_strincmp(value, "cube_right", 10)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionCubeRightType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[5];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[5];
|
||||
} else if (!ASSIMP_strincmp(value, "sphere", 6)) {
|
||||
clampIndex = ObjFile::Material::TextureReflectionSphereType;
|
||||
out = &m_pModel->m_pCurrentMaterial->textureReflection[0];
|
||||
out = &m_pModel->mCurrentMaterial->textureReflection[0];
|
||||
}
|
||||
|
||||
skipToken = 2;
|
||||
} else if (!ASSIMP_strincmp(pPtr, BumpOption.c_str(), static_cast<unsigned int>(BumpOption.size()))) {
|
||||
DataArrayIt it = getNextToken<DataArrayIt>(m_DataIt, m_DataItEnd);
|
||||
getFloat(it, m_DataItEnd, m_pModel->m_pCurrentMaterial->bump_multiplier);
|
||||
getFloat(it, m_DataItEnd, m_pModel->mCurrentMaterial->bump_multiplier);
|
||||
skipToken = 2;
|
||||
} else if (!ASSIMP_strincmp(pPtr, BlendUOption.c_str(), static_cast<unsigned int>(BlendUOption.size())) || !ASSIMP_strincmp(pPtr, BlendVOption.c_str(), static_cast<unsigned int>(BlendVOption.size())) || !ASSIMP_strincmp(pPtr, BoostOption.c_str(), static_cast<unsigned int>(BoostOption.size())) || !ASSIMP_strincmp(pPtr, ResolutionOption.c_str(), static_cast<unsigned int>(ResolutionOption.size())) || !ASSIMP_strincmp(pPtr, ChannelOption.c_str(), static_cast<unsigned int>(ChannelOption.size()))) {
|
||||
skipToken = 2;
|
||||
|
|
|
@ -43,6 +43,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/defs.h>
#include <string>
#include <vector>
#include "Common/Maybe.h"

struct aiColor3D;
struct aiString;

@ -81,10 +82,12 @@ private:
void load();
/// Get color data.
void getColorRGBA(aiColor3D *pColor);
void getColorRGBA(Maybe<aiColor3D> &value);
/// Get illumination model from loaded data
void getIlluminationModel(int &illum_model);
/// Gets a float value from data.
void getFloatValue(ai_real &value);
void getFloatValue(Maybe<ai_real> &value);
/// Creates a new material from loaded data.
void createMaterial();
/// Get texture name from loaded data.

@ -3,7 +3,7 @@
|
|||
Open Asset Import Library (assimp)
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2020, assimp team
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
All rights reserved.
|
||||
|
||||
|
@ -52,10 +52,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <cstdlib>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
#include <string_view>
|
||||
|
||||
namespace Assimp {
|
||||
|
||||
constexpr char ObjFileParser::DEFAULT_MATERIAL[];
|
||||
constexpr const char ObjFileParser::DEFAULT_MATERIAL[];
|
||||
|
||||
ObjFileParser::ObjFileParser() :
|
||||
m_DataIt(),
|
||||
|
@ -84,13 +85,13 @@ ObjFileParser::ObjFileParser(IOStreamBuffer<char> &streamBuffer, const std::stri
|
|||
|
||||
// Create the model instance to store all the data
|
||||
m_pModel.reset(new ObjFile::Model());
|
||||
m_pModel->m_ModelName = modelName;
|
||||
m_pModel->mModelName = modelName;
|
||||
|
||||
// create default material and store it
|
||||
m_pModel->m_pDefaultMaterial = new ObjFile::Material;
|
||||
m_pModel->m_pDefaultMaterial->MaterialName.Set(DEFAULT_MATERIAL);
|
||||
m_pModel->m_MaterialLib.push_back(DEFAULT_MATERIAL);
|
||||
m_pModel->m_MaterialMap[DEFAULT_MATERIAL] = m_pModel->m_pDefaultMaterial;
|
||||
m_pModel->mDefaultMaterial = new ObjFile::Material;
|
||||
m_pModel->mDefaultMaterial->MaterialName.Set(DEFAULT_MATERIAL);
|
||||
m_pModel->mMaterialLib.push_back(DEFAULT_MATERIAL);
|
||||
m_pModel->mMaterialMap[DEFAULT_MATERIAL] = m_pModel->mDefaultMaterial;
|
||||
|
||||
// Start parsing the file
|
||||
parseFile(streamBuffer);
|
||||
|
@ -153,23 +154,23 @@ void ObjFileParser::parseFile(IOStreamBuffer<char> &streamBuffer) {
|
|||
size_t numComponents = getNumComponentsInDataDefinition();
|
||||
if (numComponents == 3) {
|
||||
// read in vertex definition
|
||||
getVector3(m_pModel->m_Vertices);
|
||||
getVector3(m_pModel->mVertices);
|
||||
} else if (numComponents == 4) {
|
||||
// read in vertex definition (homogeneous coords)
|
||||
getHomogeneousVector3(m_pModel->m_Vertices);
|
||||
getHomogeneousVector3(m_pModel->mVertices);
|
||||
} else if (numComponents == 6) {
|
||||
// read vertex and vertex-color
|
||||
getTwoVectors3(m_pModel->m_Vertices, m_pModel->m_VertexColors);
|
||||
getTwoVectors3(m_pModel->mVertices, m_pModel->mVertexColors);
|
||||
}
|
||||
} else if (*m_DataIt == 't') {
|
||||
// read in texture coordinate ( 2D or 3D )
|
||||
++m_DataIt;
|
||||
size_t dim = getTexCoordVector(m_pModel->m_TextureCoord);
|
||||
m_pModel->m_TextureCoordDim = std::max(m_pModel->m_TextureCoordDim, (unsigned int)dim);
|
||||
size_t dim = getTexCoordVector(m_pModel->mTextureCoord);
|
||||
m_pModel->mTextureCoordDim = std::max(m_pModel->mTextureCoordDim, (unsigned int)dim);
|
||||
} else if (*m_DataIt == 'n') {
|
||||
// Read in normal vector definition
|
||||
++m_DataIt;
|
||||
getVector3(m_pModel->m_Normals);
|
||||
getVector3(m_pModel->mNormals);
|
||||
}
|
||||
} break;
|
||||
|
||||
|
@ -424,7 +425,7 @@ void ObjFileParser::getVector2(std::vector<aiVector2D> &point2d_array) {
|
|||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
}
|
||||
|
||||
static const std::string DefaultObjName = "defaultobject";
|
||||
static constexpr char DefaultObjName[] = "defaultobject";
|
||||
|
||||
void ObjFileParser::getFace(aiPrimitiveType type) {
|
||||
m_DataIt = getNextToken<DataArrayIt>(m_DataIt, m_DataItEnd);
|
||||
|
@ -435,12 +436,12 @@ void ObjFileParser::getFace(aiPrimitiveType type) {
|
|||
ObjFile::Face *face = new ObjFile::Face(type);
|
||||
bool hasNormal = false;
|
||||
|
||||
const int vSize = static_cast<unsigned int>(m_pModel->m_Vertices.size());
|
||||
const int vtSize = static_cast<unsigned int>(m_pModel->m_TextureCoord.size());
|
||||
const int vnSize = static_cast<unsigned int>(m_pModel->m_Normals.size());
|
||||
const int vSize = static_cast<unsigned int>(m_pModel->mVertices.size());
|
||||
const int vtSize = static_cast<unsigned int>(m_pModel->mTextureCoord.size());
|
||||
const int vnSize = static_cast<unsigned int>(m_pModel->mNormals.size());
|
||||
|
||||
const bool vt = (!m_pModel->m_TextureCoord.empty());
|
||||
const bool vn = (!m_pModel->m_Normals.empty());
|
||||
const bool vt = (!m_pModel->mTextureCoord.empty());
|
||||
const bool vn = (!m_pModel->mNormals.empty());
|
||||
int iPos = 0;
|
||||
while (m_DataIt != m_DataItEnd) {
|
||||
int iStep = 1;
|
||||
|
@ -458,7 +459,8 @@ void ObjFileParser::getFace(aiPrimitiveType type) {
|
|||
iPos = 0;
|
||||
} else {
|
||||
//OBJ USES 1 Base ARRAYS!!!!
|
||||
const int iVal(::atoi(&(*m_DataIt)));
|
||||
const char *token = &(*m_DataIt);
|
||||
const int iVal = ::atoi(token);
|
||||
|
||||
// increment iStep position based off of the sign and # of digits
|
||||
int tmp = iVal;
|
||||
|
@ -499,7 +501,7 @@ void ObjFileParser::getFace(aiPrimitiveType type) {
|
|||
} else {
|
||||
//On error, std::atoi will return 0 which is not a valid value
|
||||
delete face;
|
||||
throw DeadlyImportError("OBJ: Invalid face indice");
|
||||
throw DeadlyImportError("OBJ: Invalid face index.");
|
||||
}
|
||||
}
|
||||
m_DataIt += iStep;
|
||||
|
@ -514,28 +516,28 @@ void ObjFileParser::getFace(aiPrimitiveType type) {
|
|||
}
|
||||
|
||||
// Set active material, if one set
|
||||
if (nullptr != m_pModel->m_pCurrentMaterial) {
|
||||
face->m_pMaterial = m_pModel->m_pCurrentMaterial;
|
||||
if (nullptr != m_pModel->mCurrentMaterial) {
|
||||
face->m_pMaterial = m_pModel->mCurrentMaterial;
|
||||
} else {
|
||||
face->m_pMaterial = m_pModel->m_pDefaultMaterial;
|
||||
face->m_pMaterial = m_pModel->mDefaultMaterial;
|
||||
}
|
||||
|
||||
// Create a default object, if nothing is there
|
||||
if (nullptr == m_pModel->m_pCurrent) {
|
||||
if (nullptr == m_pModel->mCurrentObject) {
|
||||
createObject(DefaultObjName);
|
||||
}
|
||||
|
||||
// Assign face to mesh
|
||||
if (nullptr == m_pModel->m_pCurrentMesh) {
|
||||
if (nullptr == m_pModel->mCurrentMesh) {
|
||||
createMesh(DefaultObjName);
|
||||
}
|
||||
|
||||
// Store the face
|
||||
m_pModel->m_pCurrentMesh->m_Faces.push_back(face);
|
||||
m_pModel->m_pCurrentMesh->m_uiNumIndices += (unsigned int)face->m_vertices.size();
|
||||
m_pModel->m_pCurrentMesh->m_uiUVCoordinates[0] += (unsigned int)face->m_texturCoords.size();
|
||||
if (!m_pModel->m_pCurrentMesh->m_hasNormals && hasNormal) {
|
||||
m_pModel->m_pCurrentMesh->m_hasNormals = true;
|
||||
m_pModel->mCurrentMesh->m_Faces.emplace_back(face);
|
||||
m_pModel->mCurrentMesh->m_uiNumIndices += static_cast<unsigned int>(face->m_vertices.size());
|
||||
m_pModel->mCurrentMesh->m_uiUVCoordinates[0] += static_cast<unsigned int>(face->m_texturCoords.size());
|
||||
if (!m_pModel->mCurrentMesh->m_hasNormals && hasNormal) {
|
||||
m_pModel->mCurrentMesh->m_hasNormals = true;
|
||||
}
|
||||
// Skip the rest of the line
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
|
@ -564,33 +566,33 @@ void ObjFileParser::getMaterialDesc() {
|
|||
|
||||
// If the current mesh has the same material, we simply ignore that 'usemtl' command
|
||||
// There is no need to create another object or even mesh here
|
||||
if (m_pModel->m_pCurrentMaterial && m_pModel->m_pCurrentMaterial->MaterialName == aiString(strName)) {
|
||||
if (m_pModel->mCurrentMaterial && m_pModel->mCurrentMaterial->MaterialName == aiString(strName)) {
|
||||
skip = true;
|
||||
}
|
||||
|
||||
if (!skip) {
|
||||
// Search for material
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->m_MaterialMap.find(strName);
|
||||
if (it == m_pModel->m_MaterialMap.end()) {
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->mMaterialMap.find(strName);
|
||||
if (it == m_pModel->mMaterialMap.end()) {
|
||||
// Not found, so we don't know anything about the material except for its name.
|
||||
// This may be the case if the material library is missing. We don't want to lose all
|
||||
// materials if that happens, so create a new named material instead of discarding it
|
||||
// completely.
|
||||
ASSIMP_LOG_ERROR("OBJ: failed to locate material ", strName, ", creating new material");
|
||||
m_pModel->m_pCurrentMaterial = new ObjFile::Material();
|
||||
m_pModel->m_pCurrentMaterial->MaterialName.Set(strName);
|
||||
m_pModel->m_MaterialLib.push_back(strName);
|
||||
m_pModel->m_MaterialMap[strName] = m_pModel->m_pCurrentMaterial;
|
||||
m_pModel->mCurrentMaterial = new ObjFile::Material();
|
||||
m_pModel->mCurrentMaterial->MaterialName.Set(strName);
|
||||
m_pModel->mMaterialLib.push_back(strName);
|
||||
m_pModel->mMaterialMap[strName] = m_pModel->mCurrentMaterial;
|
||||
} else {
|
||||
// Found, using detected material
|
||||
m_pModel->m_pCurrentMaterial = (*it).second;
|
||||
m_pModel->mCurrentMaterial = (*it).second;
|
||||
}
|
||||
|
||||
if (needsNewMesh(strName)) {
|
||||
createMesh(strName);
|
||||
}
|
||||
|
||||
m_pModel->m_pCurrentMesh->m_uiMaterialIndex = getMaterialIndex(strName);
|
||||
m_pModel->mCurrentMesh->m_uiMaterialIndex = getMaterialIndex(strName);
|
||||
}
|
||||
|
||||
// Skip rest of line
|
||||
|
@ -677,17 +679,17 @@ void ObjFileParser::getNewMaterial() {
|
|||
while (m_DataIt != m_DataItEnd && IsSpaceOrNewLine(*m_DataIt)) {
|
||||
++m_DataIt;
|
||||
}
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->m_MaterialMap.find(strMat);
|
||||
if (it == m_pModel->m_MaterialMap.end()) {
|
||||
std::map<std::string, ObjFile::Material *>::iterator it = m_pModel->mMaterialMap.find(strMat);
|
||||
if (it == m_pModel->mMaterialMap.end()) {
|
||||
// Show a warning, if material was not found
|
||||
ASSIMP_LOG_WARN("OBJ: Unsupported material requested: ", strMat);
|
||||
m_pModel->m_pCurrentMaterial = m_pModel->m_pDefaultMaterial;
|
||||
m_pModel->mCurrentMaterial = m_pModel->mDefaultMaterial;
|
||||
} else {
|
||||
// Set new material
|
||||
if (needsNewMesh(strMat)) {
|
||||
createMesh(strMat);
|
||||
}
|
||||
m_pModel->m_pCurrentMesh->m_uiMaterialIndex = getMaterialIndex(strMat);
|
||||
m_pModel->mCurrentMesh->m_uiMaterialIndex = getMaterialIndex(strMat);
|
||||
}
|
||||
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
|
@ -699,8 +701,8 @@ int ObjFileParser::getMaterialIndex(const std::string &strMaterialName) {
|
|||
if (strMaterialName.empty()) {
|
||||
return mat_index;
|
||||
}
|
||||
for (size_t index = 0; index < m_pModel->m_MaterialLib.size(); ++index) {
|
||||
if (strMaterialName == m_pModel->m_MaterialLib[index]) {
|
||||
for (size_t index = 0; index < m_pModel->mMaterialLib.size(); ++index) {
|
||||
if (strMaterialName == m_pModel->mMaterialLib[index]) {
|
||||
mat_index = (int)index;
|
||||
break;
|
||||
}
|
||||
|
@ -721,22 +723,22 @@ void ObjFileParser::getGroupName() {
|
|||
}
|
||||
|
||||
// Change active group, if necessary
|
||||
if (m_pModel->m_strActiveGroup != groupName) {
|
||||
if (m_pModel->mActiveGroup != groupName) {
|
||||
// Search for already existing entry
|
||||
ObjFile::Model::ConstGroupMapIt it = m_pModel->m_Groups.find(groupName);
|
||||
ObjFile::Model::ConstGroupMapIt it = m_pModel->mGroups.find(groupName);
|
||||
|
||||
// We are mapping groups into the object structure
|
||||
createObject(groupName);
|
||||
|
||||
// New group name, creating a new entry
|
||||
if (it == m_pModel->m_Groups.end()) {
|
||||
if (it == m_pModel->mGroups.end()) {
|
||||
std::vector<unsigned int> *pFaceIDArray = new std::vector<unsigned int>;
|
||||
m_pModel->m_Groups[groupName] = pFaceIDArray;
|
||||
m_pModel->m_pGroupFaceIDs = (pFaceIDArray);
|
||||
m_pModel->mGroups[groupName] = pFaceIDArray;
|
||||
m_pModel->mGroupFaceIDs = (pFaceIDArray);
|
||||
} else {
|
||||
m_pModel->m_pGroupFaceIDs = (*it).second;
|
||||
m_pModel->mGroupFaceIDs = (*it).second;
|
||||
}
|
||||
m_pModel->m_strActiveGroup = groupName;
|
||||
m_pModel->mActiveGroup = groupName;
|
||||
}
|
||||
m_DataIt = skipLine<DataArrayIt>(m_DataIt, m_DataItEnd, m_uiLine);
|
||||
}
|
||||
|
@ -773,20 +775,20 @@ void ObjFileParser::getObjectName() {
|
|||
std::string strObjectName(pStart, &(*m_DataIt));
|
||||
if (!strObjectName.empty()) {
|
||||
// Reset current object
|
||||
m_pModel->m_pCurrent = nullptr;
|
||||
m_pModel->mCurrentObject = nullptr;
|
||||
|
||||
// Search for actual object
|
||||
for (std::vector<ObjFile::Object *>::const_iterator it = m_pModel->m_Objects.begin();
|
||||
it != m_pModel->m_Objects.end();
|
||||
for (std::vector<ObjFile::Object *>::const_iterator it = m_pModel->mObjects.begin();
|
||||
it != m_pModel->mObjects.end();
|
||||
++it) {
|
||||
if ((*it)->m_strObjName == strObjectName) {
|
||||
m_pModel->m_pCurrent = *it;
|
||||
m_pModel->mCurrentObject = *it;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Allocate a new object, if current one was not found before
|
||||
if (nullptr == m_pModel->m_pCurrent) {
|
||||
if (nullptr == m_pModel->mCurrentObject) {
|
||||
createObject(strObjectName);
|
||||
}
|
||||
}
|
||||
|
@ -797,16 +799,16 @@ void ObjFileParser::getObjectName() {
|
|||
void ObjFileParser::createObject(const std::string &objName) {
|
||||
ai_assert(nullptr != m_pModel);
|
||||
|
||||
m_pModel->m_pCurrent = new ObjFile::Object;
|
||||
m_pModel->m_pCurrent->m_strObjName = objName;
|
||||
m_pModel->m_Objects.push_back(m_pModel->m_pCurrent);
|
||||
m_pModel->mCurrentObject = new ObjFile::Object;
|
||||
m_pModel->mCurrentObject->m_strObjName = objName;
|
||||
m_pModel->mObjects.push_back(m_pModel->mCurrentObject);
|
||||
|
||||
createMesh(objName);
|
||||
|
||||
if (m_pModel->m_pCurrentMaterial) {
|
||||
m_pModel->m_pCurrentMesh->m_uiMaterialIndex =
|
||||
getMaterialIndex(m_pModel->m_pCurrentMaterial->MaterialName.data);
|
||||
m_pModel->m_pCurrentMesh->m_pMaterial = m_pModel->m_pCurrentMaterial;
|
||||
if (m_pModel->mCurrentMaterial) {
|
||||
m_pModel->mCurrentMesh->m_uiMaterialIndex =
|
||||
getMaterialIndex(m_pModel->mCurrentMaterial->MaterialName.data);
|
||||
m_pModel->mCurrentMesh->m_pMaterial = m_pModel->mCurrentMaterial;
|
||||
}
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
@ -814,11 +816,11 @@ void ObjFileParser::createObject(const std::string &objName) {
|
|||
void ObjFileParser::createMesh(const std::string &meshName) {
|
||||
ai_assert(nullptr != m_pModel);
|
||||
|
||||
m_pModel->m_pCurrentMesh = new ObjFile::Mesh(meshName);
|
||||
m_pModel->m_Meshes.push_back(m_pModel->m_pCurrentMesh);
|
||||
unsigned int meshId = static_cast<unsigned int>(m_pModel->m_Meshes.size() - 1);
|
||||
if (nullptr != m_pModel->m_pCurrent) {
|
||||
m_pModel->m_pCurrent->m_Meshes.push_back(meshId);
|
||||
m_pModel->mCurrentMesh = new ObjFile::Mesh(meshName);
|
||||
m_pModel->mMeshes.push_back(m_pModel->mCurrentMesh);
|
||||
unsigned int meshId = static_cast<unsigned int>(m_pModel->mMeshes.size() - 1);
|
||||
if (nullptr != m_pModel->mCurrentObject) {
|
||||
m_pModel->mCurrentObject->m_Meshes.push_back(meshId);
|
||||
} else {
|
||||
ASSIMP_LOG_ERROR("OBJ: No object detected to attach a new mesh instance.");
|
||||
}
|
||||
|
@ -828,16 +830,16 @@ void ObjFileParser::createMesh(const std::string &meshName) {
|
|||
// Returns true, if a new mesh must be created.
|
||||
bool ObjFileParser::needsNewMesh(const std::string &materialName) {
|
||||
// If no mesh data yet
|
||||
if (m_pModel->m_pCurrentMesh == nullptr) {
|
||||
if (m_pModel->mCurrentMesh == nullptr) {
|
||||
return true;
|
||||
}
|
||||
bool newMat = false;
|
||||
int matIdx = getMaterialIndex(materialName);
|
||||
int curMatIdx = m_pModel->m_pCurrentMesh->m_uiMaterialIndex;
|
||||
int curMatIdx = m_pModel->mCurrentMesh->m_uiMaterialIndex;
|
||||
if (curMatIdx != int(ObjFile::Mesh::NoMaterial) && curMatIdx != matIdx
|
||||
// no need create a new mesh if no faces in current
|
||||
// lets say 'usemtl' goes straight after 'g'
|
||||
&& !m_pModel->m_pCurrentMesh->m_Faces.empty()) {
|
||||
&& !m_pModel->mCurrentMesh->m_Faces.empty()) {
|
||||
// New material -> only one material per mesh, so we need to create a new
|
||||
// material
|
||||
newMat = true;
|
||||
|
|
|
@ -137,11 +137,8 @@ protected:
void reportErrorTokenInFace();

private:
// Copy and assignment constructor should be private
// because the class contains pointer to allocated memory

/// Default material name
static constexpr char DEFAULT_MATERIAL[] = AI_DEFAULT_MATERIAL_NAME;
static constexpr const char DEFAULT_MATERIAL[] = AI_DEFAULT_MATERIAL_NAME;
//! Iterator to current position in buffer
DataArrayIt m_DataIt;
//! Iterator to end position of buffer

@ -236,7 +236,7 @@ inline char_t CopyNextWord(char_t it, char_t end, char *pBuffer, size_t length)
template <class char_t>
inline char_t getFloat(char_t it, char_t end, ai_real &value) {
static const size_t BUFFERSIZE = 1024;
char buffer[BUFFERSIZE];
char buffer[BUFFERSIZE] = {};
it = CopyNextWord<char_t>(it, end, buffer, BUFFERSIZE);
value = (ai_real)fast_atof(buffer);

@ -248,6 +248,7 @@ void OgreXmlSerializer::ReadMesh(MeshXml *mesh) {
|
|||
} else if (currentName == nnBoneAssignments) {
|
||||
ReadBoneAssignments(currentNode, mesh->sharedVertexData);
|
||||
} else if (currentName == nnSkeletonLink) {
|
||||
mesh->skeletonRef = currentNode.attribute("name").as_string();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -488,6 +489,15 @@ bool OgreXmlSerializer::ImportSkeleton(Assimp::IOSystem *pIOHandler, MeshXml *me
|
|||
Skeleton *skeleton = new Skeleton();
|
||||
OgreXmlSerializer serializer(xmlParser.get());
|
||||
XmlNode root = xmlParser->getRootNode();
|
||||
if (std::string(root.name()) != nnSkeleton) {
|
||||
printf("\nSkeleton is not a valid root: %s\n", root.name());
|
||||
for (auto &a : root.children()) {
|
||||
if (std::string(a.name()) == nnSkeleton) {
|
||||
root = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
serializer.ReadSkeleton(root, skeleton);
|
||||
mesh->skeleton = skeleton;
|
||||
return true;
|
||||
|
@ -537,7 +547,7 @@ XmlParserPtr OgreXmlSerializer::OpenXmlParser(Assimp::IOSystem *pIOHandler, cons
|
|||
}
|
||||
|
||||
void OgreXmlSerializer::ReadSkeleton(XmlNode &node, Skeleton *skeleton) {
|
||||
if (node.name() != nnSkeleton) {
|
||||
if (std::string(node.name()) != nnSkeleton) {
|
||||
throw DeadlyImportError("Root node is <" + std::string(node.name()) + "> expecting <skeleton>");
|
||||
}
|
||||
|
||||
|
@ -574,14 +584,14 @@ void OgreXmlSerializer::ReadAnimations(XmlNode &node, Skeleton *skeleton) {
|
|||
anim->name = ReadAttribute<std::string>(currentNode, "name");
|
||||
anim->length = ReadAttribute<float>(currentNode, "length");
|
||||
for (XmlNode ¤tChildNode : currentNode.children()) {
|
||||
const std::string currentChildName = currentNode.name();
|
||||
const std::string currentChildName = currentChildNode.name();
|
||||
if (currentChildName == nnTracks) {
|
||||
ReadAnimationTracks(currentChildNode, anim);
|
||||
skeleton->animations.push_back(anim);
|
||||
} else {
|
||||
throw DeadlyImportError("No <tracks> found in <animation> ", anim->name);
|
||||
}
|
||||
}
|
||||
skeleton->animations.push_back(anim);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -594,14 +604,14 @@ void OgreXmlSerializer::ReadAnimationTracks(XmlNode &node, Animation *dest) {
|
|||
track.type = VertexAnimationTrack::VAT_TRANSFORM;
|
||||
track.boneName = ReadAttribute<std::string>(currentNode, "bone");
|
||||
for (XmlNode ¤tChildNode : currentNode.children()) {
|
||||
const std::string currentChildName = currentNode.name();
|
||||
const std::string currentChildName = currentChildNode.name();
|
||||
if (currentChildName == nnKeyFrames) {
|
||||
ReadAnimationKeyFrames(currentChildNode, dest, &track);
|
||||
dest->tracks.push_back(track);
|
||||
} else {
|
||||
throw DeadlyImportError("No <keyframes> found in <track> ", dest->name);
|
||||
}
|
||||
}
|
||||
dest->tracks.push_back(track);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -614,15 +624,15 @@ void OgreXmlSerializer::ReadAnimationKeyFrames(XmlNode &node, Animation *anim, V
|
|||
if (currentName == nnKeyFrame) {
|
||||
keyframe.timePos = ReadAttribute<float>(currentNode, "time");
|
||||
for (XmlNode ¤tChildNode : currentNode.children()) {
|
||||
const std::string currentChildName = currentNode.name();
|
||||
const std::string currentChildName = currentChildNode.name();
|
||||
if (currentChildName == nnTranslate) {
|
||||
keyframe.position.x = ReadAttribute<float>(currentChildNode, anX);
|
||||
keyframe.position.y = ReadAttribute<float>(currentChildNode, anY);
|
||||
keyframe.position.z = ReadAttribute<float>(currentChildNode, anZ);
|
||||
} else if (currentChildName == nnRotate) {
|
||||
float angle = ReadAttribute<float>(currentChildNode, "angle");
|
||||
for (XmlNode ¤tChildChildNode : currentNode.children()) {
|
||||
const std::string currentChildChildName = currentNode.name();
|
||||
for (XmlNode ¤tChildChildNode : currentChildNode.children()) {
|
||||
const std::string currentChildChildName = currentChildChildNode.name();
|
||||
if (currentChildChildName == nnAxis) {
|
||||
aiVector3D axis;
|
||||
axis.x = ReadAttribute<float>(currentChildChildNode, anX);
|
||||
|
@ -695,12 +705,12 @@ void OgreXmlSerializer::ReadBones(XmlNode &node, Skeleton *skeleton) {
|
|||
bone->id = ReadAttribute<uint16_t>(currentNode, "id");
|
||||
bone->name = ReadAttribute<std::string>(currentNode, "name");
|
||||
for (XmlNode ¤tChildNode : currentNode.children()) {
|
||||
const std::string currentChildName = currentNode.name();
|
||||
if (currentChildName == nnRotation) {
|
||||
const std::string currentChildName = currentChildNode.name();
|
||||
if (currentChildName == nnPosition) {
|
||||
bone->position.x = ReadAttribute<float>(currentChildNode, anX);
|
||||
bone->position.y = ReadAttribute<float>(currentChildNode, anY);
|
||||
bone->position.z = ReadAttribute<float>(currentChildNode, anZ);
|
||||
} else if (currentChildName == nnScale) {
|
||||
} else if (currentChildName == nnRotation) {
|
||||
float angle = ReadAttribute<float>(currentChildNode, "angle");
|
||||
for (XmlNode currentChildChildNode : currentChildNode.children()) {
|
||||
const std::string ¤tChildChildName = currentChildChildNode.name();
|
||||
|
|
|
@ -151,45 +151,46 @@ namespace Grammar {
|
|||
}
|
||||
|
||||
static TokenType matchTokenType(const char *tokenType) {
|
||||
if (MetricType == tokenType) {
|
||||
const size_t len = std::strlen(tokenType);
|
||||
if (0 == strncmp(MetricType, tokenType, len)) {
|
||||
return MetricToken;
|
||||
} else if (NameType == tokenType) {
|
||||
} else if (0 == strncmp(NameType, tokenType, len)) {
|
||||
return NameToken;
|
||||
} else if (ObjectRefType == tokenType) {
|
||||
} else if (0 == strncmp(ObjectRefType, tokenType, len)) {
|
||||
return ObjectRefToken;
|
||||
} else if (MaterialRefType == tokenType) {
|
||||
} else if (0 == strncmp(MaterialRefType, tokenType, len)) {
|
||||
return MaterialRefToken;
|
||||
} else if (MetricKeyType == tokenType) {
|
||||
} else if (0 == strncmp(MetricKeyType, tokenType, len)) {
|
||||
return MetricKeyToken;
|
||||
} else if (GeometryNodeType == tokenType) {
|
||||
} else if (0 == strncmp(GeometryNodeType, tokenType, len)) {
|
||||
return GeometryNodeToken;
|
||||
} else if (CameraNodeType == tokenType) {
|
||||
} else if (0 == strncmp(CameraNodeType, tokenType, len)) {
|
||||
return CameraNodeToken;
|
||||
} else if (LightNodeType == tokenType) {
|
||||
} else if (0 == strncmp(LightNodeType, tokenType, len)) {
|
||||
return LightNodeToken;
|
||||
} else if (GeometryObjectType == tokenType) {
|
||||
} else if (0 == strncmp(GeometryObjectType, tokenType, len)) {
|
||||
return GeometryObjectToken;
|
||||
} else if (CameraObjectType == tokenType) {
|
||||
} else if (0 == strncmp(CameraObjectType, tokenType, len)) {
|
||||
return CameraObjectToken;
|
||||
} else if (LightObjectType == tokenType) {
|
||||
} else if (0 == strncmp(LightObjectType, tokenType, len)) {
|
||||
return LightObjectToken;
|
||||
} else if (TransformType == tokenType) {
|
||||
} else if (0 == strncmp(TransformType, tokenType, len)) {
|
||||
return TransformToken;
|
||||
} else if (MeshType == tokenType) {
|
||||
} else if (0 == strncmp(MeshType, tokenType, len)) {
|
||||
return MeshToken;
|
||||
} else if (VertexArrayType == tokenType) {
|
||||
} else if (0 == strncmp(VertexArrayType, tokenType, len)) {
|
||||
return VertexArrayToken;
|
||||
} else if (IndexArrayType == tokenType) {
|
||||
} else if (0 == strncmp(IndexArrayType, tokenType, len)) {
|
||||
return IndexArrayToken;
|
||||
} else if (MaterialType == tokenType) {
|
||||
} else if (0 == strncmp(MaterialType, tokenType, len)) {
|
||||
return MaterialToken;
|
||||
} else if (ColorType == tokenType) {
|
||||
} else if (0 == strncmp(ColorType, tokenType, len)) {
|
||||
return ColorToken;
|
||||
} else if (ParamType == tokenType) {
|
||||
} else if (0 == strncmp(ParamType, tokenType, len)) {
|
||||
return ParamToken;
|
||||
} else if (TextureType == tokenType) {
|
||||
} else if (0 == strncmp(TextureType, tokenType, len)) {
|
||||
return TextureToken;
|
||||
} else if (AttenType == tokenType) {
|
||||
} else if (0 == strncmp(AttenType, tokenType, len)) {
|
||||
return AttenToken;
|
||||
}
|
||||
|
||||
|
@ -256,11 +257,6 @@ OpenGEXImporter::RefInfo::RefInfo(aiNode *node, Type type, std::vector<std::stri
|
|||
// empty
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------------------------
|
||||
OpenGEXImporter::RefInfo::~RefInfo() {
|
||||
// empty
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------------------------
|
||||
OpenGEXImporter::OpenGEXImporter() :
|
||||
m_root(nullptr),
|
||||
|
@ -285,10 +281,6 @@ OpenGEXImporter::OpenGEXImporter() :
|
|||
// empty
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------------------------
|
||||
OpenGEXImporter::~OpenGEXImporter() {
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------------------------
|
||||
bool OpenGEXImporter::CanRead(const std::string &file, IOSystem *pIOHandler, bool /*checkSig*/) const {
|
||||
static const char *tokens[] = { "Metric", "GeometryNode", "VertexArray (attrib", "IndexArray" };
|
||||
|
|
|
@ -79,12 +79,7 @@ struct MetricInfo {
float m_floatValue;
int m_intValue;

MetricInfo()
: m_stringValue( )
, m_floatValue( 0.0f )
, m_intValue( -1 ) {
// empty
}
MetricInfo(): m_stringValue( ), m_floatValue( 0.0f ), m_intValue( -1 ) {}
};

/** @brief This class is used to implement the OpenGEX importer

@ -97,7 +92,7 @@ public:
OpenGEXImporter();

/// The class destructor.
~OpenGEXImporter() override;
~OpenGEXImporter() override = default;

/// BaseImporter override.
bool CanRead( const std::string &file, IOSystem *pIOHandler, bool checkSig ) const override;

@ -170,7 +165,7 @@ private:
std::vector<std::string> m_Names;

RefInfo( aiNode *node, Type type, std::vector<std::string> &names );
~RefInfo();
~RefInfo() = default;

RefInfo( const RefInfo & ) = delete;
RefInfo &operator = ( const RefInfo & ) = delete;

@ -129,10 +129,20 @@ void Q3DImporter::InternReadFile(const std::string &pFile,
unsigned int numTextures = (unsigned int)stream.GetI4();

std::vector<Material> materials;
try {
materials.reserve(numMats);
} catch(const std::bad_alloc&) {
ASSIMP_LOG_ERROR("Invalid alloc for materials.");
throw DeadlyImportError("Invalid Quick3D-file, material allocation failed.");
}

std::vector<Mesh> meshes;
try {
meshes.reserve(numMeshes);
} catch(const std::bad_alloc&) {
ASSIMP_LOG_ERROR("Invalid alloc for meshes.");
throw DeadlyImportError("Invalid Quick3D-file, mesh allocation failed.");
}

// Allocate the scene root node
pScene->mRootNode = new aiNode();

@ -73,7 +73,7 @@ static const aiImporterDesc desc = {
// 1) 80 byte header
// 2) 4 byte face count
// 3) 50 bytes per face
static bool IsBinarySTL(const char *buffer, unsigned int fileSize) {
static bool IsBinarySTL(const char *buffer, size_t fileSize) {
if (fileSize < 84) {
return false;
}
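Widening `fileSize` to `size_t` keeps the header and facet arithmetic from truncating on very large inputs. As a hedged, stand-alone illustration of the layout spelled out in the comment above (80-byte header, 4-byte face count, 50 bytes per facet), and not the library's actual `IsBinarySTL`, a plausibility check could look like this:

```
#include <cstdint>
#include <cstring>

// Sketch: does fileSize plausibly match a binary STL with the advertised face count?
static bool LooksLikeBinaryStl(const char *buffer, size_t fileSize) {
    if (fileSize < 84) {                                       // 80-byte header + 4-byte face count
        return false;
    }
    uint32_t faceCount = 0;
    std::memcpy(&faceCount, buffer + 80, sizeof(faceCount));   // assumes a little-endian host
    // Each facet is 50 bytes: normal (12) + three vertices (36) + attribute byte count (2).
    const uint64_t expectedSize = 84ull + 50ull * static_cast<uint64_t>(faceCount);
    return expectedSize <= fileSize;
}
```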
|
@ -92,7 +92,7 @@ static const char UnicodeBoundary = 127;
|
|||
// An ascii STL buffer will begin with "solid NAME", where NAME is optional.
|
||||
// Note: The "solid NAME" check is necessary, but not sufficient, to determine
|
||||
// if the buffer is ASCII; a binary header could also begin with "solid NAME".
|
||||
static bool IsAsciiSTL(const char *buffer, unsigned int fileSize) {
|
||||
static bool IsAsciiSTL(const char *buffer, size_t fileSize) {
|
||||
if (IsBinarySTL(buffer, fileSize))
|
||||
return false;
|
||||
|
||||
|
@ -172,7 +172,7 @@ void STLImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
|
|||
throw DeadlyImportError("Failed to open STL file ", pFile, ".");
|
||||
}
|
||||
|
||||
mFileSize = (unsigned int)file->FileSize();
|
||||
mFileSize = file->FileSize();
|
||||
|
||||
// allocate storage and copy the contents of the file to a memory buffer
|
||||
// (terminate it with zero)
|
||||
|
@ -233,7 +233,7 @@ void STLImporter::LoadASCIIFile(aiNode *root) {
|
|||
|
||||
// try to guess how many vertices we could have
|
||||
// assume we'll need 160 bytes for each face
|
||||
size_t sizeEstimate = std::max(1u, mFileSize / 160u) * 3;
|
||||
size_t sizeEstimate = std::max(1ull, mFileSize / 160ull) * 3ull;
|
||||
positionBuffer.reserve(sizeEstimate);
|
||||
normalBuffer.reserve(sizeEstimate);
|
||||
|
||||
|
@ -284,8 +284,6 @@ void STLImporter::LoadASCIIFile(aiNode *root) {
|
|||
ASSIMP_LOG_WARN("STL: A new facet begins but the old is not yet complete");
|
||||
}
|
||||
faceVertexCounter = 0;
|
||||
normalBuffer.push_back(aiVector3D());
|
||||
aiVector3D *vn = &normalBuffer.back();
|
||||
|
||||
sz += 6;
|
||||
SkipSpaces(&sz);
|
||||
|
@ -295,15 +293,17 @@ void STLImporter::LoadASCIIFile(aiNode *root) {
|
|||
if (sz[6] == '\0') {
|
||||
throw DeadlyImportError("STL: unexpected EOF while parsing facet");
|
||||
}
|
||||
aiVector3D vn;
|
||||
sz += 7;
|
||||
SkipSpaces(&sz);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn->x);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn.x);
|
||||
SkipSpaces(&sz);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn->y);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn.y);
|
||||
SkipSpaces(&sz);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn->z);
|
||||
normalBuffer.push_back(*vn);
|
||||
normalBuffer.push_back(*vn);
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn.z);
|
||||
normalBuffer.emplace_back(vn);
|
||||
normalBuffer.emplace_back(vn);
|
||||
normalBuffer.emplace_back(vn);
|
||||
}
|
||||
} else if (!strncmp(sz, "vertex", 6) && ::IsSpaceOrNewLine(*(sz + 6))) { // vertex 1.50000 1.50000 0.00000
|
||||
if (faceVertexCounter >= 3) {
|
||||
|
@ -315,7 +315,7 @@ void STLImporter::LoadASCIIFile(aiNode *root) {
|
|||
}
|
||||
sz += 7;
|
||||
SkipSpaces(&sz);
|
||||
positionBuffer.push_back(aiVector3D());
|
||||
positionBuffer.emplace_back();
|
||||
aiVector3D *vn = &positionBuffer.back();
|
||||
sz = fast_atoreal_move<ai_real>(sz, (ai_real &)vn->x);
|
||||
SkipSpaces(&sz);
|
||||
|
@ -439,7 +439,7 @@ bool STLImporter::LoadBinaryFile() {
pMesh->mNumFaces = *((uint32_t *)sz);
sz += 4;

if (mFileSize < 84 + pMesh->mNumFaces * 50) {
if (mFileSize < 84ull + pMesh->mNumFaces * 50ull) {
throw DeadlyImportError("STL: file is too small to hold all facets");
}

@ -517,13 +517,13 @@ bool STLImporter::LoadBinaryFile() {
const ai_real invVal((ai_real)1.0 / (ai_real)31.0);
if (bIsMaterialise) // this is reversed
{
clr->r = (color & 0x31u) * invVal;
clr->g = ((color & (0x31u << 5)) >> 5u) * invVal;
clr->b = ((color & (0x31u << 10)) >> 10u) * invVal;
clr->r = (color & 0x1fu) * invVal;
clr->g = ((color & (0x1fu << 5)) >> 5u) * invVal;
clr->b = ((color & (0x1fu << 10)) >> 10u) * invVal;
} else {
clr->b = (color & 0x31u) * invVal;
clr->g = ((color & (0x31u << 5)) >> 5u) * invVal;
clr->r = ((color & (0x31u << 10)) >> 10u) * invVal;
clr->b = (color & 0x1fu) * invVal;
clr->g = ((color & (0x1fu << 5)) >> 5u) * invVal;
clr->r = ((color & (0x1fu << 10)) >> 10u) * invVal;
}
// assign the color to all vertices of the face
*(clr + 1) = *clr;

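The mask change above is the substantive fix: each channel of the 16-bit facet color occupies 5 bits, so the correct mask is `0x1f` (binary 11111) rather than `0x31`, and `invVal = 1/31` normalizes the resulting 0..31 range. A small hedged sketch (names invented, not assimp API) of the unpacking used in the non-Materialise branch:

```
#include <cstdint>

struct Rgb { float r, g, b; };

// Sketch: unpack a 15-bit STL facet color stored as 5 bits per channel, blue in the low bits.
static Rgb UnpackStlColor(uint16_t color) {
    const float invVal = 1.0f / 31.0f;          // each channel ranges 0..31
    Rgb out;
    out.b = (color & 0x1fu) * invVal;           // bits 0..4
    out.g = ((color >> 5u) & 0x1fu) * invVal;   // bits 5..9
    out.r = ((color >> 10u) & 0x1fu) * invVal;  // bits 10..14
    return out;
}
```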
@ -109,7 +109,7 @@ protected:
const char* mBuffer;

/** Size of the file, in bytes */
unsigned int mFileSize;
size_t mFileSize;

/** Output scene */
aiScene* mScene;

@ -2,8 +2,7 @@
|
|||
Open Asset Import Library (assimp)
|
||||
----------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2020, assimp team
|
||||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
All rights reserved.
|
||||
|
||||
|
@ -59,7 +58,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
# pragma warning(disable : 4127 4456 4245 4512 )
|
||||
#endif // _MSC_VER
|
||||
|
||||
//
|
||||
#if _MSC_VER > 1500 || (defined __GNUC___)
|
||||
# define ASSIMP_STEP_USE_UNORDERED_MULTIMAP
|
||||
#else
|
||||
|
@ -99,13 +97,9 @@ namespace EXPRESS {
|
|||
class DataType;
|
||||
class UNSET; /*: public DataType */
|
||||
class ISDERIVED; /*: public DataType */
|
||||
// class REAL; /*: public DataType */
|
||||
class ENUM; /*: public DataType */
|
||||
// class STRING; /*: public DataType */
|
||||
// class INTEGER; /*: public DataType */
|
||||
class ENTITY; /*: public DataType */
|
||||
class LIST; /*: public DataType */
|
||||
// class SELECT; /*: public DataType */
|
||||
|
||||
// a conversion schema is not exactly an EXPRESS schema, rather it
|
||||
// is a list of pointers to conversion functions to build up the
|
||||
|
@ -127,7 +121,8 @@ namespace STEP {
|
|||
|
||||
// -------------------------------------------------------------------------------
|
||||
/** Exception class used by the STEP loading & parsing code. It is typically
|
||||
* coupled with a line number. */
|
||||
* coupled with a line number.
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
struct SyntaxError : DeadlyImportError {
|
||||
enum : uint64_t {
|
||||
|
@ -140,7 +135,8 @@ struct SyntaxError : DeadlyImportError {
|
|||
// -------------------------------------------------------------------------------
|
||||
/** Exception class used by the STEP loading & parsing code when a type
|
||||
* error (i.e. an entity expects a string but receives a bool) occurs.
|
||||
* It is typically coupled with both an entity id and a line number.*/
|
||||
* It is typically coupled with both an entity id and a line number.
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
struct TypeError : DeadlyImportError {
|
||||
enum : uint64_t {
|
||||
|
@ -167,10 +163,8 @@ public:
|
|||
typedef std::shared_ptr<const DataType> Out;
|
||||
|
||||
public:
|
||||
virtual ~DataType() {
|
||||
}
|
||||
virtual ~DataType() = default;
|
||||
|
||||
public:
|
||||
template <typename T>
|
||||
const T &To() const {
|
||||
return dynamic_cast<const T &>(*this);
|
||||
|
@ -214,8 +208,6 @@ public:
|
|||
static std::shared_ptr<const EXPRESS::DataType> Parse(const char *&inout,
|
||||
uint64_t line = SyntaxError::LINE_NOT_SPECIFIED,
|
||||
const EXPRESS::ConversionSchema *schema = NULL);
|
||||
|
||||
public:
|
||||
};
|
||||
|
||||
typedef DataType SELECT;
|
||||
|
@ -238,7 +230,8 @@ private:
|
|||
};
|
||||
|
||||
// -------------------------------------------------------------------------------
|
||||
/** Shared implementation for some of the primitive data type, i.e. int, float */
|
||||
/** Shared implementation for some of the primitive data type, i.e. int, float
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
template <typename T>
|
||||
class PrimitiveDataType : public DataType {
|
||||
|
@ -247,7 +240,7 @@ public:
|
|||
// expose this data type to the user.
|
||||
typedef T Out;
|
||||
|
||||
PrimitiveDataType() {}
|
||||
PrimitiveDataType() = default;
|
||||
PrimitiveDataType(const T &val) :
|
||||
val(val) {}
|
||||
|
||||
|
@ -280,28 +273,18 @@ class ENUMERATION : public STRING {
|
|||
public:
|
||||
ENUMERATION(const std::string &val) :
|
||||
STRING(val) {}
|
||||
|
||||
private:
|
||||
};
|
||||
|
||||
typedef ENUMERATION BOOLEAN;
|
||||
|
||||
// -------------------------------------------------------------------------------
|
||||
/** This is just a reference to an entity/object somewhere else */
|
||||
/** This is just a reference to an entity/object somewhere else
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
class ENTITY : public PrimitiveDataType<uint64_t> {
|
||||
public:
|
||||
ENTITY(uint64_t val) :
|
||||
PrimitiveDataType<uint64_t>(val) {
|
||||
ai_assert(val != 0);
|
||||
}
|
||||
|
||||
ENTITY() :
|
||||
PrimitiveDataType<uint64_t>(TypeError::ENTITY_NOT_SPECIFIED) {
|
||||
// empty
|
||||
}
|
||||
|
||||
private:
|
||||
ENTITY(uint64_t val) : PrimitiveDataType<uint64_t>(val) {}
|
||||
ENTITY() : PrimitiveDataType<uint64_t>(TypeError::ENTITY_NOT_SPECIFIED) {}
|
||||
};
|
||||
|
||||
// -------------------------------------------------------------------------------
|
||||
|
@ -319,7 +302,8 @@ public:
|
|||
}
|
||||
|
||||
public:
|
||||
/** @see DaraType::Parse */
|
||||
/** @see DaraType::Parse
|
||||
*/
|
||||
static std::shared_ptr<const EXPRESS::LIST> Parse(const char *&inout,
|
||||
uint64_t line = SyntaxError::LINE_NOT_SPECIFIED,
|
||||
const EXPRESS::ConversionSchema *schema = NULL);
|
||||
|
@ -331,29 +315,20 @@ private:
|
|||
|
||||
class BINARY : public PrimitiveDataType<uint32_t> {
|
||||
public:
|
||||
BINARY(uint32_t val) :
|
||||
PrimitiveDataType<uint32_t>(val) {
|
||||
// empty
|
||||
}
|
||||
|
||||
BINARY() :
|
||||
PrimitiveDataType<uint32_t>(TypeError::ENTITY_NOT_SPECIFIED_32) {
|
||||
// empty
|
||||
}
|
||||
BINARY(uint32_t val) : PrimitiveDataType<uint32_t>(val) {}
|
||||
BINARY() : PrimitiveDataType<uint32_t>(TypeError::ENTITY_NOT_SPECIFIED_32) {}
|
||||
};
|
||||
|
||||
// -------------------------------------------------------------------------------
|
||||
/* Not exactly a full EXPRESS schema but rather a list of conversion functions
|
||||
* to extract valid C++ objects out of a STEP file. Those conversion functions
|
||||
* may, however, perform further schema validations. */
|
||||
* may, however, perform further schema validations.
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
class ConversionSchema {
|
||||
public:
|
||||
struct SchemaEntry {
|
||||
SchemaEntry(const char *name, ConvertObjectProc func) :
|
||||
mName(name), mFunc(func) {
|
||||
// empty
|
||||
}
|
||||
SchemaEntry(const char *name, ConvertObjectProc func) : mName(name), mFunc(func) {}
|
||||
|
||||
const char *mName;
|
||||
ConvertObjectProc mFunc;
|
||||
|
@ -366,8 +341,7 @@ public:
|
|||
*this = schemas;
|
||||
}
|
||||
|
||||
ConversionSchema() {
|
||||
}
|
||||
ConversionSchema() = default;
|
||||
|
||||
ConvertObjectProc GetConverterProc(const std::string &name) const {
|
||||
ConverterMap::const_iterator it = converters.find(name);
|
||||
|
@ -400,7 +374,8 @@ private:
|
|||
// ------------------------------------------------------------------------------
|
||||
/** Bundle all the relevant info from a STEP header, parts of which may later
|
||||
* be plainly dumped to the logfile, whereas others may help the caller pick an
|
||||
* appropriate loading strategy.*/
|
||||
* appropriate loading strategy.
|
||||
*/
|
||||
// ------------------------------------------------------------------------------
|
||||
struct HeaderInfo {
|
||||
std::string timestamp;
|
||||
|
@ -409,18 +384,14 @@ struct HeaderInfo {
|
|||
};
|
||||
|
||||
// ------------------------------------------------------------------------------
|
||||
/** Base class for all concrete object instances */
|
||||
/** Base class for all concrete object instances
|
||||
*/
|
||||
// ------------------------------------------------------------------------------
|
||||
class Object {
|
||||
public:
|
||||
Object(const char *classname = "unknown") :
|
||||
id(0), classname(classname) {
|
||||
// empty
|
||||
}
|
||||
Object(const char *classname = "unknown") : id(0), classname(classname) {}
|
||||
|
||||
virtual ~Object() {
|
||||
// empty
|
||||
}
|
||||
virtual ~Object() = default;
|
||||
|
||||
// utilities to simplify casting to concrete types
|
||||
template <typename T>
|
||||
|
@ -469,26 +440,15 @@ size_t GenericFill(const STEP::DB &db, const EXPRESS::LIST ¶ms, T *in);
|
|||
// ------------------------------------------------------------------------------
|
||||
template <typename TDerived, size_t arg_count>
|
||||
struct ObjectHelper : virtual Object {
|
||||
ObjectHelper() :
|
||||
aux_is_derived(0) {
|
||||
// empty
|
||||
}
|
||||
ObjectHelper() : aux_is_derived(0) {}
|
||||
|
||||
static Object *Construct(const STEP::DB &db, const EXPRESS::LIST ¶ms) {
|
||||
// make sure we don't leak if Fill() throws an exception
|
||||
std::unique_ptr<TDerived> impl(new TDerived());
|
||||
|
||||
// GenericFill<T> is undefined so we need to have a specialization
|
||||
const size_t num_args = GenericFill<TDerived>(db, params, &*impl);
|
||||
(void)num_args;
|
||||
static_cast<void>(GenericFill<TDerived>(db, params, &*impl));
|
||||
|
||||
// the following check is commented because it will always trigger if
|
||||
// parts of the entities are generated with dummy wrapper code.
|
||||
// This is currently done to reduce the size of the loader
|
||||
// code.
|
||||
//if (num_args != params.GetSize() && impl->GetClassName() != "NotImplemented") {
|
||||
// DefaultLogger::get()->debug("STEP: not all parameters consumed");
|
||||
//}
|
||||
return impl.release();
|
||||
}
|
||||
|
||||
|
@ -502,15 +462,9 @@ struct ObjectHelper : virtual Object {
|
|||
// ------------------------------------------------------------------------------
|
||||
template <typename T>
|
||||
struct Maybe {
|
||||
Maybe() :
|
||||
have() {
|
||||
// empty
|
||||
}
|
||||
Maybe() : have() {}
|
||||
|
||||
explicit Maybe(const T &ptr) :
|
||||
ptr(ptr), have(true) {
|
||||
// empty
|
||||
}
|
||||
explicit Maybe(const T &ptr) : ptr(ptr), have(true) {}
|
||||
|
||||
void flag_invalid() {
|
||||
have = false;
|
||||
|
@ -557,7 +511,8 @@ private:
|
|||
|
||||
// ------------------------------------------------------------------------------
|
||||
/** A LazyObject is created when needed. Before this happens, we just keep
|
||||
the text line that contains the object definition. */
|
||||
* the text line that contains the object definition.
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
class LazyObject {
|
||||
friend class DB;
|
||||
|
@ -649,10 +604,7 @@ inline bool operator==(const std::pair<uint64_t, std::shared_ptr<LazyObject>> &l
|
|||
template <typename T>
|
||||
struct Lazy {
|
||||
typedef Lazy Out;
|
||||
Lazy(const LazyObject *obj = nullptr) :
|
||||
obj(obj) {
|
||||
// empty
|
||||
}
|
||||
Lazy(const LazyObject *obj = nullptr) : obj(obj) {}
|
||||
|
||||
operator const T *() const {
|
||||
return obj->ToPtr<T>();
|
||||
|
@ -786,7 +738,8 @@ inline void GenericConvert(ListOf<T1, N1, N2> &a, const std::shared_ptr<const EX
|
|||
// ------------------------------------------------------------------------------
|
||||
/** Lightweight manager class that holds the map of all objects in a
|
||||
* STEP file. DB's are exclusively maintained by the functions in
|
||||
* STEPFileReader.h*/
|
||||
* STEPFileReader.h
|
||||
*/
|
||||
// -------------------------------------------------------------------------------
|
||||
class DB {
|
||||
friend DB *ReadFileHeader(std::shared_ptr<IOStream> stream);
|
||||
|
@ -873,7 +826,7 @@ public:
|
|||
if (it != objects_bytype.end() && (*it).second.size()) {
|
||||
return *(*it).second.begin();
|
||||
}
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// same, but raise an exception if the object doesn't exist and return a reference
|
||||
|
@ -965,7 +918,6 @@ private:
|
|||
#endif // _MSC_VER
|
||||
|
||||
} // namespace STEP
|
||||
|
||||
} // namespace Assimp
|
||||
|
||||
#endif // INCLUDED_AI_STEPFILE_H
|
||||
|
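To make the EXPRESS type machinery in this header easier to follow, here is a minimal, standalone sketch of the `To<T>()` down-cast pattern used by `EXPRESS::DataType` above. The `DataType`/`INTEGER`/`STRING` types below are simplified stand-ins for illustration, not the real STEPFile.h classes:

```cpp
#include <iostream>
#include <memory>
#include <string>
#include <typeinfo>

struct DataType {
    virtual ~DataType() = default;
    // down-cast helper mirroring EXPRESS::DataType::To<T>(); throws std::bad_cast on mismatch
    template <typename T>
    const T &To() const { return dynamic_cast<const T &>(*this); }
};

struct INTEGER : DataType { long long val = 42; };
struct STRING  : DataType { std::string val; };

int main() {
    std::shared_ptr<const DataType> token = std::make_shared<INTEGER>();
    try {
        std::cout << token->To<INTEGER>().val << "\n"; // matching type: fine
        token->To<STRING>();                            // mismatch: throws std::bad_cast
    } catch (const std::bad_cast &) {
        std::cout << "type mismatch\n";
    }
    return 0;
}
```

A failed `dynamic_cast` to a reference throws `std::bad_cast`; in the real loader such mismatches surface as the `TypeError` exception documented at the top of this header.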
|
|
@ -333,7 +333,7 @@ void X3DImporter::readHead(XmlNode &node) {
|
|||
}
|
||||
mScene->mMetaData = aiMetadata::Alloc(static_cast<unsigned int>(metaArray.size()));
|
||||
unsigned int i = 0;
|
||||
for (auto currentMeta : metaArray) {
|
||||
for (const auto& currentMeta : metaArray) {
|
||||
mScene->mMetaData->Set(i, currentMeta.name, aiString(currentMeta.value));
|
||||
++i;
|
||||
}
|
||||
|
|
|
@ -53,9 +53,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#endif
|
||||
|
||||
using namespace Assimp;
|
||||
using namespace glTFCommon;
|
||||
|
||||
namespace glTF {
|
||||
using namespace glTFCommon;
|
||||
|
||||
#if _MSC_VER
|
||||
#pragma warning(push)
|
||||
|
@ -891,12 +891,12 @@ inline void Mesh::Decode_O3DGC(const SCompression_Open3DGC &pCompression_Open3DG
|
|||
auto get_buf_offset = [](Ref<Accessor> &pAccessor) -> size_t { return pAccessor->byteOffset + pAccessor->bufferView->byteOffset; };
|
||||
|
||||
// Indices
|
||||
ifs.SetCoordIndex((IndicesType *const)(decoded_data + get_buf_offset(primitives[0].indices)));
|
||||
ifs.SetCoordIndex((IndicesType *)(decoded_data + get_buf_offset(primitives[0].indices)));
|
||||
// Coordinates
|
||||
ifs.SetCoord((o3dgc::Real *const)(decoded_data + get_buf_offset(primitives[0].attributes.position[0])));
|
||||
ifs.SetCoord((o3dgc::Real *)(decoded_data + get_buf_offset(primitives[0].attributes.position[0])));
|
||||
// Normals
|
||||
if (size_normal) {
|
||||
ifs.SetNormal((o3dgc::Real *const)(decoded_data + get_buf_offset(primitives[0].attributes.normal[0])));
|
||||
ifs.SetNormal((o3dgc::Real *)(decoded_data + get_buf_offset(primitives[0].attributes.normal[0])));
|
||||
}
|
||||
|
||||
for (size_t idx = 0, idx_end = size_floatattr.size(), idx_texcoord = 0; idx < idx_end; idx++) {
|
||||
|
@ -904,7 +904,7 @@ inline void Mesh::Decode_O3DGC(const SCompression_Open3DGC &pCompression_Open3DG
|
|||
case o3dgc::O3DGC_IFS_FLOAT_ATTRIBUTE_TYPE_TEXCOORD:
|
||||
if (idx_texcoord < primitives[0].attributes.texcoord.size()) {
|
||||
// See above about absent attributes.
|
||||
ifs.SetFloatAttribute(static_cast<unsigned long>(idx), (o3dgc::Real *const)(decoded_data + get_buf_offset(primitives[0].attributes.texcoord[idx])));
|
||||
ifs.SetFloatAttribute(static_cast<unsigned long>(idx), (o3dgc::Real *)(decoded_data + get_buf_offset(primitives[0].attributes.texcoord[idx])));
|
||||
idx_texcoord++;
|
||||
}
|
||||
|
||||
|
|
|
@ -374,6 +374,8 @@ struct CustomExtension {
|
|||
mValues(other.mValues) {
|
||||
// empty
|
||||
}
|
||||
|
||||
CustomExtension& operator=(const CustomExtension&) = default;
|
||||
};
|
||||
|
||||
//! Base class for all glTF top-level objects
|
||||
|
|
|
@ -82,9 +82,21 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
// clang-format on
|
||||
|
||||
using namespace Assimp;
|
||||
using namespace glTFCommon;
|
||||
|
||||
namespace glTF2 {
|
||||
using glTFCommon::FindStringInContext;
|
||||
using glTFCommon::FindNumberInContext;
|
||||
using glTFCommon::FindUIntInContext;
|
||||
using glTFCommon::FindArrayInContext;
|
||||
using glTFCommon::FindObjectInContext;
|
||||
using glTFCommon::FindExtensionInContext;
|
||||
using glTFCommon::MemberOrDefault;
|
||||
using glTFCommon::ReadMember;
|
||||
using glTFCommon::FindMember;
|
||||
using glTFCommon::FindObject;
|
||||
using glTFCommon::FindUInt;
|
||||
using glTFCommon::FindArray;
|
||||
using glTFCommon::FindArray;
|
||||
|
||||
namespace {
|
||||
|
||||
|
|
|
@ -683,7 +683,7 @@ bool glTF2Exporter::GetMatSheen(const aiMaterial &mat, glTF2::MaterialSheen &she
|
|||
}
|
||||
|
||||
// Default Sheen color factor {0,0,0} disables Sheen, so do not export
|
||||
if (sheen.sheenColorFactor == defaultSheenFactor) {
|
||||
if (sheen.sheenColorFactor[0] == defaultSheenFactor[0] && sheen.sheenColorFactor[1] == defaultSheenFactor[1] && sheen.sheenColorFactor[2] == defaultSheenFactor[2]) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -908,7 +908,7 @@ Ref<Node> FindSkeletonRootJoint(Ref<Skin> &skinRef) {
|
|||
do {
|
||||
startNodeRef = parentNodeRef;
|
||||
parentNodeRef = startNodeRef->parent;
|
||||
} while (!parentNodeRef->jointName.empty());
|
||||
} while (parentNodeRef && !parentNodeRef->jointName.empty());
|
||||
|
||||
return parentNodeRef;
|
||||
}
|
||||
|
|
|
@ -606,7 +606,10 @@ void glTF2Importer::ImportMeshes(glTF2::Asset &r) {
|
|||
}
|
||||
}
|
||||
if (needTangents) {
|
||||
if (target.tangent[0]->count != aim->mNumVertices) {
|
||||
if (!aiAnimMesh.HasNormals()) {
|
||||
// prevent nullptr access to aiAnimMesh.mNormals below when no normals are available
|
||||
ASSIMP_LOG_WARN("Bitangents of target ", i, " in mesh \"", mesh.name, "\" can't be computed, because mesh has no normals.");
|
||||
} else if (target.tangent[0]->count != aim->mNumVertices) {
|
||||
ASSIMP_LOG_WARN("Tangents of target ", i, " in mesh \"", mesh.name, "\" does not match the vertex count");
|
||||
} else {
|
||||
Tangent *tangent = nullptr;
|
||||
|
|
|
@ -150,7 +150,7 @@ SET( Core_SRCS
|
|||
Common/Assimp.cpp
|
||||
)
|
||||
|
||||
IF(MSVC)
|
||||
IF(MSVC OR (MINGW AND BUILD_SHARED_LIBS))
|
||||
list(APPEND Core_SRCS "res/assimp.rc")
|
||||
ENDIF()
|
||||
|
||||
|
@ -167,6 +167,7 @@ SET( Logging_SRCS
|
|||
SOURCE_GROUP(Logging FILES ${Logging_SRCS})
|
||||
|
||||
SET( Common_SRCS
|
||||
Common/StbCommon.h
|
||||
Common/Compression.cpp
|
||||
Common/Compression.h
|
||||
Common/BaseImporter.cpp
|
||||
|
@ -182,6 +183,7 @@ SET( Common_SRCS
|
|||
Common/DefaultIOSystem.cpp
|
||||
Common/ZipArchiveIOSystem.cpp
|
||||
Common/PolyTools.h
|
||||
Common/Maybe.h
|
||||
Common/Importer.cpp
|
||||
Common/IFF.h
|
||||
Common/SGSpatialSort.cpp
|
||||
|
@ -1053,7 +1055,9 @@ ENDIF()
|
|||
# Check dependencies for glTF importer with Open3DGC-compression.
|
||||
# The RT extension (librt) is used in "contrib/Open3DGC/o3dgcTimer.h" for collecting statistics. That file
|
||||
# has implementations for different platforms: WIN32, __MACH__, and others (the "else" block).
|
||||
IF (NOT WIN32)
|
||||
FIND_PACKAGE(RT QUIET)
|
||||
ENDIF ()
|
||||
IF (NOT ASSIMP_HUNTER_ENABLED AND (RT_FOUND OR WIN32))
|
||||
SET( ASSIMP_IMPORTER_GLTF_USE_OPEN3DGC 1 )
|
||||
ADD_DEFINITIONS( -DASSIMP_IMPORTER_GLTF_USE_OPEN3DGC=1 )
|
||||
|
@ -1096,8 +1100,6 @@ if(MSVC10)
|
|||
endif()
|
||||
endif()
|
||||
|
||||
ADD_DEFINITIONS( -DASSIMP_BUILD_DLL_EXPORT )
|
||||
|
||||
IF( MSVC OR "${CMAKE_CXX_SIMULATE_ID}" MATCHES "MSVC") # clang with MSVC ABI
|
||||
ADD_DEFINITIONS( -D_SCL_SECURE_NO_WARNINGS )
|
||||
ADD_DEFINITIONS( -D_CRT_SECURE_NO_WARNINGS )
|
||||
|
@ -1174,13 +1176,22 @@ ENDIF()
|
|||
ADD_LIBRARY( assimp ${assimp_src} )
|
||||
ADD_LIBRARY(assimp::assimp ALIAS assimp)
|
||||
|
||||
# Add or remove dllexport tags depending on the library type.
|
||||
IF (BUILD_SHARED_LIBS)
|
||||
TARGET_COMPILE_DEFINITIONS(assimp PRIVATE ASSIMP_BUILD_DLL_EXPORT)
|
||||
ELSE ()
|
||||
TARGET_COMPILE_DEFINITIONS(assimp PRIVATE OPENDDL_STATIC_LIBARY)
|
||||
ENDIF ()
|
||||
|
||||
TARGET_USE_COMMON_OUTPUT_DIRECTORY(assimp)
|
||||
|
||||
# enable warnings as errors ########################################
|
||||
IF (ASSIMP_WARNINGS_AS_ERRORS)
|
||||
MESSAGE(STATUS "Treating all warnings as errors (for assimp library only)")
|
||||
IF (MSVC)
|
||||
TARGET_COMPILE_OPTIONS(assimp PRIVATE /WX)
|
||||
TARGET_COMPILE_OPTIONS(assimp PRIVATE /W4 /WX)
|
||||
ELSE()
|
||||
TARGET_COMPILE_OPTIONS(assimp PRIVATE -Werror)
|
||||
TARGET_COMPILE_OPTIONS(assimp PRIVATE -Wall -Werror)
|
||||
ENDIF()
|
||||
ENDIF()
|
||||
|
||||
# adds C_FLAGS required to compile zip.c on old GCC 4.x compiler
|
||||
|
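For readers unfamiliar with the `ASSIMP_BUILD_DLL_EXPORT` definition toggled a few hunks above, this is the usual export-macro pattern such a compile definition drives. The macro and class names below are illustrative only, not assimp's actual public API:

```cpp
// Generic sketch of a dllexport/dllimport switch driven by the build system.
#if defined(_WIN32) && defined(MYLIB_BUILD_DLL_EXPORT)
#  define MYLIB_API __declspec(dllexport)   // building the DLL itself
#elif defined(_WIN32) && defined(MYLIB_DLL)
#  define MYLIB_API __declspec(dllimport)   // consuming the DLL
#else
#  define MYLIB_API                         // static library or non-Windows build
#endif

class MYLIB_API Exporter {
public:
    int Run();
};
```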
@ -1263,6 +1274,16 @@ if( MSVC )
|
|||
set(LIBRARY_SUFFIX "${ASSIMP_LIBRARY_SUFFIX}-${MSVC_PREFIX}-mt" CACHE STRING "the suffix for the assimp windows library")
|
||||
endif()
|
||||
|
||||
if (MINGW)
|
||||
set(LIBRARY_SUFFIX "-${ASSIMP_SOVERSION}" CACHE STRING "the suffix for the assimp MinGW shared library")
|
||||
SET_TARGET_PROPERTIES( assimp PROPERTIES
|
||||
ARCHIVE_OUTPUT_NAME assimp
|
||||
)
|
||||
if (NOT BUILD_SHARED_LIBS)
|
||||
TARGET_LINK_LIBRARIES ( assimp -static-libgcc -static-libstdc++ -Wl,-Bstatic -lstdc++ -lwinpthread ) # winpthread is for libminizip.
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "WindowsStore")
|
||||
target_compile_definitions(assimp PUBLIC WindowsStore)
|
||||
TARGET_LINK_LIBRARIES(assimp advapi32)
|
||||
|
@ -1274,12 +1295,6 @@ SET_TARGET_PROPERTIES( assimp PROPERTIES
|
|||
OUTPUT_NAME assimp${LIBRARY_SUFFIX}
|
||||
)
|
||||
|
||||
if (WIN32 AND CMAKE_COMPILER_IS_GNUCXX AND BUILD_SHARED_LIBS)
|
||||
set_target_properties(assimp PROPERTIES
|
||||
SUFFIX "-${ASSIMP_SOVERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if (APPLE)
|
||||
if (ASSIMP_BUILD_FRAMEWORK)
|
||||
SET_TARGET_PROPERTIES( assimp PROPERTIES
|
||||
|
|
|
@ -5,8 +5,6 @@ Open Asset Import Library (assimp)
|
|||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
|
@ -1282,18 +1280,13 @@ ASSIMP_API void aiQuaternionInterpolate(
|
|||
# define STBI_ONLY_PNG
|
||||
#endif
|
||||
|
||||
// Ensure all symbols are linked correctly
|
||||
#if ASSIMP_NEEDS_STB_IMAGE
|
||||
|
||||
# if _MSC_VER // "unreferenced function has been removed" (SSE2 detection routine in x64 builds)
|
||||
# pragma warning(push)
|
||||
# pragma warning(disable: 4505)
|
||||
// Share stb_image's PNG loader with other importers/exporters instead of bringing our own copy.
|
||||
# define STBI_ONLY_PNG
|
||||
# ifdef ASSIMP_USE_STB_IMAGE_STATIC
|
||||
# define STB_IMAGE_STATIC
|
||||
# endif
|
||||
|
||||
# define STB_IMAGE_IMPLEMENTATION
|
||||
# include "stb/stb_image.h"
|
||||
|
||||
# if _MSC_VER
|
||||
# pragma warning(pop)
|
||||
# endif
|
||||
|
||||
# include "Common/StbCommon.h"
|
||||
#endif
|
||||
|
|
|
@ -63,7 +63,7 @@ inline int select_fseek(FILE *file, int64_t offset, int origin) {
|
|||
|
||||
|
||||
|
||||
#if defined _WIN32 && (!defined __GNUC__ || __MSVCRT_VERSION__ >= 0x0601)
|
||||
#if defined _WIN32 && (!defined __GNUC__ || !defined __CLANG__ && __MSVCRT_VERSION__ >= 0x0601)
|
||||
template <>
|
||||
inline size_t select_ftell<8>(FILE *file) {
|
||||
return (size_t)::_ftelli64(file);
|
||||
|
@ -74,7 +74,7 @@ inline int select_fseek<8>(FILE *file, int64_t offset, int origin) {
|
|||
return ::_fseeki64(file, offset, origin);
|
||||
}
|
||||
|
||||
#endif // #if defined _WIN32 && (!defined __GNUC__ || __MSVCRT_VERSION__ >= 0x0601)
|
||||
#endif
|
||||
|
||||
} // namespace
|
||||
|
||||
|
@ -149,13 +149,20 @@ size_t DefaultIOStream::FileSize() const {
|
|||
//
|
||||
// See here for details:
|
||||
// https://www.securecoding.cert.org/confluence/display/seccode/FIO19-C.+Do+not+use+fseek()+and+ftell()+to+compute+the+size+of+a+regular+file
|
||||
#if defined _WIN32 && (!defined __GNUC__ || __MSVCRT_VERSION__ >= 0x0601)
|
||||
#if defined _WIN32 && (!defined __GNUC__ || !defined __CLANG__ && __MSVCRT_VERSION__ >= 0x0601)
|
||||
struct __stat64 fileStat;
|
||||
//using fileno + fstat avoids having to handle the filename
|
||||
int err = _fstat64(_fileno(mFile), &fileStat);
|
||||
if (0 != err)
|
||||
return 0;
|
||||
mCachedSize = (size_t)(fileStat.st_size);
|
||||
#elif defined _WIN32
|
||||
struct _stat32 fileStat;
|
||||
//using fileno + fstat avoids having to handle the filename
|
||||
int err = _fstat32(_fileno(mFile), &fileStat);
|
||||
if (0 != err)
|
||||
return 0;
|
||||
mCachedSize = (size_t)(fileStat.st_size);
|
||||
#elif defined __GNUC__ || defined __APPLE__ || defined __MACH__ || defined __FreeBSD__
|
||||
struct stat fileStat;
|
||||
int err = stat(mFilename.c_str(), &fileStat);
|
||||
|
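The `DefaultIOStream::FileSize()` hunk above computes the size via platform-specific `fstat` variants instead of fseek/ftell (per the CERT FIO19-C recommendation cited in the comment). A minimal POSIX-only sketch of the same idea, with an invented helper name:

```cpp
#include <cstdio>
#include <sys/stat.h>

// Hypothetical helper, POSIX flavour only; the real code selects
// _fstat64 / _fstat32 / stat depending on the platform.
// Returns 0 on failure, mirroring how DefaultIOStream::FileSize() reports errors.
static size_t FileSizeOf(FILE *f) {
    struct stat st;
    if (f == nullptr || fstat(fileno(f), &st) != 0) {
        return 0;
    }
    return static_cast<size_t>(st.st_size);
}
```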
|
|
@ -300,10 +300,10 @@ private:
|
|||
|
||||
const char separator = getOsSeparator();
|
||||
for (it = in.begin(); it != in.end(); ++it) {
|
||||
int remaining = std::distance(in.end(), it);
|
||||
const size_t remaining = std::distance(in.end(), it);
|
||||
// Exclude :// and \\, which remain untouched.
|
||||
// https://sourceforge.net/tracker/?func=detail&aid=3031725&group_id=226462&atid=1067632
|
||||
if (remaining >= 3 && !strncmp(&*it, "://", 3 )) {
|
||||
if (remaining >= 3u && !strncmp(&*it, "://", 3 )) {
|
||||
it += 3;
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
#pragma once
|
||||
#include <assimp/ai_assert.h>
|
||||
|
||||
template <typename T>
|
||||
struct Maybe {
|
||||
private:
|
||||
T _val;
|
||||
bool _valid;
|
||||
|
||||
public:
|
||||
Maybe() :
|
||||
_valid(false) {}
|
||||
|
||||
explicit Maybe(const T &val) :
|
||||
_val(val), _valid(true) {
|
||||
}
|
||||
|
||||
operator bool() const {
|
||||
return _valid;
|
||||
}
|
||||
|
||||
const T &Get() const {
|
||||
ai_assert(_valid);
|
||||
return _val;
|
||||
}
|
||||
|
||||
private:
|
||||
Maybe &operator&() = delete;
|
||||
};
|
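A hypothetical usage sketch of the new `Maybe<T>` helper added above. The calling code is invented for illustration and assumes it is compiled inside the assimp source tree, where the `Common/` include path and `ai_assert` resolve:

```cpp
#include <iostream>
#include "Common/Maybe.h" // assumed include path within the assimp source tree

// Returns an empty Maybe when no value could be produced.
static Maybe<int> parseCount(bool ok) {
    return ok ? Maybe<int>(42) : Maybe<int>();
}

int main() {
    Maybe<int> m = parseCount(true);
    if (m) {                          // operator bool() reports validity
        std::cout << m.Get() << "\n"; // Get() asserts validity before returning the value
    }
    return 0;
}
```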
|
@ -105,7 +105,9 @@ void ScenePreprocessor::ProcessMesh(aiMesh *mesh) {
|
|||
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
|
||||
if (!mesh->mTextureCoords[i]) {
|
||||
mesh->mNumUVComponents[i] = 0;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!mesh->mNumUVComponents[i]) {
|
||||
mesh->mNumUVComponents[i] = 2;
|
||||
}
|
||||
|
@ -116,8 +118,10 @@ void ScenePreprocessor::ProcessMesh(aiMesh *mesh) {
|
|||
// as if they were 2D channels .. just in case an application doesn't handle
|
||||
// this case
|
||||
if (2 == mesh->mNumUVComponents[i]) {
|
||||
size_t num = 0;
|
||||
for (; p != end; ++p) {
|
||||
p->z = 0.f;
|
||||
num++;
|
||||
}
|
||||
} else if (1 == mesh->mNumUVComponents[i]) {
|
||||
for (; p != end; ++p) {
|
||||
|
@ -136,7 +140,6 @@ void ScenePreprocessor::ProcessMesh(aiMesh *mesh) {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If the information which primitive types are there in the
|
||||
// mesh is currently not available, compute it.
|
||||
|
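The ScenePreprocessor hunks above normalize the UV channels of a mesh; as a reading aid, the same rule restated on a bare array (a simplified sketch, not the actual assimp function):

```cpp
#include <assimp/vector3.h>

// Channels with an unset component count default to 2D, and 1D/2D channels get
// their unused components zeroed so downstream code can treat them as 3D vectors.
static void NormalizeUVChannel(aiVector3D *uv, unsigned int numVertices,
                               unsigned int &numComponents) {
    if (uv == nullptr) {
        numComponents = 0;
        return;
    }
    if (numComponents == 0) {
        numComponents = 2; // assume plain 2D texture coordinates
    }
    for (unsigned int v = 0; v < numVertices; ++v) {
        if (numComponents <= 2) uv[v].z = 0.f; // 2D: drop z
        if (numComponents <= 1) uv[v].y = 0.f; // 1D: drop y as well
    }
}
```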
|
|
@ -4,7 +4,6 @@ Open Asset Import Library (assimp)
|
|||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
|
@ -97,13 +96,14 @@ void SkeletonMeshBuilder::CreateGeometry(const aiNode *pNode) {
|
|||
const aiMatrix4x4 &childTransform = pNode->mChildren[a]->mTransformation;
|
||||
aiVector3D childpos(childTransform.a4, childTransform.b4, childTransform.c4);
|
||||
ai_real distanceToChild = childpos.Length();
|
||||
if (distanceToChild < 0.0001)
|
||||
if (distanceToChild < ai_epsilon) {
|
||||
continue;
|
||||
}
|
||||
aiVector3D up = aiVector3D(childpos).Normalize();
|
||||
|
||||
aiVector3D orth(1.0, 0.0, 0.0);
|
||||
if (std::fabs(orth * up) > 0.99)
|
||||
if (std::fabs(orth * up) > 0.99) {
|
||||
orth.Set(0.0, 1.0, 0.0);
|
||||
}
|
||||
|
||||
aiVector3D front = (up ^ orth).Normalize();
|
||||
aiVector3D side = (front ^ up).Normalize();
|
||||
|
@ -183,8 +183,9 @@ void SkeletonMeshBuilder::CreateGeometry(const aiNode *pNode) {
|
|||
// add all the vertices to the bone's influences
|
||||
bone->mNumWeights = numVertices;
|
||||
bone->mWeights = new aiVertexWeight[numVertices];
|
||||
for (unsigned int a = 0; a < numVertices; a++)
|
||||
for (unsigned int a = 0; a < numVertices; ++a) {
|
||||
bone->mWeights[a] = aiVertexWeight(vertexStartIndex + a, 1.0);
|
||||
}
|
||||
|
||||
// HACK: (thom) transform all vertices to the bone's local space. Should be done before adding
|
||||
// them to the array, but I'm tired now and I'm annoyed.
|
||||
|
@ -194,9 +195,10 @@ void SkeletonMeshBuilder::CreateGeometry(const aiNode *pNode) {
|
|||
}
|
||||
|
||||
// and finally recurse into the children list
|
||||
for (unsigned int a = 0; a < pNode->mNumChildren; a++)
|
||||
for (unsigned int a = 0; a < pNode->mNumChildren; ++a) {
|
||||
CreateGeometry(pNode->mChildren[a]);
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Creates the mesh from the internally accumulated stuff and returns it.
|
||||
|
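The SkeletonMeshBuilder changes above touch the code that derives an orthonormal frame from a bone direction. A compact restatement of that construction, with the function and struct names invented for illustration:

```cpp
#include <assimp/vector3.h>
#include <cmath>

struct Frame { aiVector3D up, front, side; };

// Pick a helper axis, fall back to Y when the bone direction is almost
// parallel to X, then derive the remaining axes via cross products.
static Frame MakeFrame(const aiVector3D &boneDir) {
    Frame f;
    f.up = aiVector3D(boneDir).Normalize();
    aiVector3D orth(1.0, 0.0, 0.0);
    if (std::fabs(orth * f.up) > 0.99) {   // operator* is assimp's dot product
        orth.Set(0.0, 1.0, 0.0);           // nearly parallel: switch helper axis
    }
    f.front = (f.up ^ orth).Normalize();   // operator^ is assimp's cross product
    f.side  = (f.front ^ f.up).Normalize();
    return f;
}
```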
|
|
@ -0,0 +1,58 @@
|
|||
/*
|
||||
Open Asset Import Library (assimp)
|
||||
----------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
with or without modification, are permitted provided that the
|
||||
following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
* Neither the name of the assimp team, nor the names of its
|
||||
contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior
|
||||
written permission of the assimp team.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
----------------------------------------------------------------------
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#if _MSC_VER // "unreferenced function has been removed" (SSE2 detection routine in x64 builds)
|
||||
#pragma warning(push)
|
||||
#pragma warning(disable : 4505)
|
||||
#else
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wunused-function"
|
||||
#endif
|
||||
|
||||
#include "stb/stb_image.h"
|
||||
|
||||
#if _MSC_VER
|
||||
#pragma warning(pop)
|
||||
#else
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
|
|
@ -135,6 +135,9 @@ ASSIMP_API aiScene::aiScene() :
|
|||
mNumCameras(0),
|
||||
mCameras(nullptr),
|
||||
mMetaData(nullptr),
|
||||
mName(),
|
||||
mNumSkeletons(0),
|
||||
mSkeletons(nullptr),
|
||||
mPrivate(new Assimp::ScenePrivateData()) {
|
||||
// empty
|
||||
}
|
||||
|
@ -180,7 +183,8 @@ ASSIMP_API aiScene::~aiScene() {
|
|||
delete[] mCameras;
|
||||
|
||||
aiMetadata::Dealloc(mMetaData);
|
||||
mMetaData = nullptr;
|
||||
|
||||
delete[] mSkeletons;
|
||||
|
||||
delete static_cast<Assimp::ScenePrivateData *>(mPrivate);
|
||||
}
|
||||
|
|
|
@ -196,7 +196,9 @@ zlib_filefunc_def IOSystem2Unzip::get(IOSystem *pIOHandler) {
|
|||
zlib_filefunc_def mapping;
|
||||
|
||||
mapping.zopen_file = (open_file_func)open;
|
||||
#ifdef _UNZ_H
|
||||
mapping.zopendisk_file = (opendisk_file_func)opendisk;
|
||||
#endif
|
||||
mapping.zread_file = (read_file_func)read;
|
||||
mapping.zwrite_file = (write_file_func)write;
|
||||
mapping.ztell_file = (tell_file_func)tell;
|
||||
|
|
|
@ -46,7 +46,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <assimp/material.h>
|
||||
|
||||
// -------------------------------------------------------------------------------
|
||||
const char *TextureTypeToString(aiTextureType in) {
|
||||
const char *aiTextureTypeToString(aiTextureType in) {
|
||||
switch (in) {
|
||||
case aiTextureType_NONE:
|
||||
return "n/a";
|
||||
|
|
|
@ -83,7 +83,7 @@ Other:
|
|||
#include <sstream>
|
||||
#include <string>
|
||||
|
||||
#include "stb/stb_image.h"
|
||||
#include "Common/StbCommon.h"
|
||||
|
||||
using namespace Assimp;
|
||||
|
||||
|
@ -590,7 +590,7 @@ void PbrtExporter::WriteMaterial(int m) {
|
|||
for (int i = 1; i <= aiTextureType_UNKNOWN; i++) {
|
||||
int count = material->GetTextureCount(aiTextureType(i));
|
||||
if (count > 0)
|
||||
mOutput << TextureTypeToString(aiTextureType(i)) << ": " << count << " ";
|
||||
mOutput << aiTextureTypeToString(aiTextureType(i)) << ": " << count << " ";
|
||||
}
|
||||
mOutput << "\n";
|
||||
|
||||
|
|
|
@ -415,7 +415,7 @@ void ComputeUVMappingProcess::Execute( aiScene* pScene)
|
|||
if (!DefaultLogger::isNullLogger())
|
||||
{
|
||||
ai_snprintf(buffer, 1024, "Found non-UV mapped texture (%s,%u). Mapping type: %s",
|
||||
TextureTypeToString((aiTextureType)prop->mSemantic),prop->mIndex,
|
||||
aiTextureTypeToString((aiTextureType)prop->mSemantic),prop->mIndex,
|
||||
MappingTypeToString(mapping));
|
||||
|
||||
ASSIMP_LOG_INFO(buffer);
|
||||
|
|
|
@ -128,7 +128,7 @@ bool EmbedTexturesProcess::addTexture(aiScene *pScene, const std::string &path)
|
|||
|
||||
aiTexel* imageContent = new aiTexel[ 1ul + static_cast<unsigned long>( imageSize ) / sizeof(aiTexel)];
|
||||
pFile->Seek(0, aiOrigin_SET);
|
||||
pFile->Read(reinterpret_cast<char*>(imageContent), imageSize, 1);
|
||||
pFile->Read(reinterpret_cast<char*>(imageContent), static_cast<size_t>(imageSize), 1);
|
||||
mIOHandler->Close(pFile);
|
||||
|
||||
// Enlarging the textures table
|
||||
|
|
|
@ -5,8 +5,6 @@ Open Asset Import Library (assimp)
|
|||
|
||||
Copyright (c) 2006-2022, assimp team
|
||||
|
||||
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
|
@ -45,41 +43,38 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
* for all imported meshes
|
||||
*/
|
||||
|
||||
|
||||
#ifndef ASSIMP_BUILD_NO_JOINVERTICES_PROCESS
|
||||
|
||||
#include "JoinVerticesProcess.h"
|
||||
#include "ProcessHelper.h"
|
||||
#include <assimp/Vertex.h>
|
||||
#include <assimp/TinyFormatter.h>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <unordered_set>
|
||||
#include <unordered_map>
|
||||
|
||||
using namespace Assimp;
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Constructor to be privately used by Importer
|
||||
JoinVerticesProcess::JoinVerticesProcess()
|
||||
{
|
||||
JoinVerticesProcess::JoinVerticesProcess() {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Destructor, private as well
|
||||
JoinVerticesProcess::~JoinVerticesProcess()
|
||||
{
|
||||
JoinVerticesProcess::~JoinVerticesProcess() {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Returns whether the processing step is present in the given flag field.
|
||||
bool JoinVerticesProcess::IsActive( unsigned int pFlags) const
|
||||
{
|
||||
bool JoinVerticesProcess::IsActive( unsigned int pFlags) const {
|
||||
return (pFlags & aiProcess_JoinIdenticalVertices) != 0;
|
||||
}
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Executes the post processing step on the given imported data.
|
||||
void JoinVerticesProcess::Execute( aiScene* pScene)
|
||||
{
|
||||
void JoinVerticesProcess::Execute( aiScene* pScene) {
|
||||
ASSIMP_LOG_DEBUG("JoinVerticesProcess begin");
|
||||
|
||||
// get the total number of vertices BEFORE the step is executed
|
||||
|
@ -92,27 +87,29 @@ void JoinVerticesProcess::Execute( aiScene* pScene)
|
|||
|
||||
// execute the step
|
||||
int iNumVertices = 0;
|
||||
for( unsigned int a = 0; a < pScene->mNumMeshes; a++)
|
||||
for( unsigned int a = 0; a < pScene->mNumMeshes; a++) {
|
||||
iNumVertices += ProcessMesh( pScene->mMeshes[a],a);
|
||||
}
|
||||
|
||||
pScene->mFlags |= AI_SCENE_FLAGS_NON_VERBOSE_FORMAT;
|
||||
|
||||
// if logging is active, print detailed statistics
|
||||
if (!DefaultLogger::isNullLogger()) {
|
||||
if (iNumOldVertices == iNumVertices) {
|
||||
ASSIMP_LOG_DEBUG("JoinVerticesProcess finished ");
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
// Show statistics
|
||||
ASSIMP_LOG_INFO("JoinVerticesProcess finished | Verts in: ", iNumOldVertices,
|
||||
" out: ", iNumVertices, " | ~",
|
||||
((iNumOldVertices - iNumVertices) / (float)iNumOldVertices) * 100.f );
|
||||
}
|
||||
}
|
||||
|
||||
pScene->mFlags |= AI_SCENE_FLAGS_NON_VERBOSE_FORMAT;
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
bool areVerticesEqual(const Vertex &lhs, const Vertex &rhs, bool complex)
|
||||
{
|
||||
bool areVerticesEqual(const Vertex &lhs, const Vertex &rhs, bool complex) {
|
||||
// A little helper to find locally close vertices faster.
|
||||
// Try to reuse the lookup table from the last step.
|
||||
const static float epsilon = 1e-5f;
|
||||
|
@ -171,8 +168,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
// ----------------------------------------------------------------------------
|
||||
|
||||
// Position, if present (check made for aiAnimMesh)
|
||||
if (pMesh->mVertices)
|
||||
{
|
||||
if (pMesh->mVertices) {
|
||||
delete [] pMesh->mVertices;
|
||||
pMesh->mVertices = new aiVector3D[pMesh->mNumVertices];
|
||||
for (unsigned int a = 0; a < pMesh->mNumVertices; a++) {
|
||||
|
@ -181,8 +177,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
|
||||
// Normals, if present
|
||||
if (pMesh->mNormals)
|
||||
{
|
||||
if (pMesh->mNormals) {
|
||||
delete [] pMesh->mNormals;
|
||||
pMesh->mNormals = new aiVector3D[pMesh->mNumVertices];
|
||||
for( unsigned int a = 0; a < pMesh->mNumVertices; a++) {
|
||||
|
@ -190,8 +185,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
}
|
||||
// Tangents, if present
|
||||
if (pMesh->mTangents)
|
||||
{
|
||||
if (pMesh->mTangents) {
|
||||
delete [] pMesh->mTangents;
|
||||
pMesh->mTangents = new aiVector3D[pMesh->mNumVertices];
|
||||
for (unsigned int a = 0; a < pMesh->mNumVertices; a++) {
|
||||
|
@ -199,8 +193,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
}
|
||||
// Bitangents as well
|
||||
if (pMesh->mBitangents)
|
||||
{
|
||||
if (pMesh->mBitangents) {
|
||||
delete [] pMesh->mBitangents;
|
||||
pMesh->mBitangents = new aiVector3D[pMesh->mNumVertices];
|
||||
for (unsigned int a = 0; a < pMesh->mNumVertices; a++) {
|
||||
|
@ -208,8 +201,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
}
|
||||
// Vertex colors
|
||||
for (unsigned int a = 0; pMesh->HasVertexColors(a); a++)
|
||||
{
|
||||
for (unsigned int a = 0; pMesh->HasVertexColors(a); a++) {
|
||||
delete [] pMesh->mColors[a];
|
||||
pMesh->mColors[a] = new aiColor4D[pMesh->mNumVertices];
|
||||
for( unsigned int b = 0; b < pMesh->mNumVertices; b++) {
|
||||
|
@ -217,8 +209,7 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
}
|
||||
// Texture coords
|
||||
for (unsigned int a = 0; pMesh->HasTextureCoords(a); a++)
|
||||
{
|
||||
for (unsigned int a = 0; pMesh->HasTextureCoords(a); a++) {
|
||||
delete [] pMesh->mTextureCoords[a];
|
||||
pMesh->mTextureCoords[a] = new aiVector3D[pMesh->mNumVertices];
|
||||
for (unsigned int b = 0; b < pMesh->mNumVertices; b++) {
|
||||
|
@ -226,12 +217,40 @@ void updateXMeshVertices(XMesh *pMesh, std::vector<Vertex> &uniqueVertices) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// Unites identical vertices in the given mesh
|
||||
int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
|
||||
{
|
||||
// combine hashes
|
||||
inline void hash_combine(std::size_t &) {
|
||||
// empty
|
||||
}
|
||||
|
||||
template <typename T, typename... Rest>
|
||||
inline void hash_combine(std::size_t& seed, const T& v, Rest... rest) {
|
||||
std::hash<T> hasher;
|
||||
seed ^= hasher(v) + 0x9e3779b9 + (seed<<6) + (seed>>2);
|
||||
hash_combine(seed, rest...);
|
||||
}
|
||||
//template specialization for std::hash for Vertex
|
||||
template<>
|
||||
struct std::hash<Vertex> {
|
||||
std::size_t operator()(Vertex const& v) const noexcept {
|
||||
size_t seed = 0;
|
||||
hash_combine(seed, v.position.x ,v.position.y,v.position.z);
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
//template specialization for std::equal_to for Vertex
|
||||
template<>
|
||||
struct std::equal_to<Vertex> {
|
||||
bool operator()(const Vertex &lhs, const Vertex &rhs) const {
|
||||
return areVerticesEqual(lhs, rhs, false);
|
||||
}
|
||||
};
|
||||
// now start the JoinVerticesProcess
|
||||
int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex) {
|
||||
static_assert( AI_MAX_NUMBER_OF_COLOR_SETS == 8, "AI_MAX_NUMBER_OF_COLOR_SETS == 8");
|
||||
static_assert( AI_MAX_NUMBER_OF_TEXTURECOORDS == 8, "AI_MAX_NUMBER_OF_TEXTURECOORDS == 8");
|
||||
|
||||
|
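The heart of this JoinVerticesProcess rewrite is replacing the spatial-sort lookup with an `std::unordered_map` keyed on the vertex itself, using a boost-style `hash_combine`. The standalone sketch below restates the pattern on a plain 3-component point so the hashing and index-remapping logic can be read in isolation; all names are illustrative, and the real code hashes the full assimp `Vertex` and compares with `areVerticesEqual`, which uses an epsilon rather than exact float equality:

```cpp
#include <cstddef>
#include <functional>
#include <unordered_map>
#include <vector>

struct Point { float x, y, z; };

inline void hash_combine(std::size_t &) {} // recursion terminator

template <typename T, typename... Rest>
inline void hash_combine(std::size_t &seed, const T &v, Rest... rest) {
    seed ^= std::hash<T>{}(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
    hash_combine(seed, rest...);
}

struct PointHash {
    std::size_t operator()(const Point &p) const noexcept {
        std::size_t seed = 0;
        hash_combine(seed, p.x, p.y, p.z);
        return seed;
    }
};

struct PointEqual {
    bool operator()(const Point &a, const Point &b) const {
        return a.x == b.x && a.y == b.y && a.z == b.z; // sketch only: exact comparison
    }
};

// Maps each incoming point to the index of its first occurrence, mirroring how
// ProcessMesh builds replaceIndex / uniqueVertices.
std::vector<int> deduplicate(const std::vector<Point> &in) {
    std::unordered_map<Point, int, PointHash, PointEqual> seen;
    seen.reserve(in.size());
    std::vector<int> remap(in.size());
    int next = 0;
    for (std::size_t i = 0; i < in.size(); ++i) {
        auto it = seen.find(in[i]);
        if (it == seen.end()) {
            seen[in[i]] = next;
            remap[i] = next++;
        } else {
            remap[i] = it->second;
        }
    }
    return remap;
}
```

With a reasonable hash this makes the de-duplication run in expected linear time instead of depending on the previous position-based candidate search.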
@ -245,8 +264,7 @@ int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
|
|||
// multiple meshes)
|
||||
std::unordered_set<unsigned int> usedVertexIndices;
|
||||
usedVertexIndices.reserve(pMesh->mNumVertices);
|
||||
for( unsigned int a = 0; a < pMesh->mNumFaces; a++)
|
||||
{
|
||||
for( unsigned int a = 0; a < pMesh->mNumFaces; a++) {
|
||||
aiFace& face = pMesh->mFaces[a];
|
||||
for( unsigned int b = 0; b < face.mNumIndices; b++) {
|
||||
usedVertexIndices.insert(face.mIndices[b]);
|
||||
|
@ -292,7 +310,6 @@ int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
|
|||
|
||||
// Run an optimized code path if we don't have multiple UVs or vertex colors.
|
||||
// This should yield false in more than 99% of all imports ...
|
||||
const bool complex = ( pMesh->GetNumColorChannels() > 0 || pMesh->GetNumUVChannels() > 1);
|
||||
const bool hasAnimMeshes = pMesh->mNumAnimMeshes > 0;
|
||||
|
||||
// We'll never have more vertices afterwards.
|
||||
|
@ -303,72 +320,38 @@ int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
|
|||
uniqueAnimatedVertices[animMeshIndex].reserve(pMesh->mNumVertices);
|
||||
}
|
||||
}
|
||||
|
||||
// a map that maps a vertex to its new index
|
||||
std::unordered_map<Vertex,int> vertex2Index;
|
||||
// we can not end up with more vertices than we started with
|
||||
vertex2Index.reserve(pMesh->mNumVertices);
|
||||
// Now check each vertex if it brings something new to the table
|
||||
int newIndex = 0;
|
||||
for( unsigned int a = 0; a < pMesh->mNumVertices; a++) {
|
||||
// if the vertex is unused, do nothing
|
||||
if (usedVertexIndices.find(a) == usedVertexIndices.end()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// collect the vertex data
|
||||
Vertex v(pMesh,a);
|
||||
|
||||
// collect all vertices that are close enough to the given position
|
||||
vertexFinder->FindIdenticalPositions( v.position, verticesFound);
|
||||
unsigned int matchIndex = 0xffffffff;
|
||||
|
||||
// check all unique vertices close to the position if this vertex is already present among them
|
||||
for( unsigned int b = 0; b < verticesFound.size(); b++) {
|
||||
const unsigned int vidx = verticesFound[b];
|
||||
const unsigned int uidx = replaceIndex[ vidx];
|
||||
if( uidx & 0x80000000)
|
||||
continue;
|
||||
|
||||
const Vertex& uv = uniqueVertices[ uidx];
|
||||
|
||||
if (!areVerticesEqual(v, uv, complex)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (hasAnimMeshes) {
|
||||
// If the given vertex is animated, it has to be preserved 1:1 (base mesh and animated mesh require the same topology)
|
||||
// NOTE: not doing this totally breaks anim meshes, as they don't have their own faces (they use pMesh->mFaces)
|
||||
bool breaksAnimMesh = false;
|
||||
for (unsigned int animMeshIndex = 0; animMeshIndex < pMesh->mNumAnimMeshes; animMeshIndex++) {
|
||||
const Vertex& animatedUV = uniqueAnimatedVertices[animMeshIndex][ uidx];
|
||||
Vertex aniMeshVertex(pMesh->mAnimMeshes[animMeshIndex], a);
|
||||
if (!areVerticesEqual(aniMeshVertex, animatedUV, complex)) {
|
||||
breaksAnimMesh = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (breaksAnimMesh) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// we're still here -> this vertex perfectly matches our given vertex
|
||||
matchIndex = uidx;
|
||||
break;
|
||||
}
|
||||
|
||||
// found a replacement vertex among the uniques?
|
||||
if( matchIndex != 0xffffffff)
|
||||
{
|
||||
// store where to found the matching unique vertex
|
||||
replaceIndex[a] = matchIndex | 0x80000000;
|
||||
}
|
||||
else
|
||||
{
|
||||
// no unique vertex matches it up to now -> so add it
|
||||
replaceIndex[a] = (unsigned int)uniqueVertices.size();
|
||||
// is the vertex already in the map?
|
||||
auto it = vertex2Index.find(v);
|
||||
// if the vertex is not in the map, it is a new vertex, so add it
|
||||
if (it == vertex2Index.end()) {
|
||||
// this is a new vertex, so give it a new index
|
||||
vertex2Index[v] = newIndex;
|
||||
// keep track of its index and increment the counter by 1
|
||||
replaceIndex[a] = newIndex++;
|
||||
// add the vertex to the unique vertices
|
||||
uniqueVertices.push_back(v);
|
||||
if (hasAnimMeshes) {
|
||||
for (unsigned int animMeshIndex = 0; animMeshIndex < pMesh->mNumAnimMeshes; animMeshIndex++) {
|
||||
Vertex aniMeshVertex(pMesh->mAnimMeshes[animMeshIndex], a);
|
||||
uniqueAnimatedVertices[animMeshIndex].push_back(aniMeshVertex);
|
||||
uniqueAnimatedVertices[animMeshIndex].push_back(v);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// if the vertex is already there, just look up the replace index that corresponds to it
|
||||
replaceIndex[a] = it->second;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -394,8 +377,7 @@ int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
|
|||
}
|
||||
|
||||
// adjust the indices in all faces
|
||||
for( unsigned int a = 0; a < pMesh->mNumFaces; a++)
|
||||
{
|
||||
for( unsigned int a = 0; a < pMesh->mNumFaces; a++) {
|
||||
aiFace& face = pMesh->mFaces[a];
|
||||
for( unsigned int b = 0; b < face.mNumIndices; b++) {
|
||||
face.mIndices[b] = replaceIndex[face.mIndices[b]] & ~0x80000000;
|
||||
|
|
|
@ -521,7 +521,7 @@ void ValidateDSProcess::Validate(const aiAnimation *pAnimation) {
|
|||
// ------------------------------------------------------------------------------------------------
|
||||
void ValidateDSProcess::SearchForInvalidTextures(const aiMaterial *pMaterial,
|
||||
aiTextureType type) {
|
||||
const char *szType = TextureTypeToString(type);
|
||||
const char *szType = aiTextureTypeToString(type);
|
||||
|
||||
// ****************************************************************************
|
||||
// Search all keys of the material ...
|
||||
|
|
|
@ -1,80 +1,41 @@
|
|||
// Microsoft Visual C++ generated resource script.
|
||||
//
|
||||
#include "resource.h"
|
||||
#include "revision.h"
|
||||
#ifdef __GNUC__
|
||||
#include "winresrc.h"
|
||||
#else
|
||||
#include "winres.h"
|
||||
#endif
|
||||
|
||||
#define APSTUDIO_READONLY_SYMBOLS
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
// Generated from the TEXTINCLUDE 2 resource.
|
||||
//
|
||||
#define APSTUDIO_HIDDEN_SYMBOLS
|
||||
#include "windows.h"
|
||||
#undef APSTUDIO_HIDDEN_SYMBOLS
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
#undef APSTUDIO_READONLY_SYMBOLS
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
// Deutsch (Deutschland) resources
|
||||
|
||||
#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_DEU)
|
||||
#ifdef _WIN32
|
||||
LANGUAGE LANG_GERMAN, SUBLANG_GERMAN
|
||||
LANGUAGE LANG_NEUTRAL, SUBLANG_NEUTRAL
|
||||
#pragma code_page(1252)
|
||||
#endif //_WIN32
|
||||
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
// Version
|
||||
//
|
||||
|
||||
VS_VERSION_INFO VERSIONINFO
|
||||
FILEVERSION VER_FILEVERSION
|
||||
PRODUCTVERSION VER_FILEVERSION
|
||||
FILEFLAGSMASK 0x17L
|
||||
FILEFLAGSMASK VS_FF_DEBUG
|
||||
#ifdef _DEBUG
|
||||
FILEFLAGS 0x1L
|
||||
#else
|
||||
FILEFLAGS 0x0L
|
||||
FILEFLAGS VS_FF_DEBUG
|
||||
#endif
|
||||
FILEOS 0x4L
|
||||
FILETYPE 0x7L
|
||||
FILESUBTYPE 0x0L
|
||||
FILEOS VOS_NT
|
||||
FILETYPE VFT_DLL
|
||||
BEGIN
|
||||
BLOCK "StringFileInfo"
|
||||
BEGIN
|
||||
BLOCK "040704b0"
|
||||
BLOCK "000004B0"
|
||||
BEGIN
|
||||
VALUE "Comments", "Licensed under a 3-clause BSD license"
|
||||
VALUE "CompanyName", "assimp team"
|
||||
VALUE "CompanyName", "ASSIMP Team"
|
||||
VALUE "FileDescription", "Open Asset Import Library"
|
||||
VALUE "FileVersion", VER_FILEVERSION
|
||||
VALUE "FileVersion", VER_FILEVERSION_STR
|
||||
VALUE "InternalName", "assimp"
|
||||
VALUE "LegalCopyright", "Copyright (C) 2006-2020"
|
||||
VALUE "LegalCopyright", VER_COPYRIGHT_STR
|
||||
VALUE "OriginalFilename", VER_ORIGINAL_FILENAME_STR
|
||||
VALUE "ProductName", "Open Asset Import Library"
|
||||
VALUE "ProductVersion", VER_FILEVERSION_STR
|
||||
,0
|
||||
END
|
||||
END
|
||||
BLOCK "VarFileInfo"
|
||||
BEGIN
|
||||
VALUE "Translation", 0x407, 1200
|
||||
VALUE "Translation", 0x0, 65001
|
||||
END
|
||||
END
|
||||
|
||||
#endif // Deutsch (Deutschland) resources
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
#ifndef APSTUDIO_INVOKED
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
// Generated from the TEXTINCLUDE 3 resource.
|
||||
//
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////
|
||||
#endif // not APSTUDIO_INVOKED
|
||||
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
//{{NO_DEPENDENCIES}}
|
||||
// Microsoft Visual C++ generated include file.
|
||||
// Used by assimp.rc
|
||||
|
||||
// Next standard values for new objects
|
||||
//
|
||||
#ifdef APSTUDIO_INVOKED
|
||||
#ifndef APSTUDIO_READONLY_SYMBOLS
|
||||
#define _APS_NEXT_RESOURCE_VALUE 101
|
||||
#define _APS_NEXT_COMMAND_VALUE 40001
|
||||
#define _APS_NEXT_CONTROL_VALUE 1001
|
||||
#define _APS_NEXT_SYMED_VALUE 101
|
||||
#endif
|
||||
#endif
|
|
@ -86,6 +86,13 @@ class Int128
|
|||
|
||||
Int128(const Int128 &val): hi(val.hi), lo(val.lo){}
|
||||
|
||||
Int128 operator = (const Int128 &val)
|
||||
{
|
||||
lo = val.lo;
|
||||
hi = val.hi;
|
||||
return val;
|
||||
}
|
||||
|
||||
long64 operator = (const long64 &val)
|
||||
{
|
||||
lo = val;
|
||||
|
|
|
@ -10,7 +10,7 @@ endif()
|
|||
|
||||
set(draco_root "${CMAKE_CURRENT_SOURCE_DIR}")
|
||||
set(draco_src_root "${draco_root}/src/draco")
|
||||
set(draco_build "${CMAKE_BINARY_DIR}")
|
||||
set(draco_build "${Assimp_BINARY_DIR}")
|
||||
|
||||
if("${draco_root}" STREQUAL "${draco_build}")
|
||||
message(
|
||||
|
|
|
@ -1,14 +1,13 @@
|
|||
########################################################################
|
||||
# Note: CMake support is community-based. The maintainers do not use CMake
|
||||
# internally.
|
||||
#
|
||||
# CMake build script for Google Test.
|
||||
#
|
||||
# To run the tests for Google Test itself on Linux, use 'make test' or
|
||||
# ctest. You can select which tests to run using 'ctest -R regex'.
|
||||
# For more options, run 'ctest --help'.
|
||||
|
||||
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to
|
||||
# make it prominent in the GUI.
|
||||
option(BUILD_SHARED_LIBS "Build shared libraries (DLLs)." OFF)
|
||||
|
||||
# When other libraries are using a shared version of runtime libraries,
|
||||
# Google Test also has to use one.
|
||||
option(
|
||||
|
@ -44,13 +43,45 @@ endif()
|
|||
# as ${gtest_SOURCE_DIR} and to the root binary directory as
|
||||
# ${gtest_BINARY_DIR}.
|
||||
# Language "C" is required for find_package(Threads).
|
||||
|
||||
# Project version:
|
||||
|
||||
if (CMAKE_VERSION VERSION_LESS 3.0)
|
||||
project(gtest CXX C)
|
||||
cmake_minimum_required(VERSION 3.10)
|
||||
set(PROJECT_VERSION ${GOOGLETEST_VERSION})
|
||||
else()
|
||||
cmake_policy(SET CMP0048 NEW)
|
||||
project(gtest VERSION ${GOOGLETEST_VERSION} LANGUAGES CXX C)
|
||||
endif()
|
||||
cmake_minimum_required(VERSION 2.8.12)
|
||||
|
||||
if (POLICY CMP0063) # Visibility
|
||||
cmake_policy(SET CMP0063 NEW)
|
||||
endif (POLICY CMP0063)
|
||||
|
||||
if (COMMAND set_up_hermetic_build)
|
||||
set_up_hermetic_build()
|
||||
endif()
|
||||
|
||||
# These commands only run if this is the main project
|
||||
if(CMAKE_PROJECT_NAME STREQUAL "gtest" OR CMAKE_PROJECT_NAME STREQUAL "googletest-distribution")
|
||||
|
||||
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to
|
||||
# make it prominent in the GUI.
|
||||
option(BUILD_SHARED_LIBS "Build shared libraries (DLLs)." OFF)
|
||||
|
||||
else()
|
||||
|
||||
mark_as_advanced(
|
||||
gtest_force_shared_crt
|
||||
gtest_build_tests
|
||||
gtest_build_samples
|
||||
gtest_disable_pthreads
|
||||
gtest_hide_internal_symbols)
|
||||
|
||||
endif()
|
||||
|
||||
|
||||
if (gtest_hide_internal_symbols)
|
||||
set(CMAKE_CXX_VISIBILITY_PRESET hidden)
|
||||
set(CMAKE_VISIBILITY_INLINES_HIDDEN 1)
|
||||
|
@ -61,24 +92,34 @@ include(cmake/internal_utils.cmake)
|
|||
|
||||
config_compiler_and_linker() # Defined in internal_utils.cmake.
|
||||
|
||||
# Where Google Test's .h files can be found.
|
||||
include_directories(
|
||||
${gtest_SOURCE_DIR}/include
|
||||
${gtest_SOURCE_DIR})
|
||||
# Needed to set the namespace for both the export targets and the
|
||||
# alias libraries
|
||||
set(cmake_package_name GTest CACHE INTERNAL "")
|
||||
|
||||
# Where Google Test's libraries can be found.
|
||||
link_directories(${gtest_BINARY_DIR}/src)
|
||||
|
||||
# Summary of tuple support for Microsoft Visual Studio:
|
||||
# Compiler version(MS) version(cmake) Support
|
||||
# ---------- ----------- -------------- -----------------------------
|
||||
# <= VS 2010 <= 10 <= 1600 Use Google Tests's own tuple.
|
||||
# VS 2012 11 1700 std::tr1::tuple + _VARIADIC_MAX=10
|
||||
# VS 2013 12 1800 std::tr1::tuple
|
||||
if (MSVC AND MSVC_VERSION EQUAL 1700)
|
||||
add_definitions(/D _VARIADIC_MAX=10)
|
||||
# Create the CMake package file descriptors.
|
||||
if (INSTALL_GTEST)
|
||||
include(CMakePackageConfigHelpers)
|
||||
set(targets_export_name ${cmake_package_name}Targets CACHE INTERNAL "")
|
||||
set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated" CACHE INTERNAL "")
|
||||
set(cmake_files_install_dir "${CMAKE_INSTALL_LIBDIR}/cmake/${cmake_package_name}")
|
||||
set(version_file "${generated_dir}/${cmake_package_name}ConfigVersion.cmake")
|
||||
write_basic_package_version_file(${version_file} VERSION ${GOOGLETEST_VERSION} COMPATIBILITY AnyNewerVersion)
|
||||
install(EXPORT ${targets_export_name}
|
||||
NAMESPACE ${cmake_package_name}::
|
||||
DESTINATION ${cmake_files_install_dir})
|
||||
set(config_file "${generated_dir}/${cmake_package_name}Config.cmake")
|
||||
configure_package_config_file("${gtest_SOURCE_DIR}/cmake/Config.cmake.in"
|
||||
"${config_file}" INSTALL_DESTINATION ${cmake_files_install_dir})
|
||||
install(FILES ${version_file} ${config_file}
|
||||
DESTINATION ${cmake_files_install_dir})
|
||||
endif()
|
||||
|
||||
# Where Google Test's .h files can be found.
|
||||
set(gtest_build_include_dirs
|
||||
"${gtest_SOURCE_DIR}/include"
|
||||
"${gtest_SOURCE_DIR}")
|
||||
include_directories(${gtest_build_include_dirs})
|
||||
|
||||
########################################################################
|
||||
#
|
||||
# Defines the gtest & gtest_main libraries. User tests should link
|
||||
|
@ -88,24 +129,26 @@ endif()
|
|||
# are used for other targets, to ensure that gtest can be compiled by a user
|
||||
# aggressive about warnings.
|
||||
cxx_library(gtest "${cxx_strict}" src/gtest-all.cc)
|
||||
set_target_properties(gtest PROPERTIES VERSION ${GOOGLETEST_VERSION})
|
||||
cxx_library(gtest_main "${cxx_strict}" src/gtest_main.cc)
|
||||
target_link_libraries(gtest_main gtest)
|
||||
|
||||
set_target_properties(gtest_main PROPERTIES VERSION ${GOOGLETEST_VERSION})
|
||||
# If the CMake version supports it, attach header directory information
|
||||
# to the targets for when we are part of a parent build (ie being pulled
|
||||
# in via add_subdirectory() rather than being a standalone build).
|
||||
if (DEFINED CMAKE_VERSION AND NOT "${CMAKE_VERSION}" VERSION_LESS "2.8.11")
|
||||
target_include_directories(gtest INTERFACE "${gtest_SOURCE_DIR}/include")
|
||||
target_include_directories(gtest_main INTERFACE "${gtest_SOURCE_DIR}/include")
|
||||
target_include_directories(gtest SYSTEM INTERFACE
|
||||
"$<BUILD_INTERFACE:${gtest_build_include_dirs}>"
|
||||
"$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>")
|
||||
target_include_directories(gtest_main SYSTEM INTERFACE
|
||||
"$<BUILD_INTERFACE:${gtest_build_include_dirs}>"
|
||||
"$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>")
|
||||
endif()
|
||||
target_link_libraries(gtest_main PUBLIC gtest)
|
||||
|
||||
########################################################################
|
||||
#
|
||||
# Install rules
|
||||
install(TARGETS gtest gtest_main
|
||||
DESTINATION lib)
|
||||
install(DIRECTORY ${gtest_SOURCE_DIR}/include/gtest
|
||||
DESTINATION include)
|
||||
install_project(gtest gtest_main)
|
||||
|
||||
########################################################################
|
||||
#
|
||||
|
@ -147,33 +190,34 @@ if (gtest_build_tests)
|
|||
############################################################
|
||||
# C++ tests built with standard compiler flags.
|
||||
|
||||
cxx_test(gtest-death-test_test gtest_main)
|
||||
cxx_test(googletest-death-test-test gtest_main)
|
||||
cxx_test(gtest_environment_test gtest)
|
||||
cxx_test(gtest-filepath_test gtest_main)
|
||||
cxx_test(gtest-linked_ptr_test gtest_main)
|
||||
cxx_test(gtest-listener_test gtest_main)
|
||||
cxx_test(googletest-filepath-test gtest_main)
|
||||
cxx_test(googletest-listener-test gtest_main)
|
||||
cxx_test(gtest_main_unittest gtest_main)
|
||||
cxx_test(gtest-message_test gtest_main)
|
||||
cxx_test(googletest-message-test gtest_main)
cxx_test(gtest_no_test_unittest gtest)
cxx_test(gtest-options_test gtest_main)
cxx_test(gtest-param-test_test gtest
test/gtest-param-test2_test.cc)
cxx_test(gtest-port_test gtest_main)
cxx_test(googletest-options-test gtest_main)
cxx_test(googletest-param-test-test gtest
test/googletest-param-test2-test.cc)
cxx_test(googletest-port-test gtest_main)
cxx_test(gtest_pred_impl_unittest gtest_main)
cxx_test(gtest_premature_exit_test gtest
test/gtest_premature_exit_test.cc)
cxx_test(gtest-printers_test gtest_main)
cxx_test(googletest-printers-test gtest_main)
cxx_test(gtest_prod_test gtest_main
test/production.cc)
cxx_test(gtest_repeat_test gtest)
cxx_test(gtest_sole_header_test gtest_main)
cxx_test(gtest_stress_test gtest)
cxx_test(gtest-test-part_test gtest_main)
cxx_test(googletest-test-part-test gtest_main)
cxx_test(gtest_throw_on_failure_ex_test gtest)
cxx_test(gtest-typed-test_test gtest_main
test/gtest-typed-test2_test.cc)
cxx_test(gtest_unittest gtest_main)
cxx_test(gtest-unittest-api_test gtest)
cxx_test(gtest_skip_in_environment_setup_test gtest_main)
cxx_test(gtest_skip_test gtest_main)

############################################################
# C++ tests built with non-standard compiler flags.

@ -190,10 +234,10 @@ if (gtest_build_tests)
cxx_test_with_flags(gtest-death-test_ex_nocatch_test
"${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=0"
gtest test/gtest-death-test_ex_test.cc)
gtest test/googletest-death-test_ex_test.cc)
cxx_test_with_flags(gtest-death-test_ex_catch_test
"${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=1"
gtest test/gtest-death-test_ex_test.cc)
gtest test/googletest-death-test_ex_test.cc)

cxx_test_with_flags(gtest_no_rtti_unittest "${cxx_no_rtti}"
gtest_main_no_rtti test/gtest_unittest.cc)

@ -207,80 +251,73 @@ if (gtest_build_tests)
PROPERTIES
COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")

if (NOT MSVC OR MSVC_VERSION LESS 1600) # 1600 is Visual Studio 2010.
# Visual Studio 2010, 2012, and 2013 define symbols in std::tr1 that
# conflict with our own definitions. Therefore using our own tuple does not
# work on those compilers.
cxx_library(gtest_main_use_own_tuple "${cxx_use_own_tuple}"
src/gtest-all.cc src/gtest_main.cc)

cxx_test_with_flags(gtest-tuple_test "${cxx_use_own_tuple}"
gtest_main_use_own_tuple test/gtest-tuple_test.cc)

cxx_test_with_flags(gtest_use_own_tuple_test "${cxx_use_own_tuple}"
gtest_main_use_own_tuple
test/gtest-param-test_test.cc test/gtest-param-test2_test.cc)
endif()

############################################################
# Python tests.

cxx_executable(gtest_break_on_failure_unittest_ test gtest)
py_test(gtest_break_on_failure_unittest)
cxx_executable(googletest-break-on-failure-unittest_ test gtest)
py_test(googletest-break-on-failure-unittest)

py_test(gtest_skip_check_output_test)
py_test(gtest_skip_environment_check_output_test)

# Visual Studio .NET 2003 does not support STL with exceptions disabled.
if (NOT MSVC OR MSVC_VERSION GREATER 1310) # 1310 is Visual Studio .NET 2003
cxx_executable_with_flags(
gtest_catch_exceptions_no_ex_test_
googletest-catch-exceptions-no-ex-test_
"${cxx_no_exception}"
gtest_main_no_exception
test/gtest_catch_exceptions_test_.cc)
test/googletest-catch-exceptions-test_.cc)
endif()

cxx_executable_with_flags(
gtest_catch_exceptions_ex_test_
googletest-catch-exceptions-ex-test_
"${cxx_exception}"
gtest_main
test/gtest_catch_exceptions_test_.cc)
py_test(gtest_catch_exceptions_test)
test/googletest-catch-exceptions-test_.cc)
py_test(googletest-catch-exceptions-test)

cxx_executable(gtest_color_test_ test gtest)
py_test(gtest_color_test)
cxx_executable(googletest-color-test_ test gtest)
py_test(googletest-color-test)

cxx_executable(gtest_env_var_test_ test gtest)
py_test(gtest_env_var_test)
cxx_executable(googletest-env-var-test_ test gtest)
py_test(googletest-env-var-test)

cxx_executable(gtest_filter_unittest_ test gtest)
py_test(gtest_filter_unittest)
cxx_executable(googletest-filter-unittest_ test gtest)
py_test(googletest-filter-unittest)

cxx_executable(gtest_help_test_ test gtest_main)
py_test(gtest_help_test)

cxx_executable(gtest_list_tests_unittest_ test gtest)
py_test(gtest_list_tests_unittest)
cxx_executable(googletest-list-tests-unittest_ test gtest)
py_test(googletest-list-tests-unittest)

cxx_executable(gtest_output_test_ test gtest)
py_test(gtest_output_test)
cxx_executable(googletest-output-test_ test gtest)
py_test(googletest-output-test --no_stacktrace_support)

cxx_executable(gtest_shuffle_test_ test gtest)
py_test(gtest_shuffle_test)
cxx_executable(googletest-shuffle-test_ test gtest)
py_test(googletest-shuffle-test)

# MSVC 7.1 does not support STL with exceptions disabled.
if (NOT MSVC OR MSVC_VERSION GREATER 1310)
cxx_executable(gtest_throw_on_failure_test_ test gtest_no_exception)
set_target_properties(gtest_throw_on_failure_test_
cxx_executable(googletest-throw-on-failure-test_ test gtest_no_exception)
set_target_properties(googletest-throw-on-failure-test_
PROPERTIES
COMPILE_FLAGS "${cxx_no_exception}")
py_test(gtest_throw_on_failure_test)
py_test(googletest-throw-on-failure-test)
endif()

cxx_executable(gtest_uninitialized_test_ test gtest)
py_test(gtest_uninitialized_test)
cxx_executable(googletest-uninitialized-test_ test gtest)
py_test(googletest-uninitialized-test)

cxx_executable(gtest_list_output_unittest_ test gtest)
py_test(gtest_list_output_unittest)

cxx_executable(gtest_xml_outfile1_test_ test gtest_main)
cxx_executable(gtest_xml_outfile2_test_ test gtest_main)
py_test(gtest_xml_outfiles_test)
py_test(googletest-json-outfiles-test)

cxx_executable(gtest_xml_output_unittest_ test gtest)
py_test(gtest_xml_output_unittest)
py_test(gtest_xml_output_unittest --no_stacktrace_support)
py_test(googletest-json-output-unittest --no_stacktrace_support)
endif()

@ -1,181 +1,156 @@
### Generic Build Instructions

### Generic Build Instructions ###
#### Setup

#### Setup ####
To build GoogleTest and your tests that use it, you need to tell your build
system where to find its headers and source files. The exact way to do it
depends on which build system you use, and is usually straightforward.

To build Google Test and your tests that use it, you need to tell your
build system where to find its headers and source files. The exact
way to do it depends on which build system you use, and is usually
straightforward.
### Build with CMake

#### Build ####
GoogleTest comes with a CMake build script
([CMakeLists.txt](https://github.com/google/googletest/blob/master/CMakeLists.txt))
that can be used on a wide range of platforms ("C" stands for cross-platform.).
If you don't have CMake installed already, you can download it for free from
<http://www.cmake.org/>.

Suppose you put Google Test in directory `${GTEST_DIR}`. To build it,
create a library build target (or a project as called by Visual Studio
and Xcode) to compile
CMake works by generating native makefiles or build projects that can be used in
the compiler environment of your choice. You can either build GoogleTest as a
standalone project or it can be incorporated into an existing CMake build for
another project.

${GTEST_DIR}/src/gtest-all.cc
#### Standalone CMake Project

with `${GTEST_DIR}/include` in the system header search path and `${GTEST_DIR}`
in the normal header search path. Assuming a Linux-like system and gcc,
something like the following will do:
When building GoogleTest as a standalone project, the typical workflow starts
with

g++ -isystem ${GTEST_DIR}/include -I${GTEST_DIR} \
-pthread -c ${GTEST_DIR}/src/gtest-all.cc
ar -rv libgtest.a gtest-all.o
```
git clone https://github.com/google/googletest.git -b release-1.10.0
cd googletest # Main directory of the cloned repository.
mkdir build # Create a directory to hold the build output.
cd build
cmake .. # Generate native build scripts for GoogleTest.
```

(We need `-pthread` as Google Test uses threads.)
The above command also includes GoogleMock by default. And so, if you want to
build only GoogleTest, you should replace the last command with

Next, you should compile your test source file with
`${GTEST_DIR}/include` in the system header search path, and link it
with gtest and any other necessary libraries:
```
cmake .. -DBUILD_GMOCK=OFF
```

g++ -isystem ${GTEST_DIR}/include -pthread path/to/your_test.cc libgtest.a \
-o your_test
If you are on a \*nix system, you should now see a Makefile in the current
directory. Just type `make` to build GoogleTest. And then you can simply install
GoogleTest if you are a system administrator.

As an example, the make/ directory contains a Makefile that you can
use to build Google Test on systems where GNU make is available
(e.g. Linux, Mac OS X, and Cygwin). It doesn't try to build Google
Test's own tests. Instead, it just builds the Google Test library and
a sample test. You can use it as a starting point for your own build
script.

If the default settings are correct for your environment, the
following commands should succeed:

cd ${GTEST_DIR}/make
```
make
./sample1_unittest
sudo make install # Install in /usr/local/ by default
```

If you see errors, try to tweak the contents of `make/Makefile` to make
them go away. There are instructions in `make/Makefile` on how to do
it.

### Using CMake ###

Google Test comes with a CMake build script (
[CMakeLists.txt](CMakeLists.txt)) that can be used on a wide range of platforms ("C" stands for
cross-platform.). If you don't have CMake installed already, you can
download it for free from <http://www.cmake.org/>.

CMake works by generating native makefiles or build projects that can
be used in the compiler environment of your choice. The typical
workflow starts with:

mkdir mybuild # Create a directory to hold the build output.
cd mybuild
cmake ${GTEST_DIR} # Generate native build scripts.

If you want to build Google Test's samples, you should replace the
last command with

cmake -Dgtest_build_samples=ON ${GTEST_DIR}

If you are on a \*nix system, you should now see a Makefile in the
current directory. Just type 'make' to build gtest.

If you use Windows and have Visual Studio installed, a `gtest.sln` file
and several `.vcproj` files will be created. You can then build them
using Visual Studio.
If you use Windows and have Visual Studio installed, a `gtest.sln` file and
several `.vcproj` files will be created. You can then build them using Visual
Studio.

On Mac OS X with Xcode installed, a `.xcodeproj` file will be generated.

### Legacy Build Scripts ###
#### Incorporating Into An Existing CMake Project

Before settling on CMake, we have been providing hand-maintained build
projects/scripts for Visual Studio, Xcode, and Autotools. While we
continue to provide them for convenience, they are not actively
maintained any more. We highly recommend that you follow the
instructions in the previous two sections to integrate Google Test
with your existing build system.
If you want to use GoogleTest in a project which already uses CMake, the easiest
way is to get installed libraries and headers.

If you still need to use the legacy build scripts, here's how:
* Import GoogleTest by using `find_package` (or `pkg_check_modules`). For
example, if `find_package(GTest CONFIG REQUIRED)` succeeds, you can use the
libraries as `GTest::gtest`, `GTest::gmock`.
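
For illustration, a minimal sketch of a consumer that relies on this route; the project, target, and file names below are made up for the example and are not part of the original text:

```cmake
cmake_minimum_required(VERSION 3.13)
project(my_tests CXX)

# Locate an installed GoogleTest package and import its targets.
find_package(GTest CONFIG REQUIRED)

add_executable(my_tests my_tests.cc)
# GTest::gtest_main also pulls in GTest::gtest and provides main().
target_link_libraries(my_tests PRIVATE GTest::gtest_main)

enable_testing()
add_test(NAME my_tests COMMAND my_tests)
```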

The msvc\ folder contains two solutions with Visual C++ projects.
Open the `gtest.sln` or `gtest-md.sln` file using Visual Studio, and you
are ready to build Google Test the same way you build any Visual
Studio project. Files that have names ending with -md use DLL
versions of Microsoft runtime libraries (the /MD or the /MDd compiler
option). Files without that suffix use static versions of the runtime
libraries (the /MT or the /MTd option). Please note that one must use
the same option to compile both gtest and the test code. If you use
Visual Studio 2005 or above, we recommend the -md version as /MD is
the default for new projects in these versions of Visual Studio.
And a more robust and flexible approach is to build GoogleTest as part of that
project directly. This is done by making the GoogleTest source code available to
the main build and adding it using CMake's `add_subdirectory()` command. This
has the significant advantage that the same compiler and linker settings are
used between GoogleTest and the rest of your project, so issues associated with
using incompatible libraries (eg debug/release), etc. are avoided. This is
particularly useful on Windows. Making GoogleTest's source code available to the
main build can be done a few different ways:

On Mac OS X, open the `gtest.xcodeproj` in the `xcode/` folder using
Xcode. Build the "gtest" target. The universal binary framework will
end up in your selected build directory (selected in the Xcode
"Preferences..." -> "Building" pane and defaults to xcode/build).
Alternatively, at the command line, enter:
* Download the GoogleTest source code manually and place it at a known
location. This is the least flexible approach and can make it more difficult
to use with continuous integration systems, etc.
* Embed the GoogleTest source code as a direct copy in the main project's
source tree. This is often the simplest approach, but is also the hardest to
keep up to date. Some organizations may not permit this method.
* Add GoogleTest as a git submodule or equivalent. This may not always be
possible or appropriate. Git submodules, for example, have their own set of
advantages and drawbacks.
* Use CMake to download GoogleTest as part of the build's configure step. This
approach doesn't have the limitations of the other methods.

xcodebuild
The last of the above methods is implemented with a small piece of CMake code
that downloads and pulls the GoogleTest code into the main build.

This will build the "Release" configuration of gtest.framework in your
default build location. See the "xcodebuild" man page for more
information about building different configurations and building in
different locations.
Just add to your `CMakeLists.txt`:

If you wish to use the Google Test Xcode project with Xcode 4.x and
above, you need to either:
```cmake
include(FetchContent)
FetchContent_Declare(
googletest
# Specify the commit you depend on and update it regularly.
URL https://github.com/google/googletest/archive/609281088cfefc76f9d0ce82e1ff6c30cc3591e5.zip
)
# For Windows: Prevent overriding the parent project's compiler/linker settings
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
FetchContent_MakeAvailable(googletest)

* update the SDK configuration options in xcode/Config/General.xconfig.
Comment options `SDKROOT`, `MACOS_DEPLOYMENT_TARGET`, and `GCC_VERSION`. If
you choose this route you lose the ability to target earlier versions
of MacOS X.
* Install an SDK for an earlier version. This doesn't appear to be
supported by Apple, but has been reported to work
(http://stackoverflow.com/questions/5378518).
# Now simply link against gtest or gtest_main as needed. Eg
add_executable(example example.cpp)
target_link_libraries(example gtest_main)
add_test(NAME example_test COMMAND example)
```

### Tweaking Google Test ###
Note that this approach requires CMake 3.14 or later due to its use of the
`FetchContent_MakeAvailable()` command.
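
If you also want CTest to register each test case individually, CMake's bundled `GoogleTest` module provides `gtest_discover_tests()`, which can stand in for the plain `add_test()` call in the snippet above. A sketch, reusing the hypothetical `example` target from that snippet:

```cmake
# Assumes the FetchContent setup above; `example` is the executable target it defines.
include(GoogleTest)
enable_testing()
gtest_discover_tests(example)  # one CTest entry per TEST()/TEST_F() case
```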

Google Test can be used in diverse environments. The default
configuration may not work (or may not work well) out of the box in
some environments. However, you can easily tweak Google Test by
defining control macros on the compiler command line. Generally,
these macros are named like `GTEST_XYZ` and you define them to either 1
or 0 to enable or disable a certain feature.
##### Visual Studio Dynamic vs Static Runtimes

We list the most frequently used macros below. For a complete list,
see file [include/gtest/internal/gtest-port.h](include/gtest/internal/gtest-port.h).
By default, new Visual Studio projects link the C runtimes dynamically but
GoogleTest links them statically. This will generate an error that looks
something like the following: gtest.lib(gtest-all.obj) : error LNK2038: mismatch
detected for 'RuntimeLibrary': value 'MTd_StaticDebug' doesn't match value
'MDd_DynamicDebug' in main.obj

### Choosing a TR1 Tuple Library ###
GoogleTest already has a CMake option for this: `gtest_force_shared_crt`

Some Google Test features require the C++ Technical Report 1 (TR1)
tuple library, which is not yet available with all compilers. The
good news is that Google Test implements a subset of TR1 tuple that's
enough for its own need, and will automatically use this when the
compiler doesn't provide TR1 tuple.
Enabling this option will make gtest link the runtimes dynamically too, and
match the project in which it is included.
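
As a reference point, a minimal sketch of setting that option; it has to be set before GoogleTest is added to the build, and the `googletest` directory name is only an example:

```cmake
# Make gtest use the dynamic (/MD, /MDd) runtime, matching the default of new
# Visual Studio projects, instead of gtest's static-runtime default.
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
add_subdirectory(googletest)
```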

Usually you don't need to care about which tuple library Google Test
uses. However, if your project already uses TR1 tuple, you need to
tell Google Test to use the same TR1 tuple library the rest of your
project uses, or the two tuple implementations will clash. To do
that, add
#### C++ Standard Version

-DGTEST_USE_OWN_TR1_TUPLE=0
An environment that supports C++11 is required in order to successfully build
GoogleTest. One way to ensure this is to specify the standard in the top-level
project, for example by using the `set(CMAKE_CXX_STANDARD 11)` command. If this
is not feasible, for example in a C project using GoogleTest for validation,
then it can be specified by adding it to the options for cmake via the
`-DCMAKE_CXX_FLAGS` option.
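
A minimal sketch of the first option, pinning the standard in the top-level `CMakeLists.txt` before GoogleTest is added; the project name is illustrative:

```cmake
cmake_minimum_required(VERSION 3.14)
project(my_project C CXX)

# Compile GoogleTest (and everything else) in C++11 mode or newer.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

add_subdirectory(googletest)  # or the FetchContent setup shown earlier
```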

to the compiler flags while compiling Google Test and your tests. If
you want to force Google Test to use its own tuple library, just add
### Tweaking GoogleTest

-DGTEST_USE_OWN_TR1_TUPLE=1
GoogleTest can be used in diverse environments. The default configuration may
not work (or may not work well) out of the box in some environments. However,
you can easily tweak GoogleTest by defining control macros on the compiler
command line. Generally, these macros are named like `GTEST_XYZ` and you define
them to either 1 or 0 to enable or disable a certain feature.

to the compiler flags instead.
We list the most frequently used macros below. For a complete list, see file
[include/gtest/internal/gtest-port.h](https://github.com/google/googletest/blob/master/googletest/include/gtest/internal/gtest-port.h).

If you don't want Google Test to use tuple at all, add
### Multi-threaded Tests

-DGTEST_HAS_TR1_TUPLE=0
GoogleTest is thread-safe where the pthread library is available. After
`#include "gtest/gtest.h"`, you can check the
`GTEST_IS_THREADSAFE` macro to see whether this is the case (yes if the macro is
`#defined` to 1, no if it's undefined.).

and all features using tuple will be disabled.

### Multi-threaded Tests ###

Google Test is thread-safe where the pthread library is available.
After `#include "gtest/gtest.h"`, you can check the `GTEST_IS_THREADSAFE`
macro to see whether this is the case (yes if the macro is `#defined` to
1, no if it's undefined.).

If Google Test doesn't correctly detect whether pthread is available
in your environment, you can force it with
If GoogleTest doesn't correctly detect whether pthread is available in your
environment, you can force it with

-DGTEST_HAS_PTHREAD=1

@ -183,26 +158,24 @@ or

-DGTEST_HAS_PTHREAD=0

When Google Test uses pthread, you may need to add flags to your
compiler and/or linker to select the pthread library, or you'll get
link errors. If you use the CMake script or the deprecated Autotools
script, this is taken care of for you. If you use your own build
script, you'll need to read your compiler and linker's manual to
figure out what flags to add.
When GoogleTest uses pthread, you may need to add flags to your compiler and/or
linker to select the pthread library, or you'll get link errors. If you use the
CMake script, this is taken care of for you. If you use your own build script,
you'll need to read your compiler and linker's manual to figure out what flags
to add.
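
With a CMake-based consumer this usually comes down to linking the imported `Threads::Threads` target; a sketch, with the `my_tests` target name used only as an example:

```cmake
# Find the platform's thread library (pthread on most *nix systems) and let
# the imported target add the right compile and link flags.
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
target_link_libraries(my_tests PRIVATE Threads::Threads)
```

When GoogleTest is consumed through its exported CMake package this is normally handled for you; note the `find_dependency(Threads)` call in the package config template further down in this diff.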

### As a Shared Library (DLL) ###
### As a Shared Library (DLL)

Google Test is compact, so most users can build and link it as a
static library for the simplicity. You can choose to use Google Test
as a shared library (known as a DLL on Windows) if you prefer.
GoogleTest is compact, so most users can build and link it as a static library
for the simplicity. You can choose to use GoogleTest as a shared library (known
as a DLL on Windows) if you prefer.

To compile *gtest* as a shared library, add

-DGTEST_CREATE_SHARED_LIBRARY=1

to the compiler flags. You'll also need to tell the linker to produce
a shared library instead - consult your linker's manual for how to do
it.
to the compiler flags. You'll also need to tell the linker to produce a shared
library instead - consult your linker's manual for how to do it.

To compile your *tests* that use the gtest shared library, add

@ -210,31 +183,28 @@ To compile your *tests* that use the gtest shared library, add

to the compiler flags.
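
When GoogleTest is built with its own CMake script, the usual way to get shared libraries is the standard `BUILD_SHARED_LIBS` switch; as far as I can tell the script then sets the relevant `GTEST_*` shared-library definitions itself (compare the `GTEST_LINKED_AS_SHARED_LIBRARY` property in the CMakeLists.txt hunk earlier in this diff), but verify this against the gtest version you ship. A sketch:

```cmake
# Build gtest/gtest_main as shared libraries (DLLs on Windows).
set(BUILD_SHARED_LIBS ON CACHE BOOL "" FORCE)
add_subdirectory(googletest)
```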

Note: while the above steps aren't technically necessary today when
using some compilers (e.g. GCC), they may become necessary in the
future, if we decide to improve the speed of loading the library (see
<http://gcc.gnu.org/wiki/Visibility> for details). Therefore you are
recommended to always add the above flags when using Google Test as a
shared library. Otherwise a future release of Google Test may break
your build script.
Note: while the above steps aren't technically necessary today when using some
compilers (e.g. GCC), they may become necessary in the future, if we decide to
improve the speed of loading the library (see
<http://gcc.gnu.org/wiki/Visibility> for details). Therefore you are recommended
to always add the above flags when using GoogleTest as a shared library.
Otherwise a future release of GoogleTest may break your build script.

### Avoiding Macro Name Clashes ###
### Avoiding Macro Name Clashes

In C++, macros don't obey namespaces. Therefore two libraries that
both define a macro of the same name will clash if you `#include` both
definitions. In case a Google Test macro clashes with another
library, you can force Google Test to rename its macro to avoid the
conflict.
In C++, macros don't obey namespaces. Therefore two libraries that both define a
macro of the same name will clash if you `#include` both definitions. In case a
GoogleTest macro clashes with another library, you can force GoogleTest to
rename its macro to avoid the conflict.

Specifically, if both Google Test and some other code define macro
FOO, you can add
Specifically, if both GoogleTest and some other code define macro FOO, you can
add

-DGTEST_DONT_DEFINE_FOO=1

to the compiler flags to tell Google Test to change the macro's name
from `FOO` to `GTEST_FOO`. Currently `FOO` can be `FAIL`, `SUCCEED`,
or `TEST`. For example, with `-DGTEST_DONT_DEFINE_TEST=1`, you'll
need to write
to the compiler flags to tell GoogleTest to change the macro's name from `FOO`
to `GTEST_FOO`. Currently `FOO` can be `FAIL`, `SUCCEED`, or `TEST`. For
example, with `-DGTEST_DONT_DEFINE_TEST=1`, you'll need to write

GTEST_TEST(SomeTest, DoesThis) { ... }

@ -243,38 +213,3 @@ instead of
TEST(SomeTest, DoesThis) { ... }

in order to define a test.

## Developing Google Test ##

This section discusses how to make your own changes to Google Test.

### Testing Google Test Itself ###

To make sure your changes work as intended and don't break existing
functionality, you'll want to compile and run Google Test's own tests.
For that you can use CMake:

mkdir mybuild
cd mybuild
cmake -Dgtest_build_tests=ON ${GTEST_DIR}

Make sure you have Python installed, as some of Google Test's tests
are written in Python. If the cmake command complains about not being
able to find Python (`Could NOT find PythonInterp (missing:
PYTHON_EXECUTABLE)`), try telling it explicitly where your Python
executable can be found:

cmake -DPYTHON_EXECUTABLE=path/to/python -Dgtest_build_tests=ON ${GTEST_DIR}

Next, you can build Google Test and all of its own tests. On \*nix,
this is usually done by 'make'. To run the tests, do

make test

All tests should pass.

Normally you don't need to worry about regenerating the source files,
unless you need to modify them. In that case, you should modify the
corresponding .pump files instead and run the pump.py Python script to
regenerate them. You can find pump.py in the [scripts/](scripts/) directory.
Read the [Pump manual](docs/PumpManual.md) for how to use it.

@ -0,0 +1,9 @@
@PACKAGE_INIT@
include(CMakeFindDependencyMacro)
if (@GTEST_HAS_PTHREAD@)
set(THREADS_PREFER_PTHREAD_FLAG @THREADS_PREFER_PTHREAD_FLAG@)
find_dependency(Threads)
endif()

include("${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake")
check_required_components("@project_name@")

@ -0,0 +1,9 @@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@

Name: gtest
Description: GoogleTest (without main() function)
Version: @PROJECT_VERSION@
URL: https://github.com/google/googletest
Libs: -L${libdir} -lgtest @CMAKE_THREAD_LIBS_INIT@
Cflags: -I${includedir} @GTEST_HAS_PTHREAD_MACRO@
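
The `pkg_check_modules` route mentioned in the README section above consumes exactly this file once it is installed; a minimal sketch, with the consumer target name chosen only for illustration:

```cmake
# Turn the installed gtest.pc into an imported target. gtest.pc does not
# provide main(), so the test supplies its own or also links a gtest_main module.
find_package(PkgConfig REQUIRED)
pkg_check_modules(GTEST REQUIRED IMPORTED_TARGET gtest)

add_executable(my_tests my_tests.cc)
target_link_libraries(my_tests PRIVATE PkgConfig::GTEST)
```
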
Some files were not shown because too many files have changed in this diff.