add macOS implementation

Nick Fisher
2023-09-05 23:13:59 +08:00
parent c522cd6ee9
commit 84e3124e04
457 changed files with 169627 additions and 15 deletions

macos/src/AssetManager.cpp Normal file

@@ -0,0 +1,962 @@
#include "AssetManager.hpp"
#include <thread>
#include <filament/Engine.h>
#include <filament/TransformManager.h>
#include <filament/Texture.h>
#include <filament/RenderableManager.h>
#include <gltfio/Animator.h>
#include <gltfio/AssetLoader.h>
#include <gltfio/FilamentAsset.h>
#include <gltfio/ResourceLoader.h>
#include <gltfio/TextureProvider.h>
#include <gltfio/math.h>
#include <imageio/ImageDecoder.h>
#include "StreamBufferAdapter.hpp"
#include "SceneAsset.hpp"
#include "Log.hpp"
#include "material/StandardMaterialProvider.hpp"
#include "material/UnlitMaterialProvider.hpp"
#include "material/FileMaterialProvider.hpp"
#include "gltfio/materials/uberarchive.h"
extern "C" {
#include "material/image.h"
#include "material/unlit_opaque.h"
}
namespace polyvox {
using namespace std;
using namespace std::chrono;
using namespace image;
using namespace utils;
using namespace filament;
using namespace filament::gltfio;
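// AssetManager owns glTF/GLB loading for this backend: it wires up the texture decoders
// (KTX2 plus stb for PNG/JPEG), the gltfio ResourceLoader and the ubershader MaterialProvider,
// and tracks every loaded asset in _assets, keyed by the EntityId handles it hands back to callers.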
AssetManager::AssetManager(const ResourceLoaderWrapper* const resourceLoaderWrapper,
NameComponentManager *ncm,
Engine *engine,
Scene *scene)
: _resourceLoaderWrapper(resourceLoaderWrapper),
_ncm(ncm),
_engine(engine),
_scene(scene) {
_stbDecoder = createStbProvider(_engine);
_ktxDecoder = createKtx2Provider(_engine);
_gltfResourceLoader = new ResourceLoader({.engine = _engine,
.normalizeSkinningWeights = true });
auto uberdata = resourceLoaderWrapper->load("packages/polyvox_filament/assets/materials.uberz");
_ubershaderProvider = gltfio::createUbershaderProvider(
_engine, uberdata.data, uberdata.size);
// _ubershaderProvider = gltfio::createJitShaderProvider(_engine, true);
// _ubershaderProvider = new StandardMaterialProvider(_engine);
EntityManager &em = EntityManager::get();
//_unlitProvider = new UnlitMaterialProvider(_engine);
// auto rb = _resourceLoaderWrapper->load("file:///mnt/hdd_2tb/home/hydroxide/projects/polyvox/flutter/polyvox_filament/materials/toon.filamat");
// auto toonProvider = new FileMaterialProvider(_engine, rb.data, (size_t) rb.size);
_assetLoader = AssetLoader::create({_engine, _ubershaderProvider, _ncm, &em });
_gltfResourceLoader->addTextureProvider("image/ktx2", _ktxDecoder);
_gltfResourceLoader->addTextureProvider("image/png", _stbDecoder);
_gltfResourceLoader->addTextureProvider("image/jpeg", _stbDecoder);
}
AssetManager::~AssetManager() {
_gltfResourceLoader->asyncCancelLoad();
_ubershaderProvider->destroyMaterials();
//_unlitProvider->destroyMaterials();
destroyAll();
AssetLoader::destroy(&_assetLoader);
}
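// Loads a .gltf file plus its external resources (buffers/textures resolved relative to
// relativeResourcePath), adds the entities to the scene, and returns an opaque EntityId
// handle for the asset (0 on failure).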
EntityId AssetManager::loadGltf(const char *uri,
const char *relativeResourcePath) {
ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
// Parse the glTF file and create Filament entities.
FilamentAsset *asset =
_assetLoader->createAsset((uint8_t *)rbuf.data, rbuf.size);
if (!asset) {
Log("Unable to parse asset");
return 0;
}
const char *const *const resourceUris = asset->getResourceUris();
const size_t resourceUriCount = asset->getResourceUriCount();
for (size_t i = 0; i < resourceUriCount; i++) {
string uri =
string(relativeResourcePath) + string("/") + string(resourceUris[i]);
ResourceBuffer buf = _resourceLoaderWrapper->load(uri.c_str());
ResourceLoader::BufferDescriptor b(buf.data, buf.size);
_gltfResourceLoader->addResourceData(resourceUris[i], std::move(b));
_resourceLoaderWrapper->free(buf);
}
_gltfResourceLoader->loadResources(asset);
const utils::Entity *entities = asset->getEntities();
RenderableManager &rm = _engine->getRenderableManager();
for (int i = 0; i < asset->getEntityCount(); i++) {
auto inst = rm.getInstance(entities[i]);
rm.setCulling(inst, false);
}
FilamentInstance* inst = asset->getInstance();
inst->getAnimator()->updateBoneMatrices();
inst->recomputeBoundingBoxes();
_scene->addEntities(asset->getEntities(), asset->getEntityCount());
asset->releaseSourceData();
_resourceLoaderWrapper->free(rbuf); // source data has been released, so the raw glTF buffer can now be freed (mirrors loadGlb)
Log("Load complete for GLTF at URI %s", uri);
SceneAsset sceneAsset(asset);
utils::Entity e = EntityManager::get().create();
EntityId eid = Entity::smuggle(e);
_entityIdLookup.emplace(eid, _assets.size());
_assets.push_back(sceneAsset);
return eid;
}
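// Loads a self-contained .glb binary (the unlit flag is currently unused here), adds its
// renderables and lights to the scene, and returns an EntityId handle (0 on failure).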
EntityId AssetManager::loadGlb(const char *uri, bool unlit) {
Log("Loading GLB at URI %s", uri);
ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
FilamentAsset *asset = _assetLoader->createAsset(
(const uint8_t *)rbuf.data, rbuf.size);
if (!asset) {
Log("Unknown error loading GLB asset.");
return 0;
}
int entityCount = asset->getEntityCount();
_scene->addEntities(asset->getEntities(), entityCount);
_gltfResourceLoader->loadResources(asset);
const Entity *entities = asset->getEntities();
auto lights = asset->getLightEntities();
_scene->addEntities(lights, asset->getLightEntityCount());
FilamentInstance* inst = asset->getInstance();
inst->getAnimator()->updateBoneMatrices();
inst->recomputeBoundingBoxes();
asset->releaseSourceData();
_resourceLoaderWrapper->free(rbuf);
SceneAsset sceneAsset(asset);
utils::Entity e = EntityManager::get().create();
EntityId eid = Entity::smuggle(e);
_entityIdLookup.emplace(eid, _assets.size());
_assets.push_back(sceneAsset);
return eid;
}
bool AssetManager::hide(EntityId entityId, const char* meshName) {
auto asset = getAssetByEntityId(entityId);
if(!asset) {
return false;
}
auto entity = findEntityByName(asset, meshName);
if(entity.isNull()) {
Log("Mesh %s could not be found", meshName);
return false;
}
_scene->remove(entity);
return true;
}
bool AssetManager::reveal(EntityId entityId, const char* meshName) {
auto asset = getAssetByEntityId(entityId);
if(!asset) {
Log("No asset found under entity ID");
return false;
}
auto entity = findEntityByName(asset, meshName);
RenderableManager &rm = _engine->getRenderableManager();
if(entity.isNull()) {
Log("Mesh %s could not be found", meshName);
return false;
}
_scene->addEntity(entity);
return true;
}
void AssetManager::destroyAll() {
for (auto& asset : _assets) {
_scene->removeEntities(asset.mAsset->getEntities(),
asset.mAsset->getEntityCount());
_scene->removeEntities(asset.mAsset->getLightEntities(),
asset.mAsset->getLightEntityCount());
_gltfResourceLoader->evictResourceData();
_assetLoader->destroyAsset(asset.mAsset);
}
_assets.clear();
}
FilamentAsset* AssetManager::getAssetByEntityId(EntityId entityId) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
return nullptr;
}
return _assets[pos->second].mAsset;
}
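// Called once per frame (from FilamentViewer::render): advances every active animation on every
// asset. GLTF animations are sampled by elapsed time (with an optional cross-fade from the
// previously active animation); morph and bone animations are sampled frame-by-frame from their
// respective buffers.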
void AssetManager::updateAnimations() {
auto now = high_resolution_clock::now();
RenderableManager &rm = _engine->getRenderableManager();
for (auto& asset : _assets) {
vector<AnimationStatus> completed;
for(auto& anim : asset.mAnimations) {
auto elapsed = float(std::chrono::duration_cast<std::chrono::milliseconds>(now - anim.mStart).count()) / 1000.0f;
if(anim.mLoop || elapsed < anim.mDuration) {
switch(anim.type) {
case AnimationType::GLTF: {
asset.mAnimator->applyAnimation(anim.gltfIndex, elapsed);
if(asset.fadeGltfAnimationIndex != -1 && elapsed < asset.fadeDuration) {
// cross-fade
auto fadeFromTime = asset.fadeOutAnimationStart + elapsed;
auto alpha = elapsed / asset.fadeDuration;
asset.mAnimator->applyCrossFade(asset.fadeGltfAnimationIndex, fadeFromTime, alpha);
}
break;
}
case AnimationType::MORPH: {
int lengthInFrames = static_cast<int>(
anim.mDuration * 1000.0f /
asset.mMorphAnimationBuffer.mFrameLengthInMs
);
int frameNumber = static_cast<int>(elapsed * 1000.0f / asset.mMorphAnimationBuffer.mFrameLengthInMs) % lengthInFrames;
// offset from the end if reverse
if(anim.mReverse) {
frameNumber = lengthInFrames - 1 - frameNumber; // stay within [0, lengthInFrames - 1]
}
auto baseOffset = frameNumber * asset.mMorphAnimationBuffer.mMorphIndices.size();
for(int i = 0; i < asset.mMorphAnimationBuffer.mMorphIndices.size(); i++) {
auto morphIndex = asset.mMorphAnimationBuffer.mMorphIndices[i];
// set the weights appropriately
rm.setMorphWeights(
rm.getInstance(asset.mMorphAnimationBuffer.mMeshTarget),
asset.mMorphAnimationBuffer.mFrameData.data() + baseOffset + i,
1,
morphIndex
);
}
break;
}
case AnimationType::BONE: {
int lengthInFrames = static_cast<int>(
anim.mDuration * 1000.0f /
asset.mBoneAnimationBuffer.mFrameLengthInMs
);
int frameNumber = static_cast<int>(elapsed * 1000.0f / asset.mBoneAnimationBuffer.mFrameLengthInMs) % lengthInFrames;
// offset from the end if reverse
if(anim.mReverse) {
frameNumber = lengthInFrames - 1 - frameNumber; // stay within [0, lengthInFrames - 1]
}
setBoneTransform(
asset,
frameNumber
);
break;
}
}
if(anim.mLoop && elapsed >= anim.mDuration) {
anim.mStart = now;
}
// animation has completed
} else {
completed.push_back(anim);
asset.fadeGltfAnimationIndex = -1;
}
asset.mAnimator->updateBoneMatrices();
}
}
}
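// Applies the rotation stored for each tracked bone at the given frame. Frame data is packed as
// 7 floats per bone (translation xyz then quaternion wxyz); only the quaternion is applied here,
// on top of the bone's base transform.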
void AssetManager::setBoneTransform(SceneAsset& asset, int frameNumber) {
RenderableManager& rm = _engine->getRenderableManager();
const auto& filamentInstance = asset.mAsset->getInstance();
TransformManager &transformManager = _engine->getTransformManager();
int skinIndex = 0;
for(int i = 0; i < asset.mBoneAnimationBuffer.mBones.size(); i++) {
auto mBoneIndex = asset.mBoneAnimationBuffer.mBones[i];
auto frameDataOffset = (frameNumber * asset.mBoneAnimationBuffer.mBones.size() * 7) + (i * 7);
utils::Entity joint = filamentInstance->getJointsAt(skinIndex)[mBoneIndex];
if(joint.isNull()) {
Log("ERROR : joint not found");
continue;
}
vector<float>& fd = asset.mBoneAnimationBuffer.mFrameData;
math::mat4f localTransform(math::quatf {
fd[frameDataOffset+3],
fd[frameDataOffset+4],
fd[frameDataOffset+5],
fd[frameDataOffset+6],
});
auto jointInstance = transformManager.getInstance(joint);
auto xform = asset.mBoneAnimationBuffer.mBaseTransforms[i];
transformManager.setTransform(jointInstance, xform * localTransform);
}
}
void AssetManager::remove(EntityId entityId) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("Couldn't find asset under specified entity id.");
return;
}
SceneAsset& sceneAsset = _assets[pos->second];
_scene->removeEntities(sceneAsset.mAsset->getEntities(),
sceneAsset.mAsset->getEntityCount());
_scene->removeEntities(sceneAsset.mAsset->getLightEntities(),
sceneAsset.mAsset->getLightEntityCount());
_assetLoader->destroyAsset(sceneAsset.mAsset);
if(sceneAsset.mTexture) {
_engine->destroy(sceneAsset.mTexture);
}
EntityManager& em = EntityManager::get();
em.destroy(Entity::import(entityId));
sceneAsset.mAsset = nullptr; // still need to remove sceneAsset somewhere...
}
void AssetManager::setMorphTargetWeights(EntityId entityId, const char* const entityName, const float* const weights, const int count) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
auto entity = findEntityByName(asset, entityName);
if(!entity) {
Log("Warning: failed to find entity %s", entityName);
return;
}
RenderableManager &rm = _engine->getRenderableManager();
auto renderableInstance = rm.getInstance(entity);
if(!renderableInstance.isValid()) {
Log("Warning: failed to find renderable instance for entity %s", entityName);
return;
}
rm.setMorphWeights(
renderableInstance,
weights,
count
);
}
utils::Entity AssetManager::findEntityByName(SceneAsset asset, const char* entityName) {
utils::Entity entity; // default (null) Entity is returned if no match is found
for (size_t i = 0, c = asset.mAsset->getEntityCount(); i != c; ++i) {
auto candidate = asset.mAsset->getEntities()[i];
auto nameInstance = _ncm->getInstance(candidate);
if(!nameInstance.isValid()) {
continue;
}
auto name = _ncm->getName(nameInstance);
if(!name) {
continue;
}
if(strcmp(entityName,name)==0) {
return candidate;
}
}
return entity;
}
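// Registers a morph-target animation for the named mesh. morphData is laid out as
// numFrames * numMorphTargets weights sampled every frameLengthInMs; morphIndices maps each
// column to a morph target index on the renderable.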
bool AssetManager::setMorphAnimationBuffer(
EntityId entityId,
const char* entityName,
const float* const morphData,
const int* const morphIndices,
int numMorphTargets,
int numFrames,
float frameLengthInMs) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return false;
}
auto& asset = _assets[pos->second];
auto entity = findEntityByName(asset, entityName);
if(!entity) {
Log("Warning: failed to find entity %s", entityName);
return false;
}
asset.mMorphAnimationBuffer.mMeshTarget = entity;
asset.mMorphAnimationBuffer.mFrameData.clear();
asset.mMorphAnimationBuffer.mFrameData.insert(
asset.mMorphAnimationBuffer.mFrameData.begin(),
morphData,
morphData + (numFrames * numMorphTargets)
);
asset.mMorphAnimationBuffer.mFrameLengthInMs = frameLengthInMs;
asset.mMorphAnimationBuffer.mMorphIndices.resize(numMorphTargets);
for(int i =0; i< numMorphTargets; i++) {
asset.mMorphAnimationBuffer.mMorphIndices[i] = morphIndices[i];
}
AnimationStatus animation;
animation.mDuration = (frameLengthInMs * numFrames) / 1000.0f;
animation.mStart = high_resolution_clock::now();
animation.type = AnimationType::MORPH;
asset.mAnimations.push_back(animation);
return true;
}
bool AssetManager::setMaterialColor(EntityId entityId, const char* meshName, int materialIndex, const float r, const float g, const float b, const float a) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return false;
}
auto& asset = _assets[pos->second];
auto entity = findEntityByName(asset, meshName);
RenderableManager& rm = _engine->getRenderableManager();
auto renderable = rm.getInstance(entity);
if(!renderable.isValid()) {
Log("Renderable not valid, was the entity id correct?");
return false;
}
MaterialInstance* mi = rm.getMaterialInstanceAt(renderable, materialIndex);
if(!mi) {
Log("ERROR: material index must be less than number of material instances");
return false;
}
mi->setParameter("baseColorFactor", RgbaType::sRGB, math::float4(r, g, b, a));
Log("Set baseColorFactor for entity %d to %f %f %f %f",entityId, r,g,b,a);
return true;
}
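// Registers a skeletal animation driven by raw frame data: 7 floats per bone per frame
// (translation xyz then quaternion wxyz), sampled every frameLengthInMs. Bones are matched by
// name against the joints of skin 0.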
bool AssetManager::setBoneAnimationBuffer(
EntityId entityId,
const float* const frameData,
int numFrames,
int numBones,
const char** const boneNames,
const char** const meshNames,
int numMeshTargets,
float frameLengthInMs) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return false;
}
auto& asset = _assets[pos->second];
auto filamentInstance = asset.mAsset->getInstance();
size_t skinCount = filamentInstance->getSkinCount();
if(skinCount > 1) {
Log("WARNING - skin count > 1 not currently implemented. This will probably not work");
}
TransformManager &transformManager = _engine->getTransformManager();
int skinIndex = 0;
const utils::Entity* joints = filamentInstance->getJointsAt(skinIndex);
size_t numJoints = filamentInstance->getJointCountAt(skinIndex);
BoneAnimationBuffer& animationBuffer = asset.mBoneAnimationBuffer;
// if an animation has already been set, reset the transform for the respective bones
for(int i = 0; i < animationBuffer.mBones.size(); i++) {
auto boneIndex = animationBuffer.mBones[i];
auto jointInstance = transformManager.getInstance(joints[boneIndex]);
transformManager.setTransform(jointInstance, animationBuffer.mBaseTransforms[i]);
}
asset.mAnimator->resetBoneMatrices();
animationBuffer.mBones.resize(numBones);
animationBuffer.mBaseTransforms.resize(numBones);
int numFound = 0;
for(int i = 0; i < numBones; i++) {
for(int j = 0; j < numJoints; j++) {
const char* jointName = _ncm->getName(_ncm->getInstance(joints[j]));
if(jointName && strcmp(jointName, boneNames[i]) == 0) {
auto jointInstance = transformManager.getInstance(joints[j]);
auto baseTransform = transformManager.getTransform(jointInstance); // inverse(filamentInstance->getInverseBindMatricesAt(skinIndex)[j]);
animationBuffer.mBaseTransforms[i] = baseTransform;
animationBuffer.mBones[i] = j;
numFound++;
break;
}
}
}
// mBones was resized to numBones above, so its size alone cannot tell us whether every bone name was matched
if(numFound != numBones) {
Log("Failed to find one or more bone indices");
return false;
}
animationBuffer.mFrameData.clear();
// 7 == locX, locY, locZ, rotW, rotX, rotY, rotZ
// (no resize here: inserting into the freshly cleared vector copies exactly numFrames * numBones * 7 floats)
animationBuffer.mFrameData.insert(
animationBuffer.mFrameData.begin(),
frameData,
frameData + numFrames * numBones * 7
);
animationBuffer.mFrameLengthInMs = frameLengthInMs;
animationBuffer.mNumFrames = numFrames;
animationBuffer.mMeshTargets.clear();
for(int i = 0; i < numMeshTargets; i++) {
auto entity = findEntityByName(asset, meshNames[i]);
if(!entity) {
Log("Mesh target %s for bone animation could not be found", meshNames[i]);
return false;
}
Log("Added mesh target %s", meshNames[i]);
animationBuffer.mMeshTargets.push_back(entity);
}
AnimationStatus animation;
animation.mStart = std::chrono::high_resolution_clock::now();
animation.mReverse = false;
animation.mDuration = (frameLengthInMs * numFrames) / 1000.0f;
animation.type = AnimationType::BONE;
asset.mAnimations.push_back(animation);
return true;
}
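// Starts the glTF animation at [index]. When replaceActive is true, any currently playing glTF
// animations are removed and the most recent one is cross-faded out over [crossfade] seconds;
// crossfade is otherwise unsupported.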
void AssetManager::playAnimation(EntityId e, int index, bool loop, bool reverse, bool replaceActive, float crossfade) {
if(index < 0) {
Log("ERROR: glTF animation index must be greater than zero.");
return;
}
const auto& pos = _entityIdLookup.find(e);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
if(replaceActive) {
vector<int> active;
for(int i = 0; i < asset.mAnimations.size(); i++) {
if(asset.mAnimations[i].type == AnimationType::GLTF) {
active.push_back(i);
}
}
if(active.size() > 0) {
auto& last = asset.mAnimations[active.back()];
asset.fadeGltfAnimationIndex = last.gltfIndex;
asset.fadeDuration = crossfade;
auto now = high_resolution_clock::now();
auto elapsed = float(std::chrono::duration_cast<std::chrono::milliseconds>(now - last.mStart).count()) / 1000.0f;
asset.fadeOutAnimationStart = elapsed;
for(int j = active.size() - 1; j >= 0; j--) {
asset.mAnimations.erase(asset.mAnimations.begin() + active[j]);
}
} else {
asset.fadeGltfAnimationIndex = -1;
asset.fadeDuration = 0.0f;
}
} else if(crossfade > 0) {
Log("ERROR: crossfade only supported when replaceActive is true.");
return;
} else {
asset.fadeGltfAnimationIndex = -1;
asset.fadeDuration = 0.0f;
}
AnimationStatus animation;
animation.gltfIndex = index;
animation.mStart = std::chrono::high_resolution_clock::now();
animation.mLoop = loop;
animation.mReverse = reverse;
animation.type = AnimationType::GLTF;
animation.mDuration = asset.mAnimator->getAnimationDuration(index);
asset.mAnimations.push_back(animation);
}
void AssetManager::stopAnimation(EntityId entityId, int index) {
const auto& pos = _entityIdLookup.find(entityId);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
asset.mAnimations.erase(std::remove_if(asset.mAnimations.begin(),
asset.mAnimations.end(),
[=](AnimationStatus& anim) { return anim.gltfIndex == index; }),
asset.mAnimations.end());
}
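// Decodes an image resource into a floating-point texture and binds it as the baseColorMap
// parameter on the asset's first material instance (renderableIndex is currently only logged).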
void AssetManager::loadTexture(EntityId entity, const char* resourcePath, int renderableIndex) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
Log("Loading texture at %s for renderableIndex %d", resourcePath, renderableIndex);
string rp(resourcePath);
if(asset.mTexture) {
_engine->destroy(asset.mTexture);
asset.mTexture = nullptr;
}
ResourceBuffer imageResource = _resourceLoaderWrapper->load(rp.c_str());
StreamBufferAdapter sb((char *)imageResource.data, (char *)imageResource.data + imageResource.size);
istream *inputStream = new std::istream(&sb);
LinearImage *image = new LinearImage(ImageDecoder::decode(
*inputStream, rp.c_str(), ImageDecoder::ColorSpace::SRGB));
if (!image->isValid()) {
Log("Invalid image : %s", rp.c_str());
delete inputStream;
_resourceLoaderWrapper->free(imageResource);
return;
}
uint32_t channels = image->getChannels();
uint32_t w = image->getWidth();
uint32_t h = image->getHeight();
asset.mTexture = Texture::Builder()
.width(w)
.height(h)
.levels(0xff)
.format(channels == 3 ? Texture::InternalFormat::RGB16F
: Texture::InternalFormat::RGBA16F)
.sampler(Texture::Sampler::SAMPLER_2D)
.build(*_engine);
Texture::PixelBufferDescriptor::Callback freeCallback = [](void *buf, size_t,
void *data) {
delete reinterpret_cast<LinearImage *>(data);
};
Texture::PixelBufferDescriptor buffer(
image->getPixelRef(), size_t(w * h * channels * sizeof(float)),
channels == 3 ? Texture::Format::RGB : Texture::Format::RGBA,
Texture::Type::FLOAT, freeCallback, image); // pass the LinearImage as user data so the callback can delete it
asset.mTexture->setImage(*_engine, 0, std::move(buffer));
MaterialInstance* const* inst = asset.mAsset->getInstance()->getMaterialInstances();
size_t mic = asset.mAsset->getInstance()->getMaterialInstanceCount();
Log("Material instance count : %d", mic);
auto sampler = TextureSampler();
inst[0]->setParameter("baseColorIndex",0);
inst[0]->setParameter("baseColorMap",asset.mTexture,sampler);
delete inputStream;
_resourceLoaderWrapper->free(imageResource);
}
void AssetManager::setAnimationFrame(EntityId entity, int animationIndex, int animationFrame) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
auto offset = animationFrame / 60.0f; // frame -> seconds; TODO - don't hardcode 60fps framerate
asset.mAnimator->applyAnimation(animationIndex, offset);
asset.mAnimator->updateBoneMatrices();
}
float AssetManager::getAnimationDuration(EntityId entity, int animationIndex) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity id.");
return -1.0f;
}
auto& asset = _assets[pos->second];
return asset.mAnimator->getAnimationDuration(animationIndex);
}
unique_ptr<vector<string>> AssetManager::getAnimationNames(EntityId entity) {
const auto& pos = _entityIdLookup.find(entity);
unique_ptr<vector<string>> names = make_unique<vector<string>>();
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity id.");
return names;
}
auto& asset = _assets[pos->second];
size_t count = asset.mAnimator->getAnimationCount();
for (size_t i = 0; i < count; i++) {
names->push_back(asset.mAnimator->getAnimationName(i));
}
return names;
}
unique_ptr<vector<string>> AssetManager::getMorphTargetNames(EntityId entity, const char *meshName) {
unique_ptr<vector<string>> names = make_unique<vector<string>>();
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return names;
}
auto& asset = _assets[pos->second];
const utils::Entity *entities = asset.mAsset->getEntities();
for (int i = 0; i < asset.mAsset->getEntityCount(); i++) {
utils::Entity e = entities[i];
auto inst = _ncm->getInstance(e);
const char *name = _ncm->getName(inst);
if (name && strcmp(name, meshName) == 0) {
size_t count = asset.mAsset->getMorphTargetCountAt(e);
for (int j = 0; j < count; j++) {
const char *morphName = asset.mAsset->getMorphTargetNameAt(e, j);
names->push_back(morphName);
}
break;
}
}
return names;
}
void AssetManager::transformToUnitCube(EntityId entity) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
Log("Transforming asset to unit cube.");
auto &tm = _engine->getTransformManager();
FilamentInstance* inst = asset.mAsset->getInstance();
auto aabb = inst->getBoundingBox();
auto center = aabb.center();
auto halfExtent = aabb.extent();
auto maxExtent = max(halfExtent) * 2;
auto scaleFactor = 2.0f / maxExtent;
auto transform =
math::mat4f::scaling(scaleFactor) * math::mat4f::translation(-center);
tm.setTransform(tm.getInstance(inst->getRoot()), transform);
}
void AssetManager::updateTransform(SceneAsset& asset) {
auto &tm = _engine->getTransformManager();
auto transform =
asset.mPosition * asset.mRotation * math::mat4f::scaling(asset.mScale);
tm.setTransform(tm.getInstance(asset.mAsset->getRoot()), transform);
}
void AssetManager::setScale(EntityId entity, float scale) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
asset.mScale = scale;
updateTransform(asset);
}
void AssetManager::setPosition(EntityId entity, float x, float y, float z) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
asset.mPosition = math::mat4f::translation(math::float3(x,y,z));
updateTransform(asset);
}
void AssetManager::setRotation(EntityId entity, float rads, float x, float y, float z) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return;
}
auto& asset = _assets[pos->second];
asset.mRotation = math::mat4f::rotation(rads, math::float3(x,y,z));
updateTransform(asset);
}
const utils::Entity *AssetManager::getCameraEntities(EntityId entity) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return nullptr;
}
auto& asset = _assets[pos->second];
return asset.mAsset->getCameraEntities();
}
size_t AssetManager::getCameraEntityCount(EntityId entity) {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return 0;
}
auto& asset = _assets[pos->second];
return asset.mAsset->getCameraEntityCount();
}
const utils::Entity* AssetManager::getLightEntities(EntityId entity) const noexcept {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return nullptr;
}
auto& asset = _assets[pos->second];
return asset.mAsset->getLightEntities();
}
size_t AssetManager::getLightEntityCount(EntityId entity) const noexcept {
const auto& pos = _entityIdLookup.find(entity);
if(pos == _entityIdLookup.end()) {
Log("ERROR: asset not found for entity.");
return 0;
}
auto& asset = _assets[pos->second];
return asset.mAsset->getLightEntityCount();
}
} // namespace polyvox
// auto& inverseBindMatrix = filamentInstance->getInverseBindMatricesAt(skinIndex)[mBoneIndex];
// auto globalJointTransform = transformManager.getWorldTransform(jointInstance);
// for(auto& target : asset.mBoneAnimationBuffer.mMeshTargets) {
// auto inverseGlobalTransform = inverse(
// transformManager.getWorldTransform(
// transformManager.getInstance(target)
// )
// );
// auto boneTransform = inverseGlobalTransform * globalJointTransform * localTransform * inverseBindMatrix;
// auto renderable = rm.getInstance(target);
// rm.setBones(
// renderable,
// &boneTransform,
// 1,
// mBoneIndex
// );
// }
// 1.0f, 0.0f, 0.0f, 0.0f,
// 0.0f, 0.0f, 1.0f, 0.0f,
// 0.0f, -1.0f, 0.0f, 0.0f,
// 0.0f, 0.0f, 0.0f, 1.0f
// };
// Log("TRANSFORM");
// Log("%f %f %f %f", localTransform[0][0], localTransform[1][0], localTransform[2][0], localTransform[3][0] ) ;
// Log("%f %f %f %f", localTransform[0][1], localTransform[1][1], localTransform[2][1], localTransform[3][1] ) ;
// Log("%f %f %f %f", localTransform[0][2], localTransform[1][2], localTransform[2][2], localTransform[3][2] ) ;
// Log("%f %f %f %f", localTransform[0][3], localTransform[1][3], localTransform[2][3], localTransform[3][3] ) ;
// transformManager.getTransform(jointInstance);


@@ -0,0 +1,962 @@
#if __APPLE__
#include "TargetConditionals.h"
#endif
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <filament/Camera.h>
#include <backend/DriverEnums.h>
#include <filament/ColorGrading.h>
#include <filament/Engine.h>
#include <filament/IndexBuffer.h>
#include <filament/IndirectLight.h>
#include <filament/Options.h>
#include <filament/Renderer.h>
#include <filament/RenderTarget.h>
#include <filament/Scene.h>
#include <filament/Skybox.h>
#include <filament/TransformManager.h>
#include <filament/VertexBuffer.h>
#include <filament/View.h>
#include <filament/Viewport.h>
#include <filament/RenderableManager.h>
#include <filament/LightManager.h>
#include <gltfio/Animator.h>
#include <gltfio/AssetLoader.h>
#include <gltfio/FilamentAsset.h>
#include <gltfio/ResourceLoader.h>
#include <gltfio/TextureProvider.h>
#include <gltfio/materials/uberarchive.h>
#include <utils/NameComponentManager.h>
#include <imageio/ImageDecoder.h>
#include "math.h"
#include <math/mat4.h>
#include <math/TVecHelpers.h>
#include <math/quat.h>
#include <math/scalar.h>
#include <math/vec3.h>
#include <math/vec4.h>
#include <ktxreader/Ktx1Reader.h>
#include <ktxreader/Ktx2Reader.h>
#include <iostream>
#include <fstream>
#include <mutex>
#include "Log.hpp"
#include "FilamentViewer.hpp"
#include "StreamBufferAdapter.hpp"
#include "material/image.h"
#include "TimeIt.hpp"
using namespace filament;
using namespace filament::math;
using namespace gltfio;
using namespace utils;
using namespace image;
namespace filament {
class IndirectLight;
class LightManager;
} // namespace filament
namespace polyvox {
const double kNearPlane = 0.05; // 5 cm
const double kFarPlane = 1000.0; // 1 km
// const float kAperture = 1.0f;
// const float kShutterSpeed = 1.0f;
// const float kSensitivity = 50.0f;
struct Vertex {
filament::math::float2 position;
uint32_t color;
};
static constexpr float4 sFullScreenTriangleVertices[3] = {
{ -1.0f, -1.0f, 1.0f, 1.0f },
{ 3.0f, -1.0f, 1.0f, 1.0f },
{ -1.0f, 3.0f, 1.0f, 1.0f } };
static const uint16_t sFullScreenTriangleIndices[3] = {0, 1, 2};
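// FilamentViewer sets up the core Filament objects for this backend: a Metal engine on Apple
// platforms (OpenGL elsewhere), renderer, scene, main camera and view, plus the AssetManager and
// a full-screen textured quad used for background colours/images.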
FilamentViewer::FilamentViewer(const void* context, const ResourceLoaderWrapper* const resourceLoaderWrapper)
: _resourceLoaderWrapper(resourceLoaderWrapper) {
#if TARGET_OS_IPHONE
_engine = Engine::create(Engine::Backend::METAL);
#elif TARGET_OS_MAC
_engine = Engine::create(Engine::Backend::METAL);
#else
_engine = Engine::create(Engine::Backend::OPENGL, nullptr, (void*)context, nullptr);
#endif
_renderer = _engine->createRenderer();
float fr = 60.0f;
_renderer->setDisplayInfo({.refreshRate = fr});
Renderer::FrameRateOptions fro;
fro.interval = 1 / fr;
_renderer->setFrameRateOptions(fro);
_scene = _engine->createScene();
Log("Scene created");
utils::Entity camera = EntityManager::get().create();
_mainCamera = _engine->createCamera(camera);
Log("Main camera created");
_view = _engine->createView();
setToneMapping(ToneMapping::ACES);
setBloom(0.6f);
_view->setScene(_scene);
_view->setCamera(_mainCamera);
_cameraFocalLength = 28.0f;
_mainCamera->setLensProjection(_cameraFocalLength, 1.0f, kNearPlane,
kFarPlane);
// _mainCamera->setExposure(kAperture, kShutterSpeed, kSensitivity);
const float aperture = _mainCamera->getAperture();
const float shutterSpeed = _mainCamera->getShutterSpeed();
const float sens = _mainCamera->getSensitivity();
// _mainCamera->setExposure(2.0f, 1.0f, 1.0f);
Log("Camera aperture %f shutter %f sensitivity %f", aperture, shutterSpeed, sens);
View::DynamicResolutionOptions options;
options.enabled = false;
// options.homogeneousScaling = homogeneousScaling;
// options.minScale = filament::math::float2{ minScale };
// options.maxScale = filament::math::float2{ maxScale };
// options.sharpness = sharpness;
// options.quality = View::QualityLevel::ULTRA;
_view->setDynamicResolutionOptions(options);
View::MultiSampleAntiAliasingOptions multiSampleAntiAliasingOptions;
multiSampleAntiAliasingOptions.enabled = true;
_view->setMultiSampleAntiAliasingOptions(multiSampleAntiAliasingOptions);
_view->setAntiAliasing(AntiAliasing::NONE);
// auto materialRb = _resourceLoader->load("file:///mnt/hdd_2tb/home/hydroxide/projects/filament/unlit.filamat");
// Log("Loaded resource of size %d", materialRb.size);
// _materialProvider = new FileMaterialProvider(_engine, (void*) materialRb.data, (size_t)materialRb.size);
EntityManager &em = EntityManager::get();
_ncm = new NameComponentManager(em);
_assetManager = new AssetManager(
_resourceLoaderWrapper,
_ncm,
_engine,
_scene);
_imageTexture = Texture::Builder()
.width(1)
.height(1)
.levels(0x01)
.format(Texture::InternalFormat::RGB16F)
.sampler(Texture::Sampler::SAMPLER_2D)
.build(*_engine);
_imageMaterial =
Material::Builder()
.package(IMAGE_PACKAGE, IMAGE_IMAGE_SIZE)
.build(*_engine);
_imageMaterial->setDefaultParameter("showImage",0);
_imageMaterial->setDefaultParameter("backgroundColor", RgbaType::sRGB, float4(0.5f, 0.5f, 0.5f, 1.0f));
_imageMaterial->setDefaultParameter("image", _imageTexture, _imageSampler);
_imageScale = mat4f { 1.0f , 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f };
_imageMaterial->setDefaultParameter("transform", _imageScale);
_imageVb = VertexBuffer::Builder()
.vertexCount(3)
.bufferCount(1)
.attribute(VertexAttribute::POSITION, 0,
VertexBuffer::AttributeType::FLOAT4, 0)
.build(*_engine);
_imageVb->setBufferAt(
*_engine, 0,
{sFullScreenTriangleVertices, sizeof(sFullScreenTriangleVertices)});
_imageIb = IndexBuffer::Builder()
.indexCount(3)
.bufferType(IndexBuffer::IndexType::USHORT)
.build(*_engine);
_imageIb->setBuffer(*_engine, {sFullScreenTriangleIndices,
sizeof(sFullScreenTriangleIndices)});
utils::Entity imageEntity = em.create();
RenderableManager::Builder(1)
.boundingBox({{}, {1.0f, 1.0f, 1.0f}})
.material(0, _imageMaterial->getDefaultInstance())
.geometry(0, RenderableManager::PrimitiveType::TRIANGLES, _imageVb,
_imageIb, 0, 3)
.culling(false)
.build(*_engine, imageEntity);
_imageEntity = new utils::Entity(imageEntity); // keep a heap copy; taking the address of the stack local would leave a dangling pointer
_scene->addEntity(imageEntity);
}
void FilamentViewer::setBloom(float strength) {
decltype(_view->getBloomOptions()) opts;
opts.enabled = true;
opts.strength = strength;
_view->setBloomOptions(opts);
}
void FilamentViewer::setToneMapping(ToneMapping toneMapping) {
ToneMapper* tm = nullptr;
switch(toneMapping) {
case ToneMapping::ACES:
tm = new ACESToneMapper();
break;
case ToneMapping::LINEAR:
tm = new LinearToneMapper();
break;
case ToneMapping::FILMIC:
tm = new FilmicToneMapper();
break;
}
auto newColorGrading = ColorGrading::Builder().toneMapper(tm).build(*_engine);
_view->setColorGrading(newColorGrading);
_engine->destroy(colorGrading);
colorGrading = newColorGrading; // keep the new instance so the next call destroys the right object
delete tm;
}
void FilamentViewer::setFrameInterval(float frameInterval) {
Renderer::FrameRateOptions fro;
fro.interval = frameInterval;
_renderer->setFrameRateOptions(fro);
Log("Set framerate interval to %f", frameInterval);
}
int32_t FilamentViewer::addLight(LightManager::Type t, float colour, float intensity, float posX, float posY, float posZ, float dirX, float dirY, float dirZ, bool shadows) {
auto light = EntityManager::get().create();
LightManager::Builder(t)
.color(Color::cct(colour))
.intensity(intensity)
.position(math::float3(posX, posY, posZ))
.direction(math::float3(dirX, dirY, dirZ))
.castShadows(shadows)
.build(*_engine, light);
_scene->addEntity(light);
_lights.push_back(light);
auto entityId = Entity::smuggle(light);
Log("Added light under entity ID %d of type %d with colour %f intensity %f at (%f, %f, %f) with direction (%f, %f, %f) with shadows %d", entityId, t, colour, intensity, posX, posY, posZ, dirX, dirY, dirZ, shadows);
return entityId;
}
void FilamentViewer::removeLight(EntityId entityId) {
Log("Removing light with entity ID %d", entityId);
auto entity = utils::Entity::import(entityId);
if(entity.isNull()) {
Log("Error: light entity not found under ID %d", entityId);
} else {
_lights.erase(std::remove(_lights.begin(), _lights.end(), entity), _lights.end());
_scene->remove(entity);
EntityManager::get().destroy(1, &entity);
}
}
void FilamentViewer::clearLights() {
Log("Removing all lights");
_scene->removeEntities(_lights.data(), _lights.size());
EntityManager::get().destroy(_lights.size(), _lights.data());
_lights.clear();
}
static bool endsWith(string path, string ending) {
if (ending.length() > path.length()) {
return false;
}
return path.compare(path.length() - ending.length(), ending.length(), ending) == 0;
}
void FilamentViewer::loadKtx2Texture(string path, ResourceBuffer rb) {
// TODO - check all this
// ktxreader::Ktx2Reader reader(*_engine);
// reader.requestFormat(Texture::InternalFormat::DXT3_SRGBA);
// reader.requestFormat(Texture::InternalFormat::DXT3_RGBA);
// // Uncompressed formats are lower priority, so they get added last.
// reader.requestFormat(Texture::InternalFormat::SRGB8_A8);
// reader.requestFormat(Texture::InternalFormat::RGBA8);
// // std::ifstream inputStream("/data/data/app.polyvox.filament_example/foo.ktx", ios::binary);
// // auto contents = vector<uint8_t>((istreambuf_iterator<char>(inputStream)), {});
// _imageTexture = reader.load(contents.data(), contents.size(),
// ktxreader::Ktx2Reader::TransferFunction::LINEAR);
}
void FilamentViewer::loadKtxTexture(string path, ResourceBuffer rb) {
ktxreader::Ktx1Bundle *bundle =
new ktxreader::Ktx1Bundle(static_cast<const uint8_t *>(rb.data),
static_cast<uint32_t>(rb.size));
_imageTexture =
ktxreader::Ktx1Reader::createTexture(_engine, *bundle, false, [](void* userdata) {
Ktx1Bundle* bundle = (Ktx1Bundle*) userdata;
delete bundle;
}, bundle);
auto info = bundle->getInfo();
_imageWidth = info.pixelWidth;
_imageHeight = info.pixelHeight;
}
void FilamentViewer::loadPngTexture(string path, ResourceBuffer rb) {
polyvox::StreamBufferAdapter sb((char *)rb.data, (char *)rb.data + rb.size);
std::istream inputStream(&sb);
LinearImage* image = new LinearImage(ImageDecoder::decode(
inputStream, path.c_str(), ImageDecoder::ColorSpace::SRGB));
if (!image->isValid()) {
Log("Invalid image : %s", path.c_str());
return;
}
uint32_t channels = image->getChannels();
_imageWidth = image->getWidth();
_imageHeight = image->getHeight();
_imageTexture = Texture::Builder()
.width(_imageWidth)
.height(_imageHeight)
.levels(0x01)
.format(channels == 3 ? Texture::InternalFormat::RGB16F
: Texture::InternalFormat::RGBA16F)
.sampler(Texture::Sampler::SAMPLER_2D)
.build(*_engine);
Texture::PixelBufferDescriptor::Callback freeCallback = [](void *buf, size_t,
void *data) {
Log("Deleting LinearImage");
delete reinterpret_cast<LinearImage*>(data);
};
auto pbd = Texture::PixelBufferDescriptor(
image->getPixelRef(), size_t(_imageWidth * _imageHeight * channels * sizeof(float)),
channels == 3 ? Texture::Format::RGB : Texture::Format::RGBA,
Texture::Type::FLOAT, nullptr, freeCallback, image);
_imageTexture->setImage(*_engine, 0, std::move(pbd));
}
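// Loads the background image texture, choosing a decoder based on the file extension
// (.ktx, .ktx2 or .png - the .ktx2 path is still a stub). The decoded texture and its dimensions
// are stored in _imageTexture / _imageWidth / _imageHeight.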
void FilamentViewer::loadTextureFromPath(string path) {
string ktxExt(".ktx");
string ktx2Ext(".ktx2");
string pngExt(".png");
if (path.length() < 5) {
Log("Invalid resource path : %s", path.c_str());
return;
}
ResourceBuffer rb = _resourceLoaderWrapper->load(path.c_str());
if(endsWith(path, ktxExt)) {
loadKtxTexture(path, rb);
} else if(endsWith(path, ktx2Ext)) {
loadKtx2Texture(path, rb);
} else if(endsWith(path, pngExt)) {
loadPngTexture(path, rb);
}
_resourceLoaderWrapper->free(rb);
}
void FilamentViewer::setBackgroundColor(const float r, const float g, const float b, const float a) {
_imageMaterial->setDefaultParameter("showImage", 0);
_imageMaterial->setDefaultParameter("backgroundColor", RgbaType::sRGB, float4(r, g, b, a));
const Viewport& vp = _view->getViewport();
_imageMaterial->setDefaultParameter("transform", _imageScale);
}
void FilamentViewer::clearBackgroundImage() {
_imageMaterial->setDefaultParameter("showImage", 0);
if (_imageTexture) {
Log("Destroying existing texture");
_engine->destroy(_imageTexture);
Log("Destroyed.");
_imageTexture = nullptr;
}
}
void FilamentViewer::setBackgroundImage(const char *resourcePath) {
string resourcePathString(resourcePath);
Log("Setting background image to %s", resourcePath);
clearBackgroundImage();
loadTextureFromPath(resourcePathString);
// This currently just anchors the image at the bottom left of the viewport at its original size
// TODO - implement stretch/etc
const Viewport& vp = _view->getViewport();
Log("Image width %d height %d vp width %d height %d", _imageWidth, _imageHeight, vp.width, vp.height);
_imageScale = mat4f { float(vp.width) / float(_imageWidth) , 0.0f, 0.0f, 0.0f, 0.0f, float(vp.height) / float(_imageHeight), 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f };
_imageMaterial->setDefaultParameter("transform", _imageScale);
_imageMaterial->setDefaultParameter("image", _imageTexture, _imageSampler);
_imageMaterial->setDefaultParameter("showImage", 1);
}
///
/// Translates the background image by (x,y) pixels.
/// If clamp is true, x/y are both clamped so that the left/top and right/bottom sides of the background image
/// are positioned at a max/min of -1/1 respectively
/// (i.e. you cannot set a position where the left/top or right/bottom sides would be "inside" the screen coordinate space).
///
void FilamentViewer::setBackgroundImagePosition(float x, float y, bool clamp=false) {
// to translate the background image, we apply a transform to the UV coordinates of the quad texture, not the quad itself (see image.mat).
// this allows us to set a background colour for the quad when the texture has been translated outside the quad's bounds.
// so we need to munge the coordinates appropriately (and take into consideration the scale transform applied when the image was loaded).
// first, convert x/y to a percentage of the original image size
x /= _imageWidth;
y /= _imageHeight;
// now scale these by the viewport dimensions so they can be incorporated directly into the UV transform matrix.
// x *= _imageScale[0][0];
// y *= _imageScale[1][1];
// TODO - I haven't updated the clamp calculations to work with scaled image width/height percentages so the below code is probably wrong, don't use it until it's fixed.
if(clamp) {
Log("Clamping background image translation");
// first, clamp x/y
auto xScale = float(_imageWidth) / _view->getViewport().width;
auto yScale = float(_imageHeight) / _view->getViewport().height;
float xMin = 0;
float xMax = 0;
float yMin = 0;
float yMax = 0;
// we need to clamp x so that it can only be translated between (left side touching viewport left) and (right side touching viewport right)
// if width is less than viewport, these values are 0/1-xScale respectively
if(xScale < 1) {
xMin = 0;
xMax = 1-xScale;
// otherwise (image wider than the viewport), x can only be translated between 1-xScale and 0
} else {
xMin = 1-xScale;
xMax = 0;
}
// do the same for y
if(yScale < 1) {
yMin = 0;
yMax = 1-yScale;
} else {
yMin = 1-yScale;
yMax = 0;
}
x = std::max(xMin, std::min(x,xMax));
y = std::max(yMin, std::min(y,yMax));
}
// these values are then negated to account for the fact that the transform is applied to the UV coordinates, not the vertices (see image.mat).
// i.e. translating the image right by 0.5 units means translating the UV coordinates left by 0.5 units.
x = -x;
y = -y;
Log("x %f y %f", x, y);
Log("imageScale %f %f %f %f %f %f %f %f %f %f %f %f %f %f %f %f ", _imageScale[0][0],_imageScale[0][1],_imageScale[0][2], _imageScale[0][3], \
_imageScale[1][0],_imageScale[1][1],_imageScale[1][2], _imageScale[1][3],\
_imageScale[2][0],_imageScale[2][1],_imageScale[2][2], _imageScale[2][3], \
_imageScale[3][0],_imageScale[3][1],_imageScale[3][2], _imageScale[3][3]);
auto transform = math::mat4f::translation(math::float3(x, y, 0.0f)) * _imageScale;
Log("transform %f %f %f %f %f %f %f %f %f %f %f %f %f %f %f %f ", transform[0][0],transform[0][1],transform[0][2], transform[0][3], \
transform[1][0],transform[1][1],transform[1][2], transform[1][3],\
transform[2][0],transform[2][1],transform[2][2], transform[2][3], \
transform[3][0],transform[3][1],transform[3][2], transform[3][3]);
_imageMaterial->setDefaultParameter("transform", transform);
}
FilamentViewer::~FilamentViewer() {
clearAssets();
delete _assetManager;
for(auto it : _lights) {
_engine->destroy(it);
}
_engine->destroyCameraComponent(_mainCamera->getEntity());
_mainCamera = nullptr;
_engine->destroy(_view);
_engine->destroy(_scene);
_engine->destroy(_renderer);
_engine->destroy(_swapChain);
Engine::destroy(&_engine); // clears engine*
}
Renderer *FilamentViewer::getRenderer() { return _renderer; }
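// Creates the swapchain the renderer draws into; on iOS the provided surface is wrapped as a
// CVPixelBuffer-backed swapchain, otherwise either a headless (width x height) or a
// surface-backed swapchain is created.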
void FilamentViewer::createSwapChain(const void *surface, uint32_t width, uint32_t height) {
#if TARGET_OS_IPHONE
_swapChain = _engine->createSwapChain((void*)surface, filament::backend::SWAP_CHAIN_CONFIG_APPLE_CVPIXELBUFFER);
#else
if(surface) {
_swapChain = _engine->createSwapChain(width, height, filament::backend::SWAP_CHAIN_CONFIG_TRANSPARENT | filament::backend::SWAP_CHAIN_CONFIG_READABLE);
} else {
_swapChain = _engine->createSwapChain((void*)surface, filament::backend::SWAP_CHAIN_CONFIG_TRANSPARENT | filament::backend::SWAP_CHAIN_CONFIG_READABLE);
}
#endif
Log("Swapchain created.");
}
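// Imports an externally created texture (identified by textureId, e.g. a texture registered with
// the host UI toolkit) as the colour attachment of a render target, creates a matching depth
// attachment, and points the view at it.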
void FilamentViewer::createRenderTarget(intptr_t textureId, uint32_t width, uint32_t height) {
// Create filament textures and render targets (note the color buffer has the import call)
_rtColor = filament::Texture::Builder()
.width(width)
.height(height)
.levels(1)
.usage(filament::Texture::Usage::COLOR_ATTACHMENT | filament::Texture::Usage::SAMPLEABLE)
.format(filament::Texture::InternalFormat::RGBA8)
.import(textureId)
.build(*_engine);
_rtDepth = filament::Texture::Builder()
.width(width)
.height(height)
.levels(1)
.usage(filament::Texture::Usage::DEPTH_ATTACHMENT)
.format(filament::Texture::InternalFormat::DEPTH24)
.build(*_engine);
_rt = filament::RenderTarget::Builder()
.texture(RenderTarget::AttachmentPoint::COLOR, _rtColor)
.texture(RenderTarget::AttachmentPoint::DEPTH, _rtDepth)
.build(*_engine);
// Make a specific viewport just for our render target
_view->setRenderTarget(_rt);
Log("Set render target for textureId %u %u x %u", textureId, width, height);
}
void FilamentViewer::destroySwapChain() {
if(_rt) {
_view->setRenderTarget(nullptr);
_engine->destroy(_rtDepth);
_engine->destroy(_rtColor);
_engine->destroy(_rt);
_rt = nullptr;
_rtDepth = nullptr;
_rtColor = nullptr;
}
if (_swapChain) {
_engine->destroy(_swapChain);
_swapChain = nullptr;
Log("Swapchain destroyed.");
}
}
void FilamentViewer::clearAssets() {
Log("Clearing all assets");
if(_mainCamera) {
_view->setCamera(_mainCamera);
}
_assetManager->destroyAll();
Log("Cleared all assets");
}
void FilamentViewer::removeAsset(EntityId asset) {
Log("Removing asset from scene");
mtx.lock();
// todo - what if we are using a camera from this asset?
_view->setCamera(_mainCamera);
_assetManager->remove(asset);
mtx.unlock();
}
///
/// Set the exposure for the current active camera.
///
void FilamentViewer::setCameraExposure(float aperture, float shutterSpeed, float sensitivity) {
Camera& cam =_view->getCamera();
Log("Setting aperture (%03f) shutterSpeed (%03f) and sensitivity (%03f)", aperture, shutterSpeed, sensitivity);
cam.setExposure(aperture, shutterSpeed, sensitivity);
}
///
/// Set the focal length of the active camera.
///
void FilamentViewer::setCameraFocalLength(float focalLength) {
Camera& cam =_view->getCamera();
_cameraFocalLength = focalLength;
cam.setLensProjection(_cameraFocalLength, 1.0f, kNearPlane,
kFarPlane);
}
///
/// Set the focus distance of the active camera.
///
void FilamentViewer::setCameraFocusDistance(float focusDistance) {
Camera& cam =_view->getCamera();
_cameraFocusDistance = focusDistance;
cam.setFocusDistance(_cameraFocusDistance);
}
///
/// Sets the active camera to the GLTF camera node specified by [name] (or if null, the first camera found under that node).
/// N.B. Blender will generally export a three-node hierarchy -
/// Camera1->Camera_Orientation->Camera2. The correct name will be the Camera_Orientation.
///
bool FilamentViewer::setCamera(EntityId entityId, const char *cameraName) {
auto asset = _assetManager->getAssetByEntityId(entityId);
if(!asset) {
Log("Failed to find asset attached to specified entity id.");
return false;
}
size_t count = asset->getCameraEntityCount();
if (count == 0) {
Log("Failed, no cameras found in current asset.");
return false;
}
const utils::Entity* cameras = asset->getCameraEntities();
utils::Entity target;
if(!cameraName) {
auto inst = _ncm->getInstance(cameras[0]);
const char *name = _ncm->getName(inst);
target = cameras[0];
Log("No camera specified, using first : %s", name);
} else {
for (int j = 0; j < count; j++) {
auto inst = _ncm->getInstance(cameras[j]);
const char *name = _ncm->getName(inst);
if (strcmp(name, cameraName) == 0) {
target = cameras[j];
break;
}
}
}
if(target.isNull()) {
Log("Unable to locate camera under name %s ", cameraName);
return false;
}
Camera *camera = _engine->getCameraComponent(target);
if(!camera) {
Log("Failed to retrieve camera component for target");
return false;
}
_view->setCamera(camera);
const Viewport &vp = _view->getViewport();
const double aspect = (double)vp.width / vp.height;
// const float aperture = camera->getAperture();
// const float shutterSpeed = camera->getShutterSpeed();
// const float sens = camera->getSensitivity();
// camera->setExposure(1.0f);
camera->setScaling({1.0 / aspect, 1.0});
return true;
}
void FilamentViewer::loadSkybox(const char *const skyboxPath) {
Log("Loading skybox from %s", skyboxPath);
removeSkybox();
if (skyboxPath) {
ResourceBuffer skyboxBuffer = _resourceLoaderWrapper->load(skyboxPath);
if(skyboxBuffer.size <= 0) {
Log("Could not load skybox resource.");
return;
}
image::Ktx1Bundle *skyboxBundle =
new image::Ktx1Bundle(static_cast<const uint8_t *>(skyboxBuffer.data),
static_cast<uint32_t>(skyboxBuffer.size));
_skyboxTexture =
ktxreader::Ktx1Reader::createTexture(_engine, *skyboxBundle, false, [](void* userdata) {
image::Ktx1Bundle* bundle = (image::Ktx1Bundle*) userdata;
delete bundle;
}, skyboxBundle);
_skybox =
filament::Skybox::Builder().environment(_skyboxTexture).build(*_engine);
_scene->setSkybox(_skybox);
_resourceLoaderWrapper->free(skyboxBuffer);
}
}
void FilamentViewer::removeSkybox() {
Log("Removing skybox");
if(_skybox) {
_engine->destroy(_skybox);
_engine->destroy(_skyboxTexture);
_skybox = nullptr;
_skyboxTexture = nullptr;
}
_scene->setSkybox(nullptr);
}
void FilamentViewer::removeIbl() {
if(_indirectLight) {
_engine->destroy(_indirectLight);
_engine->destroy(_iblTexture);
_indirectLight = nullptr;
_iblTexture = nullptr;
}
_scene->setIndirectLight(nullptr);
}
void FilamentViewer::loadIbl(const char *const iblPath, float intensity) {
removeIbl();
if (iblPath) {
Log("Loading IBL from %s", iblPath);
// Load IBL.
ResourceBuffer iblBuffer = _resourceLoaderWrapper->load(iblPath);
if(iblBuffer.size == 0) {
Log("Error loading IBL, resource could not be loaded.");
return;
}
image::Ktx1Bundle *iblBundle =
new image::Ktx1Bundle(static_cast<const uint8_t *>(iblBuffer.data),
static_cast<uint32_t>(iblBuffer.size));
math::float3 harmonics[9];
iblBundle->getSphericalHarmonics(harmonics);
_iblTexture =
ktxreader::Ktx1Reader::createTexture(_engine, *iblBundle, false, [](void* userdata) {
image::Ktx1Bundle* bundle = (image::Ktx1Bundle*) userdata;
delete bundle;
}, iblBundle);
_indirectLight = IndirectLight::Builder()
.reflections(_iblTexture)
.irradiance(3, harmonics)
.intensity(intensity)
.build(*_engine);
_scene->setIndirectLight(_indirectLight);
_resourceLoaderWrapper->free(iblBuffer);
Log("Skybox/IBL load complete.");
}
}
double _elapsed = 0;
int _frameCount = 0;
void FilamentViewer::render(uint64_t frameTimeInNanos) {
if (!_view || !_mainCamera || !_swapChain) {
Log("Not ready for rendering");
return;
}
if(_frameCount == 60) {
// Log("1 sec average for asset animation update %f", _elapsed / 60);
_elapsed = 0;
_frameCount = 0;
}
Timer tmr;
_assetManager->updateAnimations();
_elapsed += tmr.elapsed();
_frameCount++;
// Render the scene, unless the renderer wants to skip the frame.
if (_renderer->beginFrame(_swapChain, frameTimeInNanos)) {
_renderer->render(_view);
_renderer->endFrame();
} else {
// skipped frame
}
}
void FilamentViewer::updateViewportAndCameraProjection(
int width, int height, float contentScaleFactor) {
if (!_view || !_mainCamera) {
Log("Skipping camera update, no view or camrea");
return;
}
const uint32_t _width = width * contentScaleFactor;
const uint32_t _height = height * contentScaleFactor;
_view->setViewport({0, 0, _width, _height});
const double aspect = (double)width / height;
Camera& cam =_view->getCamera();
cam.setLensProjection(_cameraFocalLength, 1.0f, kNearPlane,
kFarPlane);
cam.setScaling({1.0 / aspect, 1.0});
Log("Set viewport to width: %d height: %d aspect %f scaleFactor : %f", width, height, aspect,
contentScaleFactor);
}
void FilamentViewer::setCameraPosition(float x, float y, float z) {
Camera& cam =_view->getCamera();
_cameraPosition = math::mat4f::translation(math::float3(x,y,z));
cam.setModelMatrix(_cameraPosition * _cameraRotation);
}
void FilamentViewer::setCameraRotation(float rads, float x, float y, float z) {
Camera& cam =_view->getCamera();
_cameraRotation = math::mat4f::rotation(rads, math::float3(x,y,z));
cam.setModelMatrix(_cameraPosition * _cameraRotation);
}
void FilamentViewer::setCameraModelMatrix(const float* const matrix) {
Camera& cam =_view->getCamera();
mat4 modelMatrix(
matrix[0],
matrix[1],
matrix[2],
matrix[3],
matrix[4],
matrix[5],
matrix[6],
matrix[7],
matrix[8],
matrix[9],
matrix[10],
matrix[11],
matrix[12],
matrix[13],
matrix[14],
matrix[15]
);
cam.setModelMatrix(modelMatrix);
}
void FilamentViewer::grabBegin(float x, float y, bool pan) {
if (!_view || !_mainCamera || !_swapChain) {
Log("View not ready, ignoring grab");
return;
}
_panning = pan;
_startX = x;
_startY = y;
}
void FilamentViewer::grabUpdate(float x, float y) {
if (!_view || !_swapChain) {
Log("View not ready, ignoring grab");
return;
}
Camera& cam =_view->getCamera();
auto eye = cam.getPosition();
auto target = eye + cam.getForwardVector();
auto upward = cam.getUpVector();
Viewport const& vp = _view->getViewport();
if(_panning) {
auto trans = cam.getModelMatrix() * mat4::translation(math::float3 { 10 * (x - _startX) / vp.width, 10 * (y - _startY) / vp.height, 0.0f });
cam.setModelMatrix(trans);
} else {
auto trans = cam.getModelMatrix() * mat4::rotation(
0.01,
// math::float3 { 0.0f, 1.0f, 0.0f });
math::float3 { (y - _startY) / vp.height, (x - _startX) / vp.width, 0.0f });
cam.setModelMatrix(trans);
}
_startX = x;
_startY = y;
}
void FilamentViewer::grabEnd() {
if (!_view || !_mainCamera || !_swapChain) {
Log("View not ready, ignoring grab");
return;
}
}
void FilamentViewer::scrollBegin() {
// noop
}
void FilamentViewer::scrollUpdate(float x, float y, float delta) {
Camera& cam =_view->getCamera();
Viewport const& vp = _view->getViewport();
auto trans = cam.getModelMatrix() * mat4::translation(math::float3 {0.0f, 0.0f, delta });
cam.setModelMatrix(trans);
}
void FilamentViewer::scrollEnd() {
}
} // namespace polyvox


@@ -0,0 +1,386 @@
#include "ResourceBuffer.hpp"
#include "FilamentViewer.hpp"
#include "filament/LightManager.h"
#include "Log.hpp"
#include "ThreadPool.hpp"
#include <thread>
#include <functional>
using namespace polyvox;
#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default")))
extern "C" {
#include "PolyvoxFilamentApi.h"
FLUTTER_PLUGIN_EXPORT const void* create_filament_viewer(const void* context, const ResourceLoaderWrapper* const loader) {
return (void*) new FilamentViewer(context, loader);
}
FLUTTER_PLUGIN_EXPORT ResourceLoaderWrapper* make_resource_loader(LoadResourceFromOwner loadFn, FreeResourceFromOwner freeFn, void* const owner) {
return new ResourceLoaderWrapper(loadFn, freeFn, owner);
}
FLUTTER_PLUGIN_EXPORT void create_render_target(const void* const viewer, intptr_t textureId, uint32_t width, uint32_t height) {
((FilamentViewer*)viewer)->createRenderTarget(textureId, width, height);
}
FLUTTER_PLUGIN_EXPORT void delete_filament_viewer(const void* const viewer) {
delete((FilamentViewer*)viewer);
}
FLUTTER_PLUGIN_EXPORT void set_background_color(const void* const viewer, const float r, const float g, const float b, const float a) {
((FilamentViewer*)viewer)->setBackgroundColor(r, g, b, a);
}
FLUTTER_PLUGIN_EXPORT void clear_background_image(const void* const viewer) {
((FilamentViewer*)viewer)->clearBackgroundImage();
}
FLUTTER_PLUGIN_EXPORT void set_background_image(const void* const viewer, const char* path) {
((FilamentViewer*)viewer)->setBackgroundImage(path);
}
FLUTTER_PLUGIN_EXPORT void set_background_image_position(const void* const viewer, float x, float y, bool clamp) {
((FilamentViewer*)viewer)->setBackgroundImagePosition(x, y, clamp);
}
FLUTTER_PLUGIN_EXPORT void set_tone_mapping(const void* const viewer, int toneMapping) {
((FilamentViewer*)viewer)->setToneMapping((ToneMapping)toneMapping);
}
FLUTTER_PLUGIN_EXPORT void set_bloom(const void* const viewer, float strength) {
Log("Setting bloom to %f", strength);
((FilamentViewer*)viewer)->setBloom(strength);
}
FLUTTER_PLUGIN_EXPORT void load_skybox(const void* const viewer, const char* skyboxPath) {
((FilamentViewer*)viewer)->loadSkybox(skyboxPath);
}
FLUTTER_PLUGIN_EXPORT void load_ibl(const void* const viewer, const char* iblPath, float intensity) {
((FilamentViewer*)viewer)->loadIbl(iblPath, intensity);
}
FLUTTER_PLUGIN_EXPORT void remove_skybox(const void* const viewer) {
((FilamentViewer*)viewer)->removeSkybox();
}
FLUTTER_PLUGIN_EXPORT void remove_ibl(const void* const viewer) {
((FilamentViewer*)viewer)->removeIbl();
}
FLUTTER_PLUGIN_EXPORT EntityId add_light(const void* const viewer, uint8_t type, float colour, float intensity, float posX, float posY, float posZ, float dirX, float dirY, float dirZ, bool shadows) {
return ((FilamentViewer*)viewer)->addLight((LightManager::Type)type, colour, intensity, posX, posY, posZ, dirX, dirY, dirZ, shadows);
}
FLUTTER_PLUGIN_EXPORT void remove_light(const void* const viewer, int32_t entityId) {
((FilamentViewer*)viewer)->removeLight(entityId);
}
FLUTTER_PLUGIN_EXPORT void clear_lights(const void* const viewer) {
((FilamentViewer*)viewer)->clearLights();
}
FLUTTER_PLUGIN_EXPORT EntityId load_glb(void* assetManager, const char* assetPath, bool unlit) {
return ((AssetManager*)assetManager)->loadGlb(assetPath, unlit);
}
FLUTTER_PLUGIN_EXPORT EntityId load_gltf(void* assetManager, const char* assetPath, const char* relativePath) {
return ((AssetManager*)assetManager)->loadGltf(assetPath, relativePath);
}
FLUTTER_PLUGIN_EXPORT bool set_camera(const void* const viewer, EntityId asset, const char* nodeName) {
return ((FilamentViewer*)viewer)->setCamera(asset, nodeName);
}
FLUTTER_PLUGIN_EXPORT void set_camera_focus_distance(const void* const viewer, float distance) {
((FilamentViewer*)viewer)->setCameraFocusDistance(distance);
}
FLUTTER_PLUGIN_EXPORT void set_camera_exposure(const void* const viewer, float aperture, float shutterSpeed, float sensitivity) {
((FilamentViewer*)viewer)->setCameraExposure(aperture, shutterSpeed, sensitivity);
}
FLUTTER_PLUGIN_EXPORT void set_camera_position(const void* const viewer, float x, float y, float z) {
((FilamentViewer*)viewer)->setCameraPosition(x, y, z);
}
FLUTTER_PLUGIN_EXPORT void set_camera_rotation(const void* const viewer, float rads, float x, float y, float z) {
((FilamentViewer*)viewer)->setCameraRotation(rads, x, y, z);
}
FLUTTER_PLUGIN_EXPORT void set_camera_model_matrix(const void* const viewer, const float* const matrix) {
((FilamentViewer*)viewer)->setCameraModelMatrix(matrix);
}
FLUTTER_PLUGIN_EXPORT void set_camera_focal_length(const void* const viewer, float focalLength) {
((FilamentViewer*)viewer)->setCameraFocalLength(focalLength);
}
FLUTTER_PLUGIN_EXPORT void render(
const void* const viewer,
uint64_t frameTimeInNanos
) {
((FilamentViewer*)viewer)->render(frameTimeInNanos);
}
FLUTTER_PLUGIN_EXPORT void set_frame_interval(
const void* const viewer,
float frameInterval
) {
((FilamentViewer*)viewer)->setFrameInterval(frameInterval);
}
FLUTTER_PLUGIN_EXPORT void destroy_swap_chain(const void* const viewer) {
((FilamentViewer*)viewer)->destroySwapChain();
}
FLUTTER_PLUGIN_EXPORT void create_swap_chain(const void* const viewer, const void* const surface=nullptr, uint32_t width=0, uint32_t height=0) {
((FilamentViewer*)viewer)->createSwapChain(surface, width, height);
}
FLUTTER_PLUGIN_EXPORT void update_viewport_and_camera_projection(const void* const viewer, uint32_t width, uint32_t height, float scaleFactor) {
return ((FilamentViewer*)viewer)->updateViewportAndCameraProjection(width, height, scaleFactor);
}
FLUTTER_PLUGIN_EXPORT void scroll_update(const void* const viewer, float x, float y, float delta) {
((FilamentViewer*)viewer)->scrollUpdate(x, y, delta);
}
FLUTTER_PLUGIN_EXPORT void scroll_begin(const void* const viewer) {
((FilamentViewer*)viewer)->scrollBegin();
}
FLUTTER_PLUGIN_EXPORT void scroll_end(const void* const viewer) {
((FilamentViewer*)viewer)->scrollEnd();
}
FLUTTER_PLUGIN_EXPORT void grab_begin(const void* const viewer, float x, float y, bool pan) {
((FilamentViewer*)viewer)->grabBegin(x, y, pan);
}
FLUTTER_PLUGIN_EXPORT void grab_update(const void* const viewer, float x, float y) {
((FilamentViewer*)viewer)->grabUpdate(x, y);
}
FLUTTER_PLUGIN_EXPORT void grab_end(const void* const viewer) {
((FilamentViewer*)viewer)->grabEnd();
}
FLUTTER_PLUGIN_EXPORT void* get_asset_manager(const void* const viewer) {
return (void*)((FilamentViewer*)viewer)->getAssetManager();
}
FLUTTER_PLUGIN_EXPORT void apply_weights(
void* assetManager,
EntityId asset,
const char* const entityName,
float* const weights,
int count) {
// ((AssetManager*)assetManager)->setMorphTargetWeights(asset, entityName, weights, count);
}
FLUTTER_PLUGIN_EXPORT void set_morph_target_weights(
void* assetManager,
EntityId asset,
const char* const entityName,
const float* const weights,
const int numWeights
) {
return ((AssetManager*)assetManager)->setMorphTargetWeights(
asset,
entityName,
weights,
numWeights
);
}
FLUTTER_PLUGIN_EXPORT bool set_morph_animation(
void* assetManager,
EntityId asset,
const char* const entityName,
const float* const morphData,
const int* const morphIndices,
int numMorphTargets,
int numFrames,
float frameLengthInMs) {
return ((AssetManager*)assetManager)->setMorphAnimationBuffer(
asset,
entityName,
morphData,
morphIndices,
numMorphTargets,
numFrames,
frameLengthInMs
);
}
FLUTTER_PLUGIN_EXPORT void set_bone_animation(
void* assetManager,
EntityId asset,
const float* const frameData,
int numFrames,
int numBones,
const char** const boneNames,
const char** const meshNames,
int numMeshTargets,
float frameLengthInMs) {
((AssetManager*)assetManager)->setBoneAnimationBuffer(
asset,
frameData,
numFrames,
numBones,
boneNames,
meshNames,
numMeshTargets,
frameLengthInMs
);
}
// void set_bone_transform(
// EntityId asset,
// const char* boneName,
// const char* entityName,
// float transX,
// float transY,
// float transZ,
// float quatX,
// float quatY,
// float quatZ,
// float quatW
// ) {
// ((AssetManager*)assetManager)->setBoneTransform(
// boneName,
// entityName,
// transX,
// transY,
// transZ,
// quatX,
// quatY,
// quatZ,
// quatW,
// false
// );
// }
FLUTTER_PLUGIN_EXPORT void play_animation(
void* assetManager,
EntityId asset,
int index,
bool loop,
bool reverse,
bool replaceActive,
float crossfade) {
((AssetManager*)assetManager)->playAnimation(asset, index, loop, reverse, replaceActive, crossfade);
}
FLUTTER_PLUGIN_EXPORT void set_animation_frame(
void* assetManager,
EntityId asset,
int animationIndex,
int animationFrame) {
// ((AssetManager*)assetManager)->setAnimationFrame(asset, animationIndex, animationFrame);
}
FLUTTER_PLUGIN_EXPORT float get_animation_duration(void* assetManager, EntityId asset, int animationIndex) {
return ((AssetManager*)assetManager)->getAnimationDuration(asset, animationIndex);
}
FLUTTER_PLUGIN_EXPORT int get_animation_count(
    void* assetManager,
    EntityId asset) {
  auto names = ((AssetManager*)assetManager)->getAnimationNames(asset);
  return (int) names->size();
}
FLUTTER_PLUGIN_EXPORT void get_animation_name(
    void* assetManager,
    EntityId asset,
    char* const outPtr,
    int index
) {
  auto names = ((AssetManager*)assetManager)->getAnimationNames(asset);
  std::string name = names->at(index);
  strcpy(outPtr, name.c_str());
}
FLUTTER_PLUGIN_EXPORT int get_morph_target_name_count(void* assetManager, EntityId asset, const char* meshName) {
  std::unique_ptr<std::vector<std::string>> names = ((AssetManager*)assetManager)->getMorphTargetNames(asset, meshName);
  return (int) names->size();
}
FLUTTER_PLUGIN_EXPORT void get_morph_target_name(void* assetManager, EntityId asset, const char* meshName, char* const outPtr, int index) {
  std::unique_ptr<std::vector<std::string>> names = ((AssetManager*)assetManager)->getMorphTargetNames(asset, meshName);
  std::string name = names->at(index);
  strcpy(outPtr, name.c_str());
}
FLUTTER_PLUGIN_EXPORT void remove_asset(const void* const viewer, EntityId asset) {
((FilamentViewer*)viewer)->removeAsset(asset);
}
FLUTTER_PLUGIN_EXPORT void clear_assets(const void* const viewer) {
((FilamentViewer*)viewer)->clearAssets();
}
FLUTTER_PLUGIN_EXPORT void load_texture(void* assetManager, EntityId asset, const char* assetPath, int renderableIndex) {
// ((AssetManager*)assetManager)->loadTexture(assetPath, renderableIndex);
}
FLUTTER_PLUGIN_EXPORT void set_texture(void* assetManager, EntityId asset) {
// ((AssetManager*)assetManager)->setTexture();
}
FLUTTER_PLUGIN_EXPORT bool set_material_color(void* assetManager, EntityId asset, const char* meshName, int materialIndex, const float r, const float g, const float b, const float a) {
return ((AssetManager*)assetManager)->setMaterialColor(asset, meshName, materialIndex, r, g, b, a);
}
FLUTTER_PLUGIN_EXPORT void transform_to_unit_cube(void* assetManager, EntityId asset) {
((AssetManager*)assetManager)->transformToUnitCube(asset);
}
FLUTTER_PLUGIN_EXPORT void set_position(void* assetManager, EntityId asset, float x, float y, float z) {
((AssetManager*)assetManager)->setPosition(asset, x, y, z);
}
FLUTTER_PLUGIN_EXPORT void set_rotation(void* assetManager, EntityId asset, float rads, float x, float y, float z) {
((AssetManager*)assetManager)->setRotation(asset, rads, x, y, z);
}
FLUTTER_PLUGIN_EXPORT void set_scale(void* assetManager, EntityId asset, float scale) {
((AssetManager*)assetManager)->setScale(asset, scale);
}
FLUTTER_PLUGIN_EXPORT void stop_animation(void* assetManager, EntityId asset, int index) {
((AssetManager*)assetManager)->stopAnimation(asset, index);
}
FLUTTER_PLUGIN_EXPORT int hide_mesh(void* assetManager, EntityId asset, const char* meshName) {
return ((AssetManager*)assetManager)->hide(asset, meshName);
}
FLUTTER_PLUGIN_EXPORT int reveal_mesh(void* assetManager, EntityId asset, const char* meshName) {
return ((AssetManager*)assetManager)->reveal(asset, meshName);
}
FLUTTER_PLUGIN_EXPORT void ios_dummy() {
Log("Dummy called");
}
}


@@ -0,0 +1,83 @@
#include <streambuf>
#include <functional>
#include <cassert>
#include <cstring>
using namespace std;
namespace polyvox {
class StreamBufferAdapter : public std::streambuf
{
public:
StreamBufferAdapter(const char *begin, const char *end);
~StreamBufferAdapter() {
}
streamsize size();
private:
int_type uflow() override;
int_type underflow() override;
int_type pbackfail(int_type ch) override;
streampos seekoff(streamoff off, ios_base::seekdir way, ios_base::openmode which) override;
streampos seekpos(streampos sp, ios_base::openmode which) override;
std::streamsize showmanyc() override;
};
StreamBufferAdapter::StreamBufferAdapter(const char *begin, const char *end)
{
setg((char*)begin, (char*)begin, (char*)end);
}
streamsize StreamBufferAdapter::size() {
return egptr() - eback();
}
streambuf::int_type StreamBufferAdapter::underflow()
{
    if (gptr() == egptr()) {
        return traits_type::eof();
    }
    // use to_int_type so bytes >= 0x80 are not mistaken for EOF
    return traits_type::to_int_type(*gptr());
}
streambuf::int_type StreamBufferAdapter::uflow()
{
    if (gptr() == egptr()) {
        return traits_type::eof();
    }
    // return the character being consumed, then advance the get pointer
    int_type c = traits_type::to_int_type(*gptr());
    gbump(1);
    return c;
}
streambuf::int_type StreamBufferAdapter::pbackfail(int_type ch)
{
    if (gptr() == eback() || (ch != traits_type::eof() && ch != gptr()[-1]))
        return traits_type::eof();
    // step back exactly one character
    gbump(-1);
    return traits_type::to_int_type(*gptr());
}
streamsize StreamBufferAdapter::showmanyc()
{
return egptr() - gptr();
}
streampos StreamBufferAdapter::seekoff(streamoff off, ios_base::seekdir way, ios_base::openmode which = ios_base::in) {
    if(way == ios_base::beg) {
        setg(eback(), eback() + off, egptr());
    } else if(way == ios_base::cur) {
        gbump((int) off);
    } else {
        // ios_base::end: off is an offset relative to the end of the buffer (usually <= 0)
        setg(eback(), egptr() + off, egptr());
    }
    return gptr() - eback();
}
streampos StreamBufferAdapter::seekpos(streampos sp, ios_base::openmode which = ios_base::in) {
return seekoff(sp - pos_type(off_type(0)), std::ios_base::beg, which);
}
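/*
 * A minimal usage sketch (assumption: the underlying buffer outlives the stream).
 * StreamBufferAdapter wraps an in-memory buffer so it can be consumed through the
 * std::istream interface, e.g. by image decoders:
 *
 *   const char* begin = (const char*) rb.data;          // rb: a loaded ResourceBuffer
 *   StreamBufferAdapter sb(begin, begin + rb.size);
 *   std::istream is(&sb);
 *   // pass `is` to any API that reads from a std::istream
 */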
}

30
macos/src/TimeIt.cpp Normal file

@@ -0,0 +1,30 @@
#include "TimeIt.hpp"
#if __cplusplus <= 199711L
void Timer::reset()
{
clock_gettime(CLOCK_REALTIME, &beg_);
}
double Timer::elapsed()
{
clock_gettime(CLOCK_REALTIME, &end_);
return end_.tv_sec - beg_.tv_sec +
(end_.tv_nsec - beg_.tv_nsec) / 1000000000.;
}
#else
void Timer::reset()
{
beg_ = clock_::now();
}
double Timer::elapsed()
{
return std::chrono::duration_cast<second_>
(clock_::now() - beg_).count();
}
#endif
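/*
 * A minimal usage sketch:
 *
 *   Timer t;
 *   t.reset();
 *   doWork();                       // hypothetical workload
 *   double seconds = t.elapsed();   // seconds since reset()
 */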


@@ -0,0 +1,259 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <ktxreader/Ktx1Reader.h>
#include <utils/Log.h>
#include <filament/Engine.h>
#include <filament/Texture.h>
namespace ktxreader {
namespace Ktx1Reader {
Texture* createTexture(Engine* engine, const Ktx1Bundle& ktx, bool srgb,
Callback callback, void* userdata) {
using Sampler = Texture::Sampler;
const auto& ktxinfo = ktx.getInfo();
const uint32_t nmips = ktx.getNumMipLevels();
const auto cdatatype = toCompressedPixelDataType(ktxinfo);
const auto datatype = toPixelDataType(ktxinfo);
const auto dataformat = toPixelDataFormat(ktxinfo);
auto texformat = toTextureFormat(ktxinfo);
#ifndef NDEBUG
if (srgb && !isSrgbTextureFormat(texformat)) {
utils::slog.w << "Requested sRGB format but KTX contains a linear format. "
<< utils::io::endl;
} else if (!srgb && isSrgbTextureFormat(texformat)) {
utils::slog.w << "Requested linear format but KTX contains a sRGB format. "
<< utils::io::endl;
}
#endif
Texture* texture = Texture::Builder()
.width(ktxinfo.pixelWidth)
.height(ktxinfo.pixelHeight)
.levels(static_cast<uint8_t>(nmips))
.sampler(ktx.isCubemap() ? Sampler::SAMPLER_CUBEMAP : Sampler::SAMPLER_2D)
.format(texformat)
.build(*engine);
struct Userdata {
uint32_t remainingBuffers;
Callback callback;
void* userdata;
};
Userdata* cbuser = new Userdata({nmips, callback, userdata});
PixelBufferDescriptor::Callback cb = [](void*, size_t, void* cbuserptr) {
Userdata* cbuser = (Userdata*) cbuserptr;
if (--cbuser->remainingBuffers == 0) {
if (cbuser->callback) {
cbuser->callback(cbuser->userdata);
}
delete cbuser;
}
};
uint8_t* data;
uint32_t size;
if (isCompressed(ktxinfo)) {
if (ktx.isCubemap()) {
for (uint32_t level = 0; level < nmips; ++level) {
ktx.getBlob({level, 0, 0}, &data, &size);
PixelBufferDescriptor pbd(data, size * 6, cdatatype, size, cb, cbuser);
texture->setImage(*engine, level, std::move(pbd), Texture::FaceOffsets(size));
}
return texture;
}
for (uint32_t level = 0; level < nmips; ++level) {
ktx.getBlob({level, 0, 0}, &data, &size);
PixelBufferDescriptor pbd(data, size, cdatatype, size, cb, cbuser);
texture->setImage(*engine, level, std::move(pbd));
}
return texture;
}
if (ktx.isCubemap()) {
for (uint32_t level = 0; level < nmips; ++level) {
ktx.getBlob({level, 0, 0}, &data, &size);
PixelBufferDescriptor pbd(data, size * 6, dataformat, datatype, cb, cbuser);
texture->setImage(*engine, level, std::move(pbd), Texture::FaceOffsets(size));
}
return texture;
}
for (uint32_t level = 0; level < nmips; ++level) {
ktx.getBlob({level, 0, 0}, &data, &size);
PixelBufferDescriptor pbd(data, size, dataformat, datatype, cb, cbuser);
texture->setImage(*engine, level, std::move(pbd));
}
return texture;
}
Texture* createTexture(Engine* engine, Ktx1Bundle* ktx, bool srgb) {
auto freeKtx = [] (void* userdata) {
Ktx1Bundle* ktx = (Ktx1Bundle*) userdata;
delete ktx;
};
return createTexture(engine, *ktx, srgb, freeKtx, ktx);
}
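/*
 * A minimal usage sketch (assumptions: `data`/`size` hold the raw contents of a .ktx
 * file already in memory, and `engine` is a valid filament::Engine*). The Ktx1Bundle
 * overload above takes ownership of the bundle and frees it once every mip level has
 * been uploaded:
 *
 *   auto* bundle = new image::Ktx1Bundle((const uint8_t*) data, (uint32_t) size);
 *   filament::Texture* tex = ktxreader::Ktx1Reader::createTexture(engine, bundle, srgb);
 */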
CompressedPixelDataType toCompressedPixelDataType(const KtxInfo& info) {
return toCompressedFilamentEnum<CompressedPixelDataType>(info.glInternalFormat);
}
PixelDataType toPixelDataType(const KtxInfo& info) {
switch (info.glType) {
case Ktx1Bundle::UNSIGNED_BYTE: return PixelDataType::UBYTE;
case Ktx1Bundle::UNSIGNED_SHORT: return PixelDataType::USHORT;
case Ktx1Bundle::HALF_FLOAT: return PixelDataType::HALF;
case Ktx1Bundle::FLOAT: return PixelDataType::FLOAT;
case Ktx1Bundle::R11F_G11F_B10F: return PixelDataType::UINT_10F_11F_11F_REV;
}
return (PixelDataType) 0xff;
}
PixelDataFormat toPixelDataFormat(const KtxInfo& info) {
switch (info.glFormat) {
case Ktx1Bundle::LUMINANCE:
case Ktx1Bundle::RED: return PixelDataFormat::R;
case Ktx1Bundle::RG: return PixelDataFormat::RG;
case Ktx1Bundle::RGB: return PixelDataFormat::RGB;
case Ktx1Bundle::RGBA: return PixelDataFormat::RGBA;
// glFormat should NOT be a sized format according to the spec
// however cmgen was generating incorrect files until after Filament 1.8.0
// so we keep this line here to preserve compatibility with older assets
case Ktx1Bundle::R11F_G11F_B10F: return PixelDataFormat::RGB;
}
return (PixelDataFormat) 0xff;
}
bool isCompressed(const KtxInfo& info) {
return info.glFormat == 0;
}
bool isSrgbTextureFormat(TextureFormat format) {
switch(format) {
// Non-compressed
case Texture::InternalFormat::RGB8:
case Texture::InternalFormat::RGBA8:
return false;
// ASTC
case Texture::InternalFormat::RGBA_ASTC_4x4:
case Texture::InternalFormat::RGBA_ASTC_5x4:
case Texture::InternalFormat::RGBA_ASTC_5x5:
case Texture::InternalFormat::RGBA_ASTC_6x5:
case Texture::InternalFormat::RGBA_ASTC_6x6:
case Texture::InternalFormat::RGBA_ASTC_8x5:
case Texture::InternalFormat::RGBA_ASTC_8x6:
case Texture::InternalFormat::RGBA_ASTC_8x8:
case Texture::InternalFormat::RGBA_ASTC_10x5:
case Texture::InternalFormat::RGBA_ASTC_10x6:
case Texture::InternalFormat::RGBA_ASTC_10x8:
case Texture::InternalFormat::RGBA_ASTC_10x10:
case Texture::InternalFormat::RGBA_ASTC_12x10:
case Texture::InternalFormat::RGBA_ASTC_12x12:
return false;
// ETC2
case Texture::InternalFormat::ETC2_RGB8:
case Texture::InternalFormat::ETC2_RGB8_A1:
case Texture::InternalFormat::ETC2_EAC_RGBA8:
return false;
// DXT
case Texture::InternalFormat::DXT1_RGB:
case Texture::InternalFormat::DXT1_RGBA:
case Texture::InternalFormat::DXT3_RGBA:
case Texture::InternalFormat::DXT5_RGBA:
return false;
default:
return true;
}
}
TextureFormat toTextureFormat(const KtxInfo& info) {
switch (info.glInternalFormat) {
case Ktx1Bundle::RED: return TextureFormat::R8;
case Ktx1Bundle::RG: return TextureFormat::RG8;
case Ktx1Bundle::RGB: return TextureFormat::RGB8;
case Ktx1Bundle::RGBA: return TextureFormat::RGBA8;
case Ktx1Bundle::LUMINANCE: return TextureFormat::R8;
case Ktx1Bundle::LUMINANCE_ALPHA: return TextureFormat::RG8;
case Ktx1Bundle::R8: return TextureFormat::R8;
case Ktx1Bundle::R8_SNORM: return TextureFormat::R8_SNORM;
case Ktx1Bundle::R8UI: return TextureFormat::R8UI;
case Ktx1Bundle::R8I: return TextureFormat::R8I;
case Ktx1Bundle::STENCIL_INDEX8: return TextureFormat::STENCIL8;
case Ktx1Bundle::R16F: return TextureFormat::R16F;
case Ktx1Bundle::R16UI: return TextureFormat::R16UI;
case Ktx1Bundle::R16I: return TextureFormat::R16I;
case Ktx1Bundle::RG8: return TextureFormat::RG8;
case Ktx1Bundle::RG8_SNORM: return TextureFormat::RG8_SNORM;
case Ktx1Bundle::RG8UI: return TextureFormat::RG8UI;
case Ktx1Bundle::RG8I: return TextureFormat::RG8I;
case Ktx1Bundle::RGB565: return TextureFormat::RGB565;
case Ktx1Bundle::RGB9_E5: return TextureFormat::RGB9_E5;
case Ktx1Bundle::RGB5_A1: return TextureFormat::RGB5_A1;
case Ktx1Bundle::RGBA4: return TextureFormat::RGBA4;
case Ktx1Bundle::DEPTH_COMPONENT16: return TextureFormat::DEPTH16;
case Ktx1Bundle::RGB8: return TextureFormat::RGB8;
case Ktx1Bundle::SRGB8: return TextureFormat::SRGB8;
case Ktx1Bundle::RGB8_SNORM: return TextureFormat::RGB8_SNORM;
case Ktx1Bundle::RGB8UI: return TextureFormat::RGB8UI;
case Ktx1Bundle::RGB8I: return TextureFormat::RGB8I;
case Ktx1Bundle::R32F: return TextureFormat::R32F;
case Ktx1Bundle::R32UI: return TextureFormat::R32UI;
case Ktx1Bundle::R32I: return TextureFormat::R32I;
case Ktx1Bundle::RG16F: return TextureFormat::RG16F;
case Ktx1Bundle::RG16UI: return TextureFormat::RG16UI;
case Ktx1Bundle::RG16I: return TextureFormat::RG16I;
case Ktx1Bundle::R11F_G11F_B10F: return TextureFormat::R11F_G11F_B10F;
case Ktx1Bundle::RGBA8: return TextureFormat::RGBA8;
case Ktx1Bundle::SRGB8_ALPHA8: return TextureFormat::SRGB8_A8;
case Ktx1Bundle::RGBA8_SNORM: return TextureFormat::RGBA8_SNORM;
case Ktx1Bundle::RGB10_A2: return TextureFormat::RGB10_A2;
case Ktx1Bundle::RGBA8UI: return TextureFormat::RGBA8UI;
case Ktx1Bundle::RGBA8I: return TextureFormat::RGBA8I;
case Ktx1Bundle::DEPTH24_STENCIL8: return TextureFormat::DEPTH24_STENCIL8;
case Ktx1Bundle::DEPTH32F_STENCIL8: return TextureFormat::DEPTH32F_STENCIL8;
case Ktx1Bundle::RGB16F: return TextureFormat::RGB16F;
case Ktx1Bundle::RGB16UI: return TextureFormat::RGB16UI;
case Ktx1Bundle::RGB16I: return TextureFormat::RGB16I;
case Ktx1Bundle::RG32F: return TextureFormat::RG32F;
case Ktx1Bundle::RG32UI: return TextureFormat::RG32UI;
case Ktx1Bundle::RG32I: return TextureFormat::RG32I;
case Ktx1Bundle::RGBA16F: return TextureFormat::RGBA16F;
case Ktx1Bundle::RGBA16UI: return TextureFormat::RGBA16UI;
case Ktx1Bundle::RGBA16I: return TextureFormat::RGBA16I;
case Ktx1Bundle::RGB32F: return TextureFormat::RGB32F;
case Ktx1Bundle::RGB32UI: return TextureFormat::RGB32UI;
case Ktx1Bundle::RGB32I: return TextureFormat::RGB32I;
case Ktx1Bundle::RGBA32F: return TextureFormat::RGBA32F;
case Ktx1Bundle::RGBA32UI: return TextureFormat::RGBA32UI;
case Ktx1Bundle::RGBA32I: return TextureFormat::RGBA32I;
}
return toCompressedFilamentEnum<TextureFormat>(info.glInternalFormat);
}
} // namespace Ktx1Reader
} // namespace ktxreader


@@ -0,0 +1,426 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <ktxreader/Ktx2Reader.h>
#include <filament/Engine.h>
#include <filament/Texture.h>
#include <utils/Log.h>
#include <atomic>
#include <vector>
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warray-bounds"
#include <basisu_transcoder.h>
#pragma clang diagnostic pop
using namespace basist;
using namespace filament;
using TransferFunction = ktxreader::Ktx2Reader::TransferFunction;
using Result = ktxreader::Ktx2Reader::Result;
using Async = ktxreader::Ktx2Reader::Async;
using Buffer = std::vector<uint8_t>;
namespace {
struct FinalFormatInfo {
const char* name; // <-- for debug purposes only
bool isSupported;
bool isCompressed;
TransferFunction transferFunction;
transcoder_texture_format basisFormat;
Texture::CompressedType compressedPixelDataType;
Texture::Type pixelDataType;
Texture::Format pixelDataFormat;
};
}
// This function returns various information about a Filament internal format, most notably its
// equivalent BasisU enumerant.
//
// Return by value isn't expensive here due to copy elision.
//
// Note that Filament's internal format list mimics the Vulkan format list, which
// embeds transfer function information (i.e. sRGB or not) into the format, whereas
// the basis format list does not.
//
// The following formats are supported by BasisU but are not supported by Filament.
//
// transcoder_texture_format::cTFETC1_RGB
// transcoder_texture_format::cTFATC_RGB
// transcoder_texture_format::cTFATC_RGBA
// transcoder_texture_format::cTFFXT1_RGB
// transcoder_texture_format::cTFPVRTC2_4_RGB
// transcoder_texture_format::cTFPVRTC2_4_RGBA
// transcoder_texture_format::cTFPVRTC1_4_RGB
// transcoder_texture_format::cTFPVRTC1_4_RGBA
// transcoder_texture_format::cTFBC4_R
// transcoder_texture_format::cTFBC5_RG
// transcoder_texture_format::cTFBC7_RGBA (this format would add size bloat to the transcoder)
// transcoder_texture_format::cTFBGR565 (note the blue/red swap)
//
static FinalFormatInfo getFinalFormatInfo(Texture::InternalFormat fmt) {
using tif = Texture::InternalFormat;
using tct = Texture::CompressedType;
using tt = Texture::Type;
using tf = Texture::Format;
using ttf = transcoder_texture_format;
const auto sRGB = TransferFunction::sRGB;
const auto LINEAR = TransferFunction::LINEAR;
switch (fmt) {
case tif::ETC2_EAC_SRGBA8: return {"ETC2_EAC_SRGBA8", true, true, sRGB, ttf::cTFETC2_RGBA, tct::ETC2_EAC_RGBA8};
case tif::ETC2_EAC_RGBA8: return {"ETC2_EAC_RGBA8", true, true, LINEAR, ttf::cTFETC2_RGBA, tct::ETC2_EAC_SRGBA8};
case tif::DXT1_SRGB: return {"DXT1_SRGB", true, true, sRGB, ttf::cTFBC1_RGB, tct::DXT1_RGB};
case tif::DXT1_RGB: return {"DXT1_RGB", true, true, LINEAR, ttf::cTFBC1_RGB, tct::DXT1_SRGB};
case tif::DXT5_SRGBA: return {"DXT5_SRGBA", true, true, sRGB, ttf::cTFBC3_RGBA, tct::DXT5_RGBA};
case tif::DXT5_RGBA: return {"DXT5_RGBA", true, true, LINEAR, ttf::cTFBC3_RGBA, tct::DXT5_SRGBA};
case tif::SRGB8_ALPHA8_ASTC_4x4: return {"SRGB8_ALPHA8_ASTC_4x4", true, true, sRGB, ttf::cTFASTC_4x4_RGBA, tct::RGBA_ASTC_4x4};
case tif::RGBA_ASTC_4x4: return {"RGBA_ASTC_4x4", true, true, LINEAR, ttf::cTFASTC_4x4_RGBA, tct::SRGB8_ALPHA8_ASTC_4x4};
case tif::EAC_R11: return {"EAC_R11", true, true, LINEAR, ttf::cTFETC2_EAC_R11, tct::EAC_R11};
// The following format is useful for normal maps.
// Note that BasisU supports only the unsigned variant.
case tif::EAC_RG11: return {"EAC_RG11", true, true, LINEAR, ttf::cTFETC2_EAC_RG11, tct::EAC_RG11};
// Uncompressed formats.
case tif::SRGB8_A8: return {"SRGB8_A8", true, false, sRGB, ttf::cTFRGBA32, {}, tt::UBYTE, tf::RGBA};
case tif::RGBA8: return {"RGBA8", true, false, LINEAR, ttf::cTFRGBA32, {}, tt::UBYTE, tf::RGBA};
case tif::RGB565: return {"RGB565", true, false, LINEAR, ttf::cTFRGB565, {}, tt::USHORT_565, tf::RGB};
case tif::RGBA4: return {"RGBA4", true, false, LINEAR, ttf::cTFRGBA4444, {}, tt::USHORT, tf::RGBA};
default: return {};
}
}
// In theory we could pass "free" directly into the callback but doing so triggers ASAN warnings.
static void freeCallback(void* buf, size_t, void* userdata) {
free(buf);
}
// This helper is used by both the asynchronous and synchronous API's.
static Result transcodeImageLevel(ktx2_transcoder& transcoder,
ktx2_transcoder_state& transcoderState, Texture::InternalFormat format,
uint32_t levelIndex, Texture::PixelBufferDescriptor** pbd) {
using basisu::texture_format;
assert_invariant(levelIndex < KTX2_MAX_SUPPORTED_LEVEL_COUNT);
const FinalFormatInfo formatInfo = getFinalFormatInfo(format);
const texture_format destFormat = basis_get_basisu_texture_format(formatInfo.basisFormat);
const uint32_t layerIndex = 0;
const uint32_t faceIndex = 0;
const uint32_t decodeFlags = 0;
const uint32_t outputRowPitch = 0;
const uint32_t outputRowCount = 0;
const int channel0 = 0;
const int channel1 = 0;
basist::ktx2_image_level_info levelInfo;
transcoder.get_image_level_info(levelInfo, levelIndex, layerIndex, faceIndex);
if (formatInfo.isCompressed) {
const uint32_t qwordsPerBlock = basisu::get_qwords_per_block(destFormat);
const size_t byteCount = sizeof(uint64_t) * qwordsPerBlock * levelInfo.m_total_blocks;
uint64_t* const blocks = (uint64_t*) malloc(byteCount);
if (!transcoder.transcode_image_level(levelIndex, layerIndex, faceIndex, blocks,
levelInfo.m_total_blocks, formatInfo.basisFormat, decodeFlags,
outputRowPitch, outputRowCount, channel0,
channel1, &transcoderState)) {
return Result::COMPRESSED_TRANSCODE_FAILURE;
}
*pbd = new Texture::PixelBufferDescriptor(blocks,
byteCount, formatInfo.compressedPixelDataType, byteCount, freeCallback);
return Result::SUCCESS;
}
const uint32_t rowCount = levelInfo.m_orig_height;
const uint32_t bytesPerPix = basis_get_bytes_per_block_or_pixel(formatInfo.basisFormat);
const size_t byteCount = bytesPerPix * levelInfo.m_orig_width * rowCount;
uint64_t* const rows = (uint64_t*) malloc(byteCount);
if (!transcoder.transcode_image_level(levelIndex, layerIndex, faceIndex, rows,
byteCount / bytesPerPix, formatInfo.basisFormat, decodeFlags,
outputRowPitch, outputRowCount, channel0, channel1, &transcoderState)) {
return Result::UNCOMPRESSED_TRANSCODE_FAILURE;
}
*pbd = new Texture::PixelBufferDescriptor(rows, byteCount,
formatInfo.pixelDataFormat, formatInfo.pixelDataType, freeCallback);
return Result::SUCCESS;
}
namespace ktxreader {
class FAsync : public Async {
public:
FAsync(Texture* texture, Engine& engine, ktx2_transcoder* transcoder, Buffer&& buf) :
mTexture(texture), mEngine(engine), mTranscoder(transcoder),
mSourceBuffer(std::move(buf)) {}
Texture* getTexture() const noexcept { return mTexture; }
Result doTranscoding();
void uploadImages();
private:
using TranscoderResult = std::atomic<Texture::PixelBufferDescriptor*>;
// After each level is transcoded, the results are stashed in the following array until the
// foreground thread calls uploadImages(). Each slot in the array corresponds to a single
// miplevel in the texture.
TranscoderResult mTranscoderResults[KTX2_MAX_SUPPORTED_LEVEL_COUNT] = {};
Texture* const mTexture;
Engine& mEngine;
// We do not share the BasisU transcoder between Async objects. The BasisU transcoder
// allows parallelization at "level" granularity, but does not permit parallelization at
// "texture" granularity. i.e. the transcode_image_level() method is thread-safe but the
// start_transcoding() method is not.
std::unique_ptr<ktx2_transcoder> const mTranscoder;
// Storage for the content of the KTX2 file.
Buffer mSourceBuffer;
};
Ktx2Reader::Ktx2Reader(Engine& engine, bool quiet) :
mEngine(engine),
mQuiet(quiet),
mTranscoder(new ktx2_transcoder()) {
mRequestedFormats.reserve((size_t) transcoder_texture_format::cTFTotalTextureFormats);
basisu_transcoder_init();
}
Ktx2Reader::~Ktx2Reader() {
delete mTranscoder;
}
Result Ktx2Reader::requestFormat(Texture::InternalFormat format) noexcept {
if (!getFinalFormatInfo(format).isSupported) {
return Result::FORMAT_UNSUPPORTED;
}
for (Texture::InternalFormat fmt : mRequestedFormats) {
if (fmt == format) {
return Result::FORMAT_ALREADY_REQUESTED;
}
}
mRequestedFormats.push_back(format);
return Result::SUCCESS;
}
void Ktx2Reader::unrequestFormat(Texture::InternalFormat format) noexcept {
for (auto iter = mRequestedFormats.begin(); iter != mRequestedFormats.end(); ++iter) {
if (*iter == format) {
mRequestedFormats.erase(iter);
return;
}
}
}
Texture* Ktx2Reader::load(const void* data, size_t size, TransferFunction transfer) {
Texture* texture = createTexture(mTranscoder, data, size, transfer);
if (texture == nullptr) {
return nullptr;
}
if (!mTranscoder->start_transcoding()) {
mEngine.destroy(texture);
if (!mQuiet) {
utils::slog.e << "BasisU start_transcoding failed." << utils::io::endl;
}
return nullptr;
}
ktx2_transcoder_state basisThreadState;
basisThreadState.clear();
for (uint32_t levelIndex = 0, n = mTranscoder->get_levels(); levelIndex < n; levelIndex++) {
Texture::PixelBufferDescriptor* pbd;
Result result = transcodeImageLevel(*mTranscoder, basisThreadState, texture->getFormat(),
levelIndex, &pbd);
if (UTILS_UNLIKELY(result != Result::SUCCESS)) {
mEngine.destroy(texture);
if (!mQuiet) {
utils::slog.e << "Failed to transcode level " << levelIndex << utils::io::endl;
}
return nullptr;
}
texture->setImage(mEngine, levelIndex, std::move(*pbd));
}
return texture;
}
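/*
 * A minimal usage sketch of the synchronous path (assumptions: `data`/`size` hold a
 * BasisU-encoded KTX2 file, and at least one requested format is supported by the
 * device). Requested formats are tried in the order they were registered:
 *
 *   ktxreader::Ktx2Reader reader(*engine, false);        // engine: filament::Engine*
 *   reader.requestFormat(Texture::InternalFormat::ETC2_EAC_SRGBA8);
 *   reader.requestFormat(Texture::InternalFormat::SRGB8_A8);   // uncompressed fallback
 *   Texture* tex = reader.load(data, size, Ktx2Reader::TransferFunction::sRGB);
 */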
Result FAsync::doTranscoding() {
ktx2_transcoder_state basisThreadState;
basisThreadState.clear();
for (uint32_t levelIndex = 0, n = mTranscoder->get_levels(); levelIndex < n; levelIndex++) {
Texture::PixelBufferDescriptor* pbd;
Result result = transcodeImageLevel(*mTranscoder, basisThreadState, mTexture->getFormat(),
levelIndex, &pbd);
if (UTILS_UNLIKELY(result != Result::SUCCESS)) {
return result;
}
mTranscoderResults[levelIndex].store(pbd);
}
return Result::SUCCESS;
}
void FAsync::uploadImages() {
size_t levelIndex = 0;
UTILS_NOUNROLL
for (TranscoderResult& level : mTranscoderResults) {
Texture::PixelBufferDescriptor* pbd = level.load();
if (pbd) {
level.store(nullptr);
mTexture->setImage(mEngine, levelIndex, std::move(*pbd));
delete pbd;
}
++levelIndex;
}
}
Async* Ktx2Reader::asyncCreate(const void* data, size_t size, TransferFunction transfer) {
Buffer ktx2content((uint8_t*)data, (uint8_t*)data + size);
ktx2_transcoder* transcoder = new ktx2_transcoder();
Texture* texture = createTexture(transcoder, ktx2content.data(), ktx2content.size(), transfer);
if (texture == nullptr) {
delete transcoder;
return nullptr;
}
if (!transcoder->start_transcoding()) {
delete transcoder;
mEngine.destroy(texture);
return nullptr;
}
// There's no need to do any further work at this point but it should be noted that this is the
// point at which we first come to know the number of miplevels, dimensions, etc. If we had a
// dynamically sized array to store decoder results, we would reserve it here.
return new FAsync(texture, mEngine, transcoder, std::move(ktx2content));
}
void Ktx2Reader::asyncDestroy(Async** async) {
delete *async;
*async = nullptr;
}
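/*
 * A minimal sketch of the asynchronous path (thread handling is left to the caller).
 * The texture object is available immediately; each transcoded mip level is uploaded
 * by a subsequent uploadImages() call made from the engine's thread:
 *
 *   auto* async = reader.asyncCreate(data, size, Ktx2Reader::TransferFunction::sRGB);
 *   Texture* tex = async->getTexture();
 *   std::thread worker([async] { async->doTranscoding(); });
 *   // ... call async->uploadImages() periodically (e.g. once per frame) ...
 *   worker.join();
 *   reader.asyncDestroy(&async);
 */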
Texture* Ktx2Reader::createTexture(ktx2_transcoder* transcoder, const void* data, size_t size,
TransferFunction transfer) {
if (!transcoder->init(data, size)) {
if (!mQuiet) {
utils::slog.e << "BasisU transcoder init failed." << utils::io::endl;
}
return nullptr;
}
if (transcoder->get_dfd_transfer_func() == KTX2_KHR_DF_TRANSFER_LINEAR &&
transfer == TransferFunction::sRGB) {
if (!mQuiet) {
utils::slog.e << "Source texture is marked linear, but client is requesting sRGB."
<< utils::io::endl;
}
return nullptr;
}
if (transcoder->get_dfd_transfer_func() == KTX2_KHR_DF_TRANSFER_SRGB &&
transfer == TransferFunction::LINEAR) {
if (!mQuiet) {
utils::slog.e << "Source texture is marked sRGB, but client is requesting linear."
<< utils::io::endl;
}
return nullptr;
}
// TODO: support cubemaps. For now we use KTX1 for cubemaps because basisu does not support HDR.
if (transcoder->get_faces() == 6) {
if (!mQuiet) {
utils::slog.e << "Cubemaps are not yet supported." << utils::io::endl;
}
return nullptr;
}
// TODO: support texture arrays.
if (transcoder->get_layers() > 1) {
if (!mQuiet) {
utils::slog.e << "Texture arrays are not yet supported." << utils::io::endl;
}
return nullptr;
}
// First pass through, just to make sure we can transcode it.
bool found = false;
Texture::InternalFormat resolvedFormat;
FinalFormatInfo info;
for (Texture::InternalFormat requestedFormat : mRequestedFormats) {
if (!Texture::isTextureFormatSupported(mEngine, requestedFormat)) {
continue;
}
info = getFinalFormatInfo(requestedFormat);
if (!info.isSupported || info.transferFunction != transfer) {
continue;
}
if (!basis_is_format_supported(info.basisFormat, transcoder->get_format())) {
continue;
}
const uint32_t layerIndex = 0;
const uint32_t faceIndex = 0;
for (uint32_t levelIndex = 0; levelIndex < transcoder->get_levels(); levelIndex++) {
basist::ktx2_image_level_info info;
if (!transcoder->get_image_level_info(info, levelIndex, layerIndex, faceIndex)) {
continue;
}
}
found = true;
resolvedFormat = requestedFormat;
break;
}
if (!found) {
if (!mQuiet) {
utils::slog.e << "Unable to decode any of the requested formats." << utils::io::endl;
}
return nullptr;
}
Texture* texture = Texture::Builder()
.width(transcoder->get_width())
.height(transcoder->get_height())
.levels(transcoder->get_levels())
.sampler(Texture::Sampler::SAMPLER_2D)
.format(resolvedFormat)
.build(mEngine);
if (texture == nullptr && !mQuiet) {
utils::slog.e << "Unable to construct texture using BasisU info." << utils::io::endl;
}
#if BASISU_FORCE_DEVEL_MESSAGES
utils::slog.e << "Ktx2Reader created "
<< transcoder->get_width() << "x" << transcoder->get_height() << " texture with format "
<< info.name << utils::io::endl;
#endif
return texture;
}
Texture* Async::getTexture() const noexcept {
return static_cast<FAsync const*>(this)->getTexture();
}
Result Async::doTranscoding() {
return static_cast<FAsync*>(this)->doTranscoding();
}
void Async::uploadImages() {
return static_cast<FAsync*>(this)->uploadImages();
}
} // namespace ktxreader