refactoring + texture projection

This commit is contained in:
Nick Fisher
2025-03-25 09:39:02 +08:00
parent 0cbbc058e0
commit 999b1e613f
33 changed files with 7357 additions and 1168 deletions

View File

@@ -0,0 +1,110 @@
# Makefile for Thermion Dart - macOS Version
# Using clang to build shared library directly

# Configuration variables
FILAMENT_VERSION = v1.58.0
PACKAGE_NAME = thermion_dart
PLATFORM = macos
# Architecture - default to x64 but can be overridden
ARCH ?= x64

# Compiler and flags
CC = clang++
CFLAGS = -std=c++17 -g -O0 -mmacosx-version-min=13.0
DEFINES = -DENABLE_TRACING=1

# Output library name
OUTPUT_NAME = libthermion_dart.dylib

# Project directory structure
PKG_ROOT = .
NATIVE_SRC_DIR = $(PKG_ROOT)/src
NATIVE_INCLUDE_DIR = $(PKG_ROOT)/include
OUTPUT_DIR = $(PKG_ROOT)/build

# Prebuilt Filament library path. Derived from FILAMENT_VERSION/PLATFORM so
# the variables above stay authoritative; override on the command line
# (make LIB_DIR=...) for a different checkout location.
LIB_DIR ?= /Users/nickfisher/Documents/thermion/thermion_dart/.dart_tool/thermion_dart/lib/$(FILAMENT_VERSION)/$(PLATFORM)/debug

# Libraries are already in LIB_DIR, no download needed
# Source files (exclude CMake artifacts, standalone main.cpp and Windows-only sources)
SOURCES = $(shell find $(NATIVE_SRC_DIR) -type f -name "*.cpp" -not -path "*CMakeLists*" -not -path "*main.cpp*" -not -path "*windows*")
MATERIAL_SOURCES = $(NATIVE_INCLUDE_DIR)/material/unlit_fixed_size.c \
	$(NATIVE_INCLUDE_DIR)/material/image.c \
	$(NATIVE_INCLUDE_DIR)/material/grid.c \
	$(NATIVE_INCLUDE_DIR)/material/unlit.c \
	$(NATIVE_INCLUDE_DIR)/material/gizmo.c
RESOURCE_SOURCES = $(NATIVE_INCLUDE_DIR)/resources/translation_gizmo_glb.c \
	$(NATIVE_INCLUDE_DIR)/resources/rotation_gizmo_glb.c
ALL_SOURCES = $(SOURCES) $(MATERIAL_SOURCES) $(RESOURCE_SOURCES)

# Include paths
INCLUDES = -I$(NATIVE_INCLUDE_DIR) -I$(NATIVE_INCLUDE_DIR)/filament

# Libraries to link
LIBS = -lfilament -lbackend -lfilameshio -lviewer -lfilamat -lmeshoptimizer \
	-lmikktspace -lgeometry -lutils -lfilabridge -lgltfio_core -lgltfio \
	-lfilament-iblprefilter -limage -limageio -ltinyexr -lfilaflat \
	-ldracodec -libl -lktxreader -lpng -lz -lstb -luberzlib -lsmol-v \
	-luberarchive -lzstd -lbasis_transcoder -lmatdbg -lfgviewer -lbluegl \
	-lbluevk -lstdc++

# Frameworks for macOS
FRAMEWORKS = -framework Foundation -framework CoreVideo -framework Cocoa -framework Metal

# Default target
.PHONY: all
all: setup check-libs build

# Setup directories.
# Note: deliberately do NOT `mkdir -p $(LIB_DIR)` here -- creating an empty
# library directory would make the existence test in check-libs pass even
# when the prebuilt Filament libraries are missing.
.PHONY: setup
setup:
	mkdir -p "$(OUTPUT_DIR)"
	@echo "Build directories created for macOS"

# Using pre-existing Filament libraries
.PHONY: check-libs
check-libs:
	@echo "Using existing Filament libraries in $(LIB_DIR)"
	@if [ ! -d "$(LIB_DIR)" ]; then \
		echo "ERROR: Library directory $(LIB_DIR) not found"; \
		exit 1; \
	fi

# Build the shared library using clang.
# Depends on setup/check-libs so `make build` on its own is also safe.
.PHONY: build
build: setup check-libs
	@echo "Building Thermion shared library for macOS ($(ARCH))"
	$(CC) $(CFLAGS) $(DEFINES) $(INCLUDES) \
		-dynamiclib -install_name @rpath/$(OUTPUT_NAME) \
		$(ALL_SOURCES) \
		-L$(LIB_DIR) $(LIBS) $(FRAMEWORKS) \
		-o $(OUTPUT_DIR)/$(OUTPUT_NAME)
	@echo "Build complete: $(OUTPUT_DIR)/$(OUTPUT_NAME)"

# Clean build artifacts (previously the rm was commented out, making this
# target a no-op while still claiming success).
.PHONY: clean
clean:
	rm -rf "$(OUTPUT_DIR)"
	@echo "Build artifacts cleaned"

# Help target
.PHONY: help
help:
	@echo "Thermion macOS Build System"
	@echo ""
	@echo "Usage:"
	@echo "  make [target] [ARCH=architecture]"
	@echo ""
	@echo "Targets:"
	@echo "  all          Build everything (default)"
	@echo "  setup        Create necessary directories"
	@echo "  check-libs   Verify Filament libraries exist"
	@echo "  build        Build the shared library"
	@echo "  clean        Clean build artifacts"
	@echo ""
	@echo "Options:"
	@echo "  ARCH         Target architecture (default: x64)"
	@echo "               Supported: x64, arm64"

View File

@@ -30,7 +30,7 @@ static void Log(const char *fmt, ...) {
va_end(args);
}
#define ERROR(fmt, ...) Log("Error: %s:%d " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__)
#ifdef ENABLE_TRACING
#ifdef __ANDROID__
#define __FILENAME__ (strrchr(__FILE__, '/') ? strrchr(__FILE__, '/') + 1 : __FILE__)

View File

@@ -20,7 +20,8 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
TRenderTarget *tRenderTarget,
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out
uint8_t *out,
size_t outLength
);
EMSCRIPTEN_KEEPALIVE void Renderer_setFrameInterval(
TRenderer *tRenderer,

View File

@@ -64,6 +64,7 @@ EMSCRIPTEN_KEEPALIVE bool View_isStencilBufferEnabled(TView *tView);
EMSCRIPTEN_KEEPALIVE void View_setDitheringEnabled(TView *tView, bool enabled);
EMSCRIPTEN_KEEPALIVE bool View_isDitheringEnabled(TView *tView);
EMSCRIPTEN_KEEPALIVE void View_setScene(TView *tView, TScene *tScene);
EMSCRIPTEN_KEEPALIVE void View_setFrontFaceWindingInverted(TView *tView, bool inverted);
typedef void (*PickCallback)(uint32_t requestId, EntityId entityId, float depth, float fragX, float fragY, float fragZ);
EMSCRIPTEN_KEEPALIVE void View_pick(TView* tView, uint32_t requestId, uint32_t x, uint32_t y, PickCallback callback);

View File

@@ -77,6 +77,7 @@ namespace thermion
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out,
size_t outLength,
void (*onComplete)());
EMSCRIPTEN_KEEPALIVE void Material_createInstanceRenderThread(TMaterial *tMaterial, void (*onComplete)(TMaterialInstance *));

View File

@@ -0,0 +1,12 @@
.global CAPTURE_UV_CAPTURE_UV_OFFSET;
.global CAPTURE_UV_CAPTURE_UV_SIZE;
.global CAPTURE_UV_PACKAGE
.section .rodata
CAPTURE_UV_PACKAGE:
.incbin "capture_uv.bin"
CAPTURE_UV_CAPTURE_UV_OFFSET:
.int 0
CAPTURE_UV_CAPTURE_UV_SIZE:
.int 125851

View File

@@ -0,0 +1,12 @@
.global _CAPTURE_UV_CAPTURE_UV_OFFSET;
.global _CAPTURE_UV_CAPTURE_UV_SIZE;
.global _CAPTURE_UV_PACKAGE
.section __TEXT,__const
_CAPTURE_UV_PACKAGE:
.incbin "capture_uv.bin"
_CAPTURE_UV_CAPTURE_UV_OFFSET:
.int 0
_CAPTURE_UV_CAPTURE_UV_SIZE:
.int 125851

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
#ifndef CAPTURE_UV_H_
#define CAPTURE_UV_H_

#include <stdint.h>

/*
 * Accessors for the capture_uv material package embedded via the
 * capture_uv assembly resource files (.incbin of capture_uv.bin).
 *
 * The extern "C" linkage block is guarded so this header can also be
 * included from plain C translation units (the build compiles several
 * material/resource files as .c); an unconditional `extern "C"` would
 * be a syntax error there.
 */
#ifdef __cplusplus
extern "C" {
#endif

extern const uint8_t CAPTURE_UV_PACKAGE[];      /* raw package blob      */
extern int CAPTURE_UV_CAPTURE_UV_OFFSET;        /* material offset, bytes */
extern int CAPTURE_UV_CAPTURE_UV_SIZE;          /* material size, bytes   */

#ifdef __cplusplus
}
#endif

/* Pointer to the start of the capture_uv material inside the package. */
#define CAPTURE_UV_CAPTURE_UV_DATA (CAPTURE_UV_PACKAGE + CAPTURE_UV_CAPTURE_UV_OFFSET)

#endif

View File

@@ -51,7 +51,7 @@ const uint8_t UNLIT_PACKAGE[] = {
0x53, 0x45, 0x54, 0x44, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x54, 0x53, 0x4e, 0x49, 0x5f, 0x54, 0x41,
0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x53, 0x43, 0x32, 0x41, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4f,
0x43, 0x32, 0x41, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4f, 0x4d, 0x55, 0x43, 0x5f, 0x54, 0x41, 0x4d,
0x01, 0x00, 0x00, 0x00, 0x02, 0x50, 0x4f, 0x52, 0x50, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4f, 0x52, 0x50, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0x45, 0x54, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x44, 0x49,
0x55, 0x55, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0xf7, 0xaa, 0x19, 0x69, 0x9f, 0xc0, 0xf1, 0x29, 0x44, 0x41,
0x48, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4c, 0x4d, 0x48, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01,

View File

@@ -0,0 +1,270 @@
import Foundation
import GLKit
/// Bridges a CoreVideo pixel buffer and a Metal texture for consumption by
/// native (Objective-C / C) code.  Creates either a GPU-private depth
/// texture or an IOSurface-backed BGRA colour texture, and publishes the
/// retained MTLTexture pointer as an integer in `metalTextureAddress`
/// (-1 on failure).
@objc public class ThermionTextureSwift : NSObject {
    public var pixelBuffer: CVPixelBuffer?
    // Attributes for the backing CVPixelBuffer (IOSurface-backed so the
    // buffer can be shared across APIs).
    // NOTE(review): the format requested here is 32ABGR, but the buffer is
    // actually created with kCVPixelFormatType_32BGRA below -- confirm
    // which is intended.
    var pixelBufferAttrs = [
        kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ABGR ),
        kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary
    ] as [CFString : Any] as CFDictionary
    @objc public var cvMetalTextureCache:CVMetalTextureCache?
    @objc public var metalDevice:MTLDevice?
    @objc public var cvMetalTexture:CVMetalTexture?
    @objc public var metalTexture:MTLTexture?
    // Opaque pointer to the MTLTexture, passed across the FFI boundary.
    // -1 signals that texture creation failed (or has not happened yet).
    @objc public var metalTextureAddress:Int = -1

    @objc override public init() {
    }

    /// Creates a `width` x `height` texture.
    ///
    /// Depth path: a GPU-private `.depth32Float` texture with no backing
    /// pixel buffer.  Colour path: a BGRA texture created from an
    /// IOSurface-backed CVPixelBuffer through a CVMetalTextureCache.
    /// On any failure `metalTextureAddress` is set to -1.
    @objc public init(width:Int64, height:Int64, isDepth:Bool) {
        if(self.metalDevice == nil) {
            self.metalDevice = MTLCreateSystemDefaultDevice()!
        }

        if isDepth {
            print("Creating depth texture")
            // Create a proper depth texture without IOSurface backing
            let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
                pixelFormat: .depth32Float,
                width: Int(width),
                height: Int(height),
                mipmapped: false)
            textureDescriptor.usage = [.renderTarget, .shaderRead]
            textureDescriptor.storageMode = .private // Best performance for GPU-only access
            metalTexture = metalDevice?.makeTexture(descriptor: textureDescriptor)
            // passRetained: the native consumer owns a +1 reference via the
            // address below.
            let metalTexturePtr = Unmanaged.passRetained(metalTexture!).toOpaque()
            metalTextureAddress = Int(bitPattern: metalTexturePtr)
            return
        }

        print("Creating color texture")
        // NOTE(review): these two locals are computed but never used -- the
        // buffer/texture below hardcode 32BGRA / .bgra8Unorm (and isDepth is
        // always false at this point).  Remove or use them.
        let pixelFormat: MTLPixelFormat = isDepth ? .depth32Float : .bgra8Unorm
        let cvPixelFormat = isDepth ? kCVPixelFormatType_DepthFloat32 : kCVPixelFormatType_32BGRA

        if(CVPixelBufferCreate(kCFAllocatorDefault, Int(width), Int(height),
            kCVPixelFormatType_32BGRA, pixelBufferAttrs, &pixelBuffer) != kCVReturnSuccess) {
            print("Error allocating pixel buffer")
            metalTextureAddress = -1;
            return
        }

        // Lazily create the Metal texture cache used to wrap the pixel
        // buffer as a Metal texture.
        if self.cvMetalTextureCache == nil {
            let cacheCreationResult = CVMetalTextureCacheCreate(
                kCFAllocatorDefault,
                nil,
                self.metalDevice!,
                nil,
                &self.cvMetalTextureCache)
            if(cacheCreationResult != kCVReturnSuccess) {
                print("Error creating Metal texture cache")
                metalTextureAddress = -1
                return
            }
        }

        let cvret = CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            self.cvMetalTextureCache!,
            pixelBuffer!, nil,
            MTLPixelFormat.bgra8Unorm,
            Int(width), Int(height),
            0,
            &cvMetalTexture)
        if(cvret != kCVReturnSuccess) {
            print("Error creating texture from image")
            metalTextureAddress = -1
            return
        }
        metalTexture = CVMetalTextureGetTexture(cvMetalTexture!)
        // passRetained: see depth path above.
        let metalTexturePtr = Unmanaged.passRetained(metalTexture!).toOpaque()
        metalTextureAddress = Int(bitPattern:metalTexturePtr)
    }

    /// Releases the CoreVideo/Metal objects held by this instance.
    /// NOTE(review): force-unwraps `cvMetalTextureCache`, which is never
    /// created on the depth-texture path (or after a failed init) -- calling
    /// this on such an instance will crash.  Also does not balance the
    /// passRetained reference published via `metalTextureAddress`.
    @objc public func destroyTexture()  {
        CVMetalTextureCacheFlush(self.cvMetalTextureCache!, 0)
        self.metalTexture = nil
        self.cvMetalTexture = nil
        self.pixelBuffer = nil
        self.metalDevice = nil
        self.cvMetalTextureCache = nil
    }

    /// Draws the PNG (or any NSImage-decodable image) at `imageURL` into the
    /// backing pixel buffer, flipped to match Metal's coordinate system.
    /// Returns false if there is no pixel buffer (e.g. depth textures) or
    /// the image cannot be loaded/drawn.
    /// NOTE(review): uses NSImage, so this is macOS-only as written.
    @objc public func fillWithPNGImage(imageURL: URL) -> Bool {
        // Make sure we have a pixel buffer to work with
        guard let pixelBuffer = self.pixelBuffer else {
            print("Error: No pixel buffer available")
            return false
        }

        // Try to load the image from the provided URL
        guard let nsImage = NSImage(contentsOf: imageURL) else {
            print("Error: Could not load image from \(imageURL.path)")
            return false
        }

        // Make sure we have a CGImage to work with
        guard let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
            print("Error: Could not get CGImage from NSImage")
            return false
        }

        // Get pixel buffer dimensions
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        // Lock the pixel buffer for writing
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))

        // Get the base address of the pixel buffer
        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
            print("Error: Could not get base address of pixel buffer")
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            return false
        }

        // Create a graphics context in the pixel buffer
        // (byteOrder32Little + premultipliedFirst matches BGRA layout)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let context = CGContext(
            data: baseAddress,
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: bytesPerRow,
            space: colorSpace,
            bitmapInfo: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue
        )

        // Draw the image into the context (which is backed by our pixel buffer)
        if let context = context {
            // Flip the coordinate system to match Metal's coordinate system
            context.translateBy(x: 0, y: CGFloat(height))
            context.scaleBy(x: 1, y: -1)

            // Draw the image to fill the entire texture
            let rect = CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height))
            context.draw(cgImage, in: rect)
        } else {
            print("Error: Could not create CGContext from pixel buffer")
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            return false
        }

        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))

        return true
    }

    /// Fills the backing pixel buffer with a solid opaque colour (writes
    /// B=255, byte1=0, byte2=0, A=255 per pixel -- solid blue in BGRA).
    /// NOTE(review): the `red`/`green` labels below appear swapped for BGRA
    /// byte order (byte1 is green, byte2 is red); harmless here since both
    /// are written as 0.  Force-unwraps `pixelBuffer` -- crashes on depth
    /// textures.
    @objc public func fillColor() {
        CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let bufferWidth = Int(CVPixelBufferGetWidth(pixelBuffer!))
        let bufferHeight = Int(CVPixelBufferGetHeight(pixelBuffer!))
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer!)

        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer!) else {
            return
        }

        // Note: rows may be padded, so advance by bytesPerRow, not width*4.
        for row in 0..<bufferHeight {
            var pixel = baseAddress + row * bytesPerRow
            for _ in 0..<bufferWidth {
                let blue = pixel
                blue.storeBytes(of: 255, as: UInt8.self)

                let red = pixel + 1
                red.storeBytes(of: 0, as: UInt8.self)

                let green = pixel + 2
                green.storeBytes(of: 0, as: UInt8.self)

                let alpha = pixel + 3
                alpha.storeBytes(of: 255, as: UInt8.self)
                pixel += 4;
            }
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
    }

    /// Reads the texture contents back to the CPU via a blit into a shared
    /// staging buffer.  Depth textures are returned raw; colour textures are
    /// swizzled BGRA -> RGBA before returning.  Returns nil on any failure.
    /// Synchronous: blocks until the GPU copy completes.
    @objc public func getTextureBytes() -> NSData? {
        guard let texture = self.metalTexture else {
            print("Metal texture is not available")
            return nil
        }

        let width = texture.width
        let height = texture.height

        // Check what type of texture we're dealing with
        let isDepthTexture = texture.pixelFormat == .depth32Float ||
                             texture.pixelFormat == .depth16Unorm

        print("Using texture pixel format : \(texture.pixelFormat) isDepthTexture \(isDepthTexture) (depth32Float \(MTLPixelFormat.depth32Float)) (depth16Unorm \(MTLPixelFormat.depth16Unorm))")

        // Determine bytes per pixel based on format
        let bytesPerPixel = isDepthTexture ?
            (texture.pixelFormat == .depth32Float ? 4 : 2) : 4

        let bytesPerRow = width * bytesPerPixel
        let byteCount = bytesPerRow * height

        // Create a staging buffer that is CPU-accessible
        guard let stagingBuffer = self.metalDevice?.makeBuffer(
            length: byteCount,
            options: .storageModeShared) else {
            print("Failed to create staging buffer")
            return nil
        }

        // Create command buffer and encoder for copying
        // NOTE(review): creates a new MTLCommandQueue per call -- consider
        // caching one if this is called frequently.
        guard let cmdQueue = self.metalDevice?.makeCommandQueue(),
              let cmdBuffer = cmdQueue.makeCommandBuffer(),
              let blitEncoder = cmdBuffer.makeBlitCommandEncoder() else {
            print("Failed to create command objects")
            return nil
        }

        // Copy from texture to buffer
        blitEncoder.copy(
            from: texture,
            sourceSlice: 0,
            sourceLevel: 0,
            sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
            sourceSize: MTLSize(width: width, height: height, depth: 1),
            to: stagingBuffer,
            destinationOffset: 0,
            destinationBytesPerRow: bytesPerRow,
            destinationBytesPerImage: byteCount
        )
        blitEncoder.endEncoding()
        cmdBuffer.commit()
        cmdBuffer.waitUntilCompleted()

        // Now the data is in the staging buffer, accessible to CPU
        if isDepthTexture {
            // For depth textures, just return the raw data
            return NSData(bytes: stagingBuffer.contents(), length: byteCount)
        } else {
            // For color textures, do the BGRA to RGBA swizzling
            let bytes = stagingBuffer.contents().bindMemory(to: UInt8.self, capacity: byteCount)
            let data = NSMutableData(bytes: bytes, length: byteCount)
            let mutableBytes = data.mutableBytes.bindMemory(to: UInt8.self, capacity: byteCount)

            // Swap byte 0 (B) and byte 2 (R) of every pixel.
            for i in stride(from: 0, to: byteCount, by: 4) {
                let blue = mutableBytes[i]
                let red = mutableBytes[i+2]
                mutableBytes[i] = red
                mutableBytes[i+2] = blue
            }
            return data
        }
    }
}

View File

@@ -23,6 +23,11 @@ namespace thermion
TTexture *tColor,
TTexture *tDepth)
{
if(!tColor || !tDepth) {
ERROR("Color & depth attachments must be provided");
return nullptr;
}
TRACE("Creating render target %dx%d", width, height);
auto engine = reinterpret_cast<filament::Engine *>(tEngine);
auto color = reinterpret_cast<filament::Texture *>(tColor);
auto depth = reinterpret_cast<filament::Texture *>(tDepth);

View File

@@ -93,7 +93,8 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
TRenderTarget *tRenderTarget,
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out) {
uint8_t *out,
size_t outLength) {
auto *renderer = reinterpret_cast<filament::Renderer *>(tRenderer);
auto *renderTarget = reinterpret_cast<filament::RenderTarget *>(tRenderTarget);
@@ -101,8 +102,6 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
filament::Viewport const &vp = view->getViewport();
size_t pixelBufferSize = vp.width * vp.height * 4;
filament::backend::PixelDataFormat pixelBufferFormat = static_cast<filament::backend::PixelDataFormat>(tPixelBufferFormat);
filament::backend::PixelDataType pixelDataType = static_cast<filament::backend::PixelDataType>(tPixelDataType);
@@ -114,7 +113,7 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
auto pbd = filament::Texture::PixelBufferDescriptor(
out, pixelBufferSize,
out, outLength,
pixelBufferFormat,
pixelDataType,
dispatcher,

View File

@@ -231,6 +231,11 @@ using namespace filament;
view->setScene(scene);
}
EMSCRIPTEN_KEEPALIVE void View_setFrontFaceWindingInverted(TView *tView, bool inverted) {
auto *view = reinterpret_cast<View*>(tView);
view->setFrontFaceWindingInverted(inverted);
}
#ifdef __cplusplus
}

View File

@@ -362,11 +362,12 @@ extern "C"
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out,
size_t outLength,
void (*onComplete)()) {
std::packaged_task<void()> lambda(
[=]() mutable
{
Renderer_readPixels(tRenderer, tView, tRenderTarget, tPixelBufferFormat, tPixelDataType, out);
Renderer_readPixels(tRenderer, tView, tRenderTarget, tPixelBufferFormat, tPixelDataType, out, outLength);
onComplete();
});
auto fut = _renderThread->add_task(lambda);

View File

@@ -22,11 +22,9 @@ namespace thermion
SceneAsset *sceneAsset,
Engine *engine,
View *view,
Scene *scene,
Material *material) noexcept : _source(sceneAsset),
_engine(engine),
_view(view),
_scene(scene),
_material(material)
{
auto &entityManager = _engine->getEntityManager();
@@ -80,6 +78,10 @@ namespace thermion
_materialInstances.push_back(materialInstance);
auto instance = _source->createInstance(&materialInstance, 1);
if(!instance) {
Log("FATAL: failed to create asset instance");
}
TRACE("Created Gizmo axis glTF instance with head entity %d", instance->getEntity());
auto color = filament::math::float4(AXIS_COLORS[axis], 0.5f);
materialInstance->setParameter("baseColorFactor", color);

View File

@@ -1,970 +0,0 @@
// #include <memory>
// #include <string>
// #include <sstream>
// #include <thread>
// #include <vector>
// #include <unordered_set>
// #include <stack>
// #include <filament/Engine.h>
// #include <filament/TransformManager.h>
// #include <filament/Texture.h>
// #include <filament/RenderableManager.h>
// #include <filament/Viewport.h>
// #include <filament/Frustum.h>
// #include <utils/EntityManager.h>
// #include <gltfio/Animator.h>
// #include <gltfio/AssetLoader.h>
// #include <gltfio/FilamentAsset.h>
// #include <gltfio/ResourceLoader.h>
// #include <gltfio/TextureProvider.h>
// #include <gltfio/math.h>
// #include <gltfio/materials/uberarchive.h>
// #include <imageio/ImageDecoder.h>
// #include "material/FileMaterialProvider.hpp"
// #include "material/UnlitMaterialProvider.hpp"
// #include "material/unlit.h"
// #include "material/gizmo.h"
// #include "StreamBufferAdapter.hpp"
// #include "Log.hpp"
// #include "scene/SceneManager.hpp"
// #include "scene/CustomGeometry.hpp"
// #include "scene/GeometrySceneAsset.hpp"
// #include "scene/GltfSceneAsset.hpp"
// #include "scene/Gizmo.hpp"
// #include "scene/SceneAsset.hpp"
// #include "scene/GeometrySceneAssetBuilder.hpp"
// #include "TextureProjection.hpp"
// #include "resources/translation_gizmo_glb.h"
// #include "resources/rotation_gizmo_glb.h"
// extern "C"
// {
// #include "material/image.h"
// #include "material/unlit_fixed_size.h"
// }
// namespace thermion
// {
// using namespace std::chrono;
// using namespace image;
// using namespace utils;
// using namespace filament;
// using namespace filament::gltfio;
// using std::unique_ptr;
// SceneManager::SceneManager(const ResourceLoaderWrapperImpl *const resourceLoaderWrapper,
// Engine *engine,
// Scene *scene,
// const char *uberArchivePath,
// Camera *mainCamera)
// : _resourceLoaderWrapper(resourceLoaderWrapper),
// _engine(engine),
// _scene(scene),
// _mainCamera(mainCamera)
// {
// _stbDecoder = createStbProvider(_engine);
// _ktxDecoder = createKtx2Provider(_engine);
// _gltfResourceLoader = new ResourceLoader({.engine = _engine,
// .normalizeSkinningWeights = true});
// if (uberArchivePath)
// {
// auto uberdata = resourceLoaderWrapper->load(uberArchivePath);
// if (!uberdata.data)
// {
// Log("Failed to load ubershader material. This is fatal.");
// }
// _ubershaderProvider = gltfio::createUbershaderProvider(_engine, uberdata.data, uberdata.size);
// resourceLoaderWrapper->free(uberdata);
// }
// else
// {
// _ubershaderProvider = gltfio::createUbershaderProvider(
// _engine, UBERARCHIVE_DEFAULT_DATA, UBERARCHIVE_DEFAULT_SIZE);
// }
// _unlitMaterialProvider = new UnlitMaterialProvider(_engine, UNLIT_PACKAGE, UNLIT_UNLIT_SIZE);
// utils::EntityManager &em = utils::EntityManager::get();
// _ncm = new NameComponentManager(em);
// _assetLoader = AssetLoader::create({_engine, _ubershaderProvider, _ncm, &em});
// _gltfResourceLoader->addTextureProvider("image/ktx2", _ktxDecoder);
// _gltfResourceLoader->addTextureProvider("image/png", _stbDecoder);
// _gltfResourceLoader->addTextureProvider("image/jpeg", _stbDecoder);
// auto &tm = _engine->getTransformManager();
// _collisionComponentManager = std::make_unique<CollisionComponentManager>(tm);
// _animationManager = std::make_unique<AnimationManager>(_engine, _scene);
// _unlitFixedSizeMaterial =
// Material::Builder()
// .package(UNLIT_FIXED_SIZE_UNLIT_FIXED_SIZE_DATA, UNLIT_FIXED_SIZE_UNLIT_FIXED_SIZE_SIZE)
// .build(*_engine);
// _gizmoMaterial =
// Material::Builder()
// .package(GIZMO_GIZMO_DATA, GIZMO_GIZMO_SIZE)
// .build(*_engine);
// }
// SceneManager::~SceneManager()
// {
// TRACE("Destroying cameras");
// for (auto camera : _cameras)
// {
// auto entity = camera->getEntity();
// _engine->destroyCameraComponent(entity);
// _engine->getEntityManager().destroy(entity);
// }
// TRACE("Cameras destroyed");
// destroyAll();
// TRACE("Destroyed all assets");
// _engine->destroy(_unlitFixedSizeMaterial);
// _engine->destroy(_gizmoMaterial);
// TRACE("Destroyed materials");
// _cameras.clear();
// _grid = nullptr;
// _gltfResourceLoader->asyncCancelLoad();
// _ubershaderProvider->destroyMaterials();
// _animationManager = std::nullptr_t();
// _collisionComponentManager = std::nullptr_t();
// delete _ncm;
// delete _gltfResourceLoader;
// delete _stbDecoder;
// delete _ktxDecoder;
// delete _ubershaderProvider;
// TRACE("Destroying asset loader");
// AssetLoader::destroy(&_assetLoader);
// TRACE("Destroyed asset loader");
// }
// SceneAsset *SceneManager::createGrid(Material *material)
// {
// if (!_grid)
// {
// if (!material)
// {
// material = Material::Builder()
// .package(GRID_PACKAGE, GRID_GRID_SIZE)
// .build(*_engine);
// }
// _grid = std::make_unique<GridOverlay>(*_engine, material);
// }
// return _grid.get();
// }
// bool SceneManager::isGridEntity(utils::Entity entity)
// {
// if (!_grid)
// {
// TRACE("No grid");
// return false;
// }
// if (entity == _grid->getEntity())
// {
// TRACE("%d is a grid entity.", entity);
// return true;
// }
// for (int i = 0; i < _grid->getChildEntityCount(); i++)
// {
// if (entity == _grid->getChildEntities()[i])
// {
// TRACE("%d is a child entity of grid.", entity);
// return true;
// }
// }
// return false;
// }
// Gizmo *SceneManager::createGizmo(View *view, Scene *scene, GizmoType type)
// {
// TRACE("Creating gizmo type %d", type);
// Gizmo *raw;
// switch (type)
// {
// case GizmoType::TRANSLATION:
// if (!_translationGizmoGlb)
// {
// TRACE("Translation gizmo source not found, loading");
// _translationGizmoGlb = loadGlbFromBuffer(TRANSLATION_GIZMO_GLB_TRANSLATION_GIZMO_DATA, TRANSLATION_GIZMO_GLB_TRANSLATION_GIZMO_SIZE, 100, true, 4, 0, false, false);
// }
// raw = new Gizmo(_translationGizmoGlb, _engine, view, scene, _unlitFixedSizeMaterial);
// TRACE("Built translation gizmo");
// break;
// case GizmoType::ROTATION:
// if (!_rotationGizmoGlb)
// {
// TRACE("Rotation gizmo source not found, loading");
// _rotationGizmoGlb = loadGlbFromBuffer(ROTATION_GIZMO_GLB_ROTATION_GIZMO_DATA, ROTATION_GIZMO_GLB_ROTATION_GIZMO_SIZE, 100, true, 4, 0, false, false);
// }
// raw = new Gizmo(_rotationGizmoGlb, _engine, view, scene, _unlitFixedSizeMaterial);
// TRACE("Built rotation gizmo");
// break;
// }
// _sceneAssets.push_back(std::unique_ptr<Gizmo>(raw));
// return raw;
// }
// int SceneManager::getInstanceCount(EntityId entityId)
// {
// auto entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// return asset->getInstanceCount();
// }
// }
// return -1;
// }
// void SceneManager::getInstances(EntityId entityId, EntityId *out)
// {
// auto entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// for (int i = 0; i < asset->getInstanceCount(); i++)
// {
// out[i] = Entity::smuggle(asset->getInstanceAt(i)->getEntity());
// }
// return;
// }
// }
// }
// SceneAsset *SceneManager::loadGltf(const char *uri,
// const char *relativeResourcePath,
// int numInstances,
// bool keepData)
// {
// if (numInstances < 1)
// {
// return std::nullptr_t();
// }
// ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
// std::vector<FilamentInstance *> instances(numInstances);
// FilamentAsset *asset = _assetLoader->createInstancedAsset((uint8_t *)rbuf.data, rbuf.size, instances.data(), numInstances);
// if (!asset)
// {
// Log("Unable to load glTF asset at %d", uri);
// return std::nullptr_t();
// }
// const char *const *const resourceUris = asset->getResourceUris();
// const size_t resourceUriCount = asset->getResourceUriCount();
// std::vector<ResourceBuffer> resourceBuffers;
// for (size_t i = 0; i < resourceUriCount; i++)
// {
// std::string uri = std::string(relativeResourcePath) + std::string("/") + std::string(resourceUris[i]);
// ResourceBuffer buf = _resourceLoaderWrapper->load(uri.c_str());
// resourceBuffers.push_back(buf);
// ResourceLoader::BufferDescriptor b(buf.data, buf.size);
// _gltfResourceLoader->addResourceData(resourceUris[i], std::move(b));
// }
// #ifdef __EMSCRIPTEN__
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glTF asset");
// _resourceLoaderWrapper->free(rbuf);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// return 0;
// }
// while (_gltfResourceLoader->asyncGetLoadProgress() < 1.0f)
// {
// _gltfResourceLoader->asyncUpdateLoad();
// }
// #else
// // load resources synchronously
// if (!_gltfResourceLoader->loadResources(asset))
// {
// Log("Unknown error loading glTF asset");
// _resourceLoaderWrapper->free(rbuf);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// return std::nullptr_t();
// }
// #endif
// auto sceneAsset = std::make_unique<GltfSceneAsset>(
// asset,
// _assetLoader,
// _engine,
// _ncm);
// auto filamentInstance = asset->getInstance();
// size_t entityCount = filamentInstance->getEntityCount();
// _scene->addEntities(filamentInstance->getEntities(), entityCount);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// _resourceLoaderWrapper->free(rbuf);
// auto lights = asset->getLightEntities();
// _scene->addEntities(lights, asset->getLightEntityCount());
// sceneAsset->createInstance();
// auto entityId = Entity::smuggle(sceneAsset->getEntity());
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// Log("Loaded glTF asset from uri: %s", uri);
// return raw;
// }
// void SceneManager::setVisibilityLayer(EntityId entityId, int layer)
// {
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// asset->setLayer(_engine->getRenderableManager(), layer);
// }
// }
// }
// SceneAsset *SceneManager::loadGlbFromBuffer(const uint8_t *data, size_t length, int numInstances, bool keepData, int priority, int layer, bool loadResourcesAsync, bool addToScene)
// {
// auto &rm = _engine->getRenderableManager();
// std::vector<FilamentInstance *> instances(numInstances);
// FilamentAsset *asset = _assetLoader->createInstancedAsset((const uint8_t *)data, length, instances.data(), numInstances);
// Log("Created glTF asset with %d instances.", numInstances);
// if (!asset)
// {
// Log("Unknown error loading GLB asset.");
// return std::nullptr_t();
// }
// #ifdef __EMSCRIPTEN__
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// while (_gltfResourceLoader->asyncGetLoadProgress() < 1.0f)
// {
// _gltfResourceLoader->asyncUpdateLoad();
// }
// #else
// if (loadResourcesAsync)
// {
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// }
// else
// {
// if (!_gltfResourceLoader->loadResources(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// }
// #endif
// auto sceneAsset = std::make_unique<GltfSceneAsset>(
// asset,
// _assetLoader,
// _engine,
// _ncm);
// auto sceneAssetInstance = sceneAsset->createInstance();
// if (addToScene)
// {
// sceneAssetInstance->addAllEntities(_scene);
// }
// sceneAssetInstance->setPriority(_engine->getRenderableManager(), priority);
// sceneAssetInstance->setLayer(_engine->getRenderableManager(), layer);
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// return raw;
// }
// SceneAsset *SceneManager::createInstance(SceneAsset *asset, MaterialInstance **materialInstances, size_t materialInstanceCount)
// {
// std::lock_guard lock(_mutex);
// auto instance = asset->createInstance(materialInstances, materialInstanceCount);
// if (instance)
// {
// instance->addAllEntities(_scene);
// }
// else
// {
// Log("Failed to create instance");
// }
// return instance;
// }
// SceneAsset *SceneManager::loadGlb(const char *uri, int numInstances, bool keepData)
// {
// ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
// auto entity = loadGlbFromBuffer((const uint8_t *)rbuf.data, rbuf.size, numInstances, keepData);
// _resourceLoaderWrapper->free(rbuf);
// return entity;
// }
// bool SceneManager::removeFromScene(EntityId entityId)
// {
// _scene->remove(Entity::import(entityId));
// return true;
// }
// bool SceneManager::addToScene(EntityId entityId)
// {
// _scene->addEntity(Entity::import(entityId));
// return true;
// }
// void SceneManager::destroyAll()
// {
// destroyLights();
// destroyAssets();
// std::lock_guard lock(_mutex);
// for (auto *materialInstance : _materialInstances)
// {
// _engine->destroy(materialInstance);
// }
// _materialInstances.clear();
// }
// void SceneManager::destroy(SceneAsset *asset)
// {
// std::lock_guard lock(_mutex);
// auto entity = asset->getEntity();
// _collisionComponentManager->removeComponent(entity);
// _animationManager->removeAnimationComponent(utils::Entity::smuggle(entity));
// for (int i = 0; i < asset->getChildEntityCount(); i++)
// {
// auto childEntity = asset->getChildEntities()[i];
// _collisionComponentManager->removeComponent(childEntity);
// _animationManager->removeAnimationComponent(utils::Entity::smuggle(childEntity));
// }
// asset->removeAllEntities(_scene);
// if (asset->isInstance())
// {
// asset->destroyInstance(asset);
// }
// else
// {
// auto it = std::remove_if(_sceneAssets.begin(), _sceneAssets.end(), [=](auto &sceneAsset)
// { return sceneAsset.get() == asset; });
// _sceneAssets.erase(it, _sceneAssets.end());
// }
// }
// utils::Entity SceneManager::addLight(
// LightManager::Type t,
// float colour,
// float intensity,
// float posX,
// float posY,
// float posZ,
// float dirX,
// float dirY,
// float dirZ,
// float falloffRadius,
// float spotLightConeInner,
// float spotLightConeOuter,
// float sunAngularRadius,
// float sunHaloSize,
// float sunHaloFalloff,
// bool shadows)
// {
// auto light = EntityManager::get().create();
// // LightManager::ShadowOptions shadowOptions;
// // shadowOptions.stable = true;
// auto result = LightManager::Builder(t)
// .color(Color::cct(colour))
// .intensity(intensity)
// .falloff(falloffRadius)
// .spotLightCone(spotLightConeInner, spotLightConeOuter)
// .sunAngularRadius(sunAngularRadius)
// .sunHaloSize(sunHaloSize)
// .sunHaloFalloff(sunHaloFalloff)
// .position(filament::math::float3(posX, posY, posZ))
// .direction(filament::math::float3(dirX, dirY, dirZ))
// // .shadowOptions(shadowOptions)
// .castShadows(shadows)
// .build(*_engine, light);
// if (result != LightManager::Builder::Result::Success)
// {
// Log("ERROR : failed to create light");
// }
// else
// {
// _scene->addEntity(light);
// _lights.push_back(light);
// TRACE("Created light");
// }
// return light;
// }
// void SceneManager::removeLight(utils::Entity entity)
// {
// // NOTE(review): std::remove only shifts elements and returns the new logical
// // end — without a matching _lights.erase(removed, _lights.end()) the stale
// // entity remains in _lights. Complete the erase-remove idiom if this code is revived.
// auto removed = remove(_lights.begin(), _lights.end(), entity);
// _scene->remove(entity);
// EntityManager::get().destroy(1, &entity);
// }
// void SceneManager::destroyLights()
// {
// std::lock_guard lock(_mutex);
// _scene->removeEntities(_lights.data(), _lights.size());
// EntityManager::get().destroy(_lights.size(), _lights.data());
// _lights.clear();
// }
// void SceneManager::destroyAssets()
// {
// std::lock_guard lock(_mutex);
// for (auto &asset : _sceneAssets)
// {
// asset->removeAllEntities(_scene);
// for(int i = 0; i < asset->getInstanceCount(); i++) {
// asset->getInstanceAt(i)->removeAllEntities(_scene);
// }
// }
// _sceneAssets.clear();
// }
// void SceneManager::addCollisionComponent(EntityId entityId, void (*onCollisionCallback)(const EntityId entityId1, const EntityId entityId2), bool affectsTransform)
// {
// std::lock_guard lock(_mutex);
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// auto *instance = reinterpret_cast<GltfSceneAssetInstance *>(asset->getInstanceByEntity(entity));
// if (instance)
// {
// auto collisionInstance = _collisionComponentManager->addComponent(instance->getInstance()->getRoot());
// _collisionComponentManager->elementAt<0>(collisionInstance) = instance->getInstance()->getBoundingBox();
// _collisionComponentManager->elementAt<1>(collisionInstance) = onCollisionCallback;
// _collisionComponentManager->elementAt<2>(collisionInstance) = affectsTransform;
// return;
// }
// }
// }
// void SceneManager::removeCollisionComponent(EntityId entityId)
// {
// std::lock_guard lock(_mutex);
// utils::Entity entity = utils::Entity::import(entityId);
// _collisionComponentManager->removeComponent(entity);
// }
// void SceneManager::testCollisions(EntityId entityId)
// {
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// auto *instance = reinterpret_cast<GltfSceneAssetInstance *>(asset->getInstanceByEntity(entity));
// if (instance)
// {
// const auto &tm = _engine->getTransformManager();
// auto transformInstance = tm.getInstance(entity);
// auto worldTransform = tm.getWorldTransform(transformInstance);
// auto aabb = instance->getInstance()->getBoundingBox();
// aabb = aabb.transform(worldTransform);
// _collisionComponentManager->collides(entity, aabb);
// }
// }
// }
// void SceneManager::update()
// {
// _animationManager->update();
// _updateTransforms();
// }
// void SceneManager::_updateTransforms()
// {
// std::lock_guard lock(_mutex);
// // auto &tm = _engine->getTransformManager();
// // tm.openLocalTransformTransaction();
// // for (const auto &[entityId, transformUpdate] : _transformUpdates)
// // {
// // const auto &pos = _instances.find(entityId);
// // bool isCollidable = true;
// // Entity entity;
// // filament::TransformManager::Instance transformInstance;
// // filament::math::mat4f transform;
// // Aabb boundingBox;
// // if (pos == _instances.end())
// // {
// // isCollidable = false;
// // entity = Entity::import(entityId);
// // }
// // else
// // {
// // const auto *instance = pos->second;
// // entity = instance->getRoot();
// // boundingBox = instance->getBoundingBox();
// // }
// // transformInstance = tm.getInstance(entity);
// // transform = tm.getTransform(transformInstance);
// // if (isCollidable)
// // {
// // auto transformedBB = boundingBox.transform(transform);
// // auto collisionAxes = _collisionComponentManager->collides(entity, transformedBB);
// // if (collisionAxes.size() == 1)
// // {
// // // auto globalAxis = collisionAxes[0];
// // // globalAxis *= norm(relativeTranslation);
// // // auto newRelativeTranslation = relativeTranslation + globalAxis;
// // // translation -= relativeTranslation;
// // // translation += newRelativeTranslation;
// // // transform = composeMatrix(translation, rotation, scale);
// // }
// // else if (collisionAxes.size() > 1)
// // {
// // // translation -= relativeTranslation;
// // // transform = composeMatrix(translation, rotation, scale);
// // }
// // }
// // tm.setTransform(transformInstance, transformUpdate);
// // }
// // tm.commitLocalTransformTransaction();
// // _transformUpdates.clear();
// }
// void SceneManager::queueRelativePositionUpdateFromViewportVector(View *view, EntityId entityId, float viewportCoordX, float viewportCoordY)
// {
// // Get the camera and viewport
// const auto &camera = view->getCamera();
// const auto &vp = view->getViewport();
// // Convert viewport coordinates to NDC space
// float ndcX = (2.0f * viewportCoordX) / vp.width - 1.0f;
// float ndcY = 1.0f - (2.0f * viewportCoordY) / vp.height;
// // Get the current position of the entity
// auto &tm = _engine->getTransformManager();
// auto entity = Entity::import(entityId);
// auto transformInstance = tm.getInstance(entity);
// auto currentTransform = tm.getTransform(transformInstance);
// // get entity model origin in camera space
// auto entityPositionInCameraSpace = camera.getViewMatrix() * currentTransform * filament::math::float4{0.0f, 0.0f, 0.0f, 1.0f};
// // get entity model origin in clip space
// auto entityPositionInClipSpace = camera.getProjectionMatrix() * entityPositionInCameraSpace;
// auto entityPositionInNdcSpace = entityPositionInClipSpace / entityPositionInClipSpace.w;
// // Viewport coords in NDC space (use entity position in camera space Z to project onto near plane)
// math::float4 ndcNearPlanePos = {ndcX, ndcY, -1.0f, 1.0f};
// math::float4 ndcFarPlanePos = {ndcX, ndcY, 0.99f, 1.0f};
// math::float4 ndcEntityPlanePos = {ndcX, ndcY, entityPositionInNdcSpace.z, 1.0f};
// // Get viewport coords in clip space
// math::float4 nearPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcNearPlanePos;
// auto nearPlaneInCameraSpace = nearPlaneInClipSpace / nearPlaneInClipSpace.w;
// math::float4 farPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcFarPlanePos;
// auto farPlaneInCameraSpace = farPlaneInClipSpace / farPlaneInClipSpace.w;
// math::float4 entityPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcEntityPlanePos;
// auto entityPlaneInCameraSpace = entityPlaneInClipSpace / entityPlaneInClipSpace.w;
// auto entityPlaneInWorldSpace = camera.getModelMatrix() * entityPlaneInCameraSpace;
// }
// void SceneManager::queueTransformUpdates(EntityId *entities, math::mat4 *transforms, int numEntities)
// {
// std::lock_guard lock(_mutex);
// for (int i = 0; i < numEntities; i++)
// {
// auto entity = entities[i];
// const auto &pos = _transformUpdates.find(entity);
// if (pos == _transformUpdates.end())
// {
// _transformUpdates.emplace(entity, transforms[i]);
// }
// // NOTE(review): reading the stored transform back and re-assigning it unchanged
// // is a no-op; presumably this was meant to combine transforms[i] with the
// // existing entry — confirm intent before reviving this code.
// auto curr = _transformUpdates[entity];
// _transformUpdates[entity] = curr;
// }
// }
// Aabb3 SceneManager::getRenderableBoundingBox(EntityId entityId)
// {
// auto &rm = _engine->getRenderableManager();
// auto instance = rm.getInstance(Entity::import(entityId));
// if (!instance.isValid())
// {
// return Aabb3{};
// }
// auto box = rm.getAxisAlignedBoundingBox(instance);
// return Aabb3{box.center.x, box.center.y, box.center.z, box.halfExtent.x, box.halfExtent.y, box.halfExtent.z};
// }
// Aabb2 SceneManager::getScreenSpaceBoundingBox(View *view, EntityId entityId)
// {
// const auto &camera = view->getCamera();
// const auto &viewport = view->getViewport();
// auto &tcm = _engine->getTransformManager();
// auto &rcm = _engine->getRenderableManager();
// // Get the projection and view matrices
// math::mat4 projMatrix = camera.getProjectionMatrix();
// math::mat4 viewMatrix = camera.getViewMatrix();
// math::mat4 vpMatrix = projMatrix * viewMatrix;
// auto entity = Entity::import(entityId);
// auto renderable = rcm.getInstance(entity);
// auto worldTransform = tcm.getWorldTransform(tcm.getInstance(entity));
// // Get the axis-aligned bounding box in model space
// Box aabb = rcm.getAxisAlignedBoundingBox(renderable);
// auto min = aabb.getMin();
// auto max = aabb.getMax();
// // Transform the 8 corners of the AABB to clip space
// std::array<math::float4, 8> corners = {
// worldTransform * math::float4(min.x, min.y, min.z, 1.0f),
// worldTransform * math::float4(max.x, min.y, min.z, 1.0f),
// worldTransform * math::float4(min.x, max.y, min.z, 1.0f),
// worldTransform * math::float4(max.x, max.y, min.z, 1.0f),
// worldTransform * math::float4(min.x, min.y, max.z, 1.0f),
// worldTransform * math::float4(max.x, min.y, max.z, 1.0f),
// worldTransform * math::float4(min.x, max.y, max.z, 1.0f),
// worldTransform * math::float4(max.x, max.y, max.z, 1.0f)};
// // Project corners to clip space and convert to viewport space
// float minX = std::numeric_limits<float>::max();
// float minY = std::numeric_limits<float>::max();
// float maxX = std::numeric_limits<float>::lowest();
// float maxY = std::numeric_limits<float>::lowest();
// for (const auto &corner : corners)
// {
// math::float4 clipSpace = vpMatrix * corner;
// // Check if the point is behind the camera
// if (clipSpace.w <= 0)
// {
// continue; // Skip this point
// }
// // Perform perspective division
// math::float3 ndcSpace = clipSpace.xyz / clipSpace.w;
// // Clamp NDC coordinates to [-1, 1] range
// ndcSpace.x = std::max(-1.0f, std::min(1.0f, ndcSpace.x));
// ndcSpace.y = std::max(-1.0f, std::min(1.0f, ndcSpace.y));
// // Convert NDC to viewport space
// float viewportX = (ndcSpace.x * 0.5f + 0.5f) * viewport.width;
// float viewportY = (1.0f - (ndcSpace.y * 0.5f + 0.5f)) * viewport.height; // Flip Y-axis
// minX = std::min(minX, viewportX);
// minY = std::min(minY, viewportY);
// maxX = std::max(maxX, viewportX);
// maxY = std::max(maxY, viewportY);
// }
// return Aabb2{minX, minY, maxX, maxY};
// }
// static filament::gltfio::MaterialKey getDefaultUnlitMaterialConfig(int numUvs)
// {
// filament::gltfio::MaterialKey config;
// memset(&config, 0, sizeof(config));
// config.unlit = false;
// config.doubleSided = false;
// config.useSpecularGlossiness = false;
// config.alphaMode = filament::gltfio::AlphaMode::OPAQUE;
// config.hasBaseColorTexture = numUvs > 0;
// config.baseColorUV = 0;
// config.hasVertexColors = false;
// return config;
// }
// SceneAsset *SceneManager::createGeometry(
// float *vertices,
// uint32_t numVertices,
// float *normals,
// uint32_t numNormals,
// float *uvs,
// uint32_t numUvs,
// uint16_t *indices,
// uint32_t numIndices,
// filament::RenderableManager::PrimitiveType primitiveType,
// filament::MaterialInstance **materialInstances,
// size_t materialInstanceCount,
// bool keepData)
// {
// utils::Entity entity;
// auto builder = GeometrySceneAssetBuilder(_engine)
// .vertices(vertices, numVertices)
// .indices(indices, numIndices)
// .primitiveType(primitiveType);
// if (normals)
// {
// builder.normals(normals, numNormals);
// }
// if (uvs)
// {
// builder.uvs(uvs, numUvs);
// }
// builder.materials(materialInstances, materialInstanceCount);
// auto sceneAsset = builder.build();
// if (!sceneAsset)
// {
// Log("Failed to create geometry");
// return std::nullptr_t();
// }
// sceneAsset->addAllEntities(_scene);
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// return raw;
// }
// void SceneManager::destroy(filament::MaterialInstance *instance)
// {
// auto it = std::find(_materialInstances.begin(), _materialInstances.end(), instance);
// if (it != _materialInstances.end())
// {
// _materialInstances.erase(it);
// }
// _engine->destroy(instance);
// }
// MaterialInstance *SceneManager::createUnlitFixedSizeMaterialInstance()
// {
// auto instance = _unlitFixedSizeMaterial->createInstance();
// instance->setParameter("scale", 1.0f);
// return instance;
// }
// MaterialInstance *SceneManager::createUnlitMaterialInstance()
// {
// UvMap uvmap;
// auto instance = _unlitMaterialProvider->createMaterialInstance(nullptr, &uvmap);
// instance->setParameter("baseColorFactor", filament::math::float4{1.0f, 1.0f, 1.0f, 1.0f});
// instance->setParameter("baseColorIndex", -1);
// instance->setParameter("uvScale", filament::math::float2{1.0f, 1.0f});
// _materialInstances.push_back(instance);
// return instance;
// }
// Camera *SceneManager::createCamera()
// {
// auto entity = EntityManager::get().create();
// auto camera = _engine->createCamera(entity);
// _cameras.push_back(camera);
// return camera;
// }
// void SceneManager::destroyCamera(Camera *camera)
// {
// auto entity = camera->getEntity();
// _engine->destroyCameraComponent(entity);
// _engine->getEntityManager().destroy(entity);
// auto it = std::find(_cameras.begin(), _cameras.end(), camera);
// if (it != _cameras.end())
// {
// _cameras.erase(it);
// }
// }
// size_t SceneManager::getCameraCount()
// {
// return _cameras.size() + 1;
// }
// Camera *SceneManager::getCameraAt(size_t index)
// {
// if (index == 0)
// {
// return _mainCamera;
// }
// // NOTE(review): _cameras.size() - 1 underflows (size_t) when _cameras is empty;
// // the safe equivalent is: if (index - 1 >= _cameras.size())
// if (index - 1 > _cameras.size() - 1)
// {
// return nullptr;
// }
// return _cameras[index - 1];
// }
// } // namespace thermion