refactoring + texture projection

This commit is contained in:
Nick Fisher
2025-03-25 09:39:02 +08:00
parent 0cbbc058e0
commit 999b1e613f
33 changed files with 7357 additions and 1168 deletions

View File

@@ -42,6 +42,11 @@ abstract class ThermionAsset {
///
Future<ThermionEntity?> getChildEntity(String childName);
///
///
///
Future<MaterialInstance> getMaterialInstanceAt({ThermionEntity? entity, int index = 0});
///
///
///

View File

@@ -1,6 +1,8 @@
import 'dart:typed_data';
import 'package:thermion_dart/src/filament/src/engine.dart';
import 'package:thermion_dart/src/filament/src/scene.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/callbacks.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_material.dart';
import 'package:thermion_dart/thermion_dart.dart';
class FilamentConfig<T, U> {
@@ -59,6 +61,16 @@ abstract class FilamentApp<T> {
///
Future<SwapChain> createSwapChain(T handle, {bool hasStencilBuffer = false});
///
///
///
Future<View> createView();
///
///
///
Future<Scene> createScene();
///
///
///
@@ -98,7 +110,7 @@ abstract class FilamentApp<T> {
int levels = 1,
Set<TextureUsage> flags = const {TextureUsage.TEXTURE_USAGE_SAMPLEABLE},
TextureSamplerType textureSamplerType = TextureSamplerType.SAMPLER_2D,
TextureFormat textureFormat = TextureFormat.RGBA16F,
TextureFormat textureFormat = TextureFormat.RGBA32F,
int? importedTextureHandle});
///
@@ -185,12 +197,17 @@ abstract class FilamentApp<T> {
///
///
///
Future setRenderable(covariant View view, bool renderable);
Future register(covariant SwapChain swapChain, covariant View view);
///
///
///
Future register(covariant SwapChain swapChain, covariant View view);
Future unregister(covariant SwapChain swapChain, covariant View view);
///
///
///
Future updateRenderOrder();
///
///
@@ -239,10 +256,19 @@ abstract class FilamentApp<T> {
Future<MaterialInstance> createImageMaterialInstance();
///
/// Returns pixel buffer(s) for [view] (or, if null, all views associated
/// with [swapChain] by calling [register]).
///
/// Pixel buffers are returned in the format and type specified by
/// [pixelDataFormat] and [pixelDataType] (defaults: RGBA / unsigned byte).
///
///
Future<Uint8List> capture(covariant View view,
{bool captureRenderTarget = false});
Future<List<(View,Uint8List)>> capture(covariant SwapChain swapChain,
{
covariant View? view,
bool captureRenderTarget = false,
PixelDataFormat pixelDataFormat = PixelDataFormat.RGBA,
PixelDataType pixelDataType = PixelDataType.UBYTE,
Future Function(View)? beforeRender}
);
///
///
@@ -269,5 +295,14 @@ abstract class FilamentApp<T> {
///
///
///
Future<GizmoAsset> createGizmo(covariant View view, T animationManager, GizmoType type);
Future<GizmoAsset> createGizmo(
covariant View view, T animationManager, GizmoType type);
///
///
///
Future<ThermionAsset> createGeometry(Geometry geometry, T animationManager,
{List<MaterialInstance>? materialInstances,
bool keepData = false});
}

View File

@@ -363,85 +363,113 @@ abstract class Texture {
Future dispose();
}
/// Pixel data format enum, representing different channel combinations
enum PixelDataFormat {
R,
/// One Red channel, float
R_INTEGER,
R(0),
/// One Red channel, integer
RG,
R_INTEGER(1),
/// Two Red and Green channels, float
RG_INTEGER,
RG(2),
/// Two Red and Green channels, integer
RGB,
RG_INTEGER(3),
/// Three Red, Green and Blue channels, float
RGB_INTEGER,
RGB(4),
/// Three Red, Green and Blue channels, integer
RGBA,
RGB_INTEGER(5),
/// Four Red, Green, Blue and Alpha channels, float
RGBA_INTEGER,
RGBA(6),
/// Four Red, Green, Blue and Alpha channels, integer
UNUSED,
RGBA_INTEGER(7),
/// Used to be rgbm
DEPTH_COMPONENT,
/// Unused format
UNUSED(8),
/// Depth, 16-bit or 24-bits usually
DEPTH_STENCIL,
DEPTH_COMPONENT(9),
/// Two Depth (24-bits) + Stencil (8-bits) channels
ALPHA
DEPTH_STENCIL(10),
/// One Alpha channel, float
/// Alpha channel only
ALPHA(11);
/// The integer value of the enum
final int value;
/// Constructor with the integer value
const PixelDataFormat(this.value);
/// Looks up the [PixelDataFormat] whose [value] matches the given integer.
///
/// Throws an [ArgumentError] if no format has that value.
factory PixelDataFormat.fromValue(int value) {
  for (final format in PixelDataFormat.values) {
    if (format.value == value) {
      return format;
    }
  }
  throw ArgumentError('Invalid PixelDataFormat value: $value');
}
}
/// Pixel Data Type
/// Pixel data type enum, representing different data types for pixel values
enum PixelDataType {
UBYTE,
/// Unsigned byte
BYTE,
UBYTE(0),
/// Signed byte
USHORT,
BYTE(1),
/// Unsigned short (16-bit)
SHORT,
USHORT(2),
/// Signed short (16-bit)
UINT,
SHORT(3),
/// Unsigned int (32-bit)
INT,
UINT(4),
/// Signed int (32-bit)
HALF,
INT(5),
/// Half-float (16-bit float)
FLOAT,
HALF(6),
/// Float (32-bits float)
COMPRESSED,
FLOAT(7),
/// Compressed pixels, see CompressedPixelDataType
UINT_10F_11F_11F_REV,
/// Compressed pixels
COMPRESSED(8),
/// Three low precision floating-point numbers
USHORT_565,
UINT_10F_11F_11F_REV(9),
/// Unsigned int (16-bit), encodes 3 RGB channels
UINT_2_10_10_10_REV,
USHORT_565(10),
/// Unsigned normalized 10 bits RGB, 2 bits alpha
UINT_2_10_10_10_REV(11);
/// The integer value of the enum
final int value;
/// Constructor with the integer value
const PixelDataType(this.value);
/// Looks up the [PixelDataType] whose [value] matches the given integer.
///
/// Throws an [ArgumentError] if no type has that value.
factory PixelDataType.fromValue(int value) {
  for (final type in PixelDataType.values) {
    if (type.value == value) {
      return type;
    }
  }
  throw ArgumentError('Invalid PixelDataType value: $value');
}
}
@deprecated
typedef ThermionTexture = Texture;

View File

@@ -24,6 +24,8 @@ abstract class View {
Future setViewport(int width, int height);
Future<RenderTarget?> getRenderTarget();
Future setRenderTarget(covariant RenderTarget? renderTarget);
int get renderOrder;
Future setRenderOrder(int order);
Future setCamera(covariant Camera camera);
Future<Camera> getCamera();
Future setPostProcessing(bool enabled);

View File

@@ -5,8 +5,8 @@ import '../../../thermion_dart.dart';
class GeometryHelper {
static Geometry fullscreenQuad() {
final vertices = Float32List.fromList(
[-1.0, -1.0, 1.0, 3.0, -1.0, 1.0, -1.0, 3.0, 1.0]);
final vertices =
Float32List.fromList([-1.0, -1.0, 1.0, 3.0, -1.0, 1.0, -1.0, 3.0, 1.0]);
final indices = [0, 1, 2];
return Geometry(vertices, indices);
}
@@ -59,7 +59,8 @@ class GeometryHelper {
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
static Geometry cube({bool normals = true, bool uvs = true}) {
static Geometry cube(
{bool normals = true, bool uvs = true, bool flipUvs = true}) {
final vertices = Float32List.fromList([
// Front face
-1, -1, 1, // 0
@@ -175,44 +176,49 @@ class GeometryHelper {
])
: null;
// Original UV coordinates
var originalUvs = <double>[
// front
1 / 3, 3 / 4, // 0
2 / 3, 3 / 4, // 1
2 / 3, 1, // 2
1 / 3, 1, // 3
// back
1 / 3, 1 / 4, // 4
2 / 3, 1 / 4, // 5
2 / 3, 1 / 2, // 6
1 / 3, 1 / 2, // 7
// top
2 / 3, 1 / 2, // 8
1, 1 / 2, // 9
1, 3 / 4, // 10
2 / 3, 3 / 4, // 11
// bottom
0, 1 / 2, // 12
1 / 3, 1 / 2, // 13
1 / 3, 3 / 4, // 14
0, 3 / 4, // 15
// right
1 / 3, 1 / 2, // 16
2 / 3, 1 / 2, // 17
2 / 3, 3 / 4, // 18
1 / 3, 3 / 4, // 19
// left
1 / 3, 0, // 20
2 / 3, 0, // 21
2 / 3, 1 / 4, // 22
1 / 3, 1 / 4 // 23
];
// Apply UV flipping if requested
final _uvs = uvs
? Float32List.fromList([
// front
1 / 3, 3 / 4, // 0
2 / 3, 3 / 4, // 1
2 / 3, 1, // 2
1 / 3, 1, // 3
// back
1 / 3, 1 / 4, // 4
2 / 3, 1 / 4, // 5
2 / 3, 1 / 2, // 6
1 / 3, 1 / 2, // 7
// top
2 / 3, 1 / 2, // 8
1, 1 / 2, // 9
1, 3 / 4, // 10
2 / 3, 3 / 4, // 11
// bottom
0, 1 / 2, // 12
1 / 3, 1 / 2, // 13
1 / 3, 3 / 4, // 14
0, 3 / 4, // 15
// right
1 / 3, 1 / 2, // 16
2 / 3, 1 / 2, // 17
2 / 3, 3 / 4, // 18
1 / 3, 3 / 4, // 19
// left
1 / 3, 0, // 20
2 / 3, 0, // 21
2 / 3, 1 / 4, // 22
1 / 3, 1 / 4 // 23
])
? Float32List.fromList(
flipUvs ? _flipUvCoordinates(originalUvs) : originalUvs)
: null;
final indices = [
@@ -229,9 +235,19 @@ class GeometryHelper {
// Left face
20, 21, 22, 20, 22, 23 // 4,0,3,4,3,7
];
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
/// Returns a copy of [uvs] with every V (odd-index) coordinate mirrored
/// vertically (v -> 1.0 - v); U (even-index) coordinates are unchanged.
static List<double> _flipUvCoordinates(List<double> uvs) {
  return List<double>.generate(
      uvs.length, (i) => i.isOdd ? 1.0 - uvs[i] : uvs[i]);
}
static Geometry cylinder(
{double radius = 1.0,
double length = 1.0,

View File

@@ -2,8 +2,8 @@ import 'dart:math';
import 'dart:typed_data';
import 'package:image/image.dart' as img;
Future<Uint8List> pixelBufferToBmp(
Uint8List pixelBuffer, int width, int height) async {
Future<Uint8List> pixelBufferToBmp(Uint8List pixelBuffer, int width, int height,
{bool hasAlpha = true, bool isFloat = false}) async {
final rowSize = (width * 3 + 3) & ~3;
final padding = rowSize - (width * 3);
final fileSize = 54 + rowSize * height;
@@ -28,14 +28,29 @@ Future<Uint8List> pixelBufferToBmp(
bd.setInt32(38, 2835, Endian.little); // X pixels per meter
bd.setInt32(42, 2835, Endian.little); // Y pixels per meter
Float32List? floatData;
if (isFloat) {
floatData = pixelBuffer.buffer.asFloat32List(
pixelBuffer.offsetInBytes, width * height * (hasAlpha ? 4 : 3));
}
// Pixel data (BMP stores in BGR format)
for (var y = 0; y < height; y++) {
for (var x = 0; x < width; x++) {
final srcIndex = (y * width + x) * 4; // RGBA format
final srcIndex = (y * width + x) * (hasAlpha ? 4 : 3); // RGBA format
final dstIndex = 54 + y * rowSize + x * 3; // BGR format
data[dstIndex] = pixelBuffer[srcIndex + 2]; // Blue
data[dstIndex + 1] = pixelBuffer[srcIndex + 1]; // Green
data[dstIndex + 2] = pixelBuffer[srcIndex]; // Red
data[dstIndex] = isFloat
? (floatData![srcIndex + 2] * 255).toInt()
: pixelBuffer[srcIndex + 2]; // Blue
data[dstIndex + 1] = isFloat
? (floatData![srcIndex + 1] * 255).toInt()
: pixelBuffer[srcIndex + 1]; // Green
data[dstIndex + 2] = isFloat
? (floatData![srcIndex] * 255).toInt()
: pixelBuffer[srcIndex]; // Red
// Alpha channel is discarded
}
// Add padding to the end of each row

View File

@@ -342,4 +342,10 @@ class BackgroundImage extends ThermionAsset {
// TODO: implement transformToUnitCube
throw UnimplementedError();
}
@override
Future<MaterialInstance> getMaterialInstanceAt(
        {ThermionEntity? entity, int index = 0}) =>
    // TODO: implement getMaterialInstanceAt
    throw UnimplementedError();
}

View File

@@ -370,6 +370,14 @@ class FFIAsset extends ThermionAsset {
// }
}
///
/// Returns the material instance at [index] for [entity] (or, if [entity]
/// is null, for this asset's root entity).
///
Future<MaterialInstance> getMaterialInstanceAt(
    {ThermionEntity? entity, int index = 0}) async {
  entity ??= this.entity;
  // Pass the caller-supplied [index] through to the renderable manager
  // (this was previously hardcoded to 0, silently ignoring [index]).
  var ptr = RenderableManager_getMaterialInstanceAt(
      Engine_getRenderableManager(app.engine), entity, index);
  return FFIMaterialInstance(ptr, app);
}
///
///
///

View File

@@ -120,12 +120,7 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
///
///
///
Future setRenderable(covariant FFIView view, bool renderable) async {
await view.setRenderable(renderable);
await _updateRenderableSwapChains();
}
Future _updateRenderableSwapChains() async {
Future updateRenderOrder() async {
for (final swapChain in _swapChains.keys) {
final views = _swapChains[swapChain];
if (views == null) {
@@ -174,6 +169,28 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
return FFISwapChain(swapChain);
}
///
///
///
Future<View> createView() async {
  // Create the native view on the render thread, then apply the default
  // settings (translucent blend, shadows/stencil/AA/dithering disabled).
  // Note: the previous code applied `await` to the FFIView constructor
  // itself (a non-Future), which is a no-op and flagged by the
  // `await_only_futures` lint; only the pointer callback is awaited here.
  final viewPtr = await withPointerCallback<TView>(
      (cb) => Engine_createViewRenderThread(engine, cb));
  final view = FFIView(viewPtr, this);
  await view.setFrustumCullingEnabled(true);
  View_setBlendMode(view.view, TBlendMode.TRANSLUCENT);
  View_setShadowsEnabled(view.view, false);
  View_setStencilBufferEnabled(view.view, false);
  View_setAntiAliasing(view.view, false, false, false);
  View_setDitheringEnabled(view.view, false);
  View_setRenderQuality(view.view, TQualityLevel.MEDIUM);
  return view;
}
/// Creates a new, empty native [Scene].
Future<Scene> createScene() async => FFIScene(Engine_createScene(engine));
///
///
///
@@ -195,7 +212,7 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
continue;
}
for (final view in _swapChains[swapChain]!) {
await setRenderable(view, false);
await view.setRenderable(false);
}
}
for (final swapChain in _swapChains.keys.toList()) {
@@ -224,10 +241,30 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
///
Future<RenderTarget> createRenderTarget(int width, int height,
{covariant FFITexture? color, covariant FFITexture? depth}) async {
if (color == null) {
color = await createTexture(width, height,
flags: {
TextureUsage.TEXTURE_USAGE_SAMPLEABLE,
TextureUsage.TEXTURE_USAGE_COLOR_ATTACHMENT,
TextureUsage.TEXTURE_USAGE_BLIT_SRC
},
textureFormat: TextureFormat.RGBA8) as FFITexture;
}
if (depth == null) {
depth = await createTexture(width, height,
flags: {
TextureUsage.TEXTURE_USAGE_SAMPLEABLE,
TextureUsage.TEXTURE_USAGE_DEPTH_ATTACHMENT
},
textureFormat: TextureFormat.DEPTH32F) as FFITexture;
}
final renderTarget = await withPointerCallback<TRenderTarget>((cb) {
RenderTarget_createRenderThread(engine, width, height,
color?.pointer ?? nullptr, depth?.pointer ?? nullptr, cb);
RenderTarget_createRenderThread(
engine, width, height, color!.pointer, depth!.pointer, cb);
});
if (renderTarget == nullptr) {
throw Exception("Failed to create RenderTarget");
}
return FFIRenderTarget(renderTarget, this);
}
@@ -488,7 +525,24 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
_swapChains[swapChain] = [];
}
_swapChains[swapChain]!.add(view);
await _updateRenderableSwapChains();
_swapChains[swapChain]!
.sort((a, b) => a.renderOrder.compareTo(b.renderOrder));
await updateRenderOrder();
}
///
///
///
///
/// Removes [view] from the set of views rendered into [swapChain], then
/// re-evaluates render ordering.
///
@override
Future unregister(
    covariant FFISwapChain swapChain, covariant FFIView view) async {
  // Unregistering from a swapchain that was never registered is a no-op
  // for the view list; the previous implementation inserted a spurious
  // empty entry into _swapChains here.
  final views = _swapChains[swapChain];
  if (views != null) {
    views.remove(view);
    views.sort((a, b) => a.renderOrder.compareTo(b.renderOrder));
  }
  await updateRenderOrder();
}
final _hooks = <Future Function()>[];
@@ -603,38 +657,59 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
///
///
///
Future<Uint8List> capture(covariant FFIView view,
{bool captureRenderTarget = false}) async {
final viewport = await view.getViewport();
final swapChain = _swapChains.keys
.firstWhere((x) => _swapChains[x]?.contains(view) == true);
final out = Uint8List(viewport.width * viewport.height * 4);
Future<List<(View, Uint8List)>> capture(covariant FFISwapChain swapChain,
{covariant FFIView? view,
bool captureRenderTarget = false,
PixelDataFormat pixelDataFormat = PixelDataFormat.RGBA,
PixelDataType pixelDataType = PixelDataType.FLOAT,
Future Function(View)? beforeRender}) async {
await updateRenderOrder();
await withBoolCallback((cb) {
Renderer_beginFrameRenderThread(renderer, swapChain.swapChain, 0, cb);
});
await withVoidCallback((cb) {
Renderer_renderRenderThread(
renderer,
view.view,
cb,
);
});
if (captureRenderTarget && view.renderTarget == null) {
throw Exception();
final views = <FFIView>[];
if (view != null) {
views.add(view);
} else {
views.addAll(_swapChains[swapChain]!);
}
await withVoidCallback((cb) {
Renderer_readPixelsRenderThread(
renderer,
view.view,
captureRenderTarget ? view.renderTarget!.renderTarget : nullptr,
TPixelDataFormat.PIXELDATAFORMAT_RGBA,
TPixelDataType.PIXELDATATYPE_UBYTE,
out.address,
cb,
);
});
final pixelBuffers = <(View, Uint8List)>[];
for (final view in views) {
beforeRender?.call(view);
final viewport = await view.getViewport();
final pixelBuffer = Uint8List(viewport.width * viewport.height * 4 * sizeOf<Float>());
await withVoidCallback((cb) {
Renderer_renderRenderThread(
renderer,
view.view,
cb,
);
});
if (captureRenderTarget && view.renderTarget == null) {
throw Exception();
}
await withVoidCallback((cb) {
Renderer_readPixelsRenderThread(
renderer,
view.view,
view.renderTarget == null ? nullptr : view.renderTarget!.renderTarget,
// TPixelDataFormat.PIXELDATAFORMAT_RGBA,
// TPixelDataType.PIXELDATATYPE_UBYTE,
pixelDataFormat.value,
pixelDataType.value,
pixelBuffer.address,
pixelBuffer.length,
cb
);
});
pixelBuffers.add((view, pixelBuffer));
}
await withVoidCallback((cb) {
Renderer_endFrameRenderThread(renderer, cb);
});
@@ -642,7 +717,7 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
await withVoidCallback((cb) {
Engine_flushAndWaitRenderThead(engine, cb);
});
return out;
return pixelBuffers;
}
///
@@ -761,8 +836,15 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
(cb) => GltfResourceLoader_createRenderThread(engine, nullptr, cb));
final gizmo = await withPointerCallback<TGizmo>((cb) {
Gizmo_createRenderThread(engine, gltfAssetLoader, gltfResourceLoader, nameComponentManager,
view.view, _gizmoMaterial!.pointer, TGizmoType.values[gizmoType.index], cb);
Gizmo_createRenderThread(
engine,
gltfAssetLoader,
gltfResourceLoader,
nameComponentManager,
view.view,
_gizmoMaterial!.pointer,
TGizmoType.values[gizmoType.index],
cb);
});
if (gizmo == nullptr) {
throw Exception("Failed to create gizmo");
@@ -779,6 +861,48 @@ class FFIFilamentApp extends FilamentApp<Pointer> {
entities: gizmoEntities.toSet()
..add(SceneAsset_getEntity(gizmo.cast<TSceneAsset>())));
}
///
/// Creates a renderable asset from [geometry], optionally applying
/// [materialInstances] to its primitives.
///
/// NOTE(review): [addToScene] is accepted but never read in this body —
/// callers appear to add the asset to a scene themselves; confirm whether
/// this parameter should be honored here or removed from the signature.
///
@override
Future<ThermionAsset> createGeometry(Geometry geometry, Pointer animationManager,
    {List<MaterialInstance>? materialInstances,
    bool keepData = false,
    bool addToScene = true}) async {
  var assetPtr = await withPointerCallback<TSceneAsset>((callback) {
    // Marshal the material instance pointers into a flat Int64List so they
    // can be passed across the FFI boundary as a pointer array.
    var ptrList = Int64List(materialInstances?.length ?? 0);
    if (materialInstances != null && materialInstances.isNotEmpty) {
      ptrList.setRange(
          0,
          materialInstances.length,
          materialInstances
              .cast<FFIMaterialInstance>()
              .map((mi) => mi.pointer.address)
              .toList());
    }
    // Vertex/normal/UV/index buffers are passed by address+length; the
    // native side copies or consumes them during this call.
    return SceneAsset_createGeometryRenderThread(
        engine,
        geometry.vertices.address,
        geometry.vertices.length,
        geometry.normals.address,
        geometry.normals.length,
        geometry.uvs.address,
        geometry.uvs.length,
        geometry.indices.address,
        geometry.indices.length,
        geometry.primitiveType.index,
        ptrList.address.cast<Pointer<TMaterialInstance>>(),
        ptrList.length,
        callback);
  });
  if (assetPtr == nullptr) {
    throw Exception("Failed to create geometry");
  }
  return FFIAsset(assetPtr, this, animationManager.cast<TAnimationManager>());
}
}
class FinalizableUint8List implements Finalizable {

View File

@@ -9,6 +9,9 @@ import 'callbacks.dart';
import 'ffi_camera.dart';
class FFIView extends View {
int _renderOrder = 0;
int get renderOrder => _renderOrder;
final Pointer<TView> view;
final FFIFilamentApp app;
@@ -24,6 +27,17 @@ class FFIView extends View {
}
}
///
/// Sets this view's render order and asks the app to re-evaluate render
/// ordering across all swapchains.
///
/// NOTE(review): the per-swapchain view lists are sorted by renderOrder in
/// register/unregister but this setter only calls updateRenderOrder —
/// confirm that is sufficient to re-sort after an order change.
///
Future setRenderOrder(int order) async {
  this._renderOrder = order;
  await FilamentApp.instance!.updateRenderOrder();
}
///
/// Marks this view as renderable (or not).
///
Future setRenderable(bool renderable) async {
  _renderable = renderable;
}
@@ -138,6 +152,10 @@ class FFIView extends View {
View_setLayerEnabled(view, layer.value, visible);
}
/// Sets the blend mode on the underlying native view.
Future setBlendMode(TBlendMode blendMode) async =>
    View_setBlendMode(view, blendMode);
@override
Future<Scene> getScene() async {
final ptr = View_getScene(view);

View File

@@ -659,6 +659,12 @@ external void View_setScene(
ffi.Pointer<TScene> tScene,
);
@ffi.Native<ffi.Void Function(ffi.Pointer<TView>, ffi.Bool)>(isLeaf: true)
external void View_setFrontFaceWindingInverted(
ffi.Pointer<TView> tView,
bool inverted,
);
@ffi.Native<
ffi.Void Function(ffi.Pointer<TView>, ffi.Uint32, ffi.Uint32, ffi.Uint32,
PickCallback)>(isLeaf: true)
@@ -1471,7 +1477,8 @@ external void Renderer_renderStandaloneView(
ffi.Pointer<TRenderTarget>,
ffi.Int,
ffi.Int,
ffi.Pointer<ffi.Uint8>)>(isLeaf: true)
ffi.Pointer<ffi.Uint8>,
ffi.Size)>(isLeaf: true)
external void Renderer_readPixels(
ffi.Pointer<TRenderer> tRenderer,
ffi.Pointer<TView> tView,
@@ -1479,6 +1486,7 @@ external void Renderer_readPixels(
int tPixelBufferFormat,
int tPixelDataType,
ffi.Pointer<ffi.Uint8> out,
int outLength,
);
@ffi.Native<
@@ -1976,6 +1984,7 @@ external void Renderer_renderStandaloneViewRenderThread(
ffi.UnsignedInt,
ffi.UnsignedInt,
ffi.Pointer<ffi.Uint8>,
ffi.Size,
ffi.Pointer<ffi.NativeFunction<ffi.Void Function()>>)>(isLeaf: true)
external void Renderer_readPixelsRenderThread(
ffi.Pointer<TRenderer> tRenderer,
@@ -1984,6 +1993,7 @@ external void Renderer_readPixelsRenderThread(
int tPixelBufferFormat,
int tPixelDataType,
ffi.Pointer<ffi.Uint8> out,
int outLength,
ffi.Pointer<ffi.NativeFunction<ffi.Void Function()>> onComplete,
);

View File

@@ -79,20 +79,11 @@ class ThermionViewerFFI extends ThermionViewer {
Future _initialize() async {
_logger.info("Initializing ThermionViewerFFI");
view = FFIView(
await withPointerCallback<TView>(
(cb) => Engine_createViewRenderThread(app.engine, cb)),
app);
await view.setFrustumCullingEnabled(true);
View_setBlendMode(view.view, TBlendMode.TRANSLUCENT);
View_setShadowsEnabled(view.view, false);
View_setStencilBufferEnabled(view.view, false);
View_setAntiAliasing(view.view, false, false, false);
View_setDitheringEnabled(view.view, false);
View_setRenderQuality(view.view, TQualityLevel.MEDIUM);
view = await FilamentApp.instance!.createView() as FFIView;
await FilamentApp.instance!.setClearOptions(0.0, 0.0, 0.0, 0.0);
scene = FFIScene(Engine_createScene(app.engine));
scene = await FilamentApp.instance!.createScene() as FFIScene;
await view.setScene(scene);
final camera = FFICamera(
await withPointerCallback<TCamera>(
@@ -125,7 +116,7 @@ class ThermionViewerFFI extends ThermionViewer {
@override
Future setRendering(bool render) async {
_rendering = render;
await app.setRenderable(view, render);
await view.setRenderable(render);
}
///
@@ -684,38 +675,9 @@ class ThermionViewerFFI extends ThermionViewer {
{List<MaterialInstance>? materialInstances,
bool keepData = false,
bool addToScene = true}) async {
var assetPtr = await withPointerCallback<TSceneAsset>((callback) {
var ptrList = Int64List(materialInstances?.length ?? 0);
if (materialInstances != null && materialInstances.isNotEmpty) {
ptrList.setRange(
0,
materialInstances.length,
materialInstances
.cast<FFIMaterialInstance>()
.map((mi) => mi.pointer.address)
.toList());
}
final asset =
await FilamentApp.instance!.createGeometry(geometry, animationManager, materialInstances: materialInstances) as FFIAsset;
return SceneAsset_createGeometryRenderThread(
app.engine,
geometry.vertices.address,
geometry.vertices.length,
geometry.normals.address,
geometry.normals.length,
geometry.uvs.address,
geometry.uvs.length,
geometry.indices.address,
geometry.indices.length,
geometry.primitiveType.index,
ptrList.address.cast<Pointer<TMaterialInstance>>(),
ptrList.length,
callback);
});
if (assetPtr == nullptr) {
throw Exception("Failed to create geometry");
}
var asset = FFIAsset(assetPtr, app, animationManager);
if (addToScene) {
await scene.add(asset);
}
@@ -730,8 +692,8 @@ class ThermionViewerFFI extends ThermionViewer {
@override
Future<GizmoAsset> getGizmo(GizmoType gizmoType) async {
if (_gizmos[gizmoType] == null) {
_gizmos[gizmoType] =
await FilamentApp.instance!.createGizmo(view, animationManager, gizmoType);
_gizmos[gizmoType] = await FilamentApp.instance!
.createGizmo(view, animationManager, gizmoType);
}
return _gizmos[gizmoType]!;
}

View File

@@ -0,0 +1,110 @@
# Makefile for Thermion Dart - macOS Version
# Using clang to build shared library directly
# Configuration variables
FILAMENT_VERSION = v1.58.0
PACKAGE_NAME = thermion_dart
PLATFORM = macos
# Architecture - default to x64 but can be overridden
ARCH ?= x64
# Compiler and flags
CC = clang++
CFLAGS = -std=c++17 -g -O0 -mmacosx-version-min=13.0
DEFINES = -DENABLE_TRACING=1
# Output library name
OUTPUT_NAME = libthermion_dart.dylib
# Project directory structure
PKG_ROOT = .
NATIVE_SRC_DIR = $(PKG_ROOT)/src
NATIVE_INCLUDE_DIR = $(PKG_ROOT)/include
OUTPUT_DIR = $(PKG_ROOT)/build
# Hardcoded library path (for Filament libraries)
LIB_DIR = /Users/nickfisher/Documents/thermion/thermion_dart/.dart_tool/thermion_dart/lib/v1.58.0/macos/debug
# Libraries are already in LIB_DIR, no download needed
# Source files
SOURCES = $(shell find $(NATIVE_SRC_DIR) -type f -name "*.cpp" -not -path "*CMakeLists*" -not -path "*main.cpp*" -not -path "*windows*")
MATERIAL_SOURCES = $(NATIVE_INCLUDE_DIR)/material/unlit_fixed_size.c \
$(NATIVE_INCLUDE_DIR)/material/image.c \
$(NATIVE_INCLUDE_DIR)/material/grid.c \
$(NATIVE_INCLUDE_DIR)/material/unlit.c \
$(NATIVE_INCLUDE_DIR)/material/gizmo.c
RESOURCE_SOURCES = $(NATIVE_INCLUDE_DIR)/resources/translation_gizmo_glb.c \
$(NATIVE_INCLUDE_DIR)/resources/rotation_gizmo_glb.c
ALL_SOURCES = $(SOURCES) $(MATERIAL_SOURCES) $(RESOURCE_SOURCES)
# Include paths
INCLUDES = -I$(NATIVE_INCLUDE_DIR) -I$(NATIVE_INCLUDE_DIR)/filament
# Libraries to link
LIBS = -lfilament -lbackend -lfilameshio -lviewer -lfilamat -lmeshoptimizer \
-lmikktspace -lgeometry -lutils -lfilabridge -lgltfio_core -lgltfio \
-lfilament-iblprefilter -limage -limageio -ltinyexr -lfilaflat \
-ldracodec -libl -lktxreader -lpng -lz -lstb -luberzlib -lsmol-v \
-luberarchive -lzstd -lbasis_transcoder -lmatdbg -lfgviewer -lbluegl \
-lbluevk -lstdc++
# Frameworks for macOS
FRAMEWORKS = -framework Foundation -framework CoreVideo -framework Cocoa -framework Metal
# Default target
.PHONY: all
all: setup check-libs build
# Setup directories
.PHONY: setup
setup:
mkdir -p "$(LIB_DIR)"
mkdir -p "$(OUTPUT_DIR)"
@echo "Build directories created for macOS"
# Using pre-existing Filament libraries
.PHONY: check-libs
check-libs:
@echo "Using existing Filament libraries in $(LIB_DIR)"
@if [ ! -d "$(LIB_DIR)" ]; then \
echo "ERROR: Library directory $(LIB_DIR) not found"; \
exit 1; \
fi
# Build the shared library using clang
.PHONY: build
build:
@echo "Building Thermion shared library for macOS ($(ARCH))"
$(CC) $(CFLAGS) $(DEFINES) $(INCLUDES) \
-dynamiclib -install_name @rpath/$(OUTPUT_NAME) \
$(ALL_SOURCES) \
-L$(LIB_DIR) $(LIBS) $(FRAMEWORKS) \
-o $(OUTPUT_DIR)/$(OUTPUT_NAME)
@echo "Build complete: $(OUTPUT_DIR)/$(OUTPUT_NAME)"
# Clean build artifacts
# NOTE(review): the rm below is commented out, so this target currently only
# prints a message and does NOT delete $(OUTPUT_DIR) — confirm intentional.
.PHONY: clean
clean:
	@echo "Build artifacts cleaned" # rm -rf "$(OUTPUT_DIR)"
# Help target
.PHONY: help
help:
@echo "Thermion macOS Build System"
@echo ""
@echo "Usage:"
@echo " make [target] [ARCH=architecture]"
@echo ""
@echo "Targets:"
@echo " all Build everything (default)"
@echo " setup Create necessary directories"
@echo " check-libs Verify Filament libraries exist"
@echo " build Build the shared library"
@echo " clean Clean build artifacts"
@echo ""
@echo "Options:"
@echo " ARCH Target architecture (default: x64)"
@echo " Supported: x64, arm64"

View File

@@ -30,7 +30,7 @@ static void Log(const char *fmt, ...) {
va_end(args);
}
#define ERROR(fmt, ...) Log("Error: %s:%d " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__)
#ifdef ENABLE_TRACING
#ifdef __ANDROID__
#define __FILENAME__ (strrchr(__FILE__, '/') ? strrchr(__FILE__, '/') + 1 : __FILE__)

View File

@@ -20,7 +20,8 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
TRenderTarget *tRenderTarget,
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out
uint8_t *out,
size_t outLength
);
EMSCRIPTEN_KEEPALIVE void Renderer_setFrameInterval(
TRenderer *tRenderer,

View File

@@ -64,6 +64,7 @@ EMSCRIPTEN_KEEPALIVE bool View_isStencilBufferEnabled(TView *tView);
EMSCRIPTEN_KEEPALIVE void View_setDitheringEnabled(TView *tView, bool enabled);
EMSCRIPTEN_KEEPALIVE bool View_isDitheringEnabled(TView *tView);
EMSCRIPTEN_KEEPALIVE void View_setScene(TView *tView, TScene *tScene);
EMSCRIPTEN_KEEPALIVE void View_setFrontFaceWindingInverted(TView *tView, bool inverted);
typedef void (*PickCallback)(uint32_t requestId, EntityId entityId, float depth, float fragX, float fragY, float fragZ);
EMSCRIPTEN_KEEPALIVE void View_pick(TView* tView, uint32_t requestId, uint32_t x, uint32_t y, PickCallback callback);

View File

@@ -77,6 +77,7 @@ namespace thermion
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out,
size_t outLength,
void (*onComplete)());
EMSCRIPTEN_KEEPALIVE void Material_createInstanceRenderThread(TMaterial *tMaterial, void (*onComplete)(TMaterialInstance *));

View File

@@ -0,0 +1,12 @@
.global CAPTURE_UV_CAPTURE_UV_OFFSET;
.global CAPTURE_UV_CAPTURE_UV_SIZE;
.global CAPTURE_UV_PACKAGE
.section .rodata
CAPTURE_UV_PACKAGE:
.incbin "capture_uv.bin"
CAPTURE_UV_CAPTURE_UV_OFFSET:
.int 0
CAPTURE_UV_CAPTURE_UV_SIZE:
.int 125851

View File

@@ -0,0 +1,12 @@
.global _CAPTURE_UV_CAPTURE_UV_OFFSET;
.global _CAPTURE_UV_CAPTURE_UV_SIZE;
.global _CAPTURE_UV_PACKAGE
.section __TEXT,__const
_CAPTURE_UV_PACKAGE:
.incbin "capture_uv.bin"
_CAPTURE_UV_CAPTURE_UV_OFFSET:
.int 0
_CAPTURE_UV_CAPTURE_UV_SIZE:
.int 125851

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
#ifndef CAPTURE_UV_H_
#define CAPTURE_UV_H_
#include <stdint.h>
extern "C" {
extern const uint8_t CAPTURE_UV_PACKAGE[];
extern int CAPTURE_UV_CAPTURE_UV_OFFSET;
extern int CAPTURE_UV_CAPTURE_UV_SIZE;
}
#define CAPTURE_UV_CAPTURE_UV_DATA (CAPTURE_UV_PACKAGE + CAPTURE_UV_CAPTURE_UV_OFFSET)
#endif

View File

@@ -51,7 +51,7 @@ const uint8_t UNLIT_PACKAGE[] = {
0x53, 0x45, 0x54, 0x44, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x54, 0x53, 0x4e, 0x49, 0x5f, 0x54, 0x41,
0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x53, 0x43, 0x32, 0x41, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4f,
0x43, 0x32, 0x41, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4f, 0x4d, 0x55, 0x43, 0x5f, 0x54, 0x41, 0x4d,
0x01, 0x00, 0x00, 0x00, 0x02, 0x50, 0x4f, 0x52, 0x50, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4f, 0x52, 0x50, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0x45, 0x54, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x44, 0x49,
0x55, 0x55, 0x5f, 0x54, 0x41, 0x4d, 0x08, 0x00, 0x00, 0x00, 0xf7, 0xaa, 0x19, 0x69, 0x9f, 0xc0, 0xf1, 0x29, 0x44, 0x41,
0x48, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x4c, 0x4d, 0x48, 0x53, 0x5f, 0x54, 0x41, 0x4d, 0x01,

View File

@@ -0,0 +1,270 @@
import Foundation
import GLKit
@objc public class ThermionTextureSwift : NSObject {
public var pixelBuffer: CVPixelBuffer?
var pixelBufferAttrs = [
kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ABGR ),
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary
] as [CFString : Any] as CFDictionary
@objc public var cvMetalTextureCache:CVMetalTextureCache?
@objc public var metalDevice:MTLDevice?
@objc public var cvMetalTexture:CVMetalTexture?
@objc public var metalTexture:MTLTexture?
@objc public var metalTextureAddress:Int = -1
@objc override public init() {
}
@objc public init(width:Int64, height:Int64, isDepth:Bool) {
    if(self.metalDevice == nil) {
        self.metalDevice = MTLCreateSystemDefaultDevice()!
    }

    if isDepth {
        print("Creating depth texture")
        // Depth attachments do not need CVPixelBuffer/IOSurface backing;
        // create a GPU-private Metal texture directly.
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .depth32Float,
            width: Int(width),
            height: Int(height),
            mipmapped: false)
        textureDescriptor.usage = [.renderTarget, .shaderRead]
        textureDescriptor.storageMode = .private // best performance for GPU-only access

        metalTexture = metalDevice?.makeTexture(descriptor: textureDescriptor)
        let metalTexturePtr = Unmanaged.passRetained(metalTexture!).toOpaque()
        metalTextureAddress = Int(bitPattern: metalTexturePtr)
        return
    }

    print("Creating color texture")

    // Color path: back the texture with a BGRA8 CVPixelBuffer so the pixels
    // can be shared across APIs. (Two unused locals that recomputed these
    // constants were removed: past the early return above, isDepth is always
    // false, so they were fixed at .bgra8Unorm / kCVPixelFormatType_32BGRA.)
    // NOTE(review): pixelBufferAttrs requests kCVPixelFormatType_32ABGR while
    // this buffer is created as kCVPixelFormatType_32BGRA — confirm which
    // format is intended.
    if(CVPixelBufferCreate(kCFAllocatorDefault, Int(width), Int(height),
        kCVPixelFormatType_32BGRA, pixelBufferAttrs, &pixelBuffer) != kCVReturnSuccess) {
        print("Error allocating pixel buffer")
        metalTextureAddress = -1;
        return
    }

    // Lazily create the texture cache used to wrap the pixel buffer.
    if self.cvMetalTextureCache == nil {
        let cacheCreationResult = CVMetalTextureCacheCreate(
            kCFAllocatorDefault,
            nil,
            self.metalDevice!,
            nil,
            &self.cvMetalTextureCache)
        if(cacheCreationResult != kCVReturnSuccess) {
            print("Error creating Metal texture cache")
            metalTextureAddress = -1
            return
        }
    }

    // Wrap the pixel buffer in a Metal texture and publish its address.
    let cvret = CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault,
        self.cvMetalTextureCache!,
        pixelBuffer!, nil,
        MTLPixelFormat.bgra8Unorm,
        Int(width), Int(height),
        0,
        &cvMetalTexture)
    if(cvret != kCVReturnSuccess) {
        print("Error creating texture from image")
        metalTextureAddress = -1
        return
    }
    metalTexture = CVMetalTextureGetTexture(cvMetalTexture!)
    let metalTexturePtr = Unmanaged.passRetained(metalTexture!).toOpaque()
    metalTextureAddress = Int(bitPattern:metalTexturePtr)
}
// Releases all texture-related resources held by this instance.
@objc public func destroyTexture() {
    // The texture cache only exists for color (CVPixelBuffer-backed)
    // textures; the depth path never creates one, so the previous
    // force-unwrap crashed when destroying a depth texture. Flush
    // conditionally instead.
    if let cache = self.cvMetalTextureCache {
        CVMetalTextureCacheFlush(cache, 0)
    }
    self.metalTexture = nil
    self.cvMetalTexture = nil
    self.pixelBuffer = nil
    self.metalDevice = nil
    self.cvMetalTextureCache = nil
}
// Draws the image at [imageURL] into the backing CVPixelBuffer, flipped
// vertically to match Metal's coordinate system. Returns false (after
// logging) on any failure.
@objc public func fillWithPNGImage(imageURL: URL) -> Bool {
    // Make sure we have a pixel buffer to work with (depth textures have none)
    guard let pixelBuffer = self.pixelBuffer else {
        print("Error: No pixel buffer available")
        return false
    }

    // Try to load the image from the provided URL
    guard let nsImage = NSImage(contentsOf: imageURL) else {
        print("Error: Could not load image from \(imageURL.path)")
        return false
    }

    // Make sure we have a CGImage to work with
    guard let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
        print("Error: Could not get CGImage from NSImage")
        return false
    }

    // Get pixel buffer dimensions
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)

    // Lock the pixel buffer for writing; the single defer below replaces
    // the duplicated unlock-then-return sequences in the previous version.
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    defer {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    }

    // Get the base address of the pixel buffer
    guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
        print("Error: Could not get base address of pixel buffer")
        return false
    }

    // Create a graphics context backed directly by the pixel buffer memory.
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    guard let context = CGContext(
        data: baseAddress,
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: bytesPerRow,
        // BGRA little-endian, premultiplied alpha — matches the
        // kCVPixelFormatType_32BGRA buffer layout.
        bitmapInfo: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue
    ) else {
        print("Error: Could not create CGContext from pixel buffer")
        return false
    }

    // Flip the coordinate system to match Metal's coordinate system
    context.translateBy(x: 0, y: CGFloat(height))
    context.scaleBy(x: 1, y: -1)

    // Draw the image to fill the entire texture
    let rect = CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height))
    context.draw(cgImage, in: rect)

    return true
}
// Fills the backing CVPixelBuffer with opaque blue.
//
// BGRA8 byte layout is B (+0), G (+1), R (+2), A (+3). The previous
// version labelled +1 as "red" and +2 as "green" (swapped); the written
// values (0 for both) were still correct, so output is unchanged.
@objc public func fillColor() {
    // No-op for depth textures, which have no CVPixelBuffer backing;
    // previously this force-unwrapped and would have crashed.
    guard let pixelBuffer = self.pixelBuffer else {
        return
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    // Unlock on every exit path (the previous version leaked the lock on
    // the nil-base-address early return).
    defer {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    }
    let bufferWidth = Int(CVPixelBufferGetWidth(pixelBuffer))
    let bufferHeight = Int(CVPixelBufferGetHeight(pixelBuffer))
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
        return
    }
    for row in 0..<bufferHeight {
        var pixel = baseAddress + row * bytesPerRow
        for _ in 0..<bufferWidth {
            pixel.storeBytes(of: 255, as: UInt8.self)       // blue
            (pixel + 1).storeBytes(of: 0, as: UInt8.self)   // green
            (pixel + 2).storeBytes(of: 0, as: UInt8.self)   // red
            (pixel + 3).storeBytes(of: 255, as: UInt8.self) // alpha
            pixel += 4
        }
    }
}
// Copies the texture's contents into CPU memory by blitting into a
// shared staging buffer. Color (BGRA8) textures are returned with their
// channels swizzled to RGBA; depth textures are returned as raw bytes.
// Returns nil (after logging) if the texture or Metal objects are
// unavailable.
@objc public func getTextureBytes() -> NSData? {
    guard let tex = self.metalTexture else {
        print("Metal texture is not available")
        return nil
    }

    let texWidth = tex.width
    let texHeight = tex.height

    // Depth formats need no channel reordering and may be 2 or 4 bytes/pixel.
    let isDepthTexture = tex.pixelFormat == .depth32Float ||
                         tex.pixelFormat == .depth16Unorm

    print("Using texture pixel format : \(tex.pixelFormat) isDepthTexture \(isDepthTexture) (depth32Float \(MTLPixelFormat.depth32Float)) (depth16Unorm \(MTLPixelFormat.depth16Unorm))")

    // depth32Float = 4 bytes, depth16Unorm = 2 bytes, BGRA8 = 4 bytes.
    let bytesPerPixel: Int
    if isDepthTexture {
        bytesPerPixel = tex.pixelFormat == .depth32Float ? 4 : 2
    } else {
        bytesPerPixel = 4
    }
    let rowBytes = texWidth * bytesPerPixel
    let totalBytes = rowBytes * texHeight

    // CPU-visible staging buffer to receive the blit.
    guard let staging = self.metalDevice?.makeBuffer(
        length: totalBytes,
        options: .storageModeShared) else {
        print("Failed to create staging buffer")
        return nil
    }

    // One-shot queue/command buffer/encoder for the copy.
    guard let queue = self.metalDevice?.makeCommandQueue(),
          let commandBuffer = queue.makeCommandBuffer(),
          let blit = commandBuffer.makeBlitCommandEncoder() else {
        print("Failed to create command objects")
        return nil
    }

    blit.copy(
        from: tex,
        sourceSlice: 0,
        sourceLevel: 0,
        sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
        sourceSize: MTLSize(width: texWidth, height: texHeight, depth: 1),
        to: staging,
        destinationOffset: 0,
        destinationBytesPerRow: rowBytes,
        destinationBytesPerImage: totalBytes
    )
    blit.endEncoding()
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()

    if isDepthTexture {
        // For depth textures, just return the raw data.
        return NSData(bytes: staging.contents(), length: totalBytes)
    }

    // BGRA -> RGBA: swap bytes 0 and 2 of each 4-byte pixel in a copy.
    let data = NSMutableData(bytes: staging.contents(), length: totalBytes)
    let px = data.mutableBytes.bindMemory(to: UInt8.self, capacity: totalBytes)
    var offset = 0
    while offset < totalBytes {
        let tmp = px[offset]
        px[offset] = px[offset + 2]
        px[offset + 2] = tmp
        offset += 4
    }
    return data
}
}

View File

@@ -23,6 +23,11 @@ namespace thermion
TTexture *tColor,
TTexture *tDepth)
{
if(!tColor || !tDepth) {
ERROR("Color & depth attachments must be provided");
return nullptr;
}
TRACE("Creating render target %dx%d", width, height);
auto engine = reinterpret_cast<filament::Engine *>(tEngine);
auto color = reinterpret_cast<filament::Texture *>(tColor);
auto depth = reinterpret_cast<filament::Texture *>(tDepth);

View File

@@ -93,7 +93,8 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
TRenderTarget *tRenderTarget,
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out) {
uint8_t *out,
size_t outLength) {
auto *renderer = reinterpret_cast<filament::Renderer *>(tRenderer);
auto *renderTarget = reinterpret_cast<filament::RenderTarget *>(tRenderTarget);
@@ -101,8 +102,6 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
filament::Viewport const &vp = view->getViewport();
size_t pixelBufferSize = vp.width * vp.height * 4;
filament::backend::PixelDataFormat pixelBufferFormat = static_cast<filament::backend::PixelDataFormat>(tPixelBufferFormat);
filament::backend::PixelDataType pixelDataType = static_cast<filament::backend::PixelDataType>(tPixelDataType);
@@ -114,7 +113,7 @@ EMSCRIPTEN_KEEPALIVE void Renderer_readPixels(
auto pbd = filament::Texture::PixelBufferDescriptor(
out, pixelBufferSize,
out, outLength,
pixelBufferFormat,
pixelDataType,
dispatcher,

View File

@@ -231,6 +231,11 @@ using namespace filament;
view->setScene(scene);
}
EMSCRIPTEN_KEEPALIVE void View_setFrontFaceWindingInverted(TView *tView, bool inverted) {
auto *view = reinterpret_cast<View*>(tView);
view->setFrontFaceWindingInverted(inverted);
}
#ifdef __cplusplus
}

View File

@@ -362,11 +362,12 @@ extern "C"
TPixelDataFormat tPixelBufferFormat,
TPixelDataType tPixelDataType,
uint8_t *out,
size_t outLength,
void (*onComplete)()) {
std::packaged_task<void()> lambda(
[=]() mutable
{
Renderer_readPixels(tRenderer, tView, tRenderTarget, tPixelBufferFormat, tPixelDataType, out);
Renderer_readPixels(tRenderer, tView, tRenderTarget, tPixelBufferFormat, tPixelDataType, out, outLength);
onComplete();
});
auto fut = _renderThread->add_task(lambda);

View File

@@ -22,11 +22,9 @@ namespace thermion
SceneAsset *sceneAsset,
Engine *engine,
View *view,
Scene *scene,
Material *material) noexcept : _source(sceneAsset),
_engine(engine),
_view(view),
_scene(scene),
_material(material)
{
auto &entityManager = _engine->getEntityManager();
@@ -80,6 +78,10 @@ namespace thermion
_materialInstances.push_back(materialInstance);
auto instance = _source->createInstance(&materialInstance, 1);
if(!instance) {
Log("FATAL: failed to create asset instance");
}
TRACE("Created Gizmo axis glTF instance with head entity %d", instance->getEntity());
auto color = filament::math::float4(AXIS_COLORS[axis], 0.5f);
materialInstance->setParameter("baseColorFactor", color);

View File

@@ -1,970 +0,0 @@
// #include <memory>
// #include <string>
// #include <sstream>
// #include <thread>
// #include <vector>
// #include <unordered_set>
// #include <stack>
// #include <filament/Engine.h>
// #include <filament/TransformManager.h>
// #include <filament/Texture.h>
// #include <filament/RenderableManager.h>
// #include <filament/Viewport.h>
// #include <filament/Frustum.h>
// #include <utils/EntityManager.h>
// #include <gltfio/Animator.h>
// #include <gltfio/AssetLoader.h>
// #include <gltfio/FilamentAsset.h>
// #include <gltfio/ResourceLoader.h>
// #include <gltfio/TextureProvider.h>
// #include <gltfio/math.h>
// #include <gltfio/materials/uberarchive.h>
// #include <imageio/ImageDecoder.h>
// #include "material/FileMaterialProvider.hpp"
// #include "material/UnlitMaterialProvider.hpp"
// #include "material/unlit.h"
// #include "material/gizmo.h"
// #include "StreamBufferAdapter.hpp"
// #include "Log.hpp"
// #include "scene/SceneManager.hpp"
// #include "scene/CustomGeometry.hpp"
// #include "scene/GeometrySceneAsset.hpp"
// #include "scene/GltfSceneAsset.hpp"
// #include "scene/Gizmo.hpp"
// #include "scene/SceneAsset.hpp"
// #include "scene/GeometrySceneAssetBuilder.hpp"
// #include "TextureProjection.hpp"
// #include "resources/translation_gizmo_glb.h"
// #include "resources/rotation_gizmo_glb.h"
// extern "C"
// {
// #include "material/image.h"
// #include "material/unlit_fixed_size.h"
// }
// namespace thermion
// {
// using namespace std::chrono;
// using namespace image;
// using namespace utils;
// using namespace filament;
// using namespace filament::gltfio;
// using std::unique_ptr;
// SceneManager::SceneManager(const ResourceLoaderWrapperImpl *const resourceLoaderWrapper,
// Engine *engine,
// Scene *scene,
// const char *uberArchivePath,
// Camera *mainCamera)
// : _resourceLoaderWrapper(resourceLoaderWrapper),
// _engine(engine),
// _scene(scene),
// _mainCamera(mainCamera)
// {
// _stbDecoder = createStbProvider(_engine);
// _ktxDecoder = createKtx2Provider(_engine);
// _gltfResourceLoader = new ResourceLoader({.engine = _engine,
// .normalizeSkinningWeights = true});
// if (uberArchivePath)
// {
// auto uberdata = resourceLoaderWrapper->load(uberArchivePath);
// if (!uberdata.data)
// {
// Log("Failed to load ubershader material. This is fatal.");
// }
// _ubershaderProvider = gltfio::createUbershaderProvider(_engine, uberdata.data, uberdata.size);
// resourceLoaderWrapper->free(uberdata);
// }
// else
// {
// _ubershaderProvider = gltfio::createUbershaderProvider(
// _engine, UBERARCHIVE_DEFAULT_DATA, UBERARCHIVE_DEFAULT_SIZE);
// }
// _unlitMaterialProvider = new UnlitMaterialProvider(_engine, UNLIT_PACKAGE, UNLIT_UNLIT_SIZE);
// utils::EntityManager &em = utils::EntityManager::get();
// _ncm = new NameComponentManager(em);
// _assetLoader = AssetLoader::create({_engine, _ubershaderProvider, _ncm, &em});
// _gltfResourceLoader->addTextureProvider("image/ktx2", _ktxDecoder);
// _gltfResourceLoader->addTextureProvider("image/png", _stbDecoder);
// _gltfResourceLoader->addTextureProvider("image/jpeg", _stbDecoder);
// auto &tm = _engine->getTransformManager();
// _collisionComponentManager = std::make_unique<CollisionComponentManager>(tm);
// _animationManager = std::make_unique<AnimationManager>(_engine, _scene);
// _unlitFixedSizeMaterial =
// Material::Builder()
// .package(UNLIT_FIXED_SIZE_UNLIT_FIXED_SIZE_DATA, UNLIT_FIXED_SIZE_UNLIT_FIXED_SIZE_SIZE)
// .build(*_engine);
// _gizmoMaterial =
// Material::Builder()
// .package(GIZMO_GIZMO_DATA, GIZMO_GIZMO_SIZE)
// .build(*_engine);
// }
// SceneManager::~SceneManager()
// {
// TRACE("Destroying cameras");
// for (auto camera : _cameras)
// {
// auto entity = camera->getEntity();
// _engine->destroyCameraComponent(entity);
// _engine->getEntityManager().destroy(entity);
// }
// TRACE("Cameras destroyed");
// destroyAll();
// TRACE("Destroyed all assets");
// _engine->destroy(_unlitFixedSizeMaterial);
// _engine->destroy(_gizmoMaterial);
// TRACE("Destroyed materials");
// _cameras.clear();
// _grid = nullptr;
// _gltfResourceLoader->asyncCancelLoad();
// _ubershaderProvider->destroyMaterials();
// _animationManager = std::nullptr_t();
// _collisionComponentManager = std::nullptr_t();
// delete _ncm;
// delete _gltfResourceLoader;
// delete _stbDecoder;
// delete _ktxDecoder;
// delete _ubershaderProvider;
// TRACE("Destroying asset loader");
// AssetLoader::destroy(&_assetLoader);
// TRACE("Destroyed asset loader");
// }
// SceneAsset *SceneManager::createGrid(Material *material)
// {
// if (!_grid)
// {
// if (!material)
// {
// material = Material::Builder()
// .package(GRID_PACKAGE, GRID_GRID_SIZE)
// .build(*_engine);
// }
// _grid = std::make_unique<GridOverlay>(*_engine, material);
// }
// return _grid.get();
// }
// bool SceneManager::isGridEntity(utils::Entity entity)
// {
// if (!_grid)
// {
// TRACE("No grid");
// return false;
// }
// if (entity == _grid->getEntity())
// {
// TRACE("%d is a grid entity.", entity);
// return true;
// }
// for (int i = 0; i < _grid->getChildEntityCount(); i++)
// {
// if (entity == _grid->getChildEntities()[i])
// {
// TRACE("%d is a child entity of grid.", entity);
// return true;
// }
// }
// return false;
// }
// Gizmo *SceneManager::createGizmo(View *view, Scene *scene, GizmoType type)
// {
// TRACE("Creating gizmo type %d", type);
// Gizmo *raw;
// switch (type)
// {
// case GizmoType::TRANSLATION:
// if (!_translationGizmoGlb)
// {
// TRACE("Translation gizmo source not found, loading");
// _translationGizmoGlb = loadGlbFromBuffer(TRANSLATION_GIZMO_GLB_TRANSLATION_GIZMO_DATA, TRANSLATION_GIZMO_GLB_TRANSLATION_GIZMO_SIZE, 100, true, 4, 0, false, false);
// }
// raw = new Gizmo(_translationGizmoGlb, _engine, view, scene, _unlitFixedSizeMaterial);
// TRACE("Built translation gizmo");
// break;
// case GizmoType::ROTATION:
// if (!_rotationGizmoGlb)
// {
// TRACE("Rotation gizmo source not found, loading");
// _rotationGizmoGlb = loadGlbFromBuffer(ROTATION_GIZMO_GLB_ROTATION_GIZMO_DATA, ROTATION_GIZMO_GLB_ROTATION_GIZMO_SIZE, 100, true, 4, 0, false, false);
// }
// raw = new Gizmo(_rotationGizmoGlb, _engine, view, scene, _unlitFixedSizeMaterial);
// TRACE("Built rotation gizmo");
// break;
// }
// _sceneAssets.push_back(std::unique_ptr<Gizmo>(raw));
// return raw;
// }
// int SceneManager::getInstanceCount(EntityId entityId)
// {
// auto entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// return asset->getInstanceCount();
// }
// }
// return -1;
// }
// void SceneManager::getInstances(EntityId entityId, EntityId *out)
// {
// auto entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// for (int i = 0; i < asset->getInstanceCount(); i++)
// {
// out[i] = Entity::smuggle(asset->getInstanceAt(i)->getEntity());
// }
// return;
// }
// }
// }
// SceneAsset *SceneManager::loadGltf(const char *uri,
// const char *relativeResourcePath,
// int numInstances,
// bool keepData)
// {
// if (numInstances < 1)
// {
// return std::nullptr_t();
// }
// ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
// std::vector<FilamentInstance *> instances(numInstances);
// FilamentAsset *asset = _assetLoader->createInstancedAsset((uint8_t *)rbuf.data, rbuf.size, instances.data(), numInstances);
// if (!asset)
// {
// Log("Unable to load glTF asset at %d", uri);
// return std::nullptr_t();
// }
// const char *const *const resourceUris = asset->getResourceUris();
// const size_t resourceUriCount = asset->getResourceUriCount();
// std::vector<ResourceBuffer> resourceBuffers;
// for (size_t i = 0; i < resourceUriCount; i++)
// {
// std::string uri = std::string(relativeResourcePath) + std::string("/") + std::string(resourceUris[i]);
// ResourceBuffer buf = _resourceLoaderWrapper->load(uri.c_str());
// resourceBuffers.push_back(buf);
// ResourceLoader::BufferDescriptor b(buf.data, buf.size);
// _gltfResourceLoader->addResourceData(resourceUris[i], std::move(b));
// }
// #ifdef __EMSCRIPTEN__
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glTF asset");
// _resourceLoaderWrapper->free(rbuf);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// return 0;
// }
// while (_gltfResourceLoader->asyncGetLoadProgress() < 1.0f)
// {
// _gltfResourceLoader->asyncUpdateLoad();
// }
// #else
// // load resources synchronously
// if (!_gltfResourceLoader->loadResources(asset))
// {
// Log("Unknown error loading glTF asset");
// _resourceLoaderWrapper->free(rbuf);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// return std::nullptr_t();
// }
// #endif
// auto sceneAsset = std::make_unique<GltfSceneAsset>(
// asset,
// _assetLoader,
// _engine,
// _ncm);
// auto filamentInstance = asset->getInstance();
// size_t entityCount = filamentInstance->getEntityCount();
// _scene->addEntities(filamentInstance->getEntities(), entityCount);
// for (auto &rb : resourceBuffers)
// {
// _resourceLoaderWrapper->free(rb);
// }
// _resourceLoaderWrapper->free(rbuf);
// auto lights = asset->getLightEntities();
// _scene->addEntities(lights, asset->getLightEntityCount());
// sceneAsset->createInstance();
// auto entityId = Entity::smuggle(sceneAsset->getEntity());
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// Log("Loaded glTF asset from uri: %s", uri);
// return raw;
// }
// void SceneManager::setVisibilityLayer(EntityId entityId, int layer)
// {
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// if (asset->getEntity() == entity)
// {
// asset->setLayer(_engine->getRenderableManager(), layer);
// }
// }
// }
// SceneAsset *SceneManager::loadGlbFromBuffer(const uint8_t *data, size_t length, int numInstances, bool keepData, int priority, int layer, bool loadResourcesAsync, bool addToScene)
// {
// auto &rm = _engine->getRenderableManager();
// std::vector<FilamentInstance *> instances(numInstances);
// FilamentAsset *asset = _assetLoader->createInstancedAsset((const uint8_t *)data, length, instances.data(), numInstances);
// Log("Created glTF asset with %d instances.", numInstances);
// if (!asset)
// {
// Log("Unknown error loading GLB asset.");
// return std::nullptr_t();
// }
// #ifdef __EMSCRIPTEN__
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// while (_gltfResourceLoader->asyncGetLoadProgress() < 1.0f)
// {
// _gltfResourceLoader->asyncUpdateLoad();
// }
// #else
// if (loadResourcesAsync)
// {
// if (!_gltfResourceLoader->asyncBeginLoad(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// }
// else
// {
// if (!_gltfResourceLoader->loadResources(asset))
// {
// Log("Unknown error loading glb asset");
// return 0;
// }
// }
// #endif
// auto sceneAsset = std::make_unique<GltfSceneAsset>(
// asset,
// _assetLoader,
// _engine,
// _ncm);
// auto sceneAssetInstance = sceneAsset->createInstance();
// if (addToScene)
// {
// sceneAssetInstance->addAllEntities(_scene);
// }
// sceneAssetInstance->setPriority(_engine->getRenderableManager(), priority);
// sceneAssetInstance->setLayer(_engine->getRenderableManager(), layer);
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// return raw;
// }
// SceneAsset *SceneManager::createInstance(SceneAsset *asset, MaterialInstance **materialInstances, size_t materialInstanceCount)
// {
// std::lock_guard lock(_mutex);
// auto instance = asset->createInstance(materialInstances, materialInstanceCount);
// if (instance)
// {
// instance->addAllEntities(_scene);
// }
// else
// {
// Log("Failed to create instance");
// }
// return instance;
// }
// SceneAsset *SceneManager::loadGlb(const char *uri, int numInstances, bool keepData)
// {
// ResourceBuffer rbuf = _resourceLoaderWrapper->load(uri);
// auto entity = loadGlbFromBuffer((const uint8_t *)rbuf.data, rbuf.size, numInstances, keepData);
// _resourceLoaderWrapper->free(rbuf);
// return entity;
// }
// bool SceneManager::removeFromScene(EntityId entityId)
// {
// _scene->remove(Entity::import(entityId));
// return true;
// }
// bool SceneManager::addToScene(EntityId entityId)
// {
// _scene->addEntity(Entity::import(entityId));
// return true;
// }
// void SceneManager::destroyAll()
// {
// destroyLights();
// destroyAssets();
// std::lock_guard lock(_mutex);
// for (auto *materialInstance : _materialInstances)
// {
// _engine->destroy(materialInstance);
// }
// _materialInstances.clear();
// }
// void SceneManager::destroy(SceneAsset *asset)
// {
// std::lock_guard lock(_mutex);
// auto entity = asset->getEntity();
// _collisionComponentManager->removeComponent(entity);
// _animationManager->removeAnimationComponent(utils::Entity::smuggle(entity));
// for (int i = 0; i < asset->getChildEntityCount(); i++)
// {
// auto childEntity = asset->getChildEntities()[i];
// _collisionComponentManager->removeComponent(childEntity);
// _animationManager->removeAnimationComponent(utils::Entity::smuggle(childEntity));
// }
// asset->removeAllEntities(_scene);
// if (asset->isInstance())
// {
// asset->destroyInstance(asset);
// }
// else
// {
// auto it = std::remove_if(_sceneAssets.begin(), _sceneAssets.end(), [=](auto &sceneAsset)
// { return sceneAsset.get() == asset; });
// _sceneAssets.erase(it, _sceneAssets.end());
// }
// }
// utils::Entity SceneManager::addLight(
// LightManager::Type t,
// float colour,
// float intensity,
// float posX,
// float posY,
// float posZ,
// float dirX,
// float dirY,
// float dirZ,
// float falloffRadius,
// float spotLightConeInner,
// float spotLightConeOuter,
// float sunAngularRadius,
// float sunHaloSize,
// float sunHaloFallof,
// bool shadows)
// {
// auto light = EntityManager::get().create();
// // LightManager::ShadowOptions shadowOptions;
// // shadowOptions.stable = true;
// auto result = LightManager::Builder(t)
// .color(Color::cct(colour))
// .intensity(intensity)
// .falloff(falloffRadius)
// .spotLightCone(spotLightConeInner, spotLightConeOuter)
// .sunAngularRadius(sunAngularRadius)
// .sunHaloSize(sunHaloSize)
// .sunHaloFalloff(sunHaloFallof)
// .position(filament::math::float3(posX, posY, posZ))
// .direction(filament::math::float3(dirX, dirY, dirZ))
// // .shadowOptions(shadowOptions)
// .castShadows(shadows)
// .build(*_engine, light);
// if (result != LightManager::Builder::Result::Success)
// {
// Log("ERROR : failed to create light");
// }
// else
// {
// _scene->addEntity(light);
// _lights.push_back(light);
// TRACE("Created light");
// }
// return light;
// }
// void SceneManager::removeLight(utils::Entity entity)
// {
// auto removed = remove(_lights.begin(), _lights.end(), entity);
// _scene->remove(entity);
// EntityManager::get().destroy(1, &entity);
// }
// void SceneManager::destroyLights()
// {
// std::lock_guard lock(_mutex);
// _scene->removeEntities(_lights.data(), _lights.size());
// EntityManager::get().destroy(_lights.size(), _lights.data());
// _lights.clear();
// }
// void SceneManager::destroyAssets()
// {
// std::lock_guard lock(_mutex);
// for (auto &asset : _sceneAssets)
// {
// asset->removeAllEntities(_scene);
// for(int i = 0; i < asset->getInstanceCount(); i++) {
// asset->getInstanceAt(i)->removeAllEntities(_scene);
// }
// }
// _sceneAssets.clear();
// }
// void SceneManager::addCollisionComponent(EntityId entityId, void (*onCollisionCallback)(const EntityId entityId1, const EntityId entityId2), bool affectsTransform)
// {
// std::lock_guard lock(_mutex);
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// auto *instance = reinterpret_cast<GltfSceneAssetInstance *>(asset->getInstanceByEntity(entity));
// if (instance)
// {
// auto collisionInstance = _collisionComponentManager->addComponent(instance->getInstance()->getRoot());
// _collisionComponentManager->elementAt<0>(collisionInstance) = instance->getInstance()->getBoundingBox();
// _collisionComponentManager->elementAt<1>(collisionInstance) = onCollisionCallback;
// _collisionComponentManager->elementAt<2>(collisionInstance) = affectsTransform;
// return;
// }
// }
// }
// void SceneManager::removeCollisionComponent(EntityId entityId)
// {
// std::lock_guard lock(_mutex);
// utils::Entity entity = utils::Entity::import(entityId);
// _collisionComponentManager->removeComponent(entity);
// }
// void SceneManager::testCollisions(EntityId entityId)
// {
// utils::Entity entity = utils::Entity::import(entityId);
// for (auto &asset : _sceneAssets)
// {
// auto *instance = reinterpret_cast<GltfSceneAssetInstance *>(asset->getInstanceByEntity(entity));
// if (instance)
// {
// const auto &tm = _engine->getTransformManager();
// auto transformInstance = tm.getInstance(entity);
// auto worldTransform = tm.getWorldTransform(transformInstance);
// auto aabb = instance->getInstance()->getBoundingBox();
// aabb = aabb.transform(worldTransform);
// _collisionComponentManager->collides(entity, aabb);
// }
// }
// }
// void SceneManager::update()
// {
// _animationManager->update();
// _updateTransforms();
// }
// void SceneManager::_updateTransforms()
// {
// std::lock_guard lock(_mutex);
// // auto &tm = _engine->getTransformManager();
// // tm.openLocalTransformTransaction();
// // for (const auto &[entityId, transformUpdate] : _transformUpdates)
// // {
// // const auto &pos = _instances.find(entityId);
// // bool isCollidable = true;
// // Entity entity;
// // filament::TransformManager::Instance transformInstance;
// // filament::math::mat4f transform;
// // Aabb boundingBox;
// // if (pos == _instances.end())
// // {
// // isCollidable = false;
// // entity = Entity::import(entityId);
// // }
// // else
// // {
// // const auto *instance = pos->second;
// // entity = instance->getRoot();
// // boundingBox = instance->getBoundingBox();
// // }
// // transformInstance = tm.getInstance(entity);
// // transform = tm.getTransform(transformInstance);
// // if (isCollidable)
// // {
// // auto transformedBB = boundingBox.transform(transform);
// // auto collisionAxes = _collisionComponentManager->collides(entity, transformedBB);
// // if (collisionAxes.size() == 1)
// // {
// // // auto globalAxis = collisionAxes[0];
// // // globalAxis *= norm(relativeTranslation);
// // // auto newRelativeTranslation = relativeTranslation + globalAxis;
// // // translation -= relativeTranslation;
// // // translation += newRelativeTranslation;
// // // transform = composeMatrix(translation, rotation, scale);
// // }
// // else if (collisionAxes.size() > 1)
// // {
// // // translation -= relativeTranslation;
// // // transform = composeMatrix(translation, rotation, scale);
// // }
// // }
// // tm.setTransform(transformInstance, transformUpdate);
// // }
// // tm.commitLocalTransformTransaction();
// // _transformUpdates.clear();
// }
// void SceneManager::queueRelativePositionUpdateFromViewportVector(View *view, EntityId entityId, float viewportCoordX, float viewportCoordY)
// {
// // Get the camera and viewport
// const auto &camera = view->getCamera();
// const auto &vp = view->getViewport();
// // Convert viewport coordinates to NDC space
// float ndcX = (2.0f * viewportCoordX) / vp.width - 1.0f;
// float ndcY = 1.0f - (2.0f * viewportCoordY) / vp.height;
// // Get the current position of the entity
// auto &tm = _engine->getTransformManager();
// auto entity = Entity::import(entityId);
// auto transformInstance = tm.getInstance(entity);
// auto currentTransform = tm.getTransform(transformInstance);
// // get entity model origin in camera space
// auto entityPositionInCameraSpace = camera.getViewMatrix() * currentTransform * filament::math::float4{0.0f, 0.0f, 0.0f, 1.0f};
// // get entity model origin in clip space
// auto entityPositionInClipSpace = camera.getProjectionMatrix() * entityPositionInCameraSpace;
// auto entityPositionInNdcSpace = entityPositionInClipSpace / entityPositionInClipSpace.w;
// // Viewport coords in NDC space (use entity position in camera space Z to project onto near plane)
// math::float4 ndcNearPlanePos = {ndcX, ndcY, -1.0f, 1.0f};
// math::float4 ndcFarPlanePos = {ndcX, ndcY, 0.99f, 1.0f};
// math::float4 ndcEntityPlanePos = {ndcX, ndcY, entityPositionInNdcSpace.z, 1.0f};
// // Get viewport coords in clip space
// math::float4 nearPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcNearPlanePos;
// auto nearPlaneInCameraSpace = nearPlaneInClipSpace / nearPlaneInClipSpace.w;
// math::float4 farPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcFarPlanePos;
// auto farPlaneInCameraSpace = farPlaneInClipSpace / farPlaneInClipSpace.w;
// math::float4 entityPlaneInClipSpace = Camera::inverseProjection(camera.getProjectionMatrix()) * ndcEntityPlanePos;
// auto entityPlaneInCameraSpace = entityPlaneInClipSpace / entityPlaneInClipSpace.w;
// auto entityPlaneInWorldSpace = camera.getModelMatrix() * entityPlaneInCameraSpace;
// }
// void SceneManager::queueTransformUpdates(EntityId *entities, math::mat4 *transforms, int numEntities)
// {
// std::lock_guard lock(_mutex);
// for (int i = 0; i < numEntities; i++)
// {
// auto entity = entities[i];
// const auto &pos = _transformUpdates.find(entity);
// if (pos == _transformUpdates.end())
// {
// _transformUpdates.emplace(entity, transforms[i]);
// }
// auto curr = _transformUpdates[entity];
// _transformUpdates[entity] = curr;
// }
// }
// Aabb3 SceneManager::getRenderableBoundingBox(EntityId entityId)
// {
// auto &rm = _engine->getRenderableManager();
// auto instance = rm.getInstance(Entity::import(entityId));
// if (!instance.isValid())
// {
// return Aabb3{};
// }
// auto box = rm.getAxisAlignedBoundingBox(instance);
// return Aabb3{box.center.x, box.center.y, box.center.z, box.halfExtent.x, box.halfExtent.y, box.halfExtent.z};
// }
// Aabb2 SceneManager::getScreenSpaceBoundingBox(View *view, EntityId entityId)
// {
// const auto &camera = view->getCamera();
// const auto &viewport = view->getViewport();
// auto &tcm = _engine->getTransformManager();
// auto &rcm = _engine->getRenderableManager();
// // Get the projection and view matrices
// math::mat4 projMatrix = camera.getProjectionMatrix();
// math::mat4 viewMatrix = camera.getViewMatrix();
// math::mat4 vpMatrix = projMatrix * viewMatrix;
// auto entity = Entity::import(entityId);
// auto renderable = rcm.getInstance(entity);
// auto worldTransform = tcm.getWorldTransform(tcm.getInstance(entity));
// // Get the axis-aligned bounding box in model space
// Box aabb = rcm.getAxisAlignedBoundingBox(renderable);
// auto min = aabb.getMin();
// auto max = aabb.getMax();
// // Transform the 8 corners of the AABB to clip space
// std::array<math::float4, 8> corners = {
// worldTransform * math::float4(min.x, min.y, min.z, 1.0f),
// worldTransform * math::float4(max.x, min.y, min.z, 1.0f),
// worldTransform * math::float4(min.x, max.y, min.z, 1.0f),
// worldTransform * math::float4(max.x, max.y, min.z, 1.0f),
// worldTransform * math::float4(min.x, min.y, max.z, 1.0f),
// worldTransform * math::float4(max.x, min.y, max.z, 1.0f),
// worldTransform * math::float4(min.x, max.y, max.z, 1.0f),
// worldTransform * math::float4(max.x, max.y, max.z, 1.0f)};
// // Project corners to clip space and convert to viewport space
// float minX = std::numeric_limits<float>::max();
// float minY = std::numeric_limits<float>::max();
// float maxX = std::numeric_limits<float>::lowest();
// float maxY = std::numeric_limits<float>::lowest();
// for (const auto &corner : corners)
// {
// math::float4 clipSpace = vpMatrix * corner;
// // Check if the point is behind the camera
// if (clipSpace.w <= 0)
// {
// continue; // Skip this point
// }
// // Perform perspective division
// math::float3 ndcSpace = clipSpace.xyz / clipSpace.w;
// // Clamp NDC coordinates to [-1, 1] range
// ndcSpace.x = std::max(-1.0f, std::min(1.0f, ndcSpace.x));
// ndcSpace.y = std::max(-1.0f, std::min(1.0f, ndcSpace.y));
// // Convert NDC to viewport space
// float viewportX = (ndcSpace.x * 0.5f + 0.5f) * viewport.width;
// float viewportY = (1.0f - (ndcSpace.y * 0.5f + 0.5f)) * viewport.height; // Flip Y-axis
// minX = std::min(minX, viewportX);
// minY = std::min(minY, viewportY);
// maxX = std::max(maxX, viewportX);
// maxY = std::max(maxY, viewportY);
// }
// return Aabb2{minX, minY, maxX, maxY};
// }
// static filament::gltfio::MaterialKey getDefaultUnlitMaterialConfig(int numUvs)
// {
// filament::gltfio::MaterialKey config;
// memset(&config, 0, sizeof(config));
// config.unlit = false; // NOTE(review): function is named get*Unlit*MaterialConfig but the unlit flag is false — confirm this was intentional
// config.doubleSided = false;
// config.useSpecularGlossiness = false;
// config.alphaMode = filament::gltfio::AlphaMode::OPAQUE;
// config.hasBaseColorTexture = numUvs > 0;
// config.baseColorUV = 0;
// config.hasVertexColors = false;
// return config;
// }
// SceneAsset *SceneManager::createGeometry(
// float *vertices,
// uint32_t numVertices,
// float *normals,
// uint32_t numNormals,
// float *uvs,
// uint32_t numUvs,
// uint16_t *indices,
// uint32_t numIndices,
// filament::RenderableManager::PrimitiveType primitiveType,
// filament::MaterialInstance **materialInstances,
// size_t materialInstanceCount,
// bool keepData)
// {
// utils::Entity entity;
// auto builder = GeometrySceneAssetBuilder(_engine)
// .vertices(vertices, numVertices)
// .indices(indices, numIndices)
// .primitiveType(primitiveType);
// if (normals)
// {
// builder.normals(normals, numNormals);
// }
// if (uvs)
// {
// builder.uvs(uvs, numUvs);
// }
// builder.materials(materialInstances, materialInstanceCount);
// auto sceneAsset = builder.build();
// if (!sceneAsset)
// {
// Log("Failed to create geometry");
// return std::nullptr_t();
// }
// sceneAsset->addAllEntities(_scene);
// auto *raw = sceneAsset.get();
// _sceneAssets.push_back(std::move(sceneAsset));
// return raw;
// }
// void SceneManager::destroy(filament::MaterialInstance *instance)
// {
// auto it = std::find(_materialInstances.begin(), _materialInstances.end(), instance);
// if (it != _materialInstances.end())
// {
// _materialInstances.erase(it);
// }
// _engine->destroy(instance);
// }
// MaterialInstance *SceneManager::createUnlitFixedSizeMaterialInstance()
// {
// auto instance = _unlitFixedSizeMaterial->createInstance();
// instance->setParameter("scale", 1.0f);
// return instance;
// }
// MaterialInstance *SceneManager::createUnlitMaterialInstance()
// {
// UvMap uvmap;
// auto instance = _unlitMaterialProvider->createMaterialInstance(nullptr, &uvmap);
// instance->setParameter("baseColorFactor", filament::math::float4{1.0f, 1.0f, 1.0f, 1.0f});
// instance->setParameter("baseColorIndex", -1);
// instance->setParameter("uvScale", filament::math::float2{1.0f, 1.0f});
// _materialInstances.push_back(instance);
// return instance;
// }
// Camera *SceneManager::createCamera()
// {
// auto entity = EntityManager::get().create();
// auto camera = _engine->createCamera(entity);
// _cameras.push_back(camera);
// return camera;
// }
// void SceneManager::destroyCamera(Camera *camera)
// {
// auto entity = camera->getEntity();
// _engine->destroyCameraComponent(entity);
// _engine->getEntityManager().destroy(entity);
// auto it = std::find(_cameras.begin(), _cameras.end(), camera);
// if (it != _cameras.end())
// {
// _cameras.erase(it);
// }
// }
// size_t SceneManager::getCameraCount()
// {
// return _cameras.size() + 1;
// }
// Camera *SceneManager::getCameraAt(size_t index)
// {
// if (index == 0)
// {
// return _mainCamera;
// }
// if (index - 1 > _cameras.size() - 1) // NOTE(review): size_t underflow when _cameras is empty — `index - 1 >= _cameras.size()` would be safe
// {
// return nullptr;
// }
// return _cameras[index - 1];
// }
// } // namespace thermion

View File

@@ -0,0 +1,169 @@
@Timeout(const Duration(seconds: 600))
import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'package:test/test.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/callbacks.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_asset.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_camera.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_filament_app.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_material.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_render_target.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_scene.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_swapchain.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_view.dart';
import 'package:thermion_dart/thermion_dart.dart';
import 'helpers.dart';
///
/// Decodes the bundled 512x512 test PNG from [testHelper]'s asset
/// directory and uploads it into a newly created [Texture] as linear
/// RGBA float data.
///
Future<Texture> createTextureFromImage(TestHelper testHelper) async {
  // Read the raw PNG bytes and decode them into a linear image.
  final pngBytes =
      File("${testHelper.testDir}/assets/cube_texture2_512x512.png")
          .readAsBytesSync();
  final decoded = await FilamentApp.instance!.decodeImage(pngBytes);

  // Size the GPU texture to match the decoded image exactly.
  final width = await decoded.getWidth();
  final height = await decoded.getHeight();
  final texture = await FilamentApp.instance!.createTexture(width, height);

  // Upload as RGBA float (the decode path yields linear float data).
  await texture.setLinearImage(
      decoded, PixelDataFormat.RGBA, PixelDataType.FLOAT);
  return texture;
}
///
/// Creates a cube in [viewer] and configures its default (ubershader)
/// material instance for opaque, depth-tested rendering with back-face
/// culling.
///
Future<ThermionAsset> _makeCube(
    TestHelper testHelper, ThermionViewer viewer) async {
  final asset = await testHelper.createCube(viewer);

  // Configure the first primitive's material instance for a standard
  // opaque pass: depth test + depth write, back faces culled.
  final materialInstance = await asset.getMaterialInstanceAt();
  await materialInstance.setDepthCullingEnabled(true);
  await materialInstance.setDepthWriteEnabled(true);
  await materialInstance.setCullingMode(CullingMode.BACK);

  // baseColorIndex 0 — presumably selects UV set 0 for the base color
  // sampler (the ubershader uses -1 to mean "no texture"); confirm
  // against the ubershader material definition.
  await materialInstance.setParameterInt("baseColorIndex", 0);

  return asset;
}
/// Texture-projection integration test.
///
/// Renders a textured cube from a sequence of orbiting camera positions,
/// and at each position re-projects the rendered color image back into
/// the cube's UV space ("UV unwrap") via two auxiliary views:
///   1. the main view renders the lit, textured cube (color target),
///   2. a depth view renders linear depth into an R32F target,
///   3. a capture view samples both targets to bake the projection into
///      a new texture, which is then re-applied to the cube.
void main() async {
  final testHelper = TestHelper("projection");
  await testHelper.setup();
  group('projection', () {
    test('project texture & UV unwrap', () async {
      await testHelper.withViewer((viewer) async {
        // Frustum culling is disabled so the cube is always submitted for
        // rendering regardless of where the orbiting camera ends up.
        final camera = await viewer.getActiveCamera();
        await viewer.view.setFrustumCullingEnabled(false);
        await camera.setLensProjection(near: 0.75, far: 100);
        final dist = 5.0;
        await camera.lookAt(
          Vector3(
            -0.5,
            dist,
            dist,
          ),
        );
        // Cube textured with the source image through the ubershader.
        final cube = await _makeCube(testHelper, viewer);
        final ubershader = await cube.getMaterialInstanceAt();
        final originalTexture = await createTextureFromImage(testHelper);
        final sampler =
            await FilamentApp.instance!.createTextureSampler();
        await ubershader.setParameterTexture("baseColorMap", originalTexture,
            sampler);
        // Two auxiliary views sharing the main view's scene and camera:
        // depthWriteView renders linear depth into an R32F target;
        // captureView bakes the projection into the cube's UV space.
        final depthWriteView = await testHelper.createView(testHelper.swapChain,
            textureFormat: TextureFormat.R32F);
        final captureView = await testHelper.createView(testHelper.swapChain);
        // Render order matters: the capture pass (2) reads the render
        // targets produced by the color pass (0) and the depth pass (1).
        await viewer.view.setRenderOrder(0);
        await depthWriteView.setRenderOrder(1);
        await captureView.setRenderOrder(2);
        for (var view in [captureView, depthWriteView]) {
          await view.setCamera(camera);
          await (view as FFIView)
              .setScene(await viewer.view.getScene() as FFIScene);
        }
        // NOTE(review): machine-specific absolute paths — these .filamat
        // files should live under the test asset directory so this test
        // can run on other machines; confirm where they are packaged.
        var depthWriteMat = await FilamentApp.instance!.createMaterial(
          File(
            "/Users/nickfisher/Documents/thermion/materials/linear_depth.filamat",
          ).readAsBytesSync(),
        );
        var depthWriteMi = await depthWriteMat.createInstance();
        var captureMat = await FilamentApp.instance!.createMaterial(
          File(
            "/Users/nickfisher/Documents/thermion/materials/capture_uv.filamat",
          ).readAsBytesSync(),
        );
        var captureMi = await captureMat.createInstance();
        // Wire the capture material to the color target of the main view
        // and the depth target of the depth view ("useDepth" presumably
        // enables depth-based occlusion of the projection — confirm
        // against capture_uv.filamat).
        final color =
            await (await viewer.view.getRenderTarget())!.getColorTexture();
        final depth =
            await (await depthWriteView.getRenderTarget())!.getColorTexture();
        await captureMi.setParameterBool("flipUVs", true);
        await captureMi.setParameterTexture(
            "color", color, await FilamentApp.instance!.createTextureSampler());
        await captureMi.setParameterTexture(
            "depth", depth, await FilamentApp.instance!.createTextureSampler());
        await captureMi.setParameterBool("useDepth", true);
        // Opaque black clear so unbaked texels are unambiguous.
        await FilamentApp.instance!.setClearOptions(0, 0, 0, 1,
            clearStencil: 0, discard: false, clear: true);
        // Orbit the camera through `divisions` stops over a half circle
        // (i / divisions * pi); at each stop re-bake and re-apply the
        // projected texture.
        final divisions = 8;
        final projectedImage =
            await FilamentApp.instance!.createImage(512, 512, 4);
        final projectedTexture = await FilamentApp.instance!.createTexture(
          512,
          512,
          textureFormat: TextureFormat.RGBA32F,
        );
        for (int i = 0; i < divisions; i++) {
          await camera.lookAt(
            Vector3(
              sin(i / divisions * pi) * dist,
              dist,
              cos(i / divisions * pi) * dist,
            ),
          );
          // Swap the cube's material per view just before each view is
          // rendered, so each pass draws the variant it needs
          // (textured color / linear depth / UV capture).
          var pixelBuffers = await testHelper.capture(null, "capture_uv_$i",
              beforeRender: (view) async {
            if (view == viewer.view) {
              await ubershader.setParameterTexture("baseColorMap", originalTexture,
                  sampler);
              await cube.setMaterialInstanceAt(ubershader);
            } else if (view == depthWriteView) {
              await cube.setMaterialInstanceAt(depthWriteMi);
            } else if (view == captureView) {
              await cube.setMaterialInstanceAt(captureMi);
            }
          });
          // Restore the ubershader before re-rendering with the baked
          // texture.
          await cube.setMaterialInstanceAt(ubershader);
          // Copy the capture view's float pixel buffer into a CPU-side
          // image, upload it as the cube's new base color texture, and
          // capture the re-textured result.
          final data = await projectedImage.getData();
          data.setRange(0, data.length,
              pixelBuffers[captureView]!.buffer.asFloat32List());
          await projectedTexture.setLinearImage(
            projectedImage,
            PixelDataFormat.RGBA,
            PixelDataType.FLOAT,
          );
          await ubershader.setParameterTexture(
            "baseColorMap",
            projectedTexture,
            sampler,
          );
          await testHelper.capture(viewer.view, "capture_uv_retextured_$i");
          // Reset to the original texture for the next iteration's
          // color pass.
          await ubershader.setParameterTexture("baseColorMap", originalTexture,
              sampler);
        }
      }, createRenderTarget: true);
    });
  });
}