create material for texture baking

Textures can now be created manually and are no longer tracked by SceneManager; callers are therefore responsible for tracking and disposing of them.
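
A minimal usage sketch of the new lifecycle, based on the createTexture, decodeImage, setLinearImage and dispose calls added in this commit. The viewer and imageBytes variables are assumed here for illustration only:

// Assumes a ThermionViewer instance `viewer` and encoded image bytes `imageBytes`.
final image = await viewer.decodeImage(imageBytes);
final texture = await viewer.createTexture(
    await image.getWidth(), await image.getHeight(),
    textureFormat: TextureFormat.RGBA32F);
await texture.setLinearImage(image, PixelDataFormat.RGBA, PixelDataType.FLOAT);
// ... render / bake ...
// Textures are no longer tracked by SceneManager, so dispose them explicitly
// when finished (this calls Engine_destroyTexture under the hood).
await texture.dispose();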
This commit is contained in:
Nick Fisher
2025-03-04 14:52:48 +08:00
parent 5dca42c3c1
commit fc7f5d7b93
27 changed files with 528 additions and 571 deletions

View File

@@ -252,58 +252,117 @@ static Geometry cube({bool normals = false, bool uvs = false}) {
}
static Geometry conic({double radius = 1.0, double length = 1.0, bool normals = true, bool uvs = true}) {
int segments = 32;
List<double> verticesList = [];
List<double> normalsList = [];
List<double> uvsList = [];
List<int> indices = [];
int segments = 32;
List<double> verticesList = [];
List<double> normalsList = [];
List<double> uvsList = [];
List<int> indices = [];
// Create vertices, normals, and UVs
for (int i = 0; i <= segments; i++) {
double theta = i * 2 * pi / segments;
double x = radius * cos(theta);
double z = radius * sin(theta);
int vertexOffset = 0;
// Base circle
verticesList.addAll([x, 0, z]);
// Create side vertices (base circle + apex)
for (int i = 0; i <= segments; i++) {
double theta = i * 2 * pi / segments;
double x = radius * cos(theta);
double z = radius * sin(theta);
// Calculate normal for the side
double nx = x / sqrt(x * x + length * length);
double nz = z / sqrt(z * z + length * length);
double ny = radius / sqrt(radius * radius + length * length);
// Base circle vertex
verticesList.addAll([x, 0, z]);
if (normals) {
// Calculate normal for the side (perpendicular to the cone surface)
// The normal is perpendicular to the line from the edge point to the apex
double nx = x;
double nz = z;
double ny = radius;
// Normalize the normal vector
double normalLength = sqrt(nx * nx + ny * ny + nz * nz);
nx /= normalLength;
ny /= normalLength;
nz /= normalLength;
normalsList.addAll([nx, ny, nz]);
// UV coordinates
}
if (uvs) {
// UV coordinates for base edge
uvsList.addAll([i / segments, 0]);
}
// Apex
verticesList.addAll([0, length, 0]);
normalsList.addAll([0, 1, 0]); // Normal at apex points straight up
uvsList.addAll([0.5, 1]); // UV for apex
// Create indices
for (int i = 0; i < segments; i++) {
// Base face (fixed to counterclockwise)
indices.addAll([segments + 1, i + 1, i]);
// Side faces (already correct)
indices.addAll([i, segments, i + 1]);
}
// Add base face normals and UVs
for (int i = 0; i <= segments; i++) {
normalsList.addAll([0, -1, 0]); // Base face normal
double u = 0.5 + 0.5 * cos(i * 2 * pi / segments);
double v = 0.5 + 0.5 * sin(i * 2 * pi / segments);
uvsList.addAll([u, v]); // Base face UV
}
Float32List vertices = Float32List.fromList(verticesList);
Float32List? _normals = normals ? Float32List.fromList(normalsList) : null;
Float32List? _uvs = uvs ? Float32List.fromList(uvsList) : null;
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
// Add apex vertex
verticesList.addAll([0, length, 0]);
vertexOffset = segments + 1;
if (normals) {
// Apex normal points up
normalsList.addAll([0, 1, 0]);
}
if (uvs) {
// UV for apex
uvsList.addAll([0.5, 1]);
}
// Create side faces indices
for (int i = 0; i < segments; i++) {
// Create triangular faces from edge to apex
indices.addAll([i, (i + 1) % (segments + 1), vertexOffset]);
}
// Create base circle vertices (duplicate for proper normals and UVs)
int baseStartIndex = verticesList.length ~/ 3;
// Center vertex for base
verticesList.addAll([0, 0, 0]);
if (normals) {
// Base center normal points down
normalsList.addAll([0, -1, 0]);
}
if (uvs) {
// UV for base center
uvsList.addAll([0.5, 0.5]);
}
// Add base edge vertices
for (int i = 0; i <= segments; i++) {
double theta = i * 2 * pi / segments;
double x = radius * cos(theta);
double z = radius * sin(theta);
// Base circle vertex (duplicate for proper normal/UV)
verticesList.addAll([x, 0, z]);
if (normals) {
// Base normal points down
normalsList.addAll([0, -1, 0]);
}
if (uvs) {
// UV for base edge
double u = 0.5 + 0.5 * cos(theta);
double v = 0.5 + 0.5 * sin(theta);
uvsList.addAll([u, v]);
}
}
// Create base faces indices
for (int i = 0; i < segments; i++) {
// Fan triangulation from center to edge
indices.addAll([baseStartIndex, baseStartIndex + i + 1, baseStartIndex + i + 2]);
}
// Convert to Float32List
Float32List vertices = Float32List.fromList(verticesList);
Float32List? _normals = normals ? Float32List.fromList(normalsList) : null;
Float32List? _uvs = uvs ? Float32List.fromList(uvsList) : null;
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
static Geometry plane({double width = 1.0, double height = 1.0, bool normals = true, bool uvs = true}) {
Float32List vertices = Float32List.fromList([
-width / 2, 0, -height / 2,
@@ -615,4 +674,77 @@ static Geometry fromAabb3(Aabb3 aabb, {bool normals = true, bool uvs = true}) {
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
static Geometry halfPyramid({
double startX = 0.25,
double startY = 0.25,
double width = 1.0,
double height = 1.0,
double depth = 1.0,
bool normals = true,
bool uvs = true
}) {
// Define vertices for a half pyramid (triangular prism)
// Starting at (startX, startY, 0)
Float32List vertices = Float32List.fromList([
// Base rectangle (bottom face)
startX, startY, 0, // 0: front-left
startX + width, startY, 0, // 1: front-right
startX + width, startY + height, 0, // 2: back-right
startX, startY + height, 0, // 3: back-left
// Top ridge
startX, startY + height, depth, // 4: top ridge start
startX + width, startY + height, depth, // 5: top ridge end
]);
// Define normals if needed
Float32List? _normals = normals ? Float32List.fromList([
// Base rectangle
0, 0, -1, // Bottom face
0, 0, -1,
0, 0, -1,
0, 0, -1,
// Ridge normals (approximate)
0, 0.7071, 0.7071, // Angled toward ridge
0, 0.7071, 0.7071,
]) : null;
// Define UVs if needed
Float32List? _uvs = uvs ? Float32List.fromList([
// Base rectangle UVs
0, 0, // Bottom-left
1, 0, // Bottom-right
1, 1, // Top-right
0, 1, // Top-left
// Ridge UVs
0, 0.5,
1, 0.5,
]) : null;
// Define indices for triangular faces
List<int> indices = [
// Bottom face (rectangle)
0, 1, 2,
0, 2, 3,
// Front triangular face
0, 1, 5,
0, 5, 4,
// Left rectangular face
0, 4, 3,
// Right rectangular face
1, 2, 5,
// Back rectangular face
2, 3, 4,
2, 4, 5,
];
return Geometry(vertices, indices, normals: _normals, uvs: _uvs);
}
}

View File

@@ -0,0 +1,19 @@
import 'dart:ffi';
import 'package:thermion_dart/src/viewer/src/ffi/src/callbacks.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_texture.dart';
import 'package:thermion_dart/thermion_dart.dart';
class FFIRenderTarget extends RenderTarget {
final Pointer<TRenderTarget> renderTarget;
final Pointer<TViewer> viewer;
final Pointer<TEngine> engine;
FFIRenderTarget(this.renderTarget, this.viewer, this.engine);
@override
Future<Texture> getColorTexture() async {
final ptr = RenderTarget_getColorTexture(renderTarget);
return FFITexture(engine, ptr);
}
}

View File

@@ -23,9 +23,8 @@ class FFITexture extends Texture {
}
@override
Future dispose() {
// TODO: implement dispose
throw UnimplementedError();
Future dispose() async {
Engine_destroyTexture(_engine, pointer);
}
@override
@@ -130,4 +129,13 @@ class FFILinearImage extends LinearImage {
Future<int> getWidth() async {
return Image_getWidth(pointer);
}
@override
Future<Float32List> getData() async {
final height = await getHeight();
final width = await getWidth();
final channels = await getChannels();
final ptr = Image_getBytes(pointer);
return ptr.asTypedList(height * width * channels);
}
}

View File

@@ -1,4 +1,5 @@
import 'dart:ffi';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_render_target.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/thermion_dart.g.dart';
import 'package:thermion_dart/src/viewer/src/shared_types/shared_types.dart';
import 'callbacks.dart';
@@ -8,12 +9,17 @@ import 'thermion_viewer_ffi.dart';
class FFIView extends View {
final Pointer<TView> view;
final Pointer<TViewer> viewer;
final Pointer<TEngine> engine;
FFIRenderTarget? renderTarget;
FFIView(this.view, this.viewer) {
FFIView(this.view, this.viewer, this.engine) {
final renderTargetPtr = View_getRenderTarget(view);
if (renderTargetPtr != nullptr) {
renderTarget = FFIRenderTarget(renderTargetPtr, viewer);
renderTarget = FFIRenderTarget(
renderTargetPtr,
viewer,
engine
);
}
}

View File

@@ -663,36 +663,6 @@ external void get_bounding_box_to_out(
ffi.Pointer<ffi.Float> maxY,
);
@ffi.Native<
ffi.Void Function(
ffi.Pointer<TViewer>,
EntityId,
ffi.Pointer<ffi.Uint8>,
ffi.Uint32,
ffi.Uint32,
ffi.Pointer<ffi.Uint8>,
ffi.Uint32,
ffi.Uint32)>(isLeaf: true)
external void unproject_texture(
ffi.Pointer<TViewer> viewer,
int entity,
ffi.Pointer<ffi.Uint8> input,
int inputWidth,
int inputHeight,
ffi.Pointer<ffi.Uint8> out,
int outWidth,
int outHeight,
);
@ffi.Native<
ffi.Pointer<ffi.Void> Function(ffi.Pointer<TSceneManager>,
ffi.Pointer<ffi.Uint8>, ffi.Size)>(isLeaf: true)
external ffi.Pointer<ffi.Void> create_texture(
ffi.Pointer<TSceneManager> sceneManager,
ffi.Pointer<ffi.Uint8> data,
int length,
);
@ffi.Native<
ffi.Void Function(
ffi.Pointer<TSceneManager>, ffi.Pointer<ffi.Void>)>(isLeaf: true)
@@ -701,17 +671,6 @@ external void destroy_texture(
ffi.Pointer<ffi.Void> texture,
);
@ffi.Native<
ffi.Void Function(ffi.Pointer<TSceneManager>, EntityId,
ffi.Pointer<ffi.Void>, ffi.Pointer<ffi.Char>, ffi.Int)>(isLeaf: true)
external void apply_texture_to_material(
ffi.Pointer<TSceneManager> sceneManager,
int entity,
ffi.Pointer<ffi.Void> texture,
ffi.Pointer<ffi.Char> parameterName,
int materialIndex,
);
@ffi.Native<TViewport Function(ffi.Pointer<TView>)>(isLeaf: true)
external TViewport View_getViewport(
ffi.Pointer<TView> view,
@@ -920,6 +879,15 @@ bool Texture_loadImage(
pixelDataType.value,
);
@ffi.Native<
ffi.Pointer<TLinearImage> Function(
ffi.Uint32, ffi.Uint32, ffi.Uint32)>(isLeaf: true)
external ffi.Pointer<TLinearImage> Image_createEmpty(
int width,
int height,
int channel,
);
@ffi.Native<
ffi.Pointer<TLinearImage> Function(
ffi.Pointer<ffi.Uint8>, ffi.Size, ffi.Pointer<ffi.Char>)>(isLeaf: true)
@@ -929,6 +897,12 @@ external ffi.Pointer<TLinearImage> Image_decode(
ffi.Pointer<ffi.Char> name,
);
@ffi.Native<ffi.Pointer<ffi.Float> Function(ffi.Pointer<TLinearImage>)>(
isLeaf: true)
external ffi.Pointer<ffi.Float> Image_getBytes(
ffi.Pointer<TLinearImage> tLinearImage,
);
@ffi.Native<ffi.Void Function(ffi.Pointer<TLinearImage>)>(isLeaf: true)
external void Image_destroy(
ffi.Pointer<TLinearImage> tLinearImage,
@@ -949,6 +923,12 @@ external int Image_getChannels(
ffi.Pointer<TLinearImage> tLinearImage,
);
@ffi.Native<ffi.Pointer<TTexture> Function(ffi.Pointer<TRenderTarget>)>(
isLeaf: true)
external ffi.Pointer<TTexture> RenderTarget_getColorTexture(
ffi.Pointer<TRenderTarget> tRenderTarget,
);
@ffi.Native<ffi.Pointer<TTextureSampler> Function()>(isLeaf: true)
external ffi.Pointer<TTextureSampler> TextureSampler_create();
@@ -2139,29 +2119,6 @@ external void reset_to_rest_pose_render_thread(
ffi.Pointer<ffi.NativeFunction<ffi.Void Function()>> callback,
);
@ffi.Native<
ffi.Void Function(
ffi.Pointer<TViewer>,
EntityId,
ffi.Pointer<ffi.Uint8>,
ffi.Uint32,
ffi.Uint32,
ffi.Pointer<ffi.Uint8>,
ffi.Uint32,
ffi.Uint32,
ffi.Pointer<ffi.NativeFunction<ffi.Void Function()>>)>(isLeaf: true)
external void unproject_texture_render_thread(
ffi.Pointer<TViewer> viewer,
int entity,
ffi.Pointer<ffi.Uint8> input,
int inputWidth,
int inputHeight,
ffi.Pointer<ffi.Uint8> out,
int outWidth,
int outHeight,
ffi.Pointer<ffi.NativeFunction<ffi.Void Function()>> callback,
);
@ffi.Native<
ffi.Pointer<TGizmo> Function(
ffi.Pointer<TSceneManager>,
@@ -2633,6 +2590,13 @@ ffi.Pointer<TTexture> Engine_buildTexture(
format.value,
);
@ffi.Native<ffi.Void Function(ffi.Pointer<TEngine>, ffi.Pointer<TTexture>)>(
isLeaf: true)
external void Engine_destroyTexture(
ffi.Pointer<TEngine> tEngine,
ffi.Pointer<TTexture> tTexture,
);
@ffi.Native<
ffi.Pointer<TMaterial> Function(
ffi.Pointer<TEngine>, ffi.Pointer<ffi.Uint8>, ffi.Size)>(isLeaf: true)

View File

@@ -6,6 +6,7 @@ import 'package:animation_tools_dart/animation_tools_dart.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_asset.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_gizmo.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_material.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_render_target.dart';
import 'package:thermion_dart/src/viewer/src/ffi/src/ffi_texture.dart';
import 'package:vector_math/vector_math_64.dart';
import 'package:vector_math/vector_math_64.dart' as v64;
@@ -83,7 +84,7 @@ class ThermionViewerFFI extends ThermionViewer {
_viewer!, textureHandle, width, height, cb);
});
return FFIRenderTarget(renderTarget, _viewer!);
return FFIRenderTarget(renderTarget, _viewer!, _engine!);
}
///
@@ -110,14 +111,14 @@ class ThermionViewerFFI extends ThermionViewer {
if (view == nullptr) {
throw Exception("Failed to create view");
}
return FFIView(view, _viewer!);
return FFIView(view, _viewer!, _engine!);
}
///
///
///
Future updateViewportAndCameraProjection(double width, double height) async {
var mainView = FFIView(Viewer_getViewAt(_viewer!, 0), _viewer!);
var mainView = FFIView(Viewer_getViewAt(_viewer!, 0), _viewer!, _engine!);
mainView.updateViewport(width.toInt(), height.toInt());
final cameraCount = await getCameraCount();
@@ -1817,28 +1818,6 @@ class ThermionViewerFFI extends ThermionViewer {
}
}
///
///
///
Future<Uint8List> project(ThermionEntity entity, Uint8List input,
int inputWidth, int inputHeight, int outWidth, int outHeight) async {
final outPtr = Uint8List(outWidth * outHeight * 4);
await withVoidCallback((callback) {
unproject_texture_render_thread(
_viewer!,
entity,
input.address,
inputWidth,
inputHeight,
outPtr.address,
outWidth,
outHeight,
callback);
});
return outPtr.buffer.asUint8List();
}
///
///
///
@@ -1897,6 +1876,9 @@ class ThermionViewerFFI extends ThermionViewer {
return FFITextureSampler(samplerPtr);
}
///
///
///
Future<LinearImage> decodeImage(Uint8List data) async {
final name = "image";
var ptr = Image_decode(
@@ -1910,12 +1892,12 @@ class ThermionViewerFFI extends ThermionViewer {
return FFILinearImage(ptr);
}
Future applyTexture(FFITexture texture, ThermionEntity entity,
{int materialIndex = 0, String parameterName = "baseColorMap"}) async {
using(parameterName.toNativeUtf8(), (namePtr) async {
apply_texture_to_material(_sceneManager!, entity,
texture.pointer.cast<Void>(), namePtr.cast<Char>(), materialIndex);
});
///
/// Creates an (empty) image with the given dimensions.
///
Future<LinearImage> createImage(int width, int height, int channels) async {
final ptr = Image_createEmpty(width, height, channels);
return FFILinearImage(ptr);
}
///
@@ -2173,7 +2155,7 @@ class ThermionViewerFFI extends ThermionViewer {
if (view == nullptr) {
throw Exception("Failed to get view");
}
return FFIView(view, _viewer!);
return FFIView(view, _viewer!, _engine!);
}
@override
@@ -2205,12 +2187,7 @@ class ThermionViewerFFI extends ThermionViewer {
}
}
class FFIRenderTarget extends RenderTarget {
final Pointer<TRenderTarget> renderTarget;
final Pointer<TViewer> viewer;
FFIRenderTarget(this.renderTarget, this.viewer);
}
class FFISwapChain extends SwapChain {
final Pointer<TSwapChain> swapChain;

View File

@@ -1,3 +1,5 @@
import 'package:thermion_dart/thermion_dart.dart';
abstract class RenderTarget {
Future<Texture> getColorTexture();
}

View File

@@ -420,6 +420,7 @@ typedef ThermionTexture = Texture;
abstract class LinearImage {
Future destroy();
Future<Float32List> getData();
Future<int> getWidth();
Future<int> getHeight();
Future<int> getChannels();

View File

@@ -777,18 +777,21 @@ abstract class ThermionViewer {
///
Future<Texture> createTexture(int width, int height,
{TextureSamplerType textureSamplerType = TextureSamplerType.SAMPLER_2D,
TextureFormat textureFormat = TextureFormat.RGBA16F});
TextureFormat textureFormat = TextureFormat.RGBA32F});
Future<TextureSampler> createTextureSampler({
TextureMinFilter minFilter = TextureMinFilter.LINEAR,
TextureMagFilter magFilter = TextureMagFilter.LINEAR,
TextureWrapMode wrapS = TextureWrapMode.CLAMP_TO_EDGE,
TextureWrapMode wrapT = TextureWrapMode.CLAMP_TO_EDGE,
TextureWrapMode wrapR = TextureWrapMode.CLAMP_TO_EDGE,
double anisotropy = 0.0,
TextureCompareMode compareMode = TextureCompareMode.NONE,
TextureCompareFunc compareFunc = TextureCompareFunc.LESS_EQUAL
});
///
///
///
Future<TextureSampler> createTextureSampler({
TextureMinFilter minFilter = TextureMinFilter.LINEAR,
TextureMagFilter magFilter = TextureMagFilter.LINEAR,
TextureWrapMode wrapS = TextureWrapMode.CLAMP_TO_EDGE,
TextureWrapMode wrapT = TextureWrapMode.CLAMP_TO_EDGE,
TextureWrapMode wrapR = TextureWrapMode.CLAMP_TO_EDGE,
double anisotropy = 0.0,
TextureCompareMode compareMode = TextureCompareMode.NONE,
TextureCompareFunc compareFunc = TextureCompareFunc.LESS_EQUAL
});
///
/// Decodes the specified image data.
@@ -796,10 +799,9 @@ Future<TextureSampler> createTextureSampler({
Future<LinearImage> decodeImage(Uint8List data);
///
/// Creates an (empty) image with the given dimensions.
///
///
Future applyTexture(covariant ThermionTexture texture, ThermionEntity entity,
{int materialIndex = 0, String parameterName = "baseColorMap"});
Future<LinearImage> createImage(int width, int height, int channels);
///
///

View File

@@ -659,19 +659,7 @@ class ThermionViewerStub extends ThermionViewer {
// TODO: implement addDirectLight
throw UnimplementedError();
}
@override
Future applyTexture(covariant ThermionTexture texture, ThermionEntity entity, {int materialIndex = 0, String parameterName = "baseColorMap"}) {
// TODO: implement applyTexture
throw UnimplementedError();
}
@override
Future<ThermionTexture> createTexture(Uint8List data) {
// TODO: implement createTexture
throw UnimplementedError();
}
@override
Future<MaterialInstance> createUbershaderMaterialInstance({bool doubleSided = false, bool unlit = false, bool hasVertexColors = false, bool hasBaseColorTexture = false, bool hasNormalTexture = false, bool hasOcclusionTexture = false, bool hasEmissiveTexture = false, bool useSpecularGlossiness = false, AlphaMode alphaMode = AlphaMode.OPAQUE, bool enableDiagnostics = false, bool hasMetallicRoughnessTexture = false, int metallicRoughnessUV = 0, int baseColorUV = 0, bool hasClearCoatTexture = false, int clearCoatUV = 0, bool hasClearCoatRoughnessTexture = false, int clearCoatRoughnessUV = 0, bool hasClearCoatNormalTexture = false, int clearCoatNormalUV = 0, bool hasClearCoat = false, bool hasTransmission = false, bool hasTextureTransforms = false, int emissiveUV = 0, int aoUV = 0, int normalUV = 0, bool hasTransmissionTexture = false, int transmissionUV = 0, bool hasSheenColorTexture = false, int sheenColorUV = 0, bool hasSheenRoughnessTexture = false, int sheenRoughnessUV = 0, bool hasVolumeThicknessTexture = false, int volumeThicknessUV = 0, bool hasSheen = false, bool hasIOR = false, bool hasVolume = false}) {
// TODO: implement createUbershaderMaterialInstance

View File

@@ -117,8 +117,6 @@ namespace thermion
return nullptr;
}
void unprojectTexture(EntityId entity, uint8_t* input, uint32_t inputWidth, uint32_t inputHeight, uint8_t* out, uint32_t outWidth, uint32_t outHeight);
private:
const ResourceLoaderWrapperImpl *const _resourceLoaderWrapper;
Scene *_scene = nullptr;

View File

@@ -19,12 +19,12 @@
namespace thermion {
class UnprojectTexture {
class TextureProjection {
public:
UnprojectTexture(const CustomGeometry * geometry, Camera& camera, Engine* engine)
TextureProjection(const CustomGeometry * geometry, Camera& camera, Engine* engine)
: _geometry(geometry), _camera(camera), _engine(engine) {}
void unproject(utils::Entity entity, const uint8_t* inputTexture, uint8_t* outputTexture, uint32_t inputWidth, uint32_t inputHeight,
void project(utils::Entity entity, const uint8_t* inputTexture, uint8_t* outputTexture, uint32_t inputWidth, uint32_t inputHeight,
uint32_t outputWidth, uint32_t outputHeight);
private:
@@ -32,7 +32,6 @@ private:
const Camera& _camera;
Engine* _engine;
math::float3 doUnproject(const math::float2& screenPos, float depth, const math::mat4& invViewProj);
bool isInsideTriangle(const math::float2& p, const math::float2& a, const math::float2& b, const math::float2& c);
math::float3 barycentric(const math::float2& p, const math::float2& a, const math::float2& b, const math::float2& c);
};

View File

@@ -26,6 +26,7 @@ EMSCRIPTEN_KEEPALIVE TTexture *Engine_buildTexture(TEngine *engine,
uint8_t levels,
TTextureSamplerType sampler,
TTextureFormat format);
EMSCRIPTEN_KEEPALIVE void Engine_destroyTexture(TEngine *tEngine, TTexture *tTexture);
EMSCRIPTEN_KEEPALIVE TMaterial *Engine_buildMaterial(TEngine *tEngine, const uint8_t* materialData, size_t length);

View File

@@ -192,11 +192,14 @@ enum TPixelDataType {
};
EMSCRIPTEN_KEEPALIVE bool Texture_loadImage(TEngine *tEngine, TTexture *tTexture, TLinearImage *tImage, TPixelDataFormat bufferFormat, TPixelDataType pixelDataType);
EMSCRIPTEN_KEEPALIVE TLinearImage* Image_decode(uint8_t* data, size_t length, const char* name = "image");
EMSCRIPTEN_KEEPALIVE void Image_destroy(TLinearImage* tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getWidth(TLinearImage* tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getHeight(TLinearImage* tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getChannels(TLinearImage* tLinearImage);
EMSCRIPTEN_KEEPALIVE TLinearImage *Image_createEmpty(uint32_t width, uint32_t height, uint32_t channel);
EMSCRIPTEN_KEEPALIVE TLinearImage *Image_decode(uint8_t* data, size_t length, const char* name = "image");
EMSCRIPTEN_KEEPALIVE float *Image_getBytes(TLinearImage *tLinearImage);
EMSCRIPTEN_KEEPALIVE void Image_destroy(TLinearImage *tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getWidth(TLinearImage *tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getHeight(TLinearImage *tLinearImage);
EMSCRIPTEN_KEEPALIVE uint32_t Image_getChannels(TLinearImage *tLinearImage);
EMSCRIPTEN_KEEPALIVE TTexture *RenderTarget_getColorTexture(TRenderTarget *tRenderTarget);
// Texture Sampler related enums
enum TSamplerWrapMode {

View File

@@ -75,10 +75,7 @@ extern "C"
EMSCRIPTEN_KEEPALIVE Aabb2 get_bounding_box(TSceneManager *sceneManager, TView *view, EntityId entity);
EMSCRIPTEN_KEEPALIVE void get_bounding_box_to_out(TSceneManager *sceneManager, TView *view, EntityId entity, float *minX, float *minY, float *maxX, float *maxY);
EMSCRIPTEN_KEEPALIVE void unproject_texture(TViewer* viewer, EntityId entity,uint8_t* input, uint32_t inputWidth, uint32_t inputHeight, uint8_t *out, uint32_t outWidth, uint32_t outHeight);
EMSCRIPTEN_KEEPALIVE void *const create_texture(TSceneManager *sceneManager, uint8_t *data, size_t length);
EMSCRIPTEN_KEEPALIVE void destroy_texture(TSceneManager *sceneManager, void *const texture);
EMSCRIPTEN_KEEPALIVE void apply_texture_to_material(TSceneManager *sceneManager, EntityId entity, void *const texture, const char *parameterName, int materialIndex);
#ifdef __cplusplus

View File

@@ -156,7 +156,6 @@ namespace thermion
EMSCRIPTEN_KEEPALIVE void set_post_processing_render_thread(TViewer *viewer, bool enabled);
EMSCRIPTEN_KEEPALIVE void reset_to_rest_pose_render_thread(TSceneManager *sceneManager, EntityId entityId, void (*callback)());
EMSCRIPTEN_KEEPALIVE void unproject_texture_render_thread(TViewer *viewer, EntityId entity, uint8_t *input, uint32_t inputWidth, uint32_t inputHeight, uint8_t *out, uint32_t outWidth, uint32_t outHeight, void (*callback)());
#ifdef __cplusplus
}

View File

@@ -177,26 +177,6 @@ namespace thermion
/// @brief
void update();
/// @brief
/// @param data
/// @param length
/// @param name
/// @return
Texture *createTexture(const uint8_t *data, size_t length, const char *name);
/// @brief
/// @param entityId
/// @param texture
/// @param slotName
/// @param materialIndex
/// @return
bool applyTexture(EntityId entityId, Texture *texture, const char *slotName, int materialIndex);
/// @brief
/// @param texture
void destroyTexture(Texture *texture);
/// @brief
/// @param entity
/// @return
@@ -367,7 +347,6 @@ namespace thermion
utils::NameComponentManager *_ncm;
tsl::robin_map<EntityId, math::mat4> _transformUpdates;
std::set<Texture *> _textures;
std::vector<Camera *> _cameras;
std::vector<utils::Entity> _lights;
std::vector<std::unique_ptr<SceneAsset>> _sceneAssets;

View File

@@ -96,7 +96,7 @@
#include "StreamBufferAdapter.hpp"
#include "material/image.h"
#include "TimeIt.hpp"
#include "UnprojectTexture.hpp"
#include "TextureProjection.hpp"
namespace thermion
{
@@ -1058,20 +1058,5 @@ namespace thermion
return _engine->getCameraComponent(Entity::import(entity));
}
void FilamentViewer::unprojectTexture(EntityId entityId, uint8_t *input, uint32_t inputWidth, uint32_t inputHeight, uint8_t *out, uint32_t outWidth, uint32_t outHeight)
{
// const auto *geometry = _sceneManager->getGeometry(entityId);
// if (!geometry->uvs)
// {
// Log("No UVS");
// return;
// }
// UnprojectTexture unproject(geometry, _view->getCamera(), _engine);
// TODO - check that input dimensions match viewport?
// unproject.unproject(utils::Entity::import(entityId), input, out, inputWidth, inputHeight, outWidth, outHeight);
}
} // namespace thermion

View File

@@ -0,0 +1,209 @@
// #include <filament/Engine.h>
// #include <filament/Camera.h>
// #include <filament/Texture.h>
// #include <filament/VertexBuffer.h>
// #include <filament/IndexBuffer.h>
// #include <filament/RenderableManager.h>
// #include <filament/TransformManager.h>
// #include <math/mat4.h>
// #include <math/vec2.h>
// #include <math/vec3.h>
// #include <math/vec4.h>
// #include <utils/EntityManager.h>
// #include <backend/PixelBufferDescriptor.h>
// #include "Log.hpp"
// #include <vector>
// #include <algorithm>
// #include <iostream>
// #include "scene/CustomGeometry.hpp"
// #include "TextureProjection.hpp"
// namespace thermion
// {
// bool TextureProjection::isInsideTriangle(const math::float2 &p, const math::float2 &a, const math::float2 &b, const math::float2 &c)
// {
// float d1 = (p.x - b.x) * (a.y - b.y) - (a.x - b.x) * (p.y - b.y);
// float d2 = (p.x - c.x) * (b.y - c.y) - (b.x - c.x) * (p.y - c.y);
// float d3 = (p.x - a.x) * (c.y - a.y) - (c.x - a.x) * (p.y - a.y);
// return (d1 >= 0 && d2 >= 0 && d3 >= 0) || (d1 <= 0 && d2 <= 0 && d3 <= 0);
// }
// math::float3 TextureProjection::barycentric(const math::float2 &p, const math::float2 &a, const math::float2 &b, const math::float2 &c)
// {
// math::float2 v0 = b - a;
// math::float2 v1 = c - a;
// math::float2 v2 = p - a;
// float d00 = dot(v0, v0);
// float d01 = dot(v0, v1);
// float d11 = dot(v1, v1);
// float d20 = dot(v2, v0);
// float d21 = dot(v2, v1);
// float denom = d00 * d11 - d01 * d01;
// float v = (d11 * d20 - d01 * d21) / denom;
// float w = (d00 * d21 - d01 * d20) / denom;
// float u = 1.0f - v - w;
// return math::float3(u, v, w);
// }
// void TextureProjection::project(utils::Entity entity, const uint8_t *inputTexture, uint8_t *outputTexture,
// uint32_t inputWidth, uint32_t inputHeight,
// uint32_t outputWidth, uint32_t outputHeight)
// {
// // auto &rm = _engine->getRenderableManager();
// // auto &tm = _engine->getTransformManager();
// // math::mat4 invViewProj = Camera::inverseProjection(_camera.getProjectionMatrix()) * _camera.getModelMatrix();
// // auto ti = tm.getInstance(entity);
// // math::mat4f worldTransform = tm.getWorldTransform(ti);
// // auto inverseWorldTransform = inverse(worldTransform);
// // const float *vertices = _geometry->vertices;
// // const float *uvs = _geometry->uvs;
// // const uint16_t *indices = _geometry->indices;
// // uint32_t numIndices = _geometry->numIndices;
// // // Create a depth buffer
// // std::vector<float> depthBuffer(inputWidth * inputHeight, std::numeric_limits<float>::infinity());
// // // Create a buffer to store the triangle index for each pixel
// // std::vector<int> triangleIndexBuffer(inputWidth * inputHeight, -1);
// // auto max = 0.0f;
// // auto min = 99.0f;
// // // Depth pre-pass
// // for (size_t i = 0; i < numIndices; i += 3)
// // {
// // math::float3 v0(vertices[indices[i] * 3], vertices[indices[i] * 3 + 1], vertices[indices[i] * 3 + 2]);
// // math::float3 v1(vertices[indices[i + 1] * 3], vertices[indices[i + 1] * 3 + 1], vertices[indices[i + 1] * 3 + 2]);
// // math::float3 v2(vertices[indices[i + 2] * 3], vertices[indices[i + 2] * 3 + 1], vertices[indices[i + 2] * 3 + 2]);
// // math::float2 uv0(uvs[(indices[i] * 2)], uvs[(indices[i] * 2) + 1]);
// // math::float2 uv1(uvs[(indices[i + 1] * 2)], uvs[(indices[i + 1] * 2) + 1]);
// // math::float2 uv2(uvs[(indices[i + 2] * 2)], uvs[(indices[i + 2] * 2) + 1]);
// // // Transform vertices to world space
// // v0 = (worldTransform * math::float4(v0, 1.0f)).xyz;
// // v1 = (worldTransform * math::float4(v1, 1.0f)).xyz;
// // v2 = (worldTransform * math::float4(v2, 1.0f)).xyz;
// // // Project vertices to screen space
// // math::float4 clipPos0 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v0, 1.0f);
// // math::float4 clipPos1 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v1, 1.0f);
// // math::float4 clipPos2 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v2, 1.0f);
// // math::float3 ndcPos0 = clipPos0.xyz / clipPos0.w;
// // math::float3 ndcPos1 = clipPos1.xyz / clipPos1.w;
// // math::float3 ndcPos2 = clipPos2.xyz / clipPos2.w;
// // // Convert NDC to screen coordinates
// // math::float2 screenPos0((ndcPos0.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos0.y * 0.5f + 0.5f)) * inputHeight);
// // math::float2 screenPos1((ndcPos1.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos1.y * 0.5f + 0.5f)) * inputHeight);
// // math::float2 screenPos2((ndcPos2.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos2.y * 0.5f + 0.5f)) * inputHeight);
// // // Compute bounding box of the triangle
// // int minX = std::max(0, static_cast<int>(std::min({screenPos0.x, screenPos1.x, screenPos2.x})));
// // int maxX = std::min(static_cast<int>(inputWidth) - 1, static_cast<int>(std::max({screenPos0.x, screenPos1.x, screenPos2.x})));
// // int minY = std::max(0, static_cast<int>(std::min({screenPos0.y, screenPos1.y, screenPos2.y})));
// // int maxY = std::min(static_cast<int>(inputHeight) - 1, static_cast<int>(std::max({screenPos0.y, screenPos1.y, screenPos2.y})));
// // // Iterate over the bounding box
// // for (int y = minY; y <= maxY; ++y)
// // {
// // for (int x = minX; x <= maxX; ++x)
// // {
// // math::float2 pixelPos(x + 0.5f, y + 0.5f);
// // if (isInsideTriangle(pixelPos, screenPos0, screenPos1, screenPos2))
// // {
// // math::float3 bary = barycentric(pixelPos, screenPos0, screenPos1, screenPos2);
// // // Interpolate depth
// // float depth = bary.x * ndcPos0.z + bary.y * ndcPos1.z + bary.z * ndcPos2.z;
// // // Depth test
// // if (depth < depthBuffer[y * inputWidth + x])
// // {
// // if (depth > max)
// // {
// // max = depth;
// // }
// // if (depth < min)
// // {
// // min = depth;
// // }
// // depthBuffer[y * inputWidth + x] = depth;
// // triangleIndexBuffer[y * inputWidth + x] = i / 3; // Store triangle index
// // }
// // }
// // }
// // }
// // }
// // for (uint32_t y = 0; y < outputHeight; ++y)
// // {
// // for (uint32_t x = 0; x < outputWidth; ++x)
// // {
// // math::float2 uv(static_cast<float>(x) / outputWidth, static_cast<float>(y) / outputHeight);
// // // Use the UV coordinates to get the corresponding 3D position on the renderable
// // math::float3 objectPos;
// // math::float2 interpolatedUV;
// // bool found = false;
// // // Iterate over triangles to find which one contains this UV coordinate
// // for (size_t i = 0; i < numIndices; i += 3)
// // {
// // math::float2 uv0 = *(math::float2 *)&uvs[indices[i] * 2];
// // math::float2 uv1 = *(math::float2 *)&uvs[indices[i + 1] * 2];
// // math::float2 uv2 = *(math::float2 *)&uvs[indices[i + 2] * 2];
// // if (isInsideTriangle(uv, uv0, uv1, uv2))
// // {
// // // Compute barycentric coordinates in UV space
// // math::float3 bary = barycentric(uv, uv0, uv1, uv2);
// // // Interpolate 3D position
// // math::float3 v0(vertices[indices[i] * 3], vertices[indices[i] * 3 + 1], vertices[indices[i] * 3 + 2]);
// // math::float3 v1(vertices[indices[i + 1] * 3], vertices[indices[i + 1] * 3 + 1], vertices[indices[i + 1] * 3 + 2]);
// // math::float3 v2(vertices[indices[i + 2] * 3], vertices[indices[i + 2] * 3 + 1], vertices[indices[i + 2] * 3 + 2]);
// // objectPos = v0 * bary.x + v1 * bary.y + v2 * bary.z;
// // interpolatedUV = uv;
// // // Find the screen coordinates on the input texture
// // math::float3 worldPos = (worldTransform * math::float4(objectPos, 1.0f)).xyz;
// // // Project the world position to screen space
// // math::float4 clipPos = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(worldPos, 1.0f);
// // math::float3 ndcPos = clipPos.xyz / clipPos.w;
// // // Convert NDC to screen coordinates
// // uint32_t screenX = (ndcPos.x * 0.5f + 0.5f) * inputWidth;
// // uint32_t screenY = (1.0f - (ndcPos.y * 0.5f + 0.5f)) * inputHeight;
// // if (triangleIndexBuffer[(screenY * inputWidth) + screenX] == i / 3)
// // {
// // if (screenX >= 0 && screenX < inputWidth && screenY >= 0 && screenY < inputHeight)
// // {
// // int inputIndex = (screenY * inputWidth + screenX) * 4;
// // int outputIndex = (y * outputWidth + x) * 4;
// // std::copy_n(&inputTexture[inputIndex], 4, &outputTexture[outputIndex]);
// // }
// // }
// // }
// // }
// // }
// // }
// }
// } // namespace thermion

View File

@@ -1,211 +0,0 @@
#include <filament/Engine.h>
#include <filament/Camera.h>
#include <filament/Texture.h>
#include <filament/VertexBuffer.h>
#include <filament/IndexBuffer.h>
#include <filament/RenderableManager.h>
#include <filament/TransformManager.h>
#include <math/mat4.h>
#include <math/vec2.h>
#include <math/vec3.h>
#include <math/vec4.h>
#include <utils/EntityManager.h>
#include <backend/PixelBufferDescriptor.h>
#include "Log.hpp"
#include <vector>
#include <algorithm>
#include <iostream>
#include "scene/CustomGeometry.hpp"
#include "UnprojectTexture.hpp"
namespace thermion
{
bool UnprojectTexture::isInsideTriangle(const math::float2 &p, const math::float2 &a, const math::float2 &b, const math::float2 &c)
{
float d1 = (p.x - b.x) * (a.y - b.y) - (a.x - b.x) * (p.y - b.y);
float d2 = (p.x - c.x) * (b.y - c.y) - (b.x - c.x) * (p.y - c.y);
float d3 = (p.x - a.x) * (c.y - a.y) - (c.x - a.x) * (p.y - a.y);
return (d1 >= 0 && d2 >= 0 && d3 >= 0) || (d1 <= 0 && d2 <= 0 && d3 <= 0);
}
math::float3 UnprojectTexture::barycentric(const math::float2 &p, const math::float2 &a, const math::float2 &b, const math::float2 &c)
{
math::float2 v0 = b - a;
math::float2 v1 = c - a;
math::float2 v2 = p - a;
float d00 = dot(v0, v0);
float d01 = dot(v0, v1);
float d11 = dot(v1, v1);
float d20 = dot(v2, v0);
float d21 = dot(v2, v1);
float denom = d00 * d11 - d01 * d01;
float v = (d11 * d20 - d01 * d21) / denom;
float w = (d00 * d21 - d01 * d20) / denom;
float u = 1.0f - v - w;
return math::float3(u, v, w);
}
void UnprojectTexture::unproject(utils::Entity entity, const uint8_t *inputTexture, uint8_t *outputTexture,
uint32_t inputWidth, uint32_t inputHeight,
uint32_t outputWidth, uint32_t outputHeight)
{
// auto &rm = _engine->getRenderableManager();
// auto &tm = _engine->getTransformManager();
// math::mat4 invViewProj = Camera::inverseProjection(_camera.getProjectionMatrix()) * _camera.getModelMatrix();
// auto ti = tm.getInstance(entity);
// math::mat4f worldTransform = tm.getWorldTransform(ti);
// auto inverseWorldTransform = inverse(worldTransform);
// const float *vertices = _geometry->vertices;
// const float *uvs = _geometry->uvs;
// const uint16_t *indices = _geometry->indices;
// uint32_t numIndices = _geometry->numIndices;
// // Create a depth buffer
// std::vector<float> depthBuffer(inputWidth * inputHeight, std::numeric_limits<float>::infinity());
// // Create a buffer to store the triangle index for each pixel
// std::vector<int> triangleIndexBuffer(inputWidth * inputHeight, -1);
// auto max = 0.0f;
// auto min = 99.0f;
// // Depth pre-pass
// for (size_t i = 0; i < numIndices; i += 3)
// {
// math::float3 v0(vertices[indices[i] * 3], vertices[indices[i] * 3 + 1], vertices[indices[i] * 3 + 2]);
// math::float3 v1(vertices[indices[i + 1] * 3], vertices[indices[i + 1] * 3 + 1], vertices[indices[i + 1] * 3 + 2]);
// math::float3 v2(vertices[indices[i + 2] * 3], vertices[indices[i + 2] * 3 + 1], vertices[indices[i + 2] * 3 + 2]);
// math::float2 uv0(uvs[(indices[i] * 2)], uvs[(indices[i] * 2) + 1]);
// math::float2 uv1(uvs[(indices[i + 1] * 2)], uvs[(indices[i + 1] * 2) + 1]);
// math::float2 uv2(uvs[(indices[i + 2] * 2)], uvs[(indices[i + 2] * 2) + 1]);
// // Transform vertices to world space
// v0 = (worldTransform * math::float4(v0, 1.0f)).xyz;
// v1 = (worldTransform * math::float4(v1, 1.0f)).xyz;
// v2 = (worldTransform * math::float4(v2, 1.0f)).xyz;
// // Project vertices to screen space
// math::float4 clipPos0 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v0, 1.0f);
// math::float4 clipPos1 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v1, 1.0f);
// math::float4 clipPos2 = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(v2, 1.0f);
// math::float3 ndcPos0 = clipPos0.xyz / clipPos0.w;
// math::float3 ndcPos1 = clipPos1.xyz / clipPos1.w;
// math::float3 ndcPos2 = clipPos2.xyz / clipPos2.w;
// // Convert NDC to screen coordinates
// math::float2 screenPos0((ndcPos0.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos0.y * 0.5f + 0.5f)) * inputHeight);
// math::float2 screenPos1((ndcPos1.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos1.y * 0.5f + 0.5f)) * inputHeight);
// math::float2 screenPos2((ndcPos2.x * 0.5f + 0.5f) * inputWidth, (1.0f - (ndcPos2.y * 0.5f + 0.5f)) * inputHeight);
// // Compute bounding box of the triangle
// int minX = std::max(0, static_cast<int>(std::min({screenPos0.x, screenPos1.x, screenPos2.x})));
// int maxX = std::min(static_cast<int>(inputWidth) - 1, static_cast<int>(std::max({screenPos0.x, screenPos1.x, screenPos2.x})));
// int minY = std::max(0, static_cast<int>(std::min({screenPos0.y, screenPos1.y, screenPos2.y})));
// int maxY = std::min(static_cast<int>(inputHeight) - 1, static_cast<int>(std::max({screenPos0.y, screenPos1.y, screenPos2.y})));
// // Iterate over the bounding box
// for (int y = minY; y <= maxY; ++y)
// {
// for (int x = minX; x <= maxX; ++x)
// {
// math::float2 pixelPos(x + 0.5f, y + 0.5f);
// if (isInsideTriangle(pixelPos, screenPos0, screenPos1, screenPos2))
// {
// math::float3 bary = barycentric(pixelPos, screenPos0, screenPos1, screenPos2);
// // Interpolate depth
// float depth = bary.x * ndcPos0.z + bary.y * ndcPos1.z + bary.z * ndcPos2.z;
// // Depth test
// if (depth < depthBuffer[y * inputWidth + x])
// {
// if (depth > max)
// {
// max = depth;
// }
// if (depth < min)
// {
// min = depth;
// }
// depthBuffer[y * inputWidth + x] = depth;
// triangleIndexBuffer[y * inputWidth + x] = i / 3; // Store triangle index
// }
// }
// }
// }
// }
// for (uint32_t y = 0; y < outputHeight; ++y)
// {
// for (uint32_t x = 0; x < outputWidth; ++x)
// {
// math::float2 uv(static_cast<float>(x) / outputWidth, static_cast<float>(y) / outputHeight);
// // Use the UV coordinates to get the corresponding 3D position on the renderable
// math::float3 objectPos;
// math::float2 interpolatedUV;
// bool found = false;
// // Iterate over triangles to find which one contains this UV coordinate
// for (size_t i = 0; i < numIndices; i += 3)
// {
// math::float2 uv0 = *(math::float2 *)&uvs[indices[i] * 2];
// math::float2 uv1 = *(math::float2 *)&uvs[indices[i + 1] * 2];
// math::float2 uv2 = *(math::float2 *)&uvs[indices[i + 2] * 2];
// if (isInsideTriangle(uv, uv0, uv1, uv2))
// {
// // Compute barycentric coordinates in UV space
// math::float3 bary = barycentric(uv, uv0, uv1, uv2);
// // Interpolate 3D position
// math::float3 v0(vertices[indices[i] * 3], vertices[indices[i] * 3 + 1], vertices[indices[i] * 3 + 2]);
// math::float3 v1(vertices[indices[i + 1] * 3], vertices[indices[i + 1] * 3 + 1], vertices[indices[i + 1] * 3 + 2]);
// math::float3 v2(vertices[indices[i + 2] * 3], vertices[indices[i + 2] * 3 + 1], vertices[indices[i + 2] * 3 + 2]);
// objectPos = v0 * bary.x + v1 * bary.y + v2 * bary.z;
// interpolatedUV = uv;
// // Find the screen coordinates on the input texture
// math::float3 worldPos = (worldTransform * math::float4(objectPos, 1.0f)).xyz;
// // Project the world position to screen space
// math::float4 clipPos = _camera.getProjectionMatrix() * _camera.getViewMatrix() * math::float4(worldPos, 1.0f);
// math::float3 ndcPos = clipPos.xyz / clipPos.w;
// // Convert NDC to screen coordinates
// uint32_t screenX = (ndcPos.x * 0.5f + 0.5f) * inputWidth;
// uint32_t screenY = (1.0f - (ndcPos.y * 0.5f + 0.5f)) * inputHeight;
// if (triangleIndexBuffer[(screenY * inputWidth) + screenX] == i / 3)
// {
// if (screenX >= 0 && screenX < inputWidth && screenY >= 0 && screenY < inputHeight)
// {
// int inputIndex = (screenY * inputWidth + screenX) * 4;
// int outputIndex = (y * outputWidth + x) * 4;
// std::copy_n(&inputTexture[inputIndex], 4, &outputTexture[outputIndex]);
// }
// }
// }
// }
// }
// }
}
} // namespace thermion

View File

@@ -244,6 +244,12 @@ namespace thermion
return reinterpret_cast<TTexture *>(texture);
}
EMSCRIPTEN_KEEPALIVE void Engine_destroyTexture(TEngine *tEngine, TTexture *tTexture) {
auto *engine = reinterpret_cast<Engine *>(tEngine);
auto *texture = reinterpret_cast<Texture *>(tTexture);
engine->destroy(texture);
}
#ifdef __cplusplus
}
}

View File

@@ -2,6 +2,7 @@
#include <filament/Engine.h>
#include <filament/Material.h>
#include <filament/RenderTarget.h>
#include <filament/Scene.h>
#include <filament/Texture.h>
#include <filament/TextureSampler.h>
@@ -36,6 +37,12 @@ namespace thermion
return reinterpret_cast<TLinearImage *>(linearImage);
}
EMSCRIPTEN_KEEPALIVE float *Image_getBytes(TLinearImage *tLinearImage)
{
auto *linearImage = reinterpret_cast<::image::LinearImage *>(tLinearImage);
return linearImage->getPixelRef();
}
EMSCRIPTEN_KEEPALIVE uint32_t Image_getWidth(TLinearImage *tLinearImage)
{
auto *linearImage = reinterpret_cast<::image::LinearImage *>(tLinearImage);
@@ -98,6 +105,13 @@ namespace thermion
return true;
}
EMSCRIPTEN_KEEPALIVE TLinearImage *Image_createEmpty(uint32_t width, uint32_t height, uint32_t channel) {
auto *image = new ::image::LinearImage(width, height, channel);
return reinterpret_cast<TLinearImage*>(image);
}
EMSCRIPTEN_KEEPALIVE TTextureSampler *TextureSampler_create()
{
auto *sampler = new filament::TextureSampler();
@@ -217,6 +231,12 @@ namespace thermion
delete textureSampler;
}
}
EMSCRIPTEN_KEEPALIVE TTexture *RenderTarget_getColorTexture(TRenderTarget *tRenderTarget) {
auto renderTarget = reinterpret_cast<filament::RenderTarget*>(tRenderTarget);
auto texture = renderTarget->getTexture(filament::RenderTarget::AttachmentPoint::COLOR0);
return reinterpret_cast<TTexture*>(texture);
}
#ifdef __cplusplus
}

View File

@@ -413,20 +413,5 @@ extern "C"
free(ptr);
}
EMSCRIPTEN_KEEPALIVE void unproject_texture(TViewer *viewer, EntityId entity, uint8_t *input, uint32_t inputWidth, uint32_t inputHeight, uint8_t *out, uint32_t outWidth, uint32_t outHeight)
{
// ((FilamentViewer *)viewer)->unprojectTexture(entity, input, inputWidth, inputHeight, out, outWidth, outHeight);
}
EMSCRIPTEN_KEEPALIVE void apply_texture_to_material(TSceneManager *sceneManager, EntityId entity, void *const texture, const char *parameterName, int materialIndex)
{
((SceneManager *)sceneManager)->applyTexture(entity, reinterpret_cast<Texture *>(texture), parameterName, materialIndex);
}
EMSCRIPTEN_KEEPALIVE void destroy_texture(TSceneManager *sceneManager, void *const texture)
{
((SceneManager *)sceneManager)->destroyTexture(reinterpret_cast<Texture *>(texture));
}
}

View File

@@ -722,17 +722,6 @@ std::packaged_task<void()> lambda(
auto fut = _rl->add_task(lambda);
}
EMSCRIPTEN_KEEPALIVE void unproject_texture_render_thread(TViewer *viewer, EntityId entity, uint8_t *input, uint32_t inputWidth, uint32_t inputHeight, uint8_t *out, uint32_t outWidth, uint32_t outHeight, void (*callback)())
{
std::packaged_task<void()> lambda(
[=]
{
unproject_texture(viewer, entity, input, inputWidth, inputHeight, out, outWidth, outHeight);
callback();
});
auto fut = _rl->add_task(lambda);
}
EMSCRIPTEN_KEEPALIVE void AnimationManager_updateBoneMatricesRenderThread(
TAnimationManager *tAnimationManager,
TSceneAsset *sceneAsset,

View File

@@ -40,7 +40,7 @@
#include "scene/Gizmo.hpp"
#include "scene/SceneAsset.hpp"
#include "scene/GeometrySceneAssetBuilder.hpp"
#include "UnprojectTexture.hpp"
#include "TextureProjection.hpp"
#include "resources/translation_gizmo_glb.h"
#include "resources/rotation_gizmo_glb.h"
@@ -470,17 +470,10 @@ namespace thermion
destroyAssets();
std::lock_guard lock(_mutex);
for (auto *texture : _textures)
{
_engine->destroy(texture);
}
for (auto *materialInstance : _materialInstances)
{
_engine->destroy(materialInstance);
}
_textures.clear();
_materialInstances.clear();
}
@@ -592,102 +585,6 @@ namespace thermion
_sceneAssets.clear();
}
Texture *SceneManager::createTexture(const uint8_t *data, size_t length, const char *name)
{
// Create an input stream from the data
std::istringstream stream(std::string(reinterpret_cast<const char *>(data), length));
// Decode the image
image::LinearImage linearImage = image::ImageDecoder::decode(stream, name, image::ImageDecoder::ColorSpace::SRGB);
if (!linearImage.isValid())
{
Log("Failed to decode image.");
return nullptr;
}
uint32_t w = linearImage.getWidth();
uint32_t h = linearImage.getHeight();
uint32_t channels = linearImage.getChannels();
Texture::InternalFormat textureFormat = channels == 3 ? Texture::InternalFormat::RGB16F
: Texture::InternalFormat::RGBA16F;
Texture::Format bufferFormat = channels == 3 ? Texture::Format::RGB
: Texture::Format::RGBA;
Texture *texture = Texture::Builder()
.width(w)
.height(h)
.levels(1)
.format(textureFormat)
.sampler(Texture::Sampler::SAMPLER_2D)
.build(*_engine);
if (!texture)
{
Log("Failed to create texture: ");
return nullptr;
}
Texture::PixelBufferDescriptor buffer(
linearImage.getPixelRef(),
size_t(w * h * channels * sizeof(float)),
bufferFormat,
Texture::Type::FLOAT);
texture->setImage(*_engine, 0, std::move(buffer));
Log("Created texture: %s (%d x %d, %d channels)", name, w, h, channels);
_textures.insert(texture);
return texture;
}
bool SceneManager::applyTexture(EntityId entityId, Texture *texture, const char *parameterName, int materialIndex)
{
auto entity = Entity::import(entityId);
if (entity.isNull())
{
Log("Entity %d is null?", entityId);
return false;
}
RenderableManager &rm = _engine->getRenderableManager();
auto renderable = rm.getInstance(entity);
if (!renderable.isValid())
{
Log("Renderable not valid, was the entity id correct (%d)?", entityId);
return false;
}
MaterialInstance *mi = rm.getMaterialInstanceAt(renderable, materialIndex);
if (!mi)
{
Log("ERROR: material index must be less than number of material instances");
return false;
}
auto sampler = TextureSampler();
mi->setParameter(parameterName, texture, sampler);
Log("Applied texture to entity %d", entityId);
return true;
}
void SceneManager::destroyTexture(Texture *texture)
{
if (_textures.find(texture) == _textures.end())
{
Log("Warning: couldn't find texture");
}
_textures.erase(texture);
_engine->destroy(texture);
}
void SceneManager::addCollisionComponent(EntityId entityId, void (*onCollisionCallback)(const EntityId entityId1, const EntityId entityId2), bool affectsTransform)
{

View File

@@ -20,6 +20,7 @@ import 'package:path/path.dart' as p;
Color kWhite = ColorFloat32(4)..setRgba(1.0, 1.0, 1.0, 1.0);
Color kRed = ColorFloat32(4)..setRgba(1.0, 0.0, 0.0, 1.0);
Color kGreen = ColorFloat32(4)..setRgba(0.0, 1.0, 0.0, 1.0);
Color kBlue = ColorFloat32(4)..setRgba(0.0, 0.0, 1.0, 1.0);
/// Test files are run in a variety of ways, find this package root in all.
///

View File

@@ -22,6 +22,7 @@ void main() async {
textureFormat: TextureFormat.RGBA32F);
await texture.setLinearImage(
image, PixelDataFormat.RGBA, PixelDataType.FLOAT);
await texture.dispose();
}, bg: kRed);
});
});