implement picker/getNameForEntity

Nick Fisher
2023-10-11 11:10:47 +08:00
parent 79292914d2
commit 98bcf5d7ad
25 changed files with 704 additions and 393 deletions

View File

@@ -1,159 +0,0 @@
// import 'dart:async';
// import 'package:flutter/gestures.dart';
// import 'package:flutter/material.dart';
// import 'filament_controller.dart';
// import 'filament_widget.dart';
// enum GestureType { RotateCamera, PanCamera, PanBackground }
// class AvatarGestureDetector extends StatefulWidget {
// final AvatarInstance controller;
// final bool showControls;
// const AvatarGestureDetector({
// Key? key,
// required this.controller,
// this.showControls = false,
// }) : super(key: key);
// @override
// State<StatefulWidget> createState() => _AvatarGestureDetectorState();
// }
// class _AvatarGestureDetectorState extends State<AvatarGestureDetector> {
// GestureType gestureType = GestureType.PanCamera;
// final _icons = {
// GestureType.PanBackground: Icons.image,
// GestureType.PanCamera: Icons.pan_tool,
// GestureType.RotateCamera: Icons.rotate_90_degrees_ccw
// };
// // to avoid duplicating code for pan/rotate (panStart, panUpdate, panEnd, rotateStart, rotateUpdate etc)
// // we have only a single function for start/update/end.
// // when the gesture type is changed, these properties are updated to point to the correct function.
// late Future Function(double x, double y) _functionStart;
// late Future Function(double x, double y) _functionUpdate;
// late Future Function() _functionEnd;
// double _lastScale = 0;
// @override
// void initState() {
// _setFunction();
// super.initState();
// }
// void _setFunction() {
// switch (gestureType) {
// case GestureType.RotateCamera:
// _functionStart = widget.controller.rotateStart;
// _functionUpdate = widget.controller.rotateUpdate;
// _functionEnd = widget.controller.rotateEnd;
// break;
// case GestureType.PanCamera:
// _functionStart = widget.controller.panStart;
// _functionUpdate = widget.controller.panUpdate;
// _functionEnd = widget.controller.panEnd;
// break;
// // TODO
// case GestureType.PanBackground:
// _functionStart = (x, y) async {};
// _functionUpdate = (x, y) async {};
// _functionEnd = () async {};
// }
// }
// @override
// void didUpdateWidget(AvatarGestureDetector oldWidget) {
// if (widget.showControls != oldWidget.showControls) {
// setState(() {});
// }
// super.didUpdateWidget(oldWidget);
// }
// Timer? _scrollTimer;
// @override
// Widget build(BuildContext context) {
// return Stack(children: [
// Positioned.fill(
// // pinch zoom on mobile
// // couldn't find any equivalent for pointerCount in Listener so we use two widgets:
// // - outer is a GestureDetector only for pinch zoom
// // - inner is a Listener for all other gestures
// child: GestureDetector(
// // onScaleStart: (d) async {
// // if (d.pointerCount == 2) {
// // await widget.controller.zoomEnd();
// // await widget.controller.zoomBegin();
// // }
// // },
// // onScaleEnd: (d) async {
// // if (d.pointerCount == 2) {
// // _lastScale = 0;
// // await widget.controller.zoomEnd();
// // }
// // },
// // onScaleUpdate: (d) async {
// // if (d.pointerCount == 2) {
// // if (_lastScale != 0) {
// // await widget.controller
// // .zoomUpdate(100 * (_lastScale - d.scale));
// // }
// // }
// // _lastScale = d.scale;
// // },
// child: Listener(
// onPointerSignal: (pointerSignal) async {
// // scroll-wheel zoom on desktop
// if (pointerSignal is PointerScrollEvent) {
// _scrollTimer?.cancel();
// await widget.controller.zoomBegin();
// await widget.controller.zoomUpdate(
// pointerSignal.scrollDelta.dy > 0 ? 10 : -10);
// _scrollTimer = Timer(Duration(milliseconds: 100), () {
// widget.controller.zoomEnd();
// _scrollTimer = null;
// });
// } else {
// print(pointerSignal);
// }
// },
// onPointerPanZoomStart: (pzs) {},
// onPointerDown: (d) async {
// await _functionStart(
// d.localPosition.dx, d.localPosition.dy);
// },
// onPointerMove: (d) async {
// await _functionUpdate(
// d.localPosition.dx, d.localPosition.dy);
// },
// onPointerUp: (d) async {
// await _functionEnd();
// },
// child: widget.child))),
// widget.showControls
// ? Align(
// alignment: Alignment.bottomRight,
// child: GestureDetector(
// onTap: () {
// setState(() {
// var curIdx = GestureType.values.indexOf(gestureType);
// var nextIdx = curIdx == GestureType.values.length - 1
// ? 0
// : curIdx + 1;
// gestureType = GestureType.values[nextIdx];
// _setFunction();
// });
// },
// child: Container(
// padding: const EdgeInsets.all(50),
// child: Icon(_icons[gestureType], color: Colors.green)),
// ))
// : Container()
// ]);
// }
// }

View File

@@ -1,11 +1,8 @@
import 'dart:async';
import 'dart:ffi';
import 'dart:io';
import 'dart:ui' as ui;
import 'package:flutter/services.dart';
import 'package:polyvox_filament/animations/bone_animation_data.dart';
import 'package:polyvox_filament/animations/morph_animation_data.dart';
import 'package:polyvox_filament/generated_bindings.dart';
typedef FilamentEntity = int;
const FilamentEntity FILAMENT_ASSET_ERROR = 0;
@@ -18,9 +15,34 @@ abstract class FilamentController {
Stream<int?> get textureId;
Future get isReadyForScene;
///
/// The result(s) of calling [pick] (see below).
/// This may be a broadcast stream, so you should ensure you have subscribed to this stream before calling [pick].
/// If [pick] is called without an active subscription to this stream, the results will be silently discarded.
///
Stream<FilamentEntity?> get pickResult;
///
/// Set to true to continuously render the scene at the framerate specified by [setFrameRate] (60 fps by default).
///
Future setRendering(bool render);
///
/// Render a single frame.
///
Future render();
///
/// Sets the framerate for continuous rendering when [setRendering] is enabled.
///
Future setFrameRate(int framerate);
///
/// Called by FilamentGestureDetector to set the pixel ratio (obtained from [MediaQuery]) before creating the texture/viewport.
/// You may call this yourself if you want to increase/decrease the pixel density of the viewport, but calling this method won't do anything on its own.
/// You will need to manually recreate the texture/viewer afterwards.
///
void setPixelRatio(double ratio);
///
@@ -80,24 +102,51 @@ abstract class FilamentController {
double dirY,
double dirZ,
bool castShadows);
Future removeLight(FilamentEntity light);
///
/// Remove all lights (excluding IBL) from the scene.
///
Future clearLights();
Future<FilamentEntity> loadGlb(String path, {bool unlit = false});
Future<FilamentEntity> loadGltf(String path, String relativeResourcePath);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future panStart(double x, double y);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future panUpdate(double x, double y);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future panEnd();
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future rotateStart(double x, double y);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future rotateUpdate(double x, double y);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future rotateEnd();
///
/// Set the weights for all morph targets under node [meshName] in [asset] to [weights].
///
Future setMorphTargetWeights(
FilamentEntity asset, String meshName, List<double> weights);
@@ -112,6 +161,7 @@ abstract class FilamentController {
Future<double> getAnimationDuration(FilamentEntity asset, int animationIndex);
///
/// Create/start a dynamic morph target animation for [asset].
/// Animates morph target weights/bone transforms, where each frame has a duration of [frameLengthInMs].
/// [morphWeights] is a list of doubles in frame-major format.
/// Each frame is [numWeights] in length, and each entry is the weight to be applied to the morph target located at that index in the mesh primitive at that frame.
@@ -126,11 +176,37 @@ abstract class FilamentController {
/// for now we only allow animating a single bone (though multiple skinned targets are supported)
///
Future setBoneAnimation(FilamentEntity asset, BoneAnimationData animation);
///
/// Removes/destroys the specified entity from the scene.
/// [asset] will no longer be a valid handle after this method is called; ensure you immediately discard all references once this method is complete.
///
Future removeAsset(FilamentEntity asset);
///
/// Removes/destroys all renderable entities from the scene (including cameras).
/// All [FilamentEntity] handles will no longer be valid after this method is called; ensure you immediately discard all references to all entities once this method is complete.
///
Future clearAssets();
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future zoomBegin();
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future zoomUpdate(double z);
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
Future zoomEnd();
///
/// Schedules the glTF animation at [index] in [asset] to start playing on the next frame.
///
Future playAnimation(FilamentEntity asset, int index,
{bool loop = false,
bool reverse = false,
@@ -138,13 +214,33 @@ abstract class FilamentController {
double crossfade = 0.0});
Future setAnimationFrame(FilamentEntity asset, int index, int animationFrame);
Future stopAnimation(FilamentEntity asset, int animationIndex);
///
/// Sets the current scene camera to the glTF camera under [name] in [asset].
///
Future setCamera(FilamentEntity asset, String? name);
///
/// Sets the tone mapping (requires postprocessing).
///
Future setToneMapping(ToneMapper mapper);
///
/// Sets the strength of the bloom.
///
Future setBloom(double bloom);
Future setCameraFocalLength(double focalLength);
Future setCameraFocusDistance(double focusDistance);
Future setCameraPosition(double x, double y, double z);
///
/// Repositions the camera to the last vertex of the bounding box of [asset], looking at the penultimate vertex.
///
Future moveCameraToAsset(FilamentEntity asset);
///
/// Enables/disables frustum culling. We don't currently expose a method for manipulating the camera projection/culling matrices, so this is your only option for dealing with unwanted near/far clipping.
///
Future setViewFrustumCulling(bool enabled);
Future setCameraExposure(
double aperture, double shutterSpeed, double sensitivity);
@@ -153,12 +249,32 @@ abstract class FilamentController {
Future setMaterialColor(
FilamentEntity asset, String meshName, int materialIndex, Color color);
///
/// Scales [asset] up/down so it fits within a unit cube.
///
Future transformToUnitCube(FilamentEntity asset);
///
/// Sets the world space position for [asset] to the given coordinates.
///
Future setPosition(FilamentEntity asset, double x, double y, double z);
///
/// Enable/disable postprocessing.
///
Future setPostProcessing(bool enabled);
Future setScale(FilamentEntity asset, double scale);
Future setRotation(
FilamentEntity asset, double rads, double x, double y, double z);
Future hide(FilamentEntity asset, String meshName);
Future reveal(FilamentEntity asset, String meshName);
///
/// Used to select the entity in the scene at the given viewport coordinates.
/// Called by `FilamentGestureDetector` on a mouse/finger down event. You probably don't want to call this yourself.
/// This is asynchronous and will require 2-3 frames to complete; subscribe to the [pickResult] stream to receive the results of this method.
/// [x] and [y] must be in local logical coordinates (i.e. where 0,0 is at top-left of the FilamentWidget).
///
void pick(int x, int y);
}
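A minimal usage sketch (not part of this commit): it assumes a concrete [FilamentController] has already been constructed and its viewer created, and that the pick coordinates come from a widget-local tap position. The key point documented above is to subscribe to [pickResult] before calling [pick], otherwise results are silently discarded.

import 'dart:async';
import 'dart:ui' show Offset;
import 'package:polyvox_filament/filament_controller.dart';

StreamSubscription<FilamentEntity?>? _pickSub;

void listenForPicks(FilamentController controller) {
  // subscribe first; results emitted while nobody is listening are discarded
  _pickSub = controller.pickResult.listen((entity) {
    if (entity != null) {
      print("picked entity $entity");
    }
  });
}

void pickAt(FilamentController controller, Offset localPosition) {
  // local logical coordinates, i.e. (0,0) is the top-left of the FilamentWidget
  controller.pick(localPosition.dx.toInt(), localPosition.dy.toInt());
}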

View File

@@ -32,6 +32,9 @@ class FilamentControllerFFI extends FilamentController {
final String? uberArchivePath;
Stream<FilamentEntity> get pickResult => _pickResultController.stream;
final _pickResultController = StreamController<FilamentEntity>.broadcast();
///
/// This controller uses platform channels to bridge Dart with the C/C++ code for the Filament API.
/// Setting up the context/texture and the render ticker is platform-specific; all other methods are passed through to the methods specified in PolyvoxFilamentApi.h.
@@ -115,6 +118,14 @@ class FilamentControllerFFI extends FilamentController {
throw Exception(
"Do not call createViewer when a viewer has already been created without calling destroyViewer");
}
var loader = Pointer<ResourceLoaderWrapper>.fromAddress(
await _channel.invokeMethod("getResourceLoaderWrapper"));
if (loader == nullptr) {
throw Exception("Failed to get resource loader");
}
print("Using loader ${loader.address}");
size = ui.Size(width * _pixelRatio, height * _pixelRatio);
print("Creating viewer with size $size");
@@ -145,13 +156,12 @@ class FilamentControllerFFI extends FilamentController {
var sharedContext = await _channel.invokeMethod("getSharedContext");
print("Got shared context : $sharedContext");
var loader = await _channel.invokeMethod("getResourceLoaderWrapper");
_viewer = _lib.create_filament_viewer_ffi(
Pointer<Void>.fromAddress(sharedContext ?? 0),
driver,
uberArchivePath?.toNativeUtf8().cast<Char>() ?? nullptr,
Pointer<ResourceLoaderWrapper>.fromAddress(loader),
loader,
renderCallback,
renderCallbackOwner);
@@ -166,6 +176,7 @@ class FilamentControllerFFI extends FilamentController {
size.height.toInt());
if (nativeTexture != 0) {
assert(surfaceAddress == 0);
print("Creating render target from native texture $nativeTexture");
_lib.create_render_target_ffi(
_viewer!, nativeTexture, size.width.toInt(), size.height.toInt());
}
@@ -338,9 +349,6 @@ class FilamentControllerFFI extends FilamentController {
return asset;
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future panStart(double x, double y) async {
if (_viewer == null || _resizing) {
@@ -349,9 +357,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_begin(_viewer!, x * _pixelRatio, y * _pixelRatio, true);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future panUpdate(double x, double y) async {
if (_viewer == null || _resizing) {
@@ -360,9 +365,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_update(_viewer!, x * _pixelRatio, y * _pixelRatio);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future panEnd() async {
if (_viewer == null || _resizing) {
@@ -371,9 +373,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_end(_viewer!);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future rotateStart(double x, double y) async {
if (_viewer == null || _resizing) {
@@ -382,9 +381,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_begin(_viewer!, x * _pixelRatio, y * _pixelRatio, false);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future rotateUpdate(double x, double y) async {
if (_viewer == null || _resizing) {
@@ -393,9 +389,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_update(_viewer!, x * _pixelRatio, y * _pixelRatio);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future rotateEnd() async {
if (_viewer == null || _resizing) {
@@ -404,9 +397,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.grab_end(_viewer!);
}
///
/// Set the weights for all morph targets under node [meshName] in [asset] to [weights].
///
@override
Future setMorphTargetWeights(
FilamentEntity asset, String meshName, List<double> weights) async {
@@ -458,9 +448,6 @@ class FilamentControllerFFI extends FilamentController {
return names;
}
///
/// Returns the length (in seconds) of the animation at the given index.
///
@override
Future<double> getAnimationDuration(
FilamentEntity asset, int animationIndex) async {
@@ -473,9 +460,6 @@ class FilamentControllerFFI extends FilamentController {
return duration;
}
///
/// Create/start a dynamic morph target animation for [asset].
///
@override
Future setMorphAnimationData(
FilamentEntity asset, MorphAnimationData animation) async {
@@ -506,12 +490,6 @@ class FilamentControllerFFI extends FilamentController {
calloc.free(idxPtr);
}
///
/// Animates morph target weights/bone transforms (where each frame requires a duration of [frameLengthInMs].
/// [morphWeights] is a list of doubles in frame-major format.
/// Each frame is [numWeights] in length, and each entry is the weight to be applied to the morph target located at that index in the mesh primitive at that frame.
/// for now we only allow animating a single bone (though multiple skinned targets are supported)
///
@override
Future setBoneAnimation(
FilamentEntity asset, BoneAnimationData animation) async {
@@ -550,10 +528,6 @@ class FilamentControllerFFI extends FilamentController {
// calloc.free(data);
}
///
/// Removes/destroys the specified entity from the scene.
/// [asset] will no longer be a valid handle after this method is called; ensure you immediately discard all references once this method is complete.
///
@override
Future removeAsset(FilamentEntity asset) async {
if (_viewer == null || _resizing) {
@@ -562,10 +536,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.remove_asset_ffi(_viewer!, asset);
}
///
/// Removes/destroys all renderable entities from the scene (including cameras).
/// All [FilamentEntity] handles will no longer be valid after this method is called; ensure you immediately discard all references to all entities once this method is complete.
///
@override
Future clearAssets() async {
if (_viewer == null || _resizing) {
@@ -574,9 +544,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.clear_assets_ffi(_viewer!);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future zoomBegin() async {
if (_viewer == null || _resizing) {
@@ -585,9 +552,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.scroll_begin(_viewer!);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future zoomUpdate(double z) async {
if (_viewer == null || _resizing) {
@@ -596,9 +560,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.scroll_update(_viewer!, 0.0, 0.0, z);
}
///
/// Called by `FilamentGestureDetector`. You probably don't want to call this yourself.
///
@override
Future zoomEnd() async {
if (_viewer == null || _resizing) {
@@ -607,9 +568,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.scroll_end(_viewer!);
}
///
/// Schedules the glTF animation at [index] in [asset] to start playing on the next frame.
///
@override
Future playAnimation(FilamentEntity asset, int index,
{bool loop = false,
@@ -638,9 +596,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.stop_animation(_assetManager!, asset, animationIndex);
}
///
/// Sets the current scene camera to the glTF camera under [name] in [asset].
///
@override
Future setCamera(FilamentEntity asset, String? name) async {
if (_viewer == null || _resizing) {
@@ -653,9 +608,6 @@ class FilamentControllerFFI extends FilamentController {
}
}
///
/// Sets the tone mapping (requires postprocessing).
///
@override
Future setToneMapping(ToneMapper mapper) async {
if (_viewer == null || _resizing) {
@@ -665,9 +617,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.set_tone_mapping_ffi(_viewer!, mapper.index);
}
///
/// Enable/disable postprocessing.
///
@override
Future setPostProcessing(bool enabled) async {
if (_viewer == null || _resizing) {
@@ -677,9 +626,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.set_post_processing_ffi(_viewer!, enabled);
}
///
/// Sets the strength of the bloom.
///
@override
Future setBloom(double bloom) async {
if (_viewer == null || _resizing) {
@@ -716,9 +662,6 @@ class FilamentControllerFFI extends FilamentController {
_lib.move_camera_to_asset(_viewer!, asset);
}
///
/// Enables/disables frustum culling. Currently we don't expose a method for manipulating the camera projection/culling matrices so this is your only option to deal with unwanted near/far clipping.
///
@override
Future setViewFrustumCulling(bool enabled) async {
if (_viewer == null || _resizing) {
@@ -822,4 +765,31 @@ class FilamentControllerFFI extends FilamentController {
throw Exception("Failed to reveal mesh $meshName");
}
}
///
/// Returns the name assigned to [entity] by the native asset manager, or null if no name is available.
///
String? getNameForEntity(FilamentEntity entity) {
final result = _lib.get_name_for_entity(_assetManager!, entity);
if (result == nullptr) {
return null;
}
return result.cast<Utf8>().toDartString();
}
@override
void pick(int x, int y) async {
if (_viewer == null || _resizing) {
throw Exception("No viewer available, ignoring");
}
final outPtr = calloc<EntityId>(1);
outPtr.value = 0;
print("height ${size.height.toInt()} y $y");
// flip the y coordinate so the origin is at the bottom-left of the viewport
_lib.pick_ffi(_viewer!, x, size.height.toInt() - y, outPtr);
// the picked entity ID is written asynchronously by the native side, so poll until a non-zero value appears
while (outPtr.value == 0) {
await Future.delayed(Duration(milliseconds: 100));
print("Waiting");
}
var entityId = outPtr.value;
_pickResultController.add(entityId);
calloc.free(outPtr);
}
}
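A short sketch (not part of this commit) combining the two methods added here: listen on [pickResult] and resolve each picked entity to its name via getNameForEntity. It assumes a FilamentControllerFFI whose viewer and assets are already loaded; the import path for FilamentControllerFFI is an assumption.

// import path assumed; adjust to wherever FilamentControllerFFI is exported
import 'package:polyvox_filament/filament_controller_ffi.dart';

void logPickedNames(FilamentControllerFFI controller) {
  controller.pickResult.listen((entity) {
    // getNameForEntity returns null if the native side has no name for this entity
    final name = controller.getNameForEntity(entity);
    print("picked entity $entity (${name ?? "unnamed"})");
  });
}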

View File

@@ -650,4 +650,13 @@ class FilamentControllerMethodChannel extends FilamentController {
// TODO: implement destroy
throw UnimplementedError();
}
@override
void pick(int x, int y) {
// TODO: implement pick
}
@override
// TODO: implement pickResult
Stream<FilamentEntity?> get pickResult => throw UnimplementedError();
}

View File

@@ -1321,6 +1321,44 @@ class NativeLibrary {
late final _set_post_processing = _set_post_processingPtr
.asFunction<void Function(ffi.Pointer<ffi.Void>, bool)>();
void pick(
ffi.Pointer<ffi.Void> viewer,
int x,
int y,
ffi.Pointer<EntityId> entityId,
) {
return _pick(
viewer,
x,
y,
entityId,
);
}
late final _pickPtr = _lookup<
ffi.NativeFunction<
ffi.Void Function(ffi.Pointer<ffi.Void>, ffi.Int, ffi.Int,
ffi.Pointer<EntityId>)>>('pick');
late final _pick = _pickPtr.asFunction<
void Function(ffi.Pointer<ffi.Void>, int, int, ffi.Pointer<EntityId>)>();
ffi.Pointer<ffi.Char> get_name_for_entity(
ffi.Pointer<ffi.Void> assetManager,
int entityId,
) {
return _get_name_for_entity(
assetManager,
entityId,
);
}
late final _get_name_for_entityPtr = _lookup<
ffi.NativeFunction<
ffi.Pointer<ffi.Char> Function(
ffi.Pointer<ffi.Void>, EntityId)>>('get_name_for_entity');
late final _get_name_for_entity = _get_name_for_entityPtr
.asFunction<ffi.Pointer<ffi.Char> Function(ffi.Pointer<ffi.Void>, int)>();
void ios_dummy() {
return _ios_dummy();
}
@@ -1412,7 +1450,7 @@ class NativeLibrary {
late final _create_render_target_ffiPtr = _lookup<
ffi.NativeFunction<
ffi.Void Function(ffi.Pointer<ffi.Void>, ffi.Uint32, ffi.Uint32,
ffi.Void Function(ffi.Pointer<ffi.Void>, ffi.IntPtr, ffi.Uint32,
ffi.Uint32)>>('create_render_target_ffi');
late final _create_render_target_ffi = _create_render_target_ffiPtr
.asFunction<void Function(ffi.Pointer<ffi.Void>, int, int, int)>();
@@ -2173,6 +2211,27 @@ class NativeLibrary {
late final _set_post_processing_ffi = _set_post_processing_ffiPtr
.asFunction<void Function(ffi.Pointer<ffi.Void>, bool)>();
void pick_ffi(
ffi.Pointer<ffi.Void> viewer,
int x,
int y,
ffi.Pointer<EntityId> entityId,
) {
return _pick_ffi(
viewer,
x,
y,
entityId,
);
}
late final _pick_ffiPtr = _lookup<
ffi.NativeFunction<
ffi.Void Function(ffi.Pointer<ffi.Void>, ffi.Int, ffi.Int,
ffi.Pointer<EntityId>)>>('pick_ffi');
late final _pick_ffi = _pick_ffiPtr.asFunction<
void Function(ffi.Pointer<ffi.Void>, int, int, ffi.Pointer<EntityId>)>();
void ios_dummy_ffi() {
return _ios_dummy_ffi();
}
@@ -2306,6 +2365,9 @@ typedef LoadFilamentResourceFromOwner = ffi.Pointer<
typedef FreeFilamentResourceFromOwner = ffi.Pointer<
ffi
.NativeFunction<ffi.Void Function(ResourceBuffer, ffi.Pointer<ffi.Void>)>>;
/// This header replicates most of the methods in PolyvoxFilamentApi.h, and is only intended to be used to generate client FFI bindings.
/// The intention is that calling one of these methods will call its respective method in PolyvoxFilamentApi.h, but wrapped in some kind of thread runner to ensure thread safety.
typedef EntityId = ffi.Int32;
typedef FilamentRenderCallback = ffi.Pointer<
ffi.NativeFunction<ffi.Void Function(ffi.Pointer<ffi.Void> owner)>>;

View File

@@ -0,0 +1,72 @@
import 'dart:async';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:polyvox_filament/widgets/filament_gesture_detector_desktop.dart';
import 'package:polyvox_filament/widgets/filament_gesture_detector_mobile.dart';
import '../filament_controller.dart';
enum GestureType { RotateCamera, PanCamera, PanBackground }
///
/// A widget that translates finger/mouse gestures to zoom/pan/rotate actions.
///
class FilamentGestureDetector extends StatelessWidget {
///
/// The content to display below the gesture detector/listener widget.
/// This will usually be a FilamentWidget (so you can navigate by directly interacting with the viewport), but this is not necessary.
/// It is equally possible to render the viewport/gesture controls elsewhere in the widget hierarchy. The only requirement is that they share the same [FilamentController].
///
final Widget? child;
///
/// The [controller] attached to the [FilamentWidget] you wish to control.
///
final FilamentController controller;
///
/// If true, an overlay will be shown with buttons to toggle whether pointer movements are interpreted as:
/// 1) rotate or a pan (mobile only),
/// 2) moving the camera or the background image (TODO).
///
final bool showControlOverlay;
///
/// If false, all gestures will be ignored.
///
final bool listenerEnabled;
final double zoomDelta;
const FilamentGestureDetector(
{Key? key,
required this.controller,
this.child,
this.showControlOverlay = false,
this.listenerEnabled = true,
this.zoomDelta = 1})
: super(key: key);
@override
Widget build(BuildContext context) {
if (kIsWeb) {
throw Exception("TODO");
} else if (Platform.isLinux || Platform.isWindows || Platform.isMacOS) {
return FilamentGestureDetectorDesktop(
controller: controller,
child: child,
showControlOverlay: showControlOverlay,
listenerEnabled: listenerEnabled,
);
} else {
return FilamentGestureDetectorMobile(
controller: controller,
child: child,
showControlOverlay: showControlOverlay,
listenerEnabled: listenerEnabled,
);
}
}
}
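A hypothetical widget-tree sketch (not part of this commit) showing how this dispatcher is typically used. It only relies on the constructor parameters shown above; the child would normally be a FilamentWidget sharing the same [FilamentController], but since that widget's constructor isn't shown in this diff, a plain Container stands in here.

import 'package:flutter/material.dart';
import 'package:polyvox_filament/filament_controller.dart';
// import path assumed; adjust to wherever FilamentGestureDetector is exported
import 'package:polyvox_filament/widgets/filament_gesture_detector.dart';

Widget buildViewport(FilamentController controller) {
  return FilamentGestureDetector(
    controller: controller,
    showControlOverlay: true,
    // typically a FilamentWidget sharing the same controller; any widget (or null) is accepted
    child: Container(color: Colors.black),
  );
}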

View File

@@ -0,0 +1,144 @@
import 'dart:async';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import '../filament_controller.dart';
///
/// A widget that translates finger/mouse gestures to zoom/pan/rotate actions.
///
class FilamentGestureDetectorDesktop extends StatefulWidget {
///
/// The content to display below the gesture detector/listener widget.
/// This will usually be a FilamentWidget (so you can navigate by directly interacting with the viewport), but this is not necessary.
/// It is equally possible to render the viewport/gesture controls elsewhere in the widget hierarchy. The only requirement is that they share the same [FilamentController].
///
final Widget? child;
///
/// The [controller] attached to the [FilamentWidget] you wish to control.
///
final FilamentController controller;
///
/// If true, an overlay will be shown with buttons to toggle whether pointer movements are interpreted as:
/// 1) rotate or a pan (mobile only),
/// 2) moving the camera or the background image (TODO).
///
final bool showControlOverlay;
///
/// If false, all gestures will be ignored.
///
final bool listenerEnabled;
final double zoomDelta;
const FilamentGestureDetectorDesktop(
{Key? key,
required this.controller,
this.child,
this.showControlOverlay = false,
this.listenerEnabled = true,
this.zoomDelta = 1})
: super(key: key);
@override
State<StatefulWidget> createState() => _FilamentGestureDetectorDesktopState();
}
class _FilamentGestureDetectorDesktopState
extends State<FilamentGestureDetectorDesktop> {
///
///
///
bool _scaling = false;
bool _pointerMoving = false;
@override
void didUpdateWidget(FilamentGestureDetectorDesktop oldWidget) {
if (widget.showControlOverlay != oldWidget.showControlOverlay ||
widget.listenerEnabled != oldWidget.listenerEnabled) {
setState(() {});
}
super.didUpdateWidget(oldWidget);
}
Timer? _scrollTimer;
///
/// Scroll-wheel on desktop, interpreted as zoom
///
void _zoom(PointerScrollEvent pointerSignal) {
_scrollTimer?.cancel();
widget.controller.zoomBegin();
widget.controller.zoomUpdate(pointerSignal.scrollDelta.dy > 0
? widget.zoomDelta
: -widget.zoomDelta);
_scrollTimer = Timer(const Duration(milliseconds: 100), () {
widget.controller.zoomEnd();
_scrollTimer = null;
});
}
@override
Widget build(BuildContext context) {
if (!widget.listenerEnabled) {
return widget.child ?? Container();
}
return Listener(
onPointerSignal: (PointerSignalEvent pointerSignal) async {
if (pointerSignal is PointerScrollEvent) {
_zoom(pointerSignal);
} else {
throw Exception("TODO");
}
},
onPointerPanZoomStart: (pzs) {
throw Exception("TODO - is this a pinch zoom on laptop trackpad?");
},
// ignore all pointer down events
// so we can wait to see if the pointer will be held/moved (interpreted as rotate/pan),
// or if this is a single mousedown event (interpreted as viewport pick)
onPointerDown: (d) async {},
// holding/moving the left mouse button is interpreted as a pan, middle mouse button as a rotate
onPointerMove: (PointerMoveEvent d) async {
// if this is the first move event, we need to call rotateStart/panStart to set the first coordinates
if (!_pointerMoving) {
if (d.buttons == kTertiaryButton) {
widget.controller.rotateStart(d.position.dx, d.position.dy);
} else {
widget.controller
.panStart(d.localPosition.dx, d.localPosition.dy);
}
}
// set the _pointerMoving flag so we don't call rotateStart/panStart on future move events
_pointerMoving = true;
if (d.buttons == kTertiaryButton) {
widget.controller.rotateUpdate(d.position.dx, d.position.dy);
} else {
widget.controller.panUpdate(d.localPosition.dx, d.localPosition.dy);
}
},
// when the left mouse button is released:
// 1) if _pointerMoving is true, this completes the pan
// 2) if _pointerMoving is false, this is interpreted as a pick
// releasing the middle mouse button completes the rotation and is never interpreted as a pick
onPointerUp: (PointerUpEvent d) async {
if (d.buttons == kTertiaryButton) {
widget.controller.rotateEnd();
} else {
if (_pointerMoving) {
widget.controller.panEnd();
} else {
widget.controller
.pick(d.localPosition.dx.toInt(), d.localPosition.dy.toInt());
}
}
_pointerMoving = false;
},
child: widget.child);
}
}

View File

@@ -1,34 +1,54 @@
import 'dart:async';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'filament_controller.dart';
import '../filament_controller.dart';
enum GestureType { RotateCamera, PanCamera, PanBackground }
class FilamentGestureDetector extends StatefulWidget {
///
/// A widget that translates finger/mouse gestures to zoom/pan/rotate actions.
///
class FilamentGestureDetectorMobile extends StatefulWidget {
///
/// The content to display below the gesture detector/listener widget.
/// This will usually be a FilamentWidget (so you can navigate by directly interacting with the viewport), but this is not necessary.
/// It is equally possible to render the viewport/gesture controls elsewhere in the widget hierarchy. The only requirement is that they share the same [FilamentController].
///
final Widget? child;
///
/// The [controller] attached to the [FilamentWidget] you wish to control.
///
final FilamentController controller;
///
/// If true, an overlay will be shown with buttons to toggle whether pointer movements are interpreted as:
/// 1) rotate or a pan (mobile only),
/// 2) moving the camera or the background image (TODO).
///
final bool showControlOverlay;
final bool enableControls;
///
/// If false, all gestures will be ignored.
///
final bool listenerEnabled;
final double zoomDelta;
const FilamentGestureDetector(
const FilamentGestureDetectorMobile(
{Key? key,
required this.controller,
this.child,
this.showControlOverlay = false,
this.enableControls = true,
this.listenerEnabled = true,
this.zoomDelta = 1})
: super(key: key);
@override
State<StatefulWidget> createState() => _FilamentGestureDetectorState();
State<StatefulWidget> createState() => _FilamentGestureDetectorMobileState();
}
class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
class _FilamentGestureDetectorMobileState
extends State<FilamentGestureDetectorMobile> {
GestureType gestureType = GestureType.PanCamera;
final _icons = {
@@ -37,7 +57,14 @@ class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
GestureType.RotateCamera: Icons.rotate_90_degrees_ccw
};
bool _rotating = false;
// on mobile, we can't differentiate between pointer down events like we do on desktop with primary/secondary/tertiary buttons
// we allow the user to toggle between panning and rotating by double-tapping the widget
bool _rotateOnPointerMove = false;
//
//
//
bool _scaling = false;
// to avoid duplicating code for pan/rotate (panStart, panUpdate, panEnd, rotateStart, rotateUpdate etc)
// we have only a single function for start/update/end.
@@ -73,9 +100,9 @@ class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
}
@override
void didUpdateWidget(FilamentGestureDetector oldWidget) {
void didUpdateWidget(FilamentGestureDetectorMobile oldWidget) {
if (widget.showControlOverlay != oldWidget.showControlOverlay ||
widget.enableControls != oldWidget.enableControls) {
widget.listenerEnabled != oldWidget.listenerEnabled) {
setState(() {});
}
@@ -84,79 +111,31 @@ class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
Timer? _scrollTimer;
Widget _desktop() {
return Listener(
onPointerSignal: !widget.enableControls
? null
: (pointerSignal) async {
// scroll-wheel zoom on desktop
if (pointerSignal is PointerScrollEvent) {
_scrollTimer?.cancel();
widget.controller.zoomBegin();
widget.controller.zoomUpdate(pointerSignal.scrollDelta.dy > 0
? widget.zoomDelta
: -widget.zoomDelta);
_scrollTimer = Timer(Duration(milliseconds: 100), () {
widget.controller.zoomEnd();
_scrollTimer = null;
});
}
},
onPointerPanZoomStart: !widget.enableControls ? null : (pzs) {},
onPointerDown: !widget.enableControls
? null
: (d) async {
if (d.buttons == kTertiaryButton || _rotating) {
widget.controller.rotateStart(
d.position.dx * 2.0,
d.position.dy *
2.0); // multiply by 2.0 to account for pixel density, TODO don't hardcode
} else {
widget.controller
.panStart(d.localPosition.dx, d.localPosition.dy);
}
},
onPointerMove: !widget.enableControls
? null
: (PointerMoveEvent d) async {
if (d.buttons == kTertiaryButton || _rotating) {
widget.controller
.rotateUpdate(d.position.dx * 2.0, d.position.dy * 2.0);
} else {
widget.controller
.panUpdate(d.localPosition.dx, d.localPosition.dy);
}
},
onPointerUp: !widget.enableControls
? null
: (d) async {
if (d.buttons == kTertiaryButton || _rotating) {
widget.controller.rotateEnd();
} else {
widget.controller.panEnd();
}
},
child: widget.child);
}
// pinch zoom on mobile
// couldn't find any equivalent for pointerCount in Listener so we use two widgets:
// - outer is a GestureDetector only for pinch zoom
// - inner is a Listener for all other gestures (including scroll zoom on desktop)
@override
Widget build(BuildContext context) {
if (!widget.listenerEnabled) {
return widget.child ?? Container();
}
bool _scaling = false;
double _lastScale = 0;
DateTime _lastUpdate = DateTime.now();
Widget _mobile() {
return GestureDetector(
behavior: HitTestBehavior.opaque,
onDoubleTap: () {
_rotating = !_rotating;
},
onScaleStart: !widget.enableControls
? null
: (d) async {
return Stack(children: [
Positioned.fill(
child: GestureDetector(
behavior: HitTestBehavior.opaque,
onDoubleTap: () {
setState(() {
_rotateOnPointerMove = !_rotateOnPointerMove;
});
},
onScaleStart: (d) async {
if (d.pointerCount == 2) {
_lastScale = 0;
_scaling = true;
widget.controller.zoomBegin();
} else if (!_scaling) {
if (_rotating) {
if (_rotateOnPointerMove) {
widget.controller
.rotateStart(d.focalPoint.dx, d.focalPoint.dy);
} else {
@@ -165,32 +144,24 @@ class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
}
}
},
onScaleEnd: !widget.enableControls
? null
: (d) async {
onScaleEnd: (d) async {
if (d.pointerCount == 2) {
widget.controller.zoomEnd();
_lastScale = 0;
_scaling = false;
} else if (!_scaling) {
if (_rotating) {
if (_rotateOnPointerMove) {
widget.controller.rotateEnd();
} else {
widget.controller.panEnd();
}
}
},
onScaleUpdate: !widget.enableControls
? null
: (ScaleUpdateDetails d) async {
onScaleUpdate: (ScaleUpdateDetails d) async {
if (d.pointerCount == 2) {
// var scale = d.horizontalScale - _lastScale;
// print(scale);
widget.controller
.zoomUpdate(d.horizontalScale > 1 ? 0.1 : -0.1);
_lastScale = d.horizontalScale;
} else if (!_scaling) {
if (_rotating) {
if (_rotateOnPointerMove) {
widget.controller
.rotateUpdate(d.focalPoint.dx, d.focalPoint.dy);
} else {
@@ -199,27 +170,7 @@ class _FilamentGestureDetectorState extends State<FilamentGestureDetector> {
}
}
},
child: widget.child);
}
@override
Widget build(BuildContext context) {
late Widget controls;
if (kIsWeb) {
controls = Container();
} else if (Platform.isLinux || Platform.isWindows || Platform.isMacOS) {
controls = _desktop();
} else {
controls = _mobile();
}
return Stack(children: [
Positioned.fill(
// pinch zoom on mobile
// couldn't find any equivalent for pointerCount in Listener so we use two widgets:
// - outer is a GestureDetector only for pinch zoom
// - inner is a Listener for all other gestures (including scroll zoom on desktop)
child: controls),
child: widget.child)),
widget.showControlOverlay
? Align(
alignment: Alignment.bottomRight,

View File

@@ -4,12 +4,10 @@ import 'dart:math';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter/scheduler.dart';
import 'package:polyvox_filament/filament_controller.dart';
import 'dart:async';
import 'filament_controller_method_channel.dart';
typedef ResizeCallback = void Function(Size oldSize, Size newSize);