Fix Android transparency and add removeAsset method

This commit is contained in:
Nick Fisher
2022-07-11 19:12:49 +10:00
parent f65c7ab50b
commit e3ab9ced81
10 changed files with 139 additions and 500 deletions

View File

@@ -20,3 +20,23 @@ iOS:
Extract and move both lib/ and include/ to ./ios
# Running
## Android
- MainActivity.kt must have the following:
```
class MainActivity: FlutterActivity() {
override fun getTransparencyMode(): TransparencyMode {
return TransparencyMode.transparent
}
}
```
and the theme must include the following in `styles.xml`:
```
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">@android:color/transparent</item>
</style>
```

View File

@@ -169,5 +169,9 @@ extern "C" {
void release_source_assets(void* viewer) {
((FilamentViewer*)viewer)->releaseSourceAssets();
}
void remove_asset(void* viewer) {
((FilamentViewer*)viewer)->removeAsset();
}
}

View File

@@ -62,4 +62,7 @@ interface FilamentInterop : Library {
fun release_source_assets(viewer:Pointer);
fun remove_asset(viewer:Pointer);
}

View File

@@ -104,8 +104,8 @@ PlatformView {
_methodChannel.invokeMethod("ready", null)
choreographer = Choreographer.getInstance()
_view.setZOrderOnTop(true)
_view.setZOrderOnTop(false)
_view.holder.setFormat(PixelFormat.OPAQUE)
_view.holder.addCallback (object : SurfaceHolder.Callback {
@@ -301,6 +301,10 @@ PlatformView {
_lib.release_source_assets(_viewer!!)
result.success("OK");
}
"removeAsset" -> {
_lib.remove_asset(_viewer!!)
result.success("OK");
}
"playAnimation" -> {
val args = call.arguments as ArrayList<Any?>
_lib.play_animation(_viewer!!, args[0] as Int, args[1] as Boolean)

View File

@@ -1,429 +1,10 @@
package app.polyvox.filament_example
import io.flutter.embedding.android.FlutterActivity
// import com.google.android.filament.gltf.*
import com.google.android.filament.utils.*
import android.annotation.SuppressLint
import android.app.Activity
import android.os.Bundle
import android.util.Log
import android.view.*
import android.view.GestureDetector
import android.widget.TextView
import android.widget.Toast
import com.google.android.filament.Fence
import com.google.android.filament.IndirectLight
import com.google.android.filament.Skybox
import com.google.android.filament.View
import com.google.android.filament.utils.*
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import java.io.File
import java.io.FileInputStream
import java.io.RandomAccessFile
import java.nio.Buffer
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.zip.ZipInputStream
import io.flutter.embedding.android.TransparencyMode
// Host activity for the Flutter embedding. Requests a transparent Flutter
// surface so content behind/below the Flutter view can show through
// (paired with the transparent windowBackground set in styles.xml).
class MainActivity : FlutterActivity() {
    // Single-expression override: always render with a transparent surface.
    override fun getTransparencyMode(): TransparencyMode = TransparencyMode.transparent
}
//
// import io.flutter.embedding.android.FlutterActivity
// class MainActivity : FlutterActivity() {
// // companion object {
// // // // Load the library for the utility layer, which in turn loads gltfio and the Filament core.
// // // init { Utils.init() }
// // private const val TAG = "gltf-viewer"
// // }
// // private lateinit var surfaceView : SurfaceView
// // private lateinit var choreographer: Choreographer
// // private val frameScheduler = FrameCallback()
// // private lateinit var modelViewer: ModelViewer
// // private lateinit var titlebarHint: TextView
// // private val doubleTapListener = DoubleTapListener()
// // private lateinit var doubleTapDetector: GestureDetector
// // private var remoteServer: RemoteServer? = null
// // private var statusToast: Toast? = null
// // private var statusText: String? = null
// // private var latestDownload: String? = null
// // private val automation = AutomationEngine()
// // private var loadStartTime = 0L
// // private var loadStartFence: Fence? = null
// // private val viewerContent = AutomationEngine.ViewerContent()
// // @SuppressLint("ClickableViewAccessibility")
// // override fun onCreate(savedInstanceState: Bundle?) {
// // super.onCreate(savedInstanceState)
// // surfaceView = SurfaceView(this)
// // setContentView( surfaceView)
// // // window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
// // // // titlebarHint = findViewById(R.id.user_hint)
// // // // surfaceView = findViewById(R.id.main_sv)
// // // choreographer = Choreographer.getInstance()
// // // doubleTapDetector = GestureDetector(applicationContext, doubleTapListener)
// // // modelViewer = ModelViewer(surfaceView)
// // // viewerContent.view = modelViewer.view
// // // viewerContent.sunlight = modelViewer.light
// // // viewerContent.lightManager = modelViewer.engine.lightManager
// // // viewerContent.scene = modelViewer.scene
// // // viewerContent.renderer = modelViewer.renderer
// // // surfaceView.setOnTouchListener { _, event ->
// // // modelViewer.onTouchEvent(event)
// // // doubleTapDetector.onTouchEvent(event)
// // // true
// // // }
// // // createDefaultRenderables()
// // // createIndirectLight()
// // // setStatusText("To load a new model, go to the above URL on your host machine.")
// // // val view = modelViewer.view
// // // /*
// // // * Note: The settings below are overriden when connecting to the remote UI.
// // // */
// // // // on mobile, better use lower quality color buffer
// // // view.renderQuality = view.renderQuality.apply {
// // // hdrColorBuffer = View.QualityLevel.MEDIUM
// // // }
// // // // dynamic resolution often helps a lot
// // // view.dynamicResolutionOptions = view.dynamicResolutionOptions.apply {
// // // enabled = true
// // // quality = View.QualityLevel.MEDIUM
// // // }
// // // // MSAA is needed with dynamic resolution MEDIUM
// // // view.multiSampleAntiAliasingOptions = view.multiSampleAntiAliasingOptions.apply {
// // // enabled = true
// // // }
// // // // FXAA is pretty cheap and helps a lot
// // // view.antiAliasing = View.AntiAliasing.FXAA;
// // // // ambient occlusion is the cheapest effect that adds a lot of quality
// // // view.ambientOcclusionOptions = view.ambientOcclusionOptions.apply {
// // // enabled = true
// // // }
// // // // bloom is pretty expensive but adds a fair amount of realism
// // // view.bloomOptions = view.bloomOptions.apply {
// // // enabled = true
// // // }
// // // remoteServer = RemoteServer(8082)
// // }
// // private fun createDefaultRenderables() {
// // val buffer = assets.open("models/scene.gltf").use { input ->
// // val bytes = ByteArray(input.available())
// // input.read(bytes)
// // ByteBuffer.wrap(bytes)
// // }
// // modelViewer.loadModelGltfAsync(buffer) { uri -> readCompressedAsset("models/$uri") }
// // updateRootTransform()
// // }
// // private fun createIndirectLight() {
// // val engine = modelViewer.engine
// // val scene = modelViewer.scene
// // val ibl = "default_env"
// // readCompressedAsset("envs/$ibl/${ibl}_ibl.ktx").let {
// // scene.indirectLight = KTXLoader.createIndirectLight(engine, it)
// // scene.indirectLight!!.intensity = 30_000.0f
// // viewerContent.indirectLight = modelViewer.scene.indirectLight
// // }
// // readCompressedAsset("envs/$ibl/${ibl}_skybox.ktx").let {
// // scene.skybox = KTXLoader.createSkybox(engine, it)
// // }
// // }
// // private fun readCompressedAsset(assetName: String): ByteBuffer {
// // val input = assets.open(assetName)
// // val bytes = ByteArray(input.available())
// // input.read(bytes)
// // return ByteBuffer.wrap(bytes)
// // }
// // private fun clearStatusText() {
// // statusToast?.let {
// // it.cancel()
// // statusText = null
// // }
// // }
// // private fun setStatusText(text: String) {
// // runOnUiThread {
// // if (statusToast == null || statusText != text) {
// // statusText = text
// // statusToast = Toast.makeText(applicationContext, text, Toast.LENGTH_SHORT)
// // statusToast!!.show()
// // }
// // }
// // }
// // private suspend fun loadGlb(message: RemoteServer.ReceivedMessage) {
// // withContext(Dispatchers.Main) {
// // modelViewer.destroyModel()
// // modelViewer.loadModelGlb(message.buffer)
// // updateRootTransform()
// // loadStartTime = System.nanoTime()
// // loadStartFence = modelViewer.engine.createFence()
// // }
// // }
// // private suspend fun loadHdr(message: RemoteServer.ReceivedMessage) {
// // withContext(Dispatchers.Main) {
// // val engine = modelViewer.engine
// // val equirect = HDRLoader.createTexture(engine, message.buffer)
// // if (equirect == null) {
// // setStatusText("Could not decode HDR file.")
// // } else {
// // setStatusText("Successfully decoded HDR file.")
// // val context = IBLPrefilterContext(engine)
// // val equirectToCubemap = IBLPrefilterContext.EquirectangularToCubemap(context)
// // val skyboxTexture = equirectToCubemap.run(equirect)!!
// // engine.destroyTexture(equirect)
// // val specularFilter = IBLPrefilterContext.SpecularFilter(context)
// // val reflections = specularFilter.run(skyboxTexture)
// // val ibl = IndirectLight.Builder()
// // .reflections(reflections)
// // .intensity(30000.0f)
// // .build(engine)
// // val sky = Skybox.Builder().environment(skyboxTexture).build(engine)
// // specularFilter.destroy();
// // equirectToCubemap.destroy();
// // context.destroy();
// // // destroy the previous IBl
// // engine.destroyIndirectLight(modelViewer.scene.indirectLight!!);
// // engine.destroySkybox(modelViewer.scene.skybox!!);
// // modelViewer.scene.skybox = sky
// // modelViewer.scene.indirectLight = ibl
// // viewerContent.indirectLight = ibl
// // }
// // }
// // }
// // private suspend fun loadZip(message: RemoteServer.ReceivedMessage) {
// // // To alleviate memory pressure, remove the old model before deflating the zip.
// // withContext(Dispatchers.Main) {
// // modelViewer.destroyModel()
// // }
// // // Large zip files should first be written to a file to prevent OOM.
// // // It is also crucial that we null out the message "buffer" field.
// // val (zipStream, zipFile) = withContext(Dispatchers.IO) {
// // val file = File.createTempFile("incoming", "zip", cacheDir)
// // val raf = RandomAccessFile(file, "rw")
// // raf.getChannel().write(message.buffer);
// // message.buffer = null
// // raf.seek(0)
// // Pair(FileInputStream(file), file)
// // }
// // // Deflate each resource using the IO dispatcher, one by one.
// // var gltfPath: String? = null
// // var outOfMemory: String? = null
// // val pathToBufferMapping = withContext(Dispatchers.IO) {
// // val deflater = ZipInputStream(zipStream)
// // val mapping = HashMap<String, Buffer>()
// // while (true) {
// // val entry = deflater.nextEntry ?: break
// // if (entry.isDirectory) continue
// // // This isn't strictly required, but as an optimization
// // // we ignore common junk that often pollutes ZIP files.
// // if (entry.name.startsWith("__MACOSX")) continue
// // if (entry.name.startsWith(".DS_Store")) continue
// // val uri = entry.name
// // val byteArray: ByteArray? = try {
// // deflater.readBytes()
// // }
// // catch (e: OutOfMemoryError) {
// // outOfMemory = uri
// // break
// // }
// // Log.i(TAG, "Deflated ${byteArray!!.size} bytes from $uri")
// // val buffer = ByteBuffer.wrap(byteArray)
// // mapping[uri] = buffer
// // if (uri.endsWith(".gltf") || uri.endsWith(".glb")) {
// // gltfPath = uri
// // }
// // }
// // mapping
// // }
// // zipFile.delete()
// // if (gltfPath == null) {
// // setStatusText("Could not find .gltf or .glb in the zip.")
// // return
// // }
// // if (outOfMemory != null) {
// // setStatusText("Out of memory while deflating $outOfMemory")
// // return
// // }
// // val gltfBuffer = pathToBufferMapping[gltfPath]!!
// // // The gltf is often not at the root level (e.g. if a folder is zipped) so
// // // we need to extract its path in order to resolve the embedded uri strings.
// // var gltfPrefix = gltfPath!!.substringBeforeLast('/', "")
// // if (gltfPrefix.isNotEmpty()) {
// // gltfPrefix += "/"
// // }
// // withContext(Dispatchers.Main) {
// // if (gltfPath!!.endsWith(".glb")) {
// // modelViewer.loadModelGlb(gltfBuffer)
// // } else {
// // modelViewer.loadModelGltf(gltfBuffer) { uri ->
// // val path = gltfPrefix + uri
// // if (!pathToBufferMapping.contains(path)) {
// // Log.e(TAG, "Could not find $path in the zip.")
// // setStatusText("Zip is missing $path")
// // }
// // pathToBufferMapping[path]
// // }
// // }
// // updateRootTransform()
// // loadStartTime = System.nanoTime()
// // loadStartFence = modelViewer.engine.createFence()
// // }
// // }
// // override fun onResume() {
// // super.onResume()
// // choreographer.postFrameCallback(frameScheduler)
// // }
// // override fun onPause() {
// // super.onPause()
// // choreographer.removeFrameCallback(frameScheduler)
// // }
// // override fun onDestroy() {
// // super.onDestroy()
// // choreographer.removeFrameCallback(frameScheduler)
// // remoteServer?.close()
// // }
// // fun loadModelData(message: RemoteServer.ReceivedMessage) {
// // Log.i(TAG, "Downloaded model ${message.label} (${message.buffer.capacity()} bytes)")
// // clearStatusText()
// // // titlebarHint.text = message.label
// // CoroutineScope(Dispatchers.IO).launch {
// // if (message.label.endsWith(".zip")) {
// // loadZip(message)
// // } else if (message.label.endsWith(".hdr")) {
// // loadHdr(message)
// // } else {
// // loadGlb(message)
// // }
// // }
// // }
// // fun loadSettings(message: RemoteServer.ReceivedMessage) {
// // val json = StandardCharsets.UTF_8.decode(message.buffer).toString()
// // viewerContent.assetLights = modelViewer.asset?.lightEntities
// // automation.applySettings(json, viewerContent)
// // modelViewer.view.colorGrading = automation.getColorGrading(modelViewer.engine)
// // modelViewer.cameraFocalLength = automation.viewerOptions.cameraFocalLength
// // updateRootTransform()
// // }
// // private fun updateRootTransform() {
// // if (automation.viewerOptions.autoScaleEnabled) {
// // modelViewer.transformToUnitCube()
// // } else {
// // modelViewer.clearRootTransform()
// // }
// // }
// // inner class FrameCallback : Choreographer.FrameCallback {
// // private val startTime = System.nanoTime()
// // override fun doFrame(frameTimeNanos: Long) {
// // choreographer.postFrameCallback(this)
// // loadStartFence?.let {
// // if (it.wait(Fence.Mode.FLUSH, 0) == Fence.FenceStatus.CONDITION_SATISFIED) {
// // val end = System.nanoTime()
// // val total = (end - loadStartTime) / 1_000_000
// // Log.i(TAG, "The Filament backend took $total ms to load the model geometry.")
// // modelViewer.engine.destroyFence(it)
// // loadStartFence = null
// // }
// // }
// // modelViewer.animator?.apply {
// // if (animationCount > 0) {
// // val elapsedTimeSeconds = (frameTimeNanos - startTime).toDouble() / 1_000_000_000
// // applyAnimation(0, elapsedTimeSeconds.toFloat())
// // }
// // updateBoneMatrices()
// // }
// // modelViewer.render(frameTimeNanos)
// // // Check if a new download is in progress. If so, let the user know with toast.
// // val currentDownload = remoteServer?.peekIncomingLabel()
// // if (RemoteServer.isBinary(currentDownload) && currentDownload != latestDownload) {
// // latestDownload = currentDownload
// // Log.i(TAG, "Downloading $currentDownload")
// // setStatusText("Downloading $currentDownload")
// // }
// // // Check if a new message has been fully received from the client.
// // val message = remoteServer?.acquireReceivedMessage()
// // if (message != null) {
// // if (message.label == latestDownload) {
// // latestDownload = null
// // }
// // if (RemoteServer.isJson(message.label)) {
// // loadSettings(message)
// // } else {
// // loadModelData(message)
// // }
// // }
// // }
// // }
// // // Just for testing purposes, this releases the current model and reloads the default model.
// // inner class DoubleTapListener : GestureDetector.SimpleOnGestureListener() {
// // override fun onDoubleTap(e: MotionEvent?): Boolean {
// // modelViewer.destroyModel()
// // createDefaultRenderables()
// // return super.onDoubleTap(e)
// // }
// // }
// }

View File

@@ -13,6 +13,6 @@
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
<item name="android:windowBackground">@android:color/transparent</item>
</style>
</resources>

View File

@@ -28,16 +28,16 @@ class _MyAppState extends State<MyApp> {
@override
Widget build(BuildContext context) {
return MaterialApp(
color: Colors.transparent,
home: Scaffold(
appBar: AppBar(
title: const Text('Plugin example app'),
),
backgroundColor: Colors.transparent,
body: Column(children: [
SizedBox(
Expanded(child:SizedBox(
height:200, width:200,
child:FilamentWidget(
controller: _filamentController,
)),
))),
Expanded(
child: Wrap(
alignment: WrapAlignment.end,
@@ -63,6 +63,12 @@ class _MyAppState extends State<MyApp> {
await _filamentController.loadGltf(
'assets/FlightHelmet/FlightHelmet.gltf', 'assets/FlightHelmet');
}),
ElevatedButton(
child: const Text('remove asset'),
onPressed: () async {
await _filamentController
.removeAsset();
}),
ElevatedButton(
child: const Text('set all weights to 1'),
onPressed: () async {

View File

@@ -116,8 +116,7 @@ namespace polyvox
LoadResource loadResource,
FreeResource freeResource) : _layer(layer),
_loadResource(loadResource),
_freeResource(freeResource),
_assetBuffer(nullptr, 0, 0)
_freeResource(freeResource)
{
_engine = Engine::create(Engine::Backend::OPENGL);
@@ -166,10 +165,23 @@ namespace polyvox
manipulator =
Manipulator<float>::Builder().orbitHomePosition(0.0f, 0.0f, 0.05f).targetPosition(0.0f, 0.0f, 0.0f).build(Mode::ORBIT);
_asset = nullptr;
// Always add a direct light source since it is required for shadowing.
_sun = EntityManager::get().create();
LightManager::Builder(LightManager::Type::DIRECTIONAL)
.color(Color::cct(6500.0f))
.intensity(100000.0f)
.direction(math::float3(0.0f, 1.0f, 0.0f))
.castShadows(false)
// .castShadows(true)
.build(*_engine, _sun);
_scene->addEntity(_sun);
}
FilamentViewer::~FilamentViewer()
{
cleanup();
}
Renderer *FilamentViewer::getRenderer()
@@ -258,15 +270,15 @@ namespace polyvox
Log("Loading GLB at URI %s", uri);
if (_asset)
{
_asset->releaseSourceData();
_resourceLoader->evictResourceData();
_scene->removeEntities(_asset->getEntities(), _asset->getEntityCount());
_assetLoader->destroyAsset(_asset);
}
_asset = nullptr;
_animator = nullptr;
// if (_asset)
// {
// _asset->releaseSourceData();
// _resourceLoader->evictResourceData();
// _scene->removeEntities(_asset->getEntities(), _asset->getEntityCount());
// _assetLoader->destroyAsset(_asset);
// }
// _asset = nullptr;
// _animator = nullptr;
ResourceBuffer rbuf = _loadResource(uri);
@@ -302,6 +314,8 @@ namespace polyvox
// transformToUnitCube();
_asset->releaseSourceData();
Log("Successfully loaded GLB.");
}
@@ -310,22 +324,14 @@ namespace polyvox
Log("Loading GLTF at URI %s", uri);
if (_asset)
{
Log("Asset already exists");
_resourceLoader->evictResourceData();
_scene->removeEntities(_asset->getEntities(), _asset->getEntityCount());
_assetLoader->destroyAsset(_asset);
_freeResource(_assetBuffer);
}
_asset = nullptr;
_animator = nullptr;
_assetBuffer = _loadResource(uri);
ResourceBuffer rbuf = _loadResource(uri);
// Parse the glTF file and create Filament entities.
Log("Creating asset from JSON");
_asset = _assetLoader->createAssetFromJson((uint8_t *)_assetBuffer.data, _assetBuffer.size);
_asset = _assetLoader->createAssetFromJson((uint8_t *)rbuf.data, rbuf.size);
Log("Created asset from JSON");
if (!_asset)
@@ -336,13 +342,29 @@ namespace polyvox
Log("Loading relative resources");
loadResources(string(relativeResourcePath) + string("/"));
Log("Loaded relative resources");
// _asset->releaseSourceData();
_asset->releaseSourceData();
Log("Load complete for GLTF at URI %s", uri);
// transformToUnitCube();
}
///
/// Removes the currently loaded glTF/GLB asset from the scene and destroys
/// it, then clears all dependent state (animator and any in-flight morph /
/// embedded animation buffers) so render() no longer touches the freed asset.
/// Safe to call when no asset is loaded (logs and returns).
///
void FilamentViewer::removeAsset() {
if (!_asset) {
Log("No asset loaded, ignoring call.");
return;
}
// Order matters here: evict CPU-side resource data, detach the asset's
// entities from the scene, then destroy the asset itself.
_resourceLoader->evictResourceData();
_scene->removeEntities(_asset->getEntities(), _asset->getEntityCount());
_assetLoader->destroyAsset(_asset);
// Null out everything derived from the destroyed asset so later calls
// (render, animation updates) don't dereference stale pointers.
_asset = nullptr;
_animator = nullptr;
_morphAnimationBuffer = nullptr;
_embeddedAnimationBuffer = nullptr;
}
///
/// Sets the active camera to the GLTF camera specified by [name].
/// Blender export arranges cameras as follows
@@ -354,12 +376,15 @@ namespace polyvox
///
bool FilamentViewer::setCamera(const char *cameraName)
{
Log("Attempting to set camera to %s.", cameraName);
size_t count = _asset->getCameraEntityCount();
if(count == 0)
if(count == 0) {
Log("Failed, no cameras found in current asset.");
return false;
}
const utils::Entity* cameras = _asset->getCameraEntities();
Log("Found %d cameras in asset", count);
Log("%d cameras found in current asset", cameraName, count);
for(int i=0; i < count; i++) {
auto inst = _ncm->getInstance(cameras[i]);
@@ -376,8 +401,9 @@ namespace polyvox
camera->setLensProjection(_cameraFocalLength, aspect, kNearPlane, kFarPlane);
_view->setCamera(camera);
Log("Successfully set camera.");
return true;
}
return true;
}
Log("Unable to locate camera under name %s ", cameraName);
return false;
@@ -456,16 +482,6 @@ namespace polyvox
_freeResource(iblBuffer);
// Always add a direct light source since it is required for shadowing.
_sun = EntityManager::get().create();
LightManager::Builder(LightManager::Type::DIRECTIONAL)
.color(Color::cct(6500.0f))
.intensity(100000.0f)
.direction(math::float3(0.0f, 1.0f, 0.0f))
.castShadows(true)
.build(*_engine, _sun);
_scene->addEntity(_sun);
Log("Skybox/IBL load complete.");
}
@@ -492,7 +508,6 @@ namespace polyvox
_assetLoader->destroyAsset(_asset);
_materialProvider->destroyMaterials();
AssetLoader::destroy(&_assetLoader);
_freeResource(_assetBuffer);
};
void FilamentViewer::render()
@@ -503,12 +518,12 @@ namespace polyvox
return;
}
if (morphAnimationBuffer)
if (_morphAnimationBuffer)
{
updateMorphAnimation();
}
if(embeddedAnimationBuffer) {
if(_embeddedAnimationBuffer) {
updateEmbeddedAnimation();
}
@@ -545,35 +560,35 @@ namespace polyvox
void FilamentViewer::animateWeights(float *data, int numWeights, int numFrames, float frameLengthInMs)
{
Log("Making morph animation buffer with %d weights across %d frames and frame length %f ms ", numWeights, numFrames, frameLengthInMs);
morphAnimationBuffer = std::make_unique<MorphAnimationBuffer>(data, numWeights, numFrames, frameLengthInMs);
_morphAnimationBuffer = std::make_unique<MorphAnimationBuffer>(data, numWeights, numFrames, frameLengthInMs);
}
void FilamentViewer::updateMorphAnimation()
{
if(!morphAnimationBuffer) {
if(!_morphAnimationBuffer) {
return;
}
if (morphAnimationBuffer->frameIndex == -1) {
morphAnimationBuffer->frameIndex++;
morphAnimationBuffer->startTime = high_resolution_clock::now();
applyWeights(morphAnimationBuffer->frameData, morphAnimationBuffer->numWeights);
if (_morphAnimationBuffer->frameIndex == -1) {
_morphAnimationBuffer->frameIndex++;
_morphAnimationBuffer->startTime = high_resolution_clock::now();
applyWeights(_morphAnimationBuffer->frameData, _morphAnimationBuffer->numWeights);
}
else
{
duration<double, std::milli> dur = high_resolution_clock::now() - morphAnimationBuffer->startTime;
int frameIndex = static_cast<int>(dur.count() / morphAnimationBuffer->frameLengthInMs);
duration<double, std::milli> dur = high_resolution_clock::now() - _morphAnimationBuffer->startTime;
int frameIndex = static_cast<int>(dur.count() / _morphAnimationBuffer->frameLengthInMs);
if (frameIndex > morphAnimationBuffer->numFrames - 1)
if (frameIndex > _morphAnimationBuffer->numFrames - 1)
{
duration<double, std::milli> dur = high_resolution_clock::now() - morphAnimationBuffer->startTime;
Log("Morph animation completed in %f ms (%d frames at framerate %f), final frame was %d", dur.count(), morphAnimationBuffer->numFrames, 1000 / morphAnimationBuffer->frameLengthInMs, morphAnimationBuffer->frameIndex);
morphAnimationBuffer = nullptr;
} else if (frameIndex != morphAnimationBuffer->frameIndex) {
Log("Rendering frame %d (of a total %d)", frameIndex, morphAnimationBuffer->numFrames);
morphAnimationBuffer->frameIndex = frameIndex;
auto framePtrOffset = frameIndex * morphAnimationBuffer->numWeights;
applyWeights(morphAnimationBuffer->frameData + framePtrOffset, morphAnimationBuffer->numWeights);
duration<double, std::milli> dur = high_resolution_clock::now() - _morphAnimationBuffer->startTime;
Log("Morph animation completed in %f ms (%d frames at framerate %f), final frame was %d", dur.count(), _morphAnimationBuffer->numFrames, 1000 / _morphAnimationBuffer->frameLengthInMs, _morphAnimationBuffer->frameIndex);
_morphAnimationBuffer = nullptr;
} else if (frameIndex != _morphAnimationBuffer->frameIndex) {
Log("Rendering frame %d (of a total %d)", frameIndex, _morphAnimationBuffer->numFrames);
_morphAnimationBuffer->frameIndex = frameIndex;
auto framePtrOffset = frameIndex * _morphAnimationBuffer->numWeights;
applyWeights(_morphAnimationBuffer->frameData + framePtrOffset, _morphAnimationBuffer->numWeights);
}
}
}
@@ -582,33 +597,33 @@ namespace polyvox
if(index > _animator->getAnimationCount() - 1) {
Log("Asset does not contain an animation at index %d", index);
} else {
embeddedAnimationBuffer = make_unique<EmbeddedAnimationBuffer>(index, _animator->getAnimationDuration(index), loop);
_embeddedAnimationBuffer = make_unique<EmbeddedAnimationBuffer>(index, _animator->getAnimationDuration(index), loop);
}
}
void FilamentViewer::stopAnimation() {
// TODO - does this need to be threadsafe?
embeddedAnimationBuffer = nullptr;
_embeddedAnimationBuffer = nullptr;
}
void FilamentViewer::updateEmbeddedAnimation() {
duration<double> dur = duration_cast<duration<double>>(high_resolution_clock::now() - embeddedAnimationBuffer->lastTime);
duration<double> dur = duration_cast<duration<double>>(high_resolution_clock::now() - _embeddedAnimationBuffer->lastTime);
float startTime = 0;
if(!embeddedAnimationBuffer->hasStarted) {
embeddedAnimationBuffer->hasStarted = true;
embeddedAnimationBuffer->lastTime = high_resolution_clock::now();
} else if(dur.count() >= embeddedAnimationBuffer->duration) {
if(embeddedAnimationBuffer->loop) {
embeddedAnimationBuffer->lastTime = high_resolution_clock::now();
if(!_embeddedAnimationBuffer->hasStarted) {
_embeddedAnimationBuffer->hasStarted = true;
_embeddedAnimationBuffer->lastTime = high_resolution_clock::now();
} else if(dur.count() >= _embeddedAnimationBuffer->duration) {
if(_embeddedAnimationBuffer->loop) {
_embeddedAnimationBuffer->lastTime = high_resolution_clock::now();
} else {
embeddedAnimationBuffer = nullptr;
_embeddedAnimationBuffer = nullptr;
return;
}
} else {
startTime = dur.count();
}
_animator->applyAnimation(embeddedAnimationBuffer->animationIndex, startTime);
_animator->applyAnimation(_embeddedAnimationBuffer->animationIndex, startTime);
_animator->updateBoneMatrices();
}

View File

@@ -95,6 +95,7 @@ namespace polyvox {
void loadGlb(const char* const uri);
void loadGltf(const char* const uri, const char* relativeResourcePath);
void loadSkybox(const char* const skyboxUri, const char* const iblUri);
void removeAsset();
void updateViewportAndCameraProjection(int height, int width, float scaleFactor);
void render();
@@ -157,7 +158,7 @@ namespace polyvox {
AssetLoader* _assetLoader;
FilamentAsset* _asset = nullptr;
ResourceBuffer _assetBuffer;
// ResourceBuffer _assetBuffer;
NameComponentManager* _ncm;
Entity _sun;
@@ -181,8 +182,8 @@ namespace polyvox {
// animation flags;
bool isAnimating;
unique_ptr<MorphAnimationBuffer> morphAnimationBuffer;
unique_ptr<EmbeddedAnimationBuffer> embeddedAnimationBuffer;
unique_ptr<MorphAnimationBuffer> _morphAnimationBuffer;
unique_ptr<EmbeddedAnimationBuffer> _embeddedAnimationBuffer;
};

View File

@@ -17,6 +17,7 @@ abstract class FilamentController {
Future<List<String>> getTargetNames(String meshName);
Future<List<String>> getAnimationNames();
Future releaseSourceAssets();
Future removeAsset();
Future playAnimation(int index, {bool loop = false});
Future stopAnimation();
Future setCamera(String name);
@@ -122,6 +123,10 @@ class PolyvoxFilamentController extends FilamentController {
await _channel.invokeMethod("releaseSourceAssets");
}
/// Asks the platform side to remove (and destroy) the currently loaded
/// asset via the "removeAsset" method-channel call. The native
/// implementation is a no-op when no asset is loaded. Completes when the
/// platform call returns.
Future removeAsset() async {
await _channel.invokeMethod("removeAsset");
}
/// Forwards a zoom request to the platform side via the "zoom"
/// method-channel call, passing [z] through unchanged.
/// NOTE(review): the units/sign convention of [z] (delta vs. absolute,
/// in/out direction) is defined by the native handler — confirm there.
Future zoom(double z) async {
await _channel.invokeMethod("zoom", z);
}