+
+[![ImageLabelOutline Example][ImageLabelOutline]](./ImageLabelOutline.html "ImageLabelOutline")
+[![ImageMapper Example][ImageMapper]](./ImageMapper.html "2D orthogonal axis image mapper")
+[![ImageResliceMapper Example][ImageResliceMapper]](./ImageResliceMapper.html "GPU 2D reslice/oblique/MPR mapper")
+[![ImageCPRMapper Example][ImageCPRMapper]](./ImageCPRMapper.html "Curved Planar Reformat GPU mapper, stretched and straightened")
+[![VolumeOutline Example][VolumeOutline]](./VolumeOutline.html "VolumeOutline")
+
+
+
+[ImageLabelOutline]: ../docs/gallery/ImageLabelOutline.jpg
+[ImageCPRMapper]: ../docs/gallery/ImageCPRMapper.jpg
+[ImageResliceMapper]: ../docs/gallery/ImageResliceMapper.gif
+[ImageMapper]: ../docs/gallery/ImageMapper.jpg
+[VolumeOutline]: ../docs/gallery/VolumeOutline.jpg
+
+## Volume Rendering
+
+
+
+[![VolumeClipPlane Example][VolumeClipPlane]](./VolumeClipPlane.html "VolumeClipPlane")
+[![VolumeContour Example][VolumeContourgif]](./VolumeContour.html "VolumeContour")
+[![VolumeMapperBlendModes Example][VolumeMapperBlendModes]](./VolumeMapperBlendModes.html "VolumeMapperBlendModes")
+[![VolumeMapperLightAndShadow Example][VolumeMapperLightAndShadow]](./VolumeMapperLightAndShadow.html "VolumeMapperLightAndShadow")
+[![VolumeMapperParallelProjection Example][VolumeMapperParallelProjection]](./VolumeMapperParallelProjection.html "VolumeMapperParallelProjection")
+[![VolumeRenderingWithPolyData Example][VolumeRenderingWithPolyData]](./VolumeRenderingWithPolyData.html "VolumeRenderingWithPolyData")
+[![VolumeMapper Example][VolumeMapper]](./VolumeMapper.html "3D volume ray cast mapper with volumetric scattering")
+[![LabelmapEdgeProjection Example][LabelmapEdgeProjection]](./LabelmapEdgeProjection.html "Labelmap edge projection in the volume")
+
+
+
+[VolumeClipPlane]: ../docs/gallery/VolumeClipPlane.jpg
+[VolumeContourgif]: ../docs/gallery/VolumeContour.gif
+[VolumeMapperBlendModes]: ../docs/gallery/VolumeMapperBlendModes.gif
+[VolumeMapperLightAndShadow]: ../docs/gallery/VolumeMapperLightAndShadow.jpg
+[VolumeMapperParallelProjection]: ../docs/gallery/VolumeMapperParallelProjection.jpg
+[VolumeRenderingWithPolyData]: ../docs/gallery/VolumeRenderingWithPolyData.jpg
+[VolumeMapper]: ../docs/gallery/VolumeMapper.jpg
+[LabelmapEdgeProjection]: ../docs/gallery/LabelmapEdgeProjection.gif
+
+
+## Interaction/Picking/Selecting
+
+
+
+[![DeviceOrientationToCamera Example][DeviceOrientationToCamera]](./DeviceOrientationToCamera.html "DeviceOrientationToCamera")
+[![InteractorStyleHMDXR Example][InteractorStyleHMDXR]](./InteractorStyleHMDXR.html "InteractorStyleHMDXR")
+[![InteractorStyleManipulator Example][InteractorStyleManipulator]](./InteractorStyleManipulator.html "InteractorStyleManipulator")
+[![InteractorStyleTrackballCamera Example][InteractorStyleTrackballCamera]](./InteractorStyleTrackballCamera.html "InteractorStyleTrackballCamera")
+[![InteractorStyleUnicam Example][InteractorStyleUnicam]](./InteractorStyleUnicam.html "InteractorStyleUnicam")
+[![KeyboardCameraManipulator Example][KeyboardCameraManipulator]](./KeyboardCameraManipulator.html "KeyboardCameraManipulator")
+[![MouseRangeManipulator Example][MouseRangeManipulator]](./MouseRangeManipulator.html "MouseRangeManipulator")
+[![PiecewiseGaussianWidget Example][PiecewiseGaussianWidget]](./PiecewiseGaussianWidget.html "PiecewiseGaussianWidget")
+[![TimeStepBasedAnimationHandler Example][TimeStepBasedAnimationHandler]](./TimeStepBasedAnimationHandler.html "TimeStepBasedAnimationHandler")
+[![CellPicker Example][CellPicker]](./CellPicker.html "CPU cell picker/selector")
+[![PointPicker Example][PointPicker]](./PointPicker.html "CPU point picker/selector")
+[![HardwareSelector Example][HardwareSelector]](./HardwareSelector.html "GPU point/cell picker/selector with properties")
+
+
+
+[DeviceOrientationToCamera]: ../docs/gallery/DeviceOrientationToCamera.jpg
+[InteractorStyleHMDXR]: ../docs/gallery/InteractorStyleHMDXR.jpg
+[InteractorStyleManipulator]: ../docs/gallery/InteractorStyleManipulator.jpg
+[InteractorStyleTrackballCamera]: ../docs/gallery/InteractorStyleTrackballCamera.jpg
+[InteractorStyleUnicam]: ../docs/gallery/InteractorStyleUnicam.jpg
+[KeyboardCameraManipulator]: ../docs/gallery/KeyboardCameraManipulator.jpg
+[MouseRangeManipulator]: ../docs/gallery/MouseRangeManipulator.jpg
+[PiecewiseGaussianWidget]: ../docs/gallery/PiecewiseGaussianWidget.jpg
+[TimeStepBasedAnimationHandler]: ../docs/gallery/TimeStepBasedAnimationHandler.gif
+[CellPicker]: ../docs/gallery/CellPicker.jpg
+[PointPicker]: ../docs/gallery/PointPicker.jpg
+[HardwareSelector]: ../docs/gallery/HardwareSelector.jpg
+
+## Widgets
+
+
+
+[![AngleWidget Example][AngleWidget]](./AngleWidget.html "Angle (radian, degree) widget example")
+[![Box Example][Box]](./Box.html "Box")
+[![ImageCroppingWidget Example][ImageCroppingWidget]](./ImageCroppingWidget.html "Crop/Clip volume rendering with a bounding box/cube/orthogonal planes")
+[![ImplicitPlaneWidget Example][ImplicitPlaneWidget]](./ImplicitPlaneWidget.html "Translate and orient an implicit plane with normal and origin")
+[![InteractiveOrientationWidget Example][InteractiveOrientationWidget]](./InteractiveOrientationWidget.html "Corner coordinate system orientation widget")
+[![LabelWidget Example][LabelWidget]](./LabelWidget.html "Place a label widget and edit its text size and color")
+[![LineWidget Example][LineWidget]](./LineWidget.html "Place and edit line/distance widget with handles")
+[![PaintWidget Example][PaintWidget]](./PaintWidget.html "Draw strokes and create rectangle, square, ellipse and disk 2D widgets")
+[![PolyLineWidget Example][PolyLineWidget]](./PolyLineWidget.html "Place multiple connected handles with text")
+[![ResliceCursorWidget Example][ResliceCursorWidget]](./ResliceCursorWidget.html "Axial, Coronal and Sagittal MPR/Oblique/Reformatted/Resliced/Slab/MIP views")
+[![ShapeWidget Example][ShapeWidget]](./ShapeWidget.html "2D shape widgets with text information")
+[![SphereWidget Example][SphereWidget]](./SphereWidget.html "2D sphere widget controlled with radius")
+[![SplineWidget Example][SplineWidget]](./SplineWidget.html "Draw an open or closed (triangulated) sharp/smooth polygon widget")
+
+
+
+[AngleWidget]: ../docs/gallery/AngleWidget.png
+[ImageCroppingWidget]: ../docs/gallery/ImageCroppingWidget.jpg
+[ImplicitPlaneWidget]: ../docs/gallery/ImplicitPlaneWidget.png
+[InteractiveOrientationWidget]: ../docs/gallery/InteractiveOrientationWidget.png
+[LabelWidget]: ../docs/gallery/LabelWidget.png
+[LineWidget]: ../docs/gallery/LineWidget.png
+[PaintWidget]: ../docs/gallery/PaintWidget.gif
+[PolyLineWidget]: ../docs/gallery/PolyLineWidget.png
+[ResliceCursorWidget]: ../docs/gallery/ResliceCursorWidget.gif
+[ShapeWidget]: ../docs/gallery/ShapeWidget.png
+[SphereWidget]: ../docs/gallery/SphereWidget.jpg
+[SplineWidget]: ../docs/gallery/SplineWidget.gif
+[Box]: ../docs/gallery/Box.jpg
+
+## Connectivity
+
+
+
+[![AR Example][ARWithLogo]](./AR.html "AR with WebXR")
+[![VR Example][VRWithLogo]](./VR.html "VR with WebXR")
+[![LookingGlass Example][LookingGlassWithLogo]](./LookingGlass.html "Render scene into a LookingGlass device")
+[![ItkWasmVolume Example][ItkWasmVolumeWithLogo]](./ItkWasmVolume.html "ItkWasmVolume")
+[![RemoteView Example][RemoteViewWithLogos]](./RemoteView.html "Connect to a VTK or ParaView Python backend server via WebSockets")
+[![ImageStream Example][ImageStreamWithLogos]](./ImageStream.html "Stream images from a ParaView Python backend server via WebSockets beneath a local vtk.js rendering")
+
+
+
+[ARWithLogo]: ../docs/gallery/ArConeWithLogo.jpg
+[VRWithLogo]: ../docs/gallery/VrConeWithLogo.jpg
+[LookingGlassWithLogo]: ../docs/gallery/LookingGlassConeWithLogo.jpg
+[ItkWasmVolumeWithLogo]: ../docs/gallery/ItkWasmVolumeWithLogo.jpg
+[RemoteViewWithLogos]: ../docs/gallery/RemoteViewWithLogos.jpg
+[ImageStreamWithLogos]: ../docs/gallery/ImageStreamWithLogos.jpg
diff --git a/Sources/IO/Geometry/GLTFImporter/Animations.js b/Sources/IO/Geometry/GLTFImporter/Animations.js
new file mode 100644
index 00000000000..c413da8b4e8
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Animations.js
@@ -0,0 +1,253 @@
+import macro from 'vtk.js/Sources/macros';
+import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
+import { quat, vec3 } from 'gl-matrix';
+
+const { vtkDebugMacro, vtkWarningMacro } = macro;
+
+/**
+ * Create an animation channel
+ * @param {*} glTFChannel
+ * @returns
+ */
+function createAnimationChannel(glTFChannel, glTFSamplers) {
+ const path = glTFChannel.target.path;
+ const node = glTFChannel.target.node;
+
+ function applyAnimation(value) {
+ let axisAngle;
+ let w;
+ let nq;
+ switch (path) {
+ case 'translation':
+ node.setPosition(value[0], value[1], value[2]);
+ break;
+ case 'rotation':
+ // Convert quaternion to axis-angle representation
+ nq = quat.normalize(quat.create(), value);
+ axisAngle = new Float64Array(3);
+ w = quat.getAxisAngle(axisAngle, nq);
+ // Apply rotation using rotateWXYZ
+ node.rotateWXYZ(
+ vtkMath.degreesFromRadians(w),
+ axisAngle[0],
+ axisAngle[1],
+ axisAngle[2]
+ );
+ break;
+ case 'scale':
+ node.setScale(value[0], value[1], value[2]);
+ break;
+ default:
+ vtkWarningMacro(`Unsupported animation path: ${path}`);
+ }
+ }
+
+ function animate(currentTime) {
+ const sampler = glTFSamplers[glTFChannel.sampler];
+ const value = sampler.evaluate(currentTime, path);
+ applyAnimation(value);
+ }
+
+ return { ...glTFChannel, animate };
+}
+
+/**
+ * Create an animation sampler
+ * @param {glTFSampler} glTFSampler
+ * @returns
+ */
+function createAnimationSampler(glTFSampler) {
+ let lastKeyframeIndex = 0;
+
+ function findKeyframes(time) {
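+    // Resume the scan from the previously found keyframe; efficient when time increases monotonically.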
+ let i1 = lastKeyframeIndex;
+ while (i1 < glTFSampler.input.length - 1 && glTFSampler.input[i1] <= time) {
+ i1++;
+ }
+ const i0 = Math.max(0, i1 - 1);
+ lastKeyframeIndex = i0;
+ return [glTFSampler.input[i0], glTFSampler.input[i1], i0, i1];
+ }
+
+  function stepInterpolate(path, i0) {
+    // Rotations are quaternions (4 components); translation and scale are vec3.
+    const components = path === 'rotation' ? 4 : 3;
+    const startIndex = i0 * components;
+    const v0 = new Array(components);
+    for (let i = 0; i < components; ++i) {
+      v0[i] = glTFSampler.output[startIndex + i];
+    }
+
+    return v0;
+  }
+
+  function linearInterpolate(path, t0, t1, i0, i1, t) {
+    const ratio = (t - t0) / (t1 - t0);
+    // Rotations are quaternions (4 components); translation and scale are vec3.
+    const components = path === 'rotation' ? 4 : 3;
+    const startIndex = i0 * components;
+    const endIndex = i1 * components;
+
+    const v0 = new Array(components);
+    const v1 = new Array(components);
+    for (let i = 0; i < components; ++i) {
+      v0[i] = glTFSampler.output[startIndex + i];
+      v1[i] = glTFSampler.output[endIndex + i];
+    }
+
+ switch (path) {
+ case 'translation':
+ case 'scale':
+ return vec3.lerp(vec3.create(), v0, v1, ratio);
+ case 'rotation':
+ return quat.slerp(quat.create(), v0, v1, ratio);
+ default:
+ vtkWarningMacro(`Unsupported animation path: ${path}`);
+ return null;
+ }
+ }
+
+ function cubicSplineInterpolate(path, t0, t1, i0, i1, time) {
+ const dt = t1 - t0;
+ const t = (time - t0) / dt;
+ const t2 = t * t;
+ const t3 = t2 * t;
+
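+    // glTF CUBICSPLINE output stores [in-tangent, value, out-tangent] triplets per
+    // keyframe; tangents are scaled by the keyframe interval dt.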
+ const p0 = glTFSampler.output[i0 * 3 + 1];
+ const m0 = dt * glTFSampler.output[i0 * 3 + 2];
+ const p1 = glTFSampler.output[i1 * 3 + 1];
+ const m1 = dt * glTFSampler.output[i1 * 3];
+
+ if (Array.isArray(p0)) {
+ return p0.map((v, j) => {
+ const a = 2 * t3 - 3 * t2 + 1;
+ const b = t3 - 2 * t2 + t;
+ const c = -2 * t3 + 3 * t2;
+ const d = t3 - t2;
+ return a * v + b * m0[j] + c * p1[j] + d * m1[j];
+ });
+ }
+
+ const a = 2 * t3 - 3 * t2 + 1;
+ const b = t3 - 2 * t2 + t;
+ const c = -2 * t3 + 3 * t2;
+ const d = t3 - t2;
+ return a * p0 + b * m0 + c * p1 + d * m1;
+ }
+
+ function evaluate(time, path) {
+ const [t0, t1, i0, i1] = findKeyframes(time);
+
+ let result;
+
+ switch (glTFSampler.interpolation) {
+ case 'STEP':
+ result = stepInterpolate(path, i0);
+ break;
+ case 'LINEAR':
+ result = linearInterpolate(path, t0, t1, i0, i1, time);
+ break;
+ case 'CUBICSPLINE':
+ result = cubicSplineInterpolate(path, t0, t1, i0, i1, time);
+ break;
+ default:
+ throw new Error(
+ `Unknown interpolation method: ${glTFSampler.interpolation}`
+ );
+ }
+ return result;
+ }
+
+ return { ...glTFSampler, evaluate };
+}
+
+/**
+ * Create an animation
+ * @param {glTFAnimation} glTFAnimation
+ * @returns
+ */
+function createAnimation(glTFAnimation, nodes) {
+ glTFAnimation.samplers = glTFAnimation.samplers.map((sampler) =>
+ createAnimationSampler(sampler)
+ );
+
+ glTFAnimation.channels = glTFAnimation.channels.map((channel) => {
+ channel.target.node = nodes.get(`node-${channel.target.node}`);
+ return createAnimationChannel(channel, glTFAnimation.samplers);
+ });
+
+ function update(currentTime) {
+ glTFAnimation.channels.forEach((channel) => channel.animate(currentTime));
+ }
+
+ return { ...glTFAnimation, update };
+}
+
+/**
+ * Create an animation mixer
+ * @param {*} nodes
+ * @returns
+ */
+function createAnimationMixer(nodes, accessors) {
+ const animations = new Map();
+ const activeAnimations = new Map();
+
+ function addAnimation(glTFAnimation) {
+    const animation = createAnimation(glTFAnimation, nodes, accessors);
+    animations.set(glTFAnimation.id, animation);
+ vtkDebugMacro(`Animation "${glTFAnimation.id}" added to mixer`);
+ }
+
+ function play(name, weight = 1) {
+ if (!animations.has(name)) {
+ vtkWarningMacro(`Animation "${name}" not found in mixer`);
+ return;
+ }
+ activeAnimations.set(name, {
+ animation: animations.get(name),
+ weight,
+ time: 0,
+ });
+ vtkDebugMacro(`Playing animation "${name}" with weight ${weight}`);
+ }
+
+ function stop(name) {
+ if (activeAnimations.delete(name)) {
+      vtkDebugMacro(`Stopped animation "${name}"`);
+ } else {
+ vtkWarningMacro(`Animation "${name}" was not playing`);
+ }
+ }
+
+ function stopAll() {
+ activeAnimations.clear();
+    vtkDebugMacro('Stopped all animations');
+ }
+
+ function update(deltaTime) {
+ // Normalize weights
+ const totalWeight = Array.from(activeAnimations.values()).reduce(
+ (sum, { weight }) => sum + weight,
+ 0
+ );
+
+ activeAnimations.forEach(({ animation, weight, time }, name) => {
+ const normalizedWeight = totalWeight > 0 ? weight / totalWeight : 0;
+ const newTime = time + deltaTime;
+ activeAnimations.set(name, { animation, weight, time: newTime });
+
+ vtkDebugMacro(
+ `Updating animation "${name}" at time ${newTime.toFixed(
+ 3
+ )} with normalized weight ${normalizedWeight.toFixed(3)}`
+ );
+
+ animation.update(newTime, normalizedWeight);
+ });
+ }
+
+ return { addAnimation, play, stop, stopAll, update };
+}
+
+export {
+ createAnimation,
+ createAnimationChannel,
+ createAnimationMixer,
+ createAnimationSampler,
+};
diff --git a/Sources/IO/Geometry/GLTFImporter/Constants.js b/Sources/IO/Geometry/GLTFImporter/Constants.js
new file mode 100644
index 00000000000..e86353e40f2
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Constants.js
@@ -0,0 +1,85 @@
+export const BINARY_HEADER_MAGIC = 'glTF';
+export const BINARY_HEADER_LENGTH = 12;
+export const BINARY_CHUNK_TYPES = { JSON: 0x4e4f534a, BIN: 0x004e4942 };
+export const BINARY_HEADER_INTS = 3;
+export const BINARY_CHUNK_HEADER_INTS = 2;
+
+export const MIN_LIGHT_ATTENUATION = 0.01;
+
+export const COMPONENTS = {
+ SCALAR: 1,
+ VEC2: 2,
+ VEC3: 3,
+ VEC4: 4,
+ MAT2: 4,
+ MAT3: 9,
+ MAT4: 16,
+};
+
+export const BYTES = {
+ 5120: 1, // BYTE
+ 5121: 1, // UNSIGNED_BYTE
+ 5122: 2, // SHORT
+ 5123: 2, // UNSIGNED_SHORT
+ 5125: 4, // UNSIGNED_INT
+ 5126: 4, // FLOAT
+};
+
+export const MODES = {
+ GL_POINTS: 0,
+ GL_LINES: 1,
+ GL_LINE_LOOP: 2,
+ GL_LINE_STRIP: 3,
+ GL_TRIANGLES: 4,
+ GL_TRIANGLE_STRIP: 5,
+ GL_TRIANGLE_FAN: 6,
+};
+
+export const ARRAY_TYPES = {
+ 5120: Int8Array,
+ 5121: Uint8Array,
+ 5122: Int16Array,
+ 5123: Uint16Array,
+ 5125: Uint32Array,
+ 5126: Float32Array,
+};
+
+export const GL_SAMPLER = {
+ NEAREST: 9728,
+ LINEAR: 9729,
+ NEAREST_MIPMAP_NEAREST: 9984,
+ LINEAR_MIPMAP_NEAREST: 9985,
+ NEAREST_MIPMAP_LINEAR: 9986,
+ LINEAR_MIPMAP_LINEAR: 9987,
+ REPEAT: 10497,
+ CLAMP_TO_EDGE: 33071,
+ MIRRORED_REPEAT: 33648,
+ TEXTURE_MAG_FILTER: 10240,
+ TEXTURE_MIN_FILTER: 10241,
+ TEXTURE_WRAP_S: 10242,
+ TEXTURE_WRAP_T: 10243,
+};
+
+export const DEFAULT_SAMPLER = {
+ magFilter: GL_SAMPLER.NEAREST,
+ minFilter: GL_SAMPLER.LINEAR_MIPMAP_LINEAR,
+ wrapS: GL_SAMPLER.REPEAT,
+ wrapT: GL_SAMPLER.REPEAT,
+};
+
+export const SEMANTIC_ATTRIBUTE_MAP = {
+ NORMAL: 'normal',
+ POSITION: 'position',
+ TEXCOORD_0: 'texcoord0',
+ TEXCOORD_1: 'texcoord1',
+ WEIGHTS_0: 'weight',
+ JOINTS_0: 'joint',
+ COLOR_0: 'color',
+ TANGENT: 'tangent',
+};
+
+export const ALPHA_MODE = {
+ OPAQUE: 'OPAQUE',
+ MASK: 'MASK',
+ BLEND: 'BLEND',
+};
diff --git a/Sources/IO/Geometry/GLTFImporter/Decoder.js b/Sources/IO/Geometry/GLTFImporter/Decoder.js
new file mode 100644
index 00000000000..7a4011f1899
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Decoder.js
@@ -0,0 +1,78 @@
+import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper';
+import {
+ BINARY_CHUNK_TYPES,
+ BINARY_CHUNK_HEADER_INTS,
+ BINARY_HEADER_INTS,
+ BINARY_HEADER_LENGTH,
+ BINARY_HEADER_MAGIC,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+
+function getChunkInfo(headerStart, data) {
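+  // Each chunk starts with two uint32 values: the chunk byte length and the chunk type.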
+ const header = new Uint32Array(data, headerStart, BINARY_CHUNK_HEADER_INTS);
+ const chunkStart = headerStart + BINARY_CHUNK_HEADER_INTS * 4;
+ const chunkLength = header[0];
+ const chunkType = header[1];
+ return { start: chunkStart, length: chunkLength, type: chunkType };
+}
+
+function getAllChunkInfos(data) {
+ const infos = [];
+ let chunkStart = BINARY_HEADER_INTS * 4;
+ while (chunkStart < data.byteLength) {
+ const chunkInfo = getChunkInfo(chunkStart, data);
+ infos.push(chunkInfo);
+ chunkStart += chunkInfo.length + BINARY_CHUNK_HEADER_INTS * 4;
+ }
+ return infos;
+}
+
+function getJsonFromChunk(chunkInfo, data) {
+ const chunkLength = chunkInfo.length;
+ const jsonStart = (BINARY_HEADER_INTS + BINARY_CHUNK_HEADER_INTS) * 4;
+ const jsonSlice = new Uint8Array(data, jsonStart, chunkLength);
+ const stringBuffer = BinaryHelper.arrayBufferToString(jsonSlice);
+ return JSON.parse(stringBuffer);
+}
+
+function getBufferFromChunk(chunkInfo, data) {
+ return data.slice(chunkInfo.start, chunkInfo.start + chunkInfo.length);
+}
+
+function parseGLB(data) {
+ let json;
+ const buffers = [];
+
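+  // A GLB file starts with a 12-byte header (magic, version, total length),
+  // followed by one or more chunks, each preceded by an 8-byte chunk header.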
+ const headerView = new DataView(data, 0, BINARY_HEADER_LENGTH);
+
+ const header = {
+ magic: BinaryHelper.arrayBufferToString(new Uint8Array(data, 0, 4)),
+ version: headerView.getUint32(4, true),
+ length: headerView.getUint32(8, true),
+ };
+
+ if (header.magic !== BINARY_HEADER_MAGIC) {
+ throw new Error('Unsupported glTF-Binary header.');
+ } else if (header.version < 2.0) {
+ throw new Error('Unsupported legacy binary file detected.');
+ }
+
+ const chunkInfos = getAllChunkInfos(data);
+
+ chunkInfos.forEach((chunkInfo) => {
+ if (chunkInfo.type === BINARY_CHUNK_TYPES.JSON && !json) {
+ json = getJsonFromChunk(chunkInfo, data);
+ } else if (chunkInfo.type === BINARY_CHUNK_TYPES.BIN) {
+ buffers.push(getBufferFromChunk(chunkInfo, data));
+ }
+ });
+
+ if (!json) {
+ throw new Error('glTF-Binary: JSON content not found.');
+ }
+  if (!buffers.length) {
+ throw new Error('glTF-Binary: Binary chunk not found.');
+ }
+ return { json, buffers };
+}
+
+export default parseGLB;
diff --git a/Sources/IO/Geometry/GLTFImporter/Extensions.js b/Sources/IO/Geometry/GLTFImporter/Extensions.js
new file mode 100644
index 00000000000..3075fcc8187
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Extensions.js
@@ -0,0 +1,103 @@
+import macro from 'vtk.js/Sources/macros';
+import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
+import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader';
+import vtkLight from 'vtk.js/Sources/Rendering/Core/Light';
+
+import { MIN_LIGHT_ATTENUATION } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+
+const { vtkWarningMacro } = macro;
+
+/**
+ * Handles the KHR_materials_unlit extension.
+ *
+ * @param {object} extension - The KHR_materials_unlit extension object.
+ * @param {vtkProperty} property - The vtkProperty instance to update.
+ */
+export function handleKHRMaterialsUnlit(extension, property) {
+ property.setLighting(true);
+}
+
+/**
+ * Handles the KHR_materials_ior extension.
+ *
+ * @param {object} extension - The KHR_materials_ior extension object.
+ * @param {vtkProperty} property - The vtkProperty instance to update.
+ */
+export function handleKHRMaterialsIor(extension, property) {
+ property.setBaseIOR(extension.ior);
+}
+
+/**
+ * Handles the KHR_materials_specular extension.
+ * @param {object} extension - The KHR_materials_specular extension object.
+ * @param {vtkProperty} property - The vtkProperty instance to update.
+ */
+export function handleKHRMaterialsSpecular(extension, property) {
+ property.setSpecular(extension.specularFactor);
+ property.setSpecularColor(extension.specularColorFactor);
+}
+
+/**
+ * Handles the KHR_lights_punctual extension.
+ *
+ * @param {object} extension - The KHR_lights_punctual extension object.
+ * @param {mat4} transformMatrix - The node's world transform to apply to the light.
+ * @param {object} model - The model object whose lights map receives the created vtkLight.
+ */
+export function handleKHRLightsPunctual(extension, transformMatrix, model) {
+ const { light } = extension;
+
+ const { color, intensity, range, spot, type } = light;
+
+ const l = vtkLight.newInstance({
+ color: color || [1, 1, 1],
+ intensity: intensity || 1.0,
+ });
+
+ // Apply the global transform to the light
+ l.setTransformMatrix(transformMatrix);
+
+ // Handle range
+ if (range > 0) {
+ // Set quadratic values to get attenuation(range) ~= MIN_LIGHT_ATTENUATION
+ l.setAttenuationValues(1, 0, 1.0 / (range * range * MIN_LIGHT_ATTENUATION));
+ }
+
+ switch (type) {
+ case 'directional':
+ l.setPositional(false);
+ break;
+ case 'point':
+ l.setPositional(true);
+ l.setConeAngle(90);
+ break;
+ case 'spot':
+ l.setPositional(true);
+ l.setConeAngle(vtkMath.radiansFromDegrees(spot.outerConeAngle));
+ break;
+ default:
+ vtkWarningMacro(`Unsupported light type: ${type}`);
+ }
+
+ model.lights.set(light.name, l);
+}
+
+/**
+ * Handles the KHR_draco_mesh_compression extension.
+ *
+ * @param {object} extension - The KHR_draco_mesh_compression extension object.
+ */
+export async function handleKHRDracoMeshCompression(extension) {
+ const reader = vtkDracoReader.newInstance();
+ reader.parse(extension.bufferView);
+ return reader.getOutputData();
+}
+
+/**
+ * Handles the KHR_materials_variants extension.
+ *
+ * @param {object} extension - The KHR_materials_variants extension object.
+ * @param {object} model - The model object to update with variant information.
+ */
+export function handleKHRMaterialsVariants(extension, model) {
+ model.variants = extension.variants.map((v) => v.name);
+}
diff --git a/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js b/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js
new file mode 100644
index 00000000000..7c48f871149
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker.js
@@ -0,0 +1,35 @@
+import registerWebworker from 'webworker-promise/lib/register';
+
+/**
+ * Decodes an image buffer off the main thread and optionally expands a single
+ * channel (r, g or b) into a grayscale image.
+ * @param {ArrayBuffer} imageBuffer
+ * @param {string} mimeType
+ * @param {string} channel
+ * @returns {Promise<{ bitmap: ImageData }>}
+ */
+registerWebworker(async ({ imageBuffer, mimeType, channel }) => {
+ const channelsMap = {
+ r: 0,
+ g: 1,
+ b: 2,
+ };
+
+ const blob = new Blob([imageBuffer], { type: mimeType });
+ const img = await createImageBitmap(blob);
+ const canvas = new OffscreenCanvas(img.width, img.height);
+ const ctx = canvas.getContext('2d');
+
+ ctx.drawImage(img, 0, 0, img.width, img.height);
+ const bitmap = ctx.getImageData(0, 0, img.width, img.height);
+
+ if (channel) {
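+    // ORM textures pack occlusion (R), roughness (G) and metallic (B) into one
+    // image; replicate the requested channel across RGB to get a grayscale texture.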
+ const idx = channelsMap[channel];
+ for (let i = 0; i < bitmap.data.length; i += 4) {
+ const channelValue = bitmap.data[i + idx];
+ bitmap.data[i] = channelValue; // red channel
+ bitmap.data[i + 1] = channelValue; // green channel
+ bitmap.data[i + 2] = channelValue; // blue channel
+ }
+ }
+ return { bitmap };
+});
diff --git a/Sources/IO/Geometry/GLTFImporter/Parser.js b/Sources/IO/Geometry/GLTFImporter/Parser.js
new file mode 100644
index 00000000000..3aa87b00702
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Parser.js
@@ -0,0 +1,505 @@
+/* eslint-disable guard-for-in */
+/* eslint-disable no-restricted-syntax */
+/* eslint-disable class-methods-use-this */
+import macro from 'vtk.js/Sources/macros';
+
+import {
+ ALPHA_MODE,
+ BYTES,
+ COMPONENTS,
+ DEFAULT_SAMPLER,
+ GL_SAMPLER,
+ MODES,
+ SEMANTIC_ATTRIBUTE_MAP,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+
+import {
+ getAccessorArrayTypeAndLength,
+ getGLEnumFromSamplerParameter,
+ resolveUrl,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils';
+
+const { vtkDebugMacro, vtkWarningMacro } = macro;
+
+class GLTFParser {
+ constructor(glTF, options = {}) {
+ const { json, baseUri = '' } = glTF;
+
+ this.glTF = glTF;
+ this.options = options;
+ this.baseUri = baseUri;
+ this.json = json;
+ this.extensions = json.extensions || {};
+ this.extensionsUsed = json.extensionsUsed || [];
+ }
+
+ async parse() {
+ const buffers = this.json.buffers || [];
+ this.buffers = new Array(buffers.length).fill(null);
+
+ const images = this.json.images || [];
+ this.images = new Array(images.length).fill({});
+ await this.loadBuffers();
+ await this.loadImages();
+ this.resolveTree();
+
+ return this.glTF.json;
+ }
+
+ resolveTree() {
+ this.json.scenes = this.json.scenes?.map((scene, idx) =>
+ this.resolveScene(scene, idx)
+ );
+
+ this.json.cameras = this.json.cameras?.map((camera, idx) =>
+ this.resolveCamera(camera, idx)
+ );
+
+ this.json.bufferViews = this.json.bufferViews?.map((bufView, idx) =>
+ this.resolveBufferView(bufView, idx)
+ );
+
+ this.json.images = this.json.images?.map((image, idx) =>
+ this.resolveImage(image, idx)
+ );
+
+ this.json.samplers = this.json.samplers?.map((sampler, idx) =>
+ this.resolveSampler(sampler, idx)
+ );
+
+ this.json.textures = this.json.textures?.map((texture, idx) =>
+ this.resolveTexture(texture, idx)
+ );
+
+ this.json.accessors = this.json.accessors?.map((accessor, idx) =>
+ this.resolveAccessor(accessor, idx)
+ );
+
+ this.json.materials = this.json.materials?.map((material, idx) =>
+ this.resolveMaterial(material, idx)
+ );
+
+ this.json.meshes = this.json.meshes?.map((mesh, idx) =>
+ this.resolveMesh(mesh, idx)
+ );
+
+ this.json.nodes = this.json.nodes?.map((node, idx) =>
+ this.resolveNode(node, idx)
+ );
+
+ this.json.skins = this.json.skins?.map((skin, idx) =>
+ this.resolveSkin(skin, idx)
+ );
+
+ this.json.animations = this.json.animations?.map((animation, idx) =>
+ this.resolveAnimation(animation, idx)
+ );
+ }
+
+ get(array, index) {
+ // check if already resolved
+ if (typeof index === 'object') {
+ return index;
+ }
+ const object = this.json[array] && this.json[array][index];
+ if (!object) {
+ vtkWarningMacro(`glTF file error: Could not find ${array}[${index}]`);
+ }
+ return object;
+ }
+
+ resolveScene(scene, index) {
+ scene.id = scene.id || `scene-${index}`;
+ scene.nodes = (scene.nodes || []).map((node) => this.get('nodes', node));
+ return scene;
+ }
+
+ resolveNode(node, index) {
+ node.id = node.id || `node-${index}`;
+ if (node.children) {
+ node.children = node.children.map((child) => this.get('nodes', child));
+ }
+ if (node.mesh !== undefined) {
+ node.mesh = this.get('meshes', node.mesh);
+ } else if (node.meshes !== undefined && node.meshes.length) {
+ node.mesh = node.meshes.reduce(
+ (accum, meshIndex) => {
+ const mesh = this.get('meshes', meshIndex);
+ accum.id = mesh.id;
+ accum.primitives = accum.primitives.concat(mesh.primitives);
+ return accum;
+ },
+ { primitives: [] }
+ );
+ }
+ if (node.camera !== undefined) {
+ node.camera = this.get('cameras', node.camera);
+ }
+ if (node.skin !== undefined) {
+ node.skin = this.get('skins', node.skin);
+ }
+
+ // Fill punctual lights objects
+ if (node.extensions?.KHR_lights_punctual) {
+ node.extensions.KHR_lights_punctual.light =
+ this.extensions?.KHR_lights_punctual.lights[
+ node.extensions.KHR_lights_punctual.light
+ ];
+ }
+ return node;
+ }
+
+ resolveSkin(skin, index) {
+ skin.id = skin.id || `skin-${index}`;
+ skin.inverseBindMatrices = this.get('accessors', skin.inverseBindMatrices);
+ return skin;
+ }
+
+ resolveMesh(mesh, index) {
+ mesh.id = mesh.id || `mesh-${index}`;
+ if (mesh.primitives) {
+ mesh.primitives = mesh.primitives.map((primitive, idx) => {
+ const attributes = primitive.attributes;
+ primitive.name = `submesh-${idx}`;
+ primitive.attributes = {};
+ for (const attribute in attributes) {
+ const attr = SEMANTIC_ATTRIBUTE_MAP[attribute];
+ primitive.attributes[attr] = this.get(
+ 'accessors',
+ attributes[attribute]
+ );
+ }
+ if (primitive.indices !== undefined) {
+ primitive.indices = this.get('accessors', primitive.indices);
+ }
+ if (primitive.material !== undefined) {
+ primitive.material = this.get('materials', primitive.material);
+ }
+ if (primitive.mode === undefined) {
+ primitive.mode = MODES.GL_TRIANGLES; // Default one
+ }
+
+ if (primitive.extensions?.KHR_draco_mesh_compression) {
+ vtkDebugMacro('Using Draco mesh compression');
+ const bufferView = this.get(
+ 'bufferViews',
+ primitive.extensions.KHR_draco_mesh_compression.bufferView
+ );
+ primitive.extensions.KHR_draco_mesh_compression.bufferView =
+ bufferView.data;
+ }
+
+ return primitive;
+ });
+ }
+ return mesh;
+ }
+
+ resolveMaterial(material, index) {
+ material.id = material.id || `material-${index}`;
+
+ if (material.alphaMode === undefined)
+ material.alphaMode = ALPHA_MODE.OPAQUE;
+ if (material.doubleSided === undefined) material.doubleSided = false;
+ if (material.alphaCutoff === undefined) material.alphaCutoff = 0.5;
+
+ if (material.normalTexture) {
+ material.normalTexture = { ...material.normalTexture };
+ material.normalTexture.texture = this.get(
+ 'textures',
+ material.normalTexture.index
+ );
+ }
+ if (material.occlusionTexture) {
+ material.occlusionTexture = { ...material.occlusionTexture };
+ material.occlusionTexture.texture = this.get(
+ 'textures',
+ material.occlusionTexture.index
+ );
+ }
+ if (material.emissiveTexture) {
+ material.emissiveTexture = { ...material.emissiveTexture };
+ material.emissiveTexture.texture = this.get(
+ 'textures',
+ material.emissiveTexture.index
+ );
+ }
+ if (!material.emissiveFactor) {
+ material.emissiveFactor = material.emissiveTexture ? 1 : 0;
+ } else material.emissiveFactor = material.emissiveFactor[0];
+
+ if (material.pbrMetallicRoughness) {
+ material.pbrMetallicRoughness = { ...material.pbrMetallicRoughness };
+ const mr = material.pbrMetallicRoughness;
+ if (mr.baseColorTexture) {
+ mr.baseColorTexture = { ...mr.baseColorTexture };
+ mr.baseColorTexture.texture = this.get(
+ 'textures',
+ mr.baseColorTexture.index
+ );
+ }
+ if (mr.metallicRoughnessTexture) {
+ mr.metallicRoughnessTexture = { ...mr.metallicRoughnessTexture };
+ mr.metallicRoughnessTexture.texture = this.get(
+ 'textures',
+ mr.metallicRoughnessTexture.index
+ );
+ }
+ } else {
+ material.pbrMetallicRoughness = {
+ baseColorFactor: [1, 1, 1, 1],
+ metallicFactor: 1.0,
+ roughnessFactor: 1.0,
+ };
+ }
+ return material;
+ }
+
+ /**
+   * Extracts the values of a particular accessor from an interleaved buffer by
+   * copying each strided element into a new contiguous buffer.
+ */
+ getValueFromInterleavedBuffer(
+ buffer,
+ byteOffset,
+ byteStride,
+ bytesPerElement,
+ count
+ ) {
+ const result = new Uint8Array(count * bytesPerElement);
+ for (let i = 0; i < count; i++) {
+ const elementOffset = byteOffset + i * byteStride;
+ result.set(
+ new Uint8Array(
+ buffer.arrayBuffer.slice(
+ elementOffset,
+ elementOffset + bytesPerElement
+ )
+ ),
+ i * bytesPerElement
+ );
+ }
+ return result.buffer;
+ }
+
+ resolveAccessor(accessor, index) {
+ accessor.id = accessor.id || `accessor-${index}`;
+ if (accessor.bufferView !== undefined) {
+ // Draco encoded meshes don't have bufferView
+ accessor.bufferView = this.get('bufferViews', accessor.bufferView);
+ }
+
+ // Look up enums
+ accessor.bytesPerComponent = BYTES[accessor.componentType];
+ accessor.components = COMPONENTS[accessor.type];
+ accessor.bytesPerElement = accessor.bytesPerComponent * accessor.components;
+
+ // Create TypedArray for the accessor
+ // Note: The canonical way to instantiate is to ignore this array and create
+ // WebGLBuffer's using the bufferViews.
+ if (accessor.bufferView) {
+ const buffer = accessor.bufferView.buffer;
+ const { ArrayType, byteLength } = getAccessorArrayTypeAndLength(
+ accessor,
+ accessor.bufferView
+ );
+ const byteOffset =
+ (accessor.bufferView.byteOffset || 0) +
+ (accessor.byteOffset || 0) +
+ buffer.byteOffset;
+
+      let slicedBuffer = buffer.arrayBuffer.slice(
+        byteOffset,
+        byteOffset + byteLength
+      );
+
+      if (accessor.bufferView.byteStride) {
+        slicedBuffer = this.getValueFromInterleavedBuffer(
+          buffer,
+          byteOffset,
+          accessor.bufferView.byteStride,
+          accessor.bytesPerElement,
+          accessor.count
+        );
+      }
+      accessor.value = new ArrayType(slicedBuffer);
+ }
+
+ return accessor;
+ }
+
+ resolveTexture(texture, index) {
+ texture.id = texture.id || `texture-${index}`;
+ texture.sampler =
+ 'sampler' in texture
+ ? this.get('samplers', texture.sampler)
+ : DEFAULT_SAMPLER;
+
+ texture.source = this.get('images', texture.source);
+
+ // Handle texture extensions sources
+ if (texture.extensions !== undefined) {
+ const extensionsNames = Object.keys(texture.extensions);
+ extensionsNames.forEach((extensionName) => {
+ const extension = texture.extensions[extensionName];
+ switch (extensionName) {
+ case 'KHR_texture_basisu':
+ case 'EXT_texture_webp':
+ case 'EXT_texture_avif':
+ texture.source = this.get('images', extension.source);
+ break;
+ default:
+ vtkWarningMacro(`Unhandled extension: ${extensionName}`);
+ }
+ });
+ }
+ return texture;
+ }
+
+ resolveSampler(sampler, index) {
+ sampler.id = sampler.id || `sampler-${index}`;
+
+ if (!Object.hasOwn(sampler, 'wrapS')) sampler.wrapS = GL_SAMPLER.REPEAT;
+ if (!Object.hasOwn(sampler, 'wrapT')) sampler.wrapT = GL_SAMPLER.REPEAT;
+
+ if (!Object.hasOwn(sampler, 'minFilter'))
+ sampler.minFilter = GL_SAMPLER.LINEAR_MIPMAP_LINEAR;
+ if (!Object.hasOwn(sampler, 'magFilter'))
+ sampler.magFilter = GL_SAMPLER.NEAREST;
+
+ // Map textual parameters to GL parameter values
+ sampler.parameters = {};
+ for (const key in sampler) {
+ const glEnum = getGLEnumFromSamplerParameter(key);
+ if (glEnum !== undefined) {
+ sampler.parameters[glEnum] = sampler[key];
+ }
+ }
+ return sampler;
+ }
+
+ resolveImage(image, index) {
+ image.id = image.id || `image-${index}`;
+ if (image.bufferView !== undefined) {
+ image.bufferView = this.get('bufferViews', image.bufferView);
+ }
+ return image;
+ }
+
+ resolveBufferView(bufferView, index) {
+ bufferView.id = bufferView.id || `bufferView-${index}`;
+ const bufferIndex = bufferView.buffer;
+ bufferView.buffer = this.buffers[bufferIndex];
+
+ const arrayBuffer = this.buffers[bufferIndex].arrayBuffer;
+ let byteOffset = this.buffers[bufferIndex].byteOffset || 0;
+
+ if ('byteOffset' in bufferView) {
+ byteOffset += bufferView.byteOffset;
+ }
+
+ bufferView.data = new Uint8Array(
+ arrayBuffer,
+ byteOffset,
+ bufferView.byteLength
+ );
+ return bufferView;
+ }
+
+ resolveCamera(camera, index) {
+ camera.id = camera.id || `camera-${index}`;
+ return camera;
+ }
+
+ resolveAnimation(animation, index) {
+ animation.id = animation.id || `animation-${index}`;
+ animation.samplers.map((sampler) => {
+ sampler.input = this.get('accessors', sampler.input).value;
+ sampler.output = this.get('accessors', sampler.output).value;
+ return sampler;
+ });
+ return animation;
+ }
+
+ loadBuffers() {
+ const promises = this.json.buffers.map((buffer, idx) =>
+ this.loadBuffer(buffer, idx).then(() => {
+ delete buffer.uri;
+ })
+ );
+ return Promise.all(promises);
+ }
+
+ async loadBuffer(buffer, index) {
+ let arrayBuffer = buffer;
+
+ if (buffer.uri) {
+ vtkDebugMacro('Loading uri', buffer.uri);
+ const uri = resolveUrl(buffer.uri, this.options.baseUri);
+ const response = await fetch(uri);
+ arrayBuffer = await response.arrayBuffer();
+ } else if (this.glTF.glbBuffers) {
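+      // Buffers without a uri refer to the binary chunk(s) of a GLB file.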
+ arrayBuffer = this.glTF.glbBuffers[index];
+ }
+
+ this.buffers[index] = {
+ arrayBuffer,
+ byteOffset: 0,
+ byteLength: arrayBuffer.byteLength,
+ };
+ }
+
+  loadImages() {
+    const images = this.json.images || [];
+    const promises = images.map((image, i) =>
+      this.loadImage(image, i).then(() => {
+        vtkDebugMacro('Texture loaded ', image);
+      })
+    );
+    return Promise.all(promises).then(() => this.images);
+  }
+
+ async loadImage(image, index) {
+ let arrayBuffer;
+ let buffer;
+
+ if (image.uri) {
+ vtkDebugMacro('Loading texture', image.uri);
+ const uri = resolveUrl(image.uri, this.options.baseUri);
+ const response = await fetch(uri);
+
+ arrayBuffer = await response.arrayBuffer();
+ image.uri = uri;
+ image.bufferView = {
+ data: arrayBuffer,
+ };
+ } else if (image.bufferView) {
+ const bufferView = this.get('bufferViews', image.bufferView);
+ buffer = this.get('buffers', bufferView.buffer);
+
+ // GLB buffer
+ if (this.glTF.glbBuffers) {
+ buffer = this.glTF.glbBuffers[bufferView.buffer];
+ arrayBuffer = buffer.slice(
+ bufferView.byteOffset,
+ bufferView.byteOffset + bufferView.byteLength
+ );
+ }
+
+ image.bufferView = {
+ data: arrayBuffer,
+ };
+ }
+ }
+}
+
+export default GLTFParser;
diff --git a/Sources/IO/Geometry/GLTFImporter/Reader.js b/Sources/IO/Geometry/GLTFImporter/Reader.js
new file mode 100644
index 00000000000..26c1193e148
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Reader.js
@@ -0,0 +1,608 @@
+import macro from 'vtk.js/Sources/macros';
+import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
+
+import vtkActor from 'vtk.js/Sources/Rendering/Core/Actor';
+import vtkCamera from 'vtk.js/Sources/Rendering/Core/Camera';
+import vtkDataArray from 'vtk.js/Sources/Common/Core/DataArray';
+import vtkPolyData from 'vtk.js/Sources/Common/DataModel/PolyData';
+import vtkMapper from 'vtk.js/Sources/Rendering/Core/Mapper';
+import vtkCellArray from 'vtk.js/Sources/Common/Core/CellArray';
+import vtkTransform from 'vtk.js/Sources/Common/Transform/Transform';
+import GLTFParser from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Parser';
+import {
+ ALPHA_MODE,
+ MODES,
+ SEMANTIC_ATTRIBUTE_MAP,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+import {
+ createVTKTextureFromGLTFTexture,
+ loadImage,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils';
+import {
+ handleKHRDracoMeshCompression,
+ handleKHRLightsPunctual,
+ handleKHRMaterialsIor,
+ handleKHRMaterialsSpecular,
+ handleKHRMaterialsUnlit,
+ handleKHRMaterialsVariants,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Extensions';
+
+import { mat4, quat, vec3 } from 'gl-matrix';
+
+const { vtkWarningMacro, vtkDebugMacro } = macro;
+
+/**
+ * Parses a glTF object
+ * @param {Object} gltf - The glTF object to parse
+ * @param {Object} options - Parser options (such as the base URI used to resolve resources)
+ * @returns {glTF} The parsed GLTF object
+ */
+async function parseGLTF(gltf, options) {
+ const parser = new GLTFParser(gltf, options);
+ const tree = await parser.parse();
+ return tree;
+}
+
+/**
+ * Creates VTK polydata from a GLTF mesh
+ * @param {GLTFMesh} mesh - The GLTF mesh
+ * @returns {vtkPolyData} The created VTK polydata
+ */
+async function createPolyDataFromGLTFMesh(mesh) {
+ const primitive = mesh.primitives[0]; // For simplicity, we'll just use the first primitive
+
+ if (!primitive || !primitive.attributes) {
+ vtkWarningMacro('Mesh has no position data, skipping');
+ return null;
+ }
+
+ const mode = primitive.mode;
+
+ if (primitive.extensions?.KHR_draco_mesh_compression) {
+ return handleKHRDracoMeshCompression(
+ primitive.extensions.KHR_draco_mesh_compression
+ );
+ }
+
+ const polyData = vtkPolyData.newInstance();
+ const cells = vtkCellArray.newInstance();
+ const pointData = polyData.getPointData();
+
+ const attrs = Object.entries(primitive.attributes);
+ attrs.forEach(async ([attributeName, accessor]) => {
+ switch (attributeName) {
+ case SEMANTIC_ATTRIBUTE_MAP.POSITION: {
+ const position = primitive.attributes.position.value;
+ polyData
+ .getPoints()
+          .setData(position, primitive.attributes.position.components);
+ break;
+ }
+ case SEMANTIC_ATTRIBUTE_MAP.NORMAL: {
+ const normals = primitive.attributes.normal.value;
+ pointData.setNormals(
+ vtkDataArray.newInstance({
+ name: 'Normals',
+ values: normals,
+ numberOfComponents: primitive.attributes.normal.components,
+ })
+ );
+ break;
+ }
+ case SEMANTIC_ATTRIBUTE_MAP.COLOR_0: {
+ const color = primitive.attributes.color.value;
+ pointData.setScalars(
+ vtkDataArray.newInstance({
+ name: 'Scalars',
+ values: color,
+ numberOfComponents: primitive.attributes.color.components,
+ })
+ );
+ break;
+ }
+ case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_0: {
+ const tcoords0 = primitive.attributes.texcoord0.value;
+ const da = vtkDataArray.newInstance({
+ name: 'TEXCOORD_0',
+ values: tcoords0,
+ numberOfComponents: primitive.attributes.texcoord0.components,
+ });
+ pointData.addArray(da);
+ pointData.setActiveTCoords(da.getName());
+ break;
+ }
+ case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_1: {
+ const tcoords = primitive.attributes.texcoord1.value;
+ const dac = vtkDataArray.newInstance({
+ name: 'TEXCOORD_1',
+ values: tcoords,
+ numberOfComponents: primitive.attributes.texcoord1.components,
+ });
+ pointData.addArray(dac);
+ break;
+ }
+ case SEMANTIC_ATTRIBUTE_MAP.TANGENT: {
+ const tangent = primitive.attributes.tangent.value;
+ const dat = vtkDataArray.newInstance({
+ name: 'Tangents',
+ values: tangent,
+ numberOfComponents: primitive.attributes.tangent.components,
+ });
+ pointData.addArray(dat);
+ break;
+ }
+ default:
+ vtkWarningMacro(`Unhandled attribute: ${attributeName}`);
+ }
+ });
+
+ // Handle indices if available
+ if (primitive.indices !== undefined) {
+ const indices = primitive.indices.value;
+ const nCells = indices.length - 2;
+ switch (mode) {
+ case MODES.GL_LINE_STRIP:
+ case MODES.GL_TRIANGLE_STRIP:
+ case MODES.GL_LINE_LOOP:
+        vtkWarningMacro('GL_LINE_STRIP, GL_TRIANGLE_STRIP and GL_LINE_LOOP are not implemented');
+ break;
+ default:
+ cells.resize((4 * indices.length) / 3);
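+        // Each triangle cell is stored as [3, i0, i1, i2]: 4 values per 3 indices.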
+ for (let cellId = 0; cellId < nCells; cellId += 3) {
+ const cell = indices.slice(cellId, cellId + 3);
+ cells.insertNextCell(cell);
+ }
+ }
+ }
+
+ switch (mode) {
+ case MODES.GL_TRIANGLES:
+ case MODES.GL_TRIANGLE_FAN:
+ polyData.setPolys(cells);
+ break;
+ case MODES.GL_LINES:
+ case MODES.GL_LINE_STRIP:
+ case MODES.GL_LINE_LOOP:
+ polyData.setLines(cells);
+ break;
+ case MODES.GL_POINTS:
+ polyData.setVerts(cells);
+ break;
+ case MODES.GL_TRIANGLE_STRIP:
+ polyData.setStrips(cells);
+ break;
+ default:
+ vtkWarningMacro('Invalid primitive draw mode. Ignoring connectivity.');
+ }
+
+ return polyData;
+}
+
+/**
+ * Creates a VTK property from a GLTF material
+ * @param {object} model - The model object
+ * @param {GLTFMaterial} material - The GLTF material
+ * @param {vtkActor} actor - The VTK actor
+ */
+async function createPropertyFromGLTFMaterial(model, material, actor) {
+ let metallicFactor = 1.0;
+ let roughnessFactor = 1.0;
+ const emissiveFactor = material.emissiveFactor;
+
+ const property = actor.getProperty();
+ const pbr = material.pbrMetallicRoughness;
+
+ if (pbr !== undefined) {
+ if (
+ !pbr?.metallicFactor ||
+ pbr?.metallicFactor <= 0 ||
+ pbr?.metallicFactor >= 1
+ ) {
+ vtkWarningMacro(
+ 'Invalid material.pbrMetallicRoughness.metallicFactor value. Using default value instead.'
+ );
+ } else metallicFactor = pbr.metallicFactor;
+ if (
+ !pbr?.roughnessFactor ||
+ pbr?.roughnessFactor <= 0 ||
+ pbr?.roughnessFactor >= 1
+ ) {
+ vtkWarningMacro(
+ 'Invalid material.pbrMetallicRoughness.roughnessFactor value. Using default value instead.'
+ );
+ } else roughnessFactor = pbr.roughnessFactor;
+
+ const color = pbr.baseColorFactor;
+
+ if (color !== undefined) {
+ property.setDiffuseColor(color[0], color[1], color[2]);
+ property.setOpacity(color[3]);
+ }
+
+ property.setMetallic(metallicFactor);
+ property.setRoughness(roughnessFactor);
+ property.setEmission(emissiveFactor);
+
+ if (pbr.baseColorTexture) {
+ const extensions = pbr.baseColorTexture.extensions;
+ const tex = pbr.baseColorTexture.texture;
+
+ if (tex.extensions !== undefined) {
+ const extensionsNames = Object.keys(tex.extensions);
+ extensionsNames.forEach((extensionName) => {
+ // TODO: Handle KHR_texture_basisu extension
+ // const extension = tex.extensions[extensionName];
+ switch (extensionName) {
+ default:
+ vtkWarningMacro(`Unhandled extension: ${extensionName}`);
+ }
+ });
+ }
+
+ const sampler = tex.sampler;
+ const image = await loadImage(tex.source);
+ const diffuseTex = createVTKTextureFromGLTFTexture(
+ image,
+ sampler,
+ extensions
+ );
+
+ // FIXME: Workaround for textures not showing up in WebGL
+ const viewAPI = model.renderer.getRenderWindow();
+ const isWebGL = viewAPI.getViews()[0].isA('vtkOpenGLRenderWindow');
+ if (isWebGL) {
+ actor.addTexture(diffuseTex);
+ } else {
+ property.setDiffuseTexture(diffuseTex);
+ }
+ }
+
+ if (pbr.metallicRoughnessTexture) {
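+      // glTF packs roughness in the green channel and metallic in the blue channel of this texture.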
+ const extensions = pbr.metallicRoughnessTexture.extensions;
+ const tex = pbr.metallicRoughnessTexture.texture;
+ const sampler = tex.sampler;
+ const metallicImage = await loadImage(tex.source, 'b');
+ const metallicTex = createVTKTextureFromGLTFTexture(
+ metallicImage,
+ sampler,
+ extensions
+ );
+ property.setMetallicTexture(metallicTex);
+
+ const roughnessImage = await loadImage(tex.source, 'g');
+ const roughnessTex = createVTKTextureFromGLTFTexture(
+ roughnessImage,
+ sampler,
+ extensions
+ );
+ property.setRoughnessTexture(roughnessTex);
+ }
+
+ // Handle ambient occlusion texture (occlusionTexture)
+ if (material.occlusionTexture) {
+ const extensions = material.occlusionTexture.extensions;
+ const tex = material.occlusionTexture.texture;
+ const sampler = tex.sampler;
+ const aoImage = await loadImage(tex.source, 'r');
+ const aoTex = createVTKTextureFromGLTFTexture(
+ aoImage,
+ sampler,
+ extensions
+ );
+ property.setAmbientOcclusionTexture(aoTex);
+ }
+
+ // Handle emissive texture (emissiveTexture)
+ if (material.emissiveTexture) {
+ const extensions = material.emissiveTexture.extensions;
+ const tex = material.emissiveTexture.texture;
+ const sampler = tex.sampler;
+ const emissiveImage = await loadImage(tex.source);
+ const emissiveTex = createVTKTextureFromGLTFTexture(
+ emissiveImage,
+ sampler,
+ extensions
+ );
+ property.setEmissionTexture(emissiveTex);
+
+      // Handle multiple UV sets
+ if (material.emissiveTexture.texCoord !== undefined) {
+ const pd = actor.getMapper().getInputData().getPointData();
+ pd.setActiveTCoords(`TEXCOORD_${material.emissiveTexture.texCoord}`);
+ }
+ }
+
+ // Handle normal texture (normalTexture)
+ if (material.normalTexture) {
+ const extensions = material.normalTexture.extensions;
+ const tex = material.normalTexture.texture;
+ const sampler = tex.sampler;
+ const normalImage = await loadImage(tex.source);
+ const normalTex = createVTKTextureFromGLTFTexture(
+ normalImage,
+ sampler,
+ extensions
+ );
+ property.setNormalTexture(normalTex);
+
+ if (material.normalTexture.scale !== undefined) {
+ property.setNormalStrength(material.normalTexture.scale);
+ }
+ }
+ }
+
+ // Material extensions
+ if (material.extensions !== undefined) {
+ const extensionsNames = Object.keys(material.extensions);
+ extensionsNames.forEach((extensionName) => {
+ const extension = material.extensions[extensionName];
+ switch (extensionName) {
+ case 'KHR_materials_unlit':
+ handleKHRMaterialsUnlit(extension, property);
+ break;
+ case 'KHR_materials_ior':
+ handleKHRMaterialsIor(extension, property);
+ break;
+ case 'KHR_materials_specular':
+ handleKHRMaterialsSpecular(extension, property);
+ break;
+ default:
+ vtkWarningMacro(`Unhandled extension: ${extensionName}`);
+ }
+ });
+ }
+
+ if (material.alphaMode !== ALPHA_MODE.OPAQUE) {
+ actor.setForceTranslucent(true);
+ }
+
+ property.setBackfaceCulling(!material.doubleSided);
+}
+
+/**
+ * Creates a VTK actor from a glTF node
+ * @param {object} model - The model object
+ * @param {GLTFNode} node - The glTF node
+ * @param {mat4} worldMatrix - The node's world transform matrix
+ * @returns {vtkActor} The created VTK actor
+ */
+async function createActorFromGLTFNode(model, node, worldMatrix) {
+ let polyData;
+ const mesh = node.mesh;
+ const actor = vtkActor.newInstance();
+ const mapper = vtkMapper.newInstance();
+ mapper.setColorModeToDirectScalars();
+ actor.setMapper(mapper);
+ actor.setUserMatrix(worldMatrix);
+
+ if (node.mesh !== undefined) {
+ polyData = await createPolyDataFromGLTFMesh(mesh);
+ mapper.setInputData(polyData);
+
+ const primitive = mesh.primitives[0]; // the first one for now
+
+ // support for materials
+ if (primitive.material !== undefined) {
+ const material = mesh.primitives[0].material;
+ await createPropertyFromGLTFMaterial(model, material, actor);
+ }
+
+ if (primitive.extensions !== undefined) {
+ const extensionsNames = Object.keys(primitive.extensions);
+ extensionsNames.forEach((extensionName) => {
+ const extension = primitive.extensions[extensionName];
+
+ switch (extensionName) {
+ case 'KHR_materials_variants':
+ model.variantMappings.set(node.id, extension.mappings);
+ break;
+ default:
+ vtkWarningMacro(`Unhandled extension: ${extensionName}`);
+ }
+ });
+ }
+ } else {
+ polyData = vtkPolyData.newInstance();
+ mapper.setInputData(polyData);
+ }
+ return actor;
+}
+
+/**
+ *
+ * @param {GLTFAnimation} animation
+ * @returns
+ */
+function createGLTFAnimation(animation) {
+ vtkDebugMacro('Creating animation:', animation);
+ return {
+ name: animation.name,
+ channels: animation.channels,
+ samplers: animation.samplers,
+ getChannelByTargetNode(nodeIndex) {
+ return this.channels.filter(
+ (channel) => channel.target.node === nodeIndex
+ );
+ },
+ };
+}
+
+/**
+ * Gets the transformation matrix for a GLTF node
+ * @param {GLTFNode} node - The GLTF node
+ * @returns {mat4} The transformation matrix
+ */
+function getTransformationMatrix(node) {
+  // glTF nodes provide either a full matrix or separate TRS (translation, rotation, scale) properties.
+ const translation = node.translation ?? vec3.create();
+ const rotation = node.rotation ?? quat.create();
+ const scale = node.scale ?? vec3.fromValues(1.0, 1.0, 1.0);
+
+ const matrix =
+ node.matrix !== undefined
+ ? mat4.clone(node.matrix)
+ : mat4.fromRotationTranslationScale(
+ mat4.create(),
+ rotation,
+ translation,
+ scale
+ );
+ return matrix;
+}
+
+/**
+ * Processes a GLTF node
+ * @param {GLTFnode} node - The GLTF node
+ * @param {object} model The model object
+ * @param {vtkActor} parentActor The parent actor
+ * @param {mat4} parentMatrix The parent matrix
+ */
+async function processNode(
+ node,
+ model,
+ parentActor = null,
+ parentMatrix = mat4.create()
+) {
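+  // Compose the node's local transform with its parent's world matrix.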
+ node.transform = getTransformationMatrix(node);
+ const worldMatrix = mat4.multiply(
+ mat4.create(),
+ parentMatrix,
+ node.transform
+ );
+
+  const actor = await createActorFromGLTFNode(model, node, worldMatrix);
+ if (actor) {
+ actor.setUserMatrix(worldMatrix);
+ if (parentActor) {
+ actor.setParentProp(parentActor);
+ }
+ model.actors.set(node.id, actor);
+ }
+
+ // Handle KHRLightsPunctual extension
+ if (node.extensions?.KHR_lights_punctual) {
+ handleKHRLightsPunctual(
+ node.extensions.KHR_lights_punctual,
+ node.transform,
+ model
+ );
+ }
+
+ if (
+ node?.children &&
+ Array.isArray(node.children) &&
+ node.children.length > 0
+ ) {
+ await Promise.all(
+ node.children.map(async (child) => {
+ const parent = model.actors.get(node.id);
+ await processNode(child, model, parent, worldMatrix);
+ })
+ );
+ }
+}
+
+/**
+ * Creates VTK actors from a GLTF object
+ * @param {object} model - The model object holding the parsed glTF tree; its
+ * actors, lights and variant mappings are populated in place.
+ */
+async function createVTKObjects(model) {
+ model.animations = model.glTFTree.animations?.map(createGLTFAnimation);
+
+ const extensionsNames = Object.keys(model.glTFTree?.extensions || []);
+ extensionsNames.forEach((extensionName) => {
+ const extension = model.glTFTree.extensions[extensionName];
+ switch (extensionName) {
+ case 'KHR_materials_variants':
+ handleKHRMaterialsVariants(extension, model);
+ break;
+ case 'KHR_draco_mesh_compression':
+ break;
+ default:
+ vtkWarningMacro(`Unhandled extension: ${extensionName}`);
+ }
+ });
+
+ // Get the sceneId to process
+ let sceneId = model.sceneId;
+ if (sceneId === undefined) {
+ sceneId = model.glTFTree.scene;
+ }
+
+ if (model.glTFTree.scenes?.length && model.glTFTree.scenes[sceneId]?.nodes) {
+ await Promise.all(
+ model.glTFTree.scenes[sceneId].nodes.map(async (node) => {
+ if (node) {
+ await processNode(node, model);
+ } else {
+ vtkWarningMacro(`Node not found in glTF.nodes`);
+ }
+ })
+ );
+ } else {
+ vtkWarningMacro('No valid scenes found in the glTF data');
+ }
+}
+
+/**
+ * Creates a vtkCamera from a glTF camera definition (perspective or orthographic).
+ *
+ * @param {GLTFCamera} glTFCamera - The glTF camera object
+ * @returns {vtkCamera} The created VTK camera
+ */
+function GLTFCameraToVTKCamera(glTFCamera) {
+ const camera = vtkCamera.newInstance();
+ if (glTFCamera.type === 'perspective') {
+ const { yfov, znear, zfar } = glTFCamera.perspective;
+ camera.setClippingRange(znear, zfar);
+ camera.setParallelProjection(false);
+ camera.setViewAngle(vtkMath.degreesFromRadians(yfov));
+ } else if (glTFCamera.type === 'orthographic') {
+ const { ymag, znear, zfar } = glTFCamera.orthographic;
+ camera.setClippingRange(znear, zfar);
+ camera.setParallelProjection(true);
+ camera.setParallelScale(ymag);
+ } else {
+ throw new Error('Unsupported camera type');
+ }
+
+ return camera;
+}
+
+/**
+ *
+ * @param {vtkCamera} camera
+ * @param {*} transformMatrix
+ */
+function applyTransformToCamera(camera, transformMatrix) {
+ if (!camera || !transformMatrix) {
+ return;
+ }
+
+ // At identity, camera position is origin, +y up, -z view direction
+ const position = [0, 0, 0];
+ const viewUp = [0, 1, 0];
+ const focus = [0, 0, -1];
+
+ const t = vtkTransform.newInstance();
+ t.setMatrix(transformMatrix);
+
+ // Transform position
+ t.transformPoint(position, position);
+ t.transformPoints(viewUp, viewUp);
+ t.transformPoints(focus, focus);
+
+ focus[0] += position[0];
+ focus[1] += position[1];
+ focus[2] += position[2];
+
+ // Apply the transformed values to the camera
+ camera.setPosition(position);
+ camera.setFocalPoint(focus);
+ camera.setViewUp(viewUp);
+}
+
+export {
+ applyTransformToCamera,
+ createPropertyFromGLTFMaterial,
+ parseGLTF,
+ createVTKObjects,
+ GLTFCameraToVTKCamera,
+};
diff --git a/Sources/IO/Geometry/GLTFImporter/Utils.js b/Sources/IO/Geometry/GLTFImporter/Utils.js
new file mode 100644
index 00000000000..5f98fdc91be
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/Utils.js
@@ -0,0 +1,195 @@
+import WebworkerPromise from 'webworker-promise';
+import macro from 'vtk.js/Sources/macros';
+import vtkTexture from 'vtk.js/Sources/Rendering/Core/Texture';
+import Worker from 'vtk.js/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker';
+import {
+ BYTES,
+ COMPONENTS,
+ ARRAY_TYPES,
+ GL_SAMPLER,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+
+const { vtkWarningMacro, vtkErrorMacro } = macro;
+
+/**
+ * Get GL enum from sampler parameter
+ * @param {*} parameter The sampler parameter
+ * @returns The GL enum
+ */
+export function getGLEnumFromSamplerParameter(parameter) {
+ const GL_TEXTURE_MAG_FILTER = 0x2800;
+ const GL_TEXTURE_MIN_FILTER = 0x2801;
+ const GL_TEXTURE_WRAP_S = 0x2802;
+ const GL_TEXTURE_WRAP_T = 0x2803;
+
+ const Mapping = {
+ magFilter: GL_TEXTURE_MAG_FILTER,
+ minFilter: GL_TEXTURE_MIN_FILTER,
+ wrapS: GL_TEXTURE_WRAP_S,
+ wrapT: GL_TEXTURE_WRAP_T,
+ };
+
+ return Mapping[parameter];
+}
+
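+/**
+ * Computes the typed array constructor, element count and byte length for a glTF accessor.
+ * @param {*} accessor The glTF accessor
+ * @param {*} bufferView The accessor's buffer view
+ * @returns The { ArrayType, length, byteLength } describing the accessor data
+ */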
+export function getAccessorArrayTypeAndLength(accessor, bufferView) {
+ const ArrayType = ARRAY_TYPES[accessor.componentType];
+ const components = COMPONENTS[accessor.type];
+ const bytesPerComponent = BYTES[accessor.componentType];
+ const length = accessor.count * components;
+ const byteLength = accessor.count * components * bytesPerComponent;
+ return { ArrayType, length, byteLength };
+}
+
+/**
+ * Resolves a URL based on the original path
+ * @param {*} url The URL to resolve
+ * @param {*} originalPath The original path to resolve the URL against
+ * @returns The resolved URL or an empty string if the URL is invalid
+ */
+export function resolveUrl(url, originalPath) {
+ // Invalid URL
+ if (typeof url !== 'string' || url === '') return '';
+
+ try {
+ // Data URI
+ if (url.startsWith('data:')) return url;
+
+ // Blob URL
+ if (url.startsWith('blob:')) return url;
+
+ // Create URL object from the original path
+ const baseUrl = new URL(originalPath);
+ if (!baseUrl.pathname.includes('.') && !baseUrl.pathname.endsWith('/')) {
+ baseUrl.pathname += '/';
+ }
+
+ // Absolute URL (http://, https://, //)
+ if (
+ url.startsWith('http:') ||
+ url.startsWith('https:') ||
+ url.startsWith('//')
+ ) {
+ return new URL(url, baseUrl).href;
+ }
+
+ // Host Relative URL
+ if (url.startsWith('/')) {
+ return new URL(url, baseUrl).href;
+ }
+
+ // Relative URL
+ return new URL(url, baseUrl).href;
+ } catch (error) {
+ vtkErrorMacro('Error resolving URL:', error);
+ return '';
+ }
+}
+
+/**
+ * Loads an image from a glTF bufferView or image URI, caching the result per channel.
+ * @param {*} image The glTF image object
+ * @param {*} channel The texture channel to extract (decoded by the ORMTexture worker)
+ * @param {Boolean} forceReLoad Bypass the per-channel cache when true
+ * @returns A promise resolving to an ImageBitmap, an HTMLImageElement, or null
+ */
+export async function loadImage(image, channel, forceReLoad = false) {
+ // Initialize cache if it doesn't exist
+ if (!image.cache) {
+ image.cache = {};
+ }
+
+ // Return cached result for the channel if available and not forced to reload
+ if (!forceReLoad && image.cache[channel]) {
+ return image.cache[channel];
+ }
+
+ const worker = new WebworkerPromise(new Worker());
+
+ if (image.bufferView) {
+ return worker
+ .postMessage({
+ imageBuffer: image.bufferView.data,
+ mimeType: image.mimeType,
+ channel,
+ })
+ .then((result) => {
+ // Cache the bitmap based on the channel
+ image.cache[channel] = result.bitmap;
+ return result.bitmap;
+ })
+ .finally(() => {
+ worker.terminate();
+ });
+ }
+
+ if (image.uri) {
+ vtkWarningMacro('Falling back to image uri', image.uri);
+ return new Promise((resolve, reject) => {
+ const img = new Image();
+ img.crossOrigin = 'Anonymous';
+ img.onload = () => {
+ image.cache[channel] = img; // Cache the loaded image based on the channel
+ resolve(img);
+ };
+ img.onerror = reject;
+ img.src = image.uri;
+ });
+ }
+
+ return null;
+}
+
+/**
+ * Creates a vtkTexture from a decoded glTF image, applying the sampler's wrap and filter settings.
+ * @param {*} image The decoded image (ImageBitmap or HTMLImageElement)
+ * @param {*} sampler The glTF sampler object
+ * @param {*} extensions The glTF extensions (currently unused here)
+ * @returns The configured vtkTexture instance
+ */
+export function createVTKTextureFromGLTFTexture(image, sampler, extensions) {
+ const texture = vtkTexture.newInstance();
+ // Apply sampler settings
+ if (sampler) {
+ if (
+ ('wrapS' in sampler && 'wrapT' in sampler) ||
+ ('minFilter' in sampler && 'magFilter' in sampler)
+ ) {
+ if (
+ sampler.wrapS === GL_SAMPLER.CLAMP_TO_EDGE ||
+ sampler.wrapT === GL_SAMPLER.CLAMP_TO_EDGE
+ ) {
+ texture.setRepeat(false);
+ texture.setEdgeClamp(true);
+ } else if (
+ sampler.wrapS === GL_SAMPLER.REPEAT ||
+ sampler.wrapT === GL_SAMPLER.REPEAT
+ ) {
+ texture.setRepeat(true);
+ texture.setEdgeClamp(false);
+ } else {
+ vtkWarningMacro('Mirrored texture wrapping is not supported!');
+ }
+
+ const linearFilters = [
+ GL_SAMPLER.LINEAR,
+ GL_SAMPLER.LINEAR_MIPMAP_NEAREST,
+ GL_SAMPLER.NEAREST_MIPMAP_LINEAR,
+ GL_SAMPLER.LINEAR_MIPMAP_LINEAR,
+ ];
+
+ if (
+ linearFilters.includes(sampler.minFilter) ||
+ linearFilters.includes(sampler.magFilter)
+ ) {
+ texture.setInterpolate(true);
+ }
+ } else {
+ texture.MipmapOn();
+ texture.setInterpolate(true);
+ texture.setEdgeClamp(true);
+ }
+ }
+
+ texture.setJsImageData(image);
+ return texture;
+}
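+
+// Sketch of expected behavior (hypothetical sampler values): a REPEAT + LINEAR glTF sampler
+// yields a repeating, interpolated vtkTexture, while CLAMP_TO_EDGE disables repeat and
+// enables edge clamping.
+//   const tex = createVTKTextureFromGLTFTexture(bitmap, {
+//     wrapS: GL_SAMPLER.REPEAT,
+//     wrapT: GL_SAMPLER.REPEAT,
+//     minFilter: GL_SAMPLER.LINEAR,
+//     magFilter: GL_SAMPLER.LINEAR,
+//   });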
diff --git a/Sources/IO/Geometry/GLTFImporter/example/controller.html b/Sources/IO/Geometry/GLTFImporter/example/controller.html
new file mode 100644
index 00000000000..0470eff0f47
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/example/controller.html
@@ -0,0 +1,89 @@
+
+
diff --git a/Sources/IO/Geometry/GLTFImporter/example/index.js b/Sources/IO/Geometry/GLTFImporter/example/index.js
new file mode 100644
index 00000000000..9e5a39dbea0
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/example/index.js
@@ -0,0 +1,342 @@
+import '@kitware/vtk.js/Rendering/Profiles/Geometry';
+
+// Enable data source for DataAccessHelper
+import '@kitware/vtk.js/IO/Core/DataAccessHelper/LiteHttpDataAccessHelper'; // Just need HTTP
+// import '@kitware/vtk.js/IO/Core/DataAccessHelper/HttpDataAccessHelper'; // HTTP + zip
+// import '@kitware/vtk.js/IO/Core/DataAccessHelper/HtmlDataAccessHelper'; // html + base64 + zip
+// import '@kitware/vtk.js/IO/Core/DataAccessHelper/JSZipDataAccessHelper'; // zip
+
+import vtkFullScreenRenderWindow from '@kitware/vtk.js/Rendering/Misc/FullScreenRenderWindow';
+import vtkTexture from '@kitware/vtk.js/Rendering/Core/Texture';
+import vtkURLExtract from '@kitware/vtk.js/Common/Core/URLExtract';
+import vtkResourceLoader from '@kitware/vtk.js/IO/Core/ResourceLoader';
+
+import vtkGLTFImporter from '@kitware/vtk.js/IO/Geometry/GLTFImporter';
+
+import controlPanel from './controller.html';
+
+// ----------------------------------------------------------------------------
+// Example code
+// ----------------------------------------------------------------------------
+let mixer;
+let selectedModel;
+let selectedFlavor;
+const userParms = vtkURLExtract.extractURLParameters();
+const selectedScene = userParms.scene || 0;
+const viewAPI = userParms.viewAPI || 'WebGL';
+
+const baseUrl =
+ 'https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main';
+const modelsFolder = 'Models';
+const modelsDictionary = {};
+
+function createTextureWithMipmap(src, level) {
+ const img = new Image();
+ img.crossOrigin = 'Anonymous';
+ img.src = src;
+ const tex = vtkTexture.newInstance();
+ tex.setMipLevel(level);
+ img.onload = () => {
+ tex.setInterpolate(true);
+ tex.setEdgeClamp(true);
+ tex.setImage(img);
+ };
+ return tex;
+}
+
+const fullScreenRenderer = vtkFullScreenRenderWindow.newInstance();
+fullScreenRenderer.addController(controlPanel);
+
+const renderer = fullScreenRenderer.getRenderer();
+const renderWindow = fullScreenRenderer.getRenderWindow();
+
+// Workaround for the variant switch
+const variantsModels = [
+ 'MaterialsVariantsShoe',
+ 'GlamVelvetSofa',
+ 'SheenChair',
+];
+
+// `selectedModel` is only assigned after the model index is fetched, so use the URL parameter here
+if (!variantsModels.includes(userParms.model)) {
+ renderer.setUseEnvironmentTextureAsBackground(false);
+ const environmentTex = createTextureWithMipmap(
+ '/Data/pbr/kiara_dawn_4k.jpg',
+ 8
+ );
+ renderer.setEnvironmentTexture(environmentTex);
+ renderer.setEnvironmentTextureDiffuseStrength(1);
+ renderer.setEnvironmentTextureSpecularStrength(1);
+}
+const reader = vtkGLTFImporter.newInstance({
+ renderer,
+});
+
+const rootContainer = document.querySelector('body');
+const modelSelector = document.querySelector('.models');
+const flavorSelector = document.querySelector('.flavor');
+const scenesSelector = document.querySelector('.scenes');
+const camerasSelector = document.querySelector('.cameras');
+const animationsSelector = document.querySelector('.animations');
+const variantsSelector = document.querySelector('.variants');
+
+const eSpecularChange = document.querySelector('.e-specular');
+const eDiffuseChange = document.querySelector('.e-diffuse');
+const angleChange = document.querySelector('.angle');
+const useTextureBackgroundChange = document.querySelector('.use-background');
+
+// add a loading svg to the container and remove once the reader is ready
+const loading = document.createElement('div');
+loading.innerHTML = `
+
+
+
+
+
+`;
+// Center the loading message in the window
+loading.style.position = 'absolute';
+loading.style.left = '50%';
+loading.style.top = '50%';
+loading.style.transform = 'translate(-50%, -50%)';
+
+// ----------------------------------------------------------------------------
+// Default lastTime to "now" so the first frame does not see a huge delta
+function animateScene(lastTime = performance.now()) {
+ const currentTime = performance.now();
+ const dt = (currentTime - lastTime) / 1000;
+
+ mixer.update(dt);
+
+ renderWindow.render();
+ requestAnimationFrame(() => animateScene(currentTime));
+}
+
+function ready() {
+ console.log('Ready');
+ // remove loading message
+ loading.remove();
+
+ reader.importActors();
+ reader.importCameras();
+ reader.importLights();
+ reader.importAnimations();
+
+ renderer.resetCamera();
+ renderWindow.render();
+
+ // Play animations
+ const animations = reader.getAnimations();
+ if (animations.length > 0) {
+ animations.forEach((animation) => {
+ const option = document.createElement('option');
+ option.value = animation.id;
+ option.textContent = animation.id;
+ animationsSelector.appendChild(option);
+ });
+
+ // Play the first animation by default
+ const defaultAnimation = animations[0];
+ mixer = reader.getAnimationMixer();
+ mixer.play(defaultAnimation.id);
+ animateScene();
+ document.querySelector('.animations-container').style.display = 'table-row';
+ }
+
+ const cameras = reader.getCameras();
+ cameras.forEach((camera, name) => {
+ const option = document.createElement('option');
+ option.value = name;
+ option.textContent = name;
+ camerasSelector.appendChild(option);
+ });
+
+ const scenes = reader.getScenes();
+ if (scenes.length > 1) {
+ scenesSelector.innerHTML = '';
+ scenes.forEach((scene, index) => {
+ const option = document.createElement('option');
+ option.value = index;
+ option.textContent = `Scene ${index}`;
+ if (index === selectedScene) {
+ option.selected = true;
+ }
+ scenesSelector.appendChild(option);
+ });
+ }
+
+ const variants = reader.getVariants();
+ if (variants.length > 1) {
+ variantsSelector.innerHTML = '';
+ variants.forEach((variant, index) => {
+ console.log('Adding variant', variant);
+ const option = document.createElement('option');
+ option.value = index;
+ option.textContent = variant;
+ variantsSelector.appendChild(option);
+ });
+ document.querySelector('.variants-container').style.display = 'table-row';
+ }
+}
+
+// Fetch the model index, populate the model/flavor selectors, then load the selected asset
+fetch(`${baseUrl}/${modelsFolder}/model-index.json`)
+ .then((response) => response.json())
+ .then((modelsJson) => {
+ modelsJson.forEach((entry) => {
+ if (entry.variants !== undefined && entry.name !== undefined) {
+ const variants = {};
+
+ Object.keys(entry.variants).forEach((variant) => {
+ const fileName = entry.variants[variant];
+ variants[
+ variant
+ ] = `${modelsFolder}/${entry.name}/${variant}/${fileName}`;
+ });
+
+ modelsDictionary[entry.name] = variants;
+ }
+ });
+
+ const modelsNames = Object.keys(modelsDictionary);
+ modelsNames.forEach((modelName) => {
+ const option = document.createElement('option');
+ option.value = modelName;
+ option.textContent = modelName;
+ if (userParms.model === modelName) {
+ option.selected = true;
+ }
+ modelSelector.appendChild(option);
+ });
+
+ selectedModel = userParms.model || modelsNames[0];
+ const variants = Object.keys(modelsDictionary[selectedModel]);
+
+ selectedFlavor = userParms.flavor || variants[0];
+ variants.forEach((variant) => {
+ const option = document.createElement('option');
+ option.value = variant;
+ option.textContent = variant;
+ if (variant === selectedFlavor) {
+ option.selected = true;
+ }
+ flavorSelector.appendChild(option);
+ });
+
+ const path = modelsDictionary[selectedModel][selectedFlavor];
+ const url = `${baseUrl}/${path}`;
+
+ if (selectedFlavor === 'glTF-Draco') {
+ vtkResourceLoader
+ .loadScript('https://unpkg.com/draco3d@1.3.4/draco_decoder_nodejs.js')
+ .then(() => {
+ // Set decoder function to the vtk reader
+ reader.setDracoDecoder(window.CreateDracoModule);
+ reader
+ .setUrl(url, { binary: true, sceneId: selectedScene })
+ .then(reader.onReady(ready));
+ });
+ } else {
+ reader
+ .setUrl(url, { binary: true, sceneId: selectedScene })
+ .then(reader.onReady(ready));
+ }
+ })
+ .catch((error) => {
+ console.error('Error fetching the model index:', error);
+ });
+
+// ----------------------------------------------------------------------------
+// UI wiring for the control panel
+// ----------------------------------------------------------------------------
+
+// Check the radio button named 'viewAPI' matching the current view API and reload on change
+document.querySelectorAll("input[name='viewAPI']").forEach((input) => {
+ if (input.value === viewAPI) {
+ input.checked = true;
+ }
+ input.addEventListener('change', (evt) => {
+ window.location = `?model=${selectedModel}&viewAPI=${evt.target.value}`;
+ });
+});
+
+modelSelector.onchange = (evt) => {
+ window.location = `?model=${evt.target.value}&viewAPI=${viewAPI}`;
+};
+
+flavorSelector.onchange = (evt) => {
+ window.location = `?model=${selectedModel}&flavor=${evt.target.value}&scene=${selectedScene}&viewAPI=${viewAPI}`;
+};
+
+scenesSelector.onchange = (evt) => {
+ window.location = `?model=${selectedModel}&flavor=${selectedFlavor}&scene=${evt.target.value}&viewAPI=${viewAPI}`;
+};
+
+camerasSelector.onchange = (evt) => {
+ reader.setCamera(evt.target.value);
+ renderWindow.render();
+};
+
+variantsSelector.onchange = async (evt) => {
+ console.log('Switching to variant', evt.target.value);
+ await reader.switchToVariant(Number(evt.target.value));
+ renderWindow.render();
+};
+
+useTextureBackgroundChange.addEventListener('input', (e) => {
+ const useTexturedBackground = Boolean(e.target.checked);
+ renderer.setUseEnvironmentTextureAsBackground(useTexturedBackground);
+ renderWindow.render();
+});
+
+angleChange.addEventListener('input', (e) => {
+ const angle = Number(e.target.value);
+ renderer.getActiveCamera().setViewAngle(angle);
+ renderWindow.render();
+});
+
+eSpecularChange.addEventListener('input', (e) => {
+ const specular = Number(e.target.value);
+ renderer.setEnvironmentTextureSpecularStrength(specular);
+ renderWindow.render();
+});
+
+eDiffuseChange.addEventListener('input', (e) => {
+ const diffuse = Number(e.target.value);
+ renderer.setEnvironmentTextureDiffuseStrength(diffuse);
+ renderWindow.render();
+});
+
+rootContainer.appendChild(loading);
diff --git a/Sources/IO/Geometry/GLTFImporter/index.d.ts b/Sources/IO/Geometry/GLTFImporter/index.d.ts
new file mode 100644
index 00000000000..8009302b4be
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/index.d.ts
@@ -0,0 +1,266 @@
+import { vtkAlgorithm, vtkObject } from '../../../interfaces';
+import HtmlDataAccessHelper from '../../Core/DataAccessHelper/HtmlDataAccessHelper';
+import HttpDataAccessHelper from '../../Core/DataAccessHelper/HttpDataAccessHelper';
+import JSZipDataAccessHelper from '../../Core/DataAccessHelper/JSZipDataAccessHelper';
+import LiteHttpDataAccessHelper from '../../Core/DataAccessHelper/LiteHttpDataAccessHelper';
+
+import vtkActor from '../../../Rendering/Core/Actor';
+import vtkRenderer from '../../../Rendering/Core/Renderer';
+import vtkCamera from '../../../Rendering/Core/Camera';
+
+interface IGLTFImporterOptions {
+ binary?: boolean;
+ compression?: string;
+ progressCallback?: any;
+ sceneId?: number;
+}
+
+export interface IGLTFAnimation {
+ id: string;
+ name: string;
+ channels: any[];
+ samplers: any[];
+}
+
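+/**
+ * Drives glTF animations on imported actors. Typical render-loop usage
+ * (a sketch mirroring the bundled example): call update() once per frame with
+ * the elapsed time in seconds, then re-render.
+ */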
+export interface IGLTFAnimationMixer {
+ addAnimation: (glTFAnimation: object) => void;
+ play: (name: string, weight?: number) => void;
+ stop: (name: string) => void;
+ stopAll: () => void;
+ update: (deltaTime: number) => void;
+}
+
+export interface IGLTFMaterialVariant {
+ material: number;
+ variants: number[];
+}
+
+/**
+ *
+ */
+export interface IGLTFImporterInitialValues {}
+
+type vtkGLTFImporterBase = vtkObject &
+ Omit<
+ vtkAlgorithm,
+ | 'getInputData'
+ | 'setInputData'
+ | 'setInputConnection'
+ | 'getInputConnection'
+ | 'addInputConnection'
+ | 'addInputData'
+ >;
+
+export interface vtkGLTFImporter extends vtkGLTFImporterBase {
+ /**
+ * Get the actors.
+ */
+ getActors(): Map<string, vtkActor>;
+
+ /**
+ * Get the animation mixer.
+ */
+ getAnimationMixer(): IGLTFAnimationMixer;
+
+ /**
+ * Get the animations.
+ */
+ getAnimations(): IGLTFAnimation[];
+
+ /**
+ * Get the base url.
+ */
+ getBaseURL(): string;
+
+ /**
+ * Get the cameras.
+ */
+ getCameras(): Map<string, vtkCamera>;
+
+ /**
+ *
+ */
+ getDataAccessHelper():
+ | HtmlDataAccessHelper
+ | HttpDataAccessHelper
+ | JSZipDataAccessHelper
+ | LiteHttpDataAccessHelper;
+
+ /**
+ * Get the url of the object to load.
+ */
+ getUrl(): string;
+
+ /**
+ * Get the variant array.
+ */
+ getVariants(): string[];
+
+ /**
+ * Get the variant mappings.
+ */
+ getVariantMappings(): Map<string, IGLTFMaterialVariant[]>;
+
+ /**
+ * Import the actors.
+ */
+ importActors(): void;
+
+ /**
+ * Import the animations.
+ */
+ importAnimations(): void;
+
+ /**
+ * Import the cameras.
+ */
+ importCameras(): void;
+
+ /**
+ * Import the lights.
+ */
+ importLights(): void;
+
+ /**
+ * Invoke the ready event.
+ */
+ invokeReady(): void;
+
+ /**
+ * Load the object data.
+ * @param {IGLTFImporterOptions} [options]
+ */
+ loadData(options?: IGLTFImporterOptions): Promise<any>;
+
+ /**
+ *
+ * @param callback
+ */
+ onReady(callback: () => void): void;
+
+ /**
+ * Parse data.
+ * @param {String | ArrayBuffer} content The content to parse.
+ */
+ parse(content: string | ArrayBuffer): void;
+
+ /**
+ * Parse data as ArrayBuffer.
+ * @param {ArrayBuffer} content The content to parse.
+ */
+ parseAsArrayBuffer(content: ArrayBuffer): void;
+
+ /**
+ * Parse data as text.
+ * @param {String} content The content to parse.
+ */
+ parseAsText(content: string): void;
+
+ /**
+ *
+ * @param inData
+ * @param outData
+ */
+ requestData(inData: any, outData: any): void;
+
+ /**
+ *
+ * @param dataAccessHelper
+ */
+ setDataAccessHelper(
+ dataAccessHelper:
+ | HtmlDataAccessHelper
+ | HttpDataAccessHelper
+ | JSZipDataAccessHelper
+ | LiteHttpDataAccessHelper
+ ): boolean;
+
+ /**
+ * Set the url of the object to load.
+ * @param {String} url the url of the object to load.
+ * @param {IGLTFImporterOptions} [option] The importer options.
+ */
+ setUrl(url: string, option?: IGLTFImporterOptions): Promise<any>;
+
+ /**
+ * Set the active camera by its id.
+ * @param cameraId
+ */
+ setCamera(cameraId: string): void;
+
+ /**
+ * Set the Draco decoder.
+ * @param decoder The Draco decoder module
+ */
+ setDracoDecoder(decoder: any): void;
+
+ /**
+ * Set the vtk Renderer.
+ * @param renderer
+ */
+ setRenderer(renderer: vtkRenderer): void;
+
+ /**
+ * Switch to a variant.
+ * @param variantIndex
+ */
+ switchToVariant(variantIndex: number): Promise<void>;
+}
+
+/**
+ * Method used to decorate a given object (publicAPI+model) with vtkGLTFImporter characteristics.
+ *
+ * @param publicAPI object on which methods will be bounds (public)
+ * @param model object on which data structure will be bounds (protected)
+ * @param {IGLTFImporterInitialValues} [initialValues] (default: {})
+ */
+export function extend(
+ publicAPI: object,
+ model: object,
+ initialValues?: IGLTFImporterInitialValues
+): void;
+
+/**
+ * Method used to create a new instance of vtkGLTFImporter
+ * @param {IGLTFImporterInitialValues} [initialValues] for pre-setting some of its content
+ */
+export function newInstance(
+ initialValues?: IGLTFImporterInitialValues
+): vtkGLTFImporter;
+
+/**
+ * Load the WASM decoder from url and set the decoderModule
+ * @param url
+ * @param binaryName
+ */
+export function setWasmBinary(
+ url: string,
+ binaryName: string
+): Promise<void>;
+
+/**
+ * vtkGLTFImporter can import glTF 2.0 files.
+ *
+ * The GL Transmission Format (glTF) is an API-neutral runtime asset delivery
+ * format. A glTF asset is represented by:
+ * * A JSON-formatted file (.gltf) containing a full scene description: node
+ * hierarchy, materials, cameras, as well as descriptor information for
+ * meshes, animations, and other constructs
+ * * Binary files (.bin) containing geometry and animation data, and other
+ * buffer-based data
+ * * Image files (.jpg, .png) for textures
+ *
+ * Supported extensions:
+ * * KHR_draco_mesh_compression
+ * * KHR_lights_punctual
+ * * KHR_materials_unlit
+ * * KHR_materials_ior
+ * * KHR_materials_specular
+ * * KHR_materials_variants
+ * * EXT_texture_webp
+ * * EXT_texture_avif
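+ *
+ * Typical usage (a sketch based on the bundled example; the URL is illustrative):
+ * @example
+ * const importer = vtkGLTFImporter.newInstance({ renderer });
+ * importer.onReady(() => {
+ *   importer.importActors();
+ *   importer.importCameras();
+ *   importer.importLights();
+ *   renderer.resetCamera();
+ *   renderWindow.render();
+ * });
+ * importer.setUrl('https://example.com/Duck.glb', { binary: true });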
+ */
+export declare const vtkGLTFImporter: {
+ newInstance: typeof newInstance;
+ extend: typeof extend;
+};
+export default vtkGLTFImporter;
diff --git a/Sources/IO/Geometry/GLTFImporter/index.js b/Sources/IO/Geometry/GLTFImporter/index.js
new file mode 100644
index 00000000000..553e60f680a
--- /dev/null
+++ b/Sources/IO/Geometry/GLTFImporter/index.js
@@ -0,0 +1,277 @@
+import macro from 'vtk.js/Sources/macros';
+
+import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper';
+import DataAccessHelper from 'vtk.js/Sources/IO/Core/DataAccessHelper';
+import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader';
+import {
+ createVTKObjects,
+ parseGLTF,
+ GLTFCameraToVTKCamera,
+ applyTransformToCamera,
+ createPropertyFromGLTFMaterial,
+} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Reader';
+import parseGLB from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Decoder';
+import { createAnimationMixer } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Animations';
+import { BINARY_HEADER_MAGIC } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
+
+const { vtkDebugMacro, vtkErrorMacro } = macro;
+
+// ----------------------------------------------------------------------------
+// vtkGLTFImporter methods
+// ----------------------------------------------------------------------------
+
+function vtkGLTFImporter(publicAPI, model) {
+ // Set our className
+ model.classHierarchy.push('vtkGLTFImporter');
+
+ // Create default dataAccessHelper if not available
+ if (!model.dataAccessHelper) {
+ model.dataAccessHelper = DataAccessHelper.get('http');
+ }
+
+ // Internal method to fetch Array
+ function fetchData(url, option = {}) {
+ const { compression, progressCallback } = model;
+ if (option.binary) {
+ return model.dataAccessHelper.fetchBinary(url, {
+ compression,
+ progressCallback,
+ });
+ }
+ return model.dataAccessHelper.fetchText(publicAPI, url, {
+ compression,
+ progressCallback,
+ });
+ }
+
+ // Set DataSet url
+ publicAPI.setUrl = (url, option = { binary: true }) => {
+ model.url = url;
+
+ // Strip the filename from the URL to derive the base URL
+ const path = url.split('/');
+ path.pop();
+ model.baseURL = path.join('/');
+
+ model.compression = option.compression;
+ model.sceneId = option.sceneId ? option.sceneId : 0;
+
+ // Fetch metadata
+ return publicAPI.loadData({
+ progressCallback: option.progressCallback,
+ binary: !!option.binary,
+ });
+ };
+
+ // Fetch the actual data arrays
+ publicAPI.loadData = (option = {}) => {
+ const promise = fetchData(model.url, option);
+ promise.then(publicAPI.parse);
+ return promise;
+ };
+
+ publicAPI.parse = (content) => {
+ if (typeof content === 'string') {
+ publicAPI.parseAsText(content);
+ } else {
+ publicAPI.parseAsBinary(content);
+ }
+ };
+
+ publicAPI.parseAsBinary = async (content) => {
+ if (!content) {
+ return;
+ }
+ if (content !== model.parseData) {
+ publicAPI.modified();
+ } else {
+ return;
+ }
+
+ const glTF = {};
+ const options = {
+ baseUri: model.baseURL,
+ };
+
+ const magic = BinaryHelper.arrayBufferToString(
+ new Uint8Array(content, 0, 4)
+ );
+
+ if (magic === BINARY_HEADER_MAGIC) {
+ const { json, buffers } = parseGLB(content);
+ vtkDebugMacro('Loaded GLB', json, buffers);
+ glTF.glbBuffers = buffers;
+ glTF.json = json;
+ } else {
+ glTF.json = JSON.parse(BinaryHelper.arrayBufferToString(content));
+ }
+
+ if (glTF.json.asset === undefined || glTF.json.asset.version[0] < 2) {
+ vtkErrorMacro('Unsupported asset. glTF versions >=2.0 are supported.');
+ return;
+ }
+
+ model.glTFTree = await parseGLTF(glTF, options);
+
+ model.actors = new Map();
+ model.cameras = new Map();
+ model.lights = new Map();
+ model.animations = [];
+ model.variants = [];
+ model.variantMappings = new Map();
+
+ await createVTKObjects(model);
+
+ model.scenes = model.glTFTree.scenes;
+
+ publicAPI.invokeReady();
+ };
+
+ publicAPI.parseAsText = (content) => {
+ if (!content) {
+ return;
+ }
+ if (content !== model.parseData) {
+ publicAPI.modified();
+ } else {
+ return;
+ }
+
+ model.parseData = content;
+ };
+
+ publicAPI.requestData = (inData, outData) => {
+ publicAPI.parse(model.parseData);
+ };
+
+ publicAPI.setDracoDecoder = (decoder) => {
+ vtkDracoReader.setDracoDecoder(decoder);
+ };
+
+ publicAPI.importActors = () => {
+ // Add actors to renderer
+ model.actors.forEach((actor) => model.renderer.addActor(actor));
+ };
+
+ publicAPI.importCameras = () => {
+ // Set up camera
+ model.glTFTree.cameras?.forEach((glTFcamera) => {
+ const camera = GLTFCameraToVTKCamera(glTFcamera);
+ model.cameras.set(glTFcamera.id, camera);
+ });
+
+ model.scenes.forEach((scene) => {
+ scene.nodes.forEach((node) => {
+ const camera = model.cameras.get(node.camera?.id);
+ if (camera) {
+ applyTransformToCamera(camera, node.transform);
+ }
+ });
+ });
+ };
+
+ publicAPI.importAnimations = () => {
+ // Set up animations
+ if (model.glTFTree.animations?.length > 0) {
+ model.animationMixer = createAnimationMixer(
+ model.actors,
+ model.glTFTree.accessors
+ );
+ model.glTFTree.animations.forEach((animation) => {
+ model.animationMixer.addAnimation(animation);
+ });
+ }
+ model.animations = model.glTFTree.animations || [];
+ };
+
+ publicAPI.importLights = () => {
+ // Set up lights
+ model.lights?.forEach((light) => {
+ vtkDebugMacro('Adding light', light);
+ model.renderer.addLight(light);
+ });
+ };
+
+ publicAPI.setCamera = (cameraId) => {
+ const camera = model.cameras.get(cameraId);
+
+ if (!camera) {
+ vtkErrorMacro(`Camera ${cameraId} not found`);
+ return;
+ }
+ vtkDebugMacro('Setting camera', camera);
+ model.renderer.setActiveCamera(camera);
+ };
+
+ publicAPI.switchToVariant = async (variantIndex) => {
+ const promises = Array.from(model.actors).map(async ([nodeId, actor]) => {
+ vtkDebugMacro('Switching to variant', variantIndex, 'for node', nodeId);
+ const variantMappings = model.variantMappings.get(nodeId);
+
+ if (variantMappings) {
+ const mapping = variantMappings.find((m) =>
+ m.variants.includes(variantIndex)
+ );
+ if (mapping) {
+ const variantMaterial = model.glTFTree.materials[mapping.material];
+ await createPropertyFromGLTFMaterial(model, variantMaterial, actor);
+ }
+ }
+ });
+
+ await Promise.all(promises);
+ };
+}
+
+// ----------------------------------------------------------------------------
+// Object factory
+// ----------------------------------------------------------------------------
+
+const DEFAULT_VALUES = {
+ // baseURL: null,
+ // dataAccessHelper: null,
+ // url: null,
+};
+
+// ----------------------------------------------------------------------------
+
+export function extend(publicAPI, model, initialValues = {}) {
+ Object.assign(model, DEFAULT_VALUES, initialValues);
+
+ // Build VTK API
+ macro.obj(publicAPI, model);
+ macro.get(publicAPI, model, [
+ 'url',
+ 'baseURL',
+ 'actors',
+ 'scenes',
+ 'cameras',
+ 'animations',
+ 'animationMixer',
+ 'variants',
+ 'variantMappings',
+ ]);
+ macro.set(publicAPI, model, ['renderer', 'dracoDecoder']);
+ macro.event(publicAPI, model, 'ready');
+
+ // vtkGLTFImporter methods
+ vtkGLTFImporter(publicAPI, model);
+
+ // To support destructuring
+ if (!model.compression) {
+ model.compression = null;
+ }
+ if (!model.progressCallback) {
+ model.progressCallback = null;
+ }
+}
+// ----------------------------------------------------------------------------
+
+export const newInstance = macro.newInstance(extend, 'vtkGLTFImporter');
+
+// ----------------------------------------------------------------------------
+
+export default {
+ extend,
+ newInstance,
+};
diff --git a/Sources/IO/Geometry/index.js b/Sources/IO/Geometry/index.js
index 084317488c5..d83543eb7ed 100644
--- a/Sources/IO/Geometry/index.js
+++ b/Sources/IO/Geometry/index.js
@@ -3,6 +3,7 @@ import vtkPLYReader from './PLYReader';
import vtkDracoReader from './DracoReader';
import vtkSTLWriter from './STLWriter';
import vtkPLYWriter from './PLYWriter';
+import vtkGLTFImporter from './GLTFImporter';
export default {
vtkSTLReader,
@@ -10,4 +11,5 @@ export default {
vtkDracoReader,
vtkSTLWriter,
vtkPLYWriter,
+ vtkGLTFImporter,
};