Three.js


Creative Commons License
This Three.js tutorial is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License
Preamble

This tutorial is about basic and advanced features of Three.js. This JavaScript software library is built on top of the native WebGL API, which is available in modern browsers. Applications have been designed with Three.js ver. 106.

Headlines
Installation and basic execution of Three.js
Rule(s)
Example (HTML)
<script src="js/lib/three.js"></script>
 
Example (JavaScript)
/*
 * Sphere_in_parts.js
 */

'use strict';

window.document.onreadystatechange = () => { // Called *TWO TIMES*: when "interactive" and later on... when "complete"
    const state = window.document.readyState;
    if (state === "interactive") {
        // DOM is parsed: build the scene and start the rendering loop.
        window.console.log(state + ": DOM just loaded...");
        const sphere_in_parts = new Sphere_in_parts();
        sphere_in_parts._animate();
    } else if (state === "complete") {
        // All sub-resources (images, fonts, etc.) have been fetched:
        window.console.log(state + ": All resources just loaded...");
    }
};

// Shared, immutable half-sphere geometries exposed as non-enumerable, read-only globals.
// Back half sphere: radius 1000, 30x30 segments, phi sweep limited to PI (i.e., half a sphere).
Object.defineProperty(window, "Back_sphere_geometry", {value: new THREE.SphereGeometry(1000, 30, 30, 0, Math.PI), enumerable: false, configurable: false, writable: false});
// window.Back_sphere_geometry.computeBoundingSphere();
// Front half sphere: same radius but coarser tessellation (10x10 segments).
Object.defineProperty(window, "Front_sphere_geometry", {value: new THREE.SphereGeometry(1000, 10, 10, 0, Math.PI), enumerable: false, configurable: false, writable: false});
// window.Front_sphere_geometry.computeBoundingSphere();

// Demo app.: sets up the camera, renderer, scene and lighting, then renders on every frame.
class Sphere_in_parts {
    constructor() {
        // NOTE(review): 'innerWidth'/'innerHeight' are not standard properties of
        // 'document.body' (they belong to 'window' and are read-only there); these two
        // assignments appear to be no-ops — presumably meant to size the viewport. TODO confirm.
        window.document.body.innerWidth = 800;
        window.document.body.innerHeight = 600;

        // Perspective camera: 70-degree FOV, viewport aspect ratio, near/far clipping at 0.1/10000:
        this._camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 10000);
        this._camera.position.z = 5000; // By default, camera looks at (0,0,0) from this (new) position...
        this._renderer = new THREE.WebGLRenderer(); // Browser must support WebGL
        this._renderer.setClearColor(new THREE.Color(0xC0C0C0)/* Silver */); // For areas without filmed objects, useful for debugging!
        this._renderer.setPixelRatio((window.devicePixelRatio) ? window.devicePixelRatio : 1); // Retina -> '2' while usually '1'
        this._renderer.setSize(window.innerWidth, window.innerHeight);
        window.document.body.appendChild(this._renderer.domElement); // Type of 'this._renderer.domElement' is 'HTMLCanvasElement'
        this._scene = new THREE.Scene();

        /** Light is MANDATORY for 'THREE.MeshLambertMaterial' and 'THREE.MeshPhongMaterial' */
        this._scene.add(new THREE.AmbientLight(0x404040, 5)); // Soft white light with high intensity

        // Other stuff here...
    }
    // Rendering loop: re-schedules itself on every browser repaint via 'requestAnimationFrame'.
    _animate() {
        window.requestAnimationFrame(this._animate.bind(this));

        // Animation logic...

        this._renderer.render(this._scene, this._camera);
    }
    // Other methods...
}
Fundamentals of Three.js

THREE.Geometry type

Rule(s)
Application(s)
Example
Given front as a blue half sphere, compute mouth_geometry (in pink: raw location of a hypothetical mouth) from scratch.
Listing 1 (copy of a subset of faces from source geometry, i.e., front.geometry)
// Find 'front' in Object3D hierarchy from scene:
        let front = this._scene.getObjectByName("My group").children.find(child => {
            return child.name === this._scene.getObjectByName("My group").name + "FRONT"; // i.e., "My groupFRONT"
        });

        // 'faceVertexUvs[0]' records the UV mapping:
        window.console.assert(front.geometry.faceVertexUvs[0] && Array.isArray(front.geometry.faceVertexUvs[0]) && front.geometry.faceVertexUvs[0].length > 0);

        // Target geometry, filled below with the source faces whose UVs fall in the "mouth" area:
        let mouth_geometry = new THREE.Geometry;

        /** Test A */
//        let extracted_faces = new Array();
        /** End of test A */

        for (let i = 0; i < front.geometry.faces.length; i++) {
            // UV bounding box of face 'i' (each face owns three UV coordinates, each in [0,1]x[0,1]):
            let max_x = Math.max(front.geometry.faceVertexUvs[0][i][0].x, front.geometry.faceVertexUvs[0][i][1].x, front.geometry.faceVertexUvs[0][i][2].x);
            let min_x = Math.min(front.geometry.faceVertexUvs[0][i][0].x, front.geometry.faceVertexUvs[0][i][1].x, front.geometry.faceVertexUvs[0][i][2].x);
            let max_y = Math.max(front.geometry.faceVertexUvs[0][i][0].y, front.geometry.faceVertexUvs[0][i][1].y, front.geometry.faceVertexUvs[0][i][2].y);
            let min_y = Math.min(front.geometry.faceVertexUvs[0][i][0].y, front.geometry.faceVertexUvs[0][i][1].y, front.geometry.faceVertexUvs[0][i][2].y);
// Three.js 'y' axis is bottom-up:
            if (min_x > (1 / 3) && max_x < (2 / 3) && min_y > (1 / 6) && max_y < (2 / 6)) { // Approximative location of a mouth...
                /** Test A */
//                extracted_faces.push(front.geometry.faces[i]);
                /** End of test A */
                // Faces are cloned so that later index rewrites (Listing 2) do not corrupt 'front.geometry':
                mouth_geometry.faces.push(front.geometry.faces[i].clone());
            }
        }
Listing 2 (copy of a subset of vertices from the source geometry and consistent rearrangement of the dependency between faces and vertices in the target geometry, i.e., mouth_geometry)
//        window.alert(mouth_geometry.faces.length + ": " + JSON.stringify(mouth_geometry.faces)); // 4 faces with 'new THREE.SphereGeometry(1000, 10, 10, 0, Math.PI)')
        // Step 1: for each extracted face, collect the three vertices it references in the
        // *source* geometry (a face stores vertex indexes in its 'a', 'b' and 'c' fields):
        let step_1 = mouth_geometry.faces.map(face => {
            // For each face, return a 3-element array in which an element is one of the face's three vertices
            window.console.log(" a: " + face.a + " b: " + face.b + " c: " + face.c);
            return [front.geometry.vertices[face.a], front.geometry.vertices[face.b], front.geometry.vertices[face.c]]; // Caution, vertices are not cloned!
        });
//        window.alert(step_1.length + ": " + JSON.stringify(step_1)); // 4

        // Step 2: flatten the array of vertex triples into a single flat vertex list:
        let step_2 = step_1.reduce(function (previous, current) {
            return previous.concat(current); // Array merging that keeps duplicates...
        }, []); // '[]' required as initial value, otherwise bug: 'reduce of empty array with no initial value'...
//        window.alert(step_2.length + ": " + JSON.stringify(step_2)); // 4 faces x 3 vertices = 12 vertices

        mouth_geometry.vertices = Array.from(new Set(step_2)); // Step 3: duplicates are removed... ('Set' compares object identity, hence the deliberately uncloned vertices above)
//        window.alert(mouth_geometry.vertices.length + ": " + JSON.stringify(mouth_geometry.vertices)); // 6 vertices

        // Step 4: cloned faces of mouth must point to safe indexes in 'mouth_geometry.vertices':
        for (let i = 0; i < mouth_geometry.faces.length; i++) {
            mouth_geometry.faces[i].a = mouth_geometry.vertices.indexOf(front.geometry.vertices[mouth_geometry.faces[i].a]);
            mouth_geometry.faces[i].b = mouth_geometry.vertices.indexOf(front.geometry.vertices[mouth_geometry.faces[i].b]);
            mouth_geometry.faces[i].c = mouth_geometry.vertices.indexOf(front.geometry.vertices[mouth_geometry.faces[i].c]);
            window.console.log(" a: " + mouth_geometry.faces[i].a + " b: " + mouth_geometry.faces[i].b + " c: " + mouth_geometry.faces[i].c);
        }

THREE.Object3D type

Rule(s)
Example
window.console.assert(my_Object3D.up.y === 1); // By default: 'up' type is 'THREE.Vector3' with '(0,1,0)' value...
my_camera.lookAt(my_Object3D.position);

THREE.Group type

Rule(s)
Example
// A 'THREE.Group' gathers related meshes so they can be transformed/looked up together.
let my_group = new THREE.Group();
my_group.name = "My group"; // Used by 'getObjectByName' in scenes...
// my_group.visible = false; // 'visible' is a Boolean attribute inherited from 'THREE.Object3D'
this._scene.add(my_group);
…
// Mesh built from the geometry extracted in the listings above:
let mouth = new THREE.Mesh(this._extract_mouth_geometry(), new THREE.MeshBasicMaterial({
    color: new THREE.Color(0xFFC0CB) // Pink
}));
mouth.name = my_group.name + "MOUTH"; // Used by 'getObjectByName' in scenes...
// mouth.visible = false; // 'visible' is a Boolean attribute inherited from 'THREE.Object3D'
my_group.add(mouth);

THREE.Mesh and THREE.Material types

Types of materials http://blog.cjgammon.com/threejs-materials
Rule(s)
Example
// Back half sphere: Lambert (matte) shading, visible from both sides.
let back = new THREE.Mesh(window.Back_sphere_geometry, new THREE.MeshLambertMaterial({ //Light required!
    color: new THREE.Color(0x00FF00), // Green
    side: THREE.DoubleSide
    }));
back.rotateY(Math.PI); // Turn the half sphere around so it closes the back of the scene
back.name = my_group.name + "BACK"; // Used by 'getObjectByName' in scenes...
// back.visible = false; // 'visible' is a Boolean attribute inherited from 'THREE.Object3D'
my_group.add(back);

// Front half sphere: Phong (shiny) shading.
let front = new THREE.Mesh(window.Front_sphere_geometry, new THREE.MeshPhongMaterial({ //Light required!
    blending: THREE.NormalBlending, // Default
    color: new THREE.Color(0x0000FF), // Blue
//            morphTargets: true,
//            opacity: 0.5,
//            transparent: true
//            vertexColors: THREE.FaceColors // For 'Test A'
    }));
front.name = my_group.name + "FRONT"; // Used by 'getObjectByName' in scenes...
// front.visible = false; // 'visible' is a Boolean attribute inherited from 'THREE.Object3D'
my_group.add(front);
Exercise
The purpose of this exercise is the creation of the My house app. with, for instance, THREE.BoxGeometry (here…), THREE.TetrahedronGeometry (here…) or, THREE.PlaneGeometry (here…) objects.
Create a door and two windows on the house's front based on the mouth_geometry principle above.
Note that THREE.ExtrudeGeometry (here…) may also be used for constructing the house in one-shot way!
UV mapping is the process of coloring faces based on a well-defined distribution of texture pieces (a.k.a. “texels”) on materials (map field).

Compute UV mapping

Rule(s)
Application(s)
Example
Compute UV mapping for mouth_geometry in Sphere in parts app. above.
Listing
// Step 5: compute UV mapping for mouth geometry:
// Step 5: compute UV mapping for mouth geometry:
// Each UV is the vertex position normalized against the geometry's x/y bounding box,
// so the texture is stretched exactly over the mouth area.
mouth_geometry.computeBoundingBox(); // '.boundingBox' is 'null' by default!
let x_min = mouth_geometry.boundingBox.min.x;
let x_max = mouth_geometry.boundingBox.max.x;
let y_min = mouth_geometry.boundingBox.min.y;
let y_max = mouth_geometry.boundingBox.max.y;
// NOTE(review): assumes the bounding box is non-degenerate (x_max > x_min and
// y_max > y_min); a flat geometry would yield division by zero, hence NaN UVs. TODO confirm.
for (let j = 0; j < mouth_geometry.faces.length; j++) {
    let x_a = mouth_geometry.vertices[mouth_geometry.faces[j].a].x;
    let y_a = mouth_geometry.vertices[mouth_geometry.faces[j].a].y;
    let x_b = mouth_geometry.vertices[mouth_geometry.faces[j].b].x;
    let y_b = mouth_geometry.vertices[mouth_geometry.faces[j].b].y;
    let x_c = mouth_geometry.vertices[mouth_geometry.faces[j].c].x;
    let y_c = mouth_geometry.vertices[mouth_geometry.faces[j].c].y;
    // One 'THREE.Vector2' per face corner, in the same a/b/c order as the face itself:
    mouth_geometry.faceVertexUvs[0][j] = [
        new THREE.Vector2((x_a - x_min) / (x_max - x_min), (y_a - y_min) / (y_max - y_min)),
        new THREE.Vector2((x_b - x_min) / (x_max - x_min), (y_b - y_min) / (y_max - y_min)),
        new THREE.Vector2((x_c - x_min) / (x_max - x_min), (y_c - y_min) / (y_max - y_min))
    ];
}

Apply textures

Rule(s)
Example
_mouth_texture(flag, image_URL) { // Add or remove texture from flag value...
    let mouth = this._scene.getObjectByName("My group").children.find(child => {
        return child.name === this._scene.getObjectByName("My group").name + "MOUTH";
    });
    if (flag)
        new THREE.TextureLoader().load(image_URL, texture => { // Performance improvement: instead of load, record texture at next call...
            texture.minFilter = THREE.LinearFilter;
//          mouth.material.blending = THREE.NoBlending; // How to "mix" material color (if any) with just added texture? 
            mouth.material.map = texture;
        });
    else
        mouth.material.map = null;
    mouth.material.needsUpdate = true; // Tell WebGL to re-code new look...
}
Rule(s)
Application(s)
Example
_create_3D_bus(image, image_URL) {
    if (!(image !== undefined && image !== null && image instanceof Image && image.complete))
            throw("Abnormal situation...");
    const canvas = window.document.createElement('canvas');
    canvas.width = image.width;
    canvas.height = image.height;
    canvas.getContext('2d').drawImage(image, 0, 0, image.width, image.height);
    const texture = new THREE.CanvasTexture(canvas);
    texture.wrapS = THREE.RepeatWrapping;
    texture.wrapT = THREE.RepeatWrapping;
    texture.repeat.set(2, 2);
    texture.minFilter = THREE.LinearFilter;
    const bus = new THREE.Mesh(new THREE.SphereGeometry(600, 0, 0, Math.PI / 4, Math.PI / 2, Math.PI / 4, Math.PI / 2), new THREE.MeshBasicMaterial({
        map: texture,
        opacity: 1,
        side: THREE.DoubleSide,
        transparent: true
    }));
    bus.name = image_URL;
    this._scene.add(bus);
}
Exercise
The purpose of this exercise is the use of the Drag & Drop JavaScript API in order to dynamically inject a new texture on the bus in the Catch the bus app. or in the My house app.
This section copes with more subtle geometries as Three.js reusable types. Beyond, custom geometries as outputs of tools like Blender are the WebGL panacea!

THREE.TextGeometry as example of THREE.ExtrudeGeometry

Rule(s)
Example
Catch the bus app. above owns a 3D text object built from several data in the app.
Listing
_create_3D_text() {
    new Promise(get => {
        new THREE.FontLoader().load('./fonts/helvetiker_regular.typeface.json', font => {
            get(new THREE.TextGeometry(`${this._count_down}` + " sec. to catch the bus...", {
                font: font,
                size: 5,
                height: 2,
                curveSegments: 20,
                bevelEnabled: false, // "biseau" in French!
                bevelThickness: 5,
                bevelSize: 5,
                bevelSegments: 5
            }));
        });
    }).then((text_geometry) => {
        text_geometry.computeBoundingBox();
        text_geometry.center();
        const text = new THREE.Mesh(text_geometry, new THREE.MeshBasicMaterial({
            color: new THREE.Color(0xFF0000) // Red
        }));
        text.name = `${this._count_down}`;
        text.scale.multiplyScalar(10);
        this._scene.add(text);
    });
}

Custom geometries

Rule(s)

Before preprocessing

After preprocessing

Application(s)
Example
// Loads a legacy-JSON lips model, prunes some of its faces, then starts the rendering
// loop once a custom "ready" event fires. (Elided parts of the tutorial appear as '…'.)
class Lips_geometry_processing {
    constructor() {
       …
        // The animation loop only starts once the geometry has been loaded and preprocessed:
        window.addEventListener("Lips geometry is ready...", this._animate.bind(this), false);
        (new THREE.LegacyJSONLoader()).load('models/72707_Mouth/Mouth.json', this._lips_geometry.bind(this)); // "Lips geometry is ready..." is sent when finished
        …
    }
    // Rendering loop: spins the lips mesh around the x axis on every frame.
    _animate() {
        window.requestAnimationFrame(this._animate.bind(this));

        this._scene.getObjectByName("Lips geometry").rotation.x += 0.01;
        this._renderer.render(this._scene, this._camera);
    }
    // Loader callback: checks the model's expected structure, removes unwanted faces,
    // then signals readiness via a DOM event.
    _lips_geometry(geometry) {
        geometry.name = 'models/72707_Mouth/Mouth.json'; // Loading 'models/72707_Mouth/Mouth.json' with predefined features
        // These asserts pin the designed model's exact structure (they would fail on another model):
        window.console.assert(geometry.vertices.length === 3052);
        window.console.assert(geometry.faces.length === 5952);
        window.console.assert(geometry.faceVertexUvs.length === 1);
        window.console.assert(geometry.faceVertexUvs[0].length === 0); // Only one (empty) UV layer at position '0'
        window.console.assert(geometry.morphTargets.length === 0); // No morphing yet

        /** Erasure of useless geometry (faces): */
        // Face index ranges determined empirically from the designed model:
        geometry.faces.splice(5600, 5664 - 5600);
        geometry.faces.splice(5440, 5472 - 5440);

        // Etc.

        // Ready:
        window.dispatchEvent(new Event("Lips geometry is ready..."));
    }
}

Morphing and animation

Rule(s)
  • Three.js offers the possibility of animating 3D objects based on morph and skeletal animations. Morphing relies on the computation (in tools or in app.) of geometry deformation and, later on, playing this deformation from initial position to intermediate or extreme positions (negative values are allowed). Note: from Three.js ver. 100, example just below does not work!
Example (compute morphing)
_morphing(lips_geometry) {
// Step 1: overall geometry must be cloned:
    let lips_morphing = []; // Empty morphing
    lips_geometry.vertices.forEach((vertex, index) => {
        let vertex_clone = vertex.clone();
        lips_morphing.push(vertex_clone);
        if (lips_geometry.faces.find((face, index_) => {
            return (face.a === index || face.b === index || face.c === index) && index_ < 1920 / 2; // Outermost upper lips...
        }))
            vertex_clone.y += 1; // Deformation
    });
// Step 2: morphing is registered ('morphTargets: true' required in material!):
    lips_geometry.morphTargets[0] = {name: "lips_morphing)", vertices: lips_morphing};
}
Example (assign morphing to lips and play)
// Morphing ('morphTargets' in 'lips.geometry' must be setup *BEFORE* lips' creation.
// Otherwise, 'morphTargetInfluences' in lips is 'undefined'):
this._morphing(geometry); // Compute morphing

const lips = new THREE.Mesh(geometry, new THREE.MeshBasicMaterial({
    color: new THREE.Color("rgb(255,255,255)"), // White
    morphTargets: true,
    side: THREE.DoubleSide,
    vertexColors: THREE.FaceColors
}));
lips.name = "Lips";
lips.scale.set(10, 10, 10); // Requires greater size...
this._scene.add(lips);

// Step 3: morphing is associated with animation library:        
// 'this._tween' oscillates between 0 and 0.1; 'onChange' copies it to the morph
// influence on every tween tick, which drives the deformation back and forth.
this._tween = 0;
createjs.Tween.get(this, {onChange: function () { // https://www.createjs.com/tweenjs
    lips.morphTargetInfluences[0] = this._tween;
}.bind(this), loop: true, paused: false})
.to({_tween: 0.1}, 1000, createjs.Ease.linear)
.to({_tween: 0}, 300, createjs.Ease.linear);
Exercise
The purpose of this exercise is the completion of the extant deformation in order to imitate a kiss in the Lips geometry processing app.

THREE.BufferGeometry and THREE.BufferAttribute types

Rule(s)
Application(s)
Example (reuse of a Three.js THREE.BufferGeometry-based geometry)
 // 'PlaneBufferGeometry(width, height, widthSegments, heightSegments)'
let my_geometry = new THREE.PlaneBufferGeometry(400, 300, 1, 1);
// 'positions' has 12 elements if 'widthSegments === 1' and 'heightSegments === 1', i.e., 4 vertices = 12 / 3 (x, y, z per vertex)
let positions = my_geometry.attributes.position.array;
window.console.assert(positions instanceof Float32Array);
window.console.log(JSON.stringify(positions)); // {"0":-200,"1":150,"2":0,"3":200,"4":150,"5":0,"6":-200,"7":-150,"8":0,"9":200,"10":-150,"11":0}
Example (definition of colors for a custom THREE.BufferGeometry-based geometry)
// Coloring vertices: three pink shades painted over contiguous vertex index ranges.
const pink = new THREE.Color("rgb(255,192,203)");
const hotpink = new THREE.Color("rgb(255,105,180)");
const deeppink = new THREE.Color("rgb(255,20,147)");
let colors = new Float32Array(child.geometry.attributes.position.array.length);
child.geometry.addAttribute('color', new THREE.BufferAttribute(colors, 3));
// [begin, end[ vertex ranges; all bounds lie within 'child.geometry.attributes.color.count':
const shaded_ranges = [[0, 5760, pink], [5760, 7488, hotpink], [7488, 9216, deeppink]];
for (const [begin, end, shade] of shaded_ranges) {
    for (let vertex = begin; vertex < end; vertex++) {
        colors[vertex * 3] = shade.r;
        colors[vertex * 3 + 1] = shade.g;
        colors[vertex * 3 + 2] = shade.b;
    }
}

Morphing and animation

Rule(s)
  • Morphing for a THREE.BufferGeometry-based geometry requires the manipulation of THREE.BufferAttribute objects.
Example (compute morphing)
_morphing(lips) {
// Purpose: BufferGeometry counterpart of the morphing example above — clone the
// 'position' attribute, deform it, register it as morph target 0 and animate it.
// Clone the 'BufferAttribute':
let position = new THREE.BufferAttribute(Float32Array.from(lips.geometry.attributes.position.array), lips.geometry.attributes.position.itemSize);
lips.geometry.morphAttributes.position = [];
lips.geometry.morphAttributes.position.push(position);

// Deformation: shift the first 5760/2 vertices up by 1 (outermost upper lips):
for (let i = 0; i < 5760 / 2; i++) {
    position.setY(i, position.getY(i) + 1);
}
// Other deformations here...

lips.updateMorphTargets(); // It *NO LONGER* works with 'THREE.Geometry'!
lips.morphTargetInfluences[0] = 0; // Start from the undeformed pose

// Tween '_tween' between 0 and 0.1 and mirror it into the morph influence on each tick:
createjs.Tween.get(this, {onChange: function () {
    lips.morphTargetInfluences[0] = this._tween;
}.bind(this), loop: true, paused: false})
    .to({_tween: 0.1}, 1000, createjs.Ease.linear)
    .to({_tween: 0}, 300, createjs.Ease.linear);
}

UV mapping

Rule(s)
  • UV mapping for a THREE.BufferGeometry-based geometry requires the manipulation of THREE.BufferAttribute objects.
Example (compute UV mapping based on vertices -THREE.BufferGeometry object IS NOT indexed-)
_UV_mapping(lips) { // https://stackoverflow.com/questions/19504337/three-js-buffergeometry-with-texture-coordinates
    window.console.assert(lips.geometry.attributes.position.count === 12672); // 12672 edges
// By definition:
    window.console.assert(lips.geometry.attributes.position.count === lips.geometry.attributes.position.array.length / 3);
// Each vertex has one uv coordinate for texture mapping:
    let uvs = new Float32Array(lips.geometry.attributes.position.count * 2);
    lips.geometry.addAttribute('uv', new THREE.BufferAttribute(uvs, 2));

// https://stackoverflow.com/questions/36730365/how-can-i-add-faces-to-an-indexed-three-buffergeometry
// Faces require manuel def.:
//        let indices = new Uint8Array(?)
//        lips.geometry.setIndex(new THREE.BufferAttribute(indices, 1));

    lips.geometry.computeBoundingBox();
    let x_min = lips.geometry.boundingBox.min.x;
    let x_max = lips.geometry.boundingBox.max.x;
    let y_min = lips.geometry.boundingBox.min.y;
    let y_max = lips.geometry.boundingBox.max.y;
    for (let i = 0; i < lips.geometry.attributes.position.count; i++) {
        let x = lips.geometry.attributes.position.array[i * 2];
        let y = lips.geometry.attributes.position.array[i * 2 + 1];
        uvs[ i * 2 ] = (x - x_min) / (x_max - x_min);
        uvs[ i * 2 + 1 ] = (y - y_min) / (y_max - y_min);
    }
}

Merging and simplification

Rule(s)
  • Custom geometries come with unrefined structure designed in tools like Blender. Three.js supports off-library facilities (typically, THREE.BufferGeometryUtils for THREE.BufferGeometry) that allow the merging of several geometries into one. Moreover, custom geometries may benefit from being simplified in drastically reducing their number of vertices and thus faces.
Application(s)
  • Woman_head_three_js.zip 
Example
_woman_head_as_GLTF_format(ready, head) { // Unrefined Blender model from https://www.blendswap.com/blend/6855
    // Purpose: harvest every mesh geometry from a loaded GLTF scene, merge them into one
    // BufferGeometry, simplify it, then call 'ready' with a completion message.
    const geometries = new Array();
    const meshes = new Array();
    head.scene.traverse(child => {
        if (child.isMesh) {
            window.console.assert(child.geometry.isBufferGeometry);
            geometries.push(child.geometry);
            meshes.push(child);
        }
        if (child.type === 'Scene') {
            window.console.assert(child.name === child.type); // For 'getObjectByName' in animation loop...
            this._scene = child;
        }
    });

    window.console.assert(this._scene !== null);
    this._scene.children.forEach(object => {
        window.console.log(object.name + ' with type: ' + object.type); // 'Camera'-'Object3D' 'Cube'-'Group'
    });
    this._scene.children = []; // Empty scene...
    this._scene.add(new THREE.AmbientLight(0xFFFFFF, 5));

    // Source materials are no longer needed once the geometries have been harvested:
    meshes.forEach(mesh => {
        window.console.log(mesh.name + ' with type: ' + mesh.type); // 4 elements as part of 'Cube'
        mesh.material.dispose();
    });

// Create unified geometry:
    let geometry = THREE.BufferGeometryUtils.mergeBufferGeometries(geometries, false); // External lib. required
    window.console.assert(geometry.attributes.position.count === 139216); // Too big!
// Discard source geometries:
    geometries.forEach(geometry => {
        geometry.dispose();
    });
    // NOTE(review): 'mergeVertices' *returns* a new geometry rather than mutating its
    // argument, so the result is discarded here — which explains the "No effect?" below;
    // presumably 'geometry = ...' was intended, but fixing it would change the vertex
    // count pinned by the assert further down. TODO confirm.
    THREE.BufferGeometryUtils.mergeVertices(geometry); // No effect?
// Simplify geometry (have to wait a couple of minutes!):
    geometry = (new THREE.SimplifyModifier()).modify(geometry, Math.floor(geometry.attributes.position.count * 0.75)); // External lib. required
    window.console.assert(geometry.attributes.position.count === 32204); // Not too big!
    …
    ready("Woman head as GLTF ready");
}
Three.js offers 3D objects' “loaders”, which enable the load of any model in any format coming, for instance, from Blender. “Exporters” are “loaders” counterparts.

Material Template Library (MTL)

Rule(s)
Application(s)
Example (HTML)
<script src="js/three.js"></script>
<script src="js/OBJLoader.js"></script> <!--Requires three.js-->
<script src="js/MTL_loader.js"></script> <!--Requires three.js-->
Example (JavaScript)
// Load materials (MTL) first, then the geometry (OBJ) that uses them.
// Note: the outer callback is curried via '.bind(this, 'LouiseChopin')' (last line),
// so 'name' receives 'LouiseChopin' and 'materials' receives the loaded material set.
(new THREE.MTLLoader).setMaterialOptions({side: THREE.DoubleSide}).setPath('models/').load('LC2.mtl', function (name, materials) {
    materials.preload();
    (new THREE.OBJLoader()).setMaterials(materials).setPath('models/').load('LC2.obj', function (model) {
        window.console.assert(model.isObject3D);

//      model.name = name; // For 'this._scene.getObjectByName'
//      this._scene.add(model); // Parent

        model.traverse(child => { // Model owns only one child...
        if (child instanceof THREE.Mesh) {
            window.console.assert(child.material.length === 16); // Caution: this results from designed model specificities!
            window.console.assert(child.material[0].type === 'MeshPhongMaterial');
            child.material[0].color.set(0xFFFF00); // Yellow
            child.material[0].wireframe = true;
//          child.material[0].needsUpdate = true; // No need before scene insertion...
//          Etc.

            child.name = name; // For 'this._scene.getObjectByName'
            this._scene.add(child); // Child instead...
        }
    });
// Ready:
        window.dispatchEvent(new Event("Model is ready..."));
    }.bind(this));
}.bind(this, 'LouiseChopin'));
Exercise
The purpose of this exercise is the processing of the Man.obj and Woman.obj models in order to simply apply some texture.
Three.js THREE.Raycaster class is the means for picking 3D objects from an origin (THREE.Vector3) towards a direction (a normalized THREE.Vector3 object).

Picking 3D objects

Rule(s)
Application(s)
Model(s)
Example
// Set up 'front' morphing:
// Morph target = clone of 'front''s positions; each vertex is then projected onto 'head'
// by casting a ray from the vertex towards the scene center.
const morphing = new THREE.BufferAttribute(Float32Array.from(front.geometry.attributes.position.array), front.geometry.attributes.position.itemSize);
front.geometry.morphAttributes.position = [];
front.geometry.morphAttributes.position.push(morphing);
// Compute morphing:
// 'direction' and 'origin' are reused across iterations to avoid per-vertex allocations:
const direction = new THREE.Vector3(0, 0, 0);
const origin = new THREE.Vector3(0, 0, 0);
let position = front.geometry.getAttribute('position').array;
// 'front' is designed such that vertex number is close to half of '2_Head_sculpt_retopo_mesh', i.e., 5784:
window.console.assert(front.geometry.attributes.position.count === 2829);
const intersection = new Array();
const raycaster = new THREE.Raycaster();
for (let i = 0; i < front.geometry.attributes.position.count; i++) {
    direction.set(position[ i * 3 ], position[ i * 3 + 1 ], position[ i * 3 + 2 ]);
    direction.negate().normalize(); // To scene center
    // NOTE(review): a vertex located exactly at the origin would give a zero-length
    // direction, and 'normalize()' of a zero vector yields NaN components — presumably
    // impossible for this model. TODO confirm.
    origin.set(position[ i * 3 ], position[ i * 3 + 1 ], position[ i * 3 + 2 ]);

    raycaster.set(origin, direction); // From point of 'front' geometry towards scene center as direction...
/** DEBUG */
//            scene.add(new THREE.ArrowHelper(direction, origin));
/** End of DEBUG */
    intersection.length = 0; // Clear...
    raycaster.intersectObject(head, false, intersection);
            /* Returned by 'intersectObject':
             [ { distance, point, face, faceIndex, object }, ... ]
             
             distance – distance between the origin of the ray and the intersection
             point – point of intersection, in world coordinates
             face – intersected face
             faceIndex – index of the intersected face
             object – the intersected object
             */
    if (intersection.length > 0) // 'intersection[0].point' is the closest point:
        morphing.setXYZ(i, intersection[0].point.x, intersection[0].point.y, intersection[0].point.z);
    else {
        /** DEBUG */
        // Visualize rays that missed 'head' (red arrows) to diagnose the model:
        scene.add(new THREE.ArrowHelper(direction, origin, 10, 0xFF0000));
        window.console.log("origin " + JSON.stringify(origin) + " direction " + JSON.stringify(direction));
        /** End of DEBUG */
    }
}
front.updateMorphTargets();
Exercise
The purpose of this exercise is the processing of a woman head (Blender format) as done within the example of the man head.

Picking from camera

Advanced texturing is the possibility of playing with material attributes, 'map' and siblings in particular for enhanced texturing.
Rule(s)
Example (initialization)
Rule(s)
Example (initialization)
Lights are necessary for shiny materials, and more generally lights simulate different kinds of lights (daylight, spotlight, etc.). Having shadow is the direct benefit of lights, but it is a question of tradeoff between nice effects and performance. Three.js light types are detailed here… or there
Rule(s)
Example (initialization)
// Directional light plus an optional helper ('DLH' toggles debug visualization):
this._directional_light = new THREE.DirectionalLight(0xFFFF00, 0.5); // Yellow
this._scene.add(this._directional_light);
if (DLH) { // https://stackoverflow.com/questions/32950023/three-js-directionallighthelper-when-light-is-attached-to-camera
    let dlh = new THREE.DirectionalLightHelper(this._directional_light, 100, this._directional_light.color.getHex());
    dlh.name = 'dlh'; // For 'this._scene.getObjectByName('dlh');'
    // 'DLH_MARKER' tracks the light's position (see the rendering loop) and shares its color:
    DLH_MARKER.material.color.setHex(this._directional_light.color.getHex());
    this._scene.add(DLH_MARKER);
    this._scene.add(dlh);
}
Example (rendering loop)
// Each frame: the light follows the camera and aims at the focused character.
this._directional_light.position.copy(this._perspective_camera.position);
this._directional_light.target = this._noLanguageCharacters_meshes[this._index_of_noLanguageCharacter_has_focus];
if (DLH) {
    DLH_MARKER.position.copy(this._directional_light.position);
// By default, helpers don't update their matrix:
//                                window.console.assert(this._scene.getObjectByName('dlh').matrixAutoUpdate === false);
    this._scene.getObjectByName('dlh').update(); // Updates the helper to match the position and direction of the 'DirectionalLight' object being visualized.
}
Exercise
The purpose of this exercise is the introduction of a directional light and a hemisphere light in the Sphere in parts app. Each light has a helper and is ruled through dat.gui (color, intensity, even position and illumination direction).
Cameras are divided between perspective and orthographic cameras. Three.js camera types are detailed here
Rule(s)
Example (initialization)
xxx
Example (rendering loop)
yyy
Rule(s)
Example (initialization)
xxx
Example (rendering loop)
yyy
Controls are facilities that automate keyboard and mouse interactions.
Rule(s)
Example (initialization)
// Using Three.js common controls (second argument is important because, when absent, Three.js conflicts with dat.gui):
this._controls = new THREE.OrbitControls(this._perspective_camera, this._renderer.domElement);
Example (rendering loop)
// This is only useful whether 'this._perspective_camera' has changed of position, rotation, frustum, zoom…:
this._controls.update();
Physics encompasses the ability of moving 3D objects as objects in real life that are subject to gravity, damping, friction, bounce…
Rule(s)
Example
SHADER https://stackoverflow.com/questions/16287547/multiple-transparent-textures-on-the-same-mesh-face-in-three-js
Draw call number: https://discourse.threejs.org/t/where-can-i-see-the-number-of-draw-calls-per-frame/4311/2
Rule(s)
Example
renderer.info.render.calls