File size: 8,271 Bytes
76a6df1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- =============================================
         Lip‑Sync Avatar 
         ---------------------------------------------
         This page displays a 3‑D
         character in front of a looping background
         texture and drive its mouth‑opening morph
         target in real‑time, synchronised with speech
         synthesis.  
         ‑ Technologies used:
           • three.js (WebGL) – core 3‑D engine
           • GLTFLoader           – load the avatar
           • Web Speech API       – text‑to‑speech
           • Standard JS / CSS

         ✅  Place the following assets next to this file
             ├─ avatar.glb   ← your character exported from
             │                 the video, with a morph target
             │                 named "viseme_aa" (or rename in
             │                 code below).
             └─ bg.jpg       ← background image (optional
                               video texture shown later)
         ============================================= -->

    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Lip‑Sync Ark Avatar</title>

    <!-- Page styling inspired by the glass‑look & gradients
         of the uploaded examples → custom properties keep
         things tidy. -->
    <style>
        /* NOTE: the original sheet used U+2011 non-breaking hyphens in
           property and function names (box‑sizing, linear‑gradient, …),
           which makes those declarations invalid and silently ignored.
           All identifiers below use ASCII hyphens. */
        :root {
            --accent1: #6965db; /* primary violet */
            --accent2: #3a86ff; /* secondary blue  */
            --bg-dark: #0f0f1a;
            --text-lite: #e5e5f7;
        }

        * { box-sizing: border-box; margin: 0; padding: 0; }
        html,body { height: 100%; overflow: hidden; font-family: 'Segoe UI', Tahoma, sans-serif; background: var(--bg-dark); color: var(--text-lite); }

        /* Three.js full-screen canvas. Element selector on purpose: the
           <canvas> id spelling is inconsistent elsewhere in the file, and
           there is only one canvas on the page. */
        canvas { position: fixed; inset: 0; z-index: 1; }

        /* UI overlay */
        #ui { position: fixed; left: 0; right: 0; bottom: 2rem; display: flex; justify-content: center; gap: 1rem; z-index: 2; }
        button {
            padding: .8rem 1.6rem; border: none; border-radius: 40px;
            /* fix: gradient referenced undeclared --accent‑1/--accent‑2 */
            background: linear-gradient(100deg, var(--accent1), var(--accent2));
            color: #fff; font-size: 1rem; font-weight: 600; cursor: pointer;
            box-shadow: 0 4px 15px rgba(0,0,0,.25); transition: transform .2s;
        }
        button:hover { transform: translateY(-3px); }
    </style>
</head>
<body>

    <!-- Three.js draws here -->
    <canvas id="three‑canvas"></canvas>

    <!-- Simple UI -->
    <div id="ui">
        <button id="speakBtn">Say it 👉 “Hello I’m your personal assistant”</button>
    </div>

    <!-- Three.js & loader -->
    <!-- Pinned to r147: three removed build/three.min.js and the
         non-module examples/js/* scripts in r148, so the r160 URLs 404. -->
    <script src="https://cdn.jsdelivr.net/npm/three@0.147.0/build/three.min.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/three@0.147.0/examples/js/controls/OrbitControls.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/three@0.147.0/examples/js/loaders/GLTFLoader.js"></script>

    <script>
    /* ==========================================================
       1. Basic scene set‑up
          Renderer, camera, orbit controls, resize handling and
          lighting. Lights are required: GLTFLoader produces PBR
          materials (MeshStandardMaterial) that render black in an
          unlit scene.
    ========================================================== */
    const canvas   = document.getElementById('three‑canvas');
    const renderer = new THREE.WebGLRenderer({ canvas, antialias:true, alpha:true });
    renderer.setPixelRatio(Math.min(window.devicePixelRatio,2));
    renderer.outputEncoding = THREE.sRGBEncoding; // correct color for sRGB/glTF textures
    const scene    = new THREE.Scene();

    // Camera – roughly head height, a few metres back
    const camera = new THREE.PerspectiveCamera(35, window.innerWidth/window.innerHeight, 0.1, 100);
    camera.position.set(0, 1.55, 3.5);

    // Lighting (fix: original scene had none, so the avatar was black)
    scene.add(new THREE.HemisphereLight(0xffffff, 0x333355, 1.0)); // soft sky/ground fill
    const keyLight = new THREE.DirectionalLight(0xffffff, 1.2);
    keyLight.position.set(2, 4, 3);
    scene.add(keyLight);

    // Controls (for debugging – remove on prod)
    const controls = new THREE.OrbitControls(camera, canvas);
    controls.enableDamping = true;

    // Resize handling – keep camera aspect and canvas size in sync
    function onResize(){
        camera.aspect = window.innerWidth / window.innerHeight;
        camera.updateProjectionMatrix();
        renderer.setSize(window.innerWidth, window.innerHeight);
    }
    window.addEventListener('resize', onResize);
    onResize();

    /* ==========================================================
       2. Background – a flat textured quad behind the avatar.
          Swap the texture for a THREE.VideoTexture to get a live
          video backdrop instead of a static image.
    ========================================================== */
    const backgroundTexture = new THREE.TextureLoader().load('bg.jpg', (tex) => {
        tex.encoding = THREE.sRGBEncoding;
    });
    const backgroundQuad = new THREE.Mesh(
        new THREE.PlaneGeometry(16, 9),
        new THREE.MeshBasicMaterial({ map: backgroundTexture })
    );
    backgroundQuad.position.z = -5;    // sit behind the avatar
    backgroundQuad.scale.set(2, 2, 1); // over-scale so it fills the frame
    scene.add(backgroundQuad);

    /* ==========================================================
       3. Avatar loading – expects a morph target named
          "viseme_aa" (common for open‑mouth). Adjust index/name
          below if different.
    ========================================================== */
    let avatar, mouthIndex = null; // index of the mouth-open morph (first mesh found)

    const loader = new THREE.GLTFLoader();
    loader.load('avatar.glb', gltf=>{
        avatar = gltf.scene;
        avatar.traverse(obj=>{
            if (obj.isMesh && obj.morphTargetDictionary) {
                // Try to find a suitable mouth‑open morph on THIS mesh.
                // Fix: the old code checked the stale global mouthIndex,
                // so a mesh WITHOUT the morph could be flagged as mouth
                // once any earlier mesh had set the index.
                const dict = obj.morphTargetDictionary;
                const possible = ['viseme_aa','mouthOpen','jawOpen','vrc.v_morph_aa'];
                for(const key of possible){
                    if(key in dict){
                        if(mouthIndex === null) mouthIndex = dict[key];
                        obj.userData.isMouth = true;
                        break;
                    }
                }
            }
        });

        // Center & scale heuristic – adjust as needed
        const box = new THREE.Box3().setFromObject(avatar);
        const size = new THREE.Vector3(); box.getSize(size);
        avatar.scale.setScalar(1.6/size.y);      // normalize height to ~1.6 units
        box.setFromObject(avatar);               // recompute after scaling
        const center = new THREE.Vector3(); box.getCenter(center);
        // Center on X/Z and lift so the lowest point (feet) sits at y = 0.
        // Fix: the old code subtracted center.y AND min.y, sinking the
        // model below the ground plane.
        avatar.position.set(-center.x, -box.min.y, -center.z);

        scene.add(avatar);
    }, undefined, err=>{
        // Fix: surface load failures instead of failing silently.
        console.error('Failed to load avatar.glb:', err);
    });

    /* ==========================================================
       4. Lip‑sync logic (very lightweight)
          – Uses SpeechSynthesisUtterance and its onboundary
            event (fires at each word).
          – At each word start we trigger a quick mouth‑open
            impulse, which then eases back to closed inside the
            render loop. For higher fidelity, integrate a full
            phoneme‑to‑viseme mapper (Google TTS marks, deepspeech
            etc.).
    ========================================================== */
    const mouthAnim = {
        strength: 0  // 0 = closed, 1 = fully open
    };

    function speak(text){
        if(!window.speechSynthesis) return alert('SpeechSynthesis unsupported');
        // Fix: the Web Speech API queues utterances by default, so
        // repeated clicks piled up; flush anything pending first.
        window.speechSynthesis.cancel();
        const utter = new SpeechSynthesisUtterance(text);
        utter.lang = 'en-US';
        utter.rate = 1;
        utter.pitch = 1;
        // Also kick on start – some engines reportedly never emit word
        // boundary events, leaving the mouth shut. TODO confirm per-browser.
        utter.onstart = () => { mouthAnim.strength = 1; };
        utter.onboundary = ({ name }) => {
            if(name === 'word') {
                // quick open kick; tick() eases it back toward 0
                mouthAnim.strength = 1;
            }
        };
        window.speechSynthesis.speak(utter);
    }

    // UI button
    document.getElementById('speakBtn').addEventListener('click', ()=>{
        speak("Hello I'm your personal assistant");
    });

    /* ==========================================================
       5. Render loop – eases the mouth shut each frame and keeps
          the controls / renderer updated.
    ========================================================== */
    const clock = new THREE.Clock();
    function tick(){
        requestAnimationFrame(tick);
        const delta = clock.getDelta();

        // Exponentially ease the mouth strength back toward "closed".
        mouthAnim.strength = THREE.MathUtils.damp(mouthAnim.strength, 0, 5, delta);

        const mouthReady = Boolean(avatar) && mouthIndex !== null;
        if (mouthReady) {
            avatar.traverse((node) => {
                if (!node.userData.isMouth) return;
                node.morphTargetInfluences[mouthIndex] = mouthAnim.strength;
            });
        }

        controls.update();
        renderer.render(scene, camera);
    }
    tick();
    </script>
</body>
</html>