File size: 2,230 Bytes
a462a68
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1a2f17b
 
 
ea0528a
1a2f17b
ea0528a
a462a68
ea0528a
a462a68
ea0528a
a462a68
ea0528a
a462a68
 
 
 
ea0528a
a462a68
 
1a2f17b
 
a462a68
 
ea0528a
 
 
 
 
a462a68
ea0528a
a462a68
 
ea0528a
 
a462a68
ea0528a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.0.0';

// CONFIG MUST COME FIRST
// transformers.js reads these env flags at model-load time, so they must be
// set before the first pipeline() call below.
env.allowRemoteModels = true;  // fetch model files from the Hugging Face Hub CDN
env.allowLocalModels = false;  // never probe this origin for local model files
env.backends.onnx.wasm.numThreads = 1; // Stick to 1 for stability on mobile
env.backends.onnx.wasm.proxy = false;  // run inference on the main thread, no worker proxy

// Cached DOM handles — assumes the host page defines these four elements.
const status = document.getElementById('status');
const btn = document.getElementById('main-btn');
const input = document.getElementById('chat-input');
const chatBox = document.getElementById('chat-box');
// Text-generation pipeline; populated by init() after download, null until then.
let generator = null;

/**
 * Downloads the quantized model, then wires up the chat UI.
 * Runs once at page load; reports progress and errors via the status element.
 * Fixes vs. original: `do_sample: true` so `temperature` actually takes effect
 * (transformers.js defaults to greedy decoding, which ignores temperature),
 * a re-entrancy guard so a second click cannot start a concurrent generation,
 * and a try/catch/finally around generation so a failure cannot leave the UI
 * stuck on "AI is thinking...".
 */
async function init() {
    try {
        btn.disabled = true;
        status.textContent = "Downloading Model (Stay on page)...";

        generator = await pipeline(
            'text-generation',
            'Xenova/phi-1_5-tiny-onnx',
            {
                device: 'wasm',
                dtype: 'q4', // Critical: Using 4-bit quantization to save RAM
                progress_callback: (d) => {
                    if (d.status === 'progress') {
                        status.textContent = `Loading: ${Math.round(d.progress)}%`;
                    }
                }
            }
        );

        status.textContent = "Ready!";
        btn.textContent = "Send";
        input.disabled = false;
        btn.disabled = false;

        btn.onclick = async () => {
            const userText = input.value.trim();
            if (!userText) return;

            addMessage('user', userText);
            input.value = '';
            status.textContent = "AI is thinking...";
            // Re-entrancy guard: block further sends while generation is in flight.
            btn.disabled = true;
            input.disabled = true;

            try {
                const output = await generator(userText, {
                    max_new_tokens: 30, // Keep this low for mobile
                    do_sample: true,    // required — temperature is ignored under greedy decoding
                    temperature: 0.7
                });

                // The model echoes the prompt; strip it so only the completion is shown.
                addMessage('ai', output[0].generated_text.replace(userText, '').trim());
                status.textContent = "Ready!";
            } catch (err) {
                // Don't leave the UI stuck on "thinking" if generation fails.
                status.textContent = "Error: generation failed (see console).";
                console.error(err);
            } finally {
                btn.disabled = false;
                input.disabled = false;
            }
        };
    } catch (e) {
        // Download/initialization failed — often OOM on mobile, but not always,
        // so report the actual error instead of a hard-coded RAM message.
        status.textContent = `Error: ${e.message}`;
        console.error(e);
    }
}

/**
 * Appends one chat bubble to the transcript and keeps it scrolled to the end.
 * @param {string} sender - Style selector for the bubble ('user' or 'ai' here);
 *     becomes part of the CSS class `msg ${sender}-message`.
 * @param {string} text - Message body; set via textContent, so markup is inert.
 */
function addMessage(sender, text) {
    const bubble = Object.assign(document.createElement('div'), {
        className: `msg ${sender}-message`,
        textContent: text,
    });
    chatBox.appendChild(bubble);
    chatBox.scrollTop = chatBox.scrollHeight;
}

// Start the process
// Fire-and-forget is deliberate: init() is async but handles its own errors
// internally, so the returned promise never rejects.
init();