Spaces:
Running
Running
/**
 * Browser chat UI wrapping the transformers.js text-generation pipeline
 * for the onnx-community/gemma-3-270m-it-ONNX model.
 *
 * Responsibilities: DOM wiring, model lifecycle (load / device switch),
 * streaming generation, and small UI helpers (toasts, progress, errors).
 * Assumes the host page provides the element IDs looked up in
 * initializeElements() and exposes transformers.js on `window.transformers`.
 */
class GemmaChatbot {
  constructor() {
    this.generator = null;        // transformers.js pipeline instance (null until loaded)
    this.isInitialized = false;   // true once the model is ready for input
    this.isGenerating = false;    // guards against overlapping generations
    this.currentDevice = 'cpu';   // 'cpu' or 'webgpu'
    this.messages = [];           // conversation history: { role, content } objects
    this.maxTokens = 1000;        // soft character budget driving the counter colors

    this.initializeElements();
    this.attachEventListeners();
    this.initializeModel();
  }

  /** Cache all DOM elements the chatbot reads or writes. */
  initializeElements() {
    // Input elements
    this.messageInput = document.getElementById('messageInput');
    this.sendButton = document.getElementById('sendButton');
    this.messagesContainer = document.getElementById('messagesContainer');
    this.charCount = document.getElementById('charCount');
    // UI elements
    this.loadingOverlay = document.getElementById('loadingOverlay');
    this.errorModal = document.getElementById('errorModal');
    this.errorMessage = document.getElementById('errorMessage');
    this.retryButton = document.getElementById('retryButton');
    this.deviceToggle = document.getElementById('deviceToggle');
    this.statusText = document.getElementById('statusText');
    this.progressFill = document.getElementById('progressFill');
  }

  /** Wire up all UI event handlers and kick off WebGPU detection. */
  attachEventListeners() {
    // Send message
    this.sendButton.addEventListener('click', () => this.sendMessage());
    // Enter key to send (Ctrl+Enter for new line)
    this.messageInput.addEventListener('keydown', (e) => {
      if (e.key === 'Enter' && !e.ctrlKey) {
        e.preventDefault();
        this.sendMessage();
      }
    });
    // Auto-resize textarea and keep the character counter in sync
    this.messageInput.addEventListener('input', () => {
      this.autoResizeTextarea();
      this.updateCharCount();
    });
    // Device toggle
    this.deviceToggle.addEventListener('click', () => this.toggleDevice());
    // Retry button
    this.retryButton.addEventListener('click', () => this.retryInitialization());
    // WebGPU support detection (fire-and-forget; only updates the status label)
    this.checkWebGPUSupport();
  }

  /**
   * Probe for a WebGPU adapter and surface availability in the status bar.
   * Failure is expected on unsupported browsers, so errors are only logged.
   */
  async checkWebGPUSupport() {
    if (!('gpu' in navigator)) return;
    try {
      const adapter = await navigator.gpu.requestAdapter();
      if (adapter) {
        this.statusText.textContent = 'Ready (GPU available)';
        this.statusText.parentElement.classList.add('ready');
      }
    } catch (error) {
      // Best-effort probe: absence of WebGPU is not an error condition.
      console.log('WebGPU not available');
    }
  }

  /**
   * Load the model pipeline, seed the system prompt, and unlock the input.
   * Shows the error modal (with retry) on failure.
   */
  async initializeModel() {
    try {
      this.updateProgress(10, 'Initializing transformers.js...');
      // Poll until the transformers.js script has attached itself to window.
      while (!window.transformers) {
        await new Promise((resolve) => setTimeout(resolve, 100));
      }
      const { pipeline } = window.transformers;
      this.updateProgress(30, 'Loading Gemma model...');
      this.generator = await pipeline(
        'text-generation',
        'onnx-community/gemma-3-270m-it-ONNX',
        {
          dtype: 'fp32',
          device: this.currentDevice,
        }
      );
      this.updateProgress(90, 'Model loaded successfully!');
      // Start every conversation from a fresh system message.
      this.messages = [
        { role: 'system', content: 'You are a helpful assistant.' },
      ];
      this.isInitialized = true;
      this.updateProgress(100, 'Ready!');
      // Brief delay so the "Ready!" state is visible before the overlay hides.
      setTimeout(() => {
        this.hideLoadingOverlay();
        this.enableInput();
      }, 500);
    } catch (error) {
      console.error('Model initialization failed:', error);
      this.showError('Failed to initialize the AI model. Please check your internet connection and try again.');
    }
  }

  /**
   * Toggle between CPU and WebGPU execution and reload the model.
   * No-op while a generation is in flight. Falls back to CPU if the
   * GPU switch throws; shows a toast when WebGPU is unsupported.
   */
  async toggleDevice() {
    if (this.isGenerating) return;
    const options = this.deviceToggle.querySelectorAll('.device-option');
    const statusCircle = this.statusText.parentElement.querySelector('i');
    if (this.currentDevice === 'cpu') {
      // Try to switch to GPU
      if ('gpu' in navigator) {
        try {
          this.currentDevice = 'webgpu';
          options[0].classList.remove('active');
          options[1].classList.add('active');
          this.statusText.textContent = 'Switching to GPU...';
          statusCircle.style.color = '#ff9800';
          // Reinitialize model with GPU
          await this.reinitializeModel();
        } catch (error) {
          console.log('GPU initialization failed, staying on CPU');
          this.currentDevice = 'cpu';
          statusCircle.style.color = '#4caf50';
          this.statusText.textContent = 'Ready (CPU)';
        }
      } else {
        this.showToast('WebGPU not supported in this browser');
      }
    } else {
      // Switch to CPU
      this.currentDevice = 'cpu';
      options[1].classList.remove('active');
      options[0].classList.add('active');
      this.statusText.textContent = 'Switching to CPU...';
      await this.reinitializeModel();
    }
  }

  /**
   * Rebuild the pipeline on the current device. Input is disabled while
   * loading; on failure the error modal is shown.
   */
  async reinitializeModel() {
    try {
      this.isInitialized = false;
      this.disableInput();
      const { pipeline } = window.transformers;
      this.generator = await pipeline(
        'text-generation',
        'onnx-community/gemma-3-270m-it-ONNX',
        {
          dtype: 'fp32',
          device: this.currentDevice,
        }
      );
      this.isInitialized = true;
      this.enableInput();
      // BUGFIX: the device value is 'webgpu' (set in toggleDevice), not 'gpu';
      // the old check compared against 'gpu' and always reported "CPU".
      const deviceName = this.currentDevice === 'webgpu' ? 'GPU' : 'CPU';
      this.statusText.textContent = `Ready (${deviceName})`;
      this.showToast(`Switched to ${deviceName} execution`);
    } catch (error) {
      console.error('Model reinitialization failed:', error);
      this.showError('Failed to switch execution device. Please try again.');
    }
  }

  /** Read the input box, echo the user message, and trigger a generation. */
  async sendMessage() {
    if (!this.isInitialized || this.isGenerating) return;
    const message = this.messageInput.value.trim();
    if (!message) return;
    // Add user message to UI
    this.addMessage('user', message);
    // Clear input
    this.messageInput.value = '';
    this.autoResizeTextarea();
    this.updateCharCount();
    // Add to conversation history
    this.messages.push({ role: 'user', content: message });
    // Generate response
    await this.generateResponse();
  }

  /**
   * Run the model over the conversation history, streaming tokens into an
   * assistant bubble, then commit the final answer to the history.
   */
  async generateResponse() {
    if (this.isGenerating) return;
    this.isGenerating = true;
    this.disableInput();
    // BUGFIX: declared outside the try block — the old code used `const`
    // inside `try`, so the catch handler hit a ReferenceError.
    let assistantMessageEl = null;
    try {
      // Add assistant message placeholder
      assistantMessageEl = this.addMessage('assistant', '');
      const { TextStreamer } = window.transformers;
      // BUGFIX: TextStreamer's callback_function delivers incremental text
      // chunks, so accumulate them; the old code rendered only the latest
      // chunk, overwriting the message on every token.
      let streamedText = '';
      const streamer = new TextStreamer(this.generator.tokenizer, {
        skip_prompt: true,
        skip_special_tokens: true,
        callback_function: (text) => {
          streamedText += text;
          this.updateStreamingMessage(assistantMessageEl, streamedText);
        },
      });
      // Generate response with streaming
      const output = await this.generator(this.messages, {
        max_new_tokens: 512,
        do_sample: false,
        streamer: streamer,
      });
      // The pipeline returns the full chat; the last entry is the reply.
      const finalResponse = output[0].generated_text.at(-1).content;
      this.messages.push({ role: 'assistant', content: finalResponse });
    } catch (error) {
      console.error('Generation failed:', error);
      if (assistantMessageEl) {
        this.updateStreamingMessage(assistantMessageEl, 'I apologize, but I encountered an error while generating a response. Please try again.');
      }
      this.showToast('Generation failed. Please try again.');
    } finally {
      this.isGenerating = false;
      this.enableInput();
    }
  }

  /**
   * Append a chat bubble for `role` ('user' | 'assistant') containing
   * `content` (HTML-escaped) and return the bubble element.
   */
  addMessage(role, content) {
    const messageEl = document.createElement('div');
    messageEl.className = `message ${role}`;
    const avatar = role === 'user' ? 'fas fa-user' : 'fas fa-robot';
    const roleName = role === 'user' ? 'You' : 'Gemma AI';
    messageEl.innerHTML = `
      <div class="message-avatar">
        <i class="${avatar}"></i>
      </div>
      <div class="message-content">
        <div class="message-header">
          <span class="message-role">${roleName}</span>
        </div>
        <div class="message-text">${this.escapeHtml(content)}</div>
      </div>
    `;
    this.messagesContainer.appendChild(messageEl);
    this.scrollToBottom();
    return messageEl;
  }

  /** Replace a bubble's text with `content` and keep the view pinned to the bottom. */
  updateStreamingMessage(messageEl, content) {
    const messageText = messageEl.querySelector('.message-text');
    // textContent renders the raw text safely — no escape/innerHTML round trip.
    messageText.textContent = content;
    this.scrollToBottom();
  }

  /** Grow the textarea with its content, capped at 120px. */
  autoResizeTextarea() {
    const textarea = this.messageInput;
    textarea.style.height = 'auto';
    textarea.style.height = Math.min(textarea.scrollHeight, 120) + 'px';
  }

  /** Refresh the character counter; warn at 70% and 90% of the budget. */
  updateCharCount() {
    const count = this.messageInput.value.length;
    this.charCount.textContent = count;
    if (count > this.maxTokens * 0.9) {
      this.charCount.style.color = '#f44336';
    } else if (count > this.maxTokens * 0.7) {
      this.charCount.style.color = '#ff9800';
    } else {
      this.charCount.style.color = '';
    }
  }

  /** Re-enable the composer after loading/generation finishes. */
  enableInput() {
    this.messageInput.disabled = false;
    this.sendButton.disabled = false;
    this.messageInput.placeholder = "Type your message here...";
  }

  /** Lock the composer while the model is busy. */
  disableInput() {
    this.messageInput.disabled = true;
    this.sendButton.disabled = true;
    this.messageInput.placeholder = "AI is generating a response...";
  }

  /** Scroll the message list to its newest entry. */
  scrollToBottom() {
    this.messagesContainer.scrollTop = this.messagesContainer.scrollHeight;
  }

  /** Escape `text` for safe interpolation into an innerHTML template. */
  escapeHtml(text) {
    const div = document.createElement('div');
    div.textContent = text;
    return div.innerHTML;
  }

  /** Update the loading bar width (0-100) and the status label. */
  updateProgress(percentage, status) {
    this.progressFill.style.width = `${percentage}%`;
    this.statusText.textContent = status;
  }

  hideLoadingOverlay() {
    this.loadingOverlay.style.display = 'none';
  }

  /** Show the blocking error modal with `message` and hide the loader. */
  showError(message) {
    this.errorMessage.textContent = message;
    this.errorModal.style.display = 'flex';
    this.hideLoadingOverlay();
  }

  hideError() {
    this.errorModal.style.display = 'none';
  }

  /** Retry button handler: dismiss the error and reload the model. */
  retryInitialization() {
    this.hideError();
    this.showLoadingOverlay();
    this.initializeModel();
  }

  showLoadingOverlay() {
    this.loadingOverlay.style.display = 'flex';
    this.updateProgress(0, 'Initializing...');
  }

  /** Show a transient toast for ~3s (CSS drives the fade via the 'show' class). */
  showToast(message) {
    const toast = document.createElement('div');
    toast.className = 'toast';
    toast.textContent = message;
    document.body.appendChild(toast);
    setTimeout(() => toast.classList.add('show'), 100);
    setTimeout(() => {
      toast.classList.remove('show');
      // element.remove() is safe even if the node was already detached,
      // unlike removeChild which would throw.
      setTimeout(() => toast.remove(), 300);
    }, 3000);
  }
}
// Boot the chatbot as soon as the DOM is parsed.
document.addEventListener('DOMContentLoaded', () => new GemmaChatbot());

// Visibility hook: a stub kept as an extension point for pausing/resuming
// background work when the tab is hidden or shown again.
document.addEventListener('visibilitychange', () => {
  if (document.hidden) {
    // Tab went to the background — nothing to pause yet.
  } else {
    // Tab is in the foreground again — nothing to resume yet.
  }
});