akhaliq HF Staff committed on
Commit
26ba43f
·
verified ·
1 Parent(s): d0d9152

Update index.js

Browse files
Files changed (1) hide show
  1. index.js +87 -45
index.js CHANGED
@@ -1,11 +1,12 @@
1
  import { pipeline, TextStreamer } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.7.3';
2
 
3
- class Chatbot {
4
  constructor() {
5
  this.generator = null;
6
  this.messages = [];
7
  this.isProcessing = false;
8
  this.useWebGPU = false;
 
9
  this.initElements();
10
  this.initEventListeners();
11
  this.initModel();
@@ -50,7 +51,6 @@ class Chatbot {
50
  }
51
  });
52
 
53
- // Check WebGPU support
54
  this.checkWebGPUSupport();
55
  }
56
 
@@ -77,6 +77,9 @@ class Chatbot {
77
  updateCharCount() {
78
  const count = this.userInput.value.length;
79
  this.charCount.textContent = `${count} / 1000`;
 
 
 
80
  }
81
 
82
  autoResize() {
@@ -141,60 +144,77 @@ class Chatbot {
141
  const responseContainer = this.addMessage('', 'assistant', true);
142
  const messageContent = responseContainer.querySelector('.message-content');
143
 
144
- // Custom streamer to update UI
145
- const streamer = {
146
- tokenizer: this.generator.tokenizer,
 
 
 
 
 
 
 
 
147
  skip_prompt: true,
148
  skip_special_tokens: true,
149
- text_cache: '',
150
- put: function(value) {
151
- const decoded = this.tokenizer.decode(value[0], {
152
- skip_special_tokens: this.skip_special_tokens
153
- });
154
- if (this.skip_prompt && !this.started) {
155
- this.started = true;
156
- return;
157
- }
158
- this.text_cache += decoded;
159
- // Only update when we have complete words (space or punctuation)
160
- if (/[.!?;:\s\n]$/.test(this.text_cache)) {
161
- messageContent.textContent += this.text_cache;
162
- this.scrollToBottom();
163
- this.text_cache = '';
164
  }
165
- },
166
- end: function() {
167
- if (this.text_cache) {
168
- messageContent.textContent += this.text_cache;
169
- this.scrollToBottom();
170
- this.text_cache = '';
171
- }
172
- },
173
- on_finalized_text: function(text, stream_end) {
174
  messageContent.textContent += text;
175
  this.scrollToBottom();
176
- },
177
- scrollToBottom: () => this.scrollToBottom()
178
- };
 
 
 
 
179
 
180
- // Generate response
181
- const output = await this.generator(this.messages, {
182
- max_new_tokens: 500,
183
- do_sample: false,
184
- streamer: streamer,
 
 
 
185
  });
186
 
187
- const assistantMessage = output[0].generated_text.at(-1).content;
188
- this.messages.push({ role: "assistant", content: assistantMessage });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
189
 
190
  } catch (error) {
191
  console.error('Error generating response:', error);
192
- this.addMessage('Sorry, I encountered an error. Please try again.', 'error');
 
 
 
 
 
193
  } finally {
194
  this.loadingIndicator.classList.add('hidden');
195
  this.isProcessing = false;
196
  this.sendButton.disabled = false;
 
197
  this.userInput.focus();
 
198
  }
199
  }
200
 
@@ -226,32 +246,54 @@ class Chatbot {
226
  }
227
 
228
  scrollToBottom() {
229
- this.chatMessages.scrollTop = this.chatMessages.scrollHeight;
 
 
230
  }
231
 
232
  clearChat() {
 
 
 
 
 
233
  this.messages = [];
234
  this.chatMessages.innerHTML = `
235
  <div class="welcome-message">
236
  <div class="welcome-icon">🤖</div>
237
  <h2>Welcome to AI Assistant</h2>
238
- <p>I'm powered by MobileLLM, a lightweight language model. Ask me anything!</p>
239
  </div>
240
  `;
 
241
  }
242
 
243
  showNotification(message, type = 'info') {
244
  const notification = document.createElement('div');
245
  notification.className = `notification ${type}`;
246
  notification.textContent = message;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
247
  document.body.appendChild(notification);
248
 
249
  setTimeout(() => {
250
- notification.classList.add('show');
251
  }, 10);
252
 
253
  setTimeout(() => {
254
- notification.classList.remove('show');
255
  setTimeout(() => notification.remove(), 300);
256
  }, 3000);
257
  }
@@ -259,5 +301,5 @@ class Chatbot {
259
 
260
  // Initialize chatbot when DOM is ready
261
  document.addEventListener('DOMContentLoaded', () => {
262
- new Chatbot();
263
  });
 
1
  import { pipeline, TextStreamer } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.7.3';
2
 
3
+ class StreamingChatbot {
4
  constructor() {
5
  this.generator = null;
6
  this.messages = [];
7
  this.isProcessing = false;
8
  this.useWebGPU = false;
9
+ this.currentStreamer = null;
10
  this.initElements();
11
  this.initEventListeners();
12
  this.initModel();
 
51
  }
52
  });
53
 
 
54
  this.checkWebGPUSupport();
55
  }
56
 
 
77
  updateCharCount() {
78
  const count = this.userInput.value.length;
79
  this.charCount.textContent = `${count} / 1000`;
80
+ if (this.charCount) {
81
+ this.charCount.style.color = count > 900 ? '#ef4444' : '#64748b';
82
+ }
83
  }
84
 
85
  autoResize() {
 
144
  const responseContainer = this.addMessage('', 'assistant', true);
145
  const messageContent = responseContainer.querySelector('.message-content');
146
 
147
+ // Add streaming indicator
148
+ const streamingIndicator = document.createElement('span');
149
+ streamingIndicator.className = 'streaming-indicator';
150
+ streamingIndicator.textContent = '●';
151
+ streamingIndicator.style.animation = 'pulse 1.5s infinite';
152
+ streamingIndicator.style.color = '#10b981';
153
+ streamingIndicator.style.marginLeft = '4px';
154
+ messageContent.appendChild(streamingIndicator);
155
+
156
+ // Create enhanced TextStreamer with proper callback handling
157
+ this.currentStreamer = new TextStreamer(this.generator.tokenizer, {
158
  skip_prompt: true,
159
  skip_special_tokens: true,
160
+ callback_function: (text) => {
161
+ // Remove streaming indicator on first text
162
+ if (streamingIndicator.parentNode) {
163
+ streamingIndicator.remove();
 
 
 
 
 
 
 
 
 
 
 
164
  }
165
+ // Append new text and scroll
 
 
 
 
 
 
 
 
166
  messageContent.textContent += text;
167
  this.scrollToBottom();
168
+ }
169
+ });
170
+
171
+ // Prepare conversation context
172
+ const conversationText = this.messages.map(msg =>
173
+ `${msg.role === 'user' ? 'User' : 'Assistant'}: ${msg.content}`
174
+ ).join('\n') + '\nAssistant:';
175
 
176
+ // Generate response with streaming
177
+ const output = await this.generator(conversationText, {
178
+ max_new_tokens: 300,
179
+ do_sample: true,
180
+ temperature: 0.7,
181
+ top_p: 0.9,
182
+ streamer: this.currentStreamer,
183
+ return_full_text: false
184
  });
185
 
186
+ // Clean up streaming indicator if still present
187
+ if (streamingIndicator.parentNode) {
188
+ streamingIndicator.remove();
189
+ }
190
+
191
+ // Get the generated text and add to conversation history
192
+ const generatedText = output[0].generated_text || messageContent.textContent;
193
+ if (generatedText && !messageContent.textContent) {
194
+ messageContent.textContent = generatedText;
195
+ }
196
+
197
+ // Add assistant's response to message history
198
+ const finalResponse = messageContent.textContent.trim();
199
+ if (finalResponse) {
200
+ this.messages.push({ role: "assistant", content: finalResponse });
201
+ }
202
 
203
  } catch (error) {
204
  console.error('Error generating response:', error);
205
+ // Remove any existing content and show error
206
+ const responseContainer = this.chatMessages.lastElementChild;
207
+ if (responseContainer && responseContainer.classList.contains('assistant')) {
208
+ responseContainer.remove();
209
+ }
210
+ this.addMessage('Sorry, I encountered an error while generating the response. Please try again.', 'error');
211
  } finally {
212
  this.loadingIndicator.classList.add('hidden');
213
  this.isProcessing = false;
214
  this.sendButton.disabled = false;
215
+ this.currentStreamer = null;
216
  this.userInput.focus();
217
+ this.scrollToBottom();
218
  }
219
  }
220
 
 
246
  }
247
 
248
  scrollToBottom() {
249
+ requestAnimationFrame(() => {
250
+ this.chatMessages.scrollTop = this.chatMessages.scrollHeight;
251
+ });
252
  }
253
 
254
  clearChat() {
255
+ if (this.isProcessing) {
256
+ this.showNotification('Please wait for the current response to complete');
257
+ return;
258
+ }
259
+
260
  this.messages = [];
261
  this.chatMessages.innerHTML = `
262
  <div class="welcome-message">
263
  <div class="welcome-icon">🤖</div>
264
  <h2>Welcome to AI Assistant</h2>
265
+ <p>I'm powered by MobileLLM with real-time streaming. Ask me anything!</p>
266
  </div>
267
  `;
268
+ this.showNotification('Chat cleared!', 'info');
269
  }
270
 
271
  showNotification(message, type = 'info') {
272
  const notification = document.createElement('div');
273
  notification.className = `notification ${type}`;
274
  notification.textContent = message;
275
+ notification.style.cssText = `
276
+ position: fixed;
277
+ top: 20px;
278
+ right: 20px;
279
+ padding: 12px 20px;
280
+ border-radius: 8px;
281
+ color: white;
282
+ font-weight: 500;
283
+ transform: translateX(100%);
284
+ transition: all 0.3s;
285
+ z-index: 1001;
286
+ background: ${type === 'success' ? '#10b981' : type === 'error' ? '#ef4444' : '#3b82f6'};
287
+ `;
288
+
289
  document.body.appendChild(notification);
290
 
291
  setTimeout(() => {
292
+ notification.style.transform = 'translateX(0)';
293
  }, 10);
294
 
295
  setTimeout(() => {
296
+ notification.style.transform = 'translateX(100%)';
297
  setTimeout(() => notification.remove(), 300);
298
  }, 3000);
299
  }
 
301
 
302
  // Initialize chatbot when DOM is ready
303
  document.addEventListener('DOMContentLoaded', () => {
304
+ new StreamingChatbot();
305
  });