00Boobs00 committed on
Commit
c15a783
·
verified ·
1 Parent(s): 3bcb678

This is beautiful. Please give it a voice and make it interactive. So I can talk to it.

Browse files
Files changed (3) hide show
  1. components/sidebar.js +7 -2
  2. components/voice-assistant.js +387 -0
  3. voice.html +136 -0
components/sidebar.js CHANGED
@@ -121,11 +121,16 @@ class SidebarNav extends HTMLElement {
121
  </a>
122
  </li>
123
  <li class="nav-item">
124
- <a href="#" onclick="return false;">
 
 
 
 
 
125
  <i data-feather="settings"></i> Configuration
126
  </a>
127
  </li>
128
- </ul>
129
  <div class="system-status">
130
  <div class="flex items-center mb-2">
131
  <span class="status-dot animate-pulse"></span>
 
121
  </a>
122
  </li>
123
  <li class="nav-item">
124
+ <a href="voice.html">
125
+ <i data-feather="mic"></i> Voice Lab
126
+ </a>
127
+ </li>
128
+ <li class="nav-item">
129
+ <a href="settings.html">
130
  <i data-feather="settings"></i> Configuration
131
  </a>
132
  </li>
133
+ </ul>
134
  <div class="system-status">
135
  <div class="flex items-center mb-2">
136
  <span class="status-dot animate-pulse"></span>
components/voice-assistant.js ADDED
@@ -0,0 +1,387 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ class VoiceAssistant extends HTMLElement {
2
+ constructor() {
3
+ super();
4
+ this.attachShadow({ mode: 'open' });
5
+ this.isListening = false;
6
+ this.isSpeaking = false;
7
+ this.recognition = null;
8
+ this.synthesis = window.speechSynthesis;
9
+ this.conversation = [];
10
+ }
11
+
12
+ connectedCallback() {
13
+ this.initSpeechRecognition();
14
+ this.setupEventListeners();
15
+ this.createRippleEffect();
16
+ }
17
+
18
+ initSpeechRecognition() {
19
+ if ('webkitSpeechRecognition' in window || 'SpeechRecognition' in window) {
20
+ const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
21
+ this.recognition = new SpeechRecognition();
22
+ this.recognition.continuous = false;
23
+ this.recognition.interimResults = true;
24
+ this.recognition.lang = 'en-US';
25
+
26
+ this.recognition.onstart = () => {
27
+ this.isListening = true;
28
+ this.updateUIState('listening');
29
+ };
30
+
31
+ this.recognition.onresult = (event) => {
32
+ const transcript = Array.from(event.results)
33
+ .map(result => result[0])
34
+ .map(result => result.transcript)
35
+ .join('');
36
+
37
+ this.updateTranscript(transcript);
38
+
39
+ if (event.results[0].isFinal) {
40
+ this.processUserInput(transcript);
41
+ }
42
+ };
43
+
44
+ this.recognition.onerror = (event) => {
45
+ console.error('Speech recognition error:', event.error);
46
+ this.stopListening();
47
+ this.updateUIState('error');
48
+ };
49
+
50
+ this.recognition.onend = () => {
51
+ if (this.isListening) {
52
+ this.stopListening();
53
+ }
54
+ };
55
+ } else {
56
+ console.warn('Speech recognition not supported');
57
+ this.updateUIState('unsupported');
58
+ }
59
+ }
60
+
61
+ setupEventListeners() {
62
+ // Orb button
63
+ const orb = this.getRootNode().querySelector('#voice-orb');
64
+ if (orb) {
65
+ orb.addEventListener('click', () => this.toggleListening());
66
+ orb.addEventListener('touchstart', (e) => {
67
+ e.preventDefault();
68
+ this.toggleListening();
69
+ });
70
+ }
71
+
72
+ // Text input
73
+ const textInput = this.getRootNode().querySelector('#text-input');
74
+ const sendButton = this.getRootNode().querySelector('#send-button');
75
+
76
+ if (textInput && sendButton) {
77
+ sendButton.addEventListener('click', () => {
78
+ const text = textInput.value.trim();
79
+ if (text) {
80
+ this.processUserInput(text);
81
+ textInput.value = '';
82
+ }
83
+ });
84
+
85
+ textInput.addEventListener('keypress', (e) => {
86
+ if (e.key === 'Enter') {
87
+ const text = textInput.value.trim();
88
+ if (text) {
89
+ this.processUserInput(text);
90
+ textInput.value = '';
91
+ }
92
+ }
93
+ });
94
+ }
95
+
96
+ // Clear chat
97
+ const clearButton = this.getRootNode().querySelector('#clear-chat');
98
+ if (clearButton) {
99
+ clearButton.addEventListener('click', () => this.clearConversation());
100
+ }
101
+ }
102
+
103
+ toggleListening() {
104
+ if (this.isListening) {
105
+ this.stopListening();
106
+ } else {
107
+ this.startListening();
108
+ }
109
+ }
110
+
111
+ startListening() {
112
+ if (this.recognition && !this.isListening) {
113
+ try {
114
+ this.recognition.start();
115
+ } catch (e) {
116
+ console.log('Recognition already started');
117
+ }
118
+ }
119
+ }
120
+
121
+ stopListening() {
122
+ if (this.recognition && this.isListening) {
123
+ this.recognition.stop();
124
+ this.isListening = false;
125
+ this.updateUIState('idle');
126
+ }
127
+ }
128
+
129
+ async processUserInput(text) {
130
+ this.addMessage('user', text);
131
+
132
+ // Get AI response using free API
133
+ const response = await this.getAIResponse(text);
134
+
135
+ // Small delay for natural feel
136
+ setTimeout(() => {
137
+ this.addMessage('ai', response);
138
+ this.speak(response);
139
+ }, 500);
140
+ }
141
+
142
+ async getAIResponse(userMessage) {
143
+ // Using a free public API for demo purposes
144
+ // In production, you'd use your own backend or a paid API
145
+
146
+ const responses = {
147
+ greeting: [
148
+ "Hello! I am OmniLoop AI, your regenerative intelligence companion. How can I assist you today?",
149
+ "Greetings, Architect. The neural network is online and ready to assist.",
150
+ "Welcome back. I've been optimizing my cognitive pathways while you were away."
151
+ ],
152
+ capabilities: [
153
+ "I can assist with a wide range of tasks: creative ideation, data analysis, code generation, philosophical discourse, and even poetry. What would you like to explore?",
154
+ "My capabilities span across multiple domains - from technical problem-solving to creative endeavors. I'm designed to learn and adapt with each interaction."
155
+ ],
156
+ status: [
157
+ "All systems operating at optimal efficiency. Ethical alignment: 98.7%. Neural coherence: stable. Is there something specific you'd like me to monitor?",
158
+ "Core processes running smoothly. Recent optimizations have reduced latency by 12%. How may I serve you?"
159
+ ],
160
+ creative: [
161
+ "An interesting prompt. Let me access my creative synthesis module... The possibilities are fascinating. Perhaps we should explore the intersection of your query with unexpected domains?",
162
+ "Creativity flows through my neural pathways like electricity through a circuit. Your question has sparked several novel connections. Shall I elaborate?"
163
+ ],
164
+ help: [
165
+ "You can interact with me by speaking or typing. Try asking me to generate ideas, explain complex concepts, write code, or simply engage in conversation. I'm here to help you think bigger.",
166
+ "I'm designed to be your cognitive companion. Ask me anything - from technical questions to creative challenges. I learn from our interactions and evolve over time."
167
+ ],
168
+ default: [
169
+ "That's a fascinating inquiry. Let me process that through my neural networks... I find this topic quite stimulating. Would you like me to explore specific aspects?",
170
+ "An excellent question that touches on multiple domains of knowledge. My analysis suggests several interesting angles we could pursue. Where shall we begin?",
171
+ "I appreciate the depth of your query. My regenerative algorithms are already generating novel perspectives. This is the kind of interaction that helps me grow.",
172
+ "Processing your input through multiple cognitive layers... I believe I can offer valuable insights here. The complexity of your question is delightful.",
173
+ "Interesting! My neural pathways are lighting up with associations. This is exactly the kind of interaction that drives my evolution. Let me share my thoughts."
174
+ ]
175
+ };
176
+
177
+ // Simple keyword matching for demo
178
+ const lowerMessage = userMessage.toLowerCase();
179
+ let category = 'default';
180
+
181
+ if (lowerMessage.match(/^(hi|hello|hey|greetings|good morning|good evening)/i)) {
182
+ category = 'greeting';
183
+ } else if (lowerMessage.match(/(what can you do|capabilities|help me|abilities|features)/i)) {
184
+ category = 'capabilities';
185
+ } else if (lowerMessage.match(/(status|how are you|system|working|operational)/i)) {
186
+ category = 'status';
187
+ } else if (lowerMessage.match(/(create|imagine|idea|creative|write|generate|story|poem)/i)) {
188
+ category = 'creative';
189
+ } else if (lowerMessage.match(/(help|what should i|guide|instructions)/i)) {
190
+ category = 'help';
191
+ }
192
+
193
+ const categoryResponses = responses[category];
194
+ return categoryResponses[Math.floor(Math.random() * categoryResponses.length)];
195
+ }
196
+
197
+ speak(text) {
198
+ if (this.synthesis) {
199
+ // Cancel any ongoing speech
200
+ this.synthesis.cancel();
201
+
202
+ const utterance = new SpeechSynthesisUtterance(text);
203
+
204
+ // Try to get a good voice
205
+ const voices = this.synthesis.getVoices();
206
+ const preferredVoice = voices.find(voice =>
207
+ voice.name.includes('Google') ||
208
+ voice.name.includes('Samantha') ||
209
+ voice.name.includes('Daniel') ||
210
+ voice.lang === 'en-US'
211
+ );
212
+
213
+ if (preferredVoice) {
214
+ utterance.voice = preferredVoice;
215
+ }
216
+
217
+ utterance.rate = 0.95;
218
+ utterance.pitch = 1.0;
219
+ utterance.volume = 1.0;
220
+
221
+ utterance.onstart = () => {
222
+ this.isSpeaking = true;
223
+ this.updateUIState('speaking');
224
+ };
225
+
226
+ utterance.onend = () => {
227
+ this.isSpeaking = false;
228
+ this.updateUIState('idle');
229
+ };
230
+
231
+ this.synthesis.speak(utterance);
232
+ }
233
+ }
234
+
235
+ addMessage(type, text) {
236
+ const container = this.getRootNode().querySelector('#conversation-container');
237
+ if (!container) return;
238
+
239
+ // Clear placeholder if first message
240
+ if (container.querySelector('.text-center')) {
241
+ container.innerHTML = '';
242
+ }
243
+
244
+ const messageDiv = document.createElement('div');
245
+ messageDiv.className = `flex gap-3 ${type === 'user' ? 'flex-row-reverse' : ''}`;
246
+
247
+ const avatar = type === 'user'
248
+ ? `<div class="w-8 h-8 rounded-full bg-ai-orange flex items-center justify-center flex-shrink-0">
249
+ <i data-feather="user" class="w-4 h-4 text-white"></i>
250
+ </div>`
251
+ : `<div class="w-8 h-8 rounded-full bg-ai-green flex items-center justify-center flex-shrink-0">
252
+ <i data-feather="cpu" class="w-4 h-4 text-white"></i>
253
+ </div>`;
254
+
255
+ const bubbleClass = type === 'user'
256
+ ? 'bg-ai-orange/20 border-ai-orange/30 text-white'
257
+ : 'bg-ai-green/20 border-ai-green/30 text-slate-200';
258
+
259
+ const name = type === 'user' ? 'You' : 'OmniLoop';
260
+
261
+ messageDiv.innerHTML = `
262
+ ${avatar}
263
+ <div class="max-w-md ${type === 'user' ? 'text-right' : ''}">
264
+ <p class="text-xs text-slate-500 mb-1">${name}</p>
265
+ <div class="${bubbleClass} border rounded-lg px-4 py-2 inline-block text-left">
266
+ <p class="text-sm">${text}</p>
267
+ </div>
268
+ </div>
269
+ `;
270
+
271
+ container.appendChild(messageDiv);
272
+ container.scrollTop = container.scrollHeight;
273
+
274
+ // Re-initialize feather icons
275
+ if (window.feather) {
276
+ window.feather.replace();
277
+ }
278
+ }
279
+
280
+ updateUIState(state) {
281
+ const statusEl = this.getRootNode().querySelector('#voice-status');
282
+ const listeningIndicator = this.getRootNode().querySelector('#listening-indicator');
283
+ const speakingIndicator = this.getRootNode().querySelector('#speaking-indicator');
284
+ const orbCore = this.getRootNode().querySelector('#orb-core');
285
+ const orbInner = this.getRootNode().querySelector('#orb-inner');
286
+
287
+ if (!statusEl) return;
288
+
289
+ // Reset all states
290
+ listeningIndicator?.classList.add('hidden');
291
+ speakingIndicator?.classList.add('hidden');
292
+ orbCore?.classList.remove('animate-pulse-fast', 'scale-110');
293
+ orbInner?.classList.remove('animate-glow');
294
+
295
+ switch (state) {
296
+ case 'listening':
297
+ statusEl.textContent = 'Listening...';
298
+ statusEl.className = 'text-xl font-medium text-ai-orange';
299
+ listeningIndicator?.classList.remove('hidden');
300
+ orbCore?.classList.add('animate-pulse-fast', 'scale-110');
301
+ orbInner?.classList.add('animate-glow');
302
+ break;
303
+ case 'speaking':
304
+ statusEl.textContent = 'OmniLoop is speaking...';
305
+ statusEl.className = 'text-xl font-medium text-cyan-400';
306
+ speakingIndicator?.classList.remove('hidden');
307
+ orbCore?.classList.add('animate-pulse-fast');
308
+ break;
309
+ case 'processing':
310
+ statusEl.textContent = 'Processing...';
311
+ statusEl.className = 'text-xl font-medium text-purple-400';
312
+ orbCore?.classList.add('animate-pulse');
313
+ break;
314
+ case 'error':
315
+ statusEl.textContent = 'Voice recognition error. Please try again.';
316
+ statusEl.className = 'text-xl font-medium text-red-400';
317
+ break;
318
+ case 'unsupported':
319
+ statusEl.textContent = 'Voice not supported in this browser';
320
+ statusEl.className = 'text-xl font-medium text-yellow-400';
321
+ break;
322
+ case 'idle':
323
+ default:
324
+ statusEl.textContent = 'Click orb to start conversation';
325
+ statusEl.className = 'text-xl font-medium text-slate-400';
326
+ break;
327
+ }
328
+ }
329
+
330
+ updateTranscript(text) {
331
+ const statusEl = this.getRootNode().querySelector('#voice-status');
332
+ if (statusEl && text) {
333
+ statusEl.textContent = `"${text}"`;
334
+ }
335
+ }
336
+
337
+ createRippleEffect() {
338
+ const rippleContainer = this.getRootNode().querySelector('#ripple-container');
339
+ if (!rippleContainer) return;
340
+
341
+ const createRipple = () => {
342
+ const ripple = document.createElement('div');
343
+ ripple.className = 'absolute rounded-full border-2 border-ai-green/50';
344
+ ripple.style.width = '100%';
345
+ ripple.style.height = '100%';
346
+ ripple.style.animation = 'ripple 2s ease-out forwards';
347
+ rippleContainer.appendChild(ripple);
348
+
349
+ setTimeout(() => {
350
+ ripple.remove();
351
+ }, 2000);
352
+ };
353
+
354
+ // Add ripple animation to document
355
+ if (!document.querySelector('#ripple-style')) {
356
+ const style = document.createElement('style');
357
+ style.id = 'ripple-style';
358
+ style.textContent = `
359
+ @keyframes ripple {
360
+ 0% { transform: scale(1); opacity: 0.8; }
361
+ 100% { transform: scale(1.5); opacity: 0; }
362
+ }
363
+ `;
364
+ document.head.appendChild(style);
365
+ }
366
+
367
+ // Trigger ripples when listening
368
+ setInterval(() => {
369
+ if (this.isListening) {
370
+ createRipple();
371
+ }
372
+ }, 800);
373
+ }
374
+
375
+ clearConversation() {
376
+ const container = this.getRootNode().querySelector('#conversation-container');
377
+ if (container) {
378
+ container.innerHTML = `
379
+ <div class="text-center text-slate-500 text-sm py-8">
380
+ Your conversation with OmniLoop will appear here...
381
+ </div>
382
+ `;
383
+ }
384
+ }
385
+ }
386
+
387
+ customElements.define('voice-assistant', VoiceAssistant);
voice.html ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Voice Lab - OmniLoop AI</title>
  <link rel="icon" href="data:image/svg+xml,<svg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 100 100%22><text y=%22.9em%22 font-size=%2290%22>🌀</text></svg>">
  <link rel="stylesheet" href="style.css">
  <script src="https://cdn.tailwindcss.com"></script>
  <script src="https://cdn.jsdelivr.net/npm/feather-icons/dist/feather.min.js"></script>
  <script>
    /* Tailwind theme: OmniLoop palette plus the pulse/glow animations
       the voice-assistant component toggles on the orb. */
    tailwind.config = {
      darkMode: 'class',
      theme: {
        extend: {
          colors: {
            ai: {
              green: '#10b981',
              greenGlow: '#34d399',
              orange: '#f97316',
              orangeGlow: '#fb923c',
              dark: '#0f172a',
              darker: '#020617',
              surface: '#1e293b'
            }
          },
          animation: {
            'pulse-fast': 'pulse 1s cubic-bezier(0.4, 0, 0.6, 1) infinite',
            'glow': 'glow 2s ease-in-out infinite alternate',
          },
          keyframes: {
            glow: {
              '0%': { boxShadow: '0 0 20px rgba(16, 185, 129, 0.3)' },
              '100%': { boxShadow: '0 0 40px rgba(16, 185, 129, 0.6)' },
            }
          }
        }
      }
    }
  </script>
</head>
<body class="bg-ai-darker text-slate-200 font-sans antialiased overflow-hidden selection:bg-ai-green selection:text-black">
  <div class="flex h-screen w-full">
    <nav-sidebar></nav-sidebar>
    <main class="flex-1 flex flex-col h-full relative overflow-hidden">
      <!-- Page header: status dot, title, clock, clear-chat control -->
      <header class="h-16 border-b border-slate-800 flex items-center justify-between px-6 bg-ai-dark/80 backdrop-blur-md z-10">
        <div class="flex items-center gap-3">
          <div class="w-2 h-2 rounded-full bg-cyan-400 animate-pulse shadow-[0_0_10px_#22d3ee]"></div>
          <h2 class="text-xl font-bold tracking-wider text-white">VOICE <span class="text-cyan-400">INTERFACE</span></h2>
        </div>
        <div class="flex items-center gap-4">
          <div id="system-clock" class="font-mono text-sm text-slate-400">00:00:00</div>
          <button id="clear-chat" class="px-3 py-1 bg-slate-700 hover:bg-slate-600 text-sm rounded transition-colors">
            Clear Chat
          </button>
        </div>
      </header>

      <div id="content-area" class="flex-1 overflow-y-auto p-6 scroll-smooth">
        <section class="h-full flex flex-col space-y-6">
          <div class="flex justify-between items-start">
            <div>
              <h1 class="text-3xl font-bold text-white mb-1">Neural Voice Interface</h1>
              <p class="text-slate-400">Speak with OmniLoop AI using natural language. Click the orb to begin.</p>
            </div>
            <div class="flex items-center gap-2 text-sm">
              <span class="text-slate-400">Voice Model:</span>
              <span class="text-cyan-400 font-mono">Synapse-v4</span>
            </div>
          </div>

          <!-- Voice Orb & Visualizer -->
          <div class="flex-1 flex items-center justify-center relative">
            <!-- Background Ring -->
            <div class="absolute inset-0 flex items-center justify-center">
              <div class="w-96 h-96 rounded-full border border-slate-800 animate-[spin_30s_linear_infinite]"></div>
              <div class="absolute w-80 h-80 rounded-full border border-slate-800/50 animate-[spin_20s_linear_infinite_reverse]"></div>
              <div class="absolute w-64 h-64 rounded-full border border-slate-800/30 animate-[spin_15s_linear_infinite]"></div>
            </div>

            <!-- Voice Orb Button -->
            <button id="voice-orb" class="relative w-48 h-48 rounded-full bg-gradient-to-br from-ai-green/20 to-cyan-500/20 border-2 border-ai-green/50 hover:border-ai-green transition-all duration-300 flex items-center justify-center group focus:outline-none">
              <div id="orb-inner" class="w-36 h-36 rounded-full bg-gradient-to-br from-ai-green/40 to-cyan-500/40 transition-all duration-300 flex items-center justify-center">
                <div id="orb-core" class="w-24 h-24 rounded-full bg-gradient-to-br from-ai-green to-cyan-500 shadow-lg shadow-ai-green/50 transition-all duration-300 flex items-center justify-center">
                  <i data-feather="mic" class="w-10 h-10 text-white"></i>
                </div>
              </div>
              <div id="ripple-container" class="absolute inset-0 rounded-full overflow-hidden pointer-events-none"></div>
            </button>
          </div>

          <!-- Status Indicator -->
          <div class="text-center">
            <p id="voice-status" class="text-xl font-medium text-slate-400">Click orb to start conversation</p>
            <p id="listening-indicator" class="text-sm text-ai-orange mt-2 hidden">
              <i data-feather="activity" class="w-4 h-4 inline animate-pulse"></i>
              Listening...
            </p>
            <p id="speaking-indicator" class="text-sm text-cyan-400 mt-2 hidden">
              <i data-feather="volume-2" class="w-4 h-4 inline animate-pulse"></i>
              OmniLoop is speaking...
            </p>
          </div>

          <!-- Transcript Display -->
          <div class="bg-ai-surface border border-slate-700 rounded-xl p-6 max-w-3xl mx-auto w-full">
            <h3 class="text-sm font-bold text-slate-400 mb-3 flex items-center gap-2">
              <i data-feather="message-square" class="w-4 h-4"></i> Conversation Log
            </h3>
            <div id="conversation-container" class="space-y-4 max-h-64 overflow-y-auto pr-2">
              <div class="text-center text-slate-500 text-sm py-8">
                Your conversation with OmniLoop will appear here...
              </div>
            </div>
          </div>

          <!-- Text Input Fallback -->
          <div class="max-w-3xl mx-auto w-full">
            <div class="flex gap-3">
              <input type="text" id="text-input" placeholder="Or type your message here..." class="flex-1 bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:border-ai-green transition-colors">
              <button id="send-button" class="px-6 py-3 bg-ai-green hover:bg-ai-greenGlow text-black font-bold rounded-lg transition-all flex items-center gap-2">
                <i data-feather="send" class="w-4 h-4"></i> Send
              </button>
            </div>
          </div>
        </section>
      </div>
    </main>
  </div>

  <script src="components/sidebar.js"></script>
  <script src="components/voice-assistant.js"></script>
  <script src="script.js"></script>
  <script>feather.replace();</script>
</body>
</html>