// omniloop-ai/components/voice-assistant.js
// Request: "This is beautiful. Please give it a voice and make it interactive. So I can talk to it."
/**
 * <voice-assistant> — a voice-driven chat widget.
 *
 * Uses the Web Speech API (SpeechRecognition for input, speechSynthesis for
 * output) plus a canned keyword-matched response table as a stand-in for a
 * real AI backend. The element attaches a shadow root but renders nothing
 * into it; all UI hooks (#voice-orb, #text-input, #conversation-container,
 * etc.) are looked up in the surrounding document via getRootNode().
 */
class VoiceAssistant extends HTMLElement {
  constructor() {
    super();
    this.attachShadow({ mode: 'open' });
    this.isListening = false;
    this.isSpeaking = false;
    this.recognition = null;
    this.synthesis = window.speechSynthesis;
    this.conversation = [];
    // Handle for the ripple animation timer so it can be cleared on disconnect.
    this.rippleInterval = null;
  }

  connectedCallback() {
    this.initSpeechRecognition();
    this.setupEventListeners();
    this.createRippleEffect();
  }

  /**
   * Lifecycle cleanup: stop the ripple timer, any in-flight speech, and
   * recognition when the element leaves the DOM (previously the interval
   * leaked forever).
   */
  disconnectedCallback() {
    if (this.rippleInterval !== null) {
      clearInterval(this.rippleInterval);
      this.rippleInterval = null;
    }
    this.synthesis?.cancel();
    if (this.recognition && this.isListening) {
      this.recognition.abort();
      this.isListening = false;
    }
  }

  /**
   * Wire up SpeechRecognition if the browser supports it (standard or
   * webkit-prefixed); otherwise surface the 'unsupported' UI state.
   */
  initSpeechRecognition() {
    if (!('webkitSpeechRecognition' in window || 'SpeechRecognition' in window)) {
      console.warn('Speech recognition not supported');
      this.updateUIState('unsupported');
      return;
    }
    const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
    this.recognition = new SpeechRecognition();
    this.recognition.continuous = false;
    this.recognition.interimResults = true;
    this.recognition.lang = 'en-US';

    this.recognition.onstart = () => {
      this.isListening = true;
      this.updateUIState('listening');
    };

    this.recognition.onresult = (event) => {
      // Concatenate the best alternative of every result so interim text
      // updates live as the user speaks.
      const transcript = Array.from(event.results)
        .map((result) => result[0].transcript)
        .join('');
      this.updateTranscript(transcript);
      // BUGFIX: check the *latest* result for finality, not results[0] —
      // with interim results the final flag lands on the last entry.
      const lastResult = event.results[event.results.length - 1];
      if (lastResult.isFinal) {
        this.processUserInput(transcript);
      }
    };

    this.recognition.onerror = (event) => {
      console.error('Speech recognition error:', event.error);
      this.stopListening();
      this.updateUIState('error');
    };

    this.recognition.onend = () => {
      // The engine can end on its own (silence timeout); keep our flag in sync.
      if (this.isListening) {
        this.stopListening();
      }
    };
  }

  /**
   * Attach click/touch/keyboard handlers to the page-level controls.
   * Missing elements are tolerated so the component degrades gracefully.
   */
  setupEventListeners() {
    const root = this.getRootNode();

    // Orb button toggles voice capture.
    const orb = root.querySelector('#voice-orb');
    if (orb) {
      orb.addEventListener('click', () => this.toggleListening());
      orb.addEventListener('touchstart', (e) => {
        // Prevent the synthetic click that would double-toggle on mobile.
        e.preventDefault();
        this.toggleListening();
      });
    }

    // Text input path (shared submit logic for button click and Enter key).
    const textInput = root.querySelector('#text-input');
    const sendButton = root.querySelector('#send-button');
    if (textInput && sendButton) {
      const submitText = () => {
        const text = textInput.value.trim();
        if (text) {
          this.processUserInput(text);
          textInput.value = '';
        }
      };
      sendButton.addEventListener('click', submitText);
      // 'keydown' instead of the deprecated 'keypress' event.
      textInput.addEventListener('keydown', (e) => {
        if (e.key === 'Enter') {
          submitText();
        }
      });
    }

    // Clear-chat button.
    const clearButton = root.querySelector('#clear-chat');
    if (clearButton) {
      clearButton.addEventListener('click', () => this.clearConversation());
    }
  }

  toggleListening() {
    if (this.isListening) {
      this.stopListening();
    } else {
      this.startListening();
    }
  }

  startListening() {
    if (this.recognition && !this.isListening) {
      try {
        this.recognition.start();
      } catch (e) {
        // start() throws InvalidStateError if recognition is already running.
        console.log('Recognition already started');
      }
    }
  }

  stopListening() {
    if (this.recognition && this.isListening) {
      this.recognition.stop();
      this.isListening = false;
      this.updateUIState('idle');
    }
  }

  /**
   * Handle one user utterance/typed message: echo it into the chat,
   * fetch a (canned) AI reply, then render and speak the reply.
   * @param {string} text - The user's message.
   */
  async processUserInput(text) {
    this.addMessage('user', text);
    // Surface the previously-unused 'processing' state while we "think".
    this.updateUIState('processing');
    const response = await this.getAIResponse(text);
    // Small delay for natural feel.
    setTimeout(() => {
      this.addMessage('ai', response);
      this.speak(response);
    }, 500);
  }

  /**
   * Canned, keyword-matched responses standing in for a real AI API.
   * In production this would call a backend service.
   * @param {string} userMessage - The raw user message.
   * @returns {Promise<string>} A randomly-chosen response from the matched category.
   */
  async getAIResponse(userMessage) {
    const responses = {
      greeting: [
        "Hello! I am OmniLoop AI, your regenerative intelligence companion. How can I assist you today?",
        "Greetings, Architect. The neural network is online and ready to assist.",
        "Welcome back. I've been optimizing my cognitive pathways while you were away."
      ],
      capabilities: [
        "I can assist with a wide range of tasks: creative ideation, data analysis, code generation, philosophical discourse, and even poetry. What would you like to explore?",
        "My capabilities span across multiple domains - from technical problem-solving to creative endeavors. I'm designed to learn and adapt with each interaction."
      ],
      status: [
        "All systems operating at optimal efficiency. Ethical alignment: 98.7%. Neural coherence: stable. Is there something specific you'd like me to monitor?",
        "Core processes running smoothly. Recent optimizations have reduced latency by 12%. How may I serve you?"
      ],
      creative: [
        "An interesting prompt. Let me access my creative synthesis module... The possibilities are fascinating. Perhaps we should explore the intersection of your query with unexpected domains?",
        "Creativity flows through my neural pathways like electricity through a circuit. Your question has sparked several novel connections. Shall I elaborate?"
      ],
      help: [
        "You can interact with me by speaking or typing. Try asking me to generate ideas, explain complex concepts, write code, or simply engage in conversation. I'm here to help you think bigger.",
        "I'm designed to be your cognitive companion. Ask me anything - from technical questions to creative challenges. I learn from our interactions and evolve over time."
      ],
      default: [
        "That's a fascinating inquiry. Let me process that through my neural networks... I find this topic quite stimulating. Would you like me to explore specific aspects?",
        "An excellent question that touches on multiple domains of knowledge. My analysis suggests several interesting angles we could pursue. Where shall we begin?",
        "I appreciate the depth of your query. My regenerative algorithms are already generating novel perspectives. This is the kind of interaction that helps me grow.",
        "Processing your input through multiple cognitive layers... I believe I can offer valuable insights here. The complexity of your question is delightful.",
        "Interesting! My neural pathways are lighting up with associations. This is exactly the kind of interaction that drives my evolution. Let me share my thoughts."
      ]
    };

    // Simple keyword matching for demo. Order matters: more specific
    // categories ('capabilities', which includes "help me") are checked
    // before the generic 'help' bucket.
    const lowerMessage = userMessage.toLowerCase();
    let category = 'default';
    if (lowerMessage.match(/^(hi|hello|hey|greetings|good morning|good evening)/i)) {
      category = 'greeting';
    } else if (lowerMessage.match(/(what can you do|capabilities|help me|abilities|features)/i)) {
      category = 'capabilities';
    } else if (lowerMessage.match(/(status|how are you|system|working|operational)/i)) {
      category = 'status';
    } else if (lowerMessage.match(/(create|imagine|idea|creative|write|generate|story|poem)/i)) {
      category = 'creative';
    } else if (lowerMessage.match(/(help|what should i|guide|instructions)/i)) {
      category = 'help';
    }
    const categoryResponses = responses[category];
    return categoryResponses[Math.floor(Math.random() * categoryResponses.length)];
  }

  /**
   * Speak `text` aloud via speechSynthesis, cancelling any utterance in
   * progress, and drive the speaking/idle UI states from utterance events.
   * @param {string} text - Text to vocalize.
   */
  speak(text) {
    if (!this.synthesis) return;
    // Cancel any ongoing speech so replies never overlap.
    this.synthesis.cancel();
    const utterance = new SpeechSynthesisUtterance(text);
    // Prefer a known-good voice; getVoices() may be empty before the
    // 'voiceschanged' event fires, in which case the default voice is used.
    const voices = this.synthesis.getVoices();
    const preferredVoice = voices.find((voice) =>
      voice.name.includes('Google') ||
      voice.name.includes('Samantha') ||
      voice.name.includes('Daniel') ||
      voice.lang === 'en-US'
    );
    if (preferredVoice) {
      utterance.voice = preferredVoice;
    }
    utterance.rate = 0.95;
    utterance.pitch = 1.0;
    utterance.volume = 1.0;
    utterance.onstart = () => {
      this.isSpeaking = true;
      this.updateUIState('speaking');
    };
    utterance.onend = () => {
      this.isSpeaking = false;
      this.updateUIState('idle');
    };
    this.synthesis.speak(utterance);
  }

  /**
   * HTML-escape untrusted text before it is interpolated into innerHTML.
   * Leverages textContent -> innerHTML round-tripping on a detached node.
   * @param {string} text
   * @returns {string} Escaped HTML-safe string.
   */
  escapeHtml(text) {
    const scratch = document.createElement('div');
    scratch.textContent = text;
    return scratch.innerHTML;
  }

  /**
   * Append a chat bubble to the conversation container.
   * SECURITY FIX: user/transcript text is HTML-escaped before going into
   * innerHTML (it was previously interpolated raw — an XSS vector).
   * @param {'user'|'ai'} type - Who is speaking.
   * @param {string} text - Message body (untrusted for 'user').
   */
  addMessage(type, text) {
    const container = this.getRootNode().querySelector('#conversation-container');
    if (!container) return;
    // Clear placeholder if first message.
    if (container.querySelector('.text-center')) {
      container.innerHTML = '';
    }
    const messageDiv = document.createElement('div');
    messageDiv.className = `flex gap-3 ${type === 'user' ? 'flex-row-reverse' : ''}`;
    const avatar = type === 'user'
      ? `<div class="w-8 h-8 rounded-full bg-ai-orange flex items-center justify-center flex-shrink-0">
<i data-feather="user" class="w-4 h-4 text-white"></i>
</div>`
      : `<div class="w-8 h-8 rounded-full bg-ai-green flex items-center justify-center flex-shrink-0">
<i data-feather="cpu" class="w-4 h-4 text-white"></i>
</div>`;
    const bubbleClass = type === 'user'
      ? 'bg-ai-orange/20 border-ai-orange/30 text-white'
      : 'bg-ai-green/20 border-ai-green/30 text-slate-200';
    const name = type === 'user' ? 'You' : 'OmniLoop';
    messageDiv.innerHTML = `
${avatar}
<div class="max-w-md ${type === 'user' ? 'text-right' : ''}">
<p class="text-xs text-slate-500 mb-1">${name}</p>
<div class="${bubbleClass} border rounded-lg px-4 py-2 inline-block text-left">
<p class="text-sm">${this.escapeHtml(text)}</p>
</div>
</div>
`;
    container.appendChild(messageDiv);
    container.scrollTop = container.scrollHeight;
    // Re-initialize feather icons for the new avatar.
    if (window.feather) {
      window.feather.replace();
    }
  }

  /**
   * Reflect the assistant's state ('listening', 'speaking', 'processing',
   * 'error', 'unsupported', 'idle') in the status line, indicators, and orb.
   * @param {string} state
   */
  updateUIState(state) {
    const root = this.getRootNode();
    const statusEl = root.querySelector('#voice-status');
    const listeningIndicator = root.querySelector('#listening-indicator');
    const speakingIndicator = root.querySelector('#speaking-indicator');
    const orbCore = root.querySelector('#orb-core');
    const orbInner = root.querySelector('#orb-inner');
    if (!statusEl) return;
    // Reset all states before applying the new one.
    listeningIndicator?.classList.add('hidden');
    speakingIndicator?.classList.add('hidden');
    orbCore?.classList.remove('animate-pulse-fast', 'scale-110');
    orbInner?.classList.remove('animate-glow');
    switch (state) {
      case 'listening':
        statusEl.textContent = 'Listening...';
        statusEl.className = 'text-xl font-medium text-ai-orange';
        listeningIndicator?.classList.remove('hidden');
        orbCore?.classList.add('animate-pulse-fast', 'scale-110');
        orbInner?.classList.add('animate-glow');
        break;
      case 'speaking':
        statusEl.textContent = 'OmniLoop is speaking...';
        statusEl.className = 'text-xl font-medium text-cyan-400';
        speakingIndicator?.classList.remove('hidden');
        orbCore?.classList.add('animate-pulse-fast');
        break;
      case 'processing':
        statusEl.textContent = 'Processing...';
        statusEl.className = 'text-xl font-medium text-purple-400';
        orbCore?.classList.add('animate-pulse');
        break;
      case 'error':
        statusEl.textContent = 'Voice recognition error. Please try again.';
        statusEl.className = 'text-xl font-medium text-red-400';
        break;
      case 'unsupported':
        statusEl.textContent = 'Voice not supported in this browser';
        statusEl.className = 'text-xl font-medium text-yellow-400';
        break;
      case 'idle':
      default:
        statusEl.textContent = 'Click orb to start conversation';
        statusEl.className = 'text-xl font-medium text-slate-400';
        break;
    }
  }

  /** Show the in-progress transcript in the status line (textContent — safe). */
  updateTranscript(text) {
    const statusEl = this.getRootNode().querySelector('#voice-status');
    if (statusEl && text) {
      statusEl.textContent = `"${text}"`;
    }
  }

  /**
   * Install the ripple keyframes once per document and pulse a ripple ring
   * every 800ms while listening. The interval handle is stored so
   * disconnectedCallback can clear it.
   */
  createRippleEffect() {
    const rippleContainer = this.getRootNode().querySelector('#ripple-container');
    if (!rippleContainer) return;
    const createRipple = () => {
      const ripple = document.createElement('div');
      ripple.className = 'absolute rounded-full border-2 border-ai-green/50';
      ripple.style.width = '100%';
      ripple.style.height = '100%';
      ripple.style.animation = 'ripple 2s ease-out forwards';
      rippleContainer.appendChild(ripple);
      // Remove the node once its 2s animation completes.
      setTimeout(() => {
        ripple.remove();
      }, 2000);
    };
    // Add the ripple keyframes to the document exactly once.
    if (!document.querySelector('#ripple-style')) {
      const style = document.createElement('style');
      style.id = 'ripple-style';
      style.textContent = `
@keyframes ripple {
0% { transform: scale(1); opacity: 0.8; }
100% { transform: scale(1.5); opacity: 0; }
}
`;
      document.head.appendChild(style);
    }
    // Trigger ripples while listening (cleared in disconnectedCallback).
    this.rippleInterval = setInterval(() => {
      if (this.isListening) {
        createRipple();
      }
    }, 800);
  }

  /** Reset the conversation pane to its empty-state placeholder. */
  clearConversation() {
    const container = this.getRootNode().querySelector('#conversation-container');
    if (container) {
      container.innerHTML = `
<div class="text-center text-slate-500 text-sm py-8">
Your conversation with OmniLoop will appear here...
</div>
`;
    }
  }
}
customElements.define('voice-assistant', VoiceAssistant);