<!-- Hugging Face file-page residue (uploader, commit message, commit id),
     commented out so no text precedes the doctype and triggers quirks mode:
     gnumanth's picture
     Upload folder using huggingface_hub
     e97881c verified -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Nemotron Speech Streaming - UI Preview</title>
<!-- Inter is the only custom font; the requested weights (300-600) match the
     font-weight values used in the stylesheet below -->
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600&display=swap" rel="stylesheet">
<style>
/* Global reset */
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
background: #0a0a0f;
font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
min-height: 100vh;
overflow: hidden; /* single-screen app: never scroll */
}
/* Main App Container */
/* z-index layering: ambient glow (1) < header/transcript (10) < controls (20) */
#app-container {
position: fixed;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background: linear-gradient(180deg, #0a0a0f 0%, #12121a 50%, #0a0a0f 100%);
display: flex;
flex-direction: column;
overflow: hidden;
}
/* Header */
#header {
padding: 20px;
text-align: center;
z-index: 10;
}
/* Pill-shaped "Live Session" badge */
#session-badge {
display: inline-flex;
align-items: center;
gap: 8px;
background: rgba(255, 255, 255, 0.05);
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: 20px;
padding: 8px 16px;
color: rgba(255, 255, 255, 0.7);
font-size: 13px;
font-weight: 500;
}
/* Green "live" indicator dot, pulsing continuously */
#live-dot {
width: 8px;
height: 8px;
background: #22c55e;
border-radius: 50%;
animation: pulse 2s ease-in-out infinite;
}
@keyframes pulse {
0%, 100% { opacity: 1; transform: scale(1); }
50% { opacity: 0.5; transform: scale(0.9); }
}
/* Transcript Area */
/* Flexes to fill the space between header and controls; content is centered */
#transcript-area {
flex: 1;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
padding: 40px 20px;
text-align: center;
z-index: 10;
}
/* Most recent utterance: large, fluid type via clamp(min, preferred, max) */
.transcript-current {
color: #ffffff;
font-size: clamp(24px, 5vw, 42px);
font-weight: 400;
line-height: 1.4;
max-width: 800px;
text-shadow: 0 4px 20px rgba(0, 0, 0, 0.5);
animation: fadeIn 0.3s ease-out;
}
/* Older utterances: smaller, dimmed */
.transcript-history {
color: rgba(255, 255, 255, 0.35);
font-size: clamp(14px, 2.5vw, 18px);
font-weight: 300;
line-height: 1.8;
max-width: 700px;
margin-top: 30px;
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(10px); }
to { opacity: 1; transform: translateY(0); }
}
/* Ambient Glow */
/* Purely decorative backdrop anchored to the bottom edge; pointer-events: none
   keeps it from intercepting clicks meant for the controls above it */
#ambient-glow {
position: absolute;
bottom: 0;
left: 50%;
transform: translateX(-50%);
width: 150%;
height: 50vh;
background: radial-gradient(ellipse at center bottom, rgba(99, 102, 241, 0.15) 0%, transparent 70%);
pointer-events: none;
z-index: 1;
}
/* Controls Area */
#controls-area {
padding: 30px 20px 50px;
display: flex;
justify-content: center;
align-items: center;
gap: 40px;
z-index: 20;
}
/* Mic Button */
/* Idle: indigo/violet gradient; the .recording class (toggled by JS) switches
   it to the red/orange gradient below */
#mic-btn {
width: 80px;
height: 80px;
border-radius: 50%;
background: linear-gradient(135deg, #6366f1 0%, #8b5cf6 100%);
border: none;
cursor: pointer;
transition: all 0.2s ease;
box-shadow: 0 8px 30px rgba(99, 102, 241, 0.4);
display: flex;
align-items: center;
justify-content: center;
}
#mic-btn:hover {
transform: scale(1.05);
box-shadow: 0 12px 40px rgba(99, 102, 241, 0.5);
}
#mic-btn:active,
#mic-btn.recording {
transform: scale(0.95);
background: linear-gradient(135deg, #ef4444 0%, #f97316 100%);
box-shadow: 0 8px 30px rgba(239, 68, 68, 0.4);
}
#mic-btn svg {
width: 32px;
height: 32px;
fill: white;
}
/* Reset Button */
#reset-btn {
background: rgba(255, 255, 255, 0.05);
border: 1px solid rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.6);
padding: 12px 24px;
border-radius: 12px;
font-size: 14px;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
font-family: inherit;
}
#reset-btn:hover {
background: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.9);
}
/* Recording animation */
/* Breathing red shadow layered on top of the .recording gradient above */
@keyframes recording-pulse {
0%, 100% { box-shadow: 0 8px 30px rgba(239, 68, 68, 0.4); }
50% { box-shadow: 0 8px 50px rgba(239, 68, 68, 0.7); }
}
#mic-btn.recording {
animation: recording-pulse 1.5s ease-in-out infinite;
}
</style>
</head>
<body>
<div id="app-container">
<div id="header">
<div id="session-badge">
<div id="live-dot"></div>
<span>Live Session • DEMO_MODE</span>
</div>
</div>
<!-- aria-live="polite" makes screen readers announce transcript text as the
     script streams updates into #current-text / #history-text -->
<div id="transcript-area" aria-live="polite">
<div class="transcript-current" id="current-text">Tap the microphone to start speaking...</div>
<div class="transcript-history" id="history-text"></div>
</div>
<!-- purely decorative backdrop: hide from assistive technology -->
<div id="ambient-glow" aria-hidden="true"></div>
<div id="controls-area">
<!-- type="button" prevents implicit submit semantics (button default is
     "submit"); the inline SVG is decorative because the button already has
     an accessible name via aria-label -->
<button id="mic-btn" type="button" aria-label="Start recording">
<svg viewBox="0 0 24 24" aria-hidden="true">
<path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3z"/>
<path d="M17 11c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z"/>
</svg>
</button>
<button id="reset-btn" type="button">Reset</button>
</div>
</div>
<script>
// --- DOM references and session state ---
const micBtn = document.getElementById('mic-btn');
const resetBtn = document.getElementById('reset-btn');
const currentText = document.getElementById('current-text');
const historyText = document.getElementById('history-text');
let isRecording = false;
let recognition = null; // SpeechRecognition instance, or null when unsupported
let transcript = [];    // finalized utterances, oldest first

// Wire up the Web Speech API when available (still vendor-prefixed in Chromium).
if ('webkitSpeechRecognition' in window || 'SpeechRecognition' in window) {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
recognition = new SpeechRecognition();
recognition.continuous = true;     // keep listening across pauses
recognition.interimResults = true; // stream partial hypotheses as they form
recognition.lang = 'en-US';
recognition.onresult = (event) => {
// Split new results into finalized text and in-flight interim hypotheses.
let interimTranscript = '';
let finalTranscript = '';
for (let i = event.resultIndex; i < event.results.length; i++) {
const result = event.results[i];
if (result.isFinal) {
finalTranscript += result[0].transcript;
} else {
interimTranscript += result[0].transcript;
}
}
if (finalTranscript) {
transcript.push(finalTranscript.trim());
updateDisplay();
} else if (interimTranscript) {
// Show interim text directly; it will be replaced by the final result.
currentText.textContent = interimTranscript;
}
};
recognition.onerror = (event) => {
console.error('Speech recognition error:', event.error);
stopRecording();
};
recognition.onend = () => {
if (isRecording) {
// Continuous sessions end spontaneously (silence, network drop); restart
// while the user still wants to record. start() throws InvalidStateError
// if the engine is already running, so guard the restart.
try {
recognition.start();
} catch (e) {
console.warn('Could not restart recognition:', e);
}
}
};
}
// Render the newest finalized utterance prominently and move older ones into
// the dimmed history area.
function updateDisplay() {
if (transcript.length === 0) return;
currentText.textContent = transcript[transcript.length - 1];
if (transcript.length > 1) {
// Build the history from text nodes + <br> elements instead of innerHTML,
// so recognized text is never parsed as markup (HTML-injection hardening).
historyText.textContent = '';
transcript.slice(0, -1).forEach((line, idx) => {
if (idx > 0) historyText.appendChild(document.createElement('br'));
historyText.appendChild(document.createTextNode(line));
});
}
}
// Begin a capture session: flip state, reflect it in the UI, and start the
// recognizer when one exists (otherwise stay in demo mode).
function startRecording() {
isRecording = true;
micBtn.classList.add('recording');
// Keep the accessible name in sync with the button's toggled state.
micBtn.setAttribute('aria-label', 'Stop recording');
if (recognition) {
currentText.textContent = 'Listening...';
try {
// start() throws InvalidStateError if recognition is already running
// (e.g. a rapid double-tap racing the onend auto-restart).
recognition.start();
} catch (e) {
console.warn('Recognition already started:', e);
}
} else {
// Fallback demo mode
currentText.textContent = 'Listening... (Demo mode - no speech recognition)';
}
}
// End the capture session and restore the idle UI state.
function stopRecording() {
isRecording = false;
micBtn.classList.remove('recording');
// Keep the accessible name in sync with the button's toggled state.
micBtn.setAttribute('aria-label', 'Start recording');
if (recognition) {
recognition.stop();
}
}
// Wipe the session back to its initial idle state: stop any active capture
// and clear both transcript panes.
function resetSession() {
stopRecording();
transcript = [];
historyText.textContent = '';
currentText.textContent = 'Tap the microphone to start speaking...';
}
// Mic button toggles between the recording and idle states; Reset clears all.
micBtn.addEventListener('click', () => {
const toggle = isRecording ? stopRecording : startRecording;
toggle();
});
resetBtn.addEventListener('click', resetSession);
</script>
</body>
</html>