<!--
  Hugging Face repository listing residue (not part of the document):
  finetunedmodel / test.html
  YaTharThShaRma999's picture
  Create test.html
  ed9ee9b verified
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Voice Assistant</title>
<script src="https://cdn.tailwindcss.com"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;800&display=swap" rel="stylesheet">
<style>
/* Base styles for body - now the main canvas */
body {
font-family: 'Inter', sans-serif;
/* Soft, calming gradient background */
background: linear-gradient(135deg, #e0f7fa 0%, #e8f5e9 100%); /* Very light aqua to very light green */
display: flex;
flex-direction: column; /* Arrange content vertically */
justify-content: center;
align-items: center;
min-height: 100vh;
margin: 0;
padding: 20px;
box-sizing: border-box;
overflow: hidden; /* Prevent scrollbar from background. NOTE(review): also clips page content on short viewports — confirm this is intended. */
position: relative;
gap: 2.5rem; /* Spacing between elements */
animation: fadeIn 0.9s ease-out forwards; /* Fade in the whole page */
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(30px); }
to { opacity: 1; transform: translateY(0); }
}
/* Animated background elements for a more dynamic look.
   Two blurred, slowly drifting colored circles rendered behind all content. */
body::before, body::after {
content: '';
position: absolute;
border-radius: 50%;
opacity: 0.5; /* Softer opacity for background glows */
filter: blur(150px); /* Even more blur for a smoother, larger glow */
z-index: -1; /* Ensure they stay behind content */
}
body::before {
width: 450px;
height: 450px;
background: #4dd0e1; /* Cyan */
top: 5%;
left: 10%;
animation: moveBlob1 25s infinite alternate ease-in-out; /* Slower, calmer animation */
}
body::after {
width: 550px;
height: 550px;
background: #a5d6a7; /* Light Green */
bottom: 5%;
right: 8%;
animation: moveBlob2 28s infinite alternate-reverse ease-in-out; /* Slower, calmer animation */
}
@keyframes moveBlob1 {
0% { transform: translate(0, 0) scale(1); }
33% { transform: translate(70px, -50px) scale(1.08); }
66% { transform: translate(-40px, 60px) scale(0.95); }
100% { transform: translate(0, 0) scale(1); }
}
@keyframes moveBlob2 {
0% { transform: translate(0, 0) scale(1); }
33% { transform: translate(-60px, 80px) scale(0.9); }
66% { transform: translate(50px, -70px) scale(1.1); }
100% { transform: translate(0, 0) scale(1); }
}
/* Main title styling */
h1 {
color: #37474f; /* Dark text for contrast on light background */
text-shadow: 0 2px 4px rgba(0, 0, 0, 0.08); /* Subtle text shadow */
margin-bottom: 0; /* Adjust spacing as body handles gap */
}
/* Status text styling */
.status-text {
font-size: 2rem; /* Even larger font for main status */
color: #546e7a; /* Muted dark gray for status */
font-weight: 700;
min-height: 3.5rem; /* Reserve more space to prevent CLS */
text-shadow: 0 2px 4px rgba(0, 0, 0, 0.05); /* Subtle text shadow */
}
/* Microphone Button Styling — the central control; JS toggles the
   .listening / .speaking / .disabled state classes below. */
.microphone-button {
width: 160px; /* Even larger button */
height: 160px;
border-radius: 50%;
/* Default: subtle, almost translucent white with a light inner glow */
background: rgba(255, 255, 255, 0.9); /* Very subtle transparency */
color: #546e7a; /* Muted gray for default icon */
display: flex;
align-items: center;
justify-content: center;
font-size: 4rem; /* Larger icon */
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.1), inset 0 0 20px rgba(255, 255, 255, 0.5); /* Stronger outer shadow, subtle inner glow */
cursor: pointer;
transition: all 0.4s cubic-bezier(0.25, 0.8, 0.25, 1);
position: relative;
overflow: hidden;
border: none;
margin: 0 auto; /* Center the button */
}
.microphone-button:hover {
transform: translateY(-5px) scale(1.05); /* More pronounced lift and grow */
box-shadow: 0 15px 40px rgba(0, 0, 0, 0.15), inset 0 0 25px rgba(255, 255, 255, 0.7);
}
.microphone-button:active {
transform: translateY(0) scale(0.98);
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.1);
}
/* Active states for microphone button */
.microphone-button.listening {
animation: pulse-active 2.5s infinite ease-in-out; /* Slower, more ethereal pulse */
background: linear-gradient(45deg, #26c6da, #80deea); /* Cyan gradient */
color: white; /* White icon when active */
box-shadow: 0 15px 30px rgba(38, 198, 218, 0.4), inset 0 0 30px rgba(255, 255, 255, 0.5); /* Soft cyan glow */
}
.microphone-button.speaking {
animation: pulse-active 2.5s infinite ease-in-out;
background: linear-gradient(45deg, #66bb6a, #81c784); /* Light green gradient */
color: white; /* White icon when active */
box-shadow: 0 15px 30px rgba(102, 187, 106, 0.4), inset 0 0 30px rgba(255, 255, 255, 0.5); /* Soft green glow */
}
.microphone-button.disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
box-shadow: none;
background: #e0e0e0; /* Light gray for disabled */
color: #9e9e9e; /* Darker muted gray */
animation: none;
}
/* Specific pulse animation for active states */
@keyframes pulse-active {
0% { transform: scale(1); box-shadow: 0 0 0 0 currentColor; }
70% { transform: scale(1.08); box-shadow: 0 0 0 45px rgba(38, 198, 218, 0); } /* NOTE(review): the expanding ring color is hard-coded cyan (fully transparent), NOT currentColor — the green .speaking state reuses the cyan ring */
100% { transform: scale(1); box-shadow: 0 0 0 0 currentColor; }
}
/* SVG Icon styling */
.icon {
width: 4rem; /* Match button size */
height: 4rem;
fill: currentColor; /* Inherit color from parent button */
}
/* Toast Notification System — transient messages appended by showToast() */
#toastContainer {
position: fixed;
top: 20px;
right: 20px;
z-index: 1001; /* Above the modal overlay (z-index 1000) */
display: flex;
flex-direction: column;
gap: 10px;
max-width: 300px;
}
.toast-message {
background-color: rgba(0, 0, 0, 0.75); /* Darker toast background for contrast */
color: white;
padding: 12px 20px;
border-radius: 8px;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
opacity: 0;
transform: translateX(100%);
/* NOTE(review): fadeOut ends at 3.0s (0.5s + 2.5s delay), matching showToast's
   default 3000ms removal; a non-default toast duration will desync the
   animation from the DOM removal. */
animation: slideIn 0.5s forwards, fadeOut 0.5s 2.5s forwards;
font-size: 0.9rem;
font-weight: 500;
}
@keyframes slideIn {
to { opacity: 1; transform: translateX(0); }
}
@keyframes fadeOut {
from { opacity: 1; }
to { opacity: 0; transform: translateX(100%); }
}
/* Custom Alert Modal (for critical errors) - adjusted for new light theme.
   Shown/hidden by toggling the .show class from showAlert(). */
.modal {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background-color: rgba(0, 0, 0, 0.6); /* Softer overlay */
display: flex;
justify-content: center;
align-items: center;
z-index: 1000;
opacity: 0;
visibility: hidden;
transition: opacity 0.4s ease, visibility 0.4s ease;
}
.modal.show {
opacity: 1;
visibility: visible;
}
.modal-content {
background-color: white; /* White modal content */
color: #37474f; /* Dark text */
padding: 3rem;
border-radius: 2rem;
box-shadow: 0 15px 30px rgba(0, 0, 0, 0.2); /* Softer shadow */
text-align: center;
max-width: 500px;
width: 90%;
transform: translateY(-50px) scale(0.9);
transition: transform 0.4s cubic-bezier(0.68, -0.55, 0.27, 1.55), opacity 0.4s ease;
/* No border */
}
.modal.show .modal-content {
transform: translateY(0) scale(1);
}
.modal-button {
background: linear-gradient(45deg, #4dd0e1, #26c6da); /* Calming blue/cyan gradient for modal button */
color: white;
padding: 0.8rem 1.8rem;
border-radius: 1rem;
margin-top: 2rem;
cursor: pointer;
transition: all 0.3s ease;
font-weight: 600;
border: none;
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
}
.modal-button:hover {
background: linear-gradient(45deg, #00acc1, #4dd0e1);
transform: translateY(-2px);
box-shadow: 0 6px 12px rgba(0, 0, 0, 0.15);
}
/* New Styles for More Options panel */
.options-button {
position: fixed;
top: 20px;
left: 20px;
background: rgba(255, 255, 255, 0.8);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px); /* FIX: Safari needs the -webkit- prefix — consistent with .settings-panel */
padding: 0.75rem 1.25rem;
border-radius: 1.5rem;
font-weight: 600;
color: #37474f;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
cursor: pointer;
transition: all 0.3s ease;
z-index: 10;
}
.options-button:hover {
background: white;
transform: translateY(-2px);
box-shadow: 0 6px 16px rgba(0, 0, 0, 0.15);
}
/* Slide-in settings drawer; sits below the options button (z-index 9 < 10)
   and is opened/closed by toggling the .open class from JS. */
.settings-panel {
position: fixed;
top: 0;
left: 0;
width: 350px;
height: 100vh;
padding: 4rem 2.5rem;
background: rgba(255, 255, 255, 0.75);
backdrop-filter: blur(20px);
-webkit-backdrop-filter: blur(20px);
box-shadow: 5px 0 30px rgba(0, 0, 0, 0.2);
transform: translateX(-100%);
transition: transform 0.5s cubic-bezier(0.7, 0, 0.3, 1);
z-index: 9;
display: flex;
flex-direction: column;
gap: 2rem;
}
.settings-panel.open {
transform: translateX(0);
}
</style>
</head>
<body>
<h1 class="text-5xl font-extrabold mb-4 text-shadow-lg">Voice Assistant</h1>
<div class="flex flex-col items-center gap-2">
<!-- FIX: live region so screen readers announce status changes made by updateStatus() -->
<p id="statusText" class="status-text" role="status" aria-live="polite">Tap to speak</p>
</div>
<!-- FIX: icon-only button gets an accessible name; explicit type="button" avoids implicit submit semantics -->
<button id="microphoneButton" class="microphone-button" type="button" aria-label="Start or stop voice assistant">
<svg class="icon" viewBox="0 0 24 24" fill="currentColor" xmlns="http://www.w3.org/2000/svg" aria-hidden="true">
<path d="M12 2C10.9 2 10 2.9 10 4v8c0 1.1.9 2 2 2s2-.9 2-2V4c0-1.1-.9-2-2-2zm-1 17.92V22h2v-2.08c3.61-.49 6.4-3.5 6.4-7.42h-2c0 3.03-2.47 5.5-5.5 5.5S6.6 15.53 6.6 12H4.6c0 3.92 2.79 6.93 6.4 7.42z"/>
</svg>
</button>
<audio id="audio_output_component_id" autoplay class="hidden"></audio>
<button id="optionsButton" class="options-button" type="button">More Options</button>
<div id="settingsPanel" class="settings-panel">
<h2 class="text-3xl font-extrabold text-gray-800">Settings</h2>
<div class="flex flex-col gap-4">
<h3 class="text-xl font-bold text-gray-600">Upload File</h3>
<!-- FIX: associate the descriptive text with the file control as its label -->
<label for="fileInput" class="text-sm text-gray-500">Upload a file to be used by the assistant.</label>
<input type="file" id="fileInput" class="rounded-lg p-2 text-gray-700 bg-gray-100 border border-gray-300">
<button id="uploadButton" type="button" class="btn bg-gray-700 text-white p-3 rounded-xl transition hover:bg-gray-800 disabled:opacity-50 disabled:cursor-not-allowed">Upload File</button>
</div>
<div id="fileUploadStatus" class="text-sm text-gray-700 mt-4" role="status" aria-live="polite"></div>
</div>
<!-- FIX: live region must exist in the DOM before showToast() injects messages -->
<div id="toastContainer" role="status" aria-live="polite"></div>
<div id="customAlertModal" class="modal" role="alertdialog" aria-modal="true" aria-describedby="alertMessage">
<div class="modal-content">
<p id="alertMessage" class="text-2xl font-semibold"></p>
<button id="alertCloseButton" class="modal-button" type="button">OK</button>
</div>
</div>
<script type="module">
// Firebase SDK (modular v11) imported directly from the gstatic CDN.
import { initializeApp } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-app.js";
import { getAuth, signInAnonymously, signInWithCustomToken, onAuthStateChanged } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-auth.js";
import { getFirestore } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-firestore.js";
// Global WebRTC variables
let pc; // RTCPeerConnection instance
let localStream; // User's microphone stream
let webrtc_id; // Unique ID for this WebRTC session
let isWebRTCConnected = false; // Track connection state
let isListening = false; // Track if microphone is active for listening (only ever reset to false in this file)
// ** Explicitly define the signaling server URL **
// NOTE(review): free-tier ngrok URLs are ephemeral; this endpoint breaks when the tunnel restarts.
const SIGNALING_SERVER_URL = 'https://c71505dfda9a.ngrok-free.app'; // Base URL
// Firebase variables
let db; // Firestore handle — initialized below but not otherwise used in this file
let auth;
let userId; // userId will still be used internally for signaling
// ** Your Firebase configuration **
// NOTE(review): a Firebase web apiKey is a public client identifier, not a secret,
// but that means access control must be enforced by Firebase security rules.
const firebaseConfig = {
apiKey: "AIzaSyBWv5gIXhyHMIzBCH5oFDlSsJhXWzov-ms",
authDomain: "test2-947ce.firebaseapp.com",
projectId: "test2-947ce",
storageBucket: "test2-947ce.firebasestorage.app",
messagingSenderId: "211407958561",
appId: "1:211407958561:web:ea24b5a4da5e1053a5e960",
measurementId: "G-673MR7TX2C"
};
const appId = firebaseConfig.appId; // appears unused in this file — kept for compatibility
// UI Elements — cached once at module load; the elements exist in the markup above.
const microphoneButton = document.getElementById('microphoneButton');
const statusText = document.getElementById('statusText');
const audioOutputComponent = document.getElementById("audio_output_component_id");
const customAlertModal = document.getElementById('customAlertModal');
const alertMessage = document.getElementById('alertMessage');
const alertCloseButton = document.getElementById('alertCloseButton');
const toastContainer = document.getElementById('toastContainer');
const optionsButton = document.getElementById('optionsButton');
const settingsPanel = document.getElementById('settingsPanel');
const fileInput = document.getElementById('fileInput');
const uploadButton = document.getElementById('uploadButton');
const fileUploadStatus = document.getElementById('fileUploadStatus');
/**
 * Presents a blocking custom alert modal (used instead of window.alert()).
 * Reserved for critical errors that require explicit user acknowledgement.
 * @param {string} message The message to display.
 */
function showAlert(message) {
customAlertModal.classList.add('show');
alertMessage.textContent = message;
}
// Dismiss the modal once the user acknowledges it.
alertCloseButton.addEventListener('click', function handleAlertClose() {
customAlertModal.classList.remove('show');
});
/**
 * Shows a transient toast in the top-right corner and removes it after
 * the given duration.
 * @param {string} message The message to display.
 * @param {number} duration How long the toast stays in the DOM, in ms (default 3000).
 */
function showToast(message, duration = 3000) {
const note = document.createElement('div');
note.classList.add('toast-message');
note.textContent = message;
toastContainer.append(note);
// CSS animations handle slide-in/fade-out; this removes the node afterwards.
window.setTimeout(() => note.remove(), duration);
}
/**
 * Updates the main status text on the UI (the large "Tap to speak" /
 * "Listening..." / "Speaking..." line under the title).
 * @param {string} text The status message.
 */
function updateStatus(text) {
statusText.textContent = text;
}
/**
 * Initializes Firebase and authenticates the user.
 * On auth-state change, a signed-in user's uid is stored in the module-level
 * `userId`, which the signaling and upload requests include. When no user is
 * signed in, this prefers `__initial_auth_token` (presumably injected by the
 * hosting environment — TODO confirm) and falls back to anonymous sign-in.
 */
async function initializeFirebase() {
try {
const app = initializeApp(firebaseConfig);
auth = getAuth(app);
db = getFirestore(app);
onAuthStateChanged(auth, async (user) => {
if (user) {
userId = user.uid;
// User ID is no longer displayed on UI, but kept for internal use.
} else {
try {
// Custom token path only if the global is defined AND truthy.
if (typeof __initial_auth_token !== 'undefined' && __initial_auth_token) {
await signInWithCustomToken(auth, __initial_auth_token);
} else {
await signInAnonymously(auth);
}
} catch (error) {
console.error("Firebase Auth Error:", error);
showAlert(`Firebase authentication failed: ${error.message}`);
}
}
});
} catch (error) {
console.error("Error initializing Firebase:", error);
showAlert(`Failed to initialize Firebase: ${error.message}`);
}
}
/**
 * Sets up the WebRTC peer connection:
 * 1. creates an RTCPeerConnection (Google STUN only),
 * 2. captures the microphone and adds its tracks,
 * 3. wires remote-audio playback into the hidden <audio> element,
 * 4. opens a "text" data channel,
 * 5. exchanges SDP (and trickled ICE candidates) with the signaling server.
 * Updates the button/status UI as the connection progresses.
 */
async function startWebRTC() {
if (isWebRTCConnected) {
showToast('WebRTC is already connected.');
return;
}
try {
updateStatus('Connecting...');
microphoneButton.classList.add('disabled'); // Disable button during connection setup
showToast('Initializing connection...');
pc = new RTCPeerConnection({
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' }
]
});
// Prompts the browser's mic-permission dialog; throws if denied/unavailable.
localStream = await navigator.mediaDevices.getUserMedia({ audio: true });
showToast('Microphone access granted.');
localStream.getTracks().forEach((track) => {
pc.addTrack(track, localStream);
});
// Remote track → hidden audio element. Re-assigning srcObject is skipped
// when the same stream arrives again.
pc.addEventListener("track", (evt) => {
if (audioOutputComponent && audioOutputComponent.srcObject !== evt.streams[0]) {
audioOutputComponent.srcObject = evt.streams[0];
audioOutputComponent.play().then(() => {
showToast('Assistant is speaking...');
updateStatus('Speaking...');
microphoneButton.classList.remove('listening');
microphoneButton.classList.add('speaking');
}).catch(e => console.error("Error playing audio:", e));
// Listen for when audio finishes playing.
// NOTE(review): onended is (re)assigned per track event — idempotent,
// but it only fires when the remote stream actually ends.
audioOutputComponent.onended = () => {
updateStatus('Listening...');
microphoneButton.classList.remove('speaking');
microphoneButton.classList.add('listening');
showToast('Ready for your next command.');
};
}
});
const dataChannel = pc.createDataChannel("text");
dataChannel.onopen = () => showToast('Data channel opened.');
dataChannel.onclose = () => showToast('Data channel closed.');
// NOTE(review): the handler receives an RTCErrorEvent; `err.message` may be
// undefined — the detail usually lives on `err.error.message`. Verify.
dataChannel.onerror = (err) => showToast(`Data channel error: ${err.message}`, 5000);
webrtc_id = Math.random().toString(36).substring(7);
// Trickle ICE: each candidate is POSTed to the same /webrtc/offer endpoint,
// distinguished by type: "ice-candidate" — presumably the server multiplexes
// on that field; confirm against the signaling server implementation.
pc.onicecandidate = ({ candidate }) => {
if (candidate) {
fetch(`${SIGNALING_SERVER_URL}/webrtc/offer`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
candidate: candidate.toJSON(),
webrtc_id: webrtc_id,
type: "ice-candidate",
userId: userId
})
})
.then(response => {
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
return response.json();
})
.catch(error => showToast(`Error sending ICE candidate: ${error.message}`, 5000));
}
};
// Standard offer/answer exchange over HTTP.
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
const response = await fetch(`${SIGNALING_SERVER_URL}/webrtc/offer`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
sdp: offer.sdp,
type: offer.type,
webrtc_id: webrtc_id,
userId: userId
})
});
const serverResponse = await response.json();
// FIX: Explicitly create RTCSessionDescription with type 'answer'
await pc.setRemoteDescription(new RTCSessionDescription({ type: 'answer', sdp: serverResponse.sdp }));
isWebRTCConnected = true;
microphoneButton.classList.remove('disabled');
microphoneButton.classList.add('listening'); // Indicate ready to listen
updateStatus('Listening...');
showToast('WebRTC connected. Tap to speak!');
// Tear down on loss; refresh the UI when (re)connected.
pc.onconnectionstatechange = () => {
if (pc.connectionState === 'disconnected' || pc.connectionState === 'failed' || pc.connectionState === 'closed') {
showToast('WebRTC connection lost or failed.', 5000);
stopWebRTC();
} else if (pc.connectionState === 'connected') {
updateStatus('Listening...');
microphoneButton.classList.add('listening');
microphoneButton.classList.remove('speaking');
}
};
} catch (error) {
console.error("Error setting up WebRTC:", error);
showAlert(`Failed to start WebRTC: ${error.message}. Please ensure your microphone is available and try again.`);
updateStatus('Error');
microphoneButton.classList.remove('disabled', 'listening', 'speaking');
stopWebRTC(); // Clean up if setup fails
}
}
/**
 * Tears down the WebRTC session: releases the microphone, closes the peer
 * connection, resets the hidden audio element, clears state flags, and
 * restores the idle UI.
 */
function stopWebRTC() {
// Nothing to do when neither a connection nor a peer object exists.
if (!isWebRTCConnected && !pc) {
showToast('WebRTC is already stopped.');
return;
}
updateStatus('Stopping...');
showToast('Stopping connection...');
// Release the microphone tracks.
if (localStream) {
for (const track of localStream.getTracks()) {
track.stop();
}
localStream = null;
}
// Close the peer connection unless it is already closed.
if (pc && pc.connectionState !== 'closed') {
pc.close();
}
pc = null;
// Detach and reset remote-audio playback.
if (audioOutputComponent) {
audioOutputComponent.srcObject = null;
audioOutputComponent.pause();
audioOutputComponent.currentTime = 0;
}
isWebRTCConnected = false;
isListening = false;
microphoneButton.classList.remove('listening', 'speaking', 'disabled');
updateStatus('Tap to speak');
showToast('WebRTC connection ended.');
}
/**
 * Starts the WebRTC session when disconnected, otherwise stops it.
 * Bound to the central microphone button.
 */
async function toggleWebRTC() {
if (!isWebRTCConnected) {
await startWebRTC();
return;
}
stopWebRTC();
}
// Event Listener for the central microphone button
microphoneButton.addEventListener('click', toggleWebRTC);
// Options panel logic
optionsButton.addEventListener('click', () => {
settingsPanel.classList.toggle('open');
});
// File upload logic: read the selected file as base64 and POST it to the
// signaling server's /settings endpoint under the voice_cloning_file key.
uploadButton.addEventListener('click', async () => {
const file = fileInput.files[0];
if (!file) {
fileUploadStatus.textContent = 'Please select a file to upload.';
return;
}
if (!userId) {
fileUploadStatus.textContent = 'Authentication not ready, please wait.';
return;
}
fileUploadStatus.textContent = 'Uploading...';
uploadButton.disabled = true;
const reader = new FileReader();
// FIX: handle read failures so the button is not left permanently disabled.
reader.onerror = () => {
fileUploadStatus.textContent = 'Could not read the selected file.';
uploadButton.disabled = false;
};
reader.onload = async function(event) {
// Strip the "data:<mime>;base64," prefix from the data URL.
const base64String = event.target.result.split(',')[1];
try {
const response = await fetch(`${SIGNALING_SERVER_URL}/settings`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: userId,
fileName: file.name,
fileType: file.type,
voice_cloning_file: base64String // Key name the server expects for the voice sample
})
});
if (response.ok) {
fileUploadStatus.textContent = `File "${file.name}" uploaded successfully!`;
fileInput.value = ''; // Clear the file input
showToast('Settings updated with file upload!');
} else {
// FIX: the error body may not be JSON; fall back to the HTTP status
// instead of letting response.json() throw and mask the real error.
let errorDetail = `HTTP ${response.status}`;
try {
const errorData = await response.json();
if (errorData && errorData.error) errorDetail = errorData.error;
} catch (parseError) {
// Non-JSON error response — keep the status-code fallback.
}
fileUploadStatus.textContent = `Upload failed: ${errorDetail}`;
showToast('File upload failed.', 5000);
}
} catch (error) {
fileUploadStatus.textContent = `An error occurred: ${error.message}`;
showToast('An error occurred during upload.', 5000);
} finally {
uploadButton.disabled = false;
}
};
reader.readAsDataURL(file);
});
// Initialize Firebase when the window loads
window.onload = initializeFirebase;
</script>
</body>
</html>