|
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Voice Assistant</title>
<!-- Tailwind via CDN: fine for prototyping; for production, switch to a built stylesheet (Tailwind CLI/PostCSS). -->
<script src="https://cdn.tailwindcss.com"></script>
<!-- Preconnect to both font origins so the stylesheet and font files start fetching earlier.
     `crossorigin` is required on the gstatic origin because font requests are CORS-mode. -->
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;800&display=swap" rel="stylesheet">
|
|
<style> |
|
|
|
|
|
/* Full-viewport flex column that centers the app UI on a soft gradient. */
body {
  font-family: 'Inter', sans-serif;
  background: linear-gradient(135deg, #e0f7fa 0%, #e8f5e9 100%);
  margin: 0;
  padding: 20px;
  box-sizing: border-box;
  min-height: 100vh;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  gap: 2.5rem;
  position: relative;
  overflow: hidden; /* stops the oversized background blobs from creating scrollbars */
  animation: fadeIn 0.9s ease-out forwards;
}

/* Page entrance: rise up while fading in. */
@keyframes fadeIn {
  from { opacity: 0; transform: translateY(30px); }
  to   { opacity: 1; transform: translateY(0); }
}
|
|
|
|
|
|
|
|
/* Decorative blurred color blobs that drift slowly behind the UI. */
body::before,
body::after {
  content: '';
  position: absolute;
  border-radius: 50%;
  opacity: 0.5;
  filter: blur(150px);
  z-index: -1; /* always behind interactive content */
}

/* Cyan blob, upper-left. */
body::before {
  width: 450px;
  height: 450px;
  background: #4dd0e1;
  top: 5%;
  left: 10%;
  animation: moveBlob1 25s infinite alternate ease-in-out;
}

/* Green blob, lower-right. */
body::after {
  width: 550px;
  height: 550px;
  background: #a5d6a7;
  bottom: 5%;
  right: 8%;
  animation: moveBlob2 28s infinite alternate-reverse ease-in-out;
}

@keyframes moveBlob1 {
  0%   { transform: translate(0, 0) scale(1); }
  33%  { transform: translate(70px, -50px) scale(1.08); }
  66%  { transform: translate(-40px, 60px) scale(0.95); }
  100% { transform: translate(0, 0) scale(1); }
}

@keyframes moveBlob2 {
  0%   { transform: translate(0, 0) scale(1); }
  33%  { transform: translate(-60px, 80px) scale(0.9); }
  66%  { transform: translate(50px, -70px) scale(1.1); }
  100% { transform: translate(0, 0) scale(1); }
}
|
|
|
|
|
|
|
|
/* Page heading; vertical spacing comes from the flex gap on <body>. */
h1 {
  color: #37474f;
  text-shadow: 0 2px 4px rgba(0, 0, 0, 0.08);
  margin-bottom: 0;
}

/* Large status line ("Tap to speak" / "Listening..." / "Speaking..."). */
.status-text {
  font-size: 2rem;
  font-weight: 700;
  color: #546e7a;
  min-height: 3.5rem; /* reserve space so status changes don't shift layout */
  text-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
}
|
|
|
|
|
|
|
|
/* Circular microphone control; JS toggles .listening / .speaking / .disabled. */
.microphone-button {
  width: 160px;
  height: 160px;
  border-radius: 50%;
  background: rgba(255, 255, 255, 0.9);
  color: #546e7a;
  display: flex;
  align-items: center;
  justify-content: center;
  font-size: 4rem;
  box-shadow: 0 10px 30px rgba(0, 0, 0, 0.1), inset 0 0 20px rgba(255, 255, 255, 0.5);
  cursor: pointer;
  transition: all 0.4s cubic-bezier(0.25, 0.8, 0.25, 1);
  position: relative;
  overflow: hidden;
  border: none;
  margin: 0 auto;
}

.microphone-button:hover {
  transform: translateY(-5px) scale(1.05);
  box-shadow: 0 15px 40px rgba(0, 0, 0, 0.15), inset 0 0 25px rgba(255, 255, 255, 0.7);
}

.microphone-button:active {
  transform: translateY(0) scale(0.98);
  box-shadow: 0 5px 15px rgba(0, 0, 0, 0.1);
}

/* Active states define their glow and pulse tint as custom properties so the
   shared pulse-active keyframes can reuse them (see BUGFIX note below). */
.microphone-button.listening {
  --state-glow: 0 15px 30px rgba(38, 198, 218, 0.4), inset 0 0 30px rgba(255, 255, 255, 0.5);
  --pulse-color: rgba(38, 198, 218, 0.45);
  animation: pulse-active 2.5s infinite ease-in-out;
  background: linear-gradient(45deg, #26c6da, #80deea);
  color: white;
  box-shadow: var(--state-glow);
}

.microphone-button.speaking {
  --state-glow: 0 15px 30px rgba(102, 187, 106, 0.4), inset 0 0 30px rgba(255, 255, 255, 0.5);
  --pulse-color: rgba(102, 187, 106, 0.45);
  animation: pulse-active 2.5s infinite ease-in-out;
  background: linear-gradient(45deg, #66bb6a, #81c784);
  color: white;
  box-shadow: var(--state-glow);
}

.microphone-button.disabled {
  opacity: 0.5;
  cursor: not-allowed;
  transform: none;
  box-shadow: none;
  background: #e0e0e0;
  color: #9e9e9e;
  animation: none;
}

/* BUGFIX: the old keyframes animated box-shadow between `currentColor` at zero
   spread and a fully transparent (and hard-coded cyan) ring, so the expanding
   ring was never visible — and because animated box-shadow overrides the
   cascade, the state glow vanished while the animation ran. The keyframes now
   keep the state glow and expand a tinted ring from --pulse-color. */
@keyframes pulse-active {
  0%   { transform: scale(1);    box-shadow: var(--state-glow), 0 0 0 0 var(--pulse-color); }
  70%  { transform: scale(1.08); box-shadow: var(--state-glow), 0 0 0 45px transparent; }
  100% { transform: scale(1);    box-shadow: var(--state-glow), 0 0 0 0 transparent; }
}
|
|
|
|
|
|
|
|
/* SVG glyph inside the mic button. */
.icon {
  width: 4rem;
  height: 4rem;
  fill: currentColor;
}

/* Stack of transient notifications, pinned top-right. */
#toastContainer {
  position: fixed;
  top: 20px;
  right: 20px;
  z-index: 1001;
  display: flex;
  flex-direction: column;
  gap: 10px;
  max-width: 300px;
}

/* One toast: slides in, then fades out after 2.5s (JS removes the node). */
.toast-message {
  padding: 12px 20px;
  border-radius: 8px;
  background-color: rgba(0, 0, 0, 0.75);
  color: white;
  font-size: 0.9rem;
  font-weight: 500;
  box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
  opacity: 0;
  transform: translateX(100%);
  animation: slideIn 0.5s forwards, fadeOut 0.5s 2.5s forwards;
}

@keyframes slideIn {
  to { opacity: 1; transform: translateX(0); }
}

@keyframes fadeOut {
  from { opacity: 1; }
  to   { opacity: 0; transform: translateX(100%); }
}
|
|
|
|
|
|
|
|
/* Full-screen overlay for the custom alert dialog; hidden until .show is added. */
.modal {
  position: fixed;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  display: flex;
  align-items: center;
  justify-content: center;
  background-color: rgba(0, 0, 0, 0.6);
  z-index: 1000;
  opacity: 0;
  visibility: hidden; /* also removes the hidden overlay from hit-testing */
  transition: opacity 0.4s ease, visibility 0.4s ease;
}

.modal.show {
  opacity: 1;
  visibility: visible;
}

/* Dialog card; springs into place via the overshoot cubic-bezier below. */
.modal-content {
  width: 90%;
  max-width: 500px;
  padding: 3rem;
  border-radius: 2rem;
  background-color: white;
  color: #37474f;
  text-align: center;
  box-shadow: 0 15px 30px rgba(0, 0, 0, 0.2);
  transform: translateY(-50px) scale(0.9);
  transition: transform 0.4s cubic-bezier(0.68, -0.55, 0.27, 1.55), opacity 0.4s ease;
}

.modal.show .modal-content {
  transform: translateY(0) scale(1);
}

/* Primary action button inside the dialog. */
.modal-button {
  padding: 0.8rem 1.8rem;
  margin-top: 2rem;
  border: none;
  border-radius: 1rem;
  background: linear-gradient(45deg, #4dd0e1, #26c6da);
  color: white;
  font-weight: 600;
  cursor: pointer;
  box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
  transition: all 0.3s ease;
}

.modal-button:hover {
  background: linear-gradient(45deg, #00acc1, #4dd0e1);
  transform: translateY(-2px);
  box-shadow: 0 6px 12px rgba(0, 0, 0, 0.15);
}
|
|
|
|
|
|
|
|
/* Floating trigger for the settings panel, pinned top-left. */
.options-button {
  position: fixed;
  top: 20px;
  left: 20px;
  background: rgba(255, 255, 255, 0.8);
  /* BUGFIX: Safari needs the -webkit- prefix for backdrop-filter; the
     settings panel below already carries it but this rule was missing it. */
  -webkit-backdrop-filter: blur(10px);
  backdrop-filter: blur(10px);
  padding: 0.75rem 1.25rem;
  border-radius: 1.5rem;
  font-weight: 600;
  color: #37474f;
  box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
  cursor: pointer;
  transition: all 0.3s ease;
  z-index: 10;
}

.options-button:hover {
  background: white;
  transform: translateY(-2px);
  box-shadow: 0 6px 16px rgba(0, 0, 0, 0.15);
}

/* Slide-in settings drawer; toggled via the .open class. */
.settings-panel {
  position: fixed;
  top: 0;
  left: 0;
  width: 350px;
  height: 100vh;
  padding: 4rem 2.5rem;
  background: rgba(255, 255, 255, 0.75);
  -webkit-backdrop-filter: blur(20px);
  backdrop-filter: blur(20px);
  box-shadow: 5px 0 30px rgba(0, 0, 0, 0.2);
  transform: translateX(-100%); /* parked off-screen until opened */
  transition: transform 0.5s cubic-bezier(0.7, 0, 0.3, 1);
  z-index: 9;
  display: flex;
  flex-direction: column;
  gap: 2rem;
}

.settings-panel.open {
  transform: translateX(0);
}
|
|
</style> |
|
|
</head> |
|
|
<body>
<h1 class="text-5xl font-extrabold mb-4 text-shadow-lg">Voice Assistant</h1>

<div class="flex flex-col items-center gap-2">
  <!-- Live region so screen readers announce state changes ("Listening...", "Speaking..."). -->
  <p id="statusText" class="status-text" role="status" aria-live="polite">Tap to speak</p>
</div>

<!-- type="button" avoids implicit submit semantics; aria-label names this icon-only control. -->
<button id="microphoneButton" class="microphone-button" type="button" aria-label="Start or stop voice conversation">
  <svg class="icon" viewBox="0 0 24 24" fill="currentColor" xmlns="http://www.w3.org/2000/svg" aria-hidden="true">
    <path d="M12 2C10.9 2 10 2.9 10 4v8c0 1.1.9 2 2 2s2-.9 2-2V4c0-1.1-.9-2-2-2zm-1 17.92V22h2v-2.08c3.61-.49 6.4-3.5 6.4-7.42h-2c0 3.03-2.47 5.5-5.5 5.5S6.6 15.53 6.6 12H4.6c0 3.92 2.79 6.93 6.4 7.42z"/>
  </svg>
</button>

<!-- Hidden sink for the assistant's remote WebRTC audio track. -->
<audio id="audio_output_component_id" autoplay class="hidden"></audio>
|
|
|
|
|
<button id="optionsButton" class="options-button" type="button" aria-controls="settingsPanel">More Options</button>

<div id="settingsPanel" class="settings-panel">
  <h2 class="text-3xl font-extrabold text-gray-800">Settings</h2>
  <div class="flex flex-col gap-4">
    <h3 class="text-xl font-bold text-gray-600">Upload File</h3>
    <p class="text-sm text-gray-500">Upload a file to be used by the assistant.</p>
    <!-- Visually-hidden label gives the file input an accessible name (Tailwind sr-only). -->
    <label for="fileInput" class="sr-only">Choose a file to upload</label>
    <input type="file" id="fileInput" class="rounded-lg p-2 text-gray-700 bg-gray-100 border border-gray-300">
    <button id="uploadButton" type="button" class="btn bg-gray-700 text-white p-3 rounded-xl transition hover:bg-gray-800 disabled:opacity-50 disabled:cursor-not-allowed">Upload File</button>
  </div>
  <!-- Live region: announces upload progress/result to assistive tech. -->
  <div id="fileUploadStatus" class="text-sm text-gray-700 mt-4" role="status" aria-live="polite"></div>
</div>
|
|
|
|
|
<!-- Toasts are injected here; the live region must exist before content is added. -->
<div id="toastContainer" role="status" aria-live="polite"></div>

<!-- Custom alert: dialog semantics so AT users know a modal is showing.
     NOTE(review): JS should also trap focus / close on Esc — confirm before shipping. -->
<div id="customAlertModal" class="modal" role="dialog" aria-modal="true" aria-labelledby="alertMessage">
  <div class="modal-content">
    <p id="alertMessage" class="text-2xl font-semibold"></p>
    <button id="alertCloseButton" class="modal-button" type="button">OK</button>
  </div>
</div>
|
|
|
|
|
<script type="module"> |
|
|
|
|
|
import { initializeApp } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-app.js"; |
|
|
import { getAuth, signInAnonymously, signInWithCustomToken, onAuthStateChanged } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-auth.js"; |
|
|
import { getFirestore } from "https://www.gstatic.com/firebasejs/11.6.1/firebase-firestore.js"; |
|
|
|
|
|
|
|
|
// --- WebRTC session state --------------------------------------------------
let pc;                        // RTCPeerConnection; reset to null on teardown
let localStream;               // microphone MediaStream
let webrtc_id;                 // random id correlating signaling messages
let isWebRTCConnected = false; // true once the SDP answer is applied
let isListening = false;       // NOTE(review): written but never read — candidate for removal

// Signaling backend. NOTE(review): ngrok tunnels are ephemeral — this URL
// must be updated (or made configurable) whenever the tunnel restarts.
const SIGNALING_SERVER_URL = 'https://c71505dfda9a.ngrok-free.app';

// --- Firebase handles ------------------------------------------------------
let db;     // Firestore instance (unused so far in this file)
let auth;   // Firebase Auth instance
let userId; // set once auth resolves; included in signaling payloads

// Firebase web config. These values are public identifiers, not secrets,
// but the project must still be protected by Firebase security rules.
const firebaseConfig = {
  apiKey: "AIzaSyBWv5gIXhyHMIzBCH5oFDlSsJhXWzov-ms",
  authDomain: "test2-947ce.firebaseapp.com",
  projectId: "test2-947ce",
  storageBucket: "test2-947ce.firebasestorage.app",
  messagingSenderId: "211407958561",
  appId: "1:211407958561:web:ea24b5a4da5e1053a5e960",
  measurementId: "G-673MR7TX2C"
};

const appId = firebaseConfig.appId;
|
|
|
|
|
|
|
|
|
|
|
// Cache all DOM handles once at module load.
const byId = (id) => document.getElementById(id);

const microphoneButton = byId('microphoneButton');
const statusText = byId('statusText');
const audioOutputComponent = byId('audio_output_component_id');
const customAlertModal = byId('customAlertModal');
const alertMessage = byId('alertMessage');
const alertCloseButton = byId('alertCloseButton');
const toastContainer = byId('toastContainer');
const optionsButton = byId('optionsButton');
const settingsPanel = byId('settingsPanel');
const fileInput = byId('fileInput');
const uploadButton = byId('uploadButton');
const fileUploadStatus = byId('fileUploadStatus');
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Display the blocking alert modal with the given message.
 * @param {string} message - text shown in the dialog body.
 */
function showAlert(message) {
  alertMessage.textContent = message;
  customAlertModal.classList.add('show');
}

// The OK button is the only way to dismiss the alert.
alertCloseButton.addEventListener('click', () => customAlertModal.classList.remove('show'));
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Show a transient toast notification in #toastContainer.
 * @param {string} message - text of the toast.
 * @param {number} [duration=3000] - total visible lifetime in ms.
 */
function showToast(message, duration = 3000) {
  const toast = document.createElement('div');
  toast.classList.add('toast-message');
  toast.textContent = message;
  // BUGFIX: the stylesheet hard-codes a 2.5s fade-out delay, so any
  // non-default `duration` (e.g. the 5000ms error toasts) left the toast
  // invisible-but-present or removed mid-fade. Override the animation
  // inline so the fade always starts 500ms before removal.
  toast.style.animation = `slideIn 0.5s forwards, fadeOut 0.5s ${Math.max(duration - 500, 0)}ms forwards`;
  toastContainer.appendChild(toast);

  setTimeout(() => toast.remove(), duration);
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Replace the large status line under the heading
 * (e.g. "Tap to speak", "Listening...", "Speaking...").
 * @param {string} text - new status text.
 */
function updateStatus(text) {
  statusText.textContent = text;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Boot Firebase (app, auth, Firestore) and ensure the client ends up signed
 * in: reuse a host-injected custom token (`__initial_auth_token`) when one
 * exists, otherwise fall back to anonymous auth. On success, `userId` is
 * captured for use in signaling payloads.
 */
async function initializeFirebase() {
  try {
    const app = initializeApp(firebaseConfig);
    auth = getAuth(app);
    db = getFirestore(app);

    onAuthStateChanged(auth, async (user) => {
      if (user) {
        userId = user.uid;
        return;
      }
      // Not signed in yet — authenticate now.
      try {
        const hasInjectedToken = typeof __initial_auth_token !== 'undefined' && __initial_auth_token;
        if (hasInjectedToken) {
          await signInWithCustomToken(auth, __initial_auth_token);
        } else {
          await signInAnonymously(auth);
        }
      } catch (error) {
        console.error("Firebase Auth Error:", error);
        showAlert(`Firebase authentication failed: ${error.message}`);
      }
    });
  } catch (error) {
    console.error("Error initializing Firebase:", error);
    showAlert(`Failed to initialize Firebase: ${error.message}`);
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Establish the WebRTC session: capture the microphone, negotiate an SDP
 * offer/answer with the signaling server over HTTP, and wire up remote audio
 * playback plus UI state. Trickled ICE candidates are POSTed to the same
 * /webrtc/offer endpoint with type "ice-candidate".
 */
async function startWebRTC() {
  if (isWebRTCConnected) {
    showToast('WebRTC is already connected.');
    return;
  }

  try {
    updateStatus('Connecting...');
    microphoneButton.classList.add('disabled');
    showToast('Initializing connection...');

    pc = new RTCPeerConnection({
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' }
      ]
    });

    localStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    showToast('Microphone access granted.');

    localStream.getTracks().forEach((track) => {
      pc.addTrack(track, localStream);
    });

    // Play the assistant's audio as soon as the remote track arrives.
    pc.addEventListener("track", (evt) => {
      if (audioOutputComponent && audioOutputComponent.srcObject !== evt.streams[0]) {
        audioOutputComponent.srcObject = evt.streams[0];
        audioOutputComponent.play().then(() => {
          showToast('Assistant is speaking...');
          updateStatus('Speaking...');
          microphoneButton.classList.remove('listening');
          microphoneButton.classList.add('speaking');
        }).catch(e => console.error("Error playing audio:", e));

        // NOTE(review): 'ended' rarely fires for live MediaStream playback;
        // confirm the server actually ends the track when speech finishes.
        audioOutputComponent.onended = () => {
          updateStatus('Listening...');
          microphoneButton.classList.remove('speaking');
          microphoneButton.classList.add('listening');
          showToast('Ready for your next command.');
        };
      }
    });

    const dataChannel = pc.createDataChannel("text");
    dataChannel.onopen = () => showToast('Data channel opened.');
    dataChannel.onclose = () => showToast('Data channel closed.');
    // BUGFIX: data-channel error events carry the error in `event.error`
    // (RTCErrorEvent), not `event.message` — the old code always showed
    // "Data channel error: undefined".
    dataChannel.onerror = (evt) => {
      const reason = (evt.error && evt.error.message) || 'unknown error';
      showToast(`Data channel error: ${reason}`, 5000);
    };

    webrtc_id = Math.random().toString(36).substring(7);

    // Trickle ICE: forward each local candidate to the server as it appears.
    pc.onicecandidate = ({ candidate }) => {
      if (candidate) {
        fetch(`${SIGNALING_SERVER_URL}/webrtc/offer`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            candidate: candidate.toJSON(),
            webrtc_id: webrtc_id,
            type: "ice-candidate",
            userId: userId
          })
        })
          .then(response => {
            if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
            return response.json();
          })
          .catch(error => showToast(`Error sending ICE candidate: ${error.message}`, 5000));
      }
    };

    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);

    const response = await fetch(`${SIGNALING_SERVER_URL}/webrtc/offer`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        sdp: offer.sdp,
        type: offer.type,
        webrtc_id: webrtc_id,
        userId: userId
      })
    });

    // BUGFIX: check the HTTP status before parsing; an error page from the
    // server/tunnel would otherwise surface as an opaque JSON parse failure.
    if (!response.ok) {
      throw new Error(`Signaling server rejected the offer (HTTP ${response.status})`);
    }
    const serverResponse = await response.json();

    await pc.setRemoteDescription(new RTCSessionDescription({ type: 'answer', sdp: serverResponse.sdp }));

    isWebRTCConnected = true;
    microphoneButton.classList.remove('disabled');
    microphoneButton.classList.add('listening');
    updateStatus('Listening...');
    showToast('WebRTC connected. Tap to speak!');

    // Keep UI in sync with the connection lifecycle.
    pc.onconnectionstatechange = () => {
      if (pc.connectionState === 'disconnected' || pc.connectionState === 'failed' || pc.connectionState === 'closed') {
        showToast('WebRTC connection lost or failed.', 5000);
        stopWebRTC();
      } else if (pc.connectionState === 'connected') {
        updateStatus('Listening...');
        microphoneButton.classList.add('listening');
        microphoneButton.classList.remove('speaking');
      }
    };

  } catch (error) {
    console.error("Error setting up WebRTC:", error);
    showAlert(`Failed to start WebRTC: ${error.message}. Please ensure your microphone is available and try again.`);
    updateStatus('Error');
    microphoneButton.classList.remove('disabled', 'listening', 'speaking');
    stopWebRTC();
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Tear down the peer connection, stop microphone capture and remote
 * playback, and reset all connection flags and UI state.
 */
function stopWebRTC() {
  if (!isWebRTCConnected && !pc) {
    showToast('WebRTC is already stopped.');
    return;
  }

  updateStatus('Stopping...');
  showToast('Stopping connection...');

  // Release the microphone.
  if (localStream) {
    for (const track of localStream.getTracks()) {
      track.stop();
    }
    localStream = null;
  }

  // Close the peer connection if it is still live.
  if (pc && pc.connectionState !== 'closed') {
    pc.close();
  }
  pc = null;

  // Detach and reset the playback element.
  if (audioOutputComponent) {
    audioOutputComponent.srcObject = null;
    audioOutputComponent.pause();
    audioOutputComponent.currentTime = 0;
  }

  isWebRTCConnected = false;
  isListening = false;
  microphoneButton.classList.remove('listening', 'speaking', 'disabled');
  updateStatus('Tap to speak');
  showToast('WebRTC connection ended.');
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Single tap handler: connect when idle, tear down when connected. */
async function toggleWebRTC() {
  if (!isWebRTCConnected) {
    await startWebRTC();
    return;
  }
  stopWebRTC();
}

microphoneButton.addEventListener('click', toggleWebRTC);
|
|
|
|
|
|
|
|
// Toggle the settings drawer and mirror its state into aria-expanded so
// assistive tech knows whether the controlled panel is open.
optionsButton.addEventListener('click', () => {
  const isOpen = settingsPanel.classList.toggle('open');
  optionsButton.setAttribute('aria-expanded', String(isOpen));
});
|
|
|
|
|
|
|
|
/**
 * Read the selected file, base64-encode it, and POST it to the signaling
 * server's /settings endpoint along with the authenticated user id.
 */
uploadButton.addEventListener('click', async () => {
  const file = fileInput.files[0];
  if (!file) {
    fileUploadStatus.textContent = 'Please select a file to upload.';
    return;
  }
  if (!userId) {
    fileUploadStatus.textContent = 'Authentication not ready, please wait.';
    return;
  }

  fileUploadStatus.textContent = 'Uploading...';
  uploadButton.disabled = true;

  const reader = new FileReader();

  // BUGFIX: the old code never handled read failures, leaving the button
  // permanently disabled and the status stuck on "Uploading...".
  reader.onerror = () => {
    fileUploadStatus.textContent = 'Could not read the selected file.';
    uploadButton.disabled = false;
  };

  reader.onload = async function (event) {
    // Strip the "data:<mime>;base64," prefix from the data URL.
    const base64String = event.target.result.split(',')[1];

    try {
      const response = await fetch(`${SIGNALING_SERVER_URL}/settings`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          userId: userId,
          fileName: file.name,
          fileType: file.type,
          voice_cloning_file: base64String
        })
      });

      if (response.ok) {
        fileUploadStatus.textContent = `File "${file.name}" uploaded successfully!`;
        fileInput.value = '';
        showToast('Settings updated with file upload!');
      } else {
        // BUGFIX: guard against non-JSON error bodies (e.g. proxy/tunnel
        // HTML error pages), which previously threw inside this branch.
        const errorData = await response.json().catch(() => ({}));
        fileUploadStatus.textContent = `Upload failed: ${errorData.error || response.statusText}`;
        showToast('File upload failed.', 5000);
      }
    } catch (error) {
      fileUploadStatus.textContent = `An error occurred: ${error.message}`;
      showToast('An error occurred during upload.', 5000);
    } finally {
      uploadButton.disabled = false;
    }
  };
  reader.readAsDataURL(file);
});
|
|
|
|
|
|
|
|
// Use addEventListener so this handler neither clobbers nor is clobbered by
// any other 'load' handler assigned elsewhere on the page.
window.addEventListener('load', initializeFirebase);
|
|
|
|
|
</script> |
|
|
</body> |
|
|
</html> |