cduss Claude Opus 4.5 committed on
Commit
5333310
·
1 Parent(s): f7818b6

Remove microphone and audio functionality

Browse files

- Remove voice chat / telephone section
- Remove audio element and mute controls
- Keep video streaming, controls, and latency monitor
- Simplify to control + listening only mode

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

Files changed (1) hide show
  1. index.html +6 -132
index.html CHANGED
@@ -280,7 +280,7 @@
280
  text-align: right;
281
  }
282
 
283
- /* Sound & Voice */
284
  .sound-row {
285
  display: flex;
286
  gap: 8px;
@@ -319,34 +319,6 @@
319
  color: var(--pollen-coral);
320
  }
321
 
322
- .voice-section {
323
- padding-top: 10px;
324
- border-top: 1px solid var(--pollen-card-light);
325
- }
326
-
327
- .voice-label {
328
- font-size: 0.75em;
329
- color: var(--text-muted);
330
- margin-bottom: 8px;
331
- display: block;
332
- }
333
-
334
- .voice-buttons {
335
- display: flex;
336
- gap: 8px;
337
- }
338
-
339
- .voice-buttons .btn {
340
- flex: 1;
341
- }
342
-
343
- #micStatus {
344
- margin-top: 8px;
345
- font-size: 0.75em;
346
- color: var(--text-muted);
347
- text-align: center;
348
- }
349
-
350
  /* Action buttons */
351
  .action-row {
352
  display: flex;
@@ -443,7 +415,7 @@
443
  grid-row: 2;
444
  }
445
 
446
- .panel:nth-of-type(3) { /* Sound & Voice */
447
  grid-column: 2;
448
  grid-row: 3;
449
  }
@@ -542,7 +514,6 @@
542
  <!-- Video -->
543
  <div class="video-container">
544
  <video id="remoteVideo" autoplay playsinline></video>
545
- <audio id="remoteAudio" autoplay></audio>
546
 
547
  <div class="video-overlay-top">
548
  <div class="connection-badge">
@@ -611,9 +582,9 @@
611
  </div>
612
  </div>
613
 
614
- <!-- Sound & Voice -->
615
  <div class="panel">
616
- <div class="panel-header">Sound & Voice</div>
617
  <div class="panel-content">
618
  <div class="sound-row">
619
  <input type="text" class="sound-input" id="soundInput" placeholder="Sound file...">
@@ -625,14 +596,6 @@
625
  <span class="preset-chip" onclick="playSoundPreset('yes.wav')">yes</span>
626
  <span class="preset-chip" onclick="playSoundPreset('no.wav')">no</span>
627
  </div>
628
- <div class="voice-section">
629
- <label class="voice-label">Voice Chat (Telephone)</label>
630
- <div class="voice-buttons">
631
- <button class="btn btn-primary" id="btnMic" onclick="toggleMicrophone()">Enable Mic</button>
632
- <button class="btn btn-secondary" id="btnMute" onclick="toggleMute()">Unmute Robot</button>
633
- </div>
634
- <div id="micStatus"></div>
635
- </div>
636
  </div>
637
  </div>
638
 
@@ -668,11 +631,7 @@
668
  // Head control state
669
  let headSlidersActive = false; // True while user is dragging a slider
670
 
671
- // Audio state
672
- let localStream = null;
673
- let micEnabled = false;
674
- let robotMuted = true;
675
- let audioSender = null;
676
  let latencyMonitorId = null;
677
 
678
  // Export functions
@@ -683,8 +642,6 @@
683
  window.stopStream = stopStream;
684
  window.playSound = playSound;
685
  window.playSoundPreset = playSoundPreset;
686
- window.toggleMicrophone = toggleMicrophone;
687
- window.toggleMute = toggleMute;
688
  window.startRecording = startRecording;
689
  window.stopRecording = stopRecording;
690
 
@@ -875,18 +832,11 @@
875
  if (e.track.kind === 'video') {
876
  const video = document.getElementById('remoteVideo');
877
  video.srcObject = e.streams[0];
878
- // Low-latency optimizations
879
  video.playsInline = true;
880
  if ('requestVideoFrameCallback' in video) {
881
- // Start latency monitoring
882
  startLatencyMonitor(video);
883
  }
884
  }
885
- if (e.track.kind === 'audio') {
886
- const audioEl = document.getElementById('remoteAudio');
887
- audioEl.srcObject = new MediaStream([e.track]);
888
- audioEl.muted = robotMuted;
889
- }
890
  };
891
 
892
  peerConnection.onicecandidate = async (e) => {
@@ -930,25 +880,6 @@
930
  if (msg.sdp) {
931
  await peerConnection.setRemoteDescription(new RTCSessionDescription(msg.sdp));
932
  if (msg.sdp.type === 'offer') {
933
- // Find the audio transceiver and change direction to sendrecv
934
- // so we can both receive robot audio AND send our mic audio
935
- const transceivers = peerConnection.getTransceivers();
936
- for (const transceiver of transceivers) {
937
- if (transceiver.receiver.track?.kind === 'audio') {
938
- transceiver.direction = 'sendrecv';
939
- audioSender = transceiver.sender;
940
- console.log('Audio transceiver set to sendrecv');
941
- break;
942
- }
943
- }
944
-
945
- // If no audio transceiver found, add one
946
- if (!audioSender) {
947
- const transceiver = peerConnection.addTransceiver('audio', { direction: 'sendrecv' });
948
- audioSender = transceiver.sender;
949
- console.log('Added new audio transceiver');
950
- }
951
-
952
  const answer = await peerConnection.createAnswer();
953
  await peerConnection.setLocalDescription(answer);
954
  await sendToServer({ type: 'peer', sessionId: currentSessionId, sdp: { type: 'answer', sdp: answer.sdp } });
@@ -991,9 +922,7 @@
991
  peerConnection = null;
992
  dataChannel = null;
993
  currentSessionId = null;
994
- audioSender = null;
995
  document.getElementById('remoteVideo').srcObject = null;
996
- document.getElementById('remoteAudio').srcObject = null;
997
  document.getElementById('startBtn').disabled = !selectedProducerId;
998
  document.getElementById('stopBtn').disabled = true;
999
  document.getElementById('robotSelector').classList.remove('hidden');
@@ -1137,7 +1066,7 @@
1137
  });
1138
  }
1139
 
1140
- // ===================== Sound & Voice =====================
1141
  function playSound() {
1142
  const file = document.getElementById('soundInput').value.trim();
1143
  if (file) sendCommand({ play_sound: file });
@@ -1148,61 +1077,6 @@
1148
  sendCommand({ play_sound: file });
1149
  }
1150
 
1151
- async function toggleMicrophone() {
1152
- const btn = document.getElementById('btnMic');
1153
- const status = document.getElementById('micStatus');
1154
-
1155
- if (micEnabled) {
1156
- if (localStream) localStream.getTracks().forEach(t => t.stop());
1157
- localStream = null;
1158
- if (audioSender) await audioSender.replaceTrack(null);
1159
- micEnabled = false;
1160
- btn.textContent = 'Enable Mic';
1161
- btn.classList.remove('btn-danger');
1162
- btn.classList.add('btn-primary');
1163
- status.textContent = '';
1164
- } else {
1165
- try {
1166
- localStream = await navigator.mediaDevices.getUserMedia({
1167
- audio: { echoCancellation: true, noiseSuppression: true, autoGainControl: true }
1168
- });
1169
- console.log('Got mic stream:', localStream.getAudioTracks()[0].label);
1170
- console.log('audioSender exists:', !!audioSender);
1171
- if (audioSender) {
1172
- await audioSender.replaceTrack(localStream.getAudioTracks()[0]);
1173
- console.log('Track replaced on sender');
1174
- // Log connection state
1175
- console.log('PeerConnection state:', peerConnection?.connectionState);
1176
- console.log('ICE state:', peerConnection?.iceConnectionState);
1177
- // Log all transceivers
1178
- peerConnection?.getTransceivers().forEach((t, i) => {
1179
- console.log(`Transceiver ${i}: kind=${t.receiver.track?.kind}, direction=${t.direction}, currentDirection=${t.currentDirection}`);
1180
- });
1181
- } else {
1182
- console.error('No audioSender available!');
1183
- }
1184
- micEnabled = true;
1185
- btn.textContent = 'Disable Mic';
1186
- btn.classList.remove('btn-primary');
1187
- btn.classList.add('btn-danger');
1188
- status.textContent = 'Speaking to robot...';
1189
- status.style.color = 'var(--success)';
1190
- } catch (e) {
1191
- status.textContent = 'Mic access denied';
1192
- status.style.color = 'var(--danger)';
1193
- }
1194
- }
1195
- }
1196
-
1197
- function toggleMute() {
1198
- robotMuted = !robotMuted;
1199
- document.getElementById('remoteAudio').muted = robotMuted;
1200
- const btn = document.getElementById('btnMute');
1201
- btn.textContent = robotMuted ? 'Unmute Robot' : 'Mute Robot';
1202
- btn.classList.toggle('btn-secondary', robotMuted);
1203
- btn.classList.toggle('btn-danger', !robotMuted);
1204
- }
1205
-
1206
  function startRecording() { sendCommand({ start_recording: true }); }
1207
  function stopRecording() { sendCommand({ stop_recording: true }); }
1208
  </script>
 
280
  text-align: right;
281
  }
282
 
283
+ /* Sound */
284
  .sound-row {
285
  display: flex;
286
  gap: 8px;
 
319
  color: var(--pollen-coral);
320
  }
321
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
322
  /* Action buttons */
323
  .action-row {
324
  display: flex;
 
415
  grid-row: 2;
416
  }
417
 
418
+ .panel:nth-of-type(3) { /* Sound */
419
  grid-column: 2;
420
  grid-row: 3;
421
  }
 
514
  <!-- Video -->
515
  <div class="video-container">
516
  <video id="remoteVideo" autoplay playsinline></video>
 
517
 
518
  <div class="video-overlay-top">
519
  <div class="connection-badge">
 
582
  </div>
583
  </div>
584
 
585
+ <!-- Sound -->
586
  <div class="panel">
587
+ <div class="panel-header">Sound</div>
588
  <div class="panel-content">
589
  <div class="sound-row">
590
  <input type="text" class="sound-input" id="soundInput" placeholder="Sound file...">
 
596
  <span class="preset-chip" onclick="playSoundPreset('yes.wav')">yes</span>
597
  <span class="preset-chip" onclick="playSoundPreset('no.wav')">no</span>
598
  </div>
 
 
 
 
 
 
 
 
599
  </div>
600
  </div>
601
 
 
631
  // Head control state
632
  let headSlidersActive = false; // True while user is dragging a slider
633
 
634
+ // Latency monitor
 
 
 
 
635
  let latencyMonitorId = null;
636
 
637
  // Export functions
 
642
  window.stopStream = stopStream;
643
  window.playSound = playSound;
644
  window.playSoundPreset = playSoundPreset;
 
 
645
  window.startRecording = startRecording;
646
  window.stopRecording = stopRecording;
647
 
 
832
  if (e.track.kind === 'video') {
833
  const video = document.getElementById('remoteVideo');
834
  video.srcObject = e.streams[0];
 
835
  video.playsInline = true;
836
  if ('requestVideoFrameCallback' in video) {
 
837
  startLatencyMonitor(video);
838
  }
839
  }
 
 
 
 
 
840
  };
841
 
842
  peerConnection.onicecandidate = async (e) => {
 
880
  if (msg.sdp) {
881
  await peerConnection.setRemoteDescription(new RTCSessionDescription(msg.sdp));
882
  if (msg.sdp.type === 'offer') {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
883
  const answer = await peerConnection.createAnswer();
884
  await peerConnection.setLocalDescription(answer);
885
  await sendToServer({ type: 'peer', sessionId: currentSessionId, sdp: { type: 'answer', sdp: answer.sdp } });
 
922
  peerConnection = null;
923
  dataChannel = null;
924
  currentSessionId = null;
 
925
  document.getElementById('remoteVideo').srcObject = null;
 
926
  document.getElementById('startBtn').disabled = !selectedProducerId;
927
  document.getElementById('stopBtn').disabled = true;
928
  document.getElementById('robotSelector').classList.remove('hidden');
 
1066
  });
1067
  }
1068
 
1069
+ // ===================== Sound =====================
1070
  function playSound() {
1071
  const file = document.getElementById('soundInput').value.trim();
1072
  if (file) sendCommand({ play_sound: file });
 
1077
  sendCommand({ play_sound: file });
1078
  }
1079
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1080
  function startRecording() { sendCommand({ start_recording: true }); }
1081
  function stopRecording() { sendCommand({ stop_recording: true }); }
1082
  </script>