cduss committed on
Commit
c521401
·
1 Parent(s): f15e207

use reachy mini js lib from github

Browse files
Files changed (2) hide show
  1. index.html +1 -1
  2. reachy-mini.js +0 -772
index.html CHANGED
@@ -159,7 +159,7 @@
159
  </div>
160
 
161
  <script type="module">
162
- import { ReachyMini } from "./reachy-mini.js";
163
 
164
  const robot = new ReachyMini();
165
 
 
159
  </div>
160
 
161
  <script type="module">
162
+ import { ReachyMini } from "https://cdn.jsdelivr.net/gh/pollen-robotics/reachy_mini@develop/js/reachy-mini.js";
163
 
164
  const robot = new ReachyMini();
165
 
reachy-mini.js DELETED
@@ -1,772 +0,0 @@
1
- /**
2
- * reachy-mini.js — Browser SDK for controlling a Reachy Mini robot over WebRTC.
3
- * https://github.com/pollen-robotics/reachy-mini
4
- *
5
- * QUICK START
6
- * ───────────
7
- * import { ReachyMini } from "./reachy-mini.js";
8
- * const robot = new ReachyMini();
9
- *
10
- * // 1. Auth (HuggingFace OAuth — required for the signaling server)
11
- * if (!await robot.authenticate()) { robot.login(); return; }
12
- *
13
- * // 2. Connect to signaling server (SSE)
14
- * await robot.connect();
15
- *
16
- * // 3. Pick a robot once the list arrives
17
- * robot.addEventListener("robotsChanged", (e) => {
18
- * const robots = e.detail.robots; // [{ id, meta: { name } }, ...]
19
- * });
20
- *
21
- * // 4. Start a WebRTC session (resolves when video + data channel ready)
22
- * const detach = robot.attachVideo(document.querySelector("video"));
23
- * await robot.startSession(robotId);
24
- *
25
- * // 5. Send commands
26
- * robot.setHeadPose(0, 10, -5); // roll, pitch, yaw in degrees
27
- * robot.setAntennas(30, -30); // right, left in degrees
28
- * robot.playSound("wake_up.wav"); // filename on robot
29
- *
30
- * // 6. Receive live state (emitted every ~500 ms while streaming)
31
- * robot.addEventListener("state", (e) => {
32
- * const { head, antennas } = e.detail;
33
- * // head: { roll, pitch, yaw } — degrees
34
- * // antennas: { right, left } — degrees
35
- * });
36
- *
37
- * // 7. Audio controls
38
- * robot.setAudioMuted(false); // unmute robot speaker (muted by default)
39
- * robot.setMicMuted(false); // unmute your mic → robot speaker (if supported)
40
- *
41
- * // 8. Cleanup
42
- * detach(); // remove video binding
43
- * await robot.stopSession(); // back to 'connected'
44
- * robot.disconnect(); // back to 'disconnected' (keeps auth)
45
- * robot.logout(); // clear HF credentials too
46
- *
47
- *
48
- * STATE MACHINE
49
- * ─────────────
50
- * 'disconnected' ──connect()──▸ 'connected' ──startSession()──▸ 'streaming'
51
- * ▴ disconnect() ▴ stopSession()
52
- * └─────────────────────────────┘
53
- *
54
- *
55
- * CONSTRUCTOR OPTIONS
56
- * ───────────────────
57
- * new ReachyMini({
58
- * signalingUrl: string, // default: "https://cduss-reachy-mini-central.hf.space"
59
- * enableMicrophone: boolean, // default: true — acquire mic for bidirectional audio
60
- * })
61
- *
62
- *
63
- * READ-ONLY PROPERTIES
64
- * ────────────────────
65
- * .state "disconnected" | "connected" | "streaming"
66
- * .robots Array<{ id: string, meta: { name: string } }>
67
- * .robotState { head: { roll, pitch, yaw }, antennas: { right, left } } (degrees)
68
- * .username string | null — HF username after authenticate()
69
- * .isAuthenticated boolean — true if a valid HF token is available
70
- * .micSupported boolean — true if robot offers bidirectional audio
71
- * .micMuted boolean — your microphone mute state
72
- * .audioMuted boolean — robot speaker mute state (local)
73
- *
74
- *
75
- * EVENTS (EventTarget — use addEventListener)
76
- * ──────────────────────────────────────────────
77
- * "connected" { peerId: string }
78
- * "disconnected" { reason: string }
79
- * "robotsChanged" { robots: Array<{ id, meta }> }
80
- * "streaming" { sessionId: string, robotId: string }
81
- * "sessionStopped" { reason: string }
82
- * "state" { head: { roll, pitch, yaw }, antennas: { right, left } }
83
- * "videoTrack" { track: MediaStreamTrack, stream: MediaStream }
84
- * "micSupported" { supported: boolean }
85
- * "error" { source: "signaling"|"webrtc"|"robot", error: Error|string }
86
- *
87
- *
88
- * EXPORTS
89
- * ───────
90
- * export default ReachyMini;
91
- * export { ReachyMini, rpyToMatrix, matrixToRpy, degToRad, radToDeg };
92
- */
93
-
94
- import {
95
- oauthLoginUrl,
96
- oauthHandleRedirectIfPresent,
97
- } from "https://cdn.jsdelivr.net/npm/@huggingface/hub@0.15.2/+esm";
98
-
99
- // ─── Math utilities ──────────────────────────────────────────────────────────
100
-
101
- /** @param {number} deg @returns {number} */
102
- export function degToRad(deg) { return deg * Math.PI / 180; }
103
-
104
- /** @param {number} rad @returns {number} */
105
- export function radToDeg(rad) { return rad * 180 / Math.PI; }
106
-
107
- /**
108
- * Roll/pitch/yaw (degrees) → 4×4 rotation matrix (ZYX convention).
109
- * This is the wire format for the robot's `set_target` command.
110
- * @param {number} rollDeg @param {number} pitchDeg @param {number} yawDeg
111
- * @returns {number[][]} 4×4 matrix
112
- */
113
- export function rpyToMatrix(rollDeg, pitchDeg, yawDeg) {
114
- const r = degToRad(rollDeg), p = degToRad(pitchDeg), y = degToRad(yawDeg);
115
- const cy = Math.cos(y), sy = Math.sin(y);
116
- const cp = Math.cos(p), sp = Math.sin(p);
117
- const cr = Math.cos(r), sr = Math.sin(r);
118
- return [
119
- [cy * cp, cy * sp * sr - sy * cr, cy * sp * cr + sy * sr, 0],
120
- [sy * cp, sy * sp * sr + cy * cr, sy * sp * cr - cy * sr, 0],
121
- [-sp, cp * sr, cp * cr, 0],
122
- [0, 0, 0, 1],
123
- ];
124
- }
125
-
126
- /**
127
- * Rotation matrix (3×3 or 4×4) → { roll, pitch, yaw } in degrees.
128
- * @param {number[][]} m @returns {{ roll: number, pitch: number, yaw: number }}
129
- */
130
- export function matrixToRpy(m) {
131
- return {
132
- roll: radToDeg(Math.atan2(m[2][1], m[2][2])),
133
- pitch: radToDeg(Math.asin(-m[2][0])),
134
- yaw: radToDeg(Math.atan2(m[1][0], m[0][0])),
135
- };
136
- }
137
-
138
- // ─── Internal helpers ────────────────────────────────────────────────────────
139
-
140
- /** Check if the audio m= section of an SDP has a=sendrecv (bidirectional audio). */
141
- function sdpHasAudioSendRecv(sdp) {
142
- const lines = sdp.split('\r\n');
143
- let inAudio = false;
144
- for (const line of lines) {
145
- if (line.startsWith('m=audio')) inAudio = true;
146
- else if (line.startsWith('m=')) inAudio = false;
147
- if (inAudio && line === 'a=sendrecv') return true;
148
- }
149
- return false;
150
- }
151
-
152
- // ─── ReachyMini class ────────────────────────────────────────────────────────
153
-
154
- export class ReachyMini extends EventTarget {
155
-
156
- /** @param {{ signalingUrl?: string, enableMicrophone?: boolean }} [options] */
157
- constructor(options = {}) {
158
- super();
159
- this._signalingUrl = options.signalingUrl || 'https://cduss-reachy-mini-central.hf.space';
160
- this._enableMicrophone = options.enableMicrophone !== false;
161
-
162
- this._state = 'disconnected'; // 'disconnected' | 'connected' | 'streaming'
163
- this._robots = []; // latest robot list from signaling
164
- this._robotState = { // updated every ~500 ms while streaming
165
- head: { roll: 0, pitch: 0, yaw: 0 },
166
- antennas: { right: 0, left: 0 },
167
- };
168
-
169
- // Auth
170
- this._token = null;
171
- this._username = null;
172
- this._tokenExpires = null;
173
-
174
- // Signaling
175
- this._peerId = null;
176
- this._sseAbortController = null;
177
-
178
- // WebRTC
179
- this._pc = null; // RTCPeerConnection
180
- this._dc = null; // RTCDataChannel (robot commands)
181
- this._sessionId = null;
182
- this._selectedRobotId = null;
183
-
184
- // Audio
185
- this._micStream = null; // MediaStream from getUserMedia
186
- this._micMuted = true;
187
- this._audioMuted = true;
188
- this._micSupported = false; // set after SDP negotiation
189
-
190
- // Timers
191
- this._latencyMonitorId = null;
192
- this._stateRefreshInterval = null;
193
-
194
- // startSession() promise plumbing
195
- this._sessionResolve = null;
196
- this._sessionReject = null;
197
- this._iceConnected = false;
198
- this._dcOpen = false;
199
-
200
- // Set by attachVideo()
201
- this._videoElement = null;
202
- }
203
-
204
- // ─── Read-only properties ────────────────────────────────────────────
205
-
206
- /** @returns {"disconnected"|"connected"|"streaming"} */
207
- get state() { return this._state; }
208
-
209
- /** @returns {Array<{id: string, meta: {name: string}}>} */
210
- get robots() { return this._robots; }
211
-
212
- /** @returns {{head: {roll:number,pitch:number,yaw:number}, antennas: {right:number,left:number}}} */
213
- get robotState() { return this._robotState; }
214
-
215
- /** @returns {string|null} HuggingFace username, set after authenticate(). */
216
- get username() { return this._username; }
217
-
218
- /** @returns {boolean} True if a valid HF token is available. */
219
- get isAuthenticated() { return !!this._token; }
220
-
221
- /** @returns {boolean} True if the robot's SDP offered bidirectional audio. */
222
- get micSupported() { return this._micSupported; }
223
-
224
- /** @returns {boolean} */
225
- get micMuted() { return this._micMuted; }
226
-
227
- /** @returns {boolean} */
228
- get audioMuted() { return this._audioMuted; }
229
-
230
- // ─── Auth ────────────────────────────────────────────────────────────
231
-
232
- /**
233
- * Check for a valid HuggingFace token.
234
- * Tries the OAuth redirect callback first, then falls back to sessionStorage.
235
- * @returns {Promise<boolean>} true → token ready, false → call login()
236
- */
237
- async authenticate() {
238
- try {
239
- const result = await oauthHandleRedirectIfPresent();
240
- if (result) {
241
- this._username = result.userInfo.name || result.userInfo.preferred_username;
242
- this._token = result.accessToken;
243
- this._tokenExpires = result.accessTokenExpiresAt;
244
- sessionStorage.setItem('hf_token', this._token);
245
- sessionStorage.setItem('hf_username', this._username);
246
- sessionStorage.setItem('hf_token_expires', this._tokenExpires);
247
- return true;
248
- }
249
- const t = sessionStorage.getItem('hf_token');
250
- const u = sessionStorage.getItem('hf_username');
251
- const e = sessionStorage.getItem('hf_token_expires');
252
- if (t && u && e && new Date(e) > new Date()) {
253
- this._token = t;
254
- this._username = u;
255
- this._tokenExpires = e;
256
- return true;
257
- }
258
- return false;
259
- } catch (e) {
260
- console.error('Auth error:', e);
261
- return false;
262
- }
263
- }
264
-
265
- /** Redirect the browser to the HuggingFace OAuth login page. */
266
- async login() {
267
- window.location.href = await oauthLoginUrl();
268
- }
269
-
270
- /** Clear stored HF credentials and disconnect everything. */
271
- logout() {
272
- sessionStorage.removeItem('hf_token');
273
- sessionStorage.removeItem('hf_username');
274
- sessionStorage.removeItem('hf_token_expires');
275
- this._username = null;
276
- this._tokenExpires = null;
277
- this.disconnect();
278
- }
279
-
280
- // ─── Lifecycle ───────────────────────────────────────────────────────
281
-
282
- /**
283
- * Open SSE signaling connection. Resolves once the server sends `welcome`.
284
- * Emits "robotsChanged" as robots come and go.
285
- * @param {string} [token] — HF access token. Omit to use the one from authenticate().
286
- * @returns {Promise<void>}
287
- */
288
- async connect(token) {
289
- if (this._state !== 'disconnected') throw new Error('Already connected');
290
- if (token) this._token = token;
291
- if (!this._token) throw new Error('No token — call authenticate() first or pass a token');
292
- this._sseAbortController = new AbortController();
293
-
294
- let res;
295
- try {
296
- res = await fetch(
297
- `${this._signalingUrl}/events?token=${encodeURIComponent(this._token)}`,
298
- { signal: this._sseAbortController.signal },
299
- );
300
- } catch (e) {
301
- this._sseAbortController = null;
302
- throw e;
303
- }
304
- if (!res.ok) {
305
- this._sseAbortController = null;
306
- throw new Error(`HTTP ${res.status}`);
307
- }
308
-
309
- return new Promise((resolve, reject) => {
310
- let welcomed = false;
311
- const reader = res.body.getReader();
312
- const decoder = new TextDecoder();
313
- let buffer = '';
314
-
315
- const readLoop = async () => {
316
- try {
317
- while (true) {
318
- const { done, value } = await reader.read();
319
- if (done) break;
320
- buffer += decoder.decode(value, { stream: true });
321
- const lines = buffer.split('\n');
322
- buffer = lines.pop();
323
- for (const line of lines) {
324
- if (!line.startsWith('data:')) continue;
325
- try {
326
- const msg = JSON.parse(line.slice(5).trim());
327
- if (!welcomed && msg.type === 'welcome') {
328
- welcomed = true;
329
- this._peerId = msg.peerId;
330
- this._state = 'connected';
331
- await this._sendToServer({
332
- type: 'setPeerStatus',
333
- roles: ['listener'],
334
- meta: { name: 'Telepresence' },
335
- });
336
- this._emit('connected', { peerId: msg.peerId });
337
- resolve();
338
- }
339
- this._handleSignalingMessage(msg);
340
- } catch (_) { /* malformed JSON — skip */ }
341
- }
342
- }
343
- } catch (e) {
344
- if (e.name !== 'AbortError') {
345
- this._emit('error', { source: 'signaling', error: e });
346
- }
347
- if (!welcomed) { reject(e); return; }
348
- }
349
- // SSE stream ended (server closed or network drop)
350
- if (this._state !== 'disconnected') {
351
- this._state = 'disconnected';
352
- this._emit('disconnected', { reason: 'SSE closed' });
353
- }
354
- if (!welcomed) reject(new Error('Connection closed before welcome'));
355
- };
356
-
357
- readLoop();
358
- });
359
- }
360
-
361
- /**
362
- * Start a WebRTC session with the given robot.
363
- * Acquires the microphone (if enabled), negotiates SDP, and waits for
364
- * both ICE connection and data channel to be ready before resolving.
365
- * Emits "videoTrack" when the robot's camera stream arrives.
366
- * Emits "micSupported" once SDP negotiation reveals whether the robot
367
- * accepts bidirectional audio.
368
- * @param {string} robotId — one of the ids from the robots list
369
- * @returns {Promise<void>}
370
- */
371
- async startSession(robotId) {
372
- if (this._state !== 'connected') throw new Error('Not connected');
373
- this._selectedRobotId = robotId;
374
- this._iceConnected = false;
375
- this._dcOpen = false;
376
- this._micSupported = false;
377
-
378
- // Acquire mic eagerly so the browser permission prompt appears now,
379
- // but tracks stay disabled (muted) until the user explicitly unmutes.
380
- if (this._enableMicrophone) {
381
- try {
382
- this._micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
383
- this._micStream.getAudioTracks().forEach(t => { t.enabled = false; });
384
- this._micMuted = true;
385
- } catch (e) {
386
- console.warn('Microphone not available:', e);
387
- this._micStream = null;
388
- }
389
- }
390
-
391
- this._pc = new RTCPeerConnection({
392
- iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
393
- });
394
-
395
- return new Promise((resolve, reject) => {
396
- this._sessionResolve = resolve;
397
- this._sessionReject = reject;
398
-
399
- this._pc.ontrack = (e) => {
400
- if (e.track.kind === 'video') {
401
- this._emit('videoTrack', { track: e.track, stream: e.streams[0] });
402
- }
403
- };
404
-
405
- this._pc.onicecandidate = async (e) => {
406
- if (e.candidate && this._sessionId) {
407
- await this._sendToServer({
408
- type: 'peer',
409
- sessionId: this._sessionId,
410
- ice: {
411
- candidate: e.candidate.candidate,
412
- sdpMLineIndex: e.candidate.sdpMLineIndex,
413
- sdpMid: e.candidate.sdpMid,
414
- },
415
- });
416
- }
417
- };
418
-
419
- this._pc.oniceconnectionstatechange = () => {
420
- const s = this._pc?.iceConnectionState;
421
- if (!s) return;
422
- if (s === 'connected' || s === 'completed') {
423
- this._iceConnected = true;
424
- this._checkSessionReady();
425
- } else if (s === 'failed') {
426
- const err = new Error('ICE connection failed');
427
- if (this._sessionReject) {
428
- this._sessionReject(err);
429
- this._sessionResolve = null;
430
- this._sessionReject = null;
431
- }
432
- this._emit('error', { source: 'webrtc', error: err });
433
- } else if (s === 'disconnected') {
434
- this._emit('error', { source: 'webrtc', error: new Error('ICE disconnected') });
435
- }
436
- };
437
-
438
- this._pc.ondatachannel = (e) => {
439
- this._dc = e.channel;
440
- this._dc.onopen = () => {
441
- this._dcOpen = true;
442
- this._checkSessionReady();
443
- };
444
- this._dc.onmessage = (ev) => this._handleRobotMessage(JSON.parse(ev.data));
445
- };
446
-
447
- this._sendToServer({ type: 'startSession', peerId: robotId }).then((r) => {
448
- if (r?.sessionId) this._sessionId = r.sessionId;
449
- });
450
- });
451
- }
452
-
453
- /**
454
- * End the WebRTC session. Returns to "connected" state so you can
455
- * startSession() again with the same or a different robot.
456
- * @returns {Promise<void>}
457
- */
458
- async stopSession() {
459
- if (this._sessionReject) {
460
- this._sessionReject(new Error('Session stopped'));
461
- this._sessionResolve = null;
462
- this._sessionReject = null;
463
- }
464
-
465
- if (this._stateRefreshInterval) { clearInterval(this._stateRefreshInterval); this._stateRefreshInterval = null; }
466
- if (this._latencyMonitorId) { clearInterval(this._latencyMonitorId); this._latencyMonitorId = null; }
467
-
468
- if (this._sessionId) {
469
- await this._sendToServer({ type: 'endSession', sessionId: this._sessionId });
470
- }
471
-
472
- if (this._micStream) { this._micStream.getTracks().forEach(t => t.stop()); this._micStream = null; }
473
- this._micMuted = true;
474
- this._micSupported = false;
475
-
476
- if (this._pc) { this._pc.close(); this._pc = null; }
477
- if (this._dc) { this._dc.close(); this._dc = null; }
478
-
479
- this._sessionId = null;
480
- this._iceConnected = false;
481
- this._dcOpen = false;
482
-
483
- const wasStreaming = this._state === 'streaming';
484
- if (wasStreaming) {
485
- this._state = 'connected';
486
- this._emit('sessionStopped', { reason: 'user' });
487
- }
488
- }
489
-
490
- /**
491
- * Full teardown — abort SSE, close WebRTC.
492
- * Auth state is preserved (call logout() to also clear credentials).
493
- */
494
- disconnect() {
495
- if (this._sseAbortController) { this._sseAbortController.abort(); this._sseAbortController = null; }
496
-
497
- if (this._sessionReject) {
498
- this._sessionReject(new Error('Disconnected'));
499
- this._sessionResolve = null;
500
- this._sessionReject = null;
501
- }
502
-
503
- if (this._stateRefreshInterval) { clearInterval(this._stateRefreshInterval); this._stateRefreshInterval = null; }
504
- if (this._latencyMonitorId) { clearInterval(this._latencyMonitorId); this._latencyMonitorId = null; }
505
-
506
- if (this._sessionId && this._token) {
507
- this._sendToServer({ type: 'endSession', sessionId: this._sessionId }); // fire-and-forget
508
- }
509
-
510
- if (this._micStream) { this._micStream.getTracks().forEach(t => t.stop()); this._micStream = null; }
511
- if (this._pc) { this._pc.close(); this._pc = null; }
512
- if (this._dc) { this._dc.close(); this._dc = null; }
513
-
514
- this._sessionId = null;
515
- this._micMuted = true;
516
- this._micSupported = false;
517
- this._iceConnected = false;
518
- this._dcOpen = false;
519
- this._peerId = null;
520
- this._robots = [];
521
- this._state = 'disconnected';
522
- this._emit('disconnected', { reason: 'user' });
523
- }
524
-
525
- // ─── Commands ────────────────────────────────────────────────────────
526
- // All return false if the data channel is not open, true if sent.
527
-
528
- /**
529
- * Set the head orientation.
530
- * @param {number} roll — degrees @param {number} pitch — degrees @param {number} yaw — degrees
531
- * @returns {boolean}
532
- */
533
- setHeadPose(roll, pitch, yaw) {
534
- return this._sendCommand({ set_target: rpyToMatrix(roll, pitch, yaw) });
535
- }
536
-
537
- /**
538
- * Set antenna positions.
539
- * @param {number} rightDeg @param {number} leftDeg
540
- * @returns {boolean}
541
- */
542
- setAntennas(rightDeg, leftDeg) {
543
- return this._sendCommand({ set_antennas: [degToRad(rightDeg), degToRad(leftDeg)] });
544
- }
545
-
546
- /**
547
- * Play a sound file on the robot.
548
- * @param {string} file — filename available on the robot (e.g. "wake_up.wav")
549
- * @returns {boolean}
550
- */
551
- playSound(file) {
552
- return this._sendCommand({ play_sound: file });
553
- }
554
-
555
- /**
556
- * Send an arbitrary JSON command over the data channel.
557
- * @param {object} data @returns {boolean}
558
- */
559
- sendRaw(data) {
560
- return this._sendCommand(data);
561
- }
562
-
563
- /**
564
- * Request a state snapshot. The response arrives as a "state" event.
565
- * Called automatically every 500 ms while streaming.
566
- * @returns {boolean}
567
- */
568
- requestState() {
569
- return this._sendCommand({ get_state: true });
570
- }
571
-
572
- // ─── Audio ───────────────────────────────────────────────────────────
573
-
574
- /**
575
- * Mute/unmute the robot's audio playback (speaker) locally.
576
- * Audio is muted by default — browsers require a user gesture to unmute.
577
- * @param {boolean} muted
578
- */
579
- setAudioMuted(muted) {
580
- this._audioMuted = muted;
581
- if (this._videoElement) this._videoElement.muted = muted;
582
- }
583
-
584
- /**
585
- * Mute/unmute your microphone. Only works if micSupported is true.
586
- * Mic is muted by default even after acquisition.
587
- * @param {boolean} muted
588
- */
589
- setMicMuted(muted) {
590
- this._micMuted = muted;
591
- if (this._micStream) {
592
- this._micStream.getAudioTracks().forEach(t => { t.enabled = !muted; });
593
- }
594
- }
595
-
596
- // ─── Video helper ────────────────────────────────────────────────────
597
-
598
- /**
599
- * Bind a `<video>` element to this robot's stream.
600
- * Call before startSession(). Sets srcObject when the video track arrives,
601
- * applies audio mute state, and runs a latency monitor that snaps to the
602
- * live edge if the buffer grows > 0.5 s.
603
- *
604
- * @param {HTMLVideoElement} videoElement
605
- * @returns {() => void} cleanup function — call to detach video and stop monitoring
606
- */
607
- attachVideo(videoElement) {
608
- this._videoElement = videoElement;
609
- videoElement.muted = this._audioMuted;
610
-
611
- const onVideoTrack = (e) => {
612
- videoElement.srcObject = e.detail.stream;
613
- videoElement.playsInline = true;
614
- if ('requestVideoFrameCallback' in videoElement) {
615
- this._startLatencyMonitor(videoElement);
616
- }
617
- };
618
-
619
- const onSessionStopped = () => { videoElement.srcObject = null; };
620
-
621
- this.addEventListener('videoTrack', onVideoTrack);
622
- this.addEventListener('sessionStopped', onSessionStopped);
623
-
624
- return () => {
625
- this.removeEventListener('videoTrack', onVideoTrack);
626
- this.removeEventListener('sessionStopped', onSessionStopped);
627
- if (this._latencyMonitorId) { clearInterval(this._latencyMonitorId); this._latencyMonitorId = null; }
628
- videoElement.srcObject = null;
629
- this._videoElement = null;
630
- };
631
- }
632
-
633
- // ─── Private ─────────────────────────────────────────────────────────
634
-
635
- _emit(name, detail) {
636
- this.dispatchEvent(new CustomEvent(name, { detail }));
637
- }
638
-
639
- async _sendToServer(message) {
640
- try {
641
- const res = await fetch(`${this._signalingUrl}/send?token=${encodeURIComponent(this._token)}`, {
642
- method: 'POST',
643
- headers: { 'Content-Type': 'application/json' },
644
- body: JSON.stringify(message),
645
- });
646
- return await res.json();
647
- } catch (e) {
648
- console.error('Send error:', e);
649
- return null;
650
- }
651
- }
652
-
653
- _sendCommand(cmd) {
654
- if (!this._dc || this._dc.readyState !== 'open') return false;
655
- this._dc.send(JSON.stringify(cmd));
656
- return true;
657
- }
658
-
659
- /** Resolves the startSession() promise once both ICE and datachannel are ready. */
660
- _checkSessionReady() {
661
- if (this._iceConnected && this._dcOpen && this._sessionResolve) {
662
- this._state = 'streaming';
663
- this.requestState();
664
- this._stateRefreshInterval = setInterval(() => this.requestState(), 500);
665
- this._emit('streaming', { sessionId: this._sessionId, robotId: this._selectedRobotId });
666
- this._sessionResolve();
667
- this._sessionResolve = null;
668
- this._sessionReject = null;
669
- }
670
- }
671
-
672
- async _handleSignalingMessage(msg) {
673
- switch (msg.type) {
674
- case 'welcome':
675
- break; // handled in connect()
676
- case 'list':
677
- this._robots = msg.producers || [];
678
- this._emit('robotsChanged', { robots: this._robots });
679
- break;
680
- case 'peerStatusChanged': {
681
- const list = await this._sendToServer({ type: 'list' });
682
- if (list?.producers) {
683
- this._robots = list.producers;
684
- this._emit('robotsChanged', { robots: this._robots });
685
- }
686
- break;
687
- }
688
- case 'sessionStarted':
689
- this._sessionId = msg.sessionId;
690
- break;
691
- case 'peer':
692
- this._handlePeerMessage(msg);
693
- break;
694
- }
695
- }
696
-
697
- async _handlePeerMessage(msg) {
698
- if (!this._pc) return;
699
- try {
700
- if (msg.sdp) {
701
- const sdp = msg.sdp;
702
- if (sdp.type === 'offer') {
703
- const supportsMic = sdpHasAudioSendRecv(sdp.sdp);
704
- this._micSupported = supportsMic;
705
- this._emit('micSupported', { supported: supportsMic });
706
-
707
- // Mic track must be added BEFORE setRemoteDescription so the
708
- // generated answer naturally includes sendrecv for audio.
709
- if (supportsMic && this._micStream) {
710
- for (const track of this._micStream.getAudioTracks()) {
711
- this._pc.addTrack(track, this._micStream);
712
- }
713
- }
714
-
715
- await this._pc.setRemoteDescription(new RTCSessionDescription(sdp));
716
- const answer = await this._pc.createAnswer();
717
- await this._pc.setLocalDescription(answer);
718
- await this._sendToServer({
719
- type: 'peer',
720
- sessionId: this._sessionId,
721
- sdp: { type: 'answer', sdp: answer.sdp },
722
- });
723
- } else {
724
- await this._pc.setRemoteDescription(new RTCSessionDescription(sdp));
725
- }
726
- }
727
- if (msg.ice) {
728
- await this._pc.addIceCandidate(new RTCIceCandidate(msg.ice));
729
- }
730
- } catch (e) {
731
- console.error('WebRTC error:', e);
732
- this._emit('error', { source: 'webrtc', error: e });
733
- }
734
- }
735
-
736
- /** Parse robot state (rotation matrix + radians) into degrees and emit. */
737
- _handleRobotMessage(data) {
738
- if (data.state) {
739
- const s = data.state;
740
- if (s.head_pose) this._robotState.head = matrixToRpy(s.head_pose);
741
- if (s.antennas) {
742
- this._robotState.antennas = {
743
- right: radToDeg(s.antennas[0]),
744
- left: radToDeg(s.antennas[1]),
745
- };
746
- }
747
- this._emit('state', { ...this._robotState });
748
- }
749
- if (data.error) {
750
- this._emit('error', { source: 'robot', error: data.error });
751
- }
752
- }
753
-
754
- /** Snap video playback to live edge if buffered lag exceeds 0.5 s. */
755
- _startLatencyMonitor(video) {
756
- if (this._latencyMonitorId) clearInterval(this._latencyMonitorId);
757
- this._latencyMonitorId = setInterval(() => {
758
- if (!video.srcObject || video.paused) return;
759
- const buf = video.buffered;
760
- if (buf.length > 0) {
761
- const end = buf.end(buf.length - 1);
762
- const lag = end - video.currentTime;
763
- if (lag > 0.5) {
764
- console.log(`Latency correction: was ${lag.toFixed(2)}s behind`);
765
- video.currentTime = end - 0.1;
766
- }
767
- }
768
- }, 2000);
769
- }
770
- }
771
-
772
- export default ReachyMini;