VirtualKimi committed on
Commit
6b01c51
Β·
verified Β·
1 Parent(s): 64e1e98

Upload 31 files

Browse files
kimi-js/kimi-emotion-system.js CHANGED
@@ -3,22 +3,6 @@
3
 
4
  class KimiEmotionSystem {
5
  constructor(database = null) {
6
- /*
7
- * Personality Update Pipeline (Refactored)
8
- * 1. Emotion detected -> base deltas applied via EMOTION_TRAIT_EFFECTS (central map).
9
- * - Each delta passes through adjustUp / adjustDown with global + per-trait multipliers
10
- * (window.KIMI_TRAIT_ADJUSTMENT) for consistent scaling.
11
- * 2. Content keyword analysis (_analyzeTextContent) may override interim trait values (explicit matches).
12
- * 3. Cross-trait modifiers (_applyCrossTraitModifiers) apply synergy / balancing rules (e.g. high empathy boosts affection, high romance stabilizes affection, intelligence supports empathy/humor).
13
- * 4. Conversation-based drift (updatePersonalityFromConversation) uses TRAIT_KEYWORD_MODEL:
14
- * - Counts positive/negative keyword hits (user weighted 1.0, model weighted 0.5).
15
- * - Computes rawDelta = posHits*posFactor - negHits*negFactor.
16
- * - Applies sustained negativity amplification after streakPenaltyAfter.
17
- * - Clamps magnitude to maxStep per trait, then applies directly with bounds [0,100].
18
- * 5. Persistence: _preparePersistTrait decides threshold & smoothing before batch write.
19
- * 6. Global personality average (UI) = mean of six core traits (affection included).
20
- * NOTE: Affection is fully independent (no derived average). All adjustments centralized here to avoid duplication.
21
- */
22
  this.db = database;
23
  this.negativeStreaks = {};
24
 
 
3
 
4
  class KimiEmotionSystem {
5
  constructor(database = null) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  this.db = database;
7
  this.negativeStreaks = {};
8
 
kimi-js/kimi-utils.js CHANGED
@@ -329,1562 +329,9 @@ class KimiBaseManager {
329
  }
330
  }
331
 
332
- // Utility class for centralized video management
333
- class KimiVideoManager {
334
- constructor(video1, video2, characterName = "kimi") {
335
- this.characterName = characterName;
336
- this.video1 = video1;
337
- this.video2 = video2;
338
- this.activeVideo = video1;
339
- this.inactiveVideo = video2;
340
- this.currentContext = "neutral";
341
- this.currentEmotion = "neutral";
342
- this.lastSwitchTime = Date.now();
343
- this.pendingSwitch = null;
344
- this.autoTransitionDuration = 9900;
345
- this.transitionDuration = 300;
346
- this._prefetchCache = new Map();
347
- this._prefetchInFlight = new Set();
348
- this._maxPrefetch = 3;
349
- this._loadTimeout = null;
350
- this.updateVideoCategories();
351
- // Use centralized emotion mapping from emotion system
352
- this.emotionToCategory = null; // Will be fetched from emotion system when needed
353
- this.positiveVideos = this.videoCategories.speakingPositive;
354
- this.negativeVideos = this.videoCategories.speakingNegative;
355
- this.neutralVideos = this.videoCategories.neutral;
356
-
357
- // Anti-repetition and scoring - Adaptive history based on available videos
358
- this.playHistory = {
359
- listening: [],
360
- speakingPositive: [],
361
- speakingNegative: [],
362
- neutral: [],
363
- dancing: []
364
- };
365
- this.maxHistoryPerCategory = 5; // Will be dynamically adjusted per category
366
-
367
- this.emotionHistory = [];
368
- this.maxEmotionHistory = 5;
369
- this._neutralLock = false;
370
- this.isEmotionVideoPlaying = false;
371
- this.currentEmotionContext = null;
372
- this._switchInProgress = false;
373
- this._loadingInProgress = false;
374
- this._currentLoadHandler = null;
375
- this._currentErrorHandler = null;
376
- this._stickyContext = null;
377
- this._stickyUntil = 0;
378
- this._pendingSwitches = [];
379
- this._debug = false;
380
- // Adaptive timeout refinements (A+B+C)
381
- this._maxTimeout = 6000; // Reduced upper bound (was 10000) for 10s clips
382
- this._timeoutExtension = 1200; // Extension when metadata only
383
- this._timeoutCapRatio = 0.7; // Cap total wait <= 70% clip length
384
- // Initialize adaptive loading metrics and failure tracking
385
- this._avgLoadTime = null;
386
- this._loadTimeSamples = [];
387
- this._maxSamples = 10;
388
- this._minTimeout = 3000;
389
- this._recentFailures = new Map();
390
- this._failureCooldown = 5000;
391
- this._consecutiveErrorCount = 0;
392
- }
393
-
394
- //Centralized crossfade transition between two videos.
395
- static crossfadeVideos(fromVideo, toVideo, duration = 300, onComplete) {
396
- // Resolve duration from CSS variable if present
397
- try {
398
- const cssDur = getComputedStyle(document.documentElement).getPropertyValue("--video-fade-duration").trim();
399
- if (cssDur) {
400
- // Convert CSS time to ms number if needed (e.g., '300ms' or '0.3s')
401
- if (cssDur.endsWith("ms")) duration = parseFloat(cssDur);
402
- else if (cssDur.endsWith("s")) duration = Math.round(parseFloat(cssDur) * 1000);
403
- }
404
- } catch {}
405
-
406
- // Preload and strict synchronization
407
- const easing = "ease-in-out";
408
- fromVideo.style.transition = `opacity ${duration}ms ${easing}`;
409
- toVideo.style.transition = `opacity ${duration}ms ${easing}`;
410
- // Prepare target video (opacity 0, top z-index)
411
- toVideo.style.opacity = "0";
412
- toVideo.style.zIndex = "2";
413
- fromVideo.style.zIndex = "1";
414
-
415
- // Start target video slightly before the crossfade
416
- const startTarget = () => {
417
- if (toVideo.paused) toVideo.play().catch(() => {});
418
- // Lance le fondu croisΓ©
419
- setTimeout(() => {
420
- fromVideo.style.opacity = "0";
421
- toVideo.style.opacity = "1";
422
- }, 20);
423
- // After transition, adjust z-index and call the callback
424
- setTimeout(() => {
425
- fromVideo.style.zIndex = "1";
426
- toVideo.style.zIndex = "2";
427
- if (onComplete) onComplete();
428
- }, duration + 30);
429
- };
430
-
431
- // If target video is not ready, wait for canplay
432
- if (toVideo.readyState < 3) {
433
- toVideo.addEventListener("canplay", startTarget, { once: true });
434
- toVideo.load();
435
- } else {
436
- startTarget();
437
- }
438
- // Ensure source video is playing
439
- if (fromVideo.paused) fromVideo.play().catch(() => {});
440
- }
441
-
442
- //Centralized video element creation utility.
443
- static createVideoElement(id, className = "bg-video") {
444
- const video = document.createElement("video");
445
- video.id = id;
446
- video.className = className;
447
- video.autoplay = true;
448
- video.muted = true;
449
- video.playsinline = true;
450
- video.preload = "auto";
451
- video.style.opacity = "0";
452
- video.innerHTML =
453
- '<source src="" type="video/mp4" /><span data-i18n="video_not_supported">Your browser does not support the video tag.</span>';
454
- return video;
455
- }
456
-
457
- //Centralized video selection utility.
458
- static getVideoElement(selector) {
459
- if (typeof selector === "string") {
460
- if (selector.startsWith("#")) {
461
- return document.getElementById(selector.slice(1));
462
- }
463
- return document.querySelector(selector);
464
- }
465
- return selector;
466
- }
467
-
468
- setDebug(enabled) {
469
- this._debug = !!enabled;
470
- }
471
-
472
- _logDebug(message, payload = null) {
473
- if (!this._debug) return;
474
- if (payload) console.log("🎬 VideoManager:", message, payload);
475
- else console.log("🎬 VideoManager:", message);
476
- }
477
-
478
- _logSelection(category, selectedSrc, candidates = []) {
479
- if (!this._debug) return;
480
- const recent = (this.playHistory && this.playHistory[category]) || [];
481
- const adaptive = typeof this.getAdaptiveHistorySize === "function" ? this.getAdaptiveHistorySize(category) : null;
482
- console.log("🎬 VideoManager: selection", {
483
- category,
484
- selected: selectedSrc,
485
- candidatesCount: Array.isArray(candidates) ? candidates.length : 0,
486
- adaptiveHistorySize: adaptive,
487
- recentHistory: recent
488
- });
489
- }
490
-
491
- debugPrintHistory(category = null) {
492
- if (!this._debug) return;
493
- if (!this.playHistory) {
494
- console.log("🎬 VideoManager: no play history yet");
495
- return;
496
- }
497
- if (category) {
498
- const recent = this.playHistory[category] || [];
499
- console.log("🎬 VideoManager: history", { category, recent });
500
- return;
501
- }
502
- const summary = Object.keys(this.playHistory).reduce((acc, key) => {
503
- acc[key] = this.playHistory[key];
504
- return acc;
505
- }, {});
506
- console.log("🎬 VideoManager: history summary", summary);
507
- }
508
-
509
- _priorityWeight(context) {
510
- if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") return 3;
511
- if (context === "dancing" || context === "listening") return 2;
512
- return 1;
513
- }
514
-
515
- _enqueuePendingSwitch(req) {
516
- // Keep small bounded list; prefer newest higher-priority
517
- const maxSize = 5;
518
- this._pendingSwitches.push(req);
519
- if (this._pendingSwitches.length > maxSize) {
520
- this._pendingSwitches = this._pendingSwitches.slice(-maxSize);
521
- }
522
- }
523
-
524
- _takeNextPendingSwitch() {
525
- if (!this._pendingSwitches.length) return null;
526
- let bestIdx = 0;
527
- let best = this._pendingSwitches[0];
528
- for (let i = 1; i < this._pendingSwitches.length; i++) {
529
- const cand = this._pendingSwitches[i];
530
- if (cand.priorityWeight > best.priorityWeight) {
531
- best = cand;
532
- bestIdx = i;
533
- } else if (cand.priorityWeight === best.priorityWeight && cand.requestedAt > best.requestedAt) {
534
- best = cand;
535
- bestIdx = i;
536
- }
537
- }
538
- this._pendingSwitches.splice(bestIdx, 1);
539
- return best;
540
- }
541
-
542
- _processPendingSwitches() {
543
- if (this._stickyContext === "dancing") return false;
544
- const next = this._takeNextPendingSwitch();
545
- if (!next) return false;
546
- this._logDebug("Processing pending switch", next);
547
- this.switchToContext(next.context, next.emotion, next.specificVideo, next.traits, next.affection);
548
- return true;
549
- }
550
-
551
- setCharacter(characterName) {
552
- this.characterName = characterName;
553
-
554
- // Nettoyer les handlers en cours lors du changement de personnage
555
- this._cleanupLoadingHandlers();
556
- // Reset per-character fallback pool so it will be rebuilt for the new character
557
- this._fallbackPool = null;
558
- this._fallbackIndex = 0;
559
- this._fallbackPoolCharacter = null;
560
-
561
- this.updateVideoCategories();
562
- }
563
-
564
- updateVideoCategories() {
565
- const folder = getCharacterInfo(this.characterName).videoFolder;
566
- this.videoCategories = {
567
- listening: [
568
- `${folder}listening/listening-gentle-sway.mp4`,
569
- `${folder}listening/listening-magnetic-eye-gaze.mp4`,
570
- `${folder}listening/listening-silky-caressing-hairplay.mp4`,
571
- `${folder}listening/listening-softly-velvet-glance.mp4`,
572
- `${folder}listening/listening-surprise-sweet-shiver.mp4`,
573
- `${folder}listening/listening-whispered-attention.mp4`,
574
- `${folder}listening/listening-hand-gesture.mp4`,
575
- `${folder}listening/listening-hair-touch.mp4`,
576
- `${folder}listening/listening-full-spin.mp4`,
577
- `${folder}listening/listening-teasing-smile.mp4`,
578
- `${folder}listening/listening-dreamy-gaze-romantic.mp4`
579
- ],
580
- speakingPositive: [
581
- `${folder}speaking-positive/speaking-happy-gestures.mp4`,
582
- `${folder}speaking-positive/speaking-positive-heartfelt-shine.mp4`,
583
- `${folder}speaking-positive/speaking-positive-joyful-flutter.mp4`,
584
- `${folder}speaking-positive/speaking-positive-mischief-touch.mp4`,
585
- `${folder}speaking-positive/speaking-positive-sparkling-tease.mp4`,
586
- `${folder}speaking-positive/speaking-playful-wink.mp4`,
587
- `${folder}speaking-positive/speaking-excited-clapping.mp4`,
588
- `${folder}speaking-positive/speaking-heart-gesture.mp4`,
589
- `${folder}speaking-positive/speaking-surprise-graceful-gasp.mp4`,
590
- `${folder}speaking-positive/speaking-laughing-melodious.mp4`,
591
- `${folder}speaking-positive/speaking-gentle-smile.mp4`,
592
- `${folder}speaking-positive/speaking-graceful-arms.mp4`,
593
- `${folder}speaking-positive/speaking-flirtatious-tease.mp4`
594
- ],
595
- speakingNegative: [
596
- `${folder}speaking-negative/speaking-negative-anxious-caress.mp4`,
597
- `${folder}speaking-negative/speaking-negative-frosted-glance.mp4`,
598
- `${folder}speaking-negative/speaking-negative-muted-longing.mp4`,
599
- `${folder}speaking-negative/speaking-negative-shadowed-sigh.mp4`,
600
- `${folder}speaking-negative/speaking-sad-elegant.mp4`,
601
- `${folder}speaking-negative/speaking-frustrated-graceful.mp4`,
602
- `${folder}speaking-negative/speaking-worried-tender.mp4`,
603
- `${folder}speaking-negative/speaking-disappointed-elegant.mp4`,
604
- `${folder}speaking-negative/speaking-gentle-wave-goodbye.mp4`
605
- ],
606
- neutral: [
607
- `${folder}neutral/neutral-thinking-pose.mp4`,
608
- `${folder}neutral/neutral-shy-blush-adorable.mp4`,
609
- `${folder}neutral/neutral-confident-chic-flair.mp4`,
610
- `${folder}neutral/neutral-dreamy-soft-reverie.mp4`,
611
- `${folder}neutral/neutral-flirt-wink-whisper.mp4`,
612
- `${folder}neutral/neutral-goodbye-tender-wave.mp4`,
613
- `${folder}neutral/neutral-hair-twirl.mp4`,
614
- `${folder}neutral/neutral-kiss-air-caress.mp4`,
615
- `${folder}neutral/neutral-poised-shift.mp4`,
616
- `${folder}neutral/neutral-shy-blush-glow.mp4`,
617
- `${folder}neutral/neutral-speaking-dreamy-flow.mp4`,
618
- `${folder}neutral/neutral-gentle-breathing.mp4`,
619
- `${folder}neutral/neutral-hair-adjustment.mp4`,
620
- `${folder}neutral/neutral-arms-crossed-elegant.mp4`,
621
- `${folder}neutral/neutral-seductive-slow-gaze.mp4`,
622
- `${folder}neutral/neutral-confident-pose-alluring.mp4`,
623
- `${folder}neutral/neutral-affectionate-kiss-blow.mp4`
624
- ],
625
- dancing: [
626
- `${folder}dancing/dancing-chin-hand.mp4`,
627
- `${folder}dancing/dancing-bow-promise.mp4`,
628
- `${folder}dancing/dancing-enchanting-flow.mp4`,
629
- `${folder}dancing/dancing-magnetic-spin.mp4`,
630
- `${folder}dancing/dancing-playful-glimmer.mp4`,
631
- `${folder}dancing/dancing-silken-undulation.mp4`,
632
- `${folder}dancing/dancing-full-spin.mp4`,
633
- `${folder}dancing/dancing-seductive-dance-undulation.mp4`,
634
- `${folder}dancing/dancing-slow-seductive.mp4`,
635
- `${folder}dancing/dancing-spinning-elegance-twirl.mp4`
636
- ]
637
- };
638
- this.positiveVideos = this.videoCategories.speakingPositive;
639
- this.negativeVideos = this.videoCategories.speakingNegative;
640
- this.neutralVideos = this.videoCategories.neutral;
641
-
642
- const neutrals = this.neutralVideos || [];
643
- // Progressive warm-up phase: start with only 2 neutrals (adaptive on network), others scheduled later
644
- let neutralPrefetchCount = 2;
645
- try {
646
- const conn = navigator.connection || navigator.webkitConnection || navigator.mozConnection;
647
- if (conn && conn.effectiveType) {
648
- // Reduce on slower connections
649
- if (/2g/i.test(conn.effectiveType)) neutralPrefetchCount = 1;
650
- else if (/3g/i.test(conn.effectiveType)) neutralPrefetchCount = 2;
651
- }
652
- } catch {}
653
- neutrals.slice(0, neutralPrefetchCount).forEach(src => this._prefetch(src));
654
-
655
- // Schedule warm-up step 2: after 5s prefetch the 3rd neutral if not already cached
656
- if (!this._warmupTimer) {
657
- this._warmupTimer = setTimeout(() => {
658
- try {
659
- const target = neutrals[2];
660
- if (target && !this._prefetchCache.has(target)) this._prefetch(target);
661
- } catch {}
662
- }, 5000);
663
- }
664
-
665
- // Mark waiting for first interaction to fetch 4th neutral later
666
- this._awaitingFirstInteraction = true;
667
- }
668
-
669
- async init(database = null) {
670
- // Attach lightweight visibility guard
671
- if (!this._visibilityHandler) {
672
- this._visibilityHandler = this.onVisibilityChange.bind(this);
673
- document.addEventListener("visibilitychange", this._visibilityHandler);
674
- }
675
- // Hook basic user interaction (first click / keypress) to advance warm-up
676
- if (!this._firstInteractionHandler) {
677
- this._firstInteractionHandler = () => {
678
- if (this._awaitingFirstInteraction) {
679
- this._awaitingFirstInteraction = false;
680
- try {
681
- const neutrals = this.neutralVideos || [];
682
- const fourth = neutrals[3];
683
- if (fourth && !this._prefetchCache.has(fourth)) this._prefetch(fourth);
684
- } catch {}
685
- }
686
- };
687
- window.addEventListener("click", this._firstInteractionHandler, { once: true });
688
- window.addEventListener("keydown", this._firstInteractionHandler, { once: true });
689
- }
690
- }
691
-
692
- onVisibilityChange() {
693
- if (document.visibilityState !== "visible") return;
694
- const v = this.activeVideo;
695
- if (!v) return;
696
- try {
697
- if (v.ended) {
698
- if (typeof this.returnToNeutral === "function") this.returnToNeutral();
699
- } else if (v.paused) {
700
- v.play().catch(() => {
701
- if (typeof this.returnToNeutral === "function") this.returnToNeutral();
702
- });
703
- }
704
- } catch {}
705
- }
706
-
707
- // Intelligent contextual management
708
- switchToContext(context, emotion = "neutral", specificVideo = null, traits = null, affection = null) {
709
- // Respect sticky context (avoid overrides while dancing is requested/playing)
710
- if (this._stickyContext === "dancing" && context !== "dancing") {
711
- const categoryForPriority = this.determineCategory(context, emotion, traits);
712
- const priorityWeight = this._priorityWeight(
713
- categoryForPriority === "speakingPositive" || categoryForPriority === "speakingNegative" ? "speaking" : context
714
- );
715
- if (Date.now() < (this._stickyUntil || 0)) {
716
- this._enqueuePendingSwitch({
717
- context,
718
- emotion,
719
- specificVideo,
720
- traits,
721
- affection,
722
- requestedAt: Date.now(),
723
- priorityWeight
724
- });
725
- this._logDebug("Queued during dancing (sticky)", { context, emotion, priorityWeight });
726
- return;
727
- }
728
- this._stickyContext = null;
729
- this._stickyUntil = 0;
730
- // Do not reset adaptive loading metrics here; preserve rolling stats across sticky context release
731
- }
732
- // While an emotion video is playing (speaking), block non-speaking context switches
733
- if (
734
- this.isEmotionVideoPlaying &&
735
- (this.currentContext === "speaking" ||
736
- this.currentContext === "speakingPositive" ||
737
- this.currentContext === "speakingNegative") &&
738
- !(context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
739
- ) {
740
- // Queue the request with appropriate priority to be processed after current clip
741
- const categoryForPriority = this.determineCategory(context, emotion, traits);
742
- const priorityWeight = this._priorityWeight(
743
- categoryForPriority === "speakingPositive" || categoryForPriority === "speakingNegative" ? "speaking" : context
744
- );
745
- this._enqueuePendingSwitch({
746
- context,
747
- emotion,
748
- specificVideo,
749
- traits,
750
- affection,
751
- requestedAt: Date.now(),
752
- priorityWeight
753
- });
754
- this._logDebug("Queued non-speaking during speaking emotion", { context, emotion, priorityWeight });
755
- return;
756
- }
757
-
758
- // While speaking emotion video is playing, also queue speaking→speaking changes (avoid mid-clip replacement)
759
- if (
760
- this.isEmotionVideoPlaying &&
761
- (this.currentContext === "speaking" ||
762
- this.currentContext === "speakingPositive" ||
763
- this.currentContext === "speakingNegative") &&
764
- (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") &&
765
- this.currentEmotionContext &&
766
- this.currentEmotionContext !== emotion
767
- ) {
768
- const priorityWeight = this._priorityWeight("speaking");
769
- this._enqueuePendingSwitch({
770
- context,
771
- emotion,
772
- specificVideo,
773
- traits,
774
- affection,
775
- requestedAt: Date.now(),
776
- priorityWeight
777
- });
778
- this._logDebug("Queued speaking→speaking during active emotion", { from: this.currentEmotionContext, to: emotion });
779
- return;
780
- }
781
- if (context === "neutral" && this._neutralLock) return;
782
- if (
783
- (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") &&
784
- this.isEmotionVideoPlaying &&
785
- this.currentEmotionContext === emotion
786
- )
787
- return;
788
-
789
- if (this.currentContext === context && this.currentEmotion === emotion && !specificVideo) {
790
- const category = this.determineCategory(context, emotion, traits);
791
- const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
792
- const availableVideos = this.videoCategories[category] || this.videoCategories.neutral;
793
- const differentVideos = availableVideos.filter(v => v !== currentVideoSrc);
794
-
795
- if (differentVideos.length > 0) {
796
- const nextVideo =
797
- typeof this._pickScoredVideo === "function"
798
- ? this._pickScoredVideo(category, differentVideos, traits)
799
- : differentVideos[Math.floor(Math.random() * differentVideos.length)];
800
- this.loadAndSwitchVideo(nextVideo, "normal");
801
- // Track play history to avoid immediate repeats
802
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, nextVideo);
803
- this._logSelection(category, nextVideo, differentVideos);
804
- this.lastSwitchTime = Date.now();
805
- }
806
- return;
807
- }
808
-
809
- // Determine the category FIRST to ensure correct video selection
810
- const category = this.determineCategory(context, emotion, traits);
811
-
812
- // DΓ©terminer la prioritΓ© selon le contexte
813
- let priority = "normal";
814
- if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") {
815
- priority = "speaking";
816
- } else if (context === "dancing" || context === "listening") {
817
- priority = "high";
818
- }
819
-
820
- // Set sticky lock for dancing to avoid being interrupted by emotion/neutral updates
821
- if (context === "dancing") {
822
- this._stickyContext = "dancing";
823
- // Lock roughly for one clip duration; will also be cleared on end/neutral
824
- this._stickyUntil = Date.now() + 9500;
825
- }
826
-
827
- // Chemin optimisΓ© lorsque TTS parle/Γ©coute (Γ©vite clignotements)
828
- if (
829
- window.voiceManager &&
830
- window.voiceManager.isSpeaking &&
831
- (context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
832
- ) {
833
- const speakingPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
834
- const speakingCurrent = this.activeVideo.querySelector("source").getAttribute("src");
835
- if (speakingCurrent !== speakingPath || this.activeVideo.ended) {
836
- this.loadAndSwitchVideo(speakingPath, priority);
837
- }
838
- // IMPORTANT: normalize to the resolved category (e.g., speakingPositive/Negative)
839
- this.currentContext = category;
840
- this.currentEmotion = emotion;
841
- this.lastSwitchTime = Date.now();
842
- return;
843
- }
844
- if (window.voiceManager && window.voiceManager.isListening && context === "listening") {
845
- const listeningPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
846
- const listeningCurrent = this.activeVideo.querySelector("source").getAttribute("src");
847
- if (listeningCurrent !== listeningPath || this.activeVideo.ended) {
848
- this.loadAndSwitchVideo(listeningPath, priority);
849
- }
850
- // Normalize to category for consistency
851
- this.currentContext = category;
852
- this.currentEmotion = emotion;
853
- this.lastSwitchTime = Date.now();
854
- return;
855
- }
856
-
857
- // SΓ©lection standard
858
- let videoPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
859
- const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
860
-
861
- // Anti-rΓ©pΓ©tition si plusieurs vidΓ©os disponibles
862
- if (videoPath === currentVideoSrc && (this.videoCategories[category] || []).length > 1) {
863
- const alternatives = this.videoCategories[category].filter(v => v !== currentVideoSrc);
864
- if (alternatives.length > 0) {
865
- videoPath =
866
- typeof this._pickScoredVideo === "function"
867
- ? this._pickScoredVideo(category, alternatives, traits)
868
- : alternatives[Math.floor(Math.random() * alternatives.length)];
869
- }
870
- }
871
-
872
- // Adaptive transition timing based on context and priority
873
- let minTransitionDelay = 300;
874
-
875
- const now = Date.now();
876
- const timeSinceLastSwitch = now - (this.lastSwitchTime || 0);
877
-
878
- // Context-specific timing adjustments
879
- if (priority === "speaking") {
880
- minTransitionDelay = 200;
881
- } else if (context === "listening") {
882
- minTransitionDelay = 250;
883
- } else if (context === "dancing") {
884
- minTransitionDelay = 600;
885
- } else if (context === "neutral") {
886
- minTransitionDelay = 1200;
887
- }
888
-
889
- // Prevent rapid switching only if not critical
890
- if (
891
- this.currentContext === context &&
892
- this.currentEmotion === emotion &&
893
- currentVideoSrc === videoPath &&
894
- !this.activeVideo.paused &&
895
- !this.activeVideo.ended &&
896
- timeSinceLastSwitch < minTransitionDelay &&
897
- priority !== "speaking" // Always allow speech to interrupt
898
- ) {
899
- return;
900
- }
901
-
902
- this._prefetchLikely(category);
903
-
904
- this.loadAndSwitchVideo(videoPath, priority);
905
- // Always store normalized category as currentContext so event bindings match speakingPositive/Negative
906
- this.currentContext = category;
907
- this.currentEmotion = emotion;
908
- this.lastSwitchTime = now;
909
- }
910
-
911
- setupEventListenersForContext(context) {
912
- // Clean previous
913
- if (this._globalEndedHandler) {
914
- this.activeVideo.removeEventListener("ended", this._globalEndedHandler);
915
- this.inactiveVideo.removeEventListener("ended", this._globalEndedHandler);
916
- }
917
-
918
- // Defensive: ensure helpers exist
919
- if (!this.playHistory) this.playHistory = {};
920
- if (!this.maxHistoryPerCategory) this.maxHistoryPerCategory = 8;
921
-
922
- // For dancing: auto-return to neutral after video ends to avoid freeze
923
- if (context === "dancing") {
924
- this._globalEndedHandler = () => {
925
- this._stickyContext = null;
926
- this._stickyUntil = 0;
927
- if (!this._processPendingSwitches()) {
928
- this.returnToNeutral();
929
- }
930
- };
931
- this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
932
- // Safety timer
933
- if (typeof this.scheduleAutoTransition === "function") {
934
- this.scheduleAutoTransition(this.autoTransitionDuration || 10000);
935
- }
936
- return;
937
- }
938
-
939
- if (context === "speakingPositive" || context === "speakingNegative") {
940
- this._globalEndedHandler = () => {
941
- // If TTS is still speaking, keep the speaking flow by chaining another speaking clip
942
- if (window.voiceManager && window.voiceManager.isSpeaking) {
943
- const emotion = this.currentEmotion || this.currentEmotionContext || "positive";
944
- // Preserve speaking context while chaining
945
- const category = emotion === "negative" ? "speakingNegative" : "speakingPositive";
946
- const next = this.selectOptimalVideo(category, null, null, null, emotion);
947
- if (next) {
948
- this.loadAndSwitchVideo(next, "speaking");
949
- this.currentContext = category;
950
- this.currentEmotion = emotion;
951
- this.isEmotionVideoPlaying = true;
952
- this.currentEmotionContext = emotion;
953
- this.lastSwitchTime = Date.now();
954
- return;
955
- }
956
- }
957
- // Otherwise, allow pending high-priority switch or return to neutral
958
- this.isEmotionVideoPlaying = false;
959
- this.currentEmotionContext = null;
960
- this._neutralLock = false;
961
- if (!this._processPendingSwitches()) {
962
- this.returnToNeutral();
963
- }
964
- };
965
- this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
966
- return;
967
- }
968
-
969
- if (context === "listening") {
970
- this._globalEndedHandler = () => {
971
- this.switchToContext("listening", "listening");
972
- };
973
- this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
974
- return;
975
- }
976
-
977
- // Neutral: on end, pick another neutral to avoid static last frame
978
- if (context === "neutral") {
979
- this._globalEndedHandler = () => this.returnToNeutral();
980
- this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
981
- }
982
- }
983
-
984
- // keep only the augmented determineCategory above (with traits)
985
- selectOptimalVideo(category, specificVideo = null, traits = null, affection = null, emotion = null) {
986
- const availableVideos = this.videoCategories[category] || this.videoCategories.neutral;
987
-
988
- if (specificVideo && availableVideos.includes(specificVideo)) {
989
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, specificVideo);
990
- this._logSelection(category, specificVideo, availableVideos);
991
- return specificVideo;
992
- }
993
-
994
- const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
995
-
996
- // Filter out recently played videos using adaptive history
997
- const recentlyPlayed = this.playHistory[category] || [];
998
- let candidateVideos = availableVideos.filter(video => video !== currentVideoSrc && !recentlyPlayed.includes(video));
999
-
1000
- // If no fresh videos, allow recently played but not current
1001
- if (candidateVideos.length === 0) {
1002
- candidateVideos = availableVideos.filter(video => video !== currentVideoSrc);
1003
- }
1004
-
1005
- // Ultimate fallback
1006
- if (candidateVideos.length === 0) {
1007
- candidateVideos = availableVideos;
1008
- }
1009
-
1010
- // Ensure we're not falling back to wrong category
1011
- if (candidateVideos.length === 0) {
1012
- candidateVideos = this.videoCategories.neutral;
1013
- }
1014
-
1015
- // If traits and affection are provided, weight the selection more subtly
1016
- if (traits && typeof affection === "number") {
1017
- let weights = candidateVideos.map(video => {
1018
- if (category === "speakingPositive") {
1019
- // Positive videos favored by affection, romance, and humor
1020
- const base = 1 + (affection / 100) * 0.4; // Affection influence factor
1021
- let bonus = 0;
1022
- const rom = typeof traits.romance === "number" ? traits.romance : 50;
1023
- const hum = typeof traits.humor === "number" ? traits.humor : 50;
1024
- if (emotion === "romantic") bonus += (rom / 100) * 0.3; // Romance context bonus
1025
- if (emotion === "laughing") bonus += (hum / 100) * 0.3; // Humor context bonus
1026
- return base + bonus;
1027
- }
1028
- if (category === "speakingNegative") {
1029
- // Negative videos when affection is low (reduced weight to balance)
1030
- return 1 + ((100 - affection) / 100) * 0.3; // Low-affection influence factor
1031
- }
1032
- if (category === "neutral") {
1033
- // Neutral videos when affection is moderate, also influenced by intelligence
1034
- const distance = Math.abs(50 - affection) / 50; // 0 at 50, 1 at 0 or 100
1035
- const intBonus = ((traits.intelligence || 50) / 100) * 0.1; // Intelligence adds to neutral thoughtfulness
1036
- return 1 + (1 - Math.min(1, distance)) * 0.2 + intBonus;
1037
- }
1038
- if (category === "dancing") {
1039
- // Dancing strongly influenced by playfulness, romance also adds excitement
1040
- const playBonus = Math.min(0.6, (traits.playfulness / 100) * 0.7);
1041
- const romanceBonus = ((traits.romance || 50) / 100) * 0.2; // Romance adds to dance appeal
1042
- return 1 + playBonus + romanceBonus;
1043
- }
1044
- if (category === "listening") {
1045
- // Listening influenced by empathy, intelligence, and affection
1046
- const empathyWeight = (traits.empathy || 50) / 100;
1047
- const intWeight = ((traits.intelligence || 50) / 100) * 0.1; // Intelligence improves listening quality
1048
- return 1 + empathyWeight * 0.3 + (affection / 100) * 0.1 + intWeight;
1049
- }
1050
- return 1;
1051
- });
1052
-
1053
- const total = weights.reduce((a, b) => a + b, 0);
1054
- let r = Math.random() * total;
1055
- for (let i = 0; i < candidateVideos.length; i++) {
1056
- if (r < weights[i]) {
1057
- const chosen = candidateVideos[i];
1058
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, chosen);
1059
- this._logSelection(category, chosen, candidateVideos);
1060
- return chosen;
1061
- }
1062
- r -= weights[i];
1063
- }
1064
- const selectedVideo = candidateVideos[0];
1065
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, selectedVideo);
1066
- this._logSelection(category, selectedVideo, candidateVideos);
1067
- return selectedVideo;
1068
- }
1069
-
1070
- // No traits weighting: random pick
1071
- if (candidateVideos.length === 0) {
1072
- return availableVideos && availableVideos[0] ? availableVideos[0] : null;
1073
- }
1074
- const selectedVideo = candidateVideos[Math.floor(Math.random() * candidateVideos.length)];
1075
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, selectedVideo);
1076
- this._logSelection(category, selectedVideo, candidateVideos);
1077
- return selectedVideo;
1078
- }
1079
-
1080
- // Get adaptive history size based on available videos
1081
- getAdaptiveHistorySize(category) {
1082
- const availableVideos = this.videoCategories[category] || [];
1083
- const videoCount = availableVideos.length;
1084
-
1085
- // Adaptive history: keep 40-60% of available videos in history
1086
- // Minimum 2, maximum 8 to prevent extreme cases
1087
- if (videoCount <= 3) return Math.max(1, videoCount - 1);
1088
- if (videoCount <= 6) return Math.max(2, Math.floor(videoCount * 0.5));
1089
- return Math.min(8, Math.floor(videoCount * 0.6));
1090
- }
1091
-
1092
- // Update history with adaptive sizing
1093
- updatePlayHistory(category, videoPath) {
1094
- if (!this.playHistory[category]) {
1095
- this.playHistory[category] = [];
1096
- }
1097
-
1098
- const adaptiveSize = this.getAdaptiveHistorySize(category);
1099
- this.playHistory[category].push(videoPath);
1100
-
1101
- // Trim to adaptive size
1102
- if (this.playHistory[category].length > adaptiveSize) {
1103
- this.playHistory[category] = this.playHistory[category].slice(-adaptiveSize);
1104
- }
1105
- }
1106
-
1107
- // Ensure determineCategory exists as a class method (used at line ~494 and ~537)
1108
- determineCategory(context, emotion = "neutral", traits = null) {
1109
- // Get emotion mapping from centralized emotion system
1110
- const emotionToCategory = window.kimiEmotionSystem?.emotionToVideoCategory || {
1111
- listening: "listening",
1112
- positive: "speakingPositive",
1113
- negative: "speakingNegative",
1114
- neutral: "neutral",
1115
- surprise: "speakingPositive",
1116
- laughing: "speakingPositive",
1117
- shy: "neutral",
1118
- confident: "speakingPositive",
1119
- romantic: "speakingPositive",
1120
- flirtatious: "speakingPositive",
1121
- goodbye: "neutral",
1122
- kiss: "speakingPositive",
1123
- dancing: "dancing",
1124
- speaking: "speakingPositive",
1125
- speakingPositive: "speakingPositive",
1126
- speakingNegative: "speakingNegative"
1127
- };
1128
-
1129
- // Prefer explicit context mapping if provided (e.g., 'listening','dancing')
1130
- if (emotionToCategory[context]) {
1131
- return emotionToCategory[context];
1132
- }
1133
- // Normalize generic 'speaking' by emotion polarity
1134
- if (context === "speaking") {
1135
- if (emotion === "positive") return "speakingPositive";
1136
- if (emotion === "negative") return "speakingNegative";
1137
- return "neutral";
1138
- }
1139
- // Map by emotion label when possible
1140
- if (emotionToCategory[emotion]) {
1141
- return emotionToCategory[emotion];
1142
- }
1143
- return "neutral";
1144
- }
1145
-
1146
- // SPECIALIZED METHODS FOR EACH CONTEXT
1147
- async startListening(traits = null, affection = null) {
1148
- // If already listening and playing, avoid redundant switch
1149
- if (this.currentContext === "listening" && !this.activeVideo.paused && !this.activeVideo.ended) {
1150
- return;
1151
- }
1152
- // Immediate switch to keep UI responsive
1153
- this.switchToContext("listening");
1154
-
1155
- // Add a short grace window to prevent immediate switch to speaking before TTS starts
1156
- clearTimeout(this._listeningGraceTimer);
1157
- this._listeningGraceTimer = setTimeout(() => {
1158
- // No-op; used as a time marker to let LLM prepare the answer
1159
- }, 1500);
1160
-
1161
- // If caller did not provide traits, try to fetch and refine selection
1162
- try {
1163
- if (!traits && window.kimiDB && typeof window.kimiDB.getAllPersonalityTraits === "function") {
1164
- const selectedCharacter = await window.kimiDB.getSelectedCharacter();
1165
- const allTraits = await window.kimiDB.getAllPersonalityTraits(selectedCharacter);
1166
- if (allTraits && typeof allTraits === "object") {
1167
- const aff = typeof allTraits.affection === "number" ? allTraits.affection : undefined;
1168
- // Re-issue context switch with weighting parameters to better pick listening videos
1169
- this.switchToContext("listening", "listening", null, allTraits, aff);
1170
- }
1171
- } else if (traits) {
1172
- this.switchToContext("listening", "listening", null, traits, affection);
1173
- }
1174
- } catch (e) {
1175
- // Non-fatal: keep basic listening behavior
1176
- console.warn("Listening refinement skipped due to error:", e);
1177
- }
1178
- }
1179
-
1180
- respondWithEmotion(emotion, traits = null, affection = null) {
1181
- // Ignore neutral emotion to avoid unintended overrides (use returnToNeutral when appropriate)
1182
- if (emotion === "neutral") {
1183
- if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
1184
- this.returnToNeutral();
1185
- return;
1186
- }
1187
- // Do not override dancing while sticky
1188
- if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
1189
- // If we are already playing the same emotion video, do nothing
1190
- if (this.isEmotionVideoPlaying && this.currentEmotionContext === emotion) return;
1191
- // If we just entered listening and TTS isn’t started yet, wait a bit to avoid desync
1192
- const now = Date.now();
1193
- const stillInGrace = this._listeningGraceTimer != null;
1194
- const ttsNotStarted = !(window.voiceManager && window.voiceManager.isSpeaking);
1195
- if (this.currentContext === "listening" && stillInGrace && ttsNotStarted) {
1196
- clearTimeout(this._pendingSpeakSwitch);
1197
- this._pendingSpeakSwitch = setTimeout(() => {
1198
- // Re-check speaking state; only switch when we have an actual emotion to play alongside TTS
1199
- if (window.voiceManager && window.voiceManager.isSpeaking) {
1200
- this.switchToContext("speaking", emotion, null, traits, affection);
1201
- this.isEmotionVideoPlaying = true;
1202
- this.currentEmotionContext = emotion;
1203
- }
1204
- }, 900);
1205
- return;
1206
- }
1207
-
1208
- // First switch context (so internal guards don't see the new flags yet)
1209
- this.switchToContext("speaking", emotion, null, traits, affection);
1210
- // Then mark the emotion video as playing for override protection
1211
- this.isEmotionVideoPlaying = true;
1212
- this.currentEmotionContext = emotion;
1213
- }
1214
-
1215
- returnToNeutral() {
1216
- // Always ensure we resume playback with a fresh neutral video to avoid freeze
1217
- if (this._neutralLock) return;
1218
- this._neutralLock = true;
1219
- setTimeout(() => {
1220
- this._neutralLock = false;
1221
- }, 1000);
1222
- this._stickyContext = null;
1223
- this._stickyUntil = 0;
1224
- this.isEmotionVideoPlaying = false;
1225
- this.currentEmotionContext = null;
1226
-
1227
- // Si la voix est encore en cours, relancer une vidΓ©o neutre en boucle
1228
- const category = "neutral";
1229
- const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
1230
- const available = this.videoCategories[category] || [];
1231
- let nextSrc = null;
1232
- if (available.length > 0) {
1233
- const candidates = available.filter(v => v !== currentVideoSrc);
1234
- nextSrc =
1235
- candidates.length > 0
1236
- ? candidates[Math.floor(Math.random() * candidates.length)]
1237
- : available[Math.floor(Math.random() * available.length)];
1238
- }
1239
- if (nextSrc) {
1240
- this.loadAndSwitchVideo(nextSrc, "normal");
1241
- if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, nextSrc);
1242
- this.currentContext = "neutral";
1243
- this.currentEmotion = "neutral";
1244
- this.lastSwitchTime = Date.now();
1245
- // Si la voix est encore en cours, s'assurer qu'on relance une vidΓ©o neutre Γ  la fin
1246
- if (window.voiceManager && window.voiceManager.isSpeaking) {
1247
- this.activeVideo.addEventListener(
1248
- "ended",
1249
- () => {
1250
- if (window.voiceManager && window.voiceManager.isSpeaking) {
1251
- this.returnToNeutral();
1252
- }
1253
- },
1254
- { once: true }
1255
- );
1256
- }
1257
- } else {
1258
- // Fallback to existing path if list empty
1259
- this.switchToContext("neutral");
1260
- }
1261
- }
1262
-
1263
- // ADVANCED CONTEXTUAL ANALYSIS
1264
- async analyzeAndSelectVideo(userMessage, kimiResponse, emotionAnalysis, traits = null, affection = null, lang = null) {
1265
- // Do not analyze-switch away while dancing is sticky/playing
1266
- if (this._stickyContext === "dancing" || this.currentContext === "dancing") {
1267
- return; // let dancing finish
1268
- }
1269
- // Auto-detect language if not specified
1270
- let userLang = lang;
1271
- if (!userLang && window.kimiDB && window.kimiDB.getPreference) {
1272
- userLang = await window.KimiLanguageUtils.getLanguage();
1273
- }
1274
-
1275
- // Use existing emotion analysis instead of creating new system
1276
- let detectedEmotion = "neutral";
1277
- if (window.kimiAnalyzeEmotion) {
1278
- // Analyze combined user message and Kimi response using existing function
1279
- const combinedText = [userMessage, kimiResponse].filter(Boolean).join(" ");
1280
- detectedEmotion = window.kimiAnalyzeEmotion(combinedText, userLang);
1281
- console.log(`🎭 Emotion detected: "${detectedEmotion}" from text: "${combinedText.substring(0, 50)}..."`);
1282
- } else if (emotionAnalysis && emotionAnalysis.reaction) {
1283
- // Fallback to provided emotion analysis
1284
- detectedEmotion = emotionAnalysis.reaction;
1285
- }
1286
-
1287
- // Special case: Auto-dancing if playfulness high (more accessible)
1288
- if (traits && typeof traits.playfulness === "number" && traits.playfulness >= 75) {
1289
- this.switchToContext("dancing", "dancing", null, traits, affection);
1290
- return;
1291
- }
1292
-
1293
- // Add to emotion history
1294
- this.emotionHistory.push(detectedEmotion);
1295
- if (this.emotionHistory.length > this.maxEmotionHistory) {
1296
- this.emotionHistory.shift();
1297
- }
1298
-
1299
- // Analyze emotion trend - support all possible emotions
1300
- const counts = {
1301
- positive: 0,
1302
- negative: 0,
1303
- neutral: 0,
1304
- dancing: 0,
1305
- listening: 0,
1306
- romantic: 0,
1307
- laughing: 0,
1308
- surprise: 0,
1309
- confident: 0,
1310
- shy: 0,
1311
- flirtatious: 0,
1312
- kiss: 0,
1313
- goodbye: 0
1314
- };
1315
- for (let i = 0; i < this.emotionHistory.length; i++) {
1316
- const emo = this.emotionHistory[i];
1317
- if (counts[emo] !== undefined) counts[emo]++;
1318
- }
1319
-
1320
- // Find dominant emotion
1321
- let dominant = null;
1322
- let max = 0;
1323
- for (const key in counts) {
1324
- if (counts[key] > max) {
1325
- max = counts[key];
1326
- dominant = key;
1327
- }
1328
- }
1329
-
1330
- // Switch to appropriate context based on dominant emotion
1331
- if (max >= 1 && dominant) {
1332
- // Map emotion to context using centralized emotion mapping
1333
- const emotionToCategory = window.kimiEmotionSystem?.emotionToVideoCategory || {};
1334
- const targetCategory = emotionToCategory[dominant];
1335
- if (targetCategory) {
1336
- this.switchToContext(targetCategory, dominant, null, traits, affection);
1337
- return;
1338
- }
1339
-
1340
- // Fallback for unmapped emotions
1341
- if (dominant === "dancing") {
1342
- this.switchToContext("dancing", "dancing", null, traits, affection);
1343
- return;
1344
- }
1345
- if (dominant === "positive") {
1346
- this.switchToContext("speakingPositive", "positive", null, traits, affection);
1347
- return;
1348
- }
1349
- if (dominant === "negative") {
1350
- this.switchToContext("speakingNegative", "negative", null, traits, affection);
1351
- return;
1352
- }
1353
- if (dominant === "listening") {
1354
- this.switchToContext("listening", "listening", null, traits, affection);
1355
- return;
1356
- }
1357
- }
1358
-
1359
- // Default to neutral context, with a positive bias at high affection (more accessible)
1360
- if (traits && typeof traits.affection === "number" && traits.affection >= 80) {
1361
- const chance = Math.random();
1362
- if (chance < 0.35) {
1363
- // Increased chance from 0.25 to 0.35
1364
- this.switchToContext("speakingPositive", "positive", null, traits, affection);
1365
- return;
1366
- }
1367
- }
1368
- // Avoid neutral override if a transient state should persist (handled elsewhere)
1369
- this.switchToContext("neutral", "neutral", null, traits, affection);
1370
- }
1371
-
1372
- // AUTOMATIC TRANSITION TO NEUTRAL
1373
- scheduleAutoTransition(delayMs) {
1374
- clearTimeout(this.autoTransitionTimer);
1375
-
1376
- // Ne pas programmer d'auto-transition pour les contextes de base
1377
- if (this.currentContext === "neutral" || this.currentContext === "listening") {
1378
- return;
1379
- }
1380
-
1381
- // DurΓ©es adaptΓ©es selon le contexte (toutes les vidΓ©os font 10s)
1382
- let duration;
1383
- if (typeof delayMs === "number") {
1384
- duration = delayMs;
1385
- } else {
1386
- switch (this.currentContext) {
1387
- case "dancing":
1388
- duration = 10000; // 10 secondes pour dancing (durΓ©e rΓ©elle des vidΓ©os)
1389
- break;
1390
- case "speakingPositive":
1391
- case "speakingNegative":
1392
- duration = 10000; // 10 secondes pour speaking (durΓ©e rΓ©elle des vidΓ©os)
1393
- break;
1394
- case "neutral":
1395
- // Pas d'auto-transition pour neutral (Γ©tat par dΓ©faut, boucle en continu)
1396
- return;
1397
- case "listening":
1398
- // Pas d'auto-transition pour listening (personnage Γ©coute l'utilisateur)
1399
- return;
1400
- default:
1401
- duration = this.autoTransitionDuration; // 10 secondes par dΓ©faut
1402
- }
1403
- }
1404
-
1405
- console.log(`Auto-transition scheduled in ${duration / 1000}s (${this.currentContext} β†’ neutral)`);
1406
- this.autoTransitionTimer = setTimeout(() => {
1407
- if (this.currentContext !== "neutral" && this.currentContext !== "listening") {
1408
- if (!this._processPendingSwitches()) {
1409
- this.returnToNeutral();
1410
- }
1411
- }
1412
- }, duration);
1413
- }
1414
-
1415
- // COMPATIBILITY WITH THE OLD SYSTEM
1416
- switchVideo(emotion = null) {
1417
- if (emotion) {
1418
- this.switchToContext("speaking", emotion);
1419
- } else {
1420
- this.switchToContext("neutral");
1421
- }
1422
- }
1423
-
1424
- autoSwitchToNeutral() {
1425
- this._neutralLock = false;
1426
- this.isEmotionVideoPlaying = false;
1427
- this.currentEmotionContext = null;
1428
- this.switchToContext("neutral");
1429
- }
1430
-
1431
- getNextVideo(emotion, currentSrc) {
1432
- // Adapt the old method for compatibility
1433
- const category = this.determineCategory("speaking", emotion);
1434
- return this.selectOptimalVideo(category);
1435
- }
1436
-
1437
- loadAndSwitchVideo(videoSrc, priority = "normal") {
1438
- const startTs = performance.now();
1439
- // Guard: ignore if recently failed and still in cooldown
1440
- const lastFail = this._recentFailures.get(videoSrc);
1441
- if (lastFail && performance.now() - lastFail < this._failureCooldown) {
1442
- // Pick an alternative neutral as quick substitution
1443
- const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1444
- const alt = neutralList.find(v => v !== videoSrc) || neutralList[0];
1445
- if (alt && alt !== videoSrc) {
1446
- console.warn(`Skipping recently failed video (cooldown): ${videoSrc} -> trying alt: ${alt}`);
1447
- return this.loadAndSwitchVideo(alt, priority);
1448
- }
1449
- }
1450
- // Avoid redundant loading if the requested source is already active or currently loading in inactive element
1451
- const activeSrc = this.activeVideo?.querySelector("source")?.getAttribute("src");
1452
- const inactiveSrc = this.inactiveVideo?.querySelector("source")?.getAttribute("src");
1453
- if (videoSrc && (videoSrc === activeSrc || (this._loadingInProgress && videoSrc === inactiveSrc))) {
1454
- if (priority !== "high" && priority !== "speaking") {
1455
- return; // no need to reload same video
1456
- }
1457
- }
1458
- // Only log high priority or error cases to reduce noise
1459
- if (priority === "speaking" || priority === "high") {
1460
- console.log(`🎬 Loading video: ${videoSrc} (priority: ${priority})`);
1461
- }
1462
-
1463
- // Si une vidΓ©o haute prioritΓ© arrive, on peut interrompre le chargement en cours
1464
- if (this._loadingInProgress) {
1465
- if (priority === "high" || priority === "speaking") {
1466
- this._loadingInProgress = false;
1467
- // Nettoyer les event listeners en cours sur la vidΓ©o inactive
1468
- this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1469
- this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1470
- this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1471
- if (this._loadTimeout) {
1472
- clearTimeout(this._loadTimeout);
1473
- this._loadTimeout = null;
1474
- }
1475
- } else {
1476
- return;
1477
- }
1478
- }
1479
-
1480
- this._loadingInProgress = true;
1481
-
1482
- // Nettoyer tous les timers en cours
1483
- clearTimeout(this.autoTransitionTimer);
1484
- if (this._loadTimeout) {
1485
- clearTimeout(this._loadTimeout);
1486
- this._loadTimeout = null;
1487
- }
1488
-
1489
- const pref = this._prefetchCache.get(videoSrc);
1490
- if (pref && (pref.readyState >= 2 || pref.buffered.length > 0)) {
1491
- const source = this.inactiveVideo.querySelector("source");
1492
- source.setAttribute("src", videoSrc);
1493
- try {
1494
- this.inactiveVideo.currentTime = 0;
1495
- } catch {}
1496
- this.inactiveVideo.load();
1497
- } else {
1498
- this.inactiveVideo.querySelector("source").setAttribute("src", videoSrc);
1499
- this.inactiveVideo.load();
1500
- }
1501
-
1502
- // Stocker les rΓ©fΓ©rences aux handlers pour pouvoir les nettoyer
1503
- let fired = false;
1504
- const onReady = () => {
1505
- if (fired) return;
1506
- fired = true;
1507
- this._loadingInProgress = false;
1508
- if (this._loadTimeout) {
1509
- clearTimeout(this._loadTimeout);
1510
- this._loadTimeout = null;
1511
- }
1512
- this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1513
- this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1514
- this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1515
- // Update rolling average load time
1516
- const duration = performance.now() - startTs;
1517
- this._loadTimeSamples.push(duration);
1518
- if (this._loadTimeSamples.length > this._maxSamples) this._loadTimeSamples.shift();
1519
- const sum = this._loadTimeSamples.reduce((a, b) => a + b, 0);
1520
- this._avgLoadTime = sum / this._loadTimeSamples.length;
1521
- this._consecutiveErrorCount = 0; // reset on success
1522
- this.performSwitch();
1523
- };
1524
- this._currentLoadHandler = onReady;
1525
-
1526
- const folder = getCharacterInfo(this.characterName).videoFolder;
1527
- // Rotating fallback pool (stable neutrals first positions)
1528
- // Build or rebuild fallback pool when absent or when character changed
1529
- if (!this._fallbackPool || this._fallbackPoolCharacter !== this.characterName) {
1530
- const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1531
- // Choose first 3 as core reliable set; if less than 3 available, take all
1532
- this._fallbackPool = neutralList.slice(0, 3);
1533
- this._fallbackIndex = 0;
1534
- this._fallbackPoolCharacter = this.characterName;
1535
- }
1536
- const fallbackVideo = this._fallbackPool[this._fallbackIndex % this._fallbackPool.length];
1537
-
1538
- this._currentErrorHandler = e => {
1539
- const mediaEl = this.inactiveVideo;
1540
- const readyState = mediaEl ? mediaEl.readyState : -1;
1541
- const networkState = mediaEl ? mediaEl.networkState : -1;
1542
- let mediaErrorCode = null;
1543
- if (mediaEl && mediaEl.error) mediaErrorCode = mediaEl.error.code;
1544
- console.warn(
1545
- `Error loading video: ${videoSrc} (readyState=${readyState} networkState=${networkState} mediaError=${mediaErrorCode}) falling back to: ${fallbackVideo}`
1546
- );
1547
- this._loadingInProgress = false;
1548
- if (this._loadTimeout) {
1549
- clearTimeout(this._loadTimeout);
1550
- this._loadTimeout = null;
1551
- }
1552
- this._recentFailures.set(videoSrc, performance.now());
1553
- this._consecutiveErrorCount++;
1554
- // Stop runaway fallback loop: pause if too many sequential errors relative to pool size
1555
- if (this._fallbackPool && this._consecutiveErrorCount >= this._fallbackPool.length * 2) {
1556
- console.error("Temporarily pausing fallback loop after repeated failures. Retrying in 2s.");
1557
- setTimeout(() => {
1558
- this._consecutiveErrorCount = 0;
1559
- this.loadAndSwitchVideo(fallbackVideo, "high");
1560
- }, 2000);
1561
- return;
1562
- }
1563
- if (videoSrc !== fallbackVideo) {
1564
- // Try fallback video
1565
- this._fallbackIndex = (this._fallbackIndex + 1) % this._fallbackPool.length; // advance for next time
1566
- this.loadAndSwitchVideo(fallbackVideo, "high");
1567
- } else {
1568
- // Ultimate fallback: try any neutral video
1569
- console.error(`Fallback video also failed: ${fallbackVideo}. Trying ultimate fallback.`);
1570
- const neutralVideos = this.videoCategories.neutral || [];
1571
- if (neutralVideos.length > 0) {
1572
- // Try a different neutral video
1573
- const ultimateFallback = neutralVideos.find(video => video !== fallbackVideo);
1574
- if (ultimateFallback) {
1575
- this.loadAndSwitchVideo(ultimateFallback, "high");
1576
- } else {
1577
- // Last resort: try first neutral video anyway
1578
- this.loadAndSwitchVideo(neutralVideos[0], "high");
1579
- }
1580
- } else {
1581
- // Critical error: no neutral videos available
1582
- console.error("CRITICAL: No neutral videos available!");
1583
- this._switchInProgress = false;
1584
- }
1585
- }
1586
- // Escalate diagnostics if many consecutive errors
1587
- if (this._consecutiveErrorCount >= 3) {
1588
- console.info(
1589
- `Diagnostics: avgLoadTime=${this._avgLoadTime?.toFixed(1) || "n/a"}ms samples=${this._loadTimeSamples.length} prefetchCache=${this._prefetchCache.size}`
1590
- );
1591
- }
1592
- };
1593
-
1594
- this.inactiveVideo.addEventListener("loadeddata", this._currentLoadHandler, { once: true });
1595
- this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1596
- this.inactiveVideo.addEventListener("error", this._currentErrorHandler, { once: true });
1597
-
1598
- if (this.inactiveVideo.readyState >= 2) {
1599
- queueMicrotask(() => onReady());
1600
- }
1601
-
1602
- // Dynamic timeout: refined formula avg*1.5 + buffer, bounded
1603
- let adaptiveTimeout = this._minTimeout;
1604
- if (this._avgLoadTime) {
1605
- adaptiveTimeout = Math.min(this._maxTimeout, Math.max(this._minTimeout, this._avgLoadTime * 1.5 + 400));
1606
- }
1607
- // Cap by clip length ratio if we know (assume 10000ms default when metadata absent)
1608
- const currentClipMs = 10000; // All clips are 10s
1609
- adaptiveTimeout = Math.min(adaptiveTimeout, Math.floor(currentClipMs * this._timeoutCapRatio));
1610
- this._loadTimeout = setTimeout(() => {
1611
- if (!fired) {
1612
- // If metadata is there but not canplay yet, extend once
1613
- if (this.inactiveVideo.readyState >= 1 && this.inactiveVideo.readyState < 2) {
1614
- console.debug(
1615
- `Extending timeout for ${videoSrc} (readyState=${this.inactiveVideo.readyState}) by ${this._timeoutExtension}ms`
1616
- );
1617
- this._loadTimeout = setTimeout(() => {
1618
- if (!fired) {
1619
- if (this.inactiveVideo.readyState >= 2) onReady();
1620
- else this._currentErrorHandler();
1621
- }
1622
- }, this._timeoutExtension);
1623
- return;
1624
- }
1625
- // Grace retry: still fetching over network (networkState=2) with no data (readyState=0)
1626
- if (
1627
- this.inactiveVideo.networkState === 2 &&
1628
- this.inactiveVideo.readyState === 0 &&
1629
- (this._graceRetryCounts?.[videoSrc] || 0) < 1
1630
- ) {
1631
- if (!this._graceRetryCounts) this._graceRetryCounts = {};
1632
- this._graceRetryCounts[videoSrc] = (this._graceRetryCounts[videoSrc] || 0) + 1;
1633
- const extra = this._timeoutExtension + 600;
1634
- console.debug(`Grace retry for ${videoSrc} (network loading). Extending by ${extra}ms`);
1635
- this._loadTimeout = setTimeout(() => {
1636
- if (!fired) {
1637
- if (this.inactiveVideo.readyState >= 2) onReady();
1638
- else this._currentErrorHandler();
1639
- }
1640
- }, extra);
1641
- return;
1642
- }
1643
- if (this.inactiveVideo.readyState >= 2) {
1644
- onReady();
1645
- } else {
1646
- this._currentErrorHandler();
1647
- }
1648
- }
1649
- }, adaptiveTimeout);
1650
- }
1651
-
1652
- usePreloadedVideo(preloadedVideo, videoSrc) {
1653
- const source = this.inactiveVideo.querySelector("source");
1654
- source.setAttribute("src", videoSrc);
1655
-
1656
- this.inactiveVideo.currentTime = 0;
1657
- this.inactiveVideo.load();
1658
-
1659
- this._currentLoadHandler = () => {
1660
- this._loadingInProgress = false;
1661
- this.performSwitch();
1662
- };
1663
-
1664
- this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1665
- }
1666
-
1667
- performSwitch() {
1668
- // Prevent rapid double toggles
1669
- if (this._switchInProgress) return;
1670
- this._switchInProgress = true;
1671
-
1672
- const fromVideo = this.activeVideo;
1673
- const toVideo = this.inactiveVideo;
1674
-
1675
- // Perform a JS-managed crossfade for smoother transitions
1676
- // Let crossfadeVideos resolve duration from CSS variable (--video-fade-duration)
1677
- this.constructor.crossfadeVideos(fromVideo, toVideo, undefined, () => {
1678
- // After crossfade completion, finalize state and classes
1679
- fromVideo.classList.remove("active");
1680
- toVideo.classList.add("active");
1681
-
1682
- // Swap references
1683
- const prevActive = this.activeVideo;
1684
- const prevInactive = this.inactiveVideo;
1685
- this.activeVideo = prevInactive;
1686
- this.inactiveVideo = prevActive;
1687
-
1688
- const playPromise = this.activeVideo.play();
1689
- if (playPromise && typeof playPromise.then === "function") {
1690
- playPromise
1691
- .then(() => {
1692
- try {
1693
- const src = this.activeVideo?.querySelector("source")?.getAttribute("src");
1694
- const info = { context: this.currentContext, emotion: this.currentEmotion };
1695
- console.log("🎬 VideoManager: Now playing:", src, info);
1696
- // Recompute autoTransitionDuration from actual duration if available (C)
1697
- try {
1698
- const d = this.activeVideo.duration;
1699
- if (!isNaN(d) && d > 0.5) {
1700
- // Keep 1s headroom before natural end for auto scheduling
1701
- const target = Math.max(1000, d * 1000 - 1100);
1702
- this.autoTransitionDuration = target;
1703
- } else {
1704
- this.autoTransitionDuration = 9900; // fallback for 10s clips
1705
- }
1706
- // Dynamic neutral prefetch to widen diversity without burst
1707
- this._prefetchNeutralDynamic();
1708
- } catch {}
1709
- } catch {}
1710
- this._switchInProgress = false;
1711
- this.setupEventListenersForContext(this.currentContext);
1712
- })
1713
- .catch(error => {
1714
- console.warn("Failed to play video:", error);
1715
- // Revert to previous video to avoid frozen state
1716
- toVideo.classList.remove("active");
1717
- fromVideo.classList.add("active");
1718
- this.activeVideo = fromVideo;
1719
- this.inactiveVideo = toVideo;
1720
- try {
1721
- this.activeVideo.play().catch(() => {});
1722
- } catch {}
1723
- this._switchInProgress = false;
1724
- this.setupEventListenersForContext(this.currentContext);
1725
- });
1726
- } else {
1727
- // Non-promise play fallback
1728
- this._switchInProgress = false;
1729
- try {
1730
- const d = this.activeVideo.duration;
1731
- if (!isNaN(d) && d > 0.5) {
1732
- const target = Math.max(1000, d * 1000 - 1100);
1733
- this.autoTransitionDuration = target;
1734
- } else {
1735
- this.autoTransitionDuration = 9900;
1736
- }
1737
- this._prefetchNeutralDynamic();
1738
- } catch {}
1739
- this.setupEventListenersForContext(this.currentContext);
1740
- }
1741
- });
1742
- }
1743
-
1744
- _prefetchNeutralDynamic() {
1745
- try {
1746
- const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
1747
- if (!neutrals.length) return;
1748
- // Build a set of already cached or in-flight
1749
- const cached = new Set(
1750
- [...this._prefetchCache.keys(), ...this._prefetchInFlight.values()].map(v => (typeof v === "string" ? v : v?.src))
1751
- ); // defensive
1752
- const current = this.activeVideo?.querySelector("source")?.getAttribute("src");
1753
- // Choose up to 2 unseen neutral videos different from current
1754
- const candidates = neutrals.filter(s => s && s !== current && !cached.has(s));
1755
- if (!candidates.length) return;
1756
- let limit = 2;
1757
- // Network-aware limiting
1758
- try {
1759
- const conn = navigator.connection || navigator.webkitConnection || navigator.mozConnection;
1760
- if (conn && conn.effectiveType) {
1761
- if (/2g/i.test(conn.effectiveType)) limit = 0;
1762
- else if (/3g/i.test(conn.effectiveType)) limit = 1;
1763
- }
1764
- } catch {}
1765
- if (limit <= 0) return;
1766
- candidates.slice(0, limit).forEach(src => this._prefetch(src));
1767
- } catch {}
1768
- }
1769
-
1770
- _prefetch(src) {
1771
- if (!src || this._prefetchCache.has(src) || this._prefetchInFlight.has(src)) return;
1772
- if (this._prefetchCache.size + this._prefetchInFlight.size >= this._maxPrefetch) return;
1773
- this._prefetchInFlight.add(src);
1774
- const v = document.createElement("video");
1775
- v.preload = "auto";
1776
- v.muted = true;
1777
- v.playsInline = true;
1778
- v.src = src;
1779
- const cleanup = () => {
1780
- v.oncanplaythrough = null;
1781
- v.oncanplay = null;
1782
- v.onerror = null;
1783
- this._prefetchInFlight.delete(src);
1784
- };
1785
- v.oncanplay = () => {
1786
- this._prefetchCache.set(src, v);
1787
- this._trimPrefetchCacheIfNeeded();
1788
- cleanup();
1789
- };
1790
- v.oncanplaythrough = () => {
1791
- this._prefetchCache.set(src, v);
1792
- this._trimPrefetchCacheIfNeeded();
1793
- cleanup();
1794
- };
1795
- v.onerror = () => {
1796
- cleanup();
1797
- };
1798
- try {
1799
- v.load();
1800
- } catch {}
1801
- }
1802
-
1803
- _trimPrefetchCacheIfNeeded() {
1804
- try {
1805
- // Only apply LRU trimming to neutral videos; cap at 6 neutrals cached
1806
- const MAX_NEUTRAL = 6;
1807
- const entries = [...this._prefetchCache.entries()];
1808
- const neutralEntries = entries.filter(([src]) => /\/neutral\//.test(src));
1809
- if (neutralEntries.length <= MAX_NEUTRAL) return;
1810
- // LRU heuristic: older insertion first (Map preserves insertion order)
1811
- const excess = neutralEntries.length - MAX_NEUTRAL;
1812
- let removed = 0;
1813
- for (const [src, vid] of neutralEntries) {
1814
- if (removed >= excess) break;
1815
- // Avoid removing currently active or about to be used
1816
- const current = this.activeVideo?.querySelector("source")?.getAttribute("src");
1817
- if (src === current) continue;
1818
- this._prefetchCache.delete(src);
1819
- try {
1820
- vid.removeAttribute("src");
1821
- vid.load();
1822
- } catch {}
1823
- removed++;
1824
- }
1825
- } catch {}
1826
- }
1827
-
1828
- _prefetchLikely(category) {
1829
- const list = this.videoCategories[category] || [];
1830
- // Prefetch 1-2 next likely videos different from current
1831
- const current = this.activeVideo?.querySelector("source")?.getAttribute("src") || null;
1832
- const candidates = list.filter(s => s && s !== current).slice(0, 2);
1833
- candidates.forEach(src => this._prefetch(src));
1834
- }
1835
-
1836
- // DIAGNOSTIC AND DEBUG METHODS
1837
- getCurrentVideoInfo() {
1838
- const currentSrc = this.activeVideo.querySelector("source").getAttribute("src");
1839
- return {
1840
- currentVideo: currentSrc,
1841
- context: this.currentContext,
1842
- emotion: this.currentEmotion,
1843
- category: this.determineCategory(this.currentContext, this.currentEmotion)
1844
- };
1845
- }
1846
-
1847
- // METHODS TO ANALYZE EMOTIONS FROM TEXT
1848
- // CLEANUP
1849
- destroy() {
1850
- clearTimeout(this.autoTransitionTimer);
1851
- this.autoTransitionTimer = null;
1852
- if (this._visibilityHandler) {
1853
- document.removeEventListener("visibilitychange", this._visibilityHandler);
1854
- this._visibilityHandler = null;
1855
- }
1856
- }
1857
-
1858
- // Utilitaire pour dΓ©terminer la catΓ©gorie vidΓ©o selon la moyenne des traits
1859
- setMoodByPersonality(traits) {
1860
- if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
1861
- const category = getMoodCategoryFromPersonality(traits);
1862
- // Normalize emotion so validation uses base emotion labels
1863
- let emotion = category;
1864
- if (category === "speakingPositive") emotion = "positive";
1865
- else if (category === "speakingNegative") emotion = "negative";
1866
- // For other categories (neutral, listening, dancing) emotion can equal category
1867
- this.switchToContext(category, emotion, null, traits, traits.affection);
1868
- }
1869
-
1870
- _cleanupLoadingHandlers() {
1871
- if (this._currentLoadHandler) {
1872
- this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1873
- this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1874
- this._currentLoadHandler = null;
1875
- }
1876
- if (this._currentErrorHandler) {
1877
- this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1878
- this._currentErrorHandler = null;
1879
- }
1880
- if (this._loadTimeout) {
1881
- clearTimeout(this._loadTimeout);
1882
- this._loadTimeout = null;
1883
- }
1884
- this._loadingInProgress = false;
1885
- this._switchInProgress = false;
1886
- }
1887
- }
1888
 
1889
  function getMoodCategoryFromPersonality(traits) {
1890
  // Use unified emotion system
@@ -1911,6 +358,9 @@ function getMoodCategoryFromPersonality(traits) {
1911
  return "speakingNegative";
1912
  }
1913
 
 
 
 
1914
  // Centralized initialization manager
1915
  class KimiInitManager {
1916
  constructor() {
@@ -2474,9 +924,13 @@ window.KimiFallbackManager = {
2474
  };
2475
 
2476
  window.KimiBaseManager = KimiBaseManager;
2477
- window.KimiVideoManager = KimiVideoManager;
 
 
2478
  window.KimiSecurityUtils = KimiSecurityUtils;
2479
  window.KimiCacheManager = new KimiCacheManager(); // Create global instance
 
 
2480
  window.KimiInitManager = KimiInitManager;
2481
  window.KimiDOMUtils = KimiDOMUtils;
2482
  window.KimiOverlayManager = KimiOverlayManager;
 
329
  }
330
  }
331
 
332
+ // KimiVideoManager implementation moved to ./kimi-videos.js
333
+ // Ensure the video manager module is evaluated so it registers itself on window
334
+ import "./kimi-videos.js";
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
335
 
336
  function getMoodCategoryFromPersonality(traits) {
337
  // Use unified emotion system
 
358
  return "speakingNegative";
359
  }
360
 
361
+ // Expose personality β†’ mood helper for video manager
362
+ window.getMoodCategoryFromPersonality = getMoodCategoryFromPersonality;
363
+
364
  // Centralized initialization manager
365
  class KimiInitManager {
366
  constructor() {
 
924
  };
925
 
926
  window.KimiBaseManager = KimiBaseManager;
927
+ // KimiVideoManager is provided by the separate module `kimi-videos.js` which sets
928
+ // `window.KimiVideoManager` when executed. Do not reference the symbol here to
929
+ // avoid ReferenceError during module evaluation.
930
  window.KimiSecurityUtils = KimiSecurityUtils;
931
  window.KimiCacheManager = new KimiCacheManager(); // Create global instance
932
+ // Expose helper used by the video manager
933
+ window.getCharacterInfo = getCharacterInfo;
934
  window.KimiInitManager = KimiInitManager;
935
  window.KimiDOMUtils = KimiDOMUtils;
936
  window.KimiOverlayManager = KimiOverlayManager;
kimi-js/kimi-videos.js ADDED
@@ -0,0 +1,1562 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Utility class for centralized video management
2
+ class KimiVideoManager {
3
+ constructor(video1, video2, characterName = "kimi") {
4
+ this.characterName = characterName;
5
+ this.video1 = video1;
6
+ this.video2 = video2;
7
+ this.activeVideo = video1;
8
+ this.inactiveVideo = video2;
9
+ this.currentContext = "neutral";
10
+ this.currentEmotion = "neutral";
11
+ this.lastSwitchTime = Date.now();
12
+ this.pendingSwitch = null;
13
+ this.autoTransitionDuration = 9900;
14
+ this.transitionDuration = 300;
15
+ this._prefetchCache = new Map();
16
+ this._prefetchInFlight = new Set();
17
+ this._maxPrefetch = 3;
18
+ this._loadTimeout = null;
19
+ this.updateVideoCategories();
20
+ // Use centralized emotion mapping from emotion system
21
+ this.emotionToCategory = null; // Will be fetched from emotion system when needed
22
+ this.positiveVideos = this.videoCategories.speakingPositive;
23
+ this.negativeVideos = this.videoCategories.speakingNegative;
24
+ this.neutralVideos = this.videoCategories.neutral;
25
+
26
+ // Anti-repetition and scoring - Adaptive history based on available videos
27
+ this.playHistory = {
28
+ listening: [],
29
+ speakingPositive: [],
30
+ speakingNegative: [],
31
+ neutral: [],
32
+ dancing: []
33
+ };
34
+ this.maxHistoryPerCategory = 5; // Will be dynamically adjusted per category
35
+
36
+ this.emotionHistory = [];
37
+ this.maxEmotionHistory = 5;
38
+ this._neutralLock = false;
39
+ this.isEmotionVideoPlaying = false;
40
+ this.currentEmotionContext = null;
41
+ this._switchInProgress = false;
42
+ this._loadingInProgress = false;
43
+ this._currentLoadHandler = null;
44
+ this._currentErrorHandler = null;
45
+ this._stickyContext = null;
46
+ this._stickyUntil = 0;
47
+ this._pendingSwitches = [];
48
+ this._debug = false;
49
+ // Adaptive timeout refinements (A+B+C)
50
+ this._maxTimeout = 6000; // Reduced upper bound (was 10000) for 10s clips
51
+ this._timeoutExtension = 1200; // Extension when metadata only
52
+ this._timeoutCapRatio = 0.7; // Cap total wait <= 70% clip length
53
+ // Initialize adaptive loading metrics and failure tracking
54
+ this._avgLoadTime = null;
55
+ this._loadTimeSamples = [];
56
+ this._maxSamples = 10;
57
+ this._minTimeout = 3000;
58
+ this._recentFailures = new Map();
59
+ this._failureCooldown = 5000;
60
+ this._consecutiveErrorCount = 0;
61
+ }
62
+
63
+ //Centralized crossfade transition between two videos.
64
+ static crossfadeVideos(fromVideo, toVideo, duration = 300, onComplete) {
65
+ // Resolve duration from CSS variable if present
66
+ try {
67
+ const cssDur = getComputedStyle(document.documentElement).getPropertyValue("--video-fade-duration").trim();
68
+ if (cssDur) {
69
+ // Convert CSS time to ms number if needed (e.g., '300ms' or '0.3s')
70
+ if (cssDur.endsWith("ms")) duration = parseFloat(cssDur);
71
+ else if (cssDur.endsWith("s")) duration = Math.round(parseFloat(cssDur) * 1000);
72
+ }
73
+ } catch {}
74
+
75
+ // Preload and strict synchronization
76
+ const easing = "ease-in-out";
77
+ fromVideo.style.transition = `opacity ${duration}ms ${easing}`;
78
+ toVideo.style.transition = `opacity ${duration}ms ${easing}`;
79
+ // Prepare target video (opacity 0, top z-index)
80
+ toVideo.style.opacity = "0";
81
+ toVideo.style.zIndex = "2";
82
+ fromVideo.style.zIndex = "1";
83
+
84
+ // Start target video slightly before the crossfade
85
+ const startTarget = () => {
86
+ if (toVideo.paused) toVideo.play().catch(() => {});
87
+ // Lance le fondu croisΓ©
88
+ setTimeout(() => {
89
+ fromVideo.style.opacity = "0";
90
+ toVideo.style.opacity = "1";
91
+ }, 20);
92
+ // After transition, adjust z-index and call the callback
93
+ setTimeout(() => {
94
+ fromVideo.style.zIndex = "1";
95
+ toVideo.style.zIndex = "2";
96
+ if (onComplete) onComplete();
97
+ }, duration + 30);
98
+ };
99
+
100
+ // If target video is not ready, wait for canplay
101
+ if (toVideo.readyState < 3) {
102
+ toVideo.addEventListener("canplay", startTarget, { once: true });
103
+ toVideo.load();
104
+ } else {
105
+ startTarget();
106
+ }
107
+ // Ensure source video is playing
108
+ if (fromVideo.paused) fromVideo.play().catch(() => {});
109
+ }
110
+
111
+ //Centralized video element creation utility.
112
+ static createVideoElement(id, className = "bg-video") {
113
+ const video = document.createElement("video");
114
+ video.id = id;
115
+ video.className = className;
116
+ video.autoplay = true;
117
+ video.muted = true;
118
+ video.playsinline = true;
119
+ video.preload = "auto";
120
+ video.style.opacity = "0";
121
+ video.innerHTML =
122
+ '<source src="" type="video/mp4" /><span data-i18n="video_not_supported">Your browser does not support the video tag.</span>';
123
+ return video;
124
+ }
125
+
126
+ //Centralized video selection utility.
127
+ static getVideoElement(selector) {
128
+ if (typeof selector === "string") {
129
+ if (selector.startsWith("#")) {
130
+ return document.getElementById(selector.slice(1));
131
+ }
132
+ return document.querySelector(selector);
133
+ }
134
+ return selector;
135
+ }
136
+
137
+ setDebug(enabled) {
138
+ this._debug = !!enabled;
139
+ }
140
+
141
+ _logDebug(message, payload = null) {
142
+ if (!this._debug) return;
143
+ if (payload) console.log("🎬 VideoManager:", message, payload);
144
+ else console.log("🎬 VideoManager:", message);
145
+ }
146
+
147
+ _logSelection(category, selectedSrc, candidates = []) {
148
+ if (!this._debug) return;
149
+ const recent = (this.playHistory && this.playHistory[category]) || [];
150
+ const adaptive = typeof this.getAdaptiveHistorySize === "function" ? this.getAdaptiveHistorySize(category) : null;
151
+ console.log("🎬 VideoManager: selection", {
152
+ category,
153
+ selected: selectedSrc,
154
+ candidatesCount: Array.isArray(candidates) ? candidates.length : 0,
155
+ adaptiveHistorySize: adaptive,
156
+ recentHistory: recent
157
+ });
158
+ }
159
+
160
+ debugPrintHistory(category = null) {
161
+ if (!this._debug) return;
162
+ if (!this.playHistory) {
163
+ console.log("🎬 VideoManager: no play history yet");
164
+ return;
165
+ }
166
+ if (category) {
167
+ const recent = this.playHistory[category] || [];
168
+ console.log("🎬 VideoManager: history", { category, recent });
169
+ return;
170
+ }
171
+ const summary = Object.keys(this.playHistory).reduce((acc, key) => {
172
+ acc[key] = this.playHistory[key];
173
+ return acc;
174
+ }, {});
175
+ console.log("🎬 VideoManager: history summary", summary);
176
+ }
177
+
178
+ _priorityWeight(context) {
179
+ if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") return 3;
180
+ if (context === "dancing" || context === "listening") return 2;
181
+ return 1;
182
+ }
183
+
184
+ _enqueuePendingSwitch(req) {
185
+ // Keep small bounded list; prefer newest higher-priority
186
+ const maxSize = 5;
187
+ this._pendingSwitches.push(req);
188
+ if (this._pendingSwitches.length > maxSize) {
189
+ this._pendingSwitches = this._pendingSwitches.slice(-maxSize);
190
+ }
191
+ }
192
+
193
+ _takeNextPendingSwitch() {
194
+ if (!this._pendingSwitches.length) return null;
195
+ let bestIdx = 0;
196
+ let best = this._pendingSwitches[0];
197
+ for (let i = 1; i < this._pendingSwitches.length; i++) {
198
+ const cand = this._pendingSwitches[i];
199
+ if (cand.priorityWeight > best.priorityWeight) {
200
+ best = cand;
201
+ bestIdx = i;
202
+ } else if (cand.priorityWeight === best.priorityWeight && cand.requestedAt > best.requestedAt) {
203
+ best = cand;
204
+ bestIdx = i;
205
+ }
206
+ }
207
+ this._pendingSwitches.splice(bestIdx, 1);
208
+ return best;
209
+ }
210
+
211
+ _processPendingSwitches() {
212
+ if (this._stickyContext === "dancing") return false;
213
+ const next = this._takeNextPendingSwitch();
214
+ if (!next) return false;
215
+ this._logDebug("Processing pending switch", next);
216
+ this.switchToContext(next.context, next.emotion, next.specificVideo, next.traits, next.affection);
217
+ return true;
218
+ }
219
+
220
+ setCharacter(characterName) {
221
+ this.characterName = characterName;
222
+
223
+ // Nettoyer les handlers en cours lors du changement de personnage
224
+ this._cleanupLoadingHandlers();
225
+ // Reset per-character fallback pool so it will be rebuilt for the new character
226
+ this._fallbackPool = null;
227
+ this._fallbackIndex = 0;
228
+ this._fallbackPoolCharacter = null;
229
+
230
+ this.updateVideoCategories();
231
+ }
232
+
233
+ updateVideoCategories() {
234
+ const folder = getCharacterInfo(this.characterName).videoFolder;
235
+ this.videoCategories = {
236
+ listening: [
237
+ `${folder}listening/listening-gentle-sway.mp4`,
238
+ `${folder}listening/listening-magnetic-eye-gaze.mp4`,
239
+ `${folder}listening/listening-silky-caressing-hairplay.mp4`,
240
+ `${folder}listening/listening-softly-velvet-glance.mp4`,
241
+ `${folder}listening/listening-surprise-sweet-shiver.mp4`,
242
+ `${folder}listening/listening-whispered-attention.mp4`,
243
+ `${folder}listening/listening-hand-gesture.mp4`,
244
+ `${folder}listening/listening-hair-touch.mp4`,
245
+ `${folder}listening/listening-full-spin.mp4`,
246
+ `${folder}listening/listening-teasing-smile.mp4`,
247
+ `${folder}listening/listening-dreamy-gaze-romantic.mp4`
248
+ ],
249
+ speakingPositive: [
250
+ `${folder}speaking-positive/speaking-happy-gestures.mp4`,
251
+ `${folder}speaking-positive/speaking-positive-heartfelt-shine.mp4`,
252
+ `${folder}speaking-positive/speaking-positive-joyful-flutter.mp4`,
253
+ `${folder}speaking-positive/speaking-positive-mischief-touch.mp4`,
254
+ `${folder}speaking-positive/speaking-positive-sparkling-tease.mp4`,
255
+ `${folder}speaking-positive/speaking-playful-wink.mp4`,
256
+ `${folder}speaking-positive/speaking-excited-clapping.mp4`,
257
+ `${folder}speaking-positive/speaking-heart-gesture.mp4`,
258
+ `${folder}speaking-positive/speaking-surprise-graceful-gasp.mp4`,
259
+ `${folder}speaking-positive/speaking-laughing-melodious.mp4`,
260
+ `${folder}speaking-positive/speaking-gentle-smile.mp4`,
261
+ `${folder}speaking-positive/speaking-graceful-arms.mp4`,
262
+ `${folder}speaking-positive/speaking-flirtatious-tease.mp4`
263
+ ],
264
+ speakingNegative: [
265
+ `${folder}speaking-negative/speaking-negative-anxious-caress.mp4`,
266
+ `${folder}speaking-negative/speaking-negative-frosted-glance.mp4`,
267
+ `${folder}speaking-negative/speaking-negative-muted-longing.mp4`,
268
+ `${folder}speaking-negative/speaking-negative-shadowed-sigh.mp4`,
269
+ `${folder}speaking-negative/speaking-sad-elegant.mp4`,
270
+ `${folder}speaking-negative/speaking-frustrated-graceful.mp4`,
271
+ `${folder}speaking-negative/speaking-worried-tender.mp4`,
272
+ `${folder}speaking-negative/speaking-disappointed-elegant.mp4`,
273
+ `${folder}speaking-negative/speaking-gentle-wave-goodbye.mp4`
274
+ ],
275
+ neutral: [
276
+ `${folder}neutral/neutral-thinking-pose.mp4`,
277
+ `${folder}neutral/neutral-shy-blush-adorable.mp4`,
278
+ `${folder}neutral/neutral-confident-chic-flair.mp4`,
279
+ `${folder}neutral/neutral-dreamy-soft-reverie.mp4`,
280
+ `${folder}neutral/neutral-flirt-wink-whisper.mp4`,
281
+ `${folder}neutral/neutral-goodbye-tender-wave.mp4`,
282
+ `${folder}neutral/neutral-hair-twirl.mp4`,
283
+ `${folder}neutral/neutral-kiss-air-caress.mp4`,
284
+ `${folder}neutral/neutral-poised-shift.mp4`,
285
+ `${folder}neutral/neutral-shy-blush-glow.mp4`,
286
+ `${folder}neutral/neutral-speaking-dreamy-flow.mp4`,
287
+ `${folder}neutral/neutral-gentle-breathing.mp4`,
288
+ `${folder}neutral/neutral-hair-adjustment.mp4`,
289
+ `${folder}neutral/neutral-arms-crossed-elegant.mp4`,
290
+ `${folder}neutral/neutral-seductive-slow-gaze.mp4`,
291
+ `${folder}neutral/neutral-confident-pose-alluring.mp4`,
292
+ `${folder}neutral/neutral-affectionate-kiss-blow.mp4`
293
+ ],
294
+ dancing: [
295
+ `${folder}dancing/dancing-chin-hand.mp4`,
296
+ `${folder}dancing/dancing-bow-promise.mp4`,
297
+ `${folder}dancing/dancing-enchanting-flow.mp4`,
298
+ `${folder}dancing/dancing-magnetic-spin.mp4`,
299
+ `${folder}dancing/dancing-playful-glimmer.mp4`,
300
+ `${folder}dancing/dancing-silken-undulation.mp4`,
301
+ `${folder}dancing/dancing-full-spin.mp4`,
302
+ `${folder}dancing/dancing-seductive-dance-undulation.mp4`,
303
+ `${folder}dancing/dancing-slow-seductive.mp4`,
304
+ `${folder}dancing/dancing-spinning-elegance-twirl.mp4`
305
+ ]
306
+ };
307
+ this.positiveVideos = this.videoCategories.speakingPositive;
308
+ this.negativeVideos = this.videoCategories.speakingNegative;
309
+ this.neutralVideos = this.videoCategories.neutral;
310
+
311
+ const neutrals = this.neutralVideos || [];
312
+ // Progressive warm-up phase: start with only 2 neutrals (adaptive on network), others scheduled later
313
+ let neutralPrefetchCount = 2;
314
+ try {
315
+ const conn = navigator.connection || navigator.webkitConnection || navigator.mozConnection;
316
+ if (conn && conn.effectiveType) {
317
+ // Reduce on slower connections
318
+ if (/2g/i.test(conn.effectiveType)) neutralPrefetchCount = 1;
319
+ else if (/3g/i.test(conn.effectiveType)) neutralPrefetchCount = 2;
320
+ }
321
+ } catch {}
322
+ neutrals.slice(0, neutralPrefetchCount).forEach(src => this._prefetch(src));
323
+
324
+ // Schedule warm-up step 2: after 5s prefetch the 3rd neutral if not already cached
325
+ if (!this._warmupTimer) {
326
+ this._warmupTimer = setTimeout(() => {
327
+ try {
328
+ const target = neutrals[2];
329
+ if (target && !this._prefetchCache.has(target)) this._prefetch(target);
330
+ } catch {}
331
+ }, 5000);
332
+ }
333
+
334
+ // Mark waiting for first interaction to fetch 4th neutral later
335
+ this._awaitingFirstInteraction = true;
336
+ }
337
+
338
+ async init(database = null) {
339
+ // Attach lightweight visibility guard
340
+ if (!this._visibilityHandler) {
341
+ this._visibilityHandler = this.onVisibilityChange.bind(this);
342
+ document.addEventListener("visibilitychange", this._visibilityHandler);
343
+ }
344
+ // Hook basic user interaction (first click / keypress) to advance warm-up
345
+ if (!this._firstInteractionHandler) {
346
+ this._firstInteractionHandler = () => {
347
+ if (this._awaitingFirstInteraction) {
348
+ this._awaitingFirstInteraction = false;
349
+ try {
350
+ const neutrals = this.neutralVideos || [];
351
+ const fourth = neutrals[3];
352
+ if (fourth && !this._prefetchCache.has(fourth)) this._prefetch(fourth);
353
+ } catch {}
354
+ }
355
+ };
356
+ window.addEventListener("click", this._firstInteractionHandler, { once: true });
357
+ window.addEventListener("keydown", this._firstInteractionHandler, { once: true });
358
+ }
359
+ }
360
+
361
+ onVisibilityChange() {
362
+ if (document.visibilityState !== "visible") return;
363
+ const v = this.activeVideo;
364
+ if (!v) return;
365
+ try {
366
+ if (v.ended) {
367
+ if (typeof this.returnToNeutral === "function") this.returnToNeutral();
368
+ } else if (v.paused) {
369
+ v.play().catch(() => {
370
+ if (typeof this.returnToNeutral === "function") this.returnToNeutral();
371
+ });
372
+ }
373
+ } catch {}
374
+ }
375
+
376
+ // Intelligent contextual management
377
+ switchToContext(context, emotion = "neutral", specificVideo = null, traits = null, affection = null) {
378
+ // Respect sticky context (avoid overrides while dancing is requested/playing)
379
+ if (this._stickyContext === "dancing" && context !== "dancing") {
380
+ const categoryForPriority = this.determineCategory(context, emotion, traits);
381
+ const priorityWeight = this._priorityWeight(
382
+ categoryForPriority === "speakingPositive" || categoryForPriority === "speakingNegative" ? "speaking" : context
383
+ );
384
+ if (Date.now() < (this._stickyUntil || 0)) {
385
+ this._enqueuePendingSwitch({
386
+ context,
387
+ emotion,
388
+ specificVideo,
389
+ traits,
390
+ affection,
391
+ requestedAt: Date.now(),
392
+ priorityWeight
393
+ });
394
+ this._logDebug("Queued during dancing (sticky)", { context, emotion, priorityWeight });
395
+ return;
396
+ }
397
+ this._stickyContext = null;
398
+ this._stickyUntil = 0;
399
+ // Do not reset adaptive loading metrics here; preserve rolling stats across sticky context release
400
+ }
401
+ // While an emotion video is playing (speaking), block non-speaking context switches
402
+ if (
403
+ this.isEmotionVideoPlaying &&
404
+ (this.currentContext === "speaking" ||
405
+ this.currentContext === "speakingPositive" ||
406
+ this.currentContext === "speakingNegative") &&
407
+ !(context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
408
+ ) {
409
+ // Queue the request with appropriate priority to be processed after current clip
410
+ const categoryForPriority = this.determineCategory(context, emotion, traits);
411
+ const priorityWeight = this._priorityWeight(
412
+ categoryForPriority === "speakingPositive" || categoryForPriority === "speakingNegative" ? "speaking" : context
413
+ );
414
+ this._enqueuePendingSwitch({
415
+ context,
416
+ emotion,
417
+ specificVideo,
418
+ traits,
419
+ affection,
420
+ requestedAt: Date.now(),
421
+ priorityWeight
422
+ });
423
+ this._logDebug("Queued non-speaking during speaking emotion", { context, emotion, priorityWeight });
424
+ return;
425
+ }
426
+
427
+ // While speaking emotion video is playing, also queue speaking→speaking changes (avoid mid-clip replacement)
428
+ if (
429
+ this.isEmotionVideoPlaying &&
430
+ (this.currentContext === "speaking" ||
431
+ this.currentContext === "speakingPositive" ||
432
+ this.currentContext === "speakingNegative") &&
433
+ (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") &&
434
+ this.currentEmotionContext &&
435
+ this.currentEmotionContext !== emotion
436
+ ) {
437
+ const priorityWeight = this._priorityWeight("speaking");
438
+ this._enqueuePendingSwitch({
439
+ context,
440
+ emotion,
441
+ specificVideo,
442
+ traits,
443
+ affection,
444
+ requestedAt: Date.now(),
445
+ priorityWeight
446
+ });
447
+ this._logDebug("Queued speaking→speaking during active emotion", { from: this.currentEmotionContext, to: emotion });
448
+ return;
449
+ }
450
+ if (context === "neutral" && this._neutralLock) return;
451
+ if (
452
+ (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") &&
453
+ this.isEmotionVideoPlaying &&
454
+ this.currentEmotionContext === emotion
455
+ )
456
+ return;
457
+
458
+ if (this.currentContext === context && this.currentEmotion === emotion && !specificVideo) {
459
+ const category = this.determineCategory(context, emotion, traits);
460
+ const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
461
+ const availableVideos = this.videoCategories[category] || this.videoCategories.neutral;
462
+ const differentVideos = availableVideos.filter(v => v !== currentVideoSrc);
463
+
464
+ if (differentVideos.length > 0) {
465
+ const nextVideo =
466
+ typeof this._pickScoredVideo === "function"
467
+ ? this._pickScoredVideo(category, differentVideos, traits)
468
+ : differentVideos[Math.floor(Math.random() * differentVideos.length)];
469
+ this.loadAndSwitchVideo(nextVideo, "normal");
470
+ // Track play history to avoid immediate repeats
471
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, nextVideo);
472
+ this._logSelection(category, nextVideo, differentVideos);
473
+ this.lastSwitchTime = Date.now();
474
+ }
475
+ return;
476
+ }
477
+
478
+ // Determine the category FIRST to ensure correct video selection
479
+ const category = this.determineCategory(context, emotion, traits);
480
+
481
+ // DΓ©terminer la prioritΓ© selon le contexte
482
+ let priority = "normal";
483
+ if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") {
484
+ priority = "speaking";
485
+ } else if (context === "dancing" || context === "listening") {
486
+ priority = "high";
487
+ }
488
+
489
+ // Set sticky lock for dancing to avoid being interrupted by emotion/neutral updates
490
+ if (context === "dancing") {
491
+ this._stickyContext = "dancing";
492
+ // Lock roughly for one clip duration; will also be cleared on end/neutral
493
+ this._stickyUntil = Date.now() + 9500;
494
+ }
495
+
496
+ // Chemin optimisΓ© lorsque TTS parle/Γ©coute (Γ©vite clignotements)
497
+ if (
498
+ window.voiceManager &&
499
+ window.voiceManager.isSpeaking &&
500
+ (context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
501
+ ) {
502
+ const speakingPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
503
+ const speakingCurrent = this.activeVideo.querySelector("source").getAttribute("src");
504
+ if (speakingCurrent !== speakingPath || this.activeVideo.ended) {
505
+ this.loadAndSwitchVideo(speakingPath, priority);
506
+ }
507
+ // IMPORTANT: normalize to the resolved category (e.g., speakingPositive/Negative)
508
+ this.currentContext = category;
509
+ this.currentEmotion = emotion;
510
+ this.lastSwitchTime = Date.now();
511
+ return;
512
+ }
513
+ if (window.voiceManager && window.voiceManager.isListening && context === "listening") {
514
+ const listeningPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
515
+ const listeningCurrent = this.activeVideo.querySelector("source").getAttribute("src");
516
+ if (listeningCurrent !== listeningPath || this.activeVideo.ended) {
517
+ this.loadAndSwitchVideo(listeningPath, priority);
518
+ }
519
+ // Normalize to category for consistency
520
+ this.currentContext = category;
521
+ this.currentEmotion = emotion;
522
+ this.lastSwitchTime = Date.now();
523
+ return;
524
+ }
525
+
526
+ // SΓ©lection standard
527
+ let videoPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
528
+ const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
529
+
530
+ // Anti-rΓ©pΓ©tition si plusieurs vidΓ©os disponibles
531
+ if (videoPath === currentVideoSrc && (this.videoCategories[category] || []).length > 1) {
532
+ const alternatives = this.videoCategories[category].filter(v => v !== currentVideoSrc);
533
+ if (alternatives.length > 0) {
534
+ videoPath =
535
+ typeof this._pickScoredVideo === "function"
536
+ ? this._pickScoredVideo(category, alternatives, traits)
537
+ : alternatives[Math.floor(Math.random() * alternatives.length)];
538
+ }
539
+ }
540
+
541
+ // Adaptive transition timing based on context and priority
542
+ let minTransitionDelay = 300;
543
+
544
+ const now = Date.now();
545
+ const timeSinceLastSwitch = now - (this.lastSwitchTime || 0);
546
+
547
+ // Context-specific timing adjustments
548
+ if (priority === "speaking") {
549
+ minTransitionDelay = 200;
550
+ } else if (context === "listening") {
551
+ minTransitionDelay = 250;
552
+ } else if (context === "dancing") {
553
+ minTransitionDelay = 600;
554
+ } else if (context === "neutral") {
555
+ minTransitionDelay = 1200;
556
+ }
557
+
558
+ // Prevent rapid switching only if not critical
559
+ if (
560
+ this.currentContext === context &&
561
+ this.currentEmotion === emotion &&
562
+ currentVideoSrc === videoPath &&
563
+ !this.activeVideo.paused &&
564
+ !this.activeVideo.ended &&
565
+ timeSinceLastSwitch < minTransitionDelay &&
566
+ priority !== "speaking" // Always allow speech to interrupt
567
+ ) {
568
+ return;
569
+ }
570
+
571
+ this._prefetchLikely(category);
572
+
573
+ this.loadAndSwitchVideo(videoPath, priority);
574
+ // Always store normalized category as currentContext so event bindings match speakingPositive/Negative
575
+ this.currentContext = category;
576
+ this.currentEmotion = emotion;
577
+ this.lastSwitchTime = now;
578
+ }
579
+
580
+ setupEventListenersForContext(context) {
581
+ // Clean previous
582
+ if (this._globalEndedHandler) {
583
+ this.activeVideo.removeEventListener("ended", this._globalEndedHandler);
584
+ this.inactiveVideo.removeEventListener("ended", this._globalEndedHandler);
585
+ }
586
+
587
+ // Defensive: ensure helpers exist
588
+ if (!this.playHistory) this.playHistory = {};
589
+ if (!this.maxHistoryPerCategory) this.maxHistoryPerCategory = 8;
590
+
591
+ // For dancing: auto-return to neutral after video ends to avoid freeze
592
+ if (context === "dancing") {
593
+ this._globalEndedHandler = () => {
594
+ this._stickyContext = null;
595
+ this._stickyUntil = 0;
596
+ if (!this._processPendingSwitches()) {
597
+ this.returnToNeutral();
598
+ }
599
+ };
600
+ this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
601
+ // Safety timer
602
+ if (typeof this.scheduleAutoTransition === "function") {
603
+ this.scheduleAutoTransition(this.autoTransitionDuration || 10000);
604
+ }
605
+ return;
606
+ }
607
+
608
+ if (context === "speakingPositive" || context === "speakingNegative") {
609
+ this._globalEndedHandler = () => {
610
+ // If TTS is still speaking, keep the speaking flow by chaining another speaking clip
611
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
612
+ const emotion = this.currentEmotion || this.currentEmotionContext || "positive";
613
+ // Preserve speaking context while chaining
614
+ const category = emotion === "negative" ? "speakingNegative" : "speakingPositive";
615
+ const next = this.selectOptimalVideo(category, null, null, null, emotion);
616
+ if (next) {
617
+ this.loadAndSwitchVideo(next, "speaking");
618
+ this.currentContext = category;
619
+ this.currentEmotion = emotion;
620
+ this.isEmotionVideoPlaying = true;
621
+ this.currentEmotionContext = emotion;
622
+ this.lastSwitchTime = Date.now();
623
+ return;
624
+ }
625
+ }
626
+ // Otherwise, allow pending high-priority switch or return to neutral
627
+ this.isEmotionVideoPlaying = false;
628
+ this.currentEmotionContext = null;
629
+ this._neutralLock = false;
630
+ if (!this._processPendingSwitches()) {
631
+ this.returnToNeutral();
632
+ }
633
+ };
634
+ this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
635
+ return;
636
+ }
637
+
638
+ if (context === "listening") {
639
+ this._globalEndedHandler = () => {
640
+ this.switchToContext("listening", "listening");
641
+ };
642
+ this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
643
+ return;
644
+ }
645
+
646
+ // Neutral: on end, pick another neutral to avoid static last frame
647
+ if (context === "neutral") {
648
+ this._globalEndedHandler = () => this.returnToNeutral();
649
+ this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
650
+ }
651
+ }
652
+
653
+ // keep only the augmented determineCategory above (with traits)
654
+ selectOptimalVideo(category, specificVideo = null, traits = null, affection = null, emotion = null) {
655
+ const availableVideos = this.videoCategories[category] || this.videoCategories.neutral;
656
+
657
+ if (specificVideo && availableVideos.includes(specificVideo)) {
658
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, specificVideo);
659
+ this._logSelection(category, specificVideo, availableVideos);
660
+ return specificVideo;
661
+ }
662
+
663
+ const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
664
+
665
+ // Filter out recently played videos using adaptive history
666
+ const recentlyPlayed = this.playHistory[category] || [];
667
+ let candidateVideos = availableVideos.filter(video => video !== currentVideoSrc && !recentlyPlayed.includes(video));
668
+
669
+ // If no fresh videos, allow recently played but not current
670
+ if (candidateVideos.length === 0) {
671
+ candidateVideos = availableVideos.filter(video => video !== currentVideoSrc);
672
+ }
673
+
674
+ // Ultimate fallback
675
+ if (candidateVideos.length === 0) {
676
+ candidateVideos = availableVideos;
677
+ }
678
+
679
+ // Ensure we're not falling back to wrong category
680
+ if (candidateVideos.length === 0) {
681
+ candidateVideos = this.videoCategories.neutral;
682
+ }
683
+
684
+ // If traits and affection are provided, weight the selection more subtly
685
+ if (traits && typeof affection === "number") {
686
+ let weights = candidateVideos.map(video => {
687
+ if (category === "speakingPositive") {
688
+ // Positive videos favored by affection, romance, and humor
689
+ const base = 1 + (affection / 100) * 0.4; // Affection influence factor
690
+ let bonus = 0;
691
+ const rom = typeof traits.romance === "number" ? traits.romance : 50;
692
+ const hum = typeof traits.humor === "number" ? traits.humor : 50;
693
+ if (emotion === "romantic") bonus += (rom / 100) * 0.3; // Romance context bonus
694
+ if (emotion === "laughing") bonus += (hum / 100) * 0.3; // Humor context bonus
695
+ return base + bonus;
696
+ }
697
+ if (category === "speakingNegative") {
698
+ // Negative videos when affection is low (reduced weight to balance)
699
+ return 1 + ((100 - affection) / 100) * 0.3; // Low-affection influence factor
700
+ }
701
+ if (category === "neutral") {
702
+ // Neutral videos when affection is moderate, also influenced by intelligence
703
+ const distance = Math.abs(50 - affection) / 50; // 0 at 50, 1 at 0 or 100
704
+ const intBonus = ((traits.intelligence || 50) / 100) * 0.1; // Intelligence adds to neutral thoughtfulness
705
+ return 1 + (1 - Math.min(1, distance)) * 0.2 + intBonus;
706
+ }
707
+ if (category === "dancing") {
708
+ // Dancing strongly influenced by playfulness, romance also adds excitement
709
+ const playBonus = Math.min(0.6, (traits.playfulness / 100) * 0.7);
710
+ const romanceBonus = ((traits.romance || 50) / 100) * 0.2; // Romance adds to dance appeal
711
+ return 1 + playBonus + romanceBonus;
712
+ }
713
+ if (category === "listening") {
714
+ // Listening influenced by empathy, intelligence, and affection
715
+ const empathyWeight = (traits.empathy || 50) / 100;
716
+ const intWeight = ((traits.intelligence || 50) / 100) * 0.1; // Intelligence improves listening quality
717
+ return 1 + empathyWeight * 0.3 + (affection / 100) * 0.1 + intWeight;
718
+ }
719
+ return 1;
720
+ });
721
+
722
+ const total = weights.reduce((a, b) => a + b, 0);
723
+ let r = Math.random() * total;
724
+ for (let i = 0; i < candidateVideos.length; i++) {
725
+ if (r < weights[i]) {
726
+ const chosen = candidateVideos[i];
727
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, chosen);
728
+ this._logSelection(category, chosen, candidateVideos);
729
+ return chosen;
730
+ }
731
+ r -= weights[i];
732
+ }
733
+ const selectedVideo = candidateVideos[0];
734
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, selectedVideo);
735
+ this._logSelection(category, selectedVideo, candidateVideos);
736
+ return selectedVideo;
737
+ }
738
+
739
+ // No traits weighting: random pick
740
+ if (candidateVideos.length === 0) {
741
+ return availableVideos && availableVideos[0] ? availableVideos[0] : null;
742
+ }
743
+ const selectedVideo = candidateVideos[Math.floor(Math.random() * candidateVideos.length)];
744
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, selectedVideo);
745
+ this._logSelection(category, selectedVideo, candidateVideos);
746
+ return selectedVideo;
747
+ }
748
+
749
+ // Get adaptive history size based on available videos
750
+ getAdaptiveHistorySize(category) {
751
+ const availableVideos = this.videoCategories[category] || [];
752
+ const videoCount = availableVideos.length;
753
+
754
+ // Adaptive history: keep 40-60% of available videos in history
755
+ // Minimum 2, maximum 8 to prevent extreme cases
756
+ if (videoCount <= 3) return Math.max(1, videoCount - 1);
757
+ if (videoCount <= 6) return Math.max(2, Math.floor(videoCount * 0.5));
758
+ return Math.min(8, Math.floor(videoCount * 0.6));
759
+ }
760
+
761
+ // Update history with adaptive sizing
762
+ updatePlayHistory(category, videoPath) {
763
+ if (!this.playHistory[category]) {
764
+ this.playHistory[category] = [];
765
+ }
766
+
767
+ const adaptiveSize = this.getAdaptiveHistorySize(category);
768
+ this.playHistory[category].push(videoPath);
769
+
770
+ // Trim to adaptive size
771
+ if (this.playHistory[category].length > adaptiveSize) {
772
+ this.playHistory[category] = this.playHistory[category].slice(-adaptiveSize);
773
+ }
774
+ }
775
+
776
+ // Ensure determineCategory exists as a class method (used at line ~494 and ~537)
777
+ determineCategory(context, emotion = "neutral", traits = null) {
778
+ // Get emotion mapping from centralized emotion system
779
+ const emotionToCategory = window.kimiEmotionSystem?.emotionToVideoCategory || {
780
+ listening: "listening",
781
+ positive: "speakingPositive",
782
+ negative: "speakingNegative",
783
+ neutral: "neutral",
784
+ surprise: "speakingPositive",
785
+ laughing: "speakingPositive",
786
+ shy: "neutral",
787
+ confident: "speakingPositive",
788
+ romantic: "speakingPositive",
789
+ flirtatious: "speakingPositive",
790
+ goodbye: "neutral",
791
+ kiss: "speakingPositive",
792
+ dancing: "dancing",
793
+ speaking: "speakingPositive",
794
+ speakingPositive: "speakingPositive",
795
+ speakingNegative: "speakingNegative"
796
+ };
797
+
798
+ // Prefer explicit context mapping if provided (e.g., 'listening','dancing')
799
+ if (emotionToCategory[context]) {
800
+ return emotionToCategory[context];
801
+ }
802
+ // Normalize generic 'speaking' by emotion polarity
803
+ if (context === "speaking") {
804
+ if (emotion === "positive") return "speakingPositive";
805
+ if (emotion === "negative") return "speakingNegative";
806
+ return "neutral";
807
+ }
808
+ // Map by emotion label when possible
809
+ if (emotionToCategory[emotion]) {
810
+ return emotionToCategory[emotion];
811
+ }
812
+ return "neutral";
813
+ }
814
+
815
+ // SPECIALIZED METHODS FOR EACH CONTEXT
816
+ async startListening(traits = null, affection = null) {
817
+ // If already listening and playing, avoid redundant switch
818
+ if (this.currentContext === "listening" && !this.activeVideo.paused && !this.activeVideo.ended) {
819
+ return;
820
+ }
821
+ // Immediate switch to keep UI responsive
822
+ this.switchToContext("listening");
823
+
824
+ // Add a short grace window to prevent immediate switch to speaking before TTS starts
825
+ clearTimeout(this._listeningGraceTimer);
826
+ this._listeningGraceTimer = setTimeout(() => {
827
+ // No-op; used as a time marker to let LLM prepare the answer
828
+ }, 1500);
829
+
830
+ // If caller did not provide traits, try to fetch and refine selection
831
+ try {
832
+ if (!traits && window.kimiDB && typeof window.kimiDB.getAllPersonalityTraits === "function") {
833
+ const selectedCharacter = await window.kimiDB.getSelectedCharacter();
834
+ const allTraits = await window.kimiDB.getAllPersonalityTraits(selectedCharacter);
835
+ if (allTraits && typeof allTraits === "object") {
836
+ const aff = typeof allTraits.affection === "number" ? allTraits.affection : undefined;
837
+ // Re-issue context switch with weighting parameters to better pick listening videos
838
+ this.switchToContext("listening", "listening", null, allTraits, aff);
839
+ }
840
+ } else if (traits) {
841
+ this.switchToContext("listening", "listening", null, traits, affection);
842
+ }
843
+ } catch (e) {
844
+ // Non-fatal: keep basic listening behavior
845
+ console.warn("Listening refinement skipped due to error:", e);
846
+ }
847
+ }
848
+
849
+ respondWithEmotion(emotion, traits = null, affection = null) {
850
+ // Ignore neutral emotion to avoid unintended overrides (use returnToNeutral when appropriate)
851
+ if (emotion === "neutral") {
852
+ if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
853
+ this.returnToNeutral();
854
+ return;
855
+ }
856
+ // Do not override dancing while sticky
857
+ if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
858
+ // If we are already playing the same emotion video, do nothing
859
+ if (this.isEmotionVideoPlaying && this.currentEmotionContext === emotion) return;
860
+ // If we just entered listening and TTS isn’t started yet, wait a bit to avoid desync
861
+ const now = Date.now();
862
+ const stillInGrace = this._listeningGraceTimer != null;
863
+ const ttsNotStarted = !(window.voiceManager && window.voiceManager.isSpeaking);
864
+ if (this.currentContext === "listening" && stillInGrace && ttsNotStarted) {
865
+ clearTimeout(this._pendingSpeakSwitch);
866
+ this._pendingSpeakSwitch = setTimeout(() => {
867
+ // Re-check speaking state; only switch when we have an actual emotion to play alongside TTS
868
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
869
+ this.switchToContext("speaking", emotion, null, traits, affection);
870
+ this.isEmotionVideoPlaying = true;
871
+ this.currentEmotionContext = emotion;
872
+ }
873
+ }, 900);
874
+ return;
875
+ }
876
+
877
+ // First switch context (so internal guards don't see the new flags yet)
878
+ this.switchToContext("speaking", emotion, null, traits, affection);
879
+ // Then mark the emotion video as playing for override protection
880
+ this.isEmotionVideoPlaying = true;
881
+ this.currentEmotionContext = emotion;
882
+ }
883
+
884
+ returnToNeutral() {
885
+ // Always ensure we resume playback with a fresh neutral video to avoid freeze
886
+ if (this._neutralLock) return;
887
+ this._neutralLock = true;
888
+ setTimeout(() => {
889
+ this._neutralLock = false;
890
+ }, 1000);
891
+ this._stickyContext = null;
892
+ this._stickyUntil = 0;
893
+ this.isEmotionVideoPlaying = false;
894
+ this.currentEmotionContext = null;
895
+
896
+ // Si la voix est encore en cours, relancer une vidΓ©o neutre en boucle
897
+ const category = "neutral";
898
+ const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
899
+ const available = this.videoCategories[category] || [];
900
+ let nextSrc = null;
901
+ if (available.length > 0) {
902
+ const candidates = available.filter(v => v !== currentVideoSrc);
903
+ nextSrc =
904
+ candidates.length > 0
905
+ ? candidates[Math.floor(Math.random() * candidates.length)]
906
+ : available[Math.floor(Math.random() * available.length)];
907
+ }
908
+ if (nextSrc) {
909
+ this.loadAndSwitchVideo(nextSrc, "normal");
910
+ if (typeof this.updatePlayHistory === "function") this.updatePlayHistory(category, nextSrc);
911
+ this.currentContext = "neutral";
912
+ this.currentEmotion = "neutral";
913
+ this.lastSwitchTime = Date.now();
914
+ // Si la voix est encore en cours, s'assurer qu'on relance une vidΓ©o neutre Γ  la fin
915
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
916
+ this.activeVideo.addEventListener(
917
+ "ended",
918
+ () => {
919
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
920
+ this.returnToNeutral();
921
+ }
922
+ },
923
+ { once: true }
924
+ );
925
+ }
926
+ } else {
927
+ // Fallback to existing path if list empty
928
+ this.switchToContext("neutral");
929
+ }
930
+ }
931
+
932
+ // ADVANCED CONTEXTUAL ANALYSIS
933
+ async analyzeAndSelectVideo(userMessage, kimiResponse, emotionAnalysis, traits = null, affection = null, lang = null) {
934
+ // Do not analyze-switch away while dancing is sticky/playing
935
+ if (this._stickyContext === "dancing" || this.currentContext === "dancing") {
936
+ return; // let dancing finish
937
+ }
938
+ // Auto-detect language if not specified
939
+ let userLang = lang;
940
+ if (!userLang && window.kimiDB && window.kimiDB.getPreference) {
941
+ userLang = await window.KimiLanguageUtils.getLanguage();
942
+ }
943
+
944
+ // Use existing emotion analysis instead of creating new system
945
+ let detectedEmotion = "neutral";
946
+ if (window.kimiAnalyzeEmotion) {
947
+ // Analyze combined user message and Kimi response using existing function
948
+ const combinedText = [userMessage, kimiResponse].filter(Boolean).join(" ");
949
+ detectedEmotion = window.kimiAnalyzeEmotion(combinedText, userLang);
950
+ console.log(`🎭 Emotion detected: "${detectedEmotion}" from text: "${combinedText.substring(0, 50)}..."`);
951
+ } else if (emotionAnalysis && emotionAnalysis.reaction) {
952
+ // Fallback to provided emotion analysis
953
+ detectedEmotion = emotionAnalysis.reaction;
954
+ }
955
+
956
+ // Special case: Auto-dancing if playfulness high (more accessible)
957
+ if (traits && typeof traits.playfulness === "number" && traits.playfulness >= 75) {
958
+ this.switchToContext("dancing", "dancing", null, traits, affection);
959
+ return;
960
+ }
961
+
962
+ // Add to emotion history
963
+ this.emotionHistory.push(detectedEmotion);
964
+ if (this.emotionHistory.length > this.maxEmotionHistory) {
965
+ this.emotionHistory.shift();
966
+ }
967
+
968
+ // Analyze emotion trend - support all possible emotions
969
+ const counts = {
970
+ positive: 0,
971
+ negative: 0,
972
+ neutral: 0,
973
+ dancing: 0,
974
+ listening: 0,
975
+ romantic: 0,
976
+ laughing: 0,
977
+ surprise: 0,
978
+ confident: 0,
979
+ shy: 0,
980
+ flirtatious: 0,
981
+ kiss: 0,
982
+ goodbye: 0
983
+ };
984
+ for (let i = 0; i < this.emotionHistory.length; i++) {
985
+ const emo = this.emotionHistory[i];
986
+ if (counts[emo] !== undefined) counts[emo]++;
987
+ }
988
+
989
+ // Find dominant emotion
990
+ let dominant = null;
991
+ let max = 0;
992
+ for (const key in counts) {
993
+ if (counts[key] > max) {
994
+ max = counts[key];
995
+ dominant = key;
996
+ }
997
+ }
998
+
999
+ // Switch to appropriate context based on dominant emotion
1000
+ if (max >= 1 && dominant) {
1001
+ // Map emotion to context using centralized emotion mapping
1002
+ const emotionToCategory = window.kimiEmotionSystem?.emotionToVideoCategory || {};
1003
+ const targetCategory = emotionToCategory[dominant];
1004
+ if (targetCategory) {
1005
+ this.switchToContext(targetCategory, dominant, null, traits, affection);
1006
+ return;
1007
+ }
1008
+
1009
+ // Fallback for unmapped emotions
1010
+ if (dominant === "dancing") {
1011
+ this.switchToContext("dancing", "dancing", null, traits, affection);
1012
+ return;
1013
+ }
1014
+ if (dominant === "positive") {
1015
+ this.switchToContext("speakingPositive", "positive", null, traits, affection);
1016
+ return;
1017
+ }
1018
+ if (dominant === "negative") {
1019
+ this.switchToContext("speakingNegative", "negative", null, traits, affection);
1020
+ return;
1021
+ }
1022
+ if (dominant === "listening") {
1023
+ this.switchToContext("listening", "listening", null, traits, affection);
1024
+ return;
1025
+ }
1026
+ }
1027
+
1028
+ // Default to neutral context, with a positive bias at high affection (more accessible)
1029
+ if (traits && typeof traits.affection === "number" && traits.affection >= 80) {
1030
+ const chance = Math.random();
1031
+ if (chance < 0.35) {
1032
+ // Increased chance from 0.25 to 0.35
1033
+ this.switchToContext("speakingPositive", "positive", null, traits, affection);
1034
+ return;
1035
+ }
1036
+ }
1037
+ // Avoid neutral override if a transient state should persist (handled elsewhere)
1038
+ this.switchToContext("neutral", "neutral", null, traits, affection);
1039
+ }
1040
+
1041
+ // AUTOMATIC TRANSITION TO NEUTRAL
1042
+ scheduleAutoTransition(delayMs) {
1043
+ clearTimeout(this.autoTransitionTimer);
1044
+
1045
+ // Ne pas programmer d'auto-transition pour les contextes de base
1046
+ if (this.currentContext === "neutral" || this.currentContext === "listening") {
1047
+ return;
1048
+ }
1049
+
1050
+ // DurΓ©es adaptΓ©es selon le contexte (toutes les vidΓ©os font 10s)
1051
+ let duration;
1052
+ if (typeof delayMs === "number") {
1053
+ duration = delayMs;
1054
+ } else {
1055
+ switch (this.currentContext) {
1056
+ case "dancing":
1057
+ duration = 10000; // 10 secondes pour dancing (durΓ©e rΓ©elle des vidΓ©os)
1058
+ break;
1059
+ case "speakingPositive":
1060
+ case "speakingNegative":
1061
+ duration = 10000; // 10 secondes pour speaking (durΓ©e rΓ©elle des vidΓ©os)
1062
+ break;
1063
+ case "neutral":
1064
+ // Pas d'auto-transition pour neutral (Γ©tat par dΓ©faut, boucle en continu)
1065
+ return;
1066
+ case "listening":
1067
+ // Pas d'auto-transition pour listening (personnage Γ©coute l'utilisateur)
1068
+ return;
1069
+ default:
1070
+ duration = this.autoTransitionDuration; // 10 secondes par dΓ©faut
1071
+ }
1072
+ }
1073
+
1074
+ console.log(`Auto-transition scheduled in ${duration / 1000}s (${this.currentContext} β†’ neutral)`);
1075
+ this.autoTransitionTimer = setTimeout(() => {
1076
+ if (this.currentContext !== "neutral" && this.currentContext !== "listening") {
1077
+ if (!this._processPendingSwitches()) {
1078
+ this.returnToNeutral();
1079
+ }
1080
+ }
1081
+ }, duration);
1082
+ }
1083
+
1084
+ // COMPATIBILITY WITH THE OLD SYSTEM
1085
+ switchVideo(emotion = null) {
1086
+ if (emotion) {
1087
+ this.switchToContext("speaking", emotion);
1088
+ } else {
1089
+ this.switchToContext("neutral");
1090
+ }
1091
+ }
1092
+
1093
+ autoSwitchToNeutral() {
1094
+ this._neutralLock = false;
1095
+ this.isEmotionVideoPlaying = false;
1096
+ this.currentEmotionContext = null;
1097
+ this.switchToContext("neutral");
1098
+ }
1099
+
1100
+ getNextVideo(emotion, currentSrc) {
1101
+ // Adapt the old method for compatibility
1102
+ const category = this.determineCategory("speaking", emotion);
1103
+ return this.selectOptimalVideo(category);
1104
+ }
1105
+
1106
+ loadAndSwitchVideo(videoSrc, priority = "normal") {
1107
+ const startTs = performance.now();
1108
+ // Guard: ignore if recently failed and still in cooldown
1109
+ const lastFail = this._recentFailures.get(videoSrc);
1110
+ if (lastFail && performance.now() - lastFail < this._failureCooldown) {
1111
+ // Pick an alternative neutral as quick substitution
1112
+ const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1113
+ const alt = neutralList.find(v => v !== videoSrc) || neutralList[0];
1114
+ if (alt && alt !== videoSrc) {
1115
+ console.warn(`Skipping recently failed video (cooldown): ${videoSrc} -> trying alt: ${alt}`);
1116
+ return this.loadAndSwitchVideo(alt, priority);
1117
+ }
1118
+ }
1119
+ // Avoid redundant loading if the requested source is already active or currently loading in inactive element
1120
+ const activeSrc = this.activeVideo?.querySelector("source")?.getAttribute("src");
1121
+ const inactiveSrc = this.inactiveVideo?.querySelector("source")?.getAttribute("src");
1122
+ if (videoSrc && (videoSrc === activeSrc || (this._loadingInProgress && videoSrc === inactiveSrc))) {
1123
+ if (priority !== "high" && priority !== "speaking") {
1124
+ return; // no need to reload same video
1125
+ }
1126
+ }
1127
+ // Only log high priority or error cases to reduce noise
1128
+ if (priority === "speaking" || priority === "high") {
1129
+ console.log(`🎬 Loading video: ${videoSrc} (priority: ${priority})`);
1130
+ }
1131
+
1132
+ // Si une vidΓ©o haute prioritΓ© arrive, on peut interrompre le chargement en cours
1133
+ if (this._loadingInProgress) {
1134
+ if (priority === "high" || priority === "speaking") {
1135
+ this._loadingInProgress = false;
1136
+ // Nettoyer les event listeners en cours sur la vidΓ©o inactive
1137
+ this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1138
+ this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1139
+ this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1140
+ if (this._loadTimeout) {
1141
+ clearTimeout(this._loadTimeout);
1142
+ this._loadTimeout = null;
1143
+ }
1144
+ } else {
1145
+ return;
1146
+ }
1147
+ }
1148
+
1149
+ this._loadingInProgress = true;
1150
+
1151
+ // Nettoyer tous les timers en cours
1152
+ clearTimeout(this.autoTransitionTimer);
1153
+ if (this._loadTimeout) {
1154
+ clearTimeout(this._loadTimeout);
1155
+ this._loadTimeout = null;
1156
+ }
1157
+
1158
+ const pref = this._prefetchCache.get(videoSrc);
1159
+ if (pref && (pref.readyState >= 2 || pref.buffered.length > 0)) {
1160
+ const source = this.inactiveVideo.querySelector("source");
1161
+ source.setAttribute("src", videoSrc);
1162
+ try {
1163
+ this.inactiveVideo.currentTime = 0;
1164
+ } catch {}
1165
+ this.inactiveVideo.load();
1166
+ } else {
1167
+ this.inactiveVideo.querySelector("source").setAttribute("src", videoSrc);
1168
+ this.inactiveVideo.load();
1169
+ }
1170
+
1171
+ // Stocker les rΓ©fΓ©rences aux handlers pour pouvoir les nettoyer
1172
+ let fired = false;
1173
+ const onReady = () => {
1174
+ if (fired) return;
1175
+ fired = true;
1176
+ this._loadingInProgress = false;
1177
+ if (this._loadTimeout) {
1178
+ clearTimeout(this._loadTimeout);
1179
+ this._loadTimeout = null;
1180
+ }
1181
+ this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1182
+ this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1183
+ this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1184
+ // Update rolling average load time
1185
+ const duration = performance.now() - startTs;
1186
+ this._loadTimeSamples.push(duration);
1187
+ if (this._loadTimeSamples.length > this._maxSamples) this._loadTimeSamples.shift();
1188
+ const sum = this._loadTimeSamples.reduce((a, b) => a + b, 0);
1189
+ this._avgLoadTime = sum / this._loadTimeSamples.length;
1190
+ this._consecutiveErrorCount = 0; // reset on success
1191
+ this.performSwitch();
1192
+ };
1193
+ this._currentLoadHandler = onReady;
1194
+
1195
+ const folder = getCharacterInfo(this.characterName).videoFolder;
1196
+ // Rotating fallback pool (stable neutrals first positions)
1197
+ // Build or rebuild fallback pool when absent or when character changed
1198
+ if (!this._fallbackPool || this._fallbackPoolCharacter !== this.characterName) {
1199
+ const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1200
+ // Choose first 3 as core reliable set; if less than 3 available, take all
1201
+ this._fallbackPool = neutralList.slice(0, 3);
1202
+ this._fallbackIndex = 0;
1203
+ this._fallbackPoolCharacter = this.characterName;
1204
+ }
1205
+ const fallbackVideo = this._fallbackPool[this._fallbackIndex % this._fallbackPool.length];
1206
+
1207
+ this._currentErrorHandler = e => {
1208
+ const mediaEl = this.inactiveVideo;
1209
+ const readyState = mediaEl ? mediaEl.readyState : -1;
1210
+ const networkState = mediaEl ? mediaEl.networkState : -1;
1211
+ let mediaErrorCode = null;
1212
+ if (mediaEl && mediaEl.error) mediaErrorCode = mediaEl.error.code;
1213
+ console.warn(
1214
+ `Error loading video: ${videoSrc} (readyState=${readyState} networkState=${networkState} mediaError=${mediaErrorCode}) falling back to: ${fallbackVideo}`
1215
+ );
1216
+ this._loadingInProgress = false;
1217
+ if (this._loadTimeout) {
1218
+ clearTimeout(this._loadTimeout);
1219
+ this._loadTimeout = null;
1220
+ }
1221
+ this._recentFailures.set(videoSrc, performance.now());
1222
+ this._consecutiveErrorCount++;
1223
+ // Stop runaway fallback loop: pause if too many sequential errors relative to pool size
1224
+ if (this._fallbackPool && this._consecutiveErrorCount >= this._fallbackPool.length * 2) {
1225
+ console.error("Temporarily pausing fallback loop after repeated failures. Retrying in 2s.");
1226
+ setTimeout(() => {
1227
+ this._consecutiveErrorCount = 0;
1228
+ this.loadAndSwitchVideo(fallbackVideo, "high");
1229
+ }, 2000);
1230
+ return;
1231
+ }
1232
+ if (videoSrc !== fallbackVideo) {
1233
+ // Try fallback video
1234
+ this._fallbackIndex = (this._fallbackIndex + 1) % this._fallbackPool.length; // advance for next time
1235
+ this.loadAndSwitchVideo(fallbackVideo, "high");
1236
+ } else {
1237
+ // Ultimate fallback: try any neutral video
1238
+ console.error(`Fallback video also failed: ${fallbackVideo}. Trying ultimate fallback.`);
1239
+ const neutralVideos = this.videoCategories.neutral || [];
1240
+ if (neutralVideos.length > 0) {
1241
+ // Try a different neutral video
1242
+ const ultimateFallback = neutralVideos.find(video => video !== fallbackVideo);
1243
+ if (ultimateFallback) {
1244
+ this.loadAndSwitchVideo(ultimateFallback, "high");
1245
+ } else {
1246
+ // Last resort: try first neutral video anyway
1247
+ this.loadAndSwitchVideo(neutralVideos[0], "high");
1248
+ }
1249
+ } else {
1250
+ // Critical error: no neutral videos available
1251
+ console.error("CRITICAL: No neutral videos available!");
1252
+ this._switchInProgress = false;
1253
+ }
1254
+ }
1255
+ // Escalate diagnostics if many consecutive errors
1256
+ if (this._consecutiveErrorCount >= 3) {
1257
+ console.info(
1258
+ `Diagnostics: avgLoadTime=${this._avgLoadTime?.toFixed(1) || "n/a"}ms samples=${this._loadTimeSamples.length} prefetchCache=${this._prefetchCache.size}`
1259
+ );
1260
+ }
1261
+ };
1262
+
1263
+ this.inactiveVideo.addEventListener("loadeddata", this._currentLoadHandler, { once: true });
1264
+ this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1265
+ this.inactiveVideo.addEventListener("error", this._currentErrorHandler, { once: true });
1266
+
1267
+ if (this.inactiveVideo.readyState >= 2) {
1268
+ queueMicrotask(() => onReady());
1269
+ }
1270
+
1271
+ // Dynamic timeout: refined formula avg*1.5 + buffer, bounded
1272
+ let adaptiveTimeout = this._minTimeout;
1273
+ if (this._avgLoadTime) {
1274
+ adaptiveTimeout = Math.min(this._maxTimeout, Math.max(this._minTimeout, this._avgLoadTime * 1.5 + 400));
1275
+ }
1276
+ // Cap by clip length ratio if we know (assume 10000ms default when metadata absent)
1277
+ const currentClipMs = 10000; // All clips are 10s
1278
+ adaptiveTimeout = Math.min(adaptiveTimeout, Math.floor(currentClipMs * this._timeoutCapRatio));
1279
+ this._loadTimeout = setTimeout(() => {
1280
+ if (!fired) {
1281
+ // If metadata is there but not canplay yet, extend once
1282
+ if (this.inactiveVideo.readyState >= 1 && this.inactiveVideo.readyState < 2) {
1283
+ console.debug(
1284
+ `Extending timeout for ${videoSrc} (readyState=${this.inactiveVideo.readyState}) by ${this._timeoutExtension}ms`
1285
+ );
1286
+ this._loadTimeout = setTimeout(() => {
1287
+ if (!fired) {
1288
+ if (this.inactiveVideo.readyState >= 2) onReady();
1289
+ else this._currentErrorHandler();
1290
+ }
1291
+ }, this._timeoutExtension);
1292
+ return;
1293
+ }
1294
+ // Grace retry: still fetching over network (networkState=2) with no data (readyState=0)
1295
+ if (
1296
+ this.inactiveVideo.networkState === 2 &&
1297
+ this.inactiveVideo.readyState === 0 &&
1298
+ (this._graceRetryCounts?.[videoSrc] || 0) < 1
1299
+ ) {
1300
+ if (!this._graceRetryCounts) this._graceRetryCounts = {};
1301
+ this._graceRetryCounts[videoSrc] = (this._graceRetryCounts[videoSrc] || 0) + 1;
1302
+ const extra = this._timeoutExtension + 600;
1303
+ console.debug(`Grace retry for ${videoSrc} (network loading). Extending by ${extra}ms`);
1304
+ this._loadTimeout = setTimeout(() => {
1305
+ if (!fired) {
1306
+ if (this.inactiveVideo.readyState >= 2) onReady();
1307
+ else this._currentErrorHandler();
1308
+ }
1309
+ }, extra);
1310
+ return;
1311
+ }
1312
+ if (this.inactiveVideo.readyState >= 2) {
1313
+ onReady();
1314
+ } else {
1315
+ this._currentErrorHandler();
1316
+ }
1317
+ }
1318
+ }, adaptiveTimeout);
1319
+ }
1320
+
1321
+ usePreloadedVideo(preloadedVideo, videoSrc) {
1322
+ const source = this.inactiveVideo.querySelector("source");
1323
+ source.setAttribute("src", videoSrc);
1324
+
1325
+ this.inactiveVideo.currentTime = 0;
1326
+ this.inactiveVideo.load();
1327
+
1328
+ this._currentLoadHandler = () => {
1329
+ this._loadingInProgress = false;
1330
+ this.performSwitch();
1331
+ };
1332
+
1333
+ this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1334
+ }
1335
+
1336
+ performSwitch() {
1337
+ // Prevent rapid double toggles
1338
+ if (this._switchInProgress) return;
1339
+ this._switchInProgress = true;
1340
+
1341
+ const fromVideo = this.activeVideo;
1342
+ const toVideo = this.inactiveVideo;
1343
+
1344
+ // Perform a JS-managed crossfade for smoother transitions
1345
+ // Let crossfadeVideos resolve duration from CSS variable (--video-fade-duration)
1346
+ this.constructor.crossfadeVideos(fromVideo, toVideo, undefined, () => {
1347
+ // After crossfade completion, finalize state and classes
1348
+ fromVideo.classList.remove("active");
1349
+ toVideo.classList.add("active");
1350
+
1351
+ // Swap references
1352
+ const prevActive = this.activeVideo;
1353
+ const prevInactive = this.inactiveVideo;
1354
+ this.activeVideo = prevInactive;
1355
+ this.inactiveVideo = prevActive;
1356
+
1357
+ const playPromise = this.activeVideo.play();
1358
+ if (playPromise && typeof playPromise.then === "function") {
1359
+ playPromise
1360
+ .then(() => {
1361
+ try {
1362
+ const src = this.activeVideo?.querySelector("source")?.getAttribute("src");
1363
+ const info = { context: this.currentContext, emotion: this.currentEmotion };
1364
+ console.log("🎬 VideoManager: Now playing:", src, info);
1365
+ // Recompute autoTransitionDuration from actual duration if available (C)
1366
+ try {
1367
+ const d = this.activeVideo.duration;
1368
+ if (!isNaN(d) && d > 0.5) {
1369
+ // Keep 1s headroom before natural end for auto scheduling
1370
+ const target = Math.max(1000, d * 1000 - 1100);
1371
+ this.autoTransitionDuration = target;
1372
+ } else {
1373
+ this.autoTransitionDuration = 9900; // fallback for 10s clips
1374
+ }
1375
+ // Dynamic neutral prefetch to widen diversity without burst
1376
+ this._prefetchNeutralDynamic();
1377
+ } catch {}
1378
+ } catch {}
1379
+ this._switchInProgress = false;
1380
+ this.setupEventListenersForContext(this.currentContext);
1381
+ })
1382
+ .catch(error => {
1383
+ console.warn("Failed to play video:", error);
1384
+ // Revert to previous video to avoid frozen state
1385
+ toVideo.classList.remove("active");
1386
+ fromVideo.classList.add("active");
1387
+ this.activeVideo = fromVideo;
1388
+ this.inactiveVideo = toVideo;
1389
+ try {
1390
+ this.activeVideo.play().catch(() => {});
1391
+ } catch {}
1392
+ this._switchInProgress = false;
1393
+ this.setupEventListenersForContext(this.currentContext);
1394
+ });
1395
+ } else {
1396
+ // Non-promise play fallback
1397
+ this._switchInProgress = false;
1398
+ try {
1399
+ const d = this.activeVideo.duration;
1400
+ if (!isNaN(d) && d > 0.5) {
1401
+ const target = Math.max(1000, d * 1000 - 1100);
1402
+ this.autoTransitionDuration = target;
1403
+ } else {
1404
+ this.autoTransitionDuration = 9900;
1405
+ }
1406
+ this._prefetchNeutralDynamic();
1407
+ } catch {}
1408
+ this.setupEventListenersForContext(this.currentContext);
1409
+ }
1410
+ });
1411
+ }
1412
+
1413
+ _prefetchNeutralDynamic() {
1414
+ try {
1415
+ const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
1416
+ if (!neutrals.length) return;
1417
+ // Build a set of already cached or in-flight
1418
+ const cached = new Set(
1419
+ [...this._prefetchCache.keys(), ...this._prefetchInFlight.values()].map(v => (typeof v === "string" ? v : v?.src))
1420
+ ); // defensive
1421
+ const current = this.activeVideo?.querySelector("source")?.getAttribute("src");
1422
+ // Choose up to 2 unseen neutral videos different from current
1423
+ const candidates = neutrals.filter(s => s && s !== current && !cached.has(s));
1424
+ if (!candidates.length) return;
1425
+ let limit = 2;
1426
+ // Network-aware limiting
1427
+ try {
1428
+ const conn = navigator.connection || navigator.webkitConnection || navigator.mozConnection;
1429
+ if (conn && conn.effectiveType) {
1430
+ if (/2g/i.test(conn.effectiveType)) limit = 0;
1431
+ else if (/3g/i.test(conn.effectiveType)) limit = 1;
1432
+ }
1433
+ } catch {}
1434
+ if (limit <= 0) return;
1435
+ candidates.slice(0, limit).forEach(src => this._prefetch(src));
1436
+ } catch {}
1437
+ }
1438
+
1439
+ _prefetch(src) {
1440
+ if (!src || this._prefetchCache.has(src) || this._prefetchInFlight.has(src)) return;
1441
+ if (this._prefetchCache.size + this._prefetchInFlight.size >= this._maxPrefetch) return;
1442
+ this._prefetchInFlight.add(src);
1443
+ const v = document.createElement("video");
1444
+ v.preload = "auto";
1445
+ v.muted = true;
1446
+ v.playsInline = true;
1447
+ v.src = src;
1448
+ const cleanup = () => {
1449
+ v.oncanplaythrough = null;
1450
+ v.oncanplay = null;
1451
+ v.onerror = null;
1452
+ this._prefetchInFlight.delete(src);
1453
+ };
1454
+ v.oncanplay = () => {
1455
+ this._prefetchCache.set(src, v);
1456
+ this._trimPrefetchCacheIfNeeded();
1457
+ cleanup();
1458
+ };
1459
+ v.oncanplaythrough = () => {
1460
+ this._prefetchCache.set(src, v);
1461
+ this._trimPrefetchCacheIfNeeded();
1462
+ cleanup();
1463
+ };
1464
+ v.onerror = () => {
1465
+ cleanup();
1466
+ };
1467
+ try {
1468
+ v.load();
1469
+ } catch {}
1470
+ }
1471
+
1472
+ _trimPrefetchCacheIfNeeded() {
1473
+ try {
1474
+ // Only apply LRU trimming to neutral videos; cap at 6 neutrals cached
1475
+ const MAX_NEUTRAL = 6;
1476
+ const entries = [...this._prefetchCache.entries()];
1477
+ const neutralEntries = entries.filter(([src]) => /\/neutral\//.test(src));
1478
+ if (neutralEntries.length <= MAX_NEUTRAL) return;
1479
+ // LRU heuristic: older insertion first (Map preserves insertion order)
1480
+ const excess = neutralEntries.length - MAX_NEUTRAL;
1481
+ let removed = 0;
1482
+ for (const [src, vid] of neutralEntries) {
1483
+ if (removed >= excess) break;
1484
+ // Avoid removing currently active or about to be used
1485
+ const current = this.activeVideo?.querySelector("source")?.getAttribute("src");
1486
+ if (src === current) continue;
1487
+ this._prefetchCache.delete(src);
1488
+ try {
1489
+ vid.removeAttribute("src");
1490
+ vid.load();
1491
+ } catch {}
1492
+ removed++;
1493
+ }
1494
+ } catch {}
1495
+ }
1496
+
1497
+ _prefetchLikely(category) {
1498
+ const list = this.videoCategories[category] || [];
1499
+ // Prefetch 1-2 next likely videos different from current
1500
+ const current = this.activeVideo?.querySelector("source")?.getAttribute("src") || null;
1501
+ const candidates = list.filter(s => s && s !== current).slice(0, 2);
1502
+ candidates.forEach(src => this._prefetch(src));
1503
+ }
1504
+
1505
+ // DIAGNOSTIC AND DEBUG METHODS
1506
+ getCurrentVideoInfo() {
1507
+ const currentSrc = this.activeVideo.querySelector("source").getAttribute("src");
1508
+ return {
1509
+ currentVideo: currentSrc,
1510
+ context: this.currentContext,
1511
+ emotion: this.currentEmotion,
1512
+ category: this.determineCategory(this.currentContext, this.currentEmotion)
1513
+ };
1514
+ }
1515
+
1516
+ // METHODS TO ANALYZE EMOTIONS FROM TEXT
1517
+ // CLEANUP
1518
+ destroy() {
1519
+ clearTimeout(this.autoTransitionTimer);
1520
+ this.autoTransitionTimer = null;
1521
+ if (this._visibilityHandler) {
1522
+ document.removeEventListener("visibilitychange", this._visibilityHandler);
1523
+ this._visibilityHandler = null;
1524
+ }
1525
+ }
1526
+
1527
+ // Utilitaire pour déterminer la catégorie vidéo selon la moyenne des traits
1528
+ setMoodByPersonality(traits) {
1529
+ if (this._stickyContext === "dancing" || this.currentContext === "dancing") return;
1530
+ const category = window.getMoodCategoryFromPersonality ? window.getMoodCategoryFromPersonality(traits) : "neutral";
1531
+ // Normalize emotion so validation uses base emotion labels
1532
+ let emotion = category;
1533
+ if (category === "speakingPositive") emotion = "positive";
1534
+ else if (category === "speakingNegative") emotion = "negative";
1535
+ // For other categories (neutral, listening, dancing) emotion can equal category
1536
+ this.switchToContext(category, emotion, null, traits, traits.affection);
1537
+ }
1538
+
1539
+ _cleanupLoadingHandlers() {
1540
+ if (this._currentLoadHandler) {
1541
+ this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1542
+ this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1543
+ this._currentLoadHandler = null;
1544
+ }
1545
+ if (this._currentErrorHandler) {
1546
+ this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1547
+ this._currentErrorHandler = null;
1548
+ }
1549
+ if (this._loadTimeout) {
1550
+ clearTimeout(this._loadTimeout);
1551
+ this._loadTimeout = null;
1552
+ }
1553
+ this._loadingInProgress = false;
1554
+ this._switchInProgress = false;
1555
+ }
1556
+ }
1557
+
1558
+ // Expose globally for code that expects a window-level KimiVideoManager
1559
+ window.KimiVideoManager = KimiVideoManager;
1560
+
1561
+ // Also provide ES module exports for modern imports
1562
+ export { KimiVideoManager };
kimi-js/kimi-voices.js CHANGED
@@ -198,29 +198,24 @@ class KimiVoiceManager {
198
  return;
199
  }
200
  this._initializingVoices = true;
201
-
202
  this.availableVoices = this.speechSynthesis.getVoices();
203
-
204
- // Handle case where voices are not loaded yet (common timing issue)
 
 
 
 
 
 
 
205
  if (this.availableVoices.length === 0) {
206
  this._initializingVoices = false;
207
- // The onvoiceschanged listener will retry initialization
208
- return;
209
- }
210
-
211
- // Resolve effective selectedLanguage if missing
212
- if (!this.selectedLanguage) {
213
- const selectedLanguage = await this.db?.getPreference("selectedLanguage", "en");
214
- this.selectedLanguage = window.KimiLanguageUtils.normalizeLanguageCode(selectedLanguage || "en") || "en";
215
  }
216
  const effectiveLang = await this.getEffectiveLanguage(this.selectedLanguage);
217
-
218
  const savedVoice = await this.db?.getPreference("selectedVoice", "auto");
219
-
220
  const filteredVoices = this.getVoicesForLanguage(effectiveLang);
221
-
222
  if (savedVoice && savedVoice !== "auto") {
223
- // Only search within language-compatible voices
224
  const foundVoice = filteredVoices.find(voice => voice.name === savedVoice);
225
  if (foundVoice) {
226
  this.currentVoice = foundVoice;
@@ -231,14 +226,12 @@ class KimiVoiceManager {
231
  this._initializingVoices = false;
232
  return;
233
  } else {
234
- // Saved voice not compatible with current language, fall back to auto-selection
235
  console.log(
236
  `🎀 Saved voice "${savedVoice}" not compatible with language "${effectiveLang}", using auto-selection`
237
  );
238
  await this.db?.setPreference("selectedVoice", "auto");
239
  }
240
  }
241
-
242
  // Prefer female voices if available in the language-compatible voices
243
  // Use real voice names since voice.gender is rarely provided by browsers
244
  const femaleVoice = filteredVoices.find(voice => {
@@ -1339,7 +1332,10 @@ class KimiVoiceManager {
1339
  }
1340
 
1341
  async handleLanguageChange(e) {
1342
- const newLang = e.target.value;
 
 
 
1343
  const oldLang = this.selectedLanguage;
1344
  console.log(`🎤 Language changing: "${oldLang}" → "${newLang}"`);
1345
 
@@ -1369,12 +1365,15 @@ class KimiVoiceManager {
1369
 
1370
  // Re-init voices to pick a correct voice for the new language
1371
  await this.initVoices();
 
 
1372
  } catch (err) {
1373
  // On error, fall back to safe behavior: init voices and set 'auto'
1374
  try {
1375
  await this.db?.setPreference("selectedVoice", "auto");
1376
  } catch {}
1377
  await this.initVoices();
 
1378
  }
1379
 
1380
  if (this.currentVoice) {
@@ -1383,7 +1382,7 @@ class KimiVoiceManager {
1383
  console.warn(`🎀 No voice found for language "${newLang}"`);
1384
  }
1385
 
1386
- // Update recognition language safely (recreate instance to avoid stale internal state)
1387
  this._refreshRecognitionLanguage(newLang);
1388
  }
1389
 
 
198
  return;
199
  }
200
  this._initializingVoices = true;
 
201
  this.availableVoices = this.speechSynthesis.getVoices();
202
+ // Resolve selectedLanguage before any early return so SR uses correct language
203
+ if (!this.selectedLanguage) {
204
+ try {
205
+ const selectedLanguage = await this.db?.getPreference("selectedLanguage", "en");
206
+ this.selectedLanguage = window.KimiLanguageUtils.normalizeLanguageCode(selectedLanguage || "en") || "en";
207
+ } catch (_) {
208
+ this.selectedLanguage = "en";
209
+ }
210
+ }
211
  if (this.availableVoices.length === 0) {
212
  this._initializingVoices = false;
213
+ return; // onvoiceschanged will retry later
 
 
 
 
 
 
 
214
  }
215
  const effectiveLang = await this.getEffectiveLanguage(this.selectedLanguage);
 
216
  const savedVoice = await this.db?.getPreference("selectedVoice", "auto");
 
217
  const filteredVoices = this.getVoicesForLanguage(effectiveLang);
 
218
  if (savedVoice && savedVoice !== "auto") {
 
219
  const foundVoice = filteredVoices.find(voice => voice.name === savedVoice);
220
  if (foundVoice) {
221
  this.currentVoice = foundVoice;
 
226
  this._initializingVoices = false;
227
  return;
228
  } else {
 
229
  console.log(
230
  `🎀 Saved voice "${savedVoice}" not compatible with language "${effectiveLang}", using auto-selection`
231
  );
232
  await this.db?.setPreference("selectedVoice", "auto");
233
  }
234
  }
 
235
  // Prefer female voices if available in the language-compatible voices
236
  // Use real voice names since voice.gender is rarely provided by browsers
237
  const femaleVoice = filteredVoices.find(voice => {
 
1332
  }
1333
 
1334
  async handleLanguageChange(e) {
1335
+ const rawLang = e.target.value;
1336
+ const newLang = window.KimiLanguageUtils?.normalizeLanguageCode
1337
+ ? window.KimiLanguageUtils.normalizeLanguageCode(rawLang)
1338
+ : rawLang;
1339
  const oldLang = this.selectedLanguage;
1340
  console.log(`🎤 Language changing: "${oldLang}" → "${newLang}"`);
1341
 
 
1365
 
1366
  // Re-init voices to pick a correct voice for the new language
1367
  await this.initVoices();
1368
+ // Ensure voice selector reflects new language even if no voice chosen
1369
+ this.updateVoiceSelector();
1370
  } catch (err) {
1371
  // On error, fall back to safe behavior: init voices and set 'auto'
1372
  try {
1373
  await this.db?.setPreference("selectedVoice", "auto");
1374
  } catch {}
1375
  await this.initVoices();
1376
+ this.updateVoiceSelector();
1377
  }
1378
 
1379
  if (this.currentVoice) {
 
1382
  console.warn(`🎤 No voice found for language "${newLang}"`);
1383
  }
1384
 
1385
+ // Single clear path: recreate recognition instance with new language
1386
  this._refreshRecognitionLanguage(newLang);
1387
  }
1388