VirtualKimi commited on
Commit
6b7272b
·
verified ·
1 Parent(s): 16b167f

Upload 40 files

Browse files
CHANGELOG.md CHANGED
@@ -1,12 +1,5 @@
1
  # Virtual Kimi App Changelog
2
 
3
- # [1.1.6] - 2025-09-04
4
-
5
- ### Bug Fixes
6
-
7
- - Fixed a bug where sliders refused the value 0 (0 was treated as falsy and reset to defaults).
8
- - Updated crossfade transition from video playback to avoid visual glitches during video changes.
9
-
10
  # [1.1.5] - 2025-09-03
11
 
12
  ### Bug Fixes
 
1
  # Virtual Kimi App Changelog
2
 
 
 
 
 
 
 
 
3
  # [1.1.5] - 2025-09-03
4
 
5
  ### Bug Fixes
index.html CHANGED
@@ -14,8 +14,11 @@
14
  <!-- SEO Meta Tags -->
15
  <meta name="description"
16
  content="Virtual Kimi is an AI girlfriend and companion with evolving personality, advanced voice recognition and immersive interface. Discover the future of human-AI girlfriend relationships.">
 
 
17
  <meta name="author" content="Jean & Kimi">
18
- <meta name="robots" content="noindex, nofollow">
 
19
  <link rel="canonical" href="https://virtualkimi.com/virtual-kimi-app/" />
20
 
21
  <!-- Open Graph / Facebook -->
@@ -34,6 +37,45 @@
34
  content="Virtual AI companion with evolving personality and advanced voice recognition.">
35
  <meta property="twitter:image" content="kimi-icons/virtualkimi-logo.png">
36
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  <!-- Favicon -->
38
  <link rel="icon" type="image/x-icon" href="favicon.ico">
39
  <!-- Multi-size Favicons -->
@@ -1045,8 +1087,8 @@
1045
  <h3><i class="fas fa-code"></i> Technical Information</h3>
1046
  <div class="tech-info">
1047
  <p><strong>Created date :</strong> July 16, 2025</p>
1048
- <p><strong>Version :</strong> v1.1.6</p>
1049
- <p><strong>Last update :</strong> September 04, 2025</p>
1050
  <p><strong>Technologies :</strong> HTML5, CSS3, JavaScript ES6+, IndexedDB, Web Speech
1051
  API</p>
1052
  <p><strong>Status :</strong> ✅ Stable and functional</p>
 
14
  <!-- SEO Meta Tags -->
15
  <meta name="description"
16
  content="Virtual Kimi is an AI girlfriend and companion with evolving personality, advanced voice recognition and immersive interface. Discover the future of human-AI girlfriend relationships.">
17
+ <meta name="keywords"
18
+ content="artificial intelligence, virtual companion, emotional AI, voice recognition, advanced chatbot, Virtual Kimi, personalized AI assistant, girlfriend">
19
  <meta name="author" content="Jean & Kimi">
20
+ <meta name="robots" content="index, follow">
21
+ <meta name="language" content="EN">
22
  <link rel="canonical" href="https://virtualkimi.com/virtual-kimi-app/" />
23
 
24
  <!-- Open Graph / Facebook -->
 
37
  content="Virtual AI companion with evolving personality and advanced voice recognition.">
38
  <meta property="twitter:image" content="kimi-icons/virtualkimi-logo.png">
39
 
40
+ <!-- Schema.org consolidated JSON-LD (WebPage + mainEntity SoftwareApplication) -->
41
+ <script type="application/ld+json">
42
+ {
43
+ "@context": "https://schema.org",
44
+ "@type": "WebPage",
45
+ "name": "Virtual Kimi - Virtual AI Companion",
46
+ "description": "Virtual Kimi, your virtual AI girlfriend and companion with an evolving personality, multi-provider AI support, advanced voice recognition and immersive interface.",
47
+ "url": "https://virtualkimi.com/virtual-kimi-app/index.html",
48
+ "mainEntity": {
49
+ "@type": "SoftwareApplication",
50
+ "@id": "https://virtualkimi.com/virtual-kimi-app/#app",
51
+ "name": "Virtual Kimi",
52
+ "description": "Virtual Kimi, your virtual AI girlfriend and companion with an evolving personality, multi-provider AI support, voice recognition and immersive interface",
53
+ "applicationCategory": "AI Companion",
54
+ "operatingSystem": "Web Browser",
55
+ "offers": {
56
+ "@type": "Offer",
57
+ "price": "0",
58
+ "priceCurrency": "USD"
59
+ },
60
+ "creator": {
61
+ "@type": "Person",
62
+ "name": "Jean & Kimi"
63
+ },
64
+ "dateCreated": "2025-07-16",
65
+ "dateModified": "2025-09-03",
66
+ "version": "v1.1.5",
67
+ "features": [
68
+ "Advanced voice recognition",
69
+ "Evolving personality with 6 adjustable traits",
70
+ "Premium LLM integration",
71
+ "5 customizable visual themes",
72
+ "Persistent memory",
73
+ "Intelligent affection system"
74
+ ]
75
+ }
76
+ }
77
+ </script>
78
+
79
  <!-- Favicon -->
80
  <link rel="icon" type="image/x-icon" href="favicon.ico">
81
  <!-- Multi-size Favicons -->
 
1087
  <h3><i class="fas fa-code"></i> Technical Information</h3>
1088
  <div class="tech-info">
1089
  <p><strong>Created date :</strong> July 16, 2025</p>
1090
+ <p><strong>Version :</strong> v1.1.5</p>
1091
+ <p><strong>Last update :</strong> September 03, 2025</p>
1092
  <p><strong>Technologies :</strong> HTML5, CSS3, JavaScript ES6+, IndexedDB, Web Speech
1093
  API</p>
1094
  <p><strong>Status :</strong> ✅ Stable and functional</p>
kimi-css/kimi-style.css CHANGED
@@ -119,6 +119,9 @@
119
  --mic-pulse-color: rgba(39, 174, 96, 0.5);
120
  --mic-pulse-listening-color: rgba(39, 174, 96, 0.4);
121
 
 
 
 
122
  /* Cards & Stats */
123
  --card-bg: rgba(255, 255, 255, 0.02);
124
  --card-border: rgba(255, 255, 255, 0.05);
@@ -896,10 +899,16 @@ body {
896
  height: 100%;
897
  object-fit: contain;
898
  opacity: 0;
 
899
  background-color: #1a1a1a;
900
- backface-visibility: hidden;
901
- transition: opacity 300ms cubic-bezier(0.4, 0, 0.2, 1);
902
  will-change: opacity;
 
 
 
 
 
 
 
903
  }
904
 
905
  .content-overlay {
 
119
  --mic-pulse-color: rgba(39, 174, 96, 0.5);
120
  --mic-pulse-listening-color: rgba(39, 174, 96, 0.4);
121
 
122
+ /* Video crossfade timing */
123
+ --video-fade-duration: 400ms;
124
+
125
  /* Cards & Stats */
126
  --card-bg: rgba(255, 255, 255, 0.02);
127
  --card-border: rgba(255, 255, 255, 0.05);
 
899
  height: 100%;
900
  object-fit: contain;
901
  opacity: 0;
902
+ transition: opacity var(--video-fade-duration) cubic-bezier(0.4, 0, 0.2, 1);
903
  background-color: #1a1a1a;
 
 
904
  will-change: opacity;
905
+ backface-visibility: hidden;
906
+ }
907
+
908
+ .bg-video.transitioning {
909
+ opacity: 0;
910
+ transition: opacity var(--video-fade-duration) cubic-bezier(0.4, 0, 0.2, 1);
911
+ pointer-events: none;
912
  }
913
 
914
  .content-overlay {
kimi-js/kimi-config.js CHANGED
@@ -113,8 +113,10 @@ window.KIMI_CONFIG.validate = function (value, type) {
113
  try {
114
  const range = this.RANGES[type];
115
  if (!range) return { valid: true, value };
 
116
  const numValue = parseFloat(value);
117
  if (isNaN(numValue)) return { valid: false, value: this.DEFAULTS[type] };
 
118
  const clampedValue = Math.max(range.min, Math.min(range.max, numValue));
119
  return { valid: true, value: clampedValue };
120
  } catch (error) {
 
113
  try {
114
  const range = this.RANGES[type];
115
  if (!range) return { valid: true, value };
116
+
117
  const numValue = parseFloat(value);
118
  if (isNaN(numValue)) return { valid: false, value: this.DEFAULTS[type] };
119
+
120
  const clampedValue = Math.max(range.min, Math.min(range.max, numValue));
121
  return { valid: true, value: clampedValue };
122
  } catch (error) {
kimi-js/kimi-memory-ui.js CHANGED
@@ -191,9 +191,15 @@ class KimiMemoryUI {
191
  if (kv && kv.activeVideo) {
192
  try {
193
  const v = kv.activeVideo;
194
- if (v.ended || v.paused) {
195
- if (kv.ensureActivePlayback) kv.ensureActivePlayback();
196
- else if (kv.returnToNeutral) kv.returnToNeutral();
 
 
 
 
 
 
197
  }
198
  } catch {}
199
  }
 
191
  if (kv && kv.activeVideo) {
192
  try {
193
  const v = kv.activeVideo;
194
+ if (v.ended) {
195
+ if (typeof kv.returnToNeutral === "function") kv.returnToNeutral();
196
+ } else if (v.paused) {
197
+ // Use centralized video utility for play
198
+ window.KimiVideoManager.getVideoElement(v)
199
+ .play()
200
+ .catch(() => {
201
+ if (typeof kv.returnToNeutral === "function") kv.returnToNeutral();
202
+ });
203
  }
204
  } catch {}
205
  }
kimi-js/kimi-module.js CHANGED
@@ -1442,23 +1442,6 @@ async function sendMessage() {
1442
  }
1443
 
1444
  function setupSettingsListeners(kimiDB, kimiMemory) {
1445
- // ---------------------------------------------------------------------------
1446
- // Slider value coercion utilities
1447
- // Ensures that numeric sliders preserve explicit 0 instead of falling back
1448
- // to defaults via the logical OR (||) operator. We only fall back when the
1449
- // parsed value is NaN or validation returns undefined (never when value === 0).
1450
- // Use coerceFloat / coerceInt in all handlers to standardize behavior.
1451
- // ---------------------------------------------------------------------------
1452
- const coerceFloat = (raw, fallback, validationValue) => {
1453
- if (validationValue !== undefined) return validationValue;
1454
- const parsed = parseFloat(raw);
1455
- return Number.isNaN(parsed) ? fallback : parsed;
1456
- };
1457
- const coerceInt = (raw, fallback, validationValue) => {
1458
- if (validationValue !== undefined) return validationValue;
1459
- const parsed = parseInt(raw, 10);
1460
- return Number.isNaN(parsed) ? fallback : parsed;
1461
- };
1462
  const voiceRateSlider = document.getElementById("voice-rate");
1463
  const voicePitchSlider = document.getElementById("voice-pitch");
1464
  const voiceVolumeSlider = document.getElementById("voice-volume");
@@ -1543,7 +1526,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1543
  if (voiceRateSlider) {
1544
  const listener = e => {
1545
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voiceRate");
1546
- const value = coerceFloat(e.target.value, 1.1, validation?.value);
1547
 
1548
  document.getElementById("voice-rate-value").textContent = value;
1549
  e.target.value = value; // Ensure slider shows validated value
@@ -1555,7 +1538,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1555
  if (voicePitchSlider) {
1556
  const listener = e => {
1557
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voicePitch");
1558
- const value = coerceFloat(e.target.value, 1.1, validation?.value);
1559
 
1560
  document.getElementById("voice-pitch-value").textContent = value;
1561
  e.target.value = value;
@@ -1567,7 +1550,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1567
  if (voiceVolumeSlider) {
1568
  const listener = e => {
1569
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voiceVolume");
1570
- const value = coerceFloat(e.target.value, 0.8, validation?.value);
1571
 
1572
  document.getElementById("voice-volume-value").textContent = value;
1573
  e.target.value = value;
@@ -1630,7 +1613,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1630
  if (llmTemperatureSlider) {
1631
  const listener = e => {
1632
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmTemperature");
1633
- const value = coerceFloat(e.target.value, 0.9, validation?.value);
1634
 
1635
  document.getElementById("llm-temperature-value").textContent = value;
1636
  e.target.value = value;
@@ -1642,7 +1625,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1642
  if (llmMaxTokensSlider) {
1643
  const listener = e => {
1644
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmMaxTokens");
1645
- const value = coerceInt(e.target.value, 400, validation?.value);
1646
 
1647
  document.getElementById("llm-max-tokens-value").textContent = value;
1648
  e.target.value = value;
@@ -1654,7 +1637,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1654
  if (llmTopPSlider) {
1655
  const listener = e => {
1656
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmTopP");
1657
- const value = coerceFloat(e.target.value, 0.9, validation?.value);
1658
 
1659
  document.getElementById("llm-top-p-value").textContent = value;
1660
  e.target.value = value;
@@ -1666,7 +1649,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1666
  if (llmFrequencyPenaltySlider) {
1667
  const listener = e => {
1668
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmFrequencyPenalty");
1669
- const value = coerceFloat(e.target.value, 0.9, validation?.value);
1670
 
1671
  document.getElementById("llm-frequency-penalty-value").textContent = value;
1672
  e.target.value = value;
@@ -1678,7 +1661,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1678
  if (llmPresencePenaltySlider) {
1679
  const listener = e => {
1680
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmPresencePenalty");
1681
- const value = coerceFloat(e.target.value, 0.8, validation?.value);
1682
 
1683
  document.getElementById("llm-presence-penalty-value").textContent = value;
1684
  e.target.value = value;
@@ -1714,7 +1697,7 @@ function setupSettingsListeners(kimiDB, kimiMemory) {
1714
  if (interfaceOpacitySlider) {
1715
  const listener = e => {
1716
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "interfaceOpacity");
1717
- const value = coerceFloat(e.target.value, 0.8, validation?.value);
1718
 
1719
  document.getElementById("interface-opacity-value").textContent = value;
1720
  e.target.value = value;
 
1442
  }
1443
 
1444
  function setupSettingsListeners(kimiDB, kimiMemory) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1445
  const voiceRateSlider = document.getElementById("voice-rate");
1446
  const voicePitchSlider = document.getElementById("voice-pitch");
1447
  const voiceVolumeSlider = document.getElementById("voice-volume");
 
1526
  if (voiceRateSlider) {
1527
  const listener = e => {
1528
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voiceRate");
1529
+ const value = validation?.value || parseFloat(e.target.value) || 1.1;
1530
 
1531
  document.getElementById("voice-rate-value").textContent = value;
1532
  e.target.value = value; // Ensure slider shows validated value
 
1538
  if (voicePitchSlider) {
1539
  const listener = e => {
1540
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voicePitch");
1541
+ const value = validation?.value || parseFloat(e.target.value) || 1.1;
1542
 
1543
  document.getElementById("voice-pitch-value").textContent = value;
1544
  e.target.value = value;
 
1550
  if (voiceVolumeSlider) {
1551
  const listener = e => {
1552
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "voiceVolume");
1553
+ const value = validation?.value || parseFloat(e.target.value) || 0.8;
1554
 
1555
  document.getElementById("voice-volume-value").textContent = value;
1556
  e.target.value = value;
 
1613
  if (llmTemperatureSlider) {
1614
  const listener = e => {
1615
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmTemperature");
1616
+ const value = validation?.value || parseFloat(e.target.value) || 0.9;
1617
 
1618
  document.getElementById("llm-temperature-value").textContent = value;
1619
  e.target.value = value;
 
1625
  if (llmMaxTokensSlider) {
1626
  const listener = e => {
1627
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmMaxTokens");
1628
+ const value = validation?.value || parseInt(e.target.value) || 400;
1629
 
1630
  document.getElementById("llm-max-tokens-value").textContent = value;
1631
  e.target.value = value;
 
1637
  if (llmTopPSlider) {
1638
  const listener = e => {
1639
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmTopP");
1640
+ const value = validation?.value || parseFloat(e.target.value) || 0.9;
1641
 
1642
  document.getElementById("llm-top-p-value").textContent = value;
1643
  e.target.value = value;
 
1649
  if (llmFrequencyPenaltySlider) {
1650
  const listener = e => {
1651
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmFrequencyPenalty");
1652
+ const value = validation?.value || parseFloat(e.target.value) || 0.9;
1653
 
1654
  document.getElementById("llm-frequency-penalty-value").textContent = value;
1655
  e.target.value = value;
 
1661
  if (llmPresencePenaltySlider) {
1662
  const listener = e => {
1663
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "llmPresencePenalty");
1664
+ const value = validation?.value || parseFloat(e.target.value) || 0.8;
1665
 
1666
  document.getElementById("llm-presence-penalty-value").textContent = value;
1667
  e.target.value = value;
 
1697
  if (interfaceOpacitySlider) {
1698
  const listener = e => {
1699
  const validation = window.KimiValidationUtils?.validateRange(e.target.value, "interfaceOpacity");
1700
+ const value = validation?.value || parseFloat(e.target.value) || 0.8;
1701
 
1702
  document.getElementById("interface-opacity-value").textContent = value;
1703
  e.target.value = value;
kimi-js/kimi-plugin-manager.js CHANGED
@@ -15,16 +15,6 @@ class KimiPluginManager {
15
  path.startsWith("kimi-plugins/")
16
  );
17
  }
18
- // New: validate file name inside a plugin directory (relative path only)
19
- isValidPluginFileName(file) {
20
- return (
21
- typeof file === "string" &&
22
- /^[-a-zA-Z0-9_\/.]+$/.test(file) &&
23
- !file.startsWith("/") &&
24
- !file.includes("..") &&
25
- !/^https?:\/:/i.test(file)
26
- );
27
- }
28
  async loadPlugins() {
29
  const pluginDirs = await this.getPluginDirs();
30
  this.plugins = [];
@@ -37,17 +27,22 @@ class KimiPluginManager {
37
 
38
  // Basic manifest validation and path sanitization (deny external or absolute URLs)
39
  const validTypes = new Set(["theme", "voice", "behavior"]);
40
- // DEPRECATION: inlined isSafePath replaced by isValidPluginFileName()
 
 
 
 
 
41
 
42
  if (!manifest.name || !manifest.type || !validTypes.has(manifest.type)) {
43
  console.warn(`Invalid plugin manifest in ${dir}: missing name or invalid type`);
44
  continue;
45
  }
46
- if (manifest.style && !this.isValidPluginFileName(manifest.style)) {
47
  console.warn(`Blocked unsafe style path in ${dir}: ${manifest.style}`);
48
  delete manifest.style;
49
  }
50
- if (manifest.main && !this.isValidPluginFileName(manifest.main)) {
51
  console.warn(`Blocked unsafe main path in ${dir}: ${manifest.main}`);
52
  delete manifest.main;
53
  }
 
15
  path.startsWith("kimi-plugins/")
16
  );
17
  }
 
 
 
 
 
 
 
 
 
 
18
  async loadPlugins() {
19
  const pluginDirs = await this.getPluginDirs();
20
  this.plugins = [];
 
27
 
28
  // Basic manifest validation and path sanitization (deny external or absolute URLs)
29
  const validTypes = new Set(["theme", "voice", "behavior"]);
30
+ const isSafePath = p =>
31
+ typeof p === "string" &&
32
+ /^[-a-zA-Z0-9_\/.]+$/.test(p) &&
33
+ !p.startsWith("/") &&
34
+ !p.includes("..") &&
35
+ !/^https?:\/\//i.test(p);
36
 
37
  if (!manifest.name || !manifest.type || !validTypes.has(manifest.type)) {
38
  console.warn(`Invalid plugin manifest in ${dir}: missing name or invalid type`);
39
  continue;
40
  }
41
+ if (manifest.style && !isSafePath(manifest.style)) {
42
  console.warn(`Blocked unsafe style path in ${dir}: ${manifest.style}`);
43
  delete manifest.style;
44
  }
45
+ if (manifest.main && !isSafePath(manifest.main)) {
46
  console.warn(`Blocked unsafe main path in ${dir}: ${manifest.main}`);
47
  delete manifest.main;
48
  }
kimi-js/kimi-utils.js CHANGED
@@ -20,10 +20,24 @@ window.KimiValidationUtils = {
20
  return div.innerHTML;
21
  },
22
  validateRange(value, key) {
23
- if (!window.KimiRange) {
24
- throw new Error("KimiRange not initialized before validateRange call");
25
- }
26
- return window.KimiRange.clamp(key, value);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  }
28
  };
29
 
@@ -76,40 +90,6 @@ const KimiProviderPlaceholders = {
76
  window.KimiProviderPlaceholders = KimiProviderPlaceholders;
77
  export { KimiProviderUtils, KimiProviderPlaceholders };
78
 
79
- // Unified range management (central source of truth for numeric clamping)
80
- // Keys map UI/logic identifiers to CONFIG constant names.
81
- window.KimiRange = {
82
- KEY_MAP: {
83
- voiceRate: "VOICE_RATE",
84
- voicePitch: "VOICE_PITCH",
85
- voiceVolume: "VOICE_VOLUME",
86
- llmTemperature: "LLM_TEMPERATURE",
87
- llmMaxTokens: "LLM_MAX_TOKENS",
88
- llmTopP: "LLM_TOP_P",
89
- llmFrequencyPenalty: "LLM_FREQUENCY_PENALTY",
90
- llmPresencePenalty: "LLM_PRESENCE_PENALTY",
91
- interfaceOpacity: "INTERFACE_OPACITY"
92
- },
93
- getBounds(key) {
94
- try {
95
- const configKey = this.KEY_MAP[key];
96
- if (configKey && window.KIMI_CONFIG && window.KIMI_CONFIG.RANGES && window.KIMI_CONFIG.RANGES[configKey]) {
97
- const range = window.KIMI_CONFIG.RANGES[configKey];
98
- const def = window.KIMI_CONFIG.DEFAULTS?.[configKey] ?? range.min;
99
- return { min: range.min, max: range.max, def };
100
- }
101
- } catch {}
102
- return { min: 0, max: 100, def: 0 };
103
- },
104
- clamp(key, value) {
105
- const b = this.getBounds(key);
106
- const num = parseFloat(value);
107
- if (isNaN(num)) return { value: b.def, clamped: true };
108
- const v = Math.max(b.min, Math.min(b.max, num));
109
- return { value: v, clamped: v !== num };
110
- }
111
- };
112
-
113
  // Performance utility functions for debouncing and throttling
114
  window.KimiPerformanceUtils = {
115
  debounce: function (func, wait, immediate = false, context = null) {
@@ -221,8 +201,12 @@ class KimiSecurityUtils {
221
 
222
  switch (type) {
223
  case "html":
224
- // Reuse centralized escape logic (removes duplication with KimiValidationUtils.escapeHtml)
225
- return window.KimiValidationUtils?.escapeHtml(input) || input;
 
 
 
 
226
  case "number":
227
  const num = parseFloat(input);
228
  return isNaN(num) ? 0 : num;
@@ -241,6 +225,12 @@ class KimiSecurityUtils {
241
  }
242
  }
243
 
 
 
 
 
 
 
244
  static validateApiKey(key) {
245
  if (!key || typeof key !== "string") return false;
246
  if (window.KIMI_VALIDATORS && typeof window.KIMI_VALIDATORS.validateApiKey === "function") {
@@ -531,43 +521,6 @@ class KimiOverlayManager {
531
  open(name) {
532
  const el = this.overlays[name];
533
  if (el) el.classList.add("visible");
534
- // Special handling: opening settings overlay sometimes causes active video to freeze (browser rendering stall)
535
- if (name === "settings-overlay") {
536
- const kv = window.kimiVideo;
537
- if (kv && kv.activeVideo) {
538
- // Short delay so layout / repaint settles before forcing playback
539
- setTimeout(() => {
540
- try {
541
- const v = kv.activeVideo;
542
- if (!v) return;
543
- // If ended -> immediately cycle neutral to avoid static frame
544
- if (v.ended) {
545
- // Let manager handle picking next neutral
546
- if (kv.ensureActivePlayback) kv.ensureActivePlayback();
547
- else if (kv.returnToNeutral) kv.returnToNeutral();
548
- } else {
549
- if (v.duration && !isNaN(v.duration) && v.duration - v.currentTime < 0.4) {
550
- if (kv.returnToNeutral) kv.returnToNeutral();
551
- } else if (v.paused) {
552
- kv.ensureActivePlayback ? kv.ensureActivePlayback() : v.play().catch(() => {});
553
- }
554
- }
555
- // Restart freeze watchdog if available
556
- if (typeof kv._startFreezeWatchdog === "function") kv._startFreezeWatchdog();
557
- } catch {}
558
- }, 50);
559
- // Deferred recheck (covers cases where autoplay is blocked after overlay animation)
560
- setTimeout(() => {
561
- try {
562
- const v = kv.activeVideo;
563
- if (!v) return;
564
- if (!v.ended && (v.paused || v.readyState < 2)) {
565
- v.play().catch(() => {});
566
- }
567
- } catch {}
568
- }, 600);
569
- }
570
- }
571
  }
572
  close(name) {
573
  const el = this.overlays[name];
@@ -577,9 +530,12 @@ class KimiOverlayManager {
577
  if (kv && kv.activeVideo) {
578
  try {
579
  const v = kv.activeVideo;
580
- if (v.ended || v.paused) {
581
- if (kv.ensureActivePlayback) kv.ensureActivePlayback();
582
- else if (kv.returnToNeutral) kv.returnToNeutral();
 
 
 
583
  }
584
  } catch {}
585
  }
 
20
  return div.innerHTML;
21
  },
22
  validateRange(value, key) {
23
+ const bounds = {
24
+ voiceRate: { min: 0.5, max: 2, def: 1.1 },
25
+ voicePitch: { min: 0.5, max: 2, def: 1.1 },
26
+ voiceVolume: { min: 0, max: 1, def: 0.8 },
27
+ llmTemperature: { min: 0, max: 1, def: 0.9 },
28
+ llmMaxTokens: { min: 1, max: 8192, def: 400 },
29
+ llmTopP: { min: 0, max: 1, def: 0.9 },
30
+ llmFrequencyPenalty: { min: 0, max: 2, def: 0.9 },
31
+ llmPresencePenalty: { min: 0, max: 2, def: 0.8 },
32
+ interfaceOpacity: { min: 0.1, max: 1, def: 0.8 }
33
+ };
34
+ const b = bounds[key] || { min: 0, max: 100, def: 0 };
35
+ const v = window.KimiSecurityUtils
36
+ ? window.KimiSecurityUtils.validateRange(value, b.min, b.max, b.def)
37
+ : isNaN(parseFloat(value))
38
+ ? b.def
39
+ : Math.max(b.min, Math.min(b.max, parseFloat(value)));
40
+ return { value: v, clamped: v !== parseFloat(value) };
41
  }
42
  };
43
 
 
90
  window.KimiProviderPlaceholders = KimiProviderPlaceholders;
91
  export { KimiProviderUtils, KimiProviderPlaceholders };
92
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
93
  // Performance utility functions for debouncing and throttling
94
  window.KimiPerformanceUtils = {
95
  debounce: function (func, wait, immediate = false, context = null) {
 
201
 
202
  switch (type) {
203
  case "html":
204
+ return input
205
+ .replace(/&/g, "&amp;")
206
+ .replace(/</g, "&lt;")
207
+ .replace(/>/g, "&gt;")
208
+ .replace(/"/g, "&quot;")
209
+ .replace(/'/g, "&#x27;");
210
  case "number":
211
  const num = parseFloat(input);
212
  return isNaN(num) ? 0 : num;
 
225
  }
226
  }
227
 
228
+ static validateRange(value, min, max, defaultValue = 0) {
229
+ const num = parseFloat(value);
230
+ if (isNaN(num)) return defaultValue;
231
+ return Math.max(min, Math.min(max, num));
232
+ }
233
+
234
  static validateApiKey(key) {
235
  if (!key || typeof key !== "string") return false;
236
  if (window.KIMI_VALIDATORS && typeof window.KIMI_VALIDATORS.validateApiKey === "function") {
 
521
  open(name) {
522
  const el = this.overlays[name];
523
  if (el) el.classList.add("visible");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
524
  }
525
  close(name) {
526
  const el = this.overlays[name];
 
530
  if (kv && kv.activeVideo) {
531
  try {
532
  const v = kv.activeVideo;
533
+ if (v.ended) {
534
+ if (typeof kv.returnToNeutral === "function") kv.returnToNeutral();
535
+ } else if (v.paused) {
536
+ v.play().catch(() => {
537
+ if (typeof kv.returnToNeutral === "function") kv.returnToNeutral();
538
+ });
539
  }
540
  } catch {}
541
  }
kimi-js/kimi-videos.js CHANGED
@@ -1,8 +1,6 @@
1
  // Utility class for centralized video management
2
  class KimiVideoManager {
3
  constructor(video1, video2, characterName = "kimi") {
4
- // Fixed clip duration (all character videos are 10s)
5
- this.CLIP_DURATION_MS = 10000;
6
  this.characterName = characterName;
7
  this.video1 = video1;
8
  this.video2 = video2;
@@ -12,7 +10,7 @@ class KimiVideoManager {
12
  this.currentEmotion = "neutral";
13
  this.lastSwitchTime = Date.now();
14
  this.pendingSwitch = null;
15
- this.autoTransitionDuration = this.CLIP_DURATION_MS;
16
  this.transitionDuration = 300;
17
  this._prefetchCache = new Map();
18
  this._prefetchInFlight = new Set();
@@ -62,30 +60,54 @@ class KimiVideoManager {
62
  this._consecutiveErrorCount = 0;
63
  // Track per-video load attempts to adapt timeouts & avoid false failures
64
  this._videoAttempts = new Map();
65
- // Neutral pipeline disabled (previously handled seamless chaining)
66
- this.enableNeutralPipeline = false;
67
- this._nextNeutralPlannedAt = 0;
68
- this._scheduledNextNeutral = null;
69
- this._scheduledNeutralReady = false;
70
- this._neutralGapMetrics = null;
71
- // Speaking polarity throttling (prevents rapid flip-flop between positive / negative clips)
72
- this._speakingPolarityLast = null; // 'positive' | 'negative'
73
- this._speakingPolarityLastTs = 0;
74
- this._speakingPolarityBaseInterval = 2500; // base ms between polarity changes (will be scaled)
75
- this._speakingPolarityOverride = null; // manual override (ms) if set
76
- this._speakingPolarityRejectCount = 0; // number of consecutive rejected flips
77
- this._speakingPolarityMaxRejects = 2; // after this many rejects, allow next flip
78
- this._avgSpeakingDuration = 10000; // ms approximate; can refine after metadata
79
-
80
- // Ensure the initially active video is visible (remove any stale inline opacity)
81
  try {
82
- if (this.activeVideo && this.activeVideo.style && this.activeVideo.classList.contains("active")) {
83
- this.activeVideo.style.opacity = ""; // rely purely on CSS class
 
 
 
84
  }
85
  } catch {}
86
 
87
- // Optional debug overlay (activated via setDebug(true))
88
- this._debugOverlay = null;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
89
  }
90
 
91
  //Centralized video element creation utility.
@@ -97,6 +119,7 @@ class KimiVideoManager {
97
  video.muted = true;
98
  video.playsinline = true;
99
  video.preload = "auto";
 
100
  video.innerHTML =
101
  '<source src="" type="video/mp4" /><span data-i18n="video_not_supported">Your browser does not support the video tag.</span>';
102
  return video;
@@ -115,21 +138,12 @@ class KimiVideoManager {
115
 
116
  setDebug(enabled) {
117
  this._debug = !!enabled;
118
- if (this._debug && !this._debugOverlay) {
119
- this._installDebugOverlay();
120
- } else if (!this._debug && this._debugOverlay) {
121
- try {
122
- this._debugOverlay.remove();
123
- } catch {}
124
- this._debugOverlay = null;
125
- }
126
  }
127
 
128
  _logDebug(message, payload = null) {
129
  if (!this._debug) return;
130
  if (payload) console.log("🎬 VideoManager:", message, payload);
131
  else console.log("🎬 VideoManager:", message);
132
- this._updateDebugOverlay();
133
  }
134
 
135
  _logSelection(category, selectedSrc, candidates = []) {
@@ -145,31 +159,6 @@ class KimiVideoManager {
145
  });
146
  }
147
 
148
- // Dynamically derive minimum interval between polarity changes using avg clip duration, with optional override.
149
- _getPolarityMinInterval() {
150
- if (typeof this._speakingPolarityOverride === "number") return this._speakingPolarityOverride;
151
- const avg = this._avgSpeakingDuration || 10000; // ms
152
- const derived = Math.round(avg * 0.22); // ~2200ms for 10s
153
- const blended = Math.round((derived + this._speakingPolarityBaseInterval) / 2);
154
- return Math.min(3400, Math.max(1900, blended));
155
- }
156
-
157
- setPolarityInterval(ms) {
158
- if (typeof ms === "number" && ms >= 500) this._speakingPolarityOverride = ms;
159
- }
160
-
161
- clearPolarityIntervalOverride() {
162
- this._speakingPolarityOverride = null;
163
- }
164
-
165
- _updateAvgSpeakingDuration(sampleDurationMs) {
166
- if (!sampleDurationMs || isNaN(sampleDurationMs) || sampleDurationMs < 500) return;
167
- const alpha = 0.25;
168
- this._avgSpeakingDuration = this._avgSpeakingDuration
169
- ? Math.round(alpha * sampleDurationMs + (1 - alpha) * this._avgSpeakingDuration)
170
- : sampleDurationMs;
171
- }
172
-
173
  debugPrintHistory(category = null) {
174
  if (!this._debug) return;
175
  if (!this.playHistory) {
@@ -233,7 +222,7 @@ class KimiVideoManager {
233
  setCharacter(characterName) {
234
  this.characterName = characterName;
235
 
236
- // Clean up the ongoing handlers when changing characters.
237
  this._cleanupLoadingHandlers();
238
  // Reset per-character fallback pool so it will be rebuilt for the new character
239
  this._fallbackPool = null;
@@ -491,7 +480,7 @@ class KimiVideoManager {
491
  // Determine the category FIRST to ensure correct video selection
492
  const category = this.determineCategory(context, emotion, traits);
493
 
494
- // Determine the priority according to the context.
495
  let priority = "normal";
496
  if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") {
497
  priority = "speaking";
@@ -503,30 +492,15 @@ class KimiVideoManager {
503
  if (context === "dancing") {
504
  this._stickyContext = "dancing";
505
  // Lock roughly for one clip duration; will also be cleared on end/neutral
506
- this._stickyUntil = Date.now() + (this.CLIP_DURATION_MS - 500);
507
  }
508
 
509
- // Optimized path when TTS is speaking/listening (avoids flickering)
510
  if (
511
  window.voiceManager &&
512
  window.voiceManager.isSpeaking &&
513
  (context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
514
  ) {
515
- // Throttle polarity oscillations (e.g., positive -> negative -> positive too fast)
516
- const nowTs = Date.now();
517
- const desiredPolarity = emotion === "negative" ? "negative" : "positive";
518
- const polarityInterval = this._getPolarityMinInterval();
519
- if (
520
- this._speakingPolarityLast &&
521
- this._speakingPolarityLast !== desiredPolarity &&
522
- nowTs - this._speakingPolarityLastTs < polarityInterval &&
523
- this._speakingPolarityRejectCount < this._speakingPolarityMaxRejects
524
- ) {
525
- // Force reuse last polarity within throttle window
526
- emotion = this._speakingPolarityLast;
527
- context = emotion === "negative" ? "speakingNegative" : "speakingPositive";
528
- this._speakingPolarityRejectCount++;
529
- }
530
  const speakingPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
531
  const speakingCurrent = this.activeVideo.querySelector("source").getAttribute("src");
532
  if (speakingCurrent !== speakingPath || this.activeVideo.ended) {
@@ -536,24 +510,6 @@ class KimiVideoManager {
536
  this.currentContext = category;
537
  this.currentEmotion = emotion;
538
  this.lastSwitchTime = Date.now();
539
- this._speakingPolarityLast = emotion === "negative" ? "negative" : "positive";
540
- this._speakingPolarityLastTs = nowTs;
541
- if (this._debug)
542
- this._logDebug("Polarity throttle state", {
543
- last: this._speakingPolarityLast,
544
- interval: this._getPolarityMinInterval()
545
- });
546
- if (desiredPolarity === this._speakingPolarityLast) {
547
- // Flip accepted -> reset rejection counter
548
- this._speakingPolarityRejectCount = 0;
549
- }
550
- if (this._debug)
551
- this._logDebug("Polarity throttle state", {
552
- last: this._speakingPolarityLast,
553
- interval: polarityInterval,
554
- rejected: this._speakingPolarityRejectCount,
555
- override: this._speakingPolarityOverride
556
- });
557
  return;
558
  }
559
  if (window.voiceManager && window.voiceManager.isListening && context === "listening") {
@@ -569,7 +525,7 @@ class KimiVideoManager {
569
  return;
570
  }
571
 
572
- // Standard selection
573
  let videoPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
574
  const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
575
 
@@ -669,14 +625,6 @@ class KimiVideoManager {
669
  return;
670
  }
671
  }
672
- // If still in active listening phase, loop listening instead of neutral
673
- if (window.voiceManager && window.voiceManager.isListening) {
674
- this.isEmotionVideoPlaying = false;
675
- this.currentEmotionContext = null;
676
- this._neutralLock = false;
677
- this.switchToContext("listening", "listening");
678
- return;
679
- }
680
  // Otherwise, allow pending high-priority switch or return to neutral
681
  this.isEmotionVideoPlaying = false;
682
  this.currentEmotionContext = null;
@@ -699,7 +647,6 @@ class KimiVideoManager {
699
 
700
  // Neutral: on end, pick another neutral to avoid static last frame
701
  if (context === "neutral") {
702
- // Simple neutral loop: rely on returnToNeutral after ended
703
  this._globalEndedHandler = () => this.returnToNeutral();
704
  this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
705
  }
@@ -936,33 +883,7 @@ class KimiVideoManager {
936
  this.currentEmotionContext = emotion;
937
  }
938
 
939
- // Infer appropriate speaking category based on current / recent emotions
940
- _inferSpeakingCategory(defaultEmotion = "positive") {
941
- // If we still have an explicit current emotion context, prefer it
942
- let emo = this.currentEmotionContext || this.currentEmotion || defaultEmotion;
943
- if (emo !== "positive" && emo !== "negative") {
944
- // Look back into emotion history for a recent polarity
945
- if (Array.isArray(this.emotionHistory)) {
946
- for (let i = this.emotionHistory.length - 1; i >= 0; i--) {
947
- const e = this.emotionHistory[i];
948
- if (e === "positive" || e === "negative") {
949
- emo = e;
950
- break;
951
- }
952
- }
953
- }
954
- }
955
- if (emo !== "negative") return { category: "speakingPositive", emotion: "positive" };
956
- return { category: "speakingNegative", emotion: "negative" };
957
- }
958
-
959
  returnToNeutral() {
960
- // Throttle neutral transitions to avoid churn when multiple triggers fire close together
961
- const nowTs = Date.now();
962
- if (!this._lastNeutralAt) this._lastNeutralAt = 0;
963
- const MIN_NEUTRAL_INTERVAL = 800; // ms
964
- if (nowTs - this._lastNeutralAt < MIN_NEUTRAL_INTERVAL) return;
965
- this._lastNeutralAt = nowTs;
966
  // Always ensure we resume playback with a fresh neutral video to avoid freeze
967
  if (this._neutralLock) return;
968
  this._neutralLock = true;
@@ -974,19 +895,7 @@ class KimiVideoManager {
974
  this.isEmotionVideoPlaying = false;
975
  this.currentEmotionContext = null;
976
 
977
- // Neutral pipeline disabled: pick a fresh neutral only if not actively speaking.
978
- // If the voice (TTS) is in progress, we switch to an adapted speaking video (positive/negative) instead of looping on neutral.
979
- if (window.voiceManager && window.voiceManager.isSpeaking) {
980
- const { category, emotion } = this._inferSpeakingCategory();
981
- this.switchToContext(category, emotion);
982
- return;
983
- }
984
- // Maintain listening loop while user input capture is active
985
- if (window.voiceManager && window.voiceManager.isListening) {
986
- this.switchToContext("listening", "listening");
987
- return;
988
- }
989
-
990
  const category = "neutral";
991
  const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
992
  const available = this.videoCategories[category] || [];
@@ -1004,6 +913,18 @@ class KimiVideoManager {
1004
  this.currentContext = "neutral";
1005
  this.currentEmotion = "neutral";
1006
  this.lastSwitchTime = Date.now();
 
 
 
 
 
 
 
 
 
 
 
 
1007
  } else {
1008
  // Fallback to existing path if list empty
1009
  this.switchToContext("neutral");
@@ -1135,11 +1056,11 @@ class KimiVideoManager {
1135
  } else {
1136
  switch (this.currentContext) {
1137
  case "dancing":
1138
- duration = this.CLIP_DURATION_MS; // dancing clip length
1139
  break;
1140
  case "speakingPositive":
1141
  case "speakingNegative":
1142
- duration = this.CLIP_DURATION_MS; // speaking clip length
1143
  break;
1144
  case "neutral":
1145
  // Pas d'auto-transition pour neutral (état par défaut, boucle en continu)
@@ -1148,7 +1069,7 @@ class KimiVideoManager {
1148
  // Pas d'auto-transition pour listening (personnage écoute l'utilisateur)
1149
  return;
1150
  default:
1151
- duration = this.autoTransitionDuration; // default derived duration
1152
  }
1153
  }
1154
 
@@ -1163,53 +1084,109 @@ class KimiVideoManager {
1163
  }
1164
 
1165
  // COMPATIBILITY WITH THE OLD SYSTEM
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1166
 
1167
  loadAndSwitchVideo(videoSrc, priority = "normal") {
1168
  const startTs = performance.now();
1169
- // Basic attempt count (max 2 attempts per source in one call chain)
1170
- const attempts = (this._videoAttempts.get(videoSrc) || 0) + 1;
 
1171
  this._videoAttempts.set(videoSrc, attempts);
1172
- if (attempts > 2) return; // hard stop
1173
-
1174
- // Cooldown skip (simple): if failed recently, choose another neutral immediately
 
 
 
 
 
1175
  const lastFail = this._recentFailures.get(videoSrc);
1176
  if (lastFail && performance.now() - lastFail < this._failureCooldown) {
1177
- const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
1178
- const alt = neutrals.find(v => v !== videoSrc) || neutrals[0];
 
1179
  if (alt && alt !== videoSrc) {
1180
- this.loadAndSwitchVideo(alt, priority);
1181
- return;
1182
  }
1183
  }
1184
-
1185
  const activeSrc = this.activeVideo?.querySelector("source")?.getAttribute("src");
1186
- if (videoSrc === activeSrc && priority !== "speaking" && priority !== "high") return;
1187
- if (this._loadingInProgress && priority !== "speaking" && priority !== "high") return;
 
 
 
 
 
 
 
 
1188
 
 
1189
  if (this._loadingInProgress) {
1190
- // Cancel current load listeners
1191
- this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1192
- this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1193
- this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1194
- if (this._loadTimeout) clearTimeout(this._loadTimeout);
1195
- this._loadingInProgress = false;
 
 
 
 
 
 
 
1196
  }
1197
 
1198
  this._loadingInProgress = true;
 
 
1199
  clearTimeout(this.autoTransitionTimer);
1200
- if (this._loadTimeout) clearTimeout(this._loadTimeout);
1201
- this._loadTimeout = null;
 
 
1202
 
1203
- this.inactiveVideo.querySelector("source").setAttribute("src", videoSrc);
1204
- try {
1205
- this.inactiveVideo.currentTime = 0;
1206
- } catch {}
1207
- this.inactiveVideo.load();
 
 
 
 
 
 
 
1208
 
1209
- let finished = false;
1210
- const finalizeSuccess = () => {
1211
- if (finished) return;
1212
- finished = true;
 
 
1213
  this._loadingInProgress = false;
1214
  if (this._loadTimeout) {
1215
  clearTimeout(this._loadTimeout);
@@ -1218,69 +1195,162 @@ class KimiVideoManager {
1218
  this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1219
  this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1220
  this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1221
- // Rolling avg (light)
1222
- const dt = performance.now() - startTs;
1223
- this._loadTimeSamples.push(dt);
1224
  if (this._loadTimeSamples.length > this._maxSamples) this._loadTimeSamples.shift();
1225
- this._avgLoadTime = this._loadTimeSamples.reduce((a, b) => a + b, 0) / this._loadTimeSamples.length;
1226
- this._consecutiveErrorCount = 0;
 
1227
  this.performSwitch();
1228
  };
 
1229
 
1230
- this._currentLoadHandler = () => finalizeSuccess();
1231
-
1232
- this._currentErrorHandler = () => {
1233
- if (finished) return;
1234
- finished = true;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1235
  this._loadingInProgress = false;
1236
  if (this._loadTimeout) {
1237
  clearTimeout(this._loadTimeout);
1238
  this._loadTimeout = null;
1239
  }
1240
- this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1241
- this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1242
- this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1243
- this._recentFailures.set(videoSrc, performance.now());
1244
- this._consecutiveErrorCount++;
1245
- // Single retry with alternative neutral if first attempt
1246
- if (attempts === 1) {
1247
- const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
1248
- const alt = neutrals.find(v => v !== videoSrc);
1249
- if (alt) {
1250
- setTimeout(() => this.loadAndSwitchVideo(alt, priority), 0);
1251
- return;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1252
  }
1253
  }
1254
- // If no retry path succeeded, invoke centralized recovery
1255
- try {
1256
- this._logDebug && this._logDebug("Video load error → recovery", { src: videoSrc, attempts });
1257
- } catch {}
1258
- this._recoverFromVideoError(videoSrc, priority);
 
1259
  };
1260
 
1261
  this.inactiveVideo.addEventListener("loadeddata", this._currentLoadHandler, { once: true });
1262
  this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1263
  this.inactiveVideo.addEventListener("error", this._currentErrorHandler, { once: true });
1264
 
1265
- // Simple timeout: 5000ms + single extension 1500ms if metadata only
1266
- const baseTimeout = 5000;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1267
  this._loadTimeout = setTimeout(() => {
1268
- if (finished) return;
1269
- if (this.inactiveVideo.readyState === 1) {
1270
- // HAVE_METADATA only
1271
- this._loadTimeout = setTimeout(() => {
1272
- if (!finished) this._currentErrorHandler();
1273
- }, 1500);
1274
- return;
1275
- }
1276
- if (this.inactiveVideo.readyState >= 2) finalizeSuccess();
1277
- else {
1278
- try {
1279
- this._logDebug && this._logDebug("Video load timeout", { src: videoSrc, rs: this.inactiveVideo.readyState });
1280
- } catch {}
1281
- this._currentErrorHandler();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1282
  }
1283
- }, baseTimeout);
1284
  }
1285
 
1286
  usePreloadedVideo(preloadedVideo, videoSrc) {
@@ -1306,11 +1376,10 @@ class KimiVideoManager {
1306
  const fromVideo = this.activeVideo;
1307
  const toVideo = this.inactiveVideo;
1308
 
1309
- const finalizeSwap = () => {
1310
- // Clear any inline opacity to rely solely on class-based visibility
1311
- fromVideo.style.opacity = "";
1312
- toVideo.style.opacity = "";
1313
-
1314
  fromVideo.classList.remove("active");
1315
  toVideo.classList.add("active");
1316
 
@@ -1328,20 +1397,22 @@ class KimiVideoManager {
1328
  const src = this.activeVideo?.querySelector("source")?.getAttribute("src");
1329
  const info = { context: this.currentContext, emotion: this.currentEmotion };
1330
  console.log("🎬 VideoManager: Now playing:", src, info);
 
1331
  try {
1332
  const d = this.activeVideo.duration;
1333
  if (!isNaN(d) && d > 0.5) {
 
1334
  const target = Math.max(1000, d * 1000 - 1100);
1335
  this.autoTransitionDuration = target;
1336
  } else {
1337
- this.autoTransitionDuration = this.CLIP_DURATION_MS;
1338
  }
 
1339
  this._prefetchNeutralDynamic();
1340
  } catch {}
1341
  } catch {}
1342
  this._switchInProgress = false;
1343
  this.setupEventListenersForContext(this.currentContext);
1344
- this._startFreezeWatchdog();
1345
  })
1346
  .catch(error => {
1347
  console.warn("Failed to play video:", error);
@@ -1357,7 +1428,7 @@ class KimiVideoManager {
1357
  this.setupEventListenersForContext(this.currentContext);
1358
  });
1359
  } else {
1360
- // Non-promise fallback
1361
  this._switchInProgress = false;
1362
  try {
1363
  const d = this.activeVideo.duration;
@@ -1365,172 +1436,15 @@ class KimiVideoManager {
1365
  const target = Math.max(1000, d * 1000 - 1100);
1366
  this.autoTransitionDuration = target;
1367
  } else {
1368
- this.autoTransitionDuration = this.CLIP_DURATION_MS;
1369
  }
1370
  this._prefetchNeutralDynamic();
1371
  } catch {}
1372
  this.setupEventListenersForContext(this.currentContext);
1373
- this._startFreezeWatchdog();
1374
- }
1375
- };
1376
-
1377
- // Ensure target video is at start and attempt playback ahead of swap
1378
- try {
1379
- toVideo.currentTime = 0;
1380
- } catch {}
1381
- const ready = toVideo.readyState >= 2; // HAVE_CURRENT_DATA
1382
- const performSimpleSwap = () => {
1383
- // Remove active class from old, add to new, rely on CSS transitions (opacity) only
1384
- fromVideo.classList.remove("active");
1385
- toVideo.classList.add("active");
1386
- finalizeSwap();
1387
- };
1388
- if (!ready) {
1389
- const onReady = () => {
1390
- toVideo.removeEventListener("canplay", onReady);
1391
- performSimpleSwap();
1392
- };
1393
- toVideo.addEventListener("canplay", onReady, { once: true });
1394
- try {
1395
- toVideo.load();
1396
- } catch {}
1397
- toVideo.play().catch(() => {});
1398
- } else {
1399
- toVideo.play().catch(() => {});
1400
- performSimpleSwap();
1401
- }
1402
- }
1403
-
1404
- /**
1405
- * Ensure videos resume correctly when a blocking modal (settings or memory) is closed.
1406
- * Some browsers may pause autoplaying inline videos when large overlays appear; we hook
1407
- * into overlay class / style changes to attempt a resume if appropriate.
1408
- */
1409
- // Attempt to chain another speaking video when current one stalls/ends but TTS continues
1410
- _chainSpeakingFallback() {
1411
- try {
1412
- const emotion = this.currentEmotionContext || this.currentEmotion || "positive";
1413
- const category = emotion === "negative" ? "speakingNegative" : "speakingPositive";
1414
- const next = this.selectOptimalVideo(category, null, null, null, emotion);
1415
- if (next) {
1416
- this.loadAndSwitchVideo(next, "speaking");
1417
- this.currentContext = category;
1418
- this.currentEmotion = emotion === "negative" ? "negative" : "positive";
1419
- this.isEmotionVideoPlaying = true;
1420
- this.currentEmotionContext = emotion;
1421
- this.lastSwitchTime = Date.now();
1422
  }
1423
- } catch {}
1424
- }
1425
-
1426
- // Central recovery path for load errors or repeated stalls
1427
- _recoverFromVideoError(failedSrc, priority) {
1428
- try {
1429
- if (window.voiceManager && window.voiceManager.isSpeaking) {
1430
- this._chainSpeakingFallback();
1431
- return;
1432
- }
1433
- if (window.voiceManager && window.voiceManager.isListening) {
1434
- this.switchToContext("listening", "listening");
1435
- return;
1436
- }
1437
- this.returnToNeutral();
1438
- } catch {}
1439
- }
1440
-
1441
- _installModalResumeObserver() {
1442
- if (this._modalObserverInstalled) return;
1443
- this._modalObserverInstalled = true;
1444
- const tryResume = () => {
1445
- try {
1446
- const v = this.activeVideo;
1447
- if (v && v.paused && !v.ended) {
1448
- v.play().catch(() => {});
1449
- } else if (v && v.ended && typeof this.returnToNeutral === "function") {
1450
- this.returnToNeutral();
1451
- }
1452
- } catch {}
1453
- };
1454
- const observeEl = id => {
1455
- const el = document.getElementById(id);
1456
- if (!el) return;
1457
- const obs = new MutationObserver(muts => {
1458
- for (const m of muts) {
1459
- if (m.type === "attributes" && (m.attributeName === "class" || m.attributeName === "style")) {
1460
- // When modal becomes hidden
1461
- const hidden = (el.style.display && el.style.display === "none") || !el.classList.contains("visible");
1462
- if (hidden) setTimeout(tryResume, 30);
1463
- }
1464
- }
1465
- });
1466
- obs.observe(el, { attributes: true, attributeFilter: ["class", "style"] });
1467
- };
1468
- // Known modals
1469
- observeEl("memory-overlay");
1470
- observeEl("settings-overlay");
1471
- // Visibility change (tab switching)
1472
- document.addEventListener("visibilitychange", () => {
1473
- if (!document.hidden) setTimeout(tryResume, 60);
1474
  });
1475
  }
1476
 
1477
- // (Removed JS crossfade: now handled purely by CSS transitions on the .active class.)
1478
-
1479
- // Watchdog to detect freeze when a 10s clip reaches end but 'ended' listener may not fire (browser quirk)
1480
- _startFreezeWatchdog() {
1481
- clearInterval(this._freezeInterval);
1482
- const v = this.activeVideo;
1483
- if (!v) return;
1484
- const CHECK_MS = 1000;
1485
- this._lastProgressTime = Date.now();
1486
- let lastTime = v.currentTime;
1487
- // Stalled detection via progress event
1488
- const onStalled = () => {
1489
- this._lastProgressTime = Date.now();
1490
- };
1491
- v.addEventListener("timeupdate", onStalled);
1492
- v.addEventListener("progress", onStalled);
1493
- this._freezeInterval = setInterval(() => {
1494
- if (v !== this.activeVideo) return; // switched
1495
- const dur = v.duration || 9.9; // assume 9.9s
1496
- const nearEnd = v.currentTime >= dur - 0.25; // last 250ms
1497
- const progressed = v.currentTime !== lastTime;
1498
- if (progressed) {
1499
- lastTime = v.currentTime;
1500
- this._lastProgressTime = Date.now();
1501
- }
1502
- // If near end and not auto-transitioned within 500ms, trigger manual neutral
1503
- if (nearEnd && Date.now() - this._lastProgressTime > 600) {
1504
- // Ensure we are not already neutral cycling
1505
- if (this.currentContext === "neutral") {
1506
- // Pick another neutral to animate
1507
- try {
1508
- this.returnToNeutral();
1509
- } catch {}
1510
- } else {
1511
- if (!this._processPendingSwitches()) this.returnToNeutral();
1512
- }
1513
- }
1514
- // Extra safety: if video paused unexpectedly before end
1515
- if (!v.paused && !v.ended && Date.now() - this._lastProgressTime > 4000) {
1516
- try {
1517
- v.play().catch(() => {});
1518
- } catch {}
1519
- } else if (v.paused && !v.ended) {
1520
- // Resume if paused but not finished
1521
- try {
1522
- v.play().catch(() => {});
1523
- } catch {}
1524
- }
1525
- // Cleanup if naturally ended (ended handler will schedule next)
1526
- if (v.ended) {
1527
- clearInterval(this._freezeInterval);
1528
- v.removeEventListener("timeupdate", onStalled);
1529
- v.removeEventListener("progress", onStalled);
1530
- }
1531
- }, CHECK_MS);
1532
- }
1533
-
1534
  _prefetchNeutralDynamic() {
1535
  try {
1536
  const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
@@ -1634,58 +1548,6 @@ class KimiVideoManager {
1634
  };
1635
  }
1636
 
1637
- _installDebugOverlay() {
1638
- const div = document.createElement("div");
1639
- div.style.position = "fixed";
1640
- div.style.bottom = "6px";
1641
- div.style.left = "6px";
1642
- div.style.padding = "6px 8px";
1643
- div.style.background = "rgba(0,0,0,0.55)";
1644
- div.style.color = "#fff";
1645
- div.style.font = "12px/1.35 monospace";
1646
- div.style.zIndex = 9999;
1647
- div.style.pointerEvents = "none";
1648
- div.style.borderRadius = "4px";
1649
- div.style.maxWidth = "300px";
1650
- div.style.whiteSpace = "pre-wrap";
1651
- div.id = "kimi-video-debug";
1652
- document.body.appendChild(div);
1653
- this._debugOverlay = div;
1654
- this._updateDebugOverlay();
1655
- }
1656
-
1657
- _updateDebugOverlay() {
1658
- if (!this._debug || !this._debugOverlay) return;
1659
- const v = this.activeVideo;
1660
- let info = this.getCurrentVideoInfo();
1661
- let status = "";
1662
- try {
1663
- status =
1664
- `t=${v.currentTime.toFixed(2)} / ${isNaN(v.duration) ? "?" : v.duration.toFixed(2)}\n` +
1665
- `paused=${v.paused} ended=${v.ended} ready=${v.readyState}\n` +
1666
- `ctx=${info.context} emo=${info.emotion}\n` +
1667
- `switching=${this._switchInProgress} loading=${this._loadingInProgress}`;
1668
- } catch {
1669
- status = "n/a";
1670
- }
1671
- this._debugOverlay.textContent = status;
1672
- }
1673
-
1674
- // Public helper to ensure the active clip is playing (centralized safety)
1675
- ensureActivePlayback() {
1676
- try {
1677
- const v = this.activeVideo;
1678
- if (!v) return;
1679
- if (v.ended) {
1680
- this.returnToNeutral();
1681
- return;
1682
- }
1683
- if (v.paused) v.play().catch(() => {});
1684
- } catch {}
1685
- }
1686
-
1687
- // Neutral pipeline methods removed (simplified looping now handled by returnToNeutral + ended handlers)
1688
-
1689
  // METHODS TO ANALYZE EMOTIONS FROM TEXT
1690
  // CLEANUP
1691
  destroy() {
 
1
  // Utility class for centralized video management
2
  class KimiVideoManager {
3
  constructor(video1, video2, characterName = "kimi") {
 
 
4
  this.characterName = characterName;
5
  this.video1 = video1;
6
  this.video2 = video2;
 
10
  this.currentEmotion = "neutral";
11
  this.lastSwitchTime = Date.now();
12
  this.pendingSwitch = null;
13
+ this.autoTransitionDuration = 9900;
14
  this.transitionDuration = 300;
15
  this._prefetchCache = new Map();
16
  this._prefetchInFlight = new Set();
 
60
  this._consecutiveErrorCount = 0;
61
  // Track per-video load attempts to adapt timeouts & avoid faux échecs
62
  this._videoAttempts = new Map();
63
+ }
64
+
65
+ //Centralized crossfade transition between two videos.
66
+ static crossfadeVideos(fromVideo, toVideo, duration = 300, onComplete) {
67
+ // Resolve duration from CSS variable if present
 
 
 
 
 
 
 
 
 
 
 
68
  try {
69
+ const cssDur = getComputedStyle(document.documentElement).getPropertyValue("--video-fade-duration").trim();
70
+ if (cssDur) {
71
+ // Convert CSS time to ms number if needed (e.g., '300ms' or '0.3s')
72
+ if (cssDur.endsWith("ms")) duration = parseFloat(cssDur);
73
+ else if (cssDur.endsWith("s")) duration = Math.round(parseFloat(cssDur) * 1000);
74
  }
75
  } catch {}
76
 
77
+ // Preload and strict synchronization
78
+ const easing = "ease-in-out";
79
+ fromVideo.style.transition = `opacity ${duration}ms ${easing}`;
80
+ toVideo.style.transition = `opacity ${duration}ms ${easing}`;
81
+ // Prepare target video (opacity 0, top z-index)
82
+ toVideo.style.opacity = "0";
83
+ toVideo.style.zIndex = "2";
84
+ fromVideo.style.zIndex = "1";
85
+
86
+ // Start target video slightly before the crossfade
87
+ const startTarget = () => {
88
+ if (toVideo.paused) toVideo.play().catch(() => {});
89
+ // Lance le fondu croisé
90
+ setTimeout(() => {
91
+ fromVideo.style.opacity = "0";
92
+ toVideo.style.opacity = "1";
93
+ }, 20);
94
+ // After transition, adjust z-index and call the callback
95
+ setTimeout(() => {
96
+ fromVideo.style.zIndex = "1";
97
+ toVideo.style.zIndex = "2";
98
+ if (onComplete) onComplete();
99
+ }, duration + 30);
100
+ };
101
+
102
+ // If target video is not ready, wait for canplay
103
+ if (toVideo.readyState < 3) {
104
+ toVideo.addEventListener("canplay", startTarget, { once: true });
105
+ toVideo.load();
106
+ } else {
107
+ startTarget();
108
+ }
109
+ // Ensure source video is playing
110
+ if (fromVideo.paused) fromVideo.play().catch(() => {});
111
  }
112
 
113
  //Centralized video element creation utility.
 
119
  video.muted = true;
120
  video.playsinline = true;
121
  video.preload = "auto";
122
+ video.style.opacity = "0";
123
  video.innerHTML =
124
  '<source src="" type="video/mp4" /><span data-i18n="video_not_supported">Your browser does not support the video tag.</span>';
125
  return video;
 
138
 
139
  setDebug(enabled) {
140
  this._debug = !!enabled;
 
 
 
 
 
 
 
 
141
  }
142
 
143
  _logDebug(message, payload = null) {
144
  if (!this._debug) return;
145
  if (payload) console.log("🎬 VideoManager:", message, payload);
146
  else console.log("🎬 VideoManager:", message);
 
147
  }
148
 
149
  _logSelection(category, selectedSrc, candidates = []) {
 
159
  });
160
  }
161
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
162
  debugPrintHistory(category = null) {
163
  if (!this._debug) return;
164
  if (!this.playHistory) {
 
222
  setCharacter(characterName) {
223
  this.characterName = characterName;
224
 
225
+ // Nettoyer les handlers en cours lors du changement de personnage
226
  this._cleanupLoadingHandlers();
227
  // Reset per-character fallback pool so it will be rebuilt for the new character
228
  this._fallbackPool = null;
 
480
  // Determine the category FIRST to ensure correct video selection
481
  const category = this.determineCategory(context, emotion, traits);
482
 
483
+ // Déterminer la priorité selon le contexte
484
  let priority = "normal";
485
  if (context === "speaking" || context === "speakingPositive" || context === "speakingNegative") {
486
  priority = "speaking";
 
492
  if (context === "dancing") {
493
  this._stickyContext = "dancing";
494
  // Lock roughly for one clip duration; will also be cleared on end/neutral
495
+ this._stickyUntil = Date.now() + 9500;
496
  }
497
 
498
+ // Chemin optimisé lorsque TTS parle/écoute (évite clignotements)
499
  if (
500
  window.voiceManager &&
501
  window.voiceManager.isSpeaking &&
502
  (context === "speaking" || context === "speakingPositive" || context === "speakingNegative")
503
  ) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
504
  const speakingPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
505
  const speakingCurrent = this.activeVideo.querySelector("source").getAttribute("src");
506
  if (speakingCurrent !== speakingPath || this.activeVideo.ended) {
 
510
  this.currentContext = category;
511
  this.currentEmotion = emotion;
512
  this.lastSwitchTime = Date.now();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
513
  return;
514
  }
515
  if (window.voiceManager && window.voiceManager.isListening && context === "listening") {
 
525
  return;
526
  }
527
 
528
+ // Sélection standard
529
  let videoPath = this.selectOptimalVideo(category, specificVideo, traits, affection, emotion);
530
  const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
531
 
 
625
  return;
626
  }
627
  }
 
 
 
 
 
 
 
 
628
  // Otherwise, allow pending high-priority switch or return to neutral
629
  this.isEmotionVideoPlaying = false;
630
  this.currentEmotionContext = null;
 
647
 
648
  // Neutral: on end, pick another neutral to avoid static last frame
649
  if (context === "neutral") {
 
650
  this._globalEndedHandler = () => this.returnToNeutral();
651
  this.activeVideo.addEventListener("ended", this._globalEndedHandler, { once: true });
652
  }
 
883
  this.currentEmotionContext = emotion;
884
  }
885
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
886
  returnToNeutral() {
 
 
 
 
 
 
887
  // Always ensure we resume playback with a fresh neutral video to avoid freeze
888
  if (this._neutralLock) return;
889
  this._neutralLock = true;
 
895
  this.isEmotionVideoPlaying = false;
896
  this.currentEmotionContext = null;
897
 
898
+ // Si la voix est encore en cours, relancer une vidéo neutre en boucle
 
 
 
 
 
 
 
 
 
 
 
 
899
  const category = "neutral";
900
  const currentVideoSrc = this.activeVideo.querySelector("source").getAttribute("src");
901
  const available = this.videoCategories[category] || [];
 
913
  this.currentContext = "neutral";
914
  this.currentEmotion = "neutral";
915
  this.lastSwitchTime = Date.now();
916
+ // Si la voix est encore en cours, s'assurer qu'on relance une vidéo neutre à la fin
917
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
918
+ this.activeVideo.addEventListener(
919
+ "ended",
920
+ () => {
921
+ if (window.voiceManager && window.voiceManager.isSpeaking) {
922
+ this.returnToNeutral();
923
+ }
924
+ },
925
+ { once: true }
926
+ );
927
+ }
928
  } else {
929
  // Fallback to existing path if list empty
930
  this.switchToContext("neutral");
 
1056
  } else {
1057
  switch (this.currentContext) {
1058
  case "dancing":
1059
+ duration = 10000; // 10 secondes pour dancing (durée réelle des vidéos)
1060
  break;
1061
  case "speakingPositive":
1062
  case "speakingNegative":
1063
+ duration = 10000; // 10 secondes pour speaking (durée réelle des vidéos)
1064
  break;
1065
  case "neutral":
1066
  // Pas d'auto-transition pour neutral (état par défaut, boucle en continu)
 
1069
  // Pas d'auto-transition pour listening (personnage écoute l'utilisateur)
1070
  return;
1071
  default:
1072
+ duration = this.autoTransitionDuration; // 10 secondes par défaut
1073
  }
1074
  }
1075
 
 
1084
  }
1085
 
1086
  // COMPATIBILITY WITH THE OLD SYSTEM
1087
+ switchVideo(emotion = null) {
1088
+ if (emotion) {
1089
+ this.switchToContext("speaking", emotion);
1090
+ } else {
1091
+ this.switchToContext("neutral");
1092
+ }
1093
+ }
1094
+
1095
+ autoSwitchToNeutral() {
1096
+ this._neutralLock = false;
1097
+ this.isEmotionVideoPlaying = false;
1098
+ this.currentEmotionContext = null;
1099
+ this.switchToContext("neutral");
1100
+ }
1101
+
1102
+ getNextVideo(emotion, currentSrc) {
1103
+ // Adapt the old method for compatibility
1104
+ const category = this.determineCategory("speaking", emotion);
1105
+ return this.selectOptimalVideo(category);
1106
+ }
1107
 
1108
  loadAndSwitchVideo(videoSrc, priority = "normal") {
1109
  const startTs = performance.now();
1110
+ // Register attempt count (used for adaptive backoff)
1111
+ const prevAttempts = this._videoAttempts.get(videoSrc) || 0;
1112
+ const attempts = prevAttempts + 1;
1113
  this._videoAttempts.set(videoSrc, attempts);
1114
+ // Light trimming to avoid unbounded growth
1115
+ if (this._videoAttempts.size > 300) {
1116
+ for (const key of this._videoAttempts.keys()) {
1117
+ if (this._videoAttempts.size <= 200) break;
1118
+ this._videoAttempts.delete(key);
1119
+ }
1120
+ }
1121
+ // Guard: ignore if recently failed and still in cooldown
1122
  const lastFail = this._recentFailures.get(videoSrc);
1123
  if (lastFail && performance.now() - lastFail < this._failureCooldown) {
1124
+ // Pick an alternative neutral as quick substitution
1125
+ const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1126
+ const alt = neutralList.find(v => v !== videoSrc) || neutralList[0];
1127
  if (alt && alt !== videoSrc) {
1128
+ console.warn(`Skipping recently failed video (cooldown): ${videoSrc} -> trying alt: ${alt}`);
1129
+ return this.loadAndSwitchVideo(alt, priority);
1130
  }
1131
  }
1132
+ // Avoid redundant loading if the requested source is already active or currently loading in inactive element
1133
  const activeSrc = this.activeVideo?.querySelector("source")?.getAttribute("src");
1134
+ const inactiveSrc = this.inactiveVideo?.querySelector("source")?.getAttribute("src");
1135
+ if (videoSrc && (videoSrc === activeSrc || (this._loadingInProgress && videoSrc === inactiveSrc))) {
1136
+ if (priority !== "high" && priority !== "speaking") {
1137
+ return; // no need to reload same video
1138
+ }
1139
+ }
1140
+ // Only log high priority or error cases to reduce noise
1141
+ if (priority === "speaking" || priority === "high") {
1142
+ console.log(`🎬 Loading video: ${videoSrc} (priority: ${priority})`);
1143
+ }
1144
 
1145
+ // If a high-priority video arrives, the in-progress load may be interrupted
1146
  if (this._loadingInProgress) {
1147
+ if (priority === "high" || priority === "speaking") {
1148
+ this._loadingInProgress = false;
1149
+ // Clean up the pending event listeners on the inactive video
1150
+ this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1151
+ this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1152
+ this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1153
+ if (this._loadTimeout) {
1154
+ clearTimeout(this._loadTimeout);
1155
+ this._loadTimeout = null;
1156
+ }
1157
+ } else {
1158
+ return;
1159
+ }
1160
  }
1161
 
1162
  this._loadingInProgress = true;
1163
+
1164
+ // Clear all pending timers
1165
  clearTimeout(this.autoTransitionTimer);
1166
+ if (this._loadTimeout) {
1167
+ clearTimeout(this._loadTimeout);
1168
+ this._loadTimeout = null;
1169
+ }
1170
 
1171
+ const pref = this._prefetchCache.get(videoSrc);
1172
+ if (pref && (pref.readyState >= 2 || pref.buffered.length > 0)) {
1173
+ const source = this.inactiveVideo.querySelector("source");
1174
+ source.setAttribute("src", videoSrc);
1175
+ try {
1176
+ this.inactiveVideo.currentTime = 0;
1177
+ } catch {}
1178
+ this.inactiveVideo.load();
1179
+ } else {
1180
+ this.inactiveVideo.querySelector("source").setAttribute("src", videoSrc);
1181
+ this.inactiveVideo.load();
1182
+ }
1183
 
1184
+ // Stocker les références aux handlers pour pouvoir les nettoyer
1185
+ let fired = false;
1186
+ let errorCause = "error-event"; // will be overwritten if timeout based
1187
+ const onReady = () => {
1188
+ if (fired) return;
1189
+ fired = true;
1190
  this._loadingInProgress = false;
1191
  if (this._loadTimeout) {
1192
  clearTimeout(this._loadTimeout);
 
1195
  this.inactiveVideo.removeEventListener("canplay", this._currentLoadHandler);
1196
  this.inactiveVideo.removeEventListener("loadeddata", this._currentLoadHandler);
1197
  this.inactiveVideo.removeEventListener("error", this._currentErrorHandler);
1198
+ // Update rolling average load time
1199
+ const duration = performance.now() - startTs;
1200
+ this._loadTimeSamples.push(duration);
1201
  if (this._loadTimeSamples.length > this._maxSamples) this._loadTimeSamples.shift();
1202
+ const sum = this._loadTimeSamples.reduce((a, b) => a + b, 0);
1203
+ this._avgLoadTime = sum / this._loadTimeSamples.length;
1204
+ this._consecutiveErrorCount = 0; // reset on success
1205
  this.performSwitch();
1206
  };
1207
+ this._currentLoadHandler = onReady;
1208
 
1209
+ const folder = getCharacterInfo(this.characterName).videoFolder;
1210
+ // Rotating fallback pool (stable neutrals first positions)
1211
+ // Build or rebuild fallback pool when absent or when character changed
1212
+ if (!this._fallbackPool || this._fallbackPoolCharacter !== this.characterName) {
1213
+ const neutralList = (this.videoCategories && this.videoCategories.neutral) || [];
1214
+ // Choose first 3 as core reliable set; if less than 3 available, take all
1215
+ this._fallbackPool = neutralList.slice(0, 3);
1216
+ this._fallbackIndex = 0;
1217
+ this._fallbackPoolCharacter = this.characterName;
1218
+ }
1219
+ const fallbackVideo = this._fallbackPool[this._fallbackIndex % this._fallbackPool.length];
1220
+
1221
+ this._currentErrorHandler = e => {
1222
+ const mediaEl = this.inactiveVideo;
1223
+ const readyState = mediaEl ? mediaEl.readyState : -1;
1224
+ const networkState = mediaEl ? mediaEl.networkState : -1;
1225
+ let mediaErrorCode = null;
1226
+ if (mediaEl && mediaEl.error) mediaErrorCode = mediaEl.error.code;
1227
+ const stillLoading = !mediaEl?.error && networkState === 2;
1228
+ const realMediaError = !!mediaEl?.error;
1229
+ // Differentiate timeout vs real media error for clarity
1230
+ const tag = realMediaError
1231
+ ? "VideoLoadFail:media-error"
1232
+ : errorCause.startsWith("timeout")
1233
+ ? `VideoLoadFail:${errorCause}`
1234
+ : "VideoLoadFail:unknown";
1235
+ console.warn(
1236
+ `[${tag}] src=${videoSrc} readyState=${readyState} networkState=${networkState} mediaError=${mediaErrorCode} attempts=${attempts} fallback=${fallbackVideo}`
1237
+ );
1238
  this._loadingInProgress = false;
1239
  if (this._loadTimeout) {
1240
  clearTimeout(this._loadTimeout);
1241
  this._loadTimeout = null;
1242
  }
1243
+ // Only mark as failure if it is a real decode error OR several persistent timeouts
1244
+ if (realMediaError || (!stillLoading && errorCause.startsWith("timeout")) || attempts >= 3) {
1245
+ this._recentFailures.set(videoSrc, performance.now());
1246
+ this._consecutiveErrorCount++;
1247
+ }
1248
+ // Stop runaway fallback loop: pause if too many sequential errors relative to pool size
1249
+ if (this._fallbackPool && this._consecutiveErrorCount >= this._fallbackPool.length * 2) {
1250
+ console.error("Temporarily pausing fallback loop after repeated failures. Retrying in 2s.");
1251
+ setTimeout(() => {
1252
+ this._consecutiveErrorCount = 0;
1253
+ this.loadAndSwitchVideo(fallbackVideo, "high");
1254
+ }, 2000);
1255
+ return;
1256
+ }
1257
+ if (videoSrc !== fallbackVideo) {
1258
+ // Try fallback video
1259
+ this._fallbackIndex = (this._fallbackIndex + 1) % this._fallbackPool.length; // advance for next time
1260
+ this.loadAndSwitchVideo(fallbackVideo, "high");
1261
+ } else {
1262
+ // Ultimate fallback: try any neutral video
1263
+ console.error(`Fallback video also failed: ${fallbackVideo}. Trying ultimate fallback.`);
1264
+ const neutralVideos = this.videoCategories.neutral || [];
1265
+ if (neutralVideos.length > 0) {
1266
+ // Try a different neutral video
1267
+ const ultimateFallback = neutralVideos.find(video => video !== fallbackVideo);
1268
+ if (ultimateFallback) {
1269
+ this.loadAndSwitchVideo(ultimateFallback, "high");
1270
+ } else {
1271
+ // Last resort: try first neutral video anyway
1272
+ this.loadAndSwitchVideo(neutralVideos[0], "high");
1273
+ }
1274
+ } else {
1275
+ // Critical error: no neutral videos available
1276
+ console.error("CRITICAL: No neutral videos available!");
1277
+ this._switchInProgress = false;
1278
  }
1279
  }
1280
+ // Escalate diagnostics if many consecutive errors
1281
+ if (this._consecutiveErrorCount >= 3) {
1282
+ console.info(
1283
+ `Diagnostics: avgLoadTime=${this._avgLoadTime?.toFixed(1) || "n/a"}ms samples=${this._loadTimeSamples.length} prefetchCache=${this._prefetchCache.size}`
1284
+ );
1285
+ }
1286
  };
1287
 
1288
  this.inactiveVideo.addEventListener("loadeddata", this._currentLoadHandler, { once: true });
1289
  this.inactiveVideo.addEventListener("canplay", this._currentLoadHandler, { once: true });
1290
  this.inactiveVideo.addEventListener("error", this._currentErrorHandler, { once: true });
1291
 
1292
+ if (this.inactiveVideo.readyState >= 2) {
1293
+ queueMicrotask(() => onReady());
1294
+ }
1295
+
1296
+ // Dynamic timeout: refined formula avg*1.5 + buffer, bounded
1297
+ let adaptiveTimeout = this._minTimeout;
1298
+ if (this._avgLoadTime) {
1299
+ adaptiveTimeout = Math.min(this._maxTimeout, Math.max(this._minTimeout, this._avgLoadTime * 1.5 + 400));
1300
+ }
1301
+ // Cap by clip length ratio if we know (assume 10000ms default when metadata absent)
1302
+ const currentClipMs = 10000; // All clips are 10s
1303
+ adaptiveTimeout = Math.min(adaptiveTimeout, Math.floor(currentClipMs * this._timeoutCapRatio));
1304
+ // First ever attempt for a video: be more lenient if no historical avg yet
1305
+ if (attempts === 1 && !this._avgLoadTime) {
1306
+ adaptiveTimeout = Math.floor(adaptiveTimeout * 1.8); // ~5400ms instead of the typical 3000ms
1307
+ }
1308
  this._loadTimeout = setTimeout(() => {
1309
+ if (!fired) {
1310
+ // If metadata is there but not canplay yet, extend once
1311
+ if (this.inactiveVideo.readyState >= 1 && this.inactiveVideo.readyState < 2) {
1312
+ errorCause = "timeout-metadata";
1313
+ console.debug(
1314
+ `Extending timeout (metadata) for ${videoSrc} readyState=${this.inactiveVideo.readyState} +${this._timeoutExtension}ms`
1315
+ );
1316
+ this._loadTimeout = setTimeout(() => {
1317
+ if (!fired) {
1318
+ if (this.inactiveVideo.readyState >= 2) onReady();
1319
+ else this._currentErrorHandler();
1320
+ }
1321
+ }, this._timeoutExtension);
1322
+ return;
1323
+ }
1324
+ // Grace retry: still fetching over network (networkState=2) with no data (readyState=0)
1325
+ const maxGrace = 2; // allow up to two grace extensions
1326
+ if (
1327
+ this.inactiveVideo.networkState === 2 &&
1328
+ this.inactiveVideo.readyState === 0 &&
1329
+ (this._graceRetryCounts?.[videoSrc] || 0) < maxGrace
1330
+ ) {
1331
+ if (!this._graceRetryCounts) this._graceRetryCounts = {};
1332
+ this._graceRetryCounts[videoSrc] = (this._graceRetryCounts[videoSrc] || 0) + 1;
1333
+ const extra = this._timeoutExtension + 900;
1334
+ errorCause = "timeout-grace";
1335
+ console.debug(
1336
+ `Grace retry #${this._graceRetryCounts[videoSrc]} for ${videoSrc} (still NETWORK_LOADING). Ext +${extra}ms`
1337
+ );
1338
+ this._loadTimeout = setTimeout(() => {
1339
+ if (!fired) {
1340
+ if (this.inactiveVideo.readyState >= 2) onReady();
1341
+ else this._currentErrorHandler();
1342
+ }
1343
+ }, extra);
1344
+ return;
1345
+ }
1346
+ if (this.inactiveVideo.readyState >= 2) {
1347
+ onReady();
1348
+ } else {
1349
+ errorCause = errorCause === "error-event" ? "timeout-final" : errorCause;
1350
+ this._currentErrorHandler();
1351
+ }
1352
  }
1353
+ }, adaptiveTimeout);
1354
  }
1355
 
1356
  usePreloadedVideo(preloadedVideo, videoSrc) {
 
1376
  const fromVideo = this.activeVideo;
1377
  const toVideo = this.inactiveVideo;
1378
 
1379
+ // Perform a JS-managed crossfade for smoother transitions
1380
+ // Let crossfadeVideos resolve duration from CSS variable (--video-fade-duration)
1381
+ this.constructor.crossfadeVideos(fromVideo, toVideo, undefined, () => {
1382
+ // After crossfade completion, finalize state and classes
 
1383
  fromVideo.classList.remove("active");
1384
  toVideo.classList.add("active");
1385
 
 
1397
  const src = this.activeVideo?.querySelector("source")?.getAttribute("src");
1398
  const info = { context: this.currentContext, emotion: this.currentEmotion };
1399
  console.log("🎬 VideoManager: Now playing:", src, info);
1400
+ // Recompute autoTransitionDuration from actual duration if available (C)
1401
  try {
1402
  const d = this.activeVideo.duration;
1403
  if (!isNaN(d) && d > 0.5) {
1404
+ // Keep 1s headroom before natural end for auto scheduling
1405
  const target = Math.max(1000, d * 1000 - 1100);
1406
  this.autoTransitionDuration = target;
1407
  } else {
1408
+ this.autoTransitionDuration = 9900; // fallback for 10s clips
1409
  }
1410
+ // Dynamic neutral prefetch to widen diversity without burst
1411
  this._prefetchNeutralDynamic();
1412
  } catch {}
1413
  } catch {}
1414
  this._switchInProgress = false;
1415
  this.setupEventListenersForContext(this.currentContext);
 
1416
  })
1417
  .catch(error => {
1418
  console.warn("Failed to play video:", error);
 
1428
  this.setupEventListenersForContext(this.currentContext);
1429
  });
1430
  } else {
1431
+ // Non-promise play fallback
1432
  this._switchInProgress = false;
1433
  try {
1434
  const d = this.activeVideo.duration;
 
1436
  const target = Math.max(1000, d * 1000 - 1100);
1437
  this.autoTransitionDuration = target;
1438
  } else {
1439
+ this.autoTransitionDuration = 9900;
1440
  }
1441
  this._prefetchNeutralDynamic();
1442
  } catch {}
1443
  this.setupEventListenersForContext(this.currentContext);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1444
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1445
  });
1446
  }
1447
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1448
  _prefetchNeutralDynamic() {
1449
  try {
1450
  const neutrals = (this.videoCategories && this.videoCategories.neutral) || [];
 
1548
  };
1549
  }
1550
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1551
  // METHODS TO ANALYZE EMOTIONS FROM TEXT
1552
  // CLEANUP
1553
  destroy() {
kimi-js/kimi-voices.js CHANGED
@@ -498,10 +498,9 @@ class KimiVoiceManager {
498
  getVoicePreference(paramType, options = {}) {
499
  // Hierarchy: options > memory.preferences > kimiMemory.preferences > DOM element > default
500
  const defaults = {
501
- // Use nullish coalescing to preserve explicit 0 values in config
502
- rate: window.KIMI_CONFIG?.DEFAULTS?.VOICE_RATE ?? 1.1,
503
- pitch: window.KIMI_CONFIG?.DEFAULTS?.VOICE_PITCH ?? 1.1,
504
- volume: window.KIMI_CONFIG?.DEFAULTS?.VOICE_VOLUME ?? 0.8
505
  };
506
 
507
  const elementIds = {
@@ -696,14 +695,12 @@ class KimiVoiceManager {
696
  info.context === "dancing");
697
  if (!isEmotionClip) {
698
  requestAnimationFrame(() => {
699
- if (window.kimiVideo.ensureActivePlayback) window.kimiVideo.ensureActivePlayback();
700
- else window.kimiVideo.returnToNeutral();
701
  });
702
  }
703
  } catch (_) {
704
  requestAnimationFrame(() => {
705
- if (window.kimiVideo.ensureActivePlayback) window.kimiVideo.ensureActivePlayback();
706
- else window.kimiVideo.returnToNeutral();
707
  });
708
  }
709
  }
 
498
  getVoicePreference(paramType, options = {}) {
499
  // Hierarchy: options > memory.preferences > kimiMemory.preferences > DOM element > default
500
  const defaults = {
501
+ rate: window.KIMI_CONFIG?.DEFAULTS?.VOICE_RATE || 1.1,
502
+ pitch: window.KIMI_CONFIG?.DEFAULTS?.VOICE_PITCH || 1.1,
503
+ volume: window.KIMI_CONFIG?.DEFAULTS?.VOICE_VOLUME || 0.8
 
504
  };
505
 
506
  const elementIds = {
 
695
  info.context === "dancing");
696
  if (!isEmotionClip) {
697
  requestAnimationFrame(() => {
698
+ window.kimiVideo.returnToNeutral();
 
699
  });
700
  }
701
  } catch (_) {
702
  requestAnimationFrame(() => {
703
+ window.kimiVideo.returnToNeutral();
 
704
  });
705
  }
706
  }