aarnal80 committed on
Commit
30640f2
verified
1 Parent(s): de7af8e

Update js/iaConfigModule.js

Browse files
Files changed (1) hide show
  1. js/iaConfigModule.js +242 -198
js/iaConfigModule.js CHANGED
@@ -1,225 +1,269 @@
1
- // js/llmClient.js
2
- import { getIaConfig, llmProviders } from "./iaConfigModule.js";
3
-
4
/** Returns true when the model id belongs to the GPT-5 family (submodels and aliases included). */
function isGpt5(model = "") {
  const id = String(model || "").toLowerCase();
  return id.startsWith("gpt-5");
}
9
-
10
/**
 * Deletes the given keys from an object in place, at every nesting level
 * (descends into plain objects and into array elements).
 * @param {object} obj - Mutated in place; non-objects are ignored.
 * @param {string[]} keys - Property names to remove wherever they appear.
 */
function deepDeleteKeys(obj, keys = []) {
  if (!obj || typeof obj !== "object") return;
  Object.keys(obj).forEach((key) => {
    if (keys.includes(key)) {
      delete obj[key];
      return;
    }
    const value = obj[key];
    if (Array.isArray(value)) {
      value.forEach((entry) => deepDeleteKeys(entry, keys));
    } else if (value && typeof value === "object") {
      deepDeleteKeys(value, keys);
    }
  });
}
26
 
27
/**
 * Sanitizes a request payload for GPT-5 models by stripping parameters the
 * API rejects, at every nesting level. Mutates and returns the same object.
 */
function sanitizeForGpt5(payload) {
  // Creative/sampling knobs that some SDKs or flows re-inject.
  const unsupported = [
    "temperature",
    "top_p",
    "frequency_penalty",
    "presence_penalty",
    "best_of",
    "n",
    "logit_bias",
    // Some libraries nest config here:
    "generation_config"
  ];
  deepDeleteKeys(payload, unsupported);

  // Belt and braces: make sure generation_config is gone entirely.
  if (payload.generation_config) delete payload.generation_config;

  return payload;
}
48
 
49
/**
 * Builds the request body for OpenAI Chat Completions.
 * Sampling knobs are only attached for non-GPT-5 models; GPT-5 payloads are
 * deep-sanitized before being returned.
 */
function buildOpenAIChatBody(model, messages, options = {}) {
  const body = { model, messages };
  const gpt5 = isGpt5(model);

  if (!gpt5) {
    for (const key of ["temperature", "top_p", "frequency_penalty", "presence_penalty"]) {
      if (typeof options[key] === "number") body[key] = options[key];
    }
  }

  if (options.response_format) body.response_format = options.response_format;
  if (options.tools) body.tools = options.tools;
  if (options.tool_choice) body.tool_choice = options.tool_choice;

  return gpt5 ? sanitizeForGpt5(body) : body;
}
 
 
 
 
66
 
67
/**
 * Builds the request body for the OpenAI Responses API (used by flows that
 * pass `input` instead of `messages`). Same GPT-5 handling as the chat body.
 */
function buildOpenAIResponsesBody(model, input, options = {}) {
  const body = { model, input };
  const gpt5 = isGpt5(model);

  if (!gpt5) {
    for (const key of ["temperature", "top_p", "frequency_penalty", "presence_penalty"]) {
      if (typeof options[key] === "number") body[key] = options[key];
    }
  }

  if (options.response_format) body.response_format = options.response_format;
  if (options.tools) body.tools = options.tools;
  if (options.tool_choice) body.tool_choice = options.tool_choice;

  return gpt5 ? sanitizeForGpt5(body) : body;
}
84
 
85
/**
 * POSTs a JSON payload to an OpenAI-style endpoint, retrying once with a
 * minimal, ultra-sanitized payload when the API reports an unsupported
 * parameter (common with the GPT-5 family).
 *
 * Fix vs. original: a fetch Response body can only be consumed once. The old
 * code called res.json() and, on parse failure, res.text() on the SAME body
 * (throws "body already read"); the final error path then re-read it again.
 * We now read the error body exactly once as text and JSON.parse it.
 *
 * @param {string} url - Full endpoint URL.
 * @param {string} apiKey - Bearer token.
 * @param {object} body - Request payload; sanitized in place for GPT-5.
 * @param {string} model - Model id, used to decide sanitization/retry.
 * @returns {Promise<object>} Parsed JSON response.
 * @throws {Error} "OpenAI error <status>: <body>" when the request fails.
 */
async function hardenedPost(url, apiKey, body, model) {
  const headers = {
    "Authorization": `Bearer ${apiKey}`,
    "Content-Type": "application/json"
  };

  // Sanitize one last time right before sending.
  if (isGpt5(model)) sanitizeForGpt5(body);

  let res = await fetch(url, { method: "POST", headers, body: JSON.stringify(body) });

  if (!res.ok) {
    // Read the body ONCE; parse it as JSON if possible.
    const errText = await res.text();
    let errJson = null;
    try { errJson = JSON.parse(errText); } catch { /* non-JSON error body */ }

    const msg = errJson?.error?.message || errText || "";
    const code = errJson?.error?.code || "";
    const unsupported =
      msg.includes("Unsupported value") ||
      msg.includes("does not support") ||
      code === "unsupported_value";

    if (unsupported) {
      // Retry with a minimal payload (no creative params), ultra-sanitized.
      const minimal = isGpt5(model)
        ? sanitizeForGpt5({ ...body, model })
        : { ...body, model };
      for (const k of [
        "temperature", "top_p", "frequency_penalty", "presence_penalty",
        "best_of", "n", "logit_bias", "generation_config"
      ]) {
        delete minimal[k];
      }

      res = await fetch(url, { method: "POST", headers, body: JSON.stringify(minimal) });
    }

    if (!res.ok) {
      // After a retry the body is fresh; without one, reuse the text we read.
      const finalTxt = unsupported ? await res.text() : errText;
      throw new Error(`OpenAI error ${res.status}: ${finalTxt}`);
    }
  }

  return res.json();
}
142
-
143
/** Calls OpenAI Chat Completions; resolves to { text, raw }. */
async function callOpenAIChat({ apiKey, model, messages, options }) {
  const openai = llmProviders.find((p) => p.value === "openai");
  const url = `${openai?.url || "https://api.openai.com"}/v1/chat/completions`;

  const payload = buildOpenAIChatBody(model, messages, options);
  const data = await hardenedPost(url, apiKey, payload, model);
  return { text: data?.choices?.[0]?.message?.content ?? "", raw: data };
}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
 
155
/** Calls the OpenAI Responses API (used when the caller passes `input`); resolves to { text, raw }. */
async function callOpenAIResponses({ apiKey, model, input, options }) {
  const openai = llmProviders.find((p) => p.value === "openai");
  const url = `${openai?.url || "https://api.openai.com"}/v1/responses`;

  const payload = buildOpenAIResponsesBody(model, input, options);
  const data = await hardenedPost(url, apiKey, payload, model);

  // Normalize the various response shapes down to plain text.
  const text =
    data?.output?.[0]?.content?.[0]?.text ??
    data?.content?.[0]?.text ??
    data?.choices?.[0]?.message?.content ??
    "";
  return { text, raw: data };
}
171
 
172
/** Calls the DeepSeek chat endpoint; resolves to { text, raw }. */
async function callDeepSeekChat({ apiKey, model, messages, options }) {
  const deepseek = llmProviders.find((p) => p.value === "deepseek");
  const url = `${deepseek?.url || "https://api.deepseek.com"}/chat/completions`;

  const body = { model, messages };
  // DeepSeek accepts the standard sampling knobs; copy only numeric values.
  for (const key of ["temperature", "top_p", "frequency_penalty", "presence_penalty"]) {
    if (typeof options?.[key] === "number") body[key] = options[key];
  }

  const res = await fetch(url, {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${apiKey}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify(body)
  });

  if (!res.ok) {
    const txt = await res.text();
    throw new Error(`DeepSeek error ${res.status}: ${txt}`);
  }

  const data = await res.json();
  return { text: data?.choices?.[0]?.message?.content ?? "", raw: data };
}
202
 
203
/**
 * Single entry point for LLM calls. Reads the stored IA config and dispatches
 * to the configured provider.
 * @param {Array<object>} messages - Chat messages (ignored when options.input is present for OpenAI).
 * @param {object} [options] - Sampling/options bag; may carry `input` to route through the Responses API.
 * @returns {Promise<{text: string, raw: object}>}
 * @throws {Error} when the provider or its API key is missing, or the provider is unknown.
 */
export async function callLLM(messages, options = {}) {
  const cfg = getIaConfig();
  const { provider, model } = cfg.llm;
  if (!provider) throw new Error("Proveedor LLM no configurado.");

  const apiKey = cfg.llm.apiKeys?.[provider];
  if (!apiKey) throw new Error(`API Key no configurada para proveedor '${provider}'.`);

  switch (provider) {
    case "openai":
      // Callers that pass "input" (even explicitly undefined) use the Responses API.
      if (options && Object.prototype.hasOwnProperty.call(options, "input")) {
        return await callOpenAIResponses({ apiKey, model, input: options.input, options });
      }
      return await callOpenAIChat({ apiKey, model, messages, options });
    case "deepseek":
      return await callDeepSeekChat({ apiKey, model, messages, options });
    default:
      throw new Error(`Proveedor no soportado: ${provider}`);
  }
}
 
1
// js/iaConfigModule.js

// Factory defaults used when nothing is stored yet or storage is corrupt.
// Keys are per-provider maps so switching providers never loses credentials.
const defaultConfig = {
  llm: {
    provider: "deepseek",
    apiKeys: { deepseek: "", openai: "" },
    model: "deepseek-chat"
  },
  transcription: {
    provider: "openai",
    apiKeys: { openai: "", deepgram: "" },
    models: { openai: "whisper-1", deepgram: "nova-2" }
  }
};

// Updated model list (2025).
// Each entry: display name, stable identifier (`value`), selectable model ids,
// and the API base URL used by the client code.
export const llmProviders = [
  {
    name: "OpenAI",
    value: "openai",
    models: [
      "gpt-5",
      "gpt-5-mini",
      "gpt-5-nano",
      "gpt-5-chat-latest",
      "gpt-4o-mini-2024-07-18",
      "chatgpt-4o-latest",
      "o1-mini-2024-09-12",
      "o4-mini-2025-04-16"
    ],
    url: "https://api.openai.com"
  },
  {
    name: "DeepSeek",
    value: "deepseek",
    models: ["deepseek-chat", "deepseek-reasoner"],
    url: "https://api.deepseek.com"
  }
];

// Speech-to-text providers; same shape as llmProviders.
export const transcriptionProviders = [
  { name: "OpenAI Whisper", value: "openai", models: ["whisper-1"], url: "https://api.openai.com" },
  { name: "Deepgram", value: "deepgram", models: ["nova-2", "whisper-large"], url: "https://api.deepgram.com" }
];
44
+
45
/** Persists the given config object to localStorage under the "iaConfig" key. */
function saveConfig(config) {
  try {
    const serialized = JSON.stringify(config);
    localStorage.setItem("iaConfig", serialized);
  } catch (e) {
    // Storage may be full or unavailable (e.g. private mode); log and carry on.
    console.error("[iaConfigModule] Error guardando config:", e);
  }
}
52
 
53
/**
 * Deep-copies a config object. Prefers structuredClone; falls back to a JSON
 * round-trip, which is sufficient for this module's plain serializable data.
 */
function clone(obj) {
  try {
    return structuredClone(obj);
  } catch {
    return JSON.parse(JSON.stringify(obj));
  }
}
56
 
57
/**
 * Loads the IA config from localStorage, applying in-place migrations from
 * older schemas, and returns it. Falls back to a fresh copy of defaultConfig
 * when nothing is stored or the stored JSON is corrupt.
 *
 * Fixes vs. original: (1) the OpenAI branch called saveConfig() on EVERY load
 * even when no migration happened, writing localStorage on each getIaConfig();
 * (2) stored configs missing the `llm`/`transcription` sections crashed the
 * migration checks. We now backfill missing sections and persist exactly once,
 * only when something changed.
 */
function loadConfig() {
  let config;
  try {
    config = JSON.parse(localStorage.getItem("iaConfig")) || clone(defaultConfig);
  } catch {
    config = clone(defaultConfig);
  }

  // Backfill sections absent from very old / partial stored configs.
  if (!config.llm || typeof config.llm !== "object") config.llm = clone(defaultConfig.llm);
  if (!config.transcription || typeof config.transcription !== "object") {
    config.transcription = clone(defaultConfig.transcription);
  }

  let migrated = false;

  // Migration: flat transcription apiKey/model -> per-provider maps.
  if (config.transcription.apiKey !== undefined) {
    const oldKey = config.transcription.apiKey;
    const oldModel = config.transcription.model;
    config.transcription.apiKeys = { [config.transcription.provider]: oldKey, deepgram: "" };
    config.transcription.models = { [config.transcription.provider]: oldModel, deepgram: "nova-2" };
    delete config.transcription.apiKey;
    delete config.transcription.model;
    migrated = true;
  }

  // Migration: flat LLM apiKey -> per-provider apiKeys map.
  if (config.llm.apiKey !== undefined) {
    const old = config.llm.apiKey;
    config.llm.apiKeys = { ...defaultConfig.llm.apiKeys, [config.llm.provider]: old };
    delete config.llm.apiKey;
    migrated = true;
  }

  // Migration: obsolete DeepSeek model ids.
  if (config.llm.provider === "deepseek" && (config.llm.model === "deepseek-v3" || config.llm.model === "deepseek-llm")) {
    config.llm.model = "deepseek-chat";
    console.log("[iaConfigModule] Migrado modelo DeepSeek a deepseek-chat");
    migrated = true;
  }

  // Migration: old invented GPT-5 ids -> official ones.
  if (config.llm.provider === "openai") {
    if (config.llm.model === "gpt-5-2025-05-01") {
      config.llm.model = "gpt-5";
      migrated = true;
    }
    if (config.llm.model === "gpt-5-mini-2025-05-01") {
      config.llm.model = "gpt-5-mini";
      migrated = true;
    }
  }

  // Persist once, and only when a migration actually changed something.
  if (migrated) saveConfig(config);

  return config;
}
100
/** Public accessor: returns the current (already-migrated) IA configuration. */
export function getIaConfig() {
  const config = loadConfig();
  return config;
}
103
 
104
/**
 * Renders the IA provider configuration form inside the given container and
 * wires up all of its interactions: provider/model selects, masked API-key
 * inputs with show/hide toggles, and persistence on submit (dispatches the
 * "iaConfigChanged" event and closes the #configModal overlay).
 *
 * Fix vs. original: user-facing Spanish strings were mojibake-corrupted
 * (UTF-8 read with the wrong encoding, e.g. "Transcripci贸n",
 * "隆Configuraci贸n guardada!"); they are restored to proper Spanish.
 *
 * @param {string} containerId - id of the element to render the form into.
 */
export function renderIaConfigForm(containerId) {
  let config = loadConfig();
  const container = document.getElementById(containerId);
  if (!container) {
    console.error(`[iaConfigModule] No se encontró el contenedor '${containerId}'`);
    document.body.insertAdjacentHTML(
      "beforeend",
      `<div style='color:red'>[Error] No se encontró el contenedor '${containerId}' para la configuración IA.</div>`
    );
    return;
  }

  // Masks a stored key for display: short keys become all asterisks, longer
  // ones show the first 3 and last 4 characters. The submit handler compares
  // the field against this mask to detect whether the user typed a new key.
  function maskApiKey(key) {
    if (!key) return "";
    if (key.length <= 8) return "*".repeat(key.length);
    return key.substring(0, 3) + "-****" + key.slice(-4);
  }

  container.innerHTML = `
    <div class="flex justify-between items-center mb-6 border-b pb-2 border-blue-100">
      <h2 class="text-xl font-bold text-blue-700 flex items-center">
        <i class='fas fa-cogs mr-2'></i>Configurar Proveedores IA
      </h2>
      <button id="btnCloseConfig" type="button" class="text-gray-500 hover:text-blue-600 text-2xl focus:outline-none" aria-label="Cerrar">
        <i class="fas fa-times"></i>
      </button>
    </div>
    <form id="iaConfigForm" class="space-y-6">
      <div class="bg-blue-50 p-4 rounded-lg border border-blue-100 mb-2">
        <label class="block font-semibold text-blue-800 mb-2">Proveedor LLM</label>
        <select id="llmProvider" class="w-full mb-3 p-2 rounded border border-gray-300 focus:ring-2 focus:ring-blue-300">
          ${llmProviders.map(p => `<option value="${p.value}">${p.name}</option>`).join("")}
        </select>
        <div class="flex items-center mb-3">
          <input type="password" id="llmApiKey" class="flex-1 p-2 rounded border border-gray-300 mr-2 bg-gray-100" placeholder="API Key LLM" autocomplete="off">
          <button class="text-blue-700 hover:text-blue-900 px-3 py-2 rounded focus:outline-none border border-blue-200 bg-white" type="button" id="toggleLlmApiKey">
            <i class="fas fa-eye"></i>
          </button>
        </div>
        <select id="llmModel" class="w-full p-2 rounded border border-gray-300 focus:ring-2 focus:ring-blue-300"></select>
      </div>
      <div class="bg-purple-50 p-4 rounded-lg border border-purple-100 mb-2">
        <label class="block font-semibold text-purple-800 mb-2">Proveedor Transcripción</label>
        <select id="transProvider" class="w-full mb-3 p-2 rounded border border-gray-300 focus:ring-2 focus:ring-purple-300">
          ${transcriptionProviders.map(p => `<option value="${p.value}">${p.name}</option>`).join("")}
        </select>
        <div class="flex items-center mb-3">
          <input type="password" id="transApiKey" class="flex-1 p-2 rounded border border-gray-300 mr-2 bg-gray-100" placeholder="API Key Transcripción" autocomplete="off">
          <button class="text-purple-700 hover:text-purple-900 px-3 py-2 rounded focus:outline-none border border-purple-200 bg-white" type="button" id="toggleTransApiKey">
            <i class="fas fa-eye"></i>
          </button>
        </div>
        <select id="transModel" class="w-full p-2 rounded border border-gray-300 focus:ring-2 focus:ring-purple-300"></select>
      </div>
      <button type="submit" class="w-full bg-blue-600 hover:bg-blue-700 text-white font-semibold py-3 rounded-lg shadow transition-colors flex items-center justify-center text-lg">
        <i class="fas fa-save mr-2"></i>Guardar configuración
      </button>
    </form>
  `;

  const closeBtn = document.getElementById("btnCloseConfig");
  if (closeBtn) {
    closeBtn.addEventListener("click", () => {
      const modal = document.getElementById("configModal");
      if (modal) modal.classList.remove("active");
    });
  }

  // Initial values (keys rendered masked).
  document.getElementById("llmProvider").value = config.llm.provider;
  document.getElementById("llmApiKey").value = maskApiKey(config.llm.apiKeys[config.llm.provider] || "");
  document.getElementById("transProvider").value = config.transcription.provider;
  document.getElementById("transApiKey").value = maskApiKey(config.transcription.apiKeys[config.transcription.provider] || "");

  // Show/hide toggles for the API-key fields.
  document.getElementById("toggleLlmApiKey").addEventListener("click", () => {
    const input = document.getElementById("llmApiKey");
    input.type = input.type === "password" ? "text" : "password";
  });
  document.getElementById("toggleTransApiKey").addEventListener("click", () => {
    const input = document.getElementById("transApiKey");
    input.type = input.type === "password" ? "text" : "password";
  });

  // Refreshes the LLM model <select> for the chosen provider; if the stored
  // model is not offered by that provider, falls back to its first model and
  // persists the change.
  function updateLlmModels() {
    const prov = document.getElementById("llmProvider").value;
    const providerObj = llmProviders.find(p => p.value === prov);
    const models = providerObj.models;
    const sel = document.getElementById("llmModel");
    sel.innerHTML = models.map(m => `<option value="${m}">${m}</option>`).join("");

    if (!models.includes(config.llm.model)) {
      config.llm.model = models[0];
      saveConfig(config);
    }
    sel.value = config.llm.model;
  }

  // Refreshes the transcription model <select> and the masked key field for
  // the chosen transcription provider.
  function updateTransModels() {
    const prov = document.getElementById("transProvider").value;
    const providerObj = transcriptionProviders.find(p => p.value === prov);
    const models = providerObj.models;
    const sel = document.getElementById("transModel");
    sel.innerHTML = models.map(m => `<option value="${m}">${m}</option>`).join("");
    sel.value = config.transcription.models[prov] || models[0];
    document.getElementById("transApiKey").value = maskApiKey(config.transcription.apiKeys[prov] || "");
  }

  document.getElementById("llmProvider").addEventListener("change", () => {
    const p = document.getElementById("llmProvider").value;
    updateLlmModels();
    // Re-read storage so the key field shows the latest saved key for `p`.
    const fresh = loadConfig();
    const keyEl = document.getElementById("llmApiKey");
    if (keyEl) keyEl.value = maskApiKey(fresh.llm.apiKeys[p] || "");
  });
  document.getElementById("transProvider").addEventListener("change", updateTransModels);

  updateLlmModels();
  updateTransModels();

  document.getElementById("iaConfigForm").addEventListener("submit", e => {
    e.preventDefault();
    const prev = config;
    const newConfig = clone(prev);

    // If the field still holds the mask, the user didn't type a new key.
    const llProv = document.getElementById("llmProvider").value;
    const rawKey = document.getElementById("llmApiKey").value;
    const oldKey = prev.llm.apiKeys[llProv] || "";
    const newKey = rawKey === maskApiKey(oldKey) ? oldKey : rawKey;

    newConfig.llm = { ...prev.llm, provider: llProv, model: document.getElementById("llmModel").value };
    newConfig.llm.apiKeys = { ...prev.llm.apiKeys, [llProv]: newKey };

    const tp = document.getElementById("transProvider").value;
    const rawKeyTrans = document.getElementById("transApiKey").value;
    const existingKeyTrans = prev.transcription.apiKeys[tp] || "";
    const actualKeyTrans = rawKeyTrans === maskApiKey(existingKeyTrans) ? existingKeyTrans : rawKeyTrans;

    newConfig.transcription.provider = tp;
    newConfig.transcription.apiKeys = { ...prev.transcription.apiKeys, [tp]: actualKeyTrans };
    newConfig.transcription.models = { ...prev.transcription.models, [tp]: document.getElementById("transModel").value };

    saveConfig(newConfig);
    config = newConfig;
    document.dispatchEvent(new CustomEvent("iaConfigChanged"));

    // Re-mask the fields and force password mode after saving.
    document.getElementById("llmApiKey").value = maskApiKey(newConfig.llm.apiKeys[newConfig.llm.provider] || "");
    document.getElementById("transApiKey").value = maskApiKey(newConfig.transcription.apiKeys[tp] || "");
    document.getElementById("llmApiKey").type = "password";
    document.getElementById("transApiKey").type = "password";

    // Transient "saved" toast, reused across submits.
    let msg = document.getElementById('iaConfigSavedMsg');
    if (!msg) {
      msg = document.createElement('div');
      msg.id = 'iaConfigSavedMsg';
      msg.className = 'fixed left-1/2 top-6 -translate-x-1/2 bg-green-500 text-white px-6 py-3 rounded shadow text-lg z-50';
      msg.innerHTML = '<i class="fas fa-check-circle mr-2"></i>¡Configuración guardada!';
      document.body.appendChild(msg);
    } else {
      msg.style.display = 'block';
    }
    setTimeout(() => { msg.style.display = 'none'; }, 1600);

    const modal = document.getElementById("configModal");
    if (modal) modal.classList.remove("active");
  });
}