arshenoy committed on
Commit
1fbc07b
·
verified ·
1 Parent(s): b7c3149

Update services/geminiService.ts

Browse files
Files changed (1) hide show
  1. services/geminiService.ts +345 -351
services/geminiService.ts CHANGED
@@ -3,18 +3,18 @@ import { PatientProfile, ClinicalVitals, AppMode, RiskAnalysisResult, ChatMessag
3
 
4
  // --- API KEY & CONFIG ---
5
  const getApiKey = () => {
6
- try {
7
- // @ts-ignore
8
- if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.VITE_API_KEY) {
9
- // @ts-ignore
10
- return import.meta.env.VITE_API_KEY;
11
- }
12
- } catch (e) {}
13
-
14
- if (typeof process !== 'undefined' && process.env && process.env.API_KEY) {
15
- return process.env.API_KEY;
16
- }
17
- return '';
18
  };
19
 
20
  // --- BACKEND CONFIGURATION ---
@@ -22,403 +22,397 @@ const getApiKey = () => {
22
  const PRIMARY_API_BASE = 'https://arshenoy-somai-backend.hf.space';
23
 
24
  // Secondary Backend (Media - Moondream/Whisper)
25
- // Defaults to primary unless VITE_MEDIA_API_URL is set (for split architecture)
 
26
  const getMediaApiBase = () => {
27
- try {
28
- // @ts-ignore
29
- if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.VITE_MEDIA_API_URL) {
30
- // @ts-ignore
31
- return import.meta.env.VITE_MEDIA_API_URL;
32
- }
33
- } catch (e) {}
34
- return PRIMARY_API_BASE;
35
  };
36
-
37
  const MEDIA_API_BASE = getMediaApiBase();
 
38
  const API_KEY = getApiKey();
39
  const ai = new GoogleGenAI({ apiKey: API_KEY });
40
 
41
  // --- TIERED MODEL STRATEGY ---
42
- const MODEL_TIER_1 = 'gemini-2.5-flash-lite';
43
- const MODEL_TIER_2 = 'gemini-2.5-flash';
44
  const MODEL_TTS = 'gemini-2.5-flash-preview-tts';
45
 
46
  const cleanText = (text: string) => {
47
- if (!text) return "";
48
- return text.replace(/\*\*/g, '').replace(/###/g, '').replace(/\*/g, '-').trim();
49
  };
50
 
51
  const compressImage = async (base64Str: string, maxWidth = 800): Promise<string> => {
52
- return new Promise((resolve) => {
53
- const img = new Image();
54
- img.src = base64Str;
55
- img.onload = () => {
56
- const canvas = document.createElement('canvas');
57
- let width = img.width;
58
- let height = img.height;
59
- if (width > maxWidth) {
60
- height = (height * maxWidth) / width;
61
- width = maxWidth;
62
- }
63
- canvas.width = width;
64
- canvas.height = height;
65
- const ctx = canvas.getContext('2d');
66
- ctx?.drawImage(img, 0, 0, width, height);
67
- resolve(canvas.toDataURL('image/jpeg', 0.7));
68
- };
69
- img.onerror = () => resolve(base64Str);
70
- });
 
 
71
  };
72
 
73
  export const wakeUpBackend = async () => {
74
- try {
75
- // Ping both potential backends to warm them up
76
- fetch(`${PRIMARY_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
77
- if (PRIMARY_API_BASE !== MEDIA_API_BASE) {
78
- fetch(`${MEDIA_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
79
- }
80
- } catch (e) {}
81
  };
82
 
83
- // Generic Fallback Caller with Status Updates
84
  const callBackend = async (baseUrl: string, endpoint: string, payload: any, onStatus?: (msg: string) => void): Promise<string> => {
85
- const url = `${baseUrl}${endpoint}`;
86
- console.info(`[SomAI] Calling Backend: ${url}`);
87
- if (onStatus) onStatus("🐢 Switching to local backup...");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
88
 
89
- const makeRequest = async (retries = 2) => {
90
- const controller = new AbortController();
91
- const timeoutId = setTimeout(() => controller.abort(), 90000); // 90s timeout for CPU
92
-
93
- try {
94
- const response = await fetch(url, {
95
- method: 'POST',
96
- mode: 'cors',
97
- credentials: 'omit',
98
- headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' },
99
- body: JSON.stringify(payload),
100
- signal: controller.signal
101
- });
102
- clearTimeout(timeoutId);
103
-
104
- if (!response.ok && (response.status === 503 || response.status === 504) && retries > 0) {
105
- if (onStatus) onStatus(`💤 Backend waking up... (${retries} retries left)`);
106
- await new Promise(r => setTimeout(r, 5000));
107
- return makeRequest(retries - 1);
108
- }
109
-
110
- if (!response.ok) {
111
- const err = await response.text().catch(() => "Unknown");
112
- throw new Error(`API Error ${response.status}: ${err.substring(0, 50)}`);
113
- }
114
-
115
- const data = await response.json();
116
-
117
- if (typeof data === 'string') return data;
118
- if (data.text) return data.text;
119
- if (data.response) return data.response;
120
- if (data.generated_text) return data.generated_text;
121
-
122
- return JSON.stringify(data);
123
-
124
- } catch (error: any) {
125
- clearTimeout(timeoutId);
126
- if (retries > 0 && (error.name === 'AbortError' || error.message.includes('Failed'))) {
127
- if (onStatus) onStatus("📡 Connection unstable, retrying...");
128
- await new Promise(r => setTimeout(r, 5000));
129
- return makeRequest(retries - 1);
130
- }
131
- throw error;
132
- }
133
- };
134
 
135
- try { return await makeRequest(); } catch (error) { throw error; }
 
 
 
 
 
 
 
 
 
 
 
 
 
136
  };
137
 
138
  const parseRiskResponse = (text: string, calculatedScore: number): RiskAnalysisResult => {
139
- try {
140
- let jsonStr = text;
141
- const codeBlockMatch = text.match(/```json\s*(\{[\s\S]*?\})\s*```/);
142
- if (codeBlockMatch) jsonStr = codeBlockMatch[1];
143
- else {
144
- const braceMatch = text.match(/\{[\s\S]*\}/);
145
- if (braceMatch) jsonStr = braceMatch[0];
146
- }
147
- const data = JSON.parse(jsonStr);
148
- const pipeline = [
149
- { code: data.primaryConditionCode?.code || "N/A", description: data.primaryConditionCode?.description || "Unknown", type: 'Primary' },
150
- ...(data.historyCodes || []).map((h: any) => ({ code: h.code, description: h.description, type: 'History' }))
151
- ];
152
- return {
153
- numericScore: calculatedScore,
154
- summary: cleanText(data.summary || "Analysis completed."),
155
- actionItems: (data.actionItems || []).map(cleanText),
156
- icd10Codes: [],
157
- codingPipeline: pipeline as any,
158
- insuranceNote: cleanText(data.insuranceNote || "Review required."),
159
- timestamp: new Date().toISOString()
160
- };
161
- } catch (e) {
162
- return {
163
- numericScore: calculatedScore,
164
- summary: cleanText(text).substring(0, 500) || "Analysis currently unavailable.",
165
- actionItems: ["Review inputs", "Consult provider"],
166
- icd10Codes: [],
167
- codingPipeline: [],
168
- insuranceNote: "Automated analysis fallback.",
169
- timestamp: new Date().toISOString()
170
- }
171
- }
 
 
 
172
  };
173
 
174
  // --- VISION EXTRACTION ---
175
  export const extractClinicalData = async (imageBase64: string, onStatus?: (msg: string) => void): Promise<ExtractionResult> => {
176
- const base64Data = imageBase64.includes('base64,') ? imageBase64.split('base64,')[1] : imageBase64;
177
- const prompt = `Analyze this medical document. CRITICAL: Look for Patient Name. Extract JSON: { name, age, condition, history, allergies, systolicBp, glucose, heartRate, weight, temperature, spo2, clinicalNote }. Return JSON only.`;
178
-
179
- const callGeminiVision = async (modelName: string) => {
180
- if (onStatus) onStatus(`⚡ Scanning with ${modelName}...`);
181
- const response = await ai.models.generateContent({
182
- model: modelName,
183
- contents: [{ role: 'user', parts: [{ text: prompt }, { inlineData: { mimeType: 'image/jpeg', data: base64Data } }] }],
184
- config: { responseMimeType: "application/json", maxOutputTokens: 2000 }
185
- });
186
- const text = response.text || "{}";
187
- const data = JSON.parse(text);
188
- return {
189
- profile: { name: data.name, age: data.age, condition: data.condition, history: data.history, allergies: data.allergies },
190
- vitals: { systolicBp: data.systolicBp, glucose: data.glucose, heartRate: data.heartRate, weight: data.weight, temperature: data.temperature, spo2: data.spo2, clinicalNote: data.clinicalNote },
191
- confidence: 0.9
192
- };
193
- };
194
-
195
- try {
196
- if (!API_KEY) throw new Error("API Key missing");
197
- return await callGeminiVision(MODEL_TIER_1);
198
- } catch (e: any) {
199
- if (e.toString().includes('429') || e.toString().includes('Quota')) {
200
- try {
201
- return await callGeminiVision(MODEL_TIER_2);
202
- } catch (e2) {}
203
- }
204
 
205
- // Fallback: Moondream on Media Backend
206
- try {
207
- if (onStatus) onStatus("🐢 Compressing for Moondream...");
208
- const compressedBase64 = await compressImage(imageBase64);
209
- const cleanBase64 = compressedBase64.includes('base64,') ? compressedBase64.split('base64,')[1] : compressedBase64;
210
-
211
- if (onStatus) onStatus("🐢 Using Local Vision Node...");
212
- const resText = await callBackend(MEDIA_API_BASE, '/vision', { image: cleanBase64, prompt: "Extract patient name and vitals from this document." }, onStatus);
213
-
214
- return {
215
- profile: {},
216
- vitals: { clinicalNote: `[Auto-Scanned]: ${resText}` },
217
- confidence: 0.6
218
- }
219
- } catch (fallbackError) {
220
- throw new Error("Scan failed. Please type details manually.");
 
 
 
 
 
 
 
221
  }
222
- }
 
 
 
223
  };
224
 
225
  export const generateSpeech = async (text: string): Promise<string | null> => {
226
- if (!API_KEY) return null;
227
- try {
228
- const response = await ai.models.generateContent({
229
- model: MODEL_TTS,
230
- contents: [{ parts: [{ text }] }],
231
- config: {
232
- responseModalities: ['AUDIO'],
233
- speechConfig: { voiceConfig: { prebuiltVoiceConfig: { voiceName: 'Fenrir' } } },
234
- },
235
- });
236
- return response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data || null;
237
- } catch (e) {
238
- return null;
239
- }
240
  };
241
 
242
  export const transcribeAudio = async (audioBlob: Blob): Promise<string> => {
243
- const reader = new FileReader();
244
- return new Promise((resolve, reject) => {
245
- reader.onloadend = async () => {
246
- const base64 = (reader.result as string).split(',')[1];
247
- try {
248
- // Whisper calls go to Media Backend
249
- const text = await callBackend(MEDIA_API_BASE, '/transcribe', { audio: base64 });
250
- resolve(text);
251
- } catch (e) { reject("Voice transcription failed."); }
252
- };
253
- reader.readAsDataURL(audioBlob);
254
- });
255
  };
256
 
257
  // --- RISK ANALYSIS ---
258
  export const analyzeRisk = async (
259
- profile: PatientProfile,
260
- vitals: ClinicalVitals,
261
- calculatedScore: number,
262
- onStatus?: (msg: string) => void
263
  ): Promise<RiskAnalysisResult> => {
264
- const prompt = `
265
- Act as a Senior Clinical Risk Assessor.
266
- Patient: ${profile.name} (${profile.age}, ${profile.gender}). Condition: ${profile.condition}.
267
- History: ${profile.history}.
268
- Vitals: BP ${vitals.systolicBp}, Glucose ${vitals.glucose}, SpO2 ${vitals.spo2}%.
269
- Note: ${vitals.clinicalNote}.
270
- Task: 1. Summary. 2. 3 Action Items. 3. ICD-10 Pipeline (Condition, History). 4. Insurance Note.
271
- Return JSON.
272
- `;
273
-
274
- const callGeminiRisk = async (modelName: string) => {
275
- if (onStatus) onStatus(`⚡ Analyzing with ${modelName}...`);
276
- const response = await ai.models.generateContent({
277
- model: modelName,
278
- contents: prompt,
279
- config: {
280
- responseMimeType: "application/json",
281
- maxOutputTokens: 4000,
282
- responseSchema: {
283
- type: Type.OBJECT,
284
- properties: {
285
- summary: { type: Type.STRING },
286
- actionItems: { type: Type.ARRAY, items: { type: Type.STRING } },
287
- primaryConditionCode: { type: Type.OBJECT, properties: { code: {type: Type.STRING}, description: {type: Type.STRING} } },
288
- historyCodes: { type: Type.ARRAY, items: { type: Type.OBJECT, properties: { code: {type: Type.STRING}, description: {type: Type.STRING} } } },
289
- insuranceNote: { type: Type.STRING }
290
- },
291
- required: ["summary", "actionItems", "primaryConditionCode", "historyCodes", "insuranceNote"]
292
- }
293
- }
294
- });
295
- return { ...parseRiskResponse(response.text || "{}", calculatedScore), source: modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash' };
296
- };
297
-
298
- try {
299
- if (!API_KEY) throw new Error("API Key missing");
300
- return await callGeminiRisk(MODEL_TIER_1);
301
- } catch (err: any) {
302
- if (err.toString().includes('429') || err.toString().includes('Quota')) {
303
- try { return await callGeminiRisk(MODEL_TIER_2); } catch (e2) {}
304
- }
305
 
306
- try {
307
- const payload = { ...profile, ...vitals, riskScore: calculatedScore, prompt };
308
- // Fallback goes to Primary Backend (Text Node)
309
- const fallback = await callBackend(PRIMARY_API_BASE, '/analyze', payload, onStatus);
310
- return {
311
- ...parseRiskResponse(fallback, calculatedScore),
312
- source: 'Phi-3 Mini (Fallback)'
313
- };
314
- } catch {
315
- throw new Error("Analysis failed");
316
- }
317
- }
 
 
 
 
 
 
 
 
318
  };
319
 
320
  export const generateHealthInsights = async (profile: PatientProfile, vitals: ClinicalVitals): Promise<HealthInsights> => {
321
- const prompt = `Based on Patient: ${profile.name}, ${profile.age}y, ${profile.condition}. Vitals: BP ${vitals.systolicBp}. Generate JSON: { weeklySummary, progress, tips: [] }.`;
322
-
323
- const callGeminiInsights = async (model: string) => {
324
- const response = await ai.models.generateContent({
325
- model: model,
326
- contents: prompt,
327
- config: { responseMimeType: "application/json", maxOutputTokens: 2000 }
328
- });
329
- return JSON.parse(response.text || "{}");
330
- }
331
-
332
- try {
333
- if (!API_KEY) throw new Error("No Key");
334
- return await callGeminiInsights(MODEL_TIER_1);
335
- } catch (err: any) {
336
- if (err.toString().includes('429')) {
337
- try { return await callGeminiInsights(MODEL_TIER_2); } catch (e) {}
338
- }
339
- return { weeklySummary: "Keep tracking your vitals.", progress: "Data accumulated.", tips: ["Maintain a balanced diet.", "Stay hydrated."] };
340
- }
341
  };
342
 
343
  export const generateSessionName = async (userText: string, aiText: string): Promise<string> => {
344
- const prompt = `Generate a very short, specific title (max 4 words) for a medical chat session based on this context. User: ${userText}. AI: ${aiText}. Title:`;
345
- try {
346
- if (!API_KEY) return "New Consultation";
347
- const response = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { maxOutputTokens: 20 } });
348
- return cleanText(response.text || "New Consultation").replace(/^["']|["']$/g, '');
349
- } catch (e) {
350
- return "New Consultation";
351
- }
352
  };
353
 
354
  // --- CHAT ---
355
  export const generateChatResponse = async (
356
- history: ChatMessage[],
357
- currentMessage: string,
358
- image: string | undefined,
359
- profile: PatientProfile,
360
- mode: AppMode,
361
- onSource: (source: string) => void,
362
- onStatus?: (msg: string) => void
363
  ): Promise<string> => {
364
- const context = `
365
- Patient: ${profile.name} (${profile.age}y).
366
- Condition: ${profile.condition}. History: ${profile.history}.
367
- Tone: ${mode === AppMode.THERAPY ? 'Empathetic CBT' : 'Medical Guide'}.
368
- Format: Plain text. No markdown.
369
- `;
370
-
371
- const contents = history.map(msg => ({ role: msg.role === 'user' ? 'user' : 'model', parts: [{ text: msg.text }, ...(msg.image ? [{ inlineData: { mimeType: 'image/jpeg', data: msg.image.split('base64,')[1] } }] : [])] }));
372
- contents.push({ role: 'user', parts: [{ text: context + "\nUser: " + currentMessage }, ...(image ? [{ inlineData: { mimeType: 'image/jpeg', data: image.split('base64,')[1] } }] : [])] });
373
-
374
- const callGeminiChat = async (modelName: string) => {
375
- if (onStatus) onStatus(`Generating with ${modelName}...`);
376
- onSource(modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash');
377
- const response = await ai.models.generateContent({
378
- model: modelName,
379
- contents: contents,
380
- config: { maxOutputTokens: 4000, temperature: 0.7 }
381
- });
382
- return cleanText(response.text || "I didn't catch that.");
383
- };
384
-
385
- try {
386
- if (!API_KEY) throw new Error("No Key");
387
- return await callGeminiChat(MODEL_TIER_1);
388
- } catch (e: any) {
389
- if (e.toString().includes('429') || e.toString().includes('Quota')) {
390
- try {
391
- return await callGeminiChat(MODEL_TIER_2);
392
- } catch (e2) {}
393
- }
394
 
395
- try {
396
- if (onStatus) onStatus("Falling back to Local Phi-3...");
397
- onSource('Phi-3 Mini (Fallback)');
398
- const fallbackPrompt = `${context}\n\nChat History:\n${history.slice(-3).map(m => m.text).join('\n')}\nUser: ${currentMessage}`;
399
- // Fallback goes to Primary Backend (Text Node)
400
- const responseText = await callBackend(PRIMARY_API_BASE, '/generate', { prompt: fallbackPrompt }, onStatus);
401
- return cleanText(responseText);
402
- } catch {
403
- return "I'm having trouble connecting. Please check your internet.";
404
- }
405
- }
 
 
 
 
 
 
 
 
 
 
406
  };
407
 
408
  export const generateQuickReplies = async (history: ChatMessage[]) => {
409
- if (!API_KEY || history.length === 0) return [];
410
- const recentContext = history.slice(-3).map(m => `${m.role}: ${m.text}`).join('\n');
411
- const prompt = `Based on: ${recentContext}. Suggest 3 short follow-up questions. JSON array.`;
412
- try {
413
- const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { responseMimeType: "application/json" } });
414
- return JSON.parse(res.text || "[]");
415
- } catch { return []; }
 
416
  };
417
 
418
  export const summarizeConversation = async (history: ChatMessage[]) => {
419
- if (!API_KEY) return "Summary unavailable.";
420
- try {
421
- const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: `Summarize:\n${history.map(m=>m.text).join('\n')}` });
422
- return cleanText(res.text || "");
423
- } catch { return "Could not summarize."; }
424
  };
 
3
 
4
  // --- API KEY & CONFIG ---
5
  const getApiKey = () => {
6
+ try {
7
+ // @ts-ignore
8
+ if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.VITE_API_KEY) {
9
+ // @ts-ignore
10
+ return import.meta.env.VITE_API_KEY;
11
+ }
12
+ } catch (e) {}
13
+
14
+ if (typeof process !== 'undefined' && process.env && process.env.API_KEY) {
15
+ return process.env.API_KEY;
16
+ }
17
+ return '';
18
  };
19
 
20
  // --- BACKEND CONFIGURATION ---
 
22
  const PRIMARY_API_BASE = 'https://arshenoy-somai-backend.hf.space';
23
 
24
  // Secondary Backend (Media - Moondream/Whisper)
25
+ // If you create a new space, put its URL here (e.g. via VITE_MEDIA_API_URL env var),
26
+ // otherwise it defaults to the primary one.
27
  const getMediaApiBase = () => {
28
+ try {
29
+ // @ts-ignore
30
+ if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.VITE_MEDIA_API_URL) {
31
+ // @ts-ignore
32
+ return import.meta.env.VITE_MEDIA_API_URL;
33
+ }
34
+ } catch (e) {}
35
+ return PRIMARY_API_BASE;
36
  };
 
37
  const MEDIA_API_BASE = getMediaApiBase();
38
+
39
  const API_KEY = getApiKey();
40
  const ai = new GoogleGenAI({ apiKey: API_KEY });
41
 
42
  // --- TIERED MODEL STRATEGY ---
43
+ const MODEL_TIER_1 = 'gemini-2.5-flash-lite';
44
+ const MODEL_TIER_2 = 'gemini-2.5-flash';
45
  const MODEL_TTS = 'gemini-2.5-flash-preview-tts';
46
 
47
  const cleanText = (text: string) => {
48
+ if (!text) return "";
49
+ return text.replace(/\*\*/g, '').replace(/###/g, '').replace(/\*/g, '-').trim();
50
  };
51
 
52
  const compressImage = async (base64Str: string, maxWidth = 800): Promise<string> => {
53
+ return new Promise((resolve) => {
54
+ const img = new Image();
55
+ img.src = base64Str;
56
+ img.onload = () => {
57
+ const canvas = document.createElement('canvas');
58
+ let width = img.width;
59
+ let height = img.height;
60
+
61
+ if (width > maxWidth) {
62
+ height = (height * maxWidth) / width;
63
+ width = maxWidth;
64
+ }
65
+
66
+ canvas.width = width;
67
+ canvas.height = height;
68
+ const ctx = canvas.getContext('2d');
69
+ ctx?.drawImage(img, 0, 0, width, height);
70
+ resolve(canvas.toDataURL('image/jpeg', 0.7));
71
+ };
72
+ img.onerror = () => resolve(base64Str);
73
+ });
74
  };
75
 
76
  export const wakeUpBackend = async () => {
77
+ try {
78
+ // Ping both potential backends
79
+ fetch(`${PRIMARY_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
80
+ if (PRIMARY_API_BASE !== MEDIA_API_BASE) {
81
+ fetch(`${MEDIA_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
82
+ }
83
+ } catch (e) {}
84
  };
85
 
86
+ // Generic Fallback Caller
87
  const callBackend = async (baseUrl: string, endpoint: string, payload: any, onStatus?: (msg: string) => void): Promise<string> => {
88
+ const url = `${baseUrl}${endpoint}`;
89
+ console.info(`[SomAI] Calling Backend: ${url}`);
90
+ if (onStatus) onStatus("🐢 Switching to local backup...");
91
+
92
+ const makeRequest = async (retries = 2) => {
93
+ const controller = new AbortController();
94
+ const timeoutId = setTimeout(() => controller.abort(), 90000); // 90s timeout for CPU
95
+
96
+ try {
97
+ const response = await fetch(url, {
98
+ method: 'POST',
99
+ mode: 'cors',
100
+ credentials: 'omit',
101
+ headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' },
102
+ body: JSON.stringify(payload),
103
+ signal: controller.signal
104
+ });
105
+ clearTimeout(timeoutId);
106
+
107
+ if (!response.ok && (response.status === 503 || response.status === 504) && retries > 0) {
108
+ if (onStatus) onStatus(`💤 Backend waking up... (${retries} retries left)`);
109
+ await new Promise(r => setTimeout(r, 5000));
110
+ return makeRequest(retries - 1);
111
+ }
112
+
113
+ if (!response.ok) {
114
+ const err = await response.text().catch(() => "Unknown");
115
+ throw new Error(`API Error ${response.status}: ${err.substring(0, 50)}`);
116
+ }
117
+
118
+ const data = await response.json();
119
 
120
+ if (typeof data === 'string') return data;
121
+ if (data.text) return data.text;
122
+ if (data.response) return data.response;
123
+ if (data.generated_text) return data.generated_text;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
 
125
+ return JSON.stringify(data);
126
+
127
+ } catch (error: any) {
128
+ clearTimeout(timeoutId);
129
+ if (retries > 0 && (error.name === 'AbortError' || error.message.includes('Failed'))) {
130
+ if (onStatus) onStatus("📡 Connection unstable, retrying...");
131
+ await new Promise(r => setTimeout(r, 5000));
132
+ return makeRequest(retries - 1);
133
+ }
134
+ throw error;
135
+ }
136
+ };
137
+
138
+ try { return await makeRequest(); } catch (error) { throw error; }
139
  };
140
 
141
  const parseRiskResponse = (text: string, calculatedScore: number): RiskAnalysisResult => {
142
+ try {
143
+ let jsonStr = text;
144
+ const codeBlockMatch = text.match(/json\s*(\{[\s\S]*?\})\s*/);
145
+ if (codeBlockMatch) jsonStr = codeBlockMatch[1];
146
+ else {
147
+ const braceMatch = text.match(/{[\s\S]*}/);
148
+ if (braceMatch) jsonStr = braceMatch[0];
149
+ }
150
+
151
+ const data = JSON.parse(jsonStr);
152
+
153
+ const pipeline = [
154
+ { code: data.primaryConditionCode?.code || "N/A", description: data.primaryConditionCode?.description || "Unknown", type: 'Primary' },
155
+ ...(data.historyCodes || []).map((h: any) => ({ code: h.code, description: h.description, type: 'History' }))
156
+ ];
157
+
158
+ return {
159
+ numericScore: calculatedScore,
160
+ summary: cleanText(data.summary || "Analysis completed."),
161
+ actionItems: (data.actionItems || []).map(cleanText),
162
+ icd10Codes: [],
163
+ codingPipeline: pipeline as any,
164
+ insuranceNote: cleanText(data.insuranceNote || "Review required."),
165
+ timestamp: new Date().toISOString()
166
+ };
167
+ } catch (e) {
168
+ return {
169
+ numericScore: calculatedScore,
170
+ summary: cleanText(text).substring(0, 500) || "Analysis currently unavailable.",
171
+ actionItems: ["Review inputs", "Consult provider"],
172
+ icd10Codes: [],
173
+ codingPipeline: [],
174
+ insuranceNote: "Automated analysis fallback.",
175
+ timestamp: new Date().toISOString()
176
+ }
177
+ }
178
  };
179
 
180
  // --- VISION EXTRACTION ---
181
  export const extractClinicalData = async (imageBase64: string, onStatus?: (msg: string) => void): Promise<ExtractionResult> => {
182
+ const base64Data = imageBase64.includes('base64,') ? imageBase64.split('base64,')[1] : imageBase64;
183
+ const prompt = `Analyze this medical document. CRITICAL: Look for Patient Name. Extract JSON: { name, age, condition, history, allergies, systolicBp, glucose, heartRate, weight, temperature, spo2, clinicalNote }. Return JSON only.`;
184
+
185
+ const callGeminiVision = async (modelName: string) => {
186
+ if (onStatus) onStatus(`⚡ Scanning with ${modelName}...`);
187
+ const response = await ai.models.generateContent({
188
+ model: modelName,
189
+ contents: [{ role: 'user', parts: [{ text: prompt }, { inlineData: { mimeType: 'image/jpeg', data: base64Data } }] }],
190
+ config: { responseMimeType: "application/json", maxOutputTokens: 2000 }
191
+ });
192
+ const text = response.text || "{}";
193
+ const data = JSON.parse(text);
194
+ return {
195
+ profile: { name: data.name, age: data.age, condition: data.condition, history: data.history, allergies: data.allergies },
196
+ vitals: { systolicBp: data.systolicBp, glucose: data.glucose, heartRate: data.heartRate, weight: data.weight, temperature: data.temperature, spo2: data.spo2, clinicalNote: data.clinicalNote },
197
+ confidence: 0.9
198
+ };
199
+ };
 
 
 
 
 
 
 
 
 
 
200
 
201
+ try {
202
+ if (!API_KEY) throw new Error("API Key missing");
203
+ return await callGeminiVision(MODEL_TIER_1);
204
+ } catch (e: any) {
205
+ if (e.toString().includes('429') || e.toString().includes('Quota')) {
206
+ try {
207
+ return await callGeminiVision(MODEL_TIER_2);
208
+ } catch (e2) {}
209
+ }
210
+
211
+ // Fallback: Moondream on Media Backend
212
+ try {
213
+ if (onStatus) onStatus("🐢 Compressing for Moondream...");
214
+ const compressedBase64 = await compressImage(imageBase64);
215
+ const cleanBase64 = compressedBase64.includes('base64,') ? compressedBase64.split('base64,')[1] : compressedBase64;
216
+
217
+ if (onStatus) onStatus("🐢 Using Local Vision Node...");
218
+ const resText = await callBackend(MEDIA_API_BASE, '/vision', { image: cleanBase64, prompt: "Extract patient name and vitals from this document." }, onStatus);
219
+
220
+ return {
221
+ profile: {},
222
+ vitals: { clinicalNote: `[Auto-Scanned]: ${resText}` },
223
+ confidence: 0.6
224
  }
225
+ } catch (fallbackError) {
226
+ throw new Error("Scan failed. Please type details manually.");
227
+ }
228
+ }
229
  };
230
 
231
  export const generateSpeech = async (text: string): Promise<string | null> => {
232
+ if (!API_KEY) return null;
233
+ try {
234
+ const response = await ai.models.generateContent({
235
+ model: MODEL_TTS,
236
+ contents: [{ parts: [{ text }] }],
237
+ config: {
238
+ responseModalities: ['AUDIO'],
239
+ speechConfig: { voiceConfig: { prebuiltVoiceConfig: { voiceName: 'Fenrir' } } },
240
+ },
241
+ });
242
+ return response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data || null;
243
+ } catch (e) {
244
+ return null;
245
+ }
246
  };
247
 
248
  export const transcribeAudio = async (audioBlob: Blob): Promise<string> => {
249
+ const reader = new FileReader();
250
+ return new Promise((resolve, reject) => {
251
+ reader.onloadend = async () => {
252
+ const base64 = (reader.result as string).split(',')[1];
253
+ try {
254
+ // Whisper calls go to Media Backend
255
+ const text = await callBackend(MEDIA_API_BASE, '/transcribe', { audio: base64 });
256
+ resolve(text);
257
+ } catch (e) { reject("Voice transcription failed."); }
258
+ };
259
+ reader.readAsDataURL(audioBlob);
260
+ });
261
  };
262
 
263
  // --- RISK ANALYSIS ---
264
  export const analyzeRisk = async (
265
+ profile: PatientProfile,
266
+ vitals: ClinicalVitals,
267
+ calculatedScore: number,
268
+ onStatus?: (msg: string) => void
269
  ): Promise<RiskAnalysisResult> => {
270
+ const prompt = `Act as a Senior Clinical Risk Assessor. Patient: ${profile.name} (${profile.age}, ${profile.gender}). Condition: ${profile.condition}. History: ${profile.history}. Vitals: BP ${vitals.systolicBp}, Glucose ${vitals.glucose}, SpO2 ${vitals.spo2}%. Note: ${vitals.clinicalNote}. Task: 1. Summary. 2. 3 Action Items. 3. ICD-10 Pipeline (Condition, History). 4. Insurance Note. Return JSON.`;
271
+
272
+ const callGeminiRisk = async (modelName: string) => {
273
+ if (onStatus) onStatus(`⚡ Analyzing with ${modelName}...`);
274
+ const response = await ai.models.generateContent({
275
+ model: modelName,
276
+ contents: prompt,
277
+ config: {
278
+ responseMimeType: "application/json",
279
+ maxOutputTokens: 4000,
280
+ responseSchema: {
281
+ type: Type.OBJECT,
282
+ properties: {
283
+ summary: { type: Type.STRING },
284
+ actionItems: { type: Type.ARRAY, items: { type: Type.STRING } },
285
+ primaryConditionCode: { type: Type.OBJECT, properties: { code: {type: Type.STRING}, description: {type: Type.STRING} } },
286
+ historyCodes: { type: Type.ARRAY, items: { type: Type.OBJECT, properties: { code: {type: Type.STRING}, description: {type: Type.STRING} } } },
287
+ insuranceNote: { type: Type.STRING }
288
+ },
289
+ required: ["summary", "actionItems", "primaryConditionCode", "historyCodes", "insuranceNote"]
290
+ }
291
+ }
292
+ });
293
+ return { ...parseRiskResponse(response.text || "{}", calculatedScore), source: modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash' };
294
+ };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
295
 
296
+ try {
297
+ if (!API_KEY) throw new Error("API Key missing");
298
+ return await callGeminiRisk(MODEL_TIER_1);
299
+ } catch (err: any) {
300
+ if (err.toString().includes('429') || err.toString().includes('Quota')) {
301
+ try { return await callGeminiRisk(MODEL_TIER_2); } catch (e2) {}
302
+ }
303
+
304
+ try {
305
+ const payload = { ...profile, ...vitals, riskScore: calculatedScore, prompt };
306
+ // Fallback goes to Primary Backend (Text Node)
307
+ const fallback = await callBackend(PRIMARY_API_BASE, '/analyze', payload, onStatus);
308
+ return {
309
+ ...parseRiskResponse(fallback, calculatedScore),
310
+ source: 'Phi-3 Mini (Fallback)'
311
+ };
312
+ } catch {
313
+ throw new Error("Analysis failed");
314
+ }
315
+ }
316
  };
317
 
318
  export const generateHealthInsights = async (profile: PatientProfile, vitals: ClinicalVitals): Promise<HealthInsights> => {
319
+ const prompt = `Based on Patient: ${profile.name}, ${profile.age}y, ${profile.condition}. Vitals: BP ${vitals.systolicBp}. Generate JSON: { weeklySummary, progress, tips: [] }.`;
320
+
321
+ const callGeminiInsights = async (model: string) => {
322
+ const response = await ai.models.generateContent({
323
+ model: model,
324
+ contents: prompt,
325
+ config: { responseMimeType: "application/json", maxOutputTokens: 2000 }
326
+ });
327
+ return JSON.parse(response.text || "{}");
328
+ }
329
+
330
+ try {
331
+ if (!API_KEY) throw new Error("No Key");
332
+ return await callGeminiInsights(MODEL_TIER_1);
333
+ } catch (err: any) {
334
+ if (err.toString().includes('429')) {
335
+ try { return await callGeminiInsights(MODEL_TIER_2); } catch (e) {}
336
+ }
337
+ return { weeklySummary: "Keep tracking your vitals.", progress: "Data accumulated.", tips: ["Maintain a balanced diet.", "Stay hydrated."] };
338
+ }
339
  };
340
 
341
  export const generateSessionName = async (userText: string, aiText: string): Promise<string> => {
342
+ const prompt = `Generate a very short, specific title (max 4 words) for a medical chat session based on this context. User: ${userText}. AI: ${aiText}. Title:`;
343
+ try {
344
+ if (!API_KEY) return "New Consultation";
345
+ const response = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { maxOutputTokens: 20 } });
346
+ return cleanText(response.text || "New Consultation").replace(/^["']|["']$/g, '');
347
+ } catch (e) {
348
+ return "New Consultation";
349
+ }
350
  };
351
 
352
  // --- CHAT ---
353
  export const generateChatResponse = async (
354
+ history: ChatMessage[],
355
+ currentMessage: string,
356
+ image: string | undefined,
357
+ profile: PatientProfile,
358
+ mode: AppMode,
359
+ onSource: (source: string) => void,
360
+ onStatus?: (msg: string) => void
361
  ): Promise<string> => {
362
+ const context = `Patient: ${profile.name} (${profile.age}y). Condition: ${profile.condition}. History: ${profile.history}. Tone: ${mode === AppMode.THERAPY ? 'Empathetic CBT' : 'Medical Guide'}. Format: Plain text. No markdown.`;
363
+
364
+ const contents = history.map(msg => ({ role: msg.role === 'user' ? 'user' : 'model', parts: [{ text: msg.text }, ...(msg.image ? [{ inlineData: { mimeType: 'image/jpeg', data: msg.image.split('base64,')[1] } }] : [])] }));
365
+ contents.push({ role: 'user', parts: [{ text: context + "\nUser: " + currentMessage }, ...(image ? [{ inlineData: { mimeType: 'image/jpeg', data: image.split('base64,')[1] } }] : [])] });
366
+
367
+ const callGeminiChat = async (modelName: string) => {
368
+ if (onStatus) onStatus(`Generating with ${modelName}...`);
369
+ onSource(modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash');
370
+ const response = await ai.models.generateContent({
371
+ model: modelName,
372
+ contents: contents,
373
+ config: { maxOutputTokens: 4000, temperature: 0.7 }
374
+ });
375
+ return cleanText(response.text || "I didn't catch that.");
376
+ };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
377
 
378
+ try {
379
+ if (!API_KEY) throw new Error("No Key");
380
+ return await callGeminiChat(MODEL_TIER_1);
381
+ } catch (e: any) {
382
+ if (e.toString().includes('429') || e.toString().includes('Quota')) {
383
+ try {
384
+ return await callGeminiChat(MODEL_TIER_2);
385
+ } catch (e2) {}
386
+ }
387
+
388
+ try {
389
+ if (onStatus) onStatus("Falling back to Local Phi-3...");
390
+ onSource('Phi-3 Mini (Fallback)');
391
+ const fallbackPrompt = `${context}\n\nChat History:\n${history.slice(-3).map(m => m.text).join('\n')}\nUser: ${currentMessage}`;
392
+ // Fallback goes to Primary Backend (Text Node)
393
+ const responseText = await callBackend(PRIMARY_API_BASE, '/generate', { prompt: fallbackPrompt }, onStatus);
394
+ return cleanText(responseText);
395
+ } catch {
396
+ return "I'm having trouble connecting. Please check your internet.";
397
+ }
398
+ }
399
  };
400
 
401
  export const generateQuickReplies = async (history: ChatMessage[]) => {
402
+ if (!API_KEY || history.length === 0) return [];
403
+ const recentContext = history.slice(-3).map(m => `${m.role}: ${m.text}`).join('\n');
404
+ const prompt = `Based on: ${recentContext}. Suggest 3 short follow-up questions. JSON array.`;
405
+
406
+ try {
407
+ const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { responseMimeType: "application/json" } });
408
+ return JSON.parse(res.text || "[]");
409
+ } catch { return []; }
410
  };
411
 
412
  export const summarizeConversation = async (history: ChatMessage[]) => {
413
+ if (!API_KEY) return "Summary unavailable.";
414
+ try {
415
+ const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: `Summarize:\n${history.map(m=>m.text).join('\n')}` });
416
+ return cleanText(res.text || "");
417
+ } catch { return "Could not summarize."; }
418
  };