arshenoy committed on
Commit
936f565
·
verified ·
1 Parent(s): a0f40b8

updated with rollback logic

Browse files
Files changed (1) hide show
  1. services/geminiService.ts +85 -69
services/geminiService.ts CHANGED
@@ -1,7 +1,7 @@
1
  import { GoogleGenAI, Type } from "@google/genai";
2
  import { PatientProfile, ClinicalVitals, AppMode, RiskAnalysisResult, ChatMessage, ExtractionResult, HealthInsights } from "../types";
3
 
4
- // --- API KEY & CLIENT INITIALIZATION ---
5
  const getApiKey = () => {
6
  try {
7
  // @ts-ignore
@@ -17,20 +17,34 @@ const getApiKey = () => {
17
  return '';
18
  };
19
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  const API_KEY = getApiKey();
21
  const ai = new GoogleGenAI({ apiKey: API_KEY });
22
 
23
  // --- TIERED MODEL STRATEGY ---
24
- // Tier 1: 1,000 RPD (Primary - Fast & High Quota)
25
  const MODEL_TIER_1 = 'gemini-2.5-flash-lite';
26
- // Tier 2: 250 RPD (Backup - Very Reliable)
27
  const MODEL_TIER_2 = 'gemini-2.5-flash';
28
- // TTS Model
29
  const MODEL_TTS = 'gemini-2.5-flash-preview-tts';
30
 
31
- const FALLBACK_API_BASE = 'https://arshenoy-somai-backend.hf.space';
32
-
33
- // Cleaning for final blocks
34
  const cleanText = (text: string) => {
35
  if (!text) return "";
36
  return text.replace(/\*\*/g, '').replace(/###/g, '').replace(/\*/g, '-').trim();
@@ -44,12 +58,10 @@ const compressImage = async (base64Str: string, maxWidth = 800): Promise<string>
44
  const canvas = document.createElement('canvas');
45
  let width = img.width;
46
  let height = img.height;
47
-
48
  if (width > maxWidth) {
49
  height = (height * maxWidth) / width;
50
  width = maxWidth;
51
  }
52
-
53
  canvas.width = width;
54
  canvas.height = height;
55
  const ctx = canvas.getContext('2d');
@@ -62,19 +74,26 @@ const compressImage = async (base64Str: string, maxWidth = 800): Promise<string>
62
 
63
// Opportunistically warm up the fallback backend (a cold-started HF Space
// begins booting on its first hit). Failures are deliberately swallowed so
// app startup is never blocked by an unreachable backend.
export const wakeUpBackend = async () => {
  try {
    await fetch(FALLBACK_API_BASE + '/', { method: 'GET', mode: 'cors' });
  } catch (e) {
    // Best-effort ping — ignore any network error.
  }
};
68
 
69
- const callFallbackAPI = async (endpoint: string, payload: any): Promise<string> => {
70
- console.info(`[SomAI System] Switching to Fallback API (Tier 3): ${FALLBACK_API_BASE}${endpoint}`);
 
 
 
71
 
72
  const makeRequest = async (retries = 2) => {
73
  const controller = new AbortController();
74
- const timeoutId = setTimeout(() => controller.abort(), 90000);
75
 
76
  try {
77
- const response = await fetch(`${FALLBACK_API_BASE}${endpoint}`, {
78
  method: 'POST',
79
  mode: 'cors',
80
  credentials: 'omit',
@@ -85,6 +104,7 @@ const callFallbackAPI = async (endpoint: string, payload: any): Promise<string>
85
  clearTimeout(timeoutId);
86
 
87
  if (!response.ok && (response.status === 503 || response.status === 504) && retries > 0) {
 
88
  await new Promise(r => setTimeout(r, 5000));
89
  return makeRequest(retries - 1);
90
  }
@@ -106,6 +126,7 @@ const callFallbackAPI = async (endpoint: string, payload: any): Promise<string>
106
  } catch (error: any) {
107
  clearTimeout(timeoutId);
108
  if (retries > 0 && (error.name === 'AbortError' || error.message.includes('Failed'))) {
 
109
  await new Promise(r => setTimeout(r, 5000));
110
  return makeRequest(retries - 1);
111
  }
@@ -152,16 +173,13 @@ const parseRiskResponse = (text: string, calculatedScore: number): RiskAnalysisR
152
  }
153
  };
154
 
155
- // --- UPDATED: VISION EXTRACTION (TIER 1 -> TIER 2 -> FALLBACK) ---
156
- export const extractClinicalData = async (imageBase64: string): Promise<ExtractionResult> => {
157
  const base64Data = imageBase64.includes('base64,') ? imageBase64.split('base64,')[1] : imageBase64;
158
- const prompt = `Analyze this medical document.
159
- CRITICAL: Look for the Patient's Name at the top, headers, or labeled 'Patient', 'Name', 'Mr/Mrs'.
160
- Extract JSON: { name, age, condition, history, allergies, systolicBp, glucose, heartRate, weight, temperature, spo2, clinicalNote }.
161
- If name is missing, use "Guest". Return JSON only.`;
162
 
163
  const callGeminiVision = async (modelName: string) => {
164
- console.log(`[Vision] Trying ${modelName}...`);
165
  const response = await ai.models.generateContent({
166
  model: modelName,
167
  contents: [{ role: 'user', parts: [{ text: prompt }, { inlineData: { mimeType: 'image/jpeg', data: base64Data } }] }],
@@ -180,18 +198,21 @@ export const extractClinicalData = async (imageBase64: string): Promise<Extracti
180
  if (!API_KEY) throw new Error("API Key missing");
181
  return await callGeminiVision(MODEL_TIER_1);
182
  } catch (e: any) {
183
- console.warn(`[Vision] Tier 1 Failed: ${e.message}. Trying Tier 2...`);
184
- try {
185
- return await callGeminiVision(MODEL_TIER_2);
186
- } catch (e2) {
187
- console.warn(`[Vision] Tier 2 Failed. Trying Tier 3 (Backend)...`);
188
  }
189
 
190
- // 3. Fallback (Moondream)
191
  try {
 
192
  const compressedBase64 = await compressImage(imageBase64);
193
  const cleanBase64 = compressedBase64.includes('base64,') ? compressedBase64.split('base64,')[1] : compressedBase64;
194
- const resText = await callFallbackAPI('/vision', { image: cleanBase64, prompt: "Extract patient name and vitals from this document in JSON format." });
 
 
 
195
  return {
196
  profile: {},
197
  vitals: { clinicalNote: `[Auto-Scanned]: ${resText}` },
@@ -203,7 +224,6 @@ export const extractClinicalData = async (imageBase64: string): Promise<Extracti
203
  }
204
  };
205
 
206
- // NEW: Gemini TTS Service
207
  export const generateSpeech = async (text: string): Promise<string | null> => {
208
  if (!API_KEY) return null;
209
  try {
@@ -217,7 +237,6 @@ export const generateSpeech = async (text: string): Promise<string | null> => {
217
  });
218
  return response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data || null;
219
  } catch (e) {
220
- console.warn("TTS Failed", e);
221
  return null;
222
  }
223
  };
@@ -228,7 +247,8 @@ export const transcribeAudio = async (audioBlob: Blob): Promise<string> => {
228
  reader.onloadend = async () => {
229
  const base64 = (reader.result as string).split(',')[1];
230
  try {
231
- const text = await callFallbackAPI('/transcribe', { audio: base64 });
 
232
  resolve(text);
233
  } catch (e) { reject("Voice transcription failed."); }
234
  };
@@ -236,21 +256,25 @@ export const transcribeAudio = async (audioBlob: Blob): Promise<string> => {
236
  });
237
  };
238
 
239
- // --- UPDATED: RISK ANALYSIS (TIER 1 -> TIER 2 -> FALLBACK) ---
240
- export const analyzeRisk = async (profile: PatientProfile, vitals: ClinicalVitals, calculatedScore: number): Promise<RiskAnalysisResult> => {
 
 
 
 
 
241
  const prompt = `
242
  Act as a Senior Clinical Risk Assessor.
243
  Patient: ${profile.name} (${profile.age}, ${profile.gender}). Condition: ${profile.condition}.
244
- History: ${profile.history}. Surgeries: ${profile.surgeries}. Family History: ${profile.familyHistory}.
245
- Lifestyle: Diet-${profile.diet}, Exercise-${profile.exerciseFrequency}, Smoke-${profile.smokingStatus}, Alcohol-${profile.alcoholConsumption}.
246
- Vitals: BP Morning ${vitals.systolicBpMorning} / Evening ${vitals.systolicBpEvening}. Glucose ${vitals.glucose}. HR ${vitals.heartRate}. SpO2 ${vitals.spo2}%. Temp ${vitals.temperature}F. Weight ${vitals.weight}kg.
247
  Note: ${vitals.clinicalNote}.
248
- Task: 1. Summary (Risk level). 2. 3 Action Items. 3. ICD-10 Pipeline (Condition, History, Symptoms). 4. Insurance Note.
249
  Return JSON.
250
  `;
251
 
252
  const callGeminiRisk = async (modelName: string) => {
253
- console.log(`[Risk] Trying ${modelName}...`);
254
  const response = await ai.models.generateContent({
255
  model: modelName,
256
  contents: prompt,
@@ -277,16 +301,14 @@ export const analyzeRisk = async (profile: PatientProfile, vitals: ClinicalVital
277
  if (!API_KEY) throw new Error("API Key missing");
278
  return await callGeminiRisk(MODEL_TIER_1);
279
  } catch (err: any) {
280
- console.warn(`[Risk] Tier 1 Failed: ${err.message}. Trying Tier 2...`);
281
- try {
282
- return await callGeminiRisk(MODEL_TIER_2);
283
- } catch (e2) {
284
- console.warn(`[Risk] Tier 2 Failed. Trying Tier 3 (Backend)...`);
285
  }
286
 
287
  try {
288
  const payload = { ...profile, ...vitals, riskScore: calculatedScore, prompt };
289
- const fallback = await callFallbackAPI('/analyze', payload);
 
290
  return {
291
  ...parseRiskResponse(fallback, calculatedScore),
292
  source: 'Phi-3 Mini (Fallback)'
@@ -298,7 +320,7 @@ export const analyzeRisk = async (profile: PatientProfile, vitals: ClinicalVital
298
  };
299
 
300
  export const generateHealthInsights = async (profile: PatientProfile, vitals: ClinicalVitals): Promise<HealthInsights> => {
301
- const prompt = `Based on Patient: ${profile.name}, ${profile.age}y, ${profile.condition}. Vitals: BP ${vitals.systolicBp}, SpO2 ${vitals.spo2}%. Generate JSON: { weeklySummary, progress, tips: [] }.`;
302
 
303
  const callGeminiInsights = async (model: string) => {
304
  const response = await ai.models.generateContent({
@@ -313,7 +335,9 @@ export const generateHealthInsights = async (profile: PatientProfile, vitals: Cl
313
  if (!API_KEY) throw new Error("No Key");
314
  return await callGeminiInsights(MODEL_TIER_1);
315
  } catch (err: any) {
316
- try { return await callGeminiInsights(MODEL_TIER_2); } catch (e) {}
 
 
317
  return { weeklySummary: "Keep tracking your vitals.", progress: "Data accumulated.", tips: ["Maintain a balanced diet.", "Stay hydrated."] };
318
  }
319
  };
@@ -325,38 +349,32 @@ export const generateSessionName = async (userText: string, aiText: string): Pro
325
  const response = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { maxOutputTokens: 20 } });
326
  return cleanText(response.text || "New Consultation").replace(/^["']|["']$/g, '');
327
  } catch (e) {
328
- try {
329
- const fallbackRes = await callFallbackAPI('/generate', { prompt: prompt });
330
- return cleanText(fallbackRes).replace(/^["']|["']$/g, '');
331
- } catch { return "New Consultation"; }
332
  }
333
  };
334
 
335
- // --- UPDATED: CHAT (TIER 1 -> TIER 2 -> FALLBACK) ---
336
  export const generateChatResponse = async (
337
  history: ChatMessage[],
338
  currentMessage: string,
339
  image: string | undefined,
340
  profile: PatientProfile,
341
  mode: AppMode,
342
- onSource: (source: string) => void
 
343
  ): Promise<string> => {
344
  const context = `
345
  Patient: ${profile.name} (${profile.age}y).
346
  Condition: ${profile.condition}. History: ${profile.history}.
347
- Surgeries: ${profile.surgeries}. Family Hx: ${profile.familyHistory}.
348
- Lifestyle: ${profile.diet}, ${profile.exerciseFrequency}, Smoke: ${profile.smokingStatus}.
349
- Emergency Contact: ${profile.emergencyContactName} (${profile.emergencyContactPhone}).
350
- Tone: ${mode === AppMode.THERAPY ? 'Empathetic, calm, therapeutic (CBT).' : 'Professional, educational, clear.'}
351
  Format: Plain text. No markdown.
352
  `;
353
 
354
  const contents = history.map(msg => ({ role: msg.role === 'user' ? 'user' : 'model', parts: [{ text: msg.text }, ...(msg.image ? [{ inlineData: { mimeType: 'image/jpeg', data: msg.image.split('base64,')[1] } }] : [])] }));
355
  contents.push({ role: 'user', parts: [{ text: context + "\nUser: " + currentMessage }, ...(image ? [{ inlineData: { mimeType: 'image/jpeg', data: image.split('base64,')[1] } }] : [])] });
356
 
357
- // Helper for Chat
358
  const callGeminiChat = async (modelName: string) => {
359
- console.log(`[Chat] Trying ${modelName}...`);
360
  onSource(modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash');
361
  const response = await ai.models.generateContent({
362
  model: modelName,
@@ -368,22 +386,20 @@ export const generateChatResponse = async (
368
 
369
  try {
370
  if (!API_KEY) throw new Error("No Key");
371
- // 1. Try Tier 1
372
  return await callGeminiChat(MODEL_TIER_1);
373
  } catch (e: any) {
374
- console.warn(`[Chat] Tier 1 Failed: ${e.message}. Switching to Tier 2...`);
375
- // 2. Try Tier 2 on ANY failure
376
- try {
377
- return await callGeminiChat(MODEL_TIER_2);
378
- } catch (e2) {
379
- console.warn(`[Chat] Tier 2 Failed. Switching to Tier 3 (Backend)...`);
380
  }
381
 
382
- // 3. Fallback
383
  try {
 
384
  onSource('Phi-3 Mini (Fallback)');
385
  const fallbackPrompt = `${context}\n\nChat History:\n${history.slice(-3).map(m => m.text).join('\n')}\nUser: ${currentMessage}`;
386
- const responseText = await callFallbackAPI('/generate', { prompt: fallbackPrompt });
 
387
  return cleanText(responseText);
388
  } catch {
389
  return "I'm having trouble connecting. Please check your internet.";
@@ -394,7 +410,7 @@ export const generateChatResponse = async (
394
  export const generateQuickReplies = async (history: ChatMessage[]) => {
395
  if (!API_KEY || history.length === 0) return [];
396
  const recentContext = history.slice(-3).map(m => `${m.role}: ${m.text}`).join('\n');
397
- const prompt = `Based on this conversation:\n${recentContext}\n\nSuggest 3 short, relevant follow-up questions the USER might want to ask next. Return ONLY a JSON array of strings.`;
398
  try {
399
  const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { responseMimeType: "application/json" } });
400
  return JSON.parse(res.text || "[]");
@@ -404,7 +420,7 @@ export const generateQuickReplies = async (history: ChatMessage[]) => {
404
  export const summarizeConversation = async (history: ChatMessage[]) => {
405
  if (!API_KEY) return "Summary unavailable.";
406
  try {
407
- const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: `Summarize clinical conversation:\n${history.map(m=>m.text).join('\n')}` });
408
  return cleanText(res.text || "");
409
  } catch { return "Could not summarize."; }
410
  };
 
1
  import { GoogleGenAI, Type } from "@google/genai";
2
  import { PatientProfile, ClinicalVitals, AppMode, RiskAnalysisResult, ChatMessage, ExtractionResult, HealthInsights } from "../types";
3
 
4
+ // --- API KEY & CONFIG ---
5
  const getApiKey = () => {
6
  try {
7
  // @ts-ignore
 
17
  return '';
18
  };
19
 
20
+ // --- BACKEND CONFIGURATION ---
21
+ // Primary Backend (Text Logic - Phi-3)
22
+ const PRIMARY_API_BASE = 'https://arshenoy-somai-backend.hf.space';
23
+
24
+ // Secondary Backend (Media - Moondream/Whisper)
25
+ // If you create a new space, put its URL here (e.g. via VITE_MEDIA_API_URL env var),
26
+ // otherwise it defaults to the primary one.
27
+ const getMediaApiBase = () => {
28
+ try {
29
+ // @ts-ignore
30
+ if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.VITE_MEDIA_API_URL) {
31
+ // @ts-ignore
32
+ return import.meta.env.VITE_MEDIA_API_URL;
33
+ }
34
+ } catch (e) {}
35
+ return PRIMARY_API_BASE;
36
+ };
37
+
38
+ const MEDIA_API_BASE = getMediaApiBase();
39
+
40
  const API_KEY = getApiKey();
41
  const ai = new GoogleGenAI({ apiKey: API_KEY });
42
 
43
  // --- TIERED MODEL STRATEGY ---
 
44
  const MODEL_TIER_1 = 'gemini-2.5-flash-lite';
 
45
  const MODEL_TIER_2 = 'gemini-2.5-flash';
 
46
  const MODEL_TTS = 'gemini-2.5-flash-preview-tts';
47
 
 
 
 
48
  const cleanText = (text: string) => {
49
  if (!text) return "";
50
  return text.replace(/\*\*/g, '').replace(/###/g, '').replace(/\*/g, '-').trim();
 
58
  const canvas = document.createElement('canvas');
59
  let width = img.width;
60
  let height = img.height;
 
61
  if (width > maxWidth) {
62
  height = (height * maxWidth) / width;
63
  width = maxWidth;
64
  }
 
65
  canvas.width = width;
66
  canvas.height = height;
67
  const ctx = canvas.getContext('2d');
 
74
 
75
  export const wakeUpBackend = async () => {
76
  try {
77
+ // Ping both potential backends
78
+ fetch(`${PRIMARY_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
79
+ if (PRIMARY_API_BASE !== MEDIA_API_BASE) {
80
+ fetch(`${MEDIA_API_BASE}/`, { method: 'GET', mode: 'cors' }).catch(()=>{});
81
+ }
82
  } catch (e) {}
83
  };
84
 
85
+ // Generic Fallback Caller
86
+ const callBackend = async (baseUrl: string, endpoint: string, payload: any, onStatus?: (msg: string) => void): Promise<string> => {
87
+ const url = `${baseUrl}${endpoint}`;
88
+ console.info(`[SomAI] Calling Backend: ${url}`);
89
+ if (onStatus) onStatus("🐢 Switching to local backup...");
90
 
91
  const makeRequest = async (retries = 2) => {
92
  const controller = new AbortController();
93
+ const timeoutId = setTimeout(() => controller.abort(), 90000); // 90s timeout for CPU
94
 
95
  try {
96
+ const response = await fetch(url, {
97
  method: 'POST',
98
  mode: 'cors',
99
  credentials: 'omit',
 
104
  clearTimeout(timeoutId);
105
 
106
  if (!response.ok && (response.status === 503 || response.status === 504) && retries > 0) {
107
+ if (onStatus) onStatus(`💤 Backend waking up... (${retries} retries left)`);
108
  await new Promise(r => setTimeout(r, 5000));
109
  return makeRequest(retries - 1);
110
  }
 
126
  } catch (error: any) {
127
  clearTimeout(timeoutId);
128
  if (retries > 0 && (error.name === 'AbortError' || error.message.includes('Failed'))) {
129
+ if (onStatus) onStatus("📡 Connection unstable, retrying...");
130
  await new Promise(r => setTimeout(r, 5000));
131
  return makeRequest(retries - 1);
132
  }
 
173
  }
174
  };
175
 
176
+ // --- VISION EXTRACTION ---
177
+ export const extractClinicalData = async (imageBase64: string, onStatus?: (msg: string) => void): Promise<ExtractionResult> => {
178
  const base64Data = imageBase64.includes('base64,') ? imageBase64.split('base64,')[1] : imageBase64;
179
+ const prompt = `Analyze this medical document. CRITICAL: Look for Patient Name. Extract JSON: { name, age, condition, history, allergies, systolicBp, glucose, heartRate, weight, temperature, spo2, clinicalNote }. Return JSON only.`;
 
 
 
180
 
181
  const callGeminiVision = async (modelName: string) => {
182
+ if (onStatus) onStatus(` Scanning with ${modelName}...`);
183
  const response = await ai.models.generateContent({
184
  model: modelName,
185
  contents: [{ role: 'user', parts: [{ text: prompt }, { inlineData: { mimeType: 'image/jpeg', data: base64Data } }] }],
 
198
  if (!API_KEY) throw new Error("API Key missing");
199
  return await callGeminiVision(MODEL_TIER_1);
200
  } catch (e: any) {
201
+ if (e.toString().includes('429') || e.toString().includes('Quota')) {
202
+ try {
203
+ return await callGeminiVision(MODEL_TIER_2);
204
+ } catch (e2) {}
 
205
  }
206
 
207
+ // Fallback: Moondream on Media Backend
208
  try {
209
+ if (onStatus) onStatus("🐢 Compressing for Moondream...");
210
  const compressedBase64 = await compressImage(imageBase64);
211
  const cleanBase64 = compressedBase64.includes('base64,') ? compressedBase64.split('base64,')[1] : compressedBase64;
212
+
213
+ if (onStatus) onStatus("🐢 Using Local Vision Node...");
214
+ const resText = await callBackend(MEDIA_API_BASE, '/vision', { image: cleanBase64, prompt: "Extract patient name and vitals from this document." }, onStatus);
215
+
216
  return {
217
  profile: {},
218
  vitals: { clinicalNote: `[Auto-Scanned]: ${resText}` },
 
224
  }
225
  };
226
 
 
227
  export const generateSpeech = async (text: string): Promise<string | null> => {
228
  if (!API_KEY) return null;
229
  try {
 
237
  });
238
  return response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data || null;
239
  } catch (e) {
 
240
  return null;
241
  }
242
  };
 
247
  reader.onloadend = async () => {
248
  const base64 = (reader.result as string).split(',')[1];
249
  try {
250
+ // Whisper calls go to Media Backend
251
+ const text = await callBackend(MEDIA_API_BASE, '/transcribe', { audio: base64 });
252
  resolve(text);
253
  } catch (e) { reject("Voice transcription failed."); }
254
  };
 
256
  });
257
  };
258
 
259
+ // --- RISK ANALYSIS ---
260
+ export const analyzeRisk = async (
261
+ profile: PatientProfile,
262
+ vitals: ClinicalVitals,
263
+ calculatedScore: number,
264
+ onStatus?: (msg: string) => void
265
+ ): Promise<RiskAnalysisResult> => {
266
  const prompt = `
267
  Act as a Senior Clinical Risk Assessor.
268
  Patient: ${profile.name} (${profile.age}, ${profile.gender}). Condition: ${profile.condition}.
269
+ History: ${profile.history}.
270
+ Vitals: BP ${vitals.systolicBp}, Glucose ${vitals.glucose}, SpO2 ${vitals.spo2}%.
 
271
  Note: ${vitals.clinicalNote}.
272
+ Task: 1. Summary. 2. 3 Action Items. 3. ICD-10 Pipeline (Condition, History). 4. Insurance Note.
273
  Return JSON.
274
  `;
275
 
276
  const callGeminiRisk = async (modelName: string) => {
277
+ if (onStatus) onStatus(` Analyzing with ${modelName}...`);
278
  const response = await ai.models.generateContent({
279
  model: modelName,
280
  contents: prompt,
 
301
  if (!API_KEY) throw new Error("API Key missing");
302
  return await callGeminiRisk(MODEL_TIER_1);
303
  } catch (err: any) {
304
+ if (err.toString().includes('429') || err.toString().includes('Quota')) {
305
+ try { return await callGeminiRisk(MODEL_TIER_2); } catch (e2) {}
 
 
 
306
  }
307
 
308
  try {
309
  const payload = { ...profile, ...vitals, riskScore: calculatedScore, prompt };
310
+ // Fallback goes to Primary Backend (Text Node)
311
+ const fallback = await callBackend(PRIMARY_API_BASE, '/analyze', payload, onStatus);
312
  return {
313
  ...parseRiskResponse(fallback, calculatedScore),
314
  source: 'Phi-3 Mini (Fallback)'
 
320
  };
321
 
322
  export const generateHealthInsights = async (profile: PatientProfile, vitals: ClinicalVitals): Promise<HealthInsights> => {
323
+ const prompt = `Based on Patient: ${profile.name}, ${profile.age}y, ${profile.condition}. Vitals: BP ${vitals.systolicBp}. Generate JSON: { weeklySummary, progress, tips: [] }.`;
324
 
325
  const callGeminiInsights = async (model: string) => {
326
  const response = await ai.models.generateContent({
 
335
  if (!API_KEY) throw new Error("No Key");
336
  return await callGeminiInsights(MODEL_TIER_1);
337
  } catch (err: any) {
338
+ if (err.toString().includes('429')) {
339
+ try { return await callGeminiInsights(MODEL_TIER_2); } catch (e) {}
340
+ }
341
  return { weeklySummary: "Keep tracking your vitals.", progress: "Data accumulated.", tips: ["Maintain a balanced diet.", "Stay hydrated."] };
342
  }
343
  };
 
349
  const response = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { maxOutputTokens: 20 } });
350
  return cleanText(response.text || "New Consultation").replace(/^["']|["']$/g, '');
351
  } catch (e) {
352
+ return "New Consultation";
 
 
 
353
  }
354
  };
355
 
356
+ // --- CHAT ---
357
  export const generateChatResponse = async (
358
  history: ChatMessage[],
359
  currentMessage: string,
360
  image: string | undefined,
361
  profile: PatientProfile,
362
  mode: AppMode,
363
+ onSource: (source: string) => void,
364
+ onStatus?: (msg: string) => void
365
  ): Promise<string> => {
366
  const context = `
367
  Patient: ${profile.name} (${profile.age}y).
368
  Condition: ${profile.condition}. History: ${profile.history}.
369
+ Tone: ${mode === AppMode.THERAPY ? 'Empathetic CBT' : 'Medical Guide'}.
 
 
 
370
  Format: Plain text. No markdown.
371
  `;
372
 
373
  const contents = history.map(msg => ({ role: msg.role === 'user' ? 'user' : 'model', parts: [{ text: msg.text }, ...(msg.image ? [{ inlineData: { mimeType: 'image/jpeg', data: msg.image.split('base64,')[1] } }] : [])] }));
374
  contents.push({ role: 'user', parts: [{ text: context + "\nUser: " + currentMessage }, ...(image ? [{ inlineData: { mimeType: 'image/jpeg', data: image.split('base64,')[1] } }] : [])] });
375
 
 
376
  const callGeminiChat = async (modelName: string) => {
377
+ if (onStatus) onStatus(`Generating with ${modelName}...`);
378
  onSource(modelName === MODEL_TIER_1 ? 'Gemini 2.5 Flash-Lite' : 'Gemini 2.5 Flash');
379
  const response = await ai.models.generateContent({
380
  model: modelName,
 
386
 
387
  try {
388
  if (!API_KEY) throw new Error("No Key");
 
389
  return await callGeminiChat(MODEL_TIER_1);
390
  } catch (e: any) {
391
+ if (e.toString().includes('429') || e.toString().includes('Quota')) {
392
+ try {
393
+ return await callGeminiChat(MODEL_TIER_2);
394
+ } catch (e2) {}
 
 
395
  }
396
 
 
397
  try {
398
+ if (onStatus) onStatus("Falling back to Local Phi-3...");
399
  onSource('Phi-3 Mini (Fallback)');
400
  const fallbackPrompt = `${context}\n\nChat History:\n${history.slice(-3).map(m => m.text).join('\n')}\nUser: ${currentMessage}`;
401
+ // Fallback goes to Primary Backend (Text Node)
402
+ const responseText = await callBackend(PRIMARY_API_BASE, '/generate', { prompt: fallbackPrompt }, onStatus);
403
  return cleanText(responseText);
404
  } catch {
405
  return "I'm having trouble connecting. Please check your internet.";
 
410
  export const generateQuickReplies = async (history: ChatMessage[]) => {
411
  if (!API_KEY || history.length === 0) return [];
412
  const recentContext = history.slice(-3).map(m => `${m.role}: ${m.text}`).join('\n');
413
+ const prompt = `Based on: ${recentContext}. Suggest 3 short follow-up questions. JSON array.`;
414
  try {
415
  const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: prompt, config: { responseMimeType: "application/json" } });
416
  return JSON.parse(res.text || "[]");
 
420
  export const summarizeConversation = async (history: ChatMessage[]) => {
421
  if (!API_KEY) return "Summary unavailable.";
422
  try {
423
+ const res = await ai.models.generateContent({ model: MODEL_TIER_1, contents: `Summarize:\n${history.map(m=>m.text).join('\n')}` });
424
  return cleanText(res.text || "");
425
  } catch { return "Could not summarize."; }
426
  };