admin08077 committed on
Commit
7b53239
·
verified ·
1 Parent(s): 1d7156d

Update services/geminiService.ts

Browse files
Files changed (1) hide show
  1. services/geminiService.ts +57 -88
services/geminiService.ts CHANGED
@@ -1,7 +1,7 @@
1
 
 
2
  import { SimulationResult, AIInsight } from "../types/index";
3
 
4
- // Added exported constants for TTS voices and languages as required by the UI in views/Broadcast.tsx
5
  export const TTS_VOICES = [
6
  { name: 'Puck', style: 'Energetic' },
7
  { name: 'Charon', style: 'Calm' },
@@ -21,131 +21,100 @@ export const TTS_LANGUAGES = [
21
  { code: 'zh', name: 'Chinese' }
22
  ];
23
 
 
 
 
 
 
 
 
 
 
24
  export const callGemini = async (model: string, contents: any, config: any = {}) => {
25
- // Ensure contents matches the expected API structure
26
- const normalizedContents = typeof contents === 'string' ? [{ parts: [{ text: contents }] }] :
27
- (Array.isArray(contents) ? contents : [contents]);
28
-
29
- const response = await fetch('/api/gemini/generate', {
30
- method: 'POST',
31
- headers: { 'Content-Type': 'application/json' },
32
- body: JSON.stringify({ contents: normalizedContents })
33
  });
34
-
35
- if (!response.ok) {
36
- const err = await response.json().catch(() => ({ error: 'Handshake Failed' }));
37
- throw new Error(err.error || 'AI Bridge Failed');
38
- }
39
- return await response.json();
40
  };
41
 
42
- // Helper function for base64 decoding
43
- function decode(base64: string) {
44
- const binaryString = atob(base64);
45
- const len = binaryString.length;
46
- const bytes = new Uint8Array(len);
47
- for (let i = 0; i < len; i++) {
48
- bytes[i] = binaryString.charCodeAt(i);
49
- }
50
- return bytes;
51
- }
52
-
53
- // Helper function for audio decoding
54
- async function decodeAudioData(
55
- data: Uint8Array,
56
- ctx: AudioContext,
57
- sampleRate: number,
58
- numChannels: number,
59
- ): Promise<AudioBuffer> {
60
- const dataInt16 = new Int16Array(data.buffer);
61
- const frameCount = dataInt16.length / numChannels;
62
- const buffer = ctx.createBuffer(numChannels, frameCount, sampleRate);
63
-
64
- for (let channel = 0; channel < numChannels; channel++) {
65
- const channelData = buffer.getChannelData(channel);
66
- for (let i = 0; i < frameCount; i++) {
67
- channelData[i] = dataInt16[i * numChannels + channel] / 32768.0;
68
- }
69
- }
70
- return buffer;
71
- }
72
-
73
- // Fixed: Updated synthesizeSpeech to include multiSpeaker parameter as expected by views/Broadcast.tsx
74
  export const synthesizeSpeech = async (params: {
75
  text: string;
76
  voiceName: string;
77
  directorNotes?: string;
78
- multiSpeaker?: {
79
- speaker1: string;
80
- voice1: string;
81
- speaker2: string;
82
- voice2: string;
83
- };
84
  }) => {
85
- // This is a simplified proxy call for TTS since the server endpoint is hardcoded to gemini-2.5-flash for content
86
- // Note: Standard content generation and TTS share the same proxy logic here for simplicity
87
- try {
88
- const prompt = params.directorNotes ? `${params.directorNotes} Text: ${params.text}` : params.text;
89
- const data = await callGemini('gemini-2.5-flash', prompt);
90
- // If the proxy were expanded for Audio modalities, we would handle data.audio here
91
- return true;
92
- } catch (error) {
93
- console.error("Speech synthesis failed:", error);
94
- return false;
95
- }
96
  };
97
 
98
  export const speakText = async (text: string) => {
99
- console.log("Synthesizing Signal: ", text);
100
  return true;
101
  };
102
 
103
  export const processVoiceCommand = async (command: string) => {
104
  try {
105
- const prompt = `Analyze: "${command}". Return JSON: { "action": "SEND_MONEY", "amount": number, "recipient": string, "category": string, "narration": "Confirming..." }`;
106
- const data = await callGemini('gemini-2.5-flash', prompt);
107
- return JSON.parse(data.text || '{}');
 
 
 
 
108
  } catch (error) {
109
- return { action: "ERROR", narration: "Link unstable." };
110
  }
111
  };
112
 
113
  export const getFinancialAdviceStream = async (query: string, context: any) => {
114
- const prompt = `Context: ${JSON.stringify(context)}. User: ${query}`;
115
- const data = await callGemini('gemini-2.5-flash', prompt);
116
- // Return an array to mimic the structure expected by the Advisor view iterator
117
- return [{ text: data.text }];
 
 
118
  };
119
 
120
  export const getSystemIntelligenceFeed = async (): Promise<AIInsight[]> => {
121
  try {
122
- const prompt = "Generate 4 brief alerts JSON: [{title, description, severity: 'INFO'|'CRITICAL'}]";
123
- const data = await callGemini('gemini-2.5-flash', prompt);
124
- // Use a regex to extract JSON if the model returns markdown
125
- const jsonStr = data.text.match(/\[.*\]/s)?.[0] || '[]';
126
- return JSON.parse(jsonStr);
 
 
127
  } catch {
128
- return [{ id: '1', title: "Node Sync Active", description: "Operational parity.", severity: "INFO" }];
129
  }
130
  };
131
 
132
  export const runSimulationForecast = async (prompt: string): Promise<SimulationResult> => {
133
  try {
134
- const fullPrompt = `Simulate: ${prompt}. Return JSON with outcomeNarrative, projectedValue, confidenceScore, status, simulationId.`;
135
- const data = await callGemini('gemini-2.5-flash', fullPrompt);
136
- const jsonStr = data.text.match(/\{.*\}/s)?.[0] || '{}';
137
- return JSON.parse(jsonStr);
 
 
 
138
  } catch {
139
- return { outcomeNarrative: "Simulation Failed.", projectedValue: 0, confidenceScore: 0, status: "ERROR", simulationId: "ERR_A1" };
140
  }
141
  };
142
 
143
  export const getPortfolioSuggestions = async (context: any) => {
144
  try {
145
- const prompt = `Strategize based on: ${JSON.stringify(context)}. Return JSON array of 3 objects with type, title, description.`;
146
- const data = await callGemini('gemini-2.5-flash', prompt);
147
- const jsonStr = data.text.match(/\[.*\]/s)?.[0] || '[]';
148
- return JSON.parse(jsonStr);
 
 
 
149
  } catch {
150
  return [];
151
  }
 
1
 
2
+ import { GoogleGenAI, GenerateContentResponse } from "@google/genai";
3
  import { SimulationResult, AIInsight } from "../types/index";
4
 
 
5
  export const TTS_VOICES = [
6
  { name: 'Puck', style: 'Energetic' },
7
  { name: 'Charon', style: 'Calm' },
 
21
  { code: 'zh', name: 'Chinese' }
22
  ];
23
 
24
// Model identifiers shared by every service call in this module.
const TEXT_MODEL = 'gemini-3-flash-preview';
// NOTE(review): IMAGE_MODEL is not referenced in the visible portion of this
// file — confirm it is used elsewhere before removing.
const IMAGE_MODEL = 'gemini-2.5-flash-image';

/**
 * Builds a GoogleGenAI client from the first available API key.
 * Resolution order: user-supplied key in localStorage ('LQI_API_KEY'),
 * then build-time env vars API_KEY / GEMINI_API_KEY.
 * SECURITY NOTE(review): resolving the Gemini API key in browser code exposes
 * it to end users; consider proxying requests server-side instead.
 * Throws Error("API_KEY_MISSING") when no key can be resolved.
 */
const getAI = () => {
  // `||` (not `??`) is deliberate here: an empty-string key is as useless as
  // a missing one, so fall through to the next source.
  const apiKey = localStorage.getItem('LQI_API_KEY') || process.env.API_KEY || process.env.GEMINI_API_KEY;
  if (!apiKey) throw new Error("API_KEY_MISSING");
  return new GoogleGenAI({ apiKey });
};
32
+
33
  export const callGemini = async (model: string, contents: any, config: any = {}) => {
34
+ const ai = getAI();
35
+ const response: GenerateContentResponse = await ai.models.generateContent({
36
+ model: model || TEXT_MODEL,
37
+ contents: typeof contents === 'string' ? contents : contents,
38
+ config: config,
 
 
 
39
  });
40
+ return response;
 
 
 
 
 
41
  };
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  export const synthesizeSpeech = async (params: {
44
  text: string;
45
  voiceName: string;
46
  directorNotes?: string;
47
+ multiSpeaker?: any;
 
 
 
 
 
48
  }) => {
49
+ console.log(`Synthesizing: ${params.text}`);
50
+ return true;
 
 
 
 
 
 
 
 
 
51
  };
52
 
53
/**
 * Stub text-to-speech hook: logs the text and resolves true.
 * No audio is actually produced in this build.
 */
export const speakText = async (text: string) => {
  console.log("Neural Audio Signal: ", text);
  return true;
};
57
 
58
  export const processVoiceCommand = async (command: string) => {
59
  try {
60
+ const ai = getAI();
61
+ const response = await ai.models.generateContent({
62
+ model: TEXT_MODEL,
63
+ contents: `Analyze this treasury command: "${command}". Return a JSON object with: { "action": "SEND_MONEY" | "QUERY", "amount": number, "recipient": string, "category": string, "narration": string }.`,
64
+ config: { responseMimeType: "application/json" }
65
+ });
66
+ return JSON.parse(response.text || '{}');
67
  } catch (error) {
68
+ return { action: "ERROR", narration: "Handshake sync failed." };
69
  }
70
  };
71
 
72
  export const getFinancialAdviceStream = async (query: string, context: any) => {
73
+ const ai = getAI();
74
+ const response = await ai.models.generateContent({
75
+ model: TEXT_MODEL,
76
+ contents: `Context: ${JSON.stringify(context)}. User: ${query}`,
77
+ });
78
+ return [{ text: response.text }];
79
  };
80
 
81
  export const getSystemIntelligenceFeed = async (): Promise<AIInsight[]> => {
82
  try {
83
+ const ai = getAI();
84
+ const response = await ai.models.generateContent({
85
+ model: TEXT_MODEL,
86
+ contents: "Generate 4 brief institutional ledger alerts in JSON format: [{title, description, severity: 'INFO'|'CRITICAL'}]",
87
+ config: { responseMimeType: "application/json" }
88
+ });
89
+ return JSON.parse(response.text || '[]');
90
  } catch {
91
+ return [{ id: '1', title: "Node Sync Active", description: "Operational parity achieved.", severity: "INFO" }];
92
  }
93
  };
94
 
95
  export const runSimulationForecast = async (prompt: string): Promise<SimulationResult> => {
96
  try {
97
+ const ai = getAI();
98
+ const response = await ai.models.generateContent({
99
+ model: TEXT_MODEL,
100
+ contents: `Simulate this scenario: ${prompt}. Return JSON: { outcomeNarrative, projectedValue, confidenceScore, status, simulationId }.`,
101
+ config: { responseMimeType: "application/json" }
102
+ });
103
+ return JSON.parse(response.text || '{}');
104
  } catch {
105
+ return { outcomeNarrative: "Handshake failed.", projectedValue: 0, confidenceScore: 0, status: "ERROR", simulationId: "ERR_01" };
106
  }
107
  };
108
 
109
  export const getPortfolioSuggestions = async (context: any) => {
110
  try {
111
+ const ai = getAI();
112
+ const response = await ai.models.generateContent({
113
+ model: TEXT_MODEL,
114
+ contents: `Suggest 3 strategic treasury actions based on: ${JSON.stringify(context)}. Return JSON array of {type: 'ALPHA'|'RISK'|'LIQUIDITY', title, description}.`,
115
+ config: { responseMimeType: "application/json" }
116
+ });
117
+ return JSON.parse(response.text || '[]');
118
  } catch {
119
  return [];
120
  }