/**
* GeminiService - WidgeTDC's Neural Core
* =====================================
* Native Google Gemini integration for Docker-compatible AI operations.
* This replaces CLI-based AI calls with direct API access.
*/
import { GoogleGenerativeAI, GenerativeModel } from '@google/generative-ai';
/**
 * Generation settings handed to the Gemini SDK when building a model handle.
 */
interface GeminiConfig {
  /** Gemini model identifier, e.g. 'gemini-2.0-flash'. */
  model: string;
  /** Sampling temperature; lower is more deterministic. */
  temperature?: number;
  /** Upper bound on tokens generated per response. */
  maxOutputTokens?: number;
}
/**
 * Input for {@link GeminiService.think}: a user prompt with optional
 * context and system-instruction text that are concatenated into one prompt.
 */
interface ThinkRequest {
  /** The user's question or instruction. */
  prompt: string;
  /** Optional background text prepended under a "Context:" header. */
  context?: string;
  /** Optional behavioral instruction prepended under a "System:" header. */
  systemInstruction?: string;
}
/**
 * Native Google Gemini integration.
 *
 * Wraps the `@google/generative-ai` SDK behind a small service object.
 * If no API key is present in the environment the service degrades
 * gracefully: construction succeeds, `isReady()` returns false, and every
 * AI method resolves to a `[NEURAL OFFLINE]`/`[NEURAL FAILURE]` marker
 * string instead of throwing.
 */
export class GeminiService {
  // Definite-assignment (!) because these are only set when an API key exists;
  // all uses are guarded by `this.initialized`.
  private genAI!: GoogleGenerativeAI;
  private model!: GenerativeModel;
  private initialized: boolean = false;

  /** Default generation settings applied to every request. */
  private readonly defaultConfig: GeminiConfig = {
    model: 'gemini-2.0-flash',
    temperature: 0.7,
    maxOutputTokens: 4096
  };

  /**
   * Reads GEMINI_API_KEY (or GOOGLE_API_KEY) from the environment and builds
   * the SDK client. Never throws: missing key or SDK failure just leaves the
   * service uninitialized.
   */
  constructor() {
    const apiKey = process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY;
    if (!apiKey) {
      console.warn('⚠️ GEMINI_API_KEY not found - AI features will be disabled');
      this.initialized = false;
      return;
    }
    try {
      this.genAI = new GoogleGenerativeAI(apiKey);
      this.model = this.genAI.getGenerativeModel({
        model: this.defaultConfig.model,
        generationConfig: {
          temperature: this.defaultConfig.temperature,
          maxOutputTokens: this.defaultConfig.maxOutputTokens
        }
      });
      this.initialized = true;
      // FIX: report the configured model rather than a hard-coded name, so the
      // log stays truthful if defaultConfig.model ever changes.
      console.log(`🧠 Gemini Service Initialized (Model: ${this.defaultConfig.model})`);
    } catch (error) {
      console.error('❌ Failed to initialize Gemini:', error);
      this.initialized = false;
    }
  }

  /**
   * Whether the SDK client was successfully constructed.
   */
  public isReady(): boolean {
    return this.initialized;
  }

  /**
   * Generate a response from Gemini for a raw prompt.
   *
   * @param prompt - Text sent verbatim to the model.
   * @returns The model's text, or a `[NEURAL OFFLINE]`/`[NEURAL FAILURE]`
   *          marker string on unavailability or API error (never rejects).
   */
  public async generateThought(prompt: string): Promise<string> {
    if (!this.initialized) {
      return '[NEURAL OFFLINE]: Gemini service not initialized. Check GEMINI_API_KEY.';
    }
    try {
      const result = await this.model.generateContent(prompt);
      const response = result.response;
      const text = response.text();
      return text;
    } catch (error) {
      console.error('🔴 Gemini Error:', error);
      return `[NEURAL FAILURE]: ${error instanceof Error ? error.message : 'Unknown error'}`;
    }
  }

  /**
   * Advanced thinking with optional context and system instruction.
   * Builds a single prompt of the form "System: …\n\nContext:\n…\n\nUser: …",
   * omitting sections that were not provided.
   *
   * @returns Model text, or a `[NEURAL …]` marker string (never rejects).
   */
  public async think(request: ThinkRequest): Promise<string> {
    if (!this.initialized) {
      return '[NEURAL OFFLINE]: Gemini service not initialized.';
    }
    try {
      let fullPrompt = '';
      if (request.systemInstruction) {
        fullPrompt += `System: ${request.systemInstruction}\n\n`;
      }
      if (request.context) {
        fullPrompt += `Context:\n${request.context}\n\n`;
      }
      fullPrompt += `User: ${request.prompt}`;
      const result = await this.model.generateContent(fullPrompt);
      return result.response.text();
    } catch (error) {
      console.error('🔴 Gemini Think Error:', error);
      return `[NEURAL FAILURE]: ${error instanceof Error ? error.message : 'Unknown error'}`;
    }
  }

  /**
   * MCP handler wrapper — lets agents call Gemini via the MCP protocol.
   * Success is inferred from the response NOT starting with a '[NEURAL'
   * marker (the error convention used by think/generateThought above).
   */
  public async handleMcpRequest(args: {
    prompt: string;
    context?: string;
    systemInstruction?: string;
  }): Promise<{ success: boolean; response: string; model: string }> {
    console.log(`🤖 MCP AI Request: "${args.prompt.substring(0, 60)}..."`);
    const response = await this.think({
      prompt: args.prompt,
      context: args.context,
      systemInstruction: args.systemInstruction
    });
    return {
      success: !response.startsWith('[NEURAL'),
      response,
      model: this.defaultConfig.model
    };
  }

  /**
   * Analyze arbitrary data with AI.
   *
   * @param data - Any JSON-serializable value; embedded via JSON.stringify.
   *               (Typed `unknown` rather than `any` — accepts the same
   *               inputs but keeps the type checker honest.)
   * @param question - The question to answer about the data.
   */
  public async analyze(data: unknown, question: string): Promise<string> {
    const prompt = `
Analyze the following data and answer the question.
DATA:
${JSON.stringify(data, null, 2)}
QUESTION: ${question}
Provide a clear, concise analysis.
`;
    return this.generateThought(prompt);
  }

  /**
   * Summarize text content.
   *
   * @param maxLength - Optional soft character cap, passed to the model as an
   *                    instruction (not enforced locally).
   */
  public async summarize(content: string, maxLength?: number): Promise<string> {
    const lengthInstruction = maxLength
      ? `Keep the summary under ${maxLength} characters.`
      : 'Provide a comprehensive summary.';
    const prompt = `
Summarize the following content. ${lengthInstruction}
CONTENT:
${content}
SUMMARY:
`;
    return this.generateThought(prompt);
  }

  /**
   * Extract structured data from text according to a JSON schema.
   * The model is asked to return only JSON; the result is NOT parsed or
   * validated here — callers must JSON.parse and validate themselves.
   */
  public async extractStructured(text: string, schema: object): Promise<string> {
    const prompt = `
Extract structured data from the following text according to the schema.
Return ONLY valid JSON matching the schema.
SCHEMA:
${JSON.stringify(schema, null, 2)}
TEXT:
${text}
JSON OUTPUT:
`;
    return this.generateThought(prompt);
  }
}
// Singleton access — module-level cache of the one shared service instance.
let geminiServiceInstance: GeminiService | null = null;

/**
 * Lazily create and return the shared GeminiService instance.
 */
export function getGeminiService(): GeminiService {
  if (!geminiServiceInstance) {
    geminiServiceInstance = new GeminiService();
  }
  return geminiServiceInstance;
}

// FIX: reuse the lazy singleton instead of eagerly constructing a second,
// independent GeminiService — the original `new GeminiService()` here meant
// `geminiService` and `getGeminiService()` returned different instances
// (two SDK clients, duplicated init logging, divergent state).
export const geminiService = getGeminiService();