# llm_clients/gemini.py
from typing import Generator, Any, Dict, List, Optional
import google.generativeai as genai
from .base import LlmClient
import config
import tempfile
import os
class GeminiClient(LlmClient):
    """LLM client for Google's Gemini models.

    Text prompts are sent directly; file attachments are staged to disk,
    uploaded via the Gemini Files API, and passed alongside the prompt.
    """

    def __init__(self, config_dict: Dict[str, Any], system_prompt: str):
        """Configure the Gemini SDK and build the generative model.

        Args:
            config_dict: Client configuration; must contain a 'model' key.
            system_prompt: System instruction applied to every generation.

        Raises:
            ValueError: If GEMINI_API_KEY is unset or still the placeholder.
        """
        super().__init__(config_dict, system_prompt)
        if not config.GEMINI_API_KEY or "YOUR_GOOGLE_API_KEY" in config.GEMINI_API_KEY:
            raise ValueError("Please set your GEMINI_API_KEY in the config.py file or as an environment variable.")
        genai.configure(api_key=config.GEMINI_API_KEY)
        self.model = genai.GenerativeModel(
            self.config['model'],
            system_instruction=self.system_prompt
        )
        print(f"✅ Gemini Client initialized with model '{self.config['model']}'.")

    def _upload_files(self, files: Optional[List[Dict[str, Any]]]) -> List[Any]:
        """Upload each attachment to Gemini and return the uploaded handles.

        Each file dict is expected to carry 'filename', 'extension', and
        'content' (bytes). Uploads are best-effort: a file that fails to
        upload is logged and skipped. The temporary file used to stage each
        upload is always removed, even when the upload itself raises
        (the previous inline version leaked it on failure).

        Args:
            files: Attachment descriptors, or None.

        Returns:
            Uploaded file objects suitable for inclusion in content parts.
        """
        uploaded: List[Any] = []
        if not files:
            return uploaded
        for file_info in files:
            try:
                # Stage the in-memory content on disk so the SDK can read it.
                with tempfile.NamedTemporaryFile(
                    delete=False,
                    suffix=f".{file_info['extension'].lstrip('.')}",
                ) as tmp_file:
                    tmp_file.write(file_info['content'])
                    tmp_file_path = tmp_file.name
                try:
                    uploaded.append(
                        genai.upload_file(tmp_file_path, display_name=file_info['filename'])
                    )
                    print(f" 📎 Uploaded {file_info['filename']} to Gemini")
                finally:
                    # Clean up the staging file even if upload_file raises.
                    os.unlink(tmp_file_path)
            except Exception as e:
                # Continue without this attachment rather than failing the request.
                print(f" ⚠️ Failed to upload {file_info.get('filename', 'unknown file')} to Gemini: {e}")
        return uploaded

    def generate_content(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> str:
        """Generates a non-streaming response from Gemini with optional file attachments."""
        content_parts: List[Any] = [prompt]
        content_parts.extend(self._upload_files(files))
        response = self.model.generate_content(content_parts, stream=False)
        return response.text

    def generate_content_stream(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> Generator[Any, None, None]:
        """Generates a streaming response from Gemini with optional file attachments."""
        content_parts: List[Any] = [prompt]
        content_parts.extend(self._upload_files(files))
        return self.model.generate_content(content_parts, stream=True)

    def _generate_content_impl(self, prompt: str) -> str:
        """Fallback implementation for clients that don't support files."""
        response = self.model.generate_content(prompt, stream=False)
        return response.text

    def _generate_content_stream_impl(self, prompt: str) -> Generator[Any, None, None]:
        """Fallback implementation for clients that don't support files."""
        return self.model.generate_content(prompt, stream=True)