zazaman's picture
Add multilingual translation support with Qwen3-0.6B-GGUF and optimize for Hugging Face Spaces deployment
a2e1879
raw
history blame
4.27 kB
# llm_clients/gemini.py
from typing import Generator, Any, Dict, List, Optional
import google.generativeai as genai
from .base import LlmClient
import config
import tempfile
import os
class GeminiClient(LlmClient):
    """LLM client for Google's Gemini models."""

    def __init__(self, config_dict: Dict[str, Any], system_prompt: str):
        """Configure the Gemini SDK and construct the generative model.

        Args:
            config_dict: Client configuration; must contain a 'model' key.
            system_prompt: System instruction attached to the model.

        Raises:
            ValueError: If GEMINI_API_KEY is unset or still the placeholder.
        """
        super().__init__(config_dict, system_prompt)
        if not config.GEMINI_API_KEY or "YOUR_GOOGLE_API_KEY" in config.GEMINI_API_KEY:
            raise ValueError("Please set your GEMINI_API_KEY in the config.py file or as an environment variable.")
        genai.configure(api_key=config.GEMINI_API_KEY)
        self.model = genai.GenerativeModel(
            self.config['model'],
            system_instruction=self.system_prompt
        )
        print(f"✅ Gemini Client initialized with model '{self.config['model']}'.")

    def _build_content_parts(self, prompt: str, files: Optional[List[Dict[str, Any]]]) -> List[Any]:
        """Assemble the request content: the prompt plus any uploaded files.

        Each entry of *files* is expected to be a dict with 'filename',
        'extension', and 'content' (bytes) keys — TODO confirm against caller.
        Upload failures are logged and skipped (best effort) so a single bad
        file does not abort the whole request.

        Returns:
            A list whose first element is *prompt*, followed by one Gemini
            file handle per successfully uploaded file.
        """
        content_parts: List[Any] = [prompt]
        for file_info in files or []:
            tmp_file_path = None
            try:
                # genai.upload_file takes a filesystem path, so stage the
                # in-memory bytes in a temp file first.
                with tempfile.NamedTemporaryFile(delete=False, suffix=f".{file_info['extension'].lstrip('.')}") as tmp_file:
                    tmp_file.write(file_info['content'])
                    tmp_file_path = tmp_file.name
                uploaded_file = genai.upload_file(tmp_file_path, display_name=file_info['filename'])
                content_parts.append(uploaded_file)
                print(f" 📎 Uploaded {file_info['filename']} to Gemini")
            except Exception as e:
                # Deliberate best-effort: report and continue without this file.
                print(f" ⚠️ Failed to upload {file_info.get('filename', 'unknown file')} to Gemini: {e}")
            finally:
                # Always remove the staged temp file — the original only
                # unlinked on success, leaking it when upload_file raised.
                if tmp_file_path and os.path.exists(tmp_file_path):
                    os.unlink(tmp_file_path)
        return content_parts

    def generate_content(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> str:
        """Generates a non-streaming response from Gemini with optional file attachments."""
        content_parts = self._build_content_parts(prompt, files)
        response = self.model.generate_content(content_parts, stream=False)
        return response.text

    def generate_content_stream(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> Generator[Any, None, None]:
        """Generates a streaming response from Gemini with optional file attachments."""
        content_parts = self._build_content_parts(prompt, files)
        return self.model.generate_content(content_parts, stream=True)

    def _generate_content_impl(self, prompt: str) -> str:
        """Fallback implementation for clients that don't support files."""
        response = self.model.generate_content(prompt, stream=False)
        return response.text

    def _generate_content_stream_impl(self, prompt: str) -> Generator[Any, None, None]:
        """Fallback implementation for clients that don't support files."""
        return self.model.generate_content(prompt, stream=True)