# ecologits-chat / mistral_client.py
# (uploaded by yipfram — commit b00f1d1, "Upload 14 files")
from mistralai import Mistral
from ecologits import EcoLogits
from config import MISTRAL_API_KEY, FALLBACK_MODELS
class MistralChatClient:
    """Simple wrapper for the Mistral API with EcoLogits impact tracking."""

    def __init__(self, api_key=None):
        """Create a client and fetch the available chat models.

        Args:
            api_key: Mistral API key; falls back to MISTRAL_API_KEY from config.

        Raises:
            ValueError: if no API key is available from either source.
        """
        # Initialize EcoLogits tracking for the mistralai provider
        EcoLogits.init(providers=["mistralai"])
        # Use provided API key or fall back to config
        self.api_key = api_key or MISTRAL_API_KEY
        if not self.api_key:
            raise ValueError("Mistral API key is required. Please provide it via parameter or environment variable.")
        # Initialize Mistral client
        self.client = Mistral(api_key=self.api_key)
        # Get available models
        self.available_models = self._get_available_models()

    def update_api_key(self, new_api_key):
        """Update the API key, rebuild the client, and refresh the model list.

        Args:
            new_api_key: replacement key; must be non-empty.

        Returns:
            True on success.

        Raises:
            ValueError: if new_api_key is empty.
        """
        if not new_api_key:
            raise ValueError("API key cannot be empty")
        self.api_key = new_api_key
        self.client = Mistral(api_key=self.api_key)
        self.available_models = self._get_available_models()
        return True

    def _get_available_models(self):
        """Return chat-capable model ids from the API, or FALLBACK_MODELS.

        Models without the ``completion_chat`` capability and specialized
        models (OCR, embedding, vision, classifier, moderation) are excluded.
        Falls back to FALLBACK_MODELS when the API call fails, returns no
        data, or yields no suitable models.
        """
        try:
            models_response = self.client.models.list()
            if models_response.data:
                models = []
                excluded_models = []
                for model in models_response.data:
                    model_id = model.id
                    # Keep only models advertising chat-completion capability
                    if (hasattr(model, 'capabilities') and
                            hasattr(model.capabilities, 'completion_chat') and
                            model.capabilities.completion_chat):
                        # Exclude specialized models that aren't suitable for general chat
                        model_id_lower = model_id.lower()
                        excluded_terms = ['ocr', 'embed', 'vision', 'classifier', 'moderation']
                        if any(term in model_id_lower for term in excluded_terms):
                            excluded_models.append(f"{model_id} (specialized model)")
                        else:
                            models.append(model_id)
                    else:
                        excluded_models.append(f"{model_id} (no chat completion capability)")
                # Debug output
                if models:
                    print(f"✅ Found {len(models)} suitable chat models: {models}")
                if excluded_models:
                    print(f"⚠️ Excluded {len(excluded_models)} models: {excluded_models[:5]}")  # Show first 5
                return models if models else FALLBACK_MODELS
            # BUG FIX: previously this path fell off the end of the try block
            # and the method returned None when the API returned an empty
            # model list; now it falls back explicitly.
            print(f"📋 Using fallback models: {FALLBACK_MODELS}")
            return FALLBACK_MODELS
        except Exception as e:
            print(f"Warning: Could not fetch models: {e}")
            # Return fallback models if API call fails
            print(f"📋 Using fallback models: {FALLBACK_MODELS}")
            return FALLBACK_MODELS

    def chat(self, messages, model):
        """Send a chat completion request and return content plus EcoLogits impacts.

        Args:
            messages: list of chat messages for the Mistral API.
            model: model id to use for the completion.

        Returns:
            dict with keys 'content', 'impacts', 'warnings', 'errors'.
            On failure, 'content' carries an error string and 'impacts' is None
            (no exception is propagated to the caller).
        """
        try:
            response = self.client.chat.complete(
                messages=messages,
                model=model
            )
            # Collect warnings and errors reported by EcoLogits.
            # NOTE: `# type: ignore` because the `impacts` attribute is
            # injected dynamically by EcoLogits at runtime and is invisible
            # to static type checkers.
            warnings = []
            errors = []
            if response.impacts.has_warnings:  # type: ignore
                for warning in response.impacts.warnings:  # type: ignore
                    warnings.append(str(warning))
                    print(f"Warning: {warning}")
            if response.impacts.has_errors:  # type: ignore
                for error in response.impacts.errors:  # type: ignore
                    errors.append(str(error))
                    print(f"Error: {error}")
            return {
                'content': response.choices[0].message.content,
                'impacts': response.impacts,  # type: ignore
                'warnings': warnings,
                'errors': errors
            }
        except Exception as e:
            print(f"Error during chat completion: {e}")
            return {
                'content': f"Erreur: {str(e)}",
                'impacts': None,
                'warnings': [],
                'errors': [str(e)]
            }