# chat_model_wrapper.py
"""Thin wrapper around the Gemini generative-AI client for prompt -> text use."""

import os

import google.generativeai as genai
from dotenv import load_dotenv

# Load variables from a local .env file, overriding any already-set env vars.
load_dotenv(override=True)


class ChatRefiner:
    """Wraps a Gemini model for simple prompt-to-text generation."""

    def __init__(self, model_name: str = "gemini-1.5-flash"):
        """
        Configure the Gemini client and bind a generative model.

        Args:
            model_name: Identifier of the Gemini model to use.

        Raises:
            ValueError: If GEMINI_API_KEY is missing from both the
                environment and the .env file.
        """
        # SECURITY FIX: the original hard-coded an API key on the line after
        # os.getenv(), which (a) leaked a live credential into source control
        # and (b) made the env lookup and the ValueError guard dead code.
        # The key must come exclusively from the environment / .env file;
        # the previously committed key should be revoked and rotated.
        api_key = os.getenv("GEMINI_API_KEY")
        if not api_key:
            raise ValueError("GEMINI_API_KEY not set in environment or .env file.")
        genai.configure(api_key=api_key)
        self.model = genai.GenerativeModel(model_name)

    def answer(self, prompt: str) -> str:
        """
        Send `prompt` to Gemini and return the generated text.

        Args:
            prompt: The text prompt to send to the model.

        Returns:
            The model's response text with surrounding whitespace stripped.

        Raises:
            Exception: Any error raised by the Gemini SDK is logged and
                re-raised unchanged for the caller to handle.
        """
        try:
            response = self.model.generate_content(prompt)
            return response.text.strip()
        except Exception as e:
            # Log at the boundary, then re-raise so callers see the failure.
            print(f"[ChatRefiner.answer] Error: {e}")
            raise