# MediGuard AI RAG-Helper - Environment Configuration Template
# Copy this file to .env and fill in your values

# ============================================================================
# LLM PROVIDER CONFIGURATION (Choose ONE - all have FREE tiers)
# ============================================================================

# Option 1: GROQ (RECOMMENDED - FREE, fast, llama-3.3-70b)
# Get FREE API key: https://console.groq.com/keys
GROQ_API_KEY="your_groq_api_key_here"

# Option 2: Google Gemini (FREE tier available)
# Get FREE API key: https://aistudio.google.com/app/apikey
# NOTE: GOOGLE_API_KEY is also required for EMBEDDING_PROVIDER="google"
# (the default below), even if you use Groq as your LLM provider.
GOOGLE_API_KEY="your_google_api_key_here"

# Provider selection: "groq" (default), "gemini", or "ollama" (local)
LLM_PROVIDER="groq"

# Embedding provider: "google" (default, FREE), "huggingface" (local), or "ollama"
EMBEDDING_PROVIDER="google"

# ============================================================================
# LANGSMITH (Optional - for tracing/debugging)
# ============================================================================
LANGCHAIN_API_KEY="your_langsmith_api_key_here"
LANGCHAIN_TRACING_V2="true"
LANGCHAIN_PROJECT="MediGuard_AI_RAG_Helper"

# ============================================================================
# APPLICATION SETTINGS
# ============================================================================
LOG_LEVEL="INFO"

# ============================================================================
# OLLAMA (Only needed if LLM_PROVIDER="ollama" or EMBEDDING_PROVIDER="ollama")
# ============================================================================
# OLLAMA_HOST="http://localhost:11434"