HUGGINGFACE_API_TOKEN=your_huggingface_api_token_here
# Persistent path for ChromaDB (vector store)
CHROMA_DB_DIR=./chroma_db
# Embedding model (Hugging Face) — a small, fast multilingual option
# Default in code: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
EMBEDDING_MODEL_NAME=sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
# LLM model for generation (Hugging Face Inference API)
# Examples: meta-llama/Llama-3.1-8B-Instruct or meta-llama/Llama-3.2-3B-Instruct
LLM_MODEL_NAME=meta-llama/Llama-3.2-3B-Instruct
# Optional: custom QA system prompt. If unset, the project default in
# `src/config.py` will be used. Use a single line, or encode newlines as \n.
# Example:
QA_SYSTEM_PROMPT=