# ProjectEcho — .env.example
# ConversAI Environment Configuration
# Copy this file to .env and fill in your credentials
# ===========================
# LLM Provider Configuration
# ===========================
# Specify which provider to use (openai, anthropic, huggingface, lm_studio)
LLM_PROVIDER=huggingface
# OpenAI Configuration
OPENAI_API_KEY=your_openai_api_key_here
# Optional: Override default model
# LLM_MODEL=gpt-4o-mini
# Anthropic Configuration
ANTHROPIC_API_KEY=your_anthropic_api_key_here
# Optional: Override default model
# LLM_MODEL=claude-3-5-sonnet-20241022
# HuggingFace Configuration
HUGGINGFACE_API_KEY=your_huggingface_api_key_here
# Optional: Override default model
# LLM_MODEL=mistralai/Mixtral-8x7B-Instruct-v0.1
# LM Studio Configuration (for local development)
LM_STUDIO_URL=http://localhost:1234/v1/chat/completions
# LLM_MODEL=your_local_model_name
# ===========================
# Application Settings
# ===========================
# Port for local development (HF Spaces uses 7860 by default)
PORT=7860
# Enable debug mode
DEBUG=false