# NBA_Analysis / config.py
# shekkari21's picture
# changed base model
# ac32153
"""
Configuration settings for the NBA data analysis project.
"""
import os
from crewai import LLM
# --- NBA data ----------------------------------------------------------------
NBA_DATA_PATH = "nba24-25.csv"  # CSV with the 2024-25 season data

# --- Provider selection ------------------------------------------------------
# Supported values: "openai", "ollama", "litellm", "openrouter", "huggingface".
# Defaults to "huggingface" (free Inference API serving open-source models).
LLM_PROVIDER = os.environ.get("LLM_PROVIDER", "huggingface")

# --- OpenAI (paid) -----------------------------------------------------------
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
OPENAI_MODEL = os.environ.get("OPENAI_MODEL", "gpt-4o")

# --- Ollama (local open-source models) ---------------------------------------
# The /v1 suffix targets Ollama's OpenAI-compatible API.
OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "http://localhost:11434/v1")
# Bare model name; Ollama appends :latest automatically when no tag is given.
OLLAMA_MODEL = os.environ.get("OLLAMA_MODEL", "mistral")

# --- LiteLLM (Hugging Face or other providers) -------------------------------
LITELLM_MODEL = os.environ.get("LITELLM_MODEL", "huggingface/meta-llama/Llama-3.2-3B-Instruct")
# API key is optional; whether one is needed depends on the routed provider.
LITELLM_API_KEY = os.environ.get("LITELLM_API_KEY", "")

# --- Hugging Face Inference API ----------------------------------------------
# Token available from https://huggingface.co/settings/tokens
HF_API_KEY = os.environ.get("HF_API_KEY", "")
HF_MODEL = os.environ.get("HF_MODEL", "Qwen/Qwen2.5-7B-Instruct")

# --- OpenRouter (backup option) ----------------------------------------------
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
OPENROUTER_MODEL = os.environ.get("OPENROUTER_MODEL", "google/gemma-2-2b-it:free")
def get_llm() -> LLM:
    """
    Create and return a CrewAI LLM instance based on the configured provider.

    The provider is chosen via the module-level LLM_PROVIDER constant (read
    from the environment at import time). Recognized values are "ollama",
    "huggingface", "litellm", and "openrouter"; any other value falls
    through to the OpenAI branch.

    Returns:
        LLM: Configured CrewAI LLM instance.

    Raises:
        ValueError: If the API key required by the selected provider is unset.
    """
    if LLM_PROVIDER == "ollama":
        # Local models via Ollama's OpenAI-compatible endpoint. Ollama does
        # no authentication, so the api_key is just a required placeholder.
        # NOTE(review): CrewAI examples typically prefix the model with
        # "ollama/" — confirm the bare model name works with this base_url.
        return LLM(
            model=OLLAMA_MODEL,
            base_url=OLLAMA_BASE_URL,
            api_key="ollama"
        )
    elif LLM_PROVIDER == "huggingface":
        # Hugging Face Inference API — free tier with open-source models.
        if not HF_API_KEY:
            raise ValueError(
                "HF_API_KEY environment variable is not set. "
                "Get a free token from https://huggingface.co/settings/tokens"
            )
        return LLM(
            model=f"huggingface/{HF_MODEL}",
            api_key=HF_API_KEY
        )
    elif LLM_PROVIDER == "litellm":
        # LITELLM_MODEL already carries its LiteLLM provider prefix (e.g.
        # "huggingface/meta-llama/..."), so pass it through unchanged.
        # BUGFIX: the previous f"litellm/{LITELLM_MODEL}" produced an invalid
        # route — "litellm" is not a LiteLLM provider name.
        return LLM(
            model=LITELLM_MODEL,
            api_key=LITELLM_API_KEY if LITELLM_API_KEY else "dummy"
        )
    elif LLM_PROVIDER == "openrouter":
        # OpenRouter — backup option if Hugging Face is unavailable.
        if not OPENROUTER_API_KEY:
            raise ValueError(
                "OPENROUTER_API_KEY environment variable is not set. "
                "Get a free key at https://openrouter.ai"
            )
        return LLM(
            model=f"openrouter/{OPENROUTER_MODEL}",
            api_key=OPENROUTER_API_KEY,
            temperature=0.3  # low temperature for more deterministic output
        )
    else:
        # Default / fallback branch: OpenAI (paid).
        if not OPENAI_API_KEY:
            raise ValueError(
                "OPENAI_API_KEY environment variable is not set. "
                "Please set it using: export OPENAI_API_KEY='your-api-key'"
            )
        return LLM(
            model=OPENAI_MODEL,
            api_key=OPENAI_API_KEY
        )