shekkari21 committed on
Commit
ece54a0
·
1 Parent(s): fea19b5

added hugging face inference

Browse files
Files changed (1) hide show
  1. config.py +28 -21
config.py CHANGED
@@ -8,9 +8,9 @@ from crewai import LLM
8
  NBA_DATA_PATH = "nba24-25.csv"
9
 
10
  # LLM Configuration - Choose your provider
11
- # Options: "openai", "ollama", "litellm", "openrouter"
12
- # Default to openrouter for free tier access
13
- LLM_PROVIDER = os.getenv("LLM_PROVIDER", "openrouter") # Default to free tier
14
 
15
  # OpenAI Configuration
16
  OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
@@ -24,11 +24,13 @@ OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3.2") # Options: llama3.2, mistr
24
  LITELLM_MODEL = os.getenv("LITELLM_MODEL", "huggingface/meta-llama/Llama-3.2-3B-Instruct")
25
  LITELLM_API_KEY = os.getenv("LITELLM_API_KEY", "") # Optional, depends on provider
26
 
27
- # OpenRouter Configuration (access to many open-source models)
 
 
 
 
28
  OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
29
- # Try different free models if one is rate-limited
30
- OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "google/gemma-2-2b-it:free") # Alternative free model
31
- # Other free options: "meta-llama/llama-3.2-3b-instruct:free", "mistralai/mistral-7b-instruct:free"
32
 
33
 
34
  def get_llm() -> LLM:
@@ -42,42 +44,47 @@ def get_llm() -> LLM:
42
  ValueError: If required configuration is not set
43
  """
44
  if LLM_PROVIDER == "ollama":
45
- # Option 1: Ollama (local open-source models)
46
- # Install: brew install ollama (Mac) or see https://ollama.ai
47
- # Run: ollama pull llama3.2
48
  return LLM(
49
  model=OLLAMA_MODEL,
50
  base_url=OLLAMA_BASE_URL,
51
- api_key="ollama" # Ollama doesn't require a real API key, but CrewAI needs something
 
 
 
 
 
 
 
 
 
 
 
 
52
  )
53
 
54
  elif LLM_PROVIDER == "litellm":
55
- # Option 2: LiteLLM (supports Hugging Face, Together AI, etc.)
56
- # Format: huggingface/model-name or together_ai/model-name
57
  return LLM(
58
  model=f"litellm/{LITELLM_MODEL}",
59
- api_key=LITELLM_API_KEY if LITELLM_API_KEY else "dummy" # Some providers don't need keys
60
  )
61
 
62
  elif LLM_PROVIDER == "openrouter":
63
- # Option 3: OpenRouter (access to many open-source models)
64
- # Free tier available: https://openrouter.ai
65
- # Note: For Hugging Face Spaces, set OPENROUTER_API_KEY as a secret
66
  if not OPENROUTER_API_KEY:
67
  raise ValueError(
68
  "OPENROUTER_API_KEY environment variable is not set. "
69
  "Get a free key at https://openrouter.ai"
70
  )
71
- # CrewAI uses LiteLLM internally, so we need to format as openrouter/model-name
72
- # Using gemma-2-2b as default (less likely to be rate-limited than llama-3.2)
73
  return LLM(
74
  model=f"openrouter/{OPENROUTER_MODEL}",
75
  api_key=OPENROUTER_API_KEY,
76
- temperature=0.3 # Add temperature for better responses
77
  )
78
 
79
  else:
80
- # Default: OpenAI (original configuration)
81
  if not OPENAI_API_KEY:
82
  raise ValueError(
83
  "OPENAI_API_KEY environment variable is not set. "
 
8
  NBA_DATA_PATH = "nba24-25.csv"
9
 
10
  # LLM Configuration - Choose your provider
11
+ # Options: "openai", "ollama", "litellm", "openrouter", "huggingface"
12
+ # Default to huggingface for best open-source model
13
+ LLM_PROVIDER = os.getenv("LLM_PROVIDER", "huggingface")
14
 
15
  # OpenAI Configuration
16
  OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 
24
  LITELLM_MODEL = os.getenv("LITELLM_MODEL", "huggingface/meta-llama/Llama-3.2-3B-Instruct")
25
  LITELLM_API_KEY = os.getenv("LITELLM_API_KEY", "") # Optional, depends on provider
26
 
27
+ # Hugging Face Configuration (for using HF Inference API)
28
+ HF_API_KEY = os.getenv("HF_API_KEY", "") # Get from https://huggingface.co/settings/tokens
29
+ HF_MODEL = os.getenv("HF_MODEL", "meta-llama/Llama-3.1-8B-Instruct") # Best open-source model (8B params)
30
+
31
+ # OpenRouter Configuration (backup option)
32
  OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
33
+ OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "google/gemma-2-2b-it:free")
 
 
34
 
35
 
36
  def get_llm() -> LLM:
 
44
  ValueError: If required configuration is not set
45
  """
46
  if LLM_PROVIDER == "ollama":
47
+ # Ollama (local models only, not for cloud deployment)
 
 
48
  return LLM(
49
  model=OLLAMA_MODEL,
50
  base_url=OLLAMA_BASE_URL,
51
+ api_key="ollama"
52
+ )
53
+
54
+ elif LLM_PROVIDER == "huggingface":
55
+ # Hugging Face Inference API - Best open-source models
56
+ if not HF_API_KEY:
57
+ raise ValueError(
58
+ "HF_API_KEY environment variable is not set. "
59
+ "Get a free token from https://huggingface.co/settings/tokens"
60
+ )
61
+ return LLM(
62
+ model=f"huggingface/{HF_MODEL}",
63
+ api_key=HF_API_KEY
64
  )
65
 
66
  elif LLM_PROVIDER == "litellm":
67
+ # LiteLLM (alternative provider)
 
68
  return LLM(
69
  model=f"litellm/{LITELLM_MODEL}",
70
+ api_key=LITELLM_API_KEY if LITELLM_API_KEY else "dummy"
71
  )
72
 
73
  elif LLM_PROVIDER == "openrouter":
74
+ # OpenRouter (backup option if HF is unavailable)
 
 
75
  if not OPENROUTER_API_KEY:
76
  raise ValueError(
77
  "OPENROUTER_API_KEY environment variable is not set. "
78
  "Get a free key at https://openrouter.ai"
79
  )
 
 
80
  return LLM(
81
  model=f"openrouter/{OPENROUTER_MODEL}",
82
  api_key=OPENROUTER_API_KEY,
83
+ temperature=0.3
84
  )
85
 
86
  else:
87
+ # OpenAI (paid option)
88
  if not OPENAI_API_KEY:
89
  raise ValueError(
90
  "OPENAI_API_KEY environment variable is not set. "