mwmathis committed on
Commit
6163f38
·
verified ·
1 Parent(s): bddfb9b

fix imports/reqs

Browse files
Files changed (2) hide show
  1. llm_service.py +23 -2
  2. requirements.txt +11 -4
llm_service.py CHANGED
@@ -5,11 +5,29 @@ Supports Hugging Face models including DeepSeek-V3.2.
5
  import os
6
  import logging
7
  from typing import List, Dict, Optional
8
- import torch
9
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
 
 
 
10
 
11
  logger = logging.getLogger(__name__)
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  class LLMService:
14
  """Service for interacting with Hugging Face LLM models"""
15
 
@@ -40,6 +58,9 @@ class LLMService:
40
  def _load_local_model(self):
41
  """Load model locally using transformers"""
42
  try:
 
 
 
43
  logger.info(f"Loading model: {self.model_name}")
44
 
45
  # Check if CUDA is available
 
5
  import os
6
  import logging
7
  from typing import List, Dict, Optional
8
+
9
+ # Conditional imports for local model mode (only needed if not using API)
10
+ torch = None
11
+ AutoTokenizer = None
12
+ AutoModelForCausalLM = None
13
+ pipeline = None
14
 
15
  logger = logging.getLogger(__name__)
16
 
17
+ def _import_local_dependencies():
18
+ """Import torch and transformers only when needed for local model mode"""
19
+ global torch, AutoTokenizer, AutoModelForCausalLM, pipeline
20
+ if torch is None:
21
+ try:
22
+ import torch
23
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
24
+ except ImportError as e:
25
+ raise ImportError(
26
+ "torch and transformers are required for local model mode. "
27
+ "Install them with: pip install torch transformers accelerate sentencepiece\n"
28
+ "Or use API mode by setting USE_HF_API=true"
29
+ ) from e
30
+
31
  class LLMService:
32
  """Service for interacting with Hugging Face LLM models"""
33
 
 
58
  def _load_local_model(self):
59
  """Load model locally using transformers"""
60
  try:
61
+ # Import dependencies if not already imported
62
+ _import_local_dependencies()
63
+
64
  logger.info(f"Loading model: {self.model_name}")
65
 
66
  # Check if CUDA is available
requirements.txt CHANGED
@@ -1,11 +1,18 @@
1
  gradio>=4.0.0
2
  requests>=2.31.0
 
 
 
 
3
  # LLM dependencies for AI-powered ranking
4
- # Using Hugging Face API mode (no local model needed)
5
- # Set HUGGINGFACE_API_TOKEN environment variable in HF Spaces secrets
6
- # Optional: If you want to use local models, uncomment these:
 
 
 
 
7
  # transformers>=4.40.0
8
  # torch>=2.0.0
9
  # accelerate>=0.27.0
10
  # sentencepiece>=0.1.99
11
-
 
1
  gradio>=4.0.0
2
  requests>=2.31.0
3
+ # Standard library imports (no install needed):
4
+ # - re, os, and logging are built-in
5
+ # - typing is built-in in Python 3.5+
6
+
7
  # LLM dependencies for AI-powered ranking
8
+ # For Hugging Face API mode (recommended for HF Spaces):
9
+ # - Only requests is needed (already above)
10
+ # - Set HUGGINGFACE_API_TOKEN environment variable in HF Spaces secrets
11
+ # - Set USE_HF_API=true in secrets
12
+
13
+ # For local model mode (optional, large packages):
14
+ # Uncomment these if you want to run models locally instead of using API:
15
  # transformers>=4.40.0
16
  # torch>=2.0.0
17
  # accelerate>=0.27.0
18
  # sentencepiece>=0.1.99