Sp2503 committed on
Commit
0656339
·
verified ·
1 Parent(s): 8e8e88c

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +8 -13
main.py CHANGED
@@ -8,8 +8,8 @@ from transformers import AutoTokenizer, AutoModelForSequenceClassification
8
  from typing import Dict, List, Optional
9
 
10
  # --- Configuration ---
11
- # ** FIX: Hardcoding the correct model name from the URL to bypass any issues with secrets. **
12
- MODEL_NAME = "Sp2503/Bertmodel"
13
 
14
  # The MongoDB URI is loaded from Hugging Face Space secrets for security
15
  MONGO_URI = os.getenv("MONGO_URI")
@@ -54,17 +54,12 @@ def startup_event():
54
  return
55
 
56
  try:
57
- cache_dir = "/tmp"
58
- tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir=cache_dir)
59
- num_labels = len(intent_map)
60
- model = AutoModelForSequenceClassification.from_pretrained(
61
- MODEL_NAME,
62
- num_labels=num_labels,
63
- cache_dir=cache_dir
64
- )
65
- print(f"✅ Model '{MODEL_NAME}' and tokenizer loaded successfully.")
66
  except Exception as e:
67
- print(f"❌ Critical Error loading Hugging Face model: {e}")
68
  model = None
69
 
70
  try:
@@ -93,7 +88,7 @@ def get_legal_solution(request: QueryRequest):
93
  logits = model(**inputs).logits
94
 
95
  prediction_id = torch.argmax(logits, dim=1).item()
96
- predicted_intent = intent_map.get(prediction_id, "Unknown Intent")
97
 
98
  document = collection.find_one({"intent": predicted_intent})
99
  solution = document["answer"] if document and "answer" in document else "No specific solution was found for this topic."
 
8
  from typing import Dict, List, Optional
9
 
10
  # --- Configuration ---
11
+ # ** FIX: Loading the model from a local directory within the Space **
12
+ LOCAL_MODEL_PATH = "./"
13
 
14
  # The MongoDB URI is loaded from Hugging Face Space secrets for security
15
  MONGO_URI = os.getenv("MONGO_URI")
 
54
  return
55
 
56
  try:
57
+ # Load the model and tokenizer from the LOCAL directory
58
+ tokenizer = AutoTokenizer.from_pretrained(LOCAL_MODEL_PATH)
59
+ model = AutoModelForSequenceClassification.from_pretrained(LOCAL_MODEL_PATH)
60
+ print(f"✅ Model and tokenizer loaded successfully from '{LOCAL_MODEL_PATH}'.")
 
 
 
 
 
61
  except Exception as e:
62
+ print(f"❌ Critical Error loading model from local directory: {e}")
63
  model = None
64
 
65
  try:
 
88
  logits = model(**inputs).logits
89
 
90
  prediction_id = torch.argmax(logits, dim=1).item()
91
+ predicted_intent = model.config.id2label.get(prediction_id, "Unknown Intent")
92
 
93
  document = collection.find_one({"intent": predicted_intent})
94
  solution = document["answer"] if document and "answer" in document else "No specific solution was found for this topic."