ShanukaB committed on
Commit
3b41f32
·
verified ·
1 Parent(s): f4968dc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -18
app.py CHANGED
@@ -8,27 +8,44 @@ from transformers import pipeline
8
  logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
9
 
10
  app = FastAPI()
 
11
  models = None
12
 
13
  def load_models():
14
  global models
15
  if models is not None:
16
  return
 
17
  logging.info("Loading models...")
18
 
19
- en_vectorizer = joblib.load(hf_hub_download("E-motionAssistant/Englsih_Trained_Model_LR", "tfidf_vectorizer.joblib"))
20
- en_classifier = joblib.load(hf_hub_download("E-motionAssistant/Englsih_Trained_Model_LR", "logreg_model.joblib"))
21
- en_label_encoder = joblib.load(hf_hub_download("E-motionAssistant/Englsih_Trained_Model_LR", "label_encoder.joblib"))
 
 
 
 
22
 
23
- si_vectorizer = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "tfidf_vectorizer.joblib"))
24
- si_classifier = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "logreg_model.joblib"))
25
- si_label_encoder = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "label_encoder.joblib"))
 
26
 
27
- tamil_pipe = pipeline("text-classification", model="E-motionAssistant/Tamil_Emotion_Recognition_Model", device=-1)
 
 
 
 
 
28
 
29
- models = (en_vectorizer, en_classifier, en_label_encoder,
30
- si_vectorizer, si_classifier, si_label_encoder, tamil_pipe)
31
- logging.info("✅ All models loaded.")
 
 
 
 
 
32
 
33
  @app.on_event("startup")
34
  def startup_event():
@@ -36,7 +53,7 @@ def startup_event():
36
 
37
  class PredictRequest(BaseModel):
38
  text: str
39
- language: str # "English", "Sinhala", or "Tamil"
40
 
41
  @app.get("/")
42
  def root():
@@ -44,31 +61,34 @@ def root():
44
 
45
  @app.post("/predict")
46
  def predict(req: PredictRequest):
47
- if not req.text.strip():
48
  return {"error": "Text cannot be empty"}
49
 
50
  en_vec, en_clf, en_le, si_vec, si_clf, si_le, tamil_pipe = models
51
 
52
  try:
53
- if req.language == "English":
54
  X = en_vec.transform([req.text])
55
  pred = en_clf.predict(X)[0]
56
  emotion = en_le.inverse_transform([pred])[0]
57
  return {"emotion": emotion, "language": "English"}
58
 
59
- elif req.language == "Sinhala":
60
  X = si_vec.transform([req.text])
61
  pred = si_clf.predict(X)[0]
62
  emotion = si_le.inverse_transform([pred])[0]
63
  return {"emotion": emotion, "language": "Sinhala"}
64
 
65
- elif req.language == "Tamil":
66
  res = tamil_pipe(req.text)[0]
67
- return {"emotion": res["label"], "confidence": round(res["score"], 3), "language": "Tamil"}
68
-
 
 
 
69
  else:
70
  return {"error": f"Unsupported language: {req.language}"}
71
 
72
  except Exception as e:
73
  logging.error(f"Prediction error: {e}")
74
- return {"error": str(e)}
 
8
  logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
9
 
10
  app = FastAPI()
11
+
12
  models = None
13
 
14
  def load_models():
15
  global models
16
  if models is not None:
17
  return
18
+
19
  logging.info("Loading models...")
20
 
21
+ try:
22
+ # ==================== NEW ENGLISH LR MODEL ====================
23
+ repo_id = "E-motionAssistant/English_LR_Model_New"
24
+
25
+ en_vectorizer = joblib.load(hf_hub_download(repo_id, "tfidf_vectorizer.joblib"))
26
+ en_classifier = joblib.load(hf_hub_download(repo_id, "logreg_model.joblib"))
27
+ en_label_encoder = joblib.load(hf_hub_download(repo_id, "label_encoder.joblib"))
28
 
29
+ # ==================== Sinhala Model (unchanged) ====================
30
+ si_vectorizer = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "tfidf_vectorizer.joblib"))
31
+ si_classifier = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "logreg_model.joblib"))
32
+ si_label_encoder = joblib.load(hf_hub_download("E-motionAssistant/Sinhala_Text_Emotion_Model_LR", "label_encoder.joblib"))
33
 
34
+ # ==================== Tamil Model (unchanged) ====================
35
+ tamil_pipe = pipeline(
36
+ "text-classification",
37
+ model="E-motionAssistant/Tamil_Emotion_Recognition_Model",
38
+ device=-1
39
+ )
40
 
41
+ models = (en_vectorizer, en_classifier, en_label_encoder,
42
+ si_vectorizer, si_classifier, si_label_encoder, tamil_pipe)
43
+
44
+ logging.info("✅ All models loaded successfully.")
45
+
46
+ except Exception as e:
47
+ logging.error(f"❌ Model loading failed: {type(e).__name__} - {e}")
48
+ raise # Important: This will show the real error in HF Space logs
49
 
50
  @app.on_event("startup")
51
  def startup_event():
 
53
 
54
  class PredictRequest(BaseModel):
55
  text: str
56
+ language: str # "English", "Sinhala", or "Tamil"
57
 
58
  @app.get("/")
59
  def root():
 
61
 
62
  @app.post("/predict")
63
  def predict(req: PredictRequest):
64
+ if not req.text or not req.text.strip():
65
  return {"error": "Text cannot be empty"}
66
 
67
  en_vec, en_clf, en_le, si_vec, si_clf, si_le, tamil_pipe = models
68
 
69
  try:
70
+ if req.language.lower() == "english":
71
  X = en_vec.transform([req.text])
72
  pred = en_clf.predict(X)[0]
73
  emotion = en_le.inverse_transform([pred])[0]
74
  return {"emotion": emotion, "language": "English"}
75
 
76
+ elif req.language.lower() == "sinhala":
77
  X = si_vec.transform([req.text])
78
  pred = si_clf.predict(X)[0]
79
  emotion = si_le.inverse_transform([pred])[0]
80
  return {"emotion": emotion, "language": "Sinhala"}
81
 
82
+ elif req.language.lower() == "tamil":
83
  res = tamil_pipe(req.text)[0]
84
+ return {
85
+ "emotion": res["label"],
86
+ "confidence": round(res["score"], 3),
87
+ "language": "Tamil"
88
+ }
89
  else:
90
  return {"error": f"Unsupported language: {req.language}"}
91
 
92
  except Exception as e:
93
  logging.error(f"Prediction error: {e}")
94
+ return {"error": "Prediction failed. Please try again."}