Satyam0077 committed on
Commit
ccb623b
·
verified ·
1 Parent(s): f06f03c

Update src/inference.py

Browse files
Files changed (1) hide show
  1. src/inference.py +55 -31
src/inference.py CHANGED
@@ -5,41 +5,65 @@ import scipy.sparse
5
  from textblob import TextBlob
6
 
7
  from src.preprocessing import clean_text
8
- from src.features import create_features
9
  from src.model import load_model
10
  from src.entity_extraction import extract_entities
11
 
12
# Resolve the models/ directory that sits at the repository root,
# one level above this source file.
BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "models"))


def _model_path(filename):
    # Absolute path of *filename* inside the repo-root models/ directory.
    return os.path.join(BASE_PATH, filename)


# Deserialize both classifiers and the fitted TF-IDF vectorizer at import time.
model_issue = load_model(_model_path("model_issue_type.pkl"))
model_urgency = load_model(_model_path("model_urgency_level.pkl"))
tfidf = joblib.load(_model_path("tfidf.pkl"))
 
 
 
 
 
 
19
 
20
def predict_ticket(ticket_text):
    """Classify a support ticket and extract its named entities.

    Args:
        ticket_text: Raw ticket text as entered by the user.

    Returns:
        dict with keys "issue_type", "urgency_level" and "entities".
    """
    normalized = clean_text(ticket_text)

    # Dense side features: word count and TextBlob polarity.
    # A single (1, 2) array hstacks identically to two (1, 1) arrays.
    word_count = len(normalized.split())
    polarity = TextBlob(normalized).sentiment.polarity
    dense = np.array([[word_count, polarity]])

    # Sparse TF-IDF vector followed by the dense features, matching
    # the column order the models were trained on.
    features = scipy.sparse.hstack([tfidf.transform([normalized]), dense])

    return {
        "issue_type": model_issue.predict(features)[0],
        "urgency_level": model_urgency.predict(features)[0],
        # Entities are extracted from the original, uncleaned text.
        "entities": extract_entities(ticket_text),
    }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  from textblob import TextBlob
6
 
7
  from src.preprocessing import clean_text
 
8
  from src.model import load_model
9
  from src.entity_extraction import extract_entities
10
 
11
import logging

logger = logging.getLogger(__name__)

# models/ directory at the repository root (one level above src/).
BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "models"))

# Load the two classifiers and the TF-IDF vectorizer eagerly at import time.
# On any failure we fall back to None sentinels so predict_ticket() can
# degrade gracefully instead of crashing the whole app at startup.
try:
    model_issue = load_model(os.path.join(BASE_PATH, "model_issue_type.pkl"))
    model_urgency = load_model(os.path.join(BASE_PATH, "model_urgency_level.pkl"))
    tfidf = joblib.load(os.path.join(BASE_PATH, "tfidf.pkl"))
except Exception:  # broad by design: a load failure must not kill the import
    # logger.exception records the full traceback, unlike the previous
    # print() to stdout, which lost it and bypassed log routing.
    logger.exception("Error loading models or vectorizer from %s", BASE_PATH)
    model_issue = None
    model_urgency = None
    tfidf = None
24
 
25
def predict_ticket(ticket_text):
    """Predict issue type and urgency for a raw ticket, plus entities.

    Args:
        ticket_text: Raw ticket text as entered by the user.

    Returns:
        dict with keys "issue_type", "urgency_level" and "entities".
        On failure the first two hold a diagnostic string and
        "entities" is an empty dict, so callers always receive the
        same response shape.
    """
    # Explicit None checks instead of `not all([...])`: truthiness is
    # unreliable for model objects (anything defining __len__/__bool__
    # could be falsy while correctly loaded).
    if model_issue is None or model_urgency is None or tfidf is None:
        return {
            "issue_type": "Model not loaded",
            "urgency_level": "Model not loaded",
            "entities": {}
        }
    try:
        # Normalize the text the same way the training pipeline did.
        clean = clean_text(ticket_text)

        # Sparse TF-IDF representation of the cleaned ticket.
        X_tfidf = tfidf.transform([clean])

        # Hand-crafted dense features: word count and sentiment polarity.
        ticket_length = len(clean.split())
        sentiment = TextBlob(clean).sentiment.polarity

        # Models were trained on [tfidf | length | sentiment]; keep order.
        X_features = scipy.sparse.hstack([
            X_tfidf,
            np.array([[ticket_length]]),
            np.array([[sentiment]])
        ])

        issue_pred = model_issue.predict(X_features)[0]
        urgency_pred = model_urgency.predict(X_features)[0]

        # Entities come from the *original* text so casing/punctuation
        # cues survive for the extractor.
        entities = extract_entities(ticket_text)

        return {
            "issue_type": issue_pred,
            "urgency_level": urgency_pred,
            "entities": entities
        }
    except Exception as e:
        # Best-effort boundary: report the error inside the response
        # shape instead of propagating, so API callers still get a
        # well-formed payload for debugging.
        return {
            "issue_type": f"Prediction error: {str(e)}",
            "urgency_level": f"Prediction error: {str(e)}",
            "entities": {}
        }