Sai809701 committed on
Commit
bfcc3bc
·
1 Parent(s): 5d68747

added source in response

Browse files
Files changed (1) hide show
  1. app.py +30 -16
app.py CHANGED
@@ -3,7 +3,7 @@ import json
3
  from fastapi import FastAPI
4
  from pydantic import BaseModel
5
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
6
-
7
  # --- 1. Load Model and Tokenizer ---
8
 
9
  # Define the path to your trained model
@@ -32,26 +32,32 @@ print("Classification pipeline loaded successfully.")
32
 
33
  # --- 2. Load Knowledge Base (Answers) ---
34
 
35
- intent_to_answer_map = {}
 
 
36
  print(f"Loading knowledge base from: {KNOWLEDGE_BASE_PATH}")
37
  try:
38
  with open(KNOWLEDGE_BASE_PATH, 'r', encoding='utf-8') as f:
39
  knowledge_base_data = json.load(f)
40
 
41
- # Create a simple lookup map: Intent -> Answer
42
- # This assumes the answer is the same for all questions of the same intent.
43
- intent_to_answer_map = {
44
- item['intent']: item['answer'] for item in knowledge_base_data
45
- }
46
- print(f"Knowledge base loaded with {len(intent_to_answer_map)} intent-to-answer mappings.")
 
 
 
 
47
 
48
  except FileNotFoundError:
49
  print(f"CRITICAL ERROR: Knowledge base file not found at {KNOWLEDGE_BASE_PATH}")
50
- # In a real app, you might want to exit if the KB can't be loaded
51
  except Exception as e:
52
  print(f"Error loading knowledge base: {e}")
53
 
54
 
 
55
  # --- 3. Initialize FastAPI App ---
56
 
57
  app = FastAPI(
@@ -62,17 +68,21 @@ app = FastAPI(
62
 
63
  # --- 4. Define Request and Response Models ---
64
 
 
 
65
  # This is what the user must send in their POST request
66
  class Query(BaseModel):
67
  text: str
68
 
69
- # This is what the API will return
70
  class PredictionResponse(BaseModel):
71
  query: str
72
  predicted_intent: str
73
  confidence_score: float
74
  answer: str
 
75
 
 
76
  # --- 5. Define API Endpoints ---
77
 
78
  @app.get("/")
@@ -81,7 +91,7 @@ def read_root():
81
  Root endpoint for health check.
82
  """
83
  return {"status": "API is running",
84
- "message": "Post to /predict with a 'text' field to get an intent and answer."}
85
 
86
 
87
  @app.post("/predict", response_model=PredictionResponse)
@@ -96,15 +106,19 @@ def predict_intent(query: Query):
96
  predicted_intent = model_result['label']
97
  confidence_score = model_result['score']
98
 
99
- # 2. Retrieve answer from our knowledge base
100
- fallback_answer = "Could not find a specific answer for this intent. Please rephrase your question or contact a legal professional for advice."
101
-
102
- answer = intent_to_answer_map.get(predicted_intent, fallback_answer)
 
 
103
 
104
  # 3. Return the combined response
105
  return {
106
  "query": query.text,
107
  "predicted_intent": predicted_intent,
108
  "confidence_score": confidence_score,
109
- "answer": answer
 
110
  }
 
 
3
  from fastapi import FastAPI
4
  from pydantic import BaseModel
5
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
6
+ from typing import Optional
7
  # --- 1. Load Model and Tokenizer ---
8
 
9
  # Define the path to your trained model
 
32
 
33
  # --- 2. Load Knowledge Base (Answers) ---
34
 
35
# We will now store a dictionary for each intent
intent_details_map = {}

print(f"Loading knowledge base from: {KNOWLEDGE_BASE_PATH}")
try:
    with open(KNOWLEDGE_BASE_PATH, 'r', encoding='utf-8') as f:
        knowledge_base_data = json.load(f)

    # Map each intent to its {answer, source} details. setdefault keeps the
    # first occurrence, so later duplicates of the same intent are ignored —
    # same behavior as an explicit "if intent not in map" guard.
    for item in knowledge_base_data:
        intent_details_map.setdefault(item['intent'], {
            "answer": item.get('answer', 'No answer found.'),
            "source": item.get('source', 'No source found.'),
        })

    print(f"Knowledge base loaded with {len(intent_details_map)} intent-to-detail mappings.")

except FileNotFoundError:
    # KB file missing: the API still starts, but every lookup will fall back.
    print(f"CRITICAL ERROR: Knowledge base file not found at {KNOWLEDGE_BASE_PATH}")
except Exception as e:
    # Malformed JSON or unexpected schema — log and continue with an empty map.
    print(f"Error loading knowledge base: {e}")
58
 
59
 
60
+
61
  # --- 3. Initialize FastAPI App ---
62
 
63
  app = FastAPI(
 
68
 
69
  # --- 4. Define Request and Response Models ---
70
 
71
+ # This is what the API will return
72
+
73
# This is what the user must send in their POST request
class Query(BaseModel):
    """Request schema for POST /predict: the raw user question text."""
    text: str
76
 
77
# This is what the API will return (now includes 'source')
class PredictionResponse(BaseModel):
    """Response schema for POST /predict.

    Combines the classifier's output with the knowledge-base lookup.
    """
    query: str               # original user text, echoed back
    predicted_intent: str    # label produced by the classification pipeline
    confidence_score: float  # model confidence for the predicted label
    answer: str              # KB answer text, or the fallback message
    # Default to None so the field is genuinely optional: under Pydantic v2,
    # `Optional[str]` with no default is still a *required* field (it may be
    # None but must be supplied), which would reject fallback responses that
    # omit a source.
    source: Optional[str] = None
84
 
85
+
86
  # --- 5. Define API Endpoints ---
87
 
88
  @app.get("/")
 
91
  Root endpoint for health check.
92
  """
93
  return {"status": "API is running",
94
+ "message": "Post to /predict with a 'text' field to get an intent and answer and source."}
95
 
96
 
97
  @app.post("/predict", response_model=PredictionResponse)
 
106
  predicted_intent = model_result['label']
107
  confidence_score = model_result['score']
108
 
109
+ # 2. Retrieve details (answer and source) from our knowledge base
110
+ fallback_details = {
111
+ "answer": "Could not find a specific answer for this intent. Please rephrase your question or contact a legal professional for advice.",
112
+ "source": None
113
+ }
114
+ details = intent_details_map.get(predicted_intent, fallback_details)
115
 
116
  # 3. Return the combined response
117
  return {
118
  "query": query.text,
119
  "predicted_intent": predicted_intent,
120
  "confidence_score": confidence_score,
121
+ "answer": details.get('answer'),
122
+ "source": details.get('source')
123
  }
124
+