LeemahLee commited on
Commit
7964c3f
·
verified ·
1 Parent(s): 9a9a9f8

Trying out another approach for the chatbot

Browse files
Files changed (1) hide show
  1. app.py +40 -8
app.py CHANGED
@@ -80,29 +80,61 @@ qa_chain = RetrievalQA.from_chain_type(
80
  avatar_img = "images/UH.png" # Avatar shown beside bot messages
81
  logo = "images/UH Crest.png" # Crest image
82
 
83
- # === Chat Logic with Course Memory ===
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
84
def chat_with_bot(message, history, course_state):
    """Answer a user message, remembering which course is being discussed.

    Args:
        message: The user's chat message.
        history: List of (user, bot) message pairs shown in the chat UI;
            mutated in place and also returned.
        course_state: Course name remembered from earlier turns, or a falsy
            value when no course has been mentioned yet.

    Returns:
        Tuple of ("", history, course_state): the cleared textbox value, the
        updated chat history, and the (possibly updated) course memory.
    """
    lower_msg = message.lower()
    switch_phrase = "change course to"

    # Check for an explicit course switch FIRST: a message such as
    # "change course to MSc Data Science" also contains "msc", so testing
    # the generic "msc" detector first would make this branch unreachable.
    if switch_phrase in lower_msg:
        # Remove the phrase case-insensitively; a plain str.replace on the
        # original message would miss e.g. "Change course to ..." even
        # though the lowercase detection test matched.
        start = lower_msg.index(switch_phrase)
        course_state = (message[:start] + message[start + len(switch_phrase):]).strip()
        response = f"πŸ” Course changed. Now answering based on: **{course_state}**"
    elif "msc" in lower_msg or "course:" in lower_msg:
        # First mention of a course: remember the whole message for later turns.
        course_state = message.strip()
        response = f"βœ… Got it! You're now asking about: **{course_state}**"
    elif course_state:
        full_query = f"For the course '{course_state}': {message}"
        raw_output = qa_chain.run(full_query)
        # Keep only the text after the final "Answer:" marker.
        response = raw_output.split("Answer:")[-1].strip()
        # Clean LLM artifacts
        response = response.replace("<|assistant|>", "").strip()
    else:
        response = "⚠️ Please first mention the course you're asking about (e.g., 'MSc Data Science')."

    history.append((message, response))
    return "", history, course_state
105
 
 
 
 
106
  # === Build Gradio UI ===
107
  initial_message = (
108
  "πŸ‘‹ Welcome! I'm your Assistant for the University of Hertfordshire.\n"
 
80
  avatar_img = "images/UH.png" # Avatar shown beside bot messages
81
  logo = "images/UH Crest.png" # Crest image
82
 
83
+ # # === Chat Logic with Course Memory ===
84
+ # def chat_with_bot(message, history, course_state):
85
+ # lower_msg = message.lower()
86
+
87
+ # if "msc" in lower_msg or "course:" in lower_msg:
88
+ # course_state = message.strip()
89
+ # response = f"βœ… Got it! You're now asking about: **{course_state}**"
90
+ # elif "change course to" in lower_msg:
91
+ # course_state = message.replace("change course to", "").strip()
92
+ # response = f"πŸ” Course changed. Now answering based on: **{course_state}**"
93
+ # elif course_state:
94
+ # full_query = f"For the course '{course_state}': {message}"
95
+ # raw_output = qa_chain.run(full_query)
96
+ # response = raw_output.split("Answer:")[-1].strip()
97
+
98
+ # # Clean LLM artifacts
99
+ # response = response.replace("<|assistant|>", "").strip()
100
+ # else:
101
+ # response = "⚠️ Please first mention the course you're asking about (e.g., 'MSc Data Science')."
102
+
103
+ # history.append((message, response))
104
+ # return "", history, course_state
105
+
106
+
107
+
108
def chat_with_bot(message, history, course_state):
    """Answer a user message with course memory, routing through the QA chain.

    Args:
        message: The user's chat message.
        history: List of (user, bot) message pairs shown in the chat UI;
            mutated in place and also returned.
        course_state: Course name remembered from earlier turns, or a falsy
            value when no course has been mentioned yet.

    Returns:
        Tuple of ("", history, course_state): the cleared textbox value, the
        updated chat history, and the (possibly updated) course memory.
    """
    lower_msg = message.lower()
    switch_phrase = "change course to"

    # Handle an explicit course switch FIRST: a message such as
    # "change course to MSc Data Science" also contains "msc", so testing
    # the generic "msc" detector first would make this branch unreachable.
    if switch_phrase in lower_msg:
        # Remove the phrase case-insensitively; a plain str.replace on the
        # original message would miss e.g. "Change course to ..." even
        # though the lowercase detection test matched.
        start = lower_msg.index(switch_phrase)
        course_state = (message[:start] + message[start + len(switch_phrase):]).strip()
        response = f"πŸ” Course changed. Now answering based on: **{course_state}**"
        history.append((message, response))
        return "", history, course_state
    elif "msc" in lower_msg:
        # Try to detect course from first question
        course_state = message.strip()  # Store it for later use
        full_query = f"For the course '{course_state}': {message}"
    elif course_state:
        full_query = f"For the course '{course_state}': {message}"
    else:
        full_query = message  # No course memory yet

    try:
        raw_output = qa_chain.run(full_query)
        # Keep only the text after the final "Answer:" marker and strip
        # chat-template artifacts the model sometimes emits.
        response = raw_output.split("Answer:")[-1].strip()
        response = response.replace("<|assistant|>", "").strip()
    except Exception as e:  # surface retrieval/LLM failures to the user
        response = f"⚠️ An error occurred: {str(e)}"

    history.append((message, response))
    return "", history, course_state
134
 
135
+
136
+
137
+
138
  # === Build Gradio UI ===
139
  initial_message = (
140
  "πŸ‘‹ Welcome! I'm your Assistant for the University of Hertfordshire.\n"