Deevyankar committed on
Commit
49d922d
·
1 Parent(s): ddeebed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -63
app.py CHANGED
@@ -1,7 +1,5 @@
1
  import os
2
  import subprocess
3
- from typing import List, Dict, Any
4
-
5
  import gradio as gr
6
  import chromadb
7
 
@@ -15,6 +13,8 @@ INDEX = None
15
 
16
 
17
  def get_persist_dir():
 
 
18
  return "/data/chroma" if os.path.exists("/data") else "storage/chroma"
19
 
20
 
@@ -91,36 +91,17 @@ def format_sources(response, max_sources=3):
91
  return output
92
 
93
 
94
- def respond(
95
- message: str,
96
- history: List[Dict[str, Any]],
97
- model_name: str,
98
- temperature: float,
99
- top_k: int,
100
- show_sources: bool,
101
- ):
102
- if history is None:
103
- history = []
104
-
105
- if not message or not message.strip():
106
- return history, ""
107
-
108
  if not os.getenv("OPENAI_API_KEY"):
109
- history = history + [{
110
- "role": "assistant",
111
- "content": "OPENAI_API_KEY missing. Add it in Hugging Face Space secrets."
112
- }]
113
- return history, ""
114
-
115
- history = history + [{"role": "user", "content": message.strip()}]
116
 
117
  try:
118
  index = get_index()
119
- llm = OpenAI(model=model_name, temperature=float(temperature))
120
 
121
  query_engine = index.as_query_engine(
122
  llm=llm,
123
- similarity_top_k=int(top_k),
124
  response_mode="compact"
125
  )
126
 
@@ -135,53 +116,28 @@ def respond(
135
  response = query_engine.query(prompt)
136
  answer = str(response)
137
 
138
- if show_sources:
139
- answer += format_sources(response, max_sources=min(int(top_k), 3))
140
 
141
- except Exception as e:
142
- answer = f"Error: {str(e)}"
143
-
144
- history = history + [{"role": "assistant", "content": answer}]
145
- return history, ""
146
 
147
-
148
- def clear_chat():
149
- return []
150
 
151
 
152
  with gr.Blocks() as demo:
153
- gr.Markdown("# 🧠 Neurology Tutor")
154
  gr.Markdown("Automatic pipeline: PDF extraction → chapter text → vector DB → chatbot")
155
 
156
- chatbot = gr.Chatbot(height=500, type="messages")
157
- msg = gr.Textbox(placeholder="Ask a question...", lines=1)
158
-
159
- with gr.Row():
160
- model_name = gr.Dropdown(
161
- ["gpt-4o-mini", "gpt-4.1-mini"],
162
- value="gpt-4o-mini",
163
- label="Model"
164
  )
165
- temperature = gr.Slider(0.0, 0.8, value=0.2, step=0.1, label="Temperature")
166
-
167
- with gr.Row():
168
- top_k = gr.Slider(1, 5, value=3, step=1, label="Top-K Chunks")
169
- show_sources = gr.Checkbox(value=False, label="Show Sources")
170
-
171
- clear_btn = gr.Button("Clear Chat")
172
-
173
- msg.submit(
174
- respond,
175
- inputs=[msg, chatbot, model_name, temperature, top_k, show_sources],
176
- outputs=[chatbot, msg]
177
  )
178
 
179
- clear_btn.click(
180
- clear_chat,
181
- inputs=[],
182
- outputs=[chatbot]
183
- )
184
-
185
-
186
  if __name__ == "__main__":
187
  demo.launch()
 
1
  import os
2
  import subprocess
 
 
3
  import gradio as gr
4
  import chromadb
5
 
 
13
 
14
 
15
  def get_persist_dir():
16
+ # On free Spaces, /data usually won't exist.
17
+ # If you prebuilt locally and uploaded storage/chroma, this will use that.
18
  return "/data/chroma" if os.path.exists("/data") else "storage/chroma"
19
 
20
 
 
91
  return output
92
 
93
 
94
+ def chat_fn(message, history):
 
 
 
 
 
 
 
 
 
 
 
 
 
95
  if not os.getenv("OPENAI_API_KEY"):
96
+ return "OPENAI_API_KEY missing. Add it in Hugging Face Space secrets."
 
 
 
 
 
 
97
 
98
  try:
99
  index = get_index()
100
+ llm = OpenAI(model="gpt-4o-mini", temperature=0.2)
101
 
102
  query_engine = index.as_query_engine(
103
  llm=llm,
104
+ similarity_top_k=3,
105
  response_mode="compact"
106
  )
107
 
 
116
  response = query_engine.query(prompt)
117
  answer = str(response)
118
 
119
+ # Optional: enable this if you want evidence snippets always appended
120
+ # answer += format_sources(response, max_sources=3)
121
 
122
+ return answer
 
 
 
 
123
 
124
+ except Exception as e:
125
+ return f"Error: {str(e)}"
 
126
 
127
 
128
  with gr.Blocks() as demo:
129
+ gr.Markdown("# 🧠 BrainChat")
130
  gr.Markdown("Automatic pipeline: PDF extraction → chapter text → vector DB → chatbot")
131
 
132
+ gr.ChatInterface(
133
+ fn=chat_fn,
134
+ title="Neurology Tutor",
135
+ description="Ask questions from your uploaded neurology PDFs.",
136
+ textbox=gr.Textbox(
137
+ placeholder="Ask a question...",
138
+ lines=1
 
139
  )
 
 
 
 
 
 
 
 
 
 
 
 
140
  )
141
 
 
 
 
 
 
 
 
142
  if __name__ == "__main__":
143
  demo.launch()