Update app.py
app.py CHANGED
@@ -87,7 +87,7 @@ def get_conversational_chain(api_key):
 
     Answer:
     """
-    model = ChatGoogleGenerativeAI(model="gemini-
+    model = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.3, google_api_key=api_key)
     prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question", "chat_history"])
     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
     return chain
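For orientation, the changed function now resolves to roughly the following. This is a minimal sketch, assuming the langchain-google-genai integration package and the classic load_qa_chain API; the app's actual prompt template is elided in the diff, so a placeholder body stands in for it.

from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

def get_conversational_chain(api_key):
    # Placeholder template; the real one is not shown in this hunk.
    prompt_template = """Context: {context}
Chat history: {chat_history}
Question: {question}

Answer:
"""
    model = ChatGoogleGenerativeAI(
        model="gemini-2.0-flash",  # model name pinned by this commit
        temperature=0.3,           # low temperature keeps answers close to the context
        google_api_key=api_key,
    )
    prompt = PromptTemplate(
        template=prompt_template,
        input_variables=["context", "question", "chat_history"],
    )
    # "stuff" packs all retrieved chunks into a single prompt
    return load_qa_chain(model, chain_type="stuff", prompt=prompt)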
@@ -120,7 +120,7 @@ def get_response(user_question, api_key, chat_history):
 def main():
     st.set_page_config(
         page_title="Antimicrobial Pharmacology Chatbot",
-        page_icon="",
+        page_icon="💊",
         layout="wide"
     )
 
@@ -151,7 +151,7 @@ def main():
         if st.button("Continue", type="primary") and user_api_key:
             st.session_state["user_api_key"] = user_api_key
             st.session_state["api_entered"] = True
-            st.
+            st.rerun()
         st.stop()
     else:
         st.session_state["user_api_key"] = api_key
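The hunk above implements an API-key gate: store the key in session state, trigger an immediate re-run so the main code path picks it up, and otherwise halt rendering with st.stop(). A self-contained sketch of that pattern follows, with widget labels assumed since they fall outside the hunk.

import streamlit as st

if "user_api_key" not in st.session_state:
    # Label and layout are illustrative; the app's own widgets are not in this hunk.
    user_api_key = st.text_input("Enter your Google API key", type="password")
    if st.button("Continue", type="primary") and user_api_key:
        st.session_state["user_api_key"] = user_api_key
        st.rerun()   # re-run immediately so the branch below executes
    st.stop()        # nothing past this line renders until a key is stored

st.write("API key captured; the rest of the app runs from here.")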
@@ -176,7 +176,7 @@ def main():
                 get_vector_store(text_chunks, api_key)
                 st.session_state["pdf_processed"] = True
                 st.success("PDF processed successfully. You can now ask questions.")
-                st.
+                st.rerun()
             except FileNotFoundError as e:
                 st.error(str(e))
                 st.stop()
@@ -192,11 +192,11 @@ def main():
 
         if st.button("Reprocess PDF"):
             st.session_state["pdf_processed"] = False
-            st.
+            st.rerun()
 
         if st.button("Clear Chat History"):
             st.session_state["messages"] = []
-            st.
+            st.rerun()
 
         st.markdown("---")
         st.subheader("Quick Actions")
@@ -237,7 +237,7 @@ def main():
             with st.spinner("Generating question..."):
                 response = get_response(quick_question, api_key, st.session_state["messages"])
                 st.session_state["messages"].append({"role": "assistant", "content": response})
-                st.
+                st.rerun()
 
     with col2:
         if st.button("Summarize Topics", use_container_width=True):
@@ -246,7 +246,7 @@ def main():
             with st.spinner("Analyzing..."):
                 response = get_response(quick_question, api_key, st.session_state["messages"])
                 st.session_state["messages"].append({"role": "assistant", "content": response})
-                st.
+                st.rerun()
 
     with col3:
         if st.button("How can you help?", use_container_width=True):
@@ -255,7 +255,7 @@ def main():
             with st.spinner("Processing..."):
                 response = get_response(quick_question, api_key, st.session_state["messages"])
                 st.session_state["messages"].append({"role": "assistant", "content": response})
-                st.
+                st.rerun()
 
     # Chat input
     if user_question := st.chat_input("Ask a question or answer an MCQ..."):
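Every hunk after the first applies the same one-line fix: after mutating st.session_state, the truncated call is completed as st.rerun(), the stable name (Streamlit ≥ 1.27, superseding st.experimental_rerun()) for restarting the script from the top. Without it, a button's state change would only become visible on the next user interaction. A minimal sketch of the pattern, with illustrative labels:

import streamlit as st

if "messages" not in st.session_state:
    st.session_state["messages"] = []

# Render the history that exists at the start of this run.
for message in st.session_state["messages"]:
    st.write(message)

if st.button("Add message"):
    st.session_state["messages"].append("hello")
    st.rerun()  # restart the script now so the loop above shows the new entry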