Update app.py
Browse files
app.py
CHANGED
|
@@ -291,37 +291,40 @@ def streamlit_app(db):
|
|
| 291 |
convert_sbml_to_antimony(model_file_path, antimony_file_path)
|
| 292 |
|
| 293 |
items = split_biomodels(antimony_file_path)
|
| 294 |
-
if not final_items
|
| 295 |
st.write("No content found in the biomodel.")
|
| 296 |
continue
|
| 297 |
-
|
| 298 |
final_items.extend(items)
|
| 299 |
|
| 300 |
-
|
|
|
|
|
|
|
| 301 |
|
| 302 |
-
st.write("Models have been processed and added to the database.")
|
| 303 |
-
|
| 304 |
-
|
| 305 |
@st.cache_resource
|
| 306 |
def get_messages(db):
|
| 307 |
if "messages" not in st.session_state:
|
| 308 |
st.session_state.messages = []
|
| 309 |
return st.session_state.messages
|
| 310 |
-
|
|
|
|
| 311 |
|
| 312 |
for message in st.session_state.messages:
|
| 313 |
with st.chat_message(message["role"]):
|
| 314 |
st.markdown(message["content"])
|
| 315 |
|
|
|
|
|
|
|
| 316 |
if prompt := st.chat_input(query_text):
|
| 317 |
st.chat_message("user").markdown(prompt)
|
| 318 |
-
st.session_state.messages.append({"role": "user", "content":prompt})
|
| 319 |
response = generate_response(db, query_text, st.session_state)
|
| 320 |
|
| 321 |
with st.chat_message("assistant"):
|
| 322 |
st.markdown(response)
|
| 323 |
|
| 324 |
-
st.session_state.messages.append({"role":"assistant","content":response})
|
|
|
|
| 325 |
|
| 326 |
if __name__ == "__main__":
|
| 327 |
streamlit_app(db)
|
|
|
|
| 291 |
convert_sbml_to_antimony(model_file_path, antimony_file_path)
|
| 292 |
|
| 293 |
items = split_biomodels(antimony_file_path)
|
| 294 |
+
if not items: # Check if 'items' is empty, not 'final_items'
|
| 295 |
st.write("No content found in the biomodel.")
|
| 296 |
continue
|
| 297 |
+
|
| 298 |
final_items.extend(items)
|
| 299 |
|
| 300 |
+
vector_db = create_vector_db(final_items) # Renamed 'db' to avoid overwriting
|
| 301 |
+
|
| 302 |
+
st.write("Models have been processed and added to the database.")
|
| 303 |
|
|
|
|
|
|
|
|
|
|
| 304 |
def get_messages(db):
    """Return the per-session chat history list, creating it on first use.

    NOTE(review): the ``@st.cache_resource`` decorator was removed — it
    memoizes the return value globally (keyed on ``db``), so the first
    session's ``st.session_state.messages`` list would be cached and shared
    with every other user session, leaking chat history across users.
    ``st.session_state`` is already scoped per browser session by Streamlit,
    so no caching is needed here.

    Args:
        db: unused by this function; kept only for signature compatibility
            with existing callers (``st.session_state.messages = get_messages(db)``).

    Returns:
        The mutable list of ``{"role": ..., "content": ...}`` message dicts
        stored in ``st.session_state.messages``.
    """
    if "messages" not in st.session_state:
        st.session_state.messages = []
    return st.session_state.messages
|
| 309 |
+
|
| 310 |
+
st.session_state.messages = get_messages(db)
|
| 311 |
|
| 312 |
for message in st.session_state.messages:
|
| 313 |
with st.chat_message(message["role"]):
|
| 314 |
st.markdown(message["content"])
|
| 315 |
|
| 316 |
+
query_text = st.text_input("Enter your query:") # Initialize 'query_text'
|
| 317 |
+
|
| 318 |
if prompt := st.chat_input(query_text):
|
| 319 |
st.chat_message("user").markdown(prompt)
|
| 320 |
+
st.session_state.messages.append({"role": "user", "content": prompt})
|
| 321 |
response = generate_response(db, query_text, st.session_state)
|
| 322 |
|
| 323 |
with st.chat_message("assistant"):
|
| 324 |
st.markdown(response)
|
| 325 |
|
| 326 |
+
st.session_state.messages.append({"role": "assistant", "content": response})
|
| 327 |
+
|
| 328 |
|
| 329 |
# Script entry point: launch the Streamlit app, passing the module-level
# `db` handle (defined earlier in the file, outside this excerpt).
if __name__ == "__main__":
    streamlit_app(db)
|