NexusInstruments committed on
Commit
326ee9c
·
verified ·
1 Parent(s): f2a829a

Update pages/Example.py

Browse files
Files changed (1) hide show
  1. pages/Example.py +17 -3
pages/Example.py CHANGED
@@ -2,26 +2,40 @@ import streamlit as st
2
  from utils.backend import run_llm
3
 
4
  st.title("🧪 Example Page with Chatbot")
5
- st.write("This is a demo page showing how any `/pages/*.py` file becomes a sidebar entry.")
6
 
7
  # Initialize session state
8
  if "example_chat" not in st.session_state:
9
  st.session_state.example_chat = []
10
 
 
 
 
 
 
 
 
 
 
 
 
11
  # Display chat history
12
  for msg in st.session_state.example_chat:
13
  with st.chat_message(msg["role"]):
14
  st.markdown(msg["content"])
15
 
16
  # Chat input
17
- if prompt := st.chat_input("Say something to the Example Chatbot..."):
18
  # Add user message
19
  st.session_state.example_chat.append({"role": "user", "content": prompt})
20
  with st.chat_message("user"):
21
  st.markdown(prompt)
22
 
 
 
 
23
  # Generate AI response
24
  with st.chat_message("assistant"):
25
- ai_reply = run_llm(prompt)
26
  st.markdown(ai_reply)
27
  st.session_state.example_chat.append({"role": "assistant", "content": ai_reply})
 
import streamlit as st

from utils.backend import run_llm

st.title("🧪 Example Page with Chatbot")
st.write("This demo chatbot also ingests Omnieye + Omnilog outputs.")

# Keep the conversation across Streamlit reruns.
if "example_chat" not in st.session_state:
    st.session_state.example_chat = []


def _collect_context():
    """Gather preview/match snippets left in session state by the Omnieye and
    Omnilog pages.

    Returns a list of text snippets (possibly empty). Each tool contributes a
    preview section and, when present, a section listing its matches.
    """
    # (session key, preview field, preview label, matches label) per tool —
    # the two tools store identically-shaped dicts, so one loop handles both.
    sources = (
        ("omnieye_output", "file_preview", "Omnieye preview", "Keyword matches"),
        ("omnilog_output", "normalized_preview", "Omnilog preview", "Log matches"),
    )
    snippets = []
    for key, preview_field, preview_label, matches_label in sources:
        # .get() also skips a key that exists but holds None/empty, which
        # would otherwise crash on .get() attribute access.
        output = st.session_state.get(key)
        if not output:
            continue
        snippets.append(f"{preview_label}:\n{output.get(preview_field, '')}")
        if output.get("matches"):
            snippets.append(matches_label + ":\n" + "\n".join(output["matches"]))
    return snippets


# Context is rebuilt on every rerun so it reflects the latest tool outputs.
system_context = _collect_context()

# Replay every stored message so the transcript survives reruns.
for msg in st.session_state.example_chat:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# A non-empty submission from the chat box triggers one request/response turn.
if prompt := st.chat_input("Ask the Example Chatbot about files or logs..."):
    # Record and echo the user's message (history stores the raw prompt only;
    # the tool context is injected per-request, not persisted).
    st.session_state.example_chat.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Context sections first, user prompt last, blank-line separated.
    ai_input = "\n\n".join(system_context + [prompt])

    # Ask the backend for a reply, render it, and persist it in history.
    with st.chat_message("assistant"):
        ai_reply = run_llm(ai_input)
        st.markdown(ai_reply)
        st.session_state.example_chat.append({"role": "assistant", "content": ai_reply})