NexusInstruments committed on
Commit
b636b92
·
verified ·
1 Parent(s): 44806de

Update pages/Chatbot.py

Browse files
Files changed (1) hide show
  1. pages/Chatbot.py +33 -13
pages/Chatbot.py CHANGED
@@ -1,32 +1,48 @@
1
  import streamlit as st
 
 
 
 
 
 
 
 
 
2
  from omniscientframework.utils.backend import run_llm
3
 
 
4
  st.title("🧪 Example Page with Chatbot")
5
  st.write("This demo chatbot also ingests Omnieye + Omnilog outputs.")
6
 
7
- # Initialize session state
8
  if "example_chat" not in st.session_state:
9
  st.session_state.example_chat = []
10
 
11
- # Collect context from Omnieye + Omnilog
12
  system_context = []
13
  if "omnieye_output" in st.session_state:
14
- system_context.append(f"Omnieye preview:\n{st.session_state.omnieye_output.get('file_preview', '')}")
15
- if st.session_state.omnieye_output.get("matches"):
16
- system_context.append("Keyword matches:\n" + "\n".join(st.session_state.omnieye_output["matches"]))
 
 
 
 
17
  if "omnilog_output" in st.session_state:
18
- system_context.append(f"Omnilog preview:\n{st.session_state.omnilog_output.get('normalized_preview', '')}")
19
- if st.session_state.omnilog_output.get("matches"):
20
- system_context.append("Log matches:\n" + "\n".join(st.session_state.omnilog_output["matches"]))
 
 
 
21
 
22
- # Display chat history
23
  for msg in st.session_state.example_chat:
24
  with st.chat_message(msg["role"]):
25
  st.markdown(msg["content"])
26
 
27
- # Chat input
28
  if prompt := st.chat_input("Ask the Example Chatbot about files or logs..."):
29
- # Add user message
30
  st.session_state.example_chat.append({"role": "user", "content": prompt})
31
  with st.chat_message("user"):
32
  st.markdown(prompt)
@@ -35,7 +51,11 @@ if prompt := st.chat_input("Ask the Example Chatbot about files or logs..."):
35
  ai_input = "\n\n".join(system_context + [prompt])
36
 
37
  # Generate AI response
38
- with st.chat_message("assistant"):
39
  ai_reply = run_llm(ai_input)
 
 
 
 
40
  st.markdown(ai_reply)
41
- st.session_state.example_chat.append({"role": "assistant", "content": ai_reply})
 
import os
import sys

import streamlit as st

# ─── Ensure omniscientframework package is importable ────────────────
# pages/ lives one level below the app root; the package root is one
# level above that, so prepend it to sys.path before the project import.
ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
PACKAGE_PATH = os.path.abspath(os.path.join(ROOT_PATH, ".."))
if PACKAGE_PATH not in sys.path:
    sys.path.insert(0, PACKAGE_PATH)

# ─── Import project utilities ────────────────────────────────────────
from omniscientframework.utils.backend import run_llm

# ─── Page Setup ─────────────────────────────────────────────────────
st.title("🧪 Example Page with Chatbot")
st.write("This demo chatbot also ingests Omnieye + Omnilog outputs.")

# ─── Initialize Session State ───────────────────────────────────────
if "example_chat" not in st.session_state:
    st.session_state.example_chat = []

# (session_state key, preview key, preview label, matches label)
# Drives _collect_context so the two tool stanzas share one code path.
_CONTEXT_SOURCES = (
    ("omnieye_output", "file_preview", "Omnieye preview:", "Keyword matches:"),
    ("omnilog_output", "normalized_preview", "Omnilog preview:", "Log matches:"),
)


def _collect_context() -> list[str]:
    """Gather tool previews and keyword matches from session state.

    Returns:
        A (possibly empty) list of context strings that are prepended to
        the user's prompt before it is sent to the LLM. A source whose
        session-state entry is missing or ``None`` contributes nothing.
    """
    context: list[str] = []
    for state_key, preview_key, preview_label, matches_label in _CONTEXT_SOURCES:
        output = st.session_state.get(state_key)
        if output is None:  # guard: key may be absent or explicitly None
            continue
        preview = output.get(preview_key, "")
        matches = output.get("matches", [])
        if preview:
            context.append(f"{preview_label}\n{preview}")
        if matches:
            context.append(matches_label + "\n" + "\n".join(matches))
    return context


# ─── Collect context from Omnieye + Omnilog ─────────────────────────
system_context = _collect_context()

# ─── Display Chat History ───────────────────────────────────────────
for msg in st.session_state.example_chat:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# ─── Chat Input ─────────────────────────────────────────────────────
if prompt := st.chat_input("Ask the Example Chatbot about files or logs..."):
    # Record and echo the user's message.
    st.session_state.example_chat.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Prepend tool context so the LLM sees file/log output with the prompt.
    ai_input = "\n\n".join(system_context + [prompt])

    # Generate AI response; surface failures in-chat rather than crashing
    # the page (top-level UI boundary, so the broad catch is deliberate).
    try:
        ai_reply = run_llm(ai_input)
    except Exception as e:
        ai_reply = f"⚠️ Error running LLM: {e}"

    with st.chat_message("assistant"):
        st.markdown(ai_reply)
    st.session_state.example_chat.append({"role": "assistant", "content": ai_reply})