NexusInstruments committed on
Commit
e740a63
·
verified ·
1 Parent(s): 97f803c

Update pages/Omnilog.py

Browse files
Files changed (1) hide show
  1. pages/Omnilog.py +87 -46
pages/Omnilog.py CHANGED
@@ -1,10 +1,10 @@
1
  import streamlit as st
2
- import re, hashlib
3
  from utils.file_utils import normalize_log_line, keyword_search
4
  from utils.summarizer import summarize_text
5
 
6
  st.title("📜 Omnilog")
7
- st.write("Log analyzer with normalization, search, and AI summaries")
8
 
9
  # Init counters
10
  if "uploaded_files" not in st.session_state:
@@ -12,47 +12,88 @@ if "uploaded_files" not in st.session_state:
12
  if "errors" not in st.session_state:
13
  st.session_state.errors = []
14
 
15
- uploaded = st.file_uploader("Upload a log file", type=["log", "txt"])
16
- if uploaded:
17
- try:
18
- content = uploaded.read().decode("utf-8", errors="ignore")
19
-
20
- # Track uploaded file
21
- st.session_state.uploaded_files.append(uploaded.name)
22
-
23
- # Normalize
24
- normalized = [normalize_log_line(line) for line in content.splitlines()]
25
-
26
- # Display normalized preview
27
- st.subheader("🧹 Normalized Logs")
28
- st.code("\n".join(normalized[:50]))
29
-
30
- # Stats
31
- st.subheader("📊 Stats")
32
- st.metric("Total Lines", len(normalized))
33
- st.metric("Unique Entries", len(set(normalized)))
34
- st.write("SHA1 Digest:", hashlib.sha1(content.encode()).hexdigest())
35
-
36
- # Keyword search
37
- query = st.text_input("🔍 Search logs")
38
- matches = []
39
- if query:
40
- matches = keyword_search("\n".join(normalized), query)
41
- st.success(f"Found {len(matches)} matches")
42
- st.code("\n".join(matches[:50]))
43
-
44
- # AI summarization
45
- st.subheader("🧠 AI Summary")
46
- summary = summarize_text("\n".join(normalized))
47
- st.write(summary)
48
-
49
- # Save results for Chatbot context
50
- st.session_state.omnilog_output = {
51
- "normalized_preview": "\n".join(normalized[:50]),
52
- "matches": matches[:50],
53
- "summary": summary,
54
- }
55
-
56
- except Exception as e:
57
- st.error(f"⚠️ Error processing log: {e}")
58
- st.session_state.errors.append(str(e))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import hashlib
import os
import re
import time
from collections import deque

import streamlit as st

from utils.file_utils import normalize_log_line, keyword_search
from utils.summarizer import summarize_text
5
 
6
  st.title("📜 Omnilog")
7
+ st.write("Log analyzer with normalization, search, AI summaries, and real-time tailing")
8
 
9
  # Init counters
10
  if "uploaded_files" not in st.session_state:
 
12
  if "errors" not in st.session_state:
13
  st.session_state.errors = []
14
 
15
+ mode = st.radio("Mode", ["Upload Log File", "Live Syslog Tail"])
16
+
17
+ # ─── Upload Mode ──────────────────────────────────────────────
18
+ if mode == "Upload Log File":
19
+ uploaded = st.file_uploader("Upload a log file", type=["log", "txt"])
20
+ if uploaded:
21
+ try:
22
+ content = uploaded.read().decode("utf-8", errors="ignore")
23
+
24
+ # Track upload
25
+ st.session_state.uploaded_files.append(uploaded.name)
26
+
27
+ # Normalize
28
+ normalized = [normalize_log_line(line) for line in content.splitlines()]
29
+
30
+ # Display normalized preview
31
+ st.subheader("🧹 Normalized Logs")
32
+ st.code("\n".join(normalized[:50]))
33
+
34
+ # Stats
35
+ st.subheader("📊 Stats")
36
+ st.metric("Total Lines", len(normalized))
37
+ st.metric("Unique Entries", len(set(normalized)))
38
+ st.write("SHA1 Digest:", hashlib.sha1(content.encode()).hexdigest())
39
+
40
+ # Keyword search
41
+ query = st.text_input("🔍 Search logs")
42
+ matches = []
43
+ if query:
44
+ matches = keyword_search("\n".join(normalized), query)
45
+ st.success(f"Found {len(matches)} matches")
46
+ st.code("\n".join(matches[:50]))
47
+
48
+ # AI summarization
49
+ st.subheader("🧠 AI Summary")
50
+ summary = summarize_text("\n".join(normalized))
51
+ st.write(summary)
52
+
53
+ # Save for Chatbot
54
+ st.session_state.omnilog_output = {
55
+ "normalized_preview": "\n".join(normalized[:50]),
56
+ "matches": matches[:50],
57
+ "summary": summary,
58
+ }
59
+
60
+ except Exception as e:
61
+ st.error(f"⚠️ Error processing log: {e}")
62
+ st.session_state.errors.append(str(e))
63
+
64
+ # ─── Live Tailing Mode ────────────────────────────────────────
65
+ elif mode == "Live Syslog Tail":
66
+ logfile = "/var/log/syslog"
67
+ if not os.path.exists(logfile):
68
+ st.error("⚠️ Syslog not found. This mode only works locally.")
69
+ else:
70
+ st.info(f"Streaming last 50 lines from {logfile} (auto-refreshing)")
71
+
72
+ # Read last 50 lines
73
+ def tail_file(path, n=50):
74
+ with open(path, "r", errors="ignore") as f:
75
+ lines = f.readlines()
76
+ return lines[-n:]
77
+
78
+ # Placeholder container
79
+ placeholder = st.empty()
80
+
81
+ # Live loop
82
+ for _ in range(200): # ~200 refreshes (stop after)
83
+ lines = tail_file(logfile, 50)
84
+ normalized = [normalize_log_line(l) for l in lines]
85
+
86
+ # Highlight critical warnings
87
+ highlighted = []
88
+ for line in normalized:
89
+ if re.search(r"(CRITICAL|FAILED|WARNING)", line, re.IGNORECASE):
90
+ highlighted.append("⚠️ " + line)
91
+ st.session_state.errors.append(line)
92
+ else:
93
+ highlighted.append(line)
94
+
95
+ # Update live view
96
+ with placeholder.container():
97
+ st.code("\n".join(highlighted))
98
+
99
+ time.sleep(2) # refresh every 2s