NexusInstruments committed on
Commit
36f2b27
·
verified ·
1 Parent(s): 3b7413d

Update pages/Omnilog.py

Browse files
Files changed (1) hide show
  1. pages/Omnilog.py +47 -15
pages/Omnilog.py CHANGED
@@ -1,26 +1,58 @@
1
  import streamlit as st
2
  import re, hashlib
 
 
3
 
4
  st.title("📜 Omnilog")
5
- st.write("Log analyzer module")
 
 
 
 
 
 
6
 
7
  uploaded = st.file_uploader("Upload a log file", type=["log", "txt"])
8
  if uploaded:
9
- content = uploaded.read().decode("utf-8", errors="ignore")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
- noise = re.compile(r"^\w{3}\s+\d+\s+\d{2}:\d{2}:\d{2}\s+[\w\-\.\:]+\s+")
12
- normalized = [noise.sub("", line) for line in content.splitlines()]
 
 
 
 
 
13
 
14
- st.subheader("🧹 Normalized Logs")
15
- st.code("\n".join(normalized[:50]))
 
 
16
 
17
- st.subheader("📊 Stats")
18
- st.metric("Total Lines", len(normalized))
19
- st.metric("Unique Entries", len(set(normalized)))
20
- st.write("SHA1 Digest:", hashlib.sha1(content.encode()).hexdigest())
 
 
21
 
22
- query = st.text_input("🔍 Search logs")
23
- if query:
24
- matches = [line for line in normalized if query.lower() in line.lower()]
25
- st.success(f"Found {len(matches)} matches")
26
- st.code("\n".join(matches[:50]))
 
"""Omnilog — Streamlit page.

Uploads a log file, normalizes each line, shows basic stats and a SHA-1
digest, supports keyword search, produces an AI summary, and stashes the
results in ``st.session_state`` for the Chatbot page to reuse.
"""

import hashlib
import re  # NOTE(review): unused since normalization moved to utils — confirm before removing

import streamlit as st

from utils.file_utils import normalize_log_line, keyword_search
from utils.summarizer import summarize_text

st.title("📜 Omnilog")
st.write("Log analyzer with normalization, search, and AI summaries")

# Init session-state counters (Streamlit reruns this script top-to-bottom
# on every interaction, so state must live in st.session_state).
if "uploaded_files" not in st.session_state:
    st.session_state.uploaded_files = []
if "errors" not in st.session_state:
    st.session_state.errors = []

uploaded = st.file_uploader("Upload a log file", type=["log", "txt"])
if uploaded:
    try:
        # Keep the raw bytes for hashing; decode a working copy for text
        # processing. errors="ignore" is lossy, so the decoded text must
        # NOT be re-encoded for the digest (it would not match the file).
        raw = uploaded.read()
        content = raw.decode("utf-8", errors="ignore")

        # Track uploaded file — guard against duplicates, since this line
        # re-executes on every rerun while the same upload is active.
        if uploaded.name not in st.session_state.uploaded_files:
            st.session_state.uploaded_files.append(uploaded.name)

        # Normalize each line via the shared helper.
        normalized = [normalize_log_line(line) for line in content.splitlines()]

        # Display normalized preview (first 50 lines only).
        st.subheader("🧹 Normalized Logs")
        st.code("\n".join(normalized[:50]))

        # Stats
        st.subheader("📊 Stats")
        st.metric("Total Lines", len(normalized))
        st.metric("Unique Entries", len(set(normalized)))
        # Digest of the actual uploaded bytes (see note on `raw` above).
        st.write("SHA1 Digest:", hashlib.sha1(raw).hexdigest())

        # Keyword search
        query = st.text_input("🔍 Search logs")
        matches = []
        if query:
            matches = keyword_search("\n".join(normalized), query)
            st.success(f"Found {len(matches)} matches")
            st.code("\n".join(matches[:50]))

        # AI summarization
        st.subheader("🧠 AI Summary")
        summary = summarize_text("\n".join(normalized))
        st.write(summary)

        # Save results for Chatbot context (consumed by another page).
        st.session_state.omnilog_output = {
            "normalized_preview": "\n".join(normalized[:50]),
            "matches": matches[:50],
            "summary": summary,
        }

    except Exception as e:
        # Top-level UI boundary: surface the failure to the user and
        # record it instead of letting the page crash.
        st.error(f"⚠️ Error processing log: {e}")
        st.session_state.errors.append(str(e))