NexusInstruments committed on
Commit
3b7413d
·
verified ·
1 Parent(s): 740a6e1

Delete pages/pages

Browse files
Files changed (1) hide show
  1. pages/pages/BulkDigest.py +0 -75
pages/pages/BulkDigest.py DELETED
@@ -1,75 +0,0 @@
1
- import streamlit as st
2
- import os, hashlib, re
3
- from utils.file_utils import normalize_log_line
4
- from utils.summarizer import summarize_text
5
- from utils.docgen import generate_doc
6
-
7
# Page header: title plus a one-line description of what this page does.
st.title("📂 AI Bulk Digest")
st.write(
    "Upload multiple files for batch digestion, summarization, and documentation."
)
-
10
# Seed the session-state keys this page relies on, without clobbering
# values that survive from a previous Streamlit rerun.
_defaults = {
    "uploaded_files": [],
    "errors": [],
    "bulk_digests": [],
}
for _key, _empty in _defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _empty
-
18
# Multi-file upload widget; accepts scripts, logs, plain text, and PDFs.
_accepted_types = ["py", "sh", "txt", "log", "pdf"]
uploads = st.file_uploader(
    "Upload files (scripts, logs, text, PDFs)",
    type=_accepted_types,
    accept_multiple_files=True,
)
23
-
24
if uploads:
    # Process every uploaded file: hash it, build a short preview,
    # summarize it, optionally generate script documentation, and render
    # the results. Per-file failures are reported but do not abort the batch.
    digests = []
    for f in uploads:
        try:
            # Read the raw bytes ONCE and hash those bytes directly.
            # Hashing after a lossy decode(errors="ignore") -> encode()
            # round-trip would produce a digest that does not match the
            # actual uploaded file (e.g. PDFs or any non-UTF-8 content).
            raw = f.read()
            sha1 = hashlib.sha1(raw).hexdigest()
            content = raw.decode("utf-8", errors="ignore")

            # Classify by extension to pick the processing path.
            is_log = f.name.endswith(".log")
            is_script = f.name.endswith((".py", ".sh"))

            # Preview: first 30 lines, with log lines normalized first.
            if is_log:
                normalized = [normalize_log_line(line) for line in content.splitlines()]
                preview = "\n".join(normalized[:30])
            else:
                preview = "\n".join(content.splitlines()[:30])

            # AI summary of the full decoded content.
            summary = summarize_text(content)

            # Generate documentation only for scripts.
            doc = generate_doc(f.name, "uploaded", content) if is_script else None

            result = {
                "name": f.name,
                "sha1": sha1,
                "preview": preview,
                "summary": summary,
                "doc": doc,
            }
            digests.append(result)

            # Track the upload once; Streamlit reruns this whole script on
            # every interaction, so an unconditional append would duplicate
            # the same filename on each rerun.
            if f.name not in st.session_state.uploaded_files:
                st.session_state.uploaded_files.append(f.name)

            # Render this file's results.
            st.subheader(f"📄 {f.name}")
            st.code(preview)
            st.markdown(f"**SHA1:** `{sha1}`")
            st.write("🧠 **Summary:**", summary)
            if doc:
                st.write("📘 **Generated Documentation:**")
                st.markdown(doc)

        except Exception as e:
            # Best-effort batch: surface the error and keep going with the
            # remaining files.
            st.error(f"⚠️ Error processing {f.name}: {e}")
            st.session_state.errors.append(str(e))

    # Save to session state for Chatbot
    st.session_state.bulk_digests = digests
    st.success(f"✅ Bulk digestion complete for {len(digests)} files.")