"""Streamlit page: ingest local Ollama conversation blobs and summarize them."""

import glob
import hashlib
import json
import os
import sys

import streamlit as st

# ─── Ensure the project root (the directory containing utils/) is importable ──
# NOTE: the original file had two conflicting sys.path hacks; only this one is
# compatible with the `from utils.summarizer import ...` package-style import.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if PROJECT_ROOT not in sys.path:
    sys.path.append(PROJECT_ROOT)

from utils.summarizer import summarize_text

st.title("🧠 Ollama Conversation History")
st.write("Ingest and digest local Ollama conversation blobs for analysis.")

# Init session state
if "ollama_history" not in st.session_state:
    st.session_state.ollama_history = []

# Default location (adjust if needed)
OLLAMA_HISTORY_DIR = os.path.expanduser("~/.ollama/history")


def load_conversations():
    """Load every ``*.json`` blob in OLLAMA_HISTORY_DIR and summarize it.

    Returns:
        list[dict]: one entry per readable file with keys
        ``file`` (basename), ``sha1`` (digest of the flattened transcript),
        ``preview`` (first 500 chars), and ``summary`` (from summarize_text).
        Files that fail to parse are reported via ``st.error`` and skipped.
    """
    if not os.path.exists(OLLAMA_HISTORY_DIR):
        st.warning(f"No Ollama history found at {OLLAMA_HISTORY_DIR}")
        return []

    conversations = []
    for path in glob.glob(os.path.join(OLLAMA_HISTORY_DIR, "*.json")):
        try:
            # Explicit UTF-8: avoid locale-dependent default decoding;
            # errors="ignore" keeps best-effort behavior for odd blobs.
            with open(path, "r", encoding="utf-8", errors="ignore") as fh:
                data = json.load(fh)

            # Flatten the message list into "ROLE: content" lines.
            # join() instead of repeated += avoids quadratic string building.
            parts = []
            for msg in data.get("messages", []):
                role = msg.get("role", "unknown")
                content = msg.get("content", "")
                parts.append(f"{role.upper()}: {content}\n")
            convo_text = "".join(parts)

            sha1 = hashlib.sha1(convo_text.encode()).hexdigest()
            conversations.append({
                "file": os.path.basename(path),
                "sha1": sha1,
                "preview": convo_text[:500],
                "summary": summarize_text(convo_text),
            })
        except Exception as e:
            # Best-effort ingestion: surface the failure in the UI, keep going.
            st.error(f"⚠️ Error reading {path}: {e}")
    return conversations


# UI
if st.button("📥 Load Ollama Conversations"):
    st.session_state.ollama_history = load_conversations()
    st.success(f"✅ Loaded {len(st.session_state.ollama_history)} conversations.")

if st.session_state.ollama_history:
    for c in st.session_state.ollama_history:
        st.subheader(f"💬 {c['file']} ({c['sha1'][:8]})")
        # A unique key per conversation is required: identical "Preview" labels
        # with no key raise Streamlit's duplicate-element-ID error as soon as
        # a second conversation is rendered.
        st.text_area("Preview", c["preview"], height=150, key=f"preview_{c['sha1']}")
        st.write("🧠 Summary:", c["summary"])