# AI_Toolkit — src/modules/admin_panel.py
import streamlit as st
import tracker
from datetime import datetime
import json
def render_admin_sidebar():
    """Render admin-only tools in the sidebar.

    Adds two widgets below a divider:
      1. A toggle that sets ``st.session_state.debug_mode``, which the
         main app reads to decide whether the debug overlay is shown.
      2. A download button for the usage log (only shown when the log
         file reported by ``tracker.get_log_path()`` exists).
    """
    st.divider()
    st.header("πŸ›‘οΈ Admin Console")

    # 1. Debug Toggle — persist the toggle state once instead of
    # branching to assign True/False separately.
    debug_on = st.toggle(
        "🐞 Enable Debug Overlay",
        value=st.session_state.get("debug_mode", False),
    )
    st.session_state.debug_mode = debug_on
    if debug_on:
        st.caption("Showing raw prompts & token counts.")

    # 2. Log Downloader — offered only when a log file is present.
    log_path = tracker.get_log_path()  # assumes a pathlib.Path — TODO confirm tracker's contract
    if log_path.exists():
        # Explicit encoding avoids platform-default decoding surprises
        # (e.g. cp1252 on Windows) that a bare open(path, "r") has.
        log_data = log_path.read_text(encoding="utf-8")
        st.download_button(
            label="πŸ“₯ Download Usage Logs",
            data=log_data,
            file_name=f"usage_log_{datetime.now().strftime('%Y-%m-%d')}.json",
            mime="application/json"
        )
def render_debug_overlay(location="Generic"):
    """
    Show a collapsible expander with the last raw input sent to the LLM.

    Reads ``last_prompt_sent`` and ``last_context_used`` from
    ``st.session_state`` (populated by the main app). Renders nothing
    unless debug mode is enabled and a prompt has been recorded.
    """
    # Guard clauses: bail out early when there is nothing to display.
    if not st.session_state.get("debug_mode", False):
        return

    prompt = st.session_state.get("last_prompt_sent")
    context = st.session_state.get("last_context_used")
    if not prompt:
        return

    with st.expander(f"🐞 Debug: Raw LLM Input ({location})", expanded=False):
        prompt_tab, context_tab = st.tabs(["πŸ“ Prompt", "πŸ“š Context"])
        with prompt_tab:
            st.caption(f"Length: {len(prompt)} chars")
            st.code(prompt, language="text")
        with context_tab:
            if context:
                st.caption("Raw retrieved chunks passed to model:")
                st.text(context)
            else:
                st.info("No context used for this turn.")