Spaces:
Sleeping
Sleeping
| # app.py | |
| import streamlit as st | |
| import asyncio | |
| from pathlib import Path | |
| import tempfile | |
| import time | |
| from typing import Dict, List | |
| import pandas as pd | |
| from core.document_processor import DocumentProcessor | |
| from core.embeddings import DocumentEmbedder | |
| from core.vector_store import FAISSVectorStore | |
| from modules.qa_module import EnhancedQAModule | |
# Page configuration — set_page_config must be the first Streamlit command
# executed on each run, so it stays at the top of the script.
_PAGE_CONFIG = dict(
    page_title="SYNAPTYX - AI Accelerator",
    page_icon="π§ ",
    layout="wide",
    initial_sidebar_state="expanded",
)
st.set_page_config(**_PAGE_CONFIG)
# Custom CSS injected into the page: restyles buttons, alerts, the title
# and "metric card" containers. unsafe_allow_html is required for raw HTML.
_CUSTOM_CSS = """
<style>
    .main {
        background-color: #f5f5f5;
    }
    .stButton>button {
        background-color: #4CAF50;
        color: white;
        border-radius: 5px;
        border: none;
        padding: 10px 24px;
    }
    .stButton>button:hover {
        background-color: #45a049;
    }
    .css-1d391kg {
        padding: 2rem 1rem;
    }
    .stAlert {
        background-color: rgba(255, 255, 255, 0.9);
    }
    .custom-title {
        font-size: 2.5rem;
        font-weight: bold;
        color: #1E3D59;
        text-align: center;
        margin-bottom: 2rem;
    }
    .metric-card {
        background-color: white;
        padding: 1rem;
        border-radius: 10px;
        box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    }
</style>
"""
st.markdown(_CUSTOM_CSS, unsafe_allow_html=True)
# Initialize session state: seed each key only on the first run of a session
# so reruns never clobber accumulated values.
for _key, _default in (
    ("processed_docs", 0),   # number of successfully processed uploads
    ("total_chunks", 0),     # total chunks added to the vector store
    ("demo", None),          # lazily-created SynaptyxDemo instance
    ("history", []),         # list of past Q&A records
):
    if _key not in st.session_state:
        st.session_state[_key] = _default
class SynaptyxDemo:
    """Wires together document processing, embedding, vector search and QA.

    One instance is kept in Streamlit session state and reused across reruns.
    """

    def __init__(self):
        self.embedder = DocumentEmbedder()
        self.vector_store = FAISSVectorStore()
        self.qa_module = EnhancedQAModule()
        self.doc_processor = DocumentProcessor()

    async def process_document(self, file) -> Dict:
        """Persist an uploaded file, chunk/embed it, and add it to the index.

        Args:
            file: Streamlit UploadedFile-like object (exposes .name and
                .getbuffer()).

        Returns:
            On success: {"status": "success", "chunks": int, "metadata": dict}.
            On failure: {"status": "error", "error": str}.
        """
        # BUG FIX: temp_path must exist before the try block. Previously, if
        # NamedTemporaryFile creation failed, the finally clause raised
        # UnboundLocalError and masked the real exception.
        temp_path = None
        try:
            # The processors expect a filesystem path, so spill the upload to
            # a temp file (delete=False: it outlives the context manager).
            with tempfile.NamedTemporaryFile(
                delete=False, suffix=Path(file.name).suffix
            ) as tmp_file:
                tmp_file.write(file.getbuffer())
                temp_path = tmp_file.name

            # Extract text + metadata from the document.
            doc_content = self.doc_processor.process_document(temp_path)

            # Chunk and embed for the vector store.
            chunks, embeddings, metadata = self.embedder.process_documents([{
                "content": doc_content["content"],
                "source": file.name,
            }])
            self.vector_store.add_documents(chunks, embeddings, metadata)

            return {
                "status": "success",
                "chunks": len(chunks),
                "metadata": doc_content["metadata"],
            }
        except Exception as e:
            # Report the failure to the caller (UI) instead of crashing.
            return {"status": "error", "error": str(e)}
        finally:
            # Best-effort cleanup; only attempt it if the temp file was made.
            if temp_path is not None:
                Path(temp_path).unlink(missing_ok=True)

    async def query(self, question: str, k: int = 5) -> Dict:
        """Answer *question* using the top-*k* most similar indexed chunks.

        Args:
            question: Natural-language question about the indexed documents.
            k: Number of chunks to retrieve from the vector store.

        Returns:
            On success: {"status": "success", "answer", "confidence", "sources"}.
            On failure: {"status": "error", "error": str}.
        """
        try:
            relevant_docs = self.vector_store.similarity_search(
                question,
                self.embedder,
                k=k,
            )
            answer = await self.qa_module.process(question, relevant_docs)
            return {
                "status": "success",
                "answer": answer["answer"],
                "confidence": answer["confidence"],
                "sources": answer["sources"],
            }
        except Exception as e:
            return {"status": "error", "error": str(e)}
def main():
    """Render the SYNAPTYX UI: sidebar, document upload, Q&A and history."""
    # Create the heavyweight pipeline object once per browser session.
    if st.session_state.demo is None:
        st.session_state.demo = SynaptyxDemo()

    # BUG FIX: Streamlit re-executes this script on every widget interaction,
    # so previously every uploaded file was re-processed and re-indexed on
    # each rerun — inflating the counters and duplicating vector-store
    # entries. Track filenames already indexed this session and skip them.
    if 'processed_file_names' not in st.session_state:
        st.session_state.processed_file_names = set()

    # Sidebar: branding, simple analytics and retrieval settings.
    with st.sidebar:
        st.image("https://via.placeholder.com/150?text=SYNAPTYX", width=150)
        st.markdown("### π§ SYNAPTYX")
        st.markdown("#### AI Accelerator Platform")
        st.markdown("---")
        st.markdown("### π Analytics")
        st.markdown(f"Documents Processed: {st.session_state.processed_docs}")
        st.markdown(f"Total Chunks: {st.session_state.total_chunks}")
        st.markdown("---")
        st.markdown("### π§ Settings")
        k_value = st.slider("Number of relevant chunks", 1, 10, 5)
        # Clear history button
        if st.button("ποΈ Clear History"):
            st.session_state.history = []
            st.success("History cleared!")

    # Main title.
    st.markdown("<h1 class='custom-title'>π§ SYNAPTYX - Document Analytics</h1>", unsafe_allow_html=True)

    # Document upload section.
    st.markdown("### π Document Upload")
    uploaded_files = st.file_uploader(
        "Upload your documents (PDF, DOCX, or TXT)",
        type=['pdf', 'docx', 'txt'],
        accept_multiple_files=True
    )
    if uploaded_files:
        for file in uploaded_files:
            # Skip files already indexed in this session (see BUG FIX above).
            if file.name in st.session_state.processed_file_names:
                continue
            with st.spinner(f"Processing {file.name}..."):
                result = asyncio.run(st.session_state.demo.process_document(file))
                if result["status"] == "success":
                    st.session_state.processed_file_names.add(file.name)
                    st.session_state.processed_docs += 1
                    st.session_state.total_chunks += result["chunks"]
                    with st.expander(f"π {file.name} - Details"):
                        st.json(result["metadata"])
                    st.success(f"Successfully processed {file.name}")
                else:
                    st.error(f"Error processing {file.name}: {result['error']}")

    # Query section.
    st.markdown("### π¬ Ask Questions")
    query = st.text_input("What would you like to know about your documents?")
    if query:
        with st.spinner("Analyzing..."):
            result = asyncio.run(st.session_state.demo.query(query, k=k_value))
            if result["status"] == "success":
                # Record the exchange for the history section below.
                st.session_state.history.append({
                    "question": query,
                    "answer": result["answer"],
                    "confidence": result["confidence"],
                    "sources": result["sources"],
                    "timestamp": time.strftime("%Y-%m-%d %H:%M:%S")
                })
                # Display answer
                st.markdown("#### Answer")
                st.info(result["answer"])
                col1, col2 = st.columns(2)
                with col1:
                    st.markdown("##### Confidence Score")
                    # Clamp to [0.0, 1.0]: st.progress raises on out-of-range
                    # values, and the QA module's confidence is not guaranteed
                    # to stay in range.
                    st.progress(min(max(float(result["confidence"]), 0.0), 1.0))
                with col2:
                    st.markdown("##### Sources")
                    for source in result["sources"]:
                        st.markdown(f"- {source}")
            else:
                st.error(f"Error: {result['error']}")

    # History section, newest entries first.
    if st.session_state.history:
        st.markdown("### π History")
        for item in reversed(st.session_state.history):
            with st.expander(f"Q: {item['question']} ({item['timestamp']})"):
                st.markdown(f"**Answer:** {item['answer']}")
                st.markdown(f"**Confidence:** {item['confidence']:.2f}")
                st.markdown("**Sources:**")
                for source in item['sources']:
                    st.markdown(f"- {source}")
# Script entry point: run the UI when executed directly (e.g. `streamlit run app.py`).
if __name__ == "__main__":
    main()