# app.py
import streamlit as st
import os
from pathlib import Path
import tempfile
# Import local modules
from components.notebook_ui import NotebookUI
from utils.document_processor import DocumentProcessor
from utils.llm_gateway import LLMGateway
from utils.ontology_manager import LegalOntology
class LegalAIApp:
    """Streamlit application for uploading legal documents and generating
    jurisdiction-aware analyses through an LLM gateway.

    Orchestrates four collaborators: an LLM gateway, a document processor,
    a notebook-style results UI, and a legal ontology for templates.
    """

    def __init__(self):
        # st.set_page_config must be the FIRST Streamlit call in a script run.
        # setup_environment may emit st.error/st.stop (missing API key), so the
        # page must be configured before it runs — the original order raised
        # StreamlitAPIException on that path.
        self.setup_page_config()
        self.setup_environment()
        self.setup_session_state()
        self.init_components()

    def setup_environment(self):
        """Validate required environment variables and create data directories.

        Halts the app with an error message if ANTHROPIC_API_KEY is unset.
        """
        self.anthropic_api_key = os.environ.get('ANTHROPIC_API_KEY')
        if not self.anthropic_api_key:
            st.error("Please set the ANTHROPIC_API_KEY environment variable")
            st.stop()
        # Local working directory for on-disk artifacts (created idempotently).
        data_dir = Path("data")
        data_dir.mkdir(parents=True, exist_ok=True)

    def setup_session_state(self):
        """Initialize session-state keys, preserving values across reruns."""
        if 'processed_docs' not in st.session_state:
            # Maps uploaded file name -> {'content', 'chunks', 'status'}.
            st.session_state.processed_docs = {}
        if 'conversation_history' not in st.session_state:
            st.session_state.conversation_history = []

    def init_components(self):
        """Instantiate the LLM gateway, document processor, UI, and ontology."""
        self.llm = LLMGateway(self.anthropic_api_key)
        self.doc_processor = DocumentProcessor()
        self.notebook_ui = NotebookUI()
        self.legal_ontology = LegalOntology()

    def setup_page_config(self):
        """Configure the Streamlit page (title, icon, layout, sidebar)."""
        st.set_page_config(
            page_title="Legal AI Assistant",
            page_icon="⚖️",
            layout="wide",
            initial_sidebar_state="expanded"
        )

    def show_disclaimer(self):
        """Show a collapsible legal disclaimer."""
        with st.expander("ℹ️ Legal Disclaimer", expanded=False):
            st.warning(
                "This tool is for research assistance only and does not constitute "
                "legal advice. Always consult with a qualified legal professional."
            )

    def render_sidebar(self):
        """Render the sidebar (jurisdiction picker + file uploader).

        Returns:
            The selected jurisdiction string ("UK", "India", or "UAE").
        """
        st.sidebar.header("Document Upload")
        jurisdiction = st.sidebar.selectbox(
            "Select Jurisdiction",
            ["UK", "India", "UAE"]
        )
        uploaded_files = st.sidebar.file_uploader(
            "Upload Documents",
            accept_multiple_files=True,
            type=['pdf', 'docx', 'txt', 'jpg', 'png']
        )
        if uploaded_files:
            self.process_uploaded_files(uploaded_files)
        return jurisdiction

    def process_uploaded_files(self, files):
        """Extract and chunk each new uploaded file into session state.

        Files already present in ``processed_docs`` (keyed by name) are
        skipped, so reruns do not reprocess. Per-file failures are reported
        in the sidebar without aborting the remaining files.
        """
        for file in files:
            if file.name in st.session_state.processed_docs:
                continue
            with st.spinner(f"Processing {file.name}..."):
                try:
                    # Process document directly from the in-memory upload.
                    doc_content = self.doc_processor.process_document(file)
                    if doc_content:
                        chunks = self.doc_processor.chunk_document(doc_content)
                        st.session_state.processed_docs[file.name] = {
                            'content': doc_content,
                            'chunks': chunks,
                            'status': 'processed'
                        }
                        st.sidebar.success(f"✔️ Processed {file.name}")
                    else:
                        st.sidebar.error(f"❌ Could not extract text from {file.name}")
                except Exception as e:
                    # Best-effort per file: surface the error, keep going.
                    st.sidebar.error(f"❌ Error processing {file.name}: {str(e)}")

    def render_analysis_options(self, jurisdiction):
        """Render analysis controls and, on demand, run an LLM analysis.

        Args:
            jurisdiction: Jurisdiction string selected in the sidebar.
        """
        st.subheader("Analysis Options")
        analysis_type = st.selectbox(
            "Select Analysis Type",
            [
                "Document Summary",
                "Key Elements Analysis",
                "Risk Assessment",
                "Chronological Summary",
                "Stakeholder Analysis"
            ]
        )
        # Case type drives which ontology template is generated.
        case_type = st.selectbox(
            "Select Case Type",
            ["contract", "commercial", "employment", "dispute_resolution"]
        )
        if st.button("Generate Analysis"):
            # Guard: without documents the prompt would contain an empty
            # body and the LLM call would be wasted.
            if not st.session_state.processed_docs:
                st.warning("Please upload at least one document before generating an analysis.")
                return
            template = self.legal_ontology.generate_analysis_template(case_type, jurisdiction)
            # Concatenate all processed documents (join avoids quadratic +=).
            doc_content = "\n\n".join(
                doc['content'] for doc in st.session_state.processed_docs.values()
            )
            analysis = self.llm.generate(
                f"Based on the following template and document content, provide a detailed legal analysis:\n\n"
                f"Template:\n{template}\n\n"
                f"Document Content:\n{doc_content}"
            )
            # Persist the result as a notebook cell for rendering below.
            self.notebook_ui.add_cell("analysis", {
                'template': analysis_type,
                'content': analysis,
                'metadata': {
                    'jurisdiction': jurisdiction,
                    'case_type': case_type
                }
            })

    def render_main_content(self):
        """Render the main page: title, sidebar, analysis options, results."""
        st.title("Legal AI Assistant")
        jurisdiction = self.render_sidebar()
        self.render_analysis_options(jurisdiction)
        st.header("Analysis Results")
        self.notebook_ui.render_cells()

    def run(self):
        """Run one Streamlit script pass of the application."""
        self.show_disclaimer()
        self.render_main_content()
def main():
    """Script entry point: build the app and run one render pass."""
    LegalAIApp().run()


if __name__ == "__main__":
    main()