cryogenic22 committed on
Commit
deb1607
Β·
verified Β·
1 Parent(s): 466afe3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +68 -63
app.py CHANGED
@@ -15,12 +15,6 @@ st.set_page_config(
15
  initial_sidebar_state="expanded"
16
  )
17
 
18
- # Sidebar navigation
19
- tab = st.sidebar.radio(
20
- "Navigation",
21
- ["πŸ“ Manage Documents", "πŸ“ Generate Templates", "πŸ” Search Documents"]
22
- )
23
-
24
  def get_vector_store():
25
  """Lazy initialization of VectorStore to avoid circular imports."""
26
  global vector_store
@@ -28,68 +22,79 @@ def get_vector_store():
28
  vector_store = VectorStore()
29
  return vector_store
30
 
31
- # Tab 1: Manage Documents
32
- if tab == "πŸ“ Manage Documents":
33
- st.title("πŸ“ Manage Documents")
34
- uploaded_file = st.file_uploader("Upload Document", type=["pdf", "docx", "txt"])
 
 
 
35
 
36
- if uploaded_file:
37
- try:
38
- with st.spinner("Processing document..."):
39
- # Process document and extract text/chunks
40
- text, chunks = doc_processor.process_document(uploaded_file)
41
- st.success("Document processed successfully!")
42
 
43
- # Add to vector store
44
- vector_store_instance = get_vector_store()
45
- vector_store_instance.add_texts(
46
- texts=[chunk["text"] for chunk in chunks],
47
- metadatas=[{
48
- "text": chunk["text"],
49
- "chunk_id": chunk["chunk_id"],
50
- "filename": uploaded_file.name
51
- } for chunk in chunks]
52
- )
53
- st.success("Document added to vector store!")
54
 
55
- # Display document preview
56
- with st.expander("Document Preview", expanded=False):
57
- st.text_area(
58
- "Content",
59
- value=text[:1000] + "..." if len(text) > 1000 else text,
60
- height=300,
61
- disabled=True
 
 
62
  )
 
63
 
64
- except Exception as e:
65
- st.error(f"Error processing document: {e}")
 
 
 
 
 
 
66
 
67
- # List processed documents
68
- st.subheader("Processed Documents")
69
- vector_store_instance = get_vector_store()
70
- processed_docs = vector_store_instance.metadata
71
- if processed_docs:
72
- for idx, doc in enumerate(processed_docs):
73
- st.markdown(f"{idx+1}. **{doc.get('filename', 'Unknown')}** - Chunk ID: {doc['chunk_id']}")
74
- else:
75
- st.info("No documents uploaded yet.")
76
 
77
- # Tab 2: Generate Templates
78
- elif tab == "πŸ“ Generate Templates":
79
- st.title("πŸ“ Generate Templates")
80
- render_template_generator()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
 
82
- # Tab 3: Search Documents
83
- elif tab == "πŸ” Search Documents":
84
- st.title("πŸ” Search Documents")
85
- query = st.text_input("Enter your query:")
86
- if query:
87
- with st.spinner("Searching for relevant chunks..."):
88
- vector_store_instance = get_vector_store()
89
- results = vector_store_instance.similarity_search(query)
90
- if results:
91
- st.success("Found relevant chunks:")
92
- for result in results:
93
- st.markdown(f"- **Chunk:** {result['text'][:200]}... (Relevance: {result['distance']:.2f})")
94
- else:
95
- st.warning("No relevant chunks found.")
 
15
  initial_sidebar_state="expanded"
16
  )
17
 
 
 
 
 
 
 
18
  def get_vector_store():
19
  """Lazy initialization of VectorStore to avoid circular imports."""
20
  global vector_store
 
22
  vector_store = VectorStore()
23
  return vector_store
24
 
25
+ def main():
26
+ """Main function to handle app navigation and functionality."""
27
+ # Sidebar navigation
28
+ tab = st.sidebar.radio(
29
+ "Navigation",
30
+ ["πŸ“ Manage Documents", "πŸ“ Generate Templates", "πŸ” Search Documents"]
31
+ )
32
 
33
+ # Tab 1: Manage Documents
34
+ if tab == "πŸ“ Manage Documents":
35
+ st.title("πŸ“ Manage Documents")
36
+ uploaded_file = st.file_uploader("Upload Document", type=["pdf", "docx", "txt"])
 
 
37
 
38
+ if uploaded_file:
39
+ try:
40
+ with st.spinner("Processing document..."):
41
+ # Process document and extract text/chunks
42
+ text, chunks = doc_processor.process_document(uploaded_file)
43
+ st.success("Document processed successfully!")
 
 
 
 
 
44
 
45
+ # Add to vector store
46
+ vector_store_instance = get_vector_store()
47
+ vector_store_instance.add_texts(
48
+ texts=[chunk["text"] for chunk in chunks],
49
+ metadatas=[{
50
+ "text": chunk["text"],
51
+ "chunk_id": chunk["chunk_id"],
52
+ "filename": uploaded_file.name
53
+ } for chunk in chunks]
54
  )
55
+ st.success("Document added to vector store!")
56
 
57
+ # Display document preview
58
+ with st.expander("Document Preview", expanded=False):
59
+ st.text_area(
60
+ "Content",
61
+ value=text[:1000] + "..." if len(text) > 1000 else text,
62
+ height=300,
63
+ disabled=True
64
+ )
65
 
66
+ except Exception as e:
67
+ st.error(f"Error processing document: {e}")
 
 
 
 
 
 
 
68
 
69
+ # List processed documents
70
+ st.subheader("Processed Documents")
71
+ vector_store_instance = get_vector_store()
72
+ processed_docs = vector_store_instance.metadata
73
+ if processed_docs:
74
+ for idx, doc in enumerate(processed_docs):
75
+ st.markdown(f"{idx+1}. **{doc.get('filename', 'Unknown')}** - Chunk ID: {doc['chunk_id']}")
76
+ else:
77
+ st.info("No documents uploaded yet.")
78
+
79
+ # Tab 2: Generate Templates
80
+ elif tab == "πŸ“ Generate Templates":
81
+ st.title("πŸ“ Generate Templates")
82
+ render_template_generator()
83
+
84
+ # Tab 3: Search Documents
85
+ elif tab == "πŸ” Search Documents":
86
+ st.title("πŸ” Search Documents")
87
+ query = st.text_input("Enter your query:")
88
+ if query:
89
+ with st.spinner("Searching for relevant chunks..."):
90
+ vector_store_instance = get_vector_store()
91
+ results = vector_store_instance.similarity_search(query)
92
+ if results:
93
+ st.success("Found relevant chunks:")
94
+ for result in results:
95
+ st.markdown(f"- **Chunk:** {result['text'][:200]}... (Relevance: {result['distance']:.2f})")
96
+ else:
97
+ st.warning("No relevant chunks found.")
98
 
99
+ if __name__ == "__main__":
100
+ main()