purajith committed on
Commit
535fb2d
·
verified ·
1 Parent(s): ece1493

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -80
app.py DELETED
@@ -1,80 +0,0 @@
1
"""Streamlit front-end for a conversational, document-grounded RAG assistant.

Users upload documents (PDF / DOCX / XLSX / CSV) in the sidebar; each question
typed into the chat box is answered by ``multimodelrag`` over the uploaded
files, and the exchange is kept in ``st.session_state`` so it survives
Streamlit's script reruns.
"""

import streamlit as st
import os
import shutil
from hybrid_search import multimodelrag
import warnings

warnings.filterwarnings("ignore")

# --- Page setup -------------------------------------------------------------
st.set_page_config(layout="wide")
st.title("AI Document Processor with Conversational RAG")

# Chat history must live in session_state: the whole script reruns on every
# user interaction, so plain module-level variables would be reset.
if "messages" not in st.session_state:
    st.session_state.messages = []

# --- Sidebar: file upload and model settings --------------------------------
with st.sidebar:
    uploaded_files = st.file_uploader(
        "Upload multiple files (PDF, DOCX, Excel, CSV)",
        type=["pdf", "docx", "xlsx", "csv"],
        accept_multiple_files=True
    )

    embeding = st.selectbox("Select Memory Mode", ["open_source", "openai"], index=0)
    conversation = st.selectbox("Number of conversation", [2, 4, 6], index=0)
    llm_option = st.selectbox("Select LLM Model", ["GPT-4o", "GPT-4o-mini"], index=1)

    temp_dir = "temp_uploaded_files"

    # Always defined so the chat handler below can reference it even on a
    # rerun where nothing has been uploaded.
    file_paths = []

    # New uploads replace any previously saved files on disk.
    if uploaded_files:
        if os.path.exists(temp_dir):
            shutil.rmtree(temp_dir)  # drop stale files from an earlier upload
        os.makedirs(temp_dir)  # fresh directory for this batch

        for file in uploaded_files:
            file_path = os.path.join(temp_dir, file.name)
            # Persist the upload locally so the RAG backend can read it by path.
            with open(file_path, "wb") as f:
                f.write(file.read())
            file_paths.append(file_path)
            st.write(f"✅ Saved: {file.name}")

    # Resets the transcript only; uploaded files on disk are left untouched.
    if st.button("Clear Conversation History"):
        st.session_state.messages = []

# --- Replay the stored transcript -------------------------------------------
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        # unsafe_allow_html so the assistant's <b>/<code> formatting renders.
        st.markdown(message["content"], unsafe_allow_html=True)

# --- Handle a new question ---------------------------------------------------
if user_input := st.chat_input("Ask a question:"):
    # Echo the user's message and record it.
    with st.chat_message("user"):
        st.markdown(user_input)
    st.session_state.messages.append({"role": "user", "content": user_input})

    # NOTE(review): multimodelrag's contract is not visible from this file;
    # the handling below assumes it returns a dict mapping file names to
    # extracted text on success — confirm against hybrid_search.
    answer = multimodelrag(user_input, file_paths, embeding, llm_option, conversation)

    if isinstance(answer, dict):
        # Render one formatted section per source file.
        chat_display = "\n\n".join(
            [f"📂 <b>File:</b> <code>{key}</code><br><br>📝 <b>Extracted Text:</b><br><br>{value}<br>{'-'*50}"
             for key, value in answer.items()]
        )
    else:
        chat_display = f"❌ **Error:** Unexpected response format from AI model.\n\n{answer}"

    # Show the assistant's reply and persist it for future reruns.
    with st.chat_message("assistant"):
        st.markdown(chat_display, unsafe_allow_html=True)

    st.session_state.messages.append({"role": "assistant", "content": chat_display})