harshith1411 committed on
Commit
4ddce82
·
verified ·
1 Parent(s): 547c0d5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -46
app.py CHANGED
@@ -8,18 +8,14 @@ from langchain.prompts import ChatPromptTemplate
8
  from langchain_core.output_parsers import StrOutputParser
9
  import tempfile
10
 
11
- # YOUR API KEY
12
  os.environ["OPENAI_API_KEY"] = "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"
13
 
14
  @st.cache_resource
15
  def load_vectorstore(_file_path):
16
- """Load or create vectorstore from uploaded file"""
17
  if os.path.exists("faiss_index"):
18
  embeddings = OpenAIEmbeddings()
19
- vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
20
- return vectorstore
21
 
22
- # Process uploaded file
23
  file_ext = _file_path.split('.')[-1].lower()
24
  if file_ext == 'pdf':
25
  loader = PyPDFLoader(_file_path)
@@ -33,24 +29,21 @@ def load_vectorstore(_file_path):
33
  embeddings = OpenAIEmbeddings()
34
  vectorstore = FAISS.from_documents(splits, embeddings)
35
  vectorstore.save_local("faiss_index")
36
-
37
- st.sidebar.success("βœ… Vector index created!")
38
  return vectorstore
39
 
40
  def get_rag_chain(vectorstore):
41
- """Create RAG chain - FIXED SYNTAX"""
42
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
43
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
44
 
45
- prompt = ChatPromptTemplate.from_template(
46
- """You are a helpful assistant. Answer using ONLY the following context:
47
-
48
- {context}
49
 
50
  Question: {question}
51
 
52
- Answer concisely and accurately. If answer not in context, say "I don't have that information.""""
53
- )
 
54
 
55
  chain = (
56
  {"context": retriever, "question": lambda x: x}
@@ -60,80 +53,62 @@ Answer concisely and accurately. If answer not in context, say "I don't have tha
60
  )
61
  return chain
62
 
63
- # === MAIN UI ===
64
  st.title("🧠 Dynamic RAG Chatbot")
65
- st.markdown("**Upload ANY document β†’ Ask ANY question β†’ Get accurate answers!**")
66
 
67
- # File upload
68
  uploaded_file = st.file_uploader("πŸ“€ Upload PDF or TXT", type=['pdf', 'txt'])
69
 
70
  if uploaded_file is not None:
71
- # Save uploaded file
72
  with tempfile.NamedTemporaryFile(delete=False, suffix=f".{uploaded_file.name.split('.')[-1]}") as tmp_file:
73
  tmp_file.write(uploaded_file.getvalue())
74
  file_path = tmp_file.name
75
 
76
  st.success(f"βœ… Loaded: {uploaded_file.name}")
77
 
78
- # Load vectorstore
79
- with st.spinner("πŸ”„ Indexing your document..."):
80
  try:
81
  vectorstore = load_vectorstore(file_path)
82
  chain = get_rag_chain(vectorstore)
83
  st.session_state.chain = chain
84
  st.session_state.ready = True
85
- st.session_state.document_name = uploaded_file.name
86
  except Exception as e:
87
- st.error(f"Error indexing: {str(e)}")
88
  st.session_state.ready = False
89
 
90
- # Chat interface
91
  if 'ready' in st.session_state and st.session_state.ready:
92
- st.success(f"πŸš€ Ready! Chatting about: **{st.session_state.document_name}**")
93
 
94
  if "messages" not in st.session_state:
95
  st.session_state.messages = []
96
 
97
- # Chat history
98
  for message in st.session_state.messages:
99
  with st.chat_message(message["role"]):
100
  st.markdown(message["content"])
101
 
102
- # Chat input
103
- if query := st.chat_input("πŸ’¬ Ask ANY question about your document..."):
104
  st.session_state.messages.append({"role": "user", "content": query})
105
  with st.chat_message("user"):
106
  st.markdown(query)
107
 
108
  with st.chat_message("assistant"):
109
- with st.spinner("πŸ” Searching document..."):
110
  response = st.session_state.chain.invoke(query)
111
  st.markdown(response)
112
 
113
  st.session_state.messages.append({"role": "assistant", "content": response})
114
 
115
- # Clear chat
116
  if st.button("πŸ—‘οΈ Clear Chat"):
117
  st.session_state.messages = []
118
  st.rerun()
119
 
120
  else:
121
- st.info("πŸ‘† **Upload a PDF or TXT file to start chatting!**")
122
- st.markdown("""
123
- **Perfect for:**
124
- β€’ πŸ“„ Your **resume/CV**
125
- β€’ πŸ“š Research papers
126
- β€’ πŸ“ Lecture notes
127
- β€’ πŸ’Ό Company documents
128
- β€’ **ANY text content!**
129
- """)
130
 
131
- # Sidebar
132
  with st.sidebar:
133
- st.markdown("### 🎯 Medium-Level RAG Features")
134
- st.markdown("βœ… Dynamic file upload")
135
- st.markdown("βœ… Vector embeddings")
136
- st.markdown("βœ… FAISS similarity search")
137
- st.markdown("βœ… Proper text chunking")
138
- st.markdown("βœ… LCEL RAG pipeline")
139
- st.markdown("βœ… Production UI")
 
8
  from langchain_core.output_parsers import StrOutputParser
9
  import tempfile
10
 
 
11
  os.environ["OPENAI_API_KEY"] = "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"
12
 
13
  @st.cache_resource
14
  def load_vectorstore(_file_path):
 
15
  if os.path.exists("faiss_index"):
16
  embeddings = OpenAIEmbeddings()
17
+ return FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
 
18
 
 
19
  file_ext = _file_path.split('.')[-1].lower()
20
  if file_ext == 'pdf':
21
  loader = PyPDFLoader(_file_path)
 
29
  embeddings = OpenAIEmbeddings()
30
  vectorstore = FAISS.from_documents(splits, embeddings)
31
  vectorstore.save_local("faiss_index")
32
+ st.sidebar.success("βœ… Document indexed!")
 
33
  return vectorstore
34
 
35
  def get_rag_chain(vectorstore):
 
36
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
37
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
38
 
39
+ # FIXED: Clean single-line prompt
40
+ prompt_template = """You are a helpful assistant. Answer using ONLY this context: {context}
 
 
41
 
42
  Question: {question}
43
 
44
+ Answer concisely. If not in context: "I don't have that information.""""
45
+
46
+ prompt = ChatPromptTemplate.from_template(prompt_template)
47
 
48
  chain = (
49
  {"context": retriever, "question": lambda x: x}
 
53
  )
54
  return chain
55
 
 
56
  st.title("🧠 Dynamic RAG Chatbot")
57
+ st.markdown("**Upload PDF/TXT β†’ Ask ANY question β†’ Perfect answers!**")
58
 
 
59
  uploaded_file = st.file_uploader("πŸ“€ Upload PDF or TXT", type=['pdf', 'txt'])
60
 
61
  if uploaded_file is not None:
 
62
  with tempfile.NamedTemporaryFile(delete=False, suffix=f".{uploaded_file.name.split('.')[-1]}") as tmp_file:
63
  tmp_file.write(uploaded_file.getvalue())
64
  file_path = tmp_file.name
65
 
66
  st.success(f"βœ… Loaded: {uploaded_file.name}")
67
 
68
+ with st.spinner("πŸ”„ Indexing document..."):
 
69
  try:
70
  vectorstore = load_vectorstore(file_path)
71
  chain = get_rag_chain(vectorstore)
72
  st.session_state.chain = chain
73
  st.session_state.ready = True
74
+ st.session_state.doc_name = uploaded_file.name
75
  except Exception as e:
76
+ st.error(f"Error: {str(e)}")
77
  st.session_state.ready = False
78
 
 
79
  if 'ready' in st.session_state and st.session_state.ready:
80
+ st.success(f"πŸš€ Ready! Document: **{st.session_state.doc_name}**")
81
 
82
  if "messages" not in st.session_state:
83
  st.session_state.messages = []
84
 
 
85
  for message in st.session_state.messages:
86
  with st.chat_message(message["role"]):
87
  st.markdown(message["content"])
88
 
89
+ if query := st.chat_input("πŸ’¬ Ask about your document..."):
 
90
  st.session_state.messages.append({"role": "user", "content": query})
91
  with st.chat_message("user"):
92
  st.markdown(query)
93
 
94
  with st.chat_message("assistant"):
95
+ with st.spinner("πŸ” Searching..."):
96
  response = st.session_state.chain.invoke(query)
97
  st.markdown(response)
98
 
99
  st.session_state.messages.append({"role": "assistant", "content": response})
100
 
 
101
  if st.button("πŸ—‘οΈ Clear Chat"):
102
  st.session_state.messages = []
103
  st.rerun()
104
 
105
  else:
106
+ st.info("πŸ‘† **Upload PDF/TXT to start!**")
107
+ st.markdown("**Test with:** Resume, notes, research papers...")
 
 
 
 
 
 
 
108
 
 
109
  with st.sidebar:
110
+ st.markdown("### βœ… Medium-Level RAG")
111
+ st.markdown("- Dynamic upload")
112
+ st.markdown("- Vector search")
113
+ st.markdown("- FAISS indexing")
114
+ st.markdown("- LCEL pipeline")