harshith1411 committed on
Commit
f7af5de
·
verified ·
1 Parent(s): 1c73e2f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -29
app.py CHANGED
@@ -2,12 +2,11 @@ import streamlit as st
2
  import os
3
  from langchain_openai import ChatOpenAI, OpenAIEmbeddings
4
  from langchain_community.document_loaders import TextLoader, PyPDFLoader
5
- from langchain.text_splitter import RecursiveCharacterTextSplitter
6
  from langchain.vectorstores import FAISS
7
  from langchain.prompts import ChatPromptTemplate
8
  from langchain_core.output_parsers import StrOutputParser
9
  import tempfile
10
- import time
11
 
12
  # YOUR API KEY
13
  os.environ["OPENAI_API_KEY"] = "sk-proj-REDACTED"  # NOTE(review): live secret removed from transcript — this key was exposed and must be rotated
@@ -35,21 +34,21 @@ def load_vectorstore(_file_path):
35
  vectorstore = FAISS.from_documents(splits, embeddings)
36
  vectorstore.save_local("faiss_index")
37
 
38
- st.sidebar.success("✅ Vector index created from your document!")
39
  return vectorstore
40
 
41
  def get_rag_chain(vectorstore):
42
- """Create RAG chain"""
43
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
44
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
45
 
46
  prompt = ChatPromptTemplate.from_template(
47
  """You are a helpful assistant. Answer using ONLY the following context:
48
-
49
  {context}
50
-
51
  Question: {question}
52
-
53
  Answer concisely and accurately. If answer not in context, say "I don't have that information." """
54
  )
55
 
@@ -83,13 +82,14 @@ if uploaded_file is not None:
83
  chain = get_rag_chain(vectorstore)
84
  st.session_state.chain = chain
85
  st.session_state.ready = True
 
86
  except Exception as e:
87
- st.error(f"Error: {str(e)}")
88
  st.session_state.ready = False
89
 
90
- # Chat interface (only if ready)
91
  if 'ready' in st.session_state and st.session_state.ready:
92
- st.success("🚀 Chatbot ready! Ask questions about your document.")
93
 
94
  if "messages" not in st.session_state:
95
  st.session_state.messages = []
@@ -106,13 +106,9 @@ if 'ready' in st.session_state and st.session_state.ready:
106
  st.markdown(query)
107
 
108
  with st.chat_message("assistant"):
109
- with st.spinner("Searching document..."):
110
- try:
111
- response = st.session_state.chain.invoke(query)
112
- st.markdown(response)
113
- except Exception as e:
114
- st.error(f"Error: {str(e)}")
115
- st.markdown("Try re-uploading your document.")
116
 
117
  st.session_state.messages.append({"role": "assistant", "content": response})
118
 
@@ -124,19 +120,20 @@ if 'ready' in st.session_state and st.session_state.ready:
124
  else:
125
  st.info("👆 **Upload a PDF or TXT file to start chatting!**")
126
  st.markdown("""
127
- **Works with:**
128
- • Resumes/CVs (.pdf, .txt)
129
- • Research papers
130
- • Notes/lecture slides
131
- • Company docs
132
- • ANY text content!
133
  """)
134
 
135
  # Sidebar
136
  with st.sidebar:
137
- st.markdown("### 🎯 Features")
138
- st.markdown("- Dynamic document upload")
139
- st.markdown("- True RAG (vector search)")
140
- st.markdown("- Handles PDF/TXT")
141
- st.markdown("- Production UI")
142
- st.markdown("- Portfolio ready!")
 
 
2
  import os
3
  from langchain_openai import ChatOpenAI, OpenAIEmbeddings
4
  from langchain_community.document_loaders import TextLoader, PyPDFLoader
5
+ from langchain_text_splitters import RecursiveCharacterTextSplitter
6
  from langchain.vectorstores import FAISS
7
  from langchain.prompts import ChatPromptTemplate
8
  from langchain_core.output_parsers import StrOutputParser
9
  import tempfile
 
10
 
11
  # YOUR API KEY
12
  os.environ["OPENAI_API_KEY"] = "sk-proj-REDACTED"  # NOTE(review): live secret removed from transcript — this key was exposed and must be rotated
 
34
  vectorstore = FAISS.from_documents(splits, embeddings)
35
  vectorstore.save_local("faiss_index")
36
 
37
+ st.sidebar.success("✅ Vector index created!")
38
  return vectorstore
39
 
40
  def get_rag_chain(vectorstore):
41
+ """Create RAG chain - FIXED SYNTAX"""
42
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
43
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
44
 
45
  prompt = ChatPromptTemplate.from_template(
46
  """You are a helpful assistant. Answer using ONLY the following context:
47
+
48
  {context}
49
+
50
  Question: {question}
51
+
52
  Answer concisely and accurately. If answer not in context, say "I don't have that information." """
53
  )
54
 
 
82
  chain = get_rag_chain(vectorstore)
83
  st.session_state.chain = chain
84
  st.session_state.ready = True
85
+ st.session_state.document_name = uploaded_file.name
86
  except Exception as e:
87
+ st.error(f"Error indexing: {str(e)}")
88
  st.session_state.ready = False
89
 
90
+ # Chat interface
91
  if 'ready' in st.session_state and st.session_state.ready:
92
+ st.success(f"🚀 Ready! Chatting about: **{st.session_state.document_name}**")
93
 
94
  if "messages" not in st.session_state:
95
  st.session_state.messages = []
 
106
  st.markdown(query)
107
 
108
  with st.chat_message("assistant"):
109
+ with st.spinner("🔍 Searching document..."):
110
+ response = st.session_state.chain.invoke(query)
111
+ st.markdown(response)
 
 
 
 
112
 
113
  st.session_state.messages.append({"role": "assistant", "content": response})
114
 
 
120
  else:
121
  st.info("👆 **Upload a PDF or TXT file to start chatting!**")
122
  st.markdown("""
123
+ **Perfect for:**
124
+ • 📄 Your **resume/CV**
125
+ • 📚 Research papers
126
+ • 📝 Lecture notes
127
+ • 💼 Company documents
128
+ • **ANY text content!**
129
  """)
130
 
131
  # Sidebar
132
  with st.sidebar:
133
+ st.markdown("### 🎯 Medium-Level RAG Features")
134
+ st.markdown("✅ Dynamic file upload")
135
+ st.markdown("✅ Vector embeddings")
136
+ st.markdown("✅ FAISS similarity search")
137
+ st.markdown("✅ Proper text chunking")
138
+ st.markdown("✅ LCEL RAG pipeline")
139
+ st.markdown("✅ Production UI")