harshith1411 commited on
Commit
efd2bac
·
verified ·
1 Parent(s): e5e5513

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -36
app.py CHANGED
@@ -10,17 +10,16 @@ import tempfile
10
 
11
 os.environ["OPENAI_API_KEY"] = "sk-proj-REDACTED"  # SECURITY: a live API key was committed here in plaintext — it must be revoked immediately and loaded from an environment variable or secrets manager instead of being hardcoded
12
 
13
- @st.cache_resource
14
- def load_vectorstore(_file_path):
15
  if os.path.exists("faiss_index"):
16
  embeddings = OpenAIEmbeddings()
17
  return FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
18
 
19
- file_ext = _file_path.split('.')[-1].lower()
20
  if file_ext == 'pdf':
21
- loader = PyPDFLoader(_file_path)
22
  else:
23
- loader = TextLoader(_file_path)
24
 
25
  docs = loader.load()
26
  text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
@@ -29,21 +28,13 @@ def load_vectorstore(_file_path):
29
  embeddings = OpenAIEmbeddings()
30
  vectorstore = FAISS.from_documents(splits, embeddings)
31
  vectorstore.save_local("faiss_index")
32
- st.sidebar.success("βœ… Document indexed!")
33
  return vectorstore
34
 
35
  def get_rag_chain(vectorstore):
36
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
37
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
38
 
39
- # FIXED: Clean single-line prompt
40
- prompt_template = """You are a helpful assistant. Answer using ONLY this context: {context}
41
-
42
- Question: {question}
43
-
44
- Answer concisely. If not in context: "I don't have that information.""""
45
-
46
- prompt = ChatPromptTemplate.from_template(prompt_template)
47
 
48
  chain = (
49
  {"context": retriever, "question": lambda x: x}
@@ -54,7 +45,7 @@ Answer concisely. If not in context: "I don't have that information.""""
54
  return chain
55
 
56
  st.title("🧠 Dynamic RAG Chatbot")
57
- st.markdown("**Upload PDF/TXT β†’ Ask ANY question β†’ Perfect answers!**")
58
 
59
  uploaded_file = st.file_uploader("πŸ“€ Upload PDF or TXT", type=['pdf', 'txt'])
60
 
@@ -65,19 +56,15 @@ if uploaded_file is not None:
65
 
66
  st.success(f"βœ… Loaded: {uploaded_file.name}")
67
 
68
- with st.spinner("πŸ”„ Indexing document..."):
69
- try:
70
- vectorstore = load_vectorstore(file_path)
71
- chain = get_rag_chain(vectorstore)
72
- st.session_state.chain = chain
73
- st.session_state.ready = True
74
- st.session_state.doc_name = uploaded_file.name
75
- except Exception as e:
76
- st.error(f"Error: {str(e)}")
77
- st.session_state.ready = False
78
 
79
  if 'ready' in st.session_state and st.session_state.ready:
80
- st.success(f"πŸš€ Ready! Document: **{st.session_state.doc_name}**")
81
 
82
  if "messages" not in st.session_state:
83
  st.session_state.messages = []
@@ -92,7 +79,7 @@ if 'ready' in st.session_state and st.session_state.ready:
92
  st.markdown(query)
93
 
94
  with st.chat_message("assistant"):
95
- with st.spinner("πŸ” Searching..."):
96
  response = st.session_state.chain.invoke(query)
97
  st.markdown(response)
98
 
@@ -103,12 +90,4 @@ if 'ready' in st.session_state and st.session_state.ready:
103
  st.rerun()
104
 
105
  else:
106
- st.info("πŸ‘† **Upload PDF/TXT to start!**")
107
- st.markdown("**Test with:** Resume, notes, research papers...")
108
-
109
- with st.sidebar:
110
- st.markdown("### βœ… Medium-Level RAG")
111
- st.markdown("- Dynamic upload")
112
- st.markdown("- Vector search")
113
- st.markdown("- FAISS indexing")
114
- st.markdown("- LCEL pipeline")
 
10
 
11
 os.environ["OPENAI_API_KEY"] = "sk-proj-REDACTED"  # SECURITY: leaked key redacted — revoke it and load from environment/secrets, never hardcode credentials
12
 
13
+ def load_vectorstore(file_path):
 
14
  if os.path.exists("faiss_index"):
15
  embeddings = OpenAIEmbeddings()
16
  return FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
17
 
18
+ file_ext = file_path.split('.')[-1].lower()
19
  if file_ext == 'pdf':
20
+ loader = PyPDFLoader(file_path)
21
  else:
22
+ loader = TextLoader(file_path)
23
 
24
  docs = loader.load()
25
  text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
 
28
  embeddings = OpenAIEmbeddings()
29
  vectorstore = FAISS.from_documents(splits, embeddings)
30
  vectorstore.save_local("faiss_index")
 
31
  return vectorstore
32
 
33
  def get_rag_chain(vectorstore):
34
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
35
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
36
 
37
+ prompt = ChatPromptTemplate.from_template("Context: {context}. Question: {question}. Answer using context only.")
 
 
 
 
 
 
 
38
 
39
  chain = (
40
  {"context": retriever, "question": lambda x: x}
 
45
  return chain
46
 
47
  st.title("🧠 Dynamic RAG Chatbot")
48
+ st.markdown("Upload PDF/TXT β†’ Ask ANY question!")
49
 
50
  uploaded_file = st.file_uploader("πŸ“€ Upload PDF or TXT", type=['pdf', 'txt'])
51
 
 
56
 
57
  st.success(f"βœ… Loaded: {uploaded_file.name}")
58
 
59
+ with st.spinner("πŸ”„ Indexing..."):
60
+ vectorstore = load_vectorstore(file_path)
61
+ chain = get_rag_chain(vectorstore)
62
+ st.session_state.chain = chain
63
+ st.session_state.ready = True
64
+ st.session_state.doc_name = uploaded_file.name
 
 
 
 
65
 
66
  if 'ready' in st.session_state and st.session_state.ready:
67
+ st.success(f"πŸš€ Ready! Document: {st.session_state.doc_name}")
68
 
69
  if "messages" not in st.session_state:
70
  st.session_state.messages = []
 
79
  st.markdown(query)
80
 
81
  with st.chat_message("assistant"):
82
+ with st.spinner("Searching..."):
83
  response = st.session_state.chain.invoke(query)
84
  st.markdown(response)
85
 
 
90
  st.rerun()
91
 
92
  else:
93
+ st.info("πŸ‘† Upload PDF/TXT to start chatting!")