harshith1411 commited on
Commit
e135161
·
verified ·
1 Parent(s): cee10ae

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -44
app.py CHANGED
@@ -5,19 +5,13 @@ import os
5
  API_KEY = "sk-proj-***REDACTED***"  # SECURITY: a real OpenAI API key was committed here in plaintext — it is compromised and must be revoked; load keys from the environment or Streamlit secrets instead
6
  os.environ["OPENAI_API_KEY"] = API_KEY
7
 
8
- from langchain_openai import ChatOpenAI, OpenAIEmbeddings
9
- from langchain_community.document_loaders import TextLoader
10
- from langchain.text_splitter import CharacterTextSplitter
11
- from langchain.vectorstores import FAISS
12
- from langchain.prompts import ChatPromptTemplate
13
- from langchain_core.output_parsers import StrOutputParser
14
 
15
  @st.cache_resource
16
  def get_chatbot():
17
- # Auto-create knowledge base
18
- if not os.path.exists("knowledge.txt"):
19
- with open("knowledge.txt", "w") as f:
20
- f.write("""
21
  SR University is located in Warangal, Telangana, India.
22
  Computer Science program focuses on AI/ML, DSA, Java/Python, AWS/Azure, software engineering.
23
 
@@ -30,59 +24,45 @@ Internship tips:
30
  3. Apply startups: AngelList, Y Combinator
31
  4. Practice system design, behavioral interviews
32
  5. Target: Google, Microsoft, Hyderabad/Bangalore startups
33
- """)
34
-
35
- # Create FAISS index if missing
36
- if not os.path.exists("faiss_index"):
37
- loader = TextLoader("knowledge.txt")
38
- docs = loader.load()
39
- text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
40
- splits = text_splitter.split_documents(docs)
41
-
42
- embeddings = OpenAIEmbeddings()
43
- vectorstore = FAISS.from_documents(splits, embeddings)
44
- vectorstore.save_local("faiss_index")
45
-
46
- # Load retriever
47
- embeddings = OpenAIEmbeddings()
48
- vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
49
- retriever = vectorstore.as_retriever()
50
-
51
- # LLM
52
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
53
- prompt = ChatPromptTemplate.from_template(
54
- "Answer using ONLY this context:\n{context}\n\nQuestion: {question}\nAnswer:"
55
- )
56
 
57
- def rag_chain(query):
58
- context_docs = retriever.invoke(query)
59
- context = "\n".join([doc.page_content for doc in context_docs])
60
- chain = (
61
- {"context": lambda x: context, "question": lambda x: query}
62
- | prompt | llm | StrOutputParser()
63
- )
64
- return chain.invoke(query)
 
 
65
 
66
- return rag_chain
67
 
 
68
  st.title("🧠 RAG Chatbot")
69
- st.info("💡 Ask about SR University, AI internships, projects...")
70
 
71
  chatbot = get_chatbot()
 
72
  if "messages" not in st.session_state:
73
  st.session_state.messages = []
74
 
 
75
  for message in st.session_state.messages:
76
  with st.chat_message(message["role"]):
77
  st.markdown(message["content"])
78
 
79
- if prompt := st.chat_input("Ask a question..."):
 
80
  st.session_state.messages.append({"role": "user", "content": prompt})
81
  with st.chat_message("user"):
82
  st.markdown(prompt)
83
 
84
  with st.chat_message("assistant"):
85
- with st.spinner("Thinking..."):
86
  response = chatbot(prompt)
87
  st.markdown(response)
88
 
 
# SECURITY FIX: an OpenAI API key was previously hard-coded on this line and
# committed to a public repository. Any such key is compromised and must be
# revoked. Read the key from the environment (set it via Streamlit secrets or
# the host's env config) instead of embedding it in source.
API_KEY = os.environ.get("OPENAI_API_KEY", "")
if API_KEY:
    # No-op when the variable was already set; kept for parity with the old flow
    # so downstream code that reads os.environ keeps working.
    os.environ["OPENAI_API_KEY"] = API_KEY

from langchain_openai import ChatOpenAI
import tiktoken  # NOTE(review): imported but unused in the visible file — confirm before removing
 
11
  @st.cache_resource
12
  def get_chatbot():
13
+ # Simple knowledge base - no complex splitting
14
+ knowledge = """
 
 
15
  SR University is located in Warangal, Telangana, India.
16
  Computer Science program focuses on AI/ML, DSA, Java/Python, AWS/Azure, software engineering.
17
 
 
24
  3. Apply startups: AngelList, Y Combinator
25
  4. Practice system design, behavioral interviews
26
  5. Target: Google, Microsoft, Hyderabad/Bangalore startups
27
+ """
28
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
 
 
 
30
 
31
+ def rag_chat(query):
32
+ prompt = f"""
33
+ Use ONLY this context to answer:
34
+ {knowledge}
35
+
36
+ Question: {query}
37
+
38
+ Answer concisely and accurately:
39
+ """
40
+ return llm.invoke(prompt).content
41
 
42
+ return rag_chat
43
 
44
# ---- UI ----
st.title("🧠 RAG Chatbot")
st.info("💡 Answers about SR University, AI internships, projects")

chatbot = get_chatbot()

# Per-session chat history; Streamlit reruns this script on every interaction,
# so the conversation must live in session_state to survive reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Handle a new user message.
if prompt := st.chat_input("Ask about university, internships..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        with st.spinner("Answering..."):
            response = chatbot(prompt)
            st.markdown(response)
    # BUG FIX: persist the assistant's reply. Previously only user turns were
    # appended, so every assistant message vanished from the rendered history
    # on the next rerun.
    st.session_state.messages.append({"role": "assistant", "content": response})
68