harshith1411 commited on
Commit
b2c1899
·
verified ·
1 Parent(s): 6b379ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -17
app.py CHANGED
@@ -1,8 +1,10 @@
1
  import streamlit as st
2
  import os
3
 
4
- # HF Spaces secret
5
- os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]  # [REDACTED: a live OpenAI project key was committed here and used as the env-var/secret NAME; the key is compromised and must be revoked]
 
 
6
 
7
  from langchain_openai import ChatOpenAI, OpenAIEmbeddings
8
  from langchain_community.document_loaders import TextLoader
@@ -13,25 +15,43 @@ from langchain_core.output_parsers import StrOutputParser
13
 
14
  @st.cache_resource
15
  def get_chatbot():
16
- # Load or create vector store
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  if not os.path.exists("faiss_index"):
18
- with open("knowledge.txt", "r") as f:
19
- content = f.read()
20
- docs = [content] # Simple doc
21
-
22
  text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
23
- splits = text_splitter.create_documents([content])
24
 
25
  embeddings = OpenAIEmbeddings()
26
  vectorstore = FAISS.from_documents(splits, embeddings)
27
  vectorstore.save_local("faiss_index")
28
-
29
  # Load retriever
30
  embeddings = OpenAIEmbeddings()
31
  vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
32
  retriever = vectorstore.as_retriever()
33
-
34
- # LLM
35
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
36
 
37
  prompt = ChatPromptTemplate.from_template(
@@ -51,29 +71,37 @@ def get_chatbot():
51
 
52
  return rag_chain
53
 
54
- # UI
55
  st.title("🧠 RAG Chatbot")
 
 
56
  chatbot = get_chatbot()
57
 
58
  if "messages" not in st.session_state:
59
  st.session_state.messages = []
60
 
61
- # Show chat history
62
  for message in st.session_state.messages:
63
  with st.chat_message(message["role"]):
64
  st.markdown(message["content"])
65
 
66
- # Chat input
67
- if prompt := st.chat_input("Ask about your knowledge base..."):
68
  # Add user message
69
  st.session_state.messages.append({"role": "user", "content": prompt})
70
  with st.chat_message("user"):
71
  st.markdown(prompt)
72
 
73
- # Generate response
74
  with st.chat_message("assistant"):
75
- with st.spinner("Searching knowledge base..."):
76
  response = chatbot(prompt)
77
  st.markdown(response)
78
 
 
79
  st.session_state.messages.append({"role": "assistant", "content": response})
 
 
 
 
 
 
1
  import streamlit as st
2
  import os
3
 
# OpenAI credentials.
# SECURITY: never hard-code an API key in source control — the key previously
# committed on this line is compromised and must be revoked in the OpenAI
# dashboard. Read the key from the HF Spaces secret store (st.secrets), falling
# back to an already-set environment variable so local runs keep working.
if "OPENAI_API_KEY" not in os.environ:
    # Raises a clear KeyError at startup if the Space secret is missing,
    # instead of failing later inside the first OpenAI call.
    os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
 
9
  from langchain_openai import ChatOpenAI, OpenAIEmbeddings
10
  from langchain_community.document_loaders import TextLoader
 
15
 
16
  @st.cache_resource
17
  def get_chatbot():
18
+ # Auto-create knowledge.txt if missing
19
+ if not os.path.exists("knowledge.txt"):
20
+ with open("knowledge.txt", "w") as f:
21
+ f.write("""
22
+ SR University is located in Warangal, Telangana, India.
23
+ The Computer Science program focuses on AI/ML, Data Structures & Algorithms,
24
+ Java/Python programming, Cloud Computing (AWS/Azure), and software engineering.
25
+
26
+ You are a B.Tech Computer Science student preparing for AI/ML internships.
27
+ Key skills: DSA (LeetCode, GFG), AI projects (robotic arms, drones),
28
+ cloud certifications, competitive programming.
29
+
30
+ Internship preparation tips:
31
+ 1. Solve 300+ LeetCode problems (Easy:100, Medium:150, Hard:50)
32
+ 2. Build 3 portfolio projects: RAG chatbot, object detection, RL agent
33
+ 3. Apply to startups via AngelList, Y Combinator jobs
34
+ 4. Practice system design and behavioral interviews
35
+ 5. Target companies: Google, Microsoft, startups in Hyderabad/Bangalore
36
+ """)
37
+
38
+ # Create vector store if missing
39
  if not os.path.exists("faiss_index"):
40
+ loader = TextLoader("knowledge.txt")
41
+ docs = loader.load()
 
 
42
  text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
43
+ splits = text_splitter.split_documents(docs)
44
 
45
  embeddings = OpenAIEmbeddings()
46
  vectorstore = FAISS.from_documents(splits, embeddings)
47
  vectorstore.save_local("faiss_index")
48
+
49
  # Load retriever
50
  embeddings = OpenAIEmbeddings()
51
  vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
52
  retriever = vectorstore.as_retriever()
53
+
54
+ # LLM setup
55
  llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
56
 
57
  prompt = ChatPromptTemplate.from_template(
 
71
 
72
  return rag_chain
73
 
# ---------------- Main UI (top-level Streamlit script) ----------------
st.title("🧠 RAG Chatbot")
st.info("💡 Answers questions about SR University, AI internships, your projects")

chatbot = get_chatbot()

# Per-session chat history lives in st.session_state and survives reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation on every rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Handle a new user message.
if prompt := st.chat_input("Ask about university, internships, projects..."):
    # Record and echo the user's turn.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Generate and display the assistant response.
    with st.chat_message("assistant"):
        with st.spinner("Searching your knowledge base..."):
            # NOTE(review): LCEL chains are normally called via .invoke(prompt);
            # confirm get_chatbot() returns a plain callable — TODO verify.
            response = chatbot(prompt)
        st.markdown(response)

    # Persist the assistant turn so it is replayed on the next rerun.
    st.session_state.messages.append({"role": "assistant", "content": response})

# Sidebar status panel.
with st.sidebar:
    st.success("✅ RAG Chatbot Live!")
    # NOTE(review): st.balloons() fires on EVERY rerun (i.e. after each
    # message) — consider gating it behind a one-time session flag.
    st.balloons()