# Portfolio chatbot Space: Gradio UI + FAISS retrieval + Gemini answers.
import os

import google.generativeai as genai
import gradio as gr
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import TextLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# -------------------------------
# 1. Setup Gemini
# -------------------------------
# The key is read from the environment; warn at startup so a missing key is
# diagnosed immediately rather than as an opaque API error on each request.
_api_key = os.getenv("GOOGLE_API_KEY")
if not _api_key:
    print("WARNING: GOOGLE_API_KEY is not set; Gemini calls will fail.")
genai.configure(api_key=_api_key)
gemini_model = genai.GenerativeModel("gemini-2.5-flash")
# -------------------------------
# 2. Ensure about_me.txt exists
# -------------------------------
# Create a placeholder portfolio file on first run so the loader below always
# has something to index.  Write with an explicit encoding so the file is
# UTF-8 regardless of the platform's locale default.
if not os.path.exists("about_me.txt"):
    with open("about_me.txt", "w", encoding="utf-8") as f:
        f.write("""
Hello! I am a portfolio chatbot. I can help answer questions about projects, skills, and experience.
This is a sample portfolio text. Please replace this with your actual portfolio content.
""")
# -------------------------------
# 3. Load data
# -------------------------------
# Read the portfolio text; if anything goes wrong, fall back to a single
# in-memory stub document so the rest of the pipeline still works.
try:
    docs = TextLoader("about_me.txt").load()
except Exception as e:
    print(f"Error loading document: {e}")
    from langchain.schema import Document
    docs = [Document(page_content="Hello! I am a portfolio chatbot ready to help you.")]
# -------------------------------
# 4. Split documents
# -------------------------------
# Chunk the text into ~500-character pieces with a 50-character overlap so
# retrieval returns focused passages.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
split_docs = text_splitter.split_documents(docs)
# -------------------------------
# 5. Create embeddings & FAISS
# -------------------------------
print("Loading embeddings...")
# CPU-only MiniLM sentence embeddings — small and fast enough for a Space.
embedding_model = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2",
    model_kwargs={"device": "cpu"},
)
print("Creating vector database...")
db = FAISS.from_documents(split_docs, embedding_model)
| # ------------------------------- | |
| # 6. Ask function with Gemini refinement | |
| # ------------------------------- | |
# -------------------------------
# 6. Ask function with Gemini refinement
# -------------------------------
def ask_bot_alternative(question: str) -> str:
    """Answer *question* from the indexed portfolio text.

    Retrieves the two most similar chunks from the FAISS index, then asks
    Gemini to compose an answer grounded only in that context.  Always
    returns a user-facing string, including on error.
    """
    try:
        if not question.strip():
            return "Please ask me a question about the portfolio!"
        # Retrieve top documents.  similarity_search(k=2) returns the same
        # top-k documents as the deprecated
        # as_retriever(...).get_relevant_documents() call it replaces.
        context_docs = db.similarity_search(question, k=2)
        if not context_docs:
            return "I could not find an answer in the portfolio content."
        # Combine retrieved docs into a single context string.
        context = "\n".join(doc.page_content for doc in context_docs)
        # Send to Gemini for refinement.
        prompt = f"""
You are a helpful assistant.
Answer the following question using only the given context.
If the answer is not present, say "I don’t know".
Question: {question}
Context:
{context}
Final Answer:
"""
        response = gemini_model.generate_content(prompt)
        return response.text.strip()
    except Exception as e:
        # Never raise into Gradio; surface a short diagnostic instead.
        return f"Sorry, I encountered an error: {str(e)[:200]}"
# -------------------------------
# 7. Gradio Interface
# -------------------------------
# Single-input / single-output UI wrapped around the ask function.
question_box = gr.Textbox(
    label="Ask me about the portfolio",
    placeholder="What would you like to know?",
    max_lines=3,
)
answer_box = gr.Textbox(label="Response", max_lines=10)

iface = gr.Interface(
    fn=ask_bot_alternative,
    inputs=question_box,
    outputs=answer_box,
    title="Portfolio Chatbot",
    description="Ask me questions about skills, projects, and experience!",
    examples=[
        "What are your technical skills?",
        "Tell me about your projects",
        "What is your background?",
    ],
    cache_examples=False,
    allow_flagging="never",
)
if __name__ == "__main__":
    # Bind to all interfaces on the standard Spaces port.
    print("Launching chatbot...")
    iface.launch(server_name="0.0.0.0", server_port=7860, show_error=True)