Spaces:
Sleeping
Sleeping
import os

# Fix OpenMP thread-count issue BEFORE any library that may load OpenMP is
# imported. (Previously this was set *after* `import gradio`, which defeated
# the stated intent of the original comment.)
os.environ["OMP_NUM_THREADS"] = "1"

import sys
from pathlib import Path
from typing import Any, List, Optional

import gradio as gr
from huggingface_hub import InferenceClient

# NO langchain.chains anymore (hopefully this fixes it)
from langchain_community.document_loaders import UnstructuredMarkdownLoader
from langchain_community.vectorstores import Chroma
from langchain_core.language_models.llms import LLM
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import MarkdownHeaderTextSplitter

# Hugging Face Inference API token — fail fast at startup if it is missing.
hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
if not hf_token:
    print("ERROR: Token not found in environment variables!")
    sys.exit(1)  # `exit()` is an interactive-shell helper; sys.exit is correct in scripts
print("Loading portfolio data...")

# Ingest the markdown portfolio file as a single LangChain document.
portfolio_docs = UnstructuredMarkdownLoader("portfolio_data.md").load()

# Chunk the document along its markdown header hierarchy so each section
# becomes an independently retrievable passage.
markdown_splitter = MarkdownHeaderTextSplitter(
    headers_to_split_on=[
        ("#", "Header 1"),
        ("##", "Header 2"),
        ("###", "Header 3"),
    ]
)
splits = markdown_splitter.split_text(portfolio_docs[0].page_content)

# Embed each chunk and index the vectors in an in-memory Chroma store.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vectorstore = Chroma.from_documents(documents=splits, embedding=embeddings)
# Load the assistant's system prompt from disk; fall back to a generic prompt
# so the app still starts when the file is absent.
_prompt_path = Path("system_prompt.txt")
try:
    system_prompt_content = _prompt_path.read_text(encoding="utf-8")
except FileNotFoundError:
    print("Warning: system_prompt.txt not found, using default.")
    system_prompt_content = "You are a helpful assistant."
# Prompt template: the system prompt is baked in as a partial variable, while
# retrieved context and the user question are filled per request.
template = """
{system_prompt}
Context: {context}
Question: {input}
Answer:"""

prompt = PromptTemplate(
    template=template,
    partial_variables={"system_prompt": system_prompt_content},
    input_variables=["context", "input"],
)

print("Initializing model...")

# Shared client for the Hugging Face serverless Inference API.
client = InferenceClient(token=hf_token)
# Custom LLM wrapper for InferenceClient
class HFInferenceClientLLM(LLM):
    """Minimal LangChain LLM wrapper around a huggingface_hub InferenceClient.

    Routes each prompt through the chat-completion endpoint of the configured
    model and returns the assistant message text.
    """

    client: Any  # huggingface_hub.InferenceClient (typed Any to satisfy pydantic)
    model: str = "meta-llama/Llama-3.2-3B-Instruct"
    max_new_tokens: int = 512
    temperature: float = 0.7  # generalized: was hard-coded inside _call

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Any = None,  # accepted so LangChain's callback plumbing doesn't break
        **kwargs: Any,
    ) -> str:
        """Send `prompt` as a single user message and return the reply text."""
        response = self.client.chat_completion(
            messages=[{"role": "user", "content": prompt}],
            model=self.model,
            max_tokens=self.max_new_tokens,
            temperature=self.temperature,
            stop=stop,  # was silently ignored before
        )
        return response.choices[0].message.content

    @property
    def _llm_type(self) -> str:
        # LangChain's base LLM declares _llm_type as a property; the original
        # plain-method override made `self._llm_type` evaluate to a bound
        # method instead of this identifier string.
        return "huggingface_inference_client"
# Instantiate the wrapper LLM and expose the vector store as a retriever
# for the RAG chain below.
llm = HFInferenceClientLLM(client=client, model="meta-llama/Llama-3.2-3B-Instruct")
retriever = vectorstore.as_retriever()
# Build RAG chain manually - NO langchain.chains imports needed!
def format_docs(docs):
    """Join retrieved document bodies into one context string, blank-line separated."""
    return "\n\n".join(doc.page_content for doc in docs)
# Assemble the RAG pipeline with LCEL: retrieve-and-format the context in
# parallel with passing the raw question through, then prompt -> LLM -> text.
rag_chain = (
    {"context": retriever | format_docs, "input": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)

print("RAG system ready!")
# Gradio chat function
def chat_with_portfolio(message, history):
    """Run the user's message through the RAG chain and return the answer.

    `history` is supplied by gr.ChatInterface but unused here. Any failure is
    surfaced to the chat window as an error string rather than crashing the UI.
    """
    try:
        return rag_chain.invoke(message)
    except Exception as e:
        return f"Error: {str(e)}"
# Custom CSS for the Gradio 5.0 front-end: fixed gradient background (body +
# html, so no white gaps), glassmorphism chat bubbles, and explicit button/icon
# sizing. Injected via gr.Blocks(css=...) below; the string is passed verbatim.
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Quicksand:wght@400;600&display=swap');
* {
font-family: 'Quicksand', sans-serif !important;
}
/* Fix for white space - apply gradient to body and html */
body, html {
margin: 0 !important;
padding: 0 !important;
height: 100% !important;
background: linear-gradient(135deg, #a78bfa 0%, #6366f1 50%, #3b82f6 100%) !important;
background-attachment: fixed !important;
}
.gradio-container {
background: transparent !important;
min-height: 100vh !important;
}
.main {
background: transparent !important;
}
/* Make the app container fill the space */
.app {
background: transparent !important;
}
/* Chat messages */
.message-wrap {
background: rgba(255, 255, 255, 0.15) !important;
backdrop-filter: blur(12px) !important;
-webkit-backdrop-filter: blur(12px) !important;
border: 1px solid rgba(255, 255, 255, 0.2) !important;
border-radius: 20px !important;
}
.message {
color: white !important;
}
/* Input textarea */
.input-wrap textarea,
textarea {
background: rgba(255, 255, 255, 0.1) !important;
backdrop-filter: blur(8px) !important;
color: white !important;
border: 1px solid rgba(255, 255, 255, 0.3) !important;
border-radius: 12px !important;
}
textarea::placeholder {
color: rgba(255, 255, 255, 0.6) !important;
}
/* Fix button and icon sizes for Gradio 5.0 */
button {
height: 44px !important;
min-height: 44px !important;
max-height: 44px !important;
padding: 8px 12px !important;
}
/* Target SVG icons inside buttons */
button svg,
button img {
width: 20px !important;
height: 20px !important;
min-width: 20px !important;
min-height: 20px !important;
max-width: 20px !important;
max-height: 20px !important;
}
/* Submit button specific sizing */
.submit-btn,
button[type="submit"],
.chatbot button[aria-label*="Submit"],
.chatbot button[aria-label*="Send"] {
width: 60px !important;
min-width: 60px !important;
max-width: 60px !important;
}
/* Action buttons (clear, undo, etc) */
.chatbot .action-button,
.chatbot button[aria-label*="Clear"],
.chatbot button[aria-label*="Undo"],
.chatbot button[aria-label*="Delete"] {
width: 44px !important;
min-width: 44px !important;
max-width: 44px !important;
}
/* Example buttons */
.examples button {
background: rgba(255, 255, 255, 0.1) !important;
border: 1px solid rgba(255, 255, 255, 0.2) !important;
color: white !important;
border-radius: 12px !important;
height: auto !important;
max-height: none !important;
padding: 12px 16px !important;
}
.examples button:hover {
background: rgba(255, 255, 255, 0.2) !important;
}
/* Chatbot container - INCREASED SIZE to 700px */
.chatbot {
background: rgba(255, 255, 255, 0.05) !important;
border-radius: 20px !important;
min-height: 700px !important;
/* Delete this line - height: 700px !important; */
}
/* Center and constrain the content */
.contain {
max-width: 1200px !important;
margin: 0 auto !important;
padding: 20px !important;
}
/* Hide footer */
footer {
display: none !important;
}
/* Additional icon fix for any nested elements */
button * svg,
button * img {
width: 20px !important;
height: 20px !important;
}
"""
# Build the Gradio UI: a title banner above a ChatInterface wired to the RAG chain.
with gr.Blocks(theme=gr.themes.Soft(), css=custom_css) as demo:
    with gr.Column():
        # NOTE(review): the "πββ¬" in the heading looks like mojibake (likely a
        # UTF-8 emoji decoded with the wrong encoding) — confirm intended character.
        gr.Markdown(
            "<h1 style='text-align: center; color: white; text-shadow: 2px 2px 4px rgba(0,0,0,0.3); margin-top: 20px;'>πββ¬ Chat with Adda-Bot</h1>"
        )
        gr.ChatInterface(
            fn=chat_with_portfolio,
            description="<span style='color: white; font-weight: 600;'>Ask me anything about Adda's portfolio!</span>",
            examples=[
                "What is Adda's experience with Python?",
                "Tell me about her education.",
                "What projects has Adda worked on?"
            ],
            type="messages",  # chat history as role/content message dicts
            textbox=gr.Textbox(placeholder="Ask about Adda's portfolio...", scale=7),
            submit_btn="Send",
        )

# Launch the web server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()