import os

import gradio as gr
from huggingface_hub import InferenceClient
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings

# 1. Load the full pre-computed vector database
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
vectorstore = FAISS.load_local(
    "learncpp_faiss_index_full",
    embeddings,
    allow_dangerous_deserialization=True,
)
retriever = vectorstore.as_retriever(search_kwargs={"k": 5})
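
# NOTE: allow_dangerous_deserialization=True is required because FAISS indexes
# are pickled on disk; only enable it for index files you built yourself, since
# unpickling untrusted data can execute arbitrary code. k=5 controls how many
# tutorial chunks are retrieved and injected into the prompt.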

# 2. Set up the native Hugging Face client
hf_token = os.environ.get("HF_TOKEN")
client = InferenceClient(
    model="Qwen/Qwen2.5-Coder-7B-Instruct",
    token=hf_token,
)
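
# On Spaces, HF_TOKEN should be added as a secret (Settings -> Variables and
# secrets); secrets are exposed to the app as environment variables at runtime.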

# 3. Define the chat interface
def chat_function(message, history):
    try:
        # Step A: Retrieve relevant C++ tutorials from our database
        docs = retriever.invoke(message)
        context = "\n\n".join(doc.page_content for doc in docs)

        # Step B: Build the system prompt with the injected context
        system_prompt = (
            "You are an expert C++ programming assistant. You answer questions strictly based "
            "on the provided context from learncpp.com. If the answer is not in the context, "
            "say 'I cannot find the answer in the LearnCpp documentation.'\n\n"
            f"Context:\n{context}"
        )

        # Step C: Format the conversation history for the modern chat API
        messages = [{"role": "system", "content": system_prompt}]
        if history:
            for msg in history:
                if isinstance(msg, dict):
                    role = msg.get("role", "user")
                    messages.append({"role": role, "content": msg.get("content", "")})
                else:
                    messages.append({"role": "user", "content": msg[0]})
                    messages.append({"role": "assistant", "content": msg[1]})
        messages.append({"role": "user", "content": message})
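
        # The isinstance check above keeps the app compatible with both Gradio
        # history formats: OpenAI-style role/content dicts (type="messages")
        # and legacy [user, assistant] pairs.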

        # Step D: Call the conversational backend directly (bypassing LangChain's broken wrapper!)
        response = client.chat_completion(
            messages=messages,
            max_tokens=512,
            temperature=0.1,
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"Error: {e}"
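
# A streaming variant is possible as well (hedged sketch, untested; assumes
# huggingface_hub's chat_completion accepts stream=True and that
# gr.ChatInterface renders generator output incrementally):
#
# def chat_function_streaming(message, history):
#     # ...build `messages` exactly as in chat_function above...
#     partial = ""
#     for chunk in client.chat_completion(
#         messages=messages, max_tokens=512, temperature=0.1, stream=True
#     ):
#         partial += chunk.choices[0].delta.content or ""
#         yield partial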

demo = gr.ChatInterface(
    fn=chat_function,
    title="LearnCpp.com AI Assistant",
    description="Ask me any C++ question! I retrieve my answers directly from the complete LearnCpp tutorials.",
    examples=[
        "What is a pointer?",
        "Explain dynamic memory allocation.",
        "Give me an example of the previous concept.",
    ],
)

if __name__ == "__main__":
    demo.launch()
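
# On Hugging Face Spaces, demo.launch() typically needs no extra arguments; the
# platform supplies the server name and port via environment variables.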