Hugging Face Spaces paste — the Space's status page reported: Runtime error.
# app.py
#
# Gradio app that answers questions by retrieving the most similar
# paragraphs from a Word-document knowledge base (KB).
import os

from docx import Document
from sentence_transformers import SentenceTransformer
import numpy as np
import gradio as gr

# Load the KB document and keep only non-empty paragraphs.
kb_path = "model/kb.docx"
doc = Document(kb_path)
kb_text = [p.text for p in doc.paragraphs if p.text.strip() != ""]

# One paragraph per retrieval chunk (simplest possible chunking).
chunks = kb_text

# Sentence-embedding model used for both the KB chunks and incoming queries.
embed_model = SentenceTransformer('all-MiniLM-L6-v2')

# Pre-compute embeddings for every KB chunk once at startup.
# NOTE(review): convert_to_tensor=True yields torch tensors; downstream
# similarity code must convert to numpy before numpy-only operations.
embeddings = embed_model.encode(chunks, convert_to_tensor=True)
# Simple cosine-similarity retrieval over the pre-computed KB embeddings.
def get_most_relevant(query, top_k=1, *, corpus=None, corpus_embeddings=None, model=None):
    """Return the ``top_k`` KB chunks most similar to ``query``.

    Similarity is cosine similarity between the query embedding and each
    chunk embedding; results are ordered most-similar first.

    Parameters
    ----------
    query : str
        The user's question.
    top_k : int
        Number of chunks to return (clamped to the corpus size).
    corpus, corpus_embeddings, model : optional, keyword-only
        Overrides for the module-level ``chunks`` / ``embeddings`` /
        ``embed_model`` globals (useful for testing). Defaults preserve
        the original behavior.

    Returns
    -------
    list[str]
        The best-matching chunks; empty list for an empty corpus.
    """
    if corpus is None:
        corpus = chunks
    if corpus_embeddings is None:
        corpus_embeddings = embeddings
    if model is None:
        model = embed_model

    # np.asarray also accepts CPU torch tensors (the encoder returns torch
    # tensors with convert_to_tensor=True), so all math below is pure numpy.
    # The original mixed torch tensors into numpy ops and then sliced the
    # sort result with a negative step, which torch rejects at runtime —
    # presumably the Space's "Runtime error".
    q = np.asarray(model.encode([query], convert_to_tensor=True)).reshape(-1)
    mat = np.asarray(corpus_embeddings)
    if mat.size == 0:
        return []

    # Cosine similarity; guard against zero norms to avoid divide-by-zero.
    denom = np.linalg.norm(mat, axis=1) * np.linalg.norm(q)
    denom = np.where(denom == 0.0, 1e-12, denom)
    scores = (mat @ q) / denom

    # Clamp top_k: the caller asks for 3 even when the KB has fewer chunks.
    k = max(0, min(int(top_k), len(corpus)))
    top_indices = np.argsort(scores)[::-1][:k]
    return [corpus[i] for i in top_indices]
# Gradio interface function
def answer_query(query):
    """Gradio callback: answer a question using the most relevant KB text.

    Retrieves the three best-matching chunks and joins them with blank
    lines into a single response string.
    """
    return "\n\n".join(get_most_relevant(query, top_k=3))
# Gradio UI: one text box in, the retrieved KB answer out.
iface = gr.Interface(
    fn=answer_query,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
    outputs="textbox",
    title="FPV2 KB Chatbot",
    description="Ask questions about your KB document.",
)

# Guard the launch so importing this module (e.g. from tests or another
# app) does not start a server; running `python app.py` is unchanged.
if __name__ == "__main__":
    iface.launch(server_name="0.0.0.0", server_port=7860)