# app.py — Koda Appliance Assistant (Hugging Face Space, commit 8b5432b)
import pandas as pd
from sentence_transformers import SentenceTransformer
from sklearn.metrics.pairwise import cosine_similarity
import numpy as np
# --- FAQ knowledge base ---------------------------------------------------
# Load the Koda FAQ and pre-compute question embeddings once at startup so
# every user query needs only a single encode + similarity pass.
faq_df = pd.read_csv("data/koda_faq.csv")
faq_questions = list(faq_df["question"])
faq_answers = list(faq_df["answer"])

# MiniLM sentence encoder; normalized embeddings make cosine similarity
# equivalent to a plain dot product.
embedder = SentenceTransformer("all-MiniLM-L6-v2")
faq_embs = embedder.encode(faq_questions, normalize_embeddings=True)
def retrieve_answer(user_q, top_k=1, thresh=0.35):
    """Return the best-matching FAQ answer for *user_q*.

    Args:
        user_q: Raw user question text.
        top_k: Reserved for future multi-result retrieval; the current
            implementation always returns only the single best match.
        thresh: Minimum cosine similarity for a match to be accepted.

    Returns:
        (answer, score): the matched answer string and its similarity when
        the best match clears *thresh*; (None, score) otherwise.
        score is 0.0 when the FAQ set is empty.
    """
    # Guard against an empty FAQ set: np.argmax on an empty array raises.
    if len(faq_embs) == 0:
        return None, 0.0
    q_emb = embedder.encode([user_q], normalize_embeddings=True)
    sims = cosine_similarity(q_emb, faq_embs)[0]
    idx = int(np.argmax(sims))
    score = float(sims[idx])
    if score >= thresh:
        return faq_answers[idx], score
    return None, score
def koda_assistant(user_input, history):
    """Chat handler wired to the Gradio UI.

    Looks up *user_input* against the embedded FAQ via retrieve_answer()
    and falls back to a generic support message when no FAQ entry clears
    the similarity threshold.

    Args:
        user_input: The user's message text.
        history: Running chat history, a list of (user, bot) tuples.

    Returns:
        (history, history): one copy feeds the Chatbot widget, the other
        the gr.State that carries the conversation across turns.
    """
    # BUG FIX: the original body referenced an undefined `FAQ` dict
    # (NameError on every message) and an undefined `qa_pipeline`, while the
    # embedding retriever defined above was never called. Route retrieval
    # through retrieve_answer() instead; the fallback message is unchanged.
    answer, _score = retrieve_answer(user_input)
    if answer is None:
        answer = "I'm not sure about that, but our support team can help."
    history.append(("👤 " + user_input, "🤖 " + answer))
    return history, history
# --- Gradio UI wiring -----------------------------------------------------
# NOTE(review): `gr` (gradio) is used below but no `import gradio as gr` is
# visible in this file chunk — confirm the import exists before deploying.
with gr.Blocks(title="Koda Appliance Assistant") as demo:
    gr.Markdown("# ⚙️ Koda Appliance Assistant")
    gr.Markdown("Ask me about appliance issues or Koda leasing FAQs.")
    chatbot = gr.Chatbot(height=400)  # conversation display
    user_input = gr.Textbox(placeholder="Type your question here...")
    state = gr.State([])  # per-session history: list of (user, bot) tuples
    submit_btn = gr.Button("Ask")
    # Button click and pressing Enter in the textbox share one handler;
    # koda_assistant returns (history, history) -> (chatbot, state).
    submit_btn.click(fn=koda_assistant, inputs=[user_input, state], outputs=[chatbot, state])
    user_input.submit(fn=koda_assistant, inputs=[user_input, state], outputs=[chatbot, state])

# Expose app object (hosting platforms may look up a module-level `app`).
app = demo

if __name__ == "__main__":
    demo.launch()