|
|
import os |
|
|
import json |
|
|
|
|
|
import gradio as gr |
|
|
from openai import OpenAI |
|
|
from pinecone import Pinecone |
|
|
from dotenv import load_dotenv |
|
|
|
|
|
# Load environment variables from a local .env file (verbose logs which file was found).
load_dotenv(verbose=True)

# --- Configuration -----------------------------------------------------------
PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")

# Pinecone index that holds the embedded WhatsApp conversation history.
INDEX_NAME = "whatsapp-history-1"

# OpenAI embedding model; must match the model used when the index was populated.
EMBED_MODEL = "text-embedding-3-small"

# Default number of nearest-neighbor matches to retrieve per query.
TOP_K = 5

# --- Service clients (created once at import time) ---------------------------
# NOTE(review): no validation that the API keys are present — a missing key
# surfaces later as an auth error from the client, not here.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

pc = Pinecone(api_key=PINECONE_API_KEY)

index = pc.Index(INDEX_NAME)
|
|
|
|
|
def retrieve_context(query, top_k=TOP_K):
    """Embed *query* and return the best-matching chat lines from Pinecone.

    The query is embedded with ``EMBED_MODEL``, the vector is searched against
    the module-level ``index``, and each match's metadata is formatted as
    ``"sender: message"``.

    Args:
        query: Free-text user question to search the history with.
        top_k: Number of nearest matches to retrieve (defaults to ``TOP_K``).

    Returns:
        A newline-joined string of ``"sender: message"`` lines; empty string
        when there are no matches.

    Raises:
        KeyError: If a match's metadata lacks a ``sender`` or ``message`` key.
    """
    response = client.embeddings.create(
        model=EMBED_MODEL,
        input=query,
    )
    query_emb = response.data[0].embedding

    result = index.query(vector=query_emb, top_k=top_k, include_metadata=True)

    # Each stored vector carries the original sender/message pair in metadata.
    contexts = [
        f"{match.metadata['sender']}: {match.metadata['message']}"
        for match in result.matches
    ]
    return "\n".join(contexts)
|
|
|
|
|
def respond(message, chat_history_json):
    """Answer *message* with RAG context and return the updated history twice.

    Retrieves relevant WhatsApp history via :func:`retrieve_context`, builds a
    system prompt containing it, and calls the chat model with the prior
    conversation plus the new user message.

    Args:
        message: The user's new question.
        chat_history_json: JSON-encoded list of ``{"role", "content"}`` dicts
            (the serialized Gradio state).

    Returns:
        A ``(chat_history, chat_history_json)`` tuple: the updated history as
        a list (for the Chatbot widget) and re-serialized as JSON (for state).
    """
    # Normalize to a list up front: json.loads can yield None (e.g. "null"),
    # which previously survived until chat_history.append() and crashed there.
    chat_history = json.loads(chat_history_json) or []

    context = retrieve_context(message)

    system_prompt = (
        "You are a helpful assistant for carching. Use the following past conversation data on whatsapp "
        "to answer the user's question if relevant:\n\n" + context
    )

    # System prompt first, then prior turns, then the new user message.
    messages = [{"role": "system", "content": system_prompt}]
    messages.extend(chat_history)
    messages.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-5-mini",
        messages=messages,
    )

    bot_reply = response.choices[0].message.content

    # Record both sides of the turn so the next call sees the full exchange.
    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": bot_reply})

    return chat_history, json.dumps(chat_history)
|
|
|
|
|
# Build the Gradio UI. Widget creation order fixes the on-page layout:
# title, chat transcript, input box, then the Send button in its own row.
with gr.Blocks() as demo:
    gr.Markdown("# Customer Support Chatbot")

    chatbot = gr.Chatbot(type="messages")
    user_box = gr.Textbox(placeholder="Ask a question...", show_label=False)
    # History is carried between events as a JSON string in session state.
    history_state = gr.State(json.dumps([]))

    with gr.Row():
        send_button = gr.Button("Send")

    def _on_send(user_text, history_json):
        # Delegate to respond(), prepending "" so the textbox is cleared.
        updated_chat, updated_json = respond(user_text, history_json)
        return "", updated_chat, updated_json

    # Both the button click and pressing Enter in the textbox fire the
    # same handler with the same inputs/outputs.
    for trigger in (send_button.click, user_box.submit):
        trigger(
            fn=_on_send,
            inputs=[user_box, history_state],
            outputs=[user_box, chatbot, history_state],
        )

demo.launch()