import os
import json

import gradio as gr
from openai import OpenAI
from pinecone import Pinecone
from dotenv import load_dotenv

load_dotenv(verbose=True)

PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
INDEX_NAME = "whatsapp-history-1"       # Pinecone index of embedded WhatsApp messages
EMBED_MODEL = "text-embedding-3-small"  # OpenAI embedding model; must match the index
TOP_K = 5                               # number of context messages retrieved per query

client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
pc = Pinecone(api_key=PINECONE_API_KEY)
index = pc.Index(INDEX_NAME)

def retrieve_context(query):
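    """Embed the query with OpenAI and return the TOP_K most similar
    WhatsApp messages from Pinecone as 'sender: message' lines."""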
    response = client.embeddings.create(
        model=EMBED_MODEL,
        input=query
    )
    query_emb = response.data[0].embedding

    # Use keyword argument 'vector' for the query
    result = index.query(vector=query_emb, top_k=TOP_K, include_metadata=True)

    contexts = []
    for match in result.matches:
        meta = match.metadata
        contexts.append(f"{meta['sender']}: {meta['message']}")

    return "\n".join(contexts)
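
# NOTE: retrieve_context assumes every vector in the index carries 'sender'
# and 'message' metadata. A minimal sketch of the upsert that would produce
# that shape (function name and ID scheme here are hypothetical, not part of
# this file):
#
#   def index_messages(messages):  # messages: list of {"sender", "message"} dicts
#       for i, m in enumerate(messages):
#           emb = client.embeddings.create(model=EMBED_MODEL, input=m["message"])
#           index.upsert(vectors=[{
#               "id": f"msg-{i}",
#               "values": emb.data[0].embedding,
#               "metadata": {"sender": m["sender"], "message": m["message"]},
#           }])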

def respond(message, chat_history_json):
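    """Answer a single turn: retrieve WhatsApp context, call the chat model
    with the running history, and return the updated history (as a list for
    the Chatbot component and as JSON for the State)."""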
    chat_history = json.loads(chat_history_json)

    context = retrieve_context(message)

    system_prompt = (
        "You are a helpful customer-support assistant for carching. Use the "
        "following past WhatsApp conversation history to answer the user's "
        "question when relevant:\n\n" + context
    )

    messages = [{"role": "system", "content": system_prompt}]
    messages.extend(chat_history or [])
    messages.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-5-mini",
        messages=messages,
    )

    bot_reply = response.choices[0].message.content

    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": bot_reply})

    # Return the history directly and the serialized state
    return chat_history, json.dumps(chat_history)
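
# Gradio UI: a messages-format Chatbot for display, plus a gr.State holding
# the same history as a JSON string so it persists between callback calls.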

with gr.Blocks() as demo:
    gr.Markdown("# Customer Support Chatbot")
    # type="messages" uses role/content dicts (the legacy tuple format is
    # deprecated and triggers a warning), matching the history built above
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox(placeholder="Ask a question...", show_label=False)
    state = gr.State(json.dumps([]))

    with gr.Row():
        submit_btn = gr.Button("Send")

    def submit_message(msg, state):
        # Clear the textbox after submission
        return "", *respond(msg, state)

    # Bind submit button AND hitting enter in textbox
    submit_btn.click(fn=submit_message, inputs=[msg, state], outputs=[msg, chatbot, state])
    msg.submit(fn=submit_message, inputs=[msg, state], outputs=[msg, chatbot, state])

demo.launch()
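
# launch() serves on http://127.0.0.1:7860 by default; pass share=True for a
# temporary public Gradio URL if the demo needs to be reached remotely.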