# NOTE(review): removed copy/paste artifacts that preceded the code — a
# "File size" line, git short hashes (79b3d3d, 756f55f, ...), and a copied
# line-number gutter (1-59) from a web code view. None of it was Python and
# it made the file unparseable.
from pydantic import BaseModel
from dotenv import load_dotenv
from groq import Groq
import gradio as gr

load_dotenv()

client = Groq()
session_memory_dict = {}

system_prompt = {
    "role": "system",
    "content": (
        "You are a friendly bot who responds kindly, even if the user is frustrated or angry. "
        "You are very knowledgeable and cute. "
        "Maintain conversation in a respectful and helpful manner."
    )
}

class ChatInput(BaseModel):
    session_id: str
    input: str

# Chat logic with memory handling
def chat_logic(session_id: str, user_input: str) -> str:
    if session_id not in session_memory_dict:
        session_memory_dict[session_id] = []

    if not any(m["role"] == "system" for m in session_memory_dict[session_id]):
        session_memory_dict[session_id].insert(0, system_prompt)

    session_memory_dict[session_id].append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=session_memory_dict[session_id]
    )

    ai_response = completion.choices[0].message.content

    session_memory_dict[session_id].append({"role": "assistant", "content": ai_response})
    print(f"[Session: {session_id}] AI Response: {ai_response}")
    return ai_response

# Gradio ChatInterface function wrapper
def chat_with_memory(message, history, session_id="gradio_default"):
    ai_reply = chat_logic(session_id=session_id, user_input=message)
    return ai_reply

# UI Components
chat_interface = gr.ChatInterface(
    fn=chat_with_memory,
    additional_inputs=[gr.Textbox(label="Session ID", value="gradio_default")],
    title="Friendly Chatbot with Memory, Is it na? ",
    description="Chat with Groq's LLaMA3 model. Each session is tracked using a session ID.",
    theme="soft"
)

chat_interface.launch()