# Chat_API / app.py — Groq-backed chatbot with per-session memory (Gradio UI).
# Author: Fazeel Asghar
from pydantic import BaseModel
from dotenv import load_dotenv
from groq import Groq
import gradio as gr
load_dotenv()  # pull GROQ_API_KEY (and any other settings) from a local .env file
client = Groq()  # Groq SDK client; picks up its API key from the environment
# Per-session chat history: session_id -> list of {"role", "content"} message dicts.
session_memory_dict: dict = {}
# Shared system message inserted at the head of every session's history so the
# model keeps a consistent persona across all conversations.
system_prompt = {
    "role": "system",
    "content": (
        "You are a friendly bot who responds kindly, even if the user is frustrated or angry. "
        "You are very knowledgeable and cute. "
        "Maintain conversation in a respectful and helpful manner."
    )
}
class ChatInput(BaseModel):
    """Request schema pairing a session identifier with the user's message.

    NOTE(review): not referenced anywhere in this file — presumably kept for a
    REST/FastAPI endpoint; confirm before removing.
    """
    # Key isolating one conversation's memory from another's.
    session_id: str
    # The user's message text. (Name shadows the builtin, but it is part of the
    # public schema, so it must stay.)
    input: str
def chat_logic(session_id: str, user_input: str) -> str:
    """Record *user_input* in the session's history, query the model, and
    return the assistant's reply (which is also appended to the history).

    Args:
        session_id: Key isolating this conversation's memory from others.
        user_input: The user's latest message.

    Returns:
        The assistant's reply text.
    """
    # Single lookup instead of `in` test + indexing; seed a brand-new session
    # with the shared persona prompt once, rather than scanning the whole
    # history for a system message on every call.
    history = session_memory_dict.setdefault(session_id, [system_prompt])
    history.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=history,
    )
    ai_response = completion.choices[0].message.content

    # Store the reply so the next turn in this session has full context.
    history.append({"role": "assistant", "content": ai_response})
    print(f"[Session: {session_id}] AI Response: {ai_response}")
    return ai_response
# Gradio Interface
def gradio_chat(user_input, session_id="gradio_default"):
    """Thin Gradio adapter: forwards the textbox values to chat_logic."""
    reply = chat_logic(session_id=session_id, user_input=user_input)
    return reply
# Build the web UI: one textbox for the message and one for the session id,
# so separate conversations keep separate histories.
demo = gr.Interface(
    fn=gradio_chat,
    inputs=[
        gr.Textbox(label="Your message"),
        gr.Textbox(label="Session ID", value="gradio_default"),
    ],
    outputs=gr.Textbox(label="Response"),
    title="Chatbot with memory",
    description="Chat with Groq's LLaMA3 model. Handles sessions separately using IDs.",
)
demo.launch()