# chatbotgroq / app.py
# Author: aartstudio — "Update app.py" (commit e386127, verified)
# -*- coding: utf-8 -*-
"""app.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1g3WXzYMdAfSHH2i3i1Ezy2DQUYYifFJI
"""
#!pip install groq gradio
import os
from groq import Groq
import gradio as gr
# ---------- SET YOUR API KEY ----------
# Option 1: Set as environment variable before running:
# export GROQ_API_KEY="your_api_key_here"
# Option 2: Put it directly here (less safe):
# os.environ["GROQ_API_KEY"] = "your_api_key_here"
#the below 3 lines is for colab, comment if you are using HF
#from google.colab import userdata
#GROQ_API_KEY = userdata.get('GROQ_API_KEY')
#client = Groq(api_key=GROQ_API_KEY)
#the below 1 lines is for HF, comment if you are using colab
# NOTE(review): os.environ.get returns None when GROQ_API_KEY is unset;
# the Groq client will then raise at construction time — set the key first.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
# Choose a Llama model available on Groq, e.g.:
# Model identifiers change over time — check the Groq console for current names.
MODEL_NAME = "llama-3.3-70b-versatile" # check console for latest names
def chat_with_groq(message, history):
    """Send the conversation to Groq and return the assistant's reply.

    message: latest user input (string)
    history: list of [user, assistant] pairs from Gradio
    returns: assistant reply (string)
    """
    # Flatten the Gradio pair history into role-tagged chat messages,
    # then append the newest user turn at the end.
    messages = []
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        if assistant_turn is not None:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # Single non-streaming completion; take the first candidate's text.
    completion = client.chat.completions.create(
        model=MODEL_NAME,
        messages=messages,
        temperature=0.7,
        max_tokens=512,
    )
    return completion.choices[0].message.content
# ---------- GRADIO UI ----------
with gr.Blocks() as demo:
    gr.Markdown("# 💬 Groq Llama Chatbot")
    gr.Markdown(
        "Chat with a Llama model served through the Groq API. "
        "Make sure you set your `GROQ_API_KEY` before running."
    )

    chat_window = gr.Chatbot(height=400)
    user_box = gr.Textbox(label="Type your message here")
    clear_btn = gr.Button("Clear")

    def queue_user_turn(user_message, chat_history):
        # Stage the user's turn with a None placeholder reply;
        # generate_bot_turn fills the placeholder in the next step.
        return "", chat_history + [[user_message, None]]

    def generate_bot_turn(chat_history):
        # The pending turn is the last entry; everything before it is context.
        *context, pending = chat_history
        pending[1] = chat_with_groq(pending[0], context)
        return chat_history

    # Submit clears the textbox and appends the turn, then the model answers.
    user_box.submit(queue_user_turn, [user_box, chat_window], [user_box, chat_window]).then(
        generate_bot_turn, [chat_window], [chat_window]
    )
    clear_btn.click(lambda: None, None, chat_window, queue=False)

# launch app
if __name__ == "__main__":
    demo.launch()