import os
import gradio as gr
from groq import Groq
# =========================
# 1. Setup
# =========================

# Read the Groq API key from the environment and fail fast with an
# actionable message if it is missing (configured as a repository secret
# on Hugging Face Spaces).
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError(
        "GROQ_API_KEY not found. Please set it in Hugging Face Spaces → Settings → Repository Secrets."
    )

# Single shared API client for the whole app.
client = Groq(api_key=groq_api_key)

# Model served by Groq; change here to switch models globally.
MODEL_NAME = "llama-3.3-70b-versatile"
# =========================
# 2. Chat Function
# =========================
def chat_with_groq(message, history):
    """Send the conversation to Groq and return the assistant's reply.

    Args:
        message: Latest user message (plain string).
        history: Prior turns in Gradio "messages" format — a list of
            ``{"role": ..., "content": ...}`` dicts; may be None or empty.

    Returns:
        The assistant's reply text from the model.
    """
    # Seed the conversation with a fixed system prompt.
    messages = [{"role": "system", "content": "You are a helpful, clear, and practical AI assistant."}]

    # Replay only well-formed user/assistant turns; Gradio history may
    # contain other roles or non-string content (e.g. file payloads)
    # that the chat API cannot accept.
    for item in history or []:
        if item.get("role") in ("user", "assistant") and isinstance(item.get("content"), str):
            messages.append({"role": item["role"], "content": item["content"]})

    messages.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model=MODEL_NAME,
        messages=messages,
        temperature=0.7,
        max_completion_tokens=1024,
    )
    return response.choices[0].message.content
# =========================
# 3. Gradio App
# =========================

# Chat UI wired to chat_with_groq; ChatInterface manages the message
# history and passes it to the callback on every turn.
demo = gr.ChatInterface(
    fn=chat_with_groq,
    title="⚡ Groq AI Assistant",
    description="A simple Gradio chatbot powered by Groq and Llama 3.3.",
    examples=[
        "Explain Groq in simple terms.",
        "Give me 5 use cases of fast LLM inference.",
        "Write a Python function to clean a dataset.",
        "Explain RAG in beginner-friendly language.",
    ],
    fill_height=True,
    fill_width=False,
)
# =========================
# 4. Launch
# =========================
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces so the Spaces proxy can reach the server
        server_port=7860,       # standard Hugging Face Spaces port
        debug=True,
    )