# NOTE(review): removed Hugging Face Spaces page residue ("Spaces:",
# "Running", "Running") that was scraped into this file — it is UI text,
# not source code, and breaks the Python file.
import os

import gradio as gr
from groq import Groq

# =========================
# 1. Setup
# =========================
# Read the Groq API key from the environment and fail fast with an
# actionable message if the Space secret was never configured.
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("GROQ_API_KEY not found. Please set it in Hugging Face Spaces → Settings → Repository Secrets.")

# Shared Groq client and the model used for every completion.
client = Groq(api_key=groq_api_key)
MODEL_NAME = "llama-3.3-70b-versatile"
| # ========================= | |
| # 2. Chat Function | |
| # ========================= | |
def chat_with_groq(message, history):
    """Answer *message* with the Groq chat-completions API.

    ``history`` is Gradio's messages-format history: a list of dicts with
    "role"/"content" keys (or empty/None on the first turn). Prior
    user/assistant turns whose content is a plain string are replayed so
    the model keeps conversational context; anything else is skipped.
    Returns the assistant's reply text.
    """
    system_turn = {"role": "system", "content": "You are a helpful, clear, and practical AI assistant."}
    # Replay only well-formed prior turns (guards against non-string
    # content, e.g. multimodal payloads, and unexpected roles).
    past_turns = [
        {"role": turn["role"], "content": turn["content"]}
        for turn in (history or [])
        if turn.get("role") in ("user", "assistant") and isinstance(turn.get("content"), str)
    ]
    payload = [system_turn, *past_turns, {"role": "user", "content": message}]

    completion = client.chat.completions.create(
        model=MODEL_NAME,
        messages=payload,
        temperature=0.7,
        max_completion_tokens=1024,
    )
    return completion.choices[0].message.content
# =========================
# 3. Gradio App
# =========================
# Wire the chat function into Gradio's built-in chat UI; the examples
# below are clickable starter prompts.
demo = gr.ChatInterface(
    fn=chat_with_groq,
    title="⚡ Groq AI Assistant",
    description="A simple Gradio chatbot powered by Groq and Llama 3.3.",
    examples=[
        "Explain Groq in simple terms.",
        "Give me 5 use cases of fast LLM inference.",
        "Write a Python function to clean a dataset.",
        "Explain RAG in beginner-friendly language.",
    ],
    fill_height=True,
    fill_width=False,
)
# =========================
# 4. Launch
# =========================
if __name__ == "__main__":
    # Bind all interfaces on port 7860 — the address/port Hugging Face
    # Spaces routes traffic to.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        debug=True,
    )