import os

import gradio as gr
import requests

# Load API key from environment variables (recommended for security —
# never hard-code credentials in source).
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

# Groq's OpenAI-compatible chat-completions endpoint.
GROQ_API_URL = "https://api.groq.com/openai/v1/chat/completions"

# Fail fast on stalled connections instead of hanging the UI forever.
_REQUEST_TIMEOUT = 30  # seconds


def chat_with_groq(user_input: str) -> str:
    """Send a single user message to the Groq chat API and return the reply.

    Args:
        user_input: The message typed by the user.

    Returns:
        The assistant's reply text on success, or a human-readable
        error string on failure (missing key, HTTP error, or network
        problem). Errors are returned rather than raised so Gradio can
        display them directly in the output box.
    """
    if not GROQ_API_KEY:
        return "Error: API key is missing. Set the GROQ_API_KEY environment variable."

    headers = {
        "Authorization": f"Bearer {GROQ_API_KEY}",
        "Content-Type": "application/json",
    }
    data = {
        "model": "llama3-8b-8192",  # Using only the LLaMA 3-8B model
        "messages": [{"role": "user", "content": user_input}],
    }

    try:
        response = requests.post(
            GROQ_API_URL, json=data, headers=headers, timeout=_REQUEST_TIMEOUT
        )
        response_json = response.json()

        if response.status_code == 200:
            # Defensive chained .get() so a malformed payload degrades to
            # "No response" instead of raising KeyError/IndexError.
            return (
                response_json.get("choices", [{}])[0]
                .get("message", {})
                .get("content", "No response")
            )

        error_message = response_json.get("error", {}).get("message", "Unknown error")
        return f"Error {response.status_code}: {error_message}"
    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and invalid-JSON responses.
        return f"Request failed: {e}"


# Create the Gradio UI.
iface = gr.Interface(
    fn=chat_with_groq,
    inputs=gr.Textbox(lines=2, placeholder="Enter your message..."),
    outputs="text",
    title="Groq API Chatbot",
    description="A simple chatbot using LLaMA 3-8B on Groq API",
)

# Launch only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()