# Sentinel02 / app.py
# Shreyas94's picture
# Update app.py
# a1c1286 verified
# raw
# history blame
# 1.7 kB
import os
import gradio as gr
from groq import Groq
import traceback
import sys
# Pull the Groq API key from the environment. When it is absent, warn on
# stderr (so the message shows up in the Spaces Logs tab) but still build the
# client, letting the failure surface per-request instead of at import time.
api_key = os.environ.get("GROQ_API_KEY")
if not api_key:
    sys.stderr.write("❌ GROQ_API_KEY is missing.\n")
client = Groq(api_key=api_key)
# Chat function with logging to stderr (for Spaces)
def chat_inference(message, history, example_prompt):
    """Send the user's message — with prior chat history — to Groq and return the reply.

    Parameters
    ----------
    message : str
        The latest user message from the Gradio chat box.
    history : list[dict] | None
        Prior turns in Gradio "messages" format (dicts with at least
        "role" and "content" keys).
    example_prompt : str
        Value of the optional example-prompt textbox. Not sent to the model;
        kept in the signature so the ChatInterface additional_inputs wiring
        stays intact.

    Returns
    -------
    str
        The model's reply, or a user-facing error string on any failure
        (details are logged to stderr for the Spaces Logs tab).
    """
    try:
        print(f"πŸ“₯ User message: {message}", file=sys.stderr)
        # Forward the prior turns so the model sees the conversation context
        # (the original implementation dropped `history`, making every turn
        # stateless). Keep only role/content — Gradio may attach extra keys
        # the API would reject.
        messages = [
            {"role": turn["role"], "content": turn["content"]}
            for turn in (history or [])
        ]
        messages.append({"role": "user", "content": message})
        response = client.chat.completions.create(
            messages=messages,
            model="compound-beta"
        )
        reply = response.choices[0].message.content
        print(f"πŸ“€ Groq reply: {reply}", file=sys.stderr)
    except Exception:
        # Best-effort UI: log the full traceback for operators, return a
        # friendly message to the chat window instead of crashing the app.
        error_trace = traceback.format_exc()
        print("❌ Exception occurred:", file=sys.stderr)
        print(error_trace, file=sys.stderr)
        reply = "⚠️ Error: Connection error or API issue.\n\nCheck the Logs tab in Hugging Face Spaces for more details."
    return reply
# Optional example-prompt textbox, shown in an accordion under the chat box
# and passed through to chat_inference as its third argument.
example_prompt_box = gr.Textbox(
    label="πŸ” Example Prompt",
    value="What were the main highlights from the latest Apple keynote?",
)
additional_inputs = [example_prompt_box]

# Assemble the chat UI: messages-format history, open accordion for the
# extra input, default theme.
chat_interface = gr.ChatInterface(
    chat_inference,
    additional_inputs=additional_inputs,
    additional_inputs_accordion=gr.Accordion(
        "βš™οΈ Configuration & Advanced Parameters", open=True
    ),
    title="πŸ” AI-Powered Real-Time Search with Groq",
    description="Ask anything that requires real-time info β€” powered by Groq’s `compound-beta` model.",
    theme="default",
    type="messages",
)

# Start the server only when run directly (Spaces executes this as a script).
if __name__ == "__main__":
    chat_interface.launch()