# NOTE: Hugging Face Spaces page residue (file listing / blame gutter)
# was captured above the source during extraction; actual code starts below.
import os
import gradio as gr
from groq import Groq
import traceback
import sys
# Pull the Groq API key out of the environment. A missing key is only
# warned about (to stderr, so it surfaces in the Spaces "Logs" tab) —
# the app still starts and the failure is reported per-request instead.
api_key = os.environ.get("GROQ_API_KEY")
if not api_key:
    print("β GROQ_API_KEY is missing.", file=sys.stderr)

# The client is constructed regardless; with no key, the API call inside
# chat_inference fails and is handled by its error branch.
client = Groq(api_key=api_key)
# Chat function with logging to stderr (for Spaces)
def chat_inference(message, history, example_prompt):
    """Send the conversation to Groq and return the model's reply.

    Fix: the ``history`` argument was previously ignored, so the model
    received only the latest message and had no multi-turn context. Prior
    turns are now forwarded to the API.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list | None
        Prior turns as Gradio ``type="messages"`` dicts with ``role`` and
        ``content`` keys (assumed shape — set by the ChatInterface config).
    example_prompt : str
        Value of the extra textbox in the accordion (currently unused by
        the inference itself).

    Returns
    -------
    str
        The model reply, or a user-facing error string on failure.
    """
    try:
        print(f"π₯ User message: {message}", file=sys.stderr)
        # Rebuild history with only the keys the API accepts, dropping
        # any turn that lacks a usable role/content pair.
        messages = [
            {"role": turn["role"], "content": turn["content"]}
            for turn in (history or [])
            if turn.get("role") in ("user", "assistant") and turn.get("content")
        ]
        messages.append({"role": "user", "content": message})
        response = client.chat.completions.create(
            messages=messages,
            model="compound-beta"
        )
    except Exception:
        # Broad catch is deliberate: this is the UI boundary. Log the full
        # traceback to stderr (visible in the Spaces Logs tab) and return a
        # friendly message instead of crashing the interface.
        error_trace = traceback.format_exc()
        print("β Exception occurred:", file=sys.stderr)
        print(error_trace, file=sys.stderr)
        reply = "β οΈ Error: Connection error or API issue.\n\nCheck the Logs tab in Hugging Face Spaces for more details."
    else:
        # Success path kept out of the try body so unrelated errors here
        # are not mislabeled as API failures.
        reply = response.choices[0].message.content
        print(f"π€ Groq reply: {reply}", file=sys.stderr)
    return reply
# Extra UI control rendered in the accordion under the chat box; Gradio
# passes its value to chat_inference as the `example_prompt` argument.
additional_inputs = [
    gr.Textbox(
        label="π Example Prompt",
        value="What were the main highlights from the latest Apple keynote?",
    ),
]
# Assemble the Gradio chat UI. `type="messages"` makes Gradio hand the
# history to `fn` as a list of role/content dicts.
chat_interface = gr.ChatInterface(
    fn=chat_inference,
    additional_inputs=additional_inputs,
    additional_inputs_accordion=gr.Accordion(
        "βοΈ Configuration & Advanced Parameters",
        open=True,
    ),
    title="π AI-Powered Real-Time Search with Groq",
    description=(
        "Ask anything that requires real-time info β powered by Groqβs "
        "`compound-beta` model."
    ),
    theme="default",
    type="messages",
)

# Launch only when executed as a script (how Spaces runs this module).
if __name__ == "__main__":
    chat_interface.launch()