# Gradio-to-api / app.py
# Author: Real Null
# Update app.py (commit fb622ce, verified)
import os
import gradio as gr
from gradio_client import Client
# Configuration (all overridable via environment variables)
# Target Space to proxy requests to, as "owner/space-name".
GRADIO_SPACE = os.environ.get("GRADIO_SPACE", "amd/gpt-oss-120b-chatbot")
# Shared secret callers must present; if unset, every request is rejected.
API_KEY = os.environ.get("PROXY_API_KEY")
GRADIO_API = "chat" # remove leading slash for gradio_client
# NOTE(review): Client() connects to the Space at import time — module import
# fails if the Space is unreachable. Acceptable for a Spaces app.py entrypoint.
client = Client(GRADIO_SPACE)
def chat_proxy(messages, api_key=""):
    """Proxy an OpenAI-style chat request to the configured Gradio Space.

    Parameters:
        messages: list of OpenAI-style message dicts, each with "role" and
            "content" keys (roles "user" and "system" are honored).
        api_key: caller-supplied key; must match the server's PROXY_API_KEY.

    Returns:
        A dict shaped like an OpenAI chat.completion response, or
        {"error": "..."} when authentication, validation, or the upstream
        call fails (errors are returned, never raised, so the Gradio
        endpoint always produces JSON).
    """
    # Check API key
    if not API_KEY:
        return {"error": "Server API key not set."}
    if api_key != API_KEY:
        return {"error": "Invalid API key."}
    # Validate input: a malformed payload must not raise (it previously
    # crashed on reversed(None) / m["role"] for non-dict entries).
    if not isinstance(messages, list) or not messages:
        return {"error": "messages must be a non-empty list."}
    # Extract the most recent user message and a system prompt.
    # Walking in reverse means the earliest system message wins (each later
    # one seen in reverse order overwrites the previous).
    user_message = ""
    system_prompt = "You are a helpful assistant."
    for m in reversed(messages):
        if not isinstance(m, dict):
            continue  # skip malformed entries instead of raising
        role = m.get("role")
        content = m.get("content", "")
        if role == "user" and not user_message:
            user_message = content
        elif role == "system":
            system_prompt = content
    if not user_message:
        return {"error": "No user message found in messages."}
    # Call Gradio space; report failures rather than letting them propagate.
    try:
        result = client.predict(
            message=user_message,
            system_prompt=system_prompt,
            temperature=0.7,
            api_name=GRADIO_API
        )
    except Exception as e:
        return {"error": f"Gradio client error: {e}"}
    # Return OpenAI-compatible response (token usage is not reported by the
    # upstream Space, so usage counts are zeroed).
    return {
        "id": "chatcmpl-proxy",
        "object": "chat.completion",
        "created": 0,
        "model": "gradio-proxy",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": str(result)},
                "finish_reason": "stop"
            }
        ],
        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}
    }
# Expose the proxy as a JSON-in/JSON-out API via a Gradio Interface.
demo = gr.Interface(
    fn=chat_proxy,
    inputs=[gr.JSON(label="Messages"), gr.Textbox(label="API Key")],
    outputs=gr.JSON(label="Response")
)

# Guard the launch so importing this module (e.g. from tests or tooling)
# has no side effects; Spaces runs `python app.py`, so script behavior is
# unchanged. Launch without share=True (HF Spaces does not support share).
if __name__ == "__main__":
    demo.launch()