# HuggingFace Space — Gradio chatbot demo (scraped page-status header removed)
import os

import gradio as gr
from openai import OpenAI

# ---- Configuration ----
# HuggingFace access token, expected in the HF_TOKEN environment variable
# (set it as a Space secret).
HF_API_KEY = os.getenv("HF_TOKEN")

# OpenAI-compatible client pointed at the HuggingFace inference router.
client = OpenAI(
    base_url="https://router.huggingface.co/v1",
    api_key=HF_API_KEY,
)
# ---- Chat function ----
def chat_with_model(user_message, history):
    """Gradio submit handler: send the conversation to the model.

    Args:
        user_message: The text the user just typed.
        history: List of (user, assistant) message pairs currently shown
            in the Chatbot component; None on the first call.

    Returns:
        The updated history list, which Gradio renders in the Chatbot.
        On API failure the error text is shown as the assistant's reply
        instead of crashing the UI.
    """
    if history is None:
        history = []

    # Rebuild the full message list in the format the OpenAI-compatible
    # chat-completions API expects.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for human, bot in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": bot})
    messages.append({"role": "user", "content": user_message})

    try:
        completion = client.chat.completions.create(
            model="openai/gpt-oss-20b:nebius",
            messages=messages,
        )
        # openai>=1.0 returns a pydantic object: use attribute access —
        # message["content"] raises TypeError on these objects.
        assistant_reply = completion.choices[0].message.content
        history.append((user_message, assistant_reply))
    except Exception as e:
        # Best-effort: surface the failure in the chat window.
        history.append((user_message, f"Error: {str(e)}"))

    # Return ONLY the history: the .submit() wiring has a single output
    # component (the Chatbot), so returning (reply, history) would not
    # match the declared outputs.
    return history
# ---- Gradio UI ----
# Minimal chat layout: a Chatbot display plus a Textbox for input.
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Chatbot using HuggingFace Router (OpenAI API Compatible)")
    chatbot = gr.Chatbot(height=450)
    text_input = gr.Textbox(label="Your message")
    # The handler receives (message, current history) and returns the
    # updated history, which refreshes the Chatbot component.
    text_input.submit(chat_with_model, [text_input, chatbot], [chatbot])
    # A second listener on the same event clears the input box.
    text_input.submit(lambda: "", None, text_input)  # clear input

demo.launch()