|
|
import os |
|
|
import requests |
|
|
import gradio as gr |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- OpenRouter configuration --------------------------------------------
# Prefer the conventional variable name (which the error message below has
# always referenced); keep the legacy "Open_Router" name as a fallback so
# existing shells keep working.
API_KEY = os.getenv("OPENROUTER_API_KEY") or os.getenv("Open_Router") or ""


if not API_KEY:
    # Fail fast at import time: every request needs the bearer token.
    raise ValueError(
        "OPENROUTER_API_KEY not found. "
        "Set it in your shell, or paste the key directly in the script (not recommended)."
    )


# OpenAI-compatible chat-completions endpoint on OpenRouter.
MODEL_URL = "https://openrouter.ai/api/v1/chat/completions"

# Static headers reused on every request; the key never changes at runtime.
HEADERS = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json",
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def query_gpt35(messages, max_tokens=500, temperature=0.7, timeout=30,
                model="openai/gpt-3.5-turbo"):
    """
    Send OpenAI-style messages to OpenRouter and return the assistant's text.

    Parameters
    ----------
    messages : list of ``{"role": ..., "content": ...}`` dicts.
    max_tokens : int, upper bound on the completion length.
    temperature : float, sampling temperature.
    timeout : float, seconds before the HTTP request is aborted.
    model : str, OpenRouter model slug. Defaulted so existing callers
        are unaffected; pass another slug to reuse this helper.

    Returns
    -------
    str
        The assistant reply, or a human-readable ``"Error: ..."`` string.
        Errors are returned rather than raised so the Gradio UI can show
        them directly in the chat window.
    """
    payload = {
        "model": model,
        "messages": messages,
        "max_tokens": max_tokens,
        "temperature": temperature,
    }

    # Keep only the lines that can actually raise inside the try block.
    try:
        resp = requests.post(MODEL_URL, headers=HEADERS, json=payload, timeout=timeout)
        resp.raise_for_status()
        data = resp.json()
    except requests.exceptions.RequestException as e:
        return f"Error: Could not connect to the API - {e}"
    except ValueError:
        # requests raises a ValueError subclass when the body is not JSON.
        return "Error: Could not decode JSON response from API"

    # Normal shape: data["choices"][0]["message"]["content"].
    choices = data.get("choices") if isinstance(data, dict) else None
    if choices:
        choice = choices[0]

        msg = choice.get("message", {}).get("content")
        if msg:
            return msg.strip()

        # Completion-style responses put the text directly on the choice.
        if "text" in choice:
            return choice["text"].strip()

    # Non-standard fallback: a top-level "text" field.
    if isinstance(data, dict) and "text" in data:
        return data["text"].strip()
    return "Error: Unexpected response format from API"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def respond(user_message, chat_history):
    """
    Gradio callback: append the user's message and the model's reply.

    Parameters
    ----------
    user_message : str typed into the textbox.
    chat_history : list of (user, bot) tuples, or None on the first call.

    Returns
    -------
    (new_history, "")
        The updated history plus an empty string so the wired outputs
        clear the input textbox.
    """
    if chat_history is None:
        chat_history = []

    # Ignore empty / whitespace-only submissions instead of spending an
    # API call on them; still clear the textbox.
    if not user_message or not user_message.strip():
        return chat_history, ""

    # Rebuild the full OpenAI-style conversation from the stored tuples so
    # the model sees the prior context on every turn.
    messages = [
        {"role": "system", "content": "You are LocoBot, a helpful assistant. Answer concisely and politely."}
    ]
    for user, bot in chat_history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": bot})
    messages.append({"role": "user", "content": user_message})

    assistant_reply = query_gpt35(messages)

    # Return a new list (don't mutate in place) so Gradio detects the change.
    new_history = chat_history + [(user_message, assistant_reply)]
    return new_history, ""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- Gradio UI wiring -----------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("## LocoBot — Your AI Companion (OpenRouter)")

    chat_window = gr.Chatbot()
    user_input = gr.Textbox(placeholder="Type your message and press Enter", lines=2)
    send_button = gr.Button("Send")

    # Both the Send button and pressing Enter route through respond();
    # the outputs update the chat history and clear the textbox.
    for register in (send_button.click, user_input.submit):
        register(fn=respond, inputs=[user_input, chat_window], outputs=[chat_window, user_input])


if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable inside containers.
    demo.launch(server_name="0.0.0.0", server_port=7860)
|
|
|