File size: 3,883 Bytes
f75b1ea 8972bbd 0dc9778 ae02656 d11f457 f75b1ea d11f457 ae02656 55df7ee ae02656 55df7ee f75b1ea ae02656 d11f457 f75b1ea d11f457 f75b1ea ae02656 55df7ee 6a90857 f75b1ea ae02656 6a90857 ae02656 d11f457 6a90857 d11f457 ae02656 6a90857 d11f457 ae02656 d11f457 6a90857 f75b1ea d11f457 f75b1ea d11f457 6a90857 f75b1ea 6a90857 f75b1ea d11f457 f75b1ea 6a90857 f75b1ea 6a90857 0dc9778 d11f457 f75b1ea 0dc9778 d11f457 f75b1ea d11f457 6a90857 d11f457 6a90857 ae02656 c7ec854 6a90857 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 |
import os
import requests
import gradio as gr
# -------------------------
# Configuration (edit here)
# -------------------------
# Option A (recommended): set an environment variable named OPENROUTER_API_KEY
# Option B: paste your key directly (not recommended for production)
# Read the documented OPENROUTER_API_KEY name first; fall back to the legacy
# "Open_Router" variable so existing setups keep working.
API_KEY = os.getenv("OPENROUTER_API_KEY") or os.getenv("Open_Router") or ""
# If you prefer to paste manually, replace the line above with e.g.
# API_KEY = "sk-or-v1-your-openrouter-key-here"
if not API_KEY:
    # Fail fast at startup rather than on the first request.
    raise ValueError(
        "OPENROUTER_API_KEY not found. "
        "Set it in your shell, or paste the key directly in the script (not recommended)."
    )

# OpenRouter's OpenAI-compatible chat-completions endpoint.
MODEL_URL = "https://openrouter.ai/api/v1/chat/completions"

# Headers sent with every request: bearer auth plus JSON content type.
HEADERS = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json",
}
# -------------------------
# Helper: call OpenRouter
# -------------------------
def query_gpt35(messages, max_tokens=500, temperature=0.7, timeout=30):
    """Send OpenAI-style chat messages to OpenRouter; return the reply text.

    messages: list of {"role": "...", "content": "..."} dicts.
    Transport or JSON-decoding failures are reported as a human-readable
    "Error: ..." string instead of raising, so the UI never crashes.
    """
    payload = {
        "model": "openai/gpt-3.5-turbo",
        "messages": messages,
        "max_tokens": max_tokens,
        "temperature": temperature,
    }

    # Keep the try narrow: only the network call and JSON decode can
    # produce the failures we translate into error strings.
    try:
        response = requests.post(MODEL_URL, headers=HEADERS, json=payload, timeout=timeout)
        response.raise_for_status()
        data = response.json()
    except requests.exceptions.RequestException as exc:
        return f"Error: Could not connect to the API - {exc}"
    except ValueError:
        return "Error: Could not decode JSON response from API"

    # Support both typical "choices[0].message.content" and possible alternative formats
    if "choices" in data and data["choices"]:
        first_choice = data["choices"][0]
        # Typical OpenAI-style shape.
        content = first_choice.get("message", {}).get("content")
        if content:
            return content.strip()
        # Some providers put the completion in a flat 'text' field instead.
        if "text" in first_choice:
            return first_choice["text"].strip()

    # Last resort: a top-level 'text' field on the response body.
    if isinstance(data, dict) and "text" in data:
        return data["text"].strip()
    return "Error: Unexpected response format from API"
# -------------------------
# Gradio response function
# -------------------------
def respond(user_message, chat_history):
    """Handle one chat turn for Gradio.

    chat_history arrives as a list of (user, bot) tuples, or None on the
    very first turn. Returns the updated history and an empty string so
    the input textbox is cleared.
    """
    history = chat_history or []

    # System prompt first, then replay every prior turn so the model
    # keeps conversational context, then the new user message.
    messages = [
        {"role": "system", "content": "You are LocoBot, a helpful assistant. Answer concisely and politely."}
    ]
    for past_user, past_bot in history:
        messages.append({"role": "user", "content": past_user})
        messages.append({"role": "assistant", "content": past_bot})
    messages.append({"role": "user", "content": user_message})

    reply = query_gpt35(messages)
    # New history list (no in-place mutation) plus "" to clear the textbox.
    return history + [(user_message, reply)], ""
# -------------------------
# Build and launch UI
# -------------------------
with gr.Blocks() as demo:
    gr.Markdown("## LocoBot — Your AI Companion (OpenRouter)")
    chat_log = gr.Chatbot()
    user_box = gr.Textbox(placeholder="Type your message and press Enter", lines=2)
    send_btn = gr.Button("Send")

    # Both the Send button and pressing Enter in the textbox route through
    # respond(); outputs refresh the chat log and clear the textbox.
    for trigger in (send_btn.click, user_box.submit):
        trigger(fn=respond, inputs=[user_box, chat_log], outputs=[chat_log, user_box])

if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable from other hosts/containers.
    demo.launch(server_name="0.0.0.0", server_port=7860)
|