import os

import requests
import gradio as gr

# -------------------------
# Configuration (edit here)
# -------------------------
# Option A (recommended): set an environment variable named OPENROUTER_API_KEY
# Option B: paste your key directly (not recommended for production)
#
# BUGFIX: the original read os.getenv("Open_Router"), which contradicted both
# the comment above and the error message below. We now check the documented
# name first and keep the old name as a backward-compatible fallback.
API_KEY = os.getenv("OPENROUTER_API_KEY") or os.getenv("Open_Router") or ""
# If you prefer to paste manually, replace the line above with e.g.
# API_KEY = "sk-or-v1-your-openrouter-key-here"

if not API_KEY:
    raise ValueError(
        "OPENROUTER_API_KEY not found. "
        "Set it in your shell, or paste the key directly in the script (not recommended)."
    )

MODEL_URL = "https://openrouter.ai/api/v1/chat/completions"
HEADERS = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json",
}


# -------------------------
# Helper: call OpenRouter
# -------------------------
def query_gpt35(messages, max_tokens=500, temperature=0.7, timeout=30):
    """Send OpenAI-style messages to OpenRouter and return the assistant text.

    Parameters
    ----------
    messages : list of {"role": "...", "content": "..."} dicts, oldest first.
    max_tokens : int, cap on the completion length.
    temperature : float, sampling temperature passed through to the model.
    timeout : int, seconds before the HTTP request is abandoned.

    Returns
    -------
    str — the assistant reply, or a human-readable "Error: ..." string on any
    network, HTTP, or parsing failure (this function never raises; the Gradio
    callback displays whatever string comes back).
    """
    payload = {
        "model": "openai/gpt-3.5-turbo",
        "messages": messages,
        "max_tokens": max_tokens,
        "temperature": temperature,
    }
    try:
        resp = requests.post(MODEL_URL, headers=HEADERS, json=payload, timeout=timeout)
        resp.raise_for_status()
        data = resp.json()

        # Typical OpenAI-style shape: choices[0].message.content. We also
        # accept legacy/alternative shapes ("text" on the choice or top level)
        # since OpenRouter proxies many providers.
        if "choices" in data and len(data["choices"]) > 0:
            choice = data["choices"][0]
            msg = choice.get("message", {}).get("content")
            if msg:
                return msg.strip()
            if "text" in choice:
                return choice["text"].strip()

        # Fallback: some responses carry a bare top-level "text" field.
        if isinstance(data, dict) and "text" in data:
            return data["text"].strip()

        return "Error: Unexpected response format from API"
    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and non-2xx via raise_for_status.
        return f"Error: Could not connect to the API - {e}"
    except ValueError:
        # resp.json() failed — body was not valid JSON.
        return "Error: Could not decode JSON response from API"
# -------------------------
# Gradio response function
# -------------------------
def respond(user_message, chat_history):
    """Gradio callback: append the user's turn and the model's reply.

    Parameters
    ----------
    user_message : str, the text currently in the input box.
    chat_history : list of (user, bot) tuples, or None on first call.

    Returns
    -------
    (new_history, "") — the updated tuple list for the Chatbot component and
    an empty string to clear the input textbox.
    """
    if chat_history is None:
        chat_history = []

    # ROBUSTNESS: don't burn an API call on an empty / whitespace-only
    # message; just leave the conversation as-is and clear the box.
    if not user_message or not user_message.strip():
        return chat_history, ""

    # Build messages for the model: system prompt first, then the prior
    # turns so the model has conversational context.
    messages = [
        {
            "role": "system",
            "content": "You are LocoBot, a helpful assistant. Answer concisely and politely.",
        }
    ]
    for user, bot in chat_history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": bot})

    # Add the current user message last.
    messages.append({"role": "user", "content": user_message})

    assistant_reply = query_gpt35(messages)

    # Append to history and return (and clear the input textbox).
    new_history = chat_history + [(user_message, assistant_reply)]
    return new_history, ""


# -------------------------
# Build and launch UI
# -------------------------
with gr.Blocks() as demo:
    gr.Markdown("## LocoBot — Your AI Companion (OpenRouter)")
    chatbot = gr.Chatbot()
    message = gr.Textbox(placeholder="Type your message and press Enter", lines=2)
    send = gr.Button("Send")

    # Both the Send button and Enter in the textbox trigger the same callback.
    send.click(fn=respond, inputs=[message, chatbot], outputs=[chatbot, message])
    message.submit(fn=respond, inputs=[message, chatbot], outputs=[chatbot, message])

if __name__ == "__main__":
    # 0.0.0.0 exposes the app on all interfaces (e.g. inside a container).
    demo.launch(server_name="0.0.0.0", server_port=7860)