Hugging Face Spaces — app status: Runtime error
| import os | |
| import json | |
| import gradio as gr | |
| import requests | |
| from dotenv import load_dotenv | |
| from llama_index.core.agent import ReActAgent | |
| from llama_index.core.tools import FunctionTool | |
| from llama_index.llms.openai import OpenAI | |
# Pull configuration from a local .env file when one is present.
load_dotenv()

# Fail fast if the OpenRouter credential was not provided — the app
# cannot serve any request without it.
openrouter_token = os.getenv("OPENROUTER_API_KEY")
if not openrouter_token:
    raise ValueError("OpenRouter token not found. Configure OPENROUTER_API_KEY in your environment variables")
def get_current_weather(location: str, unit: str = "fahrenheit") -> dict:
    """
    Get the current weather in a given location.

    Demo implementation backed by a small static table: three known
    cities return canned readings; anything else yields an "unknown"
    temperature in the caller-requested unit.

    Args:
        location (str): The city name, e.g. San Francisco, Tokyo
        unit (str): The unit of temperature, either celsius or fahrenheit

    Returns:
        dict: Weather information including location, temperature and unit
    """
    normalized = location.lower()
    # Substring match in insertion order mirrors the original if/elif chain.
    canned_reports = {
        "tokyo": {"location": "Tokyo", "temperature": "10", "unit": "celsius"},
        "san francisco": {"location": "San Francisco", "temperature": "72", "unit": "fahrenheit"},
        "paris": {"location": "Paris", "temperature": "22", "unit": "celsius"},
    }
    for city_key, report in canned_reports.items():
        if city_key in normalized:
            return dict(report)
    return {"location": normalized, "temperature": "unknown", "unit": unit}
# Expose the weather function to the agent as a callable tool.
weather_tool = FunctionTool.from_defaults(
    fn=get_current_weather,
    name="get_current_weather",
    description="Get the current weather in a given location",
)
# Custom OpenRouter implementation using OpenAI-compatible interface
class OpenRouterLLM(OpenAI):
    """OpenAI-compatible LLM client whose requests are redirected to the
    OpenRouter endpoint by overriding ``api_base``.

    NOTE(review): llama_index's ``OpenAI`` class validates model names
    against its table of known OpenAI models; a non-OpenAI id like
    ``qwen/...`` may raise at construction or on the first request.
    This is a plausible source of the Space's "Runtime error" — confirm
    against the installed llama_index version (``OpenAILike`` is the
    usual workaround).
    """

    def __init__(self, model_name="qwen/qwen-2.5-coder-32b-instruct:free", temperature=0.7, max_tokens=512, api_key=None):
        # Initialize with custom base URL and model name.
        # NOTE(review): verify that ``additional_headers`` is the kwarg the
        # installed llama_index version accepts — newer releases expect
        # ``default_headers``; an unknown kwarg would fail here.
        super().__init__(
            model=model_name,
            temperature=temperature,
            max_tokens=max_tokens,
            api_key=api_key,
            api_base="https://openrouter.ai/api/v1",
            # OpenRouter-recommended attribution headers.
            additional_headers={
                "HTTP-Referer": "weather-assistant-app",
                "X-Title": "Weather Assistant"
            }
        )
# Instantiate the OpenRouter-backed model that will drive the agent.
llm = OpenRouterLLM(
    api_key=openrouter_token,
    model_name="qwen/qwen-2.5-coder-32b-instruct:free",
    max_tokens=512,
    temperature=0.7,
)
# Build the ReAct agent and hand it the single weather tool.
agent = ReActAgent.from_tools([weather_tool], llm=llm, verbose=False)
def respond(message, history):
    """Route one chat turn through the agent and return its reply as text.

    Args:
        message: The user's latest chat message.
        history: Prior conversation turns, supplied by Gradio (unused —
            the agent keeps its own conversational state).
    """
    agent_reply = agent.chat(message)
    return str(agent_reply)
# Assemble the Gradio front end around the agent-backed chat handler.
with gr.Blocks(title="Weather Assistant") as demo:
    gr.Markdown("# 🌤️ Weather Assistant")
    gr.Markdown("### Ask about the weather in Tokyo, San Francisco, or Paris")
    chatbot = gr.ChatInterface(
        respond,
        title="Chat with Weather Assistant",
        examples=[
            "What's the weather like in Tokyo?",
            "How's the weather in San Francisco?",
            "Tell me about the current weather in Paris",
            "What should I wear in Tokyo based on the weather?",
            "Is it warm in San Francisco?",
        ],
    )
    gr.Markdown("### Built with LlamaIndex and OpenRouter API")

# Start the web server only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()