# Hugging Face Spaces status banner captured with the code: "Spaces: Sleeping"
import datetime
import os
from zoneinfo import ZoneInfo

import gradio as gr
import pytz
from llama_index.core.agent import ReActAgent
from llama_index.core.tools import FunctionTool
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
# 1. SETUP LLM
hf_token = os.getenv("HF_TOKEN")

# NOTE: task must be "conversational" — per the original fix comment, the
# inference provider rejects this chat model under the default task.
llm = HuggingFaceInferenceAPI(
    model_name="Qwen/Qwen2.5-7B-Instruct",
    token=hf_token,
    task="conversational",
    provider="together",
    # No native function-calling here; tool use goes through the text-based
    # ReAct protocol instead (see RE_ACT_PROMPT below).
    is_function_calling_model=False,
)
# 2. DEFINE YOUR TOOLS (Must be defined before the Agent)
def get_tokyo_time() -> str:
    """Return the current time in Tokyo, Japan, as a human-readable string.

    Returns:
        A sentence of the form "The current time in Tokyo is HH:MM:SS".
    """
    # stdlib zoneinfo replaces the third-party pytz dependency; the
    # Asia/Tokyo zone and HH:MM:SS output are unchanged.
    now = datetime.datetime.now(ZoneInfo("Asia/Tokyo"))
    return f"The current time in Tokyo is {now.strftime('%H:%M:%S')}"
def multiply(a: float, b: float) -> float:
    """Multiply two numbers (a and b) and return the result."""
    return a * b
# Wrap them in LlamaIndex Tool objects so the agent can call them;
# from_defaults derives each tool's name/description from the function.
tools = [
    FunctionTool.from_defaults(fn=get_tokyo_time),
    FunctionTool.from_defaults(fn=multiply),
]
# 3. THE "STABILITY" PROMPT
# Spells out the exact Thought/Action/Observation text protocol so the
# non-function-calling model stays on the ReAct rails.
RE_ACT_PROMPT = """You are a helpful assistant.
For every query, you MUST follow this sequence:
Thought: <your reasoning>
Action: <tool_name>
Action Input: {"arg1": value}
Observation: <result from tool>
(repeat if needed)
Thought: I have the final answer.
Answer: <your final response to the user>
"""
# 4. CREATE THE AGENT
# 'tools' and 'llm' are defined above; 'context' injects the stability
# prompt into the agent's system context.
agent = ReActAgent.from_tools(
    tools,
    llm=llm,
    verbose=True,
    context=RE_ACT_PROMPT,
)
# 5. GRADIO INTERFACE
def chat(message, history):
    """Gradio ChatInterface callback: forward the user message to the agent.

    Args:
        message: The user's latest message.
        history: Prior turns supplied by Gradio; ignored here —
            presumably the agent tracks its own conversation state
            across .chat() calls (TODO confirm).

    Returns:
        The agent's reply as a string, or a "System Error: ..." string
        if the agent raised.
    """
    try:
        response = agent.chat(message)
        return str(response)
    except Exception as e:
        # Deliberate broad catch at the UI boundary: show the failure in
        # the chat window instead of crashing the interface.
        return f"System Error: {str(e)}"
# Launch at import time (no __main__ guard) so the Space serves the app
# however the host process loads this file.
gr.ChatInterface(chat, title="Unit 2: LlamaIndex Agent (Fixed)").launch()