Spaces:
Sleeping
Sleeping
| import os | |
| import json | |
| from smolagents import create_agent_executor, Agent, Task | |
| from langgraph.prebuilt import ToolExecutor | |
| from huggingface_hub import InferenceClient | |
| from tools import * | |
# Hugging Face inference endpoint configuration, taken from the environment.
HF_API_URL = os.getenv("HF_ENDPOINT_URL")
HF_API_TOKEN = os.getenv("HF_TOKEN")

# Fail fast at import time if either value is unset or empty.
if not (HF_API_URL and HF_API_TOKEN):
    raise ValueError("Missing Hugging Face endpoint URL or token.")

# Client used by run_llm() to talk to the dedicated inference endpoint.
llm = InferenceClient(model=HF_API_URL, token=HF_API_TOKEN)
def run_llm(prompt: str) -> str:
    """Send *prompt* to the HF endpoint and return the stripped completion."""
    # Greedy decoding: sampling off, temperature pinned to zero, and only
    # the newly generated text is returned (not the echoed prompt).
    generation_kwargs = dict(
        max_new_tokens=512,
        do_sample=False,
        temperature=0.0,
        return_full_text=False,
    )
    completion = llm.text_generation(prompt, **generation_kwargs)
    return completion.strip()
# Instantiate every tool the agent may call, in a fixed order.
tool_list = [
    tool_cls()
    for tool_cls in (
        GetAttachmentTool,
        GoogleSearchTool,
        GoogleSiteSearchTool,
        ContentRetrieverTool,
        SpeechRecognitionTool,
        YouTubeVideoTool,
        ClassifierTool,
        ImageToChessBoardFENTool,
    )
]
# Wrap the tools so LangGraph can dispatch calls to them.
tool_executor = ToolExecutor(tool_list)

# The agent plans with run_llm and may invoke any tool in tool_list.
agent = Agent(llm=run_llm, tools=tool_list)

# Non-streaming executor: invoke() runs to completion and returns the
# final state mapping.
agent_executor = create_agent_executor(
    agent=agent,
    tool_executor=tool_executor,
    stream=False,
)
def load_tasks(metadata_path="metadata.jsonl") -> list[Task]:
    """Load Task objects from a JSONL metadata file.

    Each non-empty line must be a JSON object with at least the keys
    ``question_id`` and ``answer``.

    Fixes over the original: the file is opened with an explicit UTF-8
    encoding (JSONL is UTF-8 by convention; the platform default may not
    be), and blank/trailing lines are skipped instead of crashing
    ``json.loads`` with a JSONDecodeError.

    NOTE(review): the task *input* is populated from ``data["answer"]``,
    i.e. the ground-truth answer rather than the question text — this
    looks like a bug (the agent would be handed the answer), but the
    JSONL schema is not visible here, so the behavior is preserved.
    Confirm against the metadata file and switch to the question field
    if appropriate.
    """
    tasks: list[Task] = []
    with open(metadata_path, "r", encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # tolerate blank/trailing lines in the JSONL file
            data = json.loads(line)
            tasks.append(Task(
                task_id=data["question_id"],
                input=data["answer"],
            ))
    return tasks
def solve_task(task: Task) -> str:
    """Run the agent executor on *task* and return its final answer.

    Returns an empty string when the result state carries no "output" key.
    """
    final_state = agent_executor.invoke(task.input)
    return final_state.get("output", "")