"""Launch a Gradio chat UI for a CodeAgent backed by a remote MCP tool server.

Module-level side effects (run at import time):
  * reads the ``OPENROUTER_API_KEY`` environment variable (KeyError if unset),
  * reads ``prompts.yaml`` from the directory containing this file.
"""
import os

import yaml
from smolagents import CodeAgent, GradioUI, LiteLLMModel, ToolCollection

# Resolve sibling files relative to this script so it works from any CWD.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

# Low temperature keeps the coding model close to deterministic.
model = LiteLLMModel(
    "openrouter/qwen/qwen-2.5-coder-32b-instruct:free",
    temperature=0.1,
    # Fail fast with a KeyError if the API key is not configured.
    api_key=os.environ["OPENROUTER_API_KEY"],
)

# Prompt templates shipped alongside this script; explicit encoding avoids
# platform-dependent decoding of the YAML file.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), "r", encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)

# Remote MCP (Model Context Protocol) tool server, exposed over SSE.
url = "https://agents-mcp-hackathon-tw-lawmcp.hf.space/gradio_api/mcp/sse"
server_parameters = {"url": url, "transport": "sse"}

if __name__ == "__main__":
    # NOTE: trust_remote_code=True lets tool code provided by the MCP server
    # run locally — only keep this enabled for servers you trust.
    with ToolCollection.from_mcp(server_parameters, trust_remote_code=True) as mcp_tools:
        agent = CodeAgent(
            model=model,
            tools=[*mcp_tools.tools],
            managed_agents=[],
            max_steps=20,
            verbosity_level=1,
            grammar=None,
            planning_interval=None,
            name=None,
            description=None,
            executor_type='local',
            executor_kwargs={},
            max_print_outputs_length=None,
            prompt_templates=prompt_templates,
        )
        # Blocks serving the Gradio web UI until interrupted.
        GradioUI(agent).launch()