{
  "models": [
    {
      "name": "Hugging Face API",
      "provider": "huggingface",
      "description": "Use hosted models via HF token (e.g. distilbert, flan-t5)."
    },
    {
      "name": "Transformers (local)",
      "provider": "transformers",
      "description": "Run local models using transformers pipeline."
    },
    {
      "name": "LiteLLM",
      "provider": "litellm",
      "description": "Connect to 100+ LLMs with unified API routing."
    },
    {
      "name": "OpenAI",
      "provider": "openai",
      "description": "Connect to OpenAI's ChatGPT and GPT-4."
    },
    {
      "name": "Anthropic",
      "provider": "anthropic",
      "description": "Access Claude models via Anthropic token."
    },
    {
      "name": "Ollama",
      "provider": "ollama",
      "description": "Serve local models like llama2 and phi-2 via Ollama server."
    },
    {
      "name": "Together.ai",
      "provider": "together",
      "description": "Use hosted open source models via Together.ai."
    },
    {
      "name": "OpenRouter",
      "provider": "openrouter",
      "description": "Router for multiple LLM providers under one token."
    }
  ],
  "agents": [
    {
      "name": "CodeAgent",
      "description": "Executes Python code generated by the model."
    },
    {
      "name": "ToolCallingAgent",
      "description": "Uses structured tool calls to query external functions."
    },
    {
      "name": "ManagedAgent",
      "description": "Controls multiple agents or tool groups hierarchically."
    }
  ]
}