Spaces:
Sleeping
File size: 578 Bytes
import os
from smolagents import CodeAgent, DuckDuckGoSearchTool, GradioUI, InferenceClientModel, LiteLLMModel
# Select the LLM backend at startup from environment variables.
# USE_LOCAL_MODEL=true  -> local Ollama model via LiteLLM
# (default)             -> hosted Hugging Face Inference endpoint
USE_LOCAL_MODEL = os.getenv("USE_LOCAL_MODEL", "false").lower() == "true"

if USE_LOCAL_MODEL:
    model = LiteLLMModel(
        model_id="ollama_chat/llama3.2",
        # Allow pointing at a remote/containerized Ollama server;
        # defaults to the standard local Ollama port.
        api_base=os.getenv("OLLAMA_API_BASE", "http://localhost:11434"),
    )
else:
    model = InferenceClientModel(
        model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
        # Generous output budget for code-generation turns.
        max_tokens=8192,
    )
# Build the code-writing agent: one web-search tool plus the chosen model.
search_tool = DuckDuckGoSearchTool()
agent = CodeAgent(model=model, tools=[search_tool])
# Entry point: serve the agent behind a Gradio chat UI when run as a script.
if __name__ == "__main__":
    ui = GradioUI(agent)
    ui.launch()