# Provenance: Hugging Face Space file by user "jpuri", commit 352b2ab ("init"), 578 bytes.
import os
from smolagents import CodeAgent, DuckDuckGoSearchTool, GradioUI, InferenceClientModel, LiteLLMModel
# Backend selection flag: set USE_LOCAL_MODEL=true in the environment to run
# against a local Ollama server instead of the hosted inference endpoint.
USE_LOCAL_MODEL = os.environ.get("USE_LOCAL_MODEL", "false").lower() == "true"

if USE_LOCAL_MODEL:
    # Local llama3.2 served by Ollama, reached through the LiteLLM adapter.
    model = LiteLLMModel(
        api_base="http://localhost:11434",
        model_id="ollama_chat/llama3.2",
    )
else:
    # Hosted Qwen coder model via the Hugging Face Inference API.
    model = InferenceClientModel(
        max_tokens=8192,
        model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    )

# Code-writing agent equipped with a DuckDuckGo web-search tool.
agent = CodeAgent(
    model=model,
    tools=[DuckDuckGoSearchTool()],
)

if __name__ == "__main__":
    # Expose the agent through a Gradio web UI when run as a script.
    GradioUI(agent).launch()