asthara committed on
Commit
0ac2e8a
·
1 Parent(s): 9359c59

override main.py changes

Browse files
Files changed (1) hide show
  1. main.py +18 -5
main.py CHANGED
@@ -44,11 +44,24 @@ def chat_with_agent(message, history):
44
  trust_remote_code=True # Acknowledge that we trust this remote MCP server
45
  ) as tool_collection:
46
 
47
- model = LiteLLMModel(
48
- model_id="ollama/devstral:latest",
49
- #model_id="ollama/qwen3:0.6b",
50
- api_base="http://localhost:11434",
51
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
52
 
53
  result = agent.run(message)
54
  return str(result)
 
44
  trust_remote_code=True # Acknowledge that we trust this remote MCP server
45
  ) as tool_collection:
46
 
47
+
48
+ model = LiteLLMModel(
49
+ model_id="ollama/devstral:latest",
50
+ api_base="http://localhost:11434",
51
+ )
52
+
53
+ agent = CodeAgent(
54
+ tools=tool_collection.tools,
55
+ model=model,
56
+ additional_authorized_imports=["inspect", "json"]
57
+ )
58
+
59
+ additional_instructions = """
60
+ ADDITIONAL IMPORTANT INSTRUCTIONS:
61
+ use the tool "final_answer" in the code block to provide the answer to the user. Prints are only for debugging purposes. So, to give your results concatenate everything you want to print in a single "final_answer" call as such : final_answer(f"your answer here").
62
+ """
63
+
64
+ agent.system_prompt += additional_instructions
65
 
66
  result = agent.run(message)
67
  return str(result)