File size: 1,398 Bytes
ad3baff
e42ee9c
ad3baff
f8d7e03
 
e42ee9c
 
 
 
df2761c
d5d74f1
e42ee9c
 
49d0ec7
f8d7e03
27b0c70
e42ee9c
 
 
ad3baff
8c79ae9
 
8464f47
ca94485
e42ee9c
d56d7b0
017e4e7
8464f47
 
e42ee9c
 
d56d7b0
 
 
 
 
 
e42ee9c
 
 
 
 
9495824
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
from smolagents import ToolCallingAgent, ActionStep, CodeAgent, DuckDuckGoSearchTool, LiteLLMModel
import os
import time
#import litellm
#litellm._turn_on_debug()  # Enable debugging for litellm

# Propagate the Sambanova API key: LiteLLM reads SAMBANOVA_API_KEY, but this
# environment stores the secret under 'sambanova_token'.
_token = os.getenv('sambanova_token')
if _token is None:
    # Fail fast with a clear message instead of the opaque TypeError that
    # os.environ raises when assigned None.
    raise RuntimeError("Environment variable 'sambanova_token' is not set")
os.environ['SAMBANOVA_API_KEY'] = _token

# Alternative Sambanova model ids:
#   Meta-Llama-3.1-405B-Instruct-8k
#   Meta-Llama-3.1-70B-Instruct-8k
# Create the model with explicit provider specification ("sambanova/" prefix
# routes the request through LiteLLM's Sambanova provider).
model = LiteLLMModel(
    model_id="sambanova/Meta-Llama-3.1-405B-Instruct-8k",
    max_tokens=2096,  # NOTE(review): unusual cap — likely a 2048/4096 typo; confirm
    temperature=0.5,
    api_base="https://api.sambanova.ai/v1",  # Sambanova's OpenAI-compatible endpoint
)

def my_paused(step_log: "ActionStep", agent: "ToolCallingAgent", delay: float = 10.0) -> None:
    """Step callback that pauses between agent steps (e.g. to respect rate limits).

    Args:
        step_log: The just-finished step record (unused; required by the
            step_callbacks interface).
        agent: The running agent instance (unused; required by the interface).
        delay: Seconds to sleep between steps. Defaults to 10.0, preserving
            the original hard-coded pause.
    """
    # ':g' renders 10.0 as '10', matching the original "Paused 10sec." message.
    print(f'Paused {delay:g}sec.')
    time.sleep(delay)

# Create the agent with a web-search tool; the step callback throttles each
# step (10s pause) — presumably to stay under the provider's rate limits.
agent = ToolCallingAgent(
    tools=[DuckDuckGoSearchTool()],
    model=model,
    step_callbacks=[my_paused],
)

# When the run goes through Streamlit, the responses are converted to JSON automatically
#agent = CodeAgent(
#    tools=[DuckDuckGoSearchTool()],
#    model=model
#)

# Run the agent; the broad except is a deliberate top-level boundary for this
# script — report the failure instead of dumping a traceback.
try:
    result = agent.run("Search for the best music recommendations for a party at the Wayne's mansion.")
    print("\n++++\nResult:\n")
    print(result)
except Exception as e:
    # Plain string: the original used an f-string with no placeholders (F541).
    print("\n++++\nError occurred:\n")
    print(f"{e}")