File size: 1,093 Bytes
f0c3317
 
 
 
 
 
 
 
 
3147ef1
 
 
 
 
 
 
 
f0c3317
3147ef1
79b608e
f0c3317
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import os

import yaml
from smolagents import CodeAgent, GradioUI, InferenceClientModel

from tools.final_answer import FinalAnswerTool as FinalAnswer

# Directory containing this file, so bundled resources (prompts.yaml)
# resolve regardless of the current working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

# Hugging Face API token from the environment (e.g. a Space secret).
# May be None; InferenceClientModel then falls back to anonymous access.
hf_token = os.environ.get("HF_TOKEN")

# One model instance. The original code built a tokenized model and then
# immediately shadowed it with a second model that dropped the token —
# both the model id and the api key belong on the same instance.
model = InferenceClientModel(
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    api_key=hf_token,
)

# NOTE(review): final_answer is instantiated but never passed to the agent's
# `tools` list — presumably it should be in `tools=[final_answer]`; confirm
# intent before wiring it in (CodeAgent also auto-provides a final answer tool).
final_answer = FinalAnswer()

# Load the prompt templates shipped next to this script.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

# Single agent (the original built a throwaway agent first, then this one).
agent = CodeAgent(
    model=model,
    tools=[],
    managed_agents=[],
    max_steps=20,
    verbosity_level=1,
    planning_interval=None,
    name=None,
    description=None,
    executor_type='local',
    executor_kwargs={},
    max_print_outputs_length=None,
    prompt_templates=prompt_templates
)

if __name__ == "__main__":
    GradioUI(agent).launch()