davidtalmaciu commited on
Commit
626c310
·
verified ·
1 Parent(s): 6d936f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -12
app.py CHANGED
@@ -4,34 +4,31 @@ import random
4
  from tools.final_answer import FinalAnswerTool
5
  from Gradio_UI import GradioUI
6
 
7
- # Tool: tells a random joke
8
  @tool
9
  def tell_joke(_: str = "") -> str:
10
- """A tool that tells a random joke.
11
- Args:
12
- _: Dummy argument (not used)
13
- """
14
  jokes = [
15
  "Why don't scientists trust atoms? Because they make up everything!",
16
- "I told my computer I needed a break, and it said 'No problem — I'll go to sleep.'",
17
  "Why did the scarecrow win an award? Because he was outstanding in his field!",
18
- "What’s a programmer’s favorite hangout place? The Foo Bar.",
19
- "Parallel lines have so much in common… it’s a shame they’ll never meet."
20
  ]
21
  return random.choice(jokes)
22
 
23
  # Final answer tool
24
  final_answer = FinalAnswerTool()
25
 
26
- # Model
27
  model = HfApiModel(
28
  max_tokens=1024,
29
  temperature=0.7,
30
  model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
31
  )
32
 
33
- # Load prompts
34
- with open("prompts.yaml", 'r') as stream:
35
  prompt_templates = yaml.safe_load(stream)
36
 
37
  # Create the agent
@@ -43,5 +40,5 @@ agent = CodeAgent(
43
  prompt_templates=prompt_templates
44
  )
45
 
46
- # Launch UI
47
  GradioUI(agent).launch()
 
4
  from tools.final_answer import FinalAnswerTool
5
  from Gradio_UI import GradioUI
6
 
7
# Tool: tells a random joke.
@tool
def tell_joke(_: str = "") -> str:
    """A tool that tells a random joke.

    Args:
        _: Dummy argument (not used). NOTE(review): smolagents' @tool
            decorator validates that every parameter appears in the
            docstring's Args section — the commit removed this section,
            which can make tool registration fail; restored here.
    """
    jokes = [
        "Why don't scientists trust atoms? Because they make up everything!",
        "Why was the math book sad? Because it had too many problems.",
        "Why did the scarecrow win an award? Because he was outstanding in his field!",
        "Why did the tomato turn red? Because it saw the salad dressing!",
        "Why did the computer get cold? Because it forgot to close its windows.",
    ]
    # random.choice picks one joke uniformly at random.
    return random.choice(jokes)
19
 
20
  # Final answer tool
21
  final_answer = FinalAnswerTool()
22
 
23
+ # LLM model
24
  model = HfApiModel(
25
  max_tokens=1024,
26
  temperature=0.7,
27
  model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
28
  )
29
 
30
+ # Load the prompt template
31
+ with open("prompts.yaml", "r") as stream:
32
  prompt_templates = yaml.safe_load(stream)
33
 
34
  # Create the agent
 
40
  prompt_templates=prompt_templates
41
  )
42
 
43
+ # Launch Gradio interface
44
  GradioUI(agent).launch()