pararthdave committed on
Commit
6a34e9e
·
1 Parent(s): dd16d02

agent basic initialization for qa

Browse files
Files changed (3) hide show
  1. agent.py +79 -5
  2. api_integration.py +48 -0
  3. app.py +2 -1
agent.py CHANGED
@@ -1,15 +1,24 @@
1
- from smolagents import CodeAgent, InferenceClientModel, ToolCallingAgent, DuckDuckGoSearchTool
 
 
 
 
 
 
 
 
2
  from dotenv import load_dotenv
 
3
  import os
4
  load_dotenv()
5
  model_id = "meta-llama/Llama-3.3-70B-Instruct"
6
 
7
-
8
  def basic_inference(
9
  prompt: str,
10
  model_id: str = "meta-llama/Llama-3.3-70B-Instruct",
11
  provider: str = "groq",
12
- ):
13
  """
14
  Run a basic inference using the specified model and provider.
15
 
@@ -31,11 +40,12 @@ def basic_inference(
31
  # Run the agent with the provided prompt
32
  return agent.run(prompt)
33
 
 
34
  def toolcalling(
35
  prompt: str,
36
  model_id: str = "meta-llama/Llama-3.3-70B-Instruct",
37
  provider: str = "groq",
38
- ):
39
  """
40
  Run a tool calling inference using the specified model and provider.
41
 
@@ -57,6 +67,7 @@ def toolcalling(
57
  # Run the agent with the provided prompt
58
  return agent.run(prompt)
59
 
 
60
  def web_search(query: str) -> str:
61
  """Search DuckDuckGo for a query and return maximum 3 result.
62
 
@@ -64,4 +75,67 @@ def web_search(query: str) -> str:
64
  query: The search query."""
65
  search_tool = DuckDuckGoSearchTool()
66
  search_docs = search_tool(query)
67
- return search_docs
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from smolagents import (
2
+ CodeAgent,
3
+ InferenceClientModel,
4
+ ToolCallingAgent,
5
+ DuckDuckGoSearchTool,
6
+ HfApiModel,
7
+ LiteLLMModel,
8
+ tool
9
+ )
10
  from dotenv import load_dotenv
11
+ from typing import Optional, List, Dict, Any
12
  import os
13
  load_dotenv()
14
  model_id = "meta-llama/Llama-3.3-70B-Instruct"
15
 
16
+ @tool
17
  def basic_inference(
18
  prompt: str,
19
  model_id: str = "meta-llama/Llama-3.3-70B-Instruct",
20
  provider: str = "groq",
21
+ ) -> str:
22
  """
23
  Run a basic inference using the specified model and provider.
24
 
 
40
  # Run the agent with the provided prompt
41
  return agent.run(prompt)
42
 
43
+ @tool
44
  def toolcalling(
45
  prompt: str,
46
  model_id: str = "meta-llama/Llama-3.3-70B-Instruct",
47
  provider: str = "groq",
48
+ ) -> str:
49
  """
50
  Run a tool calling inference using the specified model and provider.
51
 
 
67
  # Run the agent with the provided prompt
68
  return agent.run(prompt)
69
 
70
+ @tool
71
  def web_search(query: str) -> str:
72
  """Search DuckDuckGo for a query and return maximum 3 result.
73
 
 
75
  query: The search query."""
76
  search_tool = DuckDuckGoSearchTool()
77
  search_docs = search_tool(query)
78
+ return search_docs
79
+
80
+
81
class BotMan:
    """Wraps a smolagents CodeAgent configured with an inference model and the
    module's tool functions (web_search, basic_inference, toolcalling)."""

    def __init__(self,
                 model_type: str = "HfApiModel",
                 model_id: Optional[str] = None,
                 api_key: Optional[str] = None,
                 provider: Optional[str] = None,
                 timeout: Optional[int] = None,
                 temperature: Optional[float] = 0,
                 additional_imports: Optional[List[str]] = None,
                 executor_type: str = "local",
                 ):
        """
        Initialize the BotMan class.

        Args:
            model_type: Backend selector; only "HfApiModel" is supported here.
            model_id: Hugging Face model id; defaults to Llama-3.3-70B-Instruct.
            api_key: API token; falls back to the HUGGINGFACEHUB_API_KEY env var.
            provider: Inference provider name; defaults to "hf-inference".
            timeout: Request timeout in seconds; defaults to 80.
            temperature: Sampling temperature passed to the model.
            additional_imports: Extra module names the code executor may import,
                appended to the built-in allow-list.
            executor_type: Where generated code runs (e.g. "local").

        Raises:
            ValueError: If no API key is available, or model_type is unsupported.
        """
        if model_type == "HfApiModel":
            if api_key is None:
                api_key = os.environ.get("HUGGINGFACEHUB_API_KEY")
            if not api_key:
                raise ValueError("API key is required for HfApiModel.")
            self.model = InferenceClientModel(
                model_id=model_id or "meta-llama/Llama-3.3-70B-Instruct",
                token=api_key,
                provider=provider or "hf-inference",
                temperature=temperature,
                timeout=timeout or 80
            )
        else:
            # Fail fast: the original code left self.model unset for any other
            # model_type, which surfaced later as an opaque AttributeError
            # inside CodeAgent instead of a clear configuration error.
            raise ValueError(f"Unsupported model_type: {model_type!r}")
        self.tools = [
            web_search,
            basic_inference,
            toolcalling,
        ]
        executor_kwargs = {}
        # Modules the generated code is authorized to import.
        self.imports = ["pandas", "numpy", "datetime", "json", "re", "math", "os", "requests", "csv", "urllib"]
        if additional_imports:
            self.imports.extend(additional_imports)

        self.agent = CodeAgent(
            tools=self.tools,
            model=self.model,
            additional_authorized_imports=self.imports,
            executor_type=executor_type,
            executor_kwargs=executor_kwargs,
        )

    def answer(self, question: str) -> str:
        """
        Answer a question using the agent.

        Best-effort contract: on any inference failure the exception is logged
        and its string form is returned, so callers always receive a string.
        """
        try:
            result = self.agent.run(question)
            return result
        except Exception as e:
            print(f"Error during inference: {e}")
            return str(e)
135
+
136
if __name__ == '__main__':
    # Example usage. Read HUGGINGFACEHUB_API_KEY so the explicit argument
    # matches the env-var fallback inside BotMan.__init__ — the previous code
    # read HUGGINGFACEHUB_API_TOKEN, a different variable, so the explicit
    # lookup could silently return None even when the fallback var was set.
    bot = BotMan(model_type="HfApiModel", model_id=model_id, api_key=os.environ.get("HUGGINGFACEHUB_API_KEY"))
    question = "What is the capital of France?"
    answer = bot.answer(question)
    print(answer)
api_integration.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from typing import List, Dict, Any
3
+ # from core_agent import GAIAAgent
4
+
5
+
6
class GAIAApiClient:
    """Thin HTTP client for the GAIA agents-course scoring API."""

    def __init__(self, api_url="https://agents-course-unit4-scoring.hf.space", timeout: float = 30.0):
        """
        Args:
            api_url: Base URL of the scoring service.
            timeout: Per-request timeout in seconds. requests applies no timeout
                by default, so without this a stalled server hangs the client
                forever.
        """
        self.api_url = api_url
        self.questions_url = f"{api_url}/questions"
        self.submit_url = f"{api_url}/submit"
        self.files_url = f"{api_url}/files"
        self.timeout = timeout

    def get_questions(self) -> List[Dict[str, Any]]:
        """Fetch all evaluation questions.

        Raises:
            requests.HTTPError: On a non-2xx response.
        """
        response = requests.get(self.questions_url, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def get_random_question(self) -> Dict[str, Any]:
        """Fetch a single random question."""
        response = requests.get(f"{self.api_url}/random-question", timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def get_file(self, task_id: str) -> bytes:
        """Download the raw bytes of the file attached to a specific task."""
        response = requests.get(f"{self.files_url}/{task_id}", timeout=self.timeout)
        response.raise_for_status()
        return response.content

    def submit_answers(self, username: str, agent_code: str, answers: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Submit agent answers and return the score payload as parsed JSON."""
        data = {
            "username": username,
            "agent_code": agent_code,
            "answers": answers
        }
        response = requests.post(self.submit_url, json=data, timeout=self.timeout)
        response.raise_for_status()
        return response.json()
41
+
42
if __name__ == '__main__':
    # Example usage: pull every evaluation question and dump each to stdout.
    client = GAIAApiClient()
    for question in client.get_questions():
        print(question)
48
+
app.py CHANGED
@@ -3,7 +3,8 @@ import gradio as gr
3
  import requests
4
  import inspect
5
  import pandas as pd
6
-
 
7
  # (Keep Constants as is)
8
  # --- Constants ---
9
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
3
  import requests
4
  import inspect
5
  import pandas as pd
6
+ from dotenv import load_dotenv
7
+ load_dotenv()
8
  # (Keep Constants as is)
9
  # --- Constants ---
10
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"