FD900 committed on
Commit
92dc164
Β·
verified Β·
1 Parent(s): 89ab973

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +35 -14
agent.py CHANGED
import os
from typing import Dict, List, Optional

import requests

from run import run_and_submit_all  # Adjust path if needed


class GaiaAgent:
    """Thin client for a hosted Mistral inference endpoint.

    Answers GAIA benchmark questions by prompting the endpoint and hands the
    answering callback to ``run_and_submit_all`` for submission.
    """

    def __init__(self):
        # Read configuration from the environment.
        self.api_url = os.environ.get("HF_MISTRAL_ENDPOINT")
        self.api_key = os.environ.get("HF_TOKEN")
        self.model_id = os.environ.get("LLM_MODEL_ID")

        # Fail fast with a clear error. `raise` instead of `assert`:
        # asserts are silently stripped when Python runs with -O.
        missing = [
            name
            for name, value in (
                ("HF_MISTRAL_ENDPOINT", self.api_url),
                ("HF_TOKEN", self.api_key),
                ("LLM_MODEL_ID", self.model_id),
            )
            if not value
        ]
        if missing:
            raise RuntimeError(
                f"❌ Missing environment variable(s): {', '.join(missing)}"
            )

        print(f"✅ [INIT] Model ID: {self.model_id}")
        print(f"✅ [INIT] Endpoint: {self.api_url}")

        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

    def generate(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to the inference endpoint and return the generated text.

        Returns an ``"ERROR: ..."`` sentinel string (rather than raising) when
        the HTTP call fails, so one bad question does not abort a full run.

        ``stop`` defaults to None instead of a mutable ``[]`` default.
        """
        print("🧠 [GENERATE] Prompt sent to model:")
        print(prompt)

        payload = {
            "inputs": prompt,
            "parameters": {
                "temperature": 0.0,
                "max_new_tokens": 1024,
                "stop": stop if stop is not None else [],
            },
        }

        try:
            # A timeout prevents the agent from hanging forever on a dead
            # or overloaded endpoint.
            response = requests.post(
                self.api_url, headers=self.headers, json=payload, timeout=120
            )
            response.raise_for_status()
        except requests.RequestException as e:
            # Narrowed from `except Exception`: only transport/HTTP errors
            # are expected here; anything else is a programming bug.
            print(f"❌ [ERROR] Request failed: {e}")
            return "ERROR: Model call failed"

        output = response.json()
        print(f"✅ [RESPONSE] Raw output: {output}")

        # HF endpoints return either {"generated_text": ...} or
        # [{"generated_text": ...}]; fall back to the raw payload otherwise.
        # The extra `output and` guard avoids IndexError on an empty list.
        if isinstance(output, dict) and "generated_text" in output:
            return output["generated_text"]
        if isinstance(output, list) and output and "generated_text" in output[0]:
            return output[0]["generated_text"]
        return str(output)

    def answer_question(self, question: Dict) -> str:
        """Answer one GAIA question dict (expects a "question" key)."""
        q = question["question"]
        print(f"📌 [QUESTION] ID: {question.get('id', 'N/A')} - {q}")

        prompt = f"""You are a helpful agent answering a science question.
Question: {q}
Answer:"""
        return self.generate(prompt).strip()

    def run(self):
        """Run the agent over the full benchmark and submit all answers."""
        print("🚀 [RUN] Starting submission...")
        run_and_submit_all(self.answer_question)