lopera47 commited on
Commit
c199d3c
·
verified ·
1 Parent(s): eda4c08

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -20
app.py CHANGED
@@ -33,39 +33,37 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
33
class BasicAgent:
    """Minimal tool-calling agent that answers questions via web search.

    Wraps a smolagents ``ToolCallingAgent`` backed by a Hugging Face
    Inference API model (zephyr-7b-beta) with a DuckDuckGo search tool.
    """

    def __init__(self):
        print("BasicAgent initialized.")
        # Model served through the HF Inference API; the token is read from
        # the environment so no secret lives in the source.
        model = InferenceClientModel(
            model_id="HuggingFaceH4/zephyr-7b-beta",
            token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
            provider="hf-inference",
        )
        self.agent = ToolCallingAgent(
            tools=[DuckDuckGoSearchTool()],
            model=model,
        )

    def __call__(self, question: str) -> str:
        """Run the agent on *question* and return its answer, stripped.

        Args:
            question: The question text to answer.

        Returns:
            The agent's answer with leading/trailing whitespace removed.
        """
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = self.agent.run(question)
        # Strip once and reuse (the original stripped twice).
        result = answer.strip()
        print(f"Agent returning answer: {result}")
        return result
68
 
 
69
  # Put it all together
70
  def run_and_submit_all( profile: gr.OAuthProfile | None):
71
  """
 
33
class BasicAgent:
    """Tool-calling agent that answers questions via GPT-4o plus web search.

    Wraps a smolagents ``ToolCallingAgent`` backed by OpenAI's gpt-4o
    (through the ``InferenceClientModel`` openai provider) with a
    DuckDuckGo search tool.
    """

    def __init__(self):
        print("BasicAgent initialized.")
        # SECURITY: the original commit hard-coded a live OpenAI API key
        # here. A secret committed to a public repo is compromised and must
        # be revoked/rotated; the credential is now read from the
        # environment instead.
        openai_key = os.getenv("OPENAI_API_KEY")
        model = InferenceClientModel(
            model_id="gpt-4o",
            token=openai_key,
            provider="openai",
            # temperature intentionally left at the provider default.
        )
        self.agent = ToolCallingAgent(
            tools=[DuckDuckGoSearchTool()],
            model=model,
        )

    def __call__(self, question: str) -> str:
        """Run the agent on *question* and return its answer, stripped.

        Args:
            question: The question text to answer.

        Returns:
            The agent's answer with leading/trailing whitespace removed.
        """
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = self.agent.run(question)
        # Strip once and reuse (the original stripped twice).
        result = answer.strip()
        print(f"Agent returning answer: {result}")
        return result
65
 
66
+
67
  # Put it all together
68
  def run_and_submit_all( profile: gr.OAuthProfile | None):
69
  """