zineb-chgari committed on
Commit
ec83752
·
verified ·
1 Parent(s): 9431b22

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -18
app.py CHANGED
@@ -76,30 +76,23 @@ class BasicAgent:
76
 
77
 
78
  def __call__(self, question: str) -> str:
79
-
80
- try:
81
  print(f"\n➡️ Agent received question: {question[:80]}")
82
 
83
- if getattr(self, "model_type", None) == "local":
84
- # Use local model
85
- messages = [{"role": "user", "content": question}]
86
- response = self.pipe(messages, max_new_tokens=200)
87
- answer = response[0]["generated_text"]
88
- else:
89
- # Use Hugging Face router (OpenAI-style)
90
- response = self.client.chat.completions.create(
91
- model=self.model_id,
92
- messages=[
93
- {"role": "system", "content": "You are a helpful assistant."},
94
- {"role": "user", "content": question}
95
- ],
96
- )
97
- answer = response.choices[0].message["content"].strip()
98
 
99
  print(f"✅ Agent returning answer: {answer[:100]}...")
100
  return answer
101
 
102
- except Exception as e:
103
  print(f"❌ Agent encountered an error: {e}")
104
  traceback.print_exc()
105
  return f"Error generating answer: {e}"
@@ -107,6 +100,7 @@ class BasicAgent:
107
 
108
 
109
 
 
110
  def run_and_submit_all( profile: gr.OAuthProfile | None):
111
  """
112
  Fetches all questions, runs the BasicAgent on them, submits all answers,
 
76
 
77
 
78
  def __call__(self, question: str) -> str:
79
+ try:
 
80
  print(f"\n➡️ Agent received question: {question[:80]}")
81
 
82
+ # Use local model
83
+ response = self.pipe(question, max_new_tokens=150)
84
+ raw_answer = response[0]["generated_text"]
85
+
86
+ # Clean output: remove question and artifacts
87
+ answer = raw_answer.replace(question, "").strip()
88
+
89
+ # Guarantee a valid string (the API rejects non-strings)
90
+ answer = str(answer)
 
 
 
 
 
 
91
 
92
  print(f"✅ Agent returning answer: {answer[:100]}...")
93
  return answer
94
 
95
+ except Exception as e:
96
  print(f"❌ Agent encountered an error: {e}")
97
  traceback.print_exc()
98
  return f"Error generating answer: {e}"
 
100
 
101
 
102
 
103
+
104
  def run_and_submit_all( profile: gr.OAuthProfile | None):
105
  """
106
  Fetches all questions, runs the BasicAgent on them, submits all answers,