Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -26,6 +26,10 @@ from langchain.agents import create_react_agent, AgentExecutor
|
|
| 26 |
from datasets import load_dataset
|
| 27 |
from huggingface_hub import login
|
| 28 |
import os
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
|
| 30 |
# تحقق من وجود توكن في متغير البيئة
|
| 31 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
|
@@ -241,25 +245,32 @@ class GaiaRunner:
|
|
| 241 |
self.username = username
|
| 242 |
|
| 243 |
def run_on_question(self, question_text: str, file: Optional[str] = None) -> str:
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
| 247 |
-
|
| 248 |
-
|
|
|
|
|
|
|
| 249 |
result = self.agent.invoke({"input": prompt})
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
|
|
|
|
| 258 |
final_out = final_client.predict_text(output)
|
| 259 |
-
|
| 260 |
-
logger.
|
|
|
|
|
|
|
|
|
|
|
|
|
| 261 |
|
| 262 |
-
|
| 263 |
|
| 264 |
def run_all_and_submit(self) -> Dict[str, Any]:
|
| 265 |
questions_url =f"{GAIA_API_BASE}/questions"
|
|
@@ -445,5 +456,4 @@ def gradio_interface():
|
|
| 445 |
|
| 446 |
if __name__ == "__main__":
|
| 447 |
demo = gradio_interface()
|
| 448 |
-
demo.launch(show_error=True, share=False)
|
| 449 |
-
|
|
|
|
| 26 |
from datasets import load_dataset
from huggingface_hub import login
import os
import threading

# Global lock serializing access to the LLM/agent from worker threads.
llama_lock = threading.Lock()

# Read the Hugging Face token from the environment (never hard-coded).
HF_TOKEN = os.getenv("HF_TOKEN")
|
|
|
|
| 245 |
self.username = username
|
| 246 |
|
| 247 |
def run_on_question(self, question_text: str, file: Optional[str] = None) -> str:
    """Answer a single GAIA question with the agent and post-process the result.

    Args:
        question_text: Raw question text to feed to the agent.
        file: Optional attachment reference for the question
            (accepted for interface compatibility; not used in this body).

    Returns:
        The answer produced by ``final_client.predict_text``, or an
        ``AGENT_ERROR: ...`` / ``FINAL_AGENT_ERROR: ...`` marker string
        when the corresponding stage raised.
    """
    import time  # local import preserved so this method stays self-contained

    # perf_counter() is monotonic and immune to system-clock adjustments,
    # which makes it the correct timer for measuring elapsed durations
    # (time.time() can jump backwards/forwards under NTP).
    start = time.perf_counter()

    prompt = SYSTEM_INSTRUCTIONS + "\n\n" + question_text
    try:
        # Serialize agent invocations: the underlying langchain/llama-cpp
        # stack is presumably not safe under concurrent calls — the lock
        # guards it when questions run on multiple threads.
        with llama_lock:
            result = self.agent.invoke({"input": prompt})
        if isinstance(result, dict):
            # NOTE(review): `or` also falls through on empty strings, not
            # just missing keys — an empty "output" falls back to "text"
            # and then to str(result). Kept as-is; confirm this is intended.
            output = result.get("output") or result.get("text") or str(result)
        else:
            output = getattr(result, "output", str(result))
    except Exception as e:
        # Boundary handler: one failing question must not abort the batch.
        logger.exception("Agent execution failed")
        output = f"AGENT_ERROR: {e}"

    try:
        final_out = final_client.predict_text(output)
    except Exception as e:
        logger.exception("Final client failed")
        final_out = f"FINAL_AGENT_ERROR: {e}"

    elapsed = time.perf_counter() - start
    # Lazy %-args: the message is only formatted if INFO logging is enabled.
    logger.info("⏱️ Time for question: %.2f sec", elapsed)

    return final_out
|
| 274 |
|
| 275 |
def run_all_and_submit(self) -> Dict[str, Any]:
|
| 276 |
questions_url =f"{GAIA_API_BASE}/questions"
|
|
|
|
| 456 |
|
| 457 |
# Script entry point: build the Gradio UI and serve it.
if __name__ == "__main__":
    demo = gradio_interface()
    # Bind on all interfaces; honor a PORT override from the environment,
    # defaulting to Gradio's standard port 7860.
    port = int(os.getenv("PORT", 7860))
    demo.launch(
        server_name="0.0.0.0",
        server_port=port,
        show_error=True,
        share=False,
    )
|
|
|