shekkari21 commited on
Commit
757a9cf
·
1 Parent(s): 4eaaf4a

fix agent error handling, add Dockerfile for HF Spaces

Browse files

- Fix AgentResult dataclass using wrong Field import
- Surface LLM errors instead of silently failing
- Add Dockerfile for Hugging Face Spaces deployment
- Web app reads port from PORT env var (HF requirement)

Files changed (3) hide show
  1. Dockerfile +40 -0
  2. agent_framework/agent.py +16 -2
  3. web_app/app.py +2 -1
Dockerfile ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.12-slim
2
+
3
+ WORKDIR /app
4
+
5
+ # Install system dependencies
6
+ RUN apt-get update && apt-get install -y --no-install-recommends \
7
+ build-essential \
8
+ && rm -rf /var/lib/apt/lists/*
9
+
10
+ # Copy and install Python dependencies
11
+ COPY pyproject.toml .
12
+ RUN pip install --no-cache-dir \
13
+ fastapi>=0.100.0 \
14
+ litellm>=1.81.3 \
15
+ openai>=1.101.0 \
16
+ pydantic>=2.11.7 \
17
+ python-dotenv>=1.1.1 \
18
+ python-multipart>=0.0.6 \
19
+ uvicorn>=0.23.0 \
20
+ tavily-python>=0.7.11 \
21
+ openpyxl>=3.1.5 \
22
+ pandas>=2.3.3 \
23
+ pymupdf>=1.26.7 \
24
+ scikit-learn>=1.0.0 \
25
+ tqdm>=4.67.1 \
26
+ numpy \
27
+ mcp>=1.13.1 \
28
+ fastmcp>=2.11.3 \
29
+ tiktoken
30
+
31
+ # Copy application code
32
+ COPY agent_framework/ agent_framework/
33
+ COPY agent_tools/ agent_tools/
34
+ COPY rag/ rag/
35
+ COPY web_app/ web_app/
36
+
37
+ # HF Spaces runs on port 7860
38
+ EXPOSE 7860
39
+
40
+ CMD ["uvicorn", "web_app.app:app", "--host", "0.0.0.0", "--port", "7860"]
agent_framework/agent.py CHANGED
@@ -211,8 +211,22 @@ class Agent:
211
 
212
  # Get LLM's decision
213
  llm_response = await self.think(llm_request)
214
-
215
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
216
  # Record LLM response as an event
217
  response_event = Event(
218
  execution_id=context.execution_id,
 
211
 
212
  # Get LLM's decision
213
  llm_response = await self.think(llm_request)
214
+
215
+ # Handle LLM errors - surface them instead of silently failing
216
+ if llm_response.error_message:
217
+ error_content = [Message(
218
+ role="assistant",
219
+ content=f"Error from LLM: {llm_response.error_message}"
220
+ )]
221
+ error_event = Event(
222
+ execution_id=context.execution_id,
223
+ author=self.name,
224
+ content=error_content,
225
+ )
226
+ context.add_event(error_event)
227
+ context.final_result = error_content[0].content
228
+ return
229
+
230
  # Record LLM response as an event
231
  response_event = Event(
232
  execution_id=context.execution_id,
web_app/app.py CHANGED
@@ -240,5 +240,6 @@ app.mount("/static", StaticFiles(directory=static_dir), name="static")
240
 
241
  if __name__ == "__main__":
242
  import uvicorn
243
- uvicorn.run(app, host="0.0.0.0", port=8000)
 
244
 
 
240
 
241
if __name__ == "__main__":
    # Local/dev entry point. Hugging Face Spaces injects PORT; fall back to
    # the Spaces default of 7860 when it is unset.
    import uvicorn

    listen_port = int(os.getenv("PORT", "7860"))
    uvicorn.run(app, host="0.0.0.0", port=listen_port)
245