Spaces:
Sleeping
Sleeping
Update src/proxy_app/main.py
Browse files- src/proxy_app/main.py +52 -1
src/proxy_app/main.py
CHANGED
|
@@ -1515,6 +1515,57 @@ async def refresh_quota_stats(
|
|
| 1515 |
raise HTTPException(status_code=500, detail=str(e))
|
| 1516 |
|
| 1517 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1518 |
@app.post("/v1/token-count")
|
| 1519 |
async def token_count(
|
| 1520 |
request: Request,
|
|
@@ -1763,4 +1814,4 @@ if __name__ == "__main__":
|
|
| 1763 |
workers=workers,
|
| 1764 |
limit_concurrency=100, # Max simultaneous connections per worker
|
| 1765 |
timeout_keep_alive=30, # Keep-alive for connection reuse
|
| 1766 |
-
)
|
|
|
|
| 1515 |
raise HTTPException(status_code=500, detail=str(e))
|
| 1516 |
|
| 1517 |
|
| 1518 |
+
# --- Log Viewing Endpoint ---
@app.get("/logs/{log_type}")
async def view_logs(
    log_type: str,
    lines: int = 50,
    _: str = Depends(verify_api_key),
):
    """
    View the last N lines of a log file.
    Authenticated with PROXY_API_KEY.

    Path Params:
    - log_type: 'proxy', 'debug', or 'failures'

    Query Params:
    - lines: number of lines to return (default: 50)

    Example:
    curl -H "Authorization: Bearer YOUR_KEY" "https://your-space.hf.space/logs/failures?lines=100"
    """
    # Whitelist maps the public log name to the on-disk filename; any other
    # value is rejected, so the path component can never be used to reach
    # arbitrary files.
    allowed_logs = {
        "failures": "failures.log",
        "proxy": "proxy.log",
        "debug": "proxy_debug.log",
    }

    if log_type not in allowed_logs:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid log type. Choose from: {list(allowed_logs.keys())}"
        )

    log_file = LOG_DIR / allowed_logs[log_type]

    if not log_file.exists():
        return JSONResponse(
            status_code=404,
            content={"error": "Log file not found", "path": str(log_file), "available_logs": list(allowed_logs.keys())}
        )

    try:
        from collections import deque

        # deque(maxlen=N) keeps only the last N lines while streaming the
        # file, so memory stays bounded even for large logs (readlines()
        # previously loaded the entire file).  Clamping to >= 0 also fixes
        # the lines=0 case: the old all_lines[-0:] slice returned the WHOLE
        # file, and a negative value produced a nonsense slice — both now
        # yield an empty tail.
        tail_len = max(lines, 0)
        with open(log_file, "r", encoding="utf-8", errors="replace") as f:
            tail = deque(f, maxlen=tail_len)
        result = "".join(tail)
        return JSONResponse(content={"log": allowed_logs[log_type], "lines": lines, "content": result})
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to read log file: {str(e)}")
|
| 1567 |
+
|
| 1568 |
+
|
| 1569 |
@app.post("/v1/token-count")
|
| 1570 |
async def token_count(
|
| 1571 |
request: Request,
|
|
|
|
| 1814 |
workers=workers,
|
| 1815 |
limit_concurrency=100, # Max simultaneous connections per worker
|
| 1816 |
timeout_keep_alive=30, # Keep-alive for connection reuse
|
| 1817 |
+
)
|