Spaces:
Sleeping
Sleeping
Christopher Tan committed on
Commit ·
2fb15e2
1
Parent(s): 7b21005
added debugging message for inference
Browse files
- app.py +11 -2
- inference_openpi.py +8 -2
app.py
CHANGED
|
@@ -622,8 +622,17 @@ def run_pi0_inference(request: InferenceRequest) -> Tuple[Optional[str], str]:
|
|
| 622 |
# Send request
|
| 623 |
request_dict = request.to_dict()
|
| 624 |
request_json = json.dumps(request_dict)
|
| 625 |
-
|
| 626 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 627 |
|
| 628 |
request.progress(0.2, desc="Waiting for inference result...")
|
| 629 |
|
|
|
|
| 622 |
# Send request
|
| 623 |
request_dict = request.to_dict()
|
| 624 |
request_json = json.dumps(request_dict)
|
| 625 |
+
print(f"DEBUG: Sending request to {model_key} worker: {request_json[:150]}...", flush=True)
|
| 626 |
+
try:
|
| 627 |
+
worker.stdin.write(request_json + "\n")
|
| 628 |
+
worker.stdin.flush()
|
| 629 |
+
except BrokenPipeError:
|
| 630 |
+
# Worker died - restart it
|
| 631 |
+
print(f"⚠️ Worker {model_key} stdin broken, restarting...", flush=True)
|
| 632 |
+
_INFERENCE_WORKERS[model_key] = None
|
| 633 |
+
worker = get_inference_worker(model_key)
|
| 634 |
+
worker.stdin.write(request_json + "\n")
|
| 635 |
+
worker.stdin.flush()
|
| 636 |
|
| 637 |
request.progress(0.2, desc="Waiting for inference result...")
|
| 638 |
|
inference_openpi.py
CHANGED
|
@@ -535,8 +535,12 @@ def main():
|
|
| 535 |
continue
|
| 536 |
|
| 537 |
try:
|
|
|
|
| 538 |
result = run_inference(request)
|
| 539 |
-
print(
|
|
|
|
|
|
|
|
|
|
| 540 |
except Exception as e:
|
| 541 |
# Error during inference - send error response as JSON
|
| 542 |
import traceback
|
|
@@ -548,7 +552,9 @@ def main():
|
|
| 548 |
"status_message": f"❌ Worker error: {str(e)}",
|
| 549 |
"error": str(e)
|
| 550 |
}
|
| 551 |
-
|
|
|
|
|
|
|
| 552 |
|
| 553 |
except KeyboardInterrupt:
|
| 554 |
print("===== OpenPI Worker: interrupted =====", file=sys.stderr, flush=True)
|
|
|
|
| 535 |
continue
|
| 536 |
|
| 537 |
try:
|
| 538 |
+
print(f"DEBUG: Starting inference for task: {request.get('task_name', 'unknown')}", file=sys.stderr, flush=True)
|
| 539 |
result = run_inference(request)
|
| 540 |
+
print(f"DEBUG: Inference completed, sending result", file=sys.stderr, flush=True)
|
| 541 |
+
result_json = json.dumps(result)
|
| 542 |
+
print(result_json, flush=True)
|
| 543 |
+
print(f"DEBUG: Result sent successfully", file=sys.stderr, flush=True)
|
| 544 |
except Exception as e:
|
| 545 |
# Error during inference - send error response as JSON
|
| 546 |
import traceback
|
|
|
|
| 552 |
"status_message": f"❌ Worker error: {str(e)}",
|
| 553 |
"error": str(e)
|
| 554 |
}
|
| 555 |
+
error_json = json.dumps(error_result)
|
| 556 |
+
print(error_json, flush=True)
|
| 557 |
+
print(f"DEBUG: Error result sent successfully", file=sys.stderr, flush=True)
|
| 558 |
|
| 559 |
except KeyboardInterrupt:
|
| 560 |
print("===== OpenPI Worker: interrupted =====", file=sys.stderr, flush=True)
|