Spaces:
Paused
Paused
Zhen Ye
committed on
Commit
·
16cab91
1
Parent(s):
0a016b0
Optimize MJPEG stream and fix first-frame UX
Browse files
- LaserPerception/LaserPerception.js +5 -6
- app.py +23 -7
LaserPerception/LaserPerception.js
CHANGED
|
@@ -950,12 +950,11 @@
|
|
| 950 |
const streamUrl = `${state.hf.baseUrl}${data.stream_url}`;
|
| 951 |
setStreamingMode(streamUrl);
|
| 952 |
|
| 953 |
-
//
|
| 954 |
-
|
| 955 |
-
|
| 956 |
-
|
| 957 |
-
|
| 958 |
-
// Trigger resize/render
|
| 959 |
resizeOverlays();
|
| 960 |
renderRadar();
|
| 961 |
renderTrackCards();
|
|
|
|
| 950 |
const streamUrl = `${state.hf.baseUrl}${data.stream_url}`;
|
| 951 |
setStreamingMode(streamUrl);
|
| 952 |
|
| 953 |
+
// NOTE: Auto-switch removed to allow viewing First Frame on Tab 1
|
| 954 |
+
log("Live view available in 'Engage' tab.", "g");
|
| 955 |
+
setStatus("warn", "Live processing... View in Engage tab");
|
| 956 |
+
|
| 957 |
+
// Trigger resize/render (background setup)
|
|
|
|
| 958 |
resizeOverlays();
|
| 959 |
renderRadar();
|
| 960 |
renderTrackCards();
|
app.py
CHANGED
|
@@ -517,28 +517,44 @@ async def detect_first_frame_depth(job_id: str):
|
|
| 517 |
|
| 518 |
@app.get("/detect/stream/{job_id}")
|
| 519 |
async def stream_video(job_id: str):
|
| 520 |
-
"""MJPEG stream of the processing video."""
|
| 521 |
import queue
|
| 522 |
|
| 523 |
async def stream_generator():
|
|
|
|
| 524 |
while True:
|
| 525 |
-
# Check if stream exists
|
| 526 |
q = get_stream(job_id)
|
| 527 |
if not q:
|
| 528 |
-
# Job finished or not started
|
| 529 |
break
|
| 530 |
|
| 531 |
try:
|
| 532 |
-
#
|
|
|
|
|
|
|
| 533 |
frame = q.get_nowait()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 534 |
|
| 535 |
-
# Encode
|
| 536 |
-
success, buffer = cv2.imencode('.jpg', frame)
|
| 537 |
if success:
|
| 538 |
yield (b'--frame\r\n'
|
| 539 |
b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')
|
| 540 |
except queue.Empty:
|
| 541 |
-
await asyncio.sleep(0.02)
|
| 542 |
except Exception:
|
| 543 |
await asyncio.sleep(0.1)
|
| 544 |
|
|
|
|
| 517 |
|
| 518 |
@app.get("/detect/stream/{job_id}")
|
| 519 |
async def stream_video(job_id: str):
|
| 520 |
+
"""MJPEG stream of the processing video (optimized)."""
|
| 521 |
import queue
|
| 522 |
|
| 523 |
async def stream_generator():
|
| 524 |
+
loop = asyncio.get_running_loop()
|
| 525 |
while True:
|
|
|
|
| 526 |
q = get_stream(job_id)
|
| 527 |
if not q:
|
|
|
|
| 528 |
break
|
| 529 |
|
| 530 |
try:
|
| 531 |
+
# Get latest frame (skipping updated ones if laggy?)
|
| 532 |
+
# Actually, standard queue get is fine if we consume fast enough.
|
| 533 |
+
# To be super real-time, we could drain the queue?
|
| 534 |
frame = q.get_nowait()
|
| 535 |
+
while not q.empty():
|
| 536 |
+
try:
|
| 537 |
+
frame = q.get_nowait()
|
| 538 |
+
except queue.Empty:
|
| 539 |
+
break
|
| 540 |
+
|
| 541 |
+
# Resize if too big (e.g. > 640 width)
|
| 542 |
+
h, w = frame.shape[:2]
|
| 543 |
+
if w > 640:
|
| 544 |
+
scale = 640 / w
|
| 545 |
+
new_h = int(h * scale)
|
| 546 |
+
frame = cv2.resize(frame, (640, new_h), interpolation=cv2.INTER_NEAREST)
|
| 547 |
+
|
| 548 |
+
# Encode in thread
|
| 549 |
+
# JPEG Quality = 50 (Balance between speed/size)
|
| 550 |
+
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 50]
|
| 551 |
+
success, buffer = await loop.run_in_executor(None, cv2.imencode, '.jpg', frame, encode_param)
|
| 552 |
|
|
|
|
|
|
|
| 553 |
if success:
|
| 554 |
yield (b'--frame\r\n'
|
| 555 |
b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')
|
| 556 |
except queue.Empty:
|
| 557 |
+
await asyncio.sleep(0.02)
|
| 558 |
except Exception:
|
| 559 |
await asyncio.sleep(0.1)
|
| 560 |
|