Spaces:
Running on T4
Running on T4
Add OC-SORT tracker support and align dependency versions
Browse files
Integrate `OCSORTTracker` with direction consistency weight and `delta_t`
controls.
Update tracker parameter descriptions to show which trackers use each
setting.
Bump:
- `trackers` to `2.3.0`
- `inference-models` to `>=0.19.0`
- app.py +59 -10
- requirements.txt +2 -2
app.py
CHANGED
|
@@ -13,7 +13,7 @@ import torch
|
|
| 13 |
from tqdm import tqdm
|
| 14 |
from inference_models import AutoModel
|
| 15 |
|
| 16 |
-
from trackers import ByteTrackTracker, SORTTracker, frames_from_source
|
| 17 |
|
| 18 |
MAX_DURATION_SECONDS = 30
|
| 19 |
|
|
@@ -28,7 +28,7 @@ MODELS = [
|
|
| 28 |
"rfdetr-seg-large",
|
| 29 |
]
|
| 30 |
|
| 31 |
-
TRACKERS = ["bytetrack", "sort"]
|
| 32 |
|
| 33 |
COCO_CLASSES = [
|
| 34 |
"person",
|
|
@@ -147,13 +147,15 @@ VIDEO_EXAMPLES = [
|
|
| 147 |
[
|
| 148 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/bikes-1280x720-1.mp4",
|
| 149 |
"rfdetr-small",
|
| 150 |
-
"
|
| 151 |
0.2,
|
| 152 |
30,
|
| 153 |
0.3,
|
| 154 |
3,
|
| 155 |
0.1,
|
| 156 |
0.6,
|
|
|
|
|
|
|
| 157 |
[],
|
| 158 |
"",
|
| 159 |
True,
|
|
@@ -166,13 +168,15 @@ VIDEO_EXAMPLES = [
|
|
| 166 |
[
|
| 167 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/bikes-1280x720-1.mp4",
|
| 168 |
"rfdetr-small",
|
| 169 |
-
"
|
| 170 |
0.2,
|
| 171 |
30,
|
| 172 |
0.3,
|
| 173 |
3,
|
| 174 |
0.1,
|
| 175 |
0.6,
|
|
|
|
|
|
|
| 176 |
["person"],
|
| 177 |
"",
|
| 178 |
True,
|
|
@@ -192,6 +196,8 @@ VIDEO_EXAMPLES = [
|
|
| 192 |
3,
|
| 193 |
0.3,
|
| 194 |
0.6,
|
|
|
|
|
|
|
| 195 |
[],
|
| 196 |
"",
|
| 197 |
True,
|
|
@@ -211,6 +217,8 @@ VIDEO_EXAMPLES = [
|
|
| 211 |
3,
|
| 212 |
0.1,
|
| 213 |
0.6,
|
|
|
|
|
|
|
| 214 |
[],
|
| 215 |
"",
|
| 216 |
True,
|
|
@@ -230,6 +238,8 @@ VIDEO_EXAMPLES = [
|
|
| 230 |
3,
|
| 231 |
0.1,
|
| 232 |
0.6,
|
|
|
|
|
|
|
| 233 |
[],
|
| 234 |
"",
|
| 235 |
True,
|
|
@@ -249,6 +259,8 @@ VIDEO_EXAMPLES = [
|
|
| 249 |
3,
|
| 250 |
0.1,
|
| 251 |
0.6,
|
|
|
|
|
|
|
| 252 |
[],
|
| 253 |
"",
|
| 254 |
True,
|
|
@@ -268,6 +280,8 @@ VIDEO_EXAMPLES = [
|
|
| 268 |
3,
|
| 269 |
0.1,
|
| 270 |
0.6,
|
|
|
|
|
|
|
| 271 |
[],
|
| 272 |
"1",
|
| 273 |
True,
|
|
@@ -287,6 +301,8 @@ VIDEO_EXAMPLES = [
|
|
| 287 |
3,
|
| 288 |
0.1,
|
| 289 |
0.6,
|
|
|
|
|
|
|
| 290 |
[],
|
| 291 |
"",
|
| 292 |
True,
|
|
@@ -299,13 +315,15 @@ VIDEO_EXAMPLES = [
|
|
| 299 |
[
|
| 300 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/vehicles-1280x720.mp4",
|
| 301 |
"rfdetr-medium",
|
| 302 |
-
"
|
| 303 |
0.2,
|
| 304 |
30,
|
| 305 |
0.3,
|
| 306 |
3,
|
| 307 |
0.1,
|
| 308 |
0.6,
|
|
|
|
|
|
|
| 309 |
[],
|
| 310 |
"",
|
| 311 |
True,
|
|
@@ -383,6 +401,8 @@ def track(
|
|
| 383 |
minimum_consecutive_frames: int,
|
| 384 |
minimum_iou_threshold: float,
|
| 385 |
high_conf_det_threshold: float,
|
|
|
|
|
|
|
| 386 |
classes: list[str] | None = None,
|
| 387 |
track_ids: str = "",
|
| 388 |
show_boxes: bool = True,
|
|
@@ -419,6 +439,15 @@ def track(
|
|
| 419 |
minimum_iou_threshold=minimum_iou_threshold,
|
| 420 |
high_conf_det_threshold=high_conf_det_threshold,
|
| 421 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 422 |
else:
|
| 423 |
tracker = SORTTracker(
|
| 424 |
lost_track_buffer=lost_track_buffer,
|
|
@@ -558,7 +587,7 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 558 |
value=30,
|
| 559 |
step=1,
|
| 560 |
label="Lost Track Buffer",
|
| 561 |
-
info="Frames to keep a lost track before removing it.",
|
| 562 |
)
|
| 563 |
track_activation_slider = gr.Slider(
|
| 564 |
minimum=0.0,
|
|
@@ -566,7 +595,7 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 566 |
value=0.3,
|
| 567 |
step=0.05,
|
| 568 |
label="Track Activation Threshold",
|
| 569 |
-
info="Minimum score for a track to be activated.",
|
| 570 |
)
|
| 571 |
minimum_consecutive_slider = gr.Slider(
|
| 572 |
minimum=1,
|
|
@@ -574,7 +603,7 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 574 |
value=2,
|
| 575 |
step=1,
|
| 576 |
label="Minimum Consecutive Frames",
|
| 577 |
-
info="Detections needed before a track is confirmed.",
|
| 578 |
)
|
| 579 |
minimum_iou_slider = gr.Slider(
|
| 580 |
minimum=0.0,
|
|
@@ -582,7 +611,7 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 582 |
value=0.1,
|
| 583 |
step=0.05,
|
| 584 |
label="Minimum IoU Threshold",
|
| 585 |
-
info="Overlap required to match a detection to a track.",
|
| 586 |
)
|
| 587 |
high_confidence_slider = gr.Slider(
|
| 588 |
minimum=0.0,
|
|
@@ -590,7 +619,23 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 590 |
value=0.6,
|
| 591 |
step=0.05,
|
| 592 |
label="High Confidence Detection Threshold",
|
| 593 |
-
info="Detections above this are matched first (ByteTrack only).",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 594 |
)
|
| 595 |
|
| 596 |
with gr.Column():
|
|
@@ -640,6 +685,8 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 640 |
minimum_consecutive_slider,
|
| 641 |
minimum_iou_slider,
|
| 642 |
high_confidence_slider,
|
|
|
|
|
|
|
| 643 |
class_filter,
|
| 644 |
track_id_filter,
|
| 645 |
show_boxes_checkbox,
|
|
@@ -664,6 +711,8 @@ with gr.Blocks(title="Trackers Playground 🔥") as demo:
|
|
| 664 |
minimum_consecutive_slider,
|
| 665 |
minimum_iou_slider,
|
| 666 |
high_confidence_slider,
|
|
|
|
|
|
|
| 667 |
class_filter,
|
| 668 |
track_id_filter,
|
| 669 |
show_boxes_checkbox,
|
|
|
|
| 13 |
from tqdm import tqdm
|
| 14 |
from inference_models import AutoModel
|
| 15 |
|
| 16 |
+
from trackers import ByteTrackTracker, OCSORTTracker, SORTTracker, frames_from_source
|
| 17 |
|
| 18 |
MAX_DURATION_SECONDS = 30
|
| 19 |
|
|
|
|
| 28 |
"rfdetr-seg-large",
|
| 29 |
]
|
| 30 |
|
| 31 |
+
TRACKERS = ["bytetrack", "sort", "ocsort"]
|
| 32 |
|
| 33 |
COCO_CLASSES = [
|
| 34 |
"person",
|
|
|
|
| 147 |
[
|
| 148 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/bikes-1280x720-1.mp4",
|
| 149 |
"rfdetr-small",
|
| 150 |
+
"ocsort",
|
| 151 |
0.2,
|
| 152 |
30,
|
| 153 |
0.3,
|
| 154 |
3,
|
| 155 |
0.1,
|
| 156 |
0.6,
|
| 157 |
+
0.2,
|
| 158 |
+
3,
|
| 159 |
[],
|
| 160 |
"",
|
| 161 |
True,
|
|
|
|
| 168 |
[
|
| 169 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/bikes-1280x720-1.mp4",
|
| 170 |
"rfdetr-small",
|
| 171 |
+
"ocsort",
|
| 172 |
0.2,
|
| 173 |
30,
|
| 174 |
0.3,
|
| 175 |
3,
|
| 176 |
0.1,
|
| 177 |
0.6,
|
| 178 |
+
0.2,
|
| 179 |
+
3,
|
| 180 |
["person"],
|
| 181 |
"",
|
| 182 |
True,
|
|
|
|
| 196 |
3,
|
| 197 |
0.3,
|
| 198 |
0.6,
|
| 199 |
+
0.2,
|
| 200 |
+
3,
|
| 201 |
[],
|
| 202 |
"",
|
| 203 |
True,
|
|
|
|
| 217 |
3,
|
| 218 |
0.1,
|
| 219 |
0.6,
|
| 220 |
+
0.2,
|
| 221 |
+
3,
|
| 222 |
[],
|
| 223 |
"",
|
| 224 |
True,
|
|
|
|
| 238 |
3,
|
| 239 |
0.1,
|
| 240 |
0.6,
|
| 241 |
+
0.2,
|
| 242 |
+
3,
|
| 243 |
[],
|
| 244 |
"",
|
| 245 |
True,
|
|
|
|
| 259 |
3,
|
| 260 |
0.1,
|
| 261 |
0.6,
|
| 262 |
+
0.2,
|
| 263 |
+
3,
|
| 264 |
[],
|
| 265 |
"",
|
| 266 |
True,
|
|
|
|
| 280 |
3,
|
| 281 |
0.1,
|
| 282 |
0.6,
|
| 283 |
+
0.2,
|
| 284 |
+
3,
|
| 285 |
[],
|
| 286 |
"1",
|
| 287 |
True,
|
|
|
|
| 301 |
3,
|
| 302 |
0.1,
|
| 303 |
0.6,
|
| 304 |
+
0.2,
|
| 305 |
+
3,
|
| 306 |
[],
|
| 307 |
"",
|
| 308 |
True,
|
|
|
|
| 315 |
[
|
| 316 |
"https://storage.googleapis.com/com-roboflow-marketing/supervision/video-examples/vehicles-1280x720.mp4",
|
| 317 |
"rfdetr-medium",
|
| 318 |
+
"ocsort",
|
| 319 |
0.2,
|
| 320 |
30,
|
| 321 |
0.3,
|
| 322 |
3,
|
| 323 |
0.1,
|
| 324 |
0.6,
|
| 325 |
+
0.2,
|
| 326 |
+
3,
|
| 327 |
[],
|
| 328 |
"",
|
| 329 |
True,
|
|
|
|
| 401 |
minimum_consecutive_frames: int,
|
| 402 |
minimum_iou_threshold: float,
|
| 403 |
high_conf_det_threshold: float,
|
| 404 |
+
direction_consistency_weight: float,
|
| 405 |
+
delta_t: int,
|
| 406 |
classes: list[str] | None = None,
|
| 407 |
track_ids: str = "",
|
| 408 |
show_boxes: bool = True,
|
|
|
|
| 439 |
minimum_iou_threshold=minimum_iou_threshold,
|
| 440 |
high_conf_det_threshold=high_conf_det_threshold,
|
| 441 |
)
|
| 442 |
+
elif tracker_type == "ocsort":
|
| 443 |
+
tracker = OCSORTTracker(
|
| 444 |
+
lost_track_buffer=lost_track_buffer,
|
| 445 |
+
minimum_consecutive_frames=minimum_consecutive_frames,
|
| 446 |
+
minimum_iou_threshold=minimum_iou_threshold,
|
| 447 |
+
high_conf_det_threshold=high_conf_det_threshold,
|
| 448 |
+
direction_consistency_weight=direction_consistency_weight,
|
| 449 |
+
delta_t=delta_t,
|
| 450 |
+
)
|
| 451 |
else:
|
| 452 |
tracker = SORTTracker(
|
| 453 |
lost_track_buffer=lost_track_buffer,
|
|
|
|
| 587 |
value=30,
|
| 588 |
step=1,
|
| 589 |
label="Lost Track Buffer",
|
| 590 |
+
info="Frames to keep a lost track before removing it (ByteTrack, SORT, OC-SORT).",
|
| 591 |
)
|
| 592 |
track_activation_slider = gr.Slider(
|
| 593 |
minimum=0.0,
|
|
|
|
| 595 |
value=0.3,
|
| 596 |
step=0.05,
|
| 597 |
label="Track Activation Threshold",
|
| 598 |
+
info="Minimum score for a track to be activated (ByteTrack, SORT).",
|
| 599 |
)
|
| 600 |
minimum_consecutive_slider = gr.Slider(
|
| 601 |
minimum=1,
|
|
|
|
| 603 |
value=2,
|
| 604 |
step=1,
|
| 605 |
label="Minimum Consecutive Frames",
|
| 606 |
+
info="Detections needed before a track is confirmed (ByteTrack, SORT, OC-SORT).",
|
| 607 |
)
|
| 608 |
minimum_iou_slider = gr.Slider(
|
| 609 |
minimum=0.0,
|
|
|
|
| 611 |
value=0.1,
|
| 612 |
step=0.05,
|
| 613 |
label="Minimum IoU Threshold",
|
| 614 |
+
info="Overlap required to match a detection to a track (ByteTrack, SORT, OC-SORT).",
|
| 615 |
)
|
| 616 |
high_confidence_slider = gr.Slider(
|
| 617 |
minimum=0.0,
|
|
|
|
| 619 |
value=0.6,
|
| 620 |
step=0.05,
|
| 621 |
label="High Confidence Detection Threshold",
|
| 622 |
+
info="Detections above this are matched first (ByteTrack / OC-SORT).",
|
| 623 |
+
)
|
| 624 |
+
direction_consistency_slider = gr.Slider(
|
| 625 |
+
minimum=0.0,
|
| 626 |
+
maximum=1.0,
|
| 627 |
+
value=0.2,
|
| 628 |
+
step=0.05,
|
| 629 |
+
label="Direction Consistency Weight",
|
| 630 |
+
info="Weight for direction consistency in association cost (OC-SORT only).",
|
| 631 |
+
)
|
| 632 |
+
delta_t_slider = gr.Slider(
|
| 633 |
+
minimum=1,
|
| 634 |
+
maximum=10,
|
| 635 |
+
value=3,
|
| 636 |
+
step=1,
|
| 637 |
+
label="Delta T",
|
| 638 |
+
info="Past frames for velocity estimation during occlusion (OC-SORT only).",
|
| 639 |
)
|
| 640 |
|
| 641 |
with gr.Column():
|
|
|
|
| 685 |
minimum_consecutive_slider,
|
| 686 |
minimum_iou_slider,
|
| 687 |
high_confidence_slider,
|
| 688 |
+
direction_consistency_slider,
|
| 689 |
+
delta_t_slider,
|
| 690 |
class_filter,
|
| 691 |
track_id_filter,
|
| 692 |
show_boxes_checkbox,
|
|
|
|
| 711 |
minimum_consecutive_slider,
|
| 712 |
minimum_iou_slider,
|
| 713 |
high_confidence_slider,
|
| 714 |
+
direction_consistency_slider,
|
| 715 |
+
delta_t_slider,
|
| 716 |
class_filter,
|
| 717 |
track_id_filter,
|
| 718 |
show_boxes_checkbox,
|
requirements.txt
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
gradio>=6.3.0,<6.4.0
|
| 2 |
-
inference-models=
|
| 3 |
-
trackers==2.
|
|
|
|
| 1 |
gradio>=6.3.0,<6.4.0
|
| 2 |
+
inference-models>=0.19.0
|
| 3 |
+
trackers==2.3.0
|