Training in progress, step 2240
Browse files- {checkpoint-1920 → checkpoint-2240}/config.json +0 -0
- {checkpoint-1920 → checkpoint-2240}/optimizer.pt +1 -1
- {checkpoint-1920 → checkpoint-2240}/pytorch_model.bin +1 -1
- {checkpoint-1920 → checkpoint-2240}/rng_state.pth +1 -1
- {checkpoint-1920 → checkpoint-2240}/scheduler.pt +1 -1
- {checkpoint-1920 → checkpoint-2240}/trainer_state.json +2227 -3
- {checkpoint-1920 → checkpoint-2240}/training_args.bin +0 -0
- pytorch_model.bin +1 -1
- runs/Feb25_17-51-35_robolidar/events.out.tfevents.1708905105.robolidar.3970122.0 +2 -2
{checkpoint-1920 → checkpoint-2240}/config.json
RENAMED
|
File without changes
|
{checkpoint-1920 → checkpoint-2240}/optimizer.pt
RENAMED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 677462463
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:64ff459d6e274f1b17f34c7aa3e473e91fdde56043aa3a7d95a7ea1995fab441
|
| 3 |
size 677462463
|
{checkpoint-1920 → checkpoint-2240}/pytorch_model.bin
RENAMED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 338799561
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5fa2c3f05d0b10e1c3533945acbfb9eae36020ad3ea05f13ebff672020dca64c
|
| 3 |
size 338799561
|
{checkpoint-1920 → checkpoint-2240}/rng_state.pth
RENAMED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 14575
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:63c92ac00a4aa09cf58dcaf7ae9d07d199ccfb0bf44a7cabe8289c0cd0c5af57
|
| 3 |
size 14575
|
{checkpoint-1920 → checkpoint-2240}/scheduler.pt
RENAMED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 627
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:54036e085ada119b2e9231a567f5d7fa605d1b8cb9705720e2f4aa9d21eb8883
|
| 3 |
size 627
|
{checkpoint-1920 → checkpoint-2240}/trainer_state.json
RENAMED
|
@@ -1,8 +1,8 @@
|
|
| 1 |
{
|
| 2 |
"best_metric": 0.01912616565823555,
|
| 3 |
"best_model_checkpoint": "/robodata/smodak/Projects/nspl/scripts/terrainseg/training/models/parking-terrain/checkpoint-560",
|
| 4 |
-
"epoch": 480.0,
|
| 5 |
-
"global_step": 1920,
|
| 6 |
"is_hyper_param_search": false,
|
| 7 |
"is_local_process_zero": true,
|
| 8 |
"is_world_process_zero": true,
|
|
@@ -13350,11 +13350,2235 @@
|
|
| 13350 |
"eval_samples_per_second": 1.364,
|
| 13351 |
"eval_steps_per_second": 1.364,
|
| 13352 |
"step": 1920
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13353 |
}
|
| 13354 |
],
|
| 13355 |
"max_steps": 2400,
|
| 13356 |
"num_train_epochs": 600,
|
| 13357 |
-
"total_flos": 1.
|
| 13358 |
"trial_name": null,
|
| 13359 |
"trial_params": null
|
| 13360 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"best_metric": 0.01912616565823555,
|
| 3 |
"best_model_checkpoint": "/robodata/smodak/Projects/nspl/scripts/terrainseg/training/models/parking-terrain/checkpoint-560",
|
| 4 |
+
"epoch": 560.0,
|
| 5 |
+
"global_step": 2240,
|
| 6 |
"is_hyper_param_search": false,
|
| 7 |
"is_local_process_zero": true,
|
| 8 |
"is_world_process_zero": true,
|
|
|
|
| 13350 |
"eval_samples_per_second": 1.364,
|
| 13351 |
"eval_steps_per_second": 1.364,
|
| 13352 |
"step": 1920
|
| 13353 |
+
},
|
| 13354 |
+
{
|
| 13355 |
+
"epoch": 480.25,
|
| 13356 |
+
"learning_rate": 1.050438596491228e-05,
|
| 13357 |
+
"loss": 0.0069,
|
| 13358 |
+
"step": 1921
|
| 13359 |
+
},
|
| 13360 |
+
{
|
| 13361 |
+
"epoch": 480.5,
|
| 13362 |
+
"learning_rate": 1.0482456140350879e-05,
|
| 13363 |
+
"loss": 0.0064,
|
| 13364 |
+
"step": 1922
|
| 13365 |
+
},
|
| 13366 |
+
{
|
| 13367 |
+
"epoch": 480.75,
|
| 13368 |
+
"learning_rate": 1.0460526315789475e-05,
|
| 13369 |
+
"loss": 0.0079,
|
| 13370 |
+
"step": 1923
|
| 13371 |
+
},
|
| 13372 |
+
{
|
| 13373 |
+
"epoch": 481.0,
|
| 13374 |
+
"learning_rate": 1.0438596491228072e-05,
|
| 13375 |
+
"loss": 0.0072,
|
| 13376 |
+
"step": 1924
|
| 13377 |
+
},
|
| 13378 |
+
{
|
| 13379 |
+
"epoch": 481.25,
|
| 13380 |
+
"learning_rate": 1.0416666666666668e-05,
|
| 13381 |
+
"loss": 0.0071,
|
| 13382 |
+
"step": 1925
|
| 13383 |
+
},
|
| 13384 |
+
{
|
| 13385 |
+
"epoch": 481.5,
|
| 13386 |
+
"learning_rate": 1.0394736842105264e-05,
|
| 13387 |
+
"loss": 0.0069,
|
| 13388 |
+
"step": 1926
|
| 13389 |
+
},
|
| 13390 |
+
{
|
| 13391 |
+
"epoch": 481.75,
|
| 13392 |
+
"learning_rate": 1.037280701754386e-05,
|
| 13393 |
+
"loss": 0.007,
|
| 13394 |
+
"step": 1927
|
| 13395 |
+
},
|
| 13396 |
+
{
|
| 13397 |
+
"epoch": 482.0,
|
| 13398 |
+
"learning_rate": 1.0350877192982457e-05,
|
| 13399 |
+
"loss": 0.0069,
|
| 13400 |
+
"step": 1928
|
| 13401 |
+
},
|
| 13402 |
+
{
|
| 13403 |
+
"epoch": 482.25,
|
| 13404 |
+
"learning_rate": 1.0328947368421054e-05,
|
| 13405 |
+
"loss": 0.007,
|
| 13406 |
+
"step": 1929
|
| 13407 |
+
},
|
| 13408 |
+
{
|
| 13409 |
+
"epoch": 482.5,
|
| 13410 |
+
"learning_rate": 1.030701754385965e-05,
|
| 13411 |
+
"loss": 0.0068,
|
| 13412 |
+
"step": 1930
|
| 13413 |
+
},
|
| 13414 |
+
{
|
| 13415 |
+
"epoch": 482.75,
|
| 13416 |
+
"learning_rate": 1.0285087719298246e-05,
|
| 13417 |
+
"loss": 0.0071,
|
| 13418 |
+
"step": 1931
|
| 13419 |
+
},
|
| 13420 |
+
{
|
| 13421 |
+
"epoch": 483.0,
|
| 13422 |
+
"learning_rate": 1.0263157894736843e-05,
|
| 13423 |
+
"loss": 0.0065,
|
| 13424 |
+
"step": 1932
|
| 13425 |
+
},
|
| 13426 |
+
{
|
| 13427 |
+
"epoch": 483.25,
|
| 13428 |
+
"learning_rate": 1.0241228070175439e-05,
|
| 13429 |
+
"loss": 0.0078,
|
| 13430 |
+
"step": 1933
|
| 13431 |
+
},
|
| 13432 |
+
{
|
| 13433 |
+
"epoch": 483.5,
|
| 13434 |
+
"learning_rate": 1.0219298245614035e-05,
|
| 13435 |
+
"loss": 0.0069,
|
| 13436 |
+
"step": 1934
|
| 13437 |
+
},
|
| 13438 |
+
{
|
| 13439 |
+
"epoch": 483.75,
|
| 13440 |
+
"learning_rate": 1.0197368421052632e-05,
|
| 13441 |
+
"loss": 0.0065,
|
| 13442 |
+
"step": 1935
|
| 13443 |
+
},
|
| 13444 |
+
{
|
| 13445 |
+
"epoch": 484.0,
|
| 13446 |
+
"learning_rate": 1.0175438596491228e-05,
|
| 13447 |
+
"loss": 0.0074,
|
| 13448 |
+
"step": 1936
|
| 13449 |
+
},
|
| 13450 |
+
{
|
| 13451 |
+
"epoch": 484.25,
|
| 13452 |
+
"learning_rate": 1.0153508771929825e-05,
|
| 13453 |
+
"loss": 0.0073,
|
| 13454 |
+
"step": 1937
|
| 13455 |
+
},
|
| 13456 |
+
{
|
| 13457 |
+
"epoch": 484.5,
|
| 13458 |
+
"learning_rate": 1.0131578947368421e-05,
|
| 13459 |
+
"loss": 0.0076,
|
| 13460 |
+
"step": 1938
|
| 13461 |
+
},
|
| 13462 |
+
{
|
| 13463 |
+
"epoch": 484.75,
|
| 13464 |
+
"learning_rate": 1.0109649122807017e-05,
|
| 13465 |
+
"loss": 0.0067,
|
| 13466 |
+
"step": 1939
|
| 13467 |
+
},
|
| 13468 |
+
{
|
| 13469 |
+
"epoch": 485.0,
|
| 13470 |
+
"learning_rate": 1.0087719298245614e-05,
|
| 13471 |
+
"loss": 0.0073,
|
| 13472 |
+
"step": 1940
|
| 13473 |
+
},
|
| 13474 |
+
{
|
| 13475 |
+
"epoch": 485.0,
|
| 13476 |
+
"eval_accuracy_ELSE": 0.9971809494951681,
|
| 13477 |
+
"eval_accuracy_road": 0.9935780600330353,
|
| 13478 |
+
"eval_accuracy_sidewalk": 0.9083356800901154,
|
| 13479 |
+
"eval_accuracy_unlabeled": NaN,
|
| 13480 |
+
"eval_iou_ELSE": 0.9936432453416149,
|
| 13481 |
+
"eval_iou_road": 0.9875035380696292,
|
| 13482 |
+
"eval_iou_sidewalk": 0.8449246889325475,
|
| 13483 |
+
"eval_iou_unlabeled": NaN,
|
| 13484 |
+
"eval_loss": 0.022527044638991356,
|
| 13485 |
+
"eval_mean_accuracy": 0.9663648965394396,
|
| 13486 |
+
"eval_mean_iou": 0.9420238241145972,
|
| 13487 |
+
"eval_overall_accuracy": 0.9938087463378906,
|
| 13488 |
+
"eval_runtime": 0.603,
|
| 13489 |
+
"eval_samples_per_second": 1.658,
|
| 13490 |
+
"eval_steps_per_second": 1.658,
|
| 13491 |
+
"step": 1940
|
| 13492 |
+
},
|
| 13493 |
+
{
|
| 13494 |
+
"epoch": 485.25,
|
| 13495 |
+
"learning_rate": 1.006578947368421e-05,
|
| 13496 |
+
"loss": 0.0079,
|
| 13497 |
+
"step": 1941
|
| 13498 |
+
},
|
| 13499 |
+
{
|
| 13500 |
+
"epoch": 485.5,
|
| 13501 |
+
"learning_rate": 1.0043859649122808e-05,
|
| 13502 |
+
"loss": 0.0075,
|
| 13503 |
+
"step": 1942
|
| 13504 |
+
},
|
| 13505 |
+
{
|
| 13506 |
+
"epoch": 485.75,
|
| 13507 |
+
"learning_rate": 1.0021929824561404e-05,
|
| 13508 |
+
"loss": 0.0066,
|
| 13509 |
+
"step": 1943
|
| 13510 |
+
},
|
| 13511 |
+
{
|
| 13512 |
+
"epoch": 486.0,
|
| 13513 |
+
"learning_rate": 1e-05,
|
| 13514 |
+
"loss": 0.007,
|
| 13515 |
+
"step": 1944
|
| 13516 |
+
},
|
| 13517 |
+
{
|
| 13518 |
+
"epoch": 486.25,
|
| 13519 |
+
"learning_rate": 9.978070175438597e-06,
|
| 13520 |
+
"loss": 0.0069,
|
| 13521 |
+
"step": 1945
|
| 13522 |
+
},
|
| 13523 |
+
{
|
| 13524 |
+
"epoch": 486.5,
|
| 13525 |
+
"learning_rate": 9.956140350877194e-06,
|
| 13526 |
+
"loss": 0.0068,
|
| 13527 |
+
"step": 1946
|
| 13528 |
+
},
|
| 13529 |
+
{
|
| 13530 |
+
"epoch": 486.75,
|
| 13531 |
+
"learning_rate": 9.93421052631579e-06,
|
| 13532 |
+
"loss": 0.0072,
|
| 13533 |
+
"step": 1947
|
| 13534 |
+
},
|
| 13535 |
+
{
|
| 13536 |
+
"epoch": 487.0,
|
| 13537 |
+
"learning_rate": 9.912280701754386e-06,
|
| 13538 |
+
"loss": 0.0064,
|
| 13539 |
+
"step": 1948
|
| 13540 |
+
},
|
| 13541 |
+
{
|
| 13542 |
+
"epoch": 487.25,
|
| 13543 |
+
"learning_rate": 9.890350877192983e-06,
|
| 13544 |
+
"loss": 0.0069,
|
| 13545 |
+
"step": 1949
|
| 13546 |
+
},
|
| 13547 |
+
{
|
| 13548 |
+
"epoch": 487.5,
|
| 13549 |
+
"learning_rate": 9.868421052631579e-06,
|
| 13550 |
+
"loss": 0.0073,
|
| 13551 |
+
"step": 1950
|
| 13552 |
+
},
|
| 13553 |
+
{
|
| 13554 |
+
"epoch": 487.75,
|
| 13555 |
+
"learning_rate": 9.846491228070175e-06,
|
| 13556 |
+
"loss": 0.0072,
|
| 13557 |
+
"step": 1951
|
| 13558 |
+
},
|
| 13559 |
+
{
|
| 13560 |
+
"epoch": 488.0,
|
| 13561 |
+
"learning_rate": 9.824561403508772e-06,
|
| 13562 |
+
"loss": 0.0082,
|
| 13563 |
+
"step": 1952
|
| 13564 |
+
},
|
| 13565 |
+
{
|
| 13566 |
+
"epoch": 488.25,
|
| 13567 |
+
"learning_rate": 9.802631578947368e-06,
|
| 13568 |
+
"loss": 0.0068,
|
| 13569 |
+
"step": 1953
|
| 13570 |
+
},
|
| 13571 |
+
{
|
| 13572 |
+
"epoch": 488.5,
|
| 13573 |
+
"learning_rate": 9.780701754385965e-06,
|
| 13574 |
+
"loss": 0.0074,
|
| 13575 |
+
"step": 1954
|
| 13576 |
+
},
|
| 13577 |
+
{
|
| 13578 |
+
"epoch": 488.75,
|
| 13579 |
+
"learning_rate": 9.758771929824561e-06,
|
| 13580 |
+
"loss": 0.0068,
|
| 13581 |
+
"step": 1955
|
| 13582 |
+
},
|
| 13583 |
+
{
|
| 13584 |
+
"epoch": 489.0,
|
| 13585 |
+
"learning_rate": 9.736842105263157e-06,
|
| 13586 |
+
"loss": 0.007,
|
| 13587 |
+
"step": 1956
|
| 13588 |
+
},
|
| 13589 |
+
{
|
| 13590 |
+
"epoch": 489.25,
|
| 13591 |
+
"learning_rate": 9.714912280701754e-06,
|
| 13592 |
+
"loss": 0.0072,
|
| 13593 |
+
"step": 1957
|
| 13594 |
+
},
|
| 13595 |
+
{
|
| 13596 |
+
"epoch": 489.5,
|
| 13597 |
+
"learning_rate": 9.692982456140352e-06,
|
| 13598 |
+
"loss": 0.0069,
|
| 13599 |
+
"step": 1958
|
| 13600 |
+
},
|
| 13601 |
+
{
|
| 13602 |
+
"epoch": 489.75,
|
| 13603 |
+
"learning_rate": 9.671052631578948e-06,
|
| 13604 |
+
"loss": 0.007,
|
| 13605 |
+
"step": 1959
|
| 13606 |
+
},
|
| 13607 |
+
{
|
| 13608 |
+
"epoch": 490.0,
|
| 13609 |
+
"learning_rate": 9.649122807017545e-06,
|
| 13610 |
+
"loss": 0.0079,
|
| 13611 |
+
"step": 1960
|
| 13612 |
+
},
|
| 13613 |
+
{
|
| 13614 |
+
"epoch": 490.0,
|
| 13615 |
+
"eval_accuracy_ELSE": 0.9969212289112297,
|
| 13616 |
+
"eval_accuracy_road": 0.9938913253972774,
|
| 13617 |
+
"eval_accuracy_sidewalk": 0.9083356800901154,
|
| 13618 |
+
"eval_accuracy_unlabeled": NaN,
|
| 13619 |
+
"eval_iou_ELSE": 0.9934165830933972,
|
| 13620 |
+
"eval_iou_road": 0.9875213989615313,
|
| 13621 |
+
"eval_iou_sidewalk": 0.843709128956317,
|
| 13622 |
+
"eval_iou_unlabeled": NaN,
|
| 13623 |
+
"eval_loss": 0.022614937275648117,
|
| 13624 |
+
"eval_mean_accuracy": 0.9663827447995409,
|
| 13625 |
+
"eval_mean_iou": 0.9415490370037484,
|
| 13626 |
+
"eval_overall_accuracy": 0.9937095642089844,
|
| 13627 |
+
"eval_runtime": 0.6147,
|
| 13628 |
+
"eval_samples_per_second": 1.627,
|
| 13629 |
+
"eval_steps_per_second": 1.627,
|
| 13630 |
+
"step": 1960
|
| 13631 |
+
},
|
| 13632 |
+
{
|
| 13633 |
+
"epoch": 490.25,
|
| 13634 |
+
"learning_rate": 9.627192982456141e-06,
|
| 13635 |
+
"loss": 0.0068,
|
| 13636 |
+
"step": 1961
|
| 13637 |
+
},
|
| 13638 |
+
{
|
| 13639 |
+
"epoch": 490.5,
|
| 13640 |
+
"learning_rate": 9.605263157894737e-06,
|
| 13641 |
+
"loss": 0.0073,
|
| 13642 |
+
"step": 1962
|
| 13643 |
+
},
|
| 13644 |
+
{
|
| 13645 |
+
"epoch": 490.75,
|
| 13646 |
+
"learning_rate": 9.583333333333334e-06,
|
| 13647 |
+
"loss": 0.0071,
|
| 13648 |
+
"step": 1963
|
| 13649 |
+
},
|
| 13650 |
+
{
|
| 13651 |
+
"epoch": 491.0,
|
| 13652 |
+
"learning_rate": 9.561403508771932e-06,
|
| 13653 |
+
"loss": 0.0069,
|
| 13654 |
+
"step": 1964
|
| 13655 |
+
},
|
| 13656 |
+
{
|
| 13657 |
+
"epoch": 491.25,
|
| 13658 |
+
"learning_rate": 9.539473684210528e-06,
|
| 13659 |
+
"loss": 0.0072,
|
| 13660 |
+
"step": 1965
|
| 13661 |
+
},
|
| 13662 |
+
{
|
| 13663 |
+
"epoch": 491.5,
|
| 13664 |
+
"learning_rate": 9.517543859649124e-06,
|
| 13665 |
+
"loss": 0.0063,
|
| 13666 |
+
"step": 1966
|
| 13667 |
+
},
|
| 13668 |
+
{
|
| 13669 |
+
"epoch": 491.75,
|
| 13670 |
+
"learning_rate": 9.49561403508772e-06,
|
| 13671 |
+
"loss": 0.0076,
|
| 13672 |
+
"step": 1967
|
| 13673 |
+
},
|
| 13674 |
+
{
|
| 13675 |
+
"epoch": 492.0,
|
| 13676 |
+
"learning_rate": 9.473684210526317e-06,
|
| 13677 |
+
"loss": 0.0071,
|
| 13678 |
+
"step": 1968
|
| 13679 |
+
},
|
| 13680 |
+
{
|
| 13681 |
+
"epoch": 492.25,
|
| 13682 |
+
"learning_rate": 9.451754385964914e-06,
|
| 13683 |
+
"loss": 0.0067,
|
| 13684 |
+
"step": 1969
|
| 13685 |
+
},
|
| 13686 |
+
{
|
| 13687 |
+
"epoch": 492.5,
|
| 13688 |
+
"learning_rate": 9.42982456140351e-06,
|
| 13689 |
+
"loss": 0.0068,
|
| 13690 |
+
"step": 1970
|
| 13691 |
+
},
|
| 13692 |
+
{
|
| 13693 |
+
"epoch": 492.75,
|
| 13694 |
+
"learning_rate": 9.407894736842106e-06,
|
| 13695 |
+
"loss": 0.0077,
|
| 13696 |
+
"step": 1971
|
| 13697 |
+
},
|
| 13698 |
+
{
|
| 13699 |
+
"epoch": 493.0,
|
| 13700 |
+
"learning_rate": 9.385964912280703e-06,
|
| 13701 |
+
"loss": 0.0066,
|
| 13702 |
+
"step": 1972
|
| 13703 |
+
},
|
| 13704 |
+
{
|
| 13705 |
+
"epoch": 493.25,
|
| 13706 |
+
"learning_rate": 9.364035087719299e-06,
|
| 13707 |
+
"loss": 0.0067,
|
| 13708 |
+
"step": 1973
|
| 13709 |
+
},
|
| 13710 |
+
{
|
| 13711 |
+
"epoch": 493.5,
|
| 13712 |
+
"learning_rate": 9.342105263157895e-06,
|
| 13713 |
+
"loss": 0.0072,
|
| 13714 |
+
"step": 1974
|
| 13715 |
+
},
|
| 13716 |
+
{
|
| 13717 |
+
"epoch": 493.75,
|
| 13718 |
+
"learning_rate": 9.320175438596492e-06,
|
| 13719 |
+
"loss": 0.0066,
|
| 13720 |
+
"step": 1975
|
| 13721 |
+
},
|
| 13722 |
+
{
|
| 13723 |
+
"epoch": 494.0,
|
| 13724 |
+
"learning_rate": 9.298245614035088e-06,
|
| 13725 |
+
"loss": 0.0065,
|
| 13726 |
+
"step": 1976
|
| 13727 |
+
},
|
| 13728 |
+
{
|
| 13729 |
+
"epoch": 494.25,
|
| 13730 |
+
"learning_rate": 9.276315789473685e-06,
|
| 13731 |
+
"loss": 0.0077,
|
| 13732 |
+
"step": 1977
|
| 13733 |
+
},
|
| 13734 |
+
{
|
| 13735 |
+
"epoch": 494.5,
|
| 13736 |
+
"learning_rate": 9.254385964912281e-06,
|
| 13737 |
+
"loss": 0.0063,
|
| 13738 |
+
"step": 1978
|
| 13739 |
+
},
|
| 13740 |
+
{
|
| 13741 |
+
"epoch": 494.75,
|
| 13742 |
+
"learning_rate": 9.232456140350877e-06,
|
| 13743 |
+
"loss": 0.0068,
|
| 13744 |
+
"step": 1979
|
| 13745 |
+
},
|
| 13746 |
+
{
|
| 13747 |
+
"epoch": 495.0,
|
| 13748 |
+
"learning_rate": 9.210526315789474e-06,
|
| 13749 |
+
"loss": 0.007,
|
| 13750 |
+
"step": 1980
|
| 13751 |
+
},
|
| 13752 |
+
{
|
| 13753 |
+
"epoch": 495.0,
|
| 13754 |
+
"eval_accuracy_ELSE": 0.996991569902713,
|
| 13755 |
+
"eval_accuracy_road": 0.9940764367488751,
|
| 13756 |
+
"eval_accuracy_sidewalk": 0.9059419881723458,
|
| 13757 |
+
"eval_accuracy_unlabeled": NaN,
|
| 13758 |
+
"eval_iou_ELSE": 0.9935241751546163,
|
| 13759 |
+
"eval_iou_road": 0.9874957564784429,
|
| 13760 |
+
"eval_iou_sidewalk": 0.843360859876786,
|
| 13761 |
+
"eval_iou_unlabeled": NaN,
|
| 13762 |
+
"eval_loss": 0.02254437282681465,
|
| 13763 |
+
"eval_mean_accuracy": 0.9656699982746447,
|
| 13764 |
+
"eval_mean_iou": 0.9414602638366151,
|
| 13765 |
+
"eval_overall_accuracy": 0.993743896484375,
|
| 13766 |
+
"eval_runtime": 0.5975,
|
| 13767 |
+
"eval_samples_per_second": 1.674,
|
| 13768 |
+
"eval_steps_per_second": 1.674,
|
| 13769 |
+
"step": 1980
|
| 13770 |
+
},
|
| 13771 |
+
{
|
| 13772 |
+
"epoch": 495.25,
|
| 13773 |
+
"learning_rate": 9.18859649122807e-06,
|
| 13774 |
+
"loss": 0.007,
|
| 13775 |
+
"step": 1981
|
| 13776 |
+
},
|
| 13777 |
+
{
|
| 13778 |
+
"epoch": 495.5,
|
| 13779 |
+
"learning_rate": 9.166666666666666e-06,
|
| 13780 |
+
"loss": 0.0067,
|
| 13781 |
+
"step": 1982
|
| 13782 |
+
},
|
| 13783 |
+
{
|
| 13784 |
+
"epoch": 495.75,
|
| 13785 |
+
"learning_rate": 9.144736842105264e-06,
|
| 13786 |
+
"loss": 0.0075,
|
| 13787 |
+
"step": 1983
|
| 13788 |
+
},
|
| 13789 |
+
{
|
| 13790 |
+
"epoch": 496.0,
|
| 13791 |
+
"learning_rate": 9.122807017543861e-06,
|
| 13792 |
+
"loss": 0.0077,
|
| 13793 |
+
"step": 1984
|
| 13794 |
+
},
|
| 13795 |
+
{
|
| 13796 |
+
"epoch": 496.25,
|
| 13797 |
+
"learning_rate": 9.100877192982457e-06,
|
| 13798 |
+
"loss": 0.0079,
|
| 13799 |
+
"step": 1985
|
| 13800 |
+
},
|
| 13801 |
+
{
|
| 13802 |
+
"epoch": 496.5,
|
| 13803 |
+
"learning_rate": 9.078947368421054e-06,
|
| 13804 |
+
"loss": 0.0066,
|
| 13805 |
+
"step": 1986
|
| 13806 |
+
},
|
| 13807 |
+
{
|
| 13808 |
+
"epoch": 496.75,
|
| 13809 |
+
"learning_rate": 9.05701754385965e-06,
|
| 13810 |
+
"loss": 0.0069,
|
| 13811 |
+
"step": 1987
|
| 13812 |
+
},
|
| 13813 |
+
{
|
| 13814 |
+
"epoch": 497.0,
|
| 13815 |
+
"learning_rate": 9.035087719298246e-06,
|
| 13816 |
+
"loss": 0.0066,
|
| 13817 |
+
"step": 1988
|
| 13818 |
+
},
|
| 13819 |
+
{
|
| 13820 |
+
"epoch": 497.25,
|
| 13821 |
+
"learning_rate": 9.013157894736843e-06,
|
| 13822 |
+
"loss": 0.0068,
|
| 13823 |
+
"step": 1989
|
| 13824 |
+
},
|
| 13825 |
+
{
|
| 13826 |
+
"epoch": 497.5,
|
| 13827 |
+
"learning_rate": 8.991228070175439e-06,
|
| 13828 |
+
"loss": 0.0066,
|
| 13829 |
+
"step": 1990
|
| 13830 |
+
},
|
| 13831 |
+
{
|
| 13832 |
+
"epoch": 497.75,
|
| 13833 |
+
"learning_rate": 8.969298245614035e-06,
|
| 13834 |
+
"loss": 0.0071,
|
| 13835 |
+
"step": 1991
|
| 13836 |
+
},
|
| 13837 |
+
{
|
| 13838 |
+
"epoch": 498.0,
|
| 13839 |
+
"learning_rate": 8.947368421052632e-06,
|
| 13840 |
+
"loss": 0.0064,
|
| 13841 |
+
"step": 1992
|
| 13842 |
+
},
|
| 13843 |
+
{
|
| 13844 |
+
"epoch": 498.25,
|
| 13845 |
+
"learning_rate": 8.925438596491228e-06,
|
| 13846 |
+
"loss": 0.0067,
|
| 13847 |
+
"step": 1993
|
| 13848 |
+
},
|
| 13849 |
+
{
|
| 13850 |
+
"epoch": 498.5,
|
| 13851 |
+
"learning_rate": 8.903508771929825e-06,
|
| 13852 |
+
"loss": 0.0078,
|
| 13853 |
+
"step": 1994
|
| 13854 |
+
},
|
| 13855 |
+
{
|
| 13856 |
+
"epoch": 498.75,
|
| 13857 |
+
"learning_rate": 8.881578947368421e-06,
|
| 13858 |
+
"loss": 0.007,
|
| 13859 |
+
"step": 1995
|
| 13860 |
+
},
|
| 13861 |
+
{
|
| 13862 |
+
"epoch": 499.0,
|
| 13863 |
+
"learning_rate": 8.859649122807017e-06,
|
| 13864 |
+
"loss": 0.0065,
|
| 13865 |
+
"step": 1996
|
| 13866 |
+
},
|
| 13867 |
+
{
|
| 13868 |
+
"epoch": 499.25,
|
| 13869 |
+
"learning_rate": 8.837719298245614e-06,
|
| 13870 |
+
"loss": 0.007,
|
| 13871 |
+
"step": 1997
|
| 13872 |
+
},
|
| 13873 |
+
{
|
| 13874 |
+
"epoch": 499.5,
|
| 13875 |
+
"learning_rate": 8.81578947368421e-06,
|
| 13876 |
+
"loss": 0.0069,
|
| 13877 |
+
"step": 1998
|
| 13878 |
+
},
|
| 13879 |
+
{
|
| 13880 |
+
"epoch": 499.75,
|
| 13881 |
+
"learning_rate": 8.793859649122806e-06,
|
| 13882 |
+
"loss": 0.0069,
|
| 13883 |
+
"step": 1999
|
| 13884 |
+
},
|
| 13885 |
+
{
|
| 13886 |
+
"epoch": 500.0,
|
| 13887 |
+
"learning_rate": 8.771929824561403e-06,
|
| 13888 |
+
"loss": 0.0072,
|
| 13889 |
+
"step": 2000
|
| 13890 |
+
},
|
| 13891 |
+
{
|
| 13892 |
+
"epoch": 500.0,
|
| 13893 |
+
"eval_accuracy_ELSE": 0.9968995855292347,
|
| 13894 |
+
"eval_accuracy_road": 0.9945036167910235,
|
| 13895 |
+
"eval_accuracy_sidewalk": 0.905519571951563,
|
| 13896 |
+
"eval_accuracy_unlabeled": NaN,
|
| 13897 |
+
"eval_iou_ELSE": 0.9935021515697292,
|
| 13898 |
+
"eval_iou_road": 0.9878083276759447,
|
| 13899 |
+
"eval_iou_sidewalk": 0.843520461699895,
|
| 13900 |
+
"eval_iou_unlabeled": NaN,
|
| 13901 |
+
"eval_loss": 0.022378191351890564,
|
| 13902 |
+
"eval_mean_accuracy": 0.9656409247572738,
|
| 13903 |
+
"eval_mean_iou": 0.941610313648523,
|
| 13904 |
+
"eval_overall_accuracy": 0.9937820434570312,
|
| 13905 |
+
"eval_runtime": 0.7261,
|
| 13906 |
+
"eval_samples_per_second": 1.377,
|
| 13907 |
+
"eval_steps_per_second": 1.377,
|
| 13908 |
+
"step": 2000
|
| 13909 |
+
},
|
| 13910 |
+
{
|
| 13911 |
+
"epoch": 500.25,
|
| 13912 |
+
"learning_rate": 8.75e-06,
|
| 13913 |
+
"loss": 0.0065,
|
| 13914 |
+
"step": 2001
|
| 13915 |
+
},
|
| 13916 |
+
{
|
| 13917 |
+
"epoch": 500.5,
|
| 13918 |
+
"learning_rate": 8.728070175438596e-06,
|
| 13919 |
+
"loss": 0.0073,
|
| 13920 |
+
"step": 2002
|
| 13921 |
+
},
|
| 13922 |
+
{
|
| 13923 |
+
"epoch": 500.75,
|
| 13924 |
+
"learning_rate": 8.706140350877192e-06,
|
| 13925 |
+
"loss": 0.0068,
|
| 13926 |
+
"step": 2003
|
| 13927 |
+
},
|
| 13928 |
+
{
|
| 13929 |
+
"epoch": 501.0,
|
| 13930 |
+
"learning_rate": 8.68421052631579e-06,
|
| 13931 |
+
"loss": 0.0073,
|
| 13932 |
+
"step": 2004
|
| 13933 |
+
},
|
| 13934 |
+
{
|
| 13935 |
+
"epoch": 501.25,
|
| 13936 |
+
"learning_rate": 8.662280701754386e-06,
|
| 13937 |
+
"loss": 0.0071,
|
| 13938 |
+
"step": 2005
|
| 13939 |
+
},
|
| 13940 |
+
{
|
| 13941 |
+
"epoch": 501.5,
|
| 13942 |
+
"learning_rate": 8.640350877192983e-06,
|
| 13943 |
+
"loss": 0.007,
|
| 13944 |
+
"step": 2006
|
| 13945 |
+
},
|
| 13946 |
+
{
|
| 13947 |
+
"epoch": 501.75,
|
| 13948 |
+
"learning_rate": 8.61842105263158e-06,
|
| 13949 |
+
"loss": 0.0067,
|
| 13950 |
+
"step": 2007
|
| 13951 |
+
},
|
| 13952 |
+
{
|
| 13953 |
+
"epoch": 502.0,
|
| 13954 |
+
"learning_rate": 8.596491228070176e-06,
|
| 13955 |
+
"loss": 0.0074,
|
| 13956 |
+
"step": 2008
|
| 13957 |
+
},
|
| 13958 |
+
{
|
| 13959 |
+
"epoch": 502.25,
|
| 13960 |
+
"learning_rate": 8.574561403508772e-06,
|
| 13961 |
+
"loss": 0.0067,
|
| 13962 |
+
"step": 2009
|
| 13963 |
+
},
|
| 13964 |
+
{
|
| 13965 |
+
"epoch": 502.5,
|
| 13966 |
+
"learning_rate": 8.552631578947368e-06,
|
| 13967 |
+
"loss": 0.0068,
|
| 13968 |
+
"step": 2010
|
| 13969 |
+
},
|
| 13970 |
+
{
|
| 13971 |
+
"epoch": 502.75,
|
| 13972 |
+
"learning_rate": 8.530701754385965e-06,
|
| 13973 |
+
"loss": 0.0075,
|
| 13974 |
+
"step": 2011
|
| 13975 |
+
},
|
| 13976 |
+
{
|
| 13977 |
+
"epoch": 503.0,
|
| 13978 |
+
"learning_rate": 8.508771929824563e-06,
|
| 13979 |
+
"loss": 0.0074,
|
| 13980 |
+
"step": 2012
|
| 13981 |
+
},
|
| 13982 |
+
{
|
| 13983 |
+
"epoch": 503.25,
|
| 13984 |
+
"learning_rate": 8.486842105263159e-06,
|
| 13985 |
+
"loss": 0.0062,
|
| 13986 |
+
"step": 2013
|
| 13987 |
+
},
|
| 13988 |
+
{
|
| 13989 |
+
"epoch": 503.5,
|
| 13990 |
+
"learning_rate": 8.464912280701755e-06,
|
| 13991 |
+
"loss": 0.0073,
|
| 13992 |
+
"step": 2014
|
| 13993 |
+
},
|
| 13994 |
+
{
|
| 13995 |
+
"epoch": 503.75,
|
| 13996 |
+
"learning_rate": 8.442982456140352e-06,
|
| 13997 |
+
"loss": 0.0066,
|
| 13998 |
+
"step": 2015
|
| 13999 |
+
},
|
| 14000 |
+
{
|
| 14001 |
+
"epoch": 504.0,
|
| 14002 |
+
"learning_rate": 8.421052631578948e-06,
|
| 14003 |
+
"loss": 0.0076,
|
| 14004 |
+
"step": 2016
|
| 14005 |
+
},
|
| 14006 |
+
{
|
| 14007 |
+
"epoch": 504.25,
|
| 14008 |
+
"learning_rate": 8.399122807017545e-06,
|
| 14009 |
+
"loss": 0.0075,
|
| 14010 |
+
"step": 2017
|
| 14011 |
+
},
|
| 14012 |
+
{
|
| 14013 |
+
"epoch": 504.5,
|
| 14014 |
+
"learning_rate": 8.377192982456141e-06,
|
| 14015 |
+
"loss": 0.0077,
|
| 14016 |
+
"step": 2018
|
| 14017 |
+
},
|
| 14018 |
+
{
|
| 14019 |
+
"epoch": 504.75,
|
| 14020 |
+
"learning_rate": 8.355263157894737e-06,
|
| 14021 |
+
"loss": 0.0068,
|
| 14022 |
+
"step": 2019
|
| 14023 |
+
},
|
| 14024 |
+
{
|
| 14025 |
+
"epoch": 505.0,
|
| 14026 |
+
"learning_rate": 8.333333333333334e-06,
|
| 14027 |
+
"loss": 0.0068,
|
| 14028 |
+
"step": 2020
|
| 14029 |
+
},
|
| 14030 |
+
{
|
| 14031 |
+
"epoch": 505.0,
|
| 14032 |
+
"eval_accuracy_ELSE": 0.9969266397567283,
|
| 14033 |
+
"eval_accuracy_road": 0.994176112092043,
|
| 14034 |
+
"eval_accuracy_sidewalk": 0.9080540692762602,
|
| 14035 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14036 |
+
"eval_iou_ELSE": 0.9936201996451473,
|
| 14037 |
+
"eval_iou_road": 0.9876366825569717,
|
| 14038 |
+
"eval_iou_sidewalk": 0.8427861996863565,
|
| 14039 |
+
"eval_iou_unlabeled": NaN,
|
| 14040 |
+
"eval_loss": 0.02252509817481041,
|
| 14041 |
+
"eval_mean_accuracy": 0.9663856070416772,
|
| 14042 |
+
"eval_mean_iou": 0.941347693962825,
|
| 14043 |
+
"eval_overall_accuracy": 0.9937820434570312,
|
| 14044 |
+
"eval_runtime": 0.6101,
|
| 14045 |
+
"eval_samples_per_second": 1.639,
|
| 14046 |
+
"eval_steps_per_second": 1.639,
|
| 14047 |
+
"step": 2020
|
| 14048 |
+
},
|
| 14049 |
+
{
|
| 14050 |
+
"epoch": 505.25,
|
| 14051 |
+
"learning_rate": 8.31140350877193e-06,
|
| 14052 |
+
"loss": 0.0063,
|
| 14053 |
+
"step": 2021
|
| 14054 |
+
},
|
| 14055 |
+
{
|
| 14056 |
+
"epoch": 505.5,
|
| 14057 |
+
"learning_rate": 8.289473684210526e-06,
|
| 14058 |
+
"loss": 0.0071,
|
| 14059 |
+
"step": 2022
|
| 14060 |
+
},
|
| 14061 |
+
{
|
| 14062 |
+
"epoch": 505.75,
|
| 14063 |
+
"learning_rate": 8.267543859649123e-06,
|
| 14064 |
+
"loss": 0.0074,
|
| 14065 |
+
"step": 2023
|
| 14066 |
+
},
|
| 14067 |
+
{
|
| 14068 |
+
"epoch": 506.0,
|
| 14069 |
+
"learning_rate": 8.245614035087721e-06,
|
| 14070 |
+
"loss": 0.0068,
|
| 14071 |
+
"step": 2024
|
| 14072 |
+
},
|
| 14073 |
+
{
|
| 14074 |
+
"epoch": 506.25,
|
| 14075 |
+
"learning_rate": 8.223684210526317e-06,
|
| 14076 |
+
"loss": 0.007,
|
| 14077 |
+
"step": 2025
|
| 14078 |
+
},
|
| 14079 |
+
{
|
| 14080 |
+
"epoch": 506.5,
|
| 14081 |
+
"learning_rate": 8.201754385964914e-06,
|
| 14082 |
+
"loss": 0.0064,
|
| 14083 |
+
"step": 2026
|
| 14084 |
+
},
|
| 14085 |
+
{
|
| 14086 |
+
"epoch": 506.75,
|
| 14087 |
+
"learning_rate": 8.17982456140351e-06,
|
| 14088 |
+
"loss": 0.0065,
|
| 14089 |
+
"step": 2027
|
| 14090 |
+
},
|
| 14091 |
+
{
|
| 14092 |
+
"epoch": 507.0,
|
| 14093 |
+
"learning_rate": 8.157894736842106e-06,
|
| 14094 |
+
"loss": 0.0073,
|
| 14095 |
+
"step": 2028
|
| 14096 |
+
},
|
| 14097 |
+
{
|
| 14098 |
+
"epoch": 507.25,
|
| 14099 |
+
"learning_rate": 8.135964912280703e-06,
|
| 14100 |
+
"loss": 0.0072,
|
| 14101 |
+
"step": 2029
|
| 14102 |
+
},
|
| 14103 |
+
{
|
| 14104 |
+
"epoch": 507.5,
|
| 14105 |
+
"learning_rate": 8.1140350877193e-06,
|
| 14106 |
+
"loss": 0.0065,
|
| 14107 |
+
"step": 2030
|
| 14108 |
+
},
|
| 14109 |
+
{
|
| 14110 |
+
"epoch": 507.75,
|
| 14111 |
+
"learning_rate": 8.092105263157896e-06,
|
| 14112 |
+
"loss": 0.0064,
|
| 14113 |
+
"step": 2031
|
| 14114 |
+
},
|
| 14115 |
+
{
|
| 14116 |
+
"epoch": 508.0,
|
| 14117 |
+
"learning_rate": 8.070175438596492e-06,
|
| 14118 |
+
"loss": 0.0076,
|
| 14119 |
+
"step": 2032
|
| 14120 |
+
},
|
| 14121 |
+
{
|
| 14122 |
+
"epoch": 508.25,
|
| 14123 |
+
"learning_rate": 8.048245614035088e-06,
|
| 14124 |
+
"loss": 0.0075,
|
| 14125 |
+
"step": 2033
|
| 14126 |
+
},
|
| 14127 |
+
{
|
| 14128 |
+
"epoch": 508.5,
|
| 14129 |
+
"learning_rate": 8.026315789473685e-06,
|
| 14130 |
+
"loss": 0.0062,
|
| 14131 |
+
"step": 2034
|
| 14132 |
+
},
|
| 14133 |
+
{
|
| 14134 |
+
"epoch": 508.75,
|
| 14135 |
+
"learning_rate": 8.004385964912281e-06,
|
| 14136 |
+
"loss": 0.0071,
|
| 14137 |
+
"step": 2035
|
| 14138 |
+
},
|
| 14139 |
+
{
|
| 14140 |
+
"epoch": 509.0,
|
| 14141 |
+
"learning_rate": 7.982456140350877e-06,
|
| 14142 |
+
"loss": 0.0067,
|
| 14143 |
+
"step": 2036
|
| 14144 |
+
},
|
| 14145 |
+
{
|
| 14146 |
+
"epoch": 509.25,
|
| 14147 |
+
"learning_rate": 7.960526315789474e-06,
|
| 14148 |
+
"loss": 0.0075,
|
| 14149 |
+
"step": 2037
|
| 14150 |
+
},
|
| 14151 |
+
{
|
| 14152 |
+
"epoch": 509.5,
|
| 14153 |
+
"learning_rate": 7.93859649122807e-06,
|
| 14154 |
+
"loss": 0.0073,
|
| 14155 |
+
"step": 2038
|
| 14156 |
+
},
|
| 14157 |
+
{
|
| 14158 |
+
"epoch": 509.75,
|
| 14159 |
+
"learning_rate": 7.916666666666667e-06,
|
| 14160 |
+
"loss": 0.0066,
|
| 14161 |
+
"step": 2039
|
| 14162 |
+
},
|
| 14163 |
+
{
|
| 14164 |
+
"epoch": 510.0,
|
| 14165 |
+
"learning_rate": 7.894736842105263e-06,
|
| 14166 |
+
"loss": 0.0061,
|
| 14167 |
+
"step": 2040
|
| 14168 |
+
},
|
| 14169 |
+
{
|
| 14170 |
+
"epoch": 510.0,
|
| 14171 |
+
"eval_accuracy_ELSE": 0.9972675230231476,
|
| 14172 |
+
"eval_accuracy_road": 0.993720453380418,
|
| 14173 |
+
"eval_accuracy_sidewalk": 0.9065052098000563,
|
| 14174 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14175 |
+
"eval_iou_ELSE": 0.9936598646790845,
|
| 14176 |
+
"eval_iou_road": 0.9873935312261242,
|
| 14177 |
+
"eval_iou_sidewalk": 0.8481096034778026,
|
| 14178 |
+
"eval_iou_unlabeled": NaN,
|
| 14179 |
+
"eval_loss": 0.02259432151913643,
|
| 14180 |
+
"eval_mean_accuracy": 0.9658310620678741,
|
| 14181 |
+
"eval_mean_iou": 0.9430543331276704,
|
| 14182 |
+
"eval_overall_accuracy": 0.9938583374023438,
|
| 14183 |
+
"eval_runtime": 0.6162,
|
| 14184 |
+
"eval_samples_per_second": 1.623,
|
| 14185 |
+
"eval_steps_per_second": 1.623,
|
| 14186 |
+
"step": 2040
|
| 14187 |
+
},
|
| 14188 |
+
{
|
| 14189 |
+
"epoch": 510.25,
|
| 14190 |
+
"learning_rate": 7.87280701754386e-06,
|
| 14191 |
+
"loss": 0.007,
|
| 14192 |
+
"step": 2041
|
| 14193 |
+
},
|
| 14194 |
+
{
|
| 14195 |
+
"epoch": 510.5,
|
| 14196 |
+
"learning_rate": 7.850877192982456e-06,
|
| 14197 |
+
"loss": 0.0066,
|
| 14198 |
+
"step": 2042
|
| 14199 |
+
},
|
| 14200 |
+
{
|
| 14201 |
+
"epoch": 510.75,
|
| 14202 |
+
"learning_rate": 7.828947368421052e-06,
|
| 14203 |
+
"loss": 0.0065,
|
| 14204 |
+
"step": 2043
|
| 14205 |
+
},
|
| 14206 |
+
{
|
| 14207 |
+
"epoch": 511.0,
|
| 14208 |
+
"learning_rate": 7.80701754385965e-06,
|
| 14209 |
+
"loss": 0.008,
|
| 14210 |
+
"step": 2044
|
| 14211 |
+
},
|
| 14212 |
+
{
|
| 14213 |
+
"epoch": 511.25,
|
| 14214 |
+
"learning_rate": 7.785087719298246e-06,
|
| 14215 |
+
"loss": 0.0064,
|
| 14216 |
+
"step": 2045
|
| 14217 |
+
},
|
| 14218 |
+
{
|
| 14219 |
+
"epoch": 511.5,
|
| 14220 |
+
"learning_rate": 7.763157894736843e-06,
|
| 14221 |
+
"loss": 0.0073,
|
| 14222 |
+
"step": 2046
|
| 14223 |
+
},
|
| 14224 |
+
{
|
| 14225 |
+
"epoch": 511.75,
|
| 14226 |
+
"learning_rate": 7.74122807017544e-06,
|
| 14227 |
+
"loss": 0.007,
|
| 14228 |
+
"step": 2047
|
| 14229 |
+
},
|
| 14230 |
+
{
|
| 14231 |
+
"epoch": 512.0,
|
| 14232 |
+
"learning_rate": 7.719298245614036e-06,
|
| 14233 |
+
"loss": 0.0076,
|
| 14234 |
+
"step": 2048
|
| 14235 |
+
},
|
| 14236 |
+
{
|
| 14237 |
+
"epoch": 512.25,
|
| 14238 |
+
"learning_rate": 7.697368421052632e-06,
|
| 14239 |
+
"loss": 0.0069,
|
| 14240 |
+
"step": 2049
|
| 14241 |
+
},
|
| 14242 |
+
{
|
| 14243 |
+
"epoch": 512.5,
|
| 14244 |
+
"learning_rate": 7.675438596491228e-06,
|
| 14245 |
+
"loss": 0.0071,
|
| 14246 |
+
"step": 2050
|
| 14247 |
+
},
|
| 14248 |
+
{
|
| 14249 |
+
"epoch": 512.75,
|
| 14250 |
+
"learning_rate": 7.653508771929825e-06,
|
| 14251 |
+
"loss": 0.0062,
|
| 14252 |
+
"step": 2051
|
| 14253 |
+
},
|
| 14254 |
+
{
|
| 14255 |
+
"epoch": 513.0,
|
| 14256 |
+
"learning_rate": 7.631578947368421e-06,
|
| 14257 |
+
"loss": 0.0072,
|
| 14258 |
+
"step": 2052
|
| 14259 |
+
},
|
| 14260 |
+
{
|
| 14261 |
+
"epoch": 513.25,
|
| 14262 |
+
"learning_rate": 7.609649122807018e-06,
|
| 14263 |
+
"loss": 0.0068,
|
| 14264 |
+
"step": 2053
|
| 14265 |
+
},
|
| 14266 |
+
{
|
| 14267 |
+
"epoch": 513.5,
|
| 14268 |
+
"learning_rate": 7.587719298245615e-06,
|
| 14269 |
+
"loss": 0.0079,
|
| 14270 |
+
"step": 2054
|
| 14271 |
+
},
|
| 14272 |
+
{
|
| 14273 |
+
"epoch": 513.75,
|
| 14274 |
+
"learning_rate": 7.565789473684211e-06,
|
| 14275 |
+
"loss": 0.0072,
|
| 14276 |
+
"step": 2055
|
| 14277 |
+
},
|
| 14278 |
+
{
|
| 14279 |
+
"epoch": 514.0,
|
| 14280 |
+
"learning_rate": 7.5438596491228074e-06,
|
| 14281 |
+
"loss": 0.0065,
|
| 14282 |
+
"step": 2056
|
| 14283 |
+
},
|
| 14284 |
+
{
|
| 14285 |
+
"epoch": 514.25,
|
| 14286 |
+
"learning_rate": 7.521929824561404e-06,
|
| 14287 |
+
"loss": 0.0065,
|
| 14288 |
+
"step": 2057
|
| 14289 |
+
},
|
| 14290 |
+
{
|
| 14291 |
+
"epoch": 514.5,
|
| 14292 |
+
"learning_rate": 7.5e-06,
|
| 14293 |
+
"loss": 0.0071,
|
| 14294 |
+
"step": 2058
|
| 14295 |
+
},
|
| 14296 |
+
{
|
| 14297 |
+
"epoch": 514.75,
|
| 14298 |
+
"learning_rate": 7.4780701754385966e-06,
|
| 14299 |
+
"loss": 0.0073,
|
| 14300 |
+
"step": 2059
|
| 14301 |
+
},
|
| 14302 |
+
{
|
| 14303 |
+
"epoch": 515.0,
|
| 14304 |
+
"learning_rate": 7.456140350877193e-06,
|
| 14305 |
+
"loss": 0.0066,
|
| 14306 |
+
"step": 2060
|
| 14307 |
+
},
|
| 14308 |
+
{
|
| 14309 |
+
"epoch": 515.0,
|
| 14310 |
+
"eval_accuracy_ELSE": 0.9968725313017412,
|
| 14311 |
+
"eval_accuracy_road": 0.9952440621974141,
|
| 14312 |
+
"eval_accuracy_sidewalk": 0.9021402421852999,
|
| 14313 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14314 |
+
"eval_iou_ELSE": 0.9937216828478964,
|
| 14315 |
+
"eval_iou_road": 0.9877474880230636,
|
| 14316 |
+
"eval_iou_sidewalk": 0.844137022397892,
|
| 14317 |
+
"eval_iou_unlabeled": NaN,
|
| 14318 |
+
"eval_loss": 0.02251315861940384,
|
| 14319 |
+
"eval_mean_accuracy": 0.9647522785614852,
|
| 14320 |
+
"eval_mean_iou": 0.9418687310896173,
|
| 14321 |
+
"eval_overall_accuracy": 0.9938697814941406,
|
| 14322 |
+
"eval_runtime": 0.7112,
|
| 14323 |
+
"eval_samples_per_second": 1.406,
|
| 14324 |
+
"eval_steps_per_second": 1.406,
|
| 14325 |
+
"step": 2060
|
| 14326 |
+
},
|
| 14327 |
+
{
|
| 14328 |
+
"epoch": 515.25,
|
| 14329 |
+
"learning_rate": 7.434210526315789e-06,
|
| 14330 |
+
"loss": 0.0071,
|
| 14331 |
+
"step": 2061
|
| 14332 |
+
},
|
| 14333 |
+
{
|
| 14334 |
+
"epoch": 515.5,
|
| 14335 |
+
"learning_rate": 7.412280701754386e-06,
|
| 14336 |
+
"loss": 0.0069,
|
| 14337 |
+
"step": 2062
|
| 14338 |
+
},
|
| 14339 |
+
{
|
| 14340 |
+
"epoch": 515.75,
|
| 14341 |
+
"learning_rate": 7.390350877192982e-06,
|
| 14342 |
+
"loss": 0.0065,
|
| 14343 |
+
"step": 2063
|
| 14344 |
+
},
|
| 14345 |
+
{
|
| 14346 |
+
"epoch": 516.0,
|
| 14347 |
+
"learning_rate": 7.3684210526315784e-06,
|
| 14348 |
+
"loss": 0.0069,
|
| 14349 |
+
"step": 2064
|
| 14350 |
+
},
|
| 14351 |
+
{
|
| 14352 |
+
"epoch": 516.25,
|
| 14353 |
+
"learning_rate": 7.3464912280701765e-06,
|
| 14354 |
+
"loss": 0.0068,
|
| 14355 |
+
"step": 2065
|
| 14356 |
+
},
|
| 14357 |
+
{
|
| 14358 |
+
"epoch": 516.5,
|
| 14359 |
+
"learning_rate": 7.324561403508773e-06,
|
| 14360 |
+
"loss": 0.0073,
|
| 14361 |
+
"step": 2066
|
| 14362 |
+
},
|
| 14363 |
+
{
|
| 14364 |
+
"epoch": 516.75,
|
| 14365 |
+
"learning_rate": 7.302631578947369e-06,
|
| 14366 |
+
"loss": 0.0072,
|
| 14367 |
+
"step": 2067
|
| 14368 |
+
},
|
| 14369 |
+
{
|
| 14370 |
+
"epoch": 517.0,
|
| 14371 |
+
"learning_rate": 7.280701754385966e-06,
|
| 14372 |
+
"loss": 0.0069,
|
| 14373 |
+
"step": 2068
|
| 14374 |
+
},
|
| 14375 |
+
{
|
| 14376 |
+
"epoch": 517.25,
|
| 14377 |
+
"learning_rate": 7.258771929824562e-06,
|
| 14378 |
+
"loss": 0.0079,
|
| 14379 |
+
"step": 2069
|
| 14380 |
+
},
|
| 14381 |
+
{
|
| 14382 |
+
"epoch": 517.5,
|
| 14383 |
+
"learning_rate": 7.236842105263158e-06,
|
| 14384 |
+
"loss": 0.0065,
|
| 14385 |
+
"step": 2070
|
| 14386 |
+
},
|
| 14387 |
+
{
|
| 14388 |
+
"epoch": 517.75,
|
| 14389 |
+
"learning_rate": 7.214912280701755e-06,
|
| 14390 |
+
"loss": 0.0068,
|
| 14391 |
+
"step": 2071
|
| 14392 |
+
},
|
| 14393 |
+
{
|
| 14394 |
+
"epoch": 518.0,
|
| 14395 |
+
"learning_rate": 7.192982456140351e-06,
|
| 14396 |
+
"loss": 0.0062,
|
| 14397 |
+
"step": 2072
|
| 14398 |
+
},
|
| 14399 |
+
{
|
| 14400 |
+
"epoch": 518.25,
|
| 14401 |
+
"learning_rate": 7.1710526315789475e-06,
|
| 14402 |
+
"loss": 0.0067,
|
| 14403 |
+
"step": 2073
|
| 14404 |
+
},
|
| 14405 |
+
{
|
| 14406 |
+
"epoch": 518.5,
|
| 14407 |
+
"learning_rate": 7.149122807017544e-06,
|
| 14408 |
+
"loss": 0.0072,
|
| 14409 |
+
"step": 2074
|
| 14410 |
+
},
|
| 14411 |
+
{
|
| 14412 |
+
"epoch": 518.75,
|
| 14413 |
+
"learning_rate": 7.12719298245614e-06,
|
| 14414 |
+
"loss": 0.0062,
|
| 14415 |
+
"step": 2075
|
| 14416 |
+
},
|
| 14417 |
+
{
|
| 14418 |
+
"epoch": 519.0,
|
| 14419 |
+
"learning_rate": 7.1052631578947375e-06,
|
| 14420 |
+
"loss": 0.007,
|
| 14421 |
+
"step": 2076
|
| 14422 |
+
},
|
| 14423 |
+
{
|
| 14424 |
+
"epoch": 519.25,
|
| 14425 |
+
"learning_rate": 7.083333333333334e-06,
|
| 14426 |
+
"loss": 0.0064,
|
| 14427 |
+
"step": 2077
|
| 14428 |
+
},
|
| 14429 |
+
{
|
| 14430 |
+
"epoch": 519.5,
|
| 14431 |
+
"learning_rate": 7.06140350877193e-06,
|
| 14432 |
+
"loss": 0.0068,
|
| 14433 |
+
"step": 2078
|
| 14434 |
+
},
|
| 14435 |
+
{
|
| 14436 |
+
"epoch": 519.75,
|
| 14437 |
+
"learning_rate": 7.039473684210527e-06,
|
| 14438 |
+
"loss": 0.0069,
|
| 14439 |
+
"step": 2079
|
| 14440 |
+
},
|
| 14441 |
+
{
|
| 14442 |
+
"epoch": 520.0,
|
| 14443 |
+
"learning_rate": 7.017543859649123e-06,
|
| 14444 |
+
"loss": 0.0089,
|
| 14445 |
+
"step": 2080
|
| 14446 |
+
},
|
| 14447 |
+
{
|
| 14448 |
+
"epoch": 520.0,
|
| 14449 |
+
"eval_accuracy_ELSE": 0.997202592877163,
|
| 14450 |
+
"eval_accuracy_road": 0.9941618727573048,
|
| 14451 |
+
"eval_accuracy_sidewalk": 0.9059419881723458,
|
| 14452 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14453 |
+
"eval_iou_ELSE": 0.9937344642212025,
|
| 14454 |
+
"eval_iou_road": 0.9875526889020906,
|
| 14455 |
+
"eval_iou_sidewalk": 0.8485887628594039,
|
| 14456 |
+
"eval_iou_unlabeled": NaN,
|
| 14457 |
+
"eval_loss": 0.022204779088497162,
|
| 14458 |
+
"eval_mean_accuracy": 0.9657688179356047,
|
| 14459 |
+
"eval_mean_iou": 0.9432919719942324,
|
| 14460 |
+
"eval_overall_accuracy": 0.9939155578613281,
|
| 14461 |
+
"eval_runtime": 0.6058,
|
| 14462 |
+
"eval_samples_per_second": 1.651,
|
| 14463 |
+
"eval_steps_per_second": 1.651,
|
| 14464 |
+
"step": 2080
|
| 14465 |
+
},
|
| 14466 |
+
{
|
| 14467 |
+
"epoch": 520.25,
|
| 14468 |
+
"learning_rate": 6.995614035087719e-06,
|
| 14469 |
+
"loss": 0.0066,
|
| 14470 |
+
"step": 2081
|
| 14471 |
+
},
|
| 14472 |
+
{
|
| 14473 |
+
"epoch": 520.5,
|
| 14474 |
+
"learning_rate": 6.973684210526316e-06,
|
| 14475 |
+
"loss": 0.0064,
|
| 14476 |
+
"step": 2082
|
| 14477 |
+
},
|
| 14478 |
+
{
|
| 14479 |
+
"epoch": 520.75,
|
| 14480 |
+
"learning_rate": 6.951754385964912e-06,
|
| 14481 |
+
"loss": 0.0073,
|
| 14482 |
+
"step": 2083
|
| 14483 |
+
},
|
| 14484 |
+
{
|
| 14485 |
+
"epoch": 521.0,
|
| 14486 |
+
"learning_rate": 6.9298245614035085e-06,
|
| 14487 |
+
"loss": 0.0069,
|
| 14488 |
+
"step": 2084
|
| 14489 |
+
},
|
| 14490 |
+
{
|
| 14491 |
+
"epoch": 521.25,
|
| 14492 |
+
"learning_rate": 6.9078947368421065e-06,
|
| 14493 |
+
"loss": 0.0074,
|
| 14494 |
+
"step": 2085
|
| 14495 |
+
},
|
| 14496 |
+
{
|
| 14497 |
+
"epoch": 521.5,
|
| 14498 |
+
"learning_rate": 6.885964912280703e-06,
|
| 14499 |
+
"loss": 0.007,
|
| 14500 |
+
"step": 2086
|
| 14501 |
+
},
|
| 14502 |
+
{
|
| 14503 |
+
"epoch": 521.75,
|
| 14504 |
+
"learning_rate": 6.864035087719299e-06,
|
| 14505 |
+
"loss": 0.0066,
|
| 14506 |
+
"step": 2087
|
| 14507 |
+
},
|
| 14508 |
+
{
|
| 14509 |
+
"epoch": 522.0,
|
| 14510 |
+
"learning_rate": 6.842105263157896e-06,
|
| 14511 |
+
"loss": 0.0064,
|
| 14512 |
+
"step": 2088
|
| 14513 |
+
},
|
| 14514 |
+
{
|
| 14515 |
+
"epoch": 522.25,
|
| 14516 |
+
"learning_rate": 6.820175438596492e-06,
|
| 14517 |
+
"loss": 0.0072,
|
| 14518 |
+
"step": 2089
|
| 14519 |
+
},
|
| 14520 |
+
{
|
| 14521 |
+
"epoch": 522.5,
|
| 14522 |
+
"learning_rate": 6.798245614035088e-06,
|
| 14523 |
+
"loss": 0.0067,
|
| 14524 |
+
"step": 2090
|
| 14525 |
+
},
|
| 14526 |
+
{
|
| 14527 |
+
"epoch": 522.75,
|
| 14528 |
+
"learning_rate": 6.776315789473685e-06,
|
| 14529 |
+
"loss": 0.0065,
|
| 14530 |
+
"step": 2091
|
| 14531 |
+
},
|
| 14532 |
+
{
|
| 14533 |
+
"epoch": 523.0,
|
| 14534 |
+
"learning_rate": 6.754385964912281e-06,
|
| 14535 |
+
"loss": 0.0075,
|
| 14536 |
+
"step": 2092
|
| 14537 |
+
},
|
| 14538 |
+
{
|
| 14539 |
+
"epoch": 523.25,
|
| 14540 |
+
"learning_rate": 6.7324561403508775e-06,
|
| 14541 |
+
"loss": 0.0065,
|
| 14542 |
+
"step": 2093
|
| 14543 |
+
},
|
| 14544 |
+
{
|
| 14545 |
+
"epoch": 523.5,
|
| 14546 |
+
"learning_rate": 6.710526315789474e-06,
|
| 14547 |
+
"loss": 0.0066,
|
| 14548 |
+
"step": 2094
|
| 14549 |
+
},
|
| 14550 |
+
{
|
| 14551 |
+
"epoch": 523.75,
|
| 14552 |
+
"learning_rate": 6.68859649122807e-06,
|
| 14553 |
+
"loss": 0.0072,
|
| 14554 |
+
"step": 2095
|
| 14555 |
+
},
|
| 14556 |
+
{
|
| 14557 |
+
"epoch": 524.0,
|
| 14558 |
+
"learning_rate": 6.666666666666667e-06,
|
| 14559 |
+
"loss": 0.0065,
|
| 14560 |
+
"step": 2096
|
| 14561 |
+
},
|
| 14562 |
+
{
|
| 14563 |
+
"epoch": 524.25,
|
| 14564 |
+
"learning_rate": 6.644736842105263e-06,
|
| 14565 |
+
"loss": 0.0067,
|
| 14566 |
+
"step": 2097
|
| 14567 |
+
},
|
| 14568 |
+
{
|
| 14569 |
+
"epoch": 524.5,
|
| 14570 |
+
"learning_rate": 6.622807017543859e-06,
|
| 14571 |
+
"loss": 0.0092,
|
| 14572 |
+
"step": 2098
|
| 14573 |
+
},
|
| 14574 |
+
{
|
| 14575 |
+
"epoch": 524.75,
|
| 14576 |
+
"learning_rate": 6.600877192982456e-06,
|
| 14577 |
+
"loss": 0.0064,
|
| 14578 |
+
"step": 2099
|
| 14579 |
+
},
|
| 14580 |
+
{
|
| 14581 |
+
"epoch": 525.0,
|
| 14582 |
+
"learning_rate": 6.578947368421053e-06,
|
| 14583 |
+
"loss": 0.0066,
|
| 14584 |
+
"step": 2100
|
| 14585 |
+
},
|
| 14586 |
+
{
|
| 14587 |
+
"epoch": 525.0,
|
| 14588 |
+
"eval_accuracy_ELSE": 0.9970186241302066,
|
| 14589 |
+
"eval_accuracy_road": 0.9940052400751836,
|
| 14590 |
+
"eval_accuracy_sidewalk": 0.9074908476485497,
|
| 14591 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14592 |
+
"eval_iou_ELSE": 0.993636858765227,
|
| 14593 |
+
"eval_iou_road": 0.9875367813490267,
|
| 14594 |
+
"eval_iou_sidewalk": 0.8433656110965716,
|
| 14595 |
+
"eval_iou_unlabeled": NaN,
|
| 14596 |
+
"eval_loss": 0.022893136367201805,
|
| 14597 |
+
"eval_mean_accuracy": 0.96617157061798,
|
| 14598 |
+
"eval_mean_iou": 0.9415130837369418,
|
| 14599 |
+
"eval_overall_accuracy": 0.9937858581542969,
|
| 14600 |
+
"eval_runtime": 0.5997,
|
| 14601 |
+
"eval_samples_per_second": 1.667,
|
| 14602 |
+
"eval_steps_per_second": 1.667,
|
| 14603 |
+
"step": 2100
|
| 14604 |
+
},
|
| 14605 |
+
{
|
| 14606 |
+
"epoch": 525.25,
|
| 14607 |
+
"learning_rate": 6.557017543859649e-06,
|
| 14608 |
+
"loss": 0.0069,
|
| 14609 |
+
"step": 2101
|
| 14610 |
+
},
|
| 14611 |
+
{
|
| 14612 |
+
"epoch": 525.5,
|
| 14613 |
+
"learning_rate": 6.535087719298246e-06,
|
| 14614 |
+
"loss": 0.0063,
|
| 14615 |
+
"step": 2102
|
| 14616 |
+
},
|
| 14617 |
+
{
|
| 14618 |
+
"epoch": 525.75,
|
| 14619 |
+
"learning_rate": 6.513157894736842e-06,
|
| 14620 |
+
"loss": 0.0076,
|
| 14621 |
+
"step": 2103
|
| 14622 |
+
},
|
| 14623 |
+
{
|
| 14624 |
+
"epoch": 526.0,
|
| 14625 |
+
"learning_rate": 6.4912280701754385e-06,
|
| 14626 |
+
"loss": 0.0063,
|
| 14627 |
+
"step": 2104
|
| 14628 |
+
},
|
| 14629 |
+
{
|
| 14630 |
+
"epoch": 526.25,
|
| 14631 |
+
"learning_rate": 6.469298245614036e-06,
|
| 14632 |
+
"loss": 0.0063,
|
| 14633 |
+
"step": 2105
|
| 14634 |
+
},
|
| 14635 |
+
{
|
| 14636 |
+
"epoch": 526.5,
|
| 14637 |
+
"learning_rate": 6.447368421052632e-06,
|
| 14638 |
+
"loss": 0.0067,
|
| 14639 |
+
"step": 2106
|
| 14640 |
+
},
|
| 14641 |
+
{
|
| 14642 |
+
"epoch": 526.75,
|
| 14643 |
+
"learning_rate": 6.425438596491229e-06,
|
| 14644 |
+
"loss": 0.0071,
|
| 14645 |
+
"step": 2107
|
| 14646 |
+
},
|
| 14647 |
+
{
|
| 14648 |
+
"epoch": 527.0,
|
| 14649 |
+
"learning_rate": 6.403508771929826e-06,
|
| 14650 |
+
"loss": 0.0067,
|
| 14651 |
+
"step": 2108
|
| 14652 |
+
},
|
| 14653 |
+
{
|
| 14654 |
+
"epoch": 527.25,
|
| 14655 |
+
"learning_rate": 6.381578947368422e-06,
|
| 14656 |
+
"loss": 0.0064,
|
| 14657 |
+
"step": 2109
|
| 14658 |
+
},
|
| 14659 |
+
{
|
| 14660 |
+
"epoch": 527.5,
|
| 14661 |
+
"learning_rate": 6.3596491228070184e-06,
|
| 14662 |
+
"loss": 0.0071,
|
| 14663 |
+
"step": 2110
|
| 14664 |
+
},
|
| 14665 |
+
{
|
| 14666 |
+
"epoch": 527.75,
|
| 14667 |
+
"learning_rate": 6.337719298245615e-06,
|
| 14668 |
+
"loss": 0.0066,
|
| 14669 |
+
"step": 2111
|
| 14670 |
+
},
|
| 14671 |
+
{
|
| 14672 |
+
"epoch": 528.0,
|
| 14673 |
+
"learning_rate": 6.315789473684211e-06,
|
| 14674 |
+
"loss": 0.0071,
|
| 14675 |
+
"step": 2112
|
| 14676 |
+
},
|
| 14677 |
+
{
|
| 14678 |
+
"epoch": 528.25,
|
| 14679 |
+
"learning_rate": 6.2938596491228076e-06,
|
| 14680 |
+
"loss": 0.0075,
|
| 14681 |
+
"step": 2113
|
| 14682 |
+
},
|
| 14683 |
+
{
|
| 14684 |
+
"epoch": 528.5,
|
| 14685 |
+
"learning_rate": 6.271929824561404e-06,
|
| 14686 |
+
"loss": 0.0067,
|
| 14687 |
+
"step": 2114
|
| 14688 |
+
},
|
| 14689 |
+
{
|
| 14690 |
+
"epoch": 528.75,
|
| 14691 |
+
"learning_rate": 6.25e-06,
|
| 14692 |
+
"loss": 0.0067,
|
| 14693 |
+
"step": 2115
|
| 14694 |
+
},
|
| 14695 |
+
{
|
| 14696 |
+
"epoch": 529.0,
|
| 14697 |
+
"learning_rate": 6.228070175438597e-06,
|
| 14698 |
+
"loss": 0.0068,
|
| 14699 |
+
"step": 2116
|
| 14700 |
+
},
|
| 14701 |
+
{
|
| 14702 |
+
"epoch": 529.25,
|
| 14703 |
+
"learning_rate": 6.206140350877193e-06,
|
| 14704 |
+
"loss": 0.0072,
|
| 14705 |
+
"step": 2117
|
| 14706 |
+
},
|
| 14707 |
+
{
|
| 14708 |
+
"epoch": 529.5,
|
| 14709 |
+
"learning_rate": 6.184210526315789e-06,
|
| 14710 |
+
"loss": 0.006,
|
| 14711 |
+
"step": 2118
|
| 14712 |
+
},
|
| 14713 |
+
{
|
| 14714 |
+
"epoch": 529.75,
|
| 14715 |
+
"learning_rate": 6.162280701754386e-06,
|
| 14716 |
+
"loss": 0.0069,
|
| 14717 |
+
"step": 2119
|
| 14718 |
+
},
|
| 14719 |
+
{
|
| 14720 |
+
"epoch": 530.0,
|
| 14721 |
+
"learning_rate": 6.140350877192982e-06,
|
| 14722 |
+
"loss": 0.0075,
|
| 14723 |
+
"step": 2120
|
| 14724 |
+
},
|
| 14725 |
+
{
|
| 14726 |
+
"epoch": 530.0,
|
| 14727 |
+
"eval_accuracy_ELSE": 0.9968562987652451,
|
| 14728 |
+
"eval_accuracy_road": 0.9940337187446603,
|
| 14729 |
+
"eval_accuracy_sidewalk": 0.9067868206139116,
|
| 14730 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14731 |
+
"eval_iou_ELSE": 0.993480441323972,
|
| 14732 |
+
"eval_iou_road": 0.9874812572495544,
|
| 14733 |
+
"eval_iou_sidewalk": 0.8396349413298566,
|
| 14734 |
+
"eval_iou_unlabeled": NaN,
|
| 14735 |
+
"eval_loss": 0.023344410583376884,
|
| 14736 |
+
"eval_mean_accuracy": 0.9658922793746058,
|
| 14737 |
+
"eval_mean_iou": 0.9401988799677943,
|
| 14738 |
+
"eval_overall_accuracy": 0.9936599731445312,
|
| 14739 |
+
"eval_runtime": 0.7035,
|
| 14740 |
+
"eval_samples_per_second": 1.421,
|
| 14741 |
+
"eval_steps_per_second": 1.421,
|
| 14742 |
+
"step": 2120
|
| 14743 |
+
},
|
| 14744 |
+
{
|
| 14745 |
+
"epoch": 530.25,
|
| 14746 |
+
"learning_rate": 6.118421052631579e-06,
|
| 14747 |
+
"loss": 0.0073,
|
| 14748 |
+
"step": 2121
|
| 14749 |
+
},
|
| 14750 |
+
{
|
| 14751 |
+
"epoch": 530.5,
|
| 14752 |
+
"learning_rate": 6.096491228070176e-06,
|
| 14753 |
+
"loss": 0.0061,
|
| 14754 |
+
"step": 2122
|
| 14755 |
+
},
|
| 14756 |
+
{
|
| 14757 |
+
"epoch": 530.75,
|
| 14758 |
+
"learning_rate": 6.074561403508772e-06,
|
| 14759 |
+
"loss": 0.0064,
|
| 14760 |
+
"step": 2123
|
| 14761 |
+
},
|
| 14762 |
+
{
|
| 14763 |
+
"epoch": 531.0,
|
| 14764 |
+
"learning_rate": 6.0526315789473685e-06,
|
| 14765 |
+
"loss": 0.007,
|
| 14766 |
+
"step": 2124
|
| 14767 |
+
},
|
| 14768 |
+
{
|
| 14769 |
+
"epoch": 531.25,
|
| 14770 |
+
"learning_rate": 6.030701754385965e-06,
|
| 14771 |
+
"loss": 0.006,
|
| 14772 |
+
"step": 2125
|
| 14773 |
+
},
|
| 14774 |
+
{
|
| 14775 |
+
"epoch": 531.5,
|
| 14776 |
+
"learning_rate": 6.008771929824561e-06,
|
| 14777 |
+
"loss": 0.0072,
|
| 14778 |
+
"step": 2126
|
| 14779 |
+
},
|
| 14780 |
+
{
|
| 14781 |
+
"epoch": 531.75,
|
| 14782 |
+
"learning_rate": 5.986842105263158e-06,
|
| 14783 |
+
"loss": 0.008,
|
| 14784 |
+
"step": 2127
|
| 14785 |
+
},
|
| 14786 |
+
{
|
| 14787 |
+
"epoch": 532.0,
|
| 14788 |
+
"learning_rate": 5.964912280701755e-06,
|
| 14789 |
+
"loss": 0.0071,
|
| 14790 |
+
"step": 2128
|
| 14791 |
+
},
|
| 14792 |
+
{
|
| 14793 |
+
"epoch": 532.25,
|
| 14794 |
+
"learning_rate": 5.942982456140351e-06,
|
| 14795 |
+
"loss": 0.0068,
|
| 14796 |
+
"step": 2129
|
| 14797 |
+
},
|
| 14798 |
+
{
|
| 14799 |
+
"epoch": 532.5,
|
| 14800 |
+
"learning_rate": 5.921052631578948e-06,
|
| 14801 |
+
"loss": 0.0067,
|
| 14802 |
+
"step": 2130
|
| 14803 |
+
},
|
| 14804 |
+
{
|
| 14805 |
+
"epoch": 532.75,
|
| 14806 |
+
"learning_rate": 5.899122807017545e-06,
|
| 14807 |
+
"loss": 0.0067,
|
| 14808 |
+
"step": 2131
|
| 14809 |
+
},
|
| 14810 |
+
{
|
| 14811 |
+
"epoch": 533.0,
|
| 14812 |
+
"learning_rate": 5.877192982456141e-06,
|
| 14813 |
+
"loss": 0.0065,
|
| 14814 |
+
"step": 2132
|
| 14815 |
+
},
|
| 14816 |
+
{
|
| 14817 |
+
"epoch": 533.25,
|
| 14818 |
+
"learning_rate": 5.855263157894738e-06,
|
| 14819 |
+
"loss": 0.0061,
|
| 14820 |
+
"step": 2133
|
| 14821 |
+
},
|
| 14822 |
+
{
|
| 14823 |
+
"epoch": 533.5,
|
| 14824 |
+
"learning_rate": 5.833333333333334e-06,
|
| 14825 |
+
"loss": 0.0069,
|
| 14826 |
+
"step": 2134
|
| 14827 |
+
},
|
| 14828 |
+
{
|
| 14829 |
+
"epoch": 533.75,
|
| 14830 |
+
"learning_rate": 5.81140350877193e-06,
|
| 14831 |
+
"loss": 0.0066,
|
| 14832 |
+
"step": 2135
|
| 14833 |
+
},
|
| 14834 |
+
{
|
| 14835 |
+
"epoch": 534.0,
|
| 14836 |
+
"learning_rate": 5.789473684210527e-06,
|
| 14837 |
+
"loss": 0.007,
|
| 14838 |
+
"step": 2136
|
| 14839 |
+
},
|
| 14840 |
+
{
|
| 14841 |
+
"epoch": 534.25,
|
| 14842 |
+
"learning_rate": 5.767543859649123e-06,
|
| 14843 |
+
"loss": 0.0073,
|
| 14844 |
+
"step": 2137
|
| 14845 |
+
},
|
| 14846 |
+
{
|
| 14847 |
+
"epoch": 534.5,
|
| 14848 |
+
"learning_rate": 5.7456140350877194e-06,
|
| 14849 |
+
"loss": 0.0068,
|
| 14850 |
+
"step": 2138
|
| 14851 |
+
},
|
| 14852 |
+
{
|
| 14853 |
+
"epoch": 534.75,
|
| 14854 |
+
"learning_rate": 5.723684210526316e-06,
|
| 14855 |
+
"loss": 0.0055,
|
| 14856 |
+
"step": 2139
|
| 14857 |
+
},
|
| 14858 |
+
{
|
| 14859 |
+
"epoch": 535.0,
|
| 14860 |
+
"learning_rate": 5.701754385964912e-06,
|
| 14861 |
+
"loss": 0.0075,
|
| 14862 |
+
"step": 2140
|
| 14863 |
+
},
|
| 14864 |
+
{
|
| 14865 |
+
"epoch": 535.0,
|
| 14866 |
+
"eval_accuracy_ELSE": 0.9969212289112297,
|
| 14867 |
+
"eval_accuracy_road": 0.9943612234436408,
|
| 14868 |
+
"eval_accuracy_sidewalk": 0.9053787665446353,
|
| 14869 |
+
"eval_accuracy_unlabeled": NaN,
|
| 14870 |
+
"eval_iou_ELSE": 0.993593373347786,
|
| 14871 |
+
"eval_iou_road": 0.9877088018557022,
|
| 14872 |
+
"eval_iou_sidewalk": 0.8408526219432457,
|
| 14873 |
+
"eval_iou_unlabeled": NaN,
|
| 14874 |
+
"eval_loss": 0.023249920457601547,
|
| 14875 |
+
"eval_mean_accuracy": 0.9655537396331685,
|
| 14876 |
+
"eval_mean_iou": 0.9407182657155779,
|
| 14877 |
+
"eval_overall_accuracy": 0.9937553405761719,
|
| 14878 |
+
"eval_runtime": 0.6056,
|
| 14879 |
+
"eval_samples_per_second": 1.651,
|
| 14880 |
+
"eval_steps_per_second": 1.651,
|
| 14881 |
+
"step": 2140
|
| 14882 |
+
},
|
| 14883 |
+
{
|
| 14884 |
+
"epoch": 535.25,
|
| 14885 |
+
"learning_rate": 5.679824561403509e-06,
|
| 14886 |
+
"loss": 0.0074,
|
| 14887 |
+
"step": 2141
|
| 14888 |
+
},
|
| 14889 |
+
{
|
| 14890 |
+
"epoch": 535.5,
|
| 14891 |
+
"learning_rate": 5.657894736842106e-06,
|
| 14892 |
+
"loss": 0.0069,
|
| 14893 |
+
"step": 2142
|
| 14894 |
+
},
|
| 14895 |
+
{
|
| 14896 |
+
"epoch": 535.75,
|
| 14897 |
+
"learning_rate": 5.635964912280702e-06,
|
| 14898 |
+
"loss": 0.0078,
|
| 14899 |
+
"step": 2143
|
| 14900 |
+
},
|
| 14901 |
+
{
|
| 14902 |
+
"epoch": 536.0,
|
| 14903 |
+
"learning_rate": 5.6140350877192985e-06,
|
| 14904 |
+
"loss": 0.0066,
|
| 14905 |
+
"step": 2144
|
| 14906 |
+
},
|
| 14907 |
+
{
|
| 14908 |
+
"epoch": 536.25,
|
| 14909 |
+
"learning_rate": 5.592105263157895e-06,
|
| 14910 |
+
"loss": 0.0065,
|
| 14911 |
+
"step": 2145
|
| 14912 |
+
},
|
| 14913 |
+
{
|
| 14914 |
+
"epoch": 536.5,
|
| 14915 |
+
"learning_rate": 5.570175438596491e-06,
|
| 14916 |
+
"loss": 0.0075,
|
| 14917 |
+
"step": 2146
|
| 14918 |
+
},
|
| 14919 |
+
{
|
| 14920 |
+
"epoch": 536.75,
|
| 14921 |
+
"learning_rate": 5.548245614035088e-06,
|
| 14922 |
+
"loss": 0.0072,
|
| 14923 |
+
"step": 2147
|
| 14924 |
+
},
|
| 14925 |
+
{
|
| 14926 |
+
"epoch": 537.0,
|
| 14927 |
+
"learning_rate": 5.526315789473684e-06,
|
| 14928 |
+
"loss": 0.0065,
|
| 14929 |
+
"step": 2148
|
| 14930 |
+
},
|
| 14931 |
+
{
|
| 14932 |
+
"epoch": 537.25,
|
| 14933 |
+
"learning_rate": 5.50438596491228e-06,
|
| 14934 |
+
"loss": 0.0068,
|
| 14935 |
+
"step": 2149
|
| 14936 |
+
},
|
| 14937 |
+
{
|
| 14938 |
+
"epoch": 537.5,
|
| 14939 |
+
"learning_rate": 5.482456140350877e-06,
|
| 14940 |
+
"loss": 0.007,
|
| 14941 |
+
"step": 2150
|
| 14942 |
+
},
|
| 14943 |
+
{
|
| 14944 |
+
"epoch": 537.75,
|
| 14945 |
+
"learning_rate": 5.460526315789474e-06,
|
| 14946 |
+
"loss": 0.0078,
|
| 14947 |
+
"step": 2151
|
| 14948 |
+
},
|
| 14949 |
+
{
|
| 14950 |
+
"epoch": 538.0,
|
| 14951 |
+
"learning_rate": 5.43859649122807e-06,
|
| 14952 |
+
"loss": 0.006,
|
| 14953 |
+
"step": 2152
|
| 14954 |
+
},
|
| 14955 |
+
{
|
| 14956 |
+
"epoch": 538.25,
|
| 14957 |
+
"learning_rate": 5.416666666666667e-06,
|
| 14958 |
+
"loss": 0.0066,
|
| 14959 |
+
"step": 2153
|
| 14960 |
+
},
|
| 14961 |
+
{
|
| 14962 |
+
"epoch": 538.5,
|
| 14963 |
+
"learning_rate": 5.394736842105263e-06,
|
| 14964 |
+
"loss": 0.0062,
|
| 14965 |
+
"step": 2154
|
| 14966 |
+
},
|
| 14967 |
+
{
|
| 14968 |
+
"epoch": 538.75,
|
| 14969 |
+
"learning_rate": 5.37280701754386e-06,
|
| 14970 |
+
"loss": 0.0071,
|
| 14971 |
+
"step": 2155
|
| 14972 |
+
},
|
| 14973 |
+
{
|
| 14974 |
+
"epoch": 539.0,
|
| 14975 |
+
"learning_rate": 5.350877192982457e-06,
|
| 14976 |
+
"loss": 0.0071,
|
| 14977 |
+
"step": 2156
|
| 14978 |
+
},
|
| 14979 |
+
{
|
| 14980 |
+
"epoch": 539.25,
|
| 14981 |
+
"learning_rate": 5.328947368421053e-06,
|
| 14982 |
+
"loss": 0.0064,
|
| 14983 |
+
"step": 2157
|
| 14984 |
+
},
|
| 14985 |
+
{
|
| 14986 |
+
"epoch": 539.5,
|
| 14987 |
+
"learning_rate": 5.3070175438596495e-06,
|
| 14988 |
+
"loss": 0.0069,
|
| 14989 |
+
"step": 2158
|
| 14990 |
+
},
|
| 14991 |
+
{
|
| 14992 |
+
"epoch": 539.75,
|
| 14993 |
+
"learning_rate": 5.285087719298246e-06,
|
| 14994 |
+
"loss": 0.0065,
|
| 14995 |
+
"step": 2159
|
| 14996 |
+
},
|
| 14997 |
+
{
|
| 14998 |
+
"epoch": 540.0,
|
| 14999 |
+
"learning_rate": 5.263157894736842e-06,
|
| 15000 |
+
"loss": 0.007,
|
| 15001 |
+
"step": 2160
|
| 15002 |
+
},
|
| 15003 |
+
{
|
| 15004 |
+
"epoch": 540.0,
|
| 15005 |
+
"eval_accuracy_ELSE": 0.9968779421472399,
|
| 15006 |
+
"eval_accuracy_road": 0.9945748134647149,
|
| 15007 |
+
"eval_accuracy_sidewalk": 0.9070684314277668,
|
| 15008 |
+
"eval_accuracy_unlabeled": NaN,
|
| 15009 |
+
"eval_iou_ELSE": 0.9935984554318751,
|
| 15010 |
+
"eval_iou_road": 0.9880048093924606,
|
| 15011 |
+
"eval_iou_sidewalk": 0.8428627502289677,
|
| 15012 |
+
"eval_iou_unlabeled": NaN,
|
| 15013 |
+
"eval_loss": 0.02291146293282509,
|
| 15014 |
+
"eval_mean_accuracy": 0.9661737290132405,
|
| 15015 |
+
"eval_mean_iou": 0.9414886716844345,
|
| 15016 |
+
"eval_overall_accuracy": 0.9938278198242188,
|
| 15017 |
+
"eval_runtime": 0.6091,
|
| 15018 |
+
"eval_samples_per_second": 1.642,
|
| 15019 |
+
"eval_steps_per_second": 1.642,
|
| 15020 |
+
"step": 2160
|
| 15021 |
+
},
|
| 15022 |
+
{
|
| 15023 |
+
"epoch": 540.25,
|
| 15024 |
+
"learning_rate": 5.2412280701754394e-06,
|
| 15025 |
+
"loss": 0.006,
|
| 15026 |
+
"step": 2161
|
| 15027 |
+
},
|
| 15028 |
+
{
|
| 15029 |
+
"epoch": 540.5,
|
| 15030 |
+
"learning_rate": 5.219298245614036e-06,
|
| 15031 |
+
"loss": 0.0066,
|
| 15032 |
+
"step": 2162
|
| 15033 |
+
},
|
| 15034 |
+
{
|
| 15035 |
+
"epoch": 540.75,
|
| 15036 |
+
"learning_rate": 5.197368421052632e-06,
|
| 15037 |
+
"loss": 0.0071,
|
| 15038 |
+
"step": 2163
|
| 15039 |
+
},
|
| 15040 |
+
{
|
| 15041 |
+
"epoch": 541.0,
|
| 15042 |
+
"learning_rate": 5.1754385964912286e-06,
|
| 15043 |
+
"loss": 0.008,
|
| 15044 |
+
"step": 2164
|
| 15045 |
+
},
|
| 15046 |
+
{
|
| 15047 |
+
"epoch": 541.25,
|
| 15048 |
+
"learning_rate": 5.153508771929825e-06,
|
| 15049 |
+
"loss": 0.0071,
|
| 15050 |
+
"step": 2165
|
| 15051 |
+
},
|
| 15052 |
+
{
|
| 15053 |
+
"epoch": 541.5,
|
| 15054 |
+
"learning_rate": 5.131578947368421e-06,
|
| 15055 |
+
"loss": 0.0066,
|
| 15056 |
+
"step": 2166
|
| 15057 |
+
},
|
| 15058 |
+
{
|
| 15059 |
+
"epoch": 541.75,
|
| 15060 |
+
"learning_rate": 5.109649122807018e-06,
|
| 15061 |
+
"loss": 0.0071,
|
| 15062 |
+
"step": 2167
|
| 15063 |
+
},
|
| 15064 |
+
{
|
| 15065 |
+
"epoch": 542.0,
|
| 15066 |
+
"learning_rate": 5.087719298245614e-06,
|
| 15067 |
+
"loss": 0.0068,
|
| 15068 |
+
"step": 2168
|
| 15069 |
+
},
|
| 15070 |
+
{
|
| 15071 |
+
"epoch": 542.25,
|
| 15072 |
+
"learning_rate": 5.0657894736842104e-06,
|
| 15073 |
+
"loss": 0.0066,
|
| 15074 |
+
"step": 2169
|
| 15075 |
+
},
|
| 15076 |
+
{
|
| 15077 |
+
"epoch": 542.5,
|
| 15078 |
+
"learning_rate": 5.043859649122807e-06,
|
| 15079 |
+
"loss": 0.0068,
|
| 15080 |
+
"step": 2170
|
| 15081 |
+
},
|
| 15082 |
+
{
|
| 15083 |
+
"epoch": 542.75,
|
| 15084 |
+
"learning_rate": 5.021929824561404e-06,
|
| 15085 |
+
"loss": 0.0069,
|
| 15086 |
+
"step": 2171
|
| 15087 |
+
},
|
| 15088 |
+
{
|
| 15089 |
+
"epoch": 543.0,
|
| 15090 |
+
"learning_rate": 5e-06,
|
| 15091 |
+
"loss": 0.0074,
|
| 15092 |
+
"step": 2172
|
| 15093 |
+
},
|
| 15094 |
+
{
|
| 15095 |
+
"epoch": 543.25,
|
| 15096 |
+
"learning_rate": 4.978070175438597e-06,
|
| 15097 |
+
"loss": 0.0066,
|
| 15098 |
+
"step": 2173
|
| 15099 |
+
},
|
| 15100 |
+
{
|
| 15101 |
+
"epoch": 543.5,
|
| 15102 |
+
"learning_rate": 4.956140350877193e-06,
|
| 15103 |
+
"loss": 0.0069,
|
| 15104 |
+
"step": 2174
|
| 15105 |
+
},
|
| 15106 |
+
{
|
| 15107 |
+
"epoch": 543.75,
|
| 15108 |
+
"learning_rate": 4.9342105263157895e-06,
|
| 15109 |
+
"loss": 0.0066,
|
| 15110 |
+
"step": 2175
|
| 15111 |
+
},
|
| 15112 |
+
{
|
| 15113 |
+
"epoch": 544.0,
|
| 15114 |
+
"learning_rate": 4.912280701754386e-06,
|
| 15115 |
+
"loss": 0.0064,
|
| 15116 |
+
"step": 2176
|
| 15117 |
+
},
|
| 15118 |
+
{
|
| 15119 |
+
"epoch": 544.25,
|
| 15120 |
+
"learning_rate": 4.890350877192982e-06,
|
| 15121 |
+
"loss": 0.0067,
|
| 15122 |
+
"step": 2177
|
| 15123 |
+
},
|
| 15124 |
+
{
|
| 15125 |
+
"epoch": 544.5,
|
| 15126 |
+
"learning_rate": 4.868421052631579e-06,
|
| 15127 |
+
"loss": 0.0061,
|
| 15128 |
+
"step": 2178
|
| 15129 |
+
},
|
| 15130 |
+
{
|
| 15131 |
+
"epoch": 544.75,
|
| 15132 |
+
"learning_rate": 4.846491228070176e-06,
|
| 15133 |
+
"loss": 0.0072,
|
| 15134 |
+
"step": 2179
|
| 15135 |
+
},
|
| 15136 |
+
{
|
| 15137 |
+
"epoch": 545.0,
|
| 15138 |
+
"learning_rate": 4.824561403508772e-06,
|
| 15139 |
+
"loss": 0.0071,
|
| 15140 |
+
"step": 2180
|
| 15141 |
+
},
|
| 15142 |
+
{
|
| 15143 |
+
"epoch": 545.0,
|
| 15144 |
+
"eval_accuracy_ELSE": 0.9970023915937104,
|
| 15145 |
+
"eval_accuracy_road": 0.9949450361679102,
|
| 15146 |
+
"eval_accuracy_sidewalk": 0.905519571951563,
|
| 15147 |
+
"eval_accuracy_unlabeled": NaN,
|
| 15148 |
+
"eval_iou_ELSE": 0.9937868099152158,
|
| 15149 |
+
"eval_iou_road": 0.9882747305592486,
|
| 15150 |
+
"eval_iou_sidewalk": 0.8450722733245729,
|
| 15151 |
+
"eval_iou_unlabeled": NaN,
|
| 15152 |
+
"eval_loss": 0.022428084164857864,
|
| 15153 |
+
"eval_mean_accuracy": 0.9658223332377278,
|
| 15154 |
+
"eval_mean_iou": 0.9423779379330125,
|
| 15155 |
+
"eval_overall_accuracy": 0.9939727783203125,
|
| 15156 |
+
"eval_runtime": 0.6452,
|
| 15157 |
+
"eval_samples_per_second": 1.55,
|
| 15158 |
+
"eval_steps_per_second": 1.55,
|
| 15159 |
+
"step": 2180
|
| 15160 |
+
},
|
| 15161 |
+
{
|
| 15162 |
+
"epoch": 545.25,
|
| 15163 |
+
"learning_rate": 4.802631578947369e-06,
|
| 15164 |
+
"loss": 0.0065,
|
| 15165 |
+
"step": 2181
|
| 15166 |
+
},
|
| 15167 |
+
{
|
| 15168 |
+
"epoch": 545.5,
|
| 15169 |
+
"learning_rate": 4.780701754385966e-06,
|
| 15170 |
+
"loss": 0.0066,
|
| 15171 |
+
"step": 2182
|
| 15172 |
+
},
|
| 15173 |
+
{
|
| 15174 |
+
"epoch": 545.75,
|
| 15175 |
+
"learning_rate": 4.758771929824562e-06,
|
| 15176 |
+
"loss": 0.007,
|
| 15177 |
+
"step": 2183
|
| 15178 |
+
},
|
| 15179 |
+
{
|
| 15180 |
+
"epoch": 546.0,
|
| 15181 |
+
"learning_rate": 4.736842105263159e-06,
|
| 15182 |
+
"loss": 0.0067,
|
| 15183 |
+
"step": 2184
|
| 15184 |
+
},
|
| 15185 |
+
{
|
| 15186 |
+
"epoch": 546.25,
|
| 15187 |
+
"learning_rate": 4.714912280701755e-06,
|
| 15188 |
+
"loss": 0.0066,
|
| 15189 |
+
"step": 2185
|
| 15190 |
+
},
|
| 15191 |
+
{
|
| 15192 |
+
"epoch": 546.5,
|
| 15193 |
+
"learning_rate": 4.692982456140351e-06,
|
| 15194 |
+
"loss": 0.0067,
|
| 15195 |
+
"step": 2186
|
| 15196 |
+
},
|
| 15197 |
+
{
|
| 15198 |
+
"epoch": 546.75,
|
| 15199 |
+
"learning_rate": 4.671052631578948e-06,
|
| 15200 |
+
"loss": 0.0068,
|
| 15201 |
+
"step": 2187
|
| 15202 |
+
},
|
| 15203 |
+
{
|
| 15204 |
+
"epoch": 547.0,
|
| 15205 |
+
"learning_rate": 4.649122807017544e-06,
|
| 15206 |
+
"loss": 0.0068,
|
| 15207 |
+
"step": 2188
|
| 15208 |
+
},
|
| 15209 |
+
{
|
| 15210 |
+
"epoch": 547.25,
|
| 15211 |
+
"learning_rate": 4.6271929824561405e-06,
|
| 15212 |
+
"loss": 0.0068,
|
| 15213 |
+
"step": 2189
|
| 15214 |
+
},
|
| 15215 |
+
{
|
| 15216 |
+
"epoch": 547.5,
|
| 15217 |
+
"learning_rate": 4.605263157894737e-06,
|
| 15218 |
+
"loss": 0.0067,
|
| 15219 |
+
"step": 2190
|
| 15220 |
+
},
|
| 15221 |
+
{
|
| 15222 |
+
"epoch": 547.75,
|
| 15223 |
+
"learning_rate": 4.583333333333333e-06,
|
| 15224 |
+
"loss": 0.0081,
|
| 15225 |
+
"step": 2191
|
| 15226 |
+
},
|
| 15227 |
+
{
|
| 15228 |
+
"epoch": 548.0,
|
| 15229 |
+
"learning_rate": 4.5614035087719304e-06,
|
| 15230 |
+
"loss": 0.0081,
|
| 15231 |
+
"step": 2192
|
| 15232 |
+
},
|
| 15233 |
+
{
|
| 15234 |
+
"epoch": 548.25,
|
| 15235 |
+
"learning_rate": 4.539473684210527e-06,
|
| 15236 |
+
"loss": 0.0072,
|
| 15237 |
+
"step": 2193
|
| 15238 |
+
},
|
| 15239 |
+
{
|
| 15240 |
+
"epoch": 548.5,
|
| 15241 |
+
"learning_rate": 4.517543859649123e-06,
|
| 15242 |
+
"loss": 0.0061,
|
| 15243 |
+
"step": 2194
|
| 15244 |
+
},
|
| 15245 |
+
{
|
| 15246 |
+
"epoch": 548.75,
|
| 15247 |
+
"learning_rate": 4.4956140350877196e-06,
|
| 15248 |
+
"loss": 0.0068,
|
| 15249 |
+
"step": 2195
|
| 15250 |
+
},
|
| 15251 |
+
{
|
| 15252 |
+
"epoch": 549.0,
|
| 15253 |
+
"learning_rate": 4.473684210526316e-06,
|
| 15254 |
+
"loss": 0.0077,
|
| 15255 |
+
"step": 2196
|
| 15256 |
+
},
|
| 15257 |
+
{
|
| 15258 |
+
"epoch": 549.25,
|
| 15259 |
+
"learning_rate": 4.451754385964912e-06,
|
| 15260 |
+
"loss": 0.0074,
|
| 15261 |
+
"step": 2197
|
| 15262 |
+
},
|
| 15263 |
+
{
|
| 15264 |
+
"epoch": 549.5,
|
| 15265 |
+
"learning_rate": 4.429824561403509e-06,
|
| 15266 |
+
"loss": 0.006,
|
| 15267 |
+
"step": 2198
|
| 15268 |
+
},
|
| 15269 |
+
{
|
| 15270 |
+
"epoch": 549.75,
|
| 15271 |
+
"learning_rate": 4.407894736842105e-06,
|
| 15272 |
+
"loss": 0.0073,
|
| 15273 |
+
"step": 2199
|
| 15274 |
+
},
|
| 15275 |
+
{
|
| 15276 |
+
"epoch": 550.0,
|
| 15277 |
+
"learning_rate": 4.3859649122807014e-06,
|
| 15278 |
+
"loss": 0.0069,
|
| 15279 |
+
"step": 2200
|
| 15280 |
+
},
|
| 15281 |
+
{
|
| 15282 |
+
"epoch": 550.0,
|
| 15283 |
+
"eval_accuracy_ELSE": 0.9971214301946822,
|
| 15284 |
+
"eval_accuracy_road": 0.9940764367488751,
|
| 15285 |
+
"eval_accuracy_sidewalk": 0.9076316530554773,
|
| 15286 |
+
"eval_accuracy_unlabeled": NaN,
|
| 15287 |
+
"eval_iou_ELSE": 0.9937178816589106,
|
| 15288 |
+
"eval_iou_road": 0.9878031525030422,
|
| 15289 |
+
"eval_iou_sidewalk": 0.8451553690835191,
|
| 15290 |
+
"eval_iou_unlabeled": NaN,
|
| 15291 |
+
"eval_loss": 0.02252400852739811,
|
| 15292 |
+
"eval_mean_accuracy": 0.966276506666345,
|
| 15293 |
+
"eval_mean_iou": 0.9422254677484906,
|
| 15294 |
+
"eval_overall_accuracy": 0.9938812255859375,
|
| 15295 |
+
"eval_runtime": 0.6237,
|
| 15296 |
+
"eval_samples_per_second": 1.603,
|
| 15297 |
+
"eval_steps_per_second": 1.603,
|
| 15298 |
+
"step": 2200
|
| 15299 |
+
},
|
| 15300 |
+
{
|
| 15301 |
+
"epoch": 550.25,
|
| 15302 |
+
"learning_rate": 4.364035087719298e-06,
|
| 15303 |
+
"loss": 0.0066,
|
| 15304 |
+
"step": 2201
|
| 15305 |
+
},
|
| 15306 |
+
{
|
| 15307 |
+
"epoch": 550.5,
|
| 15308 |
+
"learning_rate": 4.342105263157895e-06,
|
| 15309 |
+
"loss": 0.0066,
|
| 15310 |
+
"step": 2202
|
| 15311 |
+
},
|
| 15312 |
+
{
|
| 15313 |
+
"epoch": 550.75,
|
| 15314 |
+
"learning_rate": 4.320175438596491e-06,
|
| 15315 |
+
"loss": 0.0072,
|
| 15316 |
+
"step": 2203
|
| 15317 |
+
},
|
| 15318 |
+
{
|
| 15319 |
+
"epoch": 551.0,
|
| 15320 |
+
"learning_rate": 4.298245614035088e-06,
|
| 15321 |
+
"loss": 0.0071,
|
| 15322 |
+
"step": 2204
|
| 15323 |
+
},
|
| 15324 |
+
{
|
| 15325 |
+
"epoch": 551.25,
|
| 15326 |
+
"learning_rate": 4.276315789473684e-06,
|
| 15327 |
+
"loss": 0.0071,
|
| 15328 |
+
"step": 2205
|
| 15329 |
+
},
|
| 15330 |
+
{
|
| 15331 |
+
"epoch": 551.5,
|
| 15332 |
+
"learning_rate": 4.254385964912281e-06,
|
| 15333 |
+
"loss": 0.0078,
|
| 15334 |
+
"step": 2206
|
| 15335 |
+
},
|
| 15336 |
+
{
|
| 15337 |
+
"epoch": 551.75,
|
| 15338 |
+
"learning_rate": 4.232456140350878e-06,
|
| 15339 |
+
"loss": 0.0063,
|
| 15340 |
+
"step": 2207
|
| 15341 |
+
},
|
| 15342 |
+
{
|
| 15343 |
+
"epoch": 552.0,
|
| 15344 |
+
"learning_rate": 4.210526315789474e-06,
|
| 15345 |
+
"loss": 0.0062,
|
| 15346 |
+
"step": 2208
|
| 15347 |
+
},
|
| 15348 |
+
{
|
| 15349 |
+
"epoch": 552.25,
|
| 15350 |
+
"learning_rate": 4.1885964912280705e-06,
|
| 15351 |
+
"loss": 0.0077,
|
| 15352 |
+
"step": 2209
|
| 15353 |
+
},
|
| 15354 |
+
{
|
| 15355 |
+
"epoch": 552.5,
|
| 15356 |
+
"learning_rate": 4.166666666666667e-06,
|
| 15357 |
+
"loss": 0.0068,
|
| 15358 |
+
"step": 2210
|
| 15359 |
+
},
|
| 15360 |
+
{
|
| 15361 |
+
"epoch": 552.75,
|
| 15362 |
+
"learning_rate": 4.144736842105263e-06,
|
| 15363 |
+
"loss": 0.0072,
|
| 15364 |
+
"step": 2211
|
| 15365 |
+
},
|
| 15366 |
+
{
|
| 15367 |
+
"epoch": 553.0,
|
| 15368 |
+
"learning_rate": 4.1228070175438605e-06,
|
| 15369 |
+
"loss": 0.007,
|
| 15370 |
+
"step": 2212
|
| 15371 |
+
},
|
| 15372 |
+
{
|
| 15373 |
+
"epoch": 553.25,
|
| 15374 |
+
"learning_rate": 4.100877192982457e-06,
|
| 15375 |
+
"loss": 0.007,
|
| 15376 |
+
"step": 2213
|
| 15377 |
+
},
|
| 15378 |
+
{
|
| 15379 |
+
"epoch": 553.5,
|
| 15380 |
+
"learning_rate": 4.078947368421053e-06,
|
| 15381 |
+
"loss": 0.0065,
|
| 15382 |
+
"step": 2214
|
| 15383 |
+
},
|
| 15384 |
+
{
|
| 15385 |
+
"epoch": 553.75,
|
| 15386 |
+
"learning_rate": 4.05701754385965e-06,
|
| 15387 |
+
"loss": 0.0073,
|
| 15388 |
+
"step": 2215
|
| 15389 |
+
},
|
| 15390 |
+
{
|
| 15391 |
+
"epoch": 554.0,
|
| 15392 |
+
"learning_rate": 4.035087719298246e-06,
|
| 15393 |
+
"loss": 0.0061,
|
| 15394 |
+
"step": 2216
|
| 15395 |
+
},
|
| 15396 |
+
{
|
| 15397 |
+
"epoch": 554.25,
|
| 15398 |
+
"learning_rate": 4.013157894736842e-06,
|
| 15399 |
+
"loss": 0.0076,
|
| 15400 |
+
"step": 2217
|
| 15401 |
+
},
|
| 15402 |
+
{
|
| 15403 |
+
"epoch": 554.5,
|
| 15404 |
+
"learning_rate": 3.991228070175439e-06,
|
| 15405 |
+
"loss": 0.0069,
|
| 15406 |
+
"step": 2218
|
| 15407 |
+
},
|
| 15408 |
+
{
|
| 15409 |
+
"epoch": 554.75,
|
| 15410 |
+
"learning_rate": 3.969298245614035e-06,
|
| 15411 |
+
"loss": 0.007,
|
| 15412 |
+
"step": 2219
|
| 15413 |
+
},
|
| 15414 |
+
{
|
| 15415 |
+
"epoch": 555.0,
|
| 15416 |
+
"learning_rate": 3.9473684210526315e-06,
|
| 15417 |
+
"loss": 0.0067,
|
| 15418 |
+
"step": 2220
|
| 15419 |
+
},
|
| 15420 |
+
{
|
| 15421 |
+
"epoch": 555.0,
|
| 15422 |
+
"eval_accuracy_ELSE": 0.9968617096107438,
|
| 15423 |
+
"eval_accuracy_road": 0.9945890527994532,
|
| 15424 |
+
"eval_accuracy_sidewalk": 0.9069276260208392,
|
| 15425 |
+
"eval_accuracy_unlabeled": NaN,
|
| 15426 |
+
"eval_iou_ELSE": 0.9935876347594419,
|
| 15427 |
+
"eval_iou_road": 0.9879630546400939,
|
| 15428 |
+
"eval_iou_sidewalk": 0.842731911553055,
|
| 15429 |
+
"eval_iou_unlabeled": NaN,
|
| 15430 |
+
"eval_loss": 0.02233085222542286,
|
| 15431 |
+
"eval_mean_accuracy": 0.9661261294770122,
|
| 15432 |
+
"eval_mean_iou": 0.9414275336508636,
|
| 15433 |
+
"eval_overall_accuracy": 0.9938163757324219,
|
| 15434 |
+
"eval_runtime": 0.6378,
|
| 15435 |
+
"eval_samples_per_second": 1.568,
|
| 15436 |
+
"eval_steps_per_second": 1.568,
|
| 15437 |
+
"step": 2220
|
| 15438 |
+
},
|
| 15439 |
+
{
|
| 15440 |
+
"epoch": 555.25,
|
| 15441 |
+
"learning_rate": 3.925438596491228e-06,
|
| 15442 |
+
"loss": 0.0068,
|
| 15443 |
+
"step": 2221
|
| 15444 |
+
},
|
| 15445 |
+
{
|
| 15446 |
+
"epoch": 555.5,
|
| 15447 |
+
"learning_rate": 3.903508771929825e-06,
|
| 15448 |
+
"loss": 0.0074,
|
| 15449 |
+
"step": 2222
|
| 15450 |
+
},
|
| 15451 |
+
{
|
| 15452 |
+
"epoch": 555.75,
|
| 15453 |
+
"learning_rate": 3.8815789473684214e-06,
|
| 15454 |
+
"loss": 0.007,
|
| 15455 |
+
"step": 2223
|
| 15456 |
+
},
|
| 15457 |
+
{
|
| 15458 |
+
"epoch": 556.0,
|
| 15459 |
+
"learning_rate": 3.859649122807018e-06,
|
| 15460 |
+
"loss": 0.0066,
|
| 15461 |
+
"step": 2224
|
| 15462 |
+
},
|
| 15463 |
+
{
|
| 15464 |
+
"epoch": 556.25,
|
| 15465 |
+
"learning_rate": 3.837719298245614e-06,
|
| 15466 |
+
"loss": 0.007,
|
| 15467 |
+
"step": 2225
|
| 15468 |
+
},
|
| 15469 |
+
{
|
| 15470 |
+
"epoch": 556.5,
|
| 15471 |
+
"learning_rate": 3.8157894736842105e-06,
|
| 15472 |
+
"loss": 0.0063,
|
| 15473 |
+
"step": 2226
|
| 15474 |
+
},
|
| 15475 |
+
{
|
| 15476 |
+
"epoch": 556.75,
|
| 15477 |
+
"learning_rate": 3.7938596491228073e-06,
|
| 15478 |
+
"loss": 0.0062,
|
| 15479 |
+
"step": 2227
|
| 15480 |
+
},
|
| 15481 |
+
{
|
| 15482 |
+
"epoch": 557.0,
|
| 15483 |
+
"learning_rate": 3.7719298245614037e-06,
|
| 15484 |
+
"loss": 0.007,
|
| 15485 |
+
"step": 2228
|
| 15486 |
+
},
|
| 15487 |
+
{
|
| 15488 |
+
"epoch": 557.25,
|
| 15489 |
+
"learning_rate": 3.75e-06,
|
| 15490 |
+
"loss": 0.0071,
|
| 15491 |
+
"step": 2229
|
| 15492 |
+
},
|
| 15493 |
+
{
|
| 15494 |
+
"epoch": 557.5,
|
| 15495 |
+
"learning_rate": 3.7280701754385965e-06,
|
| 15496 |
+
"loss": 0.0074,
|
| 15497 |
+
"step": 2230
|
| 15498 |
+
},
|
| 15499 |
+
{
|
| 15500 |
+
"epoch": 557.75,
|
| 15501 |
+
"learning_rate": 3.706140350877193e-06,
|
| 15502 |
+
"loss": 0.0067,
|
| 15503 |
+
"step": 2231
|
| 15504 |
+
},
|
| 15505 |
+
{
|
| 15506 |
+
"epoch": 558.0,
|
| 15507 |
+
"learning_rate": 3.6842105263157892e-06,
|
| 15508 |
+
"loss": 0.0059,
|
| 15509 |
+
"step": 2232
|
| 15510 |
+
},
|
| 15511 |
+
{
|
| 15512 |
+
"epoch": 558.25,
|
| 15513 |
+
"learning_rate": 3.6622807017543864e-06,
|
| 15514 |
+
"loss": 0.007,
|
| 15515 |
+
"step": 2233
|
| 15516 |
+
},
|
| 15517 |
+
{
|
| 15518 |
+
"epoch": 558.5,
|
| 15519 |
+
"learning_rate": 3.640350877192983e-06,
|
| 15520 |
+
"loss": 0.0072,
|
| 15521 |
+
"step": 2234
|
| 15522 |
+
},
|
| 15523 |
+
{
|
| 15524 |
+
"epoch": 558.75,
|
| 15525 |
+
"learning_rate": 3.618421052631579e-06,
|
| 15526 |
+
"loss": 0.0074,
|
| 15527 |
+
"step": 2235
|
| 15528 |
+
},
|
| 15529 |
+
{
|
| 15530 |
+
"epoch": 559.0,
|
| 15531 |
+
"learning_rate": 3.5964912280701756e-06,
|
| 15532 |
+
"loss": 0.006,
|
| 15533 |
+
"step": 2236
|
| 15534 |
+
},
|
| 15535 |
+
{
|
| 15536 |
+
"epoch": 559.25,
|
| 15537 |
+
"learning_rate": 3.574561403508772e-06,
|
| 15538 |
+
"loss": 0.0069,
|
| 15539 |
+
"step": 2237
|
| 15540 |
+
},
|
| 15541 |
+
{
|
| 15542 |
+
"epoch": 559.5,
|
| 15543 |
+
"learning_rate": 3.5526315789473687e-06,
|
| 15544 |
+
"loss": 0.0066,
|
| 15545 |
+
"step": 2238
|
| 15546 |
+
},
|
| 15547 |
+
{
|
| 15548 |
+
"epoch": 559.75,
|
| 15549 |
+
"learning_rate": 3.530701754385965e-06,
|
| 15550 |
+
"loss": 0.0069,
|
| 15551 |
+
"step": 2239
|
| 15552 |
+
},
|
| 15553 |
+
{
|
| 15554 |
+
"epoch": 560.0,
|
| 15555 |
+
"learning_rate": 3.5087719298245615e-06,
|
| 15556 |
+
"loss": 0.0066,
|
| 15557 |
+
"step": 2240
|
| 15558 |
+
},
|
| 15559 |
+
{
|
| 15560 |
+
"epoch": 560.0,
|
| 15561 |
+
"eval_accuracy_ELSE": 0.9969320506022271,
|
| 15562 |
+
"eval_accuracy_road": 0.9948453608247423,
|
| 15563 |
+
"eval_accuracy_sidewalk": 0.9058011827654182,
|
| 15564 |
+
"eval_accuracy_unlabeled": NaN,
|
| 15565 |
+
"eval_iou_ELSE": 0.9937434940428355,
|
| 15566 |
+
"eval_iou_road": 0.9880080323556862,
|
| 15567 |
+
"eval_iou_sidewalk": 0.844114945545204,
|
| 15568 |
+
"eval_iou_unlabeled": NaN,
|
| 15569 |
+
"eval_loss": 0.02265537902712822,
|
| 15570 |
+
"eval_mean_accuracy": 0.9658595313974625,
|
| 15571 |
+
"eval_mean_iou": 0.9419554906479086,
|
| 15572 |
+
"eval_overall_accuracy": 0.9939041137695312,
|
| 15573 |
+
"eval_runtime": 0.632,
|
| 15574 |
+
"eval_samples_per_second": 1.582,
|
| 15575 |
+
"eval_steps_per_second": 1.582,
|
| 15576 |
+
"step": 2240
|
| 15577 |
}
|
| 15578 |
],
|
| 15579 |
"max_steps": 2400,
|
| 15580 |
"num_train_epochs": 600,
|
| 15581 |
+
"total_flos": 1.408292894980178e+19,
|
| 15582 |
"trial_name": null,
|
| 15583 |
"trial_params": null
|
| 15584 |
}
|
{checkpoint-1920 β checkpoint-2240}/training_args.bin
RENAMED
|
File without changes
|
pytorch_model.bin
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 338799561
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5fa2c3f05d0b10e1c3533945acbfb9eae36020ad3ea05f13ebff672020dca64c
|
| 3 |
size 338799561
|
runs/Feb25_17-51-35_robolidar/events.out.tfevents.1708905105.robolidar.3970122.0
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e3aa5b31897a17596e2a651879604ac1c5b596de919cf6ffd16ce66d61817cc3
|
| 3 |
+
size 456037
|