Coaster41 commited on
Commit
f2f459d
·
verified ·
1 Parent(s): d5fddc1

Upload folder using huggingface_hub

Browse files
README.md ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: transformers
3
+ tags:
4
+ - generated_from_trainer
5
+ model-index:
6
+ - name: patchtst-tsmixup
7
+ results: []
8
+ ---
9
+
10
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
11
+ should probably proofread and complete it, then remove this comment. -->
12
+
13
+ # patchtst-tsmixup
14
+
15
+ This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
16
+ It achieves the following results on the evaluation set:
17
+ - Loss: 0.1553
18
+ - Mse: 280.0361
19
+ - Mae: 0.6489
20
+ - Rmse: 16.7343
21
+ - Smape: 100.3318
22
+
23
+ ## Model description
24
+
25
+ More information needed
26
+
27
+ ## Intended uses & limitations
28
+
29
+ More information needed
30
+
31
+ ## Training and evaluation data
32
+
33
+ More information needed
34
+
35
+ ## Training procedure
36
+
37
+ ### Training hyperparameters
38
+
39
+ The following hyperparameters were used during training:
40
+ - learning_rate: 0.0001
41
+ - train_batch_size: 256
42
+ - eval_batch_size: 512
43
+ - seed: 42
44
+ - gradient_accumulation_steps: 2
45
+ - total_train_batch_size: 512
46
+ - optimizer: adamw_torch with betas=(0.9, 0.999) and epsilon=1e-08 (no additional optimizer arguments)
47
+ - lr_scheduler_type: linear
48
+ - lr_scheduler_warmup_steps: 1000
49
+ - num_epochs: 100
50
+
51
+ ### Training results
52
+
53
+ | Training Loss | Epoch | Step | Validation Loss | Mse | Mae | Rmse | Smape |
54
+ |:-------------:|:------:|:-----:|:---------------:|:--------:|:------:|:-------:|:--------:|
55
+ | 0.1797 | 0.0952 | 1000 | 0.1756 | 447.3596 | 0.7397 | 21.1509 | 90.8971 |
56
+ | 0.1709 | 0.1904 | 2000 | 0.1691 | 425.0924 | 0.7153 | 20.6178 | 112.3049 |
57
+ | 0.1722 | 0.2857 | 3000 | 0.1662 | 516.2153 | 0.7009 | 22.7204 | 89.5236 |
58
+ | 0.1694 | 0.3809 | 4000 | 0.1643 | 321.2047 | 0.6708 | 17.9222 | 93.0515 |
59
+ | 0.1648 | 0.4761 | 5000 | 0.1626 | 350.6870 | 0.6731 | 18.7266 | 94.0748 |
60
+ | 0.1672 | 0.5713 | 6000 | 0.1612 | 370.8825 | 0.6797 | 19.2583 | 84.6619 |
61
+ | 0.1623 | 0.6666 | 7000 | 0.1605 | 400.0790 | 0.6715 | 20.0020 | 89.7598 |
62
+ | 0.1638 | 0.7618 | 8000 | 0.1613 | 387.6971 | 0.6771 | 19.6900 | 122.3799 |
63
+ | 0.1609 | 0.8570 | 9000 | 0.1602 | 335.3427 | 0.6603 | 18.3124 | 109.3877 |
64
+ | 0.1618 | 0.9522 | 10000 | 0.1592 | 318.1492 | 0.6688 | 17.8367 | 76.3322 |
65
+ | 0.1588 | 1.0474 | 11000 | 0.1586 | 345.3675 | 0.6628 | 18.5841 | 94.5032 |
66
+ | 0.1601 | 1.1426 | 12000 | 0.1580 | 326.8865 | 0.6540 | 18.0800 | 81.2504 |
67
+ | 0.1585 | 1.2379 | 13000 | 0.1575 | 279.7964 | 0.6532 | 16.7271 | 107.6181 |
68
+ | 0.1567 | 1.3331 | 14000 | 0.1575 | 328.3490 | 0.6622 | 18.1204 | 91.9899 |
69
+ | 0.1592 | 1.4283 | 15000 | 0.1567 | 376.8973 | 0.6523 | 19.4138 | 89.7952 |
70
+ | 0.16 | 1.5235 | 16000 | 0.1576 | 327.5271 | 0.6580 | 18.0977 | 105.7316 |
71
+ | 0.1586 | 1.6188 | 17000 | 0.1568 | 399.5775 | 0.6602 | 19.9894 | 88.6057 |
72
+ | 0.1593 | 1.7140 | 18000 | 0.1565 | 359.5630 | 0.6604 | 18.9621 | 325.5064 |
73
+ | 0.1562 | 1.8092 | 19000 | 0.1566 | 281.2739 | 0.6545 | 16.7712 | 80.4528 |
74
+ | 0.1601 | 1.9044 | 20000 | 0.1570 | 287.3577 | 0.6543 | 16.9516 | 79.5544 |
75
+ | 0.1551 | 1.9997 | 21000 | 0.1561 | 279.2150 | 0.6444 | 16.7097 | 102.6016 |
76
+ | 0.1532 | 2.0948 | 22000 | 0.1554 | 282.9574 | 0.6454 | 16.8213 | 85.0121 |
77
+ | 0.1564 | 2.1901 | 23000 | 0.1554 | 332.3758 | 0.6485 | 18.2312 | 76.0350 |
78
+ | 0.1568 | 2.2853 | 24000 | 0.1551 | 356.0441 | 0.6528 | 18.8691 | 92.2597 |
79
+ | 0.1569 | 2.3805 | 25000 | 0.1562 | 333.3135 | 0.6536 | 18.2569 | 180.8556 |
80
+ | 0.1569 | 2.4757 | 26000 | 0.1551 | 291.0384 | 0.6491 | 17.0598 | 80.7309 |
81
+ | 0.1532 | 2.5710 | 27000 | 0.1553 | 280.0361 | 0.6489 | 16.7343 | 100.3318 |
82
+
83
+
84
+ ### Framework versions
85
+
86
+ - Transformers 4.51.3
87
+ - Pytorch 2.7.1+cu126
88
+ - Datasets 2.17.1
89
+ - Tokenizers 0.21.1
checkpoint-22000/config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu",
3
+ "architectures": [
4
+ "PatchTSTForPrediction"
5
+ ],
6
+ "attention_dropout": 0.1,
7
+ "bias": true,
8
+ "channel_attention": false,
9
+ "channel_consistent_masking": false,
10
+ "context_length": 512,
11
+ "d_model": 256,
12
+ "distribution_output": "normal",
13
+ "do_mask_input": null,
14
+ "dropout": 0.1,
15
+ "ff_dropout": 0.0,
16
+ "ffn_dim": 256,
17
+ "head_dropout": 0.1,
18
+ "init_std": 0.02,
19
+ "loss": "mse",
20
+ "mask_type": "random",
21
+ "mask_value": 0,
22
+ "model_type": "patchtst",
23
+ "norm_eps": 1e-05,
24
+ "norm_type": "batchnorm",
25
+ "num_attention_heads": 16,
26
+ "num_forecast_mask_patches": [
27
+ 2
28
+ ],
29
+ "num_hidden_layers": 3,
30
+ "num_input_channels": 1,
31
+ "num_parallel_samples": 100,
32
+ "num_targets": 1,
33
+ "output_range": null,
34
+ "patch_length": 16,
35
+ "patch_stride": 16,
36
+ "path_dropout": 0.0,
37
+ "pooling_type": null,
38
+ "positional_dropout": 0.0,
39
+ "positional_encoding_type": "sincos",
40
+ "pre_norm": true,
41
+ "prediction_length": 1,
42
+ "prenorm": true,
43
+ "random_mask_ratio": 0.5,
44
+ "scaling": "std",
45
+ "share_embedding": true,
46
+ "share_projection": true,
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.51.3",
49
+ "unmasked_channel_indices": null,
50
+ "use_cls_token": false
51
+ }
checkpoint-22000/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:38dc07f0d4fbcddedd5b90a31accf907e9df3c96ba8135cbca1ee82bcedda91e
3
+ size 4852676
checkpoint-22000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:13aec75816bd129b6f7ef2cb7a1f34e10cd5107afc02383a1556d31a609e4d6d
3
+ size 9643275
checkpoint-22000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:94841014e97c25e660feae325c1fe7c36a7d3e9c2aac6b0bcb5a2540d379d08a
3
+ size 14645
checkpoint-22000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b61915c995a1192f8bc18a0f6b6cf8cc65d63100e6eb053994546352674d96f1
3
+ size 1465
checkpoint-22000/trainer_state.json ADDED
@@ -0,0 +1,3387 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_global_step": 22000,
3
+ "best_metric": 0.1553574949502945,
4
+ "best_model_checkpoint": "./patchtst_tsmixup_final/checkpoint-22000",
5
+ "epoch": 2.0948435937723184,
6
+ "eval_steps": 1000,
7
+ "global_step": 22000,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.004761224586963767,
14
+ "grad_norm": 2.066147804260254,
15
+ "learning_rate": 4.9000000000000005e-06,
16
+ "loss": 0.5158,
17
+ "step": 50
18
+ },
19
+ {
20
+ "epoch": 0.009522449173927534,
21
+ "grad_norm": 1.1728284358978271,
22
+ "learning_rate": 9.900000000000002e-06,
23
+ "loss": 0.4068,
24
+ "step": 100
25
+ },
26
+ {
27
+ "epoch": 0.014283673760891302,
28
+ "grad_norm": 1.0786150693893433,
29
+ "learning_rate": 1.49e-05,
30
+ "loss": 0.3107,
31
+ "step": 150
32
+ },
33
+ {
34
+ "epoch": 0.01904489834785507,
35
+ "grad_norm": 1.0987305641174316,
36
+ "learning_rate": 1.9900000000000003e-05,
37
+ "loss": 0.2444,
38
+ "step": 200
39
+ },
40
+ {
41
+ "epoch": 0.023806122934818836,
42
+ "grad_norm": 0.9221014976501465,
43
+ "learning_rate": 2.4900000000000002e-05,
44
+ "loss": 0.2106,
45
+ "step": 250
46
+ },
47
+ {
48
+ "epoch": 0.028567347521782603,
49
+ "grad_norm": 1.1374775171279907,
50
+ "learning_rate": 2.9900000000000002e-05,
51
+ "loss": 0.1931,
52
+ "step": 300
53
+ },
54
+ {
55
+ "epoch": 0.03332857210874637,
56
+ "grad_norm": 1.2504770755767822,
57
+ "learning_rate": 3.49e-05,
58
+ "loss": 0.1971,
59
+ "step": 350
60
+ },
61
+ {
62
+ "epoch": 0.03808979669571014,
63
+ "grad_norm": 0.8367598056793213,
64
+ "learning_rate": 3.99e-05,
65
+ "loss": 0.1918,
66
+ "step": 400
67
+ },
68
+ {
69
+ "epoch": 0.0428510212826739,
70
+ "grad_norm": 0.9541486501693726,
71
+ "learning_rate": 4.49e-05,
72
+ "loss": 0.192,
73
+ "step": 450
74
+ },
75
+ {
76
+ "epoch": 0.04761224586963767,
77
+ "grad_norm": 0.9134742021560669,
78
+ "learning_rate": 4.99e-05,
79
+ "loss": 0.1875,
80
+ "step": 500
81
+ },
82
+ {
83
+ "epoch": 0.052373470456601436,
84
+ "grad_norm": 2.1370866298675537,
85
+ "learning_rate": 5.4900000000000006e-05,
86
+ "loss": 0.19,
87
+ "step": 550
88
+ },
89
+ {
90
+ "epoch": 0.057134695043565206,
91
+ "grad_norm": 1.8110992908477783,
92
+ "learning_rate": 5.99e-05,
93
+ "loss": 0.1902,
94
+ "step": 600
95
+ },
96
+ {
97
+ "epoch": 0.06189591963052897,
98
+ "grad_norm": 0.8074783682823181,
99
+ "learning_rate": 6.49e-05,
100
+ "loss": 0.1867,
101
+ "step": 650
102
+ },
103
+ {
104
+ "epoch": 0.06665714421749273,
105
+ "grad_norm": 1.8686498403549194,
106
+ "learning_rate": 6.99e-05,
107
+ "loss": 0.1839,
108
+ "step": 700
109
+ },
110
+ {
111
+ "epoch": 0.07141836880445651,
112
+ "grad_norm": 0.9226100444793701,
113
+ "learning_rate": 7.49e-05,
114
+ "loss": 0.1832,
115
+ "step": 750
116
+ },
117
+ {
118
+ "epoch": 0.07617959339142027,
119
+ "grad_norm": 1.0714315176010132,
120
+ "learning_rate": 7.99e-05,
121
+ "loss": 0.1862,
122
+ "step": 800
123
+ },
124
+ {
125
+ "epoch": 0.08094081797838404,
126
+ "grad_norm": 0.7746614813804626,
127
+ "learning_rate": 8.49e-05,
128
+ "loss": 0.1836,
129
+ "step": 850
130
+ },
131
+ {
132
+ "epoch": 0.0857020425653478,
133
+ "grad_norm": 0.8154539465904236,
134
+ "learning_rate": 8.99e-05,
135
+ "loss": 0.1863,
136
+ "step": 900
137
+ },
138
+ {
139
+ "epoch": 0.09046326715231158,
140
+ "grad_norm": 0.7890446186065674,
141
+ "learning_rate": 9.49e-05,
142
+ "loss": 0.1865,
143
+ "step": 950
144
+ },
145
+ {
146
+ "epoch": 0.09522449173927534,
147
+ "grad_norm": 0.9673619270324707,
148
+ "learning_rate": 9.99e-05,
149
+ "loss": 0.1797,
150
+ "step": 1000
151
+ },
152
+ {
153
+ "epoch": 0.09522449173927534,
154
+ "eval_loss": 0.17560431361198425,
155
+ "eval_mae": 0.7397370934486389,
156
+ "eval_mse": 447.359619140625,
157
+ "eval_rmse": 21.15087750285139,
158
+ "eval_runtime": 58.709,
159
+ "eval_samples_per_second": 10175.863,
160
+ "eval_smape": 90.89710712432861,
161
+ "eval_steps_per_second": 19.878,
162
+ "step": 1000
163
+ },
164
+ {
165
+ "epoch": 0.09998571632623911,
166
+ "grad_norm": 0.8122360110282898,
167
+ "learning_rate": 9.999532932990182e-05,
168
+ "loss": 0.1813,
169
+ "step": 1050
170
+ },
171
+ {
172
+ "epoch": 0.10474694091320287,
173
+ "grad_norm": 1.058344841003418,
174
+ "learning_rate": 9.999056334000573e-05,
175
+ "loss": 0.1797,
176
+ "step": 1100
177
+ },
178
+ {
179
+ "epoch": 0.10950816550016665,
180
+ "grad_norm": 0.9517110586166382,
181
+ "learning_rate": 9.998579735010962e-05,
182
+ "loss": 0.1816,
183
+ "step": 1150
184
+ },
185
+ {
186
+ "epoch": 0.11426939008713041,
187
+ "grad_norm": 1.0868945121765137,
188
+ "learning_rate": 9.998103136021352e-05,
189
+ "loss": 0.1786,
190
+ "step": 1200
191
+ },
192
+ {
193
+ "epoch": 0.11903061467409418,
194
+ "grad_norm": 1.1176084280014038,
195
+ "learning_rate": 9.997626537031743e-05,
196
+ "loss": 0.1766,
197
+ "step": 1250
198
+ },
199
+ {
200
+ "epoch": 0.12379183926105794,
201
+ "grad_norm": 1.489853858947754,
202
+ "learning_rate": 9.997149938042131e-05,
203
+ "loss": 0.1763,
204
+ "step": 1300
205
+ },
206
+ {
207
+ "epoch": 0.1285530638480217,
208
+ "grad_norm": 1.1758699417114258,
209
+ "learning_rate": 9.996673339052521e-05,
210
+ "loss": 0.1808,
211
+ "step": 1350
212
+ },
213
+ {
214
+ "epoch": 0.13331428843498547,
215
+ "grad_norm": 1.3600101470947266,
216
+ "learning_rate": 9.996196740062911e-05,
217
+ "loss": 0.177,
218
+ "step": 1400
219
+ },
220
+ {
221
+ "epoch": 0.13807551302194926,
222
+ "grad_norm": 0.7045785784721375,
223
+ "learning_rate": 9.995720141073301e-05,
224
+ "loss": 0.1781,
225
+ "step": 1450
226
+ },
227
+ {
228
+ "epoch": 0.14283673760891302,
229
+ "grad_norm": 1.386483907699585,
230
+ "learning_rate": 9.995243542083691e-05,
231
+ "loss": 0.1773,
232
+ "step": 1500
233
+ },
234
+ {
235
+ "epoch": 0.1475979621958768,
236
+ "grad_norm": 1.3609524965286255,
237
+ "learning_rate": 9.994766943094081e-05,
238
+ "loss": 0.1777,
239
+ "step": 1550
240
+ },
241
+ {
242
+ "epoch": 0.15235918678284055,
243
+ "grad_norm": 0.7538266777992249,
244
+ "learning_rate": 9.994290344104471e-05,
245
+ "loss": 0.1771,
246
+ "step": 1600
247
+ },
248
+ {
249
+ "epoch": 0.1571204113698043,
250
+ "grad_norm": 0.6669739484786987,
251
+ "learning_rate": 9.993813745114861e-05,
252
+ "loss": 0.1757,
253
+ "step": 1650
254
+ },
255
+ {
256
+ "epoch": 0.16188163595676808,
257
+ "grad_norm": 1.321413278579712,
258
+ "learning_rate": 9.993337146125251e-05,
259
+ "loss": 0.1729,
260
+ "step": 1700
261
+ },
262
+ {
263
+ "epoch": 0.16664286054373184,
264
+ "grad_norm": 0.7625342011451721,
265
+ "learning_rate": 9.992860547135641e-05,
266
+ "loss": 0.1744,
267
+ "step": 1750
268
+ },
269
+ {
270
+ "epoch": 0.1714040851306956,
271
+ "grad_norm": 1.4134427309036255,
272
+ "learning_rate": 9.99238394814603e-05,
273
+ "loss": 0.1726,
274
+ "step": 1800
275
+ },
276
+ {
277
+ "epoch": 0.17616530971765937,
278
+ "grad_norm": 0.8000154495239258,
279
+ "learning_rate": 9.991907349156421e-05,
280
+ "loss": 0.175,
281
+ "step": 1850
282
+ },
283
+ {
284
+ "epoch": 0.18092653430462316,
285
+ "grad_norm": 0.8776112198829651,
286
+ "learning_rate": 9.99143075016681e-05,
287
+ "loss": 0.1742,
288
+ "step": 1900
289
+ },
290
+ {
291
+ "epoch": 0.18568775889158692,
292
+ "grad_norm": 0.9763234853744507,
293
+ "learning_rate": 9.9909541511772e-05,
294
+ "loss": 0.1753,
295
+ "step": 1950
296
+ },
297
+ {
298
+ "epoch": 0.1904489834785507,
299
+ "grad_norm": 0.6149079203605652,
300
+ "learning_rate": 9.99047755218759e-05,
301
+ "loss": 0.1709,
302
+ "step": 2000
303
+ },
304
+ {
305
+ "epoch": 0.1904489834785507,
306
+ "eval_loss": 0.1690738946199417,
307
+ "eval_mae": 0.7152817249298096,
308
+ "eval_mse": 425.0924072265625,
309
+ "eval_rmse": 20.617769210721185,
310
+ "eval_runtime": 60.02,
311
+ "eval_samples_per_second": 9953.597,
312
+ "eval_smape": 112.30494976043701,
313
+ "eval_steps_per_second": 19.444,
314
+ "step": 2000
315
+ },
316
+ {
317
+ "epoch": 0.19521020806551445,
318
+ "grad_norm": 0.8438695669174194,
319
+ "learning_rate": 9.99000095319798e-05,
320
+ "loss": 0.1753,
321
+ "step": 2050
322
+ },
323
+ {
324
+ "epoch": 0.19997143265247821,
325
+ "grad_norm": 0.8969755172729492,
326
+ "learning_rate": 9.98952435420837e-05,
327
+ "loss": 0.1745,
328
+ "step": 2100
329
+ },
330
+ {
331
+ "epoch": 0.20473265723944198,
332
+ "grad_norm": 0.9189475178718567,
333
+ "learning_rate": 9.98904775521876e-05,
334
+ "loss": 0.1709,
335
+ "step": 2150
336
+ },
337
+ {
338
+ "epoch": 0.20949388182640574,
339
+ "grad_norm": 0.8711380362510681,
340
+ "learning_rate": 9.98857115622915e-05,
341
+ "loss": 0.1692,
342
+ "step": 2200
343
+ },
344
+ {
345
+ "epoch": 0.2142551064133695,
346
+ "grad_norm": 0.7816225290298462,
347
+ "learning_rate": 9.98809455723954e-05,
348
+ "loss": 0.1709,
349
+ "step": 2250
350
+ },
351
+ {
352
+ "epoch": 0.2190163310003333,
353
+ "grad_norm": 0.6408753395080566,
354
+ "learning_rate": 9.987617958249928e-05,
355
+ "loss": 0.1707,
356
+ "step": 2300
357
+ },
358
+ {
359
+ "epoch": 0.22377755558729706,
360
+ "grad_norm": 0.7021253705024719,
361
+ "learning_rate": 9.987141359260319e-05,
362
+ "loss": 0.1739,
363
+ "step": 2350
364
+ },
365
+ {
366
+ "epoch": 0.22853878017426082,
367
+ "grad_norm": 0.9026205539703369,
368
+ "learning_rate": 9.986664760270709e-05,
369
+ "loss": 0.1705,
370
+ "step": 2400
371
+ },
372
+ {
373
+ "epoch": 0.2333000047612246,
374
+ "grad_norm": 0.6956352591514587,
375
+ "learning_rate": 9.986188161281098e-05,
376
+ "loss": 0.1705,
377
+ "step": 2450
378
+ },
379
+ {
380
+ "epoch": 0.23806122934818835,
381
+ "grad_norm": 0.7024583220481873,
382
+ "learning_rate": 9.985711562291489e-05,
383
+ "loss": 0.1718,
384
+ "step": 2500
385
+ },
386
+ {
387
+ "epoch": 0.24282245393515212,
388
+ "grad_norm": 0.6184080839157104,
389
+ "learning_rate": 9.985234963301878e-05,
390
+ "loss": 0.1697,
391
+ "step": 2550
392
+ },
393
+ {
394
+ "epoch": 0.24758367852211588,
395
+ "grad_norm": 0.9684680700302124,
396
+ "learning_rate": 9.984758364312268e-05,
397
+ "loss": 0.1696,
398
+ "step": 2600
399
+ },
400
+ {
401
+ "epoch": 0.25234490310907964,
402
+ "grad_norm": 0.8625733852386475,
403
+ "learning_rate": 9.984281765322659e-05,
404
+ "loss": 0.1731,
405
+ "step": 2650
406
+ },
407
+ {
408
+ "epoch": 0.2571061276960434,
409
+ "grad_norm": 0.6156722903251648,
410
+ "learning_rate": 9.983805166333048e-05,
411
+ "loss": 0.1731,
412
+ "step": 2700
413
+ },
414
+ {
415
+ "epoch": 0.26186735228300717,
416
+ "grad_norm": 0.7371954917907715,
417
+ "learning_rate": 9.983328567343438e-05,
418
+ "loss": 0.173,
419
+ "step": 2750
420
+ },
421
+ {
422
+ "epoch": 0.26662857686997093,
423
+ "grad_norm": 0.658812940120697,
424
+ "learning_rate": 9.982851968353828e-05,
425
+ "loss": 0.1691,
426
+ "step": 2800
427
+ },
428
+ {
429
+ "epoch": 0.2713898014569347,
430
+ "grad_norm": 0.8242245316505432,
431
+ "learning_rate": 9.982375369364217e-05,
432
+ "loss": 0.1715,
433
+ "step": 2850
434
+ },
435
+ {
436
+ "epoch": 0.2761510260438985,
437
+ "grad_norm": 0.6063680052757263,
438
+ "learning_rate": 9.981898770374607e-05,
439
+ "loss": 0.1722,
440
+ "step": 2900
441
+ },
442
+ {
443
+ "epoch": 0.2809122506308623,
444
+ "grad_norm": 0.7695409655570984,
445
+ "learning_rate": 9.981422171384997e-05,
446
+ "loss": 0.1712,
447
+ "step": 2950
448
+ },
449
+ {
450
+ "epoch": 0.28567347521782605,
451
+ "grad_norm": 0.665675163269043,
452
+ "learning_rate": 9.980945572395387e-05,
453
+ "loss": 0.1722,
454
+ "step": 3000
455
+ },
456
+ {
457
+ "epoch": 0.28567347521782605,
458
+ "eval_loss": 0.16621138155460358,
459
+ "eval_mae": 0.7008960247039795,
460
+ "eval_mse": 516.21533203125,
461
+ "eval_rmse": 22.72037262087156,
462
+ "eval_runtime": 62.5998,
463
+ "eval_samples_per_second": 9543.397,
464
+ "eval_smape": 89.52359557151794,
465
+ "eval_steps_per_second": 18.642,
466
+ "step": 3000
467
+ },
468
+ {
469
+ "epoch": 0.2904346998047898,
470
+ "grad_norm": 0.7732612490653992,
471
+ "learning_rate": 9.980468973405776e-05,
472
+ "loss": 0.1708,
473
+ "step": 3050
474
+ },
475
+ {
476
+ "epoch": 0.2951959243917536,
477
+ "grad_norm": 1.0461828708648682,
478
+ "learning_rate": 9.979992374416167e-05,
479
+ "loss": 0.1692,
480
+ "step": 3100
481
+ },
482
+ {
483
+ "epoch": 0.29995714897871734,
484
+ "grad_norm": 0.6057863831520081,
485
+ "learning_rate": 9.979515775426557e-05,
486
+ "loss": 0.1681,
487
+ "step": 3150
488
+ },
489
+ {
490
+ "epoch": 0.3047183735656811,
491
+ "grad_norm": 0.5380491018295288,
492
+ "learning_rate": 9.979039176436946e-05,
493
+ "loss": 0.1687,
494
+ "step": 3200
495
+ },
496
+ {
497
+ "epoch": 0.30947959815264486,
498
+ "grad_norm": 0.7164149284362793,
499
+ "learning_rate": 9.978562577447337e-05,
500
+ "loss": 0.1678,
501
+ "step": 3250
502
+ },
503
+ {
504
+ "epoch": 0.3142408227396086,
505
+ "grad_norm": 0.9223781228065491,
506
+ "learning_rate": 9.978085978457726e-05,
507
+ "loss": 0.1698,
508
+ "step": 3300
509
+ },
510
+ {
511
+ "epoch": 0.3190020473265724,
512
+ "grad_norm": 0.645452082157135,
513
+ "learning_rate": 9.977609379468116e-05,
514
+ "loss": 0.17,
515
+ "step": 3350
516
+ },
517
+ {
518
+ "epoch": 0.32376327191353615,
519
+ "grad_norm": 0.6378370523452759,
520
+ "learning_rate": 9.977132780478507e-05,
521
+ "loss": 0.1673,
522
+ "step": 3400
523
+ },
524
+ {
525
+ "epoch": 0.3285244965004999,
526
+ "grad_norm": 0.511216402053833,
527
+ "learning_rate": 9.976656181488896e-05,
528
+ "loss": 0.1678,
529
+ "step": 3450
530
+ },
531
+ {
532
+ "epoch": 0.3332857210874637,
533
+ "grad_norm": 0.64838707447052,
534
+ "learning_rate": 9.976179582499286e-05,
535
+ "loss": 0.1675,
536
+ "step": 3500
537
+ },
538
+ {
539
+ "epoch": 0.33804694567442745,
540
+ "grad_norm": 0.6467918753623962,
541
+ "learning_rate": 9.975702983509676e-05,
542
+ "loss": 0.1699,
543
+ "step": 3550
544
+ },
545
+ {
546
+ "epoch": 0.3428081702613912,
547
+ "grad_norm": 0.6198284029960632,
548
+ "learning_rate": 9.975226384520065e-05,
549
+ "loss": 0.169,
550
+ "step": 3600
551
+ },
552
+ {
553
+ "epoch": 0.347569394848355,
554
+ "grad_norm": 0.6328741312026978,
555
+ "learning_rate": 9.974749785530455e-05,
556
+ "loss": 0.1685,
557
+ "step": 3650
558
+ },
559
+ {
560
+ "epoch": 0.35233061943531874,
561
+ "grad_norm": 0.8264518976211548,
562
+ "learning_rate": 9.974273186540844e-05,
563
+ "loss": 0.1731,
564
+ "step": 3700
565
+ },
566
+ {
567
+ "epoch": 0.35709184402228256,
568
+ "grad_norm": 0.7238495945930481,
569
+ "learning_rate": 9.973796587551235e-05,
570
+ "loss": 0.1674,
571
+ "step": 3750
572
+ },
573
+ {
574
+ "epoch": 0.3618530686092463,
575
+ "grad_norm": 0.6243422031402588,
576
+ "learning_rate": 9.973319988561624e-05,
577
+ "loss": 0.1699,
578
+ "step": 3800
579
+ },
580
+ {
581
+ "epoch": 0.3666142931962101,
582
+ "grad_norm": 0.76638263463974,
583
+ "learning_rate": 9.972843389572014e-05,
584
+ "loss": 0.1707,
585
+ "step": 3850
586
+ },
587
+ {
588
+ "epoch": 0.37137551778317385,
589
+ "grad_norm": 0.5346329212188721,
590
+ "learning_rate": 9.972366790582405e-05,
591
+ "loss": 0.1669,
592
+ "step": 3900
593
+ },
594
+ {
595
+ "epoch": 0.3761367423701376,
596
+ "grad_norm": 0.6198967695236206,
597
+ "learning_rate": 9.971890191592794e-05,
598
+ "loss": 0.1663,
599
+ "step": 3950
600
+ },
601
+ {
602
+ "epoch": 0.3808979669571014,
603
+ "grad_norm": 0.936530590057373,
604
+ "learning_rate": 9.971413592603184e-05,
605
+ "loss": 0.1694,
606
+ "step": 4000
607
+ },
608
+ {
609
+ "epoch": 0.3808979669571014,
610
+ "eval_loss": 0.16426624357700348,
611
+ "eval_mae": 0.6708112359046936,
612
+ "eval_mse": 321.2046813964844,
613
+ "eval_rmse": 17.922184057655596,
614
+ "eval_runtime": 59.1224,
615
+ "eval_samples_per_second": 10104.719,
616
+ "eval_smape": 93.0514931678772,
617
+ "eval_steps_per_second": 19.739,
618
+ "step": 4000
619
+ },
620
+ {
621
+ "epoch": 0.38565919154406514,
622
+ "grad_norm": 0.5151750445365906,
623
+ "learning_rate": 9.970936993613574e-05,
624
+ "loss": 0.1676,
625
+ "step": 4050
626
+ },
627
+ {
628
+ "epoch": 0.3904204161310289,
629
+ "grad_norm": 0.8430535793304443,
630
+ "learning_rate": 9.970460394623964e-05,
631
+ "loss": 0.1699,
632
+ "step": 4100
633
+ },
634
+ {
635
+ "epoch": 0.39518164071799267,
636
+ "grad_norm": 0.7711997628211975,
637
+ "learning_rate": 9.969983795634354e-05,
638
+ "loss": 0.1664,
639
+ "step": 4150
640
+ },
641
+ {
642
+ "epoch": 0.39994286530495643,
643
+ "grad_norm": 0.5547206401824951,
644
+ "learning_rate": 9.969507196644744e-05,
645
+ "loss": 0.1679,
646
+ "step": 4200
647
+ },
648
+ {
649
+ "epoch": 0.4047040898919202,
650
+ "grad_norm": 0.7514538764953613,
651
+ "learning_rate": 9.969030597655134e-05,
652
+ "loss": 0.1685,
653
+ "step": 4250
654
+ },
655
+ {
656
+ "epoch": 0.40946531447888396,
657
+ "grad_norm": 0.6667706370353699,
658
+ "learning_rate": 9.968553998665524e-05,
659
+ "loss": 0.1691,
660
+ "step": 4300
661
+ },
662
+ {
663
+ "epoch": 0.4142265390658477,
664
+ "grad_norm": 0.5886721611022949,
665
+ "learning_rate": 9.968077399675914e-05,
666
+ "loss": 0.1649,
667
+ "step": 4350
668
+ },
669
+ {
670
+ "epoch": 0.4189877636528115,
671
+ "grad_norm": 0.5160133838653564,
672
+ "learning_rate": 9.967600800686303e-05,
673
+ "loss": 0.1673,
674
+ "step": 4400
675
+ },
676
+ {
677
+ "epoch": 0.42374898823977525,
678
+ "grad_norm": 0.6817535758018494,
679
+ "learning_rate": 9.967124201696692e-05,
680
+ "loss": 0.1687,
681
+ "step": 4450
682
+ },
683
+ {
684
+ "epoch": 0.428510212826739,
685
+ "grad_norm": 0.5424938201904297,
686
+ "learning_rate": 9.966647602707083e-05,
687
+ "loss": 0.1687,
688
+ "step": 4500
689
+ },
690
+ {
691
+ "epoch": 0.43327143741370283,
692
+ "grad_norm": 0.483815461397171,
693
+ "learning_rate": 9.966171003717473e-05,
694
+ "loss": 0.1673,
695
+ "step": 4550
696
+ },
697
+ {
698
+ "epoch": 0.4380326620006666,
699
+ "grad_norm": 0.555853009223938,
700
+ "learning_rate": 9.965694404727862e-05,
701
+ "loss": 0.1667,
702
+ "step": 4600
703
+ },
704
+ {
705
+ "epoch": 0.44279388658763036,
706
+ "grad_norm": 0.45629894733428955,
707
+ "learning_rate": 9.965217805738253e-05,
708
+ "loss": 0.1678,
709
+ "step": 4650
710
+ },
711
+ {
712
+ "epoch": 0.4475551111745941,
713
+ "grad_norm": 0.47480854392051697,
714
+ "learning_rate": 9.964741206748642e-05,
715
+ "loss": 0.1651,
716
+ "step": 4700
717
+ },
718
+ {
719
+ "epoch": 0.4523163357615579,
720
+ "grad_norm": 0.5411455631256104,
721
+ "learning_rate": 9.964264607759032e-05,
722
+ "loss": 0.168,
723
+ "step": 4750
724
+ },
725
+ {
726
+ "epoch": 0.45707756034852165,
727
+ "grad_norm": 0.7176097631454468,
728
+ "learning_rate": 9.963788008769422e-05,
729
+ "loss": 0.1654,
730
+ "step": 4800
731
+ },
732
+ {
733
+ "epoch": 0.4618387849354854,
734
+ "grad_norm": 0.7010710835456848,
735
+ "learning_rate": 9.963311409779812e-05,
736
+ "loss": 0.165,
737
+ "step": 4850
738
+ },
739
+ {
740
+ "epoch": 0.4666000095224492,
741
+ "grad_norm": 0.5730286240577698,
742
+ "learning_rate": 9.962834810790202e-05,
743
+ "loss": 0.1654,
744
+ "step": 4900
745
+ },
746
+ {
747
+ "epoch": 0.47136123410941294,
748
+ "grad_norm": 0.532320499420166,
749
+ "learning_rate": 9.96235821180059e-05,
750
+ "loss": 0.1665,
751
+ "step": 4950
752
+ },
753
+ {
754
+ "epoch": 0.4761224586963767,
755
+ "grad_norm": 0.5974966883659363,
756
+ "learning_rate": 9.961881612810982e-05,
757
+ "loss": 0.1648,
758
+ "step": 5000
759
+ },
760
+ {
761
+ "epoch": 0.4761224586963767,
762
+ "eval_loss": 0.16261516511440277,
763
+ "eval_mae": 0.6730566620826721,
764
+ "eval_mse": 350.6869812011719,
765
+ "eval_rmse": 18.726638278163325,
766
+ "eval_runtime": 61.1422,
767
+ "eval_samples_per_second": 9770.909,
768
+ "eval_smape": 94.07484531402588,
769
+ "eval_steps_per_second": 19.087,
770
+ "step": 5000
771
+ },
772
+ {
773
+ "epoch": 0.48088368328334047,
774
+ "grad_norm": 0.6836587190628052,
775
+ "learning_rate": 9.961405013821372e-05,
776
+ "loss": 0.164,
777
+ "step": 5050
778
+ },
779
+ {
780
+ "epoch": 0.48564490787030423,
781
+ "grad_norm": 0.6935145854949951,
782
+ "learning_rate": 9.96092841483176e-05,
783
+ "loss": 0.1656,
784
+ "step": 5100
785
+ },
786
+ {
787
+ "epoch": 0.490406132457268,
788
+ "grad_norm": 0.5300805568695068,
789
+ "learning_rate": 9.960451815842151e-05,
790
+ "loss": 0.1661,
791
+ "step": 5150
792
+ },
793
+ {
794
+ "epoch": 0.49516735704423176,
795
+ "grad_norm": 0.6059597134590149,
796
+ "learning_rate": 9.95997521685254e-05,
797
+ "loss": 0.1665,
798
+ "step": 5200
799
+ },
800
+ {
801
+ "epoch": 0.4999285816311955,
802
+ "grad_norm": 0.6202102303504944,
803
+ "learning_rate": 9.95949861786293e-05,
804
+ "loss": 0.1667,
805
+ "step": 5250
806
+ },
807
+ {
808
+ "epoch": 0.5046898062181593,
809
+ "grad_norm": 0.6857314705848694,
810
+ "learning_rate": 9.959022018873321e-05,
811
+ "loss": 0.1648,
812
+ "step": 5300
813
+ },
814
+ {
815
+ "epoch": 0.5094510308051231,
816
+ "grad_norm": 0.5026215314865112,
817
+ "learning_rate": 9.95854541988371e-05,
818
+ "loss": 0.1656,
819
+ "step": 5350
820
+ },
821
+ {
822
+ "epoch": 0.5142122553920868,
823
+ "grad_norm": 0.8072870969772339,
824
+ "learning_rate": 9.9580688208941e-05,
825
+ "loss": 0.1637,
826
+ "step": 5400
827
+ },
828
+ {
829
+ "epoch": 0.5189734799790506,
830
+ "grad_norm": 0.5563872456550598,
831
+ "learning_rate": 9.95759222190449e-05,
832
+ "loss": 0.1665,
833
+ "step": 5450
834
+ },
835
+ {
836
+ "epoch": 0.5237347045660143,
837
+ "grad_norm": 0.4486568868160248,
838
+ "learning_rate": 9.95711562291488e-05,
839
+ "loss": 0.1665,
840
+ "step": 5500
841
+ },
842
+ {
843
+ "epoch": 0.5284959291529782,
844
+ "grad_norm": 0.5072858929634094,
845
+ "learning_rate": 9.95663902392527e-05,
846
+ "loss": 0.1671,
847
+ "step": 5550
848
+ },
849
+ {
850
+ "epoch": 0.5332571537399419,
851
+ "grad_norm": 0.4768078327178955,
852
+ "learning_rate": 9.95616242493566e-05,
853
+ "loss": 0.165,
854
+ "step": 5600
855
+ },
856
+ {
857
+ "epoch": 0.5380183783269057,
858
+ "grad_norm": 0.5484294891357422,
859
+ "learning_rate": 9.95568582594605e-05,
860
+ "loss": 0.1615,
861
+ "step": 5650
862
+ },
863
+ {
864
+ "epoch": 0.5427796029138694,
865
+ "grad_norm": 0.5098631978034973,
866
+ "learning_rate": 9.955209226956438e-05,
867
+ "loss": 0.1663,
868
+ "step": 5700
869
+ },
870
+ {
871
+ "epoch": 0.5475408275008332,
872
+ "grad_norm": 0.5663777589797974,
873
+ "learning_rate": 9.95473262796683e-05,
874
+ "loss": 0.1653,
875
+ "step": 5750
876
+ },
877
+ {
878
+ "epoch": 0.552302052087797,
879
+ "grad_norm": 0.5557841658592224,
880
+ "learning_rate": 9.95425602897722e-05,
881
+ "loss": 0.163,
882
+ "step": 5800
883
+ },
884
+ {
885
+ "epoch": 0.5570632766747607,
886
+ "grad_norm": 0.545656144618988,
887
+ "learning_rate": 9.953779429987608e-05,
888
+ "loss": 0.1619,
889
+ "step": 5850
890
+ },
891
+ {
892
+ "epoch": 0.5618245012617246,
893
+ "grad_norm": 0.6774228811264038,
894
+ "learning_rate": 9.953302830998e-05,
895
+ "loss": 0.1649,
896
+ "step": 5900
897
+ },
898
+ {
899
+ "epoch": 0.5665857258486883,
900
+ "grad_norm": 0.4831783175468445,
901
+ "learning_rate": 9.952826232008388e-05,
902
+ "loss": 0.1626,
903
+ "step": 5950
904
+ },
905
+ {
906
+ "epoch": 0.5713469504356521,
907
+ "grad_norm": 0.46657130122184753,
908
+ "learning_rate": 9.952349633018778e-05,
909
+ "loss": 0.1672,
910
+ "step": 6000
911
+ },
912
+ {
913
+ "epoch": 0.5713469504356521,
914
+ "eval_loss": 0.16118212044239044,
915
+ "eval_mae": 0.6796970963478088,
916
+ "eval_mse": 370.8824768066406,
917
+ "eval_rmse": 19.25830929252723,
918
+ "eval_runtime": 58.3327,
919
+ "eval_samples_per_second": 10241.503,
920
+ "eval_smape": 84.66194272041321,
921
+ "eval_steps_per_second": 20.006,
922
+ "step": 6000
923
+ },
924
+ {
925
+ "epoch": 0.5761081750226158,
926
+ "grad_norm": 0.5452147126197815,
927
+ "learning_rate": 9.95187303402917e-05,
928
+ "loss": 0.1645,
929
+ "step": 6050
930
+ },
931
+ {
932
+ "epoch": 0.5808693996095796,
933
+ "grad_norm": 0.6225939989089966,
934
+ "learning_rate": 9.951396435039558e-05,
935
+ "loss": 0.1629,
936
+ "step": 6100
937
+ },
938
+ {
939
+ "epoch": 0.5856306241965433,
940
+ "grad_norm": 0.618532121181488,
941
+ "learning_rate": 9.950919836049948e-05,
942
+ "loss": 0.1638,
943
+ "step": 6150
944
+ },
945
+ {
946
+ "epoch": 0.5903918487835071,
947
+ "grad_norm": 0.6065341830253601,
948
+ "learning_rate": 9.950443237060338e-05,
949
+ "loss": 0.1672,
950
+ "step": 6200
951
+ },
952
+ {
953
+ "epoch": 0.5951530733704709,
954
+ "grad_norm": 0.7495716214179993,
955
+ "learning_rate": 9.949966638070728e-05,
956
+ "loss": 0.1648,
957
+ "step": 6250
958
+ },
959
+ {
960
+ "epoch": 0.5999142979574347,
961
+ "grad_norm": 0.6554955244064331,
962
+ "learning_rate": 9.949490039081118e-05,
963
+ "loss": 0.1654,
964
+ "step": 6300
965
+ },
966
+ {
967
+ "epoch": 0.6046755225443984,
968
+ "grad_norm": 0.5830172300338745,
969
+ "learning_rate": 9.949013440091506e-05,
970
+ "loss": 0.1629,
971
+ "step": 6350
972
+ },
973
+ {
974
+ "epoch": 0.6094367471313622,
975
+ "grad_norm": 0.5021042823791504,
976
+ "learning_rate": 9.948536841101898e-05,
977
+ "loss": 0.1622,
978
+ "step": 6400
979
+ },
980
+ {
981
+ "epoch": 0.6141979717183259,
982
+ "grad_norm": 0.47169509530067444,
983
+ "learning_rate": 9.948060242112288e-05,
984
+ "loss": 0.1632,
985
+ "step": 6450
986
+ },
987
+ {
988
+ "epoch": 0.6189591963052897,
989
+ "grad_norm": 0.7609395980834961,
990
+ "learning_rate": 9.947583643122676e-05,
991
+ "loss": 0.1641,
992
+ "step": 6500
993
+ },
994
+ {
995
+ "epoch": 0.6237204208922534,
996
+ "grad_norm": 0.5191305875778198,
997
+ "learning_rate": 9.947107044133068e-05,
998
+ "loss": 0.1672,
999
+ "step": 6550
1000
+ },
1001
+ {
1002
+ "epoch": 0.6284816454792173,
1003
+ "grad_norm": 0.5454711318016052,
1004
+ "learning_rate": 9.946630445143456e-05,
1005
+ "loss": 0.1648,
1006
+ "step": 6600
1007
+ },
1008
+ {
1009
+ "epoch": 0.6332428700661811,
1010
+ "grad_norm": 0.49112918972969055,
1011
+ "learning_rate": 9.946153846153846e-05,
1012
+ "loss": 0.1648,
1013
+ "step": 6650
1014
+ },
1015
+ {
1016
+ "epoch": 0.6380040946531448,
1017
+ "grad_norm": 0.4859708249568939,
1018
+ "learning_rate": 9.945677247164236e-05,
1019
+ "loss": 0.1652,
1020
+ "step": 6700
1021
+ },
1022
+ {
1023
+ "epoch": 0.6427653192401086,
1024
+ "grad_norm": 0.506971001625061,
1025
+ "learning_rate": 9.945200648174626e-05,
1026
+ "loss": 0.1623,
1027
+ "step": 6750
1028
+ },
1029
+ {
1030
+ "epoch": 0.6475265438270723,
1031
+ "grad_norm": 0.5732383131980896,
1032
+ "learning_rate": 9.944724049185016e-05,
1033
+ "loss": 0.1657,
1034
+ "step": 6800
1035
+ },
1036
+ {
1037
+ "epoch": 0.6522877684140361,
1038
+ "grad_norm": 0.548362672328949,
1039
+ "learning_rate": 9.944247450195406e-05,
1040
+ "loss": 0.1628,
1041
+ "step": 6850
1042
+ },
1043
+ {
1044
+ "epoch": 0.6570489930009998,
1045
+ "grad_norm": 0.5271615982055664,
1046
+ "learning_rate": 9.943770851205796e-05,
1047
+ "loss": 0.1627,
1048
+ "step": 6900
1049
+ },
1050
+ {
1051
+ "epoch": 0.6618102175879637,
1052
+ "grad_norm": 0.7555857300758362,
1053
+ "learning_rate": 9.943294252216186e-05,
1054
+ "loss": 0.1635,
1055
+ "step": 6950
1056
+ },
1057
+ {
1058
+ "epoch": 0.6665714421749274,
1059
+ "grad_norm": 0.5426679849624634,
1060
+ "learning_rate": 9.942817653226576e-05,
1061
+ "loss": 0.1623,
1062
+ "step": 7000
1063
+ },
1064
+ {
1065
+ "epoch": 0.6665714421749274,
1066
+ "eval_loss": 0.1605178564786911,
1067
+ "eval_mae": 0.6715303063392639,
1068
+ "eval_mse": 400.0790100097656,
1069
+ "eval_rmse": 20.001975152713435,
1070
+ "eval_runtime": 61.0562,
1071
+ "eval_samples_per_second": 9784.671,
1072
+ "eval_smape": 89.75983262062073,
1073
+ "eval_steps_per_second": 19.114,
1074
+ "step": 7000
1075
+ },
1076
+ {
1077
+ "epoch": 0.6713326667618912,
1078
+ "grad_norm": 0.5322990417480469,
1079
+ "learning_rate": 9.942341054236966e-05,
1080
+ "loss": 0.1625,
1081
+ "step": 7050
1082
+ },
1083
+ {
1084
+ "epoch": 0.6760938913488549,
1085
+ "grad_norm": 0.6016077995300293,
1086
+ "learning_rate": 9.941864455247354e-05,
1087
+ "loss": 0.1647,
1088
+ "step": 7100
1089
+ },
1090
+ {
1091
+ "epoch": 0.6808551159358187,
1092
+ "grad_norm": 0.5076338648796082,
1093
+ "learning_rate": 9.941387856257746e-05,
1094
+ "loss": 0.1618,
1095
+ "step": 7150
1096
+ },
1097
+ {
1098
+ "epoch": 0.6856163405227824,
1099
+ "grad_norm": 0.5658571124076843,
1100
+ "learning_rate": 9.940911257268136e-05,
1101
+ "loss": 0.1662,
1102
+ "step": 7200
1103
+ },
1104
+ {
1105
+ "epoch": 0.6903775651097462,
1106
+ "grad_norm": 0.6107982993125916,
1107
+ "learning_rate": 9.940434658278524e-05,
1108
+ "loss": 0.1608,
1109
+ "step": 7250
1110
+ },
1111
+ {
1112
+ "epoch": 0.69513878969671,
1113
+ "grad_norm": 0.4623304307460785,
1114
+ "learning_rate": 9.939958059288916e-05,
1115
+ "loss": 0.1635,
1116
+ "step": 7300
1117
+ },
1118
+ {
1119
+ "epoch": 0.6999000142836738,
1120
+ "grad_norm": 0.6437474489212036,
1121
+ "learning_rate": 9.939481460299304e-05,
1122
+ "loss": 0.1606,
1123
+ "step": 7350
1124
+ },
1125
+ {
1126
+ "epoch": 0.7046612388706375,
1127
+ "grad_norm": 0.6315158605575562,
1128
+ "learning_rate": 9.939004861309694e-05,
1129
+ "loss": 0.1615,
1130
+ "step": 7400
1131
+ },
1132
+ {
1133
+ "epoch": 0.7094224634576013,
1134
+ "grad_norm": 0.6503571271896362,
1135
+ "learning_rate": 9.938528262320085e-05,
1136
+ "loss": 0.1619,
1137
+ "step": 7450
1138
+ },
1139
+ {
1140
+ "epoch": 0.7141836880445651,
1141
+ "grad_norm": 0.46252092719078064,
1142
+ "learning_rate": 9.938051663330474e-05,
1143
+ "loss": 0.1625,
1144
+ "step": 7500
1145
+ },
1146
+ {
1147
+ "epoch": 0.7189449126315288,
1148
+ "grad_norm": 0.5186336636543274,
1149
+ "learning_rate": 9.937575064340864e-05,
1150
+ "loss": 0.1628,
1151
+ "step": 7550
1152
+ },
1153
+ {
1154
+ "epoch": 0.7237061372184926,
1155
+ "grad_norm": 0.5236070156097412,
1156
+ "learning_rate": 9.937098465351254e-05,
1157
+ "loss": 0.1624,
1158
+ "step": 7600
1159
+ },
1160
+ {
1161
+ "epoch": 0.7284673618054563,
1162
+ "grad_norm": 0.4777911901473999,
1163
+ "learning_rate": 9.936621866361644e-05,
1164
+ "loss": 0.1625,
1165
+ "step": 7650
1166
+ },
1167
+ {
1168
+ "epoch": 0.7332285863924202,
1169
+ "grad_norm": 0.5092161297798157,
1170
+ "learning_rate": 9.936145267372034e-05,
1171
+ "loss": 0.163,
1172
+ "step": 7700
1173
+ },
1174
+ {
1175
+ "epoch": 0.7379898109793839,
1176
+ "grad_norm": 0.5161564350128174,
1177
+ "learning_rate": 9.935668668382424e-05,
1178
+ "loss": 0.1615,
1179
+ "step": 7750
1180
+ },
1181
+ {
1182
+ "epoch": 0.7427510355663477,
1183
+ "grad_norm": 0.48548147082328796,
1184
+ "learning_rate": 9.935192069392814e-05,
1185
+ "loss": 0.1592,
1186
+ "step": 7800
1187
+ },
1188
+ {
1189
+ "epoch": 0.7475122601533114,
1190
+ "grad_norm": 0.6095620393753052,
1191
+ "learning_rate": 9.934715470403202e-05,
1192
+ "loss": 0.1613,
1193
+ "step": 7850
1194
+ },
1195
+ {
1196
+ "epoch": 0.7522734847402752,
1197
+ "grad_norm": 0.49965670704841614,
1198
+ "learning_rate": 9.934238871413592e-05,
1199
+ "loss": 0.163,
1200
+ "step": 7900
1201
+ },
1202
+ {
1203
+ "epoch": 0.7570347093272389,
1204
+ "grad_norm": 0.5934204459190369,
1205
+ "learning_rate": 9.933762272423984e-05,
1206
+ "loss": 0.1638,
1207
+ "step": 7950
1208
+ },
1209
+ {
1210
+ "epoch": 0.7617959339142027,
1211
+ "grad_norm": 0.6522780060768127,
1212
+ "learning_rate": 9.933285673434372e-05,
1213
+ "loss": 0.1638,
1214
+ "step": 8000
1215
+ },
1216
+ {
1217
+ "epoch": 0.7617959339142027,
1218
+ "eval_loss": 0.16129492223262787,
1219
+ "eval_mae": 0.6771246194839478,
1220
+ "eval_mse": 387.6971435546875,
1221
+ "eval_rmse": 19.69002649959333,
1222
+ "eval_runtime": 57.6529,
1223
+ "eval_samples_per_second": 10362.273,
1224
+ "eval_smape": 122.37988710403442,
1225
+ "eval_steps_per_second": 20.242,
1226
+ "step": 8000
1227
+ },
1228
+ {
1229
+ "epoch": 0.7665571585011665,
1230
+ "grad_norm": 0.47631314396858215,
1231
+ "learning_rate": 9.932809074444762e-05,
1232
+ "loss": 0.1594,
1233
+ "step": 8050
1234
+ },
1235
+ {
1236
+ "epoch": 0.7713183830881303,
1237
+ "grad_norm": 0.4288536310195923,
1238
+ "learning_rate": 9.932332475455152e-05,
1239
+ "loss": 0.1627,
1240
+ "step": 8100
1241
+ },
1242
+ {
1243
+ "epoch": 0.776079607675094,
1244
+ "grad_norm": 0.4548576772212982,
1245
+ "learning_rate": 9.931855876465542e-05,
1246
+ "loss": 0.1638,
1247
+ "step": 8150
1248
+ },
1249
+ {
1250
+ "epoch": 0.7808408322620578,
1251
+ "grad_norm": 0.5950626730918884,
1252
+ "learning_rate": 9.931379277475932e-05,
1253
+ "loss": 0.1646,
1254
+ "step": 8200
1255
+ },
1256
+ {
1257
+ "epoch": 0.7856020568490216,
1258
+ "grad_norm": 0.5772454738616943,
1259
+ "learning_rate": 9.930902678486322e-05,
1260
+ "loss": 0.1629,
1261
+ "step": 8250
1262
+ },
1263
+ {
1264
+ "epoch": 0.7903632814359853,
1265
+ "grad_norm": 0.5833305716514587,
1266
+ "learning_rate": 9.930426079496712e-05,
1267
+ "loss": 0.1635,
1268
+ "step": 8300
1269
+ },
1270
+ {
1271
+ "epoch": 0.7951245060229492,
1272
+ "grad_norm": 0.4767976701259613,
1273
+ "learning_rate": 9.929949480507102e-05,
1274
+ "loss": 0.1621,
1275
+ "step": 8350
1276
+ },
1277
+ {
1278
+ "epoch": 0.7998857306099129,
1279
+ "grad_norm": 0.586681604385376,
1280
+ "learning_rate": 9.929472881517492e-05,
1281
+ "loss": 0.1633,
1282
+ "step": 8400
1283
+ },
1284
+ {
1285
+ "epoch": 0.8046469551968767,
1286
+ "grad_norm": 0.46445733308792114,
1287
+ "learning_rate": 9.928996282527882e-05,
1288
+ "loss": 0.1621,
1289
+ "step": 8450
1290
+ },
1291
+ {
1292
+ "epoch": 0.8094081797838404,
1293
+ "grad_norm": 0.4659370183944702,
1294
+ "learning_rate": 9.92851968353827e-05,
1295
+ "loss": 0.1644,
1296
+ "step": 8500
1297
+ },
1298
+ {
1299
+ "epoch": 0.8141694043708042,
1300
+ "grad_norm": 0.48823997378349304,
1301
+ "learning_rate": 9.928043084548662e-05,
1302
+ "loss": 0.1654,
1303
+ "step": 8550
1304
+ },
1305
+ {
1306
+ "epoch": 0.8189306289577679,
1307
+ "grad_norm": 0.5804855823516846,
1308
+ "learning_rate": 9.92756648555905e-05,
1309
+ "loss": 0.1624,
1310
+ "step": 8600
1311
+ },
1312
+ {
1313
+ "epoch": 0.8236918535447317,
1314
+ "grad_norm": 0.4181581139564514,
1315
+ "learning_rate": 9.92708988656944e-05,
1316
+ "loss": 0.1593,
1317
+ "step": 8650
1318
+ },
1319
+ {
1320
+ "epoch": 0.8284530781316954,
1321
+ "grad_norm": 0.6322731971740723,
1322
+ "learning_rate": 9.926613287579832e-05,
1323
+ "loss": 0.1628,
1324
+ "step": 8700
1325
+ },
1326
+ {
1327
+ "epoch": 0.8332143027186593,
1328
+ "grad_norm": 0.39184707403182983,
1329
+ "learning_rate": 9.92613668859022e-05,
1330
+ "loss": 0.1639,
1331
+ "step": 8750
1332
+ },
1333
+ {
1334
+ "epoch": 0.837975527305623,
1335
+ "grad_norm": 0.5011768341064453,
1336
+ "learning_rate": 9.92566008960061e-05,
1337
+ "loss": 0.1634,
1338
+ "step": 8800
1339
+ },
1340
+ {
1341
+ "epoch": 0.8427367518925868,
1342
+ "grad_norm": 0.47292882204055786,
1343
+ "learning_rate": 9.925183490611e-05,
1344
+ "loss": 0.1609,
1345
+ "step": 8850
1346
+ },
1347
+ {
1348
+ "epoch": 0.8474979764795505,
1349
+ "grad_norm": 0.5086949467658997,
1350
+ "learning_rate": 9.92470689162139e-05,
1351
+ "loss": 0.1616,
1352
+ "step": 8900
1353
+ },
1354
+ {
1355
+ "epoch": 0.8522592010665143,
1356
+ "grad_norm": 0.4720959961414337,
1357
+ "learning_rate": 9.92423029263178e-05,
1358
+ "loss": 0.1611,
1359
+ "step": 8950
1360
+ },
1361
+ {
1362
+ "epoch": 0.857020425653478,
1363
+ "grad_norm": 0.8099896311759949,
1364
+ "learning_rate": 9.92375369364217e-05,
1365
+ "loss": 0.1609,
1366
+ "step": 9000
1367
+ },
1368
+ {
1369
+ "epoch": 0.857020425653478,
1370
+ "eval_loss": 0.16015625,
1371
+ "eval_mae": 0.6603007912635803,
1372
+ "eval_mse": 335.3426818847656,
1373
+ "eval_rmse": 18.31236418065034,
1374
+ "eval_runtime": 56.2142,
1375
+ "eval_samples_per_second": 10627.482,
1376
+ "eval_smape": 109.38767194747925,
1377
+ "eval_steps_per_second": 20.76,
1378
+ "step": 9000
1379
+ },
1380
+ {
1381
+ "epoch": 0.8617816502404418,
1382
+ "grad_norm": 0.5228590369224548,
1383
+ "learning_rate": 9.92327709465256e-05,
1384
+ "loss": 0.1617,
1385
+ "step": 9050
1386
+ },
1387
+ {
1388
+ "epoch": 0.8665428748274057,
1389
+ "grad_norm": 0.5515425205230713,
1390
+ "learning_rate": 9.92280049566295e-05,
1391
+ "loss": 0.1612,
1392
+ "step": 9100
1393
+ },
1394
+ {
1395
+ "epoch": 0.8713040994143694,
1396
+ "grad_norm": 0.5289241075515747,
1397
+ "learning_rate": 9.92232389667334e-05,
1398
+ "loss": 0.1646,
1399
+ "step": 9150
1400
+ },
1401
+ {
1402
+ "epoch": 0.8760653240013332,
1403
+ "grad_norm": 0.5692815780639648,
1404
+ "learning_rate": 9.92184729768373e-05,
1405
+ "loss": 0.1603,
1406
+ "step": 9200
1407
+ },
1408
+ {
1409
+ "epoch": 0.8808265485882969,
1410
+ "grad_norm": 0.41486117243766785,
1411
+ "learning_rate": 9.921370698694119e-05,
1412
+ "loss": 0.1632,
1413
+ "step": 9250
1414
+ },
1415
+ {
1416
+ "epoch": 0.8855877731752607,
1417
+ "grad_norm": 0.488235741853714,
1418
+ "learning_rate": 9.920894099704509e-05,
1419
+ "loss": 0.1613,
1420
+ "step": 9300
1421
+ },
1422
+ {
1423
+ "epoch": 0.8903489977622244,
1424
+ "grad_norm": 0.6576530337333679,
1425
+ "learning_rate": 9.9204175007149e-05,
1426
+ "loss": 0.1618,
1427
+ "step": 9350
1428
+ },
1429
+ {
1430
+ "epoch": 0.8951102223491882,
1431
+ "grad_norm": 0.49431854486465454,
1432
+ "learning_rate": 9.919940901725288e-05,
1433
+ "loss": 0.1618,
1434
+ "step": 9400
1435
+ },
1436
+ {
1437
+ "epoch": 0.899871446936152,
1438
+ "grad_norm": 0.5491801500320435,
1439
+ "learning_rate": 9.919464302735678e-05,
1440
+ "loss": 0.162,
1441
+ "step": 9450
1442
+ },
1443
+ {
1444
+ "epoch": 0.9046326715231158,
1445
+ "grad_norm": 0.5839897990226746,
1446
+ "learning_rate": 9.918987703746068e-05,
1447
+ "loss": 0.1587,
1448
+ "step": 9500
1449
+ },
1450
+ {
1451
+ "epoch": 0.9093938961100795,
1452
+ "grad_norm": 0.5631112456321716,
1453
+ "learning_rate": 9.918511104756458e-05,
1454
+ "loss": 0.1607,
1455
+ "step": 9550
1456
+ },
1457
+ {
1458
+ "epoch": 0.9141551206970433,
1459
+ "grad_norm": 0.5420098900794983,
1460
+ "learning_rate": 9.918034505766848e-05,
1461
+ "loss": 0.1642,
1462
+ "step": 9600
1463
+ },
1464
+ {
1465
+ "epoch": 0.918916345284007,
1466
+ "grad_norm": 0.568087637424469,
1467
+ "learning_rate": 9.917557906777238e-05,
1468
+ "loss": 0.1624,
1469
+ "step": 9650
1470
+ },
1471
+ {
1472
+ "epoch": 0.9236775698709708,
1473
+ "grad_norm": 0.5823555588722229,
1474
+ "learning_rate": 9.917081307787628e-05,
1475
+ "loss": 0.1639,
1476
+ "step": 9700
1477
+ },
1478
+ {
1479
+ "epoch": 0.9284387944579345,
1480
+ "grad_norm": 0.5538271069526672,
1481
+ "learning_rate": 9.916604708798017e-05,
1482
+ "loss": 0.1625,
1483
+ "step": 9750
1484
+ },
1485
+ {
1486
+ "epoch": 0.9332000190448984,
1487
+ "grad_norm": 0.5160115957260132,
1488
+ "learning_rate": 9.916128109808408e-05,
1489
+ "loss": 0.1582,
1490
+ "step": 9800
1491
+ },
1492
+ {
1493
+ "epoch": 0.9379612436318621,
1494
+ "grad_norm": 0.48957574367523193,
1495
+ "learning_rate": 9.915651510818798e-05,
1496
+ "loss": 0.16,
1497
+ "step": 9850
1498
+ },
1499
+ {
1500
+ "epoch": 0.9427224682188259,
1501
+ "grad_norm": 0.601917564868927,
1502
+ "learning_rate": 9.915174911829187e-05,
1503
+ "loss": 0.162,
1504
+ "step": 9900
1505
+ },
1506
+ {
1507
+ "epoch": 0.9474836928057897,
1508
+ "grad_norm": 0.40893155336380005,
1509
+ "learning_rate": 9.914698312839578e-05,
1510
+ "loss": 0.165,
1511
+ "step": 9950
1512
+ },
1513
+ {
1514
+ "epoch": 0.9522449173927534,
1515
+ "grad_norm": 0.5392901301383972,
1516
+ "learning_rate": 9.914221713849967e-05,
1517
+ "loss": 0.1618,
1518
+ "step": 10000
1519
+ },
1520
+ {
1521
+ "epoch": 0.9522449173927534,
1522
+ "eval_loss": 0.15920588374137878,
1523
+ "eval_mae": 0.6688477993011475,
1524
+ "eval_mse": 318.14923095703125,
1525
+ "eval_rmse": 17.83673823761035,
1526
+ "eval_runtime": 59.3096,
1527
+ "eval_samples_per_second": 10072.824,
1528
+ "eval_smape": 76.33218169212341,
1529
+ "eval_steps_per_second": 19.676,
1530
+ "step": 10000
1531
+ },
1532
+ {
1533
+ "epoch": 0.9570061419797172,
1534
+ "grad_norm": 0.600141704082489,
1535
+ "learning_rate": 9.913745114860357e-05,
1536
+ "loss": 0.1596,
1537
+ "step": 10050
1538
+ },
1539
+ {
1540
+ "epoch": 0.9617673665666809,
1541
+ "grad_norm": 0.49183207750320435,
1542
+ "learning_rate": 9.913268515870748e-05,
1543
+ "loss": 0.1636,
1544
+ "step": 10100
1545
+ },
1546
+ {
1547
+ "epoch": 0.9665285911536448,
1548
+ "grad_norm": 0.4760478138923645,
1549
+ "learning_rate": 9.912791916881136e-05,
1550
+ "loss": 0.1584,
1551
+ "step": 10150
1552
+ },
1553
+ {
1554
+ "epoch": 0.9712898157406085,
1555
+ "grad_norm": 0.4729287624359131,
1556
+ "learning_rate": 9.912315317891526e-05,
1557
+ "loss": 0.1617,
1558
+ "step": 10200
1559
+ },
1560
+ {
1561
+ "epoch": 0.9760510403275723,
1562
+ "grad_norm": 0.578062117099762,
1563
+ "learning_rate": 9.911838718901916e-05,
1564
+ "loss": 0.1606,
1565
+ "step": 10250
1566
+ },
1567
+ {
1568
+ "epoch": 0.980812264914536,
1569
+ "grad_norm": 0.43438589572906494,
1570
+ "learning_rate": 9.911362119912306e-05,
1571
+ "loss": 0.1617,
1572
+ "step": 10300
1573
+ },
1574
+ {
1575
+ "epoch": 0.9855734895014998,
1576
+ "grad_norm": 0.5887606143951416,
1577
+ "learning_rate": 9.910885520922696e-05,
1578
+ "loss": 0.1616,
1579
+ "step": 10350
1580
+ },
1581
+ {
1582
+ "epoch": 0.9903347140884635,
1583
+ "grad_norm": 0.4835382401943207,
1584
+ "learning_rate": 9.910408921933086e-05,
1585
+ "loss": 0.16,
1586
+ "step": 10400
1587
+ },
1588
+ {
1589
+ "epoch": 0.9950959386754273,
1590
+ "grad_norm": 0.4186175763607025,
1591
+ "learning_rate": 9.909932322943476e-05,
1592
+ "loss": 0.1636,
1593
+ "step": 10450
1594
+ },
1595
+ {
1596
+ "epoch": 0.999857163262391,
1597
+ "grad_norm": 0.6031454205513,
1598
+ "learning_rate": 9.909455723953865e-05,
1599
+ "loss": 0.16,
1600
+ "step": 10500
1601
+ },
1602
+ {
1603
+ "epoch": 1.0045707756034852,
1604
+ "grad_norm": 0.444181352853775,
1605
+ "learning_rate": 9.908979124964256e-05,
1606
+ "loss": 0.1588,
1607
+ "step": 10550
1608
+ },
1609
+ {
1610
+ "epoch": 1.0093320001904489,
1611
+ "grad_norm": 0.474399596452713,
1612
+ "learning_rate": 9.908502525974646e-05,
1613
+ "loss": 0.1651,
1614
+ "step": 10600
1615
+ },
1616
+ {
1617
+ "epoch": 1.0140932247774128,
1618
+ "grad_norm": 0.4467703700065613,
1619
+ "learning_rate": 9.908025926985035e-05,
1620
+ "loss": 0.1598,
1621
+ "step": 10650
1622
+ },
1623
+ {
1624
+ "epoch": 1.0188544493643765,
1625
+ "grad_norm": 0.5107735395431519,
1626
+ "learning_rate": 9.907549327995426e-05,
1627
+ "loss": 0.1609,
1628
+ "step": 10700
1629
+ },
1630
+ {
1631
+ "epoch": 1.0236156739513402,
1632
+ "grad_norm": 0.4578382968902588,
1633
+ "learning_rate": 9.907072729005815e-05,
1634
+ "loss": 0.1614,
1635
+ "step": 10750
1636
+ },
1637
+ {
1638
+ "epoch": 1.028376898538304,
1639
+ "grad_norm": 0.4924687445163727,
1640
+ "learning_rate": 9.906596130016205e-05,
1641
+ "loss": 0.1594,
1642
+ "step": 10800
1643
+ },
1644
+ {
1645
+ "epoch": 1.0331381231252679,
1646
+ "grad_norm": 0.4967709183692932,
1647
+ "learning_rate": 9.906119531026595e-05,
1648
+ "loss": 0.1614,
1649
+ "step": 10850
1650
+ },
1651
+ {
1652
+ "epoch": 1.0378993477122316,
1653
+ "grad_norm": 0.5059126615524292,
1654
+ "learning_rate": 9.905642932036984e-05,
1655
+ "loss": 0.1622,
1656
+ "step": 10900
1657
+ },
1658
+ {
1659
+ "epoch": 1.0426605722991953,
1660
+ "grad_norm": 0.5229778289794922,
1661
+ "learning_rate": 9.905166333047374e-05,
1662
+ "loss": 0.1592,
1663
+ "step": 10950
1664
+ },
1665
+ {
1666
+ "epoch": 1.0474217968861592,
1667
+ "grad_norm": 0.45143234729766846,
1668
+ "learning_rate": 9.904689734057764e-05,
1669
+ "loss": 0.1588,
1670
+ "step": 11000
1671
+ },
1672
+ {
1673
+ "epoch": 1.0474217968861592,
1674
+ "eval_loss": 0.15856099128723145,
1675
+ "eval_mae": 0.6628284454345703,
1676
+ "eval_mse": 345.36749267578125,
1677
+ "eval_rmse": 18.584065558315846,
1678
+ "eval_runtime": 57.9218,
1679
+ "eval_samples_per_second": 10314.166,
1680
+ "eval_smape": 94.50321197509766,
1681
+ "eval_steps_per_second": 20.148,
1682
+ "step": 11000
1683
+ },
1684
+ {
1685
+ "epoch": 1.052183021473123,
1686
+ "grad_norm": 0.7248756885528564,
1687
+ "learning_rate": 9.904213135068154e-05,
1688
+ "loss": 0.1614,
1689
+ "step": 11050
1690
+ },
1691
+ {
1692
+ "epoch": 1.0569442460600866,
1693
+ "grad_norm": 0.5051783323287964,
1694
+ "learning_rate": 9.903736536078544e-05,
1695
+ "loss": 0.1567,
1696
+ "step": 11100
1697
+ },
1698
+ {
1699
+ "epoch": 1.0617054706470503,
1700
+ "grad_norm": 0.5285612940788269,
1701
+ "learning_rate": 9.903259937088933e-05,
1702
+ "loss": 0.1613,
1703
+ "step": 11150
1704
+ },
1705
+ {
1706
+ "epoch": 1.0664666952340143,
1707
+ "grad_norm": 0.5270511507987976,
1708
+ "learning_rate": 9.902783338099324e-05,
1709
+ "loss": 0.1615,
1710
+ "step": 11200
1711
+ },
1712
+ {
1713
+ "epoch": 1.071227919820978,
1714
+ "grad_norm": 0.5635538101196289,
1715
+ "learning_rate": 9.902306739109714e-05,
1716
+ "loss": 0.1606,
1717
+ "step": 11250
1718
+ },
1719
+ {
1720
+ "epoch": 1.0759891444079417,
1721
+ "grad_norm": 0.5780921578407288,
1722
+ "learning_rate": 9.901830140120103e-05,
1723
+ "loss": 0.1619,
1724
+ "step": 11300
1725
+ },
1726
+ {
1727
+ "epoch": 1.0807503689949054,
1728
+ "grad_norm": 0.5149776935577393,
1729
+ "learning_rate": 9.901353541130494e-05,
1730
+ "loss": 0.1638,
1731
+ "step": 11350
1732
+ },
1733
+ {
1734
+ "epoch": 1.0855115935818693,
1735
+ "grad_norm": 0.46075335144996643,
1736
+ "learning_rate": 9.900876942140883e-05,
1737
+ "loss": 0.1597,
1738
+ "step": 11400
1739
+ },
1740
+ {
1741
+ "epoch": 1.090272818168833,
1742
+ "grad_norm": 0.42272669076919556,
1743
+ "learning_rate": 9.900400343151273e-05,
1744
+ "loss": 0.16,
1745
+ "step": 11450
1746
+ },
1747
+ {
1748
+ "epoch": 1.0950340427557967,
1749
+ "grad_norm": 0.5383277535438538,
1750
+ "learning_rate": 9.899923744161663e-05,
1751
+ "loss": 0.159,
1752
+ "step": 11500
1753
+ },
1754
+ {
1755
+ "epoch": 1.0997952673427607,
1756
+ "grad_norm": 0.45635008811950684,
1757
+ "learning_rate": 9.899447145172053e-05,
1758
+ "loss": 0.1596,
1759
+ "step": 11550
1760
+ },
1761
+ {
1762
+ "epoch": 1.1045564919297244,
1763
+ "grad_norm": 0.4863174259662628,
1764
+ "learning_rate": 9.898970546182443e-05,
1765
+ "loss": 0.1613,
1766
+ "step": 11600
1767
+ },
1768
+ {
1769
+ "epoch": 1.109317716516688,
1770
+ "grad_norm": 0.5197418332099915,
1771
+ "learning_rate": 9.898493947192832e-05,
1772
+ "loss": 0.1601,
1773
+ "step": 11650
1774
+ },
1775
+ {
1776
+ "epoch": 1.1140789411036518,
1777
+ "grad_norm": 0.6299956440925598,
1778
+ "learning_rate": 9.898017348203222e-05,
1779
+ "loss": 0.1621,
1780
+ "step": 11700
1781
+ },
1782
+ {
1783
+ "epoch": 1.1188401656906157,
1784
+ "grad_norm": 0.47120076417922974,
1785
+ "learning_rate": 9.897540749213612e-05,
1786
+ "loss": 0.1624,
1787
+ "step": 11750
1788
+ },
1789
+ {
1790
+ "epoch": 1.1236013902775794,
1791
+ "grad_norm": 0.4576870799064636,
1792
+ "learning_rate": 9.897064150224002e-05,
1793
+ "loss": 0.1629,
1794
+ "step": 11800
1795
+ },
1796
+ {
1797
+ "epoch": 1.1283626148645431,
1798
+ "grad_norm": 0.531906008720398,
1799
+ "learning_rate": 9.896587551234392e-05,
1800
+ "loss": 0.1593,
1801
+ "step": 11850
1802
+ },
1803
+ {
1804
+ "epoch": 1.1331238394515069,
1805
+ "grad_norm": 0.4709097743034363,
1806
+ "learning_rate": 9.896110952244781e-05,
1807
+ "loss": 0.1613,
1808
+ "step": 11900
1809
+ },
1810
+ {
1811
+ "epoch": 1.1378850640384708,
1812
+ "grad_norm": 0.5573967695236206,
1813
+ "learning_rate": 9.895634353255172e-05,
1814
+ "loss": 0.1623,
1815
+ "step": 11950
1816
+ },
1817
+ {
1818
+ "epoch": 1.1426462886254345,
1819
+ "grad_norm": 0.5798735022544861,
1820
+ "learning_rate": 9.895157754265562e-05,
1821
+ "loss": 0.1601,
1822
+ "step": 12000
1823
+ },
1824
+ {
1825
+ "epoch": 1.1426462886254345,
1826
+ "eval_loss": 0.15796181559562683,
1827
+ "eval_mae": 0.65403813123703,
1828
+ "eval_mse": 326.8865051269531,
1829
+ "eval_rmse": 18.0800029072717,
1830
+ "eval_runtime": 56.7165,
1831
+ "eval_samples_per_second": 10533.356,
1832
+ "eval_smape": 81.25044703483582,
1833
+ "eval_steps_per_second": 20.576,
1834
+ "step": 12000
1835
+ },
1836
+ {
1837
+ "epoch": 1.1474075132123982,
1838
+ "grad_norm": 0.5247554183006287,
1839
+ "learning_rate": 9.894681155275951e-05,
1840
+ "loss": 0.1574,
1841
+ "step": 12050
1842
+ },
1843
+ {
1844
+ "epoch": 1.152168737799362,
1845
+ "grad_norm": 0.6056792736053467,
1846
+ "learning_rate": 9.894204556286342e-05,
1847
+ "loss": 0.1583,
1848
+ "step": 12100
1849
+ },
1850
+ {
1851
+ "epoch": 1.1569299623863258,
1852
+ "grad_norm": 0.5114259123802185,
1853
+ "learning_rate": 9.893727957296731e-05,
1854
+ "loss": 0.1641,
1855
+ "step": 12150
1856
+ },
1857
+ {
1858
+ "epoch": 1.1616911869732895,
1859
+ "grad_norm": 0.40764451026916504,
1860
+ "learning_rate": 9.89325135830712e-05,
1861
+ "loss": 0.1621,
1862
+ "step": 12200
1863
+ },
1864
+ {
1865
+ "epoch": 1.1664524115602533,
1866
+ "grad_norm": 0.5216367244720459,
1867
+ "learning_rate": 9.89277475931751e-05,
1868
+ "loss": 0.1597,
1869
+ "step": 12250
1870
+ },
1871
+ {
1872
+ "epoch": 1.171213636147217,
1873
+ "grad_norm": 0.48652514815330505,
1874
+ "learning_rate": 9.8922981603279e-05,
1875
+ "loss": 0.1605,
1876
+ "step": 12300
1877
+ },
1878
+ {
1879
+ "epoch": 1.175974860734181,
1880
+ "grad_norm": 0.46615609526634216,
1881
+ "learning_rate": 9.89182156133829e-05,
1882
+ "loss": 0.1571,
1883
+ "step": 12350
1884
+ },
1885
+ {
1886
+ "epoch": 1.1807360853211446,
1887
+ "grad_norm": 0.5413913130760193,
1888
+ "learning_rate": 9.891344962348679e-05,
1889
+ "loss": 0.1599,
1890
+ "step": 12400
1891
+ },
1892
+ {
1893
+ "epoch": 1.1854973099081083,
1894
+ "grad_norm": 0.42148470878601074,
1895
+ "learning_rate": 9.89086836335907e-05,
1896
+ "loss": 0.1605,
1897
+ "step": 12450
1898
+ },
1899
+ {
1900
+ "epoch": 1.1902585344950722,
1901
+ "grad_norm": 0.7329843044281006,
1902
+ "learning_rate": 9.89039176436946e-05,
1903
+ "loss": 0.1603,
1904
+ "step": 12500
1905
+ },
1906
+ {
1907
+ "epoch": 1.195019759082036,
1908
+ "grad_norm": 0.42273497581481934,
1909
+ "learning_rate": 9.889915165379849e-05,
1910
+ "loss": 0.1617,
1911
+ "step": 12550
1912
+ },
1913
+ {
1914
+ "epoch": 1.1997809836689997,
1915
+ "grad_norm": 0.5869929790496826,
1916
+ "learning_rate": 9.88943856639024e-05,
1917
+ "loss": 0.1612,
1918
+ "step": 12600
1919
+ },
1920
+ {
1921
+ "epoch": 1.2045422082559634,
1922
+ "grad_norm": 0.4401240944862366,
1923
+ "learning_rate": 9.888961967400629e-05,
1924
+ "loss": 0.1577,
1925
+ "step": 12650
1926
+ },
1927
+ {
1928
+ "epoch": 1.2093034328429273,
1929
+ "grad_norm": 0.48275691270828247,
1930
+ "learning_rate": 9.888485368411019e-05,
1931
+ "loss": 0.1595,
1932
+ "step": 12700
1933
+ },
1934
+ {
1935
+ "epoch": 1.214064657429891,
1936
+ "grad_norm": 0.5482587218284607,
1937
+ "learning_rate": 9.88800876942141e-05,
1938
+ "loss": 0.16,
1939
+ "step": 12750
1940
+ },
1941
+ {
1942
+ "epoch": 1.2188258820168547,
1943
+ "grad_norm": 0.6347602009773254,
1944
+ "learning_rate": 9.887532170431799e-05,
1945
+ "loss": 0.1596,
1946
+ "step": 12800
1947
+ },
1948
+ {
1949
+ "epoch": 1.2235871066038184,
1950
+ "grad_norm": 0.4231024384498596,
1951
+ "learning_rate": 9.887055571442189e-05,
1952
+ "loss": 0.1624,
1953
+ "step": 12850
1954
+ },
1955
+ {
1956
+ "epoch": 1.2283483311907823,
1957
+ "grad_norm": 0.5505658984184265,
1958
+ "learning_rate": 9.886578972452579e-05,
1959
+ "loss": 0.1582,
1960
+ "step": 12900
1961
+ },
1962
+ {
1963
+ "epoch": 1.233109555777746,
1964
+ "grad_norm": 0.4918171763420105,
1965
+ "learning_rate": 9.886102373462969e-05,
1966
+ "loss": 0.1577,
1967
+ "step": 12950
1968
+ },
1969
+ {
1970
+ "epoch": 1.2378707803647098,
1971
+ "grad_norm": 0.45393800735473633,
1972
+ "learning_rate": 9.885625774473359e-05,
1973
+ "loss": 0.1585,
1974
+ "step": 13000
1975
+ },
1976
+ {
1977
+ "epoch": 1.2378707803647098,
1978
+ "eval_loss": 0.15753023326396942,
1979
+ "eval_mae": 0.6532484889030457,
1980
+ "eval_mse": 279.79644775390625,
1981
+ "eval_rmse": 16.72711713816539,
1982
+ "eval_runtime": 57.893,
1983
+ "eval_samples_per_second": 10319.3,
1984
+ "eval_smape": 107.6181173324585,
1985
+ "eval_steps_per_second": 20.158,
1986
+ "step": 13000
1987
+ },
1988
+ {
1989
+ "epoch": 1.2426320049516735,
1990
+ "grad_norm": 0.3777833878993988,
1991
+ "learning_rate": 9.885149175483749e-05,
1992
+ "loss": 0.1577,
1993
+ "step": 13050
1994
+ },
1995
+ {
1996
+ "epoch": 1.2473932295386374,
1997
+ "grad_norm": 0.5700230002403259,
1998
+ "learning_rate": 9.884672576494139e-05,
1999
+ "loss": 0.1587,
2000
+ "step": 13100
2001
+ },
2002
+ {
2003
+ "epoch": 1.2521544541256011,
2004
+ "grad_norm": 0.4470745027065277,
2005
+ "learning_rate": 9.884195977504529e-05,
2006
+ "loss": 0.1604,
2007
+ "step": 13150
2008
+ },
2009
+ {
2010
+ "epoch": 1.2569156787125648,
2011
+ "grad_norm": 0.4185906648635864,
2012
+ "learning_rate": 9.883719378514918e-05,
2013
+ "loss": 0.1562,
2014
+ "step": 13200
2015
+ },
2016
+ {
2017
+ "epoch": 1.2616769032995285,
2018
+ "grad_norm": 0.3835722804069519,
2019
+ "learning_rate": 9.883242779525308e-05,
2020
+ "loss": 0.1583,
2021
+ "step": 13250
2022
+ },
2023
+ {
2024
+ "epoch": 1.2664381278864925,
2025
+ "grad_norm": 0.45621258020401,
2026
+ "learning_rate": 9.882766180535697e-05,
2027
+ "loss": 0.1577,
2028
+ "step": 13300
2029
+ },
2030
+ {
2031
+ "epoch": 1.2711993524734562,
2032
+ "grad_norm": 0.5590381622314453,
2033
+ "learning_rate": 9.882289581546088e-05,
2034
+ "loss": 0.1608,
2035
+ "step": 13350
2036
+ },
2037
+ {
2038
+ "epoch": 1.2759605770604199,
2039
+ "grad_norm": 0.5501840114593506,
2040
+ "learning_rate": 9.881812982556477e-05,
2041
+ "loss": 0.1623,
2042
+ "step": 13400
2043
+ },
2044
+ {
2045
+ "epoch": 1.2807218016473838,
2046
+ "grad_norm": 0.4480155408382416,
2047
+ "learning_rate": 9.881336383566867e-05,
2048
+ "loss": 0.1566,
2049
+ "step": 13450
2050
+ },
2051
+ {
2052
+ "epoch": 1.2854830262343475,
2053
+ "grad_norm": 0.5472989678382874,
2054
+ "learning_rate": 9.880859784577258e-05,
2055
+ "loss": 0.1613,
2056
+ "step": 13500
2057
+ },
2058
+ {
2059
+ "epoch": 1.2902442508213112,
2060
+ "grad_norm": 0.48176899552345276,
2061
+ "learning_rate": 9.880383185587647e-05,
2062
+ "loss": 0.1614,
2063
+ "step": 13550
2064
+ },
2065
+ {
2066
+ "epoch": 1.295005475408275,
2067
+ "grad_norm": 0.40140554308891296,
2068
+ "learning_rate": 9.879906586598037e-05,
2069
+ "loss": 0.1586,
2070
+ "step": 13600
2071
+ },
2072
+ {
2073
+ "epoch": 1.2997666999952386,
2074
+ "grad_norm": 0.5117682218551636,
2075
+ "learning_rate": 9.879429987608427e-05,
2076
+ "loss": 0.1602,
2077
+ "step": 13650
2078
+ },
2079
+ {
2080
+ "epoch": 1.3045279245822026,
2081
+ "grad_norm": 0.7169548273086548,
2082
+ "learning_rate": 9.878953388618817e-05,
2083
+ "loss": 0.1623,
2084
+ "step": 13700
2085
+ },
2086
+ {
2087
+ "epoch": 1.3092891491691663,
2088
+ "grad_norm": 0.44949373602867126,
2089
+ "learning_rate": 9.878476789629207e-05,
2090
+ "loss": 0.1601,
2091
+ "step": 13750
2092
+ },
2093
+ {
2094
+ "epoch": 1.31405037375613,
2095
+ "grad_norm": 0.597175121307373,
2096
+ "learning_rate": 9.878000190639595e-05,
2097
+ "loss": 0.1606,
2098
+ "step": 13800
2099
+ },
2100
+ {
2101
+ "epoch": 1.318811598343094,
2102
+ "grad_norm": 0.553895115852356,
2103
+ "learning_rate": 9.877523591649987e-05,
2104
+ "loss": 0.1617,
2105
+ "step": 13850
2106
+ },
2107
+ {
2108
+ "epoch": 1.3235728229300576,
2109
+ "grad_norm": 0.5089927315711975,
2110
+ "learning_rate": 9.877046992660377e-05,
2111
+ "loss": 0.1595,
2112
+ "step": 13900
2113
+ },
2114
+ {
2115
+ "epoch": 1.3283340475170213,
2116
+ "grad_norm": 0.430867463350296,
2117
+ "learning_rate": 9.876570393670765e-05,
2118
+ "loss": 0.1582,
2119
+ "step": 13950
2120
+ },
2121
+ {
2122
+ "epoch": 1.3330952721039853,
2123
+ "grad_norm": 0.39970675110816956,
2124
+ "learning_rate": 9.876093794681156e-05,
2125
+ "loss": 0.1567,
2126
+ "step": 14000
2127
+ },
2128
+ {
2129
+ "epoch": 1.3330952721039853,
2130
+ "eval_loss": 0.15753522515296936,
2131
+ "eval_mae": 0.6622462272644043,
2132
+ "eval_mse": 328.3490295410156,
2133
+ "eval_rmse": 18.120403680409982,
2134
+ "eval_runtime": 58.0545,
2135
+ "eval_samples_per_second": 10290.588,
2136
+ "eval_smape": 91.98985695838928,
2137
+ "eval_steps_per_second": 20.102,
2138
+ "step": 14000
2139
+ },
2140
+ {
2141
+ "epoch": 1.337856496690949,
2142
+ "grad_norm": 0.5808453559875488,
2143
+ "learning_rate": 9.875617195691545e-05,
2144
+ "loss": 0.1589,
2145
+ "step": 14050
2146
+ },
2147
+ {
2148
+ "epoch": 1.3426177212779127,
2149
+ "grad_norm": 0.4418608546257019,
2150
+ "learning_rate": 9.875140596701935e-05,
2151
+ "loss": 0.1584,
2152
+ "step": 14100
2153
+ },
2154
+ {
2155
+ "epoch": 1.3473789458648764,
2156
+ "grad_norm": 0.6623143553733826,
2157
+ "learning_rate": 9.874663997712325e-05,
2158
+ "loss": 0.1623,
2159
+ "step": 14150
2160
+ },
2161
+ {
2162
+ "epoch": 1.35214017045184,
2163
+ "grad_norm": 0.4194190204143524,
2164
+ "learning_rate": 9.874187398722715e-05,
2165
+ "loss": 0.1596,
2166
+ "step": 14200
2167
+ },
2168
+ {
2169
+ "epoch": 1.356901395038804,
2170
+ "grad_norm": 0.6208611130714417,
2171
+ "learning_rate": 9.873710799733105e-05,
2172
+ "loss": 0.1583,
2173
+ "step": 14250
2174
+ },
2175
+ {
2176
+ "epoch": 1.3616626196257677,
2177
+ "grad_norm": 0.4435657262802124,
2178
+ "learning_rate": 9.873234200743495e-05,
2179
+ "loss": 0.16,
2180
+ "step": 14300
2181
+ },
2182
+ {
2183
+ "epoch": 1.3664238442127314,
2184
+ "grad_norm": 0.4906177222728729,
2185
+ "learning_rate": 9.872757601753885e-05,
2186
+ "loss": 0.1581,
2187
+ "step": 14350
2188
+ },
2189
+ {
2190
+ "epoch": 1.3711850687996954,
2191
+ "grad_norm": 0.5340787172317505,
2192
+ "learning_rate": 9.872281002764275e-05,
2193
+ "loss": 0.1588,
2194
+ "step": 14400
2195
+ },
2196
+ {
2197
+ "epoch": 1.375946293386659,
2198
+ "grad_norm": 0.4307633340358734,
2199
+ "learning_rate": 9.871804403774665e-05,
2200
+ "loss": 0.1585,
2201
+ "step": 14450
2202
+ },
2203
+ {
2204
+ "epoch": 1.3807075179736228,
2205
+ "grad_norm": 0.49194300174713135,
2206
+ "learning_rate": 9.871327804785055e-05,
2207
+ "loss": 0.1572,
2208
+ "step": 14500
2209
+ },
2210
+ {
2211
+ "epoch": 1.3854687425605867,
2212
+ "grad_norm": 0.4391520917415619,
2213
+ "learning_rate": 9.870851205795443e-05,
2214
+ "loss": 0.1573,
2215
+ "step": 14550
2216
+ },
2217
+ {
2218
+ "epoch": 1.3902299671475504,
2219
+ "grad_norm": 0.4503444731235504,
2220
+ "learning_rate": 9.870374606805835e-05,
2221
+ "loss": 0.1577,
2222
+ "step": 14600
2223
+ },
2224
+ {
2225
+ "epoch": 1.3949911917345141,
2226
+ "grad_norm": 0.5113334655761719,
2227
+ "learning_rate": 9.869898007816225e-05,
2228
+ "loss": 0.1594,
2229
+ "step": 14650
2230
+ },
2231
+ {
2232
+ "epoch": 1.3997524163214778,
2233
+ "grad_norm": 0.3871005177497864,
2234
+ "learning_rate": 9.869421408826613e-05,
2235
+ "loss": 0.1559,
2236
+ "step": 14700
2237
+ },
2238
+ {
2239
+ "epoch": 1.4045136409084416,
2240
+ "grad_norm": 0.5482053756713867,
2241
+ "learning_rate": 9.868944809837004e-05,
2242
+ "loss": 0.1592,
2243
+ "step": 14750
2244
+ },
2245
+ {
2246
+ "epoch": 1.4092748654954055,
2247
+ "grad_norm": 0.45771437883377075,
2248
+ "learning_rate": 9.868468210847393e-05,
2249
+ "loss": 0.1572,
2250
+ "step": 14800
2251
+ },
2252
+ {
2253
+ "epoch": 1.4140360900823692,
2254
+ "grad_norm": 0.4550696015357971,
2255
+ "learning_rate": 9.867991611857783e-05,
2256
+ "loss": 0.1605,
2257
+ "step": 14850
2258
+ },
2259
+ {
2260
+ "epoch": 1.418797314669333,
2261
+ "grad_norm": 0.6991235613822937,
2262
+ "learning_rate": 9.867515012868174e-05,
2263
+ "loss": 0.1589,
2264
+ "step": 14900
2265
+ },
2266
+ {
2267
+ "epoch": 1.4235585392562968,
2268
+ "grad_norm": 0.5531545877456665,
2269
+ "learning_rate": 9.867038413878563e-05,
2270
+ "loss": 0.1574,
2271
+ "step": 14950
2272
+ },
2273
+ {
2274
+ "epoch": 1.4283197638432605,
2275
+ "grad_norm": 0.4692751169204712,
2276
+ "learning_rate": 9.866561814888953e-05,
2277
+ "loss": 0.1592,
2278
+ "step": 15000
2279
+ },
2280
+ {
2281
+ "epoch": 1.4283197638432605,
2282
+ "eval_loss": 0.15674826502799988,
2283
+ "eval_mae": 0.6522776484489441,
2284
+ "eval_mse": 376.8973388671875,
2285
+ "eval_rmse": 19.41384400027948,
2286
+ "eval_runtime": 57.2088,
2287
+ "eval_samples_per_second": 10442.706,
2288
+ "eval_smape": 89.79519009590149,
2289
+ "eval_steps_per_second": 20.399,
2290
+ "step": 15000
2291
+ },
2292
+ {
2293
+ "epoch": 1.4330809884302242,
2294
+ "grad_norm": 0.41401219367980957,
2295
+ "learning_rate": 9.866085215899343e-05,
2296
+ "loss": 0.1594,
2297
+ "step": 15050
2298
+ },
2299
+ {
2300
+ "epoch": 1.437842213017188,
2301
+ "grad_norm": 0.6155771613121033,
2302
+ "learning_rate": 9.865608616909733e-05,
2303
+ "loss": 0.1597,
2304
+ "step": 15100
2305
+ },
2306
+ {
2307
+ "epoch": 1.4426034376041517,
2308
+ "grad_norm": 0.6229146718978882,
2309
+ "learning_rate": 9.865132017920123e-05,
2310
+ "loss": 0.1595,
2311
+ "step": 15150
2312
+ },
2313
+ {
2314
+ "epoch": 1.4473646621911156,
2315
+ "grad_norm": 0.5311764478683472,
2316
+ "learning_rate": 9.864655418930511e-05,
2317
+ "loss": 0.1613,
2318
+ "step": 15200
2319
+ },
2320
+ {
2321
+ "epoch": 1.4521258867780793,
2322
+ "grad_norm": 0.4075564742088318,
2323
+ "learning_rate": 9.864178819940903e-05,
2324
+ "loss": 0.1581,
2325
+ "step": 15250
2326
+ },
2327
+ {
2328
+ "epoch": 1.456887111365043,
2329
+ "grad_norm": 0.5737677216529846,
2330
+ "learning_rate": 9.863702220951291e-05,
2331
+ "loss": 0.1622,
2332
+ "step": 15300
2333
+ },
2334
+ {
2335
+ "epoch": 1.461648335952007,
2336
+ "grad_norm": 0.5977826118469238,
2337
+ "learning_rate": 9.863225621961681e-05,
2338
+ "loss": 0.1586,
2339
+ "step": 15350
2340
+ },
2341
+ {
2342
+ "epoch": 1.4664095605389706,
2343
+ "grad_norm": 0.4717673659324646,
2344
+ "learning_rate": 9.862749022972073e-05,
2345
+ "loss": 0.1566,
2346
+ "step": 15400
2347
+ },
2348
+ {
2349
+ "epoch": 1.4711707851259344,
2350
+ "grad_norm": 0.4783164858818054,
2351
+ "learning_rate": 9.862272423982461e-05,
2352
+ "loss": 0.162,
2353
+ "step": 15450
2354
+ },
2355
+ {
2356
+ "epoch": 1.4759320097128983,
2357
+ "grad_norm": 0.4709276258945465,
2358
+ "learning_rate": 9.861795824992851e-05,
2359
+ "loss": 0.1628,
2360
+ "step": 15500
2361
+ },
2362
+ {
2363
+ "epoch": 1.480693234299862,
2364
+ "grad_norm": 0.46055328845977783,
2365
+ "learning_rate": 9.861319226003241e-05,
2366
+ "loss": 0.1573,
2367
+ "step": 15550
2368
+ },
2369
+ {
2370
+ "epoch": 1.4854544588868257,
2371
+ "grad_norm": 0.4675583839416504,
2372
+ "learning_rate": 9.860842627013631e-05,
2373
+ "loss": 0.1612,
2374
+ "step": 15600
2375
+ },
2376
+ {
2377
+ "epoch": 1.4902156834737894,
2378
+ "grad_norm": 0.49638524651527405,
2379
+ "learning_rate": 9.860366028024021e-05,
2380
+ "loss": 0.1565,
2381
+ "step": 15650
2382
+ },
2383
+ {
2384
+ "epoch": 1.4949769080607531,
2385
+ "grad_norm": 0.4782148003578186,
2386
+ "learning_rate": 9.859889429034411e-05,
2387
+ "loss": 0.1559,
2388
+ "step": 15700
2389
+ },
2390
+ {
2391
+ "epoch": 1.499738132647717,
2392
+ "grad_norm": 0.39972543716430664,
2393
+ "learning_rate": 9.859412830044801e-05,
2394
+ "loss": 0.1597,
2395
+ "step": 15750
2396
+ },
2397
+ {
2398
+ "epoch": 1.5044993572346808,
2399
+ "grad_norm": 0.46148207783699036,
2400
+ "learning_rate": 9.858936231055191e-05,
2401
+ "loss": 0.165,
2402
+ "step": 15800
2403
+ },
2404
+ {
2405
+ "epoch": 1.5092605818216445,
2406
+ "grad_norm": 0.6166296005249023,
2407
+ "learning_rate": 9.858459632065581e-05,
2408
+ "loss": 0.1601,
2409
+ "step": 15850
2410
+ },
2411
+ {
2412
+ "epoch": 1.5140218064086084,
2413
+ "grad_norm": 0.6877084970474243,
2414
+ "learning_rate": 9.857983033075971e-05,
2415
+ "loss": 0.1622,
2416
+ "step": 15900
2417
+ },
2418
+ {
2419
+ "epoch": 1.518783030995572,
2420
+ "grad_norm": 0.4697044789791107,
2421
+ "learning_rate": 9.85750643408636e-05,
2422
+ "loss": 0.1591,
2423
+ "step": 15950
2424
+ },
2425
+ {
2426
+ "epoch": 1.5235442555825358,
2427
+ "grad_norm": 0.4562913179397583,
2428
+ "learning_rate": 9.857029835096751e-05,
2429
+ "loss": 0.16,
2430
+ "step": 16000
2431
+ },
2432
+ {
2433
+ "epoch": 1.5235442555825358,
2434
+ "eval_loss": 0.15759705007076263,
2435
+ "eval_mae": 0.6579864621162415,
2436
+ "eval_mse": 327.5270690917969,
2437
+ "eval_rmse": 18.097708945935583,
2438
+ "eval_runtime": 59.3074,
2439
+ "eval_samples_per_second": 10073.191,
2440
+ "eval_smape": 105.73155879974365,
2441
+ "eval_steps_per_second": 19.677,
2442
+ "step": 16000
2443
+ },
2444
+ {
2445
+ "epoch": 1.5283054801694997,
2446
+ "grad_norm": 0.4631439447402954,
2447
+ "learning_rate": 9.856553236107139e-05,
2448
+ "loss": 0.1596,
2449
+ "step": 16050
2450
+ },
2451
+ {
2452
+ "epoch": 1.5330667047564632,
2453
+ "grad_norm": 0.5179547667503357,
2454
+ "learning_rate": 9.856076637117529e-05,
2455
+ "loss": 0.161,
2456
+ "step": 16100
2457
+ },
2458
+ {
2459
+ "epoch": 1.5378279293434272,
2460
+ "grad_norm": 0.5846447348594666,
2461
+ "learning_rate": 9.85560003812792e-05,
2462
+ "loss": 0.157,
2463
+ "step": 16150
2464
+ },
2465
+ {
2466
+ "epoch": 1.5425891539303909,
2467
+ "grad_norm": 0.41618812084198,
2468
+ "learning_rate": 9.855123439138309e-05,
2469
+ "loss": 0.1604,
2470
+ "step": 16200
2471
+ },
2472
+ {
2473
+ "epoch": 1.5473503785173546,
2474
+ "grad_norm": 0.5047721862792969,
2475
+ "learning_rate": 9.854646840148699e-05,
2476
+ "loss": 0.1607,
2477
+ "step": 16250
2478
+ },
2479
+ {
2480
+ "epoch": 1.5521116031043185,
2481
+ "grad_norm": 0.5991541147232056,
2482
+ "learning_rate": 9.854170241159089e-05,
2483
+ "loss": 0.1596,
2484
+ "step": 16300
2485
+ },
2486
+ {
2487
+ "epoch": 1.5568728276912822,
2488
+ "grad_norm": 0.43497374653816223,
2489
+ "learning_rate": 9.853693642169479e-05,
2490
+ "loss": 0.1577,
2491
+ "step": 16350
2492
+ },
2493
+ {
2494
+ "epoch": 1.561634052278246,
2495
+ "grad_norm": 0.40854403376579285,
2496
+ "learning_rate": 9.853217043179869e-05,
2497
+ "loss": 0.1599,
2498
+ "step": 16400
2499
+ },
2500
+ {
2501
+ "epoch": 1.5663952768652099,
2502
+ "grad_norm": 0.4918048679828644,
2503
+ "learning_rate": 9.852740444190259e-05,
2504
+ "loss": 0.1603,
2505
+ "step": 16450
2506
+ },
2507
+ {
2508
+ "epoch": 1.5711565014521733,
2509
+ "grad_norm": 0.3920314908027649,
2510
+ "learning_rate": 9.852263845200649e-05,
2511
+ "loss": 0.1573,
2512
+ "step": 16500
2513
+ },
2514
+ {
2515
+ "epoch": 1.5759177260391373,
2516
+ "grad_norm": 0.3981036841869354,
2517
+ "learning_rate": 9.851787246211039e-05,
2518
+ "loss": 0.1586,
2519
+ "step": 16550
2520
+ },
2521
+ {
2522
+ "epoch": 1.5806789506261012,
2523
+ "grad_norm": 0.6724033355712891,
2524
+ "learning_rate": 9.851310647221427e-05,
2525
+ "loss": 0.1577,
2526
+ "step": 16600
2527
+ },
2528
+ {
2529
+ "epoch": 1.5854401752130647,
2530
+ "grad_norm": 0.45915940403938293,
2531
+ "learning_rate": 9.850834048231819e-05,
2532
+ "loss": 0.1634,
2533
+ "step": 16650
2534
+ },
2535
+ {
2536
+ "epoch": 1.5902013998000286,
2537
+ "grad_norm": 0.4456646740436554,
2538
+ "learning_rate": 9.850357449242207e-05,
2539
+ "loss": 0.1598,
2540
+ "step": 16700
2541
+ },
2542
+ {
2543
+ "epoch": 1.5949626243869923,
2544
+ "grad_norm": 0.4617702066898346,
2545
+ "learning_rate": 9.849880850252597e-05,
2546
+ "loss": 0.1588,
2547
+ "step": 16750
2548
+ },
2549
+ {
2550
+ "epoch": 1.599723848973956,
2551
+ "grad_norm": 0.4858147203922272,
2552
+ "learning_rate": 9.849404251262989e-05,
2553
+ "loss": 0.1603,
2554
+ "step": 16800
2555
+ },
2556
+ {
2557
+ "epoch": 1.60448507356092,
2558
+ "grad_norm": 0.5424185395240784,
2559
+ "learning_rate": 9.848927652273377e-05,
2560
+ "loss": 0.1588,
2561
+ "step": 16850
2562
+ },
2563
+ {
2564
+ "epoch": 1.6092462981478837,
2565
+ "grad_norm": 0.432170033454895,
2566
+ "learning_rate": 9.848451053283767e-05,
2567
+ "loss": 0.1593,
2568
+ "step": 16900
2569
+ },
2570
+ {
2571
+ "epoch": 1.6140075227348474,
2572
+ "grad_norm": 0.403414249420166,
2573
+ "learning_rate": 9.847974454294157e-05,
2574
+ "loss": 0.1564,
2575
+ "step": 16950
2576
+ },
2577
+ {
2578
+ "epoch": 1.6187687473218113,
2579
+ "grad_norm": 0.5686953067779541,
2580
+ "learning_rate": 9.847497855304547e-05,
2581
+ "loss": 0.1586,
2582
+ "step": 17000
2583
+ },
2584
+ {
2585
+ "epoch": 1.6187687473218113,
2586
+ "eval_loss": 0.15684476494789124,
2587
+ "eval_mae": 0.6601889133453369,
2588
+ "eval_mse": 399.5775146484375,
2589
+ "eval_rmse": 19.989435075770338,
2590
+ "eval_runtime": 60.8742,
2591
+ "eval_samples_per_second": 9813.93,
2592
+ "eval_smape": 88.6057436466217,
2593
+ "eval_steps_per_second": 19.171,
2594
+ "step": 17000
2595
+ },
2596
+ {
2597
+ "epoch": 1.6235299719087748,
2598
+ "grad_norm": 0.472042977809906,
2599
+ "learning_rate": 9.847021256314937e-05,
2600
+ "loss": 0.1585,
2601
+ "step": 17050
2602
+ },
2603
+ {
2604
+ "epoch": 1.6282911964957387,
2605
+ "grad_norm": 0.6247090697288513,
2606
+ "learning_rate": 9.846544657325327e-05,
2607
+ "loss": 0.1583,
2608
+ "step": 17100
2609
+ },
2610
+ {
2611
+ "epoch": 1.6330524210827024,
2612
+ "grad_norm": 0.4843044579029083,
2613
+ "learning_rate": 9.846068058335717e-05,
2614
+ "loss": 0.1592,
2615
+ "step": 17150
2616
+ },
2617
+ {
2618
+ "epoch": 1.6378136456696661,
2619
+ "grad_norm": 0.48187774419784546,
2620
+ "learning_rate": 9.845591459346106e-05,
2621
+ "loss": 0.1592,
2622
+ "step": 17200
2623
+ },
2624
+ {
2625
+ "epoch": 1.64257487025663,
2626
+ "grad_norm": 0.4763176441192627,
2627
+ "learning_rate": 9.845114860356497e-05,
2628
+ "loss": 0.1592,
2629
+ "step": 17250
2630
+ },
2631
+ {
2632
+ "epoch": 1.6473360948435938,
2633
+ "grad_norm": 0.5375500321388245,
2634
+ "learning_rate": 9.844638261366887e-05,
2635
+ "loss": 0.159,
2636
+ "step": 17300
2637
+ },
2638
+ {
2639
+ "epoch": 1.6520973194305575,
2640
+ "grad_norm": 0.4639647305011749,
2641
+ "learning_rate": 9.844161662377276e-05,
2642
+ "loss": 0.1578,
2643
+ "step": 17350
2644
+ },
2645
+ {
2646
+ "epoch": 1.6568585440175214,
2647
+ "grad_norm": 0.44177523255348206,
2648
+ "learning_rate": 9.843685063387667e-05,
2649
+ "loss": 0.1593,
2650
+ "step": 17400
2651
+ },
2652
+ {
2653
+ "epoch": 1.6616197686044851,
2654
+ "grad_norm": 0.5092636942863464,
2655
+ "learning_rate": 9.843208464398055e-05,
2656
+ "loss": 0.161,
2657
+ "step": 17450
2658
+ },
2659
+ {
2660
+ "epoch": 1.6663809931914488,
2661
+ "grad_norm": 0.4873998165130615,
2662
+ "learning_rate": 9.842731865408445e-05,
2663
+ "loss": 0.1584,
2664
+ "step": 17500
2665
+ },
2666
+ {
2667
+ "epoch": 1.6711422177784128,
2668
+ "grad_norm": 0.622734546661377,
2669
+ "learning_rate": 9.842255266418837e-05,
2670
+ "loss": 0.1597,
2671
+ "step": 17550
2672
+ },
2673
+ {
2674
+ "epoch": 1.6759034423653763,
2675
+ "grad_norm": 0.5134626030921936,
2676
+ "learning_rate": 9.841778667429225e-05,
2677
+ "loss": 0.1591,
2678
+ "step": 17600
2679
+ },
2680
+ {
2681
+ "epoch": 1.6806646669523402,
2682
+ "grad_norm": 0.42936229705810547,
2683
+ "learning_rate": 9.841302068439615e-05,
2684
+ "loss": 0.1575,
2685
+ "step": 17650
2686
+ },
2687
+ {
2688
+ "epoch": 1.6854258915393039,
2689
+ "grad_norm": 0.44326427578926086,
2690
+ "learning_rate": 9.840825469450005e-05,
2691
+ "loss": 0.1568,
2692
+ "step": 17700
2693
+ },
2694
+ {
2695
+ "epoch": 1.6901871161262676,
2696
+ "grad_norm": 0.3937668204307556,
2697
+ "learning_rate": 9.840348870460395e-05,
2698
+ "loss": 0.1593,
2699
+ "step": 17750
2700
+ },
2701
+ {
2702
+ "epoch": 1.6949483407132315,
2703
+ "grad_norm": 0.45583653450012207,
2704
+ "learning_rate": 9.839872271470785e-05,
2705
+ "loss": 0.157,
2706
+ "step": 17800
2707
+ },
2708
+ {
2709
+ "epoch": 1.6997095653001952,
2710
+ "grad_norm": 0.5357958078384399,
2711
+ "learning_rate": 9.839395672481175e-05,
2712
+ "loss": 0.1604,
2713
+ "step": 17850
2714
+ },
2715
+ {
2716
+ "epoch": 1.704470789887159,
2717
+ "grad_norm": 0.421678751707077,
2718
+ "learning_rate": 9.838919073491565e-05,
2719
+ "loss": 0.1598,
2720
+ "step": 17900
2721
+ },
2722
+ {
2723
+ "epoch": 1.7092320144741229,
2724
+ "grad_norm": 0.42326951026916504,
2725
+ "learning_rate": 9.838442474501954e-05,
2726
+ "loss": 0.157,
2727
+ "step": 17950
2728
+ },
2729
+ {
2730
+ "epoch": 1.7139932390610864,
2731
+ "grad_norm": 0.43803659081459045,
2732
+ "learning_rate": 9.837965875512344e-05,
2733
+ "loss": 0.1593,
2734
+ "step": 18000
2735
+ },
2736
+ {
2737
+ "epoch": 1.7139932390610864,
2738
+ "eval_loss": 0.15651458501815796,
2739
+ "eval_mae": 0.6603885889053345,
2740
+ "eval_mse": 359.5630187988281,
2741
+ "eval_rmse": 18.962146998660995,
2742
+ "eval_runtime": 60.8886,
2743
+ "eval_samples_per_second": 9811.606,
2744
+ "eval_smape": 325.50642490386963,
2745
+ "eval_steps_per_second": 19.166,
2746
+ "step": 18000
2747
+ },
2748
+ {
2749
+ "epoch": 1.7187544636480503,
2750
+ "grad_norm": 0.42728474736213684,
2751
+ "learning_rate": 9.837489276522735e-05,
2752
+ "loss": 0.1588,
2753
+ "step": 18050
2754
+ },
2755
+ {
2756
+ "epoch": 1.723515688235014,
2757
+ "grad_norm": 0.4929046928882599,
2758
+ "learning_rate": 9.837012677533124e-05,
2759
+ "loss": 0.1587,
2760
+ "step": 18100
2761
+ },
2762
+ {
2763
+ "epoch": 1.7282769128219777,
2764
+ "grad_norm": 0.47886890172958374,
2765
+ "learning_rate": 9.836536078543513e-05,
2766
+ "loss": 0.1572,
2767
+ "step": 18150
2768
+ },
2769
+ {
2770
+ "epoch": 1.7330381374089416,
2771
+ "grad_norm": 0.4829745590686798,
2772
+ "learning_rate": 9.836059479553903e-05,
2773
+ "loss": 0.1575,
2774
+ "step": 18200
2775
+ },
2776
+ {
2777
+ "epoch": 1.7377993619959053,
2778
+ "grad_norm": 0.4256702661514282,
2779
+ "learning_rate": 9.835582880564293e-05,
2780
+ "loss": 0.1575,
2781
+ "step": 18250
2782
+ },
2783
+ {
2784
+ "epoch": 1.742560586582869,
2785
+ "grad_norm": 0.4803942143917084,
2786
+ "learning_rate": 9.835106281574683e-05,
2787
+ "loss": 0.1559,
2788
+ "step": 18300
2789
+ },
2790
+ {
2791
+ "epoch": 1.747321811169833,
2792
+ "grad_norm": 0.5108392834663391,
2793
+ "learning_rate": 9.834629682585073e-05,
2794
+ "loss": 0.1558,
2795
+ "step": 18350
2796
+ },
2797
+ {
2798
+ "epoch": 1.7520830357567967,
2799
+ "grad_norm": 0.5541846752166748,
2800
+ "learning_rate": 9.834153083595463e-05,
2801
+ "loss": 0.1571,
2802
+ "step": 18400
2803
+ },
2804
+ {
2805
+ "epoch": 1.7568442603437604,
2806
+ "grad_norm": 0.3996049463748932,
2807
+ "learning_rate": 9.833676484605853e-05,
2808
+ "loss": 0.1588,
2809
+ "step": 18450
2810
+ },
2811
+ {
2812
+ "epoch": 1.7616054849307243,
2813
+ "grad_norm": 0.4311594069004059,
2814
+ "learning_rate": 9.833199885616243e-05,
2815
+ "loss": 0.1566,
2816
+ "step": 18500
2817
+ },
2818
+ {
2819
+ "epoch": 1.7663667095176878,
2820
+ "grad_norm": 0.45717188715934753,
2821
+ "learning_rate": 9.832723286626633e-05,
2822
+ "loss": 0.1586,
2823
+ "step": 18550
2824
+ },
2825
+ {
2826
+ "epoch": 1.7711279341046517,
2827
+ "grad_norm": 0.48945853114128113,
2828
+ "learning_rate": 9.832246687637022e-05,
2829
+ "loss": 0.1581,
2830
+ "step": 18600
2831
+ },
2832
+ {
2833
+ "epoch": 1.7758891586916155,
2834
+ "grad_norm": 0.4877380430698395,
2835
+ "learning_rate": 9.831770088647413e-05,
2836
+ "loss": 0.1594,
2837
+ "step": 18650
2838
+ },
2839
+ {
2840
+ "epoch": 1.7806503832785792,
2841
+ "grad_norm": 0.47437113523483276,
2842
+ "learning_rate": 9.831293489657803e-05,
2843
+ "loss": 0.1566,
2844
+ "step": 18700
2845
+ },
2846
+ {
2847
+ "epoch": 1.785411607865543,
2848
+ "grad_norm": 0.6995478272438049,
2849
+ "learning_rate": 9.830816890668192e-05,
2850
+ "loss": 0.1564,
2851
+ "step": 18750
2852
+ },
2853
+ {
2854
+ "epoch": 1.7901728324525068,
2855
+ "grad_norm": 0.4471156895160675,
2856
+ "learning_rate": 9.830340291678583e-05,
2857
+ "loss": 0.1567,
2858
+ "step": 18800
2859
+ },
2860
+ {
2861
+ "epoch": 1.7949340570394705,
2862
+ "grad_norm": 0.459011435508728,
2863
+ "learning_rate": 9.829863692688972e-05,
2864
+ "loss": 0.1599,
2865
+ "step": 18850
2866
+ },
2867
+ {
2868
+ "epoch": 1.7996952816264344,
2869
+ "grad_norm": 0.4757770001888275,
2870
+ "learning_rate": 9.829387093699362e-05,
2871
+ "loss": 0.1574,
2872
+ "step": 18900
2873
+ },
2874
+ {
2875
+ "epoch": 1.804456506213398,
2876
+ "grad_norm": 0.4678910970687866,
2877
+ "learning_rate": 9.828910494709751e-05,
2878
+ "loss": 0.1604,
2879
+ "step": 18950
2880
+ },
2881
+ {
2882
+ "epoch": 1.8092177308003619,
2883
+ "grad_norm": 0.4852876663208008,
2884
+ "learning_rate": 9.828433895720141e-05,
2885
+ "loss": 0.1562,
2886
+ "step": 19000
2887
+ },
2888
+ {
2889
+ "epoch": 1.8092177308003619,
2890
+ "eval_loss": 0.15658971667289734,
2891
+ "eval_mae": 0.6544845700263977,
2892
+ "eval_mse": 281.27392578125,
2893
+ "eval_rmse": 16.77122314505564,
2894
+ "eval_runtime": 61.2247,
2895
+ "eval_samples_per_second": 9757.737,
2896
+ "eval_smape": 80.4527759552002,
2897
+ "eval_steps_per_second": 19.061,
2898
+ "step": 19000
2899
+ },
2900
+ {
2901
+ "epoch": 1.8139789553873258,
2902
+ "grad_norm": 0.37452152371406555,
2903
+ "learning_rate": 9.827957296730531e-05,
2904
+ "loss": 0.1561,
2905
+ "step": 19050
2906
+ },
2907
+ {
2908
+ "epoch": 1.8187401799742893,
2909
+ "grad_norm": 0.4231972396373749,
2910
+ "learning_rate": 9.827480697740921e-05,
2911
+ "loss": 0.1588,
2912
+ "step": 19100
2913
+ },
2914
+ {
2915
+ "epoch": 1.8235014045612532,
2916
+ "grad_norm": 0.423755943775177,
2917
+ "learning_rate": 9.827004098751311e-05,
2918
+ "loss": 0.1566,
2919
+ "step": 19150
2920
+ },
2921
+ {
2922
+ "epoch": 1.828262629148217,
2923
+ "grad_norm": 0.4381329119205475,
2924
+ "learning_rate": 9.826527499761701e-05,
2925
+ "loss": 0.1596,
2926
+ "step": 19200
2927
+ },
2928
+ {
2929
+ "epoch": 1.8330238537351806,
2930
+ "grad_norm": 0.4936196804046631,
2931
+ "learning_rate": 9.826050900772091e-05,
2932
+ "loss": 0.1598,
2933
+ "step": 19250
2934
+ },
2935
+ {
2936
+ "epoch": 1.8377850783221445,
2937
+ "grad_norm": 0.6386341452598572,
2938
+ "learning_rate": 9.825574301782481e-05,
2939
+ "loss": 0.1601,
2940
+ "step": 19300
2941
+ },
2942
+ {
2943
+ "epoch": 1.8425463029091083,
2944
+ "grad_norm": 0.49802061915397644,
2945
+ "learning_rate": 9.82509770279287e-05,
2946
+ "loss": 0.1587,
2947
+ "step": 19350
2948
+ },
2949
+ {
2950
+ "epoch": 1.847307527496072,
2951
+ "grad_norm": 0.4627436697483063,
2952
+ "learning_rate": 9.824621103803261e-05,
2953
+ "loss": 0.1587,
2954
+ "step": 19400
2955
+ },
2956
+ {
2957
+ "epoch": 1.852068752083036,
2958
+ "grad_norm": 0.4635562598705292,
2959
+ "learning_rate": 9.824144504813651e-05,
2960
+ "loss": 0.1532,
2961
+ "step": 19450
2962
+ },
2963
+ {
2964
+ "epoch": 1.8568299766699994,
2965
+ "grad_norm": 0.4869281053543091,
2966
+ "learning_rate": 9.82366790582404e-05,
2967
+ "loss": 0.1576,
2968
+ "step": 19500
2969
+ },
2970
+ {
2971
+ "epoch": 1.8615912012569633,
2972
+ "grad_norm": 0.4622304141521454,
2973
+ "learning_rate": 9.82319130683443e-05,
2974
+ "loss": 0.1587,
2975
+ "step": 19550
2976
+ },
2977
+ {
2978
+ "epoch": 1.866352425843927,
2979
+ "grad_norm": 0.42829111218452454,
2980
+ "learning_rate": 9.82271470784482e-05,
2981
+ "loss": 0.1584,
2982
+ "step": 19600
2983
+ },
2984
+ {
2985
+ "epoch": 1.8711136504308907,
2986
+ "grad_norm": 0.44152921438217163,
2987
+ "learning_rate": 9.82223810885521e-05,
2988
+ "loss": 0.1597,
2989
+ "step": 19650
2990
+ },
2991
+ {
2992
+ "epoch": 1.8758748750178547,
2993
+ "grad_norm": 0.4362374246120453,
2994
+ "learning_rate": 9.8217615098656e-05,
2995
+ "loss": 0.1535,
2996
+ "step": 19700
2997
+ },
2998
+ {
2999
+ "epoch": 1.8806360996048184,
3000
+ "grad_norm": 0.42130181193351746,
3001
+ "learning_rate": 9.82128491087599e-05,
3002
+ "loss": 0.1585,
3003
+ "step": 19750
3004
+ },
3005
+ {
3006
+ "epoch": 1.885397324191782,
3007
+ "grad_norm": 0.5120296478271484,
3008
+ "learning_rate": 9.82080831188638e-05,
3009
+ "loss": 0.1583,
3010
+ "step": 19800
3011
+ },
3012
+ {
3013
+ "epoch": 1.890158548778746,
3014
+ "grad_norm": 0.4205983579158783,
3015
+ "learning_rate": 9.820331712896768e-05,
3016
+ "loss": 0.1585,
3017
+ "step": 19850
3018
+ },
3019
+ {
3020
+ "epoch": 1.8949197733657097,
3021
+ "grad_norm": 0.4189218580722809,
3022
+ "learning_rate": 9.819855113907159e-05,
3023
+ "loss": 0.1561,
3024
+ "step": 19900
3025
+ },
3026
+ {
3027
+ "epoch": 1.8996809979526734,
3028
+ "grad_norm": 0.6145504117012024,
3029
+ "learning_rate": 9.819378514917549e-05,
3030
+ "loss": 0.1625,
3031
+ "step": 19950
3032
+ },
3033
+ {
3034
+ "epoch": 1.9044422225396374,
3035
+ "grad_norm": 0.5396240949630737,
3036
+ "learning_rate": 9.818901915927938e-05,
3037
+ "loss": 0.1601,
3038
+ "step": 20000
3039
+ },
3040
+ {
3041
+ "epoch": 1.9044422225396374,
3042
+ "eval_loss": 0.15698538720607758,
3043
+ "eval_mae": 0.6542770862579346,
3044
+ "eval_mse": 287.357666015625,
3045
+ "eval_rmse": 16.951627237985885,
3046
+ "eval_runtime": 58.5657,
3047
+ "eval_samples_per_second": 10200.761,
3048
+ "eval_smape": 79.55442667007446,
3049
+ "eval_steps_per_second": 19.926,
3050
+ "step": 20000
3051
+ },
3052
+ {
3053
+ "epoch": 1.9092034471266008,
3054
+ "grad_norm": 0.45280078053474426,
3055
+ "learning_rate": 9.818425316938329e-05,
3056
+ "loss": 0.1593,
3057
+ "step": 20050
3058
+ },
3059
+ {
3060
+ "epoch": 1.9139646717135648,
3061
+ "grad_norm": 0.455552339553833,
3062
+ "learning_rate": 9.817948717948718e-05,
3063
+ "loss": 0.1579,
3064
+ "step": 20100
3065
+ },
3066
+ {
3067
+ "epoch": 1.9187258963005285,
3068
+ "grad_norm": 0.44774943590164185,
3069
+ "learning_rate": 9.817472118959108e-05,
3070
+ "loss": 0.1577,
3071
+ "step": 20150
3072
+ },
3073
+ {
3074
+ "epoch": 1.9234871208874922,
3075
+ "grad_norm": 0.4146822392940521,
3076
+ "learning_rate": 9.816995519969499e-05,
3077
+ "loss": 0.1567,
3078
+ "step": 20200
3079
+ },
3080
+ {
3081
+ "epoch": 1.9282483454744561,
3082
+ "grad_norm": 0.48615461587905884,
3083
+ "learning_rate": 9.816518920979888e-05,
3084
+ "loss": 0.1596,
3085
+ "step": 20250
3086
+ },
3087
+ {
3088
+ "epoch": 1.9330095700614198,
3089
+ "grad_norm": 0.5081954598426819,
3090
+ "learning_rate": 9.816042321990278e-05,
3091
+ "loss": 0.1576,
3092
+ "step": 20300
3093
+ },
3094
+ {
3095
+ "epoch": 1.9377707946483835,
3096
+ "grad_norm": 0.5155813097953796,
3097
+ "learning_rate": 9.815565723000668e-05,
3098
+ "loss": 0.1551,
3099
+ "step": 20350
3100
+ },
3101
+ {
3102
+ "epoch": 1.9425320192353475,
3103
+ "grad_norm": 0.5232491493225098,
3104
+ "learning_rate": 9.815089124011058e-05,
3105
+ "loss": 0.1597,
3106
+ "step": 20400
3107
+ },
3108
+ {
3109
+ "epoch": 1.947293243822311,
3110
+ "grad_norm": 0.4078335762023926,
3111
+ "learning_rate": 9.814612525021447e-05,
3112
+ "loss": 0.1556,
3113
+ "step": 20450
3114
+ },
3115
+ {
3116
+ "epoch": 1.9520544684092749,
3117
+ "grad_norm": 0.4975152611732483,
3118
+ "learning_rate": 9.814135926031837e-05,
3119
+ "loss": 0.1591,
3120
+ "step": 20500
3121
+ },
3122
+ {
3123
+ "epoch": 1.9568156929962386,
3124
+ "grad_norm": 0.6481941938400269,
3125
+ "learning_rate": 9.813659327042227e-05,
3126
+ "loss": 0.1565,
3127
+ "step": 20550
3128
+ },
3129
+ {
3130
+ "epoch": 1.9615769175832023,
3131
+ "grad_norm": 0.4526354968547821,
3132
+ "learning_rate": 9.813182728052617e-05,
3133
+ "loss": 0.1546,
3134
+ "step": 20600
3135
+ },
3136
+ {
3137
+ "epoch": 1.9663381421701662,
3138
+ "grad_norm": 0.4288252294063568,
3139
+ "learning_rate": 9.812706129063007e-05,
3140
+ "loss": 0.1551,
3141
+ "step": 20650
3142
+ },
3143
+ {
3144
+ "epoch": 1.97109936675713,
3145
+ "grad_norm": 0.434865266084671,
3146
+ "learning_rate": 9.812229530073397e-05,
3147
+ "loss": 0.1557,
3148
+ "step": 20700
3149
+ },
3150
+ {
3151
+ "epoch": 1.9758605913440936,
3152
+ "grad_norm": 0.4131234586238861,
3153
+ "learning_rate": 9.811752931083786e-05,
3154
+ "loss": 0.156,
3155
+ "step": 20750
3156
+ },
3157
+ {
3158
+ "epoch": 1.9806218159310576,
3159
+ "grad_norm": 0.519029438495636,
3160
+ "learning_rate": 9.811276332094177e-05,
3161
+ "loss": 0.1596,
3162
+ "step": 20800
3163
+ },
3164
+ {
3165
+ "epoch": 1.9853830405180213,
3166
+ "grad_norm": 0.4048980474472046,
3167
+ "learning_rate": 9.810799733104566e-05,
3168
+ "loss": 0.153,
3169
+ "step": 20850
3170
+ },
3171
+ {
3172
+ "epoch": 1.990144265104985,
3173
+ "grad_norm": 0.44951915740966797,
3174
+ "learning_rate": 9.810323134114956e-05,
3175
+ "loss": 0.1562,
3176
+ "step": 20900
3177
+ },
3178
+ {
3179
+ "epoch": 1.994905489691949,
3180
+ "grad_norm": 0.40820789337158203,
3181
+ "learning_rate": 9.809846535125346e-05,
3182
+ "loss": 0.1576,
3183
+ "step": 20950
3184
+ },
3185
+ {
3186
+ "epoch": 1.9996667142789124,
3187
+ "grad_norm": 0.6201032400131226,
3188
+ "learning_rate": 9.809369936135736e-05,
3189
+ "loss": 0.1551,
3190
+ "step": 21000
3191
+ },
3192
+ {
3193
+ "epoch": 1.9996667142789124,
3194
+ "eval_loss": 0.15608514845371246,
3195
+ "eval_mae": 0.6444294452667236,
3196
+ "eval_mse": 279.2149658203125,
3197
+ "eval_rmse": 16.709726682992528,
3198
+ "eval_runtime": 54.4875,
3199
+ "eval_samples_per_second": 10964.25,
3200
+ "eval_smape": 102.60157585144043,
3201
+ "eval_steps_per_second": 21.418,
3202
+ "step": 21000
3203
+ },
3204
+ {
3205
+ "epoch": 2.004380326620007,
3206
+ "grad_norm": 0.4798502027988434,
3207
+ "learning_rate": 9.808893337146126e-05,
3208
+ "loss": 0.1558,
3209
+ "step": 21050
3210
+ },
3211
+ {
3212
+ "epoch": 2.0091415512069704,
3213
+ "grad_norm": 0.47718650102615356,
3214
+ "learning_rate": 9.808416738156516e-05,
3215
+ "loss": 0.1581,
3216
+ "step": 21100
3217
+ },
3218
+ {
3219
+ "epoch": 2.0139027757939343,
3220
+ "grad_norm": 0.4789866507053375,
3221
+ "learning_rate": 9.807940139166906e-05,
3222
+ "loss": 0.1558,
3223
+ "step": 21150
3224
+ },
3225
+ {
3226
+ "epoch": 2.0186640003808978,
3227
+ "grad_norm": 0.5219734907150269,
3228
+ "learning_rate": 9.807463540177296e-05,
3229
+ "loss": 0.1564,
3230
+ "step": 21200
3231
+ },
3232
+ {
3233
+ "epoch": 2.0234252249678617,
3234
+ "grad_norm": 0.4981229305267334,
3235
+ "learning_rate": 9.806986941187684e-05,
3236
+ "loss": 0.1583,
3237
+ "step": 21250
3238
+ },
3239
+ {
3240
+ "epoch": 2.0281864495548256,
3241
+ "grad_norm": 0.4696637690067291,
3242
+ "learning_rate": 9.806510342198075e-05,
3243
+ "loss": 0.1568,
3244
+ "step": 21300
3245
+ },
3246
+ {
3247
+ "epoch": 2.032947674141789,
3248
+ "grad_norm": 0.44064363837242126,
3249
+ "learning_rate": 9.806033743208465e-05,
3250
+ "loss": 0.1579,
3251
+ "step": 21350
3252
+ },
3253
+ {
3254
+ "epoch": 2.037708898728753,
3255
+ "grad_norm": 0.5143409967422485,
3256
+ "learning_rate": 9.805557144218854e-05,
3257
+ "loss": 0.156,
3258
+ "step": 21400
3259
+ },
3260
+ {
3261
+ "epoch": 2.042470123315717,
3262
+ "grad_norm": 0.49321916699409485,
3263
+ "learning_rate": 9.805080545229245e-05,
3264
+ "loss": 0.155,
3265
+ "step": 21450
3266
+ },
3267
+ {
3268
+ "epoch": 2.0472313479026805,
3269
+ "grad_norm": 0.4427430331707001,
3270
+ "learning_rate": 9.804603946239634e-05,
3271
+ "loss": 0.1566,
3272
+ "step": 21500
3273
+ },
3274
+ {
3275
+ "epoch": 2.0519925724896444,
3276
+ "grad_norm": 0.45803865790367126,
3277
+ "learning_rate": 9.804127347250024e-05,
3278
+ "loss": 0.1593,
3279
+ "step": 21550
3280
+ },
3281
+ {
3282
+ "epoch": 2.056753797076608,
3283
+ "grad_norm": 0.4853162467479706,
3284
+ "learning_rate": 9.803650748260415e-05,
3285
+ "loss": 0.1575,
3286
+ "step": 21600
3287
+ },
3288
+ {
3289
+ "epoch": 2.061515021663572,
3290
+ "grad_norm": 0.47955945134162903,
3291
+ "learning_rate": 9.803174149270804e-05,
3292
+ "loss": 0.1594,
3293
+ "step": 21650
3294
+ },
3295
+ {
3296
+ "epoch": 2.0662762462505357,
3297
+ "grad_norm": 0.44243359565734863,
3298
+ "learning_rate": 9.802697550281194e-05,
3299
+ "loss": 0.1597,
3300
+ "step": 21700
3301
+ },
3302
+ {
3303
+ "epoch": 2.0710374708374992,
3304
+ "grad_norm": 0.5727405548095703,
3305
+ "learning_rate": 9.802220951291584e-05,
3306
+ "loss": 0.1576,
3307
+ "step": 21750
3308
+ },
3309
+ {
3310
+ "epoch": 2.075798695424463,
3311
+ "grad_norm": 0.5481444001197815,
3312
+ "learning_rate": 9.801744352301974e-05,
3313
+ "loss": 0.1547,
3314
+ "step": 21800
3315
+ },
3316
+ {
3317
+ "epoch": 2.080559920011427,
3318
+ "grad_norm": 0.46369367837905884,
3319
+ "learning_rate": 9.801267753312364e-05,
3320
+ "loss": 0.1553,
3321
+ "step": 21850
3322
+ },
3323
+ {
3324
+ "epoch": 2.0853211445983906,
3325
+ "grad_norm": 0.48963698744773865,
3326
+ "learning_rate": 9.800791154322754e-05,
3327
+ "loss": 0.1579,
3328
+ "step": 21900
3329
+ },
3330
+ {
3331
+ "epoch": 2.0900823691853545,
3332
+ "grad_norm": 0.5048883557319641,
3333
+ "learning_rate": 9.800314555333144e-05,
3334
+ "loss": 0.1597,
3335
+ "step": 21950
3336
+ },
3337
+ {
3338
+ "epoch": 2.0948435937723184,
3339
+ "grad_norm": 0.4893467128276825,
3340
+ "learning_rate": 9.799837956343532e-05,
3341
+ "loss": 0.1532,
3342
+ "step": 22000
3343
+ },
3344
+ {
3345
+ "epoch": 2.0948435937723184,
3346
+ "eval_loss": 0.1553574949502945,
3347
+ "eval_mae": 0.6453903317451477,
3348
+ "eval_mse": 282.9573974609375,
3349
+ "eval_rmse": 16.821337564561787,
3350
+ "eval_runtime": 60.2846,
3351
+ "eval_samples_per_second": 9909.912,
3352
+ "eval_smape": 85.01211404800415,
3353
+ "eval_steps_per_second": 19.358,
3354
+ "step": 22000
3355
+ }
3356
+ ],
3357
+ "logging_steps": 50,
3358
+ "max_steps": 1050100,
3359
+ "num_input_tokens_seen": 0,
3360
+ "num_train_epochs": 100,
3361
+ "save_steps": 2000,
3362
+ "stateful_callbacks": {
3363
+ "EarlyStoppingCallback": {
3364
+ "args": {
3365
+ "early_stopping_patience": 3,
3366
+ "early_stopping_threshold": 0.0
3367
+ },
3368
+ "attributes": {
3369
+ "early_stopping_patience_counter": 0
3370
+ }
3371
+ },
3372
+ "TrainerControl": {
3373
+ "args": {
3374
+ "should_epoch_stop": false,
3375
+ "should_evaluate": false,
3376
+ "should_log": false,
3377
+ "should_save": true,
3378
+ "should_training_stop": false
3379
+ },
3380
+ "attributes": {}
3381
+ }
3382
+ },
3383
+ "total_flos": 4.180047099881472e+16,
3384
+ "train_batch_size": 256,
3385
+ "trial_name": null,
3386
+ "trial_params": null
3387
+ }
checkpoint-22000/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6da365a68d9f9be649ea037e893847209c49b9a8fbe245f23f2e5157ea0e5087
3
+ size 5777
checkpoint-24000/config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu",
3
+ "architectures": [
4
+ "PatchTSTForPrediction"
5
+ ],
6
+ "attention_dropout": 0.1,
7
+ "bias": true,
8
+ "channel_attention": false,
9
+ "channel_consistent_masking": false,
10
+ "context_length": 512,
11
+ "d_model": 256,
12
+ "distribution_output": "normal",
13
+ "do_mask_input": null,
14
+ "dropout": 0.1,
15
+ "ff_dropout": 0.0,
16
+ "ffn_dim": 256,
17
+ "head_dropout": 0.1,
18
+ "init_std": 0.02,
19
+ "loss": "mse",
20
+ "mask_type": "random",
21
+ "mask_value": 0,
22
+ "model_type": "patchtst",
23
+ "norm_eps": 1e-05,
24
+ "norm_type": "batchnorm",
25
+ "num_attention_heads": 16,
26
+ "num_forecast_mask_patches": [
27
+ 2
28
+ ],
29
+ "num_hidden_layers": 3,
30
+ "num_input_channels": 1,
31
+ "num_parallel_samples": 100,
32
+ "num_targets": 1,
33
+ "output_range": null,
34
+ "patch_length": 16,
35
+ "patch_stride": 16,
36
+ "path_dropout": 0.0,
37
+ "pooling_type": null,
38
+ "positional_dropout": 0.0,
39
+ "positional_encoding_type": "sincos",
40
+ "pre_norm": true,
41
+ "prediction_length": 1,
42
+ "prenorm": true,
43
+ "random_mask_ratio": 0.5,
44
+ "scaling": "std",
45
+ "share_embedding": true,
46
+ "share_projection": true,
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.51.3",
49
+ "unmasked_channel_indices": null,
50
+ "use_cls_token": false
51
+ }
checkpoint-24000/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81c9fdcc1a5ae6b516a9415abcac889092f056770d30e965296c1bce7adb76e0
3
+ size 4852676
checkpoint-24000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e07439af006fecfd778c92debe2f2f2ef01b67e723728a794b2e6a210221f575
3
+ size 9643275
checkpoint-24000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:85fb4aab765f5dd005306d7a2f4e88ee1156630165d8e3e175c81dd9e3adb37e
3
+ size 14645
checkpoint-24000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b4f66f0d3a2dd5c1350bef4ebbb6e97f54f8e7a0a219e77c5c2c856b17a26e4
3
+ size 1465
checkpoint-24000/trainer_state.json ADDED
@@ -0,0 +1,3691 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_global_step": 24000,
3
+ "best_metric": 0.15508916974067688,
4
+ "best_model_checkpoint": "./patchtst_tsmixup_final/checkpoint-24000",
5
+ "epoch": 2.285292577250869,
6
+ "eval_steps": 1000,
7
+ "global_step": 24000,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.004761224586963767,
14
+ "grad_norm": 2.066147804260254,
15
+ "learning_rate": 4.9000000000000005e-06,
16
+ "loss": 0.5158,
17
+ "step": 50
18
+ },
19
+ {
20
+ "epoch": 0.009522449173927534,
21
+ "grad_norm": 1.1728284358978271,
22
+ "learning_rate": 9.900000000000002e-06,
23
+ "loss": 0.4068,
24
+ "step": 100
25
+ },
26
+ {
27
+ "epoch": 0.014283673760891302,
28
+ "grad_norm": 1.0786150693893433,
29
+ "learning_rate": 1.49e-05,
30
+ "loss": 0.3107,
31
+ "step": 150
32
+ },
33
+ {
34
+ "epoch": 0.01904489834785507,
35
+ "grad_norm": 1.0987305641174316,
36
+ "learning_rate": 1.9900000000000003e-05,
37
+ "loss": 0.2444,
38
+ "step": 200
39
+ },
40
+ {
41
+ "epoch": 0.023806122934818836,
42
+ "grad_norm": 0.9221014976501465,
43
+ "learning_rate": 2.4900000000000002e-05,
44
+ "loss": 0.2106,
45
+ "step": 250
46
+ },
47
+ {
48
+ "epoch": 0.028567347521782603,
49
+ "grad_norm": 1.1374775171279907,
50
+ "learning_rate": 2.9900000000000002e-05,
51
+ "loss": 0.1931,
52
+ "step": 300
53
+ },
54
+ {
55
+ "epoch": 0.03332857210874637,
56
+ "grad_norm": 1.2504770755767822,
57
+ "learning_rate": 3.49e-05,
58
+ "loss": 0.1971,
59
+ "step": 350
60
+ },
61
+ {
62
+ "epoch": 0.03808979669571014,
63
+ "grad_norm": 0.8367598056793213,
64
+ "learning_rate": 3.99e-05,
65
+ "loss": 0.1918,
66
+ "step": 400
67
+ },
68
+ {
69
+ "epoch": 0.0428510212826739,
70
+ "grad_norm": 0.9541486501693726,
71
+ "learning_rate": 4.49e-05,
72
+ "loss": 0.192,
73
+ "step": 450
74
+ },
75
+ {
76
+ "epoch": 0.04761224586963767,
77
+ "grad_norm": 0.9134742021560669,
78
+ "learning_rate": 4.99e-05,
79
+ "loss": 0.1875,
80
+ "step": 500
81
+ },
82
+ {
83
+ "epoch": 0.052373470456601436,
84
+ "grad_norm": 2.1370866298675537,
85
+ "learning_rate": 5.4900000000000006e-05,
86
+ "loss": 0.19,
87
+ "step": 550
88
+ },
89
+ {
90
+ "epoch": 0.057134695043565206,
91
+ "grad_norm": 1.8110992908477783,
92
+ "learning_rate": 5.99e-05,
93
+ "loss": 0.1902,
94
+ "step": 600
95
+ },
96
+ {
97
+ "epoch": 0.06189591963052897,
98
+ "grad_norm": 0.8074783682823181,
99
+ "learning_rate": 6.49e-05,
100
+ "loss": 0.1867,
101
+ "step": 650
102
+ },
103
+ {
104
+ "epoch": 0.06665714421749273,
105
+ "grad_norm": 1.8686498403549194,
106
+ "learning_rate": 6.99e-05,
107
+ "loss": 0.1839,
108
+ "step": 700
109
+ },
110
+ {
111
+ "epoch": 0.07141836880445651,
112
+ "grad_norm": 0.9226100444793701,
113
+ "learning_rate": 7.49e-05,
114
+ "loss": 0.1832,
115
+ "step": 750
116
+ },
117
+ {
118
+ "epoch": 0.07617959339142027,
119
+ "grad_norm": 1.0714315176010132,
120
+ "learning_rate": 7.99e-05,
121
+ "loss": 0.1862,
122
+ "step": 800
123
+ },
124
+ {
125
+ "epoch": 0.08094081797838404,
126
+ "grad_norm": 0.7746614813804626,
127
+ "learning_rate": 8.49e-05,
128
+ "loss": 0.1836,
129
+ "step": 850
130
+ },
131
+ {
132
+ "epoch": 0.0857020425653478,
133
+ "grad_norm": 0.8154539465904236,
134
+ "learning_rate": 8.99e-05,
135
+ "loss": 0.1863,
136
+ "step": 900
137
+ },
138
+ {
139
+ "epoch": 0.09046326715231158,
140
+ "grad_norm": 0.7890446186065674,
141
+ "learning_rate": 9.49e-05,
142
+ "loss": 0.1865,
143
+ "step": 950
144
+ },
145
+ {
146
+ "epoch": 0.09522449173927534,
147
+ "grad_norm": 0.9673619270324707,
148
+ "learning_rate": 9.99e-05,
149
+ "loss": 0.1797,
150
+ "step": 1000
151
+ },
152
+ {
153
+ "epoch": 0.09522449173927534,
154
+ "eval_loss": 0.17560431361198425,
155
+ "eval_mae": 0.7397370934486389,
156
+ "eval_mse": 447.359619140625,
157
+ "eval_rmse": 21.15087750285139,
158
+ "eval_runtime": 58.709,
159
+ "eval_samples_per_second": 10175.863,
160
+ "eval_smape": 90.89710712432861,
161
+ "eval_steps_per_second": 19.878,
162
+ "step": 1000
163
+ },
164
+ {
165
+ "epoch": 0.09998571632623911,
166
+ "grad_norm": 0.8122360110282898,
167
+ "learning_rate": 9.999532932990182e-05,
168
+ "loss": 0.1813,
169
+ "step": 1050
170
+ },
171
+ {
172
+ "epoch": 0.10474694091320287,
173
+ "grad_norm": 1.058344841003418,
174
+ "learning_rate": 9.999056334000573e-05,
175
+ "loss": 0.1797,
176
+ "step": 1100
177
+ },
178
+ {
179
+ "epoch": 0.10950816550016665,
180
+ "grad_norm": 0.9517110586166382,
181
+ "learning_rate": 9.998579735010962e-05,
182
+ "loss": 0.1816,
183
+ "step": 1150
184
+ },
185
+ {
186
+ "epoch": 0.11426939008713041,
187
+ "grad_norm": 1.0868945121765137,
188
+ "learning_rate": 9.998103136021352e-05,
189
+ "loss": 0.1786,
190
+ "step": 1200
191
+ },
192
+ {
193
+ "epoch": 0.11903061467409418,
194
+ "grad_norm": 1.1176084280014038,
195
+ "learning_rate": 9.997626537031743e-05,
196
+ "loss": 0.1766,
197
+ "step": 1250
198
+ },
199
+ {
200
+ "epoch": 0.12379183926105794,
201
+ "grad_norm": 1.489853858947754,
202
+ "learning_rate": 9.997149938042131e-05,
203
+ "loss": 0.1763,
204
+ "step": 1300
205
+ },
206
+ {
207
+ "epoch": 0.1285530638480217,
208
+ "grad_norm": 1.1758699417114258,
209
+ "learning_rate": 9.996673339052521e-05,
210
+ "loss": 0.1808,
211
+ "step": 1350
212
+ },
213
+ {
214
+ "epoch": 0.13331428843498547,
215
+ "grad_norm": 1.3600101470947266,
216
+ "learning_rate": 9.996196740062911e-05,
217
+ "loss": 0.177,
218
+ "step": 1400
219
+ },
220
+ {
221
+ "epoch": 0.13807551302194926,
222
+ "grad_norm": 0.7045785784721375,
223
+ "learning_rate": 9.995720141073301e-05,
224
+ "loss": 0.1781,
225
+ "step": 1450
226
+ },
227
+ {
228
+ "epoch": 0.14283673760891302,
229
+ "grad_norm": 1.386483907699585,
230
+ "learning_rate": 9.995243542083691e-05,
231
+ "loss": 0.1773,
232
+ "step": 1500
233
+ },
234
+ {
235
+ "epoch": 0.1475979621958768,
236
+ "grad_norm": 1.3609524965286255,
237
+ "learning_rate": 9.994766943094081e-05,
238
+ "loss": 0.1777,
239
+ "step": 1550
240
+ },
241
+ {
242
+ "epoch": 0.15235918678284055,
243
+ "grad_norm": 0.7538266777992249,
244
+ "learning_rate": 9.994290344104471e-05,
245
+ "loss": 0.1771,
246
+ "step": 1600
247
+ },
248
+ {
249
+ "epoch": 0.1571204113698043,
250
+ "grad_norm": 0.6669739484786987,
251
+ "learning_rate": 9.993813745114861e-05,
252
+ "loss": 0.1757,
253
+ "step": 1650
254
+ },
255
+ {
256
+ "epoch": 0.16188163595676808,
257
+ "grad_norm": 1.321413278579712,
258
+ "learning_rate": 9.993337146125251e-05,
259
+ "loss": 0.1729,
260
+ "step": 1700
261
+ },
262
+ {
263
+ "epoch": 0.16664286054373184,
264
+ "grad_norm": 0.7625342011451721,
265
+ "learning_rate": 9.992860547135641e-05,
266
+ "loss": 0.1744,
267
+ "step": 1750
268
+ },
269
+ {
270
+ "epoch": 0.1714040851306956,
271
+ "grad_norm": 1.4134427309036255,
272
+ "learning_rate": 9.99238394814603e-05,
273
+ "loss": 0.1726,
274
+ "step": 1800
275
+ },
276
+ {
277
+ "epoch": 0.17616530971765937,
278
+ "grad_norm": 0.8000154495239258,
279
+ "learning_rate": 9.991907349156421e-05,
280
+ "loss": 0.175,
281
+ "step": 1850
282
+ },
283
+ {
284
+ "epoch": 0.18092653430462316,
285
+ "grad_norm": 0.8776112198829651,
286
+ "learning_rate": 9.99143075016681e-05,
287
+ "loss": 0.1742,
288
+ "step": 1900
289
+ },
290
+ {
291
+ "epoch": 0.18568775889158692,
292
+ "grad_norm": 0.9763234853744507,
293
+ "learning_rate": 9.9909541511772e-05,
294
+ "loss": 0.1753,
295
+ "step": 1950
296
+ },
297
+ {
298
+ "epoch": 0.1904489834785507,
299
+ "grad_norm": 0.6149079203605652,
300
+ "learning_rate": 9.99047755218759e-05,
301
+ "loss": 0.1709,
302
+ "step": 2000
303
+ },
304
+ {
305
+ "epoch": 0.1904489834785507,
306
+ "eval_loss": 0.1690738946199417,
307
+ "eval_mae": 0.7152817249298096,
308
+ "eval_mse": 425.0924072265625,
309
+ "eval_rmse": 20.617769210721185,
310
+ "eval_runtime": 60.02,
311
+ "eval_samples_per_second": 9953.597,
312
+ "eval_smape": 112.30494976043701,
313
+ "eval_steps_per_second": 19.444,
314
+ "step": 2000
315
+ },
316
+ {
317
+ "epoch": 0.19521020806551445,
318
+ "grad_norm": 0.8438695669174194,
319
+ "learning_rate": 9.99000095319798e-05,
320
+ "loss": 0.1753,
321
+ "step": 2050
322
+ },
323
+ {
324
+ "epoch": 0.19997143265247821,
325
+ "grad_norm": 0.8969755172729492,
326
+ "learning_rate": 9.98952435420837e-05,
327
+ "loss": 0.1745,
328
+ "step": 2100
329
+ },
330
+ {
331
+ "epoch": 0.20473265723944198,
332
+ "grad_norm": 0.9189475178718567,
333
+ "learning_rate": 9.98904775521876e-05,
334
+ "loss": 0.1709,
335
+ "step": 2150
336
+ },
337
+ {
338
+ "epoch": 0.20949388182640574,
339
+ "grad_norm": 0.8711380362510681,
340
+ "learning_rate": 9.98857115622915e-05,
341
+ "loss": 0.1692,
342
+ "step": 2200
343
+ },
344
+ {
345
+ "epoch": 0.2142551064133695,
346
+ "grad_norm": 0.7816225290298462,
347
+ "learning_rate": 9.98809455723954e-05,
348
+ "loss": 0.1709,
349
+ "step": 2250
350
+ },
351
+ {
352
+ "epoch": 0.2190163310003333,
353
+ "grad_norm": 0.6408753395080566,
354
+ "learning_rate": 9.987617958249928e-05,
355
+ "loss": 0.1707,
356
+ "step": 2300
357
+ },
358
+ {
359
+ "epoch": 0.22377755558729706,
360
+ "grad_norm": 0.7021253705024719,
361
+ "learning_rate": 9.987141359260319e-05,
362
+ "loss": 0.1739,
363
+ "step": 2350
364
+ },
365
+ {
366
+ "epoch": 0.22853878017426082,
367
+ "grad_norm": 0.9026205539703369,
368
+ "learning_rate": 9.986664760270709e-05,
369
+ "loss": 0.1705,
370
+ "step": 2400
371
+ },
372
+ {
373
+ "epoch": 0.2333000047612246,
374
+ "grad_norm": 0.6956352591514587,
375
+ "learning_rate": 9.986188161281098e-05,
376
+ "loss": 0.1705,
377
+ "step": 2450
378
+ },
379
+ {
380
+ "epoch": 0.23806122934818835,
381
+ "grad_norm": 0.7024583220481873,
382
+ "learning_rate": 9.985711562291489e-05,
383
+ "loss": 0.1718,
384
+ "step": 2500
385
+ },
386
+ {
387
+ "epoch": 0.24282245393515212,
388
+ "grad_norm": 0.6184080839157104,
389
+ "learning_rate": 9.985234963301878e-05,
390
+ "loss": 0.1697,
391
+ "step": 2550
392
+ },
393
+ {
394
+ "epoch": 0.24758367852211588,
395
+ "grad_norm": 0.9684680700302124,
396
+ "learning_rate": 9.984758364312268e-05,
397
+ "loss": 0.1696,
398
+ "step": 2600
399
+ },
400
+ {
401
+ "epoch": 0.25234490310907964,
402
+ "grad_norm": 0.8625733852386475,
403
+ "learning_rate": 9.984281765322659e-05,
404
+ "loss": 0.1731,
405
+ "step": 2650
406
+ },
407
+ {
408
+ "epoch": 0.2571061276960434,
409
+ "grad_norm": 0.6156722903251648,
410
+ "learning_rate": 9.983805166333048e-05,
411
+ "loss": 0.1731,
412
+ "step": 2700
413
+ },
414
+ {
415
+ "epoch": 0.26186735228300717,
416
+ "grad_norm": 0.7371954917907715,
417
+ "learning_rate": 9.983328567343438e-05,
418
+ "loss": 0.173,
419
+ "step": 2750
420
+ },
421
+ {
422
+ "epoch": 0.26662857686997093,
423
+ "grad_norm": 0.658812940120697,
424
+ "learning_rate": 9.982851968353828e-05,
425
+ "loss": 0.1691,
426
+ "step": 2800
427
+ },
428
+ {
429
+ "epoch": 0.2713898014569347,
430
+ "grad_norm": 0.8242245316505432,
431
+ "learning_rate": 9.982375369364217e-05,
432
+ "loss": 0.1715,
433
+ "step": 2850
434
+ },
435
+ {
436
+ "epoch": 0.2761510260438985,
437
+ "grad_norm": 0.6063680052757263,
438
+ "learning_rate": 9.981898770374607e-05,
439
+ "loss": 0.1722,
440
+ "step": 2900
441
+ },
442
+ {
443
+ "epoch": 0.2809122506308623,
444
+ "grad_norm": 0.7695409655570984,
445
+ "learning_rate": 9.981422171384997e-05,
446
+ "loss": 0.1712,
447
+ "step": 2950
448
+ },
449
+ {
450
+ "epoch": 0.28567347521782605,
451
+ "grad_norm": 0.665675163269043,
452
+ "learning_rate": 9.980945572395387e-05,
453
+ "loss": 0.1722,
454
+ "step": 3000
455
+ },
456
+ {
457
+ "epoch": 0.28567347521782605,
458
+ "eval_loss": 0.16621138155460358,
459
+ "eval_mae": 0.7008960247039795,
460
+ "eval_mse": 516.21533203125,
461
+ "eval_rmse": 22.72037262087156,
462
+ "eval_runtime": 62.5998,
463
+ "eval_samples_per_second": 9543.397,
464
+ "eval_smape": 89.52359557151794,
465
+ "eval_steps_per_second": 18.642,
466
+ "step": 3000
467
+ },
468
+ {
469
+ "epoch": 0.2904346998047898,
470
+ "grad_norm": 0.7732612490653992,
471
+ "learning_rate": 9.980468973405776e-05,
472
+ "loss": 0.1708,
473
+ "step": 3050
474
+ },
475
+ {
476
+ "epoch": 0.2951959243917536,
477
+ "grad_norm": 1.0461828708648682,
478
+ "learning_rate": 9.979992374416167e-05,
479
+ "loss": 0.1692,
480
+ "step": 3100
481
+ },
482
+ {
483
+ "epoch": 0.29995714897871734,
484
+ "grad_norm": 0.6057863831520081,
485
+ "learning_rate": 9.979515775426557e-05,
486
+ "loss": 0.1681,
487
+ "step": 3150
488
+ },
489
+ {
490
+ "epoch": 0.3047183735656811,
491
+ "grad_norm": 0.5380491018295288,
492
+ "learning_rate": 9.979039176436946e-05,
493
+ "loss": 0.1687,
494
+ "step": 3200
495
+ },
496
+ {
497
+ "epoch": 0.30947959815264486,
498
+ "grad_norm": 0.7164149284362793,
499
+ "learning_rate": 9.978562577447337e-05,
500
+ "loss": 0.1678,
501
+ "step": 3250
502
+ },
503
+ {
504
+ "epoch": 0.3142408227396086,
505
+ "grad_norm": 0.9223781228065491,
506
+ "learning_rate": 9.978085978457726e-05,
507
+ "loss": 0.1698,
508
+ "step": 3300
509
+ },
510
+ {
511
+ "epoch": 0.3190020473265724,
512
+ "grad_norm": 0.645452082157135,
513
+ "learning_rate": 9.977609379468116e-05,
514
+ "loss": 0.17,
515
+ "step": 3350
516
+ },
517
+ {
518
+ "epoch": 0.32376327191353615,
519
+ "grad_norm": 0.6378370523452759,
520
+ "learning_rate": 9.977132780478507e-05,
521
+ "loss": 0.1673,
522
+ "step": 3400
523
+ },
524
+ {
525
+ "epoch": 0.3285244965004999,
526
+ "grad_norm": 0.511216402053833,
527
+ "learning_rate": 9.976656181488896e-05,
528
+ "loss": 0.1678,
529
+ "step": 3450
530
+ },
531
+ {
532
+ "epoch": 0.3332857210874637,
533
+ "grad_norm": 0.64838707447052,
534
+ "learning_rate": 9.976179582499286e-05,
535
+ "loss": 0.1675,
536
+ "step": 3500
537
+ },
538
+ {
539
+ "epoch": 0.33804694567442745,
540
+ "grad_norm": 0.6467918753623962,
541
+ "learning_rate": 9.975702983509676e-05,
542
+ "loss": 0.1699,
543
+ "step": 3550
544
+ },
545
+ {
546
+ "epoch": 0.3428081702613912,
547
+ "grad_norm": 0.6198284029960632,
548
+ "learning_rate": 9.975226384520065e-05,
549
+ "loss": 0.169,
550
+ "step": 3600
551
+ },
552
+ {
553
+ "epoch": 0.347569394848355,
554
+ "grad_norm": 0.6328741312026978,
555
+ "learning_rate": 9.974749785530455e-05,
556
+ "loss": 0.1685,
557
+ "step": 3650
558
+ },
559
+ {
560
+ "epoch": 0.35233061943531874,
561
+ "grad_norm": 0.8264518976211548,
562
+ "learning_rate": 9.974273186540844e-05,
563
+ "loss": 0.1731,
564
+ "step": 3700
565
+ },
566
+ {
567
+ "epoch": 0.35709184402228256,
568
+ "grad_norm": 0.7238495945930481,
569
+ "learning_rate": 9.973796587551235e-05,
570
+ "loss": 0.1674,
571
+ "step": 3750
572
+ },
573
+ {
574
+ "epoch": 0.3618530686092463,
575
+ "grad_norm": 0.6243422031402588,
576
+ "learning_rate": 9.973319988561624e-05,
577
+ "loss": 0.1699,
578
+ "step": 3800
579
+ },
580
+ {
581
+ "epoch": 0.3666142931962101,
582
+ "grad_norm": 0.76638263463974,
583
+ "learning_rate": 9.972843389572014e-05,
584
+ "loss": 0.1707,
585
+ "step": 3850
586
+ },
587
+ {
588
+ "epoch": 0.37137551778317385,
589
+ "grad_norm": 0.5346329212188721,
590
+ "learning_rate": 9.972366790582405e-05,
591
+ "loss": 0.1669,
592
+ "step": 3900
593
+ },
594
+ {
595
+ "epoch": 0.3761367423701376,
596
+ "grad_norm": 0.6198967695236206,
597
+ "learning_rate": 9.971890191592794e-05,
598
+ "loss": 0.1663,
599
+ "step": 3950
600
+ },
601
+ {
602
+ "epoch": 0.3808979669571014,
603
+ "grad_norm": 0.936530590057373,
604
+ "learning_rate": 9.971413592603184e-05,
605
+ "loss": 0.1694,
606
+ "step": 4000
607
+ },
608
+ {
609
+ "epoch": 0.3808979669571014,
610
+ "eval_loss": 0.16426624357700348,
611
+ "eval_mae": 0.6708112359046936,
612
+ "eval_mse": 321.2046813964844,
613
+ "eval_rmse": 17.922184057655596,
614
+ "eval_runtime": 59.1224,
615
+ "eval_samples_per_second": 10104.719,
616
+ "eval_smape": 93.0514931678772,
617
+ "eval_steps_per_second": 19.739,
618
+ "step": 4000
619
+ },
620
+ {
621
+ "epoch": 0.38565919154406514,
622
+ "grad_norm": 0.5151750445365906,
623
+ "learning_rate": 9.970936993613574e-05,
624
+ "loss": 0.1676,
625
+ "step": 4050
626
+ },
627
+ {
628
+ "epoch": 0.3904204161310289,
629
+ "grad_norm": 0.8430535793304443,
630
+ "learning_rate": 9.970460394623964e-05,
631
+ "loss": 0.1699,
632
+ "step": 4100
633
+ },
634
+ {
635
+ "epoch": 0.39518164071799267,
636
+ "grad_norm": 0.7711997628211975,
637
+ "learning_rate": 9.969983795634354e-05,
638
+ "loss": 0.1664,
639
+ "step": 4150
640
+ },
641
+ {
642
+ "epoch": 0.39994286530495643,
643
+ "grad_norm": 0.5547206401824951,
644
+ "learning_rate": 9.969507196644744e-05,
645
+ "loss": 0.1679,
646
+ "step": 4200
647
+ },
648
+ {
649
+ "epoch": 0.4047040898919202,
650
+ "grad_norm": 0.7514538764953613,
651
+ "learning_rate": 9.969030597655134e-05,
652
+ "loss": 0.1685,
653
+ "step": 4250
654
+ },
655
+ {
656
+ "epoch": 0.40946531447888396,
657
+ "grad_norm": 0.6667706370353699,
658
+ "learning_rate": 9.968553998665524e-05,
659
+ "loss": 0.1691,
660
+ "step": 4300
661
+ },
662
+ {
663
+ "epoch": 0.4142265390658477,
664
+ "grad_norm": 0.5886721611022949,
665
+ "learning_rate": 9.968077399675914e-05,
666
+ "loss": 0.1649,
667
+ "step": 4350
668
+ },
669
+ {
670
+ "epoch": 0.4189877636528115,
671
+ "grad_norm": 0.5160133838653564,
672
+ "learning_rate": 9.967600800686303e-05,
673
+ "loss": 0.1673,
674
+ "step": 4400
675
+ },
676
+ {
677
+ "epoch": 0.42374898823977525,
678
+ "grad_norm": 0.6817535758018494,
679
+ "learning_rate": 9.967124201696692e-05,
680
+ "loss": 0.1687,
681
+ "step": 4450
682
+ },
683
+ {
684
+ "epoch": 0.428510212826739,
685
+ "grad_norm": 0.5424938201904297,
686
+ "learning_rate": 9.966647602707083e-05,
687
+ "loss": 0.1687,
688
+ "step": 4500
689
+ },
690
+ {
691
+ "epoch": 0.43327143741370283,
692
+ "grad_norm": 0.483815461397171,
693
+ "learning_rate": 9.966171003717473e-05,
694
+ "loss": 0.1673,
695
+ "step": 4550
696
+ },
697
+ {
698
+ "epoch": 0.4380326620006666,
699
+ "grad_norm": 0.555853009223938,
700
+ "learning_rate": 9.965694404727862e-05,
701
+ "loss": 0.1667,
702
+ "step": 4600
703
+ },
704
+ {
705
+ "epoch": 0.44279388658763036,
706
+ "grad_norm": 0.45629894733428955,
707
+ "learning_rate": 9.965217805738253e-05,
708
+ "loss": 0.1678,
709
+ "step": 4650
710
+ },
711
+ {
712
+ "epoch": 0.4475551111745941,
713
+ "grad_norm": 0.47480854392051697,
714
+ "learning_rate": 9.964741206748642e-05,
715
+ "loss": 0.1651,
716
+ "step": 4700
717
+ },
718
+ {
719
+ "epoch": 0.4523163357615579,
720
+ "grad_norm": 0.5411455631256104,
721
+ "learning_rate": 9.964264607759032e-05,
722
+ "loss": 0.168,
723
+ "step": 4750
724
+ },
725
+ {
726
+ "epoch": 0.45707756034852165,
727
+ "grad_norm": 0.7176097631454468,
728
+ "learning_rate": 9.963788008769422e-05,
729
+ "loss": 0.1654,
730
+ "step": 4800
731
+ },
732
+ {
733
+ "epoch": 0.4618387849354854,
734
+ "grad_norm": 0.7010710835456848,
735
+ "learning_rate": 9.963311409779812e-05,
736
+ "loss": 0.165,
737
+ "step": 4850
738
+ },
739
+ {
740
+ "epoch": 0.4666000095224492,
741
+ "grad_norm": 0.5730286240577698,
742
+ "learning_rate": 9.962834810790202e-05,
743
+ "loss": 0.1654,
744
+ "step": 4900
745
+ },
746
+ {
747
+ "epoch": 0.47136123410941294,
748
+ "grad_norm": 0.532320499420166,
749
+ "learning_rate": 9.96235821180059e-05,
750
+ "loss": 0.1665,
751
+ "step": 4950
752
+ },
753
+ {
754
+ "epoch": 0.4761224586963767,
755
+ "grad_norm": 0.5974966883659363,
756
+ "learning_rate": 9.961881612810982e-05,
757
+ "loss": 0.1648,
758
+ "step": 5000
759
+ },
760
+ {
761
+ "epoch": 0.4761224586963767,
762
+ "eval_loss": 0.16261516511440277,
763
+ "eval_mae": 0.6730566620826721,
764
+ "eval_mse": 350.6869812011719,
765
+ "eval_rmse": 18.726638278163325,
766
+ "eval_runtime": 61.1422,
767
+ "eval_samples_per_second": 9770.909,
768
+ "eval_smape": 94.07484531402588,
769
+ "eval_steps_per_second": 19.087,
770
+ "step": 5000
771
+ },
772
+ {
773
+ "epoch": 0.48088368328334047,
774
+ "grad_norm": 0.6836587190628052,
775
+ "learning_rate": 9.961405013821372e-05,
776
+ "loss": 0.164,
777
+ "step": 5050
778
+ },
779
+ {
780
+ "epoch": 0.48564490787030423,
781
+ "grad_norm": 0.6935145854949951,
782
+ "learning_rate": 9.96092841483176e-05,
783
+ "loss": 0.1656,
784
+ "step": 5100
785
+ },
786
+ {
787
+ "epoch": 0.490406132457268,
788
+ "grad_norm": 0.5300805568695068,
789
+ "learning_rate": 9.960451815842151e-05,
790
+ "loss": 0.1661,
791
+ "step": 5150
792
+ },
793
+ {
794
+ "epoch": 0.49516735704423176,
795
+ "grad_norm": 0.6059597134590149,
796
+ "learning_rate": 9.95997521685254e-05,
797
+ "loss": 0.1665,
798
+ "step": 5200
799
+ },
800
+ {
801
+ "epoch": 0.4999285816311955,
802
+ "grad_norm": 0.6202102303504944,
803
+ "learning_rate": 9.95949861786293e-05,
804
+ "loss": 0.1667,
805
+ "step": 5250
806
+ },
807
+ {
808
+ "epoch": 0.5046898062181593,
809
+ "grad_norm": 0.6857314705848694,
810
+ "learning_rate": 9.959022018873321e-05,
811
+ "loss": 0.1648,
812
+ "step": 5300
813
+ },
814
+ {
815
+ "epoch": 0.5094510308051231,
816
+ "grad_norm": 0.5026215314865112,
817
+ "learning_rate": 9.95854541988371e-05,
818
+ "loss": 0.1656,
819
+ "step": 5350
820
+ },
821
+ {
822
+ "epoch": 0.5142122553920868,
823
+ "grad_norm": 0.8072870969772339,
824
+ "learning_rate": 9.9580688208941e-05,
825
+ "loss": 0.1637,
826
+ "step": 5400
827
+ },
828
+ {
829
+ "epoch": 0.5189734799790506,
830
+ "grad_norm": 0.5563872456550598,
831
+ "learning_rate": 9.95759222190449e-05,
832
+ "loss": 0.1665,
833
+ "step": 5450
834
+ },
835
+ {
836
+ "epoch": 0.5237347045660143,
837
+ "grad_norm": 0.4486568868160248,
838
+ "learning_rate": 9.95711562291488e-05,
839
+ "loss": 0.1665,
840
+ "step": 5500
841
+ },
842
+ {
843
+ "epoch": 0.5284959291529782,
844
+ "grad_norm": 0.5072858929634094,
845
+ "learning_rate": 9.95663902392527e-05,
846
+ "loss": 0.1671,
847
+ "step": 5550
848
+ },
849
+ {
850
+ "epoch": 0.5332571537399419,
851
+ "grad_norm": 0.4768078327178955,
852
+ "learning_rate": 9.95616242493566e-05,
853
+ "loss": 0.165,
854
+ "step": 5600
855
+ },
856
+ {
857
+ "epoch": 0.5380183783269057,
858
+ "grad_norm": 0.5484294891357422,
859
+ "learning_rate": 9.95568582594605e-05,
860
+ "loss": 0.1615,
861
+ "step": 5650
862
+ },
863
+ {
864
+ "epoch": 0.5427796029138694,
865
+ "grad_norm": 0.5098631978034973,
866
+ "learning_rate": 9.955209226956438e-05,
867
+ "loss": 0.1663,
868
+ "step": 5700
869
+ },
870
+ {
871
+ "epoch": 0.5475408275008332,
872
+ "grad_norm": 0.5663777589797974,
873
+ "learning_rate": 9.95473262796683e-05,
874
+ "loss": 0.1653,
875
+ "step": 5750
876
+ },
877
+ {
878
+ "epoch": 0.552302052087797,
879
+ "grad_norm": 0.5557841658592224,
880
+ "learning_rate": 9.95425602897722e-05,
881
+ "loss": 0.163,
882
+ "step": 5800
883
+ },
884
+ {
885
+ "epoch": 0.5570632766747607,
886
+ "grad_norm": 0.545656144618988,
887
+ "learning_rate": 9.953779429987608e-05,
888
+ "loss": 0.1619,
889
+ "step": 5850
890
+ },
891
+ {
892
+ "epoch": 0.5618245012617246,
893
+ "grad_norm": 0.6774228811264038,
894
+ "learning_rate": 9.953302830998e-05,
895
+ "loss": 0.1649,
896
+ "step": 5900
897
+ },
898
+ {
899
+ "epoch": 0.5665857258486883,
900
+ "grad_norm": 0.4831783175468445,
901
+ "learning_rate": 9.952826232008388e-05,
902
+ "loss": 0.1626,
903
+ "step": 5950
904
+ },
905
+ {
906
+ "epoch": 0.5713469504356521,
907
+ "grad_norm": 0.46657130122184753,
908
+ "learning_rate": 9.952349633018778e-05,
909
+ "loss": 0.1672,
910
+ "step": 6000
911
+ },
912
+ {
913
+ "epoch": 0.5713469504356521,
914
+ "eval_loss": 0.16118212044239044,
915
+ "eval_mae": 0.6796970963478088,
916
+ "eval_mse": 370.8824768066406,
917
+ "eval_rmse": 19.25830929252723,
918
+ "eval_runtime": 58.3327,
919
+ "eval_samples_per_second": 10241.503,
920
+ "eval_smape": 84.66194272041321,
921
+ "eval_steps_per_second": 20.006,
922
+ "step": 6000
923
+ },
924
+ {
925
+ "epoch": 0.5761081750226158,
926
+ "grad_norm": 0.5452147126197815,
927
+ "learning_rate": 9.95187303402917e-05,
928
+ "loss": 0.1645,
929
+ "step": 6050
930
+ },
931
+ {
932
+ "epoch": 0.5808693996095796,
933
+ "grad_norm": 0.6225939989089966,
934
+ "learning_rate": 9.951396435039558e-05,
935
+ "loss": 0.1629,
936
+ "step": 6100
937
+ },
938
+ {
939
+ "epoch": 0.5856306241965433,
940
+ "grad_norm": 0.618532121181488,
941
+ "learning_rate": 9.950919836049948e-05,
942
+ "loss": 0.1638,
943
+ "step": 6150
944
+ },
945
+ {
946
+ "epoch": 0.5903918487835071,
947
+ "grad_norm": 0.6065341830253601,
948
+ "learning_rate": 9.950443237060338e-05,
949
+ "loss": 0.1672,
950
+ "step": 6200
951
+ },
952
+ {
953
+ "epoch": 0.5951530733704709,
954
+ "grad_norm": 0.7495716214179993,
955
+ "learning_rate": 9.949966638070728e-05,
956
+ "loss": 0.1648,
957
+ "step": 6250
958
+ },
959
+ {
960
+ "epoch": 0.5999142979574347,
961
+ "grad_norm": 0.6554955244064331,
962
+ "learning_rate": 9.949490039081118e-05,
963
+ "loss": 0.1654,
964
+ "step": 6300
965
+ },
966
+ {
967
+ "epoch": 0.6046755225443984,
968
+ "grad_norm": 0.5830172300338745,
969
+ "learning_rate": 9.949013440091506e-05,
970
+ "loss": 0.1629,
971
+ "step": 6350
972
+ },
973
+ {
974
+ "epoch": 0.6094367471313622,
975
+ "grad_norm": 0.5021042823791504,
976
+ "learning_rate": 9.948536841101898e-05,
977
+ "loss": 0.1622,
978
+ "step": 6400
979
+ },
980
+ {
981
+ "epoch": 0.6141979717183259,
982
+ "grad_norm": 0.47169509530067444,
983
+ "learning_rate": 9.948060242112288e-05,
984
+ "loss": 0.1632,
985
+ "step": 6450
986
+ },
987
+ {
988
+ "epoch": 0.6189591963052897,
989
+ "grad_norm": 0.7609395980834961,
990
+ "learning_rate": 9.947583643122676e-05,
991
+ "loss": 0.1641,
992
+ "step": 6500
993
+ },
994
+ {
995
+ "epoch": 0.6237204208922534,
996
+ "grad_norm": 0.5191305875778198,
997
+ "learning_rate": 9.947107044133068e-05,
998
+ "loss": 0.1672,
999
+ "step": 6550
1000
+ },
1001
+ {
1002
+ "epoch": 0.6284816454792173,
1003
+ "grad_norm": 0.5454711318016052,
1004
+ "learning_rate": 9.946630445143456e-05,
1005
+ "loss": 0.1648,
1006
+ "step": 6600
1007
+ },
1008
+ {
1009
+ "epoch": 0.6332428700661811,
1010
+ "grad_norm": 0.49112918972969055,
1011
+ "learning_rate": 9.946153846153846e-05,
1012
+ "loss": 0.1648,
1013
+ "step": 6650
1014
+ },
1015
+ {
1016
+ "epoch": 0.6380040946531448,
1017
+ "grad_norm": 0.4859708249568939,
1018
+ "learning_rate": 9.945677247164236e-05,
1019
+ "loss": 0.1652,
1020
+ "step": 6700
1021
+ },
1022
+ {
1023
+ "epoch": 0.6427653192401086,
1024
+ "grad_norm": 0.506971001625061,
1025
+ "learning_rate": 9.945200648174626e-05,
1026
+ "loss": 0.1623,
1027
+ "step": 6750
1028
+ },
1029
+ {
1030
+ "epoch": 0.6475265438270723,
1031
+ "grad_norm": 0.5732383131980896,
1032
+ "learning_rate": 9.944724049185016e-05,
1033
+ "loss": 0.1657,
1034
+ "step": 6800
1035
+ },
1036
+ {
1037
+ "epoch": 0.6522877684140361,
1038
+ "grad_norm": 0.548362672328949,
1039
+ "learning_rate": 9.944247450195406e-05,
1040
+ "loss": 0.1628,
1041
+ "step": 6850
1042
+ },
1043
+ {
1044
+ "epoch": 0.6570489930009998,
1045
+ "grad_norm": 0.5271615982055664,
1046
+ "learning_rate": 9.943770851205796e-05,
1047
+ "loss": 0.1627,
1048
+ "step": 6900
1049
+ },
1050
+ {
1051
+ "epoch": 0.6618102175879637,
1052
+ "grad_norm": 0.7555857300758362,
1053
+ "learning_rate": 9.943294252216186e-05,
1054
+ "loss": 0.1635,
1055
+ "step": 6950
1056
+ },
1057
+ {
1058
+ "epoch": 0.6665714421749274,
1059
+ "grad_norm": 0.5426679849624634,
1060
+ "learning_rate": 9.942817653226576e-05,
1061
+ "loss": 0.1623,
1062
+ "step": 7000
1063
+ },
1064
+ {
1065
+ "epoch": 0.6665714421749274,
1066
+ "eval_loss": 0.1605178564786911,
1067
+ "eval_mae": 0.6715303063392639,
1068
+ "eval_mse": 400.0790100097656,
1069
+ "eval_rmse": 20.001975152713435,
1070
+ "eval_runtime": 61.0562,
1071
+ "eval_samples_per_second": 9784.671,
1072
+ "eval_smape": 89.75983262062073,
1073
+ "eval_steps_per_second": 19.114,
1074
+ "step": 7000
1075
+ },
1076
+ {
1077
+ "epoch": 0.6713326667618912,
1078
+ "grad_norm": 0.5322990417480469,
1079
+ "learning_rate": 9.942341054236966e-05,
1080
+ "loss": 0.1625,
1081
+ "step": 7050
1082
+ },
1083
+ {
1084
+ "epoch": 0.6760938913488549,
1085
+ "grad_norm": 0.6016077995300293,
1086
+ "learning_rate": 9.941864455247354e-05,
1087
+ "loss": 0.1647,
1088
+ "step": 7100
1089
+ },
1090
+ {
1091
+ "epoch": 0.6808551159358187,
1092
+ "grad_norm": 0.5076338648796082,
1093
+ "learning_rate": 9.941387856257746e-05,
1094
+ "loss": 0.1618,
1095
+ "step": 7150
1096
+ },
1097
+ {
1098
+ "epoch": 0.6856163405227824,
1099
+ "grad_norm": 0.5658571124076843,
1100
+ "learning_rate": 9.940911257268136e-05,
1101
+ "loss": 0.1662,
1102
+ "step": 7200
1103
+ },
1104
+ {
1105
+ "epoch": 0.6903775651097462,
1106
+ "grad_norm": 0.6107982993125916,
1107
+ "learning_rate": 9.940434658278524e-05,
1108
+ "loss": 0.1608,
1109
+ "step": 7250
1110
+ },
1111
+ {
1112
+ "epoch": 0.69513878969671,
1113
+ "grad_norm": 0.4623304307460785,
1114
+ "learning_rate": 9.939958059288916e-05,
1115
+ "loss": 0.1635,
1116
+ "step": 7300
1117
+ },
1118
+ {
1119
+ "epoch": 0.6999000142836738,
1120
+ "grad_norm": 0.6437474489212036,
1121
+ "learning_rate": 9.939481460299304e-05,
1122
+ "loss": 0.1606,
1123
+ "step": 7350
1124
+ },
1125
+ {
1126
+ "epoch": 0.7046612388706375,
1127
+ "grad_norm": 0.6315158605575562,
1128
+ "learning_rate": 9.939004861309694e-05,
1129
+ "loss": 0.1615,
1130
+ "step": 7400
1131
+ },
1132
+ {
1133
+ "epoch": 0.7094224634576013,
1134
+ "grad_norm": 0.6503571271896362,
1135
+ "learning_rate": 9.938528262320085e-05,
1136
+ "loss": 0.1619,
1137
+ "step": 7450
1138
+ },
1139
+ {
1140
+ "epoch": 0.7141836880445651,
1141
+ "grad_norm": 0.46252092719078064,
1142
+ "learning_rate": 9.938051663330474e-05,
1143
+ "loss": 0.1625,
1144
+ "step": 7500
1145
+ },
1146
+ {
1147
+ "epoch": 0.7189449126315288,
1148
+ "grad_norm": 0.5186336636543274,
1149
+ "learning_rate": 9.937575064340864e-05,
1150
+ "loss": 0.1628,
1151
+ "step": 7550
1152
+ },
1153
+ {
1154
+ "epoch": 0.7237061372184926,
1155
+ "grad_norm": 0.5236070156097412,
1156
+ "learning_rate": 9.937098465351254e-05,
1157
+ "loss": 0.1624,
1158
+ "step": 7600
1159
+ },
1160
+ {
1161
+ "epoch": 0.7284673618054563,
1162
+ "grad_norm": 0.4777911901473999,
1163
+ "learning_rate": 9.936621866361644e-05,
1164
+ "loss": 0.1625,
1165
+ "step": 7650
1166
+ },
1167
+ {
1168
+ "epoch": 0.7332285863924202,
1169
+ "grad_norm": 0.5092161297798157,
1170
+ "learning_rate": 9.936145267372034e-05,
1171
+ "loss": 0.163,
1172
+ "step": 7700
1173
+ },
1174
+ {
1175
+ "epoch": 0.7379898109793839,
1176
+ "grad_norm": 0.5161564350128174,
1177
+ "learning_rate": 9.935668668382424e-05,
1178
+ "loss": 0.1615,
1179
+ "step": 7750
1180
+ },
1181
+ {
1182
+ "epoch": 0.7427510355663477,
1183
+ "grad_norm": 0.48548147082328796,
1184
+ "learning_rate": 9.935192069392814e-05,
1185
+ "loss": 0.1592,
1186
+ "step": 7800
1187
+ },
1188
+ {
1189
+ "epoch": 0.7475122601533114,
1190
+ "grad_norm": 0.6095620393753052,
1191
+ "learning_rate": 9.934715470403202e-05,
1192
+ "loss": 0.1613,
1193
+ "step": 7850
1194
+ },
1195
+ {
1196
+ "epoch": 0.7522734847402752,
1197
+ "grad_norm": 0.49965670704841614,
1198
+ "learning_rate": 9.934238871413592e-05,
1199
+ "loss": 0.163,
1200
+ "step": 7900
1201
+ },
1202
+ {
1203
+ "epoch": 0.7570347093272389,
1204
+ "grad_norm": 0.5934204459190369,
1205
+ "learning_rate": 9.933762272423984e-05,
1206
+ "loss": 0.1638,
1207
+ "step": 7950
1208
+ },
1209
+ {
1210
+ "epoch": 0.7617959339142027,
1211
+ "grad_norm": 0.6522780060768127,
1212
+ "learning_rate": 9.933285673434372e-05,
1213
+ "loss": 0.1638,
1214
+ "step": 8000
1215
+ },
1216
+ {
1217
+ "epoch": 0.7617959339142027,
1218
+ "eval_loss": 0.16129492223262787,
1219
+ "eval_mae": 0.6771246194839478,
1220
+ "eval_mse": 387.6971435546875,
1221
+ "eval_rmse": 19.69002649959333,
1222
+ "eval_runtime": 57.6529,
1223
+ "eval_samples_per_second": 10362.273,
1224
+ "eval_smape": 122.37988710403442,
1225
+ "eval_steps_per_second": 20.242,
1226
+ "step": 8000
1227
+ },
1228
+ {
1229
+ "epoch": 0.7665571585011665,
1230
+ "grad_norm": 0.47631314396858215,
1231
+ "learning_rate": 9.932809074444762e-05,
1232
+ "loss": 0.1594,
1233
+ "step": 8050
1234
+ },
1235
+ {
1236
+ "epoch": 0.7713183830881303,
1237
+ "grad_norm": 0.4288536310195923,
1238
+ "learning_rate": 9.932332475455152e-05,
1239
+ "loss": 0.1627,
1240
+ "step": 8100
1241
+ },
1242
+ {
1243
+ "epoch": 0.776079607675094,
1244
+ "grad_norm": 0.4548576772212982,
1245
+ "learning_rate": 9.931855876465542e-05,
1246
+ "loss": 0.1638,
1247
+ "step": 8150
1248
+ },
1249
+ {
1250
+ "epoch": 0.7808408322620578,
1251
+ "grad_norm": 0.5950626730918884,
1252
+ "learning_rate": 9.931379277475932e-05,
1253
+ "loss": 0.1646,
1254
+ "step": 8200
1255
+ },
1256
+ {
1257
+ "epoch": 0.7856020568490216,
1258
+ "grad_norm": 0.5772454738616943,
1259
+ "learning_rate": 9.930902678486322e-05,
1260
+ "loss": 0.1629,
1261
+ "step": 8250
1262
+ },
1263
+ {
1264
+ "epoch": 0.7903632814359853,
1265
+ "grad_norm": 0.5833305716514587,
1266
+ "learning_rate": 9.930426079496712e-05,
1267
+ "loss": 0.1635,
1268
+ "step": 8300
1269
+ },
1270
+ {
1271
+ "epoch": 0.7951245060229492,
1272
+ "grad_norm": 0.4767976701259613,
1273
+ "learning_rate": 9.929949480507102e-05,
1274
+ "loss": 0.1621,
1275
+ "step": 8350
1276
+ },
1277
+ {
1278
+ "epoch": 0.7998857306099129,
1279
+ "grad_norm": 0.586681604385376,
1280
+ "learning_rate": 9.929472881517492e-05,
1281
+ "loss": 0.1633,
1282
+ "step": 8400
1283
+ },
1284
+ {
1285
+ "epoch": 0.8046469551968767,
1286
+ "grad_norm": 0.46445733308792114,
1287
+ "learning_rate": 9.928996282527882e-05,
1288
+ "loss": 0.1621,
1289
+ "step": 8450
1290
+ },
1291
+ {
1292
+ "epoch": 0.8094081797838404,
1293
+ "grad_norm": 0.4659370183944702,
1294
+ "learning_rate": 9.92851968353827e-05,
1295
+ "loss": 0.1644,
1296
+ "step": 8500
1297
+ },
1298
+ {
1299
+ "epoch": 0.8141694043708042,
1300
+ "grad_norm": 0.48823997378349304,
1301
+ "learning_rate": 9.928043084548662e-05,
1302
+ "loss": 0.1654,
1303
+ "step": 8550
1304
+ },
1305
+ {
1306
+ "epoch": 0.8189306289577679,
1307
+ "grad_norm": 0.5804855823516846,
1308
+ "learning_rate": 9.92756648555905e-05,
1309
+ "loss": 0.1624,
1310
+ "step": 8600
1311
+ },
1312
+ {
1313
+ "epoch": 0.8236918535447317,
1314
+ "grad_norm": 0.4181581139564514,
1315
+ "learning_rate": 9.92708988656944e-05,
1316
+ "loss": 0.1593,
1317
+ "step": 8650
1318
+ },
1319
+ {
1320
+ "epoch": 0.8284530781316954,
1321
+ "grad_norm": 0.6322731971740723,
1322
+ "learning_rate": 9.926613287579832e-05,
1323
+ "loss": 0.1628,
1324
+ "step": 8700
1325
+ },
1326
+ {
1327
+ "epoch": 0.8332143027186593,
1328
+ "grad_norm": 0.39184707403182983,
1329
+ "learning_rate": 9.92613668859022e-05,
1330
+ "loss": 0.1639,
1331
+ "step": 8750
1332
+ },
1333
+ {
1334
+ "epoch": 0.837975527305623,
1335
+ "grad_norm": 0.5011768341064453,
1336
+ "learning_rate": 9.92566008960061e-05,
1337
+ "loss": 0.1634,
1338
+ "step": 8800
1339
+ },
1340
+ {
1341
+ "epoch": 0.8427367518925868,
1342
+ "grad_norm": 0.47292882204055786,
1343
+ "learning_rate": 9.925183490611e-05,
1344
+ "loss": 0.1609,
1345
+ "step": 8850
1346
+ },
1347
+ {
1348
+ "epoch": 0.8474979764795505,
1349
+ "grad_norm": 0.5086949467658997,
1350
+ "learning_rate": 9.92470689162139e-05,
1351
+ "loss": 0.1616,
1352
+ "step": 8900
1353
+ },
1354
+ {
1355
+ "epoch": 0.8522592010665143,
1356
+ "grad_norm": 0.4720959961414337,
1357
+ "learning_rate": 9.92423029263178e-05,
1358
+ "loss": 0.1611,
1359
+ "step": 8950
1360
+ },
1361
+ {
1362
+ "epoch": 0.857020425653478,
1363
+ "grad_norm": 0.8099896311759949,
1364
+ "learning_rate": 9.92375369364217e-05,
1365
+ "loss": 0.1609,
1366
+ "step": 9000
1367
+ },
1368
+ {
1369
+ "epoch": 0.857020425653478,
1370
+ "eval_loss": 0.16015625,
1371
+ "eval_mae": 0.6603007912635803,
1372
+ "eval_mse": 335.3426818847656,
1373
+ "eval_rmse": 18.31236418065034,
1374
+ "eval_runtime": 56.2142,
1375
+ "eval_samples_per_second": 10627.482,
1376
+ "eval_smape": 109.38767194747925,
1377
+ "eval_steps_per_second": 20.76,
1378
+ "step": 9000
1379
+ },
1380
+ {
1381
+ "epoch": 0.8617816502404418,
1382
+ "grad_norm": 0.5228590369224548,
1383
+ "learning_rate": 9.92327709465256e-05,
1384
+ "loss": 0.1617,
1385
+ "step": 9050
1386
+ },
1387
+ {
1388
+ "epoch": 0.8665428748274057,
1389
+ "grad_norm": 0.5515425205230713,
1390
+ "learning_rate": 9.92280049566295e-05,
1391
+ "loss": 0.1612,
1392
+ "step": 9100
1393
+ },
1394
+ {
1395
+ "epoch": 0.8713040994143694,
1396
+ "grad_norm": 0.5289241075515747,
1397
+ "learning_rate": 9.92232389667334e-05,
1398
+ "loss": 0.1646,
1399
+ "step": 9150
1400
+ },
1401
+ {
1402
+ "epoch": 0.8760653240013332,
1403
+ "grad_norm": 0.5692815780639648,
1404
+ "learning_rate": 9.92184729768373e-05,
1405
+ "loss": 0.1603,
1406
+ "step": 9200
1407
+ },
1408
+ {
1409
+ "epoch": 0.8808265485882969,
1410
+ "grad_norm": 0.41486117243766785,
1411
+ "learning_rate": 9.921370698694119e-05,
1412
+ "loss": 0.1632,
1413
+ "step": 9250
1414
+ },
1415
+ {
1416
+ "epoch": 0.8855877731752607,
1417
+ "grad_norm": 0.488235741853714,
1418
+ "learning_rate": 9.920894099704509e-05,
1419
+ "loss": 0.1613,
1420
+ "step": 9300
1421
+ },
1422
+ {
1423
+ "epoch": 0.8903489977622244,
1424
+ "grad_norm": 0.6576530337333679,
1425
+ "learning_rate": 9.9204175007149e-05,
1426
+ "loss": 0.1618,
1427
+ "step": 9350
1428
+ },
1429
+ {
1430
+ "epoch": 0.8951102223491882,
1431
+ "grad_norm": 0.49431854486465454,
1432
+ "learning_rate": 9.919940901725288e-05,
1433
+ "loss": 0.1618,
1434
+ "step": 9400
1435
+ },
1436
+ {
1437
+ "epoch": 0.899871446936152,
1438
+ "grad_norm": 0.5491801500320435,
1439
+ "learning_rate": 9.919464302735678e-05,
1440
+ "loss": 0.162,
1441
+ "step": 9450
1442
+ },
1443
+ {
1444
+ "epoch": 0.9046326715231158,
1445
+ "grad_norm": 0.5839897990226746,
1446
+ "learning_rate": 9.918987703746068e-05,
1447
+ "loss": 0.1587,
1448
+ "step": 9500
1449
+ },
1450
+ {
1451
+ "epoch": 0.9093938961100795,
1452
+ "grad_norm": 0.5631112456321716,
1453
+ "learning_rate": 9.918511104756458e-05,
1454
+ "loss": 0.1607,
1455
+ "step": 9550
1456
+ },
1457
+ {
1458
+ "epoch": 0.9141551206970433,
1459
+ "grad_norm": 0.5420098900794983,
1460
+ "learning_rate": 9.918034505766848e-05,
1461
+ "loss": 0.1642,
1462
+ "step": 9600
1463
+ },
1464
+ {
1465
+ "epoch": 0.918916345284007,
1466
+ "grad_norm": 0.568087637424469,
1467
+ "learning_rate": 9.917557906777238e-05,
1468
+ "loss": 0.1624,
1469
+ "step": 9650
1470
+ },
1471
+ {
1472
+ "epoch": 0.9236775698709708,
1473
+ "grad_norm": 0.5823555588722229,
1474
+ "learning_rate": 9.917081307787628e-05,
1475
+ "loss": 0.1639,
1476
+ "step": 9700
1477
+ },
1478
+ {
1479
+ "epoch": 0.9284387944579345,
1480
+ "grad_norm": 0.5538271069526672,
1481
+ "learning_rate": 9.916604708798017e-05,
1482
+ "loss": 0.1625,
1483
+ "step": 9750
1484
+ },
1485
+ {
1486
+ "epoch": 0.9332000190448984,
1487
+ "grad_norm": 0.5160115957260132,
1488
+ "learning_rate": 9.916128109808408e-05,
1489
+ "loss": 0.1582,
1490
+ "step": 9800
1491
+ },
1492
+ {
1493
+ "epoch": 0.9379612436318621,
1494
+ "grad_norm": 0.48957574367523193,
1495
+ "learning_rate": 9.915651510818798e-05,
1496
+ "loss": 0.16,
1497
+ "step": 9850
1498
+ },
1499
+ {
1500
+ "epoch": 0.9427224682188259,
1501
+ "grad_norm": 0.601917564868927,
1502
+ "learning_rate": 9.915174911829187e-05,
1503
+ "loss": 0.162,
1504
+ "step": 9900
1505
+ },
1506
+ {
1507
+ "epoch": 0.9474836928057897,
1508
+ "grad_norm": 0.40893155336380005,
1509
+ "learning_rate": 9.914698312839578e-05,
1510
+ "loss": 0.165,
1511
+ "step": 9950
1512
+ },
1513
+ {
1514
+ "epoch": 0.9522449173927534,
1515
+ "grad_norm": 0.5392901301383972,
1516
+ "learning_rate": 9.914221713849967e-05,
1517
+ "loss": 0.1618,
1518
+ "step": 10000
1519
+ },
1520
+ {
1521
+ "epoch": 0.9522449173927534,
1522
+ "eval_loss": 0.15920588374137878,
1523
+ "eval_mae": 0.6688477993011475,
1524
+ "eval_mse": 318.14923095703125,
1525
+ "eval_rmse": 17.83673823761035,
1526
+ "eval_runtime": 59.3096,
1527
+ "eval_samples_per_second": 10072.824,
1528
+ "eval_smape": 76.33218169212341,
1529
+ "eval_steps_per_second": 19.676,
1530
+ "step": 10000
1531
+ },
1532
+ {
1533
+ "epoch": 0.9570061419797172,
1534
+ "grad_norm": 0.600141704082489,
1535
+ "learning_rate": 9.913745114860357e-05,
1536
+ "loss": 0.1596,
1537
+ "step": 10050
1538
+ },
1539
+ {
1540
+ "epoch": 0.9617673665666809,
1541
+ "grad_norm": 0.49183207750320435,
1542
+ "learning_rate": 9.913268515870748e-05,
1543
+ "loss": 0.1636,
1544
+ "step": 10100
1545
+ },
1546
+ {
1547
+ "epoch": 0.9665285911536448,
1548
+ "grad_norm": 0.4760478138923645,
1549
+ "learning_rate": 9.912791916881136e-05,
1550
+ "loss": 0.1584,
1551
+ "step": 10150
1552
+ },
1553
+ {
1554
+ "epoch": 0.9712898157406085,
1555
+ "grad_norm": 0.4729287624359131,
1556
+ "learning_rate": 9.912315317891526e-05,
1557
+ "loss": 0.1617,
1558
+ "step": 10200
1559
+ },
1560
+ {
1561
+ "epoch": 0.9760510403275723,
1562
+ "grad_norm": 0.578062117099762,
1563
+ "learning_rate": 9.911838718901916e-05,
1564
+ "loss": 0.1606,
1565
+ "step": 10250
1566
+ },
1567
+ {
1568
+ "epoch": 0.980812264914536,
1569
+ "grad_norm": 0.43438589572906494,
1570
+ "learning_rate": 9.911362119912306e-05,
1571
+ "loss": 0.1617,
1572
+ "step": 10300
1573
+ },
1574
+ {
1575
+ "epoch": 0.9855734895014998,
1576
+ "grad_norm": 0.5887606143951416,
1577
+ "learning_rate": 9.910885520922696e-05,
1578
+ "loss": 0.1616,
1579
+ "step": 10350
1580
+ },
1581
+ {
1582
+ "epoch": 0.9903347140884635,
1583
+ "grad_norm": 0.4835382401943207,
1584
+ "learning_rate": 9.910408921933086e-05,
1585
+ "loss": 0.16,
1586
+ "step": 10400
1587
+ },
1588
+ {
1589
+ "epoch": 0.9950959386754273,
1590
+ "grad_norm": 0.4186175763607025,
1591
+ "learning_rate": 9.909932322943476e-05,
1592
+ "loss": 0.1636,
1593
+ "step": 10450
1594
+ },
1595
+ {
1596
+ "epoch": 0.999857163262391,
1597
+ "grad_norm": 0.6031454205513,
1598
+ "learning_rate": 9.909455723953865e-05,
1599
+ "loss": 0.16,
1600
+ "step": 10500
1601
+ },
1602
+ {
1603
+ "epoch": 1.0045707756034852,
1604
+ "grad_norm": 0.444181352853775,
1605
+ "learning_rate": 9.908979124964256e-05,
1606
+ "loss": 0.1588,
1607
+ "step": 10550
1608
+ },
1609
+ {
1610
+ "epoch": 1.0093320001904489,
1611
+ "grad_norm": 0.474399596452713,
1612
+ "learning_rate": 9.908502525974646e-05,
1613
+ "loss": 0.1651,
1614
+ "step": 10600
1615
+ },
1616
+ {
1617
+ "epoch": 1.0140932247774128,
1618
+ "grad_norm": 0.4467703700065613,
1619
+ "learning_rate": 9.908025926985035e-05,
1620
+ "loss": 0.1598,
1621
+ "step": 10650
1622
+ },
1623
+ {
1624
+ "epoch": 1.0188544493643765,
1625
+ "grad_norm": 0.5107735395431519,
1626
+ "learning_rate": 9.907549327995426e-05,
1627
+ "loss": 0.1609,
1628
+ "step": 10700
1629
+ },
1630
+ {
1631
+ "epoch": 1.0236156739513402,
1632
+ "grad_norm": 0.4578382968902588,
1633
+ "learning_rate": 9.907072729005815e-05,
1634
+ "loss": 0.1614,
1635
+ "step": 10750
1636
+ },
1637
+ {
1638
+ "epoch": 1.028376898538304,
1639
+ "grad_norm": 0.4924687445163727,
1640
+ "learning_rate": 9.906596130016205e-05,
1641
+ "loss": 0.1594,
1642
+ "step": 10800
1643
+ },
1644
+ {
1645
+ "epoch": 1.0331381231252679,
1646
+ "grad_norm": 0.4967709183692932,
1647
+ "learning_rate": 9.906119531026595e-05,
1648
+ "loss": 0.1614,
1649
+ "step": 10850
1650
+ },
1651
+ {
1652
+ "epoch": 1.0378993477122316,
1653
+ "grad_norm": 0.5059126615524292,
1654
+ "learning_rate": 9.905642932036984e-05,
1655
+ "loss": 0.1622,
1656
+ "step": 10900
1657
+ },
1658
+ {
1659
+ "epoch": 1.0426605722991953,
1660
+ "grad_norm": 0.5229778289794922,
1661
+ "learning_rate": 9.905166333047374e-05,
1662
+ "loss": 0.1592,
1663
+ "step": 10950
1664
+ },
1665
+ {
1666
+ "epoch": 1.0474217968861592,
1667
+ "grad_norm": 0.45143234729766846,
1668
+ "learning_rate": 9.904689734057764e-05,
1669
+ "loss": 0.1588,
1670
+ "step": 11000
1671
+ },
1672
+ {
1673
+ "epoch": 1.0474217968861592,
1674
+ "eval_loss": 0.15856099128723145,
1675
+ "eval_mae": 0.6628284454345703,
1676
+ "eval_mse": 345.36749267578125,
1677
+ "eval_rmse": 18.584065558315846,
1678
+ "eval_runtime": 57.9218,
1679
+ "eval_samples_per_second": 10314.166,
1680
+ "eval_smape": 94.50321197509766,
1681
+ "eval_steps_per_second": 20.148,
1682
+ "step": 11000
1683
+ },
1684
+ {
1685
+ "epoch": 1.052183021473123,
1686
+ "grad_norm": 0.7248756885528564,
1687
+ "learning_rate": 9.904213135068154e-05,
1688
+ "loss": 0.1614,
1689
+ "step": 11050
1690
+ },
1691
+ {
1692
+ "epoch": 1.0569442460600866,
1693
+ "grad_norm": 0.5051783323287964,
1694
+ "learning_rate": 9.903736536078544e-05,
1695
+ "loss": 0.1567,
1696
+ "step": 11100
1697
+ },
1698
+ {
1699
+ "epoch": 1.0617054706470503,
1700
+ "grad_norm": 0.5285612940788269,
1701
+ "learning_rate": 9.903259937088933e-05,
1702
+ "loss": 0.1613,
1703
+ "step": 11150
1704
+ },
1705
+ {
1706
+ "epoch": 1.0664666952340143,
1707
+ "grad_norm": 0.5270511507987976,
1708
+ "learning_rate": 9.902783338099324e-05,
1709
+ "loss": 0.1615,
1710
+ "step": 11200
1711
+ },
1712
+ {
1713
+ "epoch": 1.071227919820978,
1714
+ "grad_norm": 0.5635538101196289,
1715
+ "learning_rate": 9.902306739109714e-05,
1716
+ "loss": 0.1606,
1717
+ "step": 11250
1718
+ },
1719
+ {
1720
+ "epoch": 1.0759891444079417,
1721
+ "grad_norm": 0.5780921578407288,
1722
+ "learning_rate": 9.901830140120103e-05,
1723
+ "loss": 0.1619,
1724
+ "step": 11300
1725
+ },
1726
+ {
1727
+ "epoch": 1.0807503689949054,
1728
+ "grad_norm": 0.5149776935577393,
1729
+ "learning_rate": 9.901353541130494e-05,
1730
+ "loss": 0.1638,
1731
+ "step": 11350
1732
+ },
1733
+ {
1734
+ "epoch": 1.0855115935818693,
1735
+ "grad_norm": 0.46075335144996643,
1736
+ "learning_rate": 9.900876942140883e-05,
1737
+ "loss": 0.1597,
1738
+ "step": 11400
1739
+ },
1740
+ {
1741
+ "epoch": 1.090272818168833,
1742
+ "grad_norm": 0.42272669076919556,
1743
+ "learning_rate": 9.900400343151273e-05,
1744
+ "loss": 0.16,
1745
+ "step": 11450
1746
+ },
1747
+ {
1748
+ "epoch": 1.0950340427557967,
1749
+ "grad_norm": 0.5383277535438538,
1750
+ "learning_rate": 9.899923744161663e-05,
1751
+ "loss": 0.159,
1752
+ "step": 11500
1753
+ },
1754
+ {
1755
+ "epoch": 1.0997952673427607,
1756
+ "grad_norm": 0.45635008811950684,
1757
+ "learning_rate": 9.899447145172053e-05,
1758
+ "loss": 0.1596,
1759
+ "step": 11550
1760
+ },
1761
+ {
1762
+ "epoch": 1.1045564919297244,
1763
+ "grad_norm": 0.4863174259662628,
1764
+ "learning_rate": 9.898970546182443e-05,
1765
+ "loss": 0.1613,
1766
+ "step": 11600
1767
+ },
1768
+ {
1769
+ "epoch": 1.109317716516688,
1770
+ "grad_norm": 0.5197418332099915,
1771
+ "learning_rate": 9.898493947192832e-05,
1772
+ "loss": 0.1601,
1773
+ "step": 11650
1774
+ },
1775
+ {
1776
+ "epoch": 1.1140789411036518,
1777
+ "grad_norm": 0.6299956440925598,
1778
+ "learning_rate": 9.898017348203222e-05,
1779
+ "loss": 0.1621,
1780
+ "step": 11700
1781
+ },
1782
+ {
1783
+ "epoch": 1.1188401656906157,
1784
+ "grad_norm": 0.47120076417922974,
1785
+ "learning_rate": 9.897540749213612e-05,
1786
+ "loss": 0.1624,
1787
+ "step": 11750
1788
+ },
1789
+ {
1790
+ "epoch": 1.1236013902775794,
1791
+ "grad_norm": 0.4576870799064636,
1792
+ "learning_rate": 9.897064150224002e-05,
1793
+ "loss": 0.1629,
1794
+ "step": 11800
1795
+ },
1796
+ {
1797
+ "epoch": 1.1283626148645431,
1798
+ "grad_norm": 0.531906008720398,
1799
+ "learning_rate": 9.896587551234392e-05,
1800
+ "loss": 0.1593,
1801
+ "step": 11850
1802
+ },
1803
+ {
1804
+ "epoch": 1.1331238394515069,
1805
+ "grad_norm": 0.4709097743034363,
1806
+ "learning_rate": 9.896110952244781e-05,
1807
+ "loss": 0.1613,
1808
+ "step": 11900
1809
+ },
1810
+ {
1811
+ "epoch": 1.1378850640384708,
1812
+ "grad_norm": 0.5573967695236206,
1813
+ "learning_rate": 9.895634353255172e-05,
1814
+ "loss": 0.1623,
1815
+ "step": 11950
1816
+ },
1817
+ {
1818
+ "epoch": 1.1426462886254345,
1819
+ "grad_norm": 0.5798735022544861,
1820
+ "learning_rate": 9.895157754265562e-05,
1821
+ "loss": 0.1601,
1822
+ "step": 12000
1823
+ },
1824
+ {
1825
+ "epoch": 1.1426462886254345,
1826
+ "eval_loss": 0.15796181559562683,
1827
+ "eval_mae": 0.65403813123703,
1828
+ "eval_mse": 326.8865051269531,
1829
+ "eval_rmse": 18.0800029072717,
1830
+ "eval_runtime": 56.7165,
1831
+ "eval_samples_per_second": 10533.356,
1832
+ "eval_smape": 81.25044703483582,
1833
+ "eval_steps_per_second": 20.576,
1834
+ "step": 12000
1835
+ },
1836
+ {
1837
+ "epoch": 1.1474075132123982,
1838
+ "grad_norm": 0.5247554183006287,
1839
+ "learning_rate": 9.894681155275951e-05,
1840
+ "loss": 0.1574,
1841
+ "step": 12050
1842
+ },
1843
+ {
1844
+ "epoch": 1.152168737799362,
1845
+ "grad_norm": 0.6056792736053467,
1846
+ "learning_rate": 9.894204556286342e-05,
1847
+ "loss": 0.1583,
1848
+ "step": 12100
1849
+ },
1850
+ {
1851
+ "epoch": 1.1569299623863258,
1852
+ "grad_norm": 0.5114259123802185,
1853
+ "learning_rate": 9.893727957296731e-05,
1854
+ "loss": 0.1641,
1855
+ "step": 12150
1856
+ },
1857
+ {
1858
+ "epoch": 1.1616911869732895,
1859
+ "grad_norm": 0.40764451026916504,
1860
+ "learning_rate": 9.89325135830712e-05,
1861
+ "loss": 0.1621,
1862
+ "step": 12200
1863
+ },
1864
+ {
1865
+ "epoch": 1.1664524115602533,
1866
+ "grad_norm": 0.5216367244720459,
1867
+ "learning_rate": 9.89277475931751e-05,
1868
+ "loss": 0.1597,
1869
+ "step": 12250
1870
+ },
1871
+ {
1872
+ "epoch": 1.171213636147217,
1873
+ "grad_norm": 0.48652514815330505,
1874
+ "learning_rate": 9.8922981603279e-05,
1875
+ "loss": 0.1605,
1876
+ "step": 12300
1877
+ },
1878
+ {
1879
+ "epoch": 1.175974860734181,
1880
+ "grad_norm": 0.46615609526634216,
1881
+ "learning_rate": 9.89182156133829e-05,
1882
+ "loss": 0.1571,
1883
+ "step": 12350
1884
+ },
1885
+ {
1886
+ "epoch": 1.1807360853211446,
1887
+ "grad_norm": 0.5413913130760193,
1888
+ "learning_rate": 9.891344962348679e-05,
1889
+ "loss": 0.1599,
1890
+ "step": 12400
1891
+ },
1892
+ {
1893
+ "epoch": 1.1854973099081083,
1894
+ "grad_norm": 0.42148470878601074,
1895
+ "learning_rate": 9.89086836335907e-05,
1896
+ "loss": 0.1605,
1897
+ "step": 12450
1898
+ },
1899
+ {
1900
+ "epoch": 1.1902585344950722,
1901
+ "grad_norm": 0.7329843044281006,
1902
+ "learning_rate": 9.89039176436946e-05,
1903
+ "loss": 0.1603,
1904
+ "step": 12500
1905
+ },
1906
+ {
1907
+ "epoch": 1.195019759082036,
1908
+ "grad_norm": 0.42273497581481934,
1909
+ "learning_rate": 9.889915165379849e-05,
1910
+ "loss": 0.1617,
1911
+ "step": 12550
1912
+ },
1913
+ {
1914
+ "epoch": 1.1997809836689997,
1915
+ "grad_norm": 0.5869929790496826,
1916
+ "learning_rate": 9.88943856639024e-05,
1917
+ "loss": 0.1612,
1918
+ "step": 12600
1919
+ },
1920
+ {
1921
+ "epoch": 1.2045422082559634,
1922
+ "grad_norm": 0.4401240944862366,
1923
+ "learning_rate": 9.888961967400629e-05,
1924
+ "loss": 0.1577,
1925
+ "step": 12650
1926
+ },
1927
+ {
1928
+ "epoch": 1.2093034328429273,
1929
+ "grad_norm": 0.48275691270828247,
1930
+ "learning_rate": 9.888485368411019e-05,
1931
+ "loss": 0.1595,
1932
+ "step": 12700
1933
+ },
1934
+ {
1935
+ "epoch": 1.214064657429891,
1936
+ "grad_norm": 0.5482587218284607,
1937
+ "learning_rate": 9.88800876942141e-05,
1938
+ "loss": 0.16,
1939
+ "step": 12750
1940
+ },
1941
+ {
1942
+ "epoch": 1.2188258820168547,
1943
+ "grad_norm": 0.6347602009773254,
1944
+ "learning_rate": 9.887532170431799e-05,
1945
+ "loss": 0.1596,
1946
+ "step": 12800
1947
+ },
1948
+ {
1949
+ "epoch": 1.2235871066038184,
1950
+ "grad_norm": 0.4231024384498596,
1951
+ "learning_rate": 9.887055571442189e-05,
1952
+ "loss": 0.1624,
1953
+ "step": 12850
1954
+ },
1955
+ {
1956
+ "epoch": 1.2283483311907823,
1957
+ "grad_norm": 0.5505658984184265,
1958
+ "learning_rate": 9.886578972452579e-05,
1959
+ "loss": 0.1582,
1960
+ "step": 12900
1961
+ },
1962
+ {
1963
+ "epoch": 1.233109555777746,
1964
+ "grad_norm": 0.4918171763420105,
1965
+ "learning_rate": 9.886102373462969e-05,
1966
+ "loss": 0.1577,
1967
+ "step": 12950
1968
+ },
1969
+ {
1970
+ "epoch": 1.2378707803647098,
1971
+ "grad_norm": 0.45393800735473633,
1972
+ "learning_rate": 9.885625774473359e-05,
1973
+ "loss": 0.1585,
1974
+ "step": 13000
1975
+ },
1976
+ {
1977
+ "epoch": 1.2378707803647098,
1978
+ "eval_loss": 0.15753023326396942,
1979
+ "eval_mae": 0.6532484889030457,
1980
+ "eval_mse": 279.79644775390625,
1981
+ "eval_rmse": 16.72711713816539,
1982
+ "eval_runtime": 57.893,
1983
+ "eval_samples_per_second": 10319.3,
1984
+ "eval_smape": 107.6181173324585,
1985
+ "eval_steps_per_second": 20.158,
1986
+ "step": 13000
1987
+ },
1988
+ {
1989
+ "epoch": 1.2426320049516735,
1990
+ "grad_norm": 0.3777833878993988,
1991
+ "learning_rate": 9.885149175483749e-05,
1992
+ "loss": 0.1577,
1993
+ "step": 13050
1994
+ },
1995
+ {
1996
+ "epoch": 1.2473932295386374,
1997
+ "grad_norm": 0.5700230002403259,
1998
+ "learning_rate": 9.884672576494139e-05,
1999
+ "loss": 0.1587,
2000
+ "step": 13100
2001
+ },
2002
+ {
2003
+ "epoch": 1.2521544541256011,
2004
+ "grad_norm": 0.4470745027065277,
2005
+ "learning_rate": 9.884195977504529e-05,
2006
+ "loss": 0.1604,
2007
+ "step": 13150
2008
+ },
2009
+ {
2010
+ "epoch": 1.2569156787125648,
2011
+ "grad_norm": 0.4185906648635864,
2012
+ "learning_rate": 9.883719378514918e-05,
2013
+ "loss": 0.1562,
2014
+ "step": 13200
2015
+ },
2016
+ {
2017
+ "epoch": 1.2616769032995285,
2018
+ "grad_norm": 0.3835722804069519,
2019
+ "learning_rate": 9.883242779525308e-05,
2020
+ "loss": 0.1583,
2021
+ "step": 13250
2022
+ },
2023
+ {
2024
+ "epoch": 1.2664381278864925,
2025
+ "grad_norm": 0.45621258020401,
2026
+ "learning_rate": 9.882766180535697e-05,
2027
+ "loss": 0.1577,
2028
+ "step": 13300
2029
+ },
2030
+ {
2031
+ "epoch": 1.2711993524734562,
2032
+ "grad_norm": 0.5590381622314453,
2033
+ "learning_rate": 9.882289581546088e-05,
2034
+ "loss": 0.1608,
2035
+ "step": 13350
2036
+ },
2037
+ {
2038
+ "epoch": 1.2759605770604199,
2039
+ "grad_norm": 0.5501840114593506,
2040
+ "learning_rate": 9.881812982556477e-05,
2041
+ "loss": 0.1623,
2042
+ "step": 13400
2043
+ },
2044
+ {
2045
+ "epoch": 1.2807218016473838,
2046
+ "grad_norm": 0.4480155408382416,
2047
+ "learning_rate": 9.881336383566867e-05,
2048
+ "loss": 0.1566,
2049
+ "step": 13450
2050
+ },
2051
+ {
2052
+ "epoch": 1.2854830262343475,
2053
+ "grad_norm": 0.5472989678382874,
2054
+ "learning_rate": 9.880859784577258e-05,
2055
+ "loss": 0.1613,
2056
+ "step": 13500
2057
+ },
2058
+ {
2059
+ "epoch": 1.2902442508213112,
2060
+ "grad_norm": 0.48176899552345276,
2061
+ "learning_rate": 9.880383185587647e-05,
2062
+ "loss": 0.1614,
2063
+ "step": 13550
2064
+ },
2065
+ {
2066
+ "epoch": 1.295005475408275,
2067
+ "grad_norm": 0.40140554308891296,
2068
+ "learning_rate": 9.879906586598037e-05,
2069
+ "loss": 0.1586,
2070
+ "step": 13600
2071
+ },
2072
+ {
2073
+ "epoch": 1.2997666999952386,
2074
+ "grad_norm": 0.5117682218551636,
2075
+ "learning_rate": 9.879429987608427e-05,
2076
+ "loss": 0.1602,
2077
+ "step": 13650
2078
+ },
2079
+ {
2080
+ "epoch": 1.3045279245822026,
2081
+ "grad_norm": 0.7169548273086548,
2082
+ "learning_rate": 9.878953388618817e-05,
2083
+ "loss": 0.1623,
2084
+ "step": 13700
2085
+ },
2086
+ {
2087
+ "epoch": 1.3092891491691663,
2088
+ "grad_norm": 0.44949373602867126,
2089
+ "learning_rate": 9.878476789629207e-05,
2090
+ "loss": 0.1601,
2091
+ "step": 13750
2092
+ },
2093
+ {
2094
+ "epoch": 1.31405037375613,
2095
+ "grad_norm": 0.597175121307373,
2096
+ "learning_rate": 9.878000190639595e-05,
2097
+ "loss": 0.1606,
2098
+ "step": 13800
2099
+ },
2100
+ {
2101
+ "epoch": 1.318811598343094,
2102
+ "grad_norm": 0.553895115852356,
2103
+ "learning_rate": 9.877523591649987e-05,
2104
+ "loss": 0.1617,
2105
+ "step": 13850
2106
+ },
2107
+ {
2108
+ "epoch": 1.3235728229300576,
2109
+ "grad_norm": 0.5089927315711975,
2110
+ "learning_rate": 9.877046992660377e-05,
2111
+ "loss": 0.1595,
2112
+ "step": 13900
2113
+ },
2114
+ {
2115
+ "epoch": 1.3283340475170213,
2116
+ "grad_norm": 0.430867463350296,
2117
+ "learning_rate": 9.876570393670765e-05,
2118
+ "loss": 0.1582,
2119
+ "step": 13950
2120
+ },
2121
+ {
2122
+ "epoch": 1.3330952721039853,
2123
+ "grad_norm": 0.39970675110816956,
2124
+ "learning_rate": 9.876093794681156e-05,
2125
+ "loss": 0.1567,
2126
+ "step": 14000
2127
+ },
2128
+ {
2129
+ "epoch": 1.3330952721039853,
2130
+ "eval_loss": 0.15753522515296936,
2131
+ "eval_mae": 0.6622462272644043,
2132
+ "eval_mse": 328.3490295410156,
2133
+ "eval_rmse": 18.120403680409982,
2134
+ "eval_runtime": 58.0545,
2135
+ "eval_samples_per_second": 10290.588,
2136
+ "eval_smape": 91.98985695838928,
2137
+ "eval_steps_per_second": 20.102,
2138
+ "step": 14000
2139
+ },
2140
+ {
2141
+ "epoch": 1.337856496690949,
2142
+ "grad_norm": 0.5808453559875488,
2143
+ "learning_rate": 9.875617195691545e-05,
2144
+ "loss": 0.1589,
2145
+ "step": 14050
2146
+ },
2147
+ {
2148
+ "epoch": 1.3426177212779127,
2149
+ "grad_norm": 0.4418608546257019,
2150
+ "learning_rate": 9.875140596701935e-05,
2151
+ "loss": 0.1584,
2152
+ "step": 14100
2153
+ },
2154
+ {
2155
+ "epoch": 1.3473789458648764,
2156
+ "grad_norm": 0.6623143553733826,
2157
+ "learning_rate": 9.874663997712325e-05,
2158
+ "loss": 0.1623,
2159
+ "step": 14150
2160
+ },
2161
+ {
2162
+ "epoch": 1.35214017045184,
2163
+ "grad_norm": 0.4194190204143524,
2164
+ "learning_rate": 9.874187398722715e-05,
2165
+ "loss": 0.1596,
2166
+ "step": 14200
2167
+ },
2168
+ {
2169
+ "epoch": 1.356901395038804,
2170
+ "grad_norm": 0.6208611130714417,
2171
+ "learning_rate": 9.873710799733105e-05,
2172
+ "loss": 0.1583,
2173
+ "step": 14250
2174
+ },
2175
+ {
2176
+ "epoch": 1.3616626196257677,
2177
+ "grad_norm": 0.4435657262802124,
2178
+ "learning_rate": 9.873234200743495e-05,
2179
+ "loss": 0.16,
2180
+ "step": 14300
2181
+ },
2182
+ {
2183
+ "epoch": 1.3664238442127314,
2184
+ "grad_norm": 0.4906177222728729,
2185
+ "learning_rate": 9.872757601753885e-05,
2186
+ "loss": 0.1581,
2187
+ "step": 14350
2188
+ },
2189
+ {
2190
+ "epoch": 1.3711850687996954,
2191
+ "grad_norm": 0.5340787172317505,
2192
+ "learning_rate": 9.872281002764275e-05,
2193
+ "loss": 0.1588,
2194
+ "step": 14400
2195
+ },
2196
+ {
2197
+ "epoch": 1.375946293386659,
2198
+ "grad_norm": 0.4307633340358734,
2199
+ "learning_rate": 9.871804403774665e-05,
2200
+ "loss": 0.1585,
2201
+ "step": 14450
2202
+ },
2203
+ {
2204
+ "epoch": 1.3807075179736228,
2205
+ "grad_norm": 0.49194300174713135,
2206
+ "learning_rate": 9.871327804785055e-05,
2207
+ "loss": 0.1572,
2208
+ "step": 14500
2209
+ },
2210
+ {
2211
+ "epoch": 1.3854687425605867,
2212
+ "grad_norm": 0.4391520917415619,
2213
+ "learning_rate": 9.870851205795443e-05,
2214
+ "loss": 0.1573,
2215
+ "step": 14550
2216
+ },
2217
+ {
2218
+ "epoch": 1.3902299671475504,
2219
+ "grad_norm": 0.4503444731235504,
2220
+ "learning_rate": 9.870374606805835e-05,
2221
+ "loss": 0.1577,
2222
+ "step": 14600
2223
+ },
2224
+ {
2225
+ "epoch": 1.3949911917345141,
2226
+ "grad_norm": 0.5113334655761719,
2227
+ "learning_rate": 9.869898007816225e-05,
2228
+ "loss": 0.1594,
2229
+ "step": 14650
2230
+ },
2231
+ {
2232
+ "epoch": 1.3997524163214778,
2233
+ "grad_norm": 0.3871005177497864,
2234
+ "learning_rate": 9.869421408826613e-05,
2235
+ "loss": 0.1559,
2236
+ "step": 14700
2237
+ },
2238
+ {
2239
+ "epoch": 1.4045136409084416,
2240
+ "grad_norm": 0.5482053756713867,
2241
+ "learning_rate": 9.868944809837004e-05,
2242
+ "loss": 0.1592,
2243
+ "step": 14750
2244
+ },
2245
+ {
2246
+ "epoch": 1.4092748654954055,
2247
+ "grad_norm": 0.45771437883377075,
2248
+ "learning_rate": 9.868468210847393e-05,
2249
+ "loss": 0.1572,
2250
+ "step": 14800
2251
+ },
2252
+ {
2253
+ "epoch": 1.4140360900823692,
2254
+ "grad_norm": 0.4550696015357971,
2255
+ "learning_rate": 9.867991611857783e-05,
2256
+ "loss": 0.1605,
2257
+ "step": 14850
2258
+ },
2259
+ {
2260
+ "epoch": 1.418797314669333,
2261
+ "grad_norm": 0.6991235613822937,
2262
+ "learning_rate": 9.867515012868174e-05,
2263
+ "loss": 0.1589,
2264
+ "step": 14900
2265
+ },
2266
+ {
2267
+ "epoch": 1.4235585392562968,
2268
+ "grad_norm": 0.5531545877456665,
2269
+ "learning_rate": 9.867038413878563e-05,
2270
+ "loss": 0.1574,
2271
+ "step": 14950
2272
+ },
2273
+ {
2274
+ "epoch": 1.4283197638432605,
2275
+ "grad_norm": 0.4692751169204712,
2276
+ "learning_rate": 9.866561814888953e-05,
2277
+ "loss": 0.1592,
2278
+ "step": 15000
2279
+ },
2280
+ {
2281
+ "epoch": 1.4283197638432605,
2282
+ "eval_loss": 0.15674826502799988,
2283
+ "eval_mae": 0.6522776484489441,
2284
+ "eval_mse": 376.8973388671875,
2285
+ "eval_rmse": 19.41384400027948,
2286
+ "eval_runtime": 57.2088,
2287
+ "eval_samples_per_second": 10442.706,
2288
+ "eval_smape": 89.79519009590149,
2289
+ "eval_steps_per_second": 20.399,
2290
+ "step": 15000
2291
+ },
2292
+ {
2293
+ "epoch": 1.4330809884302242,
2294
+ "grad_norm": 0.41401219367980957,
2295
+ "learning_rate": 9.866085215899343e-05,
2296
+ "loss": 0.1594,
2297
+ "step": 15050
2298
+ },
2299
+ {
2300
+ "epoch": 1.437842213017188,
2301
+ "grad_norm": 0.6155771613121033,
2302
+ "learning_rate": 9.865608616909733e-05,
2303
+ "loss": 0.1597,
2304
+ "step": 15100
2305
+ },
2306
+ {
2307
+ "epoch": 1.4426034376041517,
2308
+ "grad_norm": 0.6229146718978882,
2309
+ "learning_rate": 9.865132017920123e-05,
2310
+ "loss": 0.1595,
2311
+ "step": 15150
2312
+ },
2313
+ {
2314
+ "epoch": 1.4473646621911156,
2315
+ "grad_norm": 0.5311764478683472,
2316
+ "learning_rate": 9.864655418930511e-05,
2317
+ "loss": 0.1613,
2318
+ "step": 15200
2319
+ },
2320
+ {
2321
+ "epoch": 1.4521258867780793,
2322
+ "grad_norm": 0.4075564742088318,
2323
+ "learning_rate": 9.864178819940903e-05,
2324
+ "loss": 0.1581,
2325
+ "step": 15250
2326
+ },
2327
+ {
2328
+ "epoch": 1.456887111365043,
2329
+ "grad_norm": 0.5737677216529846,
2330
+ "learning_rate": 9.863702220951291e-05,
2331
+ "loss": 0.1622,
2332
+ "step": 15300
2333
+ },
2334
+ {
2335
+ "epoch": 1.461648335952007,
2336
+ "grad_norm": 0.5977826118469238,
2337
+ "learning_rate": 9.863225621961681e-05,
2338
+ "loss": 0.1586,
2339
+ "step": 15350
2340
+ },
2341
+ {
2342
+ "epoch": 1.4664095605389706,
2343
+ "grad_norm": 0.4717673659324646,
2344
+ "learning_rate": 9.862749022972073e-05,
2345
+ "loss": 0.1566,
2346
+ "step": 15400
2347
+ },
2348
+ {
2349
+ "epoch": 1.4711707851259344,
2350
+ "grad_norm": 0.4783164858818054,
2351
+ "learning_rate": 9.862272423982461e-05,
2352
+ "loss": 0.162,
2353
+ "step": 15450
2354
+ },
2355
+ {
2356
+ "epoch": 1.4759320097128983,
2357
+ "grad_norm": 0.4709276258945465,
2358
+ "learning_rate": 9.861795824992851e-05,
2359
+ "loss": 0.1628,
2360
+ "step": 15500
2361
+ },
2362
+ {
2363
+ "epoch": 1.480693234299862,
2364
+ "grad_norm": 0.46055328845977783,
2365
+ "learning_rate": 9.861319226003241e-05,
2366
+ "loss": 0.1573,
2367
+ "step": 15550
2368
+ },
2369
+ {
2370
+ "epoch": 1.4854544588868257,
2371
+ "grad_norm": 0.4675583839416504,
2372
+ "learning_rate": 9.860842627013631e-05,
2373
+ "loss": 0.1612,
2374
+ "step": 15600
2375
+ },
2376
+ {
2377
+ "epoch": 1.4902156834737894,
2378
+ "grad_norm": 0.49638524651527405,
2379
+ "learning_rate": 9.860366028024021e-05,
2380
+ "loss": 0.1565,
2381
+ "step": 15650
2382
+ },
2383
+ {
2384
+ "epoch": 1.4949769080607531,
2385
+ "grad_norm": 0.4782148003578186,
2386
+ "learning_rate": 9.859889429034411e-05,
2387
+ "loss": 0.1559,
2388
+ "step": 15700
2389
+ },
2390
+ {
2391
+ "epoch": 1.499738132647717,
2392
+ "grad_norm": 0.39972543716430664,
2393
+ "learning_rate": 9.859412830044801e-05,
2394
+ "loss": 0.1597,
2395
+ "step": 15750
2396
+ },
2397
+ {
2398
+ "epoch": 1.5044993572346808,
2399
+ "grad_norm": 0.46148207783699036,
2400
+ "learning_rate": 9.858936231055191e-05,
2401
+ "loss": 0.165,
2402
+ "step": 15800
2403
+ },
2404
+ {
2405
+ "epoch": 1.5092605818216445,
2406
+ "grad_norm": 0.6166296005249023,
2407
+ "learning_rate": 9.858459632065581e-05,
2408
+ "loss": 0.1601,
2409
+ "step": 15850
2410
+ },
2411
+ {
2412
+ "epoch": 1.5140218064086084,
2413
+ "grad_norm": 0.6877084970474243,
2414
+ "learning_rate": 9.857983033075971e-05,
2415
+ "loss": 0.1622,
2416
+ "step": 15900
2417
+ },
2418
+ {
2419
+ "epoch": 1.518783030995572,
2420
+ "grad_norm": 0.4697044789791107,
2421
+ "learning_rate": 9.85750643408636e-05,
2422
+ "loss": 0.1591,
2423
+ "step": 15950
2424
+ },
2425
+ {
2426
+ "epoch": 1.5235442555825358,
2427
+ "grad_norm": 0.4562913179397583,
2428
+ "learning_rate": 9.857029835096751e-05,
2429
+ "loss": 0.16,
2430
+ "step": 16000
2431
+ },
2432
+ {
2433
+ "epoch": 1.5235442555825358,
2434
+ "eval_loss": 0.15759705007076263,
2435
+ "eval_mae": 0.6579864621162415,
2436
+ "eval_mse": 327.5270690917969,
2437
+ "eval_rmse": 18.097708945935583,
2438
+ "eval_runtime": 59.3074,
2439
+ "eval_samples_per_second": 10073.191,
2440
+ "eval_smape": 105.73155879974365,
2441
+ "eval_steps_per_second": 19.677,
2442
+ "step": 16000
2443
+ },
2444
+ {
2445
+ "epoch": 1.5283054801694997,
2446
+ "grad_norm": 0.4631439447402954,
2447
+ "learning_rate": 9.856553236107139e-05,
2448
+ "loss": 0.1596,
2449
+ "step": 16050
2450
+ },
2451
+ {
2452
+ "epoch": 1.5330667047564632,
2453
+ "grad_norm": 0.5179547667503357,
2454
+ "learning_rate": 9.856076637117529e-05,
2455
+ "loss": 0.161,
2456
+ "step": 16100
2457
+ },
2458
+ {
2459
+ "epoch": 1.5378279293434272,
2460
+ "grad_norm": 0.5846447348594666,
2461
+ "learning_rate": 9.85560003812792e-05,
2462
+ "loss": 0.157,
2463
+ "step": 16150
2464
+ },
2465
+ {
2466
+ "epoch": 1.5425891539303909,
2467
+ "grad_norm": 0.41618812084198,
2468
+ "learning_rate": 9.855123439138309e-05,
2469
+ "loss": 0.1604,
2470
+ "step": 16200
2471
+ },
2472
+ {
2473
+ "epoch": 1.5473503785173546,
2474
+ "grad_norm": 0.5047721862792969,
2475
+ "learning_rate": 9.854646840148699e-05,
2476
+ "loss": 0.1607,
2477
+ "step": 16250
2478
+ },
2479
+ {
2480
+ "epoch": 1.5521116031043185,
2481
+ "grad_norm": 0.5991541147232056,
2482
+ "learning_rate": 9.854170241159089e-05,
2483
+ "loss": 0.1596,
2484
+ "step": 16300
2485
+ },
2486
+ {
2487
+ "epoch": 1.5568728276912822,
2488
+ "grad_norm": 0.43497374653816223,
2489
+ "learning_rate": 9.853693642169479e-05,
2490
+ "loss": 0.1577,
2491
+ "step": 16350
2492
+ },
2493
+ {
2494
+ "epoch": 1.561634052278246,
2495
+ "grad_norm": 0.40854403376579285,
2496
+ "learning_rate": 9.853217043179869e-05,
2497
+ "loss": 0.1599,
2498
+ "step": 16400
2499
+ },
2500
+ {
2501
+ "epoch": 1.5663952768652099,
2502
+ "grad_norm": 0.4918048679828644,
2503
+ "learning_rate": 9.852740444190259e-05,
2504
+ "loss": 0.1603,
2505
+ "step": 16450
2506
+ },
2507
+ {
2508
+ "epoch": 1.5711565014521733,
2509
+ "grad_norm": 0.3920314908027649,
2510
+ "learning_rate": 9.852263845200649e-05,
2511
+ "loss": 0.1573,
2512
+ "step": 16500
2513
+ },
2514
+ {
2515
+ "epoch": 1.5759177260391373,
2516
+ "grad_norm": 0.3981036841869354,
2517
+ "learning_rate": 9.851787246211039e-05,
2518
+ "loss": 0.1586,
2519
+ "step": 16550
2520
+ },
2521
+ {
2522
+ "epoch": 1.5806789506261012,
2523
+ "grad_norm": 0.6724033355712891,
2524
+ "learning_rate": 9.851310647221427e-05,
2525
+ "loss": 0.1577,
2526
+ "step": 16600
2527
+ },
2528
+ {
2529
+ "epoch": 1.5854401752130647,
2530
+ "grad_norm": 0.45915940403938293,
2531
+ "learning_rate": 9.850834048231819e-05,
2532
+ "loss": 0.1634,
2533
+ "step": 16650
2534
+ },
2535
+ {
2536
+ "epoch": 1.5902013998000286,
2537
+ "grad_norm": 0.4456646740436554,
2538
+ "learning_rate": 9.850357449242207e-05,
2539
+ "loss": 0.1598,
2540
+ "step": 16700
2541
+ },
2542
+ {
2543
+ "epoch": 1.5949626243869923,
2544
+ "grad_norm": 0.4617702066898346,
2545
+ "learning_rate": 9.849880850252597e-05,
2546
+ "loss": 0.1588,
2547
+ "step": 16750
2548
+ },
2549
+ {
2550
+ "epoch": 1.599723848973956,
2551
+ "grad_norm": 0.4858147203922272,
2552
+ "learning_rate": 9.849404251262989e-05,
2553
+ "loss": 0.1603,
2554
+ "step": 16800
2555
+ },
2556
+ {
2557
+ "epoch": 1.60448507356092,
2558
+ "grad_norm": 0.5424185395240784,
2559
+ "learning_rate": 9.848927652273377e-05,
2560
+ "loss": 0.1588,
2561
+ "step": 16850
2562
+ },
2563
+ {
2564
+ "epoch": 1.6092462981478837,
2565
+ "grad_norm": 0.432170033454895,
2566
+ "learning_rate": 9.848451053283767e-05,
2567
+ "loss": 0.1593,
2568
+ "step": 16900
2569
+ },
2570
+ {
2571
+ "epoch": 1.6140075227348474,
2572
+ "grad_norm": 0.403414249420166,
2573
+ "learning_rate": 9.847974454294157e-05,
2574
+ "loss": 0.1564,
2575
+ "step": 16950
2576
+ },
2577
+ {
2578
+ "epoch": 1.6187687473218113,
2579
+ "grad_norm": 0.5686953067779541,
2580
+ "learning_rate": 9.847497855304547e-05,
2581
+ "loss": 0.1586,
2582
+ "step": 17000
2583
+ },
2584
+ {
2585
+ "epoch": 1.6187687473218113,
2586
+ "eval_loss": 0.15684476494789124,
2587
+ "eval_mae": 0.6601889133453369,
2588
+ "eval_mse": 399.5775146484375,
2589
+ "eval_rmse": 19.989435075770338,
2590
+ "eval_runtime": 60.8742,
2591
+ "eval_samples_per_second": 9813.93,
2592
+ "eval_smape": 88.6057436466217,
2593
+ "eval_steps_per_second": 19.171,
2594
+ "step": 17000
2595
+ },
2596
+ {
2597
+ "epoch": 1.6235299719087748,
2598
+ "grad_norm": 0.472042977809906,
2599
+ "learning_rate": 9.847021256314937e-05,
2600
+ "loss": 0.1585,
2601
+ "step": 17050
2602
+ },
2603
+ {
2604
+ "epoch": 1.6282911964957387,
2605
+ "grad_norm": 0.6247090697288513,
2606
+ "learning_rate": 9.846544657325327e-05,
2607
+ "loss": 0.1583,
2608
+ "step": 17100
2609
+ },
2610
+ {
2611
+ "epoch": 1.6330524210827024,
2612
+ "grad_norm": 0.4843044579029083,
2613
+ "learning_rate": 9.846068058335717e-05,
2614
+ "loss": 0.1592,
2615
+ "step": 17150
2616
+ },
2617
+ {
2618
+ "epoch": 1.6378136456696661,
2619
+ "grad_norm": 0.48187774419784546,
2620
+ "learning_rate": 9.845591459346106e-05,
2621
+ "loss": 0.1592,
2622
+ "step": 17200
2623
+ },
2624
+ {
2625
+ "epoch": 1.64257487025663,
2626
+ "grad_norm": 0.4763176441192627,
2627
+ "learning_rate": 9.845114860356497e-05,
2628
+ "loss": 0.1592,
2629
+ "step": 17250
2630
+ },
2631
+ {
2632
+ "epoch": 1.6473360948435938,
2633
+ "grad_norm": 0.5375500321388245,
2634
+ "learning_rate": 9.844638261366887e-05,
2635
+ "loss": 0.159,
2636
+ "step": 17300
2637
+ },
2638
+ {
2639
+ "epoch": 1.6520973194305575,
2640
+ "grad_norm": 0.4639647305011749,
2641
+ "learning_rate": 9.844161662377276e-05,
2642
+ "loss": 0.1578,
2643
+ "step": 17350
2644
+ },
2645
+ {
2646
+ "epoch": 1.6568585440175214,
2647
+ "grad_norm": 0.44177523255348206,
2648
+ "learning_rate": 9.843685063387667e-05,
2649
+ "loss": 0.1593,
2650
+ "step": 17400
2651
+ },
2652
+ {
2653
+ "epoch": 1.6616197686044851,
2654
+ "grad_norm": 0.5092636942863464,
2655
+ "learning_rate": 9.843208464398055e-05,
2656
+ "loss": 0.161,
2657
+ "step": 17450
2658
+ },
2659
+ {
2660
+ "epoch": 1.6663809931914488,
2661
+ "grad_norm": 0.4873998165130615,
2662
+ "learning_rate": 9.842731865408445e-05,
2663
+ "loss": 0.1584,
2664
+ "step": 17500
2665
+ },
2666
+ {
2667
+ "epoch": 1.6711422177784128,
2668
+ "grad_norm": 0.622734546661377,
2669
+ "learning_rate": 9.842255266418837e-05,
2670
+ "loss": 0.1597,
2671
+ "step": 17550
2672
+ },
2673
+ {
2674
+ "epoch": 1.6759034423653763,
2675
+ "grad_norm": 0.5134626030921936,
2676
+ "learning_rate": 9.841778667429225e-05,
2677
+ "loss": 0.1591,
2678
+ "step": 17600
2679
+ },
2680
+ {
2681
+ "epoch": 1.6806646669523402,
2682
+ "grad_norm": 0.42936229705810547,
2683
+ "learning_rate": 9.841302068439615e-05,
2684
+ "loss": 0.1575,
2685
+ "step": 17650
2686
+ },
2687
+ {
2688
+ "epoch": 1.6854258915393039,
2689
+ "grad_norm": 0.44326427578926086,
2690
+ "learning_rate": 9.840825469450005e-05,
2691
+ "loss": 0.1568,
2692
+ "step": 17700
2693
+ },
2694
+ {
2695
+ "epoch": 1.6901871161262676,
2696
+ "grad_norm": 0.3937668204307556,
2697
+ "learning_rate": 9.840348870460395e-05,
2698
+ "loss": 0.1593,
2699
+ "step": 17750
2700
+ },
2701
+ {
2702
+ "epoch": 1.6949483407132315,
2703
+ "grad_norm": 0.45583653450012207,
2704
+ "learning_rate": 9.839872271470785e-05,
2705
+ "loss": 0.157,
2706
+ "step": 17800
2707
+ },
2708
+ {
2709
+ "epoch": 1.6997095653001952,
2710
+ "grad_norm": 0.5357958078384399,
2711
+ "learning_rate": 9.839395672481175e-05,
2712
+ "loss": 0.1604,
2713
+ "step": 17850
2714
+ },
2715
+ {
2716
+ "epoch": 1.704470789887159,
2717
+ "grad_norm": 0.421678751707077,
2718
+ "learning_rate": 9.838919073491565e-05,
2719
+ "loss": 0.1598,
2720
+ "step": 17900
2721
+ },
2722
+ {
2723
+ "epoch": 1.7092320144741229,
2724
+ "grad_norm": 0.42326951026916504,
2725
+ "learning_rate": 9.838442474501954e-05,
2726
+ "loss": 0.157,
2727
+ "step": 17950
2728
+ },
2729
+ {
2730
+ "epoch": 1.7139932390610864,
2731
+ "grad_norm": 0.43803659081459045,
2732
+ "learning_rate": 9.837965875512344e-05,
2733
+ "loss": 0.1593,
2734
+ "step": 18000
2735
+ },
2736
+ {
2737
+ "epoch": 1.7139932390610864,
2738
+ "eval_loss": 0.15651458501815796,
2739
+ "eval_mae": 0.6603885889053345,
2740
+ "eval_mse": 359.5630187988281,
2741
+ "eval_rmse": 18.962146998660995,
2742
+ "eval_runtime": 60.8886,
2743
+ "eval_samples_per_second": 9811.606,
2744
+ "eval_smape": 325.50642490386963,
2745
+ "eval_steps_per_second": 19.166,
2746
+ "step": 18000
2747
+ },
2748
+ {
2749
+ "epoch": 1.7187544636480503,
2750
+ "grad_norm": 0.42728474736213684,
2751
+ "learning_rate": 9.837489276522735e-05,
2752
+ "loss": 0.1588,
2753
+ "step": 18050
2754
+ },
2755
+ {
2756
+ "epoch": 1.723515688235014,
2757
+ "grad_norm": 0.4929046928882599,
2758
+ "learning_rate": 9.837012677533124e-05,
2759
+ "loss": 0.1587,
2760
+ "step": 18100
2761
+ },
2762
+ {
2763
+ "epoch": 1.7282769128219777,
2764
+ "grad_norm": 0.47886890172958374,
2765
+ "learning_rate": 9.836536078543513e-05,
2766
+ "loss": 0.1572,
2767
+ "step": 18150
2768
+ },
2769
+ {
2770
+ "epoch": 1.7330381374089416,
2771
+ "grad_norm": 0.4829745590686798,
2772
+ "learning_rate": 9.836059479553903e-05,
2773
+ "loss": 0.1575,
2774
+ "step": 18200
2775
+ },
2776
+ {
2777
+ "epoch": 1.7377993619959053,
2778
+ "grad_norm": 0.4256702661514282,
2779
+ "learning_rate": 9.835582880564293e-05,
2780
+ "loss": 0.1575,
2781
+ "step": 18250
2782
+ },
2783
+ {
2784
+ "epoch": 1.742560586582869,
2785
+ "grad_norm": 0.4803942143917084,
2786
+ "learning_rate": 9.835106281574683e-05,
2787
+ "loss": 0.1559,
2788
+ "step": 18300
2789
+ },
2790
+ {
2791
+ "epoch": 1.747321811169833,
2792
+ "grad_norm": 0.5108392834663391,
2793
+ "learning_rate": 9.834629682585073e-05,
2794
+ "loss": 0.1558,
2795
+ "step": 18350
2796
+ },
2797
+ {
2798
+ "epoch": 1.7520830357567967,
2799
+ "grad_norm": 0.5541846752166748,
2800
+ "learning_rate": 9.834153083595463e-05,
2801
+ "loss": 0.1571,
2802
+ "step": 18400
2803
+ },
2804
+ {
2805
+ "epoch": 1.7568442603437604,
2806
+ "grad_norm": 0.3996049463748932,
2807
+ "learning_rate": 9.833676484605853e-05,
2808
+ "loss": 0.1588,
2809
+ "step": 18450
2810
+ },
2811
+ {
2812
+ "epoch": 1.7616054849307243,
2813
+ "grad_norm": 0.4311594069004059,
2814
+ "learning_rate": 9.833199885616243e-05,
2815
+ "loss": 0.1566,
2816
+ "step": 18500
2817
+ },
2818
+ {
2819
+ "epoch": 1.7663667095176878,
2820
+ "grad_norm": 0.45717188715934753,
2821
+ "learning_rate": 9.832723286626633e-05,
2822
+ "loss": 0.1586,
2823
+ "step": 18550
2824
+ },
2825
+ {
2826
+ "epoch": 1.7711279341046517,
2827
+ "grad_norm": 0.48945853114128113,
2828
+ "learning_rate": 9.832246687637022e-05,
2829
+ "loss": 0.1581,
2830
+ "step": 18600
2831
+ },
2832
+ {
2833
+ "epoch": 1.7758891586916155,
2834
+ "grad_norm": 0.4877380430698395,
2835
+ "learning_rate": 9.831770088647413e-05,
2836
+ "loss": 0.1594,
2837
+ "step": 18650
2838
+ },
2839
+ {
2840
+ "epoch": 1.7806503832785792,
2841
+ "grad_norm": 0.47437113523483276,
2842
+ "learning_rate": 9.831293489657803e-05,
2843
+ "loss": 0.1566,
2844
+ "step": 18700
2845
+ },
2846
+ {
2847
+ "epoch": 1.785411607865543,
2848
+ "grad_norm": 0.6995478272438049,
2849
+ "learning_rate": 9.830816890668192e-05,
2850
+ "loss": 0.1564,
2851
+ "step": 18750
2852
+ },
2853
+ {
2854
+ "epoch": 1.7901728324525068,
2855
+ "grad_norm": 0.4471156895160675,
2856
+ "learning_rate": 9.830340291678583e-05,
2857
+ "loss": 0.1567,
2858
+ "step": 18800
2859
+ },
2860
+ {
2861
+ "epoch": 1.7949340570394705,
2862
+ "grad_norm": 0.459011435508728,
2863
+ "learning_rate": 9.829863692688972e-05,
2864
+ "loss": 0.1599,
2865
+ "step": 18850
2866
+ },
2867
+ {
2868
+ "epoch": 1.7996952816264344,
2869
+ "grad_norm": 0.4757770001888275,
2870
+ "learning_rate": 9.829387093699362e-05,
2871
+ "loss": 0.1574,
2872
+ "step": 18900
2873
+ },
2874
+ {
2875
+ "epoch": 1.804456506213398,
2876
+ "grad_norm": 0.4678910970687866,
2877
+ "learning_rate": 9.828910494709751e-05,
2878
+ "loss": 0.1604,
2879
+ "step": 18950
2880
+ },
2881
+ {
2882
+ "epoch": 1.8092177308003619,
2883
+ "grad_norm": 0.4852876663208008,
2884
+ "learning_rate": 9.828433895720141e-05,
2885
+ "loss": 0.1562,
2886
+ "step": 19000
2887
+ },
2888
+ {
2889
+ "epoch": 1.8092177308003619,
2890
+ "eval_loss": 0.15658971667289734,
2891
+ "eval_mae": 0.6544845700263977,
2892
+ "eval_mse": 281.27392578125,
2893
+ "eval_rmse": 16.77122314505564,
2894
+ "eval_runtime": 61.2247,
2895
+ "eval_samples_per_second": 9757.737,
2896
+ "eval_smape": 80.4527759552002,
2897
+ "eval_steps_per_second": 19.061,
2898
+ "step": 19000
2899
+ },
2900
+ {
2901
+ "epoch": 1.8139789553873258,
2902
+ "grad_norm": 0.37452152371406555,
2903
+ "learning_rate": 9.827957296730531e-05,
2904
+ "loss": 0.1561,
2905
+ "step": 19050
2906
+ },
2907
+ {
2908
+ "epoch": 1.8187401799742893,
2909
+ "grad_norm": 0.4231972396373749,
2910
+ "learning_rate": 9.827480697740921e-05,
2911
+ "loss": 0.1588,
2912
+ "step": 19100
2913
+ },
2914
+ {
2915
+ "epoch": 1.8235014045612532,
2916
+ "grad_norm": 0.423755943775177,
2917
+ "learning_rate": 9.827004098751311e-05,
2918
+ "loss": 0.1566,
2919
+ "step": 19150
2920
+ },
2921
+ {
2922
+ "epoch": 1.828262629148217,
2923
+ "grad_norm": 0.4381329119205475,
2924
+ "learning_rate": 9.826527499761701e-05,
2925
+ "loss": 0.1596,
2926
+ "step": 19200
2927
+ },
2928
+ {
2929
+ "epoch": 1.8330238537351806,
2930
+ "grad_norm": 0.4936196804046631,
2931
+ "learning_rate": 9.826050900772091e-05,
2932
+ "loss": 0.1598,
2933
+ "step": 19250
2934
+ },
2935
+ {
2936
+ "epoch": 1.8377850783221445,
2937
+ "grad_norm": 0.6386341452598572,
2938
+ "learning_rate": 9.825574301782481e-05,
2939
+ "loss": 0.1601,
2940
+ "step": 19300
2941
+ },
2942
+ {
2943
+ "epoch": 1.8425463029091083,
2944
+ "grad_norm": 0.49802061915397644,
2945
+ "learning_rate": 9.82509770279287e-05,
2946
+ "loss": 0.1587,
2947
+ "step": 19350
2948
+ },
2949
+ {
2950
+ "epoch": 1.847307527496072,
2951
+ "grad_norm": 0.4627436697483063,
2952
+ "learning_rate": 9.824621103803261e-05,
2953
+ "loss": 0.1587,
2954
+ "step": 19400
2955
+ },
2956
+ {
2957
+ "epoch": 1.852068752083036,
2958
+ "grad_norm": 0.4635562598705292,
2959
+ "learning_rate": 9.824144504813651e-05,
2960
+ "loss": 0.1532,
2961
+ "step": 19450
2962
+ },
2963
+ {
2964
+ "epoch": 1.8568299766699994,
2965
+ "grad_norm": 0.4869281053543091,
2966
+ "learning_rate": 9.82366790582404e-05,
2967
+ "loss": 0.1576,
2968
+ "step": 19500
2969
+ },
2970
+ {
2971
+ "epoch": 1.8615912012569633,
2972
+ "grad_norm": 0.4622304141521454,
2973
+ "learning_rate": 9.82319130683443e-05,
2974
+ "loss": 0.1587,
2975
+ "step": 19550
2976
+ },
2977
+ {
2978
+ "epoch": 1.866352425843927,
2979
+ "grad_norm": 0.42829111218452454,
2980
+ "learning_rate": 9.82271470784482e-05,
2981
+ "loss": 0.1584,
2982
+ "step": 19600
2983
+ },
2984
+ {
2985
+ "epoch": 1.8711136504308907,
2986
+ "grad_norm": 0.44152921438217163,
2987
+ "learning_rate": 9.82223810885521e-05,
2988
+ "loss": 0.1597,
2989
+ "step": 19650
2990
+ },
2991
+ {
2992
+ "epoch": 1.8758748750178547,
2993
+ "grad_norm": 0.4362374246120453,
2994
+ "learning_rate": 9.8217615098656e-05,
2995
+ "loss": 0.1535,
2996
+ "step": 19700
2997
+ },
2998
+ {
2999
+ "epoch": 1.8806360996048184,
3000
+ "grad_norm": 0.42130181193351746,
3001
+ "learning_rate": 9.82128491087599e-05,
3002
+ "loss": 0.1585,
3003
+ "step": 19750
3004
+ },
3005
+ {
3006
+ "epoch": 1.885397324191782,
3007
+ "grad_norm": 0.5120296478271484,
3008
+ "learning_rate": 9.82080831188638e-05,
3009
+ "loss": 0.1583,
3010
+ "step": 19800
3011
+ },
3012
+ {
3013
+ "epoch": 1.890158548778746,
3014
+ "grad_norm": 0.4205983579158783,
3015
+ "learning_rate": 9.820331712896768e-05,
3016
+ "loss": 0.1585,
3017
+ "step": 19850
3018
+ },
3019
+ {
3020
+ "epoch": 1.8949197733657097,
3021
+ "grad_norm": 0.4189218580722809,
3022
+ "learning_rate": 9.819855113907159e-05,
3023
+ "loss": 0.1561,
3024
+ "step": 19900
3025
+ },
3026
+ {
3027
+ "epoch": 1.8996809979526734,
3028
+ "grad_norm": 0.6145504117012024,
3029
+ "learning_rate": 9.819378514917549e-05,
3030
+ "loss": 0.1625,
3031
+ "step": 19950
3032
+ },
3033
+ {
3034
+ "epoch": 1.9044422225396374,
3035
+ "grad_norm": 0.5396240949630737,
3036
+ "learning_rate": 9.818901915927938e-05,
3037
+ "loss": 0.1601,
3038
+ "step": 20000
3039
+ },
3040
+ {
3041
+ "epoch": 1.9044422225396374,
3042
+ "eval_loss": 0.15698538720607758,
3043
+ "eval_mae": 0.6542770862579346,
3044
+ "eval_mse": 287.357666015625,
3045
+ "eval_rmse": 16.951627237985885,
3046
+ "eval_runtime": 58.5657,
3047
+ "eval_samples_per_second": 10200.761,
3048
+ "eval_smape": 79.55442667007446,
3049
+ "eval_steps_per_second": 19.926,
3050
+ "step": 20000
3051
+ },
3052
+ {
3053
+ "epoch": 1.9092034471266008,
3054
+ "grad_norm": 0.45280078053474426,
3055
+ "learning_rate": 9.818425316938329e-05,
3056
+ "loss": 0.1593,
3057
+ "step": 20050
3058
+ },
3059
+ {
3060
+ "epoch": 1.9139646717135648,
3061
+ "grad_norm": 0.455552339553833,
3062
+ "learning_rate": 9.817948717948718e-05,
3063
+ "loss": 0.1579,
3064
+ "step": 20100
3065
+ },
3066
+ {
3067
+ "epoch": 1.9187258963005285,
3068
+ "grad_norm": 0.44774943590164185,
3069
+ "learning_rate": 9.817472118959108e-05,
3070
+ "loss": 0.1577,
3071
+ "step": 20150
3072
+ },
3073
+ {
3074
+ "epoch": 1.9234871208874922,
3075
+ "grad_norm": 0.4146822392940521,
3076
+ "learning_rate": 9.816995519969499e-05,
3077
+ "loss": 0.1567,
3078
+ "step": 20200
3079
+ },
3080
+ {
3081
+ "epoch": 1.9282483454744561,
3082
+ "grad_norm": 0.48615461587905884,
3083
+ "learning_rate": 9.816518920979888e-05,
3084
+ "loss": 0.1596,
3085
+ "step": 20250
3086
+ },
3087
+ {
3088
+ "epoch": 1.9330095700614198,
3089
+ "grad_norm": 0.5081954598426819,
3090
+ "learning_rate": 9.816042321990278e-05,
3091
+ "loss": 0.1576,
3092
+ "step": 20300
3093
+ },
3094
+ {
3095
+ "epoch": 1.9377707946483835,
3096
+ "grad_norm": 0.5155813097953796,
3097
+ "learning_rate": 9.815565723000668e-05,
3098
+ "loss": 0.1551,
3099
+ "step": 20350
3100
+ },
3101
+ {
3102
+ "epoch": 1.9425320192353475,
3103
+ "grad_norm": 0.5232491493225098,
3104
+ "learning_rate": 9.815089124011058e-05,
3105
+ "loss": 0.1597,
3106
+ "step": 20400
3107
+ },
3108
+ {
3109
+ "epoch": 1.947293243822311,
3110
+ "grad_norm": 0.4078335762023926,
3111
+ "learning_rate": 9.814612525021447e-05,
3112
+ "loss": 0.1556,
3113
+ "step": 20450
3114
+ },
3115
+ {
3116
+ "epoch": 1.9520544684092749,
3117
+ "grad_norm": 0.4975152611732483,
3118
+ "learning_rate": 9.814135926031837e-05,
3119
+ "loss": 0.1591,
3120
+ "step": 20500
3121
+ },
3122
+ {
3123
+ "epoch": 1.9568156929962386,
3124
+ "grad_norm": 0.6481941938400269,
3125
+ "learning_rate": 9.813659327042227e-05,
3126
+ "loss": 0.1565,
3127
+ "step": 20550
3128
+ },
3129
+ {
3130
+ "epoch": 1.9615769175832023,
3131
+ "grad_norm": 0.4526354968547821,
3132
+ "learning_rate": 9.813182728052617e-05,
3133
+ "loss": 0.1546,
3134
+ "step": 20600
3135
+ },
3136
+ {
3137
+ "epoch": 1.9663381421701662,
3138
+ "grad_norm": 0.4288252294063568,
3139
+ "learning_rate": 9.812706129063007e-05,
3140
+ "loss": 0.1551,
3141
+ "step": 20650
3142
+ },
3143
+ {
3144
+ "epoch": 1.97109936675713,
3145
+ "grad_norm": 0.434865266084671,
3146
+ "learning_rate": 9.812229530073397e-05,
3147
+ "loss": 0.1557,
3148
+ "step": 20700
3149
+ },
3150
+ {
3151
+ "epoch": 1.9758605913440936,
3152
+ "grad_norm": 0.4131234586238861,
3153
+ "learning_rate": 9.811752931083786e-05,
3154
+ "loss": 0.156,
3155
+ "step": 20750
3156
+ },
3157
+ {
3158
+ "epoch": 1.9806218159310576,
3159
+ "grad_norm": 0.519029438495636,
3160
+ "learning_rate": 9.811276332094177e-05,
3161
+ "loss": 0.1596,
3162
+ "step": 20800
3163
+ },
3164
+ {
3165
+ "epoch": 1.9853830405180213,
3166
+ "grad_norm": 0.4048980474472046,
3167
+ "learning_rate": 9.810799733104566e-05,
3168
+ "loss": 0.153,
3169
+ "step": 20850
3170
+ },
3171
+ {
3172
+ "epoch": 1.990144265104985,
3173
+ "grad_norm": 0.44951915740966797,
3174
+ "learning_rate": 9.810323134114956e-05,
3175
+ "loss": 0.1562,
3176
+ "step": 20900
3177
+ },
3178
+ {
3179
+ "epoch": 1.994905489691949,
3180
+ "grad_norm": 0.40820789337158203,
3181
+ "learning_rate": 9.809846535125346e-05,
3182
+ "loss": 0.1576,
3183
+ "step": 20950
3184
+ },
3185
+ {
3186
+ "epoch": 1.9996667142789124,
3187
+ "grad_norm": 0.6201032400131226,
3188
+ "learning_rate": 9.809369936135736e-05,
3189
+ "loss": 0.1551,
3190
+ "step": 21000
3191
+ },
3192
+ {
3193
+ "epoch": 1.9996667142789124,
3194
+ "eval_loss": 0.15608514845371246,
3195
+ "eval_mae": 0.6444294452667236,
3196
+ "eval_mse": 279.2149658203125,
3197
+ "eval_rmse": 16.709726682992528,
3198
+ "eval_runtime": 54.4875,
3199
+ "eval_samples_per_second": 10964.25,
3200
+ "eval_smape": 102.60157585144043,
3201
+ "eval_steps_per_second": 21.418,
3202
+ "step": 21000
3203
+ },
3204
+ {
3205
+ "epoch": 2.004380326620007,
3206
+ "grad_norm": 0.4798502027988434,
3207
+ "learning_rate": 9.808893337146126e-05,
3208
+ "loss": 0.1558,
3209
+ "step": 21050
3210
+ },
3211
+ {
3212
+ "epoch": 2.0091415512069704,
3213
+ "grad_norm": 0.47718650102615356,
3214
+ "learning_rate": 9.808416738156516e-05,
3215
+ "loss": 0.1581,
3216
+ "step": 21100
3217
+ },
3218
+ {
3219
+ "epoch": 2.0139027757939343,
3220
+ "grad_norm": 0.4789866507053375,
3221
+ "learning_rate": 9.807940139166906e-05,
3222
+ "loss": 0.1558,
3223
+ "step": 21150
3224
+ },
3225
+ {
3226
+ "epoch": 2.0186640003808978,
3227
+ "grad_norm": 0.5219734907150269,
3228
+ "learning_rate": 9.807463540177296e-05,
3229
+ "loss": 0.1564,
3230
+ "step": 21200
3231
+ },
3232
+ {
3233
+ "epoch": 2.0234252249678617,
3234
+ "grad_norm": 0.4981229305267334,
3235
+ "learning_rate": 9.806986941187684e-05,
3236
+ "loss": 0.1583,
3237
+ "step": 21250
3238
+ },
3239
+ {
3240
+ "epoch": 2.0281864495548256,
3241
+ "grad_norm": 0.4696637690067291,
3242
+ "learning_rate": 9.806510342198075e-05,
3243
+ "loss": 0.1568,
3244
+ "step": 21300
3245
+ },
3246
+ {
3247
+ "epoch": 2.032947674141789,
3248
+ "grad_norm": 0.44064363837242126,
3249
+ "learning_rate": 9.806033743208465e-05,
3250
+ "loss": 0.1579,
3251
+ "step": 21350
3252
+ },
3253
+ {
3254
+ "epoch": 2.037708898728753,
3255
+ "grad_norm": 0.5143409967422485,
3256
+ "learning_rate": 9.805557144218854e-05,
3257
+ "loss": 0.156,
3258
+ "step": 21400
3259
+ },
3260
+ {
3261
+ "epoch": 2.042470123315717,
3262
+ "grad_norm": 0.49321916699409485,
3263
+ "learning_rate": 9.805080545229245e-05,
3264
+ "loss": 0.155,
3265
+ "step": 21450
3266
+ },
3267
+ {
3268
+ "epoch": 2.0472313479026805,
3269
+ "grad_norm": 0.4427430331707001,
3270
+ "learning_rate": 9.804603946239634e-05,
3271
+ "loss": 0.1566,
3272
+ "step": 21500
3273
+ },
3274
+ {
3275
+ "epoch": 2.0519925724896444,
3276
+ "grad_norm": 0.45803865790367126,
3277
+ "learning_rate": 9.804127347250024e-05,
3278
+ "loss": 0.1593,
3279
+ "step": 21550
3280
+ },
3281
+ {
3282
+ "epoch": 2.056753797076608,
3283
+ "grad_norm": 0.4853162467479706,
3284
+ "learning_rate": 9.803650748260415e-05,
3285
+ "loss": 0.1575,
3286
+ "step": 21600
3287
+ },
3288
+ {
3289
+ "epoch": 2.061515021663572,
3290
+ "grad_norm": 0.47955945134162903,
3291
+ "learning_rate": 9.803174149270804e-05,
3292
+ "loss": 0.1594,
3293
+ "step": 21650
3294
+ },
3295
+ {
3296
+ "epoch": 2.0662762462505357,
3297
+ "grad_norm": 0.44243359565734863,
3298
+ "learning_rate": 9.802697550281194e-05,
3299
+ "loss": 0.1597,
3300
+ "step": 21700
3301
+ },
3302
+ {
3303
+ "epoch": 2.0710374708374992,
3304
+ "grad_norm": 0.5727405548095703,
3305
+ "learning_rate": 9.802220951291584e-05,
3306
+ "loss": 0.1576,
3307
+ "step": 21750
3308
+ },
3309
+ {
3310
+ "epoch": 2.075798695424463,
3311
+ "grad_norm": 0.5481444001197815,
3312
+ "learning_rate": 9.801744352301974e-05,
3313
+ "loss": 0.1547,
3314
+ "step": 21800
3315
+ },
3316
+ {
3317
+ "epoch": 2.080559920011427,
3318
+ "grad_norm": 0.46369367837905884,
3319
+ "learning_rate": 9.801267753312364e-05,
3320
+ "loss": 0.1553,
3321
+ "step": 21850
3322
+ },
3323
+ {
3324
+ "epoch": 2.0853211445983906,
3325
+ "grad_norm": 0.48963698744773865,
3326
+ "learning_rate": 9.800791154322754e-05,
3327
+ "loss": 0.1579,
3328
+ "step": 21900
3329
+ },
3330
+ {
3331
+ "epoch": 2.0900823691853545,
3332
+ "grad_norm": 0.5048883557319641,
3333
+ "learning_rate": 9.800314555333144e-05,
3334
+ "loss": 0.1597,
3335
+ "step": 21950
3336
+ },
3337
+ {
3338
+ "epoch": 2.0948435937723184,
3339
+ "grad_norm": 0.4893467128276825,
3340
+ "learning_rate": 9.799837956343532e-05,
3341
+ "loss": 0.1532,
3342
+ "step": 22000
3343
+ },
3344
+ {
3345
+ "epoch": 2.0948435937723184,
3346
+ "eval_loss": 0.1553574949502945,
3347
+ "eval_mae": 0.6453903317451477,
3348
+ "eval_mse": 282.9573974609375,
3349
+ "eval_rmse": 16.821337564561787,
3350
+ "eval_runtime": 60.2846,
3351
+ "eval_samples_per_second": 9909.912,
3352
+ "eval_smape": 85.01211404800415,
3353
+ "eval_steps_per_second": 19.358,
3354
+ "step": 22000
3355
+ },
3356
+ {
3357
+ "epoch": 2.099604818359282,
3358
+ "grad_norm": 0.46226996183395386,
3359
+ "learning_rate": 9.799361357353923e-05,
3360
+ "loss": 0.1564,
3361
+ "step": 22050
3362
+ },
3363
+ {
3364
+ "epoch": 2.104366042946246,
3365
+ "grad_norm": 0.46276605129241943,
3366
+ "learning_rate": 9.798884758364313e-05,
3367
+ "loss": 0.1562,
3368
+ "step": 22100
3369
+ },
3370
+ {
3371
+ "epoch": 2.1091272675332093,
3372
+ "grad_norm": 0.4646179974079132,
3373
+ "learning_rate": 9.798408159374702e-05,
3374
+ "loss": 0.1576,
3375
+ "step": 22150
3376
+ },
3377
+ {
3378
+ "epoch": 2.1138884921201733,
3379
+ "grad_norm": 0.4897124171257019,
3380
+ "learning_rate": 9.797931560385093e-05,
3381
+ "loss": 0.1568,
3382
+ "step": 22200
3383
+ },
3384
+ {
3385
+ "epoch": 2.118649716707137,
3386
+ "grad_norm": 0.43058913946151733,
3387
+ "learning_rate": 9.797454961395482e-05,
3388
+ "loss": 0.1579,
3389
+ "step": 22250
3390
+ },
3391
+ {
3392
+ "epoch": 2.1234109412941007,
3393
+ "grad_norm": 0.37743493914604187,
3394
+ "learning_rate": 9.796978362405872e-05,
3395
+ "loss": 0.1578,
3396
+ "step": 22300
3397
+ },
3398
+ {
3399
+ "epoch": 2.1281721658810646,
3400
+ "grad_norm": 0.4362202286720276,
3401
+ "learning_rate": 9.796501763416263e-05,
3402
+ "loss": 0.1579,
3403
+ "step": 22350
3404
+ },
3405
+ {
3406
+ "epoch": 2.1329333904680285,
3407
+ "grad_norm": 0.3836491107940674,
3408
+ "learning_rate": 9.796025164426652e-05,
3409
+ "loss": 0.1576,
3410
+ "step": 22400
3411
+ },
3412
+ {
3413
+ "epoch": 2.137694615054992,
3414
+ "grad_norm": 0.4960291385650635,
3415
+ "learning_rate": 9.795548565437042e-05,
3416
+ "loss": 0.1563,
3417
+ "step": 22450
3418
+ },
3419
+ {
3420
+ "epoch": 2.142455839641956,
3421
+ "grad_norm": 0.43519532680511475,
3422
+ "learning_rate": 9.795071966447432e-05,
3423
+ "loss": 0.1598,
3424
+ "step": 22500
3425
+ },
3426
+ {
3427
+ "epoch": 2.1472170642289194,
3428
+ "grad_norm": 0.4592967629432678,
3429
+ "learning_rate": 9.794595367457822e-05,
3430
+ "loss": 0.158,
3431
+ "step": 22550
3432
+ },
3433
+ {
3434
+ "epoch": 2.1519782888158834,
3435
+ "grad_norm": 0.5372556447982788,
3436
+ "learning_rate": 9.794118768468212e-05,
3437
+ "loss": 0.1591,
3438
+ "step": 22600
3439
+ },
3440
+ {
3441
+ "epoch": 2.1567395134028473,
3442
+ "grad_norm": 0.40967512130737305,
3443
+ "learning_rate": 9.7936421694786e-05,
3444
+ "loss": 0.159,
3445
+ "step": 22650
3446
+ },
3447
+ {
3448
+ "epoch": 2.161500737989811,
3449
+ "grad_norm": 0.4951903223991394,
3450
+ "learning_rate": 9.793165570488992e-05,
3451
+ "loss": 0.1559,
3452
+ "step": 22700
3453
+ },
3454
+ {
3455
+ "epoch": 2.1662619625767747,
3456
+ "grad_norm": 0.467052698135376,
3457
+ "learning_rate": 9.79268897149938e-05,
3458
+ "loss": 0.1612,
3459
+ "step": 22750
3460
+ },
3461
+ {
3462
+ "epoch": 2.1710231871637387,
3463
+ "grad_norm": 0.42239800095558167,
3464
+ "learning_rate": 9.79221237250977e-05,
3465
+ "loss": 0.1597,
3466
+ "step": 22800
3467
+ },
3468
+ {
3469
+ "epoch": 2.175784411750702,
3470
+ "grad_norm": 0.4856722354888916,
3471
+ "learning_rate": 9.791735773520161e-05,
3472
+ "loss": 0.1572,
3473
+ "step": 22850
3474
+ },
3475
+ {
3476
+ "epoch": 2.180545636337666,
3477
+ "grad_norm": 0.47815021872520447,
3478
+ "learning_rate": 9.79125917453055e-05,
3479
+ "loss": 0.1607,
3480
+ "step": 22900
3481
+ },
3482
+ {
3483
+ "epoch": 2.18530686092463,
3484
+ "grad_norm": 0.5437564849853516,
3485
+ "learning_rate": 9.79078257554094e-05,
3486
+ "loss": 0.1587,
3487
+ "step": 22950
3488
+ },
3489
+ {
3490
+ "epoch": 2.1900680855115935,
3491
+ "grad_norm": 0.42942744493484497,
3492
+ "learning_rate": 9.79030597655133e-05,
3493
+ "loss": 0.1564,
3494
+ "step": 23000
3495
+ },
3496
+ {
3497
+ "epoch": 2.1900680855115935,
3498
+ "eval_loss": 0.15535122156143188,
3499
+ "eval_mae": 0.6484833359718323,
3500
+ "eval_mse": 332.3757629394531,
3501
+ "eval_rmse": 18.231175577549934,
3502
+ "eval_runtime": 56.6725,
3503
+ "eval_samples_per_second": 10541.528,
3504
+ "eval_smape": 76.03496313095093,
3505
+ "eval_steps_per_second": 20.592,
3506
+ "step": 23000
3507
+ },
3508
+ {
3509
+ "epoch": 2.1948293100985574,
3510
+ "grad_norm": 0.5147453546524048,
3511
+ "learning_rate": 9.78982937756172e-05,
3512
+ "loss": 0.1584,
3513
+ "step": 23050
3514
+ },
3515
+ {
3516
+ "epoch": 2.1995905346855213,
3517
+ "grad_norm": 0.46314072608947754,
3518
+ "learning_rate": 9.78935277857211e-05,
3519
+ "loss": 0.1585,
3520
+ "step": 23100
3521
+ },
3522
+ {
3523
+ "epoch": 2.204351759272485,
3524
+ "grad_norm": 0.5470126867294312,
3525
+ "learning_rate": 9.7888761795825e-05,
3526
+ "loss": 0.1594,
3527
+ "step": 23150
3528
+ },
3529
+ {
3530
+ "epoch": 2.2091129838594488,
3531
+ "grad_norm": 0.5074204802513123,
3532
+ "learning_rate": 9.78839958059289e-05,
3533
+ "loss": 0.1576,
3534
+ "step": 23200
3535
+ },
3536
+ {
3537
+ "epoch": 2.2138742084464123,
3538
+ "grad_norm": 0.3913464844226837,
3539
+ "learning_rate": 9.78792298160328e-05,
3540
+ "loss": 0.1564,
3541
+ "step": 23250
3542
+ },
3543
+ {
3544
+ "epoch": 2.218635433033376,
3545
+ "grad_norm": 0.5663050413131714,
3546
+ "learning_rate": 9.78744638261367e-05,
3547
+ "loss": 0.1607,
3548
+ "step": 23300
3549
+ },
3550
+ {
3551
+ "epoch": 2.22339665762034,
3552
+ "grad_norm": 0.5126326084136963,
3553
+ "learning_rate": 9.78696978362406e-05,
3554
+ "loss": 0.1575,
3555
+ "step": 23350
3556
+ },
3557
+ {
3558
+ "epoch": 2.2281578822073036,
3559
+ "grad_norm": 0.531508207321167,
3560
+ "learning_rate": 9.786493184634448e-05,
3561
+ "loss": 0.1552,
3562
+ "step": 23400
3563
+ },
3564
+ {
3565
+ "epoch": 2.2329191067942675,
3566
+ "grad_norm": 0.42278382182121277,
3567
+ "learning_rate": 9.78601658564484e-05,
3568
+ "loss": 0.1563,
3569
+ "step": 23450
3570
+ },
3571
+ {
3572
+ "epoch": 2.2376803313812315,
3573
+ "grad_norm": 0.4637637436389923,
3574
+ "learning_rate": 9.78553998665523e-05,
3575
+ "loss": 0.1589,
3576
+ "step": 23500
3577
+ },
3578
+ {
3579
+ "epoch": 2.242441555968195,
3580
+ "grad_norm": 0.4949529767036438,
3581
+ "learning_rate": 9.785063387665618e-05,
3582
+ "loss": 0.1572,
3583
+ "step": 23550
3584
+ },
3585
+ {
3586
+ "epoch": 2.247202780555159,
3587
+ "grad_norm": 0.5856890678405762,
3588
+ "learning_rate": 9.78458678867601e-05,
3589
+ "loss": 0.1571,
3590
+ "step": 23600
3591
+ },
3592
+ {
3593
+ "epoch": 2.2519640051421224,
3594
+ "grad_norm": 0.6656904220581055,
3595
+ "learning_rate": 9.784110189686398e-05,
3596
+ "loss": 0.1578,
3597
+ "step": 23650
3598
+ },
3599
+ {
3600
+ "epoch": 2.2567252297290863,
3601
+ "grad_norm": 0.49895474314689636,
3602
+ "learning_rate": 9.783633590696788e-05,
3603
+ "loss": 0.1544,
3604
+ "step": 23700
3605
+ },
3606
+ {
3607
+ "epoch": 2.26148645431605,
3608
+ "grad_norm": 0.5674960017204285,
3609
+ "learning_rate": 9.783156991707178e-05,
3610
+ "loss": 0.1586,
3611
+ "step": 23750
3612
+ },
3613
+ {
3614
+ "epoch": 2.2662476789030137,
3615
+ "grad_norm": 0.6098377108573914,
3616
+ "learning_rate": 9.782680392717568e-05,
3617
+ "loss": 0.1563,
3618
+ "step": 23800
3619
+ },
3620
+ {
3621
+ "epoch": 2.2710089034899776,
3622
+ "grad_norm": 0.5532990097999573,
3623
+ "learning_rate": 9.782203793727958e-05,
3624
+ "loss": 0.1558,
3625
+ "step": 23850
3626
+ },
3627
+ {
3628
+ "epoch": 2.2757701280769416,
3629
+ "grad_norm": 0.44679224491119385,
3630
+ "learning_rate": 9.781727194738346e-05,
3631
+ "loss": 0.1582,
3632
+ "step": 23900
3633
+ },
3634
+ {
3635
+ "epoch": 2.280531352663905,
3636
+ "grad_norm": 0.5477356314659119,
3637
+ "learning_rate": 9.781250595748738e-05,
3638
+ "loss": 0.1583,
3639
+ "step": 23950
3640
+ },
3641
+ {
3642
+ "epoch": 2.285292577250869,
3643
+ "grad_norm": 0.5364423990249634,
3644
+ "learning_rate": 9.780773996759128e-05,
3645
+ "loss": 0.1568,
3646
+ "step": 24000
3647
+ },
3648
+ {
3649
+ "epoch": 2.285292577250869,
3650
+ "eval_loss": 0.15508916974067688,
3651
+ "eval_mae": 0.6528403162956238,
3652
+ "eval_mse": 356.0440979003906,
3653
+ "eval_rmse": 18.869130819950097,
3654
+ "eval_runtime": 55.4778,
3655
+ "eval_samples_per_second": 10768.544,
3656
+ "eval_smape": 92.25972294807434,
3657
+ "eval_steps_per_second": 21.035,
3658
+ "step": 24000
3659
+ }
3660
+ ],
3661
+ "logging_steps": 50,
3662
+ "max_steps": 1050100,
3663
+ "num_input_tokens_seen": 0,
3664
+ "num_train_epochs": 100,
3665
+ "save_steps": 2000,
3666
+ "stateful_callbacks": {
3667
+ "EarlyStoppingCallback": {
3668
+ "args": {
3669
+ "early_stopping_patience": 3,
3670
+ "early_stopping_threshold": 0.0
3671
+ },
3672
+ "attributes": {
3673
+ "early_stopping_patience_counter": 0
3674
+ }
3675
+ },
3676
+ "TrainerControl": {
3677
+ "args": {
3678
+ "should_epoch_stop": false,
3679
+ "should_evaluate": false,
3680
+ "should_log": false,
3681
+ "should_save": true,
3682
+ "should_training_stop": false
3683
+ },
3684
+ "attributes": {}
3685
+ }
3686
+ },
3687
+ "total_flos": 4.560071489513472e+16,
3688
+ "train_batch_size": 256,
3689
+ "trial_name": null,
3690
+ "trial_params": null
3691
+ }
checkpoint-24000/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6da365a68d9f9be649ea037e893847209c49b9a8fbe245f23f2e5157ea0e5087
3
+ size 5777
checkpoint-26000/config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu",
3
+ "architectures": [
4
+ "PatchTSTForPrediction"
5
+ ],
6
+ "attention_dropout": 0.1,
7
+ "bias": true,
8
+ "channel_attention": false,
9
+ "channel_consistent_masking": false,
10
+ "context_length": 512,
11
+ "d_model": 256,
12
+ "distribution_output": "normal",
13
+ "do_mask_input": null,
14
+ "dropout": 0.1,
15
+ "ff_dropout": 0.0,
16
+ "ffn_dim": 256,
17
+ "head_dropout": 0.1,
18
+ "init_std": 0.02,
19
+ "loss": "mse",
20
+ "mask_type": "random",
21
+ "mask_value": 0,
22
+ "model_type": "patchtst",
23
+ "norm_eps": 1e-05,
24
+ "norm_type": "batchnorm",
25
+ "num_attention_heads": 16,
26
+ "num_forecast_mask_patches": [
27
+ 2
28
+ ],
29
+ "num_hidden_layers": 3,
30
+ "num_input_channels": 1,
31
+ "num_parallel_samples": 100,
32
+ "num_targets": 1,
33
+ "output_range": null,
34
+ "patch_length": 16,
35
+ "patch_stride": 16,
36
+ "path_dropout": 0.0,
37
+ "pooling_type": null,
38
+ "positional_dropout": 0.0,
39
+ "positional_encoding_type": "sincos",
40
+ "pre_norm": true,
41
+ "prediction_length": 1,
42
+ "prenorm": true,
43
+ "random_mask_ratio": 0.5,
44
+ "scaling": "std",
45
+ "share_embedding": true,
46
+ "share_projection": true,
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.51.3",
49
+ "unmasked_channel_indices": null,
50
+ "use_cls_token": false
51
+ }
checkpoint-26000/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:646054afec66c212225b9acb683a99cd5f4876b6f5ea1809e30b34ebe9226515
3
+ size 4852676
checkpoint-26000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b5bad401256fdce778140283b68108687d01cc95ccaa27caaaa8c47dd22bb5eb
3
+ size 9643275
checkpoint-26000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:24d5e15d79547515f03855afcdaa2bb31ed4d79c800d53b578561c50bc1f1c19
3
+ size 14645
checkpoint-26000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4bf46e5e6c632de2cf0dfa029556efc8d09905a8876fa5f430ccc0e01a3ca563
3
+ size 1465
checkpoint-26000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-26000/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6da365a68d9f9be649ea037e893847209c49b9a8fbe245f23f2e5157ea0e5087
3
+ size 5777
config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu",
3
+ "architectures": [
4
+ "PatchTSTForPrediction"
5
+ ],
6
+ "attention_dropout": 0.1,
7
+ "bias": true,
8
+ "channel_attention": false,
9
+ "channel_consistent_masking": false,
10
+ "context_length": 512,
11
+ "d_model": 256,
12
+ "distribution_output": "normal",
13
+ "do_mask_input": null,
14
+ "dropout": 0.1,
15
+ "ff_dropout": 0.0,
16
+ "ffn_dim": 256,
17
+ "head_dropout": 0.1,
18
+ "init_std": 0.02,
19
+ "loss": "mse",
20
+ "mask_type": "random",
21
+ "mask_value": 0,
22
+ "model_type": "patchtst",
23
+ "norm_eps": 1e-05,
24
+ "norm_type": "batchnorm",
25
+ "num_attention_heads": 16,
26
+ "num_forecast_mask_patches": [
27
+ 2
28
+ ],
29
+ "num_hidden_layers": 3,
30
+ "num_input_channels": 1,
31
+ "num_parallel_samples": 100,
32
+ "num_targets": 1,
33
+ "output_range": null,
34
+ "patch_length": 16,
35
+ "patch_stride": 16,
36
+ "path_dropout": 0.0,
37
+ "pooling_type": null,
38
+ "positional_dropout": 0.0,
39
+ "positional_encoding_type": "sincos",
40
+ "pre_norm": true,
41
+ "prediction_length": 1,
42
+ "prenorm": true,
43
+ "random_mask_ratio": 0.5,
44
+ "scaling": "std",
45
+ "share_embedding": true,
46
+ "share_projection": true,
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.51.3",
49
+ "unmasked_channel_indices": null,
50
+ "use_cls_token": false
51
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81c9fdcc1a5ae6b516a9415abcac889092f056770d30e965296c1bce7adb76e0
3
+ size 4852676
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6da365a68d9f9be649ea037e893847209c49b9a8fbe245f23f2e5157ea0e5087
3
+ size 5777