Upload folder using huggingface_hub
#7 opened by CreatorPhan

Files changed:
- adapter_model.bin (+1 -1)
- optimizer.pt (+1 -1)
- rng_state.pth (+1 -1)
- scheduler.pt (+1 -1)
- trainer_state.json (+603 -3)
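All five files land in a single commit made with the huggingface_hub client, as the PR title says. A minimal sketch of how such a commit is typically produced (the local folder path and repo id below are hypothetical placeholders):

    from huggingface_hub import HfApi

    api = HfApi()
    # Upload every file in a local checkpoint folder as one commit.
    # Large binaries (*.bin, *.pt, *.pth) are stored via Git LFS, so the
    # diffs below show only their updated LFS pointers.
    api.upload_folder(
        folder_path="./checkpoint-600",  # hypothetical local path
        repo_id="user/repo",             # hypothetical repo id
        repo_type="model",
        commit_message="Upload folder using huggingface_hub",
        create_pr=True,                  # open the upload as a PR like this one
    )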
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d17f997023cf8f985a547661db8c0ff488c11f01eaeaca062f1c51fd98fd299d
 size 39409357
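This diff, like the three below it, touches only a Git LFS pointer: the repo tracks each large binary as a three-line stub (spec version, sha256 oid, byte size) rather than the weights themselves. A sketch for checking that a downloaded file matches its pointer, using only the standard library:

    import hashlib

    def lfs_oid(path: str) -> str:
        # sha256 of the raw file contents; this is exactly what the
        # pointer's "oid sha256:..." field records.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
        return h.hexdigest()

    # Hash taken from the new pointer above.
    assert lfs_oid("adapter_model.bin") == "d17f997023cf8f985a547661db8c0ff488c11f01eaeaca062f1c51fd98fd299d"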
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1bf4520695940f7e95f944db6a71b14e3058601d156c2793f0f499326bbe7e61
 size 78844421
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9be2d54ebe54c824e6446e6978ff3aefa7199568ed49c03eaafa5cd13a62e8bf
 size 14575
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a56aa22161c776ca1cb5057f21bce0494ad7ac9991dfc63b7cac95a68c7f5148
 size 627
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
+  "epoch": 17.142857142857142,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 600,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -3007,13 +3007,613 @@
       "learning_rate": 0.0007023809523809524,
       "loss": 0.1548,
       "step": 500
+    },
+    {
+      "epoch": 14.31,
+      "learning_rate": 0.0007017857142857143,
+      "loss": 0.1521,
+      "step": 501
+    },
+    {
+      "epoch": 14.34,
+      "learning_rate": 0.0007011904761904761,
+      "loss": 0.1644,
+      "step": 502
+    },
+    {
+      "epoch": 14.37,
+      "learning_rate": 0.0007005952380952381,
+      "loss": 0.155,
+      "step": 503
+    },
+    {
+      "epoch": 14.4,
+      "learning_rate": 0.0007,
+      "loss": 0.1674,
+      "step": 504
+    },
+    {
+      "epoch": 14.43,
+      "learning_rate": 0.0006994047619047619,
+      "loss": 0.1619,
+      "step": 505
+    },
+    {
+      "epoch": 14.46,
+      "learning_rate": 0.0006988095238095237,
+      "loss": 0.1644,
+      "step": 506
+    },
+    {
+      "epoch": 14.49,
+      "learning_rate": 0.0006982142857142857,
+      "loss": 0.1723,
+      "step": 507
+    },
+    {
+      "epoch": 14.51,
+      "learning_rate": 0.0006976190476190476,
+      "loss": 0.1621,
+      "step": 508
+    },
+    {
+      "epoch": 14.54,
+      "learning_rate": 0.0006970238095238095,
+      "loss": 0.1647,
+      "step": 509
+    },
+    {
+      "epoch": 14.57,
+      "learning_rate": 0.0006964285714285714,
+      "loss": 0.1741,
+      "step": 510
+    },
+    {
+      "epoch": 14.6,
+      "learning_rate": 0.0006958333333333334,
+      "loss": 0.1673,
+      "step": 511
+    },
+    {
+      "epoch": 14.63,
+      "learning_rate": 0.0006952380952380952,
+      "loss": 0.1752,
+      "step": 512
+    },
+    {
+      "epoch": 14.66,
+      "learning_rate": 0.0006946428571428571,
+      "loss": 0.167,
+      "step": 513
+    },
+    {
+      "epoch": 14.69,
+      "learning_rate": 0.0006940476190476191,
+      "loss": 0.1718,
+      "step": 514
+    },
+    {
+      "epoch": 14.71,
+      "learning_rate": 0.000693452380952381,
+      "loss": 0.1787,
+      "step": 515
+    },
+    {
+      "epoch": 14.74,
+      "learning_rate": 0.0006928571428571428,
+      "loss": 0.1747,
+      "step": 516
+    },
+    {
+      "epoch": 14.77,
+      "learning_rate": 0.0006922619047619047,
+      "loss": 0.1766,
+      "step": 517
+    },
+    {
+      "epoch": 14.8,
+      "learning_rate": 0.0006916666666666667,
+      "loss": 0.1782,
+      "step": 518
+    },
+    {
+      "epoch": 14.83,
+      "learning_rate": 0.0006910714285714286,
+      "loss": 0.1799,
+      "step": 519
+    },
+    {
+      "epoch": 14.86,
+      "learning_rate": 0.0006904761904761905,
+      "loss": 0.169,
+      "step": 520
+    },
+    {
+      "epoch": 14.89,
+      "learning_rate": 0.0006898809523809523,
+      "loss": 0.1802,
+      "step": 521
+    },
+    {
+      "epoch": 14.91,
+      "learning_rate": 0.0006892857142857143,
+      "loss": 0.18,
+      "step": 522
+    },
+    {
+      "epoch": 14.94,
+      "learning_rate": 0.0006886904761904762,
+      "loss": 0.1823,
+      "step": 523
+    },
+    {
+      "epoch": 14.97,
+      "learning_rate": 0.0006880952380952381,
+      "loss": 0.1829,
+      "step": 524
+    },
+    {
+      "epoch": 15.0,
+      "learning_rate": 0.0006875,
+      "loss": 0.1885,
+      "step": 525
+    },
+    {
+      "epoch": 15.03,
+      "learning_rate": 0.000686904761904762,
+      "loss": 0.1171,
+      "step": 526
+    },
+    {
+      "epoch": 15.06,
+      "learning_rate": 0.0006863095238095238,
+      "loss": 0.1126,
+      "step": 527
+    },
+    {
+      "epoch": 15.09,
+      "learning_rate": 0.0006857142857142857,
+      "loss": 0.1186,
+      "step": 528
+    },
+    {
+      "epoch": 15.11,
+      "learning_rate": 0.0006851190476190477,
+      "loss": 0.1164,
+      "step": 529
+    },
+    {
+      "epoch": 15.14,
+      "learning_rate": 0.0006845238095238096,
+      "loss": 0.1163,
+      "step": 530
+    },
+    {
+      "epoch": 15.17,
+      "learning_rate": 0.0006839285714285714,
+      "loss": 0.1204,
+      "step": 531
+    },
+    {
+      "epoch": 15.2,
+      "learning_rate": 0.0006833333333333333,
+      "loss": 0.1212,
+      "step": 532
+    },
+    {
+      "epoch": 15.23,
+      "learning_rate": 0.0006827380952380953,
+      "loss": 0.1188,
+      "step": 533
+    },
+    {
+      "epoch": 15.26,
+      "learning_rate": 0.0006821428571428572,
+      "loss": 0.1154,
+      "step": 534
+    },
+    {
+      "epoch": 15.29,
+      "learning_rate": 0.0006815476190476191,
+      "loss": 0.1244,
+      "step": 535
+    },
+    {
+      "epoch": 15.31,
+      "learning_rate": 0.0006809523809523809,
+      "loss": 0.1214,
+      "step": 536
+    },
+    {
+      "epoch": 15.34,
+      "learning_rate": 0.0006803571428571429,
+      "loss": 0.1294,
+      "step": 537
+    },
+    {
+      "epoch": 15.37,
+      "learning_rate": 0.0006797619047619048,
+      "loss": 0.1232,
+      "step": 538
+    },
+    {
+      "epoch": 15.4,
+      "learning_rate": 0.0006791666666666667,
+      "loss": 0.1262,
+      "step": 539
+    },
+    {
+      "epoch": 15.43,
+      "learning_rate": 0.0006785714285714287,
+      "loss": 0.1195,
+      "step": 540
+    },
+    {
+      "epoch": 15.46,
+      "learning_rate": 0.0006779761904761905,
+      "loss": 0.123,
+      "step": 541
+    },
+    {
+      "epoch": 15.49,
+      "learning_rate": 0.0006773809523809524,
+      "loss": 0.1266,
+      "step": 542
+    },
+    {
+      "epoch": 15.51,
+      "learning_rate": 0.0006767857142857143,
+      "loss": 0.1345,
+      "step": 543
+    },
+    {
+      "epoch": 15.54,
+      "learning_rate": 0.0006761904761904763,
+      "loss": 0.1174,
+      "step": 544
+    },
+    {
+      "epoch": 15.57,
+      "learning_rate": 0.0006755952380952382,
+      "loss": 0.1293,
+      "step": 545
+    },
+    {
+      "epoch": 15.6,
+      "learning_rate": 0.000675,
+      "loss": 0.1293,
+      "step": 546
+    },
+    {
+      "epoch": 15.63,
+      "learning_rate": 0.0006744047619047619,
+      "loss": 0.1269,
+      "step": 547
+    },
+    {
+      "epoch": 15.66,
+      "learning_rate": 0.0006738095238095239,
+      "loss": 0.1321,
+      "step": 548
+    },
+    {
+      "epoch": 15.69,
+      "learning_rate": 0.0006732142857142858,
+      "loss": 0.1318,
+      "step": 549
+    },
+    {
+      "epoch": 15.71,
+      "learning_rate": 0.0006726190476190477,
+      "loss": 0.1283,
+      "step": 550
+    },
+    {
+      "epoch": 15.74,
+      "learning_rate": 0.0006720238095238096,
+      "loss": 0.128,
+      "step": 551
+    },
+    {
+      "epoch": 15.77,
+      "learning_rate": 0.0006714285714285714,
+      "loss": 0.1295,
+      "step": 552
+    },
+    {
+      "epoch": 15.8,
+      "learning_rate": 0.0006708333333333333,
+      "loss": 0.1323,
+      "step": 553
+    },
+    {
+      "epoch": 15.83,
+      "learning_rate": 0.0006702380952380952,
+      "loss": 0.1348,
+      "step": 554
+    },
+    {
+      "epoch": 15.86,
+      "learning_rate": 0.0006696428571428571,
+      "loss": 0.1276,
+      "step": 555
+    },
+    {
+      "epoch": 15.89,
+      "learning_rate": 0.000669047619047619,
+      "loss": 0.1356,
+      "step": 556
+    },
+    {
+      "epoch": 15.91,
+      "learning_rate": 0.0006684523809523809,
+      "loss": 0.1404,
+      "step": 557
+    },
+    {
+      "epoch": 15.94,
+      "learning_rate": 0.0006678571428571428,
+      "loss": 0.1311,
+      "step": 558
+    },
+    {
+      "epoch": 15.97,
+      "learning_rate": 0.0006672619047619048,
+      "loss": 0.1401,
+      "step": 559
+    },
+    {
+      "epoch": 16.0,
+      "learning_rate": 0.0006666666666666666,
+      "loss": 0.1411,
+      "step": 560
+    },
+    {
+      "epoch": 16.03,
+      "learning_rate": 0.0006660714285714285,
+      "loss": 0.0862,
+      "step": 561
+    },
+    {
+      "epoch": 16.06,
+      "learning_rate": 0.0006654761904761905,
+      "loss": 0.0902,
+      "step": 562
+    },
+    {
+      "epoch": 16.09,
+      "learning_rate": 0.0006648809523809524,
+      "loss": 0.0871,
+      "step": 563
+    },
+    {
+      "epoch": 16.11,
+      "learning_rate": 0.0006642857142857143,
+      "loss": 0.0906,
+      "step": 564
+    },
+    {
+      "epoch": 16.14,
+      "learning_rate": 0.0006636904761904761,
+      "loss": 0.0891,
+      "step": 565
+    },
+    {
+      "epoch": 16.17,
+      "learning_rate": 0.0006630952380952381,
+      "loss": 0.0917,
+      "step": 566
+    },
+    {
+      "epoch": 16.2,
+      "learning_rate": 0.0006625,
+      "loss": 0.0906,
+      "step": 567
+    },
+    {
+      "epoch": 16.23,
+      "learning_rate": 0.0006619047619047619,
+      "loss": 0.0927,
+      "step": 568
+    },
+    {
+      "epoch": 16.26,
+      "learning_rate": 0.0006613095238095238,
+      "loss": 0.0927,
+      "step": 569
+    },
+    {
+      "epoch": 16.29,
+      "learning_rate": 0.0006607142857142857,
+      "loss": 0.0934,
+      "step": 570
+    },
+    {
+      "epoch": 16.31,
+      "learning_rate": 0.0006601190476190476,
+      "loss": 0.0956,
+      "step": 571
+    },
+    {
+      "epoch": 16.34,
+      "learning_rate": 0.0006595238095238095,
+      "loss": 0.0933,
+      "step": 572
+    },
+    {
+      "epoch": 16.37,
+      "learning_rate": 0.0006589285714285715,
+      "loss": 0.0993,
+      "step": 573
+    },
+    {
+      "epoch": 16.4,
+      "learning_rate": 0.0006583333333333334,
+      "loss": 0.095,
+      "step": 574
+    },
+    {
+      "epoch": 16.43,
+      "learning_rate": 0.0006577380952380952,
+      "loss": 0.0963,
+      "step": 575
+    },
+    {
+      "epoch": 16.46,
+      "learning_rate": 0.0006571428571428571,
+      "loss": 0.0948,
+      "step": 576
+    },
+    {
+      "epoch": 16.49,
+      "learning_rate": 0.0006565476190476191,
+      "loss": 0.0952,
+      "step": 577
+    },
+    {
+      "epoch": 16.51,
+      "learning_rate": 0.000655952380952381,
+      "loss": 0.1001,
+      "step": 578
+    },
+    {
+      "epoch": 16.54,
+      "learning_rate": 0.0006553571428571429,
+      "loss": 0.0924,
+      "step": 579
+    },
+    {
+      "epoch": 16.57,
+      "learning_rate": 0.0006547619047619047,
+      "loss": 0.0962,
+      "step": 580
+    },
+    {
+      "epoch": 16.6,
+      "learning_rate": 0.0006541666666666667,
+      "loss": 0.0949,
+      "step": 581
+    },
+    {
+      "epoch": 16.63,
+      "learning_rate": 0.0006535714285714286,
+      "loss": 0.1,
+      "step": 582
+    },
+    {
+      "epoch": 16.66,
+      "learning_rate": 0.0006529761904761905,
+      "loss": 0.1009,
+      "step": 583
+    },
+    {
+      "epoch": 16.69,
+      "learning_rate": 0.0006523809523809525,
+      "loss": 0.1023,
+      "step": 584
+    },
+    {
+      "epoch": 16.71,
+      "learning_rate": 0.0006517857142857143,
+      "loss": 0.0995,
+      "step": 585
+    },
+    {
+      "epoch": 16.74,
+      "learning_rate": 0.0006511904761904762,
+      "loss": 0.1015,
+      "step": 586
+    },
+    {
+      "epoch": 16.77,
+      "learning_rate": 0.0006505952380952381,
+      "loss": 0.0966,
+      "step": 587
+    },
+    {
+      "epoch": 16.8,
+      "learning_rate": 0.0006500000000000001,
+      "loss": 0.1019,
+      "step": 588
+    },
+    {
+      "epoch": 16.83,
+      "learning_rate": 0.000649404761904762,
+      "loss": 0.0996,
+      "step": 589
+    },
+    {
+      "epoch": 16.86,
+      "learning_rate": 0.0006488095238095238,
+      "loss": 0.103,
+      "step": 590
+    },
+    {
+      "epoch": 16.89,
+      "learning_rate": 0.0006482142857142857,
+      "loss": 0.1042,
+      "step": 591
+    },
+    {
+      "epoch": 16.91,
+      "learning_rate": 0.0006476190476190477,
+      "loss": 0.1039,
+      "step": 592
+    },
+    {
+      "epoch": 16.94,
+      "learning_rate": 0.0006470238095238096,
+      "loss": 0.1058,
+      "step": 593
+    },
+    {
+      "epoch": 16.97,
+      "learning_rate": 0.0006464285714285715,
+      "loss": 0.0994,
+      "step": 594
+    },
+    {
+      "epoch": 17.0,
+      "learning_rate": 0.0006458333333333334,
+      "loss": 0.1062,
+      "step": 595
+    },
+    {
+      "epoch": 17.03,
+      "learning_rate": 0.0006452380952380953,
+      "loss": 0.0709,
+      "step": 596
+    },
+    {
+      "epoch": 17.06,
+      "learning_rate": 0.0006446428571428572,
+      "loss": 0.0733,
+      "step": 597
+    },
+    {
+      "epoch": 17.09,
+      "learning_rate": 0.0006440476190476191,
+      "loss": 0.0724,
+      "step": 598
+    },
+    {
+      "epoch": 17.11,
+      "learning_rate": 0.0006434523809523811,
+      "loss": 0.0733,
+      "step": 599
+    },
+    {
+      "epoch": 17.14,
+      "learning_rate": 0.0006428571428571429,
+      "loss": 0.0741,
+      "step": 600
     }
   ],
   "logging_steps": 1,
   "max_steps": 1680,
   "num_train_epochs": 48,
   "save_steps": 100,
-  "total_flos":
+  "total_flos": 3.463615849187021e+17,
   "trial_name": null,
   "trial_params": null
 }
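The new header values are self-consistent with the log: global_step 600 at epoch 17.142857142857142 implies 600 / 17.142857... = 35 optimizer steps per epoch, and max_steps 1680 / 35 = 48, which matches num_train_epochs. A small sketch that re-checks this from the file itself:

    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    steps_per_epoch = state["global_step"] / state["epoch"]  # 600 / 17.1428... = 35.0
    # 1680 max steps at 35 steps per epoch gives the declared 48 epochs.
    assert round(state["max_steps"] / steps_per_epoch) == state["num_train_epochs"]
    print(state["log_history"][-1])  # last logged entry: step 600, loss 0.0741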