Training in progress, epoch 4, checkpoint
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f6ff3f1dc805c9466ed00cfc7a6f9413068616eb1fc97e2f09bd6e7c30f57f28
 size 1452607848
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e6d8fd523b5b8fe7052b464be7492c7ab9423e6b95ad99f7a6d496a6a67ed84d
 size 2905395322
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:41e9652818823f2c12a2aca6741fd4b14b0de4680a91bff29d278ba2821d096c
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d74525679bc57e1c37ca53a523959244d1a5fd1a2fb94ad16cf1253a9908fe80
 size 1064
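The four entries above are Git LFS pointer files: each records only the spec version, a sha256 object ID, and the byte size of the real artifact, so the diff shows a new oid whenever the checkpoint binaries are rewritten. As a minimal sketch (the pointer layout is the standard LFS format; the local paths are hypothetical), a pointer can be checked against a downloaded artifact like this:

```python
import hashlib
from pathlib import Path

def read_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields  # e.g. {"version": ..., "oid": "sha256:...", "size": ...}

def sha256_of(file_path: str) -> str:
    """Stream the file through sha256 so multi-GB checkpoints fit in memory."""
    digest = hashlib.sha256()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Hypothetical paths: the raw pointer text and a local copy of the artifact.
pointer = read_pointer("model.safetensors.pointer")
blob = "model.safetensors"
assert pointer["oid"] == "sha256:" + sha256_of(blob)
assert pointer["size"] == str(Path(blob).stat().st_size)
```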
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
 "best_global_step": 986,
 "best_metric": 0.3360292613506317,
 "best_model_checkpoint": "./VulnSentry/checkpoint-986",
-"epoch": 4.
+"epoch": 4.99153403318659,
 "eval_steps": 500,
-"global_step":
+"global_step": 2460,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -2814,6 +2814,704 @@
 "eval_samples_per_second": 18.309,
 "eval_steps_per_second": 4.579,
 "step": 1972
+},
+{
+"epoch": 4.006095496105655,
+"grad_norm": 1.3096290826797485,
+"learning_rate": 1.978476872610939e-06,
+"loss": 0.1265,
+"step": 1975
+},
+{
+"epoch": 4.016254656281747,
+"grad_norm": 1.7543737888336182,
+"learning_rate": 1.939340295742066e-06,
+"loss": 0.127,
+"step": 1980
+},
+{
+"epoch": 4.02641381645784,
+"grad_norm": 1.6470061540603638,
+"learning_rate": 1.9005530746217238e-06,
+"loss": 0.1115,
+"step": 1985
+},
+{
+"epoch": 4.0365729766339316,
+"grad_norm": 1.67200767993927,
+"learning_rate": 1.86211689032059e-06,
+"loss": 0.1062,
+"step": 1990
+},
+{
+"epoch": 4.046732136810023,
+"grad_norm": 1.7861415147781372,
+"learning_rate": 1.8240334086951117e-06,
+"loss": 0.1438,
+"step": 1995
+},
+{
+"epoch": 4.056891296986116,
+"grad_norm": 1.3593019247055054,
+"learning_rate": 1.7863042803153074e-06,
+"loss": 0.1642,
+"step": 2000
+},
+{
+"epoch": 4.067050457162208,
+"grad_norm": 2.519192695617676,
+"learning_rate": 1.7489311403932274e-06,
+"loss": 0.1178,
+"step": 2005
+},
+{
+"epoch": 4.0772096173383,
+"grad_norm": 7.370077133178711,
+"learning_rate": 1.7119156087120836e-06,
+"loss": 0.2198,
+"step": 2010
+},
+{
+"epoch": 4.087368777514392,
+"grad_norm": 5.7382917404174805,
+"learning_rate": 1.6752592895560493e-06,
+"loss": 0.1863,
+"step": 2015
+},
+{
+"epoch": 4.097527937690484,
+"grad_norm": 2.0821533203125,
+"learning_rate": 1.6389637716407225e-06,
+"loss": 0.2267,
+"step": 2020
+},
+{
+"epoch": 4.1076870978665765,
+"grad_norm": 1.516582727432251,
+"learning_rate": 1.6030306280442764e-06,
+"loss": 0.1012,
+"step": 2025
+},
+{
+"epoch": 4.117846258042668,
+"grad_norm": 1.9154140949249268,
+"learning_rate": 1.5674614161392753e-06,
+"loss": 0.128,
+"step": 2030
+},
+{
+"epoch": 4.128005418218761,
+"grad_norm": 3.000824451446533,
+"learning_rate": 1.532257677525183e-06,
+"loss": 0.1556,
+"step": 2035
+},
+{
+"epoch": 4.138164578394853,
+"grad_norm": 2.2871387004852295,
+"learning_rate": 1.4974209379615335e-06,
+"loss": 0.2402,
+"step": 2040
+},
+{
+"epoch": 4.148323738570944,
+"grad_norm": 2.5938408374786377,
+"learning_rate": 1.4629527073018267e-06,
+"loss": 0.1224,
+"step": 2045
+},
+{
+"epoch": 4.158482898747037,
+"grad_norm": 2.416022777557373,
+"learning_rate": 1.4288544794280724e-06,
+"loss": 0.1712,
+"step": 2050
+},
+{
+"epoch": 4.168642058923129,
+"grad_norm": 0.8692270517349243,
+"learning_rate": 1.3951277321860468e-06,
+"loss": 0.1125,
+"step": 2055
+},
+{
+"epoch": 4.178801219099221,
+"grad_norm": 3.0718603134155273,
+"learning_rate": 1.3617739273212527e-06,
+"loss": 0.1212,
+"step": 2060
+},
+{
+"epoch": 4.188960379275313,
+"grad_norm": 4.781478404998779,
+"learning_rate": 1.3287945104155487e-06,
+"loss": 0.1914,
+"step": 2065
+},
+{
+"epoch": 4.199119539451405,
+"grad_norm": 1.0546274185180664,
+"learning_rate": 1.2961909108245119e-06,
+"loss": 0.0924,
+"step": 2070
+},
+{
+"epoch": 4.209278699627498,
+"grad_norm": 2.102654218673706,
+"learning_rate": 1.2639645416154744e-06,
+"loss": 0.0848,
+"step": 2075
+},
+{
+"epoch": 4.219437859803589,
+"grad_norm": 3.7154204845428467,
+"learning_rate": 1.2321167995062954e-06,
+"loss": 0.1427,
+"step": 2080
+},
+{
+"epoch": 4.229597019979682,
+"grad_norm": 2.9363725185394287,
+"learning_rate": 1.2006490648048118e-06,
+"loss": 0.0917,
+"step": 2085
+},
+{
+"epoch": 4.239756180155774,
+"grad_norm": 3.3261489868164062,
+"learning_rate": 1.1695627013490262e-06,
+"loss": 0.187,
+"step": 2090
+},
+{
+"epoch": 4.2499153403318655,
+"grad_norm": 5.646364212036133,
+"learning_rate": 1.1388590564479895e-06,
+"loss": 0.1566,
+"step": 2095
+},
+{
+"epoch": 4.260074500507958,
+"grad_norm": 4.463809490203857,
+"learning_rate": 1.1085394608234067e-06,
+"loss": 0.1607,
+"step": 2100
+},
+{
+"epoch": 4.27023366068405,
+"grad_norm": 4.577763080596924,
+"learning_rate": 1.078605228551971e-06,
+"loss": 0.2331,
+"step": 2105
+},
+{
+"epoch": 4.2803928208601425,
+"grad_norm": 1.4819647073745728,
+"learning_rate": 1.0490576570083999e-06,
+"loss": 0.1044,
+"step": 2110
+},
+{
+"epoch": 4.290551981036234,
+"grad_norm": 2.1327085494995117,
+"learning_rate": 1.019898026809214e-06,
+"loss": 0.127,
+"step": 2115
+},
+{
+"epoch": 4.300711141212327,
+"grad_norm": 1.4548298120498657,
+"learning_rate": 9.91127601757228e-07,
+"loss": 0.1082,
+"step": 2120
+},
+{
+"epoch": 4.310870301388419,
+"grad_norm": 8.781686782836914,
+"learning_rate": 9.62747628786782e-07,
+"loss": 0.1526,
+"step": 2125
+},
+{
+"epoch": 4.32102946156451,
+"grad_norm": 0.7647957801818848,
+"learning_rate": 9.347593379096942e-07,
+"loss": 0.0903,
+"step": 2130
+},
+{
+"epoch": 4.331188621740603,
+"grad_norm": 2.908586025238037,
+"learning_rate": 9.071639421619527e-07,
+"loss": 0.1708,
+"step": 2135
+},
+{
+"epoch": 4.341347781916695,
+"grad_norm": 3.934735059738159,
+"learning_rate": 8.799626375511416e-07,
+"loss": 0.2115,
+"step": 2140
+},
+{
+"epoch": 4.3515069420927865,
+"grad_norm": 2.2062618732452393,
+"learning_rate": 8.531566030046035e-07,
+"loss": 0.1316,
+"step": 2145
+},
+{
+"epoch": 4.361666102268879,
+"grad_norm": 11.017730712890625,
+"learning_rate": 8.267470003183498e-07,
+"loss": 0.1005,
+"step": 2150
+},
+{
+"epoch": 4.371825262444971,
+"grad_norm": 4.389218807220459,
+"learning_rate": 8.007349741066939e-07,
+"loss": 0.1979,
+"step": 2155
+},
+{
+"epoch": 4.381984422621064,
+"grad_norm": 3.0417301654815674,
+"learning_rate": 7.751216517526594e-07,
+"loss": 0.1686,
+"step": 2160
+},
+{
+"epoch": 4.392143582797155,
+"grad_norm": 8.221713066101074,
+"learning_rate": 7.499081433591071e-07,
+"loss": 0.1358,
+"step": 2165
+},
+{
+"epoch": 4.402302742973248,
+"grad_norm": 2.864053964614868,
+"learning_rate": 7.250955417006267e-07,
+"loss": 0.1458,
+"step": 2170
+},
+{
+"epoch": 4.41246190314934,
+"grad_norm": 2.298088312149048,
+"learning_rate": 7.006849221761736e-07,
+"loss": 0.1729,
+"step": 2175
+},
+{
+"epoch": 4.4226210633254315,
+"grad_norm": 4.31630802154541,
+"learning_rate": 6.766773427624585e-07,
+"loss": 0.2031,
+"step": 2180
+},
+{
+"epoch": 4.432780223501524,
+"grad_norm": 5.693031311035156,
+"learning_rate": 6.530738439681017e-07,
+"loss": 0.1347,
+"step": 2185
+},
+{
+"epoch": 4.442939383677616,
+"grad_norm": 11.96535873413086,
+"learning_rate": 6.298754487885272e-07,
+"loss": 0.1589,
+"step": 2190
+},
+{
+"epoch": 4.453098543853708,
+"grad_norm": 3.070192337036133,
+"learning_rate": 6.070831626616236e-07,
+"loss": 0.1075,
+"step": 2195
+},
+{
+"epoch": 4.4632577040298,
+"grad_norm": 2.9421584606170654,
+"learning_rate": 5.846979734241809e-07,
+"loss": 0.2157,
+"step": 2200
+},
+{
+"epoch": 4.473416864205892,
+"grad_norm": 3.4061129093170166,
+"learning_rate": 5.627208512690641e-07,
+"loss": 0.2083,
+"step": 2205
+},
+{
+"epoch": 4.483576024381985,
+"grad_norm": 2.1255908012390137,
+"learning_rate": 5.411527487031709e-07,
+"loss": 0.2344,
+"step": 2210
+},
+{
+"epoch": 4.493735184558076,
+"grad_norm": 2.712491273880005,
+"learning_rate": 5.199946005061462e-07,
+"loss": 0.1308,
+"step": 2215
+},
+{
+"epoch": 4.503894344734169,
+"grad_norm": 2.6156551837921143,
+"learning_rate": 4.992473236898676e-07,
+"loss": 0.0954,
+"step": 2220
+},
+{
+"epoch": 4.514053504910261,
+"grad_norm": 2.584205150604248,
+"learning_rate": 4.789118174587071e-07,
+"loss": 0.1919,
+"step": 2225
+},
+{
+"epoch": 4.524212665086353,
+"grad_norm": 2.1053428649902344,
+"learning_rate": 4.5898896317054686e-07,
+"loss": 0.188,
+"step": 2230
+},
+{
+"epoch": 4.534371825262445,
+"grad_norm": 3.8885061740875244,
+"learning_rate": 4.394796242985933e-07,
+"loss": 0.1669,
+"step": 2235
+},
+{
+"epoch": 4.544530985438537,
+"grad_norm": 2.2936289310455322,
+"learning_rate": 4.203846463939498e-07,
+"loss": 0.0923,
+"step": 2240
+},
+{
+"epoch": 4.554690145614629,
+"grad_norm": 2.186384916305542,
+"learning_rate": 4.0170485704896453e-07,
+"loss": 0.1293,
+"step": 2245
+},
+{
+"epoch": 4.564849305790721,
+"grad_norm": 1.962770700454712,
+"learning_rate": 3.834410658613652e-07,
+"loss": 0.1339,
+"step": 2250
+},
+{
+"epoch": 4.575008465966813,
+"grad_norm": 6.1539835929870605,
+"learning_rate": 3.655940643991718e-07,
+"loss": 0.1465,
+"step": 2255
+},
+{
+"epoch": 4.585167626142906,
+"grad_norm": 4.942917346954346,
+"learning_rate": 3.4816462616638847e-07,
+"loss": 0.1313,
+"step": 2260
+},
+{
+"epoch": 4.5953267863189975,
+"grad_norm": 4.580881595611572,
+"learning_rate": 3.3115350656948043e-07,
+"loss": 0.1214,
+"step": 2265
+},
+{
+"epoch": 4.60548594649509,
+"grad_norm": 2.40730357170105,
+"learning_rate": 3.1456144288462773e-07,
+"loss": 0.1215,
+"step": 2270
+},
+{
+"epoch": 4.615645106671182,
+"grad_norm": 1.9706051349639893,
+"learning_rate": 2.9838915422578e-07,
+"loss": 0.068,
+"step": 2275
+},
+{
+"epoch": 4.625804266847274,
+"grad_norm": 3.5650417804718018,
+"learning_rate": 2.8263734151348533e-07,
+"loss": 0.1427,
+"step": 2280
+},
+{
+"epoch": 4.635963427023366,
+"grad_norm": 3.764394998550415,
+"learning_rate": 2.673066874445096e-07,
+"loss": 0.0833,
+"step": 2285
+},
+{
+"epoch": 4.646122587199458,
+"grad_norm": 5.444734573364258,
+"learning_rate": 2.52397856462252e-07,
+"loss": 0.1775,
+"step": 2290
+},
+{
+"epoch": 4.656281747375551,
+"grad_norm": 6.201726913452148,
+"learning_rate": 2.3791149472794373e-07,
+"loss": 0.3198,
+"step": 2295
+},
+{
+"epoch": 4.666440907551642,
+"grad_norm": 2.2787365913391113,
+"learning_rate": 2.2384823009264811e-07,
+"loss": 0.153,
+"step": 2300
+},
+{
+"epoch": 4.676600067727734,
+"grad_norm": 5.477634429931641,
+"learning_rate": 2.1020867207004026e-07,
+"loss": 0.1203,
+"step": 2305
+},
+{
+"epoch": 4.686759227903827,
+"grad_norm": 3.7705609798431396,
+"learning_rate": 1.969934118100003e-07,
+"loss": 0.1749,
+"step": 2310
+},
+{
+"epoch": 4.696918388079919,
+"grad_norm": 6.395349502563477,
+"learning_rate": 1.8420302207298623e-07,
+"loss": 0.0936,
+"step": 2315
+},
+{
+"epoch": 4.707077548256011,
+"grad_norm": 2.2679178714752197,
+"learning_rate": 1.718380572052092e-07,
+"loss": 0.1366,
+"step": 2320
+},
+{
+"epoch": 4.717236708432103,
+"grad_norm": 2.707852602005005,
+"learning_rate": 1.5989905311461274e-07,
+"loss": 0.1601,
+"step": 2325
+},
+{
+"epoch": 4.727395868608195,
+"grad_norm": 2.9166934490203857,
+"learning_rate": 1.4838652724764146e-07,
+"loss": 0.1211,
+"step": 2330
+},
+{
+"epoch": 4.737555028784287,
+"grad_norm": 2.4510881900787354,
+"learning_rate": 1.3730097856681668e-07,
+"loss": 0.0938,
+"step": 2335
+},
+{
+"epoch": 4.747714188960379,
+"grad_norm": 11.790306091308594,
+"learning_rate": 1.2664288752911257e-07,
+"loss": 0.1354,
+"step": 2340
+},
+{
+"epoch": 4.757873349136472,
+"grad_norm": 1.4318434000015259,
+"learning_rate": 1.164127160651285e-07,
+"loss": 0.0694,
+"step": 2345
+},
+{
+"epoch": 4.7680325093125635,
+"grad_norm": 5.423493385314941,
+"learning_rate": 1.0661090755907045e-07,
+"loss": 0.2014,
+"step": 2350
+},
+{
+"epoch": 4.778191669488655,
+"grad_norm": 2.8703224658966064,
+"learning_rate": 9.723788682953539e-08,
+"loss": 0.1312,
+"step": 2355
+},
+{
+"epoch": 4.788350829664748,
+"grad_norm": 9.301383018493652,
+"learning_rate": 8.829406011109821e-08,
+"loss": 0.192,
+"step": 2360
+},
+{
+"epoch": 4.79850998984084,
+"grad_norm": 3.192383289337158,
+"learning_rate": 7.977981503670795e-08,
+"loss": 0.1098,
+"step": 2365
+},
+{
+"epoch": 4.808669150016932,
+"grad_norm": 3.146561861038208,
+"learning_rate": 7.169552062088247e-08,
+"loss": 0.0715,
+"step": 2370
+},
+{
+"epoch": 4.818828310193024,
+"grad_norm": 20.11695098876953,
+"learning_rate": 6.404152724371892e-08,
+"loss": 0.1107,
+"step": 2375
+},
+{
+"epoch": 4.828987470369116,
+"grad_norm": 6.796565055847168,
+"learning_rate": 5.681816663570594e-08,
+"loss": 0.1964,
+"step": 2380
+},
+{
+"epoch": 4.839146630545208,
+"grad_norm": 7.549309253692627,
+"learning_rate": 5.002575186334735e-08,
+"loss": 0.2818,
+"step": 2385
+},
+{
+"epoch": 4.8493057907213,
+"grad_norm": 1.8790849447250366,
+"learning_rate": 4.3664577315593036e-08,
+"loss": 0.1282,
+"step": 2390
+},
+{
+"epoch": 4.859464950897393,
+"grad_norm": 5.264152526855469,
+"learning_rate": 3.773491869108137e-08,
+"loss": 0.2058,
+"step": 2395
+},
+{
+"epoch": 4.869624111073485,
+"grad_norm": 3.90533709526062,
+"learning_rate": 3.2237032986185415e-08,
+"loss": 0.1346,
+"step": 2400
+},
+{
+"epoch": 4.879783271249576,
+"grad_norm": 2.8046767711639404,
+"learning_rate": 2.7171158483882963e-08,
+"loss": 0.099,
+"step": 2405
+},
+{
+"epoch": 4.889942431425669,
+"grad_norm": 2.635749578475952,
+"learning_rate": 2.2537514743419252e-08,
+"loss": 0.1868,
+"step": 2410
+},
+{
+"epoch": 4.900101591601761,
+"grad_norm": 1.4592561721801758,
+"learning_rate": 1.8336302590798992e-08,
+"loss": 0.1337,
+"step": 2415
+},
+{
+"epoch": 4.910260751777853,
+"grad_norm": 7.797517776489258,
+"learning_rate": 1.4567704110080016e-08,
+"loss": 0.1446,
+"step": 2420
+},
+{
+"epoch": 4.920419911953945,
+"grad_norm": 11.264378547668457,
+"learning_rate": 1.1231882635477364e-08,
+"loss": 0.1252,
+"step": 2425
+},
+{
+"epoch": 4.930579072130037,
+"grad_norm": 1.1823475360870361,
+"learning_rate": 8.32898274429117e-09,
+"loss": 0.1656,
+"step": 2430
+},
+{
+"epoch": 4.9407382323061295,
+"grad_norm": 5.188180923461914,
+"learning_rate": 5.859130250636113e-09,
+"loss": 0.1086,
+"step": 2435
+},
+{
+"epoch": 4.950897392482221,
+"grad_norm": 4.300490856170654,
+"learning_rate": 3.822432199989123e-09,
+"loss": 0.1405,
+"step": 2440
+},
+{
+"epoch": 4.961056552658314,
+"grad_norm": 4.541517734527588,
+"learning_rate": 2.2189768645519693e-09,
+"loss": 0.2184,
+"step": 2445
+},
+{
+"epoch": 4.971215712834406,
+"grad_norm": 5.930675983428955,
+"learning_rate": 1.0488337394221059e-09,
+"loss": 0.1169,
+"step": 2450
+},
+{
+"epoch": 4.981374873010497,
+"grad_norm": 2.7766826152801514,
+"learning_rate": 3.1205353958285724e-10,
+"loss": 0.1712,
+"step": 2455
+},
+{
+"epoch": 4.99153403318659,
+"grad_norm": 1.8827733993530273,
+"learning_rate": 8.668197707395464e-12,
+"loss": 0.1449,
+"step": 2460
+},
+{
+"epoch": 4.99153403318659,
+"eval_accuracy": 0.9002257336343115,
+"eval_f1": 0.46618357487922707,
+"eval_loss": 0.4555704891681671,
+"eval_precision": 0.4584323040380047,
+"eval_recall": 0.4742014742014742,
+"eval_runtime": 241.9883,
+"eval_samples_per_second": 18.307,
+"eval_steps_per_second": 4.579,
+"step": 2460
 }
 ],
 "logging_steps": 5,
@@ -2828,7 +3526,7 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
+"should_training_stop": true
 },
 "attributes": {}
 }
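The trainer_state.json diff carries the substance of this commit: epoch 4 logs run from step 1975 to step 2460, the learning rate decays to roughly 8.7e-12, `should_training_stop` flips to true, and the final evaluation (eval_loss 0.4556) does not improve on the retained best checkpoint at step 986 (best_metric 0.3360, assuming the tracked metric is eval_loss), so `best_model_checkpoint` is unchanged. As a minimal sketch, assuming a local copy of the file with the standard Trainer layout (these entries live in a `log_history` list), the epoch-4 loss curve and final metrics can be pulled out like this:

```python
import json

# Hypothetical local path to the file shown in the diff above.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_*" keys.
epoch4_losses = [
    (e["step"], e["loss"])
    for e in state["log_history"]
    if "loss" in e and e["epoch"] >= 4.0
]
final_eval = [e for e in state["log_history"] if "eval_loss" in e][-1]

print(f"{len(epoch4_losses)} training points logged in epoch 4")
print(f"final eval_loss={final_eval['eval_loss']:.4f} "
      f"vs best_metric={state['best_metric']:.4f} "
      f"({state['best_model_checkpoint']})")
```

With `should_training_stop` now true and the learning rate schedule effectively exhausted, this checkpoint appears to close out the run; the checkpoint worth serving remains ./VulnSentry/checkpoint-986.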