Training in progress, step 8500, checkpoint
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b08dad283213606d07f6ab5db889fe475967297819d0fa97888daa2251428bc5
 size 328277848
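With git-lfs installed, pulling this commit resolves the pointer above into the actual 328 MB weight file, which can then be inspected directly. A minimal sketch, assuming the checkout sits in the current directory and the tensors were saved from PyTorch:

    from safetensors.torch import load_file

    # Maps parameter names to torch.Tensor objects.
    state_dict = load_file("last-checkpoint/model.safetensors")
    for name, tensor in state_dict.items():
        print(name, tuple(tensor.shape), tensor.dtype)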
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a825b2575d588500993f41103ac272cc25e9d2d7632d64e83467f98084e396cb
 size 318646859
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b2170c077dd4bfe6d0d497b721bc49c7786a9b4086e60e7a16be839d33838b66
 size 14645
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6cda9bcc9266ec91d2da20eab50cd7cea609c16666645a54519c40bab7f69f1a
 size 1465
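Each of the four files above is stored through git-lfs, so the diff only touches the three-line pointer (version, oid sha256:&lt;hash&gt;, size in bytes); the binaries themselves live out of band. After downloading, a checkout can be verified against the new pointers. A minimal sketch using only the standard library, with the expected hashes copied from the diffs above:

    import hashlib

    def lfs_oid(path):
        # Stream the file through SHA-256, which is what git-lfs records as "oid".
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest()

    expected = {
        "last-checkpoint/model.safetensors": "b08dad283213606d07f6ab5db889fe475967297819d0fa97888daa2251428bc5",
        "last-checkpoint/optimizer.pt": "a825b2575d588500993f41103ac272cc25e9d2d7632d64e83467f98084e396cb",
        "last-checkpoint/rng_state.pth": "b2170c077dd4bfe6d0d497b721bc49c7786a9b4086e60e7a16be839d33838b66",
        "last-checkpoint/scheduler.pt": "6cda9bcc9266ec91d2da20eab50cd7cea609c16666645a54519c40bab7f69f1a",
    }
    for path, oid in expected.items():
        print(path, "OK" if lfs_oid(path) == oid else "MISMATCH")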
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
   "best_global_step": null,
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 1.
+  "epoch": 1.4360533873965196,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 8500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5743,6 +5743,364 @@
       "eval_samples_per_second": 272.602,
       "eval_steps_per_second": 5.725,
       "step": 8000
+    },
+    {
+      "epoch": 1.353269133299544,
+      "grad_norm": 0.46235671639442444,
+      "learning_rate": 9.883703849181374e-05,
+      "loss": 4.367716598510742,
+      "step": 8010
+    },
+    {
+      "epoch": 1.3549586078729514,
+      "grad_norm": 0.48545390367507935,
+      "learning_rate": 9.838702606099289e-05,
+      "loss": 4.349284362792969,
+      "step": 8020
+    },
+    {
+      "epoch": 1.3566480824463591,
+      "grad_norm": 0.5115071535110474,
+      "learning_rate": 9.793753994381003e-05,
+      "loss": 4.374566268920899,
+      "step": 8030
+    },
+    {
+      "epoch": 1.3583375570197669,
+      "grad_norm": 0.49618780612945557,
+      "learning_rate": 9.748858472381567e-05,
+      "loss": 4.382678604125976,
+      "step": 8040
+    },
+    {
+      "epoch": 1.3600270315931744,
+      "grad_norm": 0.4744579493999481,
+      "learning_rate": 9.704016497914657e-05,
+      "loss": 4.362704849243164,
+      "step": 8050
+    },
+    {
+      "epoch": 1.3617165061665821,
+      "grad_norm": 0.4827500283718109,
+      "learning_rate": 9.659228528247923e-05,
+      "loss": 4.352192306518555,
+      "step": 8060
+    },
+    {
+      "epoch": 1.3634059807399899,
+      "grad_norm": 0.4933985769748688,
+      "learning_rate": 9.614495020098284e-05,
+      "loss": 4.38439826965332,
+      "step": 8070
+    },
+    {
+      "epoch": 1.3650954553133976,
+      "grad_norm": 0.48984023928642273,
+      "learning_rate": 9.569816429627329e-05,
+      "loss": 4.369917297363282,
+      "step": 8080
+    },
+    {
+      "epoch": 1.3667849298868053,
+      "grad_norm": 0.4798352122306824,
+      "learning_rate": 9.525193212436607e-05,
+      "loss": 4.393289184570312,
+      "step": 8090
+    },
+    {
+      "epoch": 1.3684744044602128,
+      "grad_norm": 0.49891650676727295,
+      "learning_rate": 9.480625823563032e-05,
+      "loss": 4.352770614624023,
+      "step": 8100
+    },
+    {
+      "epoch": 1.3701638790336206,
+      "grad_norm": 0.4996585249900818,
+      "learning_rate": 9.436114717474197e-05,
+      "loss": 4.372886276245117,
+      "step": 8110
+    },
+    {
+      "epoch": 1.3718533536070283,
+      "grad_norm": 0.5093587040901184,
+      "learning_rate": 9.391660348063778e-05,
+      "loss": 4.365289306640625,
+      "step": 8120
+    },
+    {
+      "epoch": 1.3735428281804358,
+      "grad_norm": 0.48415860533714294,
+      "learning_rate": 9.347263168646881e-05,
+      "loss": 4.375794982910156,
+      "step": 8130
+    },
+    {
+      "epoch": 1.3752323027538436,
+      "grad_norm": 0.4896807372570038,
+      "learning_rate": 9.30292363195543e-05,
+      "loss": 4.389465713500977,
+      "step": 8140
+    },
+    {
+      "epoch": 1.3769217773272513,
+      "grad_norm": 0.5123707056045532,
+      "learning_rate": 9.258642190133548e-05,
+      "loss": 4.363320159912109,
+      "step": 8150
+    },
+    {
+      "epoch": 1.3786112519006588,
+      "grad_norm": 0.4701666533946991,
+      "learning_rate": 9.21441929473295e-05,
+      "loss": 4.335116577148438,
+      "step": 8160
+    },
+    {
+      "epoch": 1.3803007264740665,
+      "grad_norm": 0.499896764755249,
+      "learning_rate": 9.170255396708336e-05,
+      "loss": 4.3626762390136715,
+      "step": 8170
+    },
+    {
+      "epoch": 1.3819902010474743,
+      "grad_norm": 0.49135464429855347,
+      "learning_rate": 9.126150946412775e-05,
+      "loss": 4.353059387207031,
+      "step": 8180
+    },
+    {
+      "epoch": 1.3836796756208818,
+      "grad_norm": 0.4888681471347809,
+      "learning_rate": 9.082106393593153e-05,
+      "loss": 4.346696090698242,
+      "step": 8190
+    },
+    {
+      "epoch": 1.3853691501942895,
+      "grad_norm": 0.4955255389213562,
+      "learning_rate": 9.038122187385543e-05,
+      "loss": 4.370418930053711,
+      "step": 8200
+    },
+    {
+      "epoch": 1.3870586247676973,
+      "grad_norm": 0.501010000705719,
+      "learning_rate": 8.994198776310652e-05,
+      "loss": 4.367446517944336,
+      "step": 8210
+    },
+    {
+      "epoch": 1.388748099341105,
+      "grad_norm": 0.4743136167526245,
+      "learning_rate": 8.950336608269243e-05,
+      "loss": 4.38268928527832,
+      "step": 8220
+    },
+    {
+      "epoch": 1.3904375739145125,
+      "grad_norm": 0.497666597366333,
+      "learning_rate": 8.906536130537566e-05,
+      "loss": 4.368422317504883,
+      "step": 8230
+    },
+    {
+      "epoch": 1.3921270484879202,
+      "grad_norm": 0.5160584449768066,
+      "learning_rate": 8.862797789762785e-05,
+      "loss": 4.3526569366455075,
+      "step": 8240
+    },
+    {
+      "epoch": 1.393816523061328,
+      "grad_norm": 0.473287969827652,
+      "learning_rate": 8.819122031958446e-05,
+      "loss": 4.373112487792969,
+      "step": 8250
+    },
+    {
+      "epoch": 1.3955059976347357,
+      "grad_norm": 0.5070517659187317,
+      "learning_rate": 8.77550930249991e-05,
+      "loss": 4.352268218994141,
+      "step": 8260
+    },
+    {
+      "epoch": 1.3971954722081432,
+      "grad_norm": 0.4851207733154297,
+      "learning_rate": 8.731960046119819e-05,
+      "loss": 4.377524948120117,
+      "step": 8270
+    },
+    {
+      "epoch": 1.398884946781551,
+      "grad_norm": 0.4885145425796509,
+      "learning_rate": 8.688474706903554e-05,
+      "loss": 4.359702301025391,
+      "step": 8280
+    },
+    {
+      "epoch": 1.4005744213549587,
+      "grad_norm": 0.48066985607147217,
+      "learning_rate": 8.645053728284734e-05,
+      "loss": 4.34954719543457,
+      "step": 8290
+    },
+    {
+      "epoch": 1.4022638959283662,
+      "grad_norm": 0.4763162434101105,
+      "learning_rate": 8.601697553040645e-05,
+      "loss": 4.3670196533203125,
+      "step": 8300
+    },
+    {
+      "epoch": 1.403953370501774,
+      "grad_norm": 0.4713381826877594,
+      "learning_rate": 8.55840662328778e-05,
+      "loss": 4.337409973144531,
+      "step": 8310
+    },
+    {
+      "epoch": 1.4056428450751817,
+      "grad_norm": 0.47513261437416077,
+      "learning_rate": 8.515181380477273e-05,
+      "loss": 4.3685157775878904,
+      "step": 8320
+    },
+    {
+      "epoch": 1.4073323196485892,
+      "grad_norm": 0.4836966097354889,
+      "learning_rate": 8.47202226539046e-05,
+      "loss": 4.3916984558105465,
+      "step": 8330
+    },
+    {
+      "epoch": 1.409021794221997,
+      "grad_norm": 0.48562195897102356,
+      "learning_rate": 8.428929718134331e-05,
+      "loss": 4.382097625732422,
+      "step": 8340
+    },
+    {
+      "epoch": 1.4107112687954046,
+      "grad_norm": 0.4744247496128082,
+      "learning_rate": 8.385904178137061e-05,
+      "loss": 4.366971206665039,
+      "step": 8350
+    },
+    {
+      "epoch": 1.4124007433688122,
+      "grad_norm": 0.5022557973861694,
+      "learning_rate": 8.342946084143546e-05,
+      "loss": 4.335433578491211,
+      "step": 8360
+    },
+    {
+      "epoch": 1.41409021794222,
+      "grad_norm": 0.499116450548172,
+      "learning_rate": 8.300055874210903e-05,
+      "loss": 4.389838027954101,
+      "step": 8370
+    },
+    {
+      "epoch": 1.4157796925156276,
+      "grad_norm": 0.49717390537261963,
+      "learning_rate": 8.257233985704021e-05,
+      "loss": 4.351043319702148,
+      "step": 8380
+    },
+    {
+      "epoch": 1.4174691670890354,
+      "grad_norm": 0.4758422374725342,
+      "learning_rate": 8.214480855291084e-05,
+      "loss": 4.337965774536133,
+      "step": 8390
+    },
+    {
+      "epoch": 1.419158641662443,
+      "grad_norm": 0.4693518877029419,
+      "learning_rate": 8.171796918939142e-05,
+      "loss": 4.340887832641601,
+      "step": 8400
+    },
+    {
+      "epoch": 1.4208481162358506,
+      "grad_norm": 0.47933727502822876,
+      "learning_rate": 8.129182611909642e-05,
+      "loss": 4.35279426574707,
+      "step": 8410
+    },
+    {
+      "epoch": 1.4225375908092583,
+      "grad_norm": 0.46771925687789917,
+      "learning_rate": 8.086638368753993e-05,
+      "loss": 4.373394012451172,
+      "step": 8420
+    },
+    {
+      "epoch": 1.424227065382666,
+      "grad_norm": 0.48759225010871887,
+      "learning_rate": 8.04416462330916e-05,
+      "loss": 4.366844940185547,
+      "step": 8430
+    },
+    {
+      "epoch": 1.4259165399560736,
+      "grad_norm": 0.46530693769454956,
+      "learning_rate": 8.0017618086932e-05,
+      "loss": 4.354487609863281,
+      "step": 8440
+    },
+    {
+      "epoch": 1.4276060145294813,
+      "grad_norm": 0.4575703740119934,
+      "learning_rate": 7.959430357300885e-05,
+      "loss": 4.339031219482422,
+      "step": 8450
+    },
+    {
+      "epoch": 1.429295489102889,
+      "grad_norm": 0.4815446436405182,
+      "learning_rate": 7.917170700799256e-05,
+      "loss": 4.333696365356445,
+      "step": 8460
+    },
+    {
+      "epoch": 1.4309849636762966,
+      "grad_norm": 0.47673627734184265,
+      "learning_rate": 7.874983270123254e-05,
+      "loss": 4.352823638916016,
+      "step": 8470
+    },
+    {
+      "epoch": 1.4326744382497043,
+      "grad_norm": 0.5032398700714111,
+      "learning_rate": 7.832868495471306e-05,
+      "loss": 4.35656852722168,
+      "step": 8480
+    },
+    {
+      "epoch": 1.434363912823112,
+      "grad_norm": 0.5017210245132446,
+      "learning_rate": 7.790826806300928e-05,
+      "loss": 4.358552169799805,
+      "step": 8490
+    },
+    {
+      "epoch": 1.4360533873965196,
+      "grad_norm": 0.46991026401519775,
+      "learning_rate": 7.748858631324393e-05,
+      "loss": 4.356417465209961,
+      "step": 8500
+    },
+    {
+      "epoch": 1.4360533873965196,
+      "eval_loss": 4.313642501831055,
+      "eval_runtime": 5.3487,
+      "eval_samples_per_second": 186.962,
+      "eval_steps_per_second": 3.926,
+      "step": 8500
     }
   ],
   "logging_steps": 10,
@@ -5762,7 +6120,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 2.
+  "total_flos": 2.8428620737491763e+17,
   "train_batch_size": 48,
   "trial_name": null,
   "trial_params": null
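The new log entries land in the log_history list of trainer_state.json, so the slice added by this commit is easy to pull back out for plotting or sanity checks. A minimal sketch (step range taken from the diff; the file path assumes the checkout root):

    import json

    with open("last-checkpoint/trainer_state.json") as f:
        state = json.load(f)

    # Training-loss entries added in this commit: steps 8010 through 8500.
    new_logs = [e for e in state["log_history"]
                if "loss" in e and 8000 < e["step"] <= 8500]
    print("mean train loss:", sum(e["loss"] for e in new_logs) / len(new_logs))

    # The evaluation entry logged at step 8500.
    evals = [e for e in state["log_history"] if "eval_loss" in e]
    print("eval loss:", evals[-1]["eval_loss"])  # 4.3136... per the diff

The epoch/step pairs are mutually consistent: 8500 / 1.4360533873965196 is roughly 5919, so one epoch here corresponds to about 5919 optimizer steps. Training should also be resumable from this folder with the usual Trainer call, assuming the original training script and TrainingArguments: trainer.train(resume_from_checkpoint="last-checkpoint").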