[
  {
    "loss": 0.10111225891113282,
    "learning_rate": 4.208860759493671e-05,
    "epoch": 1.5818181818181818,
    "total_flos": 20460006074731008,
    "step": 500
  },
  {
    "loss": 0.022906593322753906,
    "learning_rate": 3.4177215189873416e-05,
    "epoch": 3.1644268774703557,
    "total_flos": 40922995465281024,
    "step": 1000
  },
  {
    "loss": 0.013173851013183593,
    "learning_rate": 2.626582278481013e-05,
    "epoch": 4.746245059288538,
    "total_flos": 61383001540012032,
    "step": 1500
  },
  {
    "loss": 0.0085712890625,
    "learning_rate": 1.8354430379746836e-05,
    "epoch": 6.328853754940711,
    "total_flos": 81845990930562048,
    "step": 2000
  },
  {
    "loss": 0.005443511962890625,
    "learning_rate": 1.0443037974683544e-05,
    "epoch": 7.910671936758893,
    "total_flos": 102305997005293056,
    "step": 2500
  },
  {
    "loss": 0.0037540130615234374,
    "learning_rate": 2.531645569620253e-06,
    "epoch": 9.493280632411068,
    "total_flos": 122768986395843072,
    "step": 3000
  }
]