[
  {
    "loss": 0.712146484375,
    "learning_rate": 9.526784024228659e-05,
    "epoch": 0.1419547164454539,
    "total_flos": 3042241413120000,
    "step": 500
  },
  {
    "loss": 0.421217041015625,
    "learning_rate": 9.053568048457316e-05,
    "epoch": 0.2839094328909078,
    "total_flos": 6084482826240000,
    "step": 1000
  },
  {
    "loss": 0.39531640625,
    "learning_rate": 8.580352072685975e-05,
    "epoch": 0.4258641493363617,
    "total_flos": 9126724239360000,
    "step": 1500
  },
  {
    "loss": 0.3591341552734375,
    "learning_rate": 8.107136096914632e-05,
    "epoch": 0.5678188657818156,
    "total_flos": 12168965652480000,
    "step": 2000
  },
  {
    "loss": 0.3376422119140625,
    "learning_rate": 7.63392012114329e-05,
    "epoch": 0.7097735822272695,
    "total_flos": 15211207065600000,
    "step": 2500
  },
  {
    "loss": 0.328197998046875,
    "learning_rate": 7.160704145371948e-05,
    "epoch": 0.8517282986727234,
    "total_flos": 18253448478720000,
    "step": 3000
  },
  {
    "loss": 0.3190537109375,
    "learning_rate": 6.687488169600607e-05,
    "epoch": 0.9936830151181772,
    "total_flos": 21295689891840000,
    "step": 3500
  },
  {
    "loss": 0.28114892578125,
    "learning_rate": 6.214272193829264e-05,
    "epoch": 1.1357087089218538,
    "total_flos": 24339072145489920,
    "step": 4000
  },
  {
    "loss": 0.2707734375,
    "learning_rate": 5.7410562180579217e-05,
    "epoch": 1.2776634253673078,
    "total_flos": 27381313558609920,
    "step": 4500
  },
  {
    "loss": 0.26517333984375,
    "learning_rate": 5.26784024228658e-05,
    "epoch": 1.4196181418127618,
    "total_flos": 30423554971729920,
    "step": 5000
  },
  {
    "loss": 0.262806884765625,
    "learning_rate": 4.794624266515238e-05,
    "epoch": 1.5615728582582156,
    "total_flos": 33465796384849920,
    "step": 5500
  },
  {
    "loss": 0.255599853515625,
    "learning_rate": 4.321408290743896e-05,
    "epoch": 1.7035275747036696,
    "total_flos": 36508037797969920,
    "step": 6000
  },
  {
    "loss": 0.2491875,
    "learning_rate": 3.848192314972553e-05,
    "epoch": 1.8454822911491235,
    "total_flos": 39550279211089920,
    "step": 6500
  },
  {
    "loss": 0.25085498046875,
    "learning_rate": 3.374976339201212e-05,
    "epoch": 1.9874370075945773,
    "total_flos": 42592520624209920,
    "step": 7000
  },
  {
    "loss": 0.22507177734375,
    "learning_rate": 2.9017603634298694e-05,
    "epoch": 2.129462701398254,
    "total_flos": 45635902877859840,
    "step": 7500
  },
  {
    "loss": 0.2233544921875,
    "learning_rate": 2.4285443876585277e-05,
    "epoch": 2.2714174178437077,
    "total_flos": 48678144290979840,
    "step": 8000
  },
  {
    "loss": 0.21958740234375,
    "learning_rate": 1.9553284118871853e-05,
    "epoch": 2.413372134289162,
    "total_flos": 51720385704099840,
    "step": 8500
  },
  {
    "loss": 0.22100439453125,
    "learning_rate": 1.4821124361158433e-05,
    "epoch": 2.5553268507346156,
    "total_flos": 54762627117219840,
    "step": 9000
  },
  {
    "loss": 0.21701953125,
    "learning_rate": 1.0088964603445012e-05,
    "epoch": 2.6972815671800694,
    "total_flos": 57804868530339840,
    "step": 9500
  },
  {
    "loss": 0.21920361328125,
    "learning_rate": 5.356804845731592e-06,
    "epoch": 2.8392362836255236,
    "total_flos": 60847109943459840,
    "step": 10000
  },
  {
    "loss": 0.21878466796875,
    "learning_rate": 6.246450880181715e-07,
    "epoch": 2.9811910000709774,
    "total_flos": 63889351356579840,
    "step": 10500
  }
]