{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6944444444444444,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "avg_label": 0.45932112634181976,
      "epoch": 0.003472222222222222,
      "grad_norm": 0.515625,
      "learning_rate": 0.0,
      "loss": 0.02412083954550326,
      "num_pairs": 28.0,
      "step": 1
    },
    {
      "avg_label": 0.4166666865348816,
      "epoch": 0.006944444444444444,
      "grad_norm": 11.625,
      "learning_rate": 6.896551724137931e-07,
      "loss": 0.40197963267564774,
      "num_pairs": 21.5,
      "step": 2
    },
    {
      "avg_label": 0.4724903553724289,
      "epoch": 0.010416666666666666,
      "grad_norm": 13.25,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 0.5004251897335052,
      "num_pairs": 28.0,
      "step": 3
    },
    {
      "avg_label": 0.44384056329727173,
      "epoch": 0.013888888888888888,
      "grad_norm": 5.875,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 0.23160110414028168,
      "num_pairs": 27.5,
      "step": 4
    },
    {
      "avg_label": 0.4630681872367859,
      "epoch": 0.017361111111111112,
      "grad_norm": 8.9375,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 0.4468539021909237,
      "num_pairs": 24.0,
      "step": 5
    },
    {
      "avg_label": 0.45495015382766724,
      "epoch": 0.020833333333333332,
      "grad_norm": 1.34375,
      "learning_rate": 3.448275862068966e-06,
      "loss": 0.017617323901504278,
      "num_pairs": 28.0,
      "step": 6
    },
    {
      "avg_label": 0.4732142984867096,
      "epoch": 0.024305555555555556,
      "grad_norm": 11.5625,
      "learning_rate": 4.137931034482759e-06,
      "loss": 0.4206290766596794,
      "num_pairs": 24.0,
      "step": 7
    },
    {
      "avg_label": 0.4736842066049576,
      "epoch": 0.027777777777777776,
      "grad_norm": 0.1484375,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 0.004913174081593752,
      "num_pairs": 27.0,
      "step": 8
    },
    {
      "avg_label": 0.45513392984867096,
      "epoch": 0.03125,
      "grad_norm": 7.84375,
      "learning_rate": 5.517241379310345e-06,
      "loss": 0.40890943817794323,
      "num_pairs": 28.0,
      "step": 9
    },
    {
      "avg_label": 0.4797794222831726,
      "epoch": 0.034722222222222224,
      "grad_norm": 4.21875,
      "learning_rate": 6.206896551724138e-06,
      "loss": 0.07930763380136341,
      "num_pairs": 28.0,
      "step": 10
    },
    {
      "avg_label": 0.4505208283662796,
      "epoch": 0.03819444444444445,
      "grad_norm": 18.0,
      "learning_rate": 6.896551724137932e-06,
      "loss": 0.8742586374282837,
      "num_pairs": 28.0,
      "step": 11
    },
    {
      "avg_label": 0.42196430265903473,
      "epoch": 0.041666666666666664,
      "grad_norm": 17.625,
      "learning_rate": 7.586206896551724e-06,
      "loss": 0.8603673875331879,
      "num_pairs": 23.5,
      "step": 12
    },
    {
      "avg_label": 0.44431088864803314,
      "epoch": 0.04513888888888889,
      "grad_norm": 1.3203125,
      "learning_rate": 8.275862068965518e-06,
      "loss": 0.028431319631636143,
      "num_pairs": 28.0,
      "step": 13
    },
    {
      "avg_label": 0.3893229216337204,
      "epoch": 0.04861111111111111,
      "grad_norm": 10.0,
      "learning_rate": 8.965517241379312e-06,
      "loss": 0.251303069293499,
      "num_pairs": 23.5,
      "step": 14
    },
    {
      "avg_label": 0.43348929286003113,
      "epoch": 0.052083333333333336,
      "grad_norm": 3.796875,
      "learning_rate": 9.655172413793105e-06,
      "loss": 0.11253293231129646,
      "num_pairs": 27.0,
      "step": 15
    },
    {
      "avg_label": 0.439510241150856,
      "epoch": 0.05555555555555555,
      "grad_norm": 4.21875,
      "learning_rate": 1.0344827586206898e-05,
      "loss": 0.16489703953266144,
      "num_pairs": 28.0,
      "step": 16
    },
    {
      "avg_label": 0.4412849396467209,
      "epoch": 0.059027777777777776,
      "grad_norm": 4.84375,
      "learning_rate": 1.103448275862069e-05,
      "loss": 0.17035143449902534,
      "num_pairs": 28.0,
      "step": 17
    },
    {
      "avg_label": 0.4661668390035629,
      "epoch": 0.0625,
      "grad_norm": 3.1875,
      "learning_rate": 1.1724137931034483e-05,
      "loss": 0.10208164528012276,
      "num_pairs": 28.0,
      "step": 18
    },
    {
      "avg_label": 0.45625001192092896,
      "epoch": 0.06597222222222222,
      "grad_norm": 13.625,
      "learning_rate": 1.2413793103448277e-05,
      "loss": 0.5987264770083129,
      "num_pairs": 28.0,
      "step": 19
    },
    {
      "avg_label": 0.4458743929862976,
      "epoch": 0.06944444444444445,
      "grad_norm": 0.59375,
      "learning_rate": 1.310344827586207e-05,
      "loss": 0.03994415677152574,
      "num_pairs": 28.0,
      "step": 20
    },
    {
      "avg_label": 0.4499442130327225,
      "epoch": 0.07291666666666667,
      "grad_norm": 2.078125,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 0.08950073830783367,
      "num_pairs": 28.0,
      "step": 21
    },
    {
      "avg_label": 0.4587053656578064,
      "epoch": 0.0763888888888889,
      "grad_norm": 9.0625,
      "learning_rate": 1.4482758620689657e-05,
      "loss": 0.5212011188268661,
      "num_pairs": 28.0,
      "step": 22
    },
    {
      "avg_label": 0.4502655118703842,
      "epoch": 0.0798611111111111,
      "grad_norm": 5.03125,
      "learning_rate": 1.5172413793103448e-05,
      "loss": 0.08295552420895547,
      "num_pairs": 28.0,
      "step": 23
    },
    {
      "avg_label": 0.4362068921327591,
      "epoch": 0.08333333333333333,
      "grad_norm": 1.65625,
      "learning_rate": 1.586206896551724e-05,
      "loss": 0.07221807166934013,
      "num_pairs": 27.0,
      "step": 24
    },
    {
      "avg_label": 0.4532342702150345,
      "epoch": 0.08680555555555555,
      "grad_norm": 1.140625,
      "learning_rate": 1.6551724137931037e-05,
      "loss": 0.03893738705664873,
      "num_pairs": 28.0,
      "step": 25
    },
    {
      "avg_label": 0.4947916716337204,
      "epoch": 0.09027777777777778,
      "grad_norm": 6.28125,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 0.13335712999105453,
      "num_pairs": 23.5,
      "step": 26
    },
    {
      "avg_label": 0.3125,
      "epoch": 0.09375,
      "grad_norm": 13.75,
      "learning_rate": 1.7931034482758623e-05,
      "loss": 0.23121396452188492,
      "num_pairs": 17.5,
      "step": 27
    },
    {
      "avg_label": 0.4624594449996948,
      "epoch": 0.09722222222222222,
      "grad_norm": 11.75,
      "learning_rate": 1.8620689655172415e-05,
      "loss": 0.4965571314096451,
      "num_pairs": 28.0,
      "step": 28
    },
    {
      "avg_label": 0.46101999282836914,
      "epoch": 0.10069444444444445,
      "grad_norm": 3.578125,
      "learning_rate": 1.931034482758621e-05,
      "loss": 0.16813608072698116,
      "num_pairs": 28.0,
      "step": 29
    },
    {
      "avg_label": 0.4742646962404251,
      "epoch": 0.10416666666666667,
      "grad_norm": 3.171875,
      "learning_rate": 2e-05,
      "loss": 0.1642257682979107,
      "num_pairs": 22.0,
      "step": 30
    },
    {
      "avg_label": 0.45412661135196686,
      "epoch": 0.1076388888888889,
      "grad_norm": 8.4375,
      "learning_rate": 1.9999835072185805e-05,
      "loss": 0.24545209854841232,
      "num_pairs": 28.0,
      "step": 31
    },
    {
      "avg_label": 0.45036764442920685,
      "epoch": 0.1111111111111111,
      "grad_norm": 3.234375,
      "learning_rate": 1.999934029418346e-05,
      "loss": 0.14530762657523155,
      "num_pairs": 26.0,
      "step": 32
    },
    {
      "avg_label": 0.4388917088508606,
      "epoch": 0.11458333333333333,
      "grad_norm": 2.890625,
      "learning_rate": 1.9998515682313485e-05,
      "loss": 0.10479701962321997,
      "num_pairs": 28.0,
      "step": 33
    },
    {
      "avg_label": 0.43915343284606934,
      "epoch": 0.11805555555555555,
      "grad_norm": 7.875,
      "learning_rate": 1.999736126377618e-05,
      "loss": 0.3016491085290909,
      "num_pairs": 28.0,
      "step": 34
    },
    {
      "avg_label": 0.4497767984867096,
      "epoch": 0.12152777777777778,
      "grad_norm": 3.96875,
      "learning_rate": 1.999587707665068e-05,
      "loss": 0.15345897153019905,
      "num_pairs": 28.0,
      "step": 35
    },
    {
      "avg_label": 0.4572916626930237,
      "epoch": 0.125,
      "grad_norm": 1.3515625,
      "learning_rate": 1.999406316989374e-05,
      "loss": 0.04903313983231783,
      "num_pairs": 28.0,
      "step": 36
    },
    {
      "avg_label": 0.45703125,
      "epoch": 0.1284722222222222,
      "grad_norm": 8.6875,
      "learning_rate": 1.9991919603338088e-05,
      "loss": 0.36730772256851196,
      "num_pairs": 28.0,
      "step": 37
    },
    {
      "avg_label": 0.4379105120897293,
      "epoch": 0.13194444444444445,
      "grad_norm": 6.25,
      "learning_rate": 1.998944644769048e-05,
      "loss": 0.2889542682096362,
      "num_pairs": 27.5,
      "step": 38
    },
    {
      "avg_label": 0.4781250059604645,
      "epoch": 0.13541666666666666,
      "grad_norm": 10.0625,
      "learning_rate": 1.9986643784529346e-05,
      "loss": 0.416560098528862,
      "num_pairs": 24.5,
      "step": 39
    },
    {
      "avg_label": 0.4678819328546524,
      "epoch": 0.1388888888888889,
      "grad_norm": 4.96875,
      "learning_rate": 1.9983511706302102e-05,
      "loss": 0.1765335127711296,
      "num_pairs": 28.0,
      "step": 40
    },
    {
      "avg_label": 0.4556451737880707,
      "epoch": 0.1423611111111111,
      "grad_norm": 3.171875,
      "learning_rate": 1.9980050316322118e-05,
      "loss": 0.15112879872322083,
      "num_pairs": 27.5,
      "step": 41
    },
    {
      "avg_label": 0.4666379243135452,
      "epoch": 0.14583333333333334,
      "grad_norm": 10.875,
      "learning_rate": 1.997625972876529e-05,
      "loss": 0.5556365996599197,
      "num_pairs": 28.0,
      "step": 42
    },
    {
      "avg_label": 0.45076756179332733,
      "epoch": 0.14930555555555555,
      "grad_norm": 2.40625,
      "learning_rate": 1.997214006866628e-05,
      "loss": 0.09973586304113269,
      "num_pairs": 28.0,
      "step": 43
    },
    {
      "avg_label": 0.4405048042535782,
      "epoch": 0.1527777777777778,
      "grad_norm": 8.9375,
      "learning_rate": 1.9967691471914392e-05,
      "loss": 0.3806912750005722,
      "num_pairs": 28.0,
      "step": 44
    },
    {
      "avg_label": 0.4602416008710861,
      "epoch": 0.15625,
      "grad_norm": 4.8125,
      "learning_rate": 1.99629140852491e-05,
      "loss": 0.14190024323761463,
      "num_pairs": 28.0,
      "step": 45
    },
    {
      "avg_label": 0.44902269542217255,
      "epoch": 0.1597222222222222,
      "grad_norm": 1.890625,
      "learning_rate": 1.9957808066255187e-05,
      "loss": 0.07964974269270897,
      "num_pairs": 28.0,
      "step": 46
    },
    {
      "avg_label": 0.44250571727752686,
      "epoch": 0.16319444444444445,
      "grad_norm": 7.5625,
      "learning_rate": 1.9952373583357566e-05,
      "loss": 0.3196651563048363,
      "num_pairs": 27.5,
      "step": 47
    },
    {
      "avg_label": 0.45804399251937866,
      "epoch": 0.16666666666666666,
      "grad_norm": 3.03125,
      "learning_rate": 1.994661081581571e-05,
      "loss": 0.1772424913942814,
      "num_pairs": 28.0,
      "step": 48
    },
    {
      "avg_label": 0.44343800842761993,
      "epoch": 0.1701388888888889,
      "grad_norm": 0.7734375,
      "learning_rate": 1.9940519953717762e-05,
      "loss": 0.03773397207260132,
      "num_pairs": 27.5,
      "step": 49
    },
    {
      "avg_label": 0.4277420938014984,
      "epoch": 0.1736111111111111,
      "grad_norm": 0.7578125,
      "learning_rate": 1.993410119797422e-05,
      "loss": 0.017126482212916017,
      "num_pairs": 27.5,
      "step": 50
    },
    {
      "avg_label": 0.4761904776096344,
      "epoch": 0.17708333333333334,
      "grad_norm": 2.265625,
      "learning_rate": 1.9927354760311365e-05,
      "loss": 0.0958416610956192,
      "num_pairs": 24.0,
      "step": 51
    },
    {
      "avg_label": 0.45110294222831726,
      "epoch": 0.18055555555555555,
      "grad_norm": 3.859375,
      "learning_rate": 1.992028086326424e-05,
      "loss": 0.176346430554986,
      "num_pairs": 28.0,
      "step": 52
    },
    {
      "avg_label": 0.4557291716337204,
      "epoch": 0.1840277777777778,
      "grad_norm": 1.484375,
      "learning_rate": 1.991287974016932e-05,
      "loss": 0.06965811923146248,
      "num_pairs": 28.0,
      "step": 53
    },
    {
      "avg_label": 0.441498339176178,
      "epoch": 0.1875,
      "grad_norm": 1.1328125,
      "learning_rate": 1.9905151635156813e-05,
      "loss": 0.04886321909725666,
      "num_pairs": 28.0,
      "step": 54
    },
    {
      "avg_label": 0.4579603523015976,
      "epoch": 0.1909722222222222,
      "grad_norm": 0.79296875,
      "learning_rate": 1.9897096803142616e-05,
      "loss": 0.02616998180747032,
      "num_pairs": 28.0,
      "step": 55
    },
    {
      "avg_label": 0.45521390438079834,
      "epoch": 0.19444444444444445,
      "grad_norm": 0.48046875,
      "learning_rate": 1.988871550981989e-05,
      "loss": 0.04496368020772934,
      "num_pairs": 28.0,
      "step": 56
    },
    {
      "avg_label": 0.46743176877498627,
      "epoch": 0.19791666666666666,
      "grad_norm": 1.953125,
      "learning_rate": 1.988000803165032e-05,
      "loss": 0.08031387068331242,
      "num_pairs": 27.0,
      "step": 57
    },
    {
      "avg_label": 0.39706501364707947,
      "epoch": 0.2013888888888889,
      "grad_norm": 0.765625,
      "learning_rate": 1.9870974655854974e-05,
      "loss": 0.05276821367442608,
      "num_pairs": 28.0,
      "step": 58
    },
    {
      "avg_label": 0.4635416716337204,
      "epoch": 0.2048611111111111,
      "grad_norm": 0.345703125,
      "learning_rate": 1.9861615680404833e-05,
      "loss": 0.02046751804300584,
      "num_pairs": 28.0,
      "step": 59
    },
    {
      "avg_label": 0.4392857253551483,
      "epoch": 0.20833333333333334,
      "grad_norm": 1.59375,
      "learning_rate": 1.985193141401097e-05,
      "loss": 0.03745671547949314,
      "num_pairs": 28.0,
      "step": 60
    },
    {
      "avg_label": 0.430803582072258,
      "epoch": 0.21180555555555555,
      "grad_norm": 2.34375,
      "learning_rate": 1.9841922176114366e-05,
      "loss": 0.04785962332971394,
      "num_pairs": 21.5,
      "step": 61
    },
    {
      "avg_label": 0.4626736044883728,
      "epoch": 0.2152777777777778,
      "grad_norm": 0.61328125,
      "learning_rate": 1.9831588296875367e-05,
      "loss": 0.021605145651847124,
      "num_pairs": 28.0,
      "step": 62
    },
    {
      "avg_label": 0.4780505895614624,
      "epoch": 0.21875,
      "grad_norm": 1.609375,
      "learning_rate": 1.982093011716279e-05,
      "loss": 0.08873376995325089,
      "num_pairs": 28.0,
      "step": 63
    },
    {
      "avg_label": 0.4489087462425232,
      "epoch": 0.2222222222222222,
      "grad_norm": 1.65625,
      "learning_rate": 1.9809947988542696e-05,
      "loss": 0.0287565803155303,
      "num_pairs": 27.5,
      "step": 64
    },
    {
      "avg_label": 0.4442349076271057,
      "epoch": 0.22569444444444445,
      "grad_norm": 0.82421875,
      "learning_rate": 1.979864227326678e-05,
      "loss": 0.041462352965027094,
      "num_pairs": 28.0,
      "step": 65
    },
    {
      "avg_label": 0.5274767875671387,
      "epoch": 0.22916666666666666,
      "grad_norm": 0.26171875,
      "learning_rate": 1.9787013344260422e-05,
      "loss": 0.006527273333631456,
      "num_pairs": 26.0,
      "step": 66
    },
    {
      "avg_label": 0.45734797418117523,
      "epoch": 0.2326388888888889,
      "grad_norm": 7.21875,
      "learning_rate": 1.9775061585110387e-05,
      "loss": 0.33710330724716187,
      "num_pairs": 28.0,
      "step": 67
    },
    {
      "avg_label": 0.44953545928001404,
      "epoch": 0.2361111111111111,
      "grad_norm": 1.1796875,
      "learning_rate": 1.976278739005218e-05,
      "loss": 0.05175479780882597,
      "num_pairs": 28.0,
      "step": 68
    },
    {
      "avg_label": 0.4675245136022568,
      "epoch": 0.23958333333333334,
      "grad_norm": 3.859375,
      "learning_rate": 1.9750191163957042e-05,
      "loss": 0.15232571214437485,
      "num_pairs": 28.0,
      "step": 69
    },
    {
      "avg_label": 0.45781250298023224,
      "epoch": 0.24305555555555555,
      "grad_norm": 1.7890625,
      "learning_rate": 1.9737273322318565e-05,
      "loss": 0.09527681209146976,
      "num_pairs": 28.0,
      "step": 70
    },
    {
      "avg_label": 0.4518994987010956,
      "epoch": 0.2465277777777778,
      "grad_norm": 0.703125,
      "learning_rate": 1.972403429123904e-05,
      "loss": 0.035001321870367974,
      "num_pairs": 28.0,
      "step": 71
    },
    {
      "avg_label": 0.4765625,
      "epoch": 0.25,
      "grad_norm": 0.75,
      "learning_rate": 1.971047450741535e-05,
      "loss": 0.026095230132341385,
      "num_pairs": 22.0,
      "step": 72
    },
    {
      "avg_label": 0.5010775923728943,
      "epoch": 0.2534722222222222,
      "grad_norm": 0.58984375,
      "learning_rate": 1.9696594418124598e-05,
      "loss": 0.028016670839861035,
      "num_pairs": 23.5,
      "step": 73
    },
    {
      "avg_label": 0.6005434691905975,
      "epoch": 0.2569444444444444,
      "grad_norm": 0.62109375,
      "learning_rate": 1.9682394481209338e-05,
      "loss": 0.017244269140064716,
      "num_pairs": 20.0,
      "step": 74
    },
    {
      "avg_label": 0.44895362854003906,
      "epoch": 0.2604166666666667,
      "grad_norm": 2.65625,
      "learning_rate": 1.966787516506249e-05,
      "loss": 0.13350838515907526,
      "num_pairs": 27.5,
      "step": 75
    },
    {
      "avg_label": 0.44657258689403534,
      "epoch": 0.2638888888888889,
      "grad_norm": 2.6875,
      "learning_rate": 1.9653036948611864e-05,
      "loss": 0.13598253019154072,
      "num_pairs": 24.5,
      "step": 76
    },
    {
      "avg_label": 0.4062500149011612,
      "epoch": 0.2673611111111111,
      "grad_norm": 1.6328125,
      "learning_rate": 1.9637880321304387e-05,
      "loss": 0.09597307164222002,
      "num_pairs": 21.5,
      "step": 77
    },
    {
      "avg_label": 0.43358150124549866,
      "epoch": 0.2708333333333333,
      "grad_norm": 1.5859375,
      "learning_rate": 1.962240578308993e-05,
      "loss": 0.02830888982862234,
      "num_pairs": 28.0,
      "step": 78
    },
    {
      "avg_label": 0.4296875,
      "epoch": 0.2743055555555556,
      "grad_norm": 0.5703125,
      "learning_rate": 1.9606613844404853e-05,
      "loss": 0.0342620718292892,
      "num_pairs": 27.5,
      "step": 79
    },
    {
      "avg_label": 0.44969919323921204,
      "epoch": 0.2777777777777778,
      "grad_norm": 1.140625,
      "learning_rate": 1.9590505026155146e-05,
      "loss": 0.047657732968218625,
      "num_pairs": 28.0,
      "step": 80
    },
    {
      "avg_label": 0.44572368264198303,
      "epoch": 0.28125,
      "grad_norm": 0.98828125,
      "learning_rate": 1.9574079859699236e-05,
      "loss": 0.048789044842123985,
      "num_pairs": 28.0,
      "step": 81
    },
    {
      "avg_label": 0.39816810190677643,
      "epoch": 0.2847222222222222,
      "grad_norm": 1.140625,
      "learning_rate": 1.955733888683049e-05,
      "loss": 0.053058773279190063,
      "num_pairs": 27.5,
      "step": 82
    },
    {
      "avg_label": 0.43917112052440643,
      "epoch": 0.2881944444444444,
      "grad_norm": 0.91015625,
      "learning_rate": 1.9540282659759317e-05,
      "loss": 0.05478274542838335,
      "num_pairs": 27.5,
      "step": 83
    },
    {
      "avg_label": 0.4709821492433548,
      "epoch": 0.2916666666666667,
      "grad_norm": 0.228515625,
      "learning_rate": 1.9522911741094966e-05,
      "loss": 0.011658322997391224,
      "num_pairs": 28.0,
      "step": 84
    },
    {
      "avg_label": 0.4544480890035629,
      "epoch": 0.2951388888888889,
      "grad_norm": 11.3125,
      "learning_rate": 1.9505226703826973e-05,
      "loss": 0.523826252669096,
      "num_pairs": 28.0,
      "step": 85
    },
    {
      "avg_label": 0.4711538553237915,
      "epoch": 0.2986111111111111,
      "grad_norm": 0.80078125,
      "learning_rate": 1.948722813130624e-05,
      "loss": 0.023038258543238044,
      "num_pairs": 27.5,
      "step": 86
    },
    {
      "avg_label": 0.4539930522441864,
      "epoch": 0.3020833333333333,
      "grad_norm": 1.6328125,
      "learning_rate": 1.9468916617225814e-05,
      "loss": 0.058613574132323265,
      "num_pairs": 28.0,
      "step": 87
    },
    {
      "avg_label": 0.4584091007709503,
      "epoch": 0.3055555555555556,
      "grad_norm": 0.388671875,
      "learning_rate": 1.9450292765601287e-05,
      "loss": 0.016918038250878453,
      "num_pairs": 28.0,
      "step": 88
    },
    {
      "avg_label": 0.4232954829931259,
      "epoch": 0.3090277777777778,
      "grad_norm": 1.3359375,
      "learning_rate": 1.94313571907509e-05,
      "loss": 0.060430300422012806,
      "num_pairs": 28.0,
      "step": 89
    },
    {
      "avg_label": 0.5833333134651184,
      "epoch": 0.3125,
      "grad_norm": 4.125,
      "learning_rate": 1.941211051727524e-05,
      "loss": 0.060059962212108076,
      "num_pairs": 20.5,
      "step": 90
    },
    {
      "avg_label": 0.45364584028720856,
      "epoch": 0.3159722222222222,
      "grad_norm": 1.4375,
      "learning_rate": 1.939255338003666e-05,
      "loss": 0.04583445412572473,
      "num_pairs": 28.0,
      "step": 91
    },
    {
      "avg_label": 0.4437500089406967,
      "epoch": 0.3194444444444444,
      "grad_norm": 0.26171875,
      "learning_rate": 1.937268642413835e-05,
      "loss": 0.00882079591974616,
      "num_pairs": 27.0,
      "step": 92
    },
    {
      "avg_label": 0.4508301317691803,
      "epoch": 0.3229166666666667,
      "grad_norm": 0.51953125,
      "learning_rate": 1.9352510304903017e-05,
      "loss": 0.03392870319657959,
      "num_pairs": 27.5,
      "step": 93
    },
    {
      "avg_label": 0.44549499452114105,
      "epoch": 0.3263888888888889,
      "grad_norm": 1.4375,
      "learning_rate": 1.9332025687851325e-05,
      "loss": 0.04178585158661008,
      "num_pairs": 28.0,
      "step": 94
    },
    {
      "avg_label": 0.47159090638160706,
      "epoch": 0.3298611111111111,
      "grad_norm": 0.92578125,
      "learning_rate": 1.931123324867989e-05,
      "loss": 0.028863655403256416,
      "num_pairs": 28.0,
      "step": 95
    },
    {
      "avg_label": 0.4557291716337204,
      "epoch": 0.3333333333333333,
      "grad_norm": 2.671875,
      "learning_rate": 1.929013367323902e-05,
      "loss": 0.1290583610534668,
      "num_pairs": 28.0,
      "step": 96
    },
    {
      "avg_label": 0.44440363347530365,
      "epoch": 0.3368055555555556,
      "grad_norm": 0.75390625,
      "learning_rate": 1.926872765751009e-05,
      "loss": 0.01571450149640441,
      "num_pairs": 28.0,
      "step": 97
    },
    {
      "avg_label": 0.5104166716337204,
      "epoch": 0.3402777777777778,
      "grad_norm": 5.9375,
      "learning_rate": 1.9247015907582574e-05,
      "loss": 0.2692837491631508,
      "num_pairs": 27.5,
      "step": 98
    },
    {
      "avg_label": 0.4975000023841858,
      "epoch": 0.34375,
      "grad_norm": 0.462890625,
      "learning_rate": 1.9224999139630766e-05,
      "loss": 0.02842018473893404,
      "num_pairs": 24.0,
      "step": 99
    },
    {
      "avg_label": 0.46345899999141693,
      "epoch": 0.3472222222222222,
      "grad_norm": 3.578125,
      "learning_rate": 1.920267807989015e-05,
      "loss": 0.12233215570449829,
      "num_pairs": 25.5,
      "step": 100
    },
    {
      "avg_label": 0.4338157922029495,
      "epoch": 0.3506944444444444,
      "grad_norm": 0.275390625,
      "learning_rate": 1.918005346463344e-05,
      "loss": 0.004982678568921983,
      "num_pairs": 27.5,
      "step": 101
    },
    {
      "avg_label": 0.4490041136741638,
      "epoch": 0.3541666666666667,
      "grad_norm": 0.427734375,
      "learning_rate": 1.9157126040146307e-05,
      "loss": 0.01375208212994039,
      "num_pairs": 27.5,
      "step": 102
    },
    {
      "avg_label": 0.4447794109582901,
      "epoch": 0.3576388888888889,
      "grad_norm": 5.125,
      "learning_rate": 1.9133896562702746e-05,
      "loss": 0.20500781387090683,
      "num_pairs": 27.5,
      "step": 103
    },
    {
      "avg_label": 0.45121754705905914,
      "epoch": 0.3611111111111111,
      "grad_norm": 1.015625,
      "learning_rate": 1.911036579854016e-05,
      "loss": 0.024166605668142438,
      "num_pairs": 28.0,
      "step": 104
    },
    {
      "avg_label": 0.5431034564971924,
      "epoch": 0.3645833333333333,
      "grad_norm": 0.87109375,
      "learning_rate": 1.9086534523834032e-05,
      "loss": 0.07264208607375622,
      "num_pairs": 21.5,
      "step": 105
    },
    {
      "avg_label": 0.5112500041723251,
      "epoch": 0.3680555555555556,
      "grad_norm": 0.5,
      "learning_rate": 1.906240352467238e-05,
      "loss": 0.027374916709959507,
      "num_pairs": 24.5,
      "step": 106
    },
    {
      "avg_label": 0.4663461595773697,
      "epoch": 0.3715277777777778,
      "grad_norm": 0.88671875,
      "learning_rate": 1.9037973597029796e-05,
      "loss": 0.009546086774207652,
      "num_pairs": 24.0,
      "step": 107
    },
    {
      "avg_label": 0.45906251668930054,
      "epoch": 0.375,
      "grad_norm": 0.74609375,
      "learning_rate": 1.901324554674119e-05,
      "loss": 0.021506985649466515,
      "num_pairs": 27.5,
      "step": 108
    },
    {
      "avg_label": 0.5089285671710968,
      "epoch": 0.3784722222222222,
      "grad_norm": 0.82421875,
      "learning_rate": 1.8988220189475216e-05,
      "loss": 0.021596498914732365,
      "num_pairs": 24.5,
      "step": 109
    },
    {
      "avg_label": 0.45194804668426514,
      "epoch": 0.3819444444444444,
      "grad_norm": 0.44921875,
      "learning_rate": 1.896289835070737e-05,
      "loss": 0.044558977242559195,
      "num_pairs": 28.0,
      "step": 110
    },
    {
      "avg_label": 0.548076942563057,
      "epoch": 0.3854166666666667,
      "grad_norm": 5.34375,
      "learning_rate": 1.893728086569276e-05,
      "loss": 0.13392094970913604,
      "num_pairs": 21.5,
      "step": 111
    },
    {
      "avg_label": 0.4564732164144516,
      "epoch": 0.3888888888888889,
      "grad_norm": 1.796875,
      "learning_rate": 1.891136857943854e-05,
      "loss": 0.07770301774144173,
      "num_pairs": 28.0,
      "step": 112
    },
    {
      "avg_label": 0.4529440253973007,
      "epoch": 0.3923611111111111,
      "grad_norm": 0.609375,
      "learning_rate": 1.8885162346676063e-05,
      "loss": 0.03013956267386675,
      "num_pairs": 28.0,
      "step": 113
    },
    {
      "avg_label": 0.45974941551685333,
      "epoch": 0.3958333333333333,
      "grad_norm": 2.328125,
      "learning_rate": 1.8858663031832665e-05,
      "loss": 0.08871974685462192,
      "num_pairs": 28.0,
      "step": 114
    },
    {
      "avg_label": 0.45034562051296234,
      "epoch": 0.3993055555555556,
      "grad_norm": 0.1533203125,
      "learning_rate": 1.8831871509003164e-05,
      "loss": 0.002237841486930847,
      "num_pairs": 28.0,
      "step": 115
    },
    {
      "avg_label": 0.46330973505973816,
      "epoch": 0.4027777777777778,
      "grad_norm": 1.4140625,
      "learning_rate": 1.8804788661921012e-05,
      "loss": 0.034147982485592365,
      "num_pairs": 28.0,
      "step": 116
    },
    {
      "avg_label": 0.4184027910232544,
      "epoch": 0.40625,
      "grad_norm": 0.66015625,
      "learning_rate": 1.877741538392917e-05,
      "loss": 0.032842551125213504,
      "num_pairs": 28.0,
      "step": 117
    },
    {
      "avg_label": 0.44732142984867096,
      "epoch": 0.4097222222222222,
      "grad_norm": 0.50390625,
      "learning_rate": 1.8749752577950614e-05,
      "loss": 0.022164800204336643,
      "num_pairs": 23.5,
      "step": 118
    },
    {
      "avg_label": 0.4625000059604645,
      "epoch": 0.4131944444444444,
      "grad_norm": 2.46875,
      "learning_rate": 1.8721801156458573e-05,
      "loss": 0.13851431757211685,
      "num_pairs": 22.0,
      "step": 119
    },
    {
      "avg_label": 0.46339286863803864,
      "epoch": 0.4166666666666667,
      "grad_norm": 1.4140625,
      "learning_rate": 1.869356204144642e-05,
      "loss": 0.03970568906515837,
      "num_pairs": 26.0,
      "step": 120
    },
    {
      "avg_label": 0.46469351649284363,
      "epoch": 0.4201388888888889,
      "grad_norm": 1.4765625,
      "learning_rate": 1.866503616439725e-05,
      "loss": 0.06737381592392921,
      "num_pairs": 27.5,
      "step": 121
    },
    {
      "avg_label": 0.44958437979221344,
      "epoch": 0.4236111111111111,
      "grad_norm": 0.796875,
      "learning_rate": 1.8636224466253177e-05,
      "loss": 0.03086982760578394,
      "num_pairs": 28.0,
      "step": 122
    },
    {
      "avg_label": 0.4493534415960312,
      "epoch": 0.4270833333333333,
      "grad_norm": 1.0078125,
      "learning_rate": 1.860712789738428e-05,
      "loss": 0.03511205594986677,
      "num_pairs": 28.0,
      "step": 123
    },
    {
      "avg_label": 0.44804346561431885,
      "epoch": 0.4305555555555556,
      "grad_norm": 0.78125,
      "learning_rate": 1.857774741755726e-05,
      "loss": 0.056939804926514626,
      "num_pairs": 27.0,
      "step": 124
    },
    {
      "avg_label": 0.4523809552192688,
      "epoch": 0.4340277777777778,
      "grad_norm": 2.375,
      "learning_rate": 1.854808399590378e-05,
      "loss": 0.09799622371792793,
      "num_pairs": 28.0,
      "step": 125
    },
    {
      "avg_label": 0.4740624874830246,
      "epoch": 0.4375,
      "grad_norm": 1.625,
      "learning_rate": 1.8518138610888505e-05,
      "loss": 0.04024723544716835,
      "num_pairs": 28.0,
      "step": 126
    },
    {
      "avg_label": 0.4765625,
      "epoch": 0.4409722222222222,
      "grad_norm": 4.71875,
      "learning_rate": 1.8487912250276805e-05,
      "loss": 0.23970045894384384,
      "num_pairs": 24.0,
      "step": 127
    },
    {
      "avg_label": 0.4485294073820114,
      "epoch": 0.4444444444444444,
      "grad_norm": 0.87890625,
      "learning_rate": 1.8457405911102202e-05,
      "loss": 0.024124122224748135,
      "num_pairs": 24.5,
      "step": 128
    },
    {
      "avg_label": 0.470486119389534,
      "epoch": 0.4479166666666667,
      "grad_norm": 7.21875,
      "learning_rate": 1.8426620599633464e-05,
      "loss": 0.2318284586071968,
      "num_pairs": 22.0,
      "step": 129
    },
    {
      "avg_label": 0.4488866627216339,
      "epoch": 0.4513888888888889,
      "grad_norm": 0.6953125,
      "learning_rate": 1.8395557331341413e-05,
      "loss": 0.015422451309859753,
      "num_pairs": 28.0,
      "step": 130
    },
    {
      "avg_label": 0.4508064538240433,
      "epoch": 0.4548611111111111,
      "grad_norm": 0.8125,
      "learning_rate": 1.836421713086544e-05,
      "loss": 0.022061188239604235,
      "num_pairs": 28.0,
      "step": 131
    },
    {
      "avg_label": 0.4425466060638428,
      "epoch": 0.4583333333333333,
      "grad_norm": 1.5234375,
      "learning_rate": 1.83326010319797e-05,
      "loss": 0.08789217192679644,
      "num_pairs": 27.5,
      "step": 132
    },
    {
      "avg_label": 0.4583333283662796,
      "epoch": 0.4618055555555556,
      "grad_norm": 0.5859375,
      "learning_rate": 1.830071007755901e-05,
      "loss": 0.0123936936724931,
      "num_pairs": 28.0,
      "step": 133
    },
    {
      "avg_label": 0.4434974938631058,
      "epoch": 0.4652777777777778,
      "grad_norm": 0.640625,
      "learning_rate": 1.8268545319544443e-05,
      "loss": 0.0157788903452456,
      "num_pairs": 28.0,
      "step": 134
    },
    {
      "avg_label": 0.4349999725818634,
      "epoch": 0.46875,
      "grad_norm": 1.3046875,
      "learning_rate": 1.823610781890865e-05,
      "loss": 0.054201798513531685,
      "num_pairs": 26.0,
      "step": 135
    },
    {
      "avg_label": 0.4729166626930237,
      "epoch": 0.4722222222222222,
      "grad_norm": 0.875,
      "learning_rate": 1.820339864562085e-05,
      "loss": 0.03274301765486598,
      "num_pairs": 26.5,
      "step": 136
    },
    {
      "avg_label": 0.4567129611968994,
      "epoch": 0.4756944444444444,
      "grad_norm": 1.140625,
      "learning_rate": 1.817041887861153e-05,
      "loss": 0.05754423700273037,
      "num_pairs": 27.5,
      "step": 137
    },
    {
      "avg_label": 0.3948863595724106,
      "epoch": 0.4791666666666667,
      "grad_norm": 1.0703125,
      "learning_rate": 1.8137169605736867e-05,
      "loss": 0.07348897308111191,
      "num_pairs": 21.5,
      "step": 138
    },
    {
      "avg_label": 0.47046372294425964,
      "epoch": 0.4826388888888889,
      "grad_norm": 0.6015625,
      "learning_rate": 1.8103651923742846e-05,
      "loss": 0.04229694373498205,
      "num_pairs": 28.0,
      "step": 139
    },
    {
      "avg_label": 0.4568256735801697,
      "epoch": 0.4861111111111111,
      "grad_norm": 0.8046875,
      "learning_rate": 1.8069866938229066e-05,
      "loss": 0.0422638775780797,
      "num_pairs": 28.0,
      "step": 140
    },
    {
      "avg_label": 0.46851328015327454,
      "epoch": 0.4895833333333333,
      "grad_norm": 1.078125,
      "learning_rate": 1.8035815763612293e-05,
      "loss": 0.048252967186272144,
      "num_pairs": 27.5,
      "step": 141
    },
    {
      "avg_label": 0.46875,
      "epoch": 0.4930555555555556,
      "grad_norm": 1.109375,
      "learning_rate": 1.8001499523089683e-05,
      "loss": 0.022715996543411165,
      "num_pairs": 20.0,
      "step": 142
    },
    {
      "avg_label": 0.46064816415309906,
      "epoch": 0.4965277777777778,
      "grad_norm": 0.298828125,
      "learning_rate": 1.7966919348601754e-05,
      "loss": 0.0070006223395466805,
      "num_pairs": 28.0,
      "step": 143
    },
    {
      "avg_label": 0.4451013505458832,
      "epoch": 0.5,
      "grad_norm": 0.65625,
      "learning_rate": 1.7932076380795017e-05,
      "loss": 0.034362755715847015,
      "num_pairs": 28.0,
      "step": 144
    },
    {
      "avg_label": 0.4553571492433548,
      "epoch": 0.5034722222222222,
      "grad_norm": 0.6953125,
      "learning_rate": 1.7896971768984373e-05,
      "loss": 0.02424975112080574,
      "num_pairs": 28.0,
      "step": 145
    },
    {
      "avg_label": 0.40937501192092896,
      "epoch": 0.5069444444444444,
      "grad_norm": 1.40625,
      "learning_rate": 1.7861606671115207e-05,
      "loss": 0.09015845181420445,
      "num_pairs": 28.0,
      "step": 146
    },
    {
      "avg_label": 0.4572192281484604,
      "epoch": 0.5104166666666666,
      "grad_norm": 2.921875,
      "learning_rate": 1.7825982253725175e-05,
      "loss": 0.09106537594925612,
      "num_pairs": 28.0,
      "step": 147
    },
    {
      "avg_label": 0.4486243277788162,
      "epoch": 0.5138888888888888,
      "grad_norm": 0.22265625,
      "learning_rate": 1.7790099691905736e-05,
      "loss": 0.007117493078112602,
      "num_pairs": 28.0,
      "step": 148
    },
    {
      "avg_label": 0.42847222089767456,
      "epoch": 0.5173611111111112,
      "grad_norm": 0.326171875,
      "learning_rate": 1.7753960169263387e-05,
      "loss": 0.011324664112180471,
      "num_pairs": 25.5,
      "step": 149
    },
    {
      "avg_label": 0.439529225230217,
      "epoch": 0.5208333333333334,
      "grad_norm": 0.921875,
      "learning_rate": 1.7717564877880623e-05,
      "loss": 0.05978692602366209,
      "num_pairs": 27.5,
      "step": 150
    },
    {
      "avg_label": 0.4516952484846115,
      "epoch": 0.5243055555555556,
      "grad_norm": 1.375,
      "learning_rate": 1.7680915018276613e-05,
      "loss": 0.050734762102365494,
      "num_pairs": 28.0,
      "step": 151
    },
    {
      "avg_label": 0.41067710518836975,
      "epoch": 0.5277777777777778,
      "grad_norm": 0.93359375,
      "learning_rate": 1.764401179936761e-05,
      "loss": 0.0304207439039601,
      "num_pairs": 27.5,
      "step": 152
    },
    {
      "avg_label": 0.491695836186409,
      "epoch": 0.53125,
      "grad_norm": 0.482421875,
      "learning_rate": 1.7606856438427054e-05,
      "loss": 0.014743164298124611,
      "num_pairs": 27.5,
      "step": 153
    },
    {
      "avg_label": 0.446428582072258,
      "epoch": 0.5347222222222222,
      "grad_norm": 0.890625,
      "learning_rate": 1.7569450161045444e-05,
      "loss": 0.04068564437329769,
      "num_pairs": 23.5,
      "step": 154
    },
    {
      "avg_label": 0.45932111144065857,
      "epoch": 0.5381944444444444,
      "grad_norm": 0.443359375,
      "learning_rate": 1.7531794201089888e-05,
      "loss": 0.022642125841230154,
      "num_pairs": 28.0,
      "step": 155
    },
    {
      "avg_label": 0.45102816820144653,
      "epoch": 0.5416666666666666,
      "grad_norm": 0.333984375,
      "learning_rate": 1.749388980066342e-05,
      "loss": 0.009190058917738497,
      "num_pairs": 28.0,
      "step": 156
    },
    {
      "avg_label": 0.44252873957157135,
      "epoch": 0.5451388888888888,
      "grad_norm": 0.490234375,
      "learning_rate": 1.745573821006403e-05,
      "loss": 0.0138811687938869,
      "num_pairs": 27.5,
      "step": 157
    },
    {
      "avg_label": 0.44221231341362,
      "epoch": 0.5486111111111112,
      "grad_norm": 1.2890625,
      "learning_rate": 1.7417340687743393e-05,
      "loss": 0.05438859760761261,
      "num_pairs": 28.0,
      "step": 158
    },
    {
      "avg_label": 0.4568452388048172,
      "epoch": 0.5520833333333334,
      "grad_norm": 0.25,
      "learning_rate": 1.7378698500265402e-05,
      "loss": 0.010841536102816463,
      "num_pairs": 28.0,
      "step": 159
    },
    {
      "avg_label": 0.4703125059604645,
      "epoch": 0.5555555555555556,
      "grad_norm": 0.8515625,
      "learning_rate": 1.7339812922264366e-05,
      "loss": 0.03851235564798117,
      "num_pairs": 28.0,
      "step": 160
    },
    {
      "avg_label": 0.45864899456501007,
      "epoch": 0.5590277777777778,
      "grad_norm": 3.4375,
      "learning_rate": 1.730068523640295e-05,
      "loss": 0.11844043270684779,
      "num_pairs": 26.5,
      "step": 161
    },
    {
      "avg_label": 0.4791666567325592,
      "epoch": 0.5625,
      "grad_norm": 0.224609375,
      "learning_rate": 1.72613167333299e-05,
      "loss": 0.028862916253274307,
      "num_pairs": 24.0,
      "step": 162
    },
    {
      "avg_label": 0.45817309617996216,
      "epoch": 0.5659722222222222,
      "grad_norm": 0.2734375,
      "learning_rate": 1.7221708711637455e-05,
      "loss": 0.00937138102017343,
      "num_pairs": 27.5,
      "step": 163
    },
    {
      "avg_label": 0.43892045319080353,
      "epoch": 0.5694444444444444,
      "grad_norm": 0.671875,
      "learning_rate": 1.718186247781849e-05,
      "loss": 0.028468840289860964,
      "num_pairs": 28.0,
      "step": 164
    },
    {
      "avg_label": 0.4518229067325592,
      "epoch": 0.5729166666666666,
      "grad_norm": 6.125,
      "learning_rate": 1.7141779346223465e-05,
      "loss": 0.30780595541000366,
      "num_pairs": 27.5,
      "step": 165
    },
    {
      "avg_label": 0.45110294222831726,
      "epoch": 0.5763888888888888,
      "grad_norm": 0.306640625,
      "learning_rate": 1.7101460639017034e-05,
      "loss": 0.008247810648754239,
      "num_pairs": 28.0,
      "step": 166
    },
    {
      "avg_label": 0.46600276231765747,
      "epoch": 0.5798611111111112,
      "grad_norm": 0.8125,
      "learning_rate": 1.7060907686134445e-05,
      "loss": 0.06383802928030491,
      "num_pairs": 28.0,
      "step": 167
    },
    {
      "avg_label": 0.4557291716337204,
      "epoch": 0.5833333333333334,
      "grad_norm": 0.40234375,
      "learning_rate": 1.7020121825237672e-05,
      "loss": 0.018603924312628806,
      "num_pairs": 25.5,
      "step": 168
    },
    {
      "avg_label": 0.4603828638792038,
      "epoch": 0.5868055555555556,
      "grad_norm": 0.82421875,
      "learning_rate": 1.6979104401671296e-05,
      "loss": 0.022878690622746944,
      "num_pairs": 28.0,
      "step": 169
    },
    {
      "avg_label": 0.4588768184185028,
      "epoch": 0.5902777777777778,
      "grad_norm": 1.4375,
      "learning_rate": 1.693785676841812e-05,
      "loss": 0.06640730798244476,
      "num_pairs": 28.0,
      "step": 170
    },
    {
      "avg_label": 0.4221428632736206,
      "epoch": 0.59375,
      "grad_norm": 0.107421875,
      "learning_rate": 1.6896380286054537e-05,
      "loss": 0.0038028154522180557,
      "num_pairs": 28.0,
      "step": 171
    },
    {
      "avg_label": 0.47298388183116913,
      "epoch": 0.5972222222222222,
      "grad_norm": 0.55078125,
      "learning_rate": 1.6854676322705673e-05,
      "loss": 0.019407693296670914,
      "num_pairs": 28.0,
      "step": 172
    },
    {
      "avg_label": 0.444306343793869,
      "epoch": 0.6006944444444444,
      "grad_norm": 2.65625,
      "learning_rate": 1.6812746254000222e-05,
      "loss": 0.11606021597981453,
      "num_pairs": 28.0,
      "step": 173
    },
    {
      "avg_label": 0.3993750214576721,
      "epoch": 0.6041666666666666,
      "grad_norm": 1.921875,
      "learning_rate": 1.67705914630251e-05,
      "loss": 0.08407180290669203,
      "num_pairs": 27.0,
      "step": 174
    },
    {
      "avg_label": 0.4376724064350128,
      "epoch": 0.6076388888888888,
      "grad_norm": 0.59765625,
      "learning_rate": 1.6728213340279822e-05,
      "loss": 0.01655407203361392,
      "num_pairs": 28.0,
      "step": 175
    },
    {
      "avg_label": 0.31534090638160706,
      "epoch": 0.6111111111111112,
      "grad_norm": 0.1396484375,
      "learning_rate": 1.668561328363061e-05,
      "loss": 0.007483100518584251,
      "num_pairs": 20.0,
      "step": 176
    },
    {
      "avg_label": 0.4442349076271057,
      "epoch": 0.6145833333333334,
      "grad_norm": 0.7265625,
      "learning_rate": 1.6642792698264313e-05,
      "loss": 0.03157099802047014,
      "num_pairs": 28.0,
      "step": 177
    },
    {
      "avg_label": 0.4552556872367859,
      "epoch": 0.6180555555555556,
      "grad_norm": 1.0234375,
      "learning_rate": 1.6599752996642044e-05,
      "loss": 0.0451383707113564,
      "num_pairs": 28.0,
      "step": 178
    },
    {
      "avg_label": 0.4735576957464218,
      "epoch": 0.6215277777777778,
      "grad_norm": 1.203125,
      "learning_rate": 1.655649559845258e-05,
      "loss": 0.056825135834515095,
      "num_pairs": 22.0,
      "step": 179
    },
    {
      "avg_label": 0.38331981003284454,
      "epoch": 0.625,
      "grad_norm": 0.59375,
      "learning_rate": 1.651302193056555e-05,
      "loss": 0.03129632119089365,
      "num_pairs": 22.5,
      "step": 180
    },
    {
      "avg_label": 0.4483432173728943,
      "epoch": 0.6284722222222222,
      "grad_norm": 1.34375,
      "learning_rate": 1.6469333426984357e-05,
      "loss": 0.05972203053534031,
      "num_pairs": 28.0,
      "step": 181
    },
    {
      "avg_label": 0.468173086643219,
      "epoch": 0.6319444444444444,
      "grad_norm": 1.4765625,
      "learning_rate": 1.6425431528798883e-05,
      "loss": 0.06996654625982046,
      "num_pairs": 28.0,
      "step": 182
    },
    {
      "avg_label": 0.4802695959806442,
      "epoch": 0.6354166666666666,
      "grad_norm": 0.890625,
      "learning_rate": 1.6381317684137946e-05,
      "loss": 0.038833396742120385,
      "num_pairs": 27.5,
      "step": 183
    },
    {
      "avg_label": 0.44396552443504333,
      "epoch": 0.6388888888888888,
      "grad_norm": 0.33984375,
      "learning_rate": 1.6336993348121543e-05,
      "loss": 0.010088181821629405,
      "num_pairs": 27.5,
      "step": 184
    },
    {
      "avg_label": 0.45236895978450775,
      "epoch": 0.6423611111111112,
      "grad_norm": 0.10546875,
      "learning_rate": 1.6292459982812845e-05,
      "loss": 0.0034077019081451,
      "num_pairs": 28.0,
      "step": 185
    },
    {
      "avg_label": 0.47602301836013794,
      "epoch": 0.6458333333333334,
      "grad_norm": 0.25,
      "learning_rate": 1.624771905716997e-05,
      "loss": 0.004801646777195856,
      "num_pairs": 27.0,
      "step": 186
    },
    {
      "avg_label": 0.45323528349399567,
      "epoch": 0.6493055555555556,
      "grad_norm": 0.59375,
      "learning_rate": 1.620277204699754e-05,
      "loss": 0.042771865613758564,
      "num_pairs": 27.5,
      "step": 187
    },
    {
      "avg_label": 0.4444444477558136,
      "epoch": 0.6527777777777778,
      "grad_norm": 0.6640625,
      "learning_rate": 1.615762043489797e-05,
      "loss": 0.0236468983348459,
      "num_pairs": 25.0,
      "step": 188
    },
    {
      "avg_label": 0.44826723635196686,
      "epoch": 0.65625,
      "grad_norm": 0.1982421875,
      "learning_rate": 1.611226571022261e-05,
      "loss": 0.007521128980442882,
      "num_pairs": 28.0,
      "step": 189
    },
    {
      "avg_label": 0.4715389013290405,
      "epoch": 0.6597222222222222,
      "grad_norm": 0.5625,
      "learning_rate": 1.6066709369022576e-05,
      "loss": 0.020246338099241257,
      "num_pairs": 27.0,
      "step": 190
    },
    {
      "avg_label": 0.46243055164813995,
      "epoch": 0.6631944444444444,
      "grad_norm": 3.53125,
      "learning_rate": 1.6020952913999423e-05,
      "loss": 0.08208651561290026,
      "num_pairs": 27.5,
      "step": 191
    },
    {
      "avg_label": 0.4524739533662796,
      "epoch": 0.6666666666666666,
      "grad_norm": 0.166015625,
      "learning_rate": 1.5974997854455575e-05,
      "loss": 0.0023918856459204108,
      "num_pairs": 28.0,
      "step": 192
    },
    {
      "avg_label": 0.46304087340831757,
      "epoch": 0.6701388888888888,
      "grad_norm": 0.412109375,
      "learning_rate": 1.5928845706244537e-05,
      "loss": 0.020947942975908518,
      "num_pairs": 28.0,
      "step": 193
    },
    {
      "avg_label": 0.4618534445762634,
      "epoch": 0.6736111111111112,
      "grad_norm": 0.232421875,
      "learning_rate": 1.588249799172089e-05,
      "loss": 0.005286574247293174,
      "num_pairs": 27.5,
      "step": 194
    },
    {
      "avg_label": 0.4678194969892502,
      "epoch": 0.6770833333333334,
      "grad_norm": 1.1015625,
      "learning_rate": 1.583595623969009e-05,
      "loss": 0.05471465736627579,
      "num_pairs": 28.0,
      "step": 195
    },
    {
      "avg_label": 0.4570639431476593,
      "epoch": 0.6805555555555556,
      "grad_norm": 0.2197265625,
      "learning_rate": 1.5789221985358017e-05,
      "loss": 0.004043272638227791,
      "num_pairs": 27.5,
      "step": 196
    },
    {
      "avg_label": 0.44032806158065796,
      "epoch": 0.6840277777777778,
      "grad_norm": 2.171875,
      "learning_rate": 1.574229677028036e-05,
      "loss": 0.03309955738950521,
      "num_pairs": 25.5,
      "step": 197
    },
    {
      "avg_label": 0.4401831030845642,
      "epoch": 0.6875,
      "grad_norm": 1.390625,
      "learning_rate": 1.5695182142311743e-05,
      "loss": 0.03160541597753763,
      "num_pairs": 28.0,
      "step": 198
    },
    {
      "avg_label": 0.45328740775585175,
      "epoch": 0.6909722222222222,
      "grad_norm": 4.3125,
      "learning_rate": 1.564787965555469e-05,
      "loss": 0.07159067690372467,
      "num_pairs": 27.5,
      "step": 199
    },
    {
      "avg_label": 0.4379340261220932,
      "epoch": 0.6944444444444444,
      "grad_norm": 0.65625,
      "learning_rate": 1.560039087030836e-05,
      "loss": 0.0306890313513577,
      "num_pairs": 28.0,
      "step": 200
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 576,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}