{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3472222222222222,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "avg_label": 0.45932112634181976,
      "epoch": 0.003472222222222222,
      "grad_norm": 0.515625,
      "learning_rate": 0.0,
      "loss": 0.02412083954550326,
      "num_pairs": 28.0,
      "step": 1
    },
    {
      "avg_label": 0.4166666865348816,
      "epoch": 0.006944444444444444,
      "grad_norm": 11.625,
      "learning_rate": 6.896551724137931e-07,
      "loss": 0.40197963267564774,
      "num_pairs": 21.5,
      "step": 2
    },
    {
      "avg_label": 0.4724903553724289,
      "epoch": 0.010416666666666666,
      "grad_norm": 13.25,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 0.5004251897335052,
      "num_pairs": 28.0,
      "step": 3
    },
    {
      "avg_label": 0.44384056329727173,
      "epoch": 0.013888888888888888,
      "grad_norm": 5.875,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 0.23160110414028168,
      "num_pairs": 27.5,
      "step": 4
    },
    {
      "avg_label": 0.4630681872367859,
      "epoch": 0.017361111111111112,
      "grad_norm": 8.9375,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 0.4468539021909237,
      "num_pairs": 24.0,
      "step": 5
    },
    {
      "avg_label": 0.45495015382766724,
      "epoch": 0.020833333333333332,
      "grad_norm": 1.34375,
      "learning_rate": 3.448275862068966e-06,
      "loss": 0.017617323901504278,
      "num_pairs": 28.0,
      "step": 6
    },
    {
      "avg_label": 0.4732142984867096,
      "epoch": 0.024305555555555556,
      "grad_norm": 11.5625,
      "learning_rate": 4.137931034482759e-06,
      "loss": 0.4206290766596794,
      "num_pairs": 24.0,
      "step": 7
    },
    {
      "avg_label": 0.4736842066049576,
      "epoch": 0.027777777777777776,
      "grad_norm": 0.1484375,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 0.004913174081593752,
      "num_pairs": 27.0,
      "step": 8
    },
    {
      "avg_label": 0.45513392984867096,
      "epoch": 0.03125,
      "grad_norm": 7.84375,
      "learning_rate": 5.517241379310345e-06,
      "loss": 0.40890943817794323,
      "num_pairs": 28.0,
      "step": 9
    },
    {
      "avg_label": 0.4797794222831726,
      "epoch": 0.034722222222222224,
      "grad_norm": 4.21875,
      "learning_rate": 6.206896551724138e-06,
      "loss": 0.07930763380136341,
      "num_pairs": 28.0,
      "step": 10
    },
    {
      "avg_label": 0.4505208283662796,
      "epoch": 0.03819444444444445,
      "grad_norm": 18.0,
      "learning_rate": 6.896551724137932e-06,
      "loss": 0.8742586374282837,
      "num_pairs": 28.0,
      "step": 11
    },
    {
      "avg_label": 0.42196430265903473,
      "epoch": 0.041666666666666664,
      "grad_norm": 17.625,
      "learning_rate": 7.586206896551724e-06,
      "loss": 0.8603673875331879,
      "num_pairs": 23.5,
      "step": 12
    },
    {
      "avg_label": 0.44431088864803314,
      "epoch": 0.04513888888888889,
      "grad_norm": 1.3203125,
      "learning_rate": 8.275862068965518e-06,
      "loss": 0.028431319631636143,
      "num_pairs": 28.0,
      "step": 13
    },
    {
      "avg_label": 0.3893229216337204,
      "epoch": 0.04861111111111111,
      "grad_norm": 10.0,
      "learning_rate": 8.965517241379312e-06,
      "loss": 0.251303069293499,
      "num_pairs": 23.5,
      "step": 14
    },
    {
      "avg_label": 0.43348929286003113,
      "epoch": 0.052083333333333336,
      "grad_norm": 3.796875,
      "learning_rate": 9.655172413793105e-06,
      "loss": 0.11253293231129646,
      "num_pairs": 27.0,
      "step": 15
    },
    {
      "avg_label": 0.439510241150856,
      "epoch": 0.05555555555555555,
      "grad_norm": 4.21875,
      "learning_rate": 1.0344827586206898e-05,
      "loss": 0.16489703953266144,
      "num_pairs": 28.0,
      "step": 16
    },
    {
      "avg_label": 0.4412849396467209,
      "epoch": 0.059027777777777776,
      "grad_norm": 4.84375,
      "learning_rate": 1.103448275862069e-05,
      "loss": 0.17035143449902534,
      "num_pairs": 28.0,
      "step": 17
    },
    {
      "avg_label": 0.4661668390035629,
      "epoch": 0.0625,
      "grad_norm": 3.1875,
      "learning_rate": 1.1724137931034483e-05,
      "loss": 0.10208164528012276,
      "num_pairs": 28.0,
      "step": 18
    },
    {
      "avg_label": 0.45625001192092896,
      "epoch": 0.06597222222222222,
      "grad_norm": 13.625,
      "learning_rate": 1.2413793103448277e-05,
      "loss": 0.5987264770083129,
      "num_pairs": 28.0,
      "step": 19
    },
    {
      "avg_label": 0.4458743929862976,
      "epoch": 0.06944444444444445,
      "grad_norm": 0.59375,
      "learning_rate": 1.310344827586207e-05,
      "loss": 0.03994415677152574,
      "num_pairs": 28.0,
      "step": 20
    },
    {
      "avg_label": 0.4499442130327225,
      "epoch": 0.07291666666666667,
      "grad_norm": 2.078125,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 0.08950073830783367,
      "num_pairs": 28.0,
      "step": 21
    },
    {
      "avg_label": 0.4587053656578064,
      "epoch": 0.0763888888888889,
      "grad_norm": 9.0625,
      "learning_rate": 1.4482758620689657e-05,
      "loss": 0.5212011188268661,
      "num_pairs": 28.0,
      "step": 22
    },
    {
      "avg_label": 0.4502655118703842,
      "epoch": 0.0798611111111111,
      "grad_norm": 5.03125,
      "learning_rate": 1.5172413793103448e-05,
      "loss": 0.08295552420895547,
      "num_pairs": 28.0,
      "step": 23
    },
    {
      "avg_label": 0.4362068921327591,
      "epoch": 0.08333333333333333,
      "grad_norm": 1.65625,
      "learning_rate": 1.586206896551724e-05,
      "loss": 0.07221807166934013,
      "num_pairs": 27.0,
      "step": 24
    },
    {
      "avg_label": 0.4532342702150345,
      "epoch": 0.08680555555555555,
      "grad_norm": 1.140625,
      "learning_rate": 1.6551724137931037e-05,
      "loss": 0.03893738705664873,
      "num_pairs": 28.0,
      "step": 25
    },
    {
      "avg_label": 0.4947916716337204,
      "epoch": 0.09027777777777778,
      "grad_norm": 6.28125,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 0.13335712999105453,
      "num_pairs": 23.5,
      "step": 26
    },
    {
      "avg_label": 0.3125,
      "epoch": 0.09375,
      "grad_norm": 13.75,
      "learning_rate": 1.7931034482758623e-05,
      "loss": 0.23121396452188492,
      "num_pairs": 17.5,
      "step": 27
    },
    {
      "avg_label": 0.4624594449996948,
      "epoch": 0.09722222222222222,
      "grad_norm": 11.75,
      "learning_rate": 1.8620689655172415e-05,
      "loss": 0.4965571314096451,
      "num_pairs": 28.0,
      "step": 28
    },
    {
      "avg_label": 0.46101999282836914,
      "epoch": 0.10069444444444445,
      "grad_norm": 3.578125,
      "learning_rate": 1.931034482758621e-05,
      "loss": 0.16813608072698116,
      "num_pairs": 28.0,
      "step": 29
    },
    {
      "avg_label": 0.4742646962404251,
      "epoch": 0.10416666666666667,
      "grad_norm": 3.171875,
      "learning_rate": 2e-05,
      "loss": 0.1642257682979107,
      "num_pairs": 22.0,
      "step": 30
    },
    {
      "avg_label": 0.45412661135196686,
      "epoch": 0.1076388888888889,
      "grad_norm": 8.4375,
      "learning_rate": 1.9999835072185805e-05,
      "loss": 0.24545209854841232,
      "num_pairs": 28.0,
      "step": 31
    },
    {
      "avg_label": 0.45036764442920685,
      "epoch": 0.1111111111111111,
      "grad_norm": 3.234375,
      "learning_rate": 1.999934029418346e-05,
      "loss": 0.14530762657523155,
      "num_pairs": 26.0,
      "step": 32
    },
    {
      "avg_label": 0.4388917088508606,
      "epoch": 0.11458333333333333,
      "grad_norm": 2.890625,
      "learning_rate": 1.9998515682313485e-05,
      "loss": 0.10479701962321997,
      "num_pairs": 28.0,
      "step": 33
    },
    {
      "avg_label": 0.43915343284606934,
      "epoch": 0.11805555555555555,
      "grad_norm": 7.875,
      "learning_rate": 1.999736126377618e-05,
      "loss": 0.3016491085290909,
      "num_pairs": 28.0,
      "step": 34
    },
    {
      "avg_label": 0.4497767984867096,
      "epoch": 0.12152777777777778,
      "grad_norm": 3.96875,
      "learning_rate": 1.999587707665068e-05,
      "loss": 0.15345897153019905,
      "num_pairs": 28.0,
      "step": 35
    },
    {
      "avg_label": 0.4572916626930237,
      "epoch": 0.125,
      "grad_norm": 1.3515625,
      "learning_rate": 1.999406316989374e-05,
      "loss": 0.04903313983231783,
      "num_pairs": 28.0,
      "step": 36
    },
    {
      "avg_label": 0.45703125,
      "epoch": 0.1284722222222222,
      "grad_norm": 8.6875,
      "learning_rate": 1.9991919603338088e-05,
      "loss": 0.36730772256851196,
      "num_pairs": 28.0,
      "step": 37
    },
    {
      "avg_label": 0.4379105120897293,
      "epoch": 0.13194444444444445,
      "grad_norm": 6.25,
      "learning_rate": 1.998944644769048e-05,
      "loss": 0.2889542682096362,
      "num_pairs": 27.5,
      "step": 38
    },
    {
      "avg_label": 0.4781250059604645,
      "epoch": 0.13541666666666666,
      "grad_norm": 10.0625,
      "learning_rate": 1.9986643784529346e-05,
      "loss": 0.416560098528862,
      "num_pairs": 24.5,
      "step": 39
    },
    {
      "avg_label": 0.4678819328546524,
      "epoch": 0.1388888888888889,
      "grad_norm": 4.96875,
      "learning_rate": 1.9983511706302102e-05,
      "loss": 0.1765335127711296,
      "num_pairs": 28.0,
      "step": 40
    },
    {
      "avg_label": 0.4556451737880707,
      "epoch": 0.1423611111111111,
      "grad_norm": 3.171875,
      "learning_rate": 1.9980050316322118e-05,
      "loss": 0.15112879872322083,
      "num_pairs": 27.5,
      "step": 41
    },
    {
      "avg_label": 0.4666379243135452,
      "epoch": 0.14583333333333334,
      "grad_norm": 10.875,
      "learning_rate": 1.997625972876529e-05,
      "loss": 0.5556365996599197,
      "num_pairs": 28.0,
      "step": 42
    },
    {
      "avg_label": 0.45076756179332733,
      "epoch": 0.14930555555555555,
      "grad_norm": 2.40625,
      "learning_rate": 1.997214006866628e-05,
      "loss": 0.09973586304113269,
      "num_pairs": 28.0,
      "step": 43
    },
    {
      "avg_label": 0.4405048042535782,
      "epoch": 0.1527777777777778,
      "grad_norm": 8.9375,
      "learning_rate": 1.9967691471914392e-05,
      "loss": 0.3806912750005722,
      "num_pairs": 28.0,
      "step": 44
    },
    {
      "avg_label": 0.4602416008710861,
      "epoch": 0.15625,
      "grad_norm": 4.8125,
      "learning_rate": 1.99629140852491e-05,
      "loss": 0.14190024323761463,
      "num_pairs": 28.0,
      "step": 45
    },
    {
      "avg_label": 0.44902269542217255,
      "epoch": 0.1597222222222222,
      "grad_norm": 1.890625,
      "learning_rate": 1.9957808066255187e-05,
      "loss": 0.07964974269270897,
      "num_pairs": 28.0,
      "step": 46
    },
    {
      "avg_label": 0.44250571727752686,
      "epoch": 0.16319444444444445,
      "grad_norm": 7.5625,
      "learning_rate": 1.9952373583357566e-05,
      "loss": 0.3196651563048363,
      "num_pairs": 27.5,
      "step": 47
    },
    {
      "avg_label": 0.45804399251937866,
      "epoch": 0.16666666666666666,
      "grad_norm": 3.03125,
      "learning_rate": 1.994661081581571e-05,
      "loss": 0.1772424913942814,
      "num_pairs": 28.0,
      "step": 48
    },
    {
      "avg_label": 0.44343800842761993,
      "epoch": 0.1701388888888889,
      "grad_norm": 0.7734375,
      "learning_rate": 1.9940519953717762e-05,
      "loss": 0.03773397207260132,
      "num_pairs": 27.5,
      "step": 49
    },
    {
      "avg_label": 0.4277420938014984,
      "epoch": 0.1736111111111111,
      "grad_norm": 0.7578125,
      "learning_rate": 1.993410119797422e-05,
      "loss": 0.017126482212916017,
      "num_pairs": 27.5,
      "step": 50
    },
    {
      "avg_label": 0.4761904776096344,
      "epoch": 0.17708333333333334,
      "grad_norm": 2.265625,
      "learning_rate": 1.9927354760311365e-05,
      "loss": 0.0958416610956192,
      "num_pairs": 24.0,
      "step": 51
    },
    {
      "avg_label": 0.45110294222831726,
      "epoch": 0.18055555555555555,
      "grad_norm": 3.859375,
      "learning_rate": 1.992028086326424e-05,
      "loss": 0.176346430554986,
      "num_pairs": 28.0,
      "step": 52
    },
    {
      "avg_label": 0.4557291716337204,
      "epoch": 0.1840277777777778,
      "grad_norm": 1.484375,
      "learning_rate": 1.991287974016932e-05,
      "loss": 0.06965811923146248,
      "num_pairs": 28.0,
      "step": 53
    },
    {
      "avg_label": 0.441498339176178,
      "epoch": 0.1875,
      "grad_norm": 1.1328125,
      "learning_rate": 1.9905151635156813e-05,
      "loss": 0.04886321909725666,
      "num_pairs": 28.0,
      "step": 54
    },
    {
      "avg_label": 0.4579603523015976,
      "epoch": 0.1909722222222222,
      "grad_norm": 0.79296875,
      "learning_rate": 1.9897096803142616e-05,
      "loss": 0.02616998180747032,
      "num_pairs": 28.0,
      "step": 55
    },
    {
      "avg_label": 0.45521390438079834,
      "epoch": 0.19444444444444445,
      "grad_norm": 0.48046875,
      "learning_rate": 1.988871550981989e-05,
      "loss": 0.04496368020772934,
      "num_pairs": 28.0,
      "step": 56
    },
    {
      "avg_label": 0.46743176877498627,
      "epoch": 0.19791666666666666,
      "grad_norm": 1.953125,
      "learning_rate": 1.988000803165032e-05,
      "loss": 0.08031387068331242,
      "num_pairs": 27.0,
      "step": 57
    },
    {
      "avg_label": 0.39706501364707947,
      "epoch": 0.2013888888888889,
      "grad_norm": 0.765625,
      "learning_rate": 1.9870974655854974e-05,
      "loss": 0.05276821367442608,
      "num_pairs": 28.0,
      "step": 58
    },
    {
      "avg_label": 0.4635416716337204,
      "epoch": 0.2048611111111111,
      "grad_norm": 0.345703125,
      "learning_rate": 1.9861615680404833e-05,
      "loss": 0.02046751804300584,
      "num_pairs": 28.0,
      "step": 59
    },
    {
      "avg_label": 0.4392857253551483,
      "epoch": 0.20833333333333334,
      "grad_norm": 1.59375,
      "learning_rate": 1.985193141401097e-05,
      "loss": 0.03745671547949314,
      "num_pairs": 28.0,
      "step": 60
    },
    {
      "avg_label": 0.430803582072258,
      "epoch": 0.21180555555555555,
      "grad_norm": 2.34375,
      "learning_rate": 1.9841922176114366e-05,
      "loss": 0.04785962332971394,
      "num_pairs": 21.5,
      "step": 61
    },
    {
      "avg_label": 0.4626736044883728,
      "epoch": 0.2152777777777778,
      "grad_norm": 0.61328125,
      "learning_rate": 1.9831588296875367e-05,
      "loss": 0.021605145651847124,
      "num_pairs": 28.0,
      "step": 62
    },
    {
      "avg_label": 0.4780505895614624,
      "epoch": 0.21875,
      "grad_norm": 1.609375,
      "learning_rate": 1.982093011716279e-05,
      "loss": 0.08873376995325089,
      "num_pairs": 28.0,
      "step": 63
    },
    {
      "avg_label": 0.4489087462425232,
      "epoch": 0.2222222222222222,
      "grad_norm": 1.65625,
      "learning_rate": 1.9809947988542696e-05,
      "loss": 0.0287565803155303,
      "num_pairs": 27.5,
      "step": 64
    },
    {
      "avg_label": 0.4442349076271057,
      "epoch": 0.22569444444444445,
      "grad_norm": 0.82421875,
      "learning_rate": 1.979864227326678e-05,
      "loss": 0.041462352965027094,
      "num_pairs": 28.0,
      "step": 65
    },
    {
      "avg_label": 0.5274767875671387,
      "epoch": 0.22916666666666666,
      "grad_norm": 0.26171875,
      "learning_rate": 1.9787013344260422e-05,
      "loss": 0.006527273333631456,
      "num_pairs": 26.0,
      "step": 66
    },
    {
      "avg_label": 0.45734797418117523,
      "epoch": 0.2326388888888889,
      "grad_norm": 7.21875,
      "learning_rate": 1.9775061585110387e-05,
      "loss": 0.33710330724716187,
      "num_pairs": 28.0,
      "step": 67
    },
    {
      "avg_label": 0.44953545928001404,
      "epoch": 0.2361111111111111,
      "grad_norm": 1.1796875,
      "learning_rate": 1.976278739005218e-05,
      "loss": 0.05175479780882597,
      "num_pairs": 28.0,
      "step": 68
    },
    {
      "avg_label": 0.4675245136022568,
      "epoch": 0.23958333333333334,
      "grad_norm": 3.859375,
      "learning_rate": 1.9750191163957042e-05,
      "loss": 0.15232571214437485,
      "num_pairs": 28.0,
      "step": 69
    },
    {
      "avg_label": 0.45781250298023224,
      "epoch": 0.24305555555555555,
      "grad_norm": 1.7890625,
      "learning_rate": 1.9737273322318565e-05,
      "loss": 0.09527681209146976,
      "num_pairs": 28.0,
      "step": 70
    },
    {
      "avg_label": 0.4518994987010956,
      "epoch": 0.2465277777777778,
      "grad_norm": 0.703125,
      "learning_rate": 1.972403429123904e-05,
      "loss": 0.035001321870367974,
      "num_pairs": 28.0,
      "step": 71
    },
    {
      "avg_label": 0.4765625,
      "epoch": 0.25,
      "grad_norm": 0.75,
      "learning_rate": 1.971047450741535e-05,
      "loss": 0.026095230132341385,
      "num_pairs": 22.0,
      "step": 72
    },
    {
      "avg_label": 0.5010775923728943,
      "epoch": 0.2534722222222222,
      "grad_norm": 0.58984375,
      "learning_rate": 1.9696594418124598e-05,
      "loss": 0.028016670839861035,
      "num_pairs": 23.5,
      "step": 73
    },
    {
      "avg_label": 0.6005434691905975,
      "epoch": 0.2569444444444444,
      "grad_norm": 0.62109375,
      "learning_rate": 1.9682394481209338e-05,
      "loss": 0.017244269140064716,
      "num_pairs": 20.0,
      "step": 74
    },
    {
      "avg_label": 0.44895362854003906,
      "epoch": 0.2604166666666667,
      "grad_norm": 2.65625,
      "learning_rate": 1.966787516506249e-05,
      "loss": 0.13350838515907526,
      "num_pairs": 27.5,
      "step": 75
    },
    {
      "avg_label": 0.44657258689403534,
      "epoch": 0.2638888888888889,
      "grad_norm": 2.6875,
      "learning_rate": 1.9653036948611864e-05,
      "loss": 0.13598253019154072,
      "num_pairs": 24.5,
      "step": 76
    },
    {
      "avg_label": 0.4062500149011612,
      "epoch": 0.2673611111111111,
      "grad_norm": 1.6328125,
      "learning_rate": 1.9637880321304387e-05,
      "loss": 0.09597307164222002,
      "num_pairs": 21.5,
      "step": 77
    },
    {
      "avg_label": 0.43358150124549866,
      "epoch": 0.2708333333333333,
      "grad_norm": 1.5859375,
      "learning_rate": 1.962240578308993e-05,
      "loss": 0.02830888982862234,
      "num_pairs": 28.0,
      "step": 78
    },
    {
      "avg_label": 0.4296875,
      "epoch": 0.2743055555555556,
      "grad_norm": 0.5703125,
      "learning_rate": 1.9606613844404853e-05,
      "loss": 0.0342620718292892,
      "num_pairs": 27.5,
      "step": 79
    },
    {
      "avg_label": 0.44969919323921204,
      "epoch": 0.2777777777777778,
      "grad_norm": 1.140625,
      "learning_rate": 1.9590505026155146e-05,
      "loss": 0.047657732968218625,
      "num_pairs": 28.0,
      "step": 80
    },
    {
      "avg_label": 0.44572368264198303,
      "epoch": 0.28125,
      "grad_norm": 0.98828125,
      "learning_rate": 1.9574079859699236e-05,
      "loss": 0.048789044842123985,
      "num_pairs": 28.0,
      "step": 81
    },
    {
      "avg_label": 0.39816810190677643,
      "epoch": 0.2847222222222222,
      "grad_norm": 1.140625,
      "learning_rate": 1.955733888683049e-05,
      "loss": 0.053058773279190063,
      "num_pairs": 27.5,
      "step": 82
    },
    {
      "avg_label": 0.43917112052440643,
      "epoch": 0.2881944444444444,
      "grad_norm": 0.91015625,
      "learning_rate": 1.9540282659759317e-05,
      "loss": 0.05478274542838335,
      "num_pairs": 27.5,
      "step": 83
    },
    {
      "avg_label": 0.4709821492433548,
      "epoch": 0.2916666666666667,
      "grad_norm": 0.228515625,
      "learning_rate": 1.9522911741094966e-05,
      "loss": 0.011658322997391224,
      "num_pairs": 28.0,
      "step": 84
    },
    {
      "avg_label": 0.4544480890035629,
      "epoch": 0.2951388888888889,
      "grad_norm": 11.3125,
      "learning_rate": 1.9505226703826973e-05,
      "loss": 0.523826252669096,
      "num_pairs": 28.0,
      "step": 85
    },
    {
      "avg_label": 0.4711538553237915,
      "epoch": 0.2986111111111111,
      "grad_norm": 0.80078125,
      "learning_rate": 1.948722813130624e-05,
      "loss": 0.023038258543238044,
      "num_pairs": 27.5,
      "step": 86
    },
    {
      "avg_label": 0.4539930522441864,
      "epoch": 0.3020833333333333,
      "grad_norm": 1.6328125,
      "learning_rate": 1.9468916617225814e-05,
      "loss": 0.058613574132323265,
      "num_pairs": 28.0,
      "step": 87
    },
    {
      "avg_label": 0.4584091007709503,
      "epoch": 0.3055555555555556,
      "grad_norm": 0.388671875,
      "learning_rate": 1.9450292765601287e-05,
      "loss": 0.016918038250878453,
      "num_pairs": 28.0,
      "step": 88
    },
    {
      "avg_label": 0.4232954829931259,
      "epoch": 0.3090277777777778,
      "grad_norm": 1.3359375,
      "learning_rate": 1.94313571907509e-05,
      "loss": 0.060430300422012806,
      "num_pairs": 28.0,
      "step": 89
    },
    {
      "avg_label": 0.5833333134651184,
      "epoch": 0.3125,
      "grad_norm": 4.125,
      "learning_rate": 1.941211051727524e-05,
      "loss": 0.060059962212108076,
      "num_pairs": 20.5,
      "step": 90
    },
    {
      "avg_label": 0.45364584028720856,
      "epoch": 0.3159722222222222,
      "grad_norm": 1.4375,
      "learning_rate": 1.939255338003666e-05,
      "loss": 0.04583445412572473,
      "num_pairs": 28.0,
      "step": 91
    },
    {
      "avg_label": 0.4437500089406967,
      "epoch": 0.3194444444444444,
      "grad_norm": 0.26171875,
      "learning_rate": 1.937268642413835e-05,
      "loss": 0.00882079591974616,
      "num_pairs": 27.0,
      "step": 92
    },
    {
      "avg_label": 0.4508301317691803,
      "epoch": 0.3229166666666667,
      "grad_norm": 0.51953125,
      "learning_rate": 1.9352510304903017e-05,
      "loss": 0.03392870319657959,
      "num_pairs": 27.5,
      "step": 93
    },
    {
      "avg_label": 0.44549499452114105,
      "epoch": 0.3263888888888889,
      "grad_norm": 1.4375,
      "learning_rate": 1.9332025687851325e-05,
      "loss": 0.04178585158661008,
      "num_pairs": 28.0,
      "step": 94
    },
    {
      "avg_label": 0.47159090638160706,
      "epoch": 0.3298611111111111,
      "grad_norm": 0.92578125,
      "learning_rate": 1.931123324867989e-05,
      "loss": 0.028863655403256416,
      "num_pairs": 28.0,
      "step": 95
    },
    {
      "avg_label": 0.4557291716337204,
      "epoch": 0.3333333333333333,
      "grad_norm": 2.671875,
      "learning_rate": 1.929013367323902e-05,
      "loss": 0.1290583610534668,
      "num_pairs": 28.0,
      "step": 96
    },
    {
      "avg_label": 0.44440363347530365,
      "epoch": 0.3368055555555556,
      "grad_norm": 0.75390625,
      "learning_rate": 1.926872765751009e-05,
      "loss": 0.01571450149640441,
      "num_pairs": 28.0,
      "step": 97
    },
    {
      "avg_label": 0.5104166716337204,
      "epoch": 0.3402777777777778,
      "grad_norm": 5.9375,
      "learning_rate": 1.9247015907582574e-05,
      "loss": 0.2692837491631508,
      "num_pairs": 27.5,
      "step": 98
    },
    {
      "avg_label": 0.4975000023841858,
      "epoch": 0.34375,
      "grad_norm": 0.462890625,
      "learning_rate": 1.9224999139630766e-05,
      "loss": 0.02842018473893404,
      "num_pairs": 24.0,
      "step": 99
    },
    {
      "avg_label": 0.46345899999141693,
      "epoch": 0.3472222222222222,
      "grad_norm": 3.578125,
      "learning_rate": 1.920267807989015e-05,
      "loss": 0.12233215570449829,
      "num_pairs": 25.5,
      "step": 100
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 576,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}