| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.9987398134923968, | |
| "eval_steps": 248, | |
| "global_step": 743, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0013441989414433337, | |
| "grad_norm": 9.1875, | |
| "learning_rate": 1.0000000000000002e-06, | |
| "loss": 1.0378, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0013441989414433337, | |
| "eval_loss": 3.0436718463897705, | |
| "eval_runtime": 5734.1832, | |
| "eval_samples_per_second": 3.861, | |
| "eval_steps_per_second": 0.483, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0026883978828866673, | |
| "grad_norm": 10.3125, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 1.1143, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.004032596824330001, | |
| "grad_norm": 8.75, | |
| "learning_rate": 3e-06, | |
| "loss": 1.0933, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.005376795765773335, | |
| "grad_norm": 6.40625, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 1.1014, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.006720994707216668, | |
| "grad_norm": 5.34375, | |
| "learning_rate": 5e-06, | |
| "loss": 1.0504, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.008065193648660002, | |
| "grad_norm": 3.453125, | |
| "learning_rate": 6e-06, | |
| "loss": 1.0031, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.009409392590103335, | |
| "grad_norm": 3.0, | |
| "learning_rate": 7e-06, | |
| "loss": 1.0001, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.01075359153154667, | |
| "grad_norm": 2.640625, | |
| "learning_rate": 8.000000000000001e-06, | |
| "loss": 1.0196, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.012097790472990002, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 9e-06, | |
| "loss": 0.9207, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.013441989414433336, | |
| "grad_norm": 1.859375, | |
| "learning_rate": 1e-05, | |
| "loss": 0.9502, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01478618835587667, | |
| "grad_norm": 2.34375, | |
| "learning_rate": 9.999954076906038e-06, | |
| "loss": 0.9674, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.016130387297320005, | |
| "grad_norm": 1.6953125, | |
| "learning_rate": 9.99981630846772e-06, | |
| "loss": 0.8617, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.017474586238763336, | |
| "grad_norm": 1.8046875, | |
| "learning_rate": 9.999586697215748e-06, | |
| "loss": 0.9222, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.01881878518020667, | |
| "grad_norm": 1.6484375, | |
| "learning_rate": 9.999265247367909e-06, | |
| "loss": 0.884, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.020162984121650004, | |
| "grad_norm": 1.7578125, | |
| "learning_rate": 9.998851964828987e-06, | |
| "loss": 0.9086, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.02150718306309334, | |
| "grad_norm": 1.6015625, | |
| "learning_rate": 9.99834685719067e-06, | |
| "loss": 0.8723, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.022851382004536673, | |
| "grad_norm": 1.7109375, | |
| "learning_rate": 9.997749933731397e-06, | |
| "loss": 0.8678, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.024195580945980004, | |
| "grad_norm": 1.5625, | |
| "learning_rate": 9.997061205416203e-06, | |
| "loss": 0.8497, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.025539779887423338, | |
| "grad_norm": 1.515625, | |
| "learning_rate": 9.996280684896496e-06, | |
| "loss": 0.8475, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.026883978828866673, | |
| "grad_norm": 1.625, | |
| "learning_rate": 9.995408386509846e-06, | |
| "loss": 0.8673, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.028228177770310007, | |
| "grad_norm": 1.5625, | |
| "learning_rate": 9.99444432627971e-06, | |
| "loss": 0.8704, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.02957237671175334, | |
| "grad_norm": 1.53125, | |
| "learning_rate": 9.993388521915134e-06, | |
| "loss": 0.813, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.030916575653196672, | |
| "grad_norm": 1.453125, | |
| "learning_rate": 9.992240992810445e-06, | |
| "loss": 0.8497, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.03226077459464001, | |
| "grad_norm": 1.453125, | |
| "learning_rate": 9.991001760044877e-06, | |
| "loss": 0.8027, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.03360497353608334, | |
| "grad_norm": 1.375, | |
| "learning_rate": 9.989670846382189e-06, | |
| "loss": 0.8099, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.03494917247752667, | |
| "grad_norm": 1.328125, | |
| "learning_rate": 9.98824827627025e-06, | |
| "loss": 0.7818, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.03629337141897001, | |
| "grad_norm": 1.5, | |
| "learning_rate": 9.986734075840591e-06, | |
| "loss": 0.8078, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.03763757036041334, | |
| "grad_norm": 1.5625, | |
| "learning_rate": 9.985128272907917e-06, | |
| "loss": 0.819, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.03898176930185668, | |
| "grad_norm": 1.34375, | |
| "learning_rate": 9.983430896969606e-06, | |
| "loss": 0.7949, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.04032596824330001, | |
| "grad_norm": 1.46875, | |
| "learning_rate": 9.981641979205158e-06, | |
| "loss": 0.8044, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.04167016718474334, | |
| "grad_norm": 1.4921875, | |
| "learning_rate": 9.97976155247563e-06, | |
| "loss": 0.8025, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.04301436612618668, | |
| "grad_norm": 1.3828125, | |
| "learning_rate": 9.977789651323025e-06, | |
| "loss": 0.7461, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.04435856506763001, | |
| "grad_norm": 1.40625, | |
| "learning_rate": 9.975726311969664e-06, | |
| "loss": 0.787, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.045702764009073346, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 9.973571572317519e-06, | |
| "loss": 0.7837, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.04704696295051668, | |
| "grad_norm": 1.4453125, | |
| "learning_rate": 9.971325471947518e-06, | |
| "loss": 0.8101, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.04839116189196001, | |
| "grad_norm": 1.4921875, | |
| "learning_rate": 9.968988052118804e-06, | |
| "loss": 0.783, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.049735360833403346, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 9.966559355768005e-06, | |
| "loss": 0.8141, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.051079559774846676, | |
| "grad_norm": 1.359375, | |
| "learning_rate": 9.964039427508418e-06, | |
| "loss": 0.7753, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.052423758716290014, | |
| "grad_norm": 1.375, | |
| "learning_rate": 9.961428313629203e-06, | |
| "loss": 0.773, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.053767957657733345, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.958726062094533e-06, | |
| "loss": 0.7557, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.055112156599176676, | |
| "grad_norm": 1.3203125, | |
| "learning_rate": 9.955932722542709e-06, | |
| "loss": 0.7241, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.056456355540620014, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 9.953048346285245e-06, | |
| "loss": 0.781, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.057800554482063345, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 9.950072986305938e-06, | |
| "loss": 0.7644, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.05914475342350668, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.947006697259881e-06, | |
| "loss": 0.7406, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.06048895236495001, | |
| "grad_norm": 1.3828125, | |
| "learning_rate": 9.943849535472468e-06, | |
| "loss": 0.762, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.061833151306393344, | |
| "grad_norm": 1.3046875, | |
| "learning_rate": 9.940601558938348e-06, | |
| "loss": 0.7448, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.06317735024783667, | |
| "grad_norm": 1.390625, | |
| "learning_rate": 9.93726282732038e-06, | |
| "loss": 0.7658, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.06452154918928002, | |
| "grad_norm": 1.4765625, | |
| "learning_rate": 9.933833401948514e-06, | |
| "loss": 0.7876, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.06586574813072335, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 9.930313345818683e-06, | |
| "loss": 0.8166, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.06720994707216668, | |
| "grad_norm": 1.421875, | |
| "learning_rate": 9.92670272359163e-06, | |
| "loss": 0.7927, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.06855414601361001, | |
| "grad_norm": 1.359375, | |
| "learning_rate": 9.923001601591738e-06, | |
| "loss": 0.7906, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.06989834495505334, | |
| "grad_norm": 1.296875, | |
| "learning_rate": 9.919210047805791e-06, | |
| "loss": 0.7576, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.07124254389649669, | |
| "grad_norm": 1.4453125, | |
| "learning_rate": 9.915328131881745e-06, | |
| "loss": 0.7847, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.07258674283794002, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 9.911355925127433e-06, | |
| "loss": 0.7728, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.07393094177938335, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 9.907293500509268e-06, | |
| "loss": 0.7947, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.07527514072082668, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.903140932650891e-06, | |
| "loss": 0.7503, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.07661933966227001, | |
| "grad_norm": 1.3671875, | |
| "learning_rate": 9.898898297831808e-06, | |
| "loss": 0.7519, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.07796353860371336, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 9.894565673985986e-06, | |
| "loss": 0.7438, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.07930773754515669, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 9.890143140700419e-06, | |
| "loss": 0.7635, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.08065193648660002, | |
| "grad_norm": 1.375, | |
| "learning_rate": 9.885630779213678e-06, | |
| "loss": 0.7176, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.08199613542804335, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 9.881028672414397e-06, | |
| "loss": 0.7536, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.08334033436948668, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 9.876336904839772e-06, | |
| "loss": 0.7572, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.08468453331093002, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 9.871555562673996e-06, | |
| "loss": 0.7767, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.08602873225237335, | |
| "grad_norm": 1.4921875, | |
| "learning_rate": 9.86668473374668e-06, | |
| "loss": 0.763, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.08737293119381669, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 9.861724507531234e-06, | |
| "loss": 0.7576, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.08871713013526002, | |
| "grad_norm": 1.421875, | |
| "learning_rate": 9.856674975143237e-06, | |
| "loss": 0.7342, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.09006132907670335, | |
| "grad_norm": 1.4453125, | |
| "learning_rate": 9.851536229338747e-06, | |
| "loss": 0.7487, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.09140552801814669, | |
| "grad_norm": 1.390625, | |
| "learning_rate": 9.846308364512607e-06, | |
| "loss": 0.7604, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.09274972695959002, | |
| "grad_norm": 1.5078125, | |
| "learning_rate": 9.840991476696707e-06, | |
| "loss": 0.7537, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.09409392590103335, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 9.835585663558221e-06, | |
| "loss": 0.7608, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.09543812484247668, | |
| "grad_norm": 1.4140625, | |
| "learning_rate": 9.830091024397818e-06, | |
| "loss": 0.7298, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.09678232378392002, | |
| "grad_norm": 1.4140625, | |
| "learning_rate": 9.824507660147831e-06, | |
| "loss": 0.7646, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.09812652272536336, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 9.8188356733704e-06, | |
| "loss": 0.7679, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.09947072166680669, | |
| "grad_norm": 1.4765625, | |
| "learning_rate": 9.813075168255601e-06, | |
| "loss": 0.7263, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.10081492060825002, | |
| "grad_norm": 1.484375, | |
| "learning_rate": 9.807226250619522e-06, | |
| "loss": 0.7589, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.10215911954969335, | |
| "grad_norm": 1.515625, | |
| "learning_rate": 9.801289027902316e-06, | |
| "loss": 0.7216, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.10350331849113668, | |
| "grad_norm": 1.4765625, | |
| "learning_rate": 9.795263609166243e-06, | |
| "loss": 0.7779, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.10484751743258003, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 9.789150105093647e-06, | |
| "loss": 0.6941, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.10619171637402336, | |
| "grad_norm": 1.46875, | |
| "learning_rate": 9.78294862798494e-06, | |
| "loss": 0.7362, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.10753591531546669, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 9.776659291756528e-06, | |
| "loss": 0.7103, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.10888011425691002, | |
| "grad_norm": 1.359375, | |
| "learning_rate": 9.77028221193872e-06, | |
| "loss": 0.6866, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.11022431319835335, | |
| "grad_norm": 1.53125, | |
| "learning_rate": 9.763817505673614e-06, | |
| "loss": 0.7517, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.1115685121397967, | |
| "grad_norm": 1.453125, | |
| "learning_rate": 9.75726529171293e-06, | |
| "loss": 0.7508, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.11291271108124003, | |
| "grad_norm": 1.5, | |
| "learning_rate": 9.750625690415848e-06, | |
| "loss": 0.7275, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.11425691002268336, | |
| "grad_norm": 1.484375, | |
| "learning_rate": 9.74389882374678e-06, | |
| "loss": 0.6994, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.11560110896412669, | |
| "grad_norm": 1.4453125, | |
| "learning_rate": 9.737084815273137e-06, | |
| "loss": 0.7365, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.11694530790557002, | |
| "grad_norm": 1.5078125, | |
| "learning_rate": 9.730183790163061e-06, | |
| "loss": 0.7294, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.11828950684701336, | |
| "grad_norm": 1.375, | |
| "learning_rate": 9.72319587518312e-06, | |
| "loss": 0.7226, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.1196337057884567, | |
| "grad_norm": 1.390625, | |
| "learning_rate": 9.716121198695987e-06, | |
| "loss": 0.7126, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.12097790472990003, | |
| "grad_norm": 1.484375, | |
| "learning_rate": 9.708959890658074e-06, | |
| "loss": 0.7118, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.12232210367134336, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.70171208261715e-06, | |
| "loss": 0.7164, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.12366630261278669, | |
| "grad_norm": 1.4765625, | |
| "learning_rate": 9.69437790770992e-06, | |
| "loss": 0.7015, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.12501050155423002, | |
| "grad_norm": 1.3828125, | |
| "learning_rate": 9.68695750065959e-06, | |
| "loss": 0.7212, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.12635470049567335, | |
| "grad_norm": 1.4140625, | |
| "learning_rate": 9.679450997773378e-06, | |
| "loss": 0.7301, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.12769889943711668, | |
| "grad_norm": 1.53125, | |
| "learning_rate": 9.67185853694002e-06, | |
| "loss": 0.7434, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.12904309837856004, | |
| "grad_norm": 1.3828125, | |
| "learning_rate": 9.664180257627231e-06, | |
| "loss": 0.7503, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.13038729732000337, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 9.656416300879147e-06, | |
| "loss": 0.704, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.1317314962614467, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.648566809313738e-06, | |
| "loss": 0.7091, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.13307569520289003, | |
| "grad_norm": 1.25, | |
| "learning_rate": 9.640631927120177e-06, | |
| "loss": 0.6939, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.13441989414433336, | |
| "grad_norm": 1.25, | |
| "learning_rate": 9.632611800056202e-06, | |
| "loss": 0.6645, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.1357640930857767, | |
| "grad_norm": 1.3671875, | |
| "learning_rate": 9.62450657544543e-06, | |
| "loss": 0.7314, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.13710829202722002, | |
| "grad_norm": 1.359375, | |
| "learning_rate": 9.616316402174657e-06, | |
| "loss": 0.7022, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.13845249096866336, | |
| "grad_norm": 1.390625, | |
| "learning_rate": 9.608041430691126e-06, | |
| "loss": 0.7015, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.1397966899101067, | |
| "grad_norm": 1.3046875, | |
| "learning_rate": 9.59968181299975e-06, | |
| "loss": 0.6831, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.14114088885155002, | |
| "grad_norm": 1.28125, | |
| "learning_rate": 9.591237702660335e-06, | |
| "loss": 0.6903, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.14248508779299338, | |
| "grad_norm": 1.2421875, | |
| "learning_rate": 9.58270925478475e-06, | |
| "loss": 0.7236, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.1438292867344367, | |
| "grad_norm": 1.3125, | |
| "learning_rate": 9.574096626034077e-06, | |
| "loss": 0.7375, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.14517348567588004, | |
| "grad_norm": 1.2421875, | |
| "learning_rate": 9.565399974615744e-06, | |
| "loss": 0.7051, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.14651768461732337, | |
| "grad_norm": 1.3515625, | |
| "learning_rate": 9.556619460280605e-06, | |
| "loss": 0.6961, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.1478618835587667, | |
| "grad_norm": 1.2890625, | |
| "learning_rate": 9.547755244320013e-06, | |
| "loss": 0.731, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.14920608250021003, | |
| "grad_norm": 1.3125, | |
| "learning_rate": 9.53880748956286e-06, | |
| "loss": 0.6992, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.15055028144165336, | |
| "grad_norm": 1.2578125, | |
| "learning_rate": 9.529776360372576e-06, | |
| "loss": 0.6954, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.1518944803830967, | |
| "grad_norm": 1.140625, | |
| "learning_rate": 9.52066202264412e-06, | |
| "loss": 0.6932, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.15323867932454002, | |
| "grad_norm": 1.1875, | |
| "learning_rate": 9.511464643800926e-06, | |
| "loss": 0.7117, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.15458287826598335, | |
| "grad_norm": 1.1875, | |
| "learning_rate": 9.502184392791834e-06, | |
| "loss": 0.6992, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.1559270772074267, | |
| "grad_norm": 1.09375, | |
| "learning_rate": 9.492821440087978e-06, | |
| "loss": 0.6744, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.15727127614887004, | |
| "grad_norm": 1.1796875, | |
| "learning_rate": 9.48337595767966e-06, | |
| "loss": 0.6553, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.15861547509031337, | |
| "grad_norm": 1.1484375, | |
| "learning_rate": 9.473848119073188e-06, | |
| "loss": 0.7206, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.1599596740317567, | |
| "grad_norm": 1.125, | |
| "learning_rate": 9.4642380992877e-06, | |
| "loss": 0.7014, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.16130387297320004, | |
| "grad_norm": 1.1171875, | |
| "learning_rate": 9.454546074851927e-06, | |
| "loss": 0.7249, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.16264807191464337, | |
| "grad_norm": 1.140625, | |
| "learning_rate": 9.444772223800972e-06, | |
| "loss": 0.7142, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.1639922708560867, | |
| "grad_norm": 1.0390625, | |
| "learning_rate": 9.434916725673023e-06, | |
| "loss": 0.6778, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.16533646979753003, | |
| "grad_norm": 1.0390625, | |
| "learning_rate": 9.42497976150607e-06, | |
| "loss": 0.6831, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.16668066873897336, | |
| "grad_norm": 1.0546875, | |
| "learning_rate": 9.414961513834569e-06, | |
| "loss": 0.6744, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.1680248676804167, | |
| "grad_norm": 1.1015625, | |
| "learning_rate": 9.404862166686089e-06, | |
| "loss": 0.7059, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.16936906662186005, | |
| "grad_norm": 1.0390625, | |
| "learning_rate": 9.394681905577938e-06, | |
| "loss": 0.7245, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.17071326556330338, | |
| "grad_norm": 1.0390625, | |
| "learning_rate": 9.384420917513752e-06, | |
| "loss": 0.7064, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.1720574645047467, | |
| "grad_norm": 1.0, | |
| "learning_rate": 9.374079390980058e-06, | |
| "loss": 0.7177, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.17340166344619004, | |
| "grad_norm": 1.0078125, | |
| "learning_rate": 9.363657515942814e-06, | |
| "loss": 0.7031, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.17474586238763337, | |
| "grad_norm": 0.96875, | |
| "learning_rate": 9.35315548384392e-06, | |
| "loss": 0.6752, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.1760900613290767, | |
| "grad_norm": 1.0, | |
| "learning_rate": 9.342573487597696e-06, | |
| "loss": 0.7189, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.17743426027052003, | |
| "grad_norm": 1.0234375, | |
| "learning_rate": 9.331911721587345e-06, | |
| "loss": 0.6873, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.17877845921196336, | |
| "grad_norm": 1.0, | |
| "learning_rate": 9.321170381661383e-06, | |
| "loss": 0.7056, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.1801226581534067, | |
| "grad_norm": 0.984375, | |
| "learning_rate": 9.310349665130035e-06, | |
| "loss": 0.7024, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.18146685709485003, | |
| "grad_norm": 1.015625, | |
| "learning_rate": 9.299449770761612e-06, | |
| "loss": 0.708, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.18281105603629338, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 9.288470898778863e-06, | |
| "loss": 0.6624, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.18415525497773672, | |
| "grad_norm": 0.984375, | |
| "learning_rate": 9.277413250855296e-06, | |
| "loss": 0.6622, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.18549945391918005, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 9.266277030111474e-06, | |
| "loss": 0.7074, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.18684365286062338, | |
| "grad_norm": 0.97265625, | |
| "learning_rate": 9.25506244111128e-06, | |
| "loss": 0.6745, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.1881878518020667, | |
| "grad_norm": 0.93359375, | |
| "learning_rate": 9.243769689858167e-06, | |
| "loss": 0.6627, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.18953205074351004, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 9.232398983791363e-06, | |
| "loss": 0.6563, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.19087624968495337, | |
| "grad_norm": 1.0234375, | |
| "learning_rate": 9.220950531782069e-06, | |
| "loss": 0.688, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.1922204486263967, | |
| "grad_norm": 1.046875, | |
| "learning_rate": 9.209424544129621e-06, | |
| "loss": 0.7021, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.19356464756784003, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 9.197821232557625e-06, | |
| "loss": 0.6826, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.19490884650928336, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 9.186140810210066e-06, | |
| "loss": 0.6904, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.19625304545072672, | |
| "grad_norm": 1.0, | |
| "learning_rate": 9.1743834916474e-06, | |
| "loss": 0.6747, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.19759724439217005, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 9.162549492842603e-06, | |
| "loss": 0.7073, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.19894144333361338, | |
| "grad_norm": 0.9765625, | |
| "learning_rate": 9.150639031177211e-06, | |
| "loss": 0.6773, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.2002856422750567, | |
| "grad_norm": 0.94921875, | |
| "learning_rate": 9.138652325437326e-06, | |
| "loss": 0.689, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.20162984121650004, | |
| "grad_norm": 1.015625, | |
| "learning_rate": 9.12658959580959e-06, | |
| "loss": 0.6874, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.20297404015794338, | |
| "grad_norm": 0.90234375, | |
| "learning_rate": 9.114451063877152e-06, | |
| "loss": 0.6617, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.2043182390993867, | |
| "grad_norm": 1.0, | |
| "learning_rate": 9.102236952615588e-06, | |
| "loss": 0.6882, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.20566243804083004, | |
| "grad_norm": 1.03125, | |
| "learning_rate": 9.08994748638881e-06, | |
| "loss": 0.6919, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.20700663698227337, | |
| "grad_norm": 0.97265625, | |
| "learning_rate": 9.077582890944945e-06, | |
| "loss": 0.6713, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.2083508359237167, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 9.065143393412179e-06, | |
| "loss": 0.6504, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.20969503486516006, | |
| "grad_norm": 0.98046875, | |
| "learning_rate": 9.052629222294605e-06, | |
| "loss": 0.6674, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.2110392338066034, | |
| "grad_norm": 0.9375, | |
| "learning_rate": 9.040040607467999e-06, | |
| "loss": 0.7111, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.21238343274804672, | |
| "grad_norm": 1.03125, | |
| "learning_rate": 9.02737778017562e-06, | |
| "loss": 0.6596, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.21372763168949005, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 9.014640973023951e-06, | |
| "loss": 0.6846, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.21507183063093338, | |
| "grad_norm": 0.94921875, | |
| "learning_rate": 9.00183041997843e-06, | |
| "loss": 0.6788, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.2164160295723767, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 8.988946356359147e-06, | |
| "loss": 0.6888, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.21776022851382004, | |
| "grad_norm": 0.92578125, | |
| "learning_rate": 8.97598901883653e-06, | |
| "loss": 0.6854, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.21910442745526337, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 8.962958645426989e-06, | |
| "loss": 0.7137, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.2204486263967067, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 8.949855475488549e-06, | |
| "loss": 0.7041, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.22179282533815003, | |
| "grad_norm": 0.98046875, | |
| "learning_rate": 8.936679749716452e-06, | |
| "loss": 0.6573, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.2231370242795934, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 8.923431710138735e-06, | |
| "loss": 0.6694, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.22448122322103672, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 8.910111600111786e-06, | |
| "loss": 0.6904, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.22582542216248006, | |
| "grad_norm": 0.953125, | |
| "learning_rate": 8.896719664315866e-06, | |
| "loss": 0.679, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.22716962110392339, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 8.883256148750634e-06, | |
| "loss": 0.6631, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.22851382004536672, | |
| "grad_norm": 0.9765625, | |
| "learning_rate": 8.869721300730596e-06, | |
| "loss": 0.6576, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.22985801898681005, | |
| "grad_norm": 0.9296875, | |
| "learning_rate": 8.856115368880598e-06, | |
| "loss": 0.6714, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.23120221792825338, | |
| "grad_norm": 0.97265625, | |
| "learning_rate": 8.842438603131232e-06, | |
| "loss": 0.6572, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.2325464168696967, | |
| "grad_norm": 0.91796875, | |
| "learning_rate": 8.828691254714259e-06, | |
| "loss": 0.6798, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.23389061581114004, | |
| "grad_norm": 0.90234375, | |
| "learning_rate": 8.814873576157988e-06, | |
| "loss": 0.6934, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.23523481475258337, | |
| "grad_norm": 1.0390625, | |
| "learning_rate": 8.800985821282637e-06, | |
| "loss": 0.6648, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.23657901369402673, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 8.787028245195676e-06, | |
| "loss": 0.6705, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.23792321263547006, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 8.773001104287137e-06, | |
| "loss": 0.6661, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.2392674115769134, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 8.758904656224904e-06, | |
| "loss": 0.6968, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.24061161051835672, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 8.744739159949981e-06, | |
| "loss": 0.6817, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.24195580945980005, | |
| "grad_norm": 0.9296875, | |
| "learning_rate": 8.730504875671732e-06, | |
| "loss": 0.6704, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.24330000840124338, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 8.71620206486311e-06, | |
| "loss": 0.6484, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.24464420734268671, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 8.701830990255843e-06, | |
| "loss": 0.6711, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.24598840628413005, | |
| "grad_norm": 1.0234375, | |
| "learning_rate": 8.687391915835617e-06, | |
| "loss": 0.6526, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.24733260522557338, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 8.672885106837216e-06, | |
| "loss": 0.6907, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.2486768041670167, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 8.658310829739666e-06, | |
| "loss": 0.7003, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.25002100310846004, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 8.643669352261321e-06, | |
| "loss": 0.6353, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.25136520204990337, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 8.628960943354965e-06, | |
| "loss": 0.6566, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.2527094009913467, | |
| "grad_norm": 0.92578125, | |
| "learning_rate": 8.614185873202852e-06, | |
| "loss": 0.6676, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.25405359993279003, | |
| "grad_norm": 0.953125, | |
| "learning_rate": 8.599344413211755e-06, | |
| "loss": 0.6647, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.25539779887423336, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 8.58443683600798e-06, | |
| "loss": 0.6771, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.2567419978156767, | |
| "grad_norm": 0.90234375, | |
| "learning_rate": 8.569463415432356e-06, | |
| "loss": 0.6629, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.2580861967571201, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 8.554424426535202e-06, | |
| "loss": 0.7327, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.2594303956985634, | |
| "grad_norm": 0.94921875, | |
| "learning_rate": 8.539320145571277e-06, | |
| "loss": 0.6809, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.26077459464000674, | |
| "grad_norm": 0.875, | |
| "learning_rate": 8.524150849994708e-06, | |
| "loss": 0.6895, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.26211879358145007, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 8.50891681845389e-06, | |
| "loss": 0.6739, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.2634629925228934, | |
| "grad_norm": 0.96875, | |
| "learning_rate": 8.493618330786365e-06, | |
| "loss": 0.7063, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.26480719146433673, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 8.47825566801369e-06, | |
| "loss": 0.6676, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.26615139040578006, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 8.462829112336266e-06, | |
| "loss": 0.6842, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.2674955893472234, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 8.44733894712816e-06, | |
| "loss": 0.7045, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.2688397882886667, | |
| "grad_norm": 1.015625, | |
| "learning_rate": 8.431785456931898e-06, | |
| "loss": 0.6569, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.27018398723011006, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 8.416168927453237e-06, | |
| "loss": 0.679, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.2715281861715534, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 8.400489645555914e-06, | |
| "loss": 0.6452, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.2728723851129967, | |
| "grad_norm": 0.9296875, | |
| "learning_rate": 8.384747899256386e-06, | |
| "loss": 0.6744, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.27421658405444005, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 8.368943977718528e-06, | |
| "loss": 0.6501, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.2755607829958834, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 8.353078171248335e-06, | |
| "loss": 0.666, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.2769049819373267, | |
| "grad_norm": 0.87109375, | |
| "learning_rate": 8.337150771288571e-06, | |
| "loss": 0.6663, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.27824918087877004, | |
| "grad_norm": 0.9296875, | |
| "learning_rate": 8.32116207041343e-06, | |
| "loss": 0.6481, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.2795933798202134, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 8.30511236232316e-06, | |
| "loss": 0.6884, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.2809375787616567, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 8.289001941838659e-06, | |
| "loss": 0.6562, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.28228177770310003, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 8.27283110489607e-06, | |
| "loss": 0.7106, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.28362597664454336, | |
| "grad_norm": 0.98046875, | |
| "learning_rate": 8.256600148541339e-06, | |
| "loss": 0.689, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.28497017558598675, | |
| "grad_norm": 0.96484375, | |
| "learning_rate": 8.240309370924758e-06, | |
| "loss": 0.6683, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.2863143745274301, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 8.223959071295492e-06, | |
| "loss": 0.6866, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.2876585734688734, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 8.207549549996083e-06, | |
| "loss": 0.6688, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.28900277241031674, | |
| "grad_norm": 0.91796875, | |
| "learning_rate": 8.191081108456922e-06, | |
| "loss": 0.6721, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.2903469713517601, | |
| "grad_norm": 0.89453125, | |
| "learning_rate": 8.174554049190726e-06, | |
| "loss": 0.6579, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.2916911702932034, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 8.157968675786971e-06, | |
| "loss": 0.6841, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.29303536923464674, | |
| "grad_norm": 0.93359375, | |
| "learning_rate": 8.141325292906325e-06, | |
| "loss": 0.6739, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.29437956817609007, | |
| "grad_norm": 0.875, | |
| "learning_rate": 8.124624206275041e-06, | |
| "loss": 0.6544, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.2957237671175334, | |
| "grad_norm": 0.97265625, | |
| "learning_rate": 8.107865722679347e-06, | |
| "loss": 0.6525, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.29706796605897673, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 8.091050149959808e-06, | |
| "loss": 0.6394, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.29841216500042006, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 8.074177797005677e-06, | |
| "loss": 0.6674, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.2997563639418634, | |
| "grad_norm": 0.984375, | |
| "learning_rate": 8.057248973749216e-06, | |
| "loss": 0.6749, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.3011005628833067, | |
| "grad_norm": 1.0234375, | |
| "learning_rate": 8.040263991159996e-06, | |
| "loss": 0.7223, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.30244476182475005, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 8.0232231612392e-06, | |
| "loss": 0.6861, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.3037889607661934, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 8.006126797013884e-06, | |
| "loss": 0.6883, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.3051331597076367, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 7.98897521253122e-06, | |
| "loss": 0.655, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.30647735864908004, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 7.971768722852741e-06, | |
| "loss": 0.6696, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.3078215575905234, | |
| "grad_norm": 0.875, | |
| "learning_rate": 7.954507644048544e-06, | |
| "loss": 0.6676, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.3091657565319667, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 7.937192293191485e-06, | |
| "loss": 0.6508, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.31050995547341004, | |
| "grad_norm": 0.9375, | |
| "learning_rate": 7.919822988351359e-06, | |
| "loss": 0.6676, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.3118541544148534, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 7.902400048589051e-06, | |
| "loss": 0.6512, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.31319835335629675, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 7.884923793950684e-06, | |
| "loss": 0.6846, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.3145425522977401, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 7.86739454546173e-06, | |
| "loss": 0.6644, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.3158867512391834, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 7.849812625121122e-06, | |
| "loss": 0.6701, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.31723095018062675, | |
| "grad_norm": 0.94921875, | |
| "learning_rate": 7.832178355895327e-06, | |
| "loss": 0.6749, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.3185751491220701, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 7.81449206171243e-06, | |
| "loss": 0.6584, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.3199193480635134, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 7.796754067456168e-06, | |
| "loss": 0.6685, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.32126354700495674, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 7.778964698959973e-06, | |
| "loss": 0.6659, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.32260774594640007, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 7.761124283000982e-06, | |
| "loss": 0.6647, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.3239519448878434, | |
| "grad_norm": 0.9140625, | |
| "learning_rate": 7.743233147294036e-06, | |
| "loss": 0.6793, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.32529614382928673, | |
| "grad_norm": 0.87109375, | |
| "learning_rate": 7.725291620485653e-06, | |
| "loss": 0.6633, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.32664034277073006, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 7.707300032148004e-06, | |
| "loss": 0.6812, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.3279845417121734, | |
| "grad_norm": 0.88671875, | |
| "learning_rate": 7.689258712772851e-06, | |
| "loss": 0.6734, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.3293287406536167, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 7.671167993765474e-06, | |
| "loss": 0.6775, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.33067293959506006, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 7.653028207438588e-06, | |
| "loss": 0.6723, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.3320171385365034, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 7.634839687006242e-06, | |
| "loss": 0.6814, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.3333613374779467, | |
| "grad_norm": 0.984375, | |
| "learning_rate": 7.616602766577683e-06, | |
| "loss": 0.6816, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.3333613374779467, | |
| "eval_loss": 2.7341208457946777, | |
| "eval_runtime": 5734.9058, | |
| "eval_samples_per_second": 3.861, | |
| "eval_steps_per_second": 0.483, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.33470553641939005, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 7.59831778115124e-06, | |
| "loss": 0.6496, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.3360497353608334, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 7.579985066608153e-06, | |
| "loss": 0.6608, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.3373939343022767, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 7.56160495970641e-06, | |
| "loss": 0.6673, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.3387381332437201, | |
| "grad_norm": 0.8984375, | |
| "learning_rate": 7.543177798074564e-06, | |
| "loss": 0.6474, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.3400823321851634, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 7.524703920205521e-06, | |
| "loss": 0.6679, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.34142653112660676, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 7.506183665450336e-06, | |
| "loss": 0.6638, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.3427707300680501, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 7.487617374011968e-06, | |
| "loss": 0.6451, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.3441149290094934, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 7.469005386939036e-06, | |
| "loss": 0.656, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.34545912795093675, | |
| "grad_norm": 0.92578125, | |
| "learning_rate": 7.450348046119551e-06, | |
| "loss": 0.6103, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.3468033268923801, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 7.43164569427464e-06, | |
| "loss": 0.6534, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.3481475258338234, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 7.4128986749522495e-06, | |
| "loss": 0.647, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.34949172477526674, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 7.394107332520828e-06, | |
| "loss": 0.6478, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.3508359237167101, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 7.375272012163011e-06, | |
| "loss": 0.657, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.3521801226581534, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 7.3563930598692725e-06, | |
| "loss": 0.6087, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.35352432159959674, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 7.3374708224315725e-06, | |
| "loss": 0.6572, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.35486852054104007, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 7.318505647436986e-06, | |
| "loss": 0.636, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.3562127194824834, | |
| "grad_norm": 0.875, | |
| "learning_rate": 7.299497883261319e-06, | |
| "loss": 0.6426, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.35755691842392673, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 7.280447879062711e-06, | |
| "loss": 0.6429, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.35890111736537006, | |
| "grad_norm": 0.91796875, | |
| "learning_rate": 7.261355984775208e-06, | |
| "loss": 0.6658, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.3602453163068134, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 7.2422225511023555e-06, | |
| "loss": 0.6765, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.3615895152482567, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 7.223047929510744e-06, | |
| "loss": 0.6313, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.36293371418970005, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 7.203832472223551e-06, | |
| "loss": 0.6479, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.3642779131311434, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 7.184576532214077e-06, | |
| "loss": 0.6491, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.36562211207258677, | |
| "grad_norm": 0.97265625, | |
| "learning_rate": 7.16528046319926e-06, | |
| "loss": 0.7071, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.3669663110140301, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 7.145944619633176e-06, | |
| "loss": 0.6588, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.36831050995547343, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 7.126569356700529e-06, | |
| "loss": 0.6662, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.36965470889691676, | |
| "grad_norm": 0.87109375, | |
| "learning_rate": 7.107155030310127e-06, | |
| "loss": 0.6488, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.3709989078383601, | |
| "grad_norm": 0.89453125, | |
| "learning_rate": 7.087701997088344e-06, | |
| "loss": 0.677, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.3723431067798034, | |
| "grad_norm": 0.875, | |
| "learning_rate": 7.068210614372568e-06, | |
| "loss": 0.6609, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.37368730572124675, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 7.048681240204641e-06, | |
| "loss": 0.6594, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.3750315046626901, | |
| "grad_norm": 0.875, | |
| "learning_rate": 7.029114233324277e-06, | |
| "loss": 0.6553, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.3763757036041334, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 7.0095099531624715e-06, | |
| "loss": 0.6669, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.37771990254557675, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 6.989868759834908e-06, | |
| "loss": 0.6445, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.3790641014870201, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 6.970191014135331e-06, | |
| "loss": 0.6635, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.3804083004284634, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 6.950477077528927e-06, | |
| "loss": 0.7111, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.38175249936990674, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 6.93072731214568e-06, | |
| "loss": 0.6149, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.38309669831135007, | |
| "grad_norm": 0.88671875, | |
| "learning_rate": 6.910942080773725e-06, | |
| "loss": 0.6709, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.3844408972527934, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 6.891121746852675e-06, | |
| "loss": 0.6614, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.38578509619423673, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 6.8712666744669555e-06, | |
| "loss": 0.6694, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.38712929513568006, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 6.851377228339106e-06, | |
| "loss": 0.6441, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.3884734940771234, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 6.831453773823091e-06, | |
| "loss": 0.6131, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.3898176930185667, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 6.811496676897577e-06, | |
| "loss": 0.6441, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.39116189196001006, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 6.791506304159221e-06, | |
| "loss": 0.6395, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.39250609090145344, | |
| "grad_norm": 0.88671875, | |
| "learning_rate": 6.771483022815926e-06, | |
| "loss": 0.6647, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.3938502898428968, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 6.751427200680109e-06, | |
| "loss": 0.6705, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.3951944887843401, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 6.731339206161928e-06, | |
| "loss": 0.6415, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.39653868772578343, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 6.711219408262527e-06, | |
| "loss": 0.6557, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.39788288666722677, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 6.691068176567257e-06, | |
| "loss": 0.6598, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.3992270856086701, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 6.6708858812388776e-06, | |
| "loss": 0.6459, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.4005712845501134, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 6.650672893010769e-06, | |
| "loss": 0.6435, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.40191548349155676, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 6.630429583180113e-06, | |
| "loss": 0.6317, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.4032596824330001, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 6.610156323601076e-06, | |
| "loss": 0.6647, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.4046038813744434, | |
| "grad_norm": 0.91796875, | |
| "learning_rate": 6.5898534866779815e-06, | |
| "loss": 0.6754, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.40594808031588675, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 6.5695214453584645e-06, | |
| "loss": 0.6588, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.4072922792573301, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 6.549160573126623e-06, | |
| "loss": 0.6554, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.4086364781987734, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 6.528771243996157e-06, | |
| "loss": 0.6297, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.40998067714021674, | |
| "grad_norm": 0.953125, | |
| "learning_rate": 6.508353832503495e-06, | |
| "loss": 0.6734, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.4113248760816601, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 6.48790871370092e-06, | |
| "loss": 0.6298, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.4126690750231034, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 6.467436263149679e-06, | |
| "loss": 0.6294, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.41401327396454674, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 6.4469368569130786e-06, | |
| "loss": 0.6473, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.41535747290599007, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 6.426410871549582e-06, | |
| "loss": 0.651, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.4167016718474334, | |
| "grad_norm": 0.87890625, | |
| "learning_rate": 6.405858684105892e-06, | |
| "loss": 0.6939, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.41804587078887673, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 6.3852806721100235e-06, | |
| "loss": 0.6567, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.4193900697303201, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 6.364677213564365e-06, | |
| "loss": 0.6303, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.42073426867176345, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 6.344048686938745e-06, | |
| "loss": 0.6575, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.4220784676132068, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 6.323395471163467e-06, | |
| "loss": 0.6529, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.4234226665546501, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 6.30271794562236e-06, | |
| "loss": 0.6593, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.42476686549609344, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 6.282016490145804e-06, | |
| "loss": 0.647, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.42611106443753677, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 6.261291485003751e-06, | |
| "loss": 0.6456, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.4274552633789801, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 6.2405433108987456e-06, | |
| "loss": 0.6761, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.42879946232042343, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 6.219772348958928e-06, | |
| "loss": 0.6518, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.43014366126186676, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 6.198978980731034e-06, | |
| "loss": 0.6249, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.4314878602033101, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 6.17816358817338e-06, | |
| "loss": 0.6582, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.4328320591447534, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 6.157326553648862e-06, | |
| "loss": 0.6616, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.43417625808619675, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 6.136468259917917e-06, | |
| "loss": 0.6463, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.4355204570276401, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 6.115589090131496e-06, | |
| "loss": 0.6469, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.4368646559690834, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 6.094689427824031e-06, | |
| "loss": 0.6575, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.43820885491052675, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 6.073769656906386e-06, | |
| "loss": 0.6369, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.4395530538519701, | |
| "grad_norm": 0.8828125, | |
| "learning_rate": 6.052830161658799e-06, | |
| "loss": 0.6787, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.4408972527934134, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 6.031871326723837e-06, | |
| "loss": 0.6556, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.44224145173485674, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 6.010893537099316e-06, | |
| "loss": 0.6669, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.44358565067630007, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 5.989897178131239e-06, | |
| "loss": 0.6495, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.4449298496177434, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 5.968882635506711e-06, | |
| "loss": 0.6547, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.4462740485591868, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 5.947850295246859e-06, | |
| "loss": 0.6327, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.4476182475006301, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 5.92680054369974e-06, | |
| "loss": 0.6238, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.44896244644207345, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.905733767533238e-06, | |
| "loss": 0.6675, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.4503066453835168, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 5.884650353727972e-06, | |
| "loss": 0.676, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.4516508443249601, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 5.863550689570179e-06, | |
| "loss": 0.6651, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.45299504326640344, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.842435162644602e-06, | |
| "loss": 0.6574, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.45433924220784677, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 5.821304160827371e-06, | |
| "loss": 0.6397, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.4556834411492901, | |
| "grad_norm": 0.8671875, | |
| "learning_rate": 5.80015807227888e-06, | |
| "loss": 0.6435, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.45702764009073343, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 5.778997285436654e-06, | |
| "loss": 0.6343, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.45837183903217676, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 5.757822189008215e-06, | |
| "loss": 0.6599, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.4597160379736201, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 5.736633171963937e-06, | |
| "loss": 0.6413, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.4610602369150634, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.71543062352991e-06, | |
| "loss": 0.6395, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.46240443585650676, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 5.694214933180784e-06, | |
| "loss": 0.6258, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.4637486347979501, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 5.672986490632614e-06, | |
| "loss": 0.6446, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.4650928337393934, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.6517456858357065e-06, | |
| "loss": 0.6337, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.46643703268083675, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 5.630492908967451e-06, | |
| "loss": 0.6534, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.4677812316222801, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 5.609228550425154e-06, | |
| "loss": 0.6646, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.4691254305637234, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.587953000818872e-06, | |
| "loss": 0.6372, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.47046962950516674, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.566666650964229e-06, | |
| "loss": 0.6562, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.4718138284466101, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.5453698918752404e-06, | |
| "loss": 0.6673, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.47315802738805346, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.524063114757139e-06, | |
| "loss": 0.6604, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.4745022263294968, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 5.50274671099917e-06, | |
| "loss": 0.6768, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.4758464252709401, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 5.481421072167424e-06, | |
| "loss": 0.6106, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.47719062421238345, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 5.460086589997623e-06, | |
| "loss": 0.6522, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.4785348231538268, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 5.43874365638794e-06, | |
| "loss": 0.6482, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.4798790220952701, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 5.417392663391796e-06, | |
| "loss": 0.6525, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.48122322103671344, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.3960340032106515e-06, | |
| "loss": 0.6455, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.4825674199781568, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 5.374668068186809e-06, | |
| "loss": 0.6325, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.4839116189196001, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.353295250796206e-06, | |
| "loss": 0.6651, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.48525581786104344, | |
| "grad_norm": 0.87109375, | |
| "learning_rate": 5.331915943641205e-06, | |
| "loss": 0.6724, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.48660001680248677, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 5.3105305394433745e-06, | |
| "loss": 0.6722, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.4879442157439301, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 5.28913943103629e-06, | |
| "loss": 0.6086, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.48928841468537343, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 5.267743011358301e-06, | |
| "loss": 0.636, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.49063261362681676, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 5.246341673445323e-06, | |
| "loss": 0.6824, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.4919768125682601, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 5.2249358104236205e-06, | |
| "loss": 0.6846, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.4933210115097034, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 5.203525815502574e-06, | |
| "loss": 0.6837, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.49466521045114675, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 5.182112081967467e-06, | |
| "loss": 0.6567, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.4960094093925901, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 5.160695003172259e-06, | |
| "loss": 0.6655, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.4973536083340334, | |
| "grad_norm": 0.87109375, | |
| "learning_rate": 5.13927497253236e-06, | |
| "loss": 0.6424, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.49869780727547675, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 5.1178523835174e-06, | |
| "loss": 0.6536, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.5000420062169201, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 5.096427629644007e-06, | |
| "loss": 0.6441, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5013862051583634, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.075001104468576e-06, | |
| "loss": 0.6562, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5027304040998067, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 5.053573201580039e-06, | |
| "loss": 0.6624, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5040746030412501, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 5.032144314592633e-06, | |
| "loss": 0.6581, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5054188019826934, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 5.010714837138675e-06, | |
| "loss": 0.6804, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5067630009241367, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 4.989285162861327e-06, | |
| "loss": 0.6657, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5081071998655801, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 4.967855685407368e-06, | |
| "loss": 0.6203, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5094513988070234, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 4.946426798419963e-06, | |
| "loss": 0.6388, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5107955977484667, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.924998895531425e-06, | |
| "loss": 0.6311, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.51213979668991, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 4.903572370355993e-06, | |
| "loss": 0.6218, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5134839956313534, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 4.882147616482602e-06, | |
| "loss": 0.6695, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5148281945727968, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 4.860725027467641e-06, | |
| "loss": 0.6359, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5161723935142402, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 4.839304996827741e-06, | |
| "loss": 0.6733, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5175165924556835, | |
| "grad_norm": 0.94921875, | |
| "learning_rate": 4.817887918032535e-06, | |
| "loss": 0.6709, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5188607913971268, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 4.796474184497428e-06, | |
| "loss": 0.6456, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5202049903385702, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 4.775064189576381e-06, | |
| "loss": 0.6788, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5215491892800135, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.7536583265546775e-06, | |
| "loss": 0.6346, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.5228933882214568, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 4.732256988641701e-06, | |
| "loss": 0.6375, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.5242375871629001, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 4.71086056896371e-06, | |
| "loss": 0.6887, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.5255817861043435, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.689469460556626e-06, | |
| "loss": 0.6727, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.5269259850457868, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.668084056358796e-06, | |
| "loss": 0.6355, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.5282701839872301, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.646704749203794e-06, | |
| "loss": 0.644, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.5296143829286735, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 4.625331931813193e-06, | |
| "loss": 0.6344, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.5309585818701168, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 4.603965996789349e-06, | |
| "loss": 0.6421, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.5323027808115601, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 4.582607336608205e-06, | |
| "loss": 0.662, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.5336469797530035, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 4.561256343612061e-06, | |
| "loss": 0.6751, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.5349911786944468, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.539913410002378e-06, | |
| "loss": 0.6576, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.5363353776358901, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 4.518578927832577e-06, | |
| "loss": 0.5937, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.5376795765773335, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.497253289000831e-06, | |
| "loss": 0.6482, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.5390237755187768, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 4.475936885242863e-06, | |
| "loss": 0.646, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.5403679744602201, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 4.45463010812476e-06, | |
| "loss": 0.6614, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.5417121734016634, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 4.433333349035773e-06, | |
| "loss": 0.629, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.5430563723431068, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 4.41204699918113e-06, | |
| "loss": 0.6608, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.5444005712845501, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 4.390771449574846e-06, | |
| "loss": 0.6352, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.5457447702259934, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 4.369507091032551e-06, | |
| "loss": 0.6474, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.5470889691674368, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 4.348254314164294e-06, | |
| "loss": 0.6162, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.5484331681088801, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.327013509367386e-06, | |
| "loss": 0.6688, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.5497773670503234, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 4.305785066819218e-06, | |
| "loss": 0.6492, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.5511215659917668, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 4.284569376470091e-06, | |
| "loss": 0.6739, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.5524657649332101, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.263366828036065e-06, | |
| "loss": 0.6367, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.5538099638746534, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 4.242177810991789e-06, | |
| "loss": 0.6451, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.5551541628160968, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 4.221002714563347e-06, | |
| "loss": 0.6406, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.5564983617575401, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 4.199841927721121e-06, | |
| "loss": 0.6732, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.5578425606989834, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 4.178695839172631e-06, | |
| "loss": 0.6307, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.5591867596404267, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 4.1575648373554e-06, | |
| "loss": 0.6583, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.5605309585818701, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.136449310429822e-06, | |
| "loss": 0.6443, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.5618751575233134, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 4.115349646272029e-06, | |
| "loss": 0.6657, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.5632193564647567, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 4.094266232466763e-06, | |
| "loss": 0.6467, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.5645635554062001, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.0731994563002604e-06, | |
| "loss": 0.6445, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.5659077543476434, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 4.052149704753142e-06, | |
| "loss": 0.677, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.5672519532890867, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 4.03111736449329e-06, | |
| "loss": 0.6323, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.5685961522305302, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 4.010102821868762e-06, | |
| "loss": 0.6582, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.5699403511719735, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 3.989106462900686e-06, | |
| "loss": 0.6554, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.5712845501134168, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 3.968128673276164e-06, | |
| "loss": 0.6369, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.5726287490548602, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 3.947169838341202e-06, | |
| "loss": 0.6658, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.5739729479963035, | |
| "grad_norm": 0.85546875, | |
| "learning_rate": 3.926230343093617e-06, | |
| "loss": 0.6925, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.5753171469377468, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 3.90531057217597e-06, | |
| "loss": 0.7031, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.5766613458791902, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.8844109098685045e-06, | |
| "loss": 0.6587, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.5780055448206335, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 3.863531740082086e-06, | |
| "loss": 0.66, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.5793497437620768, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 3.8426734463511385e-06, | |
| "loss": 0.6507, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.5806939427035201, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 3.82183641182662e-06, | |
| "loss": 0.6456, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.5820381416449635, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 3.8010210192689688e-06, | |
| "loss": 0.6328, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.5833823405864068, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 3.780227651041073e-06, | |
| "loss": 0.6614, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.5847265395278501, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.7594566891012544e-06, | |
| "loss": 0.662, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.5860707384692935, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 3.738708514996251e-06, | |
| "loss": 0.6694, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.5874149374107368, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 3.7179835098541984e-06, | |
| "loss": 0.6382, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.5887591363521801, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 3.6972820543776404e-06, | |
| "loss": 0.6363, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.5901033352936235, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 3.676604528836535e-06, | |
| "loss": 0.6392, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.5914475342350668, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 3.6559513130612567e-06, | |
| "loss": 0.6573, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.5927917331765101, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 3.6353227864356354e-06, | |
| "loss": 0.6257, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.5941359321179535, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 3.614719327889978e-06, | |
| "loss": 0.6674, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.5954801310593968, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 3.5941413158941086e-06, | |
| "loss": 0.6569, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.5968243300008401, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.573589128450418e-06, | |
| "loss": 0.6259, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.5981685289422835, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 3.5530631430869235e-06, | |
| "loss": 0.6497, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.5995127278837268, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 3.5325637368503224e-06, | |
| "loss": 0.6382, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6008569268251701, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 3.5120912862990807e-06, | |
| "loss": 0.6879, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6022011257666134, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.4916461674965074e-06, | |
| "loss": 0.6606, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6035453247080568, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 3.4712287560038447e-06, | |
| "loss": 0.637, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6048895236495001, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.450839426873378e-06, | |
| "loss": 0.6468, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6062337225909434, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 3.4304785546415376e-06, | |
| "loss": 0.671, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6075779215323868, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.41014651332202e-06, | |
| "loss": 0.658, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.6089221204738301, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 3.389843676398925e-06, | |
| "loss": 0.6747, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.6102663194152734, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.3695704168198895e-06, | |
| "loss": 0.6783, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.6116105183567168, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 3.349327106989232e-06, | |
| "loss": 0.6662, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.6129547172981601, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 3.3291141187611233e-06, | |
| "loss": 0.642, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.6142989162396034, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 3.3089318234327446e-06, | |
| "loss": 0.6241, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.6156431151810468, | |
| "grad_norm": 0.75, | |
| "learning_rate": 3.2887805917374736e-06, | |
| "loss": 0.6397, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.6169873141224901, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 3.2686607938380736e-06, | |
| "loss": 0.6485, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.6183315130639334, | |
| "grad_norm": 0.9375, | |
| "learning_rate": 3.2485727993198945e-06, | |
| "loss": 0.6542, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.6196757120053767, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 3.2285169771840752e-06, | |
| "loss": 0.6461, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.6210199109468201, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 3.208493695840781e-06, | |
| "loss": 0.6646, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.6223641098882635, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 3.1885033231024253e-06, | |
| "loss": 0.6265, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.6237083088297068, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 3.1685462261769105e-06, | |
| "loss": 0.6475, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.6250525077711502, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 3.148622771660895e-06, | |
| "loss": 0.6892, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.6263967067125935, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 3.128733325533047e-06, | |
| "loss": 0.6632, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.6277409056540368, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 3.1088782531473268e-06, | |
| "loss": 0.6329, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.6290851045954802, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 3.089057919226277e-06, | |
| "loss": 0.6611, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.6304293035369235, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 3.069272687854322e-06, | |
| "loss": 0.6494, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.6317735024783668, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 3.0495229224710754e-06, | |
| "loss": 0.6206, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.6331177014198102, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 3.02980898586467e-06, | |
| "loss": 0.6415, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.6344619003612535, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 3.010131240165094e-06, | |
| "loss": 0.6442, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.6358060993026968, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 2.9904900468375298e-06, | |
| "loss": 0.6449, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.6371502982441402, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 2.9708857666757244e-06, | |
| "loss": 0.6277, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.6384944971855835, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.951318759795361e-06, | |
| "loss": 0.635, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.6398386961270268, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.9317893856274333e-06, | |
| "loss": 0.6666, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.6411828950684701, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 2.912298002911659e-06, | |
| "loss": 0.656, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.6425270940099135, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 2.892844969689876e-06, | |
| "loss": 0.6807, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.6438712929513568, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 2.8734306432994733e-06, | |
| "loss": 0.6521, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.6452154918928001, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.8540553803668252e-06, | |
| "loss": 0.6573, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.6465596908342435, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 2.834719536800742e-06, | |
| "loss": 0.6376, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.6479038897756868, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.815423467785925e-06, | |
| "loss": 0.6733, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.6492480887171301, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 2.7961675277764498e-06, | |
| "loss": 0.65, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.6505922876585735, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 2.7769520704892566e-06, | |
| "loss": 0.6501, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.6519364866000168, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 2.757777448897646e-06, | |
| "loss": 0.655, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.6532806855414601, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.7386440152247933e-06, | |
| "loss": 0.6156, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.6546248844829035, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 2.71955212093729e-06, | |
| "loss": 0.645, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.6559690834243468, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.7005021167386804e-06, | |
| "loss": 0.642, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.6573132823657901, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 2.6814943525630134e-06, | |
| "loss": 0.6601, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.6586574813072334, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 2.662529177568429e-06, | |
| "loss": 0.6241, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.6600016802486768, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 2.6436069401307287e-06, | |
| "loss": 0.6447, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.6613458791901201, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.6247279878369914e-06, | |
| "loss": 0.6519, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.6626900781315634, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 2.605892667479173e-06, | |
| "loss": 0.658, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.6640342770730068, | |
| "grad_norm": 0.98828125, | |
| "learning_rate": 2.587101325047753e-06, | |
| "loss": 0.638, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.6653784760144501, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.56835430572536e-06, | |
| "loss": 0.6535, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.6667226749558934, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 2.549651953880449e-06, | |
| "loss": 0.6543, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.6667226749558934, | |
| "eval_loss": 2.7381093502044678, | |
| "eval_runtime": 5731.7858, | |
| "eval_samples_per_second": 3.863, | |
| "eval_steps_per_second": 0.483, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.6680668738973368, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 2.5309946130609654e-06, | |
| "loss": 0.6406, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.6694110728387801, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 2.5123826259880324e-06, | |
| "loss": 0.6338, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.6707552717802234, | |
| "grad_norm": 0.7578125, | |
| "learning_rate": 2.493816334549664e-06, | |
| "loss": 0.616, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.6720994707216668, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.47529607979448e-06, | |
| "loss": 0.6364, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.6734436696631101, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 2.456822201925438e-06, | |
| "loss": 0.6448, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.6747878686045534, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.43839504029359e-06, | |
| "loss": 0.6149, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.6761320675459969, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 2.420014933391849e-06, | |
| "loss": 0.6309, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.6774762664874402, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.4016822188487604e-06, | |
| "loss": 0.6276, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.6788204654288835, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.3833972334223183e-06, | |
| "loss": 0.6373, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.6801646643703269, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 2.3651603129937597e-06, | |
| "loss": 0.6238, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.6815088633117702, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 2.3469717925614133e-06, | |
| "loss": 0.6265, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.6828530622532135, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 2.3288320062345276e-06, | |
| "loss": 0.6804, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.6841972611946568, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.310741287227152e-06, | |
| "loss": 0.6436, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.6855414601361002, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 2.2926999678519975e-06, | |
| "loss": 0.6501, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.6868856590775435, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.274708379514348e-06, | |
| "loss": 0.6544, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.6882298580189868, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.256766852705967e-06, | |
| "loss": 0.6254, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.6895740569604302, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 2.238875716999019e-06, | |
| "loss": 0.6157, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.6909182559018735, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.221035301040027e-06, | |
| "loss": 0.637, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.6922624548433168, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 2.203245932543834e-06, | |
| "loss": 0.646, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.6936066537847602, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.185507938287572e-06, | |
| "loss": 0.6492, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.6949508527262035, | |
| "grad_norm": 0.90625, | |
| "learning_rate": 2.1678216441046733e-06, | |
| "loss": 0.6379, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.6962950516676468, | |
| "grad_norm": 0.859375, | |
| "learning_rate": 2.1501873748788804e-06, | |
| "loss": 0.6383, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.6976392506090902, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.1326054545382698e-06, | |
| "loss": 0.6593, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.6989834495505335, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 2.1150762060493157e-06, | |
| "loss": 0.6555, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.7003276484919768, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 2.0975999514109503e-06, | |
| "loss": 0.6482, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.7016718474334201, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 2.080177011648645e-06, | |
| "loss": 0.6926, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.7030160463748635, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 2.0628077068085173e-06, | |
| "loss": 0.6603, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.7043602453163068, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 2.0454923559514595e-06, | |
| "loss": 0.6679, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.7057044442577501, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 2.028231277147261e-06, | |
| "loss": 0.6322, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.7070486431991935, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 2.0110247874687816e-06, | |
| "loss": 0.6446, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.7083928421406368, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.9938732029861192e-06, | |
| "loss": 0.629, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.7097370410820801, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.976776838760801e-06, | |
| "loss": 0.6586, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.7110812400235235, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 1.9597360088400053e-06, | |
| "loss": 0.6929, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.7124254389649668, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.9427510262507864e-06, | |
| "loss": 0.635, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.7137696379064101, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 1.9258222029943228e-06, | |
| "loss": 0.653, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.7151138368478535, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.9089498500401915e-06, | |
| "loss": 0.6295, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.7164580357892968, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.8921342773206553e-06, | |
| "loss": 0.6375, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.7178022347307401, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.87537579372496e-06, | |
| "loss": 0.6327, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.7191464336721834, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.858674707093675e-06, | |
| "loss": 0.6597, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.7204906326136268, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.8420313242130294e-06, | |
| "loss": 0.6585, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.7218348315550701, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.825445950809277e-06, | |
| "loss": 0.6267, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.7231790304965134, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.8089188915430794e-06, | |
| "loss": 0.6727, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.7245232294379568, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.7924504500039193e-06, | |
| "loss": 0.653, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.7258674283794001, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.7760409287045076e-06, | |
| "loss": 0.642, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.7272116273208434, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.7596906290752424e-06, | |
| "loss": 0.6479, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.7285558262622868, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 1.7433998514586631e-06, | |
| "loss": 0.6649, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.7299000252037302, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.7271688951039312e-06, | |
| "loss": 0.6754, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.7312442241451735, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.7109980581613417e-06, | |
| "loss": 0.632, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.7325884230866169, | |
| "grad_norm": 0.76171875, | |
| "learning_rate": 1.694887637676842e-06, | |
| "loss": 0.5958, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.7339326220280602, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 1.6788379295865703e-06, | |
| "loss": 0.6313, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.7352768209695035, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.6628492287114296e-06, | |
| "loss": 0.6562, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.7366210199109469, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 1.6469218287516664e-06, | |
| "loss": 0.6277, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.7379652188523902, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 1.6310560222814714e-06, | |
| "loss": 0.6473, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.7393094177938335, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.6152521007436146e-06, | |
| "loss": 0.6446, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.7406536167352769, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.599510354444087e-06, | |
| "loss": 0.6552, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.7419978156767202, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 1.5838310725467643e-06, | |
| "loss": 0.6373, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.7433420146181635, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.568214543068103e-06, | |
| "loss": 0.6516, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.7446862135596068, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.5526610528718416e-06, | |
| "loss": 0.6185, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.7460304125010502, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 1.5371708876637353e-06, | |
| "loss": 0.691, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.7473746114424935, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.5217443319863112e-06, | |
| "loss": 0.6475, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.7487188103839368, | |
| "grad_norm": 0.7578125, | |
| "learning_rate": 1.5063816692136374e-06, | |
| "loss": 0.6037, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.7500630093253802, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.4910831815461125e-06, | |
| "loss": 0.6247, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.7514072082668235, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.4758491500052924e-06, | |
| "loss": 0.6306, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.7527514072082668, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.4606798544287243e-06, | |
| "loss": 0.6729, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.7540956061497102, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 1.4455755734647992e-06, | |
| "loss": 0.6456, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.7554398050911535, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.4305365845676438e-06, | |
| "loss": 0.6482, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.7567840040325968, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 1.4155631639920208e-06, | |
| "loss": 0.6465, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.7581282029740402, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 1.4006555867882466e-06, | |
| "loss": 0.6638, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.7594724019154835, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.3858141267971492e-06, | |
| "loss": 0.6282, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.7608166008569268, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.3710390566450366e-06, | |
| "loss": 0.6749, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.7621607997983701, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.3563306477386783e-06, | |
| "loss": 0.655, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.7635049987398135, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.3416891702603357e-06, | |
| "loss": 0.6596, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.7648491976812568, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 1.3271148931627859e-06, | |
| "loss": 0.6813, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.7661933966227001, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.3126080841643856e-06, | |
| "loss": 0.6312, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.7675375955641435, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 1.2981690097441574e-06, | |
| "loss": 0.6442, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.7688817945055868, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.2837979351368912e-06, | |
| "loss": 0.6484, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.7702259934470301, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.2694951243282682e-06, | |
| "loss": 0.6373, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.7715701923884735, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.25526084005002e-06, | |
| "loss": 0.6318, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.7729143913299168, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 1.2410953437750966e-06, | |
| "loss": 0.6471, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.7742585902713601, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.2269988957128638e-06, | |
| "loss": 0.6353, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.7756027892128035, | |
| "grad_norm": 0.890625, | |
| "learning_rate": 1.212971754804324e-06, | |
| "loss": 0.6639, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.7769469881542468, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.1990141787173648e-06, | |
| "loss": 0.6428, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.7782911870956901, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.1851264238420136e-06, | |
| "loss": 0.6572, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.7796353860371334, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 1.1713087452857408e-06, | |
| "loss": 0.6291, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.7809795849785768, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.1575613968687683e-06, | |
| "loss": 0.6529, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.7823237839200201, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 1.1438846311194024e-06, | |
| "loss": 0.6679, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.7836679828614634, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.1302786992694049e-06, | |
| "loss": 0.6377, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.7850121818029069, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 1.1167438512493684e-06, | |
| "loss": 0.637, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.7863563807443502, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.1032803356841343e-06, | |
| "loss": 0.6484, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.7877005796857935, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 1.089888399888216e-06, | |
| "loss": 0.6971, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.7890447786272369, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.0765682898612657e-06, | |
| "loss": 0.6715, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.7903889775686802, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 1.0633202502835494e-06, | |
| "loss": 0.6915, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.7917331765101235, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 1.0501445245114523e-06, | |
| "loss": 0.6216, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.7930773754515669, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.0370413545730117e-06, | |
| "loss": 0.6246, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.7944215743930102, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 1.0240109811634714e-06, | |
| "loss": 0.6232, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.7957657733344535, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.0110536436408535e-06, | |
| "loss": 0.6392, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.7971099722758969, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 9.981695800215701e-07, | |
| "loss": 0.6428, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.7984541712173402, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 9.853590269760493e-07, | |
| "loss": 0.6712, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.7997983701587835, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 9.726222198243806e-07, | |
| "loss": 0.6173, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.8011425691002269, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 9.599593925320017e-07, | |
| "loss": 0.6397, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.8024867680416702, | |
| "grad_norm": 0.84765625, | |
| "learning_rate": 9.47370777705397e-07, | |
| "loss": 0.6875, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.8038309669831135, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 9.348566065878218e-07, | |
| "loss": 0.6455, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.8051751659245568, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 9.224171090550571e-07, | |
| "loss": 0.6325, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.8065193648660002, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 9.100525136111915e-07, | |
| "loss": 0.6663, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.8078635638074435, | |
| "grad_norm": 0.7578125, | |
| "learning_rate": 8.97763047384414e-07, | |
| "loss": 0.6358, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.8092077627488868, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 8.855489361228497e-07, | |
| "loss": 0.6556, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.8105519616903302, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 8.734104041904129e-07, | |
| "loss": 0.6613, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.8118961606317735, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 8.613476745626769e-07, | |
| "loss": 0.6688, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.8132403595732168, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 8.4936096882279e-07, | |
| "loss": 0.637, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.8145845585146602, | |
| "grad_norm": 0.7578125, | |
| "learning_rate": 8.374505071573991e-07, | |
| "loss": 0.6512, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.8159287574561035, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 8.256165083526019e-07, | |
| "loss": 0.6522, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.8172729563975468, | |
| "grad_norm": 0.765625, | |
| "learning_rate": 8.138591897899345e-07, | |
| "loss": 0.621, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.8186171553389902, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 8.021787674423775e-07, | |
| "loss": 0.6703, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.8199613542804335, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 7.905754558703805e-07, | |
| "loss": 0.6519, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.8213055532218768, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 7.790494682179317e-07, | |
| "loss": 0.6565, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.8226497521633201, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 7.676010162086389e-07, | |
| "loss": 0.6753, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.8239939511047635, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 7.56230310141835e-07, | |
| "loss": 0.6604, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.8253381500462068, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 7.449375588887203e-07, | |
| "loss": 0.6374, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.8266823489876501, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 7.337229698885279e-07, | |
| "loss": 0.6595, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.8280265479290935, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 7.225867491447053e-07, | |
| "loss": 0.6368, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.8293707468705368, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 7.115291012211384e-07, | |
| "loss": 0.63, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.8307149458119801, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 7.005502292383898e-07, | |
| "loss": 0.6361, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.8320591447534235, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 6.896503348699657e-07, | |
| "loss": 0.6454, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.8334033436948668, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 6.788296183386162e-07, | |
| "loss": 0.6877, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.8347475426363101, | |
| "grad_norm": 0.76171875, | |
| "learning_rate": 6.680882784126552e-07, | |
| "loss": 0.6173, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.8360917415777535, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 6.574265124023055e-07, | |
| "loss": 0.6452, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.8374359405191968, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 6.46844516156081e-07, | |
| "loss": 0.6147, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.8387801394606402, | |
| "grad_norm": 0.765625, | |
| "learning_rate": 6.363424840571869e-07, | |
| "loss": 0.6646, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.8401243384020836, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 6.259206090199427e-07, | |
| "loss": 0.6547, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.8414685373435269, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 6.155790824862484e-07, | |
| "loss": 0.6231, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.8428127362849702, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 6.053180944220627e-07, | |
| "loss": 0.6684, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.8441569352264136, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 5.951378333139118e-07, | |
| "loss": 0.6465, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.8455011341678569, | |
| "grad_norm": 0.83984375, | |
| "learning_rate": 5.850384861654329e-07, | |
| "loss": 0.6451, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.8468453331093002, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 5.750202384939313e-07, | |
| "loss": 0.6367, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.8481895320507435, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 5.65083274326978e-07, | |
| "loss": 0.6602, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.8495337309921869, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 5.552277761990293e-07, | |
| "loss": 0.6587, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.8508779299336302, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 5.454539251480739e-07, | |
| "loss": 0.6605, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.8522221288750735, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 5.35761900712301e-07, | |
| "loss": 0.6672, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.8535663278165169, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 5.261518809268118e-07, | |
| "loss": 0.6428, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.8549105267579602, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 5.166240423203428e-07, | |
| "loss": 0.6359, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.8562547256994035, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 5.071785599120243e-07, | |
| "loss": 0.6562, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.8575989246408469, | |
| "grad_norm": 0.91015625, | |
| "learning_rate": 4.978156072081669e-07, | |
| "loss": 0.6627, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.8589431235822902, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.885353561990752e-07, | |
| "loss": 0.6588, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.8602873225237335, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 4.793379773558816e-07, | |
| "loss": 0.6279, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.8616315214651769, | |
| "grad_norm": 0.86328125, | |
| "learning_rate": 4.702236396274251e-07, | |
| "loss": 0.662, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.8629757204066202, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.611925104371423e-07, | |
| "loss": 0.6556, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.8643199193480635, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 4.522447556799875e-07, | |
| "loss": 0.6417, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.8656641182895068, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 4.433805397193969e-07, | |
| "loss": 0.6636, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.8670083172309502, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 4.3460002538425806e-07, | |
| "loss": 0.6509, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.8683525161723935, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.2590337396592406e-07, | |
| "loss": 0.6481, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.8696967151138368, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 4.1729074521525193e-07, | |
| "loss": 0.6307, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.8710409140552802, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 4.0876229733966655e-07, | |
| "loss": 0.626, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.8723851129967235, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 4.0031818700025095e-07, | |
| "loss": 0.6414, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.8737293119381668, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.919585693088751e-07, | |
| "loss": 0.6427, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.8750735108796102, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.836835978253434e-07, | |
| "loss": 0.6505, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.8764177098210535, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.754934245545722e-07, | |
| "loss": 0.6691, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.8777619087624968, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 3.6738819994379946e-07, | |
| "loss": 0.6116, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.8791061077039402, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 3.593680728798238e-07, | |
| "loss": 0.6865, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.8804503066453835, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 3.514331906862622e-07, | |
| "loss": 0.6743, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.8817945055868268, | |
| "grad_norm": 0.75390625, | |
| "learning_rate": 3.435836991208524e-07, | |
| "loss": 0.6521, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.8831387045282701, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 3.3581974237277106e-07, | |
| "loss": 0.6471, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.8844829034697135, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 3.281414630599811e-07, | |
| "loss": 0.6595, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.8858271024111568, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 3.205490022266228e-07, | |
| "loss": 0.6279, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.8871713013526001, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 3.130424993404102e-07, | |
| "loss": 0.6854, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.8885155002940435, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 3.0562209229008043e-07, | |
| "loss": 0.6259, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.8898596992354868, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 2.9828791738285235e-07, | |
| "loss": 0.6296, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.8912038981769301, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 2.9104010934192795e-07, | |
| "loss": 0.649, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.8925480971183736, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.838788013040139e-07, | |
| "loss": 0.669, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.8938922960598169, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.7680412481688013e-07, | |
| "loss": 0.693, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.8952364950012602, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 2.698162098369406e-07, | |
| "loss": 0.649, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.8965806939427036, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.6291518472686404e-07, | |
| "loss": 0.6503, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.8979248928841469, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 2.561011762532212e-07, | |
| "loss": 0.6714, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.8992690918255902, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 2.4937430958415275e-07, | |
| "loss": 0.646, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.9006132907670336, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.4273470828707014e-07, | |
| "loss": 0.6876, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.9019574897084769, | |
| "grad_norm": 0.75390625, | |
| "learning_rate": 2.361824943263874e-07, | |
| "loss": 0.6294, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.9033016886499202, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 2.2971778806127997e-07, | |
| "loss": 0.6249, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.9046458875913636, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 2.2334070824347243e-07, | |
| "loss": 0.6247, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.9059900865328069, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 2.1705137201505967e-07, | |
| "loss": 0.6516, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.9073342854742502, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 2.1084989490635254e-07, | |
| "loss": 0.6497, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.9086784844156935, | |
| "grad_norm": 0.83203125, | |
| "learning_rate": 2.0473639083375796e-07, | |
| "loss": 0.6489, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.9100226833571369, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 1.9871097209768375e-07, | |
| "loss": 0.6255, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.9113668822985802, | |
| "grad_norm": 0.8203125, | |
| "learning_rate": 1.9277374938047989e-07, | |
| "loss": 0.6596, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.9127110812400235, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 1.8692483174439946e-07, | |
| "loss": 0.6328, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.9140552801814669, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.8116432662960037e-07, | |
| "loss": 0.6387, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.9153994791229102, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.7549233985217074e-07, | |
| "loss": 0.6653, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.9167436780643535, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.6990897560218212e-07, | |
| "loss": 0.6423, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.9180878770057969, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.644143364417794e-07, | |
| "loss": 0.6218, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.9194320759472402, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.5900852330329564e-07, | |
| "loss": 0.6484, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.9207762748886835, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 1.5369163548739464e-07, | |
| "loss": 0.6568, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.9221204738301269, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.4846377066125351e-07, | |
| "loss": 0.6457, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.9234646727715702, | |
| "grad_norm": 0.765625, | |
| "learning_rate": 1.4332502485676358e-07, | |
| "loss": 0.6276, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.9248088717130135, | |
| "grad_norm": 0.80859375, | |
| "learning_rate": 1.3827549246876627e-07, | |
| "loss": 0.6767, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.9261530706544568, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.3331526625332268e-07, | |
| "loss": 0.6218, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.9274972695959002, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.2844443732600576e-07, | |
| "loss": 0.6505, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.9288414685373435, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 1.2366309516022966e-07, | |
| "loss": 0.6412, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.9301856674787868, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.1897132758560471e-07, | |
| "loss": 0.6617, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.9315298664202302, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 1.1436922078632395e-07, | |
| "loss": 0.64, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.9328740653616735, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.0985685929958135e-07, | |
| "loss": 0.6292, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.9342182643031168, | |
| "grad_norm": 0.8359375, | |
| "learning_rate": 1.0543432601401615e-07, | |
| "loss": 0.6764, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.9355624632445602, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.0110170216819315e-07, | |
| "loss": 0.6528, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.9369066621860035, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 9.685906734910988e-08, | |
| "loss": 0.6624, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.9382508611274468, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 9.270649949073229e-08, | |
| "loss": 0.6362, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.9395950600688902, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 8.8644074872567e-08, | |
| "loss": 0.6367, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.9409392590103335, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 8.467186811825623e-08, | |
| "loss": 0.6606, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.9422834579517768, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 8.07899521942096e-08, | |
| "loss": 0.6489, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.9436276568932201, | |
| "grad_norm": 0.765625, | |
| "learning_rate": 7.69983984082634e-08, | |
| "loss": 0.6556, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.9449718558346635, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 7.329727640837059e-08, | |
| "loss": 0.6676, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.9463160547761069, | |
| "grad_norm": 0.75390625, | |
| "learning_rate": 6.968665418131848e-08, | |
| "loss": 0.6127, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.9476602537175502, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 6.616659805148695e-08, | |
| "loss": 0.6773, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.9490044526589936, | |
| "grad_norm": 0.8046875, | |
| "learning_rate": 6.273717267962165e-08, | |
| "loss": 0.6753, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.9503486516004369, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 5.9398441061652714e-08, | |
| "loss": 0.6306, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.9516928505418802, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 5.615046452753403e-08, | |
| "loss": 0.6623, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.9530370494833236, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 5.299330274011916e-08, | |
| "loss": 0.6346, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.9543812484247669, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 4.992701369406161e-08, | |
| "loss": 0.6213, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.9557254473662102, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 4.695165371475463e-08, | |
| "loss": 0.6431, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.9570696463076536, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 4.4067277457292553e-08, | |
| "loss": 0.6355, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.9584138452490969, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.127393790546719e-08, | |
| "loss": 0.6526, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.9597580441905402, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 3.857168637079744e-08, | |
| "loss": 0.6663, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.9611022431319836, | |
| "grad_norm": 0.7890625, | |
| "learning_rate": 3.596057249158347e-08, | |
| "loss": 0.6246, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.9624464420734269, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 3.344064423199567e-08, | |
| "loss": 0.6345, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.9637906410148702, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 3.101194788119599e-08, | |
| "loss": 0.6369, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.9651348399563136, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 2.8674528052484162e-08, | |
| "loss": 0.6639, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.9664790388977569, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.6428427682480552e-08, | |
| "loss": 0.6701, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.9678232378392002, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 2.4273688030336805e-08, | |
| "loss": 0.6696, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.9691674367806435, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 2.221034867697702e-08, | |
| "loss": 0.6525, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.9705116357220869, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 2.0238447524372206e-08, | |
| "loss": 0.6532, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.9718558346635302, | |
| "grad_norm": 0.796875, | |
| "learning_rate": 1.8358020794843056e-08, | |
| "loss": 0.6504, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.9732000336049735, | |
| "grad_norm": 0.7734375, | |
| "learning_rate": 1.656910303039494e-08, | |
| "loss": 0.6338, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.9745442325464169, | |
| "grad_norm": 0.79296875, | |
| "learning_rate": 1.4871727092083398e-08, | |
| "loss": 0.6581, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.9758884314878602, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 1.3265924159410193e-08, | |
| "loss": 0.6551, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.9772326304293035, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 1.1751723729750975e-08, | |
| "loss": 0.6729, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.9785768293707469, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 1.0329153617812948e-08, | |
| "loss": 0.6887, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.9799210283121902, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 8.998239955124722e-09, | |
| "loss": 0.6568, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.9812652272536335, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 7.759007189555578e-09, | |
| "loss": 0.6433, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.9826094261950769, | |
| "grad_norm": 1.015625, | |
| "learning_rate": 6.611478084866951e-09, | |
| "loss": 0.6676, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.9839536251365202, | |
| "grad_norm": 0.8125, | |
| "learning_rate": 5.5556737202927534e-09, | |
| "loss": 0.6519, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.9852978240779635, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 4.5916134901552445e-09, | |
| "loss": 0.6383, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.9866420230194068, | |
| "grad_norm": 0.80078125, | |
| "learning_rate": 3.7193151035047614e-09, | |
| "loss": 0.6309, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.9879862219608502, | |
| "grad_norm": 0.76953125, | |
| "learning_rate": 2.93879458379831e-09, | |
| "loss": 0.6682, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.9893304209022935, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 2.2500662686025797e-09, | |
| "loss": 0.6513, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.9906746198437368, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 1.6531428093313761e-09, | |
| "loss": 0.627, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.9920188187851802, | |
| "grad_norm": 0.84375, | |
| "learning_rate": 1.148035171014139e-09, | |
| "loss": 0.6565, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.9933630177266235, | |
| "grad_norm": 0.82421875, | |
| "learning_rate": 7.347526320927722e-10, | |
| "loss": 0.6631, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.9947072166680668, | |
| "grad_norm": 0.81640625, | |
| "learning_rate": 4.1330278425177896e-10, | |
| "loss": 0.6566, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.9960514156095102, | |
| "grad_norm": 0.78515625, | |
| "learning_rate": 1.8369153228114946e-10, | |
| "loss": 0.6564, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.9973956145509535, | |
| "grad_norm": 0.78125, | |
| "learning_rate": 4.5923093963118334e-11, | |
| "loss": 0.6299, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.9987398134923968, | |
| "grad_norm": 0.77734375, | |
| "learning_rate": 0.0, | |
| "loss": 0.643, | |
| "step": 743 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 743, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 248, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.7541079597200703e+19, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
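
The fields above follow the standard Hugging Face `Trainer` checkpoint layout (`log_history`, `global_step`, `max_steps`, `total_flos`, and so on). As a minimal sketch of working with this file — assuming it is saved under its usual name `trainer_state.json` inside a checkpoint directory, which is an assumption and not stated in the dump itself — the state can be loaded and summarized like this:

```python
# Minimal sketch (hypothetical usage): load the trainer_state.json shown above
# and print a short summary of the logged run. Keys match the dump in this file.
import json

with open("trainer_state.json") as f:  # assumed filename/location
    state = json.load(f)

# Training log entries carry "loss"; evaluation entries carry "eval_loss".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"global_step      : {state['global_step']} / {state['max_steps']}")
print(f"epochs completed : {state['epoch']:.4f}")
print(f"first train loss : {train_logs[0]['loss']:.4f} (step {train_logs[0]['step']})")
print(f"final train loss : {train_logs[-1]['loss']:.4f} (step {train_logs[-1]['step']})")
if eval_logs:
    print(f"last eval loss   : {eval_logs[-1]['eval_loss']:.4f} (step {eval_logs[-1]['step']})")
print(f"final lr         : {train_logs[-1]['learning_rate']:.3e}")
print(f"total FLOs       : {state['total_flos']:.3e}")
```

On this dump, such a summary would report 743 of 743 steps completed, a final training loss of 0.643 at step 743, and a learning rate that has decayed to 0.0, consistent with the schedule reaching its end at the close of the single training epoch.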