| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 3.0, | |
| "eval_steps": 500, | |
| "global_step": 1350, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0022222222222222222, | |
| "grad_norm": 2.623322362612143, | |
| "learning_rate": 4.878048780487805e-07, | |
| "loss": 3.0431, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0044444444444444444, | |
| "grad_norm": 2.261103014889126, | |
| "learning_rate": 9.75609756097561e-07, | |
| "loss": 3.0184, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.006666666666666667, | |
| "grad_norm": 2.7168444790133903, | |
| "learning_rate": 1.4634146341463414e-06, | |
| "loss": 3.3019, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.008888888888888889, | |
| "grad_norm": 2.7343955653769876, | |
| "learning_rate": 1.951219512195122e-06, | |
| "loss": 3.2182, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.011111111111111112, | |
| "grad_norm": 2.2100918502143414, | |
| "learning_rate": 2.4390243902439027e-06, | |
| "loss": 2.9725, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.013333333333333334, | |
| "grad_norm": 2.3250911641120293, | |
| "learning_rate": 2.926829268292683e-06, | |
| "loss": 3.1052, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.015555555555555555, | |
| "grad_norm": 2.6288955885970515, | |
| "learning_rate": 3.414634146341464e-06, | |
| "loss": 3.0472, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.017777777777777778, | |
| "grad_norm": 2.219103106569174, | |
| "learning_rate": 3.902439024390244e-06, | |
| "loss": 2.9739, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 2.2520911835478774, | |
| "learning_rate": 4.390243902439025e-06, | |
| "loss": 2.8884, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.022222222222222223, | |
| "grad_norm": 2.1493375853557364, | |
| "learning_rate": 4.8780487804878055e-06, | |
| "loss": 2.9373, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.024444444444444446, | |
| "grad_norm": 2.3992410965049404, | |
| "learning_rate": 5.365853658536586e-06, | |
| "loss": 3.0224, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.02666666666666667, | |
| "grad_norm": 2.6279528519907367, | |
| "learning_rate": 5.853658536585366e-06, | |
| "loss": 3.055, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.028888888888888888, | |
| "grad_norm": 2.1553348953628038, | |
| "learning_rate": 6.341463414634147e-06, | |
| "loss": 2.9028, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.03111111111111111, | |
| "grad_norm": 2.1821297305309466, | |
| "learning_rate": 6.829268292682928e-06, | |
| "loss": 2.6423, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.03333333333333333, | |
| "grad_norm": 2.1821297305309466, | |
| "learning_rate": 6.829268292682928e-06, | |
| "loss": 2.8703, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.035555555555555556, | |
| "grad_norm": 2.3930540157080933, | |
| "learning_rate": 7.317073170731707e-06, | |
| "loss": 2.6671, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.03777777777777778, | |
| "grad_norm": 2.304230557213507, | |
| "learning_rate": 7.804878048780489e-06, | |
| "loss": 2.8108, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 2.3606667375109334, | |
| "learning_rate": 8.292682926829268e-06, | |
| "loss": 2.7557, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.042222222222222223, | |
| "grad_norm": 2.2694672135734058, | |
| "learning_rate": 8.78048780487805e-06, | |
| "loss": 2.6854, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.044444444444444446, | |
| "grad_norm": 2.2694672135734058, | |
| "learning_rate": 8.78048780487805e-06, | |
| "loss": 2.6259, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.04666666666666667, | |
| "grad_norm": 2.260385565524592, | |
| "learning_rate": 9.268292682926831e-06, | |
| "loss": 2.5754, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.04888888888888889, | |
| "grad_norm": 2.2022125875440444, | |
| "learning_rate": 9.756097560975611e-06, | |
| "loss": 2.5253, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.051111111111111114, | |
| "grad_norm": 3.5096993808994843, | |
| "learning_rate": 1.024390243902439e-05, | |
| "loss": 2.4524, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.05333333333333334, | |
| "grad_norm": 2.1136234261807076, | |
| "learning_rate": 1.0731707317073172e-05, | |
| "loss": 2.3648, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.05555555555555555, | |
| "grad_norm": 2.279726869850189, | |
| "learning_rate": 1.1219512195121953e-05, | |
| "loss": 2.3092, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.057777777777777775, | |
| "grad_norm": 2.0624550418977132, | |
| "learning_rate": 1.1707317073170731e-05, | |
| "loss": 2.3245, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 2.1452353524431373, | |
| "learning_rate": 1.2195121951219513e-05, | |
| "loss": 2.0884, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.06222222222222222, | |
| "grad_norm": 2.037063719665142, | |
| "learning_rate": 1.2682926829268294e-05, | |
| "loss": 2.0828, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.06444444444444444, | |
| "grad_norm": 2.1625230802297195, | |
| "learning_rate": 1.3170731707317076e-05, | |
| "loss": 2.0986, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.06666666666666667, | |
| "grad_norm": 1.977909713991803, | |
| "learning_rate": 1.3658536585365855e-05, | |
| "loss": 1.9159, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.06888888888888889, | |
| "grad_norm": 1.8324253121705385, | |
| "learning_rate": 1.4146341463414635e-05, | |
| "loss": 1.9116, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.07111111111111111, | |
| "grad_norm": 2.079040049018754, | |
| "learning_rate": 1.4634146341463415e-05, | |
| "loss": 1.8451, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.07333333333333333, | |
| "grad_norm": 1.809402468708381, | |
| "learning_rate": 1.5121951219512196e-05, | |
| "loss": 1.8039, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.07555555555555556, | |
| "grad_norm": 1.7844520594891926, | |
| "learning_rate": 1.5609756097560978e-05, | |
| "loss": 1.611, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.07777777777777778, | |
| "grad_norm": 1.7670406551498323, | |
| "learning_rate": 1.6097560975609757e-05, | |
| "loss": 1.6735, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 1.6240755311579538, | |
| "learning_rate": 1.6585365853658537e-05, | |
| "loss": 1.544, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.08222222222222222, | |
| "grad_norm": 1.6598703391595058, | |
| "learning_rate": 1.7073170731707317e-05, | |
| "loss": 1.4969, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.08444444444444445, | |
| "grad_norm": 1.7797228247223702, | |
| "learning_rate": 1.75609756097561e-05, | |
| "loss": 1.5034, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.08666666666666667, | |
| "grad_norm": 1.6060846046108213, | |
| "learning_rate": 1.804878048780488e-05, | |
| "loss": 1.347, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.08888888888888889, | |
| "grad_norm": 1.6655545462059487, | |
| "learning_rate": 1.8536585365853663e-05, | |
| "loss": 1.3162, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.09111111111111111, | |
| "grad_norm": 1.6440980116956432, | |
| "learning_rate": 1.902439024390244e-05, | |
| "loss": 1.2312, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.09333333333333334, | |
| "grad_norm": 1.6772833304574546, | |
| "learning_rate": 1.9512195121951222e-05, | |
| "loss": 1.2062, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.09555555555555556, | |
| "grad_norm": 1.669217307716062, | |
| "learning_rate": 2e-05, | |
| "loss": 1.0715, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.09777777777777778, | |
| "grad_norm": 1.6043628877097749, | |
| "learning_rate": 1.999997120014852e-05, | |
| "loss": 1.0351, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 1.6679143739908302, | |
| "learning_rate": 1.9999884800759955e-05, | |
| "loss": 0.953, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.10222222222222223, | |
| "grad_norm": 1.574198968752061, | |
| "learning_rate": 1.9999740802331976e-05, | |
| "loss": 0.8663, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.10444444444444445, | |
| "grad_norm": 1.4909340970325713, | |
| "learning_rate": 1.9999539205693996e-05, | |
| "loss": 0.7911, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.10666666666666667, | |
| "grad_norm": 1.4900807130383797, | |
| "learning_rate": 1.9999280012007213e-05, | |
| "loss": 0.7467, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.10888888888888888, | |
| "grad_norm": 1.516805622959705, | |
| "learning_rate": 1.9998963222764574e-05, | |
| "loss": 0.6698, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.1111111111111111, | |
| "grad_norm": 1.3149395696585964, | |
| "learning_rate": 1.9998588839790777e-05, | |
| "loss": 0.6201, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.11333333333333333, | |
| "grad_norm": 1.6723781816084982, | |
| "learning_rate": 1.9998156865242256e-05, | |
| "loss": 0.5536, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.11555555555555555, | |
| "grad_norm": 1.223792216559664, | |
| "learning_rate": 1.9997667301607172e-05, | |
| "loss": 0.5297, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.11777777777777777, | |
| "grad_norm": 1.0992516074768282, | |
| "learning_rate": 1.9997120151705393e-05, | |
| "loss": 0.4383, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 1.2464445419569536, | |
| "learning_rate": 1.9996515418688493e-05, | |
| "loss": 0.4063, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.12222222222222222, | |
| "grad_norm": 1.0859125380167651, | |
| "learning_rate": 1.9995853106039707e-05, | |
| "loss": 0.3942, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.12444444444444444, | |
| "grad_norm": 1.1985531426652731, | |
| "learning_rate": 1.9995133217573943e-05, | |
| "loss": 0.3501, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.12666666666666668, | |
| "grad_norm": 1.0417891714128484, | |
| "learning_rate": 1.999435575743774e-05, | |
| "loss": 0.3369, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.1288888888888889, | |
| "grad_norm": 1.1926194557532863, | |
| "learning_rate": 1.9993520730109236e-05, | |
| "loss": 0.3181, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.13111111111111112, | |
| "grad_norm": 1.0284700411429233, | |
| "learning_rate": 1.999262814039817e-05, | |
| "loss": 0.2614, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.13333333333333333, | |
| "grad_norm": 1.0423244776334333, | |
| "learning_rate": 1.9991677993445832e-05, | |
| "loss": 0.2517, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.13555555555555557, | |
| "grad_norm": 1.1806023347098358, | |
| "learning_rate": 1.9990670294725036e-05, | |
| "loss": 0.2361, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.13777777777777778, | |
| "grad_norm": 1.1588129036209098, | |
| "learning_rate": 1.99896050500401e-05, | |
| "loss": 0.2172, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.9963259552167313, | |
| "learning_rate": 1.9988482265526805e-05, | |
| "loss": 0.2112, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.14222222222222222, | |
| "grad_norm": 0.9633795140630308, | |
| "learning_rate": 1.9987301947652354e-05, | |
| "loss": 0.2309, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.14444444444444443, | |
| "grad_norm": 0.8935284726202222, | |
| "learning_rate": 1.998606410321534e-05, | |
| "loss": 0.1689, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.14666666666666667, | |
| "grad_norm": 0.7244578036029303, | |
| "learning_rate": 1.998476873934571e-05, | |
| "loss": 0.146, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.14888888888888888, | |
| "grad_norm": 0.8206113325604889, | |
| "learning_rate": 1.9983415863504723e-05, | |
| "loss": 0.1326, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.1511111111111111, | |
| "grad_norm": 0.929646005224609, | |
| "learning_rate": 1.998200548348491e-05, | |
| "loss": 0.2032, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.15333333333333332, | |
| "grad_norm": 1.1574986786521502, | |
| "learning_rate": 1.9980537607410007e-05, | |
| "loss": 0.1953, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.15555555555555556, | |
| "grad_norm": 0.8467966213189054, | |
| "learning_rate": 1.9979012243734943e-05, | |
| "loss": 0.1036, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.15777777777777777, | |
| "grad_norm": 1.4950401937069921, | |
| "learning_rate": 1.9977429401245764e-05, | |
| "loss": 0.1945, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.8619786539742662, | |
| "learning_rate": 1.9975789089059598e-05, | |
| "loss": 0.1328, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.1622222222222222, | |
| "grad_norm": 0.8012986736687174, | |
| "learning_rate": 1.9974091316624596e-05, | |
| "loss": 0.1053, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.16444444444444445, | |
| "grad_norm": 1.427531027612763, | |
| "learning_rate": 1.9972336093719876e-05, | |
| "loss": 0.1612, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.16666666666666666, | |
| "grad_norm": 1.3714252905367872, | |
| "learning_rate": 1.997052343045547e-05, | |
| "loss": 0.1108, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.1688888888888889, | |
| "grad_norm": 0.5396381544048664, | |
| "learning_rate": 1.9968653337272262e-05, | |
| "loss": 0.0632, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.1711111111111111, | |
| "grad_norm": 0.917340021397074, | |
| "learning_rate": 1.9966725824941933e-05, | |
| "loss": 0.0703, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.17333333333333334, | |
| "grad_norm": 0.9527175902434158, | |
| "learning_rate": 1.9964740904566903e-05, | |
| "loss": 0.0684, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.17555555555555555, | |
| "grad_norm": 0.8873445304144839, | |
| "learning_rate": 1.9962698587580246e-05, | |
| "loss": 0.0589, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.17777777777777778, | |
| "grad_norm": 1.1563328274020708, | |
| "learning_rate": 1.996059888574565e-05, | |
| "loss": 0.081, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.5088497800722968, | |
| "learning_rate": 1.9958441811157342e-05, | |
| "loss": 0.0444, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.18222222222222223, | |
| "grad_norm": 0.5076228510085662, | |
| "learning_rate": 1.9956227376239995e-05, | |
| "loss": 0.0374, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.18444444444444444, | |
| "grad_norm": 0.599711304843245, | |
| "learning_rate": 1.99539555937487e-05, | |
| "loss": 0.0441, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.18666666666666668, | |
| "grad_norm": 0.6675064429989921, | |
| "learning_rate": 1.9951626476768847e-05, | |
| "loss": 0.0399, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.18888888888888888, | |
| "grad_norm": 0.7109046216657469, | |
| "learning_rate": 1.9949240038716092e-05, | |
| "loss": 0.055, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.19111111111111112, | |
| "grad_norm": 1.5218018892449177, | |
| "learning_rate": 1.9946796293336237e-05, | |
| "loss": 0.0999, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.19333333333333333, | |
| "grad_norm": 0.6400697851026957, | |
| "learning_rate": 1.9944295254705187e-05, | |
| "loss": 0.0359, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.19555555555555557, | |
| "grad_norm": 0.5987592280115199, | |
| "learning_rate": 1.994173693722885e-05, | |
| "loss": 0.0238, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.19777777777777777, | |
| "grad_norm": 0.4169792553824183, | |
| "learning_rate": 1.9939121355643057e-05, | |
| "loss": 0.0285, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.822257685361629, | |
| "learning_rate": 1.993644852501348e-05, | |
| "loss": 0.0251, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.20222222222222222, | |
| "grad_norm": 0.5887886595708829, | |
| "learning_rate": 1.9933718460735553e-05, | |
| "loss": 0.0204, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.20444444444444446, | |
| "grad_norm": 0.4717267328425819, | |
| "learning_rate": 1.9930931178534353e-05, | |
| "loss": 0.0208, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.20666666666666667, | |
| "grad_norm": 0.6256908207283287, | |
| "learning_rate": 1.9928086694464544e-05, | |
| "loss": 0.0225, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.2088888888888889, | |
| "grad_norm": 1.1944230735599275, | |
| "learning_rate": 1.992518502491028e-05, | |
| "loss": 0.0438, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.2111111111111111, | |
| "grad_norm": 0.3797429952201044, | |
| "learning_rate": 1.992222618658508e-05, | |
| "loss": 0.015, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.21333333333333335, | |
| "grad_norm": 0.5442507624352912, | |
| "learning_rate": 1.9919210196531774e-05, | |
| "loss": 0.0127, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.21555555555555556, | |
| "grad_norm": 0.20192174806719207, | |
| "learning_rate": 1.9916137072122367e-05, | |
| "loss": 0.0045, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.21777777777777776, | |
| "grad_norm": 0.35629669027546523, | |
| "learning_rate": 1.9913006831057967e-05, | |
| "loss": 0.0073, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.3811343068709108, | |
| "learning_rate": 1.9909819491368677e-05, | |
| "loss": 0.0071, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.2222222222222222, | |
| "grad_norm": 0.3811343068709108, | |
| "learning_rate": 1.9909819491368677e-05, | |
| "loss": 0.0354, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.22444444444444445, | |
| "grad_norm": 0.21277371217333504, | |
| "learning_rate": 1.9906575071413468e-05, | |
| "loss": 0.0059, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.22666666666666666, | |
| "grad_norm": 0.3829854099778751, | |
| "learning_rate": 1.9903273589880107e-05, | |
| "loss": 0.0072, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.2288888888888889, | |
| "grad_norm": 0.4598266610872749, | |
| "learning_rate": 1.989991506578503e-05, | |
| "loss": 0.008, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.2311111111111111, | |
| "grad_norm": 0.12172334327236085, | |
| "learning_rate": 1.9896499518473237e-05, | |
| "loss": 0.0037, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.23333333333333334, | |
| "grad_norm": 0.26889325048345314, | |
| "learning_rate": 1.9893026967618176e-05, | |
| "loss": 0.0061, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.23555555555555555, | |
| "grad_norm": 0.263079713257789, | |
| "learning_rate": 1.988949743322164e-05, | |
| "loss": 0.0043, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.23777777777777778, | |
| "grad_norm": 0.2853704607797298, | |
| "learning_rate": 1.988591093561364e-05, | |
| "loss": 0.0041, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.5349086075608627, | |
| "learning_rate": 1.9882267495452296e-05, | |
| "loss": 0.0102, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.24222222222222223, | |
| "grad_norm": 1.3227157979779598, | |
| "learning_rate": 1.987856713372372e-05, | |
| "loss": 0.0518, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.24444444444444444, | |
| "grad_norm": 0.48774892977597367, | |
| "learning_rate": 1.9874809871741877e-05, | |
| "loss": 0.0057, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.24666666666666667, | |
| "grad_norm": 0.08301750407717456, | |
| "learning_rate": 1.987099573114849e-05, | |
| "loss": 0.0024, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.24888888888888888, | |
| "grad_norm": 0.0998951630645237, | |
| "learning_rate": 1.986712473391289e-05, | |
| "loss": 0.0018, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.2511111111111111, | |
| "grad_norm": 0.8337266418083582, | |
| "learning_rate": 1.9863196902331916e-05, | |
| "loss": 0.0172, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.25333333333333335, | |
| "grad_norm": 0.07651210583101263, | |
| "learning_rate": 1.985921225902975e-05, | |
| "loss": 0.002, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.25555555555555554, | |
| "grad_norm": 0.05073998203831285, | |
| "learning_rate": 1.985517082695783e-05, | |
| "loss": 0.0017, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.2577777777777778, | |
| "grad_norm": 0.4578873073764795, | |
| "learning_rate": 1.985107262939468e-05, | |
| "loss": 0.0071, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.9130370391316529, | |
| "learning_rate": 1.984691768994579e-05, | |
| "loss": 0.0213, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.26222222222222225, | |
| "grad_norm": 0.12791629342690117, | |
| "learning_rate": 1.9842706032543496e-05, | |
| "loss": 0.0017, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.2644444444444444, | |
| "grad_norm": 1.0755693304035614, | |
| "learning_rate": 1.983843768144682e-05, | |
| "loss": 0.0251, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.26666666666666666, | |
| "grad_norm": 0.5145717610045697, | |
| "learning_rate": 1.983411266124133e-05, | |
| "loss": 0.0069, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.2688888888888889, | |
| "grad_norm": 0.03758403240804189, | |
| "learning_rate": 1.982973099683902e-05, | |
| "loss": 0.0013, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.27111111111111114, | |
| "grad_norm": 0.8066826274319282, | |
| "learning_rate": 1.9825292713478145e-05, | |
| "loss": 0.0125, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.2733333333333333, | |
| "grad_norm": 0.2405952016960853, | |
| "learning_rate": 1.9820797836723086e-05, | |
| "loss": 0.0025, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.27555555555555555, | |
| "grad_norm": 0.4547771467502152, | |
| "learning_rate": 1.98162463924642e-05, | |
| "loss": 0.0067, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.2777777777777778, | |
| "grad_norm": 0.14450929585996608, | |
| "learning_rate": 1.9811638406917666e-05, | |
| "loss": 0.0025, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.08905361756324702, | |
| "learning_rate": 1.9806973906625352e-05, | |
| "loss": 0.0015, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.2822222222222222, | |
| "grad_norm": 0.4007541877884539, | |
| "learning_rate": 1.980225291845463e-05, | |
| "loss": 0.0054, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.28444444444444444, | |
| "grad_norm": 0.032568395601894465, | |
| "learning_rate": 1.9797475469598267e-05, | |
| "loss": 0.0009, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.2866666666666667, | |
| "grad_norm": 0.33024035103426347, | |
| "learning_rate": 1.9792641587574212e-05, | |
| "loss": 0.0046, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.28888888888888886, | |
| "grad_norm": 0.14878342926580176, | |
| "learning_rate": 1.978775130022549e-05, | |
| "loss": 0.0027, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.2911111111111111, | |
| "grad_norm": 0.35334691805857943, | |
| "learning_rate": 1.978280463572001e-05, | |
| "loss": 0.004, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.29333333333333333, | |
| "grad_norm": 0.13172844479878656, | |
| "learning_rate": 1.977780162255041e-05, | |
| "loss": 0.0018, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.29555555555555557, | |
| "grad_norm": 0.06019878327632559, | |
| "learning_rate": 1.9772742289533896e-05, | |
| "loss": 0.0016, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.29777777777777775, | |
| "grad_norm": 0.160216865282591, | |
| "learning_rate": 1.9767626665812083e-05, | |
| "loss": 0.0023, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 1.6806549477062398, | |
| "learning_rate": 1.9762454780850807e-05, | |
| "loss": 0.0475, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.3022222222222222, | |
| "grad_norm": 0.11393022798646299, | |
| "learning_rate": 1.9757226664439968e-05, | |
| "loss": 0.0022, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.30444444444444446, | |
| "grad_norm": 0.037872960249665674, | |
| "learning_rate": 1.9751942346693368e-05, | |
| "loss": 0.001, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.30666666666666664, | |
| "grad_norm": 0.15275329226428894, | |
| "learning_rate": 1.9746601858048517e-05, | |
| "loss": 0.0027, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.3088888888888889, | |
| "grad_norm": 0.04247765423538433, | |
| "learning_rate": 1.974120522926647e-05, | |
| "loss": 0.0011, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.3111111111111111, | |
| "grad_norm": 0.660804460437883, | |
| "learning_rate": 1.973575249143165e-05, | |
| "loss": 0.0085, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.31333333333333335, | |
| "grad_norm": 0.07819276083719805, | |
| "learning_rate": 1.9730243675951666e-05, | |
| "loss": 0.0018, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.31555555555555553, | |
| "grad_norm": 0.09224536898648249, | |
| "learning_rate": 1.972467881455713e-05, | |
| "loss": 0.0017, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.31777777777777777, | |
| "grad_norm": 0.10341228774388184, | |
| "learning_rate": 1.9719057939301477e-05, | |
| "loss": 0.002, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.07586422594766226, | |
| "learning_rate": 1.9713381082560784e-05, | |
| "loss": 0.0018, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.32222222222222224, | |
| "grad_norm": 1.0021154898255815, | |
| "learning_rate": 1.970764827703358e-05, | |
| "loss": 0.0175, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.3244444444444444, | |
| "grad_norm": 0.21464660706537472, | |
| "learning_rate": 1.9701859555740647e-05, | |
| "loss": 0.0039, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.32666666666666666, | |
| "grad_norm": 0.05521619098399361, | |
| "learning_rate": 1.9696014952024854e-05, | |
| "loss": 0.0014, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.3288888888888889, | |
| "grad_norm": 0.07631263663499638, | |
| "learning_rate": 1.969011449955094e-05, | |
| "loss": 0.0016, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.33111111111111113, | |
| "grad_norm": 0.8136940095826757, | |
| "learning_rate": 1.968415823230534e-05, | |
| "loss": 0.0064, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.3333333333333333, | |
| "grad_norm": 0.06702292784574418, | |
| "learning_rate": 1.9678146184595974e-05, | |
| "loss": 0.0016, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.33555555555555555, | |
| "grad_norm": 0.09196987991250953, | |
| "learning_rate": 1.967207839105206e-05, | |
| "loss": 0.0018, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.3377777777777778, | |
| "grad_norm": 0.12196125664466427, | |
| "learning_rate": 1.9665954886623906e-05, | |
| "loss": 0.0016, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.08860432742792679, | |
| "learning_rate": 1.9659775706582717e-05, | |
| "loss": 0.0019, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.3422222222222222, | |
| "grad_norm": 0.10768240905093937, | |
| "learning_rate": 1.9653540886520387e-05, | |
| "loss": 0.002, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.34444444444444444, | |
| "grad_norm": 0.04911215401412496, | |
| "learning_rate": 1.9647250462349296e-05, | |
| "loss": 0.0012, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.3466666666666667, | |
| "grad_norm": 0.08466663703887824, | |
| "learning_rate": 1.96409044703021e-05, | |
| "loss": 0.0015, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.3488888888888889, | |
| "grad_norm": 0.03028451241557383, | |
| "learning_rate": 1.9634502946931517e-05, | |
| "loss": 0.0009, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.3511111111111111, | |
| "grad_norm": 0.031113002163027238, | |
| "learning_rate": 1.9628045929110144e-05, | |
| "loss": 0.0009, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.35333333333333333, | |
| "grad_norm": 0.035588285940576635, | |
| "learning_rate": 1.9621533454030204e-05, | |
| "loss": 0.0009, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.35555555555555557, | |
| "grad_norm": 0.019139703403524014, | |
| "learning_rate": 1.9614965559203358e-05, | |
| "loss": 0.0006, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.35777777777777775, | |
| "grad_norm": 0.15643810004548148, | |
| "learning_rate": 1.9608342282460492e-05, | |
| "loss": 0.0014, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.8823034744889429, | |
| "learning_rate": 1.960166366195148e-05, | |
| "loss": 0.0133, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.3622222222222222, | |
| "grad_norm": 0.03013019362562091, | |
| "learning_rate": 1.9594929736144978e-05, | |
| "loss": 0.0008, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.36444444444444446, | |
| "grad_norm": 0.026987225487895616, | |
| "learning_rate": 1.9588140543828196e-05, | |
| "loss": 0.0007, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.36666666666666664, | |
| "grad_norm": 0.06166775274737293, | |
| "learning_rate": 1.9581296124106682e-05, | |
| "loss": 0.0008, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.3688888888888889, | |
| "grad_norm": 0.045024988660031316, | |
| "learning_rate": 1.957439651640409e-05, | |
| "loss": 0.001, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.3711111111111111, | |
| "grad_norm": 0.03824056387771315, | |
| "learning_rate": 1.956744176046196e-05, | |
| "loss": 0.0007, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.37333333333333335, | |
| "grad_norm": 0.03560550981791134, | |
| "learning_rate": 1.9560431896339475e-05, | |
| "loss": 0.0007, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.37555555555555553, | |
| "grad_norm": 0.05723839962050225, | |
| "learning_rate": 1.9553366964413244e-05, | |
| "loss": 0.0007, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.37777777777777777, | |
| "grad_norm": 0.05208477744102516, | |
| "learning_rate": 1.9546247005377065e-05, | |
| "loss": 0.0006, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.0364993873141882, | |
| "learning_rate": 1.9539072060241692e-05, | |
| "loss": 0.0007, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.38222222222222224, | |
| "grad_norm": 1.320198279525588, | |
| "learning_rate": 1.9531842170334595e-05, | |
| "loss": 0.0311, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.3844444444444444, | |
| "grad_norm": 0.020346083647731265, | |
| "learning_rate": 1.952455737729973e-05, | |
| "loss": 0.0006, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.38666666666666666, | |
| "grad_norm": 0.03523247698947723, | |
| "learning_rate": 1.951721772309728e-05, | |
| "loss": 0.0008, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.3888888888888889, | |
| "grad_norm": 0.16475102282308718, | |
| "learning_rate": 1.950982325000344e-05, | |
| "loss": 0.002, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.39111111111111113, | |
| "grad_norm": 0.18757317206994756, | |
| "learning_rate": 1.9502374000610152e-05, | |
| "loss": 0.0011, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.3933333333333333, | |
| "grad_norm": 0.35299098661920186, | |
| "learning_rate": 1.9494870017824877e-05, | |
| "loss": 0.0021, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.39555555555555555, | |
| "grad_norm": 0.3649401501488327, | |
| "learning_rate": 1.9487311344870327e-05, | |
| "loss": 0.0026, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.3977777777777778, | |
| "grad_norm": 0.032056929621218, | |
| "learning_rate": 1.947969802528424e-05, | |
| "loss": 0.0006, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.32436393205718156, | |
| "learning_rate": 1.9472030102919102e-05, | |
| "loss": 0.0038, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.4022222222222222, | |
| "grad_norm": 0.1087856457884649, | |
| "learning_rate": 1.9464307621941926e-05, | |
| "loss": 0.0012, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.40444444444444444, | |
| "grad_norm": 0.03408582478970204, | |
| "learning_rate": 1.945653062683397e-05, | |
| "loss": 0.0006, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.4066666666666667, | |
| "grad_norm": 0.0766541545478875, | |
| "learning_rate": 1.9448699162390497e-05, | |
| "loss": 0.0008, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.4088888888888889, | |
| "grad_norm": 0.7957873792569852, | |
| "learning_rate": 1.9440813273720504e-05, | |
| "loss": 0.0339, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.4111111111111111, | |
| "grad_norm": 0.08898514516994652, | |
| "learning_rate": 1.9432873006246483e-05, | |
| "loss": 0.001, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.41333333333333333, | |
| "grad_norm": 0.011285791772131364, | |
| "learning_rate": 1.9424878405704134e-05, | |
| "loss": 0.0003, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.41555555555555557, | |
| "grad_norm": 0.0350501270150874, | |
| "learning_rate": 1.941682951814212e-05, | |
| "loss": 0.0006, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.4177777777777778, | |
| "grad_norm": 0.026380949033664686, | |
| "learning_rate": 1.940872638992179e-05, | |
| "loss": 0.0006, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.15531694271701021, | |
| "learning_rate": 1.9400569067716927e-05, | |
| "loss": 0.0014, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.4222222222222222, | |
| "grad_norm": 0.057262237850984284, | |
| "learning_rate": 1.9392357598513463e-05, | |
| "loss": 0.0007, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.42444444444444446, | |
| "grad_norm": 0.03194325120657333, | |
| "learning_rate": 1.938409202960922e-05, | |
| "loss": 0.0006, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.4266666666666667, | |
| "grad_norm": 0.2489216925764617, | |
| "learning_rate": 1.9375772408613625e-05, | |
| "loss": 0.0017, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.4288888888888889, | |
| "grad_norm": 0.24189666890137113, | |
| "learning_rate": 1.936739878344745e-05, | |
| "loss": 0.0014, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.4311111111111111, | |
| "grad_norm": 0.04584287274935283, | |
| "learning_rate": 1.9358971202342523e-05, | |
| "loss": 0.0008, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.43333333333333335, | |
| "grad_norm": 1.5466102888601088, | |
| "learning_rate": 1.935048971384147e-05, | |
| "loss": 0.0236, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.43555555555555553, | |
| "grad_norm": 0.033320124640433296, | |
| "learning_rate": 1.93419543667974e-05, | |
| "loss": 0.0006, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.43777777777777777, | |
| "grad_norm": 0.023109548272609658, | |
| "learning_rate": 1.9333365210373668e-05, | |
| "loss": 0.0004, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.05916221058149562, | |
| "learning_rate": 1.932472229404356e-05, | |
| "loss": 0.0007, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.44222222222222224, | |
| "grad_norm": 0.2749665246841557, | |
| "learning_rate": 1.931602566759001e-05, | |
| "loss": 0.0027, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.4444444444444444, | |
| "grad_norm": 0.03240933696725177, | |
| "learning_rate": 1.930727538110534e-05, | |
| "loss": 0.0006, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.44666666666666666, | |
| "grad_norm": 0.9231922047479213, | |
| "learning_rate": 1.929847148499093e-05, | |
| "loss": 0.0144, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.4488888888888889, | |
| "grad_norm": 0.028968966906976937, | |
| "learning_rate": 1.928961402995696e-05, | |
| "loss": 0.0005, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.45111111111111113, | |
| "grad_norm": 0.22287274469313864, | |
| "learning_rate": 1.9280703067022114e-05, | |
| "loss": 0.0009, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.4533333333333333, | |
| "grad_norm": 0.3586159059347094, | |
| "learning_rate": 1.927173864751327e-05, | |
| "loss": 0.0025, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.45555555555555555, | |
| "grad_norm": 0.05358265984057159, | |
| "learning_rate": 1.9262720823065217e-05, | |
| "loss": 0.0006, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.4577777777777778, | |
| "grad_norm": 0.31284523262218844, | |
| "learning_rate": 1.9253649645620363e-05, | |
| "loss": 0.0027, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.5503573082645226, | |
| "learning_rate": 1.9244525167428412e-05, | |
| "loss": 0.0055, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.4622222222222222, | |
| "grad_norm": 1.0093300512124708, | |
| "learning_rate": 1.923534744104609e-05, | |
| "loss": 0.0278, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.46444444444444444, | |
| "grad_norm": 0.01595946272415077, | |
| "learning_rate": 1.922611651933683e-05, | |
| "loss": 0.0005, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.4666666666666667, | |
| "grad_norm": 0.03344116540187883, | |
| "learning_rate": 1.9216832455470466e-05, | |
| "loss": 0.0004, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.4688888888888889, | |
| "grad_norm": 0.5626372357227121, | |
| "learning_rate": 1.920749530292293e-05, | |
| "loss": 0.0034, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.4711111111111111, | |
| "grad_norm": 0.021501649881054824, | |
| "learning_rate": 1.9198105115475946e-05, | |
| "loss": 0.0005, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.47333333333333333, | |
| "grad_norm": 0.012684722719238902, | |
| "learning_rate": 1.9188661947216712e-05, | |
| "loss": 0.0003, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.47555555555555556, | |
| "grad_norm": 1.178647005571923, | |
| "learning_rate": 1.9179165852537596e-05, | |
| "loss": 0.0068, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.4777777777777778, | |
| "grad_norm": 0.03583186544652586, | |
| "learning_rate": 1.916961688613582e-05, | |
| "loss": 0.0006, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.026769514633125883, | |
| "learning_rate": 1.9160015103013153e-05, | |
| "loss": 0.0006, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.4822222222222222, | |
| "grad_norm": 0.7080667415031866, | |
| "learning_rate": 1.9150360558475574e-05, | |
| "loss": 0.0216, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.48444444444444446, | |
| "grad_norm": 0.024452394431017342, | |
| "learning_rate": 1.9140653308132977e-05, | |
| "loss": 0.0004, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.4866666666666667, | |
| "grad_norm": 0.04561094298777004, | |
| "learning_rate": 1.9130893407898834e-05, | |
| "loss": 0.0009, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.4888888888888889, | |
| "grad_norm": 0.03985886749512988, | |
| "learning_rate": 1.912108091398988e-05, | |
| "loss": 0.0009, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.4911111111111111, | |
| "grad_norm": 0.0745056378199607, | |
| "learning_rate": 1.9111215882925787e-05, | |
| "loss": 0.0007, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.49333333333333335, | |
| "grad_norm": 0.031071980548846766, | |
| "learning_rate": 1.9101298371528845e-05, | |
| "loss": 0.0007, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.4955555555555556, | |
| "grad_norm": 0.05318716324629559, | |
| "learning_rate": 1.9091328436923624e-05, | |
| "loss": 0.0009, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.49777777777777776, | |
| "grad_norm": 0.03889872268798812, | |
| "learning_rate": 1.908130613653665e-05, | |
| "loss": 0.0007, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.5908443792110623, | |
| "learning_rate": 1.9071231528096074e-05, | |
| "loss": 0.0056, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.5022222222222222, | |
| "grad_norm": 0.011398670582844527, | |
| "learning_rate": 1.9061104669631343e-05, | |
| "loss": 0.0003, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.5044444444444445, | |
| "grad_norm": 0.03214990260807195, | |
| "learning_rate": 1.9050925619472863e-05, | |
| "loss": 0.0005, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.5066666666666667, | |
| "grad_norm": 0.27792558548531765, | |
| "learning_rate": 1.9040694436251657e-05, | |
| "loss": 0.0022, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.5088888888888888, | |
| "grad_norm": 0.03697530683124863, | |
| "learning_rate": 1.9030411178899037e-05, | |
| "loss": 0.0006, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.5111111111111111, | |
| "grad_norm": 0.016065442222854807, | |
| "learning_rate": 1.902007590664626e-05, | |
| "loss": 0.0004, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.5133333333333333, | |
| "grad_norm": 0.7814700401297519, | |
| "learning_rate": 1.900968867902419e-05, | |
| "loss": 0.0144, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.5155555555555555, | |
| "grad_norm": 0.32071500034992223, | |
| "learning_rate": 1.8999249555862953e-05, | |
| "loss": 0.0029, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.5177777777777778, | |
| "grad_norm": 0.01709783232264348, | |
| "learning_rate": 1.8988758597291577e-05, | |
| "loss": 0.0004, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.6574438589165187, | |
| "learning_rate": 1.8978215863737675e-05, | |
| "loss": 0.0134, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.5222222222222223, | |
| "grad_norm": 0.4863552285642119, | |
| "learning_rate": 1.8967621415927087e-05, | |
| "loss": 0.0046, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.5244444444444445, | |
| "grad_norm": 0.1298096995846685, | |
| "learning_rate": 1.8956975314883512e-05, | |
| "loss": 0.0017, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.5266666666666666, | |
| "grad_norm": 0.02639761152341246, | |
| "learning_rate": 1.8946277621928174e-05, | |
| "loss": 0.0005, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.5288888888888889, | |
| "grad_norm": 0.028667654764917946, | |
| "learning_rate": 1.893552839867947e-05, | |
| "loss": 0.0006, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.5311111111111111, | |
| "grad_norm": 1.1567917941716623, | |
| "learning_rate": 1.8924727707052607e-05, | |
| "loss": 0.0111, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.5333333333333333, | |
| "grad_norm": 0.02089248978216262, | |
| "learning_rate": 1.8913875609259246e-05, | |
| "loss": 0.0005, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.5355555555555556, | |
| "grad_norm": 0.029049612723626016, | |
| "learning_rate": 1.890297216780715e-05, | |
| "loss": 0.0007, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.5377777777777778, | |
| "grad_norm": 0.010838980867766716, | |
| "learning_rate": 1.8892017445499812e-05, | |
| "loss": 0.0003, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.0669309101309544, | |
| "learning_rate": 1.8881011505436114e-05, | |
| "loss": 0.0008, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.5422222222222223, | |
| "grad_norm": 0.02582230565188314, | |
| "learning_rate": 1.8869954411009942e-05, | |
| "loss": 0.0006, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.5444444444444444, | |
| "grad_norm": 0.036159244681545, | |
| "learning_rate": 1.8858846225909832e-05, | |
| "loss": 0.0008, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.5466666666666666, | |
| "grad_norm": 0.0758404585670732, | |
| "learning_rate": 1.8847687014118596e-05, | |
| "loss": 0.0013, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.5488888888888889, | |
| "grad_norm": 0.14990089520514846, | |
| "learning_rate": 1.8836476839912967e-05, | |
| "loss": 0.0019, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.5511111111111111, | |
| "grad_norm": 0.03739028864542761, | |
| "learning_rate": 1.8825215767863215e-05, | |
| "loss": 0.0006, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.5533333333333333, | |
| "grad_norm": 0.07280507858148468, | |
| "learning_rate": 1.8813903862832776e-05, | |
| "loss": 0.0009, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.5555555555555556, | |
| "grad_norm": 0.10327193829164616, | |
| "learning_rate": 1.8802541189977893e-05, | |
| "loss": 0.0012, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.5577777777777778, | |
| "grad_norm": 0.06825123037770792, | |
| "learning_rate": 1.879112781474722e-05, | |
| "loss": 0.0012, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.07883080610316746, | |
| "learning_rate": 1.8779663802881465e-05, | |
| "loss": 0.0008, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.5622222222222222, | |
| "grad_norm": 0.04253439566045438, | |
| "learning_rate": 1.876814922041299e-05, | |
| "loss": 0.0008, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.5644444444444444, | |
| "grad_norm": 0.06932424815123103, | |
| "learning_rate": 1.8756584133665447e-05, | |
| "loss": 0.0008, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.5666666666666667, | |
| "grad_norm": 0.06206143897208355, | |
| "learning_rate": 1.8744968609253398e-05, | |
| "loss": 0.0008, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.5688888888888889, | |
| "grad_norm": 0.7695547493955517, | |
| "learning_rate": 1.8733302714081915e-05, | |
| "loss": 0.0202, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.5711111111111111, | |
| "grad_norm": 0.06331494654423411, | |
| "learning_rate": 1.8721586515346204e-05, | |
| "loss": 0.0007, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.5733333333333334, | |
| "grad_norm": 0.020838442545883098, | |
| "learning_rate": 1.870982008053123e-05, | |
| "loss": 0.0006, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.5755555555555556, | |
| "grad_norm": 0.016134548346999962, | |
| "learning_rate": 1.86980034774113e-05, | |
| "loss": 0.0005, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.5777777777777777, | |
| "grad_norm": 0.019703237163135892, | |
| "learning_rate": 1.8686136774049704e-05, | |
| "loss": 0.0004, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.017361733741644604, | |
| "learning_rate": 1.86742200387983e-05, | |
| "loss": 0.0005, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.5822222222222222, | |
| "grad_norm": 0.016648696635112818, | |
| "learning_rate": 1.866225334029712e-05, | |
| "loss": 0.0005, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.5844444444444444, | |
| "grad_norm": 0.019546129454740456, | |
| "learning_rate": 1.8650236747474007e-05, | |
| "loss": 0.0003, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.5866666666666667, | |
| "grad_norm": 0.6160233971331717, | |
| "learning_rate": 1.8638170329544164e-05, | |
| "loss": 0.0443, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.5888888888888889, | |
| "grad_norm": 0.010130609233305131, | |
| "learning_rate": 1.8626054156009807e-05, | |
| "loss": 0.0003, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.5911111111111111, | |
| "grad_norm": 0.39479291035098263, | |
| "learning_rate": 1.8613888296659736e-05, | |
| "loss": 0.0025, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.5933333333333334, | |
| "grad_norm": 0.013812616263719862, | |
| "learning_rate": 1.860167282156894e-05, | |
| "loss": 0.0003, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.5955555555555555, | |
| "grad_norm": 0.018169106009057658, | |
| "learning_rate": 1.8589407801098192e-05, | |
| "loss": 0.0004, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.5977777777777777, | |
| "grad_norm": 0.17012294752998244, | |
| "learning_rate": 1.857709330589364e-05, | |
| "loss": 0.0015, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.01494535535116139, | |
| "learning_rate": 1.856472940688642e-05, | |
| "loss": 0.0004, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.6022222222222222, | |
| "grad_norm": 1.5922240027279706, | |
| "learning_rate": 1.8552316175292214e-05, | |
| "loss": 0.0243, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.6044444444444445, | |
| "grad_norm": 0.00908383812609346, | |
| "learning_rate": 1.8539853682610876e-05, | |
| "loss": 0.0002, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.6066666666666667, | |
| "grad_norm": 0.015838289578029737, | |
| "learning_rate": 1.8527342000625984e-05, | |
| "loss": 0.0004, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.6088888888888889, | |
| "grad_norm": 0.008425041809995652, | |
| "learning_rate": 1.8514781201404464e-05, | |
| "loss": 0.0003, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.6111111111111112, | |
| "grad_norm": 0.02433588209864682, | |
| "learning_rate": 1.8502171357296144e-05, | |
| "loss": 0.0004, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.6133333333333333, | |
| "grad_norm": 0.02642165670842728, | |
| "learning_rate": 1.8489512540933346e-05, | |
| "loss": 0.0004, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.6155555555555555, | |
| "grad_norm": 0.06640841655766903, | |
| "learning_rate": 1.8476804825230482e-05, | |
| "loss": 0.0008, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.6177777777777778, | |
| "grad_norm": 0.048945307199085464, | |
| "learning_rate": 1.8464048283383613e-05, | |
| "loss": 0.0006, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.020042128979306413, | |
| "learning_rate": 1.8451242988870043e-05, | |
| "loss": 0.0004, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.6222222222222222, | |
| "grad_norm": 0.3786660023485802, | |
| "learning_rate": 1.843838901544789e-05, | |
| "loss": 0.0046, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.6244444444444445, | |
| "grad_norm": 0.07121516005252483, | |
| "learning_rate": 1.842548643715566e-05, | |
| "loss": 0.0007, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.6266666666666667, | |
| "grad_norm": 0.20109432203989125, | |
| "learning_rate": 1.8412535328311813e-05, | |
| "loss": 0.0028, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.6288888888888889, | |
| "grad_norm": 0.10841184255094245, | |
| "learning_rate": 1.839953576351436e-05, | |
| "loss": 0.0013, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.6311111111111111, | |
| "grad_norm": 1.3530251070576613, | |
| "learning_rate": 1.8386487817640398e-05, | |
| "loss": 0.0091, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.6333333333333333, | |
| "grad_norm": 0.057500596916280435, | |
| "learning_rate": 1.837339156584572e-05, | |
| "loss": 0.0006, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.6355555555555555, | |
| "grad_norm": 0.016550540186704814, | |
| "learning_rate": 1.8360247083564343e-05, | |
| "loss": 0.0003, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.6377777777777778, | |
| "grad_norm": 0.06541368486345062, | |
| "learning_rate": 1.834705444650809e-05, | |
| "loss": 0.0008, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.08832266991079811, | |
| "learning_rate": 1.8333813730666158e-05, | |
| "loss": 0.0008, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.6422222222222222, | |
| "grad_norm": 0.03524782743583213, | |
| "learning_rate": 1.8320525012304685e-05, | |
| "loss": 0.0005, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.6444444444444445, | |
| "grad_norm": 1.8794205176406664, | |
| "learning_rate": 1.8307188367966288e-05, | |
| "loss": 0.0187, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.6466666666666666, | |
| "grad_norm": 0.014844423467085928, | |
| "learning_rate": 1.8293803874469645e-05, | |
| "loss": 0.0002, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.6488888888888888, | |
| "grad_norm": 0.010222162836149195, | |
| "learning_rate": 1.8280371608909034e-05, | |
| "loss": 0.0003, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.6511111111111111, | |
| "grad_norm": 0.5614092073406145, | |
| "learning_rate": 1.8266891648653916e-05, | |
| "loss": 0.0057, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.6533333333333333, | |
| "grad_norm": 0.01297741818562359, | |
| "learning_rate": 1.8253364071348457e-05, | |
| "loss": 0.0004, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.6555555555555556, | |
| "grad_norm": 0.5168884986099888, | |
| "learning_rate": 1.8239788954911102e-05, | |
| "loss": 0.0326, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.6577777777777778, | |
| "grad_norm": 0.6163464375616924, | |
| "learning_rate": 1.8226166377534113e-05, | |
| "loss": 0.0259, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.048716941996478184, | |
| "learning_rate": 1.8212496417683135e-05, | |
| "loss": 0.0005, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.6622222222222223, | |
| "grad_norm": 0.055467938164609626, | |
| "learning_rate": 1.8198779154096735e-05, | |
| "loss": 0.0005, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.6644444444444444, | |
| "grad_norm": 1.0967104950928754, | |
| "learning_rate": 1.8185014665785936e-05, | |
| "loss": 0.0034, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.29453937302991706, | |
| "learning_rate": 1.8171203032033788e-05, | |
| "loss": 0.0017, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.6688888888888889, | |
| "grad_norm": 0.29139330435900607, | |
| "learning_rate": 1.8157344332394885e-05, | |
| "loss": 0.0033, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.6711111111111111, | |
| "grad_norm": 0.3176205472420609, | |
| "learning_rate": 1.814343864669493e-05, | |
| "loss": 0.0048, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.6733333333333333, | |
| "grad_norm": 0.06715748514888928, | |
| "learning_rate": 1.8129486055030255e-05, | |
| "loss": 0.001, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.6755555555555556, | |
| "grad_norm": 0.03486404041763582, | |
| "learning_rate": 1.8115486637767384e-05, | |
| "loss": 0.0005, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.6777777777777778, | |
| "grad_norm": 0.19155650982719616, | |
| "learning_rate": 1.8101440475542533e-05, | |
| "loss": 0.0017, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.01663378864478831, | |
| "learning_rate": 1.8087347649261183e-05, | |
| "loss": 0.0004, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.6822222222222222, | |
| "grad_norm": 0.013905433428486385, | |
| "learning_rate": 1.8073208240097598e-05, | |
| "loss": 0.0004, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.6844444444444444, | |
| "grad_norm": 0.012513681879387959, | |
| "learning_rate": 1.805902232949435e-05, | |
| "loss": 0.0004, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.6866666666666666, | |
| "grad_norm": 0.01103453340398427, | |
| "learning_rate": 1.8044789999161864e-05, | |
| "loss": 0.0003, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.6888888888888889, | |
| "grad_norm": 0.032867476038944785, | |
| "learning_rate": 1.8030511331077945e-05, | |
| "loss": 0.0006, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.6911111111111111, | |
| "grad_norm": 0.018617863432919445, | |
| "learning_rate": 1.8016186407487287e-05, | |
| "loss": 0.0005, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.6933333333333334, | |
| "grad_norm": 0.01940037917805814, | |
| "learning_rate": 1.8001815310901036e-05, | |
| "loss": 0.0004, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.6955555555555556, | |
| "grad_norm": 0.017455886946362617, | |
| "learning_rate": 1.7987398124096274e-05, | |
| "loss": 0.0004, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.6977777777777778, | |
| "grad_norm": 1.7821683316049801, | |
| "learning_rate": 1.7972934930115568e-05, | |
| "loss": 0.0063, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.0313130869534443, | |
| "learning_rate": 1.7958425812266493e-05, | |
| "loss": 0.0006, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.7022222222222222, | |
| "grad_norm": 0.0773273972736791, | |
| "learning_rate": 1.7943870854121126e-05, | |
| "loss": 0.0009, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.7044444444444444, | |
| "grad_norm": 0.05926540960947002, | |
| "learning_rate": 1.7929270139515606e-05, | |
| "loss": 0.0011, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.7066666666666667, | |
| "grad_norm": 0.6357868737964609, | |
| "learning_rate": 1.7914623752549606e-05, | |
| "loss": 0.0157, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.7088888888888889, | |
| "grad_norm": 0.012382616216716056, | |
| "learning_rate": 1.789993177758588e-05, | |
| "loss": 0.0004, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.7111111111111111, | |
| "grad_norm": 0.6997607013009971, | |
| "learning_rate": 1.7885194299249774e-05, | |
| "loss": 0.0288, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.7133333333333334, | |
| "grad_norm": 0.017802425682117262, | |
| "learning_rate": 1.787041140242872e-05, | |
| "loss": 0.0005, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.7155555555555555, | |
| "grad_norm": 0.022940108486145597, | |
| "learning_rate": 1.785558317227177e-05, | |
| "loss": 0.0005, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.7177777777777777, | |
| "grad_norm": 0.06501948023353454, | |
| "learning_rate": 1.7840709694189082e-05, | |
| "loss": 0.0008, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.018826765951076158, | |
| "learning_rate": 1.782579105385145e-05, | |
| "loss": 0.0003, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.7222222222222222, | |
| "grad_norm": 0.8693847240241009, | |
| "learning_rate": 1.7810827337189806e-05, | |
| "loss": 0.0176, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.7244444444444444, | |
| "grad_norm": 0.01158528241766916, | |
| "learning_rate": 1.7795818630394705e-05, | |
| "loss": 0.0003, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.7266666666666667, | |
| "grad_norm": 0.012953238973338926, | |
| "learning_rate": 1.7780765019915854e-05, | |
| "loss": 0.0003, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.7288888888888889, | |
| "grad_norm": 0.13363097863618223, | |
| "learning_rate": 1.776566659246161e-05, | |
| "loss": 0.0016, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.7311111111111112, | |
| "grad_norm": 0.020333860204620323, | |
| "learning_rate": 1.7750523434998454e-05, | |
| "loss": 0.0004, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.7333333333333333, | |
| "grad_norm": 0.03465349990517107, | |
| "learning_rate": 1.773533563475053e-05, | |
| "loss": 0.0005, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.7355555555555555, | |
| "grad_norm": 0.018035445023050833, | |
| "learning_rate": 1.772010327919912e-05, | |
| "loss": 0.0005, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.7377777777777778, | |
| "grad_norm": 0.011536550677576184, | |
| "learning_rate": 1.7704826456082137e-05, | |
| "loss": 0.0003, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.04063924006611042, | |
| "learning_rate": 1.768950525339362e-05, | |
| "loss": 0.0006, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.7422222222222222, | |
| "grad_norm": 0.07205814515426334, | |
| "learning_rate": 1.7674139759383253e-05, | |
| "loss": 0.0009, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.7444444444444445, | |
| "grad_norm": 0.035447086137769555, | |
| "learning_rate": 1.765873006255582e-05, | |
| "loss": 0.0007, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.7466666666666667, | |
| "grad_norm": 0.034945381142001616, | |
| "learning_rate": 1.764327625167072e-05, | |
| "loss": 0.0005, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.7488888888888889, | |
| "grad_norm": 0.9393792294740548, | |
| "learning_rate": 1.7627778415741437e-05, | |
| "loss": 0.0428, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.7511111111111111, | |
| "grad_norm": 0.2613311853585298, | |
| "learning_rate": 1.761223664403505e-05, | |
| "loss": 0.002, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.7533333333333333, | |
| "grad_norm": 0.02525971134733198, | |
| "learning_rate": 1.7596651026071708e-05, | |
| "loss": 0.0005, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.7555555555555555, | |
| "grad_norm": 0.5021933110166663, | |
| "learning_rate": 1.7581021651624097e-05, | |
| "loss": 0.0093, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.7577777777777778, | |
| "grad_norm": 0.2655607607436516, | |
| "learning_rate": 1.7565348610716963e-05, | |
| "loss": 0.0028, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.041929568367314644, | |
| "learning_rate": 1.754963199362654e-05, | |
| "loss": 0.0007, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.7622222222222222, | |
| "grad_norm": 0.030224663903822044, | |
| "learning_rate": 1.7533871890880088e-05, | |
| "loss": 0.0006, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.7644444444444445, | |
| "grad_norm": 0.2580604461447013, | |
| "learning_rate": 1.7518068393255324e-05, | |
| "loss": 0.0025, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.7666666666666667, | |
| "grad_norm": 1.0286164223904415, | |
| "learning_rate": 1.7502221591779932e-05, | |
| "loss": 0.0085, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.7688888888888888, | |
| "grad_norm": 0.016495690865293042, | |
| "learning_rate": 1.748633157773101e-05, | |
| "loss": 0.0004, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.7711111111111111, | |
| "grad_norm": 0.01401711124126451, | |
| "learning_rate": 1.7470398442634572e-05, | |
| "loss": 0.0002, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.7733333333333333, | |
| "grad_norm": 0.02338665150794283, | |
| "learning_rate": 1.7454422278264997e-05, | |
| "loss": 0.0004, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.7755555555555556, | |
| "grad_norm": 0.41343999375758606, | |
| "learning_rate": 1.7438403176644524e-05, | |
| "loss": 0.0057, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.7777777777777778, | |
| "grad_norm": 0.020138467473283024, | |
| "learning_rate": 1.74223412300427e-05, | |
| "loss": 0.0005, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.1260119479643109, | |
| "learning_rate": 1.7406236530975862e-05, | |
| "loss": 0.0013, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.7822222222222223, | |
| "grad_norm": 0.01910263004877557, | |
| "learning_rate": 1.7390089172206594e-05, | |
| "loss": 0.0004, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.7844444444444445, | |
| "grad_norm": 0.019251255169848696, | |
| "learning_rate": 1.7373899246743202e-05, | |
| "loss": 0.0004, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.7866666666666666, | |
| "grad_norm": 1.374613782427883, | |
| "learning_rate": 1.7357666847839186e-05, | |
| "loss": 0.0318, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.7888888888888889, | |
| "grad_norm": 0.0073541400794499045, | |
| "learning_rate": 1.734139206899267e-05, | |
| "loss": 0.0002, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.7911111111111111, | |
| "grad_norm": 0.05969515450696645, | |
| "learning_rate": 1.7325075003945902e-05, | |
| "loss": 0.0009, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.7933333333333333, | |
| "grad_norm": 0.011016610013281147, | |
| "learning_rate": 1.730871574668469e-05, | |
| "loss": 0.0003, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.7955555555555556, | |
| "grad_norm": 0.016460126625044567, | |
| "learning_rate": 1.729231439143787e-05, | |
| "loss": 0.0005, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.7977777777777778, | |
| "grad_norm": 0.019704945491741347, | |
| "learning_rate": 1.727587103267677e-05, | |
| "loss": 0.0005, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.19336072400042412, | |
| "learning_rate": 1.7259385765114634e-05, | |
| "loss": 0.0017, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.8022222222222222, | |
| "grad_norm": 0.026754345164991526, | |
| "learning_rate": 1.7242858683706122e-05, | |
| "loss": 0.0004, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.8044444444444444, | |
| "grad_norm": 0.03426383148141979, | |
| "learning_rate": 1.7226289883646727e-05, | |
| "loss": 0.0005, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.8066666666666666, | |
| "grad_norm": 0.021763530198138415, | |
| "learning_rate": 1.720967946037225e-05, | |
| "loss": 0.0005, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.8088888888888889, | |
| "grad_norm": 0.01942414258365435, | |
| "learning_rate": 1.7193027509558233e-05, | |
| "loss": 0.0005, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.8111111111111111, | |
| "grad_norm": 0.025698832353816627, | |
| "learning_rate": 1.7176334127119418e-05, | |
| "loss": 0.0005, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.8133333333333334, | |
| "grad_norm": 0.03976655206372822, | |
| "learning_rate": 1.7159599409209194e-05, | |
| "loss": 0.0007, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.8155555555555556, | |
| "grad_norm": 0.03253167783550585, | |
| "learning_rate": 1.7142823452219036e-05, | |
| "loss": 0.0005, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.8177777777777778, | |
| "grad_norm": 0.4663806913786246, | |
| "learning_rate": 1.7126006352777965e-05, | |
| "loss": 0.0069, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.11338495135499817, | |
| "learning_rate": 1.710914820775196e-05, | |
| "loss": 0.0013, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8222222222222222, | |
| "grad_norm": 0.19043992946634256, | |
| "learning_rate": 1.7092249114243453e-05, | |
| "loss": 0.0015, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.8244444444444444, | |
| "grad_norm": 0.03232831597933056, | |
| "learning_rate": 1.7075309169590708e-05, | |
| "loss": 0.0005, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.8266666666666667, | |
| "grad_norm": 0.03042525050700494, | |
| "learning_rate": 1.705832847136731e-05, | |
| "loss": 0.0006, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8288888888888889, | |
| "grad_norm": 0.029843600704805073, | |
| "learning_rate": 1.704130711738157e-05, | |
| "loss": 0.0005, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8311111111111111, | |
| "grad_norm": 0.09298415548740087, | |
| "learning_rate": 1.7024245205675986e-05, | |
| "loss": 0.0011, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 0.7381072072560813, | |
| "learning_rate": 1.7007142834526665e-05, | |
| "loss": 0.0027, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8355555555555556, | |
| "grad_norm": 0.030898000907047772, | |
| "learning_rate": 1.6990000102442748e-05, | |
| "loss": 0.0005, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8377777777777777, | |
| "grad_norm": 0.013810655617155203, | |
| "learning_rate": 1.697281710816587e-05, | |
| "loss": 0.0003, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.014278553140774343, | |
| "learning_rate": 1.6955593950669568e-05, | |
| "loss": 0.0004, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.8422222222222222, | |
| "grad_norm": 0.016978929102246836, | |
| "learning_rate": 1.6938330729158713e-05, | |
| "loss": 0.0004, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.8444444444444444, | |
| "grad_norm": 0.021488794588194672, | |
| "learning_rate": 1.692102754306895e-05, | |
| "loss": 0.0005, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.8466666666666667, | |
| "grad_norm": 0.010475882365762162, | |
| "learning_rate": 1.690368449206612e-05, | |
| "loss": 0.0003, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.8488888888888889, | |
| "grad_norm": 0.020903375463389794, | |
| "learning_rate": 1.6886301676045676e-05, | |
| "loss": 0.0004, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.8511111111111112, | |
| "grad_norm": 0.01232299162643352, | |
| "learning_rate": 1.6868879195132128e-05, | |
| "loss": 0.0003, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.8533333333333334, | |
| "grad_norm": 0.01379618707088667, | |
| "learning_rate": 1.6851417149678442e-05, | |
| "loss": 0.0003, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.8555555555555555, | |
| "grad_norm": 0.015107029459242761, | |
| "learning_rate": 1.6833915640265485e-05, | |
| "loss": 0.0004, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.8577777777777778, | |
| "grad_norm": 0.011297376367716301, | |
| "learning_rate": 1.6816374767701437e-05, | |
| "loss": 0.0003, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.01762300667191289, | |
| "learning_rate": 1.6798794633021192e-05, | |
| "loss": 0.0003, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.8622222222222222, | |
| "grad_norm": 0.015168920715436343, | |
| "learning_rate": 1.678117533748581e-05, | |
| "loss": 0.0003, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.8644444444444445, | |
| "grad_norm": 0.17789200811416378, | |
| "learning_rate": 1.6763516982581905e-05, | |
| "loss": 0.0024, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.8666666666666667, | |
| "grad_norm": 0.013426130783423165, | |
| "learning_rate": 1.6745819670021083e-05, | |
| "loss": 0.0004, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.8688888888888889, | |
| "grad_norm": 0.013974513871344695, | |
| "learning_rate": 1.6728083501739333e-05, | |
| "loss": 0.0003, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8711111111111111, | |
| "grad_norm": 0.04162266704610194, | |
| "learning_rate": 1.6710308579896462e-05, | |
| "loss": 0.0005, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8733333333333333, | |
| "grad_norm": 0.021989085013756154, | |
| "learning_rate": 1.669249500687549e-05, | |
| "loss": 0.0004, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.8755555555555555, | |
| "grad_norm": 0.026829178594480974, | |
| "learning_rate": 1.667464288528207e-05, | |
| "loss": 0.0004, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8777777777777778, | |
| "grad_norm": 0.038037996596902844, | |
| "learning_rate": 1.6656752317943888e-05, | |
| "loss": 0.0006, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.4877016583934509, | |
| "learning_rate": 1.6638823407910085e-05, | |
| "loss": 0.0222, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.8822222222222222, | |
| "grad_norm": 0.5213983063535248, | |
| "learning_rate": 1.6620856258450652e-05, | |
| "loss": 0.0274, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.8844444444444445, | |
| "grad_norm": 0.6951502649376107, | |
| "learning_rate": 1.6602850973055824e-05, | |
| "loss": 0.0164, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.8866666666666667, | |
| "grad_norm": 0.008529209353542937, | |
| "learning_rate": 1.6584807655435528e-05, | |
| "loss": 0.0002, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 0.06653222852728614, | |
| "learning_rate": 1.6566726409518722e-05, | |
| "loss": 0.0006, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.8911111111111111, | |
| "grad_norm": 0.5137753612237632, | |
| "learning_rate": 1.6548607339452853e-05, | |
| "loss": 0.0194, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.8933333333333333, | |
| "grad_norm": 0.5600986975785164, | |
| "learning_rate": 1.6530450549603223e-05, | |
| "loss": 0.0052, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.8955555555555555, | |
| "grad_norm": 0.020421625438739377, | |
| "learning_rate": 1.6512256144552407e-05, | |
| "loss": 0.0004, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.8977777777777778, | |
| "grad_norm": 0.010647580666273, | |
| "learning_rate": 1.6494024229099634e-05, | |
| "loss": 0.0003, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.05780837130598162, | |
| "learning_rate": 1.64757549082602e-05, | |
| "loss": 0.0009, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.9022222222222223, | |
| "grad_norm": 0.02331733319324902, | |
| "learning_rate": 1.645744828726484e-05, | |
| "loss": 0.0004, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.9044444444444445, | |
| "grad_norm": 0.7767368621634094, | |
| "learning_rate": 1.6439104471559157e-05, | |
| "loss": 0.0224, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.9066666666666666, | |
| "grad_norm": 0.03136543291914878, | |
| "learning_rate": 1.6420723566802982e-05, | |
| "loss": 0.0006, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.9088888888888889, | |
| "grad_norm": 0.04684603230289486, | |
| "learning_rate": 1.640230567886978e-05, | |
| "loss": 0.0007, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.9111111111111111, | |
| "grad_norm": 0.06344553689973946, | |
| "learning_rate": 1.6383850913846036e-05, | |
| "loss": 0.0012, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.9133333333333333, | |
| "grad_norm": 0.08639467137507505, | |
| "learning_rate": 1.6365359378030654e-05, | |
| "loss": 0.0012, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.9155555555555556, | |
| "grad_norm": 0.2280852084459664, | |
| "learning_rate": 1.6346831177934326e-05, | |
| "loss": 0.0025, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.9177777777777778, | |
| "grad_norm": 0.07067104867790554, | |
| "learning_rate": 1.632826642027894e-05, | |
| "loss": 0.001, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.7199108830320498, | |
| "learning_rate": 1.6309665211996936e-05, | |
| "loss": 0.0119, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9222222222222223, | |
| "grad_norm": 0.025795893461720176, | |
| "learning_rate": 1.6291027660230735e-05, | |
| "loss": 0.0003, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9244444444444444, | |
| "grad_norm": 0.048014295661047215, | |
| "learning_rate": 1.6272353872332075e-05, | |
| "loss": 0.0007, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9266666666666666, | |
| "grad_norm": 0.46726646702586105, | |
| "learning_rate": 1.625364395586142e-05, | |
| "loss": 0.0059, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9288888888888889, | |
| "grad_norm": 0.018994234855163952, | |
| "learning_rate": 1.6234898018587336e-05, | |
| "loss": 0.0003, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.9311111111111111, | |
| "grad_norm": 0.0739594515623708, | |
| "learning_rate": 1.6216116168485864e-05, | |
| "loss": 0.0009, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.9333333333333333, | |
| "grad_norm": 0.010635445796381671, | |
| "learning_rate": 1.61972985137399e-05, | |
| "loss": 0.0002, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.9355555555555556, | |
| "grad_norm": 0.03822091385480426, | |
| "learning_rate": 1.6178445162738577e-05, | |
| "loss": 0.0006, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9377777777777778, | |
| "grad_norm": 0.03432968303154165, | |
| "learning_rate": 1.6159556224076637e-05, | |
| "loss": 0.0006, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.07779016806822235, | |
| "learning_rate": 1.614063180655381e-05, | |
| "loss": 0.0013, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.9422222222222222, | |
| "grad_norm": 0.12579400330730645, | |
| "learning_rate": 1.612167201917417e-05, | |
| "loss": 0.0017, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.9444444444444444, | |
| "grad_norm": 0.10726643981512668, | |
| "learning_rate": 1.6102676971145543e-05, | |
| "loss": 0.0015, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.9466666666666667, | |
| "grad_norm": 0.021359001430812475, | |
| "learning_rate": 1.6083646771878826e-05, | |
| "loss": 0.0004, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.9488888888888889, | |
| "grad_norm": 0.1353006433211143, | |
| "learning_rate": 1.6064581530987408e-05, | |
| "loss": 0.0021, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.9511111111111111, | |
| "grad_norm": 0.04688664023675852, | |
| "learning_rate": 1.6045481358286516e-05, | |
| "loss": 0.0007, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.9533333333333334, | |
| "grad_norm": 0.01289267222879979, | |
| "learning_rate": 1.6026346363792565e-05, | |
| "loss": 0.0003, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.9555555555555556, | |
| "grad_norm": 0.01477339223555971, | |
| "learning_rate": 1.6007176657722567e-05, | |
| "loss": 0.0003, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.9577777777777777, | |
| "grad_norm": 0.008843252906498168, | |
| "learning_rate": 1.598797235049345e-05, | |
| "loss": 0.0002, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.01892672603800085, | |
| "learning_rate": 1.5968733552721462e-05, | |
| "loss": 0.0003, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.9622222222222222, | |
| "grad_norm": 0.5940865558653796, | |
| "learning_rate": 1.59494603752215e-05, | |
| "loss": 0.0118, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.9644444444444444, | |
| "grad_norm": 0.05129539287345838, | |
| "learning_rate": 1.5930152929006496e-05, | |
| "loss": 0.0008, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.9666666666666667, | |
| "grad_norm": 0.028529297832521044, | |
| "learning_rate": 1.5910811325286768e-05, | |
| "loss": 0.0005, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.9688888888888889, | |
| "grad_norm": 0.007939989745324281, | |
| "learning_rate": 1.5891435675469376e-05, | |
| "loss": 0.0002, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.9711111111111111, | |
| "grad_norm": 0.009613993217963796, | |
| "learning_rate": 1.587202609115749e-05, | |
| "loss": 0.0003, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.9733333333333334, | |
| "grad_norm": 0.08417634242575713, | |
| "learning_rate": 1.585258268414974e-05, | |
| "loss": 0.0009, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.9755555555555555, | |
| "grad_norm": 0.0072338826011778745, | |
| "learning_rate": 1.583310556643957e-05, | |
| "loss": 0.0002, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.9777777777777777, | |
| "grad_norm": 0.007599957758152161, | |
| "learning_rate": 1.58135948502146e-05, | |
| "loss": 0.0002, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.01617932200923873, | |
| "learning_rate": 1.5794050647855977e-05, | |
| "loss": 0.0003, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9822222222222222, | |
| "grad_norm": 0.009828407648935285, | |
| "learning_rate": 1.5774473071937725e-05, | |
| "loss": 0.0003, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9844444444444445, | |
| "grad_norm": 0.007951860465854223, | |
| "learning_rate": 1.57548622352261e-05, | |
| "loss": 0.0002, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9866666666666667, | |
| "grad_norm": 0.009068462617847741, | |
| "learning_rate": 1.5735218250678944e-05, | |
| "loss": 0.0002, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.9888888888888889, | |
| "grad_norm": 0.5460966234191948, | |
| "learning_rate": 1.5715541231445018e-05, | |
| "loss": 0.0126, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9911111111111112, | |
| "grad_norm": 0.010757409173275096, | |
| "learning_rate": 1.5695831290863367e-05, | |
| "loss": 0.0002, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.9933333333333333, | |
| "grad_norm": 0.009461891967446937, | |
| "learning_rate": 1.567608854246267e-05, | |
| "loss": 0.0002, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.9955555555555555, | |
| "grad_norm": 0.4520097975994242, | |
| "learning_rate": 1.5656313099960564e-05, | |
| "loss": 0.011, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.9977777777777778, | |
| "grad_norm": 0.19796145689133768, | |
| "learning_rate": 1.5636505077263017e-05, | |
| "loss": 0.0023, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.00822562852033268, | |
| "learning_rate": 1.561666458846365e-05, | |
| "loss": 0.0002, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.0022222222222221, | |
| "grad_norm": 0.025821635278883714, | |
| "learning_rate": 1.5596791747843083e-05, | |
| "loss": 0.0005, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.0044444444444445, | |
| "grad_norm": 0.60593936018183, | |
| "learning_rate": 1.5576886669868297e-05, | |
| "loss": 0.0239, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.0066666666666666, | |
| "grad_norm": 0.45761622683458714, | |
| "learning_rate": 1.5556949469191943e-05, | |
| "loss": 0.0095, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.008888888888889, | |
| "grad_norm": 0.029293031050829844, | |
| "learning_rate": 1.5536980260651705e-05, | |
| "loss": 0.0004, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.011111111111111, | |
| "grad_norm": 1.6004340152324048, | |
| "learning_rate": 1.5516979159269638e-05, | |
| "loss": 0.0161, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.0133333333333334, | |
| "grad_norm": 0.37977413942915167, | |
| "learning_rate": 1.5496946280251482e-05, | |
| "loss": 0.0045, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.0155555555555555, | |
| "grad_norm": 0.036509727818815514, | |
| "learning_rate": 1.5476881738986037e-05, | |
| "loss": 0.0006, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.0177777777777777, | |
| "grad_norm": 0.035601006787746046, | |
| "learning_rate": 1.545678565104445e-05, | |
| "loss": 0.0004, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.061125195495146084, | |
| "learning_rate": 1.5436658132179602e-05, | |
| "loss": 0.0005, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.0222222222222221, | |
| "grad_norm": 0.06196934168429523, | |
| "learning_rate": 1.54164992983254e-05, | |
| "loss": 0.0008, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0244444444444445, | |
| "grad_norm": 0.11437590767475016, | |
| "learning_rate": 1.5396309265596127e-05, | |
| "loss": 0.0011, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.0266666666666666, | |
| "grad_norm": 0.029052570156313278, | |
| "learning_rate": 1.5376088150285777e-05, | |
| "loss": 0.0004, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.028888888888889, | |
| "grad_norm": 0.04116964826297007, | |
| "learning_rate": 1.5355836068867365e-05, | |
| "loss": 0.0005, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.031111111111111, | |
| "grad_norm": 0.04001834503801157, | |
| "learning_rate": 1.5335553137992286e-05, | |
| "loss": 0.0005, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.0333333333333334, | |
| "grad_norm": 0.3911835441464241, | |
| "learning_rate": 1.5315239474489617e-05, | |
| "loss": 0.0048, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.0355555555555556, | |
| "grad_norm": 0.9926459616325984, | |
| "learning_rate": 1.5294895195365454e-05, | |
| "loss": 0.0161, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.0377777777777777, | |
| "grad_norm": 0.2751893251787431, | |
| "learning_rate": 1.5274520417802243e-05, | |
| "loss": 0.0029, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.014159725755644063, | |
| "learning_rate": 1.5254115259158095e-05, | |
| "loss": 0.0003, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.0422222222222222, | |
| "grad_norm": 0.12466956323275698, | |
| "learning_rate": 1.5233679836966122e-05, | |
| "loss": 0.0012, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.0444444444444445, | |
| "grad_norm": 0.04132880791398538, | |
| "learning_rate": 1.5213214268933745e-05, | |
| "loss": 0.0006, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.0466666666666666, | |
| "grad_norm": 0.24315491447320656, | |
| "learning_rate": 1.519271867294203e-05, | |
| "loss": 0.0034, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.048888888888889, | |
| "grad_norm": 0.007337060428486466, | |
| "learning_rate": 1.5172193167045e-05, | |
| "loss": 0.0002, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.051111111111111, | |
| "grad_norm": 0.04608726202215001, | |
| "learning_rate": 1.515163786946896e-05, | |
| "loss": 0.0004, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.0533333333333332, | |
| "grad_norm": 0.019874303573272013, | |
| "learning_rate": 1.5131052898611818e-05, | |
| "loss": 0.0002, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.0555555555555556, | |
| "grad_norm": 0.0120090942659895, | |
| "learning_rate": 1.5110438373042384e-05, | |
| "loss": 0.0003, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.0577777777777777, | |
| "grad_norm": 0.026519927776125937, | |
| "learning_rate": 1.5089794411499718e-05, | |
| "loss": 0.0003, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.03162835419145942, | |
| "learning_rate": 1.5069121132892432e-05, | |
| "loss": 0.0003, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.0622222222222222, | |
| "grad_norm": 0.026274701589366296, | |
| "learning_rate": 1.504841865629799e-05, | |
| "loss": 0.0003, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.0644444444444445, | |
| "grad_norm": 0.018208934858747503, | |
| "learning_rate": 1.502768710096204e-05, | |
| "loss": 0.0004, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.0666666666666667, | |
| "grad_norm": 0.027607304080729103, | |
| "learning_rate": 1.5006926586297725e-05, | |
| "loss": 0.0004, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.068888888888889, | |
| "grad_norm": 0.01563639185250947, | |
| "learning_rate": 1.4986137231885e-05, | |
| "loss": 0.0003, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.0711111111111111, | |
| "grad_norm": 0.013703229126924794, | |
| "learning_rate": 1.4965319157469926e-05, | |
| "loss": 0.0002, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.0733333333333333, | |
| "grad_norm": 0.06543102140428486, | |
| "learning_rate": 1.4944472482963993e-05, | |
| "loss": 0.0005, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.0755555555555556, | |
| "grad_norm": 0.08266503445761587, | |
| "learning_rate": 1.4923597328443423e-05, | |
| "loss": 0.0005, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.0777777777777777, | |
| "grad_norm": 0.5358723545644576, | |
| "learning_rate": 1.490269381414849e-05, | |
| "loss": 0.0171, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.02000459361886383, | |
| "learning_rate": 1.4881762060482814e-05, | |
| "loss": 0.0003, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.0822222222222222, | |
| "grad_norm": 0.021298063221063722, | |
| "learning_rate": 1.4860802188012677e-05, | |
| "loss": 0.0004, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.0844444444444445, | |
| "grad_norm": 0.027724599981135623, | |
| "learning_rate": 1.4839814317466317e-05, | |
| "loss": 0.0003, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.0866666666666667, | |
| "grad_norm": 0.02033127659019327, | |
| "learning_rate": 1.4818798569733246e-05, | |
| "loss": 0.0003, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.0888888888888888, | |
| "grad_norm": 0.018725800742508684, | |
| "learning_rate": 1.4797755065863553e-05, | |
| "loss": 0.0003, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.0911111111111111, | |
| "grad_norm": 0.04974479956949468, | |
| "learning_rate": 1.4776683927067189e-05, | |
| "loss": 0.0007, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.0933333333333333, | |
| "grad_norm": 0.011015481076944297, | |
| "learning_rate": 1.4755585274713289e-05, | |
| "loss": 0.0002, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.0955555555555556, | |
| "grad_norm": 0.013758292701495483, | |
| "learning_rate": 1.473445923032946e-05, | |
| "loss": 0.0002, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.0977777777777777, | |
| "grad_norm": 0.02094087189840709, | |
| "learning_rate": 1.47133059156011e-05, | |
| "loss": 0.0004, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.019534999275520708, | |
| "learning_rate": 1.4692125452370664e-05, | |
| "loss": 0.0003, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.1022222222222222, | |
| "grad_norm": 0.010109800651906193, | |
| "learning_rate": 1.4670917962636997e-05, | |
| "loss": 0.0002, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.1044444444444443, | |
| "grad_norm": 0.38343107493080064, | |
| "learning_rate": 1.4649683568554604e-05, | |
| "loss": 0.0094, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.1066666666666667, | |
| "grad_norm": 0.03312658306574886, | |
| "learning_rate": 1.4628422392432969e-05, | |
| "loss": 0.0004, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.1088888888888888, | |
| "grad_norm": 0.00912159228185532, | |
| "learning_rate": 1.4607134556735836e-05, | |
| "loss": 0.0002, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.1111111111111112, | |
| "grad_norm": 0.21214197589982675, | |
| "learning_rate": 1.4585820184080502e-05, | |
| "loss": 0.0013, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.1133333333333333, | |
| "grad_norm": 0.025301001325632417, | |
| "learning_rate": 1.4564479397237124e-05, | |
| "loss": 0.0003, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.1155555555555556, | |
| "grad_norm": 0.01063044884015372, | |
| "learning_rate": 1.4543112319127997e-05, | |
| "loss": 0.0002, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.1177777777777778, | |
| "grad_norm": 0.010073609604728887, | |
| "learning_rate": 1.4521719072826858e-05, | |
| "loss": 0.0002, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 0.09895469704715089, | |
| "learning_rate": 1.450029978155817e-05, | |
| "loss": 0.0011, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.1222222222222222, | |
| "grad_norm": 0.010099688813433509, | |
| "learning_rate": 1.4478854568696419e-05, | |
| "loss": 0.0002, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.1244444444444444, | |
| "grad_norm": 0.006262914795394865, | |
| "learning_rate": 1.4457383557765385e-05, | |
| "loss": 0.0001, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.1266666666666667, | |
| "grad_norm": 0.016448627026562204, | |
| "learning_rate": 1.4435886872437456e-05, | |
| "loss": 0.0002, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.1288888888888888, | |
| "grad_norm": 0.09352157605298969, | |
| "learning_rate": 1.4414364636532909e-05, | |
| "loss": 0.001, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.1311111111111112, | |
| "grad_norm": 0.018490614311629315, | |
| "learning_rate": 1.4392816974019176e-05, | |
| "loss": 0.0002, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.1333333333333333, | |
| "grad_norm": 0.007997541210979998, | |
| "learning_rate": 1.437124400901015e-05, | |
| "loss": 0.0001, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.1355555555555557, | |
| "grad_norm": 0.5240045661378968, | |
| "learning_rate": 1.4349645865765476e-05, | |
| "loss": 0.0146, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.1377777777777778, | |
| "grad_norm": 0.026094166216975346, | |
| "learning_rate": 1.4328022668689816e-05, | |
| "loss": 0.0003, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.1400000000000001, | |
| "grad_norm": 0.00988269830514442, | |
| "learning_rate": 1.4306374542332141e-05, | |
| "loss": 0.0002, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.1422222222222222, | |
| "grad_norm": 0.0395848326131681, | |
| "learning_rate": 1.4284701611385015e-05, | |
| "loss": 0.0005, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.1444444444444444, | |
| "grad_norm": 0.040450681991947325, | |
| "learning_rate": 1.4263004000683877e-05, | |
| "loss": 0.0002, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.1466666666666667, | |
| "grad_norm": 0.01924262223447334, | |
| "learning_rate": 1.4241281835206323e-05, | |
| "loss": 0.0003, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.1488888888888888, | |
| "grad_norm": 0.015806933562690747, | |
| "learning_rate": 1.4219535240071378e-05, | |
| "loss": 0.0002, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.1511111111111112, | |
| "grad_norm": 0.012055879386854998, | |
| "learning_rate": 1.4197764340538786e-05, | |
| "loss": 0.0003, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.1533333333333333, | |
| "grad_norm": 0.05244591399064711, | |
| "learning_rate": 1.417596926200828e-05, | |
| "loss": 0.0005, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.1555555555555554, | |
| "grad_norm": 0.018026412432253308, | |
| "learning_rate": 1.4154150130018867e-05, | |
| "loss": 0.0003, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.1577777777777778, | |
| "grad_norm": 0.010243283910133827, | |
| "learning_rate": 1.4132307070248094e-05, | |
| "loss": 0.0002, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.024563366657143346, | |
| "learning_rate": 1.4110440208511345e-05, | |
| "loss": 0.0004, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.1622222222222223, | |
| "grad_norm": 0.03262386391024997, | |
| "learning_rate": 1.4088549670761084e-05, | |
| "loss": 0.0005, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.1644444444444444, | |
| "grad_norm": 0.013765607212224739, | |
| "learning_rate": 1.4066635583086167e-05, | |
| "loss": 0.0002, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.1666666666666667, | |
| "grad_norm": 0.03211414281068978, | |
| "learning_rate": 1.4044698071711082e-05, | |
| "loss": 0.0004, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.1688888888888889, | |
| "grad_norm": 0.06428704546629092, | |
| "learning_rate": 1.4022737262995248e-05, | |
| "loss": 0.0005, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.1711111111111112, | |
| "grad_norm": 0.02919385112936442, | |
| "learning_rate": 1.4000753283432267e-05, | |
| "loss": 0.0003, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.1733333333333333, | |
| "grad_norm": 0.01340143096239381, | |
| "learning_rate": 1.397874625964921e-05, | |
| "loss": 0.0002, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.1755555555555555, | |
| "grad_norm": 0.016293339279987835, | |
| "learning_rate": 1.395671631840588e-05, | |
| "loss": 0.0003, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.1777777777777778, | |
| "grad_norm": 0.006969876453146651, | |
| "learning_rate": 1.3934663586594086e-05, | |
| "loss": 0.0001, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 0.00837422727178899, | |
| "learning_rate": 1.3912588191236904e-05, | |
| "loss": 0.0002, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.1822222222222223, | |
| "grad_norm": 0.011685300835379286, | |
| "learning_rate": 1.3890490259487957e-05, | |
| "loss": 0.0001, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.1844444444444444, | |
| "grad_norm": 0.30336746624711075, | |
| "learning_rate": 1.3868369918630675e-05, | |
| "loss": 0.0045, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.1866666666666668, | |
| "grad_norm": 0.26356489759822443, | |
| "learning_rate": 1.3846227296077568e-05, | |
| "loss": 0.0027, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.1888888888888889, | |
| "grad_norm": 0.10363821316702906, | |
| "learning_rate": 1.3824062519369483e-05, | |
| "loss": 0.0007, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.1911111111111112, | |
| "grad_norm": 0.15429269315362823, | |
| "learning_rate": 1.3801875716174874e-05, | |
| "loss": 0.0017, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.1933333333333334, | |
| "grad_norm": 0.005967382796351075, | |
| "learning_rate": 1.3779667014289067e-05, | |
| "loss": 0.0001, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.1955555555555555, | |
| "grad_norm": 0.028871426051085274, | |
| "learning_rate": 1.3757436541633529e-05, | |
| "loss": 0.0002, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.1977777777777778, | |
| "grad_norm": 0.016876673178325433, | |
| "learning_rate": 1.3735184426255117e-05, | |
| "loss": 0.0003, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 0.0065047485944187125, | |
| "learning_rate": 1.371291079632536e-05, | |
| "loss": 0.0001, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.2022222222222223, | |
| "grad_norm": 0.10242644248148282, | |
| "learning_rate": 1.3690615780139703e-05, | |
| "loss": 0.0009, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.2044444444444444, | |
| "grad_norm": 0.007881695085592156, | |
| "learning_rate": 1.3668299506116772e-05, | |
| "loss": 0.0001, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.2066666666666666, | |
| "grad_norm": 0.005287243489641969, | |
| "learning_rate": 1.364596210279765e-05, | |
| "loss": 0.0001, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.208888888888889, | |
| "grad_norm": 0.0067160740878849014, | |
| "learning_rate": 1.3623603698845115e-05, | |
| "loss": 0.0002, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.211111111111111, | |
| "grad_norm": 0.7187911735401995, | |
| "learning_rate": 1.3601224423042906e-05, | |
| "loss": 0.0074, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.2133333333333334, | |
| "grad_norm": 0.005250381899508567, | |
| "learning_rate": 1.357882440429499e-05, | |
| "loss": 0.0001, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.2155555555555555, | |
| "grad_norm": 0.008005697104459034, | |
| "learning_rate": 1.3556403771624809e-05, | |
| "loss": 0.0002, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.2177777777777778, | |
| "grad_norm": 0.018296661958173248, | |
| "learning_rate": 1.3533962654174542e-05, | |
| "loss": 0.0003, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.1342964442752511, | |
| "learning_rate": 1.3511501181204354e-05, | |
| "loss": 0.0009, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.2222222222222223, | |
| "grad_norm": 0.014105885030842489, | |
| "learning_rate": 1.348901948209167e-05, | |
| "loss": 0.0002, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2244444444444444, | |
| "grad_norm": 0.03375088301664795, | |
| "learning_rate": 1.3466517686330401e-05, | |
| "loss": 0.0001, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.2266666666666666, | |
| "grad_norm": 0.01571550714559194, | |
| "learning_rate": 1.344399592353023e-05, | |
| "loss": 0.0002, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.228888888888889, | |
| "grad_norm": 0.7796593974087448, | |
| "learning_rate": 1.3421454323415837e-05, | |
| "loss": 0.0066, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.231111111111111, | |
| "grad_norm": 0.03501162543147225, | |
| "learning_rate": 1.3398893015826166e-05, | |
| "loss": 0.0004, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.2333333333333334, | |
| "grad_norm": 0.03477928209126996, | |
| "learning_rate": 1.337631213071369e-05, | |
| "loss": 0.0003, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.2355555555555555, | |
| "grad_norm": 0.048912037286620164, | |
| "learning_rate": 1.3353711798143624e-05, | |
| "loss": 0.0003, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.2377777777777779, | |
| "grad_norm": 0.12291192245937141, | |
| "learning_rate": 1.333109214829322e-05, | |
| "loss": 0.0008, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.04426638663990192, | |
| "learning_rate": 1.3308453311450987e-05, | |
| "loss": 0.0003, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.2422222222222223, | |
| "grad_norm": 0.20872622490585263, | |
| "learning_rate": 1.328579541801595e-05, | |
| "loss": 0.0011, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.2444444444444445, | |
| "grad_norm": 0.20312667273998455, | |
| "learning_rate": 1.3263118598496905e-05, | |
| "loss": 0.0015, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.2466666666666666, | |
| "grad_norm": 0.07933114459404779, | |
| "learning_rate": 1.324042298351166e-05, | |
| "loss": 0.0007, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.248888888888889, | |
| "grad_norm": 0.02558960977196579, | |
| "learning_rate": 1.321770870378628e-05, | |
| "loss": 0.0003, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.251111111111111, | |
| "grad_norm": 0.028876532871827718, | |
| "learning_rate": 1.3194975890154344e-05, | |
| "loss": 0.0003, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.2533333333333334, | |
| "grad_norm": 0.4803662466261829, | |
| "learning_rate": 1.3172224673556186e-05, | |
| "loss": 0.0035, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.2555555555555555, | |
| "grad_norm": 0.009915399555400467, | |
| "learning_rate": 1.3149455185038132e-05, | |
| "loss": 0.0002, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.2577777777777777, | |
| "grad_norm": 0.12429811425071294, | |
| "learning_rate": 1.3126667555751761e-05, | |
| "loss": 0.0007, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.20056345377856585, | |
| "learning_rate": 1.3103861916953142e-05, | |
| "loss": 0.0011, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.2622222222222224, | |
| "grad_norm": 0.12304150394147376, | |
| "learning_rate": 1.3081038400002078e-05, | |
| "loss": 0.0008, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.2644444444444445, | |
| "grad_norm": 0.163958586101712, | |
| "learning_rate": 1.3058197136361344e-05, | |
| "loss": 0.0012, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.2666666666666666, | |
| "grad_norm": 1.8161783611930993, | |
| "learning_rate": 1.3035338257595946e-05, | |
| "loss": 0.0269, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.268888888888889, | |
| "grad_norm": 0.006728456562315674, | |
| "learning_rate": 1.3012461895372343e-05, | |
| "loss": 0.0001, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.271111111111111, | |
| "grad_norm": 0.005123454577859645, | |
| "learning_rate": 1.2989568181457704e-05, | |
| "loss": 0.0001, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.2733333333333334, | |
| "grad_norm": 0.7706458856752144, | |
| "learning_rate": 1.296665724771914e-05, | |
| "loss": 0.0257, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.2755555555555556, | |
| "grad_norm": 0.010076290374005092, | |
| "learning_rate": 1.2943729226122952e-05, | |
| "loss": 0.0001, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.2777777777777777, | |
| "grad_norm": 0.007988004773424125, | |
| "learning_rate": 1.2920784248733857e-05, | |
| "loss": 0.0001, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.06885539627001369, | |
| "learning_rate": 1.2897822447714247e-05, | |
| "loss": 0.0007, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.2822222222222222, | |
| "grad_norm": 1.1948735365301388, | |
| "learning_rate": 1.2874843955323418e-05, | |
| "loss": 0.016, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.2844444444444445, | |
| "grad_norm": 0.01193248340737645, | |
| "learning_rate": 1.2851848903916792e-05, | |
| "loss": 0.0002, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.2866666666666666, | |
| "grad_norm": 0.06978396718503933, | |
| "learning_rate": 1.2828837425945193e-05, | |
| "loss": 0.0009, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.2888888888888888, | |
| "grad_norm": 0.023860085037097106, | |
| "learning_rate": 1.2805809653954045e-05, | |
| "loss": 0.0003, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.291111111111111, | |
| "grad_norm": 0.5563026854317613, | |
| "learning_rate": 1.2782765720582634e-05, | |
| "loss": 0.0104, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.2933333333333334, | |
| "grad_norm": 1.0937489342228541, | |
| "learning_rate": 1.275970575856333e-05, | |
| "loss": 0.0072, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.2955555555555556, | |
| "grad_norm": 0.5300043852092995, | |
| "learning_rate": 1.2736629900720832e-05, | |
| "loss": 0.0025, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.2977777777777777, | |
| "grad_norm": 0.020831195016286372, | |
| "learning_rate": 1.271353827997139e-05, | |
| "loss": 0.0004, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.05333156702528884, | |
| "learning_rate": 1.2690431029322057e-05, | |
| "loss": 0.0006, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.3022222222222222, | |
| "grad_norm": 0.13103974511805142, | |
| "learning_rate": 1.266730828186991e-05, | |
| "loss": 0.0012, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.3044444444444445, | |
| "grad_norm": 0.013299197540789539, | |
| "learning_rate": 1.2644170170801288e-05, | |
| "loss": 0.0002, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.3066666666666666, | |
| "grad_norm": 0.19801829338460458, | |
| "learning_rate": 1.2621016829391022e-05, | |
| "loss": 0.0024, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.3088888888888888, | |
| "grad_norm": 0.09722982734381252, | |
| "learning_rate": 1.2597848391001675e-05, | |
| "loss": 0.001, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.3111111111111111, | |
| "grad_norm": 0.30095225198994546, | |
| "learning_rate": 1.257466498908276e-05, | |
| "loss": 0.0045, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.3133333333333335, | |
| "grad_norm": 0.1250220735304216, | |
| "learning_rate": 1.2551466757169984e-05, | |
| "loss": 0.0013, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.3155555555555556, | |
| "grad_norm": 0.06546740704187415, | |
| "learning_rate": 1.2528253828884473e-05, | |
| "loss": 0.0008, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.3177777777777777, | |
| "grad_norm": 0.18068332418986313, | |
| "learning_rate": 1.2505026337932005e-05, | |
| "loss": 0.002, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.7031153962535911, | |
| "learning_rate": 1.248178441810224e-05, | |
| "loss": 0.0064, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.3222222222222222, | |
| "grad_norm": 0.022706666477180018, | |
| "learning_rate": 1.2458528203267945e-05, | |
| "loss": 0.0004, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.3244444444444445, | |
| "grad_norm": 0.21914911800572423, | |
| "learning_rate": 1.2435257827384224e-05, | |
| "loss": 0.0026, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.3266666666666667, | |
| "grad_norm": 0.08887090548712598, | |
| "learning_rate": 1.2411973424487751e-05, | |
| "loss": 0.0009, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.3288888888888888, | |
| "grad_norm": 0.015683694037267062, | |
| "learning_rate": 1.2388675128696001e-05, | |
| "loss": 0.0003, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.3311111111111111, | |
| "grad_norm": 0.011744189275124894, | |
| "learning_rate": 1.236536307420646e-05, | |
| "loss": 0.0002, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.26429959048670565, | |
| "learning_rate": 1.2342037395295871e-05, | |
| "loss": 0.0021, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.3355555555555556, | |
| "grad_norm": 0.02689272493166622, | |
| "learning_rate": 1.2318698226319452e-05, | |
| "loss": 0.0003, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.3377777777777777, | |
| "grad_norm": 0.01993739120276598, | |
| "learning_rate": 1.2295345701710124e-05, | |
| "loss": 0.0003, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 0.0069023072201245695, | |
| "learning_rate": 1.2271979955977733e-05, | |
| "loss": 0.0001, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.3422222222222222, | |
| "grad_norm": 0.0058312561229769905, | |
| "learning_rate": 1.2248601123708279e-05, | |
| "loss": 0.0001, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.3444444444444446, | |
| "grad_norm": 0.007722329309202313, | |
| "learning_rate": 1.2225209339563144e-05, | |
| "loss": 0.0002, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.3466666666666667, | |
| "grad_norm": 0.010449716514657231, | |
| "learning_rate": 1.2201804738278311e-05, | |
| "loss": 0.0002, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.3488888888888888, | |
| "grad_norm": 0.007689763202479693, | |
| "learning_rate": 1.2178387454663587e-05, | |
| "loss": 0.0002, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.3511111111111112, | |
| "grad_norm": 0.007174565011204396, | |
| "learning_rate": 1.2154957623601831e-05, | |
| "loss": 0.0002, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.3533333333333333, | |
| "grad_norm": 0.006370823781154833, | |
| "learning_rate": 1.2131515380048171e-05, | |
| "loss": 0.0002, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.3555555555555556, | |
| "grad_norm": 0.006738308538376243, | |
| "learning_rate": 1.2108060859029233e-05, | |
| "loss": 0.0001, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.3577777777777778, | |
| "grad_norm": 0.00798414464325406, | |
| "learning_rate": 1.2084594195642367e-05, | |
| "loss": 0.0001, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 0.017950593880835447, | |
| "learning_rate": 1.2061115525054855e-05, | |
| "loss": 0.0002, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.3622222222222222, | |
| "grad_norm": 0.00778728718717965, | |
| "learning_rate": 1.2037624982503135e-05, | |
| "loss": 0.0001, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.3644444444444446, | |
| "grad_norm": 0.006518564156370815, | |
| "learning_rate": 1.2014122703292047e-05, | |
| "loss": 0.0001, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.3666666666666667, | |
| "grad_norm": 0.015050422076109854, | |
| "learning_rate": 1.1990608822794007e-05, | |
| "loss": 0.0003, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.3688888888888888, | |
| "grad_norm": 0.013855121873446856, | |
| "learning_rate": 1.1967083476448282e-05, | |
| "loss": 0.0002, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.3711111111111112, | |
| "grad_norm": 0.27153605948071297, | |
| "learning_rate": 1.1943546799760161e-05, | |
| "loss": 0.0016, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.3733333333333333, | |
| "grad_norm": 0.007018350279333277, | |
| "learning_rate": 1.1919998928300203e-05, | |
| "loss": 0.0001, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.3755555555555556, | |
| "grad_norm": 0.008358442295345079, | |
| "learning_rate": 1.1896439997703446e-05, | |
| "loss": 0.0002, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.3777777777777778, | |
| "grad_norm": 0.00859544351459007, | |
| "learning_rate": 1.1872870143668635e-05, | |
| "loss": 0.0001, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.10864000520677959, | |
| "learning_rate": 1.1849289501957429e-05, | |
| "loss": 0.0007, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.3822222222222222, | |
| "grad_norm": 0.006325356147408436, | |
| "learning_rate": 1.182569820839362e-05, | |
| "loss": 0.0001, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.3844444444444444, | |
| "grad_norm": 0.006737148566459628, | |
| "learning_rate": 1.1802096398862359e-05, | |
| "loss": 0.0001, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.3866666666666667, | |
| "grad_norm": 0.13051592968700335, | |
| "learning_rate": 1.1778484209309368e-05, | |
| "loss": 0.0007, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.3888888888888888, | |
| "grad_norm": 0.029720751684129163, | |
| "learning_rate": 1.1754861775740163e-05, | |
| "loss": 0.0002, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.3911111111111112, | |
| "grad_norm": 0.007134341445893347, | |
| "learning_rate": 1.1731229234219253e-05, | |
| "loss": 0.0002, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.3933333333333333, | |
| "grad_norm": 0.007223367129925694, | |
| "learning_rate": 1.1707586720869375e-05, | |
| "loss": 0.0001, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.3955555555555557, | |
| "grad_norm": 0.005687397111079098, | |
| "learning_rate": 1.168393437187071e-05, | |
| "loss": 0.0001, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.3977777777777778, | |
| "grad_norm": 0.19870462159256616, | |
| "learning_rate": 1.166027232346008e-05, | |
| "loss": 0.001, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.013255892717797011, | |
| "learning_rate": 1.1636600711930184e-05, | |
| "loss": 0.0002, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.4022222222222223, | |
| "grad_norm": 0.019427684603492767, | |
| "learning_rate": 1.1612919673628798e-05, | |
| "loss": 0.0002, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.4044444444444444, | |
| "grad_norm": 0.013839500154355188, | |
| "learning_rate": 1.1589229344958e-05, | |
| "loss": 0.0002, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.4066666666666667, | |
| "grad_norm": 0.03722306751150982, | |
| "learning_rate": 1.1565529862373382e-05, | |
| "loss": 0.0002, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.4088888888888889, | |
| "grad_norm": 0.03465678385745218, | |
| "learning_rate": 1.154182136238326e-05, | |
| "loss": 0.0004, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.411111111111111, | |
| "grad_norm": 0.011812611181893808, | |
| "learning_rate": 1.1518103981547889e-05, | |
| "loss": 0.0001, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.4133333333333333, | |
| "grad_norm": 0.011226077687576726, | |
| "learning_rate": 1.1494377856478674e-05, | |
| "loss": 0.0002, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.4155555555555557, | |
| "grad_norm": 0.005820169207423803, | |
| "learning_rate": 1.1470643123837395e-05, | |
| "loss": 0.0001, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.4177777777777778, | |
| "grad_norm": 0.011075363264309248, | |
| "learning_rate": 1.1446899920335407e-05, | |
| "loss": 0.0001, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.008969108133242247, | |
| "learning_rate": 1.1423148382732854e-05, | |
| "loss": 0.0002, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.4222222222222223, | |
| "grad_norm": 0.008534814214197597, | |
| "learning_rate": 1.1399388647837888e-05, | |
| "loss": 0.0001, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.4244444444444444, | |
| "grad_norm": 0.005455356885820614, | |
| "learning_rate": 1.1375620852505878e-05, | |
| "loss": 0.0001, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.4266666666666667, | |
| "grad_norm": 0.008590844895721213, | |
| "learning_rate": 1.135184513363862e-05, | |
| "loss": 0.0001, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.4288888888888889, | |
| "grad_norm": 0.005681794603748044, | |
| "learning_rate": 1.1328061628183546e-05, | |
| "loss": 0.0001, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.431111111111111, | |
| "grad_norm": 0.0055903065738202445, | |
| "learning_rate": 1.130427047313294e-05, | |
| "loss": 0.0001, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.4333333333333333, | |
| "grad_norm": 0.04934415845776409, | |
| "learning_rate": 1.1280471805523153e-05, | |
| "loss": 0.0004, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.4355555555555555, | |
| "grad_norm": 0.0058865634966226675, | |
| "learning_rate": 1.1256665762433798e-05, | |
| "loss": 0.0001, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.4377777777777778, | |
| "grad_norm": 0.03155054912536503, | |
| "learning_rate": 1.123285248098698e-05, | |
| "loss": 0.0002, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.890735397167982, | |
| "learning_rate": 1.1209032098346493e-05, | |
| "loss": 0.0067, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.4422222222222223, | |
| "grad_norm": 0.005352132973540954, | |
| "learning_rate": 1.118520475171703e-05, | |
| "loss": 0.0001, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.4444444444444444, | |
| "grad_norm": 0.00565005068198891, | |
| "learning_rate": 1.1161370578343398e-05, | |
| "loss": 0.0001, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.4466666666666668, | |
| "grad_norm": 0.013564129609136631, | |
| "learning_rate": 1.1137529715509736e-05, | |
| "loss": 0.0002, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.448888888888889, | |
| "grad_norm": 0.007085073069766104, | |
| "learning_rate": 1.1113682300538702e-05, | |
| "loss": 0.0001, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.451111111111111, | |
| "grad_norm": 0.005847120753996041, | |
| "learning_rate": 1.1089828470790694e-05, | |
| "loss": 0.0001, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.4533333333333334, | |
| "grad_norm": 0.029702454581616196, | |
| "learning_rate": 1.1065968363663069e-05, | |
| "loss": 0.0003, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.4555555555555555, | |
| "grad_norm": 0.08039601075179509, | |
| "learning_rate": 1.1042102116589331e-05, | |
| "loss": 0.0004, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.4577777777777778, | |
| "grad_norm": 0.6667089965245326, | |
| "learning_rate": 1.1018229867038358e-05, | |
| "loss": 0.0045, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.007073609892995615, | |
| "learning_rate": 1.0994351752513593e-05, | |
| "loss": 0.0002, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.462222222222222, | |
| "grad_norm": 0.009420414391606739, | |
| "learning_rate": 1.0970467910552267e-05, | |
| "loss": 0.0002, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.4644444444444444, | |
| "grad_norm": 0.0070740021000240605, | |
| "learning_rate": 1.0946578478724603e-05, | |
| "loss": 0.0001, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.4666666666666668, | |
| "grad_norm": 0.006165147198691562, | |
| "learning_rate": 1.092268359463302e-05, | |
| "loss": 0.0001, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.468888888888889, | |
| "grad_norm": 0.008226038016569211, | |
| "learning_rate": 1.0898783395911341e-05, | |
| "loss": 0.0001, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.471111111111111, | |
| "grad_norm": 0.0060554341320594035, | |
| "learning_rate": 1.0874878020223994e-05, | |
| "loss": 0.0001, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.4733333333333334, | |
| "grad_norm": 0.03013732594027257, | |
| "learning_rate": 1.085096760526524e-05, | |
| "loss": 0.0004, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.4755555555555555, | |
| "grad_norm": 0.19345883271092537, | |
| "learning_rate": 1.0827052288758357e-05, | |
| "loss": 0.0014, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.4777777777777779, | |
| "grad_norm": 0.039114520368266466, | |
| "learning_rate": 1.0803132208454858e-05, | |
| "loss": 0.0003, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.008108416680241867, | |
| "learning_rate": 1.077920750213369e-05, | |
| "loss": 0.0001, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.482222222222222, | |
| "grad_norm": 0.10258334281946903, | |
| "learning_rate": 1.0755278307600459e-05, | |
| "loss": 0.0006, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.4844444444444445, | |
| "grad_norm": 0.5869357491300612, | |
| "learning_rate": 1.0731344762686606e-05, | |
| "loss": 0.0082, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.4866666666666668, | |
| "grad_norm": 0.006944121725113849, | |
| "learning_rate": 1.0707407005248647e-05, | |
| "loss": 0.0001, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.488888888888889, | |
| "grad_norm": 0.027567509231424862, | |
| "learning_rate": 1.068346517316735e-05, | |
| "loss": 0.0003, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.491111111111111, | |
| "grad_norm": 0.6570177618615247, | |
| "learning_rate": 1.0659519404346955e-05, | |
| "loss": 0.007, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.4933333333333334, | |
| "grad_norm": 0.286657791540923, | |
| "learning_rate": 1.0635569836714384e-05, | |
| "loss": 0.0022, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.4955555555555555, | |
| "grad_norm": 0.9868251226116292, | |
| "learning_rate": 1.0611616608218429e-05, | |
| "loss": 0.0196, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.4977777777777779, | |
| "grad_norm": 0.0062657710230512725, | |
| "learning_rate": 1.058765985682898e-05, | |
| "loss": 0.0001, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.006099056602990665, | |
| "learning_rate": 1.0563699720536209e-05, | |
| "loss": 0.0001, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.5022222222222221, | |
| "grad_norm": 0.0063827116639828055, | |
| "learning_rate": 1.0539736337349792e-05, | |
| "loss": 0.0001, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.5044444444444445, | |
| "grad_norm": 0.009075599648609324, | |
| "learning_rate": 1.0515769845298106e-05, | |
| "loss": 0.0001, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.5066666666666668, | |
| "grad_norm": 0.007118170949877647, | |
| "learning_rate": 1.0491800382427429e-05, | |
| "loss": 0.0001, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.508888888888889, | |
| "grad_norm": 0.042898539709834, | |
| "learning_rate": 1.0467828086801158e-05, | |
| "loss": 0.0003, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.511111111111111, | |
| "grad_norm": 0.006237798870374292, | |
| "learning_rate": 1.0443853096499e-05, | |
| "loss": 0.0001, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.5133333333333332, | |
| "grad_norm": 0.2907587416622262, | |
| "learning_rate": 1.0419875549616196e-05, | |
| "loss": 0.0007, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.5155555555555555, | |
| "grad_norm": 0.009093183287340576, | |
| "learning_rate": 1.0395895584262696e-05, | |
| "loss": 0.0001, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.517777777777778, | |
| "grad_norm": 0.016858494829957302, | |
| "learning_rate": 1.0371913338562391e-05, | |
| "loss": 0.0003, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.01634219938618174, | |
| "learning_rate": 1.03479289506523e-05, | |
| "loss": 0.0002, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.5222222222222221, | |
| "grad_norm": 0.03621135154486156, | |
| "learning_rate": 1.032394255868179e-05, | |
| "loss": 0.0002, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.5244444444444445, | |
| "grad_norm": 0.01052742733632145, | |
| "learning_rate": 1.0299954300811763e-05, | |
| "loss": 0.0002, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.5266666666666666, | |
| "grad_norm": 0.4699850917382568, | |
| "learning_rate": 1.0275964315213873e-05, | |
| "loss": 0.0142, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.528888888888889, | |
| "grad_norm": 0.010455275086931115, | |
| "learning_rate": 1.0251972740069724e-05, | |
| "loss": 0.0002, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.531111111111111, | |
| "grad_norm": 0.020478441838448495, | |
| "learning_rate": 1.022797971357008e-05, | |
| "loss": 0.0003, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.5333333333333332, | |
| "grad_norm": 0.005668167214918058, | |
| "learning_rate": 1.0203985373914056e-05, | |
| "loss": 0.0001, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.5355555555555556, | |
| "grad_norm": 0.3130337774352449, | |
| "learning_rate": 1.0179989859308337e-05, | |
| "loss": 0.0031, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.537777777777778, | |
| "grad_norm": 0.015936025484749476, | |
| "learning_rate": 1.0155993307966372e-05, | |
| "loss": 0.0002, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 0.011536961635149265, | |
| "learning_rate": 1.013199585810759e-05, | |
| "loss": 0.0002, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.5422222222222222, | |
| "grad_norm": 0.10720197509122546, | |
| "learning_rate": 1.0107997647956587e-05, | |
| "loss": 0.0005, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.5444444444444443, | |
| "grad_norm": 0.16780727597168332, | |
| "learning_rate": 1.0083998815742335e-05, | |
| "loss": 0.0015, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.5466666666666666, | |
| "grad_norm": 0.009725719978170538, | |
| "learning_rate": 1.0059999499697403e-05, | |
| "loss": 0.0001, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.548888888888889, | |
| "grad_norm": 0.011376207402560755, | |
| "learning_rate": 1.0035999838057133e-05, | |
| "loss": 0.0002, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.551111111111111, | |
| "grad_norm": 0.04427351590785059, | |
| "learning_rate": 1.0011999969058867e-05, | |
| "loss": 0.0005, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.5533333333333332, | |
| "grad_norm": 0.0092319270812557, | |
| "learning_rate": 9.988000030941134e-06, | |
| "loss": 0.0002, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 0.030573441985318268, | |
| "learning_rate": 9.964000161942867e-06, | |
| "loss": 0.0003, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.557777777777778, | |
| "grad_norm": 0.04320358477217055, | |
| "learning_rate": 9.940000500302599e-06, | |
| "loss": 0.0005, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.010954938085626557, | |
| "learning_rate": 9.916001184257668e-06, | |
| "loss": 0.0002, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.5622222222222222, | |
| "grad_norm": 0.06971972370964201, | |
| "learning_rate": 9.892002352043417e-06, | |
| "loss": 0.0008, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.5644444444444443, | |
| "grad_norm": 0.07083461851710443, | |
| "learning_rate": 9.868004141892412e-06, | |
| "loss": 0.0002, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.5666666666666667, | |
| "grad_norm": 0.006054969478712078, | |
| "learning_rate": 9.84400669203363e-06, | |
| "loss": 0.0001, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.568888888888889, | |
| "grad_norm": 0.030010971823964505, | |
| "learning_rate": 9.820010140691668e-06, | |
| "loss": 0.0004, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.5711111111111111, | |
| "grad_norm": 0.06663871047589909, | |
| "learning_rate": 9.79601462608595e-06, | |
| "loss": 0.0005, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.5733333333333333, | |
| "grad_norm": 0.006719976645815689, | |
| "learning_rate": 9.772020286429922e-06, | |
| "loss": 0.0001, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.5755555555555556, | |
| "grad_norm": 0.006303076608827912, | |
| "learning_rate": 9.748027259930276e-06, | |
| "loss": 0.0001, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.5777777777777777, | |
| "grad_norm": 1.3820169283645465, | |
| "learning_rate": 9.72403568478613e-06, | |
| "loss": 0.0293, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.5467955274702471, | |
| "learning_rate": 9.70004569918824e-06, | |
| "loss": 0.0038, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.5822222222222222, | |
| "grad_norm": 0.014902292676128305, | |
| "learning_rate": 9.676057441318212e-06, | |
| "loss": 0.0002, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.5844444444444443, | |
| "grad_norm": 0.1591464017388678, | |
| "learning_rate": 9.652071049347703e-06, | |
| "loss": 0.0014, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.5866666666666667, | |
| "grad_norm": 0.006778545322175168, | |
| "learning_rate": 9.628086661437615e-06, | |
| "loss": 0.0001, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.588888888888889, | |
| "grad_norm": 0.06758570759627619, | |
| "learning_rate": 9.604104415737309e-06, | |
| "loss": 0.0005, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.5911111111111111, | |
| "grad_norm": 0.1788082402916065, | |
| "learning_rate": 9.580124450383804e-06, | |
| "loss": 0.0014, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.5933333333333333, | |
| "grad_norm": 0.005291588518946077, | |
| "learning_rate": 9.556146903500997e-06, | |
| "loss": 0.0001, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.5955555555555554, | |
| "grad_norm": 0.0058607426243974855, | |
| "learning_rate": 9.532171913198844e-06, | |
| "loss": 0.0001, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.5977777777777777, | |
| "grad_norm": 0.007145817769455254, | |
| "learning_rate": 9.508199617572574e-06, | |
| "loss": 0.0001, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.00562840148772073, | |
| "learning_rate": 9.4842301547019e-06, | |
| "loss": 0.0001, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.6022222222222222, | |
| "grad_norm": 0.020490381961161024, | |
| "learning_rate": 9.460263662650209e-06, | |
| "loss": 0.0003, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.6044444444444443, | |
| "grad_norm": 0.006179634203761565, | |
| "learning_rate": 9.436300279463794e-06, | |
| "loss": 0.0001, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.6066666666666667, | |
| "grad_norm": 0.1541450361826207, | |
| "learning_rate": 9.412340143171025e-06, | |
| "loss": 0.0005, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.608888888888889, | |
| "grad_norm": 0.005823671113247757, | |
| "learning_rate": 9.388383391781576e-06, | |
| "loss": 0.0001, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.6111111111111112, | |
| "grad_norm": 0.013805210963465328, | |
| "learning_rate": 9.364430163285618e-06, | |
| "loss": 0.0002, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.6133333333333333, | |
| "grad_norm": 0.006665863221844302, | |
| "learning_rate": 9.340480595653047e-06, | |
| "loss": 0.0001, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.6155555555555554, | |
| "grad_norm": 0.00820286527361825, | |
| "learning_rate": 9.316534826832652e-06, | |
| "loss": 0.0001, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.6177777777777778, | |
| "grad_norm": 0.011929093910890769, | |
| "learning_rate": 9.292592994751356e-06, | |
| "loss": 0.0002, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 0.0059639515057671195, | |
| "learning_rate": 9.268655237313397e-06, | |
| "loss": 0.0001, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.6222222222222222, | |
| "grad_norm": 0.006538940872402595, | |
| "learning_rate": 9.244721692399545e-06, | |
| "loss": 0.0001, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.6244444444444444, | |
| "grad_norm": 0.01804894190619014, | |
| "learning_rate": 9.220792497866313e-06, | |
| "loss": 0.0002, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.6266666666666667, | |
| "grad_norm": 0.0303024650880962, | |
| "learning_rate": 9.196867791545148e-06, | |
| "loss": 0.0003, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.628888888888889, | |
| "grad_norm": 0.012094318039754012, | |
| "learning_rate": 9.172947711241648e-06, | |
| "loss": 0.0002, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.6311111111111112, | |
| "grad_norm": 0.009387630461299142, | |
| "learning_rate": 9.14903239473476e-06, | |
| "loss": 0.0002, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.6333333333333333, | |
| "grad_norm": 0.014984490394184057, | |
| "learning_rate": 9.125121979776006e-06, | |
| "loss": 0.0002, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.6355555555555554, | |
| "grad_norm": 0.013561350623148971, | |
| "learning_rate": 9.101216604088662e-06, | |
| "loss": 0.0002, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.6377777777777778, | |
| "grad_norm": 0.05876937621646307, | |
| "learning_rate": 9.07731640536698e-06, | |
| "loss": 0.0004, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.6400000000000001, | |
| "grad_norm": 0.027107956284777576, | |
| "learning_rate": 9.0534215212754e-06, | |
| "loss": 0.0003, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.6422222222222222, | |
| "grad_norm": 0.005813733407651434, | |
| "learning_rate": 9.029532089447736e-06, | |
| "loss": 0.0001, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.6444444444444444, | |
| "grad_norm": 0.0057564727144559956, | |
| "learning_rate": 9.005648247486412e-06, | |
| "loss": 0.0001, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.6466666666666665, | |
| "grad_norm": 0.010741034079875447, | |
| "learning_rate": 8.981770132961649e-06, | |
| "loss": 0.0002, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.6488888888888888, | |
| "grad_norm": 0.005523318432255079, | |
| "learning_rate": 8.957897883410669e-06, | |
| "loss": 0.0001, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.6511111111111112, | |
| "grad_norm": 0.5726473811749346, | |
| "learning_rate": 8.934031636336931e-06, | |
| "loss": 0.0187, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.6533333333333333, | |
| "grad_norm": 0.007648869535974707, | |
| "learning_rate": 8.910171529209306e-06, | |
| "loss": 0.0001, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.6555555555555554, | |
| "grad_norm": 0.05365171261253819, | |
| "learning_rate": 8.886317699461302e-06, | |
| "loss": 0.0005, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.6577777777777778, | |
| "grad_norm": 0.0059177237899166995, | |
| "learning_rate": 8.862470284490266e-06, | |
| "loss": 0.0001, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.6600000000000001, | |
| "grad_norm": 0.010691792573203962, | |
| "learning_rate": 8.838629421656604e-06, | |
| "loss": 0.0001, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.6622222222222223, | |
| "grad_norm": 0.009015957307662194, | |
| "learning_rate": 8.814795248282974e-06, | |
| "loss": 0.0002, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.6644444444444444, | |
| "grad_norm": 0.004649037312383401, | |
| "learning_rate": 8.790967901653512e-06, | |
| "loss": 0.0001, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.0049915014862222246, | |
| "learning_rate": 8.767147519013024e-06, | |
| "loss": 0.0001, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.6688888888888889, | |
| "grad_norm": 0.010450273277654873, | |
| "learning_rate": 8.743334237566202e-06, | |
| "loss": 0.0002, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.6711111111111112, | |
| "grad_norm": 0.004856164431749516, | |
| "learning_rate": 8.719528194476849e-06, | |
| "loss": 0.0001, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.6733333333333333, | |
| "grad_norm": 0.00645416251058259, | |
| "learning_rate": 8.695729526867061e-06, | |
| "loss": 0.0001, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.6755555555555555, | |
| "grad_norm": 0.006296136011180783, | |
| "learning_rate": 8.671938371816457e-06, | |
| "loss": 0.0001, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.6777777777777778, | |
| "grad_norm": 0.006099266013020226, | |
| "learning_rate": 8.648154866361384e-06, | |
| "loss": 0.0001, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 0.05097215247085627, | |
| "learning_rate": 8.624379147494126e-06, | |
| "loss": 0.0003, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.6822222222222223, | |
| "grad_norm": 0.006110100510174432, | |
| "learning_rate": 8.600611352162115e-06, | |
| "loss": 0.0001, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.6844444444444444, | |
| "grad_norm": 0.004907569193177824, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.0001, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.6866666666666665, | |
| "grad_norm": 0.007338018748085221, | |
| "learning_rate": 8.553100079664598e-06, | |
| "loss": 0.0001, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.6888888888888889, | |
| "grad_norm": 0.009435162808976215, | |
| "learning_rate": 8.529356876162606e-06, | |
| "loss": 0.0001, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.6911111111111112, | |
| "grad_norm": 0.004608572532657142, | |
| "learning_rate": 8.505622143521327e-06, | |
| "loss": 0.0001, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.6933333333333334, | |
| "grad_norm": 0.38672080686171983, | |
| "learning_rate": 8.481896018452115e-06, | |
| "loss": 0.003, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.6955555555555555, | |
| "grad_norm": 0.004223089064574624, | |
| "learning_rate": 8.458178637616743e-06, | |
| "loss": 0.0001, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.6977777777777778, | |
| "grad_norm": 0.011532995480279503, | |
| "learning_rate": 8.43447013762662e-06, | |
| "loss": 0.0002, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.00904275087082993, | |
| "learning_rate": 8.410770655042003e-06, | |
| "loss": 0.0001, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.7022222222222223, | |
| "grad_norm": 0.04782688768258787, | |
| "learning_rate": 8.387080326371207e-06, | |
| "loss": 0.0004, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.7044444444444444, | |
| "grad_norm": 0.005973575888367235, | |
| "learning_rate": 8.363399288069821e-06, | |
| "loss": 0.0001, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.7066666666666666, | |
| "grad_norm": 0.005475198783218788, | |
| "learning_rate": 8.33972767653992e-06, | |
| "loss": 0.0001, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.708888888888889, | |
| "grad_norm": 0.005542991848072764, | |
| "learning_rate": 8.31606562812929e-06, | |
| "loss": 0.0001, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.7111111111111112, | |
| "grad_norm": 0.007591934736502721, | |
| "learning_rate": 8.292413279130625e-06, | |
| "loss": 0.0001, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.7133333333333334, | |
| "grad_norm": 0.2994543740927371, | |
| "learning_rate": 8.26877076578075e-06, | |
| "loss": 0.0018, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.7155555555555555, | |
| "grad_norm": 0.00487266979488178, | |
| "learning_rate": 8.24513822425984e-06, | |
| "loss": 0.0001, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.7177777777777776, | |
| "grad_norm": 0.006433893332834025, | |
| "learning_rate": 8.221515790690633e-06, | |
| "loss": 0.0001, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.004698687646378282, | |
| "learning_rate": 8.197903601137644e-06, | |
| "loss": 0.0001, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.7222222222222223, | |
| "grad_norm": 0.005288990109850347, | |
| "learning_rate": 8.174301791606384e-06, | |
| "loss": 0.0001, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.7244444444444444, | |
| "grad_norm": 0.008417763449185623, | |
| "learning_rate": 8.150710498042576e-06, | |
| "loss": 0.0002, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.7266666666666666, | |
| "grad_norm": 0.004932840205764515, | |
| "learning_rate": 8.127129856331365e-06, | |
| "loss": 0.0001, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.728888888888889, | |
| "grad_norm": 0.04184165453592892, | |
| "learning_rate": 8.103560002296554e-06, | |
| "loss": 0.0004, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.7311111111111113, | |
| "grad_norm": 0.3677314242879906, | |
| "learning_rate": 8.0800010716998e-06, | |
| "loss": 0.0021, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.7333333333333334, | |
| "grad_norm": 0.02139801488050292, | |
| "learning_rate": 8.056453200239842e-06, | |
| "loss": 0.0002, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.7355555555555555, | |
| "grad_norm": 0.022195305030182053, | |
| "learning_rate": 8.03291652355172e-06, | |
| "loss": 0.0003, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.7377777777777776, | |
| "grad_norm": 0.007864394845988323, | |
| "learning_rate": 8.009391177205995e-06, | |
| "loss": 0.0001, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.012238881550260377, | |
| "learning_rate": 7.985877296707958e-06, | |
| "loss": 0.0002, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.7422222222222223, | |
| "grad_norm": 0.009077714774709764, | |
| "learning_rate": 7.962375017496867e-06, | |
| "loss": 0.0001, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.7444444444444445, | |
| "grad_norm": 0.00477581696449405, | |
| "learning_rate": 7.93888447494515e-06, | |
| "loss": 0.0001, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.7466666666666666, | |
| "grad_norm": 0.010077508604582104, | |
| "learning_rate": 7.915405804357632e-06, | |
| "loss": 0.0001, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.748888888888889, | |
| "grad_norm": 0.004006018236566087, | |
| "learning_rate": 7.891939140970767e-06, | |
| "loss": 0.0001, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.751111111111111, | |
| "grad_norm": 0.007776745743237208, | |
| "learning_rate": 7.868484619951832e-06, | |
| "loss": 0.0001, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.7533333333333334, | |
| "grad_norm": 0.006558190626715821, | |
| "learning_rate": 7.845042376398174e-06, | |
| "loss": 0.0001, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.7555555555555555, | |
| "grad_norm": 0.012728287924886702, | |
| "learning_rate": 7.821612545336416e-06, | |
| "loss": 0.0001, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.7577777777777777, | |
| "grad_norm": 0.019393051233493136, | |
| "learning_rate": 7.798195261721692e-06, | |
| "loss": 0.0003, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.007585128244599703, | |
| "learning_rate": 7.774790660436857e-06, | |
| "loss": 0.0001, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.7622222222222224, | |
| "grad_norm": 0.018732108992699427, | |
| "learning_rate": 7.751398876291725e-06, | |
| "loss": 0.0003, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.7644444444444445, | |
| "grad_norm": 0.012159679813607176, | |
| "learning_rate": 7.72802004402227e-06, | |
| "loss": 0.0002, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.7666666666666666, | |
| "grad_norm": 0.010461293953590287, | |
| "learning_rate": 7.704654298289878e-06, | |
| "loss": 0.0001, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.7688888888888887, | |
| "grad_norm": 0.01656739323056127, | |
| "learning_rate": 7.681301773680548e-06, | |
| "loss": 0.0001, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.771111111111111, | |
| "grad_norm": 0.014855518028811346, | |
| "learning_rate": 7.65796260470413e-06, | |
| "loss": 0.0002, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.7733333333333334, | |
| "grad_norm": 0.007211463270469363, | |
| "learning_rate": 7.634636925793542e-06, | |
| "loss": 0.0001, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.7755555555555556, | |
| "grad_norm": 0.009221454980006732, | |
| "learning_rate": 7.611324871304002e-06, | |
| "loss": 0.0001, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 0.00527703487852243, | |
| "learning_rate": 7.58802657551225e-06, | |
| "loss": 0.0001, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.005754885446374674, | |
| "learning_rate": 7.56474217261578e-06, | |
| "loss": 0.0002, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.7822222222222224, | |
| "grad_norm": 0.04352692790769988, | |
| "learning_rate": 7.54147179673206e-06, | |
| "loss": 0.0004, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.7844444444444445, | |
| "grad_norm": 0.0049611027136041595, | |
| "learning_rate": 7.518215581897763e-06, | |
| "loss": 0.0001, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.7866666666666666, | |
| "grad_norm": 0.010213049880198742, | |
| "learning_rate": 7.494973662067996e-06, | |
| "loss": 0.0002, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.7888888888888888, | |
| "grad_norm": 0.008401732024698658, | |
| "learning_rate": 7.471746171115529e-06, | |
| "loss": 0.0001, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.791111111111111, | |
| "grad_norm": 0.007850135027375235, | |
| "learning_rate": 7.44853324283002e-06, | |
| "loss": 0.0001, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.7933333333333334, | |
| "grad_norm": 0.00579944610199464, | |
| "learning_rate": 7.425335010917244e-06, | |
| "loss": 0.0001, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.7955555555555556, | |
| "grad_norm": 0.009258421415421275, | |
| "learning_rate": 7.402151608998329e-06, | |
| "loss": 0.0001, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.7977777777777777, | |
| "grad_norm": 0.11405051644288978, | |
| "learning_rate": 7.378983170608982e-06, | |
| "loss": 0.0008, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.006320556558882009, | |
| "learning_rate": 7.355829829198715e-06, | |
| "loss": 0.0001, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.8022222222222222, | |
| "grad_norm": 0.012045241309534882, | |
| "learning_rate": 7.332691718130094e-06, | |
| "loss": 0.0001, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.8044444444444445, | |
| "grad_norm": 0.520542977972645, | |
| "learning_rate": 7.3095689706779476e-06, | |
| "loss": 0.0045, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.8066666666666666, | |
| "grad_norm": 0.019381346685568877, | |
| "learning_rate": 7.2864617200286124e-06, | |
| "loss": 0.0002, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.8088888888888888, | |
| "grad_norm": 0.007508218090511844, | |
| "learning_rate": 7.263370099279173e-06, | |
| "loss": 0.0001, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.8111111111111111, | |
| "grad_norm": 0.006636910045438217, | |
| "learning_rate": 7.2402942414366714e-06, | |
| "loss": 0.0001, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.8133333333333335, | |
| "grad_norm": 0.5970675431076587, | |
| "learning_rate": 7.217234279417369e-06, | |
| "loss": 0.0281, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.8155555555555556, | |
| "grad_norm": 0.20910915509044223, | |
| "learning_rate": 7.1941903460459575e-06, | |
| "loss": 0.0014, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.8177777777777777, | |
| "grad_norm": 0.005756606591866209, | |
| "learning_rate": 7.1711625740548115e-06, | |
| "loss": 0.0001, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.8199999999999998, | |
| "grad_norm": 0.01015309259186487, | |
| "learning_rate": 7.148151096083211e-06, | |
| "loss": 0.0001, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.8222222222222222, | |
| "grad_norm": 0.011163991647484358, | |
| "learning_rate": 7.125156044676586e-06, | |
| "loss": 0.0001, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.8244444444444445, | |
| "grad_norm": 0.006911758672704037, | |
| "learning_rate": 7.102177552285753e-06, | |
| "loss": 0.0001, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.8266666666666667, | |
| "grad_norm": 0.011861531736748487, | |
| "learning_rate": 7.0792157512661445e-06, | |
| "loss": 0.0001, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.8288888888888888, | |
| "grad_norm": 0.016554162931998656, | |
| "learning_rate": 7.056270773877051e-06, | |
| "loss": 0.0002, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.8311111111111111, | |
| "grad_norm": 0.0054004361128319744, | |
| "learning_rate": 7.033342752280861e-06, | |
| "loss": 0.0001, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.8333333333333335, | |
| "grad_norm": 0.005023600731988617, | |
| "learning_rate": 7.010431818542298e-06, | |
| "loss": 0.0001, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.8355555555555556, | |
| "grad_norm": 0.009620786803093213, | |
| "learning_rate": 6.9875381046276605e-06, | |
| "loss": 0.0001, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.8377777777777777, | |
| "grad_norm": 0.008813541653437583, | |
| "learning_rate": 6.964661742404058e-06, | |
| "loss": 0.0002, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 0.005463228743609924, | |
| "learning_rate": 6.9418028636386595e-06, | |
| "loss": 0.0001, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.8422222222222222, | |
| "grad_norm": 0.017018327496045683, | |
| "learning_rate": 6.918961599997926e-06, | |
| "loss": 0.0002, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.8444444444444446, | |
| "grad_norm": 0.00495689378945732, | |
| "learning_rate": 6.89613808304686e-06, | |
| "loss": 0.0001, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.8466666666666667, | |
| "grad_norm": 0.008186424362690388, | |
| "learning_rate": 6.873332444248241e-06, | |
| "loss": 0.0001, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.8488888888888888, | |
| "grad_norm": 0.00512814076180878, | |
| "learning_rate": 6.85054481496187e-06, | |
| "loss": 0.0001, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.8511111111111112, | |
| "grad_norm": 0.009508659924489689, | |
| "learning_rate": 6.827775326443817e-06, | |
| "loss": 0.0001, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.8533333333333335, | |
| "grad_norm": 0.006763646962175536, | |
| "learning_rate": 6.805024109845657e-06, | |
| "loss": 0.0001, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.8555555555555556, | |
| "grad_norm": 0.005577090374834863, | |
| "learning_rate": 6.7822912962137225e-06, | |
| "loss": 0.0001, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.8577777777777778, | |
| "grad_norm": 0.004079818983816815, | |
| "learning_rate": 6.759577016488343e-06, | |
| "loss": 0.0001, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.8599999999999999, | |
| "grad_norm": 0.0042987853792713976, | |
| "learning_rate": 6.736881401503097e-06, | |
| "loss": 0.0001, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.8622222222222222, | |
| "grad_norm": 0.08285529618987642, | |
| "learning_rate": 6.714204581984052e-06, | |
| "loss": 0.0005, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.8644444444444446, | |
| "grad_norm": 0.011153940125612353, | |
| "learning_rate": 6.691546688549016e-06, | |
| "loss": 0.0002, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.8666666666666667, | |
| "grad_norm": 0.008769649761811774, | |
| "learning_rate": 6.668907851706782e-06, | |
| "loss": 0.0001, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.8688888888888888, | |
| "grad_norm": 0.8175197306040879, | |
| "learning_rate": 6.646288201856377e-06, | |
| "loss": 0.0263, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.871111111111111, | |
| "grad_norm": 0.009087403083982025, | |
| "learning_rate": 6.623687869286314e-06, | |
| "loss": 0.0001, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.8733333333333333, | |
| "grad_norm": 0.03365911176956834, | |
| "learning_rate": 6.601106984173835e-06, | |
| "loss": 0.0003, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.8755555555555556, | |
| "grad_norm": 0.005352710342181907, | |
| "learning_rate": 6.578545676584168e-06, | |
| "loss": 0.0001, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.8777777777777778, | |
| "grad_norm": 0.004245977263925399, | |
| "learning_rate": 6.556004076469773e-06, | |
| "loss": 0.0001, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.014620276778664822, | |
| "learning_rate": 6.533482313669599e-06, | |
| "loss": 0.0002, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.8822222222222222, | |
| "grad_norm": 0.6281415276451581, | |
| "learning_rate": 6.510980517908334e-06, | |
| "loss": 0.031, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.8844444444444446, | |
| "grad_norm": 0.006886647746715712, | |
| "learning_rate": 6.488498818795646e-06, | |
| "loss": 0.0001, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.8866666666666667, | |
| "grad_norm": 0.005100411075461833, | |
| "learning_rate": 6.466037345825462e-06, | |
| "loss": 0.0001, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.8888888888888888, | |
| "grad_norm": 0.10551873355305119, | |
| "learning_rate": 6.443596228375193e-06, | |
| "loss": 0.0007, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.891111111111111, | |
| "grad_norm": 0.006119219051238931, | |
| "learning_rate": 6.421175595705013e-06, | |
| "loss": 0.0001, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.8933333333333333, | |
| "grad_norm": 0.004052476881331301, | |
| "learning_rate": 6.398775576957097e-06, | |
| "loss": 0.0001, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.8955555555555557, | |
| "grad_norm": 0.8114488694715846, | |
| "learning_rate": 6.37639630115489e-06, | |
| "loss": 0.0025, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.8977777777777778, | |
| "grad_norm": 0.0053107177917564915, | |
| "learning_rate": 6.354037897202352e-06, | |
| "loss": 0.0001, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.005331548486758555, | |
| "learning_rate": 6.331700493883228e-06, | |
| "loss": 0.0001, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.9022222222222223, | |
| "grad_norm": 0.011609706829984554, | |
| "learning_rate": 6.3093842198603014e-06, | |
| "loss": 0.0002, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.9044444444444446, | |
| "grad_norm": 0.007378859212799551, | |
| "learning_rate": 6.287089203674641e-06, | |
| "loss": 0.0001, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.9066666666666667, | |
| "grad_norm": 0.006641448834362174, | |
| "learning_rate": 6.264815573744884e-06, | |
| "loss": 0.0001, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.9088888888888889, | |
| "grad_norm": 0.02980993239880587, | |
| "learning_rate": 6.242563458366475e-06, | |
| "loss": 0.0004, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.911111111111111, | |
| "grad_norm": 0.036304179408240815, | |
| "learning_rate": 6.220332985710936e-06, | |
| "loss": 0.0002, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.9133333333333333, | |
| "grad_norm": 0.004435838299747552, | |
| "learning_rate": 6.198124283825131e-06, | |
| "loss": 0.0001, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.9155555555555557, | |
| "grad_norm": 0.011765358286836293, | |
| "learning_rate": 6.17593748063052e-06, | |
| "loss": 0.0001, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.9177777777777778, | |
| "grad_norm": 0.012044616751011209, | |
| "learning_rate": 6.153772703922434e-06, | |
| "loss": 0.0002, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.011012411350759551, | |
| "learning_rate": 6.131630081369325e-06, | |
| "loss": 0.0002, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.9222222222222223, | |
| "grad_norm": 0.008381116879417993, | |
| "learning_rate": 6.1095097405120465e-06, | |
| "loss": 0.0002, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.9244444444444444, | |
| "grad_norm": 0.006654569987083457, | |
| "learning_rate": 6.0874118087631e-06, | |
| "loss": 0.0001, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.9266666666666667, | |
| "grad_norm": 0.007497467574307127, | |
| "learning_rate": 6.065336413405918e-06, | |
| "loss": 0.0001, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.9288888888888889, | |
| "grad_norm": 0.00824096532364245, | |
| "learning_rate": 6.043283681594123e-06, | |
| "loss": 0.0001, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.931111111111111, | |
| "grad_norm": 0.013656292779705767, | |
| "learning_rate": 6.021253740350793e-06, | |
| "loss": 0.0002, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.9333333333333333, | |
| "grad_norm": 0.008338880379787751, | |
| "learning_rate": 5.999246716567737e-06, | |
| "loss": 0.0001, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.9355555555555557, | |
| "grad_norm": 0.04537032361419367, | |
| "learning_rate": 5.977262737004756e-06, | |
| "loss": 0.0004, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.9377777777777778, | |
| "grad_norm": 0.235175391585093, | |
| "learning_rate": 5.955301928288919e-06, | |
| "loss": 0.002, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.0077392287958668975, | |
| "learning_rate": 5.933364416913836e-06, | |
| "loss": 0.0001, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.942222222222222, | |
| "grad_norm": 0.004933175649419399, | |
| "learning_rate": 5.911450329238918e-06, | |
| "loss": 0.0001, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.9444444444444444, | |
| "grad_norm": 0.004606718891752622, | |
| "learning_rate": 5.889559791488658e-06, | |
| "loss": 0.0001, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.9466666666666668, | |
| "grad_norm": 0.024627511476694872, | |
| "learning_rate": 5.867692929751907e-06, | |
| "loss": 0.0003, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.948888888888889, | |
| "grad_norm": 0.006505899059626392, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.0001, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.951111111111111, | |
| "grad_norm": 0.03268596628702246, | |
| "learning_rate": 5.824030737991722e-06, | |
| "loss": 0.0004, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.9533333333333334, | |
| "grad_norm": 0.007497593506385594, | |
| "learning_rate": 5.802235659461216e-06, | |
| "loss": 0.0001, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.9555555555555557, | |
| "grad_norm": 0.021232254059375327, | |
| "learning_rate": 5.780464759928623e-06, | |
| "loss": 0.0002, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.9577777777777778, | |
| "grad_norm": 0.0063559156253589835, | |
| "learning_rate": 5.758718164793675e-06, | |
| "loss": 0.0001, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.006323882840213799, | |
| "learning_rate": 5.736995999316122e-06, | |
| "loss": 0.0001, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.962222222222222, | |
| "grad_norm": 0.5055280205220132, | |
| "learning_rate": 5.715298388614987e-06, | |
| "loss": 0.0074, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.9644444444444444, | |
| "grad_norm": 0.0044161780544871975, | |
| "learning_rate": 5.693625457667862e-06, | |
| "loss": 0.0001, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.9666666666666668, | |
| "grad_norm": 0.004524827100756733, | |
| "learning_rate": 5.671977331310187e-06, | |
| "loss": 0.0001, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.968888888888889, | |
| "grad_norm": 0.007332930332910707, | |
| "learning_rate": 5.650354134234526e-06, | |
| "loss": 0.0002, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.971111111111111, | |
| "grad_norm": 0.012619624356771787, | |
| "learning_rate": 5.628755990989854e-06, | |
| "loss": 0.0002, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.9733333333333334, | |
| "grad_norm": 0.005836573084793587, | |
| "learning_rate": 5.607183025980831e-06, | |
| "loss": 0.0001, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.9755555555555555, | |
| "grad_norm": 0.005086404241320908, | |
| "learning_rate": 5.585635363467097e-06, | |
| "loss": 0.0001, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.9777777777777779, | |
| "grad_norm": 0.003997482555832169, | |
| "learning_rate": 5.564113127562543e-06, | |
| "loss": 0.0001, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.19483423471210445, | |
| "learning_rate": 5.542616442234618e-06, | |
| "loss": 0.0013, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.982222222222222, | |
| "grad_norm": 0.006373878867769347, | |
| "learning_rate": 5.5211454313035865e-06, | |
| "loss": 0.0001, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.9844444444444445, | |
| "grad_norm": 0.005143906889337787, | |
| "learning_rate": 5.4997002184418325e-06, | |
| "loss": 0.0001, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.9866666666666668, | |
| "grad_norm": 0.01630345182876676, | |
| "learning_rate": 5.478280927173145e-06, | |
| "loss": 0.0001, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.988888888888889, | |
| "grad_norm": 0.006929788669228794, | |
| "learning_rate": 5.456887680872007e-06, | |
| "loss": 0.0001, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.991111111111111, | |
| "grad_norm": 0.031271170454043214, | |
| "learning_rate": 5.435520602762878e-06, | |
| "loss": 0.0003, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.9933333333333332, | |
| "grad_norm": 0.09021141995367406, | |
| "learning_rate": 5.4141798159195e-06, | |
| "loss": 0.0009, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.9955555555555555, | |
| "grad_norm": 0.006060782254190974, | |
| "learning_rate": 5.392865443264164e-06, | |
| "loss": 0.0001, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.9977777777777779, | |
| "grad_norm": 0.005885598423731525, | |
| "learning_rate": 5.3715776075670286e-06, | |
| "loss": 0.0001, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.005162278952833458, | |
| "learning_rate": 5.350316431445397e-06, | |
| "loss": 0.0001, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.002222222222222, | |
| "grad_norm": 0.006377626540326884, | |
| "learning_rate": 5.329082037363007e-06, | |
| "loss": 0.0001, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.0044444444444443, | |
| "grad_norm": 0.004339285227317409, | |
| "learning_rate": 5.307874547629339e-06, | |
| "loss": 0.0001, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.006666666666667, | |
| "grad_norm": 0.004417747365873751, | |
| "learning_rate": 5.286694084398905e-06, | |
| "loss": 0.0001, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.008888888888889, | |
| "grad_norm": 0.00798192017334622, | |
| "learning_rate": 5.2655407696705416e-06, | |
| "loss": 0.0002, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.011111111111111, | |
| "grad_norm": 0.009790744598206124, | |
| "learning_rate": 5.244414725286717e-06, | |
| "loss": 0.0001, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.013333333333333, | |
| "grad_norm": 0.0144572850806694, | |
| "learning_rate": 5.223316072932817e-06, | |
| "loss": 0.0002, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.0155555555555558, | |
| "grad_norm": 0.005251315999579009, | |
| "learning_rate": 5.202244934136449e-06, | |
| "loss": 0.0001, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.017777777777778, | |
| "grad_norm": 0.014511592519004647, | |
| "learning_rate": 5.1812014302667535e-06, | |
| "loss": 0.0002, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 0.03013628315545542, | |
| "learning_rate": 5.160185682533686e-06, | |
| "loss": 0.0004, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.022222222222222, | |
| "grad_norm": 0.018566923707603544, | |
| "learning_rate": 5.1391978119873275e-06, | |
| "loss": 0.0003, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.0244444444444443, | |
| "grad_norm": 0.007063702332944817, | |
| "learning_rate": 5.11823793951719e-06, | |
| "loss": 0.0001, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.026666666666667, | |
| "grad_norm": 0.009933684332748409, | |
| "learning_rate": 5.097306185851515e-06, | |
| "loss": 0.0002, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.028888888888889, | |
| "grad_norm": 0.010480910272289808, | |
| "learning_rate": 5.076402671556578e-06, | |
| "loss": 0.0002, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.031111111111111, | |
| "grad_norm": 0.03682956911614836, | |
| "learning_rate": 5.05552751703601e-06, | |
| "loss": 0.0004, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.033333333333333, | |
| "grad_norm": 0.0050899821105464815, | |
| "learning_rate": 5.034680842530075e-06, | |
| "loss": 0.0001, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.0355555555555553, | |
| "grad_norm": 0.010850761638919243, | |
| "learning_rate": 5.0138627681149974e-06, | |
| "loss": 0.0002, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.037777777777778, | |
| "grad_norm": 0.0057297561872837735, | |
| "learning_rate": 4.993073413702273e-06, | |
| "loss": 0.0001, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 0.011061280775842406, | |
| "learning_rate": 4.972312899037963e-06, | |
| "loss": 0.0002, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.042222222222222, | |
| "grad_norm": 0.01506996017690318, | |
| "learning_rate": 4.951581343702014e-06, | |
| "loss": 0.0003, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.0444444444444443, | |
| "grad_norm": 0.008072621715204979, | |
| "learning_rate": 4.930878867107572e-06, | |
| "loss": 0.0001, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.046666666666667, | |
| "grad_norm": 0.007306677067491335, | |
| "learning_rate": 4.9102055885002834e-06, | |
| "loss": 0.0001, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.048888888888889, | |
| "grad_norm": 0.012606002760401273, | |
| "learning_rate": 4.88956162695762e-06, | |
| "loss": 0.0001, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.051111111111111, | |
| "grad_norm": 0.004489098410894985, | |
| "learning_rate": 4.868947101388188e-06, | |
| "loss": 0.0001, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.0533333333333332, | |
| "grad_norm": 0.003972473539188523, | |
| "learning_rate": 4.848362130531039e-06, | |
| "loss": 0.0001, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.0555555555555554, | |
| "grad_norm": 0.015506131288919417, | |
| "learning_rate": 4.827806832955e-06, | |
| "loss": 0.0001, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.057777777777778, | |
| "grad_norm": 0.02888300990727671, | |
| "learning_rate": 4.807281327057972e-06, | |
| "loss": 0.0003, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 0.003896524910882283, | |
| "learning_rate": 4.786785731066258e-06, | |
| "loss": 0.0001, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.062222222222222, | |
| "grad_norm": 0.0036967675583388297, | |
| "learning_rate": 4.766320163033882e-06, | |
| "loss": 0.0001, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.0644444444444443, | |
| "grad_norm": 0.00726508703374028, | |
| "learning_rate": 4.745884740841909e-06, | |
| "loss": 0.0001, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.066666666666667, | |
| "grad_norm": 0.006706186863594598, | |
| "learning_rate": 4.725479582197764e-06, | |
| "loss": 0.0001, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.068888888888889, | |
| "grad_norm": 0.005744332650237057, | |
| "learning_rate": 4.705104804634549e-06, | |
| "loss": 0.0001, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.071111111111111, | |
| "grad_norm": 0.005963971603264281, | |
| "learning_rate": 4.684760525510388e-06, | |
| "loss": 0.0001, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.0733333333333333, | |
| "grad_norm": 0.005377691210036081, | |
| "learning_rate": 4.664446862007718e-06, | |
| "loss": 0.0001, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.0755555555555554, | |
| "grad_norm": 0.0956583175878244, | |
| "learning_rate": 4.644163931132634e-06, | |
| "loss": 0.0009, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.077777777777778, | |
| "grad_norm": 0.004221270175498253, | |
| "learning_rate": 4.623911849714226e-06, | |
| "loss": 0.0001, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.0045696190942705935, | |
| "learning_rate": 4.603690734403873e-06, | |
| "loss": 0.0001, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.082222222222222, | |
| "grad_norm": 0.003958843138772146, | |
| "learning_rate": 4.583500701674603e-06, | |
| "loss": 0.0001, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.0844444444444443, | |
| "grad_norm": 0.00920915332365134, | |
| "learning_rate": 4.5633418678204e-06, | |
| "loss": 0.0001, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.086666666666667, | |
| "grad_norm": 0.003958746374584934, | |
| "learning_rate": 4.543214348955552e-06, | |
| "loss": 0.0001, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.088888888888889, | |
| "grad_norm": 0.004544461131101174, | |
| "learning_rate": 4.523118261013969e-06, | |
| "loss": 0.0001, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.091111111111111, | |
| "grad_norm": 0.07257058246581209, | |
| "learning_rate": 4.50305371974852e-06, | |
| "loss": 0.0003, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.0933333333333333, | |
| "grad_norm": 0.005639945404013589, | |
| "learning_rate": 4.483020840730365e-06, | |
| "loss": 0.0001, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.0955555555555554, | |
| "grad_norm": 0.032796766466521195, | |
| "learning_rate": 4.463019739348296e-06, | |
| "loss": 0.0001, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.097777777777778, | |
| "grad_norm": 0.005631214478711146, | |
| "learning_rate": 4.443050530808061e-06, | |
| "loss": 0.0001, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.004697908951213257, | |
| "learning_rate": 4.423113330131708e-06, | |
| "loss": 0.0001, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.102222222222222, | |
| "grad_norm": 0.013155576639003044, | |
| "learning_rate": 4.403208252156921e-06, | |
| "loss": 0.0002, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.1044444444444443, | |
| "grad_norm": 0.008411689368181242, | |
| "learning_rate": 4.383335411536357e-06, | |
| "loss": 0.0001, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.1066666666666665, | |
| "grad_norm": 0.19313559818815842, | |
| "learning_rate": 4.363494922736988e-06, | |
| "loss": 0.0016, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.108888888888889, | |
| "grad_norm": 0.005488587754023579, | |
| "learning_rate": 4.343686900039438e-06, | |
| "loss": 0.0001, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.111111111111111, | |
| "grad_norm": 0.007552904084356708, | |
| "learning_rate": 4.323911457537335e-06, | |
| "loss": 0.0001, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.1133333333333333, | |
| "grad_norm": 0.006603320699908656, | |
| "learning_rate": 4.3041687091366325e-06, | |
| "loss": 0.0001, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.1155555555555554, | |
| "grad_norm": 0.03218107191749073, | |
| "learning_rate": 4.284458768554984e-06, | |
| "loss": 0.0001, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.117777777777778, | |
| "grad_norm": 0.003965471111599559, | |
| "learning_rate": 4.264781749321058e-06, | |
| "loss": 0.0001, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 0.012027841374553132, | |
| "learning_rate": 4.245137764773899e-06, | |
| "loss": 0.0001, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.1222222222222222, | |
| "grad_norm": 0.21050540015201846, | |
| "learning_rate": 4.2255269280622754e-06, | |
| "loss": 0.0014, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.1244444444444444, | |
| "grad_norm": 0.0235151283631568, | |
| "learning_rate": 4.205949352144025e-06, | |
| "loss": 0.0001, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.1266666666666665, | |
| "grad_norm": 0.0050033621887494995, | |
| "learning_rate": 4.186405149785403e-06, | |
| "loss": 0.0001, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.128888888888889, | |
| "grad_norm": 0.01076001909047283, | |
| "learning_rate": 4.166894433560435e-06, | |
| "loss": 0.0001, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.131111111111111, | |
| "grad_norm": 0.00959531425331137, | |
| "learning_rate": 4.1474173158502615e-06, | |
| "loss": 0.0001, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.1333333333333333, | |
| "grad_norm": 0.004297967879909364, | |
| "learning_rate": 4.1279739088425106e-06, | |
| "loss": 0.0001, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.1355555555555554, | |
| "grad_norm": 0.0039084473009554565, | |
| "learning_rate": 4.108564324530626e-06, | |
| "loss": 0.0001, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.137777777777778, | |
| "grad_norm": 0.01259579009717942, | |
| "learning_rate": 4.0891886747132356e-06, | |
| "loss": 0.0001, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.04276287228357518, | |
| "learning_rate": 4.069847070993508e-06, | |
| "loss": 0.0004, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.1422222222222222, | |
| "grad_norm": 0.005519938251128461, | |
| "learning_rate": 4.050539624778506e-06, | |
| "loss": 0.0001, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.1444444444444444, | |
| "grad_norm": 0.004345278879569674, | |
| "learning_rate": 4.031266447278543e-06, | |
| "loss": 0.0001, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.1466666666666665, | |
| "grad_norm": 0.0047809765556548435, | |
| "learning_rate": 4.012027649506555e-06, | |
| "loss": 0.0001, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.148888888888889, | |
| "grad_norm": 0.007262043311470335, | |
| "learning_rate": 3.992823342277437e-06, | |
| "loss": 0.0001, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.151111111111111, | |
| "grad_norm": 0.038488782260049495, | |
| "learning_rate": 3.973653636207437e-06, | |
| "loss": 0.0002, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.1533333333333333, | |
| "grad_norm": 0.005170367824117691, | |
| "learning_rate": 3.9545186417134865e-06, | |
| "loss": 0.0001, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.1555555555555554, | |
| "grad_norm": 0.006718829056620287, | |
| "learning_rate": 3.935418469012592e-06, | |
| "loss": 0.0001, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.1577777777777776, | |
| "grad_norm": 0.005486818963923312, | |
| "learning_rate": 3.916353228121176e-06, | |
| "loss": 0.0001, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.00543724006827059, | |
| "learning_rate": 3.897323028854461e-06, | |
| "loss": 0.0001, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.1622222222222223, | |
| "grad_norm": 0.007253338100018996, | |
| "learning_rate": 3.878327980825829e-06, | |
| "loss": 0.0001, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.1644444444444444, | |
| "grad_norm": 0.004788163338010424, | |
| "learning_rate": 3.859368193446193e-06, | |
| "loss": 0.0001, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.1666666666666665, | |
| "grad_norm": 0.047783010016075385, | |
| "learning_rate": 3.840443775923365e-06, | |
| "loss": 0.0003, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.168888888888889, | |
| "grad_norm": 0.00507827388378548, | |
| "learning_rate": 3.821554837261424e-06, | |
| "loss": 0.0001, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.171111111111111, | |
| "grad_norm": 0.008518792662368229, | |
| "learning_rate": 3.802701486260102e-06, | |
| "loss": 0.0001, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.1733333333333333, | |
| "grad_norm": 0.0041793997962789545, | |
| "learning_rate": 3.783883831514139e-06, | |
| "loss": 0.0001, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.1755555555555555, | |
| "grad_norm": 0.6260495915902624, | |
| "learning_rate": 3.7651019814126656e-06, | |
| "loss": 0.0056, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.1777777777777776, | |
| "grad_norm": 0.5844939048495187, | |
| "learning_rate": 3.7463560441385814e-06, | |
| "loss": 0.0113, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.14596918269272327, | |
| "learning_rate": 3.727646127667929e-06, | |
| "loss": 0.0007, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.1822222222222223, | |
| "grad_norm": 0.0040705704888426035, | |
| "learning_rate": 3.70897233976927e-06, | |
| "loss": 0.0001, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.1844444444444444, | |
| "grad_norm": 0.1114444379758751, | |
| "learning_rate": 3.6903347880030684e-06, | |
| "loss": 0.0004, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.1866666666666665, | |
| "grad_norm": 0.4208002718939926, | |
| "learning_rate": 3.6717335797210663e-06, | |
| "loss": 0.0043, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.188888888888889, | |
| "grad_norm": 0.006849423625035047, | |
| "learning_rate": 3.653168822065677e-06, | |
| "loss": 0.0001, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.1911111111111112, | |
| "grad_norm": 0.005257967529070517, | |
| "learning_rate": 3.6346406219693485e-06, | |
| "loss": 0.0001, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.1933333333333334, | |
| "grad_norm": 0.003928932408318167, | |
| "learning_rate": 3.6161490861539626e-06, | |
| "loss": 0.0001, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.1955555555555555, | |
| "grad_norm": 0.0039097540324176195, | |
| "learning_rate": 3.5976943211302206e-06, | |
| "loss": 0.0001, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.1977777777777776, | |
| "grad_norm": 0.01917025077992462, | |
| "learning_rate": 3.5792764331970187e-06, | |
| "loss": 0.0001, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.005604064350312851, | |
| "learning_rate": 3.560895528440844e-06, | |
| "loss": 0.0001, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.2022222222222223, | |
| "grad_norm": 0.004800945780431629, | |
| "learning_rate": 3.5425517127351614e-06, | |
| "loss": 0.0001, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.2044444444444444, | |
| "grad_norm": 0.004879052613344668, | |
| "learning_rate": 3.524245091739805e-06, | |
| "loss": 0.0001, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.2066666666666666, | |
| "grad_norm": 0.0043818124429130636, | |
| "learning_rate": 3.5059757709003685e-06, | |
| "loss": 0.0001, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.2088888888888887, | |
| "grad_norm": 0.004077161024693103, | |
| "learning_rate": 3.487743855447593e-06, | |
| "loss": 0.0001, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.2111111111111112, | |
| "grad_norm": 0.007564114512294026, | |
| "learning_rate": 3.4695494503967773e-06, | |
| "loss": 0.0001, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.2133333333333334, | |
| "grad_norm": 0.004836393559588784, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.0001, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.2155555555555555, | |
| "grad_norm": 0.007320394644995222, | |
| "learning_rate": 3.433273590481282e-06, | |
| "loss": 0.0001, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.2177777777777776, | |
| "grad_norm": 0.006231564879502769, | |
| "learning_rate": 3.4151923445644785e-06, | |
| "loss": 0.0001, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.0042266725326756014, | |
| "learning_rate": 3.3971490269441777e-06, | |
| "loss": 0.0001, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.2222222222222223, | |
| "grad_norm": 0.0046251341054135845, | |
| "learning_rate": 3.3791437415493556e-06, | |
| "loss": 0.0001, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.2244444444444444, | |
| "grad_norm": 0.02986165373842408, | |
| "learning_rate": 3.361176592089919e-06, | |
| "loss": 0.0003, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.2266666666666666, | |
| "grad_norm": 0.0057532210558945505, | |
| "learning_rate": 3.3432476820561134e-06, | |
| "loss": 0.0001, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.2288888888888887, | |
| "grad_norm": 2.5090024186407427, | |
| "learning_rate": 3.3253571147179333e-06, | |
| "loss": 0.0134, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.2311111111111113, | |
| "grad_norm": 0.01170703562163291, | |
| "learning_rate": 3.307504993124513e-06, | |
| "loss": 0.0001, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.2333333333333334, | |
| "grad_norm": 0.003848931291039126, | |
| "learning_rate": 3.2896914201035377e-06, | |
| "loss": 0.0001, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.2355555555555555, | |
| "grad_norm": 0.672943080663016, | |
| "learning_rate": 3.2719164982606675e-06, | |
| "loss": 0.0032, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.2377777777777776, | |
| "grad_norm": 0.09768426363389018, | |
| "learning_rate": 3.254180329978921e-06, | |
| "loss": 0.0006, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.006225071132196414, | |
| "learning_rate": 3.2364830174180984e-06, | |
| "loss": 0.0001, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.2422222222222223, | |
| "grad_norm": 0.003928134388756732, | |
| "learning_rate": 3.2188246625141963e-06, | |
| "loss": 0.0001, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.2444444444444445, | |
| "grad_norm": 0.019999581861604915, | |
| "learning_rate": 3.2012053669788136e-06, | |
| "loss": 0.0002, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.2466666666666666, | |
| "grad_norm": 0.00704729979664219, | |
| "learning_rate": 3.183625232298566e-06, | |
| "loss": 0.0001, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.2488888888888887, | |
| "grad_norm": 0.018454183846657195, | |
| "learning_rate": 3.1660843597345137e-06, | |
| "loss": 0.0002, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.2511111111111113, | |
| "grad_norm": 0.010131693985038895, | |
| "learning_rate": 3.1485828503215588e-06, | |
| "loss": 0.0001, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.2533333333333334, | |
| "grad_norm": 0.011571074135933475, | |
| "learning_rate": 3.1311208048678742e-06, | |
| "loss": 0.0001, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.2555555555555555, | |
| "grad_norm": 0.004048518827404016, | |
| "learning_rate": 3.113698323954326e-06, | |
| "loss": 0.0001, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.2577777777777777, | |
| "grad_norm": 0.34792992795709904, | |
| "learning_rate": 3.0963155079338834e-06, | |
| "loss": 0.0036, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 0.5027747340317579, | |
| "learning_rate": 3.0789724569310532e-06, | |
| "loss": 0.0063, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.2622222222222224, | |
| "grad_norm": 0.005344286601416652, | |
| "learning_rate": 3.061669270841291e-06, | |
| "loss": 0.0001, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.2644444444444445, | |
| "grad_norm": 0.012136927208304322, | |
| "learning_rate": 3.044406049330437e-06, | |
| "loss": 0.0002, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.2666666666666666, | |
| "grad_norm": 0.5891041327000618, | |
| "learning_rate": 3.0271828918341317e-06, | |
| "loss": 0.0014, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.2688888888888887, | |
| "grad_norm": 0.30710561963317223, | |
| "learning_rate": 3.0099998975572553e-06, | |
| "loss": 0.0026, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.2711111111111113, | |
| "grad_norm": 0.006365361140616042, | |
| "learning_rate": 2.9928571654733374e-06, | |
| "loss": 0.0001, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.2733333333333334, | |
| "grad_norm": 0.025494005946716376, | |
| "learning_rate": 2.975754794324015e-06, | |
| "loss": 0.0003, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.2755555555555556, | |
| "grad_norm": 0.01361014276663751, | |
| "learning_rate": 2.9586928826184323e-06, | |
| "loss": 0.0001, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.2777777777777777, | |
| "grad_norm": 0.06065389399585956, | |
| "learning_rate": 2.941671528632695e-06, | |
| "loss": 0.0005, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.2800000000000002, | |
| "grad_norm": 0.007689127383420576, | |
| "learning_rate": 2.9246908304092945e-06, | |
| "loss": 0.0001, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.2822222222222224, | |
| "grad_norm": 0.004594718895988505, | |
| "learning_rate": 2.9077508857565507e-06, | |
| "loss": 0.0001, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.2844444444444445, | |
| "grad_norm": 0.0070023078609854385, | |
| "learning_rate": 2.8908517922480385e-06, | |
| "loss": 0.0001, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.2866666666666666, | |
| "grad_norm": 0.0045828701661759, | |
| "learning_rate": 2.8739936472220385e-06, | |
| "loss": 0.0001, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.2888888888888888, | |
| "grad_norm": 0.012726232281324886, | |
| "learning_rate": 2.8571765477809645e-06, | |
| "loss": 0.0001, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.2911111111111113, | |
| "grad_norm": 0.004724465720840135, | |
| "learning_rate": 2.8404005907908083e-06, | |
| "loss": 0.0001, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.2933333333333334, | |
| "grad_norm": 0.055322523524338064, | |
| "learning_rate": 2.8236658728805844e-06, | |
| "loss": 0.0004, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.2955555555555556, | |
| "grad_norm": 0.009733881695803697, | |
| "learning_rate": 2.8069724904417704e-06, | |
| "loss": 0.0001, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.2977777777777777, | |
| "grad_norm": 0.006727111935187473, | |
| "learning_rate": 2.7903205396277546e-06, | |
| "loss": 0.0001, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 0.01845655062489918, | |
| "learning_rate": 2.7737101163532763e-06, | |
| "loss": 0.0001, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.3022222222222224, | |
| "grad_norm": 0.01887759400897995, | |
| "learning_rate": 2.757141316293884e-06, | |
| "loss": 0.0002, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.3044444444444445, | |
| "grad_norm": 0.00517433364934638, | |
| "learning_rate": 2.740614234885368e-06, | |
| "loss": 0.0001, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.3066666666666666, | |
| "grad_norm": 0.004658625685485781, | |
| "learning_rate": 2.724128967323234e-06, | |
| "loss": 0.0001, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.3088888888888888, | |
| "grad_norm": 0.004400457833783457, | |
| "learning_rate": 2.7076856085621294e-06, | |
| "loss": 0.0001, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.311111111111111, | |
| "grad_norm": 0.0052875365563783555, | |
| "learning_rate": 2.691284253315309e-06, | |
| "loss": 0.0001, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.3133333333333335, | |
| "grad_norm": 0.01140168466528017, | |
| "learning_rate": 2.674924996054099e-06, | |
| "loss": 0.0001, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.3155555555555556, | |
| "grad_norm": 0.004649534663066586, | |
| "learning_rate": 2.6586079310073323e-06, | |
| "loss": 0.0001, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.3177777777777777, | |
| "grad_norm": 0.006660210716995058, | |
| "learning_rate": 2.6423331521608173e-06, | |
| "loss": 0.0001, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.12421871763353338, | |
| "learning_rate": 2.626100753256798e-06, | |
| "loss": 0.0011, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.3222222222222224, | |
| "grad_norm": 0.7718837714171015, | |
| "learning_rate": 2.6099108277934105e-06, | |
| "loss": 0.0246, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.3244444444444445, | |
| "grad_norm": 0.00445486718471388, | |
| "learning_rate": 2.5937634690241396e-06, | |
| "loss": 0.0001, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.3266666666666667, | |
| "grad_norm": 0.0046582589404737095, | |
| "learning_rate": 2.5776587699573007e-06, | |
| "loss": 0.0001, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.328888888888889, | |
| "grad_norm": 0.008074485989913857, | |
| "learning_rate": 2.5615968233554766e-06, | |
| "loss": 0.0001, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.3311111111111114, | |
| "grad_norm": 0.006343859573169782, | |
| "learning_rate": 2.545577721735004e-06, | |
| "loss": 0.0001, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.3333333333333335, | |
| "grad_norm": 0.003468443322902027, | |
| "learning_rate": 2.529601557365432e-06, | |
| "loss": 0.0001, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.3355555555555556, | |
| "grad_norm": 0.02233749772874027, | |
| "learning_rate": 2.5136684222689933e-06, | |
| "loss": 0.0002, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.3377777777777777, | |
| "grad_norm": 0.005172077968539298, | |
| "learning_rate": 2.4977784082200728e-06, | |
| "loss": 0.0001, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.017207084175632038, | |
| "learning_rate": 2.4819316067446787e-06, | |
| "loss": 0.0002, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.3422222222222224, | |
| "grad_norm": 0.008848904074828303, | |
| "learning_rate": 2.4661281091199142e-06, | |
| "loss": 0.0001, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.3444444444444446, | |
| "grad_norm": 0.004835701119379919, | |
| "learning_rate": 2.4503680063734615e-06, | |
| "loss": 0.0001, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.3466666666666667, | |
| "grad_norm": 0.45111226104602087, | |
| "learning_rate": 2.4346513892830427e-06, | |
| "loss": 0.0056, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.348888888888889, | |
| "grad_norm": 0.008107943133592574, | |
| "learning_rate": 2.418978348375904e-06, | |
| "loss": 0.0001, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.351111111111111, | |
| "grad_norm": 0.005452470336513288, | |
| "learning_rate": 2.4033489739282943e-06, | |
| "loss": 0.0001, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.3533333333333335, | |
| "grad_norm": 0.006756956899402857, | |
| "learning_rate": 2.3877633559649505e-06, | |
| "loss": 0.0001, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.3555555555555556, | |
| "grad_norm": 0.005273229524579953, | |
| "learning_rate": 2.372221584258566e-06, | |
| "loss": 0.0001, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.3577777777777778, | |
| "grad_norm": 0.05385805136493815, | |
| "learning_rate": 2.356723748329286e-06, | |
| "loss": 0.0004, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 0.012068745587600403, | |
| "learning_rate": 2.341269937444183e-06, | |
| "loss": 0.0002, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.362222222222222, | |
| "grad_norm": 0.015007488052523894, | |
| "learning_rate": 2.3258602406167465e-06, | |
| "loss": 0.0001, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.3644444444444446, | |
| "grad_norm": 0.05093808437948701, | |
| "learning_rate": 2.3104947466063785e-06, | |
| "loss": 0.0004, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.3666666666666667, | |
| "grad_norm": 0.15365765387144414, | |
| "learning_rate": 2.295173543917867e-06, | |
| "loss": 0.0013, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.368888888888889, | |
| "grad_norm": 0.008550324314707846, | |
| "learning_rate": 2.2798967208008806e-06, | |
| "loss": 0.0001, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.371111111111111, | |
| "grad_norm": 0.00438810737836959, | |
| "learning_rate": 2.2646643652494693e-06, | |
| "loss": 0.0001, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.3733333333333335, | |
| "grad_norm": 0.012171729618934207, | |
| "learning_rate": 2.249476565001548e-06, | |
| "loss": 0.0002, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.3755555555555556, | |
| "grad_norm": 0.026725012873623392, | |
| "learning_rate": 2.234333407538396e-06, | |
| "loss": 0.0003, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.3777777777777778, | |
| "grad_norm": 0.005551417080129842, | |
| "learning_rate": 2.219234980084148e-06, | |
| "loss": 0.0001, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.03733450799458195, | |
| "learning_rate": 2.2041813696052996e-06, | |
| "loss": 0.0003, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.3822222222222225, | |
| "grad_norm": 0.006016302217247116, | |
| "learning_rate": 2.189172662810197e-06, | |
| "loss": 0.0001, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.3844444444444446, | |
| "grad_norm": 0.004448515526709854, | |
| "learning_rate": 2.1742089461485504e-06, | |
| "loss": 0.0001, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.3866666666666667, | |
| "grad_norm": 0.005545262625990853, | |
| "learning_rate": 2.1592903058109215e-06, | |
| "loss": 0.0001, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.388888888888889, | |
| "grad_norm": 0.006864322644896238, | |
| "learning_rate": 2.1444168277282352e-06, | |
| "loss": 0.0001, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.391111111111111, | |
| "grad_norm": 0.5419845413299136, | |
| "learning_rate": 2.1295885975712805e-06, | |
| "loss": 0.0109, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.3933333333333335, | |
| "grad_norm": 0.007326501739890902, | |
| "learning_rate": 2.1148057007502277e-06, | |
| "loss": 0.0001, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.3955555555555557, | |
| "grad_norm": 0.008454837691412703, | |
| "learning_rate": 2.100068222414121e-06, | |
| "loss": 0.0001, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.397777777777778, | |
| "grad_norm": 0.009933199875967556, | |
| "learning_rate": 2.0853762474503982e-06, | |
| "loss": 0.0002, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.006228563631736913, | |
| "learning_rate": 2.0707298604843964e-06, | |
| "loss": 0.0001, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.402222222222222, | |
| "grad_norm": 0.00873748811089882, | |
| "learning_rate": 2.0561291458788736e-06, | |
| "loss": 0.0001, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.4044444444444446, | |
| "grad_norm": 0.005898714163932388, | |
| "learning_rate": 2.0415741877335095e-06, | |
| "loss": 0.0001, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.4066666666666667, | |
| "grad_norm": 0.04965663638538196, | |
| "learning_rate": 2.027065069884432e-06, | |
| "loss": 0.0004, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.408888888888889, | |
| "grad_norm": 0.005588372417784015, | |
| "learning_rate": 2.0126018759037292e-06, | |
| "loss": 0.0001, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.411111111111111, | |
| "grad_norm": 0.015870717864283915, | |
| "learning_rate": 1.9981846890989665e-06, | |
| "loss": 0.0001, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.413333333333333, | |
| "grad_norm": 0.014145709039017728, | |
| "learning_rate": 1.9838135925127134e-06, | |
| "loss": 0.0002, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.4155555555555557, | |
| "grad_norm": 0.005228779960829623, | |
| "learning_rate": 1.9694886689220592e-06, | |
| "loss": 0.0001, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.417777777777778, | |
| "grad_norm": 0.006242548924504129, | |
| "learning_rate": 1.955210000838138e-06, | |
| "loss": 0.0001, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.1739049467152041, | |
| "learning_rate": 1.9409776705056514e-06, | |
| "loss": 0.0015, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.422222222222222, | |
| "grad_norm": 0.4609884460182513, | |
| "learning_rate": 1.9267917599024045e-06, | |
| "loss": 0.0151, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.4244444444444446, | |
| "grad_norm": 0.03168251722246924, | |
| "learning_rate": 1.912652350738818e-06, | |
| "loss": 0.0002, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.4266666666666667, | |
| "grad_norm": 0.007548930972442952, | |
| "learning_rate": 1.8985595244574707e-06, | |
| "loss": 0.0001, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.428888888888889, | |
| "grad_norm": 0.017887887661668177, | |
| "learning_rate": 1.8845133622326174e-06, | |
| "loss": 0.0003, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.431111111111111, | |
| "grad_norm": 0.009493153430366537, | |
| "learning_rate": 1.870513944969743e-06, | |
| "loss": 0.0001, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.4333333333333336, | |
| "grad_norm": 0.01843458790489535, | |
| "learning_rate": 1.8565613533050719e-06, | |
| "loss": 0.0002, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.4355555555555557, | |
| "grad_norm": 0.006903208289958932, | |
| "learning_rate": 1.8426556676051178e-06, | |
| "loss": 0.0001, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.437777777777778, | |
| "grad_norm": 0.3053830346108793, | |
| "learning_rate": 1.8287969679662165e-06, | |
| "loss": 0.003, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.004454782655409662, | |
| "learning_rate": 1.8149853342140644e-06, | |
| "loss": 0.0001, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.442222222222222, | |
| "grad_norm": 0.005208846020473513, | |
| "learning_rate": 1.8012208459032665e-06, | |
| "loss": 0.0001, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.4444444444444446, | |
| "grad_norm": 0.0037467207870704607, | |
| "learning_rate": 1.7875035823168641e-06, | |
| "loss": 0.0001, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.4466666666666668, | |
| "grad_norm": 0.0032666946708352447, | |
| "learning_rate": 1.773833622465888e-06, | |
| "loss": 0.0001, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.448888888888889, | |
| "grad_norm": 0.005892300218469363, | |
| "learning_rate": 1.760211045088902e-06, | |
| "loss": 0.0001, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.451111111111111, | |
| "grad_norm": 0.0040072647077079825, | |
| "learning_rate": 1.7466359286515443e-06, | |
| "loss": 0.0001, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.453333333333333, | |
| "grad_norm": 0.003473817502789472, | |
| "learning_rate": 1.7331083513460855e-06, | |
| "loss": 0.0001, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.4555555555555557, | |
| "grad_norm": 0.003940367189316847, | |
| "learning_rate": 1.7196283910909673e-06, | |
| "loss": 0.0001, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.457777777777778, | |
| "grad_norm": 0.015686793608383997, | |
| "learning_rate": 1.7061961255303594e-06, | |
| "loss": 0.0002, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.006970762382441299, | |
| "learning_rate": 1.692811632033715e-06, | |
| "loss": 0.0001, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.462222222222222, | |
| "grad_norm": 0.0028376947544573585, | |
| "learning_rate": 1.6794749876953187e-06, | |
| "loss": 0.0001, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.464444444444444, | |
| "grad_norm": 0.04159401040005708, | |
| "learning_rate": 1.6661862693338437e-06, | |
| "loss": 0.0003, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.466666666666667, | |
| "grad_norm": 0.04703578221057295, | |
| "learning_rate": 1.652945553491916e-06, | |
| "loss": 0.0004, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.468888888888889, | |
| "grad_norm": 0.004000944711979711, | |
| "learning_rate": 1.6397529164356606e-06, | |
| "loss": 0.0001, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.471111111111111, | |
| "grad_norm": 0.03827165970352177, | |
| "learning_rate": 1.626608434154281e-06, | |
| "loss": 0.0004, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.473333333333333, | |
| "grad_norm": 0.0031239639815219484, | |
| "learning_rate": 1.613512182359601e-06, | |
| "loss": 0.0001, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.4755555555555557, | |
| "grad_norm": 0.00413124818054462, | |
| "learning_rate": 1.6004642364856438e-06, | |
| "loss": 0.0001, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.477777777777778, | |
| "grad_norm": 0.004432120045301809, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.0001, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.0030404415896111117, | |
| "learning_rate": 1.574513562844342e-06, | |
| "loss": 0.0001, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.482222222222222, | |
| "grad_norm": 0.003210013173423719, | |
| "learning_rate": 1.5616109845521099e-06, | |
| "loss": 0.0001, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.4844444444444447, | |
| "grad_norm": 0.004662417447309395, | |
| "learning_rate": 1.5487570111299566e-06, | |
| "loss": 0.0001, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.486666666666667, | |
| "grad_norm": 0.03249338671064761, | |
| "learning_rate": 1.5359517166163884e-06, | |
| "loss": 0.0002, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.488888888888889, | |
| "grad_norm": 0.029354504998656474, | |
| "learning_rate": 1.5231951747695207e-06, | |
| "loss": 0.0004, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.491111111111111, | |
| "grad_norm": 0.06840933315813424, | |
| "learning_rate": 1.5104874590666563e-06, | |
| "loss": 0.0006, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.493333333333333, | |
| "grad_norm": 0.13511264054996472, | |
| "learning_rate": 1.4978286427038602e-06, | |
| "loss": 0.0008, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.4955555555555557, | |
| "grad_norm": 0.003378040321499233, | |
| "learning_rate": 1.485218798595538e-06, | |
| "loss": 0.0001, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.497777777777778, | |
| "grad_norm": 0.008407129400997342, | |
| "learning_rate": 1.4726579993740153e-06, | |
| "loss": 0.0001, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.00299576502898044, | |
| "learning_rate": 1.4601463173891273e-06, | |
| "loss": 0.0001, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.502222222222222, | |
| "grad_norm": 0.0053359389596989845, | |
| "learning_rate": 1.4476838247077874e-06, | |
| "loss": 0.0001, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.5044444444444443, | |
| "grad_norm": 0.003307072835645359, | |
| "learning_rate": 1.4352705931135835e-06, | |
| "loss": 0.0001, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.506666666666667, | |
| "grad_norm": 0.0031299767094290372, | |
| "learning_rate": 1.4229066941063618e-06, | |
| "loss": 0.0001, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.508888888888889, | |
| "grad_norm": 0.004443007371136663, | |
| "learning_rate": 1.4105921989018112e-06, | |
| "loss": 0.0001, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.511111111111111, | |
| "grad_norm": 0.027607284406943558, | |
| "learning_rate": 1.3983271784310616e-06, | |
| "loss": 0.0003, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.513333333333333, | |
| "grad_norm": 0.032541955101797394, | |
| "learning_rate": 1.3861117033402639e-06, | |
| "loss": 0.0002, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 2.5155555555555553, | |
| "grad_norm": 0.0061965570758014555, | |
| "learning_rate": 1.373945843990192e-06, | |
| "loss": 0.0001, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 2.517777777777778, | |
| "grad_norm": 0.006825874567293678, | |
| "learning_rate": 1.3618296704558364e-06, | |
| "loss": 0.0001, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.004464298362319657, | |
| "learning_rate": 1.3497632525259963e-06, | |
| "loss": 0.0001, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 2.522222222222222, | |
| "grad_norm": 0.0084395829054528, | |
| "learning_rate": 1.3377466597028788e-06, | |
| "loss": 0.0001, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 2.5244444444444447, | |
| "grad_norm": 0.0030700891076772645, | |
| "learning_rate": 1.325779961201703e-06, | |
| "loss": 0.0001, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 2.5266666666666664, | |
| "grad_norm": 0.016132652073630065, | |
| "learning_rate": 1.313863225950297e-06, | |
| "loss": 0.0002, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 2.528888888888889, | |
| "grad_norm": 0.026961934993611894, | |
| "learning_rate": 1.301996522588701e-06, | |
| "loss": 0.0003, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 2.531111111111111, | |
| "grad_norm": 0.004828192358172071, | |
| "learning_rate": 1.2901799194687737e-06, | |
| "loss": 0.0001, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 2.533333333333333, | |
| "grad_norm": 0.04582108040085326, | |
| "learning_rate": 1.2784134846537988e-06, | |
| "loss": 0.0002, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.535555555555556, | |
| "grad_norm": 0.00616382232624251, | |
| "learning_rate": 1.2666972859180894e-06, | |
| "loss": 0.0001, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 2.537777777777778, | |
| "grad_norm": 0.012027425719181035, | |
| "learning_rate": 1.255031390746605e-06, | |
| "loss": 0.0002, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 0.00305959120969031, | |
| "learning_rate": 1.2434158663345553e-06, | |
| "loss": 0.0001, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 2.542222222222222, | |
| "grad_norm": 0.00415193020409453, | |
| "learning_rate": 1.2318507795870138e-06, | |
| "loss": 0.0001, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 2.5444444444444443, | |
| "grad_norm": 0.003960536625087934, | |
| "learning_rate": 1.220336197118539e-06, | |
| "loss": 0.0001, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 2.546666666666667, | |
| "grad_norm": 0.0024591621002865802, | |
| "learning_rate": 1.2088721852527807e-06, | |
| "loss": 0.0001, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 2.548888888888889, | |
| "grad_norm": 0.003248478092160033, | |
| "learning_rate": 1.1974588100221074e-06, | |
| "loss": 0.0001, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 2.551111111111111, | |
| "grad_norm": 0.007410516843208672, | |
| "learning_rate": 1.1860961371672242e-06, | |
| "loss": 0.0001, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 2.5533333333333332, | |
| "grad_norm": 0.01639798226983445, | |
| "learning_rate": 1.1747842321367886e-06, | |
| "loss": 0.0002, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 2.5555555555555554, | |
| "grad_norm": 0.020739444695162108, | |
| "learning_rate": 1.1635231600870334e-06, | |
| "loss": 0.0002, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.557777777777778, | |
| "grad_norm": 0.007055632949527986, | |
| "learning_rate": 1.1523129858814042e-06, | |
| "loss": 0.0001, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.0034175004440998038, | |
| "learning_rate": 1.14115377409017e-06, | |
| "loss": 0.0001, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 2.562222222222222, | |
| "grad_norm": 0.0037633190854998782, | |
| "learning_rate": 1.1300455889900587e-06, | |
| "loss": 0.0001, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 2.5644444444444443, | |
| "grad_norm": 0.005251379387288615, | |
| "learning_rate": 1.1189884945638874e-06, | |
| "loss": 0.0001, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 2.5666666666666664, | |
| "grad_norm": 0.0034114089032869764, | |
| "learning_rate": 1.1079825545001887e-06, | |
| "loss": 0.0001, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 2.568888888888889, | |
| "grad_norm": 0.0037836450266260425, | |
| "learning_rate": 1.097027832192854e-06, | |
| "loss": 0.0001, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 2.571111111111111, | |
| "grad_norm": 0.007026033134502348, | |
| "learning_rate": 1.086124390740757e-06, | |
| "loss": 0.0001, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 2.5733333333333333, | |
| "grad_norm": 0.012309562977294748, | |
| "learning_rate": 1.0752722929473936e-06, | |
| "loss": 0.0002, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 2.575555555555556, | |
| "grad_norm": 0.0049706293194138945, | |
| "learning_rate": 1.0644716013205303e-06, | |
| "loss": 0.0001, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 2.5777777777777775, | |
| "grad_norm": 0.005914896360722813, | |
| "learning_rate": 1.0537223780718265e-06, | |
| "loss": 0.0001, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.02009042720452564, | |
| "learning_rate": 1.0430246851164904e-06, | |
| "loss": 0.0002, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 2.582222222222222, | |
| "grad_norm": 0.008033771882258227, | |
| "learning_rate": 1.032378584072915e-06, | |
| "loss": 0.0001, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 2.5844444444444443, | |
| "grad_norm": 0.027780781334453254, | |
| "learning_rate": 1.021784136262326e-06, | |
| "loss": 0.0002, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 2.586666666666667, | |
| "grad_norm": 0.005349765799483499, | |
| "learning_rate": 1.0112414027084262e-06, | |
| "loss": 0.0001, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 2.588888888888889, | |
| "grad_norm": 0.5350371820953406, | |
| "learning_rate": 1.0007504441370508e-06, | |
| "loss": 0.02, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 2.591111111111111, | |
| "grad_norm": 0.00579608557767442, | |
| "learning_rate": 9.903113209758098e-07, | |
| "loss": 0.0001, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 2.5933333333333333, | |
| "grad_norm": 0.003346227235388532, | |
| "learning_rate": 9.799240933537379e-07, | |
| "loss": 0.0001, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 2.5955555555555554, | |
| "grad_norm": 0.005555161156391479, | |
| "learning_rate": 9.69588821100963e-07, | |
| "loss": 0.0001, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 2.597777777777778, | |
| "grad_norm": 0.11931186183352382, | |
| "learning_rate": 9.59305563748345e-07, | |
| "loss": 0.0007, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.013277004746277736, | |
| "learning_rate": 9.490743805271396e-07, | |
| "loss": 0.0002, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.602222222222222, | |
| "grad_norm": 0.005277608274691736, | |
| "learning_rate": 9.388953303686587e-07, | |
| "loss": 0.0001, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 2.6044444444444443, | |
| "grad_norm": 0.004267175411334593, | |
| "learning_rate": 9.28768471903928e-07, | |
| "loss": 0.0001, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 2.6066666666666665, | |
| "grad_norm": 0.004959692209382295, | |
| "learning_rate": 9.186938634633536e-07, | |
| "loss": 0.0001, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 2.608888888888889, | |
| "grad_norm": 0.0041297918366795154, | |
| "learning_rate": 9.086715630763787e-07, | |
| "loss": 0.0001, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 2.611111111111111, | |
| "grad_norm": 0.022836573896434544, | |
| "learning_rate": 8.987016284711569e-07, | |
| "loss": 0.0003, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 2.6133333333333333, | |
| "grad_norm": 0.03773559326274797, | |
| "learning_rate": 8.887841170742128e-07, | |
| "loss": 0.0003, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 2.6155555555555554, | |
| "grad_norm": 0.0028647191988726483, | |
| "learning_rate": 8.789190860101226e-07, | |
| "loss": 0.0001, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 2.6177777777777775, | |
| "grad_norm": 0.0031789939784086733, | |
| "learning_rate": 8.691065921011687e-07, | |
| "loss": 0.0001, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.12720773687545253, | |
| "learning_rate": 8.593466918670257e-07, | |
| "loss": 0.0006, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 2.6222222222222222, | |
| "grad_norm": 0.4268093156244954, | |
| "learning_rate": 8.49639441524428e-07, | |
| "loss": 0.0176, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.6244444444444444, | |
| "grad_norm": 0.03660775873018899, | |
| "learning_rate": 8.399848969868507e-07, | |
| "loss": 0.0003, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 2.626666666666667, | |
| "grad_norm": 0.0036012881835176067, | |
| "learning_rate": 8.303831138641805e-07, | |
| "loss": 0.0001, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 2.628888888888889, | |
| "grad_norm": 0.005401850806658615, | |
| "learning_rate": 8.208341474624071e-07, | |
| "loss": 0.0001, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 2.631111111111111, | |
| "grad_norm": 0.004784686260418065, | |
| "learning_rate": 8.113380527832904e-07, | |
| "loss": 0.0001, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 2.6333333333333333, | |
| "grad_norm": 0.003873826265462296, | |
| "learning_rate": 8.018948845240538e-07, | |
| "loss": 0.0001, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 2.6355555555555554, | |
| "grad_norm": 0.003570060316026375, | |
| "learning_rate": 7.925046970770689e-07, | |
| "loss": 0.0001, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 2.637777777777778, | |
| "grad_norm": 0.006792997134444474, | |
| "learning_rate": 7.83167544529534e-07, | |
| "loss": 0.0001, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.0032408994341471156, | |
| "learning_rate": 7.738834806631712e-07, | |
| "loss": 0.0001, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 2.6422222222222222, | |
| "grad_norm": 0.009679842187633845, | |
| "learning_rate": 7.646525589539122e-07, | |
| "loss": 0.0002, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 2.6444444444444444, | |
| "grad_norm": 0.004631679708146829, | |
| "learning_rate": 7.554748325715921e-07, | |
| "loss": 0.0001, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.6466666666666665, | |
| "grad_norm": 0.4849777823275308, | |
| "learning_rate": 7.463503543796413e-07, | |
| "loss": 0.0139, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 2.648888888888889, | |
| "grad_norm": 0.007147499280612747, | |
| "learning_rate": 7.372791769347843e-07, | |
| "loss": 0.0001, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 2.651111111111111, | |
| "grad_norm": 0.0027480981094971813, | |
| "learning_rate": 7.282613524867321e-07, | |
| "loss": 0.0001, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 2.6533333333333333, | |
| "grad_norm": 0.0037869936959253293, | |
| "learning_rate": 7.192969329778888e-07, | |
| "loss": 0.0001, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 2.6555555555555554, | |
| "grad_norm": 0.0038971404907898392, | |
| "learning_rate": 7.103859700430416e-07, | |
| "loss": 0.0001, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 2.6577777777777776, | |
| "grad_norm": 0.011757925550804325, | |
| "learning_rate": 7.015285150090744e-07, | |
| "loss": 0.0001, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.033436530937038085, | |
| "learning_rate": 6.927246188946635e-07, | |
| "loss": 0.0003, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 2.6622222222222223, | |
| "grad_norm": 0.006811719784407436, | |
| "learning_rate": 6.839743324099901e-07, | |
| "loss": 0.0001, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 2.6644444444444444, | |
| "grad_norm": 0.004809971025462849, | |
| "learning_rate": 6.752777059564431e-07, | |
| "loss": 0.0001, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.019661579581628955, | |
| "learning_rate": 6.666347896263326e-07, | |
| "loss": 0.0002, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.6688888888888886, | |
| "grad_norm": 0.006940685125493866, | |
| "learning_rate": 6.58045633202602e-07, | |
| "loss": 0.0001, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 2.671111111111111, | |
| "grad_norm": 0.006292333930267419, | |
| "learning_rate": 6.495102861585356e-07, | |
| "loss": 0.0001, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 2.6733333333333333, | |
| "grad_norm": 0.025469882925400526, | |
| "learning_rate": 6.41028797657478e-07, | |
| "loss": 0.0003, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 2.6755555555555555, | |
| "grad_norm": 0.036026851315135576, | |
| "learning_rate": 6.32601216552553e-07, | |
| "loss": 0.0003, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 2.677777777777778, | |
| "grad_norm": 0.01434040450695198, | |
| "learning_rate": 6.242275913863772e-07, | |
| "loss": 0.0002, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 0.0042179101566482465, | |
| "learning_rate": 6.159079703907823e-07, | |
| "loss": 0.0001, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 2.6822222222222223, | |
| "grad_norm": 0.32902750838539624, | |
| "learning_rate": 6.076424014865378e-07, | |
| "loss": 0.0025, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 2.6844444444444444, | |
| "grad_norm": 0.1452460013027498, | |
| "learning_rate": 5.994309322830749e-07, | |
| "loss": 0.0007, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 2.6866666666666665, | |
| "grad_norm": 0.6248074899419059, | |
| "learning_rate": 5.912736100782135e-07, | |
| "loss": 0.0087, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 2.688888888888889, | |
| "grad_norm": 0.14446898588050747, | |
| "learning_rate": 5.831704818578842e-07, | |
| "loss": 0.0009, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.6911111111111112, | |
| "grad_norm": 0.011284358134503602, | |
| "learning_rate": 5.751215942958699e-07, | |
| "loss": 0.0002, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 2.6933333333333334, | |
| "grad_norm": 0.003748648503738471, | |
| "learning_rate": 5.671269937535196e-07, | |
| "loss": 0.0001, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 2.6955555555555555, | |
| "grad_norm": 0.0052129587083826825, | |
| "learning_rate": 5.591867262794969e-07, | |
| "loss": 0.0001, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 2.6977777777777776, | |
| "grad_norm": 0.003630031924193664, | |
| "learning_rate": 5.513008376095064e-07, | |
| "loss": 0.0001, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.007204342913371975, | |
| "learning_rate": 5.434693731660324e-07, | |
| "loss": 0.0001, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 2.7022222222222223, | |
| "grad_norm": 0.003021953835953822, | |
| "learning_rate": 5.356923780580759e-07, | |
| "loss": 0.0001, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 2.7044444444444444, | |
| "grad_norm": 0.11814245905239595, | |
| "learning_rate": 5.279698970809011e-07, | |
| "loss": 0.0008, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 2.7066666666666666, | |
| "grad_norm": 0.028739708793016044, | |
| "learning_rate": 5.203019747157645e-07, | |
| "loss": 0.0003, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 2.7088888888888887, | |
| "grad_norm": 0.07574818681187415, | |
| "learning_rate": 5.12688655129675e-07, | |
| "loss": 0.0006, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 2.7111111111111112, | |
| "grad_norm": 0.004549392230014784, | |
| "learning_rate": 5.051299821751254e-07, | |
| "loss": 0.0001, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 2.7133333333333334, | |
| "grad_norm": 0.0029246563255944918, | |
| "learning_rate": 4.976259993898503e-07, | |
| "loss": 0.0001, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 2.7155555555555555, | |
| "grad_norm": 0.003691299192047548, | |
| "learning_rate": 4.901767499965637e-07, | |
| "loss": 0.0001, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 2.7177777777777776, | |
| "grad_norm": 0.008907447638894101, | |
| "learning_rate": 4.827822769027235e-07, | |
| "loss": 0.0001, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 2.7199999999999998, | |
| "grad_norm": 0.00492160377583905, | |
| "learning_rate": 4.7544262270027396e-07, | |
| "loss": 0.0001, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 2.7222222222222223, | |
| "grad_norm": 0.4690062060929002, | |
| "learning_rate": 4.6815782966540546e-07, | |
| "loss": 0.0028, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 2.7244444444444444, | |
| "grad_norm": 0.0042106967996562925, | |
| "learning_rate": 4.6092793975831e-07, | |
| "loss": 0.0001, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 2.7266666666666666, | |
| "grad_norm": 0.0032781998138083825, | |
| "learning_rate": 4.537529946229369e-07, | |
| "loss": 0.0001, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 2.728888888888889, | |
| "grad_norm": 0.3933125648266716, | |
| "learning_rate": 4.4663303558675764e-07, | |
| "loss": 0.0044, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 2.7311111111111113, | |
| "grad_norm": 0.003407069200395943, | |
| "learning_rate": 4.3956810366052705e-07, | |
| "loss": 0.0001, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 2.7333333333333334, | |
| "grad_norm": 0.005459938015419892, | |
| "learning_rate": 4.325582395380412e-07, | |
| "loss": 0.0001, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 2.7355555555555555, | |
| "grad_norm": 0.004093481462860526, | |
| "learning_rate": 4.2560348359590995e-07, | |
| "loss": 0.0001, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 2.7377777777777776, | |
| "grad_norm": 0.04103644971617523, | |
| "learning_rate": 4.187038758933204e-07, | |
| "loss": 0.0004, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.003084757983468076, | |
| "learning_rate": 4.118594561718081e-07, | |
| "loss": 0.0001, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 2.7422222222222223, | |
| "grad_norm": 0.01124572696377006, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.0002, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 2.7444444444444445, | |
| "grad_norm": 0.04318815897797949, | |
| "learning_rate": 3.9833633804852277e-07, | |
| "loss": 0.0004, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 2.7466666666666666, | |
| "grad_norm": 0.0054772194241636705, | |
| "learning_rate": 3.916577175395098e-07, | |
| "loss": 0.0001, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 2.7488888888888887, | |
| "grad_norm": 0.006884774909181901, | |
| "learning_rate": 3.8503444079664334e-07, | |
| "loss": 0.0001, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 2.7511111111111113, | |
| "grad_norm": 0.00413296317013562, | |
| "learning_rate": 3.784665459697989e-07, | |
| "loss": 0.0001, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 2.7533333333333334, | |
| "grad_norm": 0.02255459114762652, | |
| "learning_rate": 3.7195407088985834e-07, | |
| "loss": 0.0002, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 2.7555555555555555, | |
| "grad_norm": 0.03719970637958613, | |
| "learning_rate": 3.6549705306848313e-07, | |
| "loss": 0.0003, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 2.7577777777777777, | |
| "grad_norm": 0.007983234914170268, | |
| "learning_rate": 3.5909552969790376e-07, | |
| "loss": 0.0001, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.011979857989960716, | |
| "learning_rate": 3.5274953765070505e-07, | |
| "loss": 0.0001, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 2.7622222222222224, | |
| "grad_norm": 0.005517077940813177, | |
| "learning_rate": 3.4645911347961357e-07, | |
| "loss": 0.0001, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 2.7644444444444445, | |
| "grad_norm": 0.003745817144335179, | |
| "learning_rate": 3.4022429341728503e-07, | |
| "loss": 0.0001, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 2.7666666666666666, | |
| "grad_norm": 0.003682670098745159, | |
| "learning_rate": 3.340451133760958e-07, | |
| "loss": 0.0001, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 2.7688888888888887, | |
| "grad_norm": 0.004801024937481491, | |
| "learning_rate": 3.279216089479431e-07, | |
| "loss": 0.0001, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 2.771111111111111, | |
| "grad_norm": 0.00479974781733169, | |
| "learning_rate": 3.218538154040285e-07, | |
| "loss": 0.0001, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 2.7733333333333334, | |
| "grad_norm": 0.004122134821049037, | |
| "learning_rate": 3.158417676946635e-07, | |
| "loss": 0.0001, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 2.7755555555555556, | |
| "grad_norm": 0.004033493297464662, | |
| "learning_rate": 3.0988550044906305e-07, | |
| "loss": 0.0001, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 2.7777777777777777, | |
| "grad_norm": 0.027930720175123828, | |
| "learning_rate": 3.039850479751505e-07, | |
| "loss": 0.0003, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 2.7800000000000002, | |
| "grad_norm": 0.012685135179609314, | |
| "learning_rate": 2.9814044425935605e-07, | |
| "loss": 0.0001, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 2.7822222222222224, | |
| "grad_norm": 0.010190417633208807, | |
| "learning_rate": 2.923517229664241e-07, | |
| "loss": 0.0001, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 2.7844444444444445, | |
| "grad_norm": 0.0059579245690946, | |
| "learning_rate": 2.8661891743921644e-07, | |
| "loss": 0.0001, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 2.7866666666666666, | |
| "grad_norm": 0.005158188093585272, | |
| "learning_rate": 2.809420606985236e-07, | |
| "loss": 0.0001, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 2.7888888888888888, | |
| "grad_norm": 0.005363771569069556, | |
| "learning_rate": 2.753211854428728e-07, | |
| "loss": 0.0001, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 2.7911111111111113, | |
| "grad_norm": 0.024241794556868625, | |
| "learning_rate": 2.6975632404833584e-07, | |
| "loss": 0.0003, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 2.7933333333333334, | |
| "grad_norm": 0.0027377962733165662, | |
| "learning_rate": 2.6424750856835155e-07, | |
| "loss": 0.0001, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 2.7955555555555556, | |
| "grad_norm": 0.04515004412328495, | |
| "learning_rate": 2.5879477073353254e-07, | |
| "loss": 0.0004, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 2.7977777777777777, | |
| "grad_norm": 0.0321015947914232, | |
| "learning_rate": 2.5339814195148636e-07, | |
| "loss": 0.0004, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.010998446646797659, | |
| "learning_rate": 2.480576533066348e-07, | |
| "loss": 0.0001, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 2.8022222222222224, | |
| "grad_norm": 0.010998055413069254, | |
| "learning_rate": 2.427733355600337e-07, | |
| "loss": 0.0002, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 2.8044444444444445, | |
| "grad_norm": 0.006691749144475911, | |
| "learning_rate": 2.375452191491967e-07, | |
| "loss": 0.0001, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 2.8066666666666666, | |
| "grad_norm": 0.010634653045682464, | |
| "learning_rate": 2.3237333418791863e-07, | |
| "loss": 0.0001, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 2.8088888888888888, | |
| "grad_norm": 0.0030932629680560787, | |
| "learning_rate": 2.2725771046610335e-07, | |
| "loss": 0.0001, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 2.811111111111111, | |
| "grad_norm": 0.0725601108660828, | |
| "learning_rate": 2.2219837744959284e-07, | |
| "loss": 0.0004, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 2.8133333333333335, | |
| "grad_norm": 0.003271168475792778, | |
| "learning_rate": 2.1719536427999289e-07, | |
| "loss": 0.0001, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 2.8155555555555556, | |
| "grad_norm": 0.27261890898746227, | |
| "learning_rate": 2.1224869977451102e-07, | |
| "loss": 0.0012, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 2.8177777777777777, | |
| "grad_norm": 0.00365797451345942, | |
| "learning_rate": 2.0735841242578992e-07, | |
| "loss": 0.0001, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.19687492382136854, | |
| "learning_rate": 2.0252453040173646e-07, | |
| "loss": 0.0014, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 2.822222222222222, | |
| "grad_norm": 0.002394795768472273, | |
| "learning_rate": 1.9774708154536971e-07, | |
| "loss": 0.0001, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 2.8244444444444445, | |
| "grad_norm": 0.006649392463927794, | |
| "learning_rate": 1.9302609337465195e-07, | |
| "loss": 0.0001, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 2.8266666666666667, | |
| "grad_norm": 0.4965363542623611, | |
| "learning_rate": 1.8836159308233571e-07, | |
| "loss": 0.0053, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 2.828888888888889, | |
| "grad_norm": 0.012540724731826346, | |
| "learning_rate": 1.8375360753580485e-07, | |
| "loss": 0.0002, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 2.8311111111111114, | |
| "grad_norm": 0.005489379139549707, | |
| "learning_rate": 1.7920216327691696e-07, | |
| "loss": 0.0001, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 2.8333333333333335, | |
| "grad_norm": 0.016183241052155788, | |
| "learning_rate": 1.7470728652185688e-07, | |
| "loss": 0.0003, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 2.8355555555555556, | |
| "grad_norm": 0.008224578737159016, | |
| "learning_rate": 1.7026900316098217e-07, | |
| "loss": 0.0002, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 2.8377777777777777, | |
| "grad_norm": 0.0048274142910085666, | |
| "learning_rate": 1.6588733875867237e-07, | |
| "loss": 0.0001, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.4311891829869607, | |
| "learning_rate": 1.615623185531845e-07, | |
| "loss": 0.0088, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 2.8422222222222224, | |
| "grad_norm": 0.004263070002129473, | |
| "learning_rate": 1.572939674565055e-07, | |
| "loss": 0.0001, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 2.8444444444444446, | |
| "grad_norm": 0.015254795497478086, | |
| "learning_rate": 1.5308231005421115e-07, | |
| "loss": 0.0002, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 2.8466666666666667, | |
| "grad_norm": 0.013291408804940421, | |
| "learning_rate": 1.4892737060532404e-07, | |
| "loss": 0.0002, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 2.848888888888889, | |
| "grad_norm": 0.005738119731368655, | |
| "learning_rate": 1.4482917304217136e-07, | |
| "loss": 0.0001, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 2.851111111111111, | |
| "grad_norm": 0.016468433130945724, | |
| "learning_rate": 1.407877409702496e-07, | |
| "loss": 0.0002, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 2.8533333333333335, | |
| "grad_norm": 0.0033913224734236956, | |
| "learning_rate": 1.3680309766808675e-07, | |
| "loss": 0.0001, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 2.8555555555555556, | |
| "grad_norm": 0.005868384507214187, | |
| "learning_rate": 1.3287526608711132e-07, | |
| "loss": 0.0001, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 2.8577777777777778, | |
| "grad_norm": 0.00513873763037026, | |
| "learning_rate": 1.2900426885151473e-07, | |
| "loss": 0.0001, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 0.006470978797847646, | |
| "learning_rate": 1.2519012825812804e-07, | |
| "loss": 0.0001, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 2.862222222222222, | |
| "grad_norm": 0.3611330127796414, | |
| "learning_rate": 1.2143286627628424e-07, | |
| "loss": 0.0022, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 2.8644444444444446, | |
| "grad_norm": 0.0025519549951894425, | |
| "learning_rate": 1.1773250454770512e-07, | |
| "loss": 0.0001, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 2.8666666666666667, | |
| "grad_norm": 0.0030790402051275234, | |
| "learning_rate": 1.1408906438636236e-07, | |
| "loss": 0.0001, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 2.868888888888889, | |
| "grad_norm": 0.04500910445550049, | |
| "learning_rate": 1.1050256677836213e-07, | |
| "loss": 0.0005, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 2.871111111111111, | |
| "grad_norm": 0.08484721600919212, | |
| "learning_rate": 1.0697303238182522e-07, | |
| "loss": 0.0007, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 2.873333333333333, | |
| "grad_norm": 0.006992759589863438, | |
| "learning_rate": 1.0350048152676484e-07, | |
| "loss": 0.0001, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 2.8755555555555556, | |
| "grad_norm": 0.0032217969189247257, | |
| "learning_rate": 1.0008493421497123e-07, | |
| "loss": 0.0001, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 2.8777777777777778, | |
| "grad_norm": 0.00294265330598326, | |
| "learning_rate": 9.672641011989503e-08, | |
| "loss": 0.0001, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.013432657931401524, | |
| "learning_rate": 9.342492858653519e-08, | |
| "loss": 0.0002, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 2.8822222222222225, | |
| "grad_norm": 0.00484558294903799, | |
| "learning_rate": 9.018050863132566e-08, | |
| "loss": 0.0001, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 2.8844444444444446, | |
| "grad_norm": 0.0038969287386960896, | |
| "learning_rate": 8.699316894203225e-08, | |
| "loss": 0.0001, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 2.8866666666666667, | |
| "grad_norm": 0.13263414349766448, | |
| "learning_rate": 8.386292787763483e-08, | |
| "loss": 0.0009, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 2.888888888888889, | |
| "grad_norm": 0.003416846646386931, | |
| "learning_rate": 8.078980346822863e-08, | |
| "loss": 0.0001, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.891111111111111, | |
| "grad_norm": 0.003086859046861962, | |
| "learning_rate": 7.777381341492085e-08, | |
| "loss": 0.0001, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 2.8933333333333335, | |
| "grad_norm": 0.005047679447377061, | |
| "learning_rate": 7.481497508972313e-08, | |
| "loss": 0.0001, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 2.8955555555555557, | |
| "grad_norm": 0.6511769185918372, | |
| "learning_rate": 7.191330553545595e-08, | |
| "loss": 0.0063, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 2.897777777777778, | |
| "grad_norm": 0.022839738759826613, | |
| "learning_rate": 6.906882146565097e-08, | |
| "loss": 0.0003, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.0031342380530989516, | |
| "learning_rate": 6.628153926445113e-08, | |
| "loss": 0.0001, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 2.902222222222222, | |
| "grad_norm": 0.017551207292460038, | |
| "learning_rate": 6.355147498651959e-08, | |
| "loss": 0.0002, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 2.9044444444444446, | |
| "grad_norm": 0.07403602569092663, | |
| "learning_rate": 6.087864435694535e-08, | |
| "loss": 0.0006, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 2.9066666666666667, | |
| "grad_norm": 0.3148099703604383, | |
| "learning_rate": 5.8263062771153344e-08, | |
| "loss": 0.0019, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 2.908888888888889, | |
| "grad_norm": 0.00387331665789422, | |
| "learning_rate": 5.5704745294815624e-08, | |
| "loss": 0.0001, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 2.911111111111111, | |
| "grad_norm": 0.021576625519809902, | |
| "learning_rate": 5.3203706663765845e-08, | |
| "loss": 0.0002, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 2.913333333333333, | |
| "grad_norm": 0.0034130606338086387, | |
| "learning_rate": 5.0759961283911584e-08, | |
| "loss": 0.0001, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 2.9155555555555557, | |
| "grad_norm": 0.0038156080689095854, | |
| "learning_rate": 4.8373523231153297e-08, | |
| "loss": 0.0001, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 2.917777777777778, | |
| "grad_norm": 0.015014992268700504, | |
| "learning_rate": 4.604440625130324e-08, | |
| "loss": 0.0002, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.025212359208510927, | |
| "learning_rate": 4.377262376000557e-08, | |
| "loss": 0.0003, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 2.9222222222222225, | |
| "grad_norm": 0.0495628057571495, | |
| "learning_rate": 4.155818884266194e-08, | |
| "loss": 0.0003, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 2.924444444444444, | |
| "grad_norm": 0.0032226594480421703, | |
| "learning_rate": 3.940111425435045e-08, | |
| "loss": 0.0001, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 2.9266666666666667, | |
| "grad_norm": 0.0038040649196142812, | |
| "learning_rate": 3.730141241975682e-08, | |
| "loss": 0.0001, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 2.928888888888889, | |
| "grad_norm": 0.0032837782458860106, | |
| "learning_rate": 3.525909543310002e-08, | |
| "loss": 0.0001, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 2.931111111111111, | |
| "grad_norm": 0.0033282093518853862, | |
| "learning_rate": 3.327417505806785e-08, | |
| "loss": 0.0001, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 2.9333333333333336, | |
| "grad_norm": 0.0036058895200049334, | |
| "learning_rate": 3.134666272774034e-08, | |
| "loss": 0.0001, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.9355555555555557, | |
| "grad_norm": 0.15329747019942228, | |
| "learning_rate": 2.9476569544532042e-08, | |
| "loss": 0.0009, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 2.937777777777778, | |
| "grad_norm": 0.015676203360389086, | |
| "learning_rate": 2.7663906280124276e-08, | |
| "loss": 0.0002, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.00368092033774781, | |
| "learning_rate": 2.5908683375404088e-08, | |
| "loss": 0.0001, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 2.942222222222222, | |
| "grad_norm": 0.003448489788369788, | |
| "learning_rate": 2.4210910940402066e-08, | |
| "loss": 0.0001, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 2.9444444444444446, | |
| "grad_norm": 0.003721559987849499, | |
| "learning_rate": 2.257059875423795e-08, | |
| "loss": 0.0001, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 2.9466666666666668, | |
| "grad_norm": 0.00768849252547393, | |
| "learning_rate": 2.0987756265060664e-08, | |
| "loss": 0.0001, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 2.948888888888889, | |
| "grad_norm": 0.011805470173303967, | |
| "learning_rate": 1.946239258999616e-08, | |
| "loss": 0.0002, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 2.951111111111111, | |
| "grad_norm": 0.00449266188291609, | |
| "learning_rate": 1.7994516515094097e-08, | |
| "loss": 0.0001, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 2.953333333333333, | |
| "grad_norm": 0.0035354581440366554, | |
| "learning_rate": 1.6584136495277904e-08, | |
| "loss": 0.0001, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 2.9555555555555557, | |
| "grad_norm": 0.019812931095097786, | |
| "learning_rate": 1.523126065429259e-08, | |
| "loss": 0.0002, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.957777777777778, | |
| "grad_norm": 0.0026262297366852743, | |
| "learning_rate": 1.3935896784663671e-08, | |
| "loss": 0.0001, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.012992764182115608, | |
| "learning_rate": 1.2698052347649426e-08, | |
| "loss": 0.0002, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 2.962222222222222, | |
| "grad_norm": 0.0026522205938780915, | |
| "learning_rate": 1.1517734473195375e-08, | |
| "loss": 0.0001, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 2.964444444444444, | |
| "grad_norm": 0.009415062661770548, | |
| "learning_rate": 1.0394949959898759e-08, | |
| "loss": 0.0001, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 2.966666666666667, | |
| "grad_norm": 0.014273510234527593, | |
| "learning_rate": 9.32970527496524e-09, | |
| "loss": 0.0002, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 2.968888888888889, | |
| "grad_norm": 0.0033088933528249904, | |
| "learning_rate": 8.322006554171147e-09, | |
| "loss": 0.0001, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 2.971111111111111, | |
| "grad_norm": 0.002826063361941048, | |
| "learning_rate": 7.371859601832398e-09, | |
| "loss": 0.0001, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 2.9733333333333336, | |
| "grad_norm": 0.03722767739152592, | |
| "learning_rate": 6.479269890766748e-09, | |
| "loss": 0.0003, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 2.9755555555555553, | |
| "grad_norm": 0.004732572538956217, | |
| "learning_rate": 5.644242562264923e-09, | |
| "loss": 0.0001, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 2.977777777777778, | |
| "grad_norm": 0.006326849340241003, | |
| "learning_rate": 4.866782426058425e-09, | |
| "loss": 0.0001, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.0032141717860662304, | |
| "learning_rate": 4.146893960295106e-09, | |
| "loss": 0.0001, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 2.982222222222222, | |
| "grad_norm": 0.0050314927209308856, | |
| "learning_rate": 3.4845813115114147e-09, | |
| "loss": 0.0001, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 2.9844444444444447, | |
| "grad_norm": 0.033486444512699325, | |
| "learning_rate": 2.879848294609078e-09, | |
| "loss": 0.0003, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 2.986666666666667, | |
| "grad_norm": 0.02362186341905294, | |
| "learning_rate": 2.332698392830679e-09, | |
| "loss": 0.0002, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 2.988888888888889, | |
| "grad_norm": 0.02174838270113621, | |
| "learning_rate": 1.843134757745224e-09, | |
| "loss": 0.0003, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 2.991111111111111, | |
| "grad_norm": 0.0029008058405127378, | |
| "learning_rate": 1.4111602092226062e-09, | |
| "loss": 0.0001, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 2.993333333333333, | |
| "grad_norm": 0.006082092489615866, | |
| "learning_rate": 1.0367772354258342e-09, | |
| "loss": 0.0001, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 2.9955555555555557, | |
| "grad_norm": 0.0034629792458822587, | |
| "learning_rate": 7.199879927877185e-10, | |
| "loss": 0.0001, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 2.997777777777778, | |
| "grad_norm": 0.023336551343364174, | |
| "learning_rate": 4.6079430600531883e-10, | |
| "loss": 0.0003, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.012419478132995498, | |
| "learning_rate": 2.5919766802773306e-10, | |
| "loss": 0.0001, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1350, | |
| "total_flos": 16673934671872.0, | |
| "train_loss": 0.08598035371858849, | |
| "train_runtime": 9845.7437, | |
| "train_samples_per_second": 2.192, | |
| "train_steps_per_second": 0.137 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1350, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 50000, | |
| "total_flos": 16673934671872.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
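
The object above follows the layout of a Hugging Face `Trainer` state file: `log_history` holds one entry per logged step with `step`, `loss`, `learning_rate`, and `grad_norm`, and the final entry plus the top-level keys carry the run summary (`train_loss`, `train_runtime`, `total_flos`, …). As an illustrative sketch only (not part of the original log), the snippet below assumes the JSON has been saved locally as `trainer_state.json` and plots the loss curve from it; the filename and output path are placeholders.

```python
# Minimal sketch: load a Trainer-state JSON like the one above and plot loss vs. step.
# Assumes the file is saved as "trainer_state.json" in the working directory.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss; the final summary entry has none.
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.yscale("log")  # late-training losses sit around 1e-4, so a log scale is easier to read
plt.title("Loss curve from trainer_state.json")
plt.tight_layout()
plt.savefig("loss_curve.png")
```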