| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 8.0, |
| "eval_steps": 500, |
| "global_step": 1640, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004878048780487805, |
| "grad_norm": 37.90617752075195, |
| "learning_rate": 5e-06, |
| "loss": 5.3731, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.00975609756097561, |
| "grad_norm": 32.92101287841797, |
| "learning_rate": 4.999997064365715e-06, |
| "loss": 3.7922, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.014634146341463415, |
| "grad_norm": 28.488866806030273, |
| "learning_rate": 4.999988257469751e-06, |
| "loss": 3.3238, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01951219512195122, |
| "grad_norm": 28.274654388427734, |
| "learning_rate": 4.999973579332793e-06, |
| "loss": 3.5405, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.024390243902439025, |
| "grad_norm": 18.761985778808594, |
| "learning_rate": 4.999953029989312e-06, |
| "loss": 2.7492, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.02926829268292683, |
| "grad_norm": 9.646541595458984, |
| "learning_rate": 4.999926609487568e-06, |
| "loss": 1.7146, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.03414634146341464, |
| "grad_norm": 15.763612747192383, |
| "learning_rate": 4.9998943178896106e-06, |
| "loss": 2.8485, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.03902439024390244, |
| "grad_norm": 13.853349685668945, |
| "learning_rate": 4.999856155271276e-06, |
| "loss": 3.0975, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.04390243902439024, |
| "grad_norm": 16.893617630004883, |
| "learning_rate": 4.999812121722191e-06, |
| "loss": 2.8534, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.04878048780487805, |
| "grad_norm": 15.47554874420166, |
| "learning_rate": 4.999762217345766e-06, |
| "loss": 3.4569, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05365853658536585, |
| "grad_norm": 6.277958393096924, |
| "learning_rate": 4.999706442259205e-06, |
| "loss": 1.8088, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.05853658536585366, |
| "grad_norm": 5.451688289642334, |
| "learning_rate": 4.999644796593492e-06, |
| "loss": 1.8104, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06341463414634146, |
| "grad_norm": 4.333822727203369, |
| "learning_rate": 4.999577280493407e-06, |
| "loss": 1.7201, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.06829268292682927, |
| "grad_norm": 4.488610744476318, |
| "learning_rate": 4.99950389411751e-06, |
| "loss": 1.9804, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.07317073170731707, |
| "grad_norm": 1.8763024806976318, |
| "learning_rate": 4.999424637638148e-06, |
| "loss": 0.9227, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.07804878048780488, |
| "grad_norm": 2.98995304107666, |
| "learning_rate": 4.999339511241458e-06, |
| "loss": 1.2368, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.08292682926829269, |
| "grad_norm": 5.778397083282471, |
| "learning_rate": 4.9992485151273584e-06, |
| "loss": 2.5745, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.08780487804878048, |
| "grad_norm": 5.222808361053467, |
| "learning_rate": 4.999151649509554e-06, |
| "loss": 2.0987, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.09268292682926829, |
| "grad_norm": 4.075173377990723, |
| "learning_rate": 4.9990489146155356e-06, |
| "loss": 1.7732, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.0975609756097561, |
| "grad_norm": 4.210625171661377, |
| "learning_rate": 4.9989403106865765e-06, |
| "loss": 1.8928, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.1024390243902439, |
| "grad_norm": 1.7968504428863525, |
| "learning_rate": 4.9988258379777334e-06, |
| "loss": 1.0457, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.1073170731707317, |
| "grad_norm": 2.494248151779175, |
| "learning_rate": 4.998705496757846e-06, |
| "loss": 1.3236, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.11219512195121951, |
| "grad_norm": 4.312766075134277, |
| "learning_rate": 4.998579287309538e-06, |
| "loss": 2.4289, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.11707317073170732, |
| "grad_norm": 5.361437797546387, |
| "learning_rate": 4.998447209929211e-06, |
| "loss": 1.7608, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.12195121951219512, |
| "grad_norm": 1.4642046689987183, |
| "learning_rate": 4.998309264927053e-06, |
| "loss": 0.7571, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.12682926829268293, |
| "grad_norm": 3.642005205154419, |
| "learning_rate": 4.998165452627025e-06, |
| "loss": 1.0634, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.13170731707317074, |
| "grad_norm": 3.2727863788604736, |
| "learning_rate": 4.998015773366874e-06, |
| "loss": 1.4016, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.13658536585365855, |
| "grad_norm": 1.5661338567733765, |
| "learning_rate": 4.997860227498122e-06, |
| "loss": 0.8305, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.14146341463414633, |
| "grad_norm": 3.3989508152008057, |
| "learning_rate": 4.99769881538607e-06, |
| "loss": 1.8744, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.14634146341463414, |
| "grad_norm": 3.512221574783325, |
| "learning_rate": 4.997531537409794e-06, |
| "loss": 1.7701, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.15121951219512195, |
| "grad_norm": 2.261887311935425, |
| "learning_rate": 4.99735839396215e-06, |
| "loss": 1.0653, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.15609756097560976, |
| "grad_norm": 2.2495696544647217, |
| "learning_rate": 4.9971793854497655e-06, |
| "loss": 1.1914, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.16097560975609757, |
| "grad_norm": 4.262424468994141, |
| "learning_rate": 4.996994512293042e-06, |
| "loss": 1.7311, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.16585365853658537, |
| "grad_norm": 3.361311674118042, |
| "learning_rate": 4.996803774926157e-06, |
| "loss": 1.4482, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.17073170731707318, |
| "grad_norm": 1.5855786800384521, |
| "learning_rate": 4.996607173797059e-06, |
| "loss": 1.3959, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.17560975609756097, |
| "grad_norm": 2.5001468658447266, |
| "learning_rate": 4.996404709367466e-06, |
| "loss": 1.3452, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.18048780487804877, |
| "grad_norm": 2.7903096675872803, |
| "learning_rate": 4.996196382112868e-06, |
| "loss": 1.018, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.18536585365853658, |
| "grad_norm": 2.7586171627044678, |
| "learning_rate": 4.9959821925225235e-06, |
| "loss": 1.291, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.1902439024390244, |
| "grad_norm": 2.873840808868408, |
| "learning_rate": 4.995762141099456e-06, |
| "loss": 1.1418, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.1951219512195122, |
| "grad_norm": 1.6896076202392578, |
| "learning_rate": 4.995536228360461e-06, |
| "loss": 1.1662, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.4597285985946655, |
| "learning_rate": 4.995304454836095e-06, |
| "loss": 1.0373, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.2048780487804878, |
| "grad_norm": 2.4222421646118164, |
| "learning_rate": 4.9950668210706795e-06, |
| "loss": 1.1895, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.2097560975609756, |
| "grad_norm": 3.295085906982422, |
| "learning_rate": 4.994823327622299e-06, |
| "loss": 1.4166, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.2146341463414634, |
| "grad_norm": 1.8216716051101685, |
| "learning_rate": 4.9945739750628e-06, |
| "loss": 1.0009, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.21951219512195122, |
| "grad_norm": 2.2421364784240723, |
| "learning_rate": 4.994318763977789e-06, |
| "loss": 1.1999, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.22439024390243903, |
| "grad_norm": 5.6812744140625, |
| "learning_rate": 4.994057694966632e-06, |
| "loss": 1.3831, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.22926829268292684, |
| "grad_norm": 1.477816104888916, |
| "learning_rate": 4.993790768642449e-06, |
| "loss": 1.1419, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.23414634146341465, |
| "grad_norm": 4.485177993774414, |
| "learning_rate": 4.99351798563212e-06, |
| "loss": 1.9434, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.23902439024390243, |
| "grad_norm": 2.770219326019287, |
| "learning_rate": 4.993239346576278e-06, |
| "loss": 1.3214, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.24390243902439024, |
| "grad_norm": 2.721611976623535, |
| "learning_rate": 4.99295485212931e-06, |
| "loss": 1.4329, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.24878048780487805, |
| "grad_norm": 3.714306592941284, |
| "learning_rate": 4.992664502959351e-06, |
| "loss": 1.372, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.25365853658536586, |
| "grad_norm": 1.2679803371429443, |
| "learning_rate": 4.99236829974829e-06, |
| "loss": 0.8086, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.25853658536585367, |
| "grad_norm": 1.6986169815063477, |
| "learning_rate": 4.992066243191762e-06, |
| "loss": 1.2012, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.2634146341463415, |
| "grad_norm": 1.3443604707717896, |
| "learning_rate": 4.991758333999148e-06, |
| "loss": 0.8986, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2682926829268293, |
| "grad_norm": 2.0896975994110107, |
| "learning_rate": 4.991444572893575e-06, |
| "loss": 0.8819, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2731707317073171, |
| "grad_norm": 1.255516529083252, |
| "learning_rate": 4.991124960611916e-06, |
| "loss": 0.6255, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.2780487804878049, |
| "grad_norm": 2.125410318374634, |
| "learning_rate": 4.99079949790478e-06, |
| "loss": 0.9983, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.28292682926829266, |
| "grad_norm": 2.0312907695770264, |
| "learning_rate": 4.99046818553652e-06, |
| "loss": 1.1137, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.28780487804878047, |
| "grad_norm": 2.904625177383423, |
| "learning_rate": 4.9901310242852246e-06, |
| "loss": 1.2009, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.2926829268292683, |
| "grad_norm": 2.512932777404785, |
| "learning_rate": 4.9897880149427206e-06, |
| "loss": 1.2234, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.2975609756097561, |
| "grad_norm": 2.5621752738952637, |
| "learning_rate": 4.989439158314566e-06, |
| "loss": 1.2354, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.3024390243902439, |
| "grad_norm": 1.0051912069320679, |
| "learning_rate": 4.989084455220056e-06, |
| "loss": 0.614, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.3073170731707317, |
| "grad_norm": 1.5008565187454224, |
| "learning_rate": 4.988723906492212e-06, |
| "loss": 1.002, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.3121951219512195, |
| "grad_norm": 1.1436376571655273, |
| "learning_rate": 4.988357512977785e-06, |
| "loss": 0.6483, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.3170731707317073, |
| "grad_norm": 1.16792893409729, |
| "learning_rate": 4.987985275537252e-06, |
| "loss": 0.638, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.32195121951219513, |
| "grad_norm": 1.5670536756515503, |
| "learning_rate": 4.9876071950448185e-06, |
| "loss": 1.0274, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.32682926829268294, |
| "grad_norm": 1.357515811920166, |
| "learning_rate": 4.987223272388407e-06, |
| "loss": 0.7612, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.33170731707317075, |
| "grad_norm": 1.8383222818374634, |
| "learning_rate": 4.986833508469663e-06, |
| "loss": 1.1496, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.33658536585365856, |
| "grad_norm": 2.8976657390594482, |
| "learning_rate": 4.98643790420395e-06, |
| "loss": 0.9924, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.34146341463414637, |
| "grad_norm": 1.7679390907287598, |
| "learning_rate": 4.986036460520348e-06, |
| "loss": 0.9408, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.3463414634146341, |
| "grad_norm": 2.3815059661865234, |
| "learning_rate": 4.98562917836165e-06, |
| "loss": 1.2923, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.35121951219512193, |
| "grad_norm": 1.679026484489441, |
| "learning_rate": 4.985216058684362e-06, |
| "loss": 0.8476, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.35609756097560974, |
| "grad_norm": 1.7254586219787598, |
| "learning_rate": 4.984797102458697e-06, |
| "loss": 1.1775, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.36097560975609755, |
| "grad_norm": 1.195844292640686, |
| "learning_rate": 4.984372310668579e-06, |
| "loss": 0.6664, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.36585365853658536, |
| "grad_norm": 1.754431128501892, |
| "learning_rate": 4.983941684311633e-06, |
| "loss": 1.4361, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.37073170731707317, |
| "grad_norm": 1.8255788087844849, |
| "learning_rate": 4.983505224399188e-06, |
| "loss": 0.8977, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.375609756097561, |
| "grad_norm": 1.340451717376709, |
| "learning_rate": 4.983062931956275e-06, |
| "loss": 0.9432, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.3804878048780488, |
| "grad_norm": 1.7810503244400024, |
| "learning_rate": 4.9826148080216195e-06, |
| "loss": 1.0463, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.3853658536585366, |
| "grad_norm": 1.729826807975769, |
| "learning_rate": 4.9821608536476445e-06, |
| "loss": 1.4451, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3902439024390244, |
| "grad_norm": 2.6638681888580322, |
| "learning_rate": 4.981701069900465e-06, |
| "loss": 1.0409, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.3951219512195122, |
| "grad_norm": 1.467076301574707, |
| "learning_rate": 4.9812354578598876e-06, |
| "loss": 0.8596, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 1.453657627105713, |
| "learning_rate": 4.980764018619405e-06, |
| "loss": 0.9033, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.40487804878048783, |
| "grad_norm": 1.9025623798370361, |
| "learning_rate": 4.980286753286196e-06, |
| "loss": 1.0586, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.4097560975609756, |
| "grad_norm": 1.4698103666305542, |
| "learning_rate": 4.97980366298112e-06, |
| "loss": 0.8611, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.4146341463414634, |
| "grad_norm": 1.6086381673812866, |
| "learning_rate": 4.97931474883872e-06, |
| "loss": 0.8884, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.4195121951219512, |
| "grad_norm": 1.6679224967956543, |
| "learning_rate": 4.978820012007213e-06, |
| "loss": 0.969, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.424390243902439, |
| "grad_norm": 1.5308334827423096, |
| "learning_rate": 4.978319453648495e-06, |
| "loss": 1.0345, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.4292682926829268, |
| "grad_norm": 1.7788817882537842, |
| "learning_rate": 4.977813074938128e-06, |
| "loss": 0.9649, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.43414634146341463, |
| "grad_norm": 1.591383934020996, |
| "learning_rate": 4.977300877065347e-06, |
| "loss": 0.9299, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.43902439024390244, |
| "grad_norm": 1.525985836982727, |
| "learning_rate": 4.976782861233053e-06, |
| "loss": 0.7272, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.44390243902439025, |
| "grad_norm": 2.7333128452301025, |
| "learning_rate": 4.976259028657812e-06, |
| "loss": 0.6694, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.44878048780487806, |
| "grad_norm": 1.2718108892440796, |
| "learning_rate": 4.975729380569845e-06, |
| "loss": 0.8037, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.45365853658536587, |
| "grad_norm": 1.157771110534668, |
| "learning_rate": 4.975193918213035e-06, |
| "loss": 0.52, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.4585365853658537, |
| "grad_norm": 1.0602248907089233, |
| "learning_rate": 4.974652642844921e-06, |
| "loss": 0.7135, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4634146341463415, |
| "grad_norm": 2.418555974960327, |
| "learning_rate": 4.974105555736693e-06, |
| "loss": 1.0996, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.4682926829268293, |
| "grad_norm": 1.1728110313415527, |
| "learning_rate": 4.973552658173186e-06, |
| "loss": 0.7268, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.47317073170731705, |
| "grad_norm": 1.943113088607788, |
| "learning_rate": 4.972993951452887e-06, |
| "loss": 0.9091, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.47804878048780486, |
| "grad_norm": 5.063096523284912, |
| "learning_rate": 4.9724294368879214e-06, |
| "loss": 0.8242, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.48292682926829267, |
| "grad_norm": 1.5232555866241455, |
| "learning_rate": 4.971859115804055e-06, |
| "loss": 1.0543, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.4878048780487805, |
| "grad_norm": 1.1453967094421387, |
| "learning_rate": 4.9712829895406935e-06, |
| "loss": 0.8209, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4926829268292683, |
| "grad_norm": 2.12345814704895, |
| "learning_rate": 4.970701059450872e-06, |
| "loss": 0.5849, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.4975609756097561, |
| "grad_norm": 1.2201842069625854, |
| "learning_rate": 4.970113326901258e-06, |
| "loss": 0.9969, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.5024390243902439, |
| "grad_norm": 1.3047524690628052, |
| "learning_rate": 4.9695197932721455e-06, |
| "loss": 0.9339, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.5073170731707317, |
| "grad_norm": 1.6083660125732422, |
| "learning_rate": 4.968920459957453e-06, |
| "loss": 0.9702, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.5121951219512195, |
| "grad_norm": 1.5247286558151245, |
| "learning_rate": 4.968315328364719e-06, |
| "loss": 1.0449, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.5170731707317073, |
| "grad_norm": 1.354805588722229, |
| "learning_rate": 4.9677043999151e-06, |
| "loss": 1.1431, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.5219512195121951, |
| "grad_norm": 1.088321328163147, |
| "learning_rate": 4.967087676043366e-06, |
| "loss": 0.519, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.526829268292683, |
| "grad_norm": 1.5086662769317627, |
| "learning_rate": 4.966465158197897e-06, |
| "loss": 0.9357, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.5317073170731708, |
| "grad_norm": 1.3161298036575317, |
| "learning_rate": 4.965836847840681e-06, |
| "loss": 0.7234, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.5365853658536586, |
| "grad_norm": 1.4465640783309937, |
| "learning_rate": 4.96520274644731e-06, |
| "loss": 0.8911, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.5414634146341464, |
| "grad_norm": 1.0576995611190796, |
| "learning_rate": 4.964562855506976e-06, |
| "loss": 0.7254, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.5463414634146342, |
| "grad_norm": 1.1018916368484497, |
| "learning_rate": 4.963917176522466e-06, |
| "loss": 0.6603, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.551219512195122, |
| "grad_norm": 2.150622606277466, |
| "learning_rate": 4.963265711010164e-06, |
| "loss": 1.0451, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.5560975609756098, |
| "grad_norm": 1.7743186950683594, |
| "learning_rate": 4.9626084605000395e-06, |
| "loss": 0.8717, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5609756097560976, |
| "grad_norm": 1.205291509628296, |
| "learning_rate": 4.961945426535652e-06, |
| "loss": 0.5802, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.5658536585365853, |
| "grad_norm": 1.3969353437423706, |
| "learning_rate": 4.961276610674141e-06, |
| "loss": 0.9158, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.5707317073170731, |
| "grad_norm": 1.2318240404129028, |
| "learning_rate": 4.960602014486225e-06, |
| "loss": 1.0086, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.5756097560975609, |
| "grad_norm": 1.2202470302581787, |
| "learning_rate": 4.959921639556199e-06, |
| "loss": 0.7888, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.5804878048780487, |
| "grad_norm": 1.1564440727233887, |
| "learning_rate": 4.959235487481928e-06, |
| "loss": 1.0053, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.5853658536585366, |
| "grad_norm": 1.2278865575790405, |
| "learning_rate": 4.958543559874846e-06, |
| "loss": 0.5486, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.5902439024390244, |
| "grad_norm": 1.5465888977050781, |
| "learning_rate": 4.9578458583599495e-06, |
| "loss": 0.8232, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.5951219512195122, |
| "grad_norm": 1.6284047365188599, |
| "learning_rate": 4.957142384575795e-06, |
| "loss": 0.7773, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 1.1794490814208984, |
| "learning_rate": 4.956433140174498e-06, |
| "loss": 0.8236, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.6048780487804878, |
| "grad_norm": 1.2404091358184814, |
| "learning_rate": 4.9557181268217225e-06, |
| "loss": 0.8611, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.6097560975609756, |
| "grad_norm": 1.2188373804092407, |
| "learning_rate": 4.954997346196683e-06, |
| "loss": 1.2922, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.6146341463414634, |
| "grad_norm": 1.1514503955841064, |
| "learning_rate": 4.954270799992138e-06, |
| "loss": 0.7073, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.6195121951219512, |
| "grad_norm": 1.285913109779358, |
| "learning_rate": 4.953538489914387e-06, |
| "loss": 0.729, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.624390243902439, |
| "grad_norm": 1.8677104711532593, |
| "learning_rate": 4.9528004176832654e-06, |
| "loss": 0.6211, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.6292682926829268, |
| "grad_norm": 1.2369698286056519, |
| "learning_rate": 4.952056585032142e-06, |
| "loss": 1.0311, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.6341463414634146, |
| "grad_norm": 1.186990737915039, |
| "learning_rate": 4.951306993707913e-06, |
| "loss": 0.8207, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.6390243902439025, |
| "grad_norm": 1.189424991607666, |
| "learning_rate": 4.950551645470998e-06, |
| "loss": 0.7853, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.6439024390243903, |
| "grad_norm": 1.3477216958999634, |
| "learning_rate": 4.9497905420953406e-06, |
| "loss": 0.7271, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.6487804878048781, |
| "grad_norm": 1.368592619895935, |
| "learning_rate": 4.949023685368395e-06, |
| "loss": 0.7394, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.6536585365853659, |
| "grad_norm": 1.2769527435302734, |
| "learning_rate": 4.948251077091131e-06, |
| "loss": 1.0713, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.6585365853658537, |
| "grad_norm": 1.1477972269058228, |
| "learning_rate": 4.947472719078025e-06, |
| "loss": 0.841, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.6634146341463415, |
| "grad_norm": 1.0983368158340454, |
| "learning_rate": 4.9466886131570565e-06, |
| "loss": 0.8665, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.6682926829268293, |
| "grad_norm": 1.2526847124099731, |
| "learning_rate": 4.945898761169704e-06, |
| "loss": 1.0613, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.6731707317073171, |
| "grad_norm": 1.1426396369934082, |
| "learning_rate": 4.945103164970941e-06, |
| "loss": 0.6248, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.6780487804878049, |
| "grad_norm": 1.3275880813598633, |
| "learning_rate": 4.9443018264292304e-06, |
| "loss": 0.755, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.6829268292682927, |
| "grad_norm": 1.4942073822021484, |
| "learning_rate": 4.9434947474265225e-06, |
| "loss": 0.9451, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.6878048780487804, |
| "grad_norm": 1.1777600049972534, |
| "learning_rate": 4.942681929858249e-06, |
| "loss": 1.0505, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.6926829268292682, |
| "grad_norm": 1.2474116086959839, |
| "learning_rate": 4.941863375633315e-06, |
| "loss": 0.9174, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.697560975609756, |
| "grad_norm": 1.5067697763442993, |
| "learning_rate": 4.9410390866741056e-06, |
| "loss": 0.7737, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.7024390243902439, |
| "grad_norm": 1.3016574382781982, |
| "learning_rate": 4.9402090649164655e-06, |
| "loss": 0.7588, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.7073170731707317, |
| "grad_norm": 1.652600884437561, |
| "learning_rate": 4.9393733123097085e-06, |
| "loss": 1.0588, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.7121951219512195, |
| "grad_norm": 1.267997145652771, |
| "learning_rate": 4.9385318308166065e-06, |
| "loss": 0.8996, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.7170731707317073, |
| "grad_norm": 1.8560484647750854, |
| "learning_rate": 4.937684622413385e-06, |
| "loss": 0.6286, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.7219512195121951, |
| "grad_norm": 1.4419782161712646, |
| "learning_rate": 4.9368316890897185e-06, |
| "loss": 0.9818, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.7268292682926829, |
| "grad_norm": 1.134084701538086, |
| "learning_rate": 4.9359730328487264e-06, |
| "loss": 0.5246, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.7317073170731707, |
| "grad_norm": 1.0102615356445312, |
| "learning_rate": 4.935108655706972e-06, |
| "loss": 0.7948, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.7365853658536585, |
| "grad_norm": 1.3480703830718994, |
| "learning_rate": 4.934238559694448e-06, |
| "loss": 1.0951, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.7414634146341463, |
| "grad_norm": 1.4133951663970947, |
| "learning_rate": 4.9333627468545845e-06, |
| "loss": 0.6936, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.7463414634146341, |
| "grad_norm": 1.3072413206100464, |
| "learning_rate": 4.932481219244231e-06, |
| "loss": 0.799, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.751219512195122, |
| "grad_norm": 1.3893049955368042, |
| "learning_rate": 4.931593978933666e-06, |
| "loss": 0.7375, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.7560975609756098, |
| "grad_norm": 1.2313531637191772, |
| "learning_rate": 4.930701028006577e-06, |
| "loss": 0.9487, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.7609756097560976, |
| "grad_norm": 1.3426295518875122, |
| "learning_rate": 4.929802368560066e-06, |
| "loss": 0.7542, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.7658536585365854, |
| "grad_norm": 1.5280004739761353, |
| "learning_rate": 4.928898002704642e-06, |
| "loss": 0.8784, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.7707317073170732, |
| "grad_norm": 1.3149527311325073, |
| "learning_rate": 4.927987932564215e-06, |
| "loss": 0.7247, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.775609756097561, |
| "grad_norm": 1.073188304901123, |
| "learning_rate": 4.927072160276092e-06, |
| "loss": 0.7826, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.7804878048780488, |
| "grad_norm": 1.426188588142395, |
| "learning_rate": 4.926150687990969e-06, |
| "loss": 0.6129, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.7853658536585366, |
| "grad_norm": 1.2348767518997192, |
| "learning_rate": 4.925223517872934e-06, |
| "loss": 0.9191, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.7902439024390244, |
| "grad_norm": 1.599665641784668, |
| "learning_rate": 4.9242906520994484e-06, |
| "loss": 1.0177, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.7951219512195122, |
| "grad_norm": 1.1841332912445068, |
| "learning_rate": 4.923352092861358e-06, |
| "loss": 0.8342, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 1.0213048458099365, |
| "learning_rate": 4.922407842362875e-06, |
| "loss": 0.6292, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.8048780487804879, |
| "grad_norm": 1.1878992319107056, |
| "learning_rate": 4.921457902821578e-06, |
| "loss": 0.9357, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.8097560975609757, |
| "grad_norm": 1.297462821006775, |
| "learning_rate": 4.920502276468408e-06, |
| "loss": 0.8829, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.8146341463414634, |
| "grad_norm": 1.1800835132598877, |
| "learning_rate": 4.9195409655476605e-06, |
| "loss": 0.712, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.8195121951219512, |
| "grad_norm": 1.4254546165466309, |
| "learning_rate": 4.918573972316982e-06, |
| "loss": 0.9997, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.824390243902439, |
| "grad_norm": 1.3243224620819092, |
| "learning_rate": 4.917601299047361e-06, |
| "loss": 0.7944, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.8292682926829268, |
| "grad_norm": 1.3879033327102661, |
| "learning_rate": 4.916622948023129e-06, |
| "loss": 0.7778, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.8341463414634146, |
| "grad_norm": 1.245430588722229, |
| "learning_rate": 4.915638921541952e-06, |
| "loss": 0.6247, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.8390243902439024, |
| "grad_norm": 1.3728258609771729, |
| "learning_rate": 4.914649221914822e-06, |
| "loss": 0.8762, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.8439024390243902, |
| "grad_norm": 1.3080862760543823, |
| "learning_rate": 4.913653851466057e-06, |
| "loss": 0.6381, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.848780487804878, |
| "grad_norm": 1.5109484195709229, |
| "learning_rate": 4.912652812533291e-06, |
| "loss": 0.8127, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.8536585365853658, |
| "grad_norm": 1.237879753112793, |
| "learning_rate": 4.911646107467472e-06, |
| "loss": 0.8257, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.8585365853658536, |
| "grad_norm": 1.2725176811218262, |
| "learning_rate": 4.9106337386328524e-06, |
| "loss": 0.9758, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.8634146341463415, |
| "grad_norm": 1.271756887435913, |
| "learning_rate": 4.909615708406991e-06, |
| "loss": 0.8436, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.8682926829268293, |
| "grad_norm": 1.0920095443725586, |
| "learning_rate": 4.908592019180738e-06, |
| "loss": 0.6657, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.8731707317073171, |
| "grad_norm": 0.9080491065979004, |
| "learning_rate": 4.907562673358234e-06, |
| "loss": 0.6322, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.8780487804878049, |
| "grad_norm": 0.9868524074554443, |
| "learning_rate": 4.906527673356907e-06, |
| "loss": 0.5918, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.8829268292682927, |
| "grad_norm": 1.49118173122406, |
| "learning_rate": 4.905487021607462e-06, |
| "loss": 0.4939, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.8878048780487805, |
| "grad_norm": 1.3323795795440674, |
| "learning_rate": 4.904440720553876e-06, |
| "loss": 0.8444, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.8926829268292683, |
| "grad_norm": 1.340374231338501, |
| "learning_rate": 4.903388772653396e-06, |
| "loss": 0.765, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.8975609756097561, |
| "grad_norm": 1.2728081941604614, |
| "learning_rate": 4.902331180376529e-06, |
| "loss": 0.8126, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.9024390243902439, |
| "grad_norm": 1.154461145401001, |
| "learning_rate": 4.901267946207038e-06, |
| "loss": 0.9256, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.9073170731707317, |
| "grad_norm": 1.0781828165054321, |
| "learning_rate": 4.900199072641937e-06, |
| "loss": 0.7057, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.9121951219512195, |
| "grad_norm": 1.0474439859390259, |
| "learning_rate": 4.899124562191484e-06, |
| "loss": 0.6024, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.9170731707317074, |
| "grad_norm": 1.019089937210083, |
| "learning_rate": 4.8980444173791735e-06, |
| "loss": 0.5001, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.9219512195121952, |
| "grad_norm": 1.3556936979293823, |
| "learning_rate": 4.896958640741735e-06, |
| "loss": 0.8636, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.926829268292683, |
| "grad_norm": 1.2974662780761719, |
| "learning_rate": 4.895867234829121e-06, |
| "loss": 0.9257, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.9317073170731708, |
| "grad_norm": 1.3130805492401123, |
| "learning_rate": 4.894770202204509e-06, |
| "loss": 0.7026, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.9365853658536586, |
| "grad_norm": 1.5521293878555298, |
| "learning_rate": 4.893667545444285e-06, |
| "loss": 0.8671, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.9414634146341463, |
| "grad_norm": 1.0167640447616577, |
| "learning_rate": 4.8925592671380495e-06, |
| "loss": 0.6643, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.9463414634146341, |
| "grad_norm": 0.8992295861244202, |
| "learning_rate": 4.891445369888601e-06, |
| "loss": 0.5294, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.9512195121951219, |
| "grad_norm": 1.053918480873108, |
| "learning_rate": 4.890325856311936e-06, |
| "loss": 0.8734, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.9560975609756097, |
| "grad_norm": 1.1650863885879517, |
| "learning_rate": 4.889200729037241e-06, |
| "loss": 0.7858, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.9609756097560975, |
| "grad_norm": 1.0390702486038208, |
| "learning_rate": 4.888069990706884e-06, |
| "loss": 0.6428, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.9658536585365853, |
| "grad_norm": 1.4749332666397095, |
| "learning_rate": 4.886933643976414e-06, |
| "loss": 0.55, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.9707317073170731, |
| "grad_norm": 0.9483368992805481, |
| "learning_rate": 4.885791691514548e-06, |
| "loss": 0.4786, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.975609756097561, |
| "grad_norm": 1.0259203910827637, |
| "learning_rate": 4.884644136003172e-06, |
| "loss": 0.6137, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.9804878048780488, |
| "grad_norm": 1.1431037187576294, |
| "learning_rate": 4.883490980137327e-06, |
| "loss": 1.2973, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.9853658536585366, |
| "grad_norm": 1.1985281705856323, |
| "learning_rate": 4.882332226625208e-06, |
| "loss": 0.7734, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.9902439024390244, |
| "grad_norm": 0.9396414160728455, |
| "learning_rate": 4.881167878188158e-06, |
| "loss": 0.8172, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.9951219512195122, |
| "grad_norm": 1.3293070793151855, |
| "learning_rate": 4.8799979375606565e-06, |
| "loss": 0.727, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 1.0133116245269775, |
| "learning_rate": 4.878822407490319e-06, |
| "loss": 0.7099, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.0048780487804878, |
| "grad_norm": 1.0823330879211426, |
| "learning_rate": 4.8776412907378845e-06, |
| "loss": 0.6611, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.0097560975609756, |
| "grad_norm": 0.9297066926956177, |
| "learning_rate": 4.876454590077216e-06, |
| "loss": 0.5825, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.0146341463414634, |
| "grad_norm": 1.0025253295898438, |
| "learning_rate": 4.875262308295289e-06, |
| "loss": 0.8154, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.0195121951219512, |
| "grad_norm": 1.1356369256973267, |
| "learning_rate": 4.874064448192185e-06, |
| "loss": 0.7126, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.024390243902439, |
| "grad_norm": 1.4361388683319092, |
| "learning_rate": 4.872861012581088e-06, |
| "loss": 0.6323, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.0292682926829269, |
| "grad_norm": 1.2314097881317139, |
| "learning_rate": 4.871652004288275e-06, |
| "loss": 0.6683, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.0341463414634147, |
| "grad_norm": 1.1820945739746094, |
| "learning_rate": 4.870437426153113e-06, |
| "loss": 0.6659, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.0390243902439025, |
| "grad_norm": 1.1955009698867798, |
| "learning_rate": 4.869217281028045e-06, |
| "loss": 0.8964, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.0439024390243903, |
| "grad_norm": 1.2589020729064941, |
| "learning_rate": 4.867991571778592e-06, |
| "loss": 0.894, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.048780487804878, |
| "grad_norm": 0.8933411836624146, |
| "learning_rate": 4.866760301283342e-06, |
| "loss": 0.5199, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.053658536585366, |
| "grad_norm": 1.126219630241394, |
| "learning_rate": 4.865523472433942e-06, |
| "loss": 0.7014, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.0585365853658537, |
| "grad_norm": 1.1173880100250244, |
| "learning_rate": 4.8642810881350935e-06, |
| "loss": 0.6085, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.0634146341463415, |
| "grad_norm": 0.9508454203605652, |
| "learning_rate": 4.863033151304546e-06, |
| "loss": 0.6333, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.0682926829268293, |
| "grad_norm": 1.8204004764556885, |
| "learning_rate": 4.861779664873088e-06, |
| "loss": 0.8899, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.0731707317073171, |
| "grad_norm": 1.5721564292907715, |
| "learning_rate": 4.8605206317845425e-06, |
| "loss": 0.8105, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.078048780487805, |
| "grad_norm": 1.3572758436203003, |
| "learning_rate": 4.859256054995758e-06, |
| "loss": 0.8634, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.0829268292682928, |
| "grad_norm": 0.8531430959701538, |
| "learning_rate": 4.8579859374766e-06, |
| "loss": 0.4826, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.0878048780487806, |
| "grad_norm": 0.771088719367981, |
| "learning_rate": 4.856710282209952e-06, |
| "loss": 0.4085, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.0926829268292684, |
| "grad_norm": 1.0056614875793457, |
| "learning_rate": 4.855429092191698e-06, |
| "loss": 0.7168, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.0975609756097562, |
| "grad_norm": 0.9825501441955566, |
| "learning_rate": 4.854142370430725e-06, |
| "loss": 0.6276, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.102439024390244, |
| "grad_norm": 1.0209708213806152, |
| "learning_rate": 4.8528501199489045e-06, |
| "loss": 0.6521, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.1073170731707318, |
| "grad_norm": 1.3162622451782227, |
| "learning_rate": 4.851552343781099e-06, |
| "loss": 0.8244, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.1121951219512196, |
| "grad_norm": 1.150299072265625, |
| "learning_rate": 4.850249044975145e-06, |
| "loss": 0.7804, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.1170731707317074, |
| "grad_norm": 1.142996907234192, |
| "learning_rate": 4.848940226591849e-06, |
| "loss": 0.9935, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.1219512195121952, |
| "grad_norm": 0.9757675528526306, |
| "learning_rate": 4.847625891704982e-06, |
| "loss": 0.6041, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.126829268292683, |
| "grad_norm": 1.1686437129974365, |
| "learning_rate": 4.846306043401268e-06, |
| "loss": 0.6873, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.1317073170731708, |
| "grad_norm": 1.4826396703720093, |
| "learning_rate": 4.844980684780381e-06, |
| "loss": 0.6412, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.1365853658536587, |
| "grad_norm": 1.204198956489563, |
| "learning_rate": 4.8436498189549345e-06, |
| "loss": 0.5779, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.1414634146341462, |
| "grad_norm": 0.8927454948425293, |
| "learning_rate": 4.842313449050477e-06, |
| "loss": 0.5894, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.146341463414634, |
| "grad_norm": 0.9504485726356506, |
| "learning_rate": 4.840971578205486e-06, |
| "loss": 0.5195, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.1512195121951219, |
| "grad_norm": 0.8682957291603088, |
| "learning_rate": 4.839624209571352e-06, |
| "loss": 0.3927, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.1560975609756097, |
| "grad_norm": 1.0243151187896729, |
| "learning_rate": 4.838271346312381e-06, |
| "loss": 0.8549, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.1609756097560975, |
| "grad_norm": 0.9931624531745911, |
| "learning_rate": 4.836912991605782e-06, |
| "loss": 0.8616, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.1658536585365853, |
| "grad_norm": 1.0424671173095703, |
| "learning_rate": 4.835549148641663e-06, |
| "loss": 0.5675, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.170731707317073, |
| "grad_norm": 0.9706572890281677, |
| "learning_rate": 4.834179820623018e-06, |
| "loss": 0.6444, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.175609756097561, |
| "grad_norm": 0.9557904601097107, |
| "learning_rate": 4.832805010765724e-06, |
| "loss": 0.6006, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.1804878048780487, |
| "grad_norm": 1.2297061681747437, |
| "learning_rate": 4.831424722298531e-06, |
| "loss": 0.6897, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.1853658536585365, |
| "grad_norm": 1.3953443765640259, |
| "learning_rate": 4.830038958463061e-06, |
| "loss": 0.772, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.1902439024390243, |
| "grad_norm": 2.120232105255127, |
| "learning_rate": 4.828647722513785e-06, |
| "loss": 0.9361, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.1951219512195121, |
| "grad_norm": 1.4457300901412964, |
| "learning_rate": 4.827251017718034e-06, |
| "loss": 0.8036, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 1.0927690267562866, |
| "learning_rate": 4.8258488473559794e-06, |
| "loss": 0.8354, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.2048780487804878, |
| "grad_norm": 1.4953900575637817, |
| "learning_rate": 4.824441214720629e-06, |
| "loss": 0.9083, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.2097560975609756, |
| "grad_norm": 0.9229385852813721, |
| "learning_rate": 4.823028123117818e-06, |
| "loss": 0.441, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.2146341463414634, |
| "grad_norm": 1.8001827001571655, |
| "learning_rate": 4.8216095758662015e-06, |
| "loss": 0.7975, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.2195121951219512, |
| "grad_norm": 1.0904715061187744, |
| "learning_rate": 4.82018557629725e-06, |
| "loss": 0.8526, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.224390243902439, |
| "grad_norm": 1.3482294082641602, |
| "learning_rate": 4.8187561277552376e-06, |
| "loss": 0.6198, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.2292682926829268, |
| "grad_norm": 0.9257897138595581, |
| "learning_rate": 4.817321233597232e-06, |
| "loss": 0.8115, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.2341463414634146, |
| "grad_norm": 1.1533488035202026, |
| "learning_rate": 4.815880897193095e-06, |
| "loss": 0.5524, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.2390243902439024, |
| "grad_norm": 1.2778701782226562, |
| "learning_rate": 4.814435121925466e-06, |
| "loss": 0.802, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.2439024390243902, |
| "grad_norm": 1.037063717842102, |
| "learning_rate": 4.812983911189761e-06, |
| "loss": 0.696, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.248780487804878, |
| "grad_norm": 1.0112067461013794, |
| "learning_rate": 4.811527268394157e-06, |
| "loss": 0.5189, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.2536585365853659, |
| "grad_norm": 1.1271675825119019, |
| "learning_rate": 4.810065196959591e-06, |
| "loss": 0.6292, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.2585365853658537, |
| "grad_norm": 1.0987378358840942, |
| "learning_rate": 4.8085977003197496e-06, |
| "loss": 0.8452, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.2634146341463415, |
| "grad_norm": 1.5386906862258911, |
| "learning_rate": 4.807124781921059e-06, |
| "loss": 1.1094, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.2682926829268293, |
| "grad_norm": 0.9946249127388, |
| "learning_rate": 4.805646445222679e-06, |
| "loss": 0.6367, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.273170731707317, |
| "grad_norm": 0.9774323105812073, |
| "learning_rate": 4.804162693696494e-06, |
| "loss": 0.6157, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.278048780487805, |
| "grad_norm": 1.9447377920150757, |
| "learning_rate": 4.802673530827105e-06, |
| "loss": 0.6076, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.2829268292682927, |
| "grad_norm": 0.7815698981285095, |
| "learning_rate": 4.801178960111823e-06, |
| "loss": 0.5985, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.2878048780487805, |
| "grad_norm": 1.014426589012146, |
| "learning_rate": 4.799678985060658e-06, |
| "loss": 0.851, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.2926829268292683, |
| "grad_norm": 0.9904977083206177, |
| "learning_rate": 4.798173609196314e-06, |
| "loss": 0.8511, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.2975609756097561, |
| "grad_norm": 1.0354292392730713, |
| "learning_rate": 4.796662836054176e-06, |
| "loss": 0.4885, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.302439024390244, |
| "grad_norm": 0.9485008120536804, |
| "learning_rate": 4.795146669182304e-06, |
| "loss": 0.6486, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.3073170731707318, |
| "grad_norm": 0.9609008431434631, |
| "learning_rate": 4.793625112141431e-06, |
| "loss": 0.5446, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.3121951219512196, |
| "grad_norm": 0.8880930542945862, |
| "learning_rate": 4.792098168504943e-06, |
| "loss": 0.5741, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.3170731707317074, |
| "grad_norm": 0.8475173711776733, |
| "learning_rate": 4.790565841858879e-06, |
| "loss": 0.488, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.3219512195121952, |
| "grad_norm": 1.04447603225708, |
| "learning_rate": 4.789028135801919e-06, |
| "loss": 0.7792, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.326829268292683, |
| "grad_norm": 1.2021688222885132, |
| "learning_rate": 4.787485053945377e-06, |
| "loss": 0.8513, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.3317073170731708, |
| "grad_norm": 0.9609706401824951, |
| "learning_rate": 4.785936599913193e-06, |
| "loss": 0.7186, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.3365853658536586, |
| "grad_norm": 0.9922477602958679, |
| "learning_rate": 4.784382777341922e-06, |
| "loss": 0.7355, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.3414634146341464, |
| "grad_norm": 1.2870302200317383, |
| "learning_rate": 4.782823589880729e-06, |
| "loss": 1.0985, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.346341463414634, |
| "grad_norm": 0.9369707107543945, |
| "learning_rate": 4.7812590411913755e-06, |
| "loss": 0.7729, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.3512195121951218, |
| "grad_norm": 1.1406941413879395, |
| "learning_rate": 4.779689134948217e-06, |
| "loss": 1.1142, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.3560975609756096, |
| "grad_norm": 1.1078243255615234, |
| "learning_rate": 4.77811387483819e-06, |
| "loss": 0.5348, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.3609756097560974, |
| "grad_norm": 1.1744791269302368, |
| "learning_rate": 4.776533264560804e-06, |
| "loss": 0.7202, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.3658536585365852, |
| "grad_norm": 1.2643998861312866, |
| "learning_rate": 4.774947307828134e-06, |
| "loss": 0.9659, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.370731707317073, |
| "grad_norm": 1.069071888923645, |
| "learning_rate": 4.773356008364812e-06, |
| "loss": 0.6257, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.3756097560975609, |
| "grad_norm": 0.8661286234855652, |
| "learning_rate": 4.771759369908017e-06, |
| "loss": 0.4138, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.3804878048780487, |
| "grad_norm": 0.9560813903808594, |
| "learning_rate": 4.7701573962074635e-06, |
| "loss": 0.6435, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.3853658536585365, |
| "grad_norm": 1.0229038000106812, |
| "learning_rate": 4.7685500910254015e-06, |
| "loss": 0.5709, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.3902439024390243, |
| "grad_norm": 1.7641900777816772, |
| "learning_rate": 4.766937458136598e-06, |
| "loss": 0.7815, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.395121951219512, |
| "grad_norm": 1.011093258857727, |
| "learning_rate": 4.765319501328332e-06, |
| "loss": 0.7428, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 1.0194127559661865, |
| "learning_rate": 4.763696224400391e-06, |
| "loss": 0.542, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.4048780487804877, |
| "grad_norm": 0.9469794034957886, |
| "learning_rate": 4.762067631165049e-06, |
| "loss": 0.5758, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.4097560975609755, |
| "grad_norm": 1.1177825927734375, |
| "learning_rate": 4.760433725447071e-06, |
| "loss": 0.8141, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.4146341463414633, |
| "grad_norm": 1.388083577156067, |
| "learning_rate": 4.758794511083697e-06, |
| "loss": 0.8286, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.4195121951219511, |
| "grad_norm": 1.1623952388763428, |
| "learning_rate": 4.757149991924633e-06, |
| "loss": 0.6733, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.424390243902439, |
| "grad_norm": 1.328235149383545, |
| "learning_rate": 4.755500171832045e-06, |
| "loss": 0.5397, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.4292682926829268, |
| "grad_norm": 1.0121268033981323, |
| "learning_rate": 4.753845054680548e-06, |
| "loss": 0.6813, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.4341463414634146, |
| "grad_norm": 1.0799837112426758, |
| "learning_rate": 4.752184644357197e-06, |
| "loss": 0.5136, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.4390243902439024, |
| "grad_norm": 1.010602593421936, |
| "learning_rate": 4.750518944761477e-06, |
| "loss": 0.5768, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.4439024390243902, |
| "grad_norm": 1.1127134561538696, |
| "learning_rate": 4.748847959805297e-06, |
| "loss": 0.5663, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.448780487804878, |
| "grad_norm": 0.9182597398757935, |
| "learning_rate": 4.7471716934129774e-06, |
| "loss": 0.5599, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.4536585365853658, |
| "grad_norm": 1.0173683166503906, |
| "learning_rate": 4.745490149521242e-06, |
| "loss": 0.5155, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.4585365853658536, |
| "grad_norm": 0.9683080911636353, |
| "learning_rate": 4.743803332079209e-06, |
| "loss": 0.5744, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.4634146341463414, |
| "grad_norm": 0.9615181684494019, |
| "learning_rate": 4.742111245048382e-06, |
| "loss": 0.5961, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.4682926829268292, |
| "grad_norm": 1.1113585233688354, |
| "learning_rate": 4.740413892402639e-06, |
| "loss": 0.5751, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.473170731707317, |
| "grad_norm": 1.1533280611038208, |
| "learning_rate": 4.738711278128228e-06, |
| "loss": 0.6668, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.4780487804878049, |
| "grad_norm": 1.086147665977478, |
| "learning_rate": 4.7370034062237476e-06, |
| "loss": 0.4249, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.4829268292682927, |
| "grad_norm": 3.0501999855041504, |
| "learning_rate": 4.73529028070015e-06, |
| "loss": 0.6284, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.4878048780487805, |
| "grad_norm": 1.2545751333236694, |
| "learning_rate": 4.733571905580723e-06, |
| "loss": 0.8968, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.4926829268292683, |
| "grad_norm": 0.9740838408470154, |
| "learning_rate": 4.731848284901082e-06, |
| "loss": 0.7402, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.497560975609756, |
| "grad_norm": 1.0430322885513306, |
| "learning_rate": 4.730119422709165e-06, |
| "loss": 0.4697, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.502439024390244, |
| "grad_norm": 1.2707469463348389, |
| "learning_rate": 4.728385323065215e-06, |
| "loss": 0.7548, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.5073170731707317, |
| "grad_norm": 0.9956101775169373, |
| "learning_rate": 4.7266459900417815e-06, |
| "loss": 0.5444, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.5121951219512195, |
| "grad_norm": 1.0116164684295654, |
| "learning_rate": 4.724901427723698e-06, |
| "loss": 0.7939, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.5170731707317073, |
| "grad_norm": 1.0668343305587769, |
| "learning_rate": 4.723151640208084e-06, |
| "loss": 0.3966, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.5219512195121951, |
| "grad_norm": 0.9897716045379639, |
| "learning_rate": 4.721396631604327e-06, |
| "loss": 0.4675, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.526829268292683, |
| "grad_norm": 0.9885281920433044, |
| "learning_rate": 4.7196364060340785e-06, |
| "loss": 0.5411, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.5317073170731708, |
| "grad_norm": 1.1385118961334229, |
| "learning_rate": 4.7178709676312416e-06, |
| "loss": 0.8041, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.5365853658536586, |
| "grad_norm": 1.2253623008728027, |
| "learning_rate": 4.716100320541961e-06, |
| "loss": 1.0583, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.5414634146341464, |
| "grad_norm": 1.1313822269439697, |
| "learning_rate": 4.714324468924614e-06, |
| "loss": 0.7701, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.5463414634146342, |
| "grad_norm": 1.120343804359436, |
| "learning_rate": 4.712543416949803e-06, |
| "loss": 0.7407, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.551219512195122, |
| "grad_norm": 1.5084882974624634, |
| "learning_rate": 4.71075716880034e-06, |
| "loss": 0.753, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.5560975609756098, |
| "grad_norm": 1.3213189840316772, |
| "learning_rate": 4.708965728671243e-06, |
| "loss": 0.8935, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.5609756097560976, |
| "grad_norm": 1.170746922492981, |
| "learning_rate": 4.7071691007697214e-06, |
| "loss": 0.6782, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.5658536585365854, |
| "grad_norm": 1.2023199796676636, |
| "learning_rate": 4.705367289315172e-06, |
| "loss": 0.8011, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.5707317073170732, |
| "grad_norm": 1.0213698148727417, |
| "learning_rate": 4.703560298539158e-06, |
| "loss": 0.5011, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.575609756097561, |
| "grad_norm": 1.045581579208374, |
| "learning_rate": 4.701748132685415e-06, |
| "loss": 0.5501, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.5804878048780489, |
| "grad_norm": 0.9141654372215271, |
| "learning_rate": 4.699930796009825e-06, |
| "loss": 0.5551, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.5853658536585367, |
| "grad_norm": 1.3005549907684326, |
| "learning_rate": 4.698108292780418e-06, |
| "loss": 0.7293, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.5902439024390245, |
| "grad_norm": 0.985907793045044, |
| "learning_rate": 4.696280627277356e-06, |
| "loss": 0.5366, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.5951219512195123, |
| "grad_norm": 0.9095384478569031, |
| "learning_rate": 4.6944478037929255e-06, |
| "loss": 0.5508, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 1.346676230430603, |
| "learning_rate": 4.692609826631525e-06, |
| "loss": 0.6719, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.604878048780488, |
| "grad_norm": 1.088921308517456, |
| "learning_rate": 4.690766700109659e-06, |
| "loss": 0.4088, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.6097560975609757, |
| "grad_norm": 0.8905205726623535, |
| "learning_rate": 4.6889184285559234e-06, |
| "loss": 0.4671, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.6146341463414635, |
| "grad_norm": 1.2066144943237305, |
| "learning_rate": 4.687065016310996e-06, |
| "loss": 0.7891, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.6195121951219513, |
| "grad_norm": 1.0449296236038208, |
| "learning_rate": 4.685206467727631e-06, |
| "loss": 0.6103, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.6243902439024391, |
| "grad_norm": 1.15915048122406, |
| "learning_rate": 4.683342787170644e-06, |
| "loss": 0.605, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.629268292682927, |
| "grad_norm": 1.0918726921081543, |
| "learning_rate": 4.6814739790169006e-06, |
| "loss": 0.5444, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.6341463414634148, |
| "grad_norm": 1.0298805236816406, |
| "learning_rate": 4.679600047655313e-06, |
| "loss": 0.7902, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.6390243902439026, |
| "grad_norm": 1.3017504215240479, |
| "learning_rate": 4.6777209974868194e-06, |
| "loss": 1.1195, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.6439024390243904, |
| "grad_norm": 1.45652174949646, |
| "learning_rate": 4.675836832924387e-06, |
| "loss": 0.6358, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.6487804878048782, |
| "grad_norm": 0.8610002398490906, |
| "learning_rate": 4.673947558392989e-06, |
| "loss": 0.4196, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.653658536585366, |
| "grad_norm": 0.8891443014144897, |
| "learning_rate": 4.6720531783296e-06, |
| "loss": 0.5593, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.6585365853658538, |
| "grad_norm": 0.9679135680198669, |
| "learning_rate": 4.670153697183185e-06, |
| "loss": 0.6149, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.6634146341463416, |
| "grad_norm": 1.019687294960022, |
| "learning_rate": 4.668249119414692e-06, |
| "loss": 0.5855, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.6682926829268294, |
| "grad_norm": 0.9645085334777832, |
| "learning_rate": 4.666339449497033e-06, |
| "loss": 0.6321, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.6731707317073172, |
| "grad_norm": 1.1760913133621216, |
| "learning_rate": 4.664424691915084e-06, |
| "loss": 0.5839, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.678048780487805, |
| "grad_norm": 1.1706181764602661, |
| "learning_rate": 4.6625048511656675e-06, |
| "loss": 0.5806, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.6829268292682928, |
| "grad_norm": 1.1575871706008911, |
| "learning_rate": 4.660579931757543e-06, |
| "loss": 0.4945, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.6878048780487804, |
| "grad_norm": 0.9929284453392029, |
| "learning_rate": 4.6586499382113985e-06, |
| "loss": 0.5662, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.6926829268292682, |
| "grad_norm": 0.9940921068191528, |
| "learning_rate": 4.6567148750598375e-06, |
| "loss": 0.8166, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.697560975609756, |
| "grad_norm": 1.1893478631973267, |
| "learning_rate": 4.6547747468473705e-06, |
| "loss": 0.9291, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.7024390243902439, |
| "grad_norm": 0.8776846528053284, |
| "learning_rate": 4.652829558130404e-06, |
| "loss": 0.4457, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.7073170731707317, |
| "grad_norm": 1.2133142948150635, |
| "learning_rate": 4.6508793134772265e-06, |
| "loss": 0.6364, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.7121951219512195, |
| "grad_norm": 0.8865175247192383, |
| "learning_rate": 4.648924017468003e-06, |
| "loss": 0.5514, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.7170731707317073, |
| "grad_norm": 1.3637226819992065, |
| "learning_rate": 4.646963674694761e-06, |
| "loss": 0.8656, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.721951219512195, |
| "grad_norm": 0.875629723072052, |
| "learning_rate": 4.64499828976138e-06, |
| "loss": 0.3992, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.726829268292683, |
| "grad_norm": 1.0361976623535156, |
| "learning_rate": 4.64302786728358e-06, |
| "loss": 0.5056, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.7317073170731707, |
| "grad_norm": 0.9224256873130798, |
| "learning_rate": 4.641052411888913e-06, |
| "loss": 0.5352, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.7365853658536585, |
| "grad_norm": 1.2034342288970947, |
| "learning_rate": 4.6390719282167515e-06, |
| "loss": 0.4753, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.7414634146341463, |
| "grad_norm": 1.056547999382019, |
| "learning_rate": 4.637086420918276e-06, |
| "loss": 0.7975, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.7463414634146341, |
| "grad_norm": 0.9398707151412964, |
| "learning_rate": 4.635095894656465e-06, |
| "loss": 0.6944, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.751219512195122, |
| "grad_norm": 1.3796380758285522, |
| "learning_rate": 4.633100354106085e-06, |
| "loss": 0.377, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.7560975609756098, |
| "grad_norm": 1.1999366283416748, |
| "learning_rate": 4.631099803953677e-06, |
| "loss": 0.8488, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.7609756097560976, |
| "grad_norm": 1.1613068580627441, |
| "learning_rate": 4.629094248897546e-06, |
| "loss": 0.4856, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.7658536585365854, |
| "grad_norm": 1.407758116722107, |
| "learning_rate": 4.627083693647757e-06, |
| "loss": 0.5932, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.7707317073170732, |
| "grad_norm": 1.1262705326080322, |
| "learning_rate": 4.625068142926111e-06, |
| "loss": 0.862, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.775609756097561, |
| "grad_norm": 1.0207730531692505, |
| "learning_rate": 4.623047601466144e-06, |
| "loss": 0.7695, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.7804878048780488, |
| "grad_norm": 1.0084431171417236, |
| "learning_rate": 4.621022074013114e-06, |
| "loss": 0.6608, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.7853658536585366, |
| "grad_norm": 1.044545292854309, |
| "learning_rate": 4.618991565323987e-06, |
| "loss": 0.5231, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.7902439024390244, |
| "grad_norm": 0.9962389469146729, |
| "learning_rate": 4.616956080167426e-06, |
| "loss": 0.5736, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.7951219512195122, |
| "grad_norm": 1.3212288618087769, |
| "learning_rate": 4.614915623323786e-06, |
| "loss": 0.9586, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.9554499983787537, |
| "learning_rate": 4.612870199585092e-06, |
| "loss": 0.4346, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.8048780487804879, |
| "grad_norm": 1.082366943359375, |
| "learning_rate": 4.610819813755038e-06, |
| "loss": 0.505, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.8097560975609757, |
| "grad_norm": 1.225993037223816, |
| "learning_rate": 4.608764470648971e-06, |
| "loss": 0.4801, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.8146341463414632, |
| "grad_norm": 1.0881706476211548, |
| "learning_rate": 4.606704175093879e-06, |
| "loss": 0.4478, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.819512195121951, |
| "grad_norm": 1.114046335220337, |
| "learning_rate": 4.604638931928383e-06, |
| "loss": 0.8015, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.8243902439024389, |
| "grad_norm": 0.9838706851005554, |
| "learning_rate": 4.602568746002718e-06, |
| "loss": 0.5204, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.8292682926829267, |
| "grad_norm": 1.038713812828064, |
| "learning_rate": 4.600493622178734e-06, |
| "loss": 0.8388, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.8341463414634145, |
| "grad_norm": 1.0684245824813843, |
| "learning_rate": 4.598413565329876e-06, |
| "loss": 0.5744, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.8390243902439023, |
| "grad_norm": 0.8907456994056702, |
| "learning_rate": 4.596328580341169e-06, |
| "loss": 0.5621, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.84390243902439, |
| "grad_norm": 0.9921515583992004, |
| "learning_rate": 4.5942386721092195e-06, |
| "loss": 0.6967, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.848780487804878, |
| "grad_norm": 1.1683647632598877, |
| "learning_rate": 4.592143845542189e-06, |
| "loss": 0.6489, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.8536585365853657, |
| "grad_norm": 1.0409291982650757, |
| "learning_rate": 4.590044105559797e-06, |
| "loss": 0.8945, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.8585365853658535, |
| "grad_norm": 1.0684564113616943, |
| "learning_rate": 4.587939457093296e-06, |
| "loss": 0.5476, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.8634146341463413, |
| "grad_norm": 1.3661733865737915, |
| "learning_rate": 4.585829905085468e-06, |
| "loss": 0.6763, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.8682926829268292, |
| "grad_norm": 1.1465227603912354, |
| "learning_rate": 4.5837154544906135e-06, |
| "loss": 0.7817, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.873170731707317, |
| "grad_norm": 1.0232677459716797, |
| "learning_rate": 4.581596110274535e-06, |
| "loss": 0.7276, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.8780487804878048, |
| "grad_norm": 1.0359809398651123, |
| "learning_rate": 4.579471877414527e-06, |
| "loss": 0.9635, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.8829268292682926, |
| "grad_norm": 1.2472409009933472, |
| "learning_rate": 4.577342760899368e-06, |
| "loss": 0.6782, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.8878048780487804, |
| "grad_norm": 0.9241912961006165, |
| "learning_rate": 4.575208765729302e-06, |
| "loss": 0.5327, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.8926829268292682, |
| "grad_norm": 1.3745805025100708, |
| "learning_rate": 4.573069896916035e-06, |
| "loss": 0.8436, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.897560975609756, |
| "grad_norm": 1.0620322227478027, |
| "learning_rate": 4.5709261594827125e-06, |
| "loss": 0.659, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.9024390243902438, |
| "grad_norm": 1.3068687915802002, |
| "learning_rate": 4.568777558463922e-06, |
| "loss": 0.5219, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.9073170731707316, |
| "grad_norm": 1.0368698835372925, |
| "learning_rate": 4.566624098905665e-06, |
| "loss": 0.7099, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.9121951219512194, |
| "grad_norm": 1.0876407623291016, |
| "learning_rate": 4.564465785865359e-06, |
| "loss": 0.5276, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.9170731707317072, |
| "grad_norm": 0.9230280518531799, |
| "learning_rate": 4.56230262441182e-06, |
| "loss": 0.4292, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.921951219512195, |
| "grad_norm": 0.9985240697860718, |
| "learning_rate": 4.560134619625247e-06, |
| "loss": 0.6602, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.9268292682926829, |
| "grad_norm": 0.8872730135917664, |
| "learning_rate": 4.5579617765972155e-06, |
| "loss": 0.5711, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.9317073170731707, |
| "grad_norm": 0.9474479556083679, |
| "learning_rate": 4.555784100430662e-06, |
| "loss": 0.5299, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.9365853658536585, |
| "grad_norm": 1.0377358198165894, |
| "learning_rate": 4.553601596239877e-06, |
| "loss": 0.4649, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.9414634146341463, |
| "grad_norm": 1.0765342712402344, |
| "learning_rate": 4.551414269150489e-06, |
| "loss": 0.5578, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.946341463414634, |
| "grad_norm": 1.0844486951828003, |
| "learning_rate": 4.54922212429945e-06, |
| "loss": 0.5486, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.951219512195122, |
| "grad_norm": 1.1640657186508179, |
| "learning_rate": 4.547025166835027e-06, |
| "loss": 0.7462, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.9560975609756097, |
| "grad_norm": 1.2733076810836792, |
| "learning_rate": 4.544823401916794e-06, |
| "loss": 0.881, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.9609756097560975, |
| "grad_norm": 1.155869960784912, |
| "learning_rate": 4.542616834715612e-06, |
| "loss": 0.6374, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.9658536585365853, |
| "grad_norm": 0.8623374700546265, |
| "learning_rate": 4.540405470413618e-06, |
| "loss": 0.4188, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.9707317073170731, |
| "grad_norm": 1.0502699613571167, |
| "learning_rate": 4.53818931420422e-06, |
| "loss": 0.7233, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.975609756097561, |
| "grad_norm": 0.8630202412605286, |
| "learning_rate": 4.535968371292076e-06, |
| "loss": 0.5896, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.9804878048780488, |
| "grad_norm": 1.1017824411392212, |
| "learning_rate": 4.533742646893086e-06, |
| "loss": 0.6971, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.9853658536585366, |
| "grad_norm": 0.9387734532356262, |
| "learning_rate": 4.531512146234383e-06, |
| "loss": 0.6718, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.9902439024390244, |
| "grad_norm": 1.0347439050674438, |
| "learning_rate": 4.529276874554312e-06, |
| "loss": 0.8829, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.9951219512195122, |
| "grad_norm": 1.0173542499542236, |
| "learning_rate": 4.527036837102426e-06, |
| "loss": 0.5154, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.1297523975372314, |
| "learning_rate": 4.524792039139471e-06, |
| "loss": 0.7721, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.004878048780488, |
| "grad_norm": 0.9763960242271423, |
| "learning_rate": 4.522542485937369e-06, |
| "loss": 0.3978, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.0097560975609756, |
| "grad_norm": 0.9650730490684509, |
| "learning_rate": 4.520288182779214e-06, |
| "loss": 0.6826, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.0146341463414634, |
| "grad_norm": 0.7682514190673828, |
| "learning_rate": 4.518029134959253e-06, |
| "loss": 0.351, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.0195121951219512, |
| "grad_norm": 1.0681227445602417, |
| "learning_rate": 4.515765347782878e-06, |
| "loss": 0.6467, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.024390243902439, |
| "grad_norm": 1.2426350116729736, |
| "learning_rate": 4.5134968265666085e-06, |
| "loss": 0.8831, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.029268292682927, |
| "grad_norm": 0.9794759154319763, |
| "learning_rate": 4.511223576638084e-06, |
| "loss": 0.6419, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.0341463414634147, |
| "grad_norm": 1.119649887084961, |
| "learning_rate": 4.508945603336049e-06, |
| "loss": 0.8023, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.0390243902439025, |
| "grad_norm": 0.858971118927002, |
| "learning_rate": 4.50666291201034e-06, |
| "loss": 0.4974, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.0439024390243903, |
| "grad_norm": 1.0508594512939453, |
| "learning_rate": 4.504375508021876e-06, |
| "loss": 0.3603, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.048780487804878, |
| "grad_norm": 0.9566763043403625, |
| "learning_rate": 4.50208339674264e-06, |
| "loss": 0.7813, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.053658536585366, |
| "grad_norm": 1.074040412902832, |
| "learning_rate": 4.499786583555675e-06, |
| "loss": 0.8065, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.0585365853658537, |
| "grad_norm": 0.8816580772399902, |
| "learning_rate": 4.497485073855061e-06, |
| "loss": 0.4439, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.0634146341463415, |
| "grad_norm": 1.0733896493911743, |
| "learning_rate": 4.495178873045913e-06, |
| "loss": 0.4481, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.0682926829268293, |
| "grad_norm": 0.9010451436042786, |
| "learning_rate": 4.4928679865443605e-06, |
| "loss": 0.4407, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.073170731707317, |
| "grad_norm": 0.9359092712402344, |
| "learning_rate": 4.4905524197775366e-06, |
| "loss": 0.5847, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.078048780487805, |
| "grad_norm": 0.9389141798019409, |
| "learning_rate": 4.4882321781835666e-06, |
| "loss": 0.5377, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.0829268292682928, |
| "grad_norm": 0.89751797914505, |
| "learning_rate": 4.4859072672115565e-06, |
| "loss": 0.3879, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.0878048780487806, |
| "grad_norm": 1.0244700908660889, |
| "learning_rate": 4.483577692321577e-06, |
| "loss": 0.8237, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.0926829268292684, |
| "grad_norm": 1.0703409910202026, |
| "learning_rate": 4.481243458984651e-06, |
| "loss": 0.5826, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.097560975609756, |
| "grad_norm": 0.9995833039283752, |
| "learning_rate": 4.478904572682743e-06, |
| "loss": 0.6091, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.102439024390244, |
| "grad_norm": 0.8649471998214722, |
| "learning_rate": 4.476561038908745e-06, |
| "loss": 0.4863, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.107317073170732, |
| "grad_norm": 1.1670926809310913, |
| "learning_rate": 4.474212863166464e-06, |
| "loss": 0.6584, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.1121951219512196, |
| "grad_norm": 1.2743312120437622, |
| "learning_rate": 4.471860050970608e-06, |
| "loss": 0.6777, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.1170731707317074, |
| "grad_norm": 1.2678401470184326, |
| "learning_rate": 4.469502607846774e-06, |
| "loss": 0.9609, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.1219512195121952, |
| "grad_norm": 0.9796558618545532, |
| "learning_rate": 4.467140539331434e-06, |
| "loss": 0.4574, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.126829268292683, |
| "grad_norm": 1.0830684900283813, |
| "learning_rate": 4.464773850971924e-06, |
| "loss": 0.3067, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.131707317073171, |
| "grad_norm": 1.002589464187622, |
| "learning_rate": 4.46240254832643e-06, |
| "loss": 0.5383, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.1365853658536587, |
| "grad_norm": 1.1145734786987305, |
| "learning_rate": 4.460026636963971e-06, |
| "loss": 0.6173, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.1414634146341465, |
| "grad_norm": 0.94740891456604, |
| "learning_rate": 4.4576461224643965e-06, |
| "loss": 0.4991, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.1463414634146343, |
| "grad_norm": 0.8613864183425903, |
| "learning_rate": 4.455261010418359e-06, |
| "loss": 0.3956, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.151219512195122, |
| "grad_norm": 0.9509091377258301, |
| "learning_rate": 4.452871306427314e-06, |
| "loss": 0.7165, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.15609756097561, |
| "grad_norm": 1.259364128112793, |
| "learning_rate": 4.450477016103498e-06, |
| "loss": 0.5682, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.1609756097560977, |
| "grad_norm": 1.1716279983520508, |
| "learning_rate": 4.4480781450699205e-06, |
| "loss": 0.4917, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.1658536585365855, |
| "grad_norm": 1.0395866632461548, |
| "learning_rate": 4.4456746989603464e-06, |
| "loss": 0.4338, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.1707317073170733, |
| "grad_norm": 1.232602834701538, |
| "learning_rate": 4.443266683419289e-06, |
| "loss": 0.6356, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.175609756097561, |
| "grad_norm": 1.254172921180725, |
| "learning_rate": 4.440854104101988e-06, |
| "loss": 0.472, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.180487804878049, |
| "grad_norm": 1.2319004535675049, |
| "learning_rate": 4.438436966674406e-06, |
| "loss": 0.6408, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.1853658536585368, |
| "grad_norm": 1.0100780725479126, |
| "learning_rate": 4.436015276813208e-06, |
| "loss": 0.4524, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.1902439024390246, |
| "grad_norm": 0.9656887650489807, |
| "learning_rate": 4.4335890402057505e-06, |
| "loss": 0.5999, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.1951219512195124, |
| "grad_norm": 1.1730879545211792, |
| "learning_rate": 4.431158262550067e-06, |
| "loss": 0.5953, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.2, |
| "grad_norm": 0.9255422949790955, |
| "learning_rate": 4.428722949554858e-06, |
| "loss": 0.3487, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.204878048780488, |
| "grad_norm": 0.9878072142601013, |
| "learning_rate": 4.426283106939474e-06, |
| "loss": 0.4937, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.209756097560976, |
| "grad_norm": 0.982023298740387, |
| "learning_rate": 4.423838740433903e-06, |
| "loss": 0.6299, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.2146341463414636, |
| "grad_norm": 0.7727266550064087, |
| "learning_rate": 4.4213898557787586e-06, |
| "loss": 0.2789, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.2195121951219514, |
| "grad_norm": 1.5341951847076416, |
| "learning_rate": 4.4189364587252636e-06, |
| "loss": 0.8498, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.2243902439024392, |
| "grad_norm": 1.1611250638961792, |
| "learning_rate": 4.416478555035241e-06, |
| "loss": 0.4075, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.229268292682927, |
| "grad_norm": 1.0459867715835571, |
| "learning_rate": 4.4140161504810935e-06, |
| "loss": 0.4946, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.234146341463415, |
| "grad_norm": 0.9366090297698975, |
| "learning_rate": 4.4115492508457986e-06, |
| "loss": 0.3479, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.2390243902439027, |
| "grad_norm": 1.0325448513031006, |
| "learning_rate": 4.409077861922887e-06, |
| "loss": 0.5437, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.2439024390243905, |
| "grad_norm": 0.9326527118682861, |
| "learning_rate": 4.406601989516435e-06, |
| "loss": 0.4594, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.2487804878048783, |
| "grad_norm": 0.7127180099487305, |
| "learning_rate": 4.404121639441047e-06, |
| "loss": 0.3067, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.253658536585366, |
| "grad_norm": 1.0416815280914307, |
| "learning_rate": 4.401636817521843e-06, |
| "loss": 0.5402, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.258536585365854, |
| "grad_norm": 1.8258185386657715, |
| "learning_rate": 4.399147529594447e-06, |
| "loss": 0.3964, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.2634146341463417, |
| "grad_norm": 0.9795071482658386, |
| "learning_rate": 4.3966537815049686e-06, |
| "loss": 0.5118, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.2682926829268295, |
| "grad_norm": 1.1920483112335205, |
| "learning_rate": 4.394155579109994e-06, |
| "loss": 0.6511, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.2731707317073173, |
| "grad_norm": 1.361159324645996, |
| "learning_rate": 4.391652928276572e-06, |
| "loss": 0.6874, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.278048780487805, |
| "grad_norm": 0.9973228573799133, |
| "learning_rate": 4.389145834882195e-06, |
| "loss": 0.6057, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.2829268292682925, |
| "grad_norm": 1.3514574766159058, |
| "learning_rate": 4.386634304814789e-06, |
| "loss": 0.4762, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.2878048780487803, |
| "grad_norm": 1.2089687585830688, |
| "learning_rate": 4.384118343972704e-06, |
| "loss": 0.689, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.292682926829268, |
| "grad_norm": 0.9414058327674866, |
| "learning_rate": 4.381597958264692e-06, |
| "loss": 0.7257, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.297560975609756, |
| "grad_norm": 1.0120850801467896, |
| "learning_rate": 4.379073153609896e-06, |
| "loss": 0.7515, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.3024390243902437, |
| "grad_norm": 1.586024522781372, |
| "learning_rate": 4.37654393593784e-06, |
| "loss": 0.64, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.3073170731707315, |
| "grad_norm": 0.9766375422477722, |
| "learning_rate": 4.3740103111884096e-06, |
| "loss": 0.5162, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.3121951219512193, |
| "grad_norm": 1.001685380935669, |
| "learning_rate": 4.371472285311842e-06, |
| "loss": 0.4464, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.317073170731707, |
| "grad_norm": 0.7923868894577026, |
| "learning_rate": 4.368929864268709e-06, |
| "loss": 0.365, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.321951219512195, |
| "grad_norm": 1.0744857788085938, |
| "learning_rate": 4.366383054029907e-06, |
| "loss": 0.6253, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.3268292682926828, |
| "grad_norm": 0.9156374931335449, |
| "learning_rate": 4.363831860576638e-06, |
| "loss": 0.6512, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.3317073170731706, |
| "grad_norm": 1.0944534540176392, |
| "learning_rate": 4.361276289900396e-06, |
| "loss": 0.5627, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.3365853658536584, |
| "grad_norm": 1.1438108682632446, |
| "learning_rate": 4.358716348002962e-06, |
| "loss": 0.7402, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.341463414634146, |
| "grad_norm": 1.1678388118743896, |
| "learning_rate": 4.356152040896376e-06, |
| "loss": 0.512, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.346341463414634, |
| "grad_norm": 0.8130245208740234, |
| "learning_rate": 4.3535833746029335e-06, |
| "loss": 0.3934, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.351219512195122, |
| "grad_norm": 1.229127287864685, |
| "learning_rate": 4.351010355155165e-06, |
| "loss": 0.4782, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.3560975609756096, |
| "grad_norm": 0.9830904006958008, |
| "learning_rate": 4.348432988595828e-06, |
| "loss": 0.3879, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.3609756097560974, |
| "grad_norm": 1.3584911823272705, |
| "learning_rate": 4.345851280977885e-06, |
| "loss": 0.7305, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.3658536585365852, |
| "grad_norm": 1.0106158256530762, |
| "learning_rate": 4.343265238364496e-06, |
| "loss": 0.4247, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.370731707317073, |
| "grad_norm": 1.1921676397323608, |
| "learning_rate": 4.340674866829001e-06, |
| "loss": 0.5446, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.375609756097561, |
| "grad_norm": 1.1733497381210327, |
| "learning_rate": 4.338080172454908e-06, |
| "loss": 0.8386, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.3804878048780487, |
| "grad_norm": 0.8914453387260437, |
| "learning_rate": 4.335481161335875e-06, |
| "loss": 0.5393, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.3853658536585365, |
| "grad_norm": 1.122434377670288, |
| "learning_rate": 4.332877839575699e-06, |
| "loss": 0.4645, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.3902439024390243, |
| "grad_norm": 1.078754186630249, |
| "learning_rate": 4.330270213288301e-06, |
| "loss": 0.6284, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.395121951219512, |
| "grad_norm": 1.0400331020355225, |
| "learning_rate": 4.32765828859771e-06, |
| "loss": 0.8431, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.9895585775375366, |
| "learning_rate": 4.325042071638051e-06, |
| "loss": 0.6601, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.4048780487804877, |
| "grad_norm": 1.1766878366470337, |
| "learning_rate": 4.322421568553529e-06, |
| "loss": 0.4766, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.4097560975609755, |
| "grad_norm": 0.9032670855522156, |
| "learning_rate": 4.319796785498416e-06, |
| "loss": 0.4363, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.4146341463414633, |
| "grad_norm": 0.9736960530281067, |
| "learning_rate": 4.317167728637032e-06, |
| "loss": 0.6109, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.419512195121951, |
| "grad_norm": 0.7923660278320312, |
| "learning_rate": 4.314534404143738e-06, |
| "loss": 0.4987, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.424390243902439, |
| "grad_norm": 0.9087777137756348, |
| "learning_rate": 4.3118968182029155e-06, |
| "loss": 0.6411, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.4292682926829268, |
| "grad_norm": 0.851117730140686, |
| "learning_rate": 4.3092549770089566e-06, |
| "loss": 0.3541, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.4341463414634146, |
| "grad_norm": 0.9581378102302551, |
| "learning_rate": 4.306608886766243e-06, |
| "loss": 0.5448, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.4390243902439024, |
| "grad_norm": 1.081851601600647, |
| "learning_rate": 4.303958553689137e-06, |
| "loss": 0.5593, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.44390243902439, |
| "grad_norm": 1.1111576557159424, |
| "learning_rate": 4.3013039840019675e-06, |
| "loss": 0.7566, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.448780487804878, |
| "grad_norm": 1.0168198347091675, |
| "learning_rate": 4.2986451839390105e-06, |
| "loss": 0.3996, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.453658536585366, |
| "grad_norm": 0.9412428140640259, |
| "learning_rate": 4.295982159744476e-06, |
| "loss": 0.5602, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.4585365853658536, |
| "grad_norm": 1.0618679523468018, |
| "learning_rate": 4.293314917672498e-06, |
| "loss": 0.6466, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.4634146341463414, |
| "grad_norm": 0.8414422273635864, |
| "learning_rate": 4.290643463987114e-06, |
| "loss": 0.357, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.4682926829268292, |
| "grad_norm": 0.93071448802948, |
| "learning_rate": 4.287967804962252e-06, |
| "loss": 0.4179, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.473170731707317, |
| "grad_norm": 0.9793124794960022, |
| "learning_rate": 4.285287946881718e-06, |
| "loss": 0.2698, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.478048780487805, |
| "grad_norm": 0.9545714259147644, |
| "learning_rate": 4.282603896039178e-06, |
| "loss": 0.6855, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.4829268292682927, |
| "grad_norm": 1.1140731573104858, |
| "learning_rate": 4.279915658738145e-06, |
| "loss": 0.5114, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.4878048780487805, |
| "grad_norm": 1.0547738075256348, |
| "learning_rate": 4.277223241291966e-06, |
| "loss": 0.7367, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.4926829268292683, |
| "grad_norm": 0.8616530895233154, |
| "learning_rate": 4.274526650023801e-06, |
| "loss": 0.5337, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.497560975609756, |
| "grad_norm": 1.3530237674713135, |
| "learning_rate": 4.271825891266617e-06, |
| "loss": 0.5597, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.502439024390244, |
| "grad_norm": 1.4124853610992432, |
| "learning_rate": 4.269120971363164e-06, |
| "loss": 0.8461, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.5073170731707317, |
| "grad_norm": 1.0308994054794312, |
| "learning_rate": 4.266411896665967e-06, |
| "loss": 0.4304, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.5121951219512195, |
| "grad_norm": 1.0831527709960938, |
| "learning_rate": 4.263698673537309e-06, |
| "loss": 0.5428, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.5170731707317073, |
| "grad_norm": 1.2008475065231323, |
| "learning_rate": 4.260981308349214e-06, |
| "loss": 0.6922, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.521951219512195, |
| "grad_norm": 1.1929224729537964, |
| "learning_rate": 4.258259807483434e-06, |
| "loss": 0.5716, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.526829268292683, |
| "grad_norm": 1.016539216041565, |
| "learning_rate": 4.255534177331435e-06, |
| "loss": 0.5806, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.5317073170731708, |
| "grad_norm": 1.018875002861023, |
| "learning_rate": 4.252804424294378e-06, |
| "loss": 0.5581, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.5365853658536586, |
| "grad_norm": 0.9992810487747192, |
| "learning_rate": 4.25007055478311e-06, |
| "loss": 0.6786, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.5414634146341464, |
| "grad_norm": 1.1207003593444824, |
| "learning_rate": 4.247332575218144e-06, |
| "loss": 0.4548, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.546341463414634, |
| "grad_norm": 1.1575409173965454, |
| "learning_rate": 4.244590492029643e-06, |
| "loss": 0.6846, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.551219512195122, |
| "grad_norm": 0.9805243015289307, |
| "learning_rate": 4.241844311657411e-06, |
| "loss": 0.4301, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.55609756097561, |
| "grad_norm": 0.9760981202125549, |
| "learning_rate": 4.239094040550875e-06, |
| "loss": 0.3545, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.5609756097560976, |
| "grad_norm": 0.8702017664909363, |
| "learning_rate": 4.236339685169065e-06, |
| "loss": 0.5429, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.5658536585365854, |
| "grad_norm": 1.0681567192077637, |
| "learning_rate": 4.233581251980604e-06, |
| "loss": 0.3289, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.5707317073170732, |
| "grad_norm": 1.1807548999786377, |
| "learning_rate": 4.230818747463696e-06, |
| "loss": 0.4876, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.575609756097561, |
| "grad_norm": 0.9812930226325989, |
| "learning_rate": 4.228052178106101e-06, |
| "loss": 0.5025, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.580487804878049, |
| "grad_norm": 0.8600794672966003, |
| "learning_rate": 4.2252815504051285e-06, |
| "loss": 0.3133, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.5853658536585367, |
| "grad_norm": 1.89119553565979, |
| "learning_rate": 4.222506870867618e-06, |
| "loss": 0.6036, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.5902439024390245, |
| "grad_norm": 1.0424220561981201, |
| "learning_rate": 4.2197281460099245e-06, |
| "loss": 0.6877, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.5951219512195123, |
| "grad_norm": 1.027593731880188, |
| "learning_rate": 4.216945382357905e-06, |
| "loss": 0.6352, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.6, |
| "grad_norm": 1.1954094171524048, |
| "learning_rate": 4.214158586446901e-06, |
| "loss": 0.966, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.604878048780488, |
| "grad_norm": 1.2490975856781006, |
| "learning_rate": 4.211367764821722e-06, |
| "loss": 0.9059, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.6097560975609757, |
| "grad_norm": 1.1297111511230469, |
| "learning_rate": 4.208572924036634e-06, |
| "loss": 0.5006, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.6146341463414635, |
| "grad_norm": 0.9446660876274109, |
| "learning_rate": 4.2057740706553415e-06, |
| "loss": 0.4905, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.6195121951219513, |
| "grad_norm": 0.916692316532135, |
| "learning_rate": 4.202971211250971e-06, |
| "loss": 0.6694, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.624390243902439, |
| "grad_norm": 0.840929388999939, |
| "learning_rate": 4.200164352406061e-06, |
| "loss": 0.3739, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.629268292682927, |
| "grad_norm": 1.069427728652954, |
| "learning_rate": 4.197353500712539e-06, |
| "loss": 0.6359, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.6341463414634148, |
| "grad_norm": 1.0660371780395508, |
| "learning_rate": 4.1945386627717115e-06, |
| "loss": 0.5006, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.6390243902439026, |
| "grad_norm": 1.9662373065948486, |
| "learning_rate": 4.191719845194246e-06, |
| "loss": 0.649, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.6439024390243904, |
| "grad_norm": 0.9865717887878418, |
| "learning_rate": 4.188897054600156e-06, |
| "loss": 0.6179, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.648780487804878, |
| "grad_norm": 1.0393004417419434, |
| "learning_rate": 4.186070297618787e-06, |
| "loss": 0.7156, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.653658536585366, |
| "grad_norm": 0.9797636270523071, |
| "learning_rate": 4.183239580888799e-06, |
| "loss": 0.7249, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.658536585365854, |
| "grad_norm": 1.180819034576416, |
| "learning_rate": 4.18040491105815e-06, |
| "loss": 0.4961, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.6634146341463416, |
| "grad_norm": 0.9986240863800049, |
| "learning_rate": 4.177566294784085e-06, |
| "loss": 0.4397, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.6682926829268294, |
| "grad_norm": 1.2416610717773438, |
| "learning_rate": 4.174723738733114e-06, |
| "loss": 0.7625, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.6731707317073172, |
| "grad_norm": 0.9271990656852722, |
| "learning_rate": 4.171877249581001e-06, |
| "loss": 0.6626, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.678048780487805, |
| "grad_norm": 0.9085447788238525, |
| "learning_rate": 4.169026834012748e-06, |
| "loss": 0.4209, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.682926829268293, |
| "grad_norm": 0.9767999649047852, |
| "learning_rate": 4.166172498722577e-06, |
| "loss": 0.4466, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.68780487804878, |
| "grad_norm": 1.405003309249878, |
| "learning_rate": 4.163314250413913e-06, |
| "loss": 0.8207, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.692682926829268, |
| "grad_norm": 1.001278042793274, |
| "learning_rate": 4.160452095799378e-06, |
| "loss": 0.4949, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.697560975609756, |
| "grad_norm": 0.8813621401786804, |
| "learning_rate": 4.157586041600759e-06, |
| "loss": 0.2726, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.7024390243902436, |
| "grad_norm": 1.087396502494812, |
| "learning_rate": 4.154716094549008e-06, |
| "loss": 0.6358, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.7073170731707314, |
| "grad_norm": 0.864211916923523, |
| "learning_rate": 4.151842261384217e-06, |
| "loss": 0.4146, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.7121951219512193, |
| "grad_norm": 1.580068588256836, |
| "learning_rate": 4.148964548855603e-06, |
| "loss": 0.9695, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.717073170731707, |
| "grad_norm": 0.7623794674873352, |
| "learning_rate": 4.146082963721496e-06, |
| "loss": 0.3429, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.721951219512195, |
| "grad_norm": 1.2673579454421997, |
| "learning_rate": 4.143197512749322e-06, |
| "loss": 1.1916, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.7268292682926827, |
| "grad_norm": 1.0848994255065918, |
| "learning_rate": 4.140308202715581e-06, |
| "loss": 0.8112, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.7317073170731705, |
| "grad_norm": 0.9205752015113831, |
| "learning_rate": 4.13741504040584e-06, |
| "loss": 0.4364, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.7365853658536583, |
| "grad_norm": 1.0304152965545654, |
| "learning_rate": 4.134518032614713e-06, |
| "loss": 0.4841, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.741463414634146, |
| "grad_norm": 0.9854786992073059, |
| "learning_rate": 4.1316171861458445e-06, |
| "loss": 0.418, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.746341463414634, |
| "grad_norm": 1.0625019073486328, |
| "learning_rate": 4.128712507811893e-06, |
| "loss": 0.6479, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.7512195121951217, |
| "grad_norm": 1.0722914934158325, |
| "learning_rate": 4.125804004434517e-06, |
| "loss": 0.6462, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.7560975609756095, |
| "grad_norm": 0.890087366104126, |
| "learning_rate": 4.12289168284436e-06, |
| "loss": 0.4462, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.7609756097560973, |
| "grad_norm": 1.0615348815917969, |
| "learning_rate": 4.119975549881029e-06, |
| "loss": 0.6229, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.765853658536585, |
| "grad_norm": 0.8919638395309448, |
| "learning_rate": 4.1170556123930846e-06, |
| "loss": 0.3847, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.770731707317073, |
| "grad_norm": 0.9881047606468201, |
| "learning_rate": 4.114131877238021e-06, |
| "loss": 0.5972, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.7756097560975608, |
| "grad_norm": 0.8665289878845215, |
| "learning_rate": 4.111204351282254e-06, |
| "loss": 0.4755, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.7804878048780486, |
| "grad_norm": 0.7870835661888123, |
| "learning_rate": 4.108273041401098e-06, |
| "loss": 0.4341, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.7853658536585364, |
| "grad_norm": 1.3610732555389404, |
| "learning_rate": 4.105337954478756e-06, |
| "loss": 0.8646, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.790243902439024, |
| "grad_norm": 1.0715464353561401, |
| "learning_rate": 4.102399097408304e-06, |
| "loss": 0.7017, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.795121951219512, |
| "grad_norm": 0.952342689037323, |
| "learning_rate": 4.099456477091667e-06, |
| "loss": 0.3381, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 1.151577115058899, |
| "learning_rate": 4.096510100439611e-06, |
| "loss": 0.8217, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.8048780487804876, |
| "grad_norm": 0.8553835153579712, |
| "learning_rate": 4.093559974371725e-06, |
| "loss": 0.3334, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.8097560975609754, |
| "grad_norm": 1.004453420639038, |
| "learning_rate": 4.0906061058164e-06, |
| "loss": 0.8324, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.8146341463414632, |
| "grad_norm": 0.9378971457481384, |
| "learning_rate": 4.087648501710819e-06, |
| "loss": 0.3753, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.819512195121951, |
| "grad_norm": 0.9435027241706848, |
| "learning_rate": 4.084687169000938e-06, |
| "loss": 0.5675, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.824390243902439, |
| "grad_norm": 0.856401801109314, |
| "learning_rate": 4.081722114641469e-06, |
| "loss": 0.5309, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.8292682926829267, |
| "grad_norm": 1.1497118473052979, |
| "learning_rate": 4.0787533455958626e-06, |
| "loss": 0.3339, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.8341463414634145, |
| "grad_norm": 1.0275132656097412, |
| "learning_rate": 4.075780868836296e-06, |
| "loss": 0.4303, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.8390243902439023, |
| "grad_norm": 0.903195321559906, |
| "learning_rate": 4.072804691343653e-06, |
| "loss": 0.495, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.84390243902439, |
| "grad_norm": 1.1491634845733643, |
| "learning_rate": 4.069824820107507e-06, |
| "loss": 0.9994, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.848780487804878, |
| "grad_norm": 0.7706964015960693, |
| "learning_rate": 4.06684126212611e-06, |
| "loss": 0.3226, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.8536585365853657, |
| "grad_norm": 0.9241564273834229, |
| "learning_rate": 4.063854024406369e-06, |
| "loss": 0.5793, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.8585365853658535, |
| "grad_norm": 0.9884312152862549, |
| "learning_rate": 4.060863113963835e-06, |
| "loss": 0.4683, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.8634146341463413, |
| "grad_norm": 0.9997614026069641, |
| "learning_rate": 4.057868537822683e-06, |
| "loss": 0.519, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.868292682926829, |
| "grad_norm": 0.9449941515922546, |
| "learning_rate": 4.054870303015695e-06, |
| "loss": 0.381, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.873170731707317, |
| "grad_norm": 1.0271875858306885, |
| "learning_rate": 4.05186841658425e-06, |
| "loss": 0.4554, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.8780487804878048, |
| "grad_norm": 0.9256722331047058, |
| "learning_rate": 4.048862885578301e-06, |
| "loss": 0.5297, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.8829268292682926, |
| "grad_norm": 1.0047836303710938, |
| "learning_rate": 4.045853717056358e-06, |
| "loss": 0.5968, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.8878048780487804, |
| "grad_norm": 0.9485352635383606, |
| "learning_rate": 4.0428409180854775e-06, |
| "loss": 0.5042, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.892682926829268, |
| "grad_norm": 0.9246886372566223, |
| "learning_rate": 4.039824495741238e-06, |
| "loss": 0.4622, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.897560975609756, |
| "grad_norm": 0.8539214134216309, |
| "learning_rate": 4.036804457107733e-06, |
| "loss": 0.53, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.902439024390244, |
| "grad_norm": 1.2358900308609009, |
| "learning_rate": 4.0337808092775435e-06, |
| "loss": 0.701, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.9073170731707316, |
| "grad_norm": 0.8977146744728088, |
| "learning_rate": 4.030753559351728e-06, |
| "loss": 0.3942, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.9121951219512194, |
| "grad_norm": 0.8575205206871033, |
| "learning_rate": 4.027722714439808e-06, |
| "loss": 0.3713, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.9170731707317072, |
| "grad_norm": 1.2680315971374512, |
| "learning_rate": 4.024688281659743e-06, |
| "loss": 0.9398, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.921951219512195, |
| "grad_norm": 1.7810138463974, |
| "learning_rate": 4.021650268137924e-06, |
| "loss": 0.6056, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.926829268292683, |
| "grad_norm": 0.8538106083869934, |
| "learning_rate": 4.018608681009143e-06, |
| "loss": 0.4939, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.9317073170731707, |
| "grad_norm": 1.0676621198654175, |
| "learning_rate": 4.015563527416596e-06, |
| "loss": 0.5676, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.9365853658536585, |
| "grad_norm": 0.987902820110321, |
| "learning_rate": 4.012514814511844e-06, |
| "loss": 0.5004, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.9414634146341463, |
| "grad_norm": 0.9196493625640869, |
| "learning_rate": 4.009462549454816e-06, |
| "loss": 0.5476, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.946341463414634, |
| "grad_norm": 1.1288981437683105, |
| "learning_rate": 4.006406739413775e-06, |
| "loss": 0.5804, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.951219512195122, |
| "grad_norm": 0.8905384540557861, |
| "learning_rate": 4.003347391565317e-06, |
| "loss": 0.4979, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.9560975609756097, |
| "grad_norm": 1.000335693359375, |
| "learning_rate": 4.000284513094342e-06, |
| "loss": 0.5727, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.9609756097560975, |
| "grad_norm": 0.9347658753395081, |
| "learning_rate": 3.997218111194042e-06, |
| "loss": 0.5235, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.9658536585365853, |
| "grad_norm": 0.8476413488388062, |
| "learning_rate": 3.994148193065886e-06, |
| "loss": 0.3922, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.970731707317073, |
| "grad_norm": 0.8186416029930115, |
| "learning_rate": 3.991074765919598e-06, |
| "loss": 0.3482, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.975609756097561, |
| "grad_norm": 1.2038166522979736, |
| "learning_rate": 3.987997836973147e-06, |
| "loss": 0.4684, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.9804878048780488, |
| "grad_norm": 1.037007212638855, |
| "learning_rate": 3.984917413452721e-06, |
| "loss": 0.4811, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.9853658536585366, |
| "grad_norm": 1.2915143966674805, |
| "learning_rate": 3.981833502592717e-06, |
| "loss": 0.7361, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.9902439024390244, |
| "grad_norm": 1.5253301858901978, |
| "learning_rate": 3.978746111635725e-06, |
| "loss": 0.312, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.995121951219512, |
| "grad_norm": 0.8432179093360901, |
| "learning_rate": 3.9756552478325045e-06, |
| "loss": 0.4931, |
| "step": 614 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.8148512840270996, |
| "learning_rate": 3.972560918441972e-06, |
| "loss": 0.3185, |
| "step": 615 |
| }, |
| { |
| "epoch": 3.004878048780488, |
| "grad_norm": 0.7907199859619141, |
| "learning_rate": 3.969463130731183e-06, |
| "loss": 0.3731, |
| "step": 616 |
| }, |
| { |
| "epoch": 3.0097560975609756, |
| "grad_norm": 0.8440088033676147, |
| "learning_rate": 3.966361891975316e-06, |
| "loss": 0.4135, |
| "step": 617 |
| }, |
| { |
| "epoch": 3.0146341463414634, |
| "grad_norm": 0.9237042665481567, |
| "learning_rate": 3.963257209457652e-06, |
| "loss": 0.4563, |
| "step": 618 |
| }, |
| { |
| "epoch": 3.0195121951219512, |
| "grad_norm": 0.7932549118995667, |
| "learning_rate": 3.960149090469561e-06, |
| "loss": 0.2539, |
| "step": 619 |
| }, |
| { |
| "epoch": 3.024390243902439, |
| "grad_norm": 0.9151872396469116, |
| "learning_rate": 3.957037542310484e-06, |
| "loss": 0.2685, |
| "step": 620 |
| }, |
| { |
| "epoch": 3.029268292682927, |
| "grad_norm": 0.8039369583129883, |
| "learning_rate": 3.953922572287915e-06, |
| "loss": 0.4401, |
| "step": 621 |
| }, |
| { |
| "epoch": 3.0341463414634147, |
| "grad_norm": 0.9228289723396301, |
| "learning_rate": 3.950804187717384e-06, |
| "loss": 0.5399, |
| "step": 622 |
| }, |
| { |
| "epoch": 3.0390243902439025, |
| "grad_norm": 1.0876184701919556, |
| "learning_rate": 3.947682395922439e-06, |
| "loss": 0.6546, |
| "step": 623 |
| }, |
| { |
| "epoch": 3.0439024390243903, |
| "grad_norm": 0.8611107468605042, |
| "learning_rate": 3.9445572042346346e-06, |
| "loss": 0.2007, |
| "step": 624 |
| }, |
| { |
| "epoch": 3.048780487804878, |
| "grad_norm": 0.8184982538223267, |
| "learning_rate": 3.941428619993505e-06, |
| "loss": 0.3513, |
| "step": 625 |
| }, |
| { |
| "epoch": 3.053658536585366, |
| "grad_norm": 1.0696086883544922, |
| "learning_rate": 3.938296650546552e-06, |
| "loss": 0.2816, |
| "step": 626 |
| }, |
| { |
| "epoch": 3.0585365853658537, |
| "grad_norm": 1.4805349111557007, |
| "learning_rate": 3.935161303249231e-06, |
| "loss": 0.4662, |
| "step": 627 |
| }, |
| { |
| "epoch": 3.0634146341463415, |
| "grad_norm": 1.0260307788848877, |
| "learning_rate": 3.932022585464928e-06, |
| "loss": 0.4517, |
| "step": 628 |
| }, |
| { |
| "epoch": 3.0682926829268293, |
| "grad_norm": 0.972687304019928, |
| "learning_rate": 3.928880504564943e-06, |
| "loss": 0.3406, |
| "step": 629 |
| }, |
| { |
| "epoch": 3.073170731707317, |
| "grad_norm": 0.9252133369445801, |
| "learning_rate": 3.92573506792848e-06, |
| "loss": 0.3444, |
| "step": 630 |
| }, |
| { |
| "epoch": 3.078048780487805, |
| "grad_norm": 1.0907679796218872, |
| "learning_rate": 3.9225862829426184e-06, |
| "loss": 0.3427, |
| "step": 631 |
| }, |
| { |
| "epoch": 3.0829268292682928, |
| "grad_norm": 1.0673291683197021, |
| "learning_rate": 3.919434157002303e-06, |
| "loss": 0.4476, |
| "step": 632 |
| }, |
| { |
| "epoch": 3.0878048780487806, |
| "grad_norm": 0.758525550365448, |
| "learning_rate": 3.916278697510325e-06, |
| "loss": 0.274, |
| "step": 633 |
| }, |
| { |
| "epoch": 3.0926829268292684, |
| "grad_norm": 0.9257010221481323, |
| "learning_rate": 3.913119911877305e-06, |
| "loss": 0.5037, |
| "step": 634 |
| }, |
| { |
| "epoch": 3.097560975609756, |
| "grad_norm": 1.0138721466064453, |
| "learning_rate": 3.909957807521674e-06, |
| "loss": 0.253, |
| "step": 635 |
| }, |
| { |
| "epoch": 3.102439024390244, |
| "grad_norm": 0.9045038819313049, |
| "learning_rate": 3.906792391869657e-06, |
| "loss": 0.381, |
| "step": 636 |
| }, |
| { |
| "epoch": 3.107317073170732, |
| "grad_norm": 0.9882903099060059, |
| "learning_rate": 3.903623672355258e-06, |
| "loss": 0.3979, |
| "step": 637 |
| }, |
| { |
| "epoch": 3.1121951219512196, |
| "grad_norm": 0.9428558945655823, |
| "learning_rate": 3.900451656420237e-06, |
| "loss": 0.3531, |
| "step": 638 |
| }, |
| { |
| "epoch": 3.1170731707317074, |
| "grad_norm": 0.9153510332107544, |
| "learning_rate": 3.897276351514097e-06, |
| "loss": 0.2724, |
| "step": 639 |
| }, |
| { |
| "epoch": 3.1219512195121952, |
| "grad_norm": 1.1453444957733154, |
| "learning_rate": 3.894097765094065e-06, |
| "loss": 0.5488, |
| "step": 640 |
| }, |
| { |
| "epoch": 3.126829268292683, |
| "grad_norm": 0.7873973250389099, |
| "learning_rate": 3.890915904625075e-06, |
| "loss": 0.2855, |
| "step": 641 |
| }, |
| { |
| "epoch": 3.131707317073171, |
| "grad_norm": 1.2348054647445679, |
| "learning_rate": 3.887730777579751e-06, |
| "loss": 0.5758, |
| "step": 642 |
| }, |
| { |
| "epoch": 3.1365853658536587, |
| "grad_norm": 1.1336091756820679, |
| "learning_rate": 3.884542391438387e-06, |
| "loss": 0.681, |
| "step": 643 |
| }, |
| { |
| "epoch": 3.1414634146341465, |
| "grad_norm": 1.1599171161651611, |
| "learning_rate": 3.88135075368893e-06, |
| "loss": 0.8581, |
| "step": 644 |
| }, |
| { |
| "epoch": 3.1463414634146343, |
| "grad_norm": 0.9442818760871887, |
| "learning_rate": 3.878155871826968e-06, |
| "loss": 0.4053, |
| "step": 645 |
| }, |
| { |
| "epoch": 3.151219512195122, |
| "grad_norm": 0.9527677297592163, |
| "learning_rate": 3.874957753355701e-06, |
| "loss": 0.3673, |
| "step": 646 |
| }, |
| { |
| "epoch": 3.15609756097561, |
| "grad_norm": 0.9200324416160583, |
| "learning_rate": 3.8717564057859365e-06, |
| "loss": 0.6203, |
| "step": 647 |
| }, |
| { |
| "epoch": 3.1609756097560977, |
| "grad_norm": 1.0606499910354614, |
| "learning_rate": 3.868551836636063e-06, |
| "loss": 0.564, |
| "step": 648 |
| }, |
| { |
| "epoch": 3.1658536585365855, |
| "grad_norm": 1.064496397972107, |
| "learning_rate": 3.865344053432035e-06, |
| "loss": 0.316, |
| "step": 649 |
| }, |
| { |
| "epoch": 3.1707317073170733, |
| "grad_norm": 1.259859323501587, |
| "learning_rate": 3.862133063707353e-06, |
| "loss": 0.4876, |
| "step": 650 |
| }, |
| { |
| "epoch": 3.175609756097561, |
| "grad_norm": 0.9357886910438538, |
| "learning_rate": 3.858918875003053e-06, |
| "loss": 0.3449, |
| "step": 651 |
| }, |
| { |
| "epoch": 3.180487804878049, |
| "grad_norm": 0.8545693755149841, |
| "learning_rate": 3.855701494867679e-06, |
| "loss": 0.3919, |
| "step": 652 |
| }, |
| { |
| "epoch": 3.1853658536585368, |
| "grad_norm": 0.9744400382041931, |
| "learning_rate": 3.852480930857275e-06, |
| "loss": 0.5602, |
| "step": 653 |
| }, |
| { |
| "epoch": 3.1902439024390246, |
| "grad_norm": 0.9290996193885803, |
| "learning_rate": 3.849257190535356e-06, |
| "loss": 0.3391, |
| "step": 654 |
| }, |
| { |
| "epoch": 3.1951219512195124, |
| "grad_norm": 1.150904655456543, |
| "learning_rate": 3.846030281472902e-06, |
| "loss": 0.6754, |
| "step": 655 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.9049080610275269, |
| "learning_rate": 3.842800211248333e-06, |
| "loss": 0.342, |
| "step": 656 |
| }, |
| { |
| "epoch": 3.204878048780488, |
| "grad_norm": 1.6837809085845947, |
| "learning_rate": 3.839566987447492e-06, |
| "loss": 0.5241, |
| "step": 657 |
| }, |
| { |
| "epoch": 3.209756097560976, |
| "grad_norm": 0.9485814571380615, |
| "learning_rate": 3.8363306176636296e-06, |
| "loss": 0.6228, |
| "step": 658 |
| }, |
| { |
| "epoch": 3.2146341463414636, |
| "grad_norm": 1.029710054397583, |
| "learning_rate": 3.833091109497384e-06, |
| "loss": 0.6792, |
| "step": 659 |
| }, |
| { |
| "epoch": 3.2195121951219514, |
| "grad_norm": 0.881193220615387, |
| "learning_rate": 3.829848470556765e-06, |
| "loss": 0.25, |
| "step": 660 |
| }, |
| { |
| "epoch": 3.2243902439024392, |
| "grad_norm": 0.8923963904380798, |
| "learning_rate": 3.8266027084571335e-06, |
| "loss": 0.4637, |
| "step": 661 |
| }, |
| { |
| "epoch": 3.229268292682927, |
| "grad_norm": 1.039754867553711, |
| "learning_rate": 3.823353830821187e-06, |
| "loss": 0.2432, |
| "step": 662 |
| }, |
| { |
| "epoch": 3.234146341463415, |
| "grad_norm": 0.8456618189811707, |
| "learning_rate": 3.820101845278937e-06, |
| "loss": 0.4113, |
| "step": 663 |
| }, |
| { |
| "epoch": 3.2390243902439027, |
| "grad_norm": 1.1256377696990967, |
| "learning_rate": 3.816846759467696e-06, |
| "loss": 0.4089, |
| "step": 664 |
| }, |
| { |
| "epoch": 3.2439024390243905, |
| "grad_norm": 1.1781972646713257, |
| "learning_rate": 3.8135885810320587e-06, |
| "loss": 0.5524, |
| "step": 665 |
| }, |
| { |
| "epoch": 3.2487804878048783, |
| "grad_norm": 0.8150308132171631, |
| "learning_rate": 3.810327317623881e-06, |
| "loss": 0.3115, |
| "step": 666 |
| }, |
| { |
| "epoch": 3.253658536585366, |
| "grad_norm": 1.0984582901000977, |
| "learning_rate": 3.8070629769022628e-06, |
| "loss": 0.4814, |
| "step": 667 |
| }, |
| { |
| "epoch": 3.258536585365854, |
| "grad_norm": 0.9676326513290405, |
| "learning_rate": 3.8037955665335335e-06, |
| "loss": 0.3787, |
| "step": 668 |
| }, |
| { |
| "epoch": 3.2634146341463417, |
| "grad_norm": 1.1023277044296265, |
| "learning_rate": 3.800525094191231e-06, |
| "loss": 0.4983, |
| "step": 669 |
| }, |
| { |
| "epoch": 3.2682926829268295, |
| "grad_norm": 0.8557276725769043, |
| "learning_rate": 3.797251567556083e-06, |
| "loss": 0.3505, |
| "step": 670 |
| }, |
| { |
| "epoch": 3.2731707317073173, |
| "grad_norm": 0.8645808100700378, |
| "learning_rate": 3.793974994315991e-06, |
| "loss": 0.2222, |
| "step": 671 |
| }, |
| { |
| "epoch": 3.278048780487805, |
| "grad_norm": 1.0875083208084106, |
| "learning_rate": 3.790695382166013e-06, |
| "loss": 0.454, |
| "step": 672 |
| }, |
| { |
| "epoch": 3.2829268292682925, |
| "grad_norm": 0.9899719953536987, |
| "learning_rate": 3.7874127388083415e-06, |
| "loss": 0.3548, |
| "step": 673 |
| }, |
| { |
| "epoch": 3.2878048780487803, |
| "grad_norm": 1.0319510698318481, |
| "learning_rate": 3.7841270719522895e-06, |
| "loss": 0.4269, |
| "step": 674 |
| }, |
| { |
| "epoch": 3.292682926829268, |
| "grad_norm": 1.1482504606246948, |
| "learning_rate": 3.7808383893142692e-06, |
| "loss": 0.2515, |
| "step": 675 |
| }, |
| { |
| "epoch": 3.297560975609756, |
| "grad_norm": 0.977626383304596, |
| "learning_rate": 3.7775466986177763e-06, |
| "loss": 0.3434, |
| "step": 676 |
| }, |
| { |
| "epoch": 3.3024390243902437, |
| "grad_norm": 0.8783503770828247, |
| "learning_rate": 3.774252007593371e-06, |
| "loss": 0.2183, |
| "step": 677 |
| }, |
| { |
| "epoch": 3.3073170731707315, |
| "grad_norm": 1.0656304359436035, |
| "learning_rate": 3.7709543239786593e-06, |
| "loss": 0.5429, |
| "step": 678 |
| }, |
| { |
| "epoch": 3.3121951219512193, |
| "grad_norm": 0.9340170621871948, |
| "learning_rate": 3.767653655518277e-06, |
| "loss": 0.3605, |
| "step": 679 |
| }, |
| { |
| "epoch": 3.317073170731707, |
| "grad_norm": 1.228542685508728, |
| "learning_rate": 3.7643500099638673e-06, |
| "loss": 0.3435, |
| "step": 680 |
| }, |
| { |
| "epoch": 3.321951219512195, |
| "grad_norm": 0.9645997881889343, |
| "learning_rate": 3.7610433950740667e-06, |
| "loss": 0.6613, |
| "step": 681 |
| }, |
| { |
| "epoch": 3.3268292682926828, |
| "grad_norm": 1.077845811843872, |
| "learning_rate": 3.757733818614485e-06, |
| "loss": 0.4901, |
| "step": 682 |
| }, |
| { |
| "epoch": 3.3317073170731706, |
| "grad_norm": 0.8504129648208618, |
| "learning_rate": 3.7544212883576856e-06, |
| "loss": 0.4069, |
| "step": 683 |
| }, |
| { |
| "epoch": 3.3365853658536584, |
| "grad_norm": 1.0994789600372314, |
| "learning_rate": 3.751105812083172e-06, |
| "loss": 0.3476, |
| "step": 684 |
| }, |
| { |
| "epoch": 3.341463414634146, |
| "grad_norm": 1.2599692344665527, |
| "learning_rate": 3.7477873975773655e-06, |
| "loss": 0.5327, |
| "step": 685 |
| }, |
| { |
| "epoch": 3.346341463414634, |
| "grad_norm": 1.0701467990875244, |
| "learning_rate": 3.7444660526335853e-06, |
| "loss": 0.473, |
| "step": 686 |
| }, |
| { |
| "epoch": 3.351219512195122, |
| "grad_norm": 1.7928602695465088, |
| "learning_rate": 3.741141785052036e-06, |
| "loss": 0.8412, |
| "step": 687 |
| }, |
| { |
| "epoch": 3.3560975609756096, |
| "grad_norm": 1.075492262840271, |
| "learning_rate": 3.737814602639784e-06, |
| "loss": 0.5616, |
| "step": 688 |
| }, |
| { |
| "epoch": 3.3609756097560974, |
| "grad_norm": 0.9147948622703552, |
| "learning_rate": 3.7344845132107427e-06, |
| "loss": 0.4454, |
| "step": 689 |
| }, |
| { |
| "epoch": 3.3658536585365852, |
| "grad_norm": 1.0845904350280762, |
| "learning_rate": 3.731151524585651e-06, |
| "loss": 0.4776, |
| "step": 690 |
| }, |
| { |
| "epoch": 3.370731707317073, |
| "grad_norm": 1.2276109457015991, |
| "learning_rate": 3.7278156445920584e-06, |
| "loss": 0.8544, |
| "step": 691 |
| }, |
| { |
| "epoch": 3.375609756097561, |
| "grad_norm": 0.866237998008728, |
| "learning_rate": 3.724476881064303e-06, |
| "loss": 0.297, |
| "step": 692 |
| }, |
| { |
| "epoch": 3.3804878048780487, |
| "grad_norm": 0.8218240737915039, |
| "learning_rate": 3.721135241843496e-06, |
| "loss": 0.3716, |
| "step": 693 |
| }, |
| { |
| "epoch": 3.3853658536585365, |
| "grad_norm": 1.1857250928878784, |
| "learning_rate": 3.7177907347775016e-06, |
| "loss": 0.4965, |
| "step": 694 |
| }, |
| { |
| "epoch": 3.3902439024390243, |
| "grad_norm": 1.726030945777893, |
| "learning_rate": 3.71444336772092e-06, |
| "loss": 0.6302, |
| "step": 695 |
| }, |
| { |
| "epoch": 3.395121951219512, |
| "grad_norm": 1.3070801496505737, |
| "learning_rate": 3.711093148535068e-06, |
| "loss": 0.7139, |
| "step": 696 |
| }, |
| { |
| "epoch": 3.4, |
| "grad_norm": 1.210959553718567, |
| "learning_rate": 3.707740085087959e-06, |
| "loss": 0.3059, |
| "step": 697 |
| }, |
| { |
| "epoch": 3.4048780487804877, |
| "grad_norm": 0.9498068690299988, |
| "learning_rate": 3.7043841852542884e-06, |
| "loss": 0.4441, |
| "step": 698 |
| }, |
| { |
| "epoch": 3.4097560975609755, |
| "grad_norm": 0.8833593130111694, |
| "learning_rate": 3.701025456915411e-06, |
| "loss": 0.3382, |
| "step": 699 |
| }, |
| { |
| "epoch": 3.4146341463414633, |
| "grad_norm": 0.9266107678413391, |
| "learning_rate": 3.697663907959327e-06, |
| "loss": 0.4165, |
| "step": 700 |
| }, |
| { |
| "epoch": 3.419512195121951, |
| "grad_norm": 1.4498791694641113, |
| "learning_rate": 3.6942995462806574e-06, |
| "loss": 0.7236, |
| "step": 701 |
| }, |
| { |
| "epoch": 3.424390243902439, |
| "grad_norm": 0.8553938269615173, |
| "learning_rate": 3.6909323797806314e-06, |
| "loss": 0.3514, |
| "step": 702 |
| }, |
| { |
| "epoch": 3.4292682926829268, |
| "grad_norm": 1.291072130203247, |
| "learning_rate": 3.6875624163670635e-06, |
| "loss": 0.6465, |
| "step": 703 |
| }, |
| { |
| "epoch": 3.4341463414634146, |
| "grad_norm": 1.2313672304153442, |
| "learning_rate": 3.6841896639543394e-06, |
| "loss": 0.3876, |
| "step": 704 |
| }, |
| { |
| "epoch": 3.4390243902439024, |
| "grad_norm": 0.8775885105133057, |
| "learning_rate": 3.6808141304633924e-06, |
| "loss": 0.3682, |
| "step": 705 |
| }, |
| { |
| "epoch": 3.44390243902439, |
| "grad_norm": 0.9601542949676514, |
| "learning_rate": 3.6774358238216878e-06, |
| "loss": 0.3753, |
| "step": 706 |
| }, |
| { |
| "epoch": 3.448780487804878, |
| "grad_norm": 1.371579885482788, |
| "learning_rate": 3.6740547519632048e-06, |
| "loss": 0.3362, |
| "step": 707 |
| }, |
| { |
| "epoch": 3.453658536585366, |
| "grad_norm": 0.7805762887001038, |
| "learning_rate": 3.670670922828414e-06, |
| "loss": 0.3909, |
| "step": 708 |
| }, |
| { |
| "epoch": 3.4585365853658536, |
| "grad_norm": 1.2143738269805908, |
| "learning_rate": 3.667284344364264e-06, |
| "loss": 0.3859, |
| "step": 709 |
| }, |
| { |
| "epoch": 3.4634146341463414, |
| "grad_norm": 0.9730637073516846, |
| "learning_rate": 3.6638950245241604e-06, |
| "loss": 0.5365, |
| "step": 710 |
| }, |
| { |
| "epoch": 3.4682926829268292, |
| "grad_norm": 1.291444182395935, |
| "learning_rate": 3.660502971267945e-06, |
| "loss": 0.3862, |
| "step": 711 |
| }, |
| { |
| "epoch": 3.473170731707317, |
| "grad_norm": 0.9339273571968079, |
| "learning_rate": 3.65710819256188e-06, |
| "loss": 0.1731, |
| "step": 712 |
| }, |
| { |
| "epoch": 3.478048780487805, |
| "grad_norm": 0.9810922741889954, |
| "learning_rate": 3.65371069637863e-06, |
| "loss": 0.3927, |
| "step": 713 |
| }, |
| { |
| "epoch": 3.4829268292682927, |
| "grad_norm": 1.027161955833435, |
| "learning_rate": 3.650310490697238e-06, |
| "loss": 0.5772, |
| "step": 714 |
| }, |
| { |
| "epoch": 3.4878048780487805, |
| "grad_norm": 1.0883551836013794, |
| "learning_rate": 3.646907583503114e-06, |
| "loss": 0.5687, |
| "step": 715 |
| }, |
| { |
| "epoch": 3.4926829268292683, |
| "grad_norm": 1.0536428689956665, |
| "learning_rate": 3.6435019827880093e-06, |
| "loss": 0.3598, |
| "step": 716 |
| }, |
| { |
| "epoch": 3.497560975609756, |
| "grad_norm": 0.8690207004547119, |
| "learning_rate": 3.640093696550003e-06, |
| "loss": 0.4317, |
| "step": 717 |
| }, |
| { |
| "epoch": 3.502439024390244, |
| "grad_norm": 0.9786996245384216, |
| "learning_rate": 3.6366827327934817e-06, |
| "loss": 0.428, |
| "step": 718 |
| }, |
| { |
| "epoch": 3.5073170731707317, |
| "grad_norm": 1.1201248168945312, |
| "learning_rate": 3.6332690995291176e-06, |
| "loss": 0.5273, |
| "step": 719 |
| }, |
| { |
| "epoch": 3.5121951219512195, |
| "grad_norm": 1.447026252746582, |
| "learning_rate": 3.6298528047738545e-06, |
| "loss": 1.3048, |
| "step": 720 |
| }, |
| { |
| "epoch": 3.5170731707317073, |
| "grad_norm": 0.988138735294342, |
| "learning_rate": 3.626433856550886e-06, |
| "loss": 0.6072, |
| "step": 721 |
| }, |
| { |
| "epoch": 3.521951219512195, |
| "grad_norm": 1.019419550895691, |
| "learning_rate": 3.623012262889637e-06, |
| "loss": 0.5362, |
| "step": 722 |
| }, |
| { |
| "epoch": 3.526829268292683, |
| "grad_norm": 1.2538626194000244, |
| "learning_rate": 3.6195880318257465e-06, |
| "loss": 0.5864, |
| "step": 723 |
| }, |
| { |
| "epoch": 3.5317073170731708, |
| "grad_norm": 1.2680046558380127, |
| "learning_rate": 3.616161171401046e-06, |
| "loss": 0.7208, |
| "step": 724 |
| }, |
| { |
| "epoch": 3.5365853658536586, |
| "grad_norm": 1.0290186405181885, |
| "learning_rate": 3.612731689663542e-06, |
| "loss": 0.3631, |
| "step": 725 |
| }, |
| { |
| "epoch": 3.5414634146341464, |
| "grad_norm": 0.941155731678009, |
| "learning_rate": 3.6092995946673996e-06, |
| "loss": 0.592, |
| "step": 726 |
| }, |
| { |
| "epoch": 3.546341463414634, |
| "grad_norm": 1.253089427947998, |
| "learning_rate": 3.605864894472918e-06, |
| "loss": 0.388, |
| "step": 727 |
| }, |
| { |
| "epoch": 3.551219512195122, |
| "grad_norm": 1.1095308065414429, |
| "learning_rate": 3.602427597146516e-06, |
| "loss": 0.5986, |
| "step": 728 |
| }, |
| { |
| "epoch": 3.55609756097561, |
| "grad_norm": 1.3642429113388062, |
| "learning_rate": 3.5989877107607134e-06, |
| "loss": 0.5178, |
| "step": 729 |
| }, |
| { |
| "epoch": 3.5609756097560976, |
| "grad_norm": 1.2215455770492554, |
| "learning_rate": 3.5955452433941075e-06, |
| "loss": 0.562, |
| "step": 730 |
| }, |
| { |
| "epoch": 3.5658536585365854, |
| "grad_norm": 0.955337405204773, |
| "learning_rate": 3.5921002031313586e-06, |
| "loss": 0.337, |
| "step": 731 |
| }, |
| { |
| "epoch": 3.5707317073170732, |
| "grad_norm": 0.9776975512504578, |
| "learning_rate": 3.58865259806317e-06, |
| "loss": 0.3314, |
| "step": 732 |
| }, |
| { |
| "epoch": 3.575609756097561, |
| "grad_norm": 1.1856746673583984, |
| "learning_rate": 3.585202436286267e-06, |
| "loss": 0.5582, |
| "step": 733 |
| }, |
| { |
| "epoch": 3.580487804878049, |
| "grad_norm": 1.1398861408233643, |
| "learning_rate": 3.581749725903381e-06, |
| "loss": 0.5148, |
| "step": 734 |
| }, |
| { |
| "epoch": 3.5853658536585367, |
| "grad_norm": 0.9702528119087219, |
| "learning_rate": 3.5782944750232274e-06, |
| "loss": 0.4666, |
| "step": 735 |
| }, |
| { |
| "epoch": 3.5902439024390245, |
| "grad_norm": 0.951115608215332, |
| "learning_rate": 3.574836691760489e-06, |
| "loss": 0.1839, |
| "step": 736 |
| }, |
| { |
| "epoch": 3.5951219512195123, |
| "grad_norm": 1.1828500032424927, |
| "learning_rate": 3.571376384235795e-06, |
| "loss": 0.4325, |
| "step": 737 |
| }, |
| { |
| "epoch": 3.6, |
| "grad_norm": 1.0837311744689941, |
| "learning_rate": 3.5679135605757035e-06, |
| "loss": 0.3254, |
| "step": 738 |
| }, |
| { |
| "epoch": 3.604878048780488, |
| "grad_norm": 0.9672379493713379, |
| "learning_rate": 3.564448228912682e-06, |
| "loss": 0.3019, |
| "step": 739 |
| }, |
| { |
| "epoch": 3.6097560975609757, |
| "grad_norm": 1.1051793098449707, |
| "learning_rate": 3.5609803973850877e-06, |
| "loss": 0.4066, |
| "step": 740 |
| }, |
| { |
| "epoch": 3.6146341463414635, |
| "grad_norm": 1.0380092859268188, |
| "learning_rate": 3.557510074137147e-06, |
| "loss": 0.4469, |
| "step": 741 |
| }, |
| { |
| "epoch": 3.6195121951219513, |
| "grad_norm": 0.8451574444770813, |
| "learning_rate": 3.554037267318942e-06, |
| "loss": 0.4371, |
| "step": 742 |
| }, |
| { |
| "epoch": 3.624390243902439, |
| "grad_norm": 0.9186508655548096, |
| "learning_rate": 3.5505619850863847e-06, |
| "loss": 0.3235, |
| "step": 743 |
| }, |
| { |
| "epoch": 3.629268292682927, |
| "grad_norm": 0.9366088509559631, |
| "learning_rate": 3.5470842356012007e-06, |
| "loss": 0.2157, |
| "step": 744 |
| }, |
| { |
| "epoch": 3.6341463414634148, |
| "grad_norm": 1.0610483884811401, |
| "learning_rate": 3.5436040270309113e-06, |
| "loss": 0.5039, |
| "step": 745 |
| }, |
| { |
| "epoch": 3.6390243902439026, |
| "grad_norm": 0.9160799980163574, |
| "learning_rate": 3.540121367548811e-06, |
| "loss": 0.2946, |
| "step": 746 |
| }, |
| { |
| "epoch": 3.6439024390243904, |
| "grad_norm": 1.1034797430038452, |
| "learning_rate": 3.5366362653339524e-06, |
| "loss": 0.6456, |
| "step": 747 |
| }, |
| { |
| "epoch": 3.648780487804878, |
| "grad_norm": 1.0821247100830078, |
| "learning_rate": 3.533148728571124e-06, |
| "loss": 0.2399, |
| "step": 748 |
| }, |
| { |
| "epoch": 3.653658536585366, |
| "grad_norm": 1.058131456375122, |
| "learning_rate": 3.5296587654508317e-06, |
| "loss": 0.4446, |
| "step": 749 |
| }, |
| { |
| "epoch": 3.658536585365854, |
| "grad_norm": 2.5837414264678955, |
| "learning_rate": 3.526166384169279e-06, |
| "loss": 0.6744, |
| "step": 750 |
| }, |
| { |
| "epoch": 3.6634146341463416, |
| "grad_norm": 1.1982769966125488, |
| "learning_rate": 3.5226715929283507e-06, |
| "loss": 0.3842, |
| "step": 751 |
| }, |
| { |
| "epoch": 3.6682926829268294, |
| "grad_norm": 0.849543035030365, |
| "learning_rate": 3.519174399935588e-06, |
| "loss": 0.2888, |
| "step": 752 |
| }, |
| { |
| "epoch": 3.6731707317073172, |
| "grad_norm": 0.9241414666175842, |
| "learning_rate": 3.5156748134041767e-06, |
| "loss": 0.2104, |
| "step": 753 |
| }, |
| { |
| "epoch": 3.678048780487805, |
| "grad_norm": 1.1095383167266846, |
| "learning_rate": 3.5121728415529203e-06, |
| "loss": 0.7921, |
| "step": 754 |
| }, |
| { |
| "epoch": 3.682926829268293, |
| "grad_norm": 1.0041614770889282, |
| "learning_rate": 3.5086684926062266e-06, |
| "loss": 0.3725, |
| "step": 755 |
| }, |
| { |
| "epoch": 3.68780487804878, |
| "grad_norm": 1.1060123443603516, |
| "learning_rate": 3.505161774794085e-06, |
| "loss": 0.4381, |
| "step": 756 |
| }, |
| { |
| "epoch": 3.692682926829268, |
| "grad_norm": 0.917390763759613, |
| "learning_rate": 3.5016526963520474e-06, |
| "loss": 0.3079, |
| "step": 757 |
| }, |
| { |
| "epoch": 3.697560975609756, |
| "grad_norm": 1.10245943069458, |
| "learning_rate": 3.498141265521212e-06, |
| "loss": 0.8131, |
| "step": 758 |
| }, |
| { |
| "epoch": 3.7024390243902436, |
| "grad_norm": 0.9380331039428711, |
| "learning_rate": 3.4946274905481997e-06, |
| "loss": 0.3149, |
| "step": 759 |
| }, |
| { |
| "epoch": 3.7073170731707314, |
| "grad_norm": 1.1269862651824951, |
| "learning_rate": 3.4911113796851364e-06, |
| "loss": 0.4485, |
| "step": 760 |
| }, |
| { |
| "epoch": 3.7121951219512193, |
| "grad_norm": 0.9700295925140381, |
| "learning_rate": 3.487592941189636e-06, |
| "loss": 0.2876, |
| "step": 761 |
| }, |
| { |
| "epoch": 3.717073170731707, |
| "grad_norm": 1.2680857181549072, |
| "learning_rate": 3.484072183324776e-06, |
| "loss": 0.7641, |
| "step": 762 |
| }, |
| { |
| "epoch": 3.721951219512195, |
| "grad_norm": 1.0977661609649658, |
| "learning_rate": 3.4805491143590823e-06, |
| "loss": 0.5787, |
| "step": 763 |
| }, |
| { |
| "epoch": 3.7268292682926827, |
| "grad_norm": 1.1288927793502808, |
| "learning_rate": 3.4770237425665103e-06, |
| "loss": 0.5136, |
| "step": 764 |
| }, |
| { |
| "epoch": 3.7317073170731705, |
| "grad_norm": 1.0365726947784424, |
| "learning_rate": 3.4734960762264204e-06, |
| "loss": 0.6113, |
| "step": 765 |
| }, |
| { |
| "epoch": 3.7365853658536583, |
| "grad_norm": 1.0862468481063843, |
| "learning_rate": 3.469966123623563e-06, |
| "loss": 0.6132, |
| "step": 766 |
| }, |
| { |
| "epoch": 3.741463414634146, |
| "grad_norm": 1.078573226928711, |
| "learning_rate": 3.46643389304806e-06, |
| "loss": 0.465, |
| "step": 767 |
| }, |
| { |
| "epoch": 3.746341463414634, |
| "grad_norm": 1.3158698081970215, |
| "learning_rate": 3.4628993927953786e-06, |
| "loss": 0.9712, |
| "step": 768 |
| }, |
| { |
| "epoch": 3.7512195121951217, |
| "grad_norm": 0.9415583610534668, |
| "learning_rate": 3.45936263116632e-06, |
| "loss": 0.2983, |
| "step": 769 |
| }, |
| { |
| "epoch": 3.7560975609756095, |
| "grad_norm": 0.8315958380699158, |
| "learning_rate": 3.4558236164669957e-06, |
| "loss": 0.3424, |
| "step": 770 |
| }, |
| { |
| "epoch": 3.7609756097560973, |
| "grad_norm": 1.0617963075637817, |
| "learning_rate": 3.4522823570088073e-06, |
| "loss": 0.2894, |
| "step": 771 |
| }, |
| { |
| "epoch": 3.765853658536585, |
| "grad_norm": 1.1169861555099487, |
| "learning_rate": 3.4487388611084295e-06, |
| "loss": 0.4487, |
| "step": 772 |
| }, |
| { |
| "epoch": 3.770731707317073, |
| "grad_norm": 1.055111050605774, |
| "learning_rate": 3.445193137087788e-06, |
| "loss": 0.2635, |
| "step": 773 |
| }, |
| { |
| "epoch": 3.7756097560975608, |
| "grad_norm": 0.9245924949645996, |
| "learning_rate": 3.4416451932740424e-06, |
| "loss": 0.4559, |
| "step": 774 |
| }, |
| { |
| "epoch": 3.7804878048780486, |
| "grad_norm": 1.205375075340271, |
| "learning_rate": 3.4380950379995652e-06, |
| "loss": 0.5927, |
| "step": 775 |
| }, |
| { |
| "epoch": 3.7853658536585364, |
| "grad_norm": 0.9713656902313232, |
| "learning_rate": 3.434542679601922e-06, |
| "loss": 0.469, |
| "step": 776 |
| }, |
| { |
| "epoch": 3.790243902439024, |
| "grad_norm": 1.037654161453247, |
| "learning_rate": 3.4309881264238538e-06, |
| "loss": 0.2425, |
| "step": 777 |
| }, |
| { |
| "epoch": 3.795121951219512, |
| "grad_norm": 1.1205179691314697, |
| "learning_rate": 3.4274313868132547e-06, |
| "loss": 0.3137, |
| "step": 778 |
| }, |
| { |
| "epoch": 3.8, |
| "grad_norm": 1.0268551111221313, |
| "learning_rate": 3.4238724691231534e-06, |
| "loss": 0.3677, |
| "step": 779 |
| }, |
| { |
| "epoch": 3.8048780487804876, |
| "grad_norm": 1.1247752904891968, |
| "learning_rate": 3.4203113817116955e-06, |
| "loss": 0.5795, |
| "step": 780 |
| }, |
| { |
| "epoch": 3.8097560975609754, |
| "grad_norm": 1.093456745147705, |
| "learning_rate": 3.4167481329421204e-06, |
| "loss": 0.3284, |
| "step": 781 |
| }, |
| { |
| "epoch": 3.8146341463414632, |
| "grad_norm": 1.1700347661972046, |
| "learning_rate": 3.4131827311827447e-06, |
| "loss": 0.5421, |
| "step": 782 |
| }, |
| { |
| "epoch": 3.819512195121951, |
| "grad_norm": 0.8392572999000549, |
| "learning_rate": 3.4096151848069416e-06, |
| "loss": 0.2708, |
| "step": 783 |
| }, |
| { |
| "epoch": 3.824390243902439, |
| "grad_norm": 0.8408678770065308, |
| "learning_rate": 3.4060455021931195e-06, |
| "loss": 0.3812, |
| "step": 784 |
| }, |
| { |
| "epoch": 3.8292682926829267, |
| "grad_norm": 1.0810067653656006, |
| "learning_rate": 3.402473691724704e-06, |
| "loss": 0.3683, |
| "step": 785 |
| }, |
| { |
| "epoch": 3.8341463414634145, |
| "grad_norm": 0.7941387295722961, |
| "learning_rate": 3.39889976179012e-06, |
| "loss": 0.3159, |
| "step": 786 |
| }, |
| { |
| "epoch": 3.8390243902439023, |
| "grad_norm": 0.9092013835906982, |
| "learning_rate": 3.3953237207827673e-06, |
| "loss": 0.488, |
| "step": 787 |
| }, |
| { |
| "epoch": 3.84390243902439, |
| "grad_norm": 1.1730678081512451, |
| "learning_rate": 3.391745577101005e-06, |
| "loss": 0.729, |
| "step": 788 |
| }, |
| { |
| "epoch": 3.848780487804878, |
| "grad_norm": 1.2216498851776123, |
| "learning_rate": 3.3881653391481306e-06, |
| "loss": 0.4317, |
| "step": 789 |
| }, |
| { |
| "epoch": 3.8536585365853657, |
| "grad_norm": 1.2773486375808716, |
| "learning_rate": 3.384583015332359e-06, |
| "loss": 0.4652, |
| "step": 790 |
| }, |
| { |
| "epoch": 3.8585365853658535, |
| "grad_norm": 1.0143038034439087, |
| "learning_rate": 3.380998614066805e-06, |
| "loss": 0.3218, |
| "step": 791 |
| }, |
| { |
| "epoch": 3.8634146341463413, |
| "grad_norm": 1.1280455589294434, |
| "learning_rate": 3.3774121437694606e-06, |
| "loss": 0.4218, |
| "step": 792 |
| }, |
| { |
| "epoch": 3.868292682926829, |
| "grad_norm": 1.0607877969741821, |
| "learning_rate": 3.3738236128631786e-06, |
| "loss": 0.2838, |
| "step": 793 |
| }, |
| { |
| "epoch": 3.873170731707317, |
| "grad_norm": 1.145294189453125, |
| "learning_rate": 3.3702330297756503e-06, |
| "loss": 0.5267, |
| "step": 794 |
| }, |
| { |
| "epoch": 3.8780487804878048, |
| "grad_norm": 1.0045257806777954, |
| "learning_rate": 3.366640402939387e-06, |
| "loss": 0.1956, |
| "step": 795 |
| }, |
| { |
| "epoch": 3.8829268292682926, |
| "grad_norm": 1.9531723260879517, |
| "learning_rate": 3.363045740791698e-06, |
| "loss": 0.465, |
| "step": 796 |
| }, |
| { |
| "epoch": 3.8878048780487804, |
| "grad_norm": 0.8978796005249023, |
| "learning_rate": 3.3594490517746774e-06, |
| "loss": 0.3693, |
| "step": 797 |
| }, |
| { |
| "epoch": 3.892682926829268, |
| "grad_norm": 0.9718804955482483, |
| "learning_rate": 3.3558503443351733e-06, |
| "loss": 0.4562, |
| "step": 798 |
| }, |
| { |
| "epoch": 3.897560975609756, |
| "grad_norm": 0.888634204864502, |
| "learning_rate": 3.352249626924777e-06, |
| "loss": 0.3944, |
| "step": 799 |
| }, |
| { |
| "epoch": 3.902439024390244, |
| "grad_norm": 1.355827808380127, |
| "learning_rate": 3.348646907999801e-06, |
| "loss": 0.932, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.9073170731707316, |
| "grad_norm": 1.005395770072937, |
| "learning_rate": 3.345042196021257e-06, |
| "loss": 0.4999, |
| "step": 801 |
| }, |
| { |
| "epoch": 3.9121951219512194, |
| "grad_norm": 1.253622055053711, |
| "learning_rate": 3.3414354994548385e-06, |
| "loss": 0.7352, |
| "step": 802 |
| }, |
| { |
| "epoch": 3.9170731707317072, |
| "grad_norm": 1.058511734008789, |
| "learning_rate": 3.337826826770898e-06, |
| "loss": 0.4555, |
| "step": 803 |
| }, |
| { |
| "epoch": 3.921951219512195, |
| "grad_norm": 1.0374521017074585, |
| "learning_rate": 3.3342161864444312e-06, |
| "loss": 0.3344, |
| "step": 804 |
| }, |
| { |
| "epoch": 3.926829268292683, |
| "grad_norm": 0.8886129856109619, |
| "learning_rate": 3.3306035869550534e-06, |
| "loss": 0.2988, |
| "step": 805 |
| }, |
| { |
| "epoch": 3.9317073170731707, |
| "grad_norm": 1.144066572189331, |
| "learning_rate": 3.326989036786981e-06, |
| "loss": 0.2734, |
| "step": 806 |
| }, |
| { |
| "epoch": 3.9365853658536585, |
| "grad_norm": 1.2550209760665894, |
| "learning_rate": 3.3233725444290126e-06, |
| "loss": 0.4412, |
| "step": 807 |
| }, |
| { |
| "epoch": 3.9414634146341463, |
| "grad_norm": 0.9990758299827576, |
| "learning_rate": 3.3197541183745065e-06, |
| "loss": 0.6003, |
| "step": 808 |
| }, |
| { |
| "epoch": 3.946341463414634, |
| "grad_norm": 0.9522280097007751, |
| "learning_rate": 3.3161337671213634e-06, |
| "loss": 0.3911, |
| "step": 809 |
| }, |
| { |
| "epoch": 3.951219512195122, |
| "grad_norm": 0.8392520546913147, |
| "learning_rate": 3.312511499172006e-06, |
| "loss": 0.3038, |
| "step": 810 |
| }, |
| { |
| "epoch": 3.9560975609756097, |
| "grad_norm": 1.0443915128707886, |
| "learning_rate": 3.3088873230333562e-06, |
| "loss": 0.4902, |
| "step": 811 |
| }, |
| { |
| "epoch": 3.9609756097560975, |
| "grad_norm": 1.5163060426712036, |
| "learning_rate": 3.3052612472168193e-06, |
| "loss": 0.3193, |
| "step": 812 |
| }, |
| { |
| "epoch": 3.9658536585365853, |
| "grad_norm": 1.0339828729629517, |
| "learning_rate": 3.3016332802382618e-06, |
| "loss": 0.4381, |
| "step": 813 |
| }, |
| { |
| "epoch": 3.970731707317073, |
| "grad_norm": 1.1660975217819214, |
| "learning_rate": 3.2980034306179897e-06, |
| "loss": 0.3802, |
| "step": 814 |
| }, |
| { |
| "epoch": 3.975609756097561, |
| "grad_norm": 0.9736506938934326, |
| "learning_rate": 3.294371706880733e-06, |
| "loss": 0.4898, |
| "step": 815 |
| }, |
| { |
| "epoch": 3.9804878048780488, |
| "grad_norm": 0.9874656796455383, |
| "learning_rate": 3.290738117555622e-06, |
| "loss": 0.3163, |
| "step": 816 |
| }, |
| { |
| "epoch": 3.9853658536585366, |
| "grad_norm": 1.0968129634857178, |
| "learning_rate": 3.2871026711761666e-06, |
| "loss": 0.5979, |
| "step": 817 |
| }, |
| { |
| "epoch": 3.9902439024390244, |
| "grad_norm": 1.0595239400863647, |
| "learning_rate": 3.2834653762802414e-06, |
| "loss": 0.3729, |
| "step": 818 |
| }, |
| { |
| "epoch": 3.995121951219512, |
| "grad_norm": 1.129920482635498, |
| "learning_rate": 3.2798262414100594e-06, |
| "loss": 0.3179, |
| "step": 819 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.9918786883354187, |
| "learning_rate": 3.2761852751121566e-06, |
| "loss": 0.3184, |
| "step": 820 |
| }, |
| { |
| "epoch": 4.004878048780488, |
| "grad_norm": 1.0081284046173096, |
| "learning_rate": 3.272542485937369e-06, |
| "loss": 0.4687, |
| "step": 821 |
| }, |
| { |
| "epoch": 4.009756097560976, |
| "grad_norm": 1.025944471359253, |
| "learning_rate": 3.2688978824408136e-06, |
| "loss": 0.2941, |
| "step": 822 |
| }, |
| { |
| "epoch": 4.014634146341463, |
| "grad_norm": 0.9274433255195618, |
| "learning_rate": 3.2652514731818698e-06, |
| "loss": 0.2709, |
| "step": 823 |
| }, |
| { |
| "epoch": 4.019512195121951, |
| "grad_norm": 0.9776120185852051, |
| "learning_rate": 3.2616032667241564e-06, |
| "loss": 0.1915, |
| "step": 824 |
| }, |
| { |
| "epoch": 4.024390243902439, |
| "grad_norm": 1.0483906269073486, |
| "learning_rate": 3.257953271635513e-06, |
| "loss": 0.5291, |
| "step": 825 |
| }, |
| { |
| "epoch": 4.029268292682927, |
| "grad_norm": 0.9861155152320862, |
| "learning_rate": 3.2543014964879814e-06, |
| "loss": 0.2661, |
| "step": 826 |
| }, |
| { |
| "epoch": 4.034146341463415, |
| "grad_norm": 1.320236086845398, |
| "learning_rate": 3.250647949857781e-06, |
| "loss": 0.2301, |
| "step": 827 |
| }, |
| { |
| "epoch": 4.0390243902439025, |
| "grad_norm": 1.469008207321167, |
| "learning_rate": 3.2469926403252932e-06, |
| "loss": 0.6312, |
| "step": 828 |
| }, |
| { |
| "epoch": 4.04390243902439, |
| "grad_norm": 1.1796139478683472, |
| "learning_rate": 3.2433355764750417e-06, |
| "loss": 0.1662, |
| "step": 829 |
| }, |
| { |
| "epoch": 4.048780487804878, |
| "grad_norm": 1.3211941719055176, |
| "learning_rate": 3.2396767668956656e-06, |
| "loss": 0.514, |
| "step": 830 |
| }, |
| { |
| "epoch": 4.053658536585366, |
| "grad_norm": 1.2660949230194092, |
| "learning_rate": 3.2360162201799085e-06, |
| "loss": 0.4094, |
| "step": 831 |
| }, |
| { |
| "epoch": 4.058536585365854, |
| "grad_norm": 1.0212953090667725, |
| "learning_rate": 3.2323539449245906e-06, |
| "loss": 0.2655, |
| "step": 832 |
| }, |
| { |
| "epoch": 4.0634146341463415, |
| "grad_norm": 1.0902808904647827, |
| "learning_rate": 3.2286899497305917e-06, |
| "loss": 0.2253, |
| "step": 833 |
| }, |
| { |
| "epoch": 4.068292682926829, |
| "grad_norm": 0.9108849167823792, |
| "learning_rate": 3.2250242432028335e-06, |
| "loss": 0.3198, |
| "step": 834 |
| }, |
| { |
| "epoch": 4.073170731707317, |
| "grad_norm": 1.349802851676941, |
| "learning_rate": 3.221356833950254e-06, |
| "loss": 0.7985, |
| "step": 835 |
| }, |
| { |
| "epoch": 4.078048780487805, |
| "grad_norm": 0.9956075549125671, |
| "learning_rate": 3.21768773058579e-06, |
| "loss": 0.2803, |
| "step": 836 |
| }, |
| { |
| "epoch": 4.082926829268293, |
| "grad_norm": 1.123693823814392, |
| "learning_rate": 3.21401694172636e-06, |
| "loss": 0.2803, |
| "step": 837 |
| }, |
| { |
| "epoch": 4.087804878048781, |
| "grad_norm": 0.7673684358596802, |
| "learning_rate": 3.2103444759928383e-06, |
| "loss": 0.1641, |
| "step": 838 |
| }, |
| { |
| "epoch": 4.092682926829268, |
| "grad_norm": 0.8822228312492371, |
| "learning_rate": 3.2066703420100377e-06, |
| "loss": 0.1986, |
| "step": 839 |
| }, |
| { |
| "epoch": 4.097560975609756, |
| "grad_norm": 1.0781583786010742, |
| "learning_rate": 3.2029945484066883e-06, |
| "loss": 0.4772, |
| "step": 840 |
| }, |
| { |
| "epoch": 4.102439024390244, |
| "grad_norm": 1.1041297912597656, |
| "learning_rate": 3.1993171038154203e-06, |
| "loss": 0.2845, |
| "step": 841 |
| }, |
| { |
| "epoch": 4.107317073170732, |
| "grad_norm": 0.8251579403877258, |
| "learning_rate": 3.1956380168727385e-06, |
| "loss": 0.267, |
| "step": 842 |
| }, |
| { |
| "epoch": 4.11219512195122, |
| "grad_norm": 1.3584140539169312, |
| "learning_rate": 3.191957296219007e-06, |
| "loss": 0.4082, |
| "step": 843 |
| }, |
| { |
| "epoch": 4.117073170731707, |
| "grad_norm": 1.0828088521957397, |
| "learning_rate": 3.1882749504984247e-06, |
| "loss": 0.235, |
| "step": 844 |
| }, |
| { |
| "epoch": 4.121951219512195, |
| "grad_norm": 0.7371436357498169, |
| "learning_rate": 3.1845909883590076e-06, |
| "loss": 0.2477, |
| "step": 845 |
| }, |
| { |
| "epoch": 4.126829268292683, |
| "grad_norm": 1.1333115100860596, |
| "learning_rate": 3.180905418452569e-06, |
| "loss": 0.5014, |
| "step": 846 |
| }, |
| { |
| "epoch": 4.131707317073171, |
| "grad_norm": 0.8755896687507629, |
| "learning_rate": 3.1772182494346963e-06, |
| "loss": 0.3227, |
| "step": 847 |
| }, |
| { |
| "epoch": 4.136585365853659, |
| "grad_norm": 1.198217749595642, |
| "learning_rate": 3.1735294899647344e-06, |
| "loss": 0.415, |
| "step": 848 |
| }, |
| { |
| "epoch": 4.1414634146341465, |
| "grad_norm": 1.0420305728912354, |
| "learning_rate": 3.169839148705762e-06, |
| "loss": 0.2631, |
| "step": 849 |
| }, |
| { |
| "epoch": 4.146341463414634, |
| "grad_norm": 1.231887698173523, |
| "learning_rate": 3.1661472343245725e-06, |
| "loss": 0.7334, |
| "step": 850 |
| }, |
| { |
| "epoch": 4.151219512195122, |
| "grad_norm": 1.086815357208252, |
| "learning_rate": 3.162453755491655e-06, |
| "loss": 0.4588, |
| "step": 851 |
| }, |
| { |
| "epoch": 4.15609756097561, |
| "grad_norm": 1.2308528423309326, |
| "learning_rate": 3.158758720881171e-06, |
| "loss": 0.431, |
| "step": 852 |
| }, |
| { |
| "epoch": 4.160975609756098, |
| "grad_norm": 1.3028631210327148, |
| "learning_rate": 3.155062139170937e-06, |
| "loss": 0.351, |
| "step": 853 |
| }, |
| { |
| "epoch": 4.1658536585365855, |
| "grad_norm": 1.2701951265335083, |
| "learning_rate": 3.1513640190424034e-06, |
| "loss": 0.2138, |
| "step": 854 |
| }, |
| { |
| "epoch": 4.170731707317073, |
| "grad_norm": 1.1779377460479736, |
| "learning_rate": 3.147664369180632e-06, |
| "loss": 0.2532, |
| "step": 855 |
| }, |
| { |
| "epoch": 4.175609756097561, |
| "grad_norm": 1.287402868270874, |
| "learning_rate": 3.143963198274278e-06, |
| "loss": 0.4129, |
| "step": 856 |
| }, |
| { |
| "epoch": 4.180487804878049, |
| "grad_norm": 0.9432743787765503, |
| "learning_rate": 3.140260515015569e-06, |
| "loss": 0.2941, |
| "step": 857 |
| }, |
| { |
| "epoch": 4.185365853658537, |
| "grad_norm": 1.0425447225570679, |
| "learning_rate": 3.136556328100284e-06, |
| "loss": 0.3346, |
| "step": 858 |
| }, |
| { |
| "epoch": 4.190243902439025, |
| "grad_norm": 1.240814447402954, |
| "learning_rate": 3.132850646227734e-06, |
| "loss": 0.6134, |
| "step": 859 |
| }, |
| { |
| "epoch": 4.195121951219512, |
| "grad_norm": 1.1277090311050415, |
| "learning_rate": 3.12914347810074e-06, |
| "loss": 0.3543, |
| "step": 860 |
| }, |
| { |
| "epoch": 4.2, |
| "grad_norm": 1.275528073310852, |
| "learning_rate": 3.125434832425613e-06, |
| "loss": 0.2518, |
| "step": 861 |
| }, |
| { |
| "epoch": 4.204878048780488, |
| "grad_norm": 0.7928908467292786, |
| "learning_rate": 3.121724717912138e-06, |
| "loss": 0.1532, |
| "step": 862 |
| }, |
| { |
| "epoch": 4.209756097560976, |
| "grad_norm": 1.1028555631637573, |
| "learning_rate": 3.118013143273542e-06, |
| "loss": 0.3368, |
| "step": 863 |
| }, |
| { |
| "epoch": 4.214634146341464, |
| "grad_norm": 1.8106095790863037, |
| "learning_rate": 3.1143001172264893e-06, |
| "loss": 0.2334, |
| "step": 864 |
| }, |
| { |
| "epoch": 4.219512195121951, |
| "grad_norm": 1.2573802471160889, |
| "learning_rate": 3.1105856484910474e-06, |
| "loss": 0.3072, |
| "step": 865 |
| }, |
| { |
| "epoch": 4.224390243902439, |
| "grad_norm": 0.870959460735321, |
| "learning_rate": 3.1068697457906736e-06, |
| "loss": 0.2215, |
| "step": 866 |
| }, |
| { |
| "epoch": 4.229268292682927, |
| "grad_norm": 1.066684603691101, |
| "learning_rate": 3.1031524178521938e-06, |
| "loss": 0.4311, |
| "step": 867 |
| }, |
| { |
| "epoch": 4.234146341463415, |
| "grad_norm": 1.2743711471557617, |
| "learning_rate": 3.0994336734057804e-06, |
| "loss": 0.1342, |
| "step": 868 |
| }, |
| { |
| "epoch": 4.239024390243903, |
| "grad_norm": 0.8251144289970398, |
| "learning_rate": 3.0957135211849315e-06, |
| "loss": 0.2224, |
| "step": 869 |
| }, |
| { |
| "epoch": 4.2439024390243905, |
| "grad_norm": 1.1386126279830933, |
| "learning_rate": 3.0919919699264535e-06, |
| "loss": 0.2674, |
| "step": 870 |
| }, |
| { |
| "epoch": 4.248780487804878, |
| "grad_norm": 1.1335779428482056, |
| "learning_rate": 3.0882690283704355e-06, |
| "loss": 0.8506, |
| "step": 871 |
| }, |
| { |
| "epoch": 4.253658536585366, |
| "grad_norm": 0.9600904583930969, |
| "learning_rate": 3.084544705260234e-06, |
| "loss": 0.2926, |
| "step": 872 |
| }, |
| { |
| "epoch": 4.258536585365854, |
| "grad_norm": 0.896686851978302, |
| "learning_rate": 3.080819009342451e-06, |
| "loss": 0.1941, |
| "step": 873 |
| }, |
| { |
| "epoch": 4.263414634146342, |
| "grad_norm": 0.8546966314315796, |
| "learning_rate": 3.077091949366908e-06, |
| "loss": 0.2209, |
| "step": 874 |
| }, |
| { |
| "epoch": 4.2682926829268295, |
| "grad_norm": 0.9326870441436768, |
| "learning_rate": 3.073363534086636e-06, |
| "loss": 0.2608, |
| "step": 875 |
| }, |
| { |
| "epoch": 4.273170731707317, |
| "grad_norm": 0.8635675311088562, |
| "learning_rate": 3.0696337722578444e-06, |
| "loss": 0.1911, |
| "step": 876 |
| }, |
| { |
| "epoch": 4.278048780487805, |
| "grad_norm": 1.2039563655853271, |
| "learning_rate": 3.0659026726399072e-06, |
| "loss": 0.3195, |
| "step": 877 |
| }, |
| { |
| "epoch": 4.282926829268293, |
| "grad_norm": 1.041063666343689, |
| "learning_rate": 3.0621702439953393e-06, |
| "loss": 0.4438, |
| "step": 878 |
| }, |
| { |
| "epoch": 4.287804878048781, |
| "grad_norm": 0.9710636734962463, |
| "learning_rate": 3.0584364950897768e-06, |
| "loss": 0.1192, |
| "step": 879 |
| }, |
| { |
| "epoch": 4.2926829268292686, |
| "grad_norm": 1.001789927482605, |
| "learning_rate": 3.0547014346919574e-06, |
| "loss": 0.2768, |
| "step": 880 |
| }, |
| { |
| "epoch": 4.297560975609756, |
| "grad_norm": 1.0699266195297241, |
| "learning_rate": 3.0509650715736977e-06, |
| "loss": 0.2787, |
| "step": 881 |
| }, |
| { |
| "epoch": 4.302439024390244, |
| "grad_norm": 0.9404529929161072, |
| "learning_rate": 3.0472274145098744e-06, |
| "loss": 0.4201, |
| "step": 882 |
| }, |
| { |
| "epoch": 4.307317073170732, |
| "grad_norm": 1.296057105064392, |
| "learning_rate": 3.0434884722784026e-06, |
| "loss": 0.3718, |
| "step": 883 |
| }, |
| { |
| "epoch": 4.31219512195122, |
| "grad_norm": 0.9642940163612366, |
| "learning_rate": 3.0397482536602168e-06, |
| "loss": 0.2024, |
| "step": 884 |
| }, |
| { |
| "epoch": 4.317073170731708, |
| "grad_norm": 1.0013500452041626, |
| "learning_rate": 3.0360067674392475e-06, |
| "loss": 0.2857, |
| "step": 885 |
| }, |
| { |
| "epoch": 4.321951219512195, |
| "grad_norm": 1.1276105642318726, |
| "learning_rate": 3.0322640224024024e-06, |
| "loss": 0.2326, |
| "step": 886 |
| }, |
| { |
| "epoch": 4.326829268292683, |
| "grad_norm": 1.1642167568206787, |
| "learning_rate": 3.0285200273395478e-06, |
| "loss": 0.3897, |
| "step": 887 |
| }, |
| { |
| "epoch": 4.331707317073171, |
| "grad_norm": 1.1692358255386353, |
| "learning_rate": 3.024774791043481e-06, |
| "loss": 0.4172, |
| "step": 888 |
| }, |
| { |
| "epoch": 4.336585365853659, |
| "grad_norm": 1.2507195472717285, |
| "learning_rate": 3.021028322309921e-06, |
| "loss": 0.4637, |
| "step": 889 |
| }, |
| { |
| "epoch": 4.341463414634147, |
| "grad_norm": 1.2589614391326904, |
| "learning_rate": 3.0172806299374734e-06, |
| "loss": 0.4284, |
| "step": 890 |
| }, |
| { |
| "epoch": 4.3463414634146345, |
| "grad_norm": 1.159298300743103, |
| "learning_rate": 3.0135317227276247e-06, |
| "loss": 0.3921, |
| "step": 891 |
| }, |
| { |
| "epoch": 4.351219512195122, |
| "grad_norm": 1.0309828519821167, |
| "learning_rate": 3.0097816094847104e-06, |
| "loss": 0.3799, |
| "step": 892 |
| }, |
| { |
| "epoch": 4.35609756097561, |
| "grad_norm": 0.9921269416809082, |
| "learning_rate": 3.0060302990158984e-06, |
| "loss": 0.2218, |
| "step": 893 |
| }, |
| { |
| "epoch": 4.360975609756098, |
| "grad_norm": 0.9881783127784729, |
| "learning_rate": 3.002277800131171e-06, |
| "loss": 0.1882, |
| "step": 894 |
| }, |
| { |
| "epoch": 4.365853658536586, |
| "grad_norm": 1.0279638767242432, |
| "learning_rate": 2.998524121643298e-06, |
| "loss": 0.2855, |
| "step": 895 |
| }, |
| { |
| "epoch": 4.3707317073170735, |
| "grad_norm": 1.2783422470092773, |
| "learning_rate": 2.994769272367822e-06, |
| "loss": 0.3899, |
| "step": 896 |
| }, |
| { |
| "epoch": 4.375609756097561, |
| "grad_norm": 0.9480025172233582, |
| "learning_rate": 2.991013261123035e-06, |
| "loss": 0.2136, |
| "step": 897 |
| }, |
| { |
| "epoch": 4.380487804878049, |
| "grad_norm": 1.1166763305664062, |
| "learning_rate": 2.9872560967299554e-06, |
| "loss": 0.2992, |
| "step": 898 |
| }, |
| { |
| "epoch": 4.385365853658537, |
| "grad_norm": 1.1016902923583984, |
| "learning_rate": 2.9834977880123132e-06, |
| "loss": 0.1948, |
| "step": 899 |
| }, |
| { |
| "epoch": 4.390243902439025, |
| "grad_norm": 1.0177924633026123, |
| "learning_rate": 2.9797383437965243e-06, |
| "loss": 0.2447, |
| "step": 900 |
| }, |
| { |
| "epoch": 4.3951219512195125, |
| "grad_norm": 0.910971999168396, |
| "learning_rate": 2.975977772911671e-06, |
| "loss": 0.2198, |
| "step": 901 |
| }, |
| { |
| "epoch": 4.4, |
| "grad_norm": 1.0623188018798828, |
| "learning_rate": 2.972216084189482e-06, |
| "loss": 0.2353, |
| "step": 902 |
| }, |
| { |
| "epoch": 4.404878048780488, |
| "grad_norm": 0.8617135286331177, |
| "learning_rate": 2.9684532864643123e-06, |
| "loss": 0.1998, |
| "step": 903 |
| }, |
| { |
| "epoch": 4.409756097560976, |
| "grad_norm": 1.0422427654266357, |
| "learning_rate": 2.964689388573118e-06, |
| "loss": 0.1688, |
| "step": 904 |
| }, |
| { |
| "epoch": 4.414634146341464, |
| "grad_norm": 1.0696886777877808, |
| "learning_rate": 2.9609243993554434e-06, |
| "loss": 0.4587, |
| "step": 905 |
| }, |
| { |
| "epoch": 4.419512195121952, |
| "grad_norm": 1.1068525314331055, |
| "learning_rate": 2.9571583276533923e-06, |
| "loss": 0.2282, |
| "step": 906 |
| }, |
| { |
| "epoch": 4.424390243902439, |
| "grad_norm": 1.363254189491272, |
| "learning_rate": 2.9533911823116124e-06, |
| "loss": 0.7017, |
| "step": 907 |
| }, |
| { |
| "epoch": 4.429268292682927, |
| "grad_norm": 1.504699945449829, |
| "learning_rate": 2.9496229721772734e-06, |
| "loss": 0.4154, |
| "step": 908 |
| }, |
| { |
| "epoch": 4.434146341463415, |
| "grad_norm": 1.0644068717956543, |
| "learning_rate": 2.9458537061000435e-06, |
| "loss": 0.3262, |
| "step": 909 |
| }, |
| { |
| "epoch": 4.439024390243903, |
| "grad_norm": 1.1175642013549805, |
| "learning_rate": 2.9420833929320726e-06, |
| "loss": 0.3613, |
| "step": 910 |
| }, |
| { |
| "epoch": 4.443902439024391, |
| "grad_norm": 1.418470859527588, |
| "learning_rate": 2.93831204152797e-06, |
| "loss": 0.5274, |
| "step": 911 |
| }, |
| { |
| "epoch": 4.4487804878048784, |
| "grad_norm": 0.8108303546905518, |
| "learning_rate": 2.9345396607447807e-06, |
| "loss": 0.1267, |
| "step": 912 |
| }, |
| { |
| "epoch": 4.453658536585366, |
| "grad_norm": 1.049116611480713, |
| "learning_rate": 2.9307662594419704e-06, |
| "loss": 0.2678, |
| "step": 913 |
| }, |
| { |
| "epoch": 4.458536585365854, |
| "grad_norm": 1.158342719078064, |
| "learning_rate": 2.9269918464814e-06, |
| "loss": 0.3162, |
| "step": 914 |
| }, |
| { |
| "epoch": 4.463414634146342, |
| "grad_norm": 1.4344679117202759, |
| "learning_rate": 2.923216430727306e-06, |
| "loss": 0.4786, |
| "step": 915 |
| }, |
| { |
| "epoch": 4.46829268292683, |
| "grad_norm": 1.156111478805542, |
| "learning_rate": 2.9194400210462808e-06, |
| "loss": 0.4763, |
| "step": 916 |
| }, |
| { |
| "epoch": 4.473170731707317, |
| "grad_norm": 1.1826188564300537, |
| "learning_rate": 2.91566262630725e-06, |
| "loss": 0.5285, |
| "step": 917 |
| }, |
| { |
| "epoch": 4.478048780487805, |
| "grad_norm": 1.2508891820907593, |
| "learning_rate": 2.9118842553814526e-06, |
| "loss": 0.2565, |
| "step": 918 |
| }, |
| { |
| "epoch": 4.482926829268292, |
| "grad_norm": 0.9462976455688477, |
| "learning_rate": 2.9081049171424223e-06, |
| "loss": 0.2208, |
| "step": 919 |
| }, |
| { |
| "epoch": 4.487804878048781, |
| "grad_norm": 0.9479988813400269, |
| "learning_rate": 2.9043246204659624e-06, |
| "loss": 0.2, |
| "step": 920 |
| }, |
| { |
| "epoch": 4.492682926829268, |
| "grad_norm": 1.0272135734558105, |
| "learning_rate": 2.9005433742301274e-06, |
| "loss": 0.4585, |
| "step": 921 |
| }, |
| { |
| "epoch": 4.4975609756097565, |
| "grad_norm": 1.7547694444656372, |
| "learning_rate": 2.8967611873152037e-06, |
| "loss": 0.3949, |
| "step": 922 |
| }, |
| { |
| "epoch": 4.5024390243902435, |
| "grad_norm": 1.0447901487350464, |
| "learning_rate": 2.892978068603683e-06, |
| "loss": 0.1928, |
| "step": 923 |
| }, |
| { |
| "epoch": 4.507317073170732, |
| "grad_norm": 1.0131272077560425, |
| "learning_rate": 2.889194026980249e-06, |
| "loss": 0.3458, |
| "step": 924 |
| }, |
| { |
| "epoch": 4.512195121951219, |
| "grad_norm": 1.1343997716903687, |
| "learning_rate": 2.8854090713317514e-06, |
| "loss": 0.1519, |
| "step": 925 |
| }, |
| { |
| "epoch": 4.517073170731708, |
| "grad_norm": 1.2432359457015991, |
| "learning_rate": 2.8816232105471864e-06, |
| "loss": 0.1789, |
| "step": 926 |
| }, |
| { |
| "epoch": 4.521951219512195, |
| "grad_norm": 1.2051594257354736, |
| "learning_rate": 2.877836453517677e-06, |
| "loss": 0.518, |
| "step": 927 |
| }, |
| { |
| "epoch": 4.526829268292683, |
| "grad_norm": 1.1518090963363647, |
| "learning_rate": 2.8740488091364492e-06, |
| "loss": 0.4737, |
| "step": 928 |
| }, |
| { |
| "epoch": 4.53170731707317, |
| "grad_norm": 1.456160068511963, |
| "learning_rate": 2.870260286298814e-06, |
| "loss": 0.6425, |
| "step": 929 |
| }, |
| { |
| "epoch": 4.536585365853659, |
| "grad_norm": 1.2971833944320679, |
| "learning_rate": 2.866470893902147e-06, |
| "loss": 0.3322, |
| "step": 930 |
| }, |
| { |
| "epoch": 4.541463414634146, |
| "grad_norm": 1.0898277759552002, |
| "learning_rate": 2.8626806408458626e-06, |
| "loss": 0.258, |
| "step": 931 |
| }, |
| { |
| "epoch": 4.546341463414635, |
| "grad_norm": 0.9179545044898987, |
| "learning_rate": 2.8588895360313983e-06, |
| "loss": 0.2085, |
| "step": 932 |
| }, |
| { |
| "epoch": 4.5512195121951216, |
| "grad_norm": 1.0029069185256958, |
| "learning_rate": 2.8550975883621935e-06, |
| "loss": 0.3558, |
| "step": 933 |
| }, |
| { |
| "epoch": 4.55609756097561, |
| "grad_norm": 1.0553942918777466, |
| "learning_rate": 2.8513048067436644e-06, |
| "loss": 0.3892, |
| "step": 934 |
| }, |
| { |
| "epoch": 4.560975609756097, |
| "grad_norm": 1.0313464403152466, |
| "learning_rate": 2.847511200083187e-06, |
| "loss": 0.2626, |
| "step": 935 |
| }, |
| { |
| "epoch": 4.565853658536585, |
| "grad_norm": 0.9714272618293762, |
| "learning_rate": 2.843716777290074e-06, |
| "loss": 0.2896, |
| "step": 936 |
| }, |
| { |
| "epoch": 4.570731707317073, |
| "grad_norm": 1.239687204360962, |
| "learning_rate": 2.839921547275556e-06, |
| "loss": 0.2688, |
| "step": 937 |
| }, |
| { |
| "epoch": 4.575609756097561, |
| "grad_norm": 1.451963186264038, |
| "learning_rate": 2.836125518952759e-06, |
| "loss": 0.2953, |
| "step": 938 |
| }, |
| { |
| "epoch": 4.580487804878048, |
| "grad_norm": 1.0113798379898071, |
| "learning_rate": 2.8323287012366845e-06, |
| "loss": 0.4247, |
| "step": 939 |
| }, |
| { |
| "epoch": 4.585365853658536, |
| "grad_norm": 1.1036227941513062, |
| "learning_rate": 2.828531103044186e-06, |
| "loss": 0.2388, |
| "step": 940 |
| }, |
| { |
| "epoch": 4.590243902439024, |
| "grad_norm": 1.0381038188934326, |
| "learning_rate": 2.8247327332939512e-06, |
| "loss": 0.3913, |
| "step": 941 |
| }, |
| { |
| "epoch": 4.595121951219512, |
| "grad_norm": 1.0053890943527222, |
| "learning_rate": 2.82093360090648e-06, |
| "loss": 0.4356, |
| "step": 942 |
| }, |
| { |
| "epoch": 4.6, |
| "grad_norm": 1.2142903804779053, |
| "learning_rate": 2.8171337148040636e-06, |
| "loss": 0.4161, |
| "step": 943 |
| }, |
| { |
| "epoch": 4.6048780487804875, |
| "grad_norm": 1.8541486263275146, |
| "learning_rate": 2.813333083910761e-06, |
| "loss": 0.2803, |
| "step": 944 |
| }, |
| { |
| "epoch": 4.609756097560975, |
| "grad_norm": 0.9036626815795898, |
| "learning_rate": 2.8095317171523835e-06, |
| "loss": 0.1578, |
| "step": 945 |
| }, |
| { |
| "epoch": 4.614634146341463, |
| "grad_norm": 1.1875560283660889, |
| "learning_rate": 2.805729623456469e-06, |
| "loss": 0.4107, |
| "step": 946 |
| }, |
| { |
| "epoch": 4.619512195121951, |
| "grad_norm": 1.259789228439331, |
| "learning_rate": 2.8019268117522624e-06, |
| "loss": 0.5262, |
| "step": 947 |
| }, |
| { |
| "epoch": 4.624390243902439, |
| "grad_norm": 1.116606593132019, |
| "learning_rate": 2.798123290970695e-06, |
| "loss": 0.3334, |
| "step": 948 |
| }, |
| { |
| "epoch": 4.6292682926829265, |
| "grad_norm": 1.0191316604614258, |
| "learning_rate": 2.794319070044365e-06, |
| "loss": 0.395, |
| "step": 949 |
| }, |
| { |
| "epoch": 4.634146341463414, |
| "grad_norm": 1.1099485158920288, |
| "learning_rate": 2.790514157907512e-06, |
| "loss": 0.2684, |
| "step": 950 |
| }, |
| { |
| "epoch": 4.639024390243902, |
| "grad_norm": 0.9431113004684448, |
| "learning_rate": 2.786708563496002e-06, |
| "loss": 0.2696, |
| "step": 951 |
| }, |
| { |
| "epoch": 4.64390243902439, |
| "grad_norm": 1.19779372215271, |
| "learning_rate": 2.782902295747299e-06, |
| "loss": 0.5113, |
| "step": 952 |
| }, |
| { |
| "epoch": 4.648780487804878, |
| "grad_norm": 1.1742883920669556, |
| "learning_rate": 2.7790953636004536e-06, |
| "loss": 0.6055, |
| "step": 953 |
| }, |
| { |
| "epoch": 4.6536585365853655, |
| "grad_norm": 1.2542123794555664, |
| "learning_rate": 2.775287775996074e-06, |
| "loss": 0.5556, |
| "step": 954 |
| }, |
| { |
| "epoch": 4.658536585365853, |
| "grad_norm": 1.0845754146575928, |
| "learning_rate": 2.7714795418763067e-06, |
| "loss": 0.2445, |
| "step": 955 |
| }, |
| { |
| "epoch": 4.663414634146341, |
| "grad_norm": 1.3217185735702515, |
| "learning_rate": 2.7676706701848187e-06, |
| "loss": 0.4524, |
| "step": 956 |
| }, |
| { |
| "epoch": 4.668292682926829, |
| "grad_norm": 0.9640854597091675, |
| "learning_rate": 2.763861169866774e-06, |
| "loss": 0.4953, |
| "step": 957 |
| }, |
| { |
| "epoch": 4.673170731707317, |
| "grad_norm": 0.864771842956543, |
| "learning_rate": 2.7600510498688104e-06, |
| "loss": 0.1544, |
| "step": 958 |
| }, |
| { |
| "epoch": 4.678048780487805, |
| "grad_norm": 2.0469486713409424, |
| "learning_rate": 2.7562403191390246e-06, |
| "loss": 0.2998, |
| "step": 959 |
| }, |
| { |
| "epoch": 4.682926829268292, |
| "grad_norm": 1.0981053113937378, |
| "learning_rate": 2.7524289866269467e-06, |
| "loss": 0.3188, |
| "step": 960 |
| }, |
| { |
| "epoch": 4.68780487804878, |
| "grad_norm": 0.959457278251648, |
| "learning_rate": 2.748617061283518e-06, |
| "loss": 0.2297, |
| "step": 961 |
| }, |
| { |
| "epoch": 4.692682926829268, |
| "grad_norm": 1.2737903594970703, |
| "learning_rate": 2.744804552061074e-06, |
| "loss": 0.2563, |
| "step": 962 |
| }, |
| { |
| "epoch": 4.697560975609756, |
| "grad_norm": 1.0287935733795166, |
| "learning_rate": 2.740991467913321e-06, |
| "loss": 0.4164, |
| "step": 963 |
| }, |
| { |
| "epoch": 4.702439024390244, |
| "grad_norm": 1.0602225065231323, |
| "learning_rate": 2.737177817795315e-06, |
| "loss": 0.2201, |
| "step": 964 |
| }, |
| { |
| "epoch": 4.7073170731707314, |
| "grad_norm": 1.0084565877914429, |
| "learning_rate": 2.7333636106634414e-06, |
| "loss": 0.283, |
| "step": 965 |
| }, |
| { |
| "epoch": 4.712195121951219, |
| "grad_norm": 1.2561463117599487, |
| "learning_rate": 2.7295488554753957e-06, |
| "loss": 0.4592, |
| "step": 966 |
| }, |
| { |
| "epoch": 4.717073170731707, |
| "grad_norm": 1.0347182750701904, |
| "learning_rate": 2.725733561190157e-06, |
| "loss": 0.2642, |
| "step": 967 |
| }, |
| { |
| "epoch": 4.721951219512195, |
| "grad_norm": 1.312893033027649, |
| "learning_rate": 2.721917736767973e-06, |
| "loss": 0.335, |
| "step": 968 |
| }, |
| { |
| "epoch": 4.726829268292683, |
| "grad_norm": 1.0608656406402588, |
| "learning_rate": 2.7181013911703357e-06, |
| "loss": 0.2288, |
| "step": 969 |
| }, |
| { |
| "epoch": 4.7317073170731705, |
| "grad_norm": 1.0253140926361084, |
| "learning_rate": 2.714284533359961e-06, |
| "loss": 0.2827, |
| "step": 970 |
| }, |
| { |
| "epoch": 4.736585365853658, |
| "grad_norm": 0.9973790049552917, |
| "learning_rate": 2.710467172300768e-06, |
| "loss": 0.4102, |
| "step": 971 |
| }, |
| { |
| "epoch": 4.741463414634146, |
| "grad_norm": 1.2393323183059692, |
| "learning_rate": 2.706649316957857e-06, |
| "loss": 0.4489, |
| "step": 972 |
| }, |
| { |
| "epoch": 4.746341463414634, |
| "grad_norm": 1.1221544742584229, |
| "learning_rate": 2.7028309762974897e-06, |
| "loss": 0.1774, |
| "step": 973 |
| }, |
| { |
| "epoch": 4.751219512195122, |
| "grad_norm": 1.07940673828125, |
| "learning_rate": 2.699012159287069e-06, |
| "loss": 0.3329, |
| "step": 974 |
| }, |
| { |
| "epoch": 4.7560975609756095, |
| "grad_norm": 1.027277946472168, |
| "learning_rate": 2.6951928748951125e-06, |
| "loss": 0.2044, |
| "step": 975 |
| }, |
| { |
| "epoch": 4.760975609756097, |
| "grad_norm": 1.0281989574432373, |
| "learning_rate": 2.69137313209124e-06, |
| "loss": 0.3622, |
| "step": 976 |
| }, |
| { |
| "epoch": 4.765853658536585, |
| "grad_norm": 1.1298357248306274, |
| "learning_rate": 2.687552939846145e-06, |
| "loss": 0.4628, |
| "step": 977 |
| }, |
| { |
| "epoch": 4.770731707317073, |
| "grad_norm": 1.0114378929138184, |
| "learning_rate": 2.6837323071315766e-06, |
| "loss": 0.1181, |
| "step": 978 |
| }, |
| { |
| "epoch": 4.775609756097561, |
| "grad_norm": 1.000329613685608, |
| "learning_rate": 2.679911242920321e-06, |
| "loss": 0.2879, |
| "step": 979 |
| }, |
| { |
| "epoch": 4.780487804878049, |
| "grad_norm": 1.3905553817749023, |
| "learning_rate": 2.6760897561861742e-06, |
| "loss": 0.6606, |
| "step": 980 |
| }, |
| { |
| "epoch": 4.785365853658536, |
| "grad_norm": 0.9317846894264221, |
| "learning_rate": 2.672267855903927e-06, |
| "loss": 0.1451, |
| "step": 981 |
| }, |
| { |
| "epoch": 4.790243902439024, |
| "grad_norm": 0.9949944615364075, |
| "learning_rate": 2.6684455510493413e-06, |
| "loss": 0.3478, |
| "step": 982 |
| }, |
| { |
| "epoch": 4.795121951219512, |
| "grad_norm": 0.9357514381408691, |
| "learning_rate": 2.6646228505991267e-06, |
| "loss": 0.3766, |
| "step": 983 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 1.4344565868377686, |
| "learning_rate": 2.6607997635309246e-06, |
| "loss": 0.3122, |
| "step": 984 |
| }, |
| { |
| "epoch": 4.804878048780488, |
| "grad_norm": 1.2585278749465942, |
| "learning_rate": 2.6569762988232838e-06, |
| "loss": 0.356, |
| "step": 985 |
| }, |
| { |
| "epoch": 4.809756097560975, |
| "grad_norm": 1.0323916673660278, |
| "learning_rate": 2.653152465455639e-06, |
| "loss": 0.5199, |
| "step": 986 |
| }, |
| { |
| "epoch": 4.814634146341463, |
| "grad_norm": 1.1831841468811035, |
| "learning_rate": 2.6493282724082913e-06, |
| "loss": 0.5155, |
| "step": 987 |
| }, |
| { |
| "epoch": 4.819512195121951, |
| "grad_norm": 1.177829384803772, |
| "learning_rate": 2.6455037286623864e-06, |
| "loss": 0.263, |
| "step": 988 |
| }, |
| { |
| "epoch": 4.824390243902439, |
| "grad_norm": 0.7737200260162354, |
| "learning_rate": 2.6416788431998935e-06, |
| "loss": 0.2256, |
| "step": 989 |
| }, |
| { |
| "epoch": 4.829268292682927, |
| "grad_norm": 1.0419354438781738, |
| "learning_rate": 2.637853625003585e-06, |
| "loss": 0.4914, |
| "step": 990 |
| }, |
| { |
| "epoch": 4.8341463414634145, |
| "grad_norm": 1.000624418258667, |
| "learning_rate": 2.6340280830570142e-06, |
| "loss": 0.3594, |
| "step": 991 |
| }, |
| { |
| "epoch": 4.839024390243902, |
| "grad_norm": 1.0777456760406494, |
| "learning_rate": 2.6302022263444947e-06, |
| "loss": 0.263, |
| "step": 992 |
| }, |
| { |
| "epoch": 4.84390243902439, |
| "grad_norm": 1.333800196647644, |
| "learning_rate": 2.6263760638510793e-06, |
| "loss": 0.5697, |
| "step": 993 |
| }, |
| { |
| "epoch": 4.848780487804878, |
| "grad_norm": 1.0035558938980103, |
| "learning_rate": 2.6225496045625394e-06, |
| "loss": 0.2984, |
| "step": 994 |
| }, |
| { |
| "epoch": 4.853658536585366, |
| "grad_norm": 1.214937448501587, |
| "learning_rate": 2.6187228574653428e-06, |
| "loss": 0.3698, |
| "step": 995 |
| }, |
| { |
| "epoch": 4.8585365853658535, |
| "grad_norm": 0.9136359095573425, |
| "learning_rate": 2.614895831546633e-06, |
| "loss": 0.2602, |
| "step": 996 |
| }, |
| { |
| "epoch": 4.863414634146341, |
| "grad_norm": 1.2129359245300293, |
| "learning_rate": 2.6110685357942096e-06, |
| "loss": 0.4928, |
| "step": 997 |
| }, |
| { |
| "epoch": 4.868292682926829, |
| "grad_norm": 1.2663754224777222, |
| "learning_rate": 2.6072409791965048e-06, |
| "loss": 0.5422, |
| "step": 998 |
| }, |
| { |
| "epoch": 4.873170731707317, |
| "grad_norm": 1.4935377836227417, |
| "learning_rate": 2.6034131707425638e-06, |
| "loss": 0.7989, |
| "step": 999 |
| }, |
| { |
| "epoch": 4.878048780487805, |
| "grad_norm": 0.999937891960144, |
| "learning_rate": 2.5995851194220223e-06, |
| "loss": 0.3162, |
| "step": 1000 |
| }, |
| { |
| "epoch": 4.882926829268293, |
| "grad_norm": 1.0912432670593262, |
| "learning_rate": 2.595756834225089e-06, |
| "loss": 0.329, |
| "step": 1001 |
| }, |
| { |
| "epoch": 4.88780487804878, |
| "grad_norm": 1.195001244544983, |
| "learning_rate": 2.5919283241425188e-06, |
| "loss": 0.2323, |
| "step": 1002 |
| }, |
| { |
| "epoch": 4.892682926829268, |
| "grad_norm": 0.7830819487571716, |
| "learning_rate": 2.5880995981655965e-06, |
| "loss": 0.2372, |
| "step": 1003 |
| }, |
| { |
| "epoch": 4.897560975609756, |
| "grad_norm": 1.099342703819275, |
| "learning_rate": 2.584270665286113e-06, |
| "loss": 0.1749, |
| "step": 1004 |
| }, |
| { |
| "epoch": 4.902439024390244, |
| "grad_norm": 1.270918846130371, |
| "learning_rate": 2.580441534496346e-06, |
| "loss": 0.3421, |
| "step": 1005 |
| }, |
| { |
| "epoch": 4.907317073170732, |
| "grad_norm": 0.9069335460662842, |
| "learning_rate": 2.576612214789039e-06, |
| "loss": 0.2538, |
| "step": 1006 |
| }, |
| { |
| "epoch": 4.912195121951219, |
| "grad_norm": 1.377794623374939, |
| "learning_rate": 2.5727827151573747e-06, |
| "loss": 0.4034, |
| "step": 1007 |
| }, |
| { |
| "epoch": 4.917073170731707, |
| "grad_norm": 1.1331335306167603, |
| "learning_rate": 2.568953044594964e-06, |
| "loss": 0.2865, |
| "step": 1008 |
| }, |
| { |
| "epoch": 4.921951219512195, |
| "grad_norm": 1.0799779891967773, |
| "learning_rate": 2.5651232120958157e-06, |
| "loss": 0.3365, |
| "step": 1009 |
| }, |
| { |
| "epoch": 4.926829268292683, |
| "grad_norm": 1.1052212715148926, |
| "learning_rate": 2.56129322665432e-06, |
| "loss": 0.2303, |
| "step": 1010 |
| }, |
| { |
| "epoch": 4.931707317073171, |
| "grad_norm": 0.913105845451355, |
| "learning_rate": 2.5574630972652263e-06, |
| "loss": 0.1745, |
| "step": 1011 |
| }, |
| { |
| "epoch": 4.9365853658536585, |
| "grad_norm": 1.0110231637954712, |
| "learning_rate": 2.553632832923622e-06, |
| "loss": 0.3202, |
| "step": 1012 |
| }, |
| { |
| "epoch": 4.941463414634146, |
| "grad_norm": 1.1608706712722778, |
| "learning_rate": 2.5498024426249107e-06, |
| "loss": 0.5757, |
| "step": 1013 |
| }, |
| { |
| "epoch": 4.946341463414634, |
| "grad_norm": 0.9768334627151489, |
| "learning_rate": 2.545971935364794e-06, |
| "loss": 0.1663, |
| "step": 1014 |
| }, |
| { |
| "epoch": 4.951219512195122, |
| "grad_norm": 1.130802035331726, |
| "learning_rate": 2.5421413201392443e-06, |
| "loss": 0.3053, |
| "step": 1015 |
| }, |
| { |
| "epoch": 4.95609756097561, |
| "grad_norm": 1.5004934072494507, |
| "learning_rate": 2.538310605944491e-06, |
| "loss": 0.2943, |
| "step": 1016 |
| }, |
| { |
| "epoch": 4.9609756097560975, |
| "grad_norm": 0.9939762353897095, |
| "learning_rate": 2.534479801776996e-06, |
| "loss": 0.2933, |
| "step": 1017 |
| }, |
| { |
| "epoch": 4.965853658536585, |
| "grad_norm": 0.9881106019020081, |
| "learning_rate": 2.53064891663343e-06, |
| "loss": 0.3813, |
| "step": 1018 |
| }, |
| { |
| "epoch": 4.970731707317073, |
| "grad_norm": 0.9731859564781189, |
| "learning_rate": 2.526817959510655e-06, |
| "loss": 0.3543, |
| "step": 1019 |
| }, |
| { |
| "epoch": 4.975609756097561, |
| "grad_norm": 1.3951330184936523, |
| "learning_rate": 2.5229869394057038e-06, |
| "loss": 0.4254, |
| "step": 1020 |
| }, |
| { |
| "epoch": 4.980487804878049, |
| "grad_norm": 0.9658120274543762, |
| "learning_rate": 2.5191558653157542e-06, |
| "loss": 0.2484, |
| "step": 1021 |
| }, |
| { |
| "epoch": 4.985365853658537, |
| "grad_norm": 0.8997960090637207, |
| "learning_rate": 2.515324746238113e-06, |
| "loss": 0.1693, |
| "step": 1022 |
| }, |
| { |
| "epoch": 4.990243902439024, |
| "grad_norm": 0.9819786548614502, |
| "learning_rate": 2.511493591170191e-06, |
| "loss": 0.3336, |
| "step": 1023 |
| }, |
| { |
| "epoch": 4.995121951219512, |
| "grad_norm": 1.3949546813964844, |
| "learning_rate": 2.5076624091094846e-06, |
| "loss": 0.2995, |
| "step": 1024 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.9916773438453674, |
| "learning_rate": 2.503831209053554e-06, |
| "loss": 0.3243, |
| "step": 1025 |
| }, |
| { |
| "epoch": 5.004878048780488, |
| "grad_norm": 0.9542043805122375, |
| "learning_rate": 2.5e-06, |
| "loss": 0.1981, |
| "step": 1026 |
| }, |
| { |
| "epoch": 5.009756097560976, |
| "grad_norm": 1.2487229108810425, |
| "learning_rate": 2.4961687909464462e-06, |
| "loss": 0.3443, |
| "step": 1027 |
| }, |
| { |
| "epoch": 5.014634146341463, |
| "grad_norm": 1.5230785608291626, |
| "learning_rate": 2.492337590890516e-06, |
| "loss": 0.5778, |
| "step": 1028 |
| }, |
| { |
| "epoch": 5.019512195121951, |
| "grad_norm": 1.013128638267517, |
| "learning_rate": 2.4885064088298097e-06, |
| "loss": 0.4135, |
| "step": 1029 |
| }, |
| { |
| "epoch": 5.024390243902439, |
| "grad_norm": 0.944968044757843, |
| "learning_rate": 2.4846752537618875e-06, |
| "loss": 0.1586, |
| "step": 1030 |
| }, |
| { |
| "epoch": 5.029268292682927, |
| "grad_norm": 1.3642313480377197, |
| "learning_rate": 2.480844134684246e-06, |
| "loss": 0.407, |
| "step": 1031 |
| }, |
| { |
| "epoch": 5.034146341463415, |
| "grad_norm": 1.2477118968963623, |
| "learning_rate": 2.4770130605942966e-06, |
| "loss": 0.2376, |
| "step": 1032 |
| }, |
| { |
| "epoch": 5.0390243902439025, |
| "grad_norm": 1.0908373594284058, |
| "learning_rate": 2.4731820404893457e-06, |
| "loss": 0.2063, |
| "step": 1033 |
| }, |
| { |
| "epoch": 5.04390243902439, |
| "grad_norm": 1.423384666442871, |
| "learning_rate": 2.469351083366571e-06, |
| "loss": 0.255, |
| "step": 1034 |
| }, |
| { |
| "epoch": 5.048780487804878, |
| "grad_norm": 1.0463767051696777, |
| "learning_rate": 2.4655201982230044e-06, |
| "loss": 0.1164, |
| "step": 1035 |
| }, |
| { |
| "epoch": 5.053658536585366, |
| "grad_norm": 1.1025980710983276, |
| "learning_rate": 2.4616893940555094e-06, |
| "loss": 0.1895, |
| "step": 1036 |
| }, |
| { |
| "epoch": 5.058536585365854, |
| "grad_norm": 1.50823175907135, |
| "learning_rate": 2.457858679860757e-06, |
| "loss": 0.6323, |
| "step": 1037 |
| }, |
| { |
| "epoch": 5.0634146341463415, |
| "grad_norm": 1.2428679466247559, |
| "learning_rate": 2.4540280646352072e-06, |
| "loss": 0.303, |
| "step": 1038 |
| }, |
| { |
| "epoch": 5.068292682926829, |
| "grad_norm": 1.2156243324279785, |
| "learning_rate": 2.45019755737509e-06, |
| "loss": 0.2281, |
| "step": 1039 |
| }, |
| { |
| "epoch": 5.073170731707317, |
| "grad_norm": 1.9944231510162354, |
| "learning_rate": 2.4463671670763787e-06, |
| "loss": 0.3072, |
| "step": 1040 |
| }, |
| { |
| "epoch": 5.078048780487805, |
| "grad_norm": 1.210920810699463, |
| "learning_rate": 2.4425369027347746e-06, |
| "loss": 0.3228, |
| "step": 1041 |
| }, |
| { |
| "epoch": 5.082926829268293, |
| "grad_norm": 0.9305552244186401, |
| "learning_rate": 2.4387067733456804e-06, |
| "loss": 0.2808, |
| "step": 1042 |
| }, |
| { |
| "epoch": 5.087804878048781, |
| "grad_norm": 1.200379490852356, |
| "learning_rate": 2.4348767879041847e-06, |
| "loss": 0.2998, |
| "step": 1043 |
| }, |
| { |
| "epoch": 5.092682926829268, |
| "grad_norm": 1.2235733270645142, |
| "learning_rate": 2.4310469554050366e-06, |
| "loss": 0.2274, |
| "step": 1044 |
| }, |
| { |
| "epoch": 5.097560975609756, |
| "grad_norm": 0.9284917116165161, |
| "learning_rate": 2.4272172848426257e-06, |
| "loss": 0.3156, |
| "step": 1045 |
| }, |
| { |
| "epoch": 5.102439024390244, |
| "grad_norm": 1.1812609434127808, |
| "learning_rate": 2.423387785210962e-06, |
| "loss": 0.2529, |
| "step": 1046 |
| }, |
| { |
| "epoch": 5.107317073170732, |
| "grad_norm": 1.2920304536819458, |
| "learning_rate": 2.4195584655036544e-06, |
| "loss": 0.4466, |
| "step": 1047 |
| }, |
| { |
| "epoch": 5.11219512195122, |
| "grad_norm": 1.0265544652938843, |
| "learning_rate": 2.4157293347138877e-06, |
| "loss": 0.1946, |
| "step": 1048 |
| }, |
| { |
| "epoch": 5.117073170731707, |
| "grad_norm": 2.3565609455108643, |
| "learning_rate": 2.4119004018344043e-06, |
| "loss": 0.3177, |
| "step": 1049 |
| }, |
| { |
| "epoch": 5.121951219512195, |
| "grad_norm": 1.1925452947616577, |
| "learning_rate": 2.408071675857482e-06, |
| "loss": 0.2563, |
| "step": 1050 |
| }, |
| { |
| "epoch": 5.126829268292683, |
| "grad_norm": 1.3194458484649658, |
| "learning_rate": 2.404243165774912e-06, |
| "loss": 0.4448, |
| "step": 1051 |
| }, |
| { |
| "epoch": 5.131707317073171, |
| "grad_norm": 0.8365583419799805, |
| "learning_rate": 2.4004148805779785e-06, |
| "loss": 0.1032, |
| "step": 1052 |
| }, |
| { |
| "epoch": 5.136585365853659, |
| "grad_norm": 1.257830262184143, |
| "learning_rate": 2.3965868292574375e-06, |
| "loss": 0.2855, |
| "step": 1053 |
| }, |
| { |
| "epoch": 5.1414634146341465, |
| "grad_norm": 1.3698722124099731, |
| "learning_rate": 2.392759020803496e-06, |
| "loss": 0.2996, |
| "step": 1054 |
| }, |
| { |
| "epoch": 5.146341463414634, |
| "grad_norm": 1.1302275657653809, |
| "learning_rate": 2.3889314642057916e-06, |
| "loss": 0.2131, |
| "step": 1055 |
| }, |
| { |
| "epoch": 5.151219512195122, |
| "grad_norm": 1.1752660274505615, |
| "learning_rate": 2.3851041684533677e-06, |
| "loss": 0.4146, |
| "step": 1056 |
| }, |
| { |
| "epoch": 5.15609756097561, |
| "grad_norm": 0.8562495112419128, |
| "learning_rate": 2.381277142534658e-06, |
| "loss": 0.1059, |
| "step": 1057 |
| }, |
| { |
| "epoch": 5.160975609756098, |
| "grad_norm": 1.279471516609192, |
| "learning_rate": 2.3774503954374614e-06, |
| "loss": 0.1535, |
| "step": 1058 |
| }, |
| { |
| "epoch": 5.1658536585365855, |
| "grad_norm": 1.7923734188079834, |
| "learning_rate": 2.373623936148921e-06, |
| "loss": 0.3611, |
| "step": 1059 |
| }, |
| { |
| "epoch": 5.170731707317073, |
| "grad_norm": 1.5222898721694946, |
| "learning_rate": 2.369797773655506e-06, |
| "loss": 0.3535, |
| "step": 1060 |
| }, |
| { |
| "epoch": 5.175609756097561, |
| "grad_norm": 1.0636627674102783, |
| "learning_rate": 2.3659719169429866e-06, |
| "loss": 0.2679, |
| "step": 1061 |
| }, |
| { |
| "epoch": 5.180487804878049, |
| "grad_norm": 1.0380195379257202, |
| "learning_rate": 2.3621463749964153e-06, |
| "loss": 0.1407, |
| "step": 1062 |
| }, |
| { |
| "epoch": 5.185365853658537, |
| "grad_norm": 0.9535474181175232, |
| "learning_rate": 2.3583211568001073e-06, |
| "loss": 0.1263, |
| "step": 1063 |
| }, |
| { |
| "epoch": 5.190243902439025, |
| "grad_norm": 1.0084075927734375, |
| "learning_rate": 2.3544962713376144e-06, |
| "loss": 0.1556, |
| "step": 1064 |
| }, |
| { |
| "epoch": 5.195121951219512, |
| "grad_norm": 0.9850101470947266, |
| "learning_rate": 2.3506717275917095e-06, |
| "loss": 0.1819, |
| "step": 1065 |
| }, |
| { |
| "epoch": 5.2, |
| "grad_norm": 0.9447112083435059, |
| "learning_rate": 2.346847534544362e-06, |
| "loss": 0.1648, |
| "step": 1066 |
| }, |
| { |
| "epoch": 5.204878048780488, |
| "grad_norm": 1.0958189964294434, |
| "learning_rate": 2.3430237011767166e-06, |
| "loss": 0.2921, |
| "step": 1067 |
| }, |
| { |
| "epoch": 5.209756097560976, |
| "grad_norm": 0.9113654494285583, |
| "learning_rate": 2.3392002364690762e-06, |
| "loss": 0.1123, |
| "step": 1068 |
| }, |
| { |
| "epoch": 5.214634146341464, |
| "grad_norm": 1.5014578104019165, |
| "learning_rate": 2.335377149400874e-06, |
| "loss": 0.2756, |
| "step": 1069 |
| }, |
| { |
| "epoch": 5.219512195121951, |
| "grad_norm": 1.297300100326538, |
| "learning_rate": 2.3315544489506596e-06, |
| "loss": 0.3623, |
| "step": 1070 |
| }, |
| { |
| "epoch": 5.224390243902439, |
| "grad_norm": 1.3639144897460938, |
| "learning_rate": 2.3277321440960733e-06, |
| "loss": 0.2713, |
| "step": 1071 |
| }, |
| { |
| "epoch": 5.229268292682927, |
| "grad_norm": 0.932643473148346, |
| "learning_rate": 2.323910243813826e-06, |
| "loss": 0.1127, |
| "step": 1072 |
| }, |
| { |
| "epoch": 5.234146341463415, |
| "grad_norm": 1.1686640977859497, |
| "learning_rate": 2.3200887570796798e-06, |
| "loss": 0.4201, |
| "step": 1073 |
| }, |
| { |
| "epoch": 5.239024390243903, |
| "grad_norm": 1.193697214126587, |
| "learning_rate": 2.316267692868424e-06, |
| "loss": 0.2283, |
| "step": 1074 |
| }, |
| { |
| "epoch": 5.2439024390243905, |
| "grad_norm": 1.6448438167572021, |
| "learning_rate": 2.312447060153856e-06, |
| "loss": 0.2204, |
| "step": 1075 |
| }, |
| { |
| "epoch": 5.248780487804878, |
| "grad_norm": 1.203925371170044, |
| "learning_rate": 2.308626867908761e-06, |
| "loss": 0.2119, |
| "step": 1076 |
| }, |
| { |
| "epoch": 5.253658536585366, |
| "grad_norm": 1.2128314971923828, |
| "learning_rate": 2.3048071251048884e-06, |
| "loss": 0.3189, |
| "step": 1077 |
| }, |
| { |
| "epoch": 5.258536585365854, |
| "grad_norm": 1.2930793762207031, |
| "learning_rate": 2.300987840712932e-06, |
| "loss": 0.3482, |
| "step": 1078 |
| }, |
| { |
| "epoch": 5.263414634146342, |
| "grad_norm": 1.1673916578292847, |
| "learning_rate": 2.297169023702511e-06, |
| "loss": 0.4532, |
| "step": 1079 |
| }, |
| { |
| "epoch": 5.2682926829268295, |
| "grad_norm": 1.0989543199539185, |
| "learning_rate": 2.2933506830421436e-06, |
| "loss": 0.3365, |
| "step": 1080 |
| }, |
| { |
| "epoch": 5.273170731707317, |
| "grad_norm": 1.0421515703201294, |
| "learning_rate": 2.2895328276992325e-06, |
| "loss": 0.1207, |
| "step": 1081 |
| }, |
| { |
| "epoch": 5.278048780487805, |
| "grad_norm": 1.3659073114395142, |
| "learning_rate": 2.28571546664004e-06, |
| "loss": 0.3638, |
| "step": 1082 |
| }, |
| { |
| "epoch": 5.282926829268293, |
| "grad_norm": 0.975866436958313, |
| "learning_rate": 2.281898608829665e-06, |
| "loss": 0.1151, |
| "step": 1083 |
| }, |
| { |
| "epoch": 5.287804878048781, |
| "grad_norm": 0.9325199127197266, |
| "learning_rate": 2.2780822632320273e-06, |
| "loss": 0.1909, |
| "step": 1084 |
| }, |
| { |
| "epoch": 5.2926829268292686, |
| "grad_norm": 1.0251420736312866, |
| "learning_rate": 2.2742664388098435e-06, |
| "loss": 0.156, |
| "step": 1085 |
| }, |
| { |
| "epoch": 5.297560975609756, |
| "grad_norm": 0.8853452205657959, |
| "learning_rate": 2.270451144524605e-06, |
| "loss": 0.2, |
| "step": 1086 |
| }, |
| { |
| "epoch": 5.302439024390244, |
| "grad_norm": 1.1478848457336426, |
| "learning_rate": 2.266636389336559e-06, |
| "loss": 0.2425, |
| "step": 1087 |
| }, |
| { |
| "epoch": 5.307317073170732, |
| "grad_norm": 0.9325762987136841, |
| "learning_rate": 2.262822182204686e-06, |
| "loss": 0.1064, |
| "step": 1088 |
| }, |
| { |
| "epoch": 5.31219512195122, |
| "grad_norm": 1.0397100448608398, |
| "learning_rate": 2.2590085320866798e-06, |
| "loss": 0.1143, |
| "step": 1089 |
| }, |
| { |
| "epoch": 5.317073170731708, |
| "grad_norm": 0.9676294922828674, |
| "learning_rate": 2.255195447938927e-06, |
| "loss": 0.1028, |
| "step": 1090 |
| }, |
| { |
| "epoch": 5.321951219512195, |
| "grad_norm": 1.4252179861068726, |
| "learning_rate": 2.251382938716482e-06, |
| "loss": 0.2209, |
| "step": 1091 |
| }, |
| { |
| "epoch": 5.326829268292683, |
| "grad_norm": 1.0721783638000488, |
| "learning_rate": 2.2475710133730533e-06, |
| "loss": 0.1538, |
| "step": 1092 |
| }, |
| { |
| "epoch": 5.331707317073171, |
| "grad_norm": 0.9011938571929932, |
| "learning_rate": 2.243759680860975e-06, |
| "loss": 0.1627, |
| "step": 1093 |
| }, |
| { |
| "epoch": 5.336585365853659, |
| "grad_norm": 1.1225794553756714, |
| "learning_rate": 2.2399489501311896e-06, |
| "loss": 0.2376, |
| "step": 1094 |
| }, |
| { |
| "epoch": 5.341463414634147, |
| "grad_norm": 1.106941819190979, |
| "learning_rate": 2.2361388301332265e-06, |
| "loss": 0.4092, |
| "step": 1095 |
| }, |
| { |
| "epoch": 5.3463414634146345, |
| "grad_norm": 0.7896466255187988, |
| "learning_rate": 2.2323293298151817e-06, |
| "loss": 0.1019, |
| "step": 1096 |
| }, |
| { |
| "epoch": 5.351219512195122, |
| "grad_norm": 1.0374608039855957, |
| "learning_rate": 2.2285204581236937e-06, |
| "loss": 0.1808, |
| "step": 1097 |
| }, |
| { |
| "epoch": 5.35609756097561, |
| "grad_norm": 1.07212495803833, |
| "learning_rate": 2.2247122240039268e-06, |
| "loss": 0.1773, |
| "step": 1098 |
| }, |
| { |
| "epoch": 5.360975609756098, |
| "grad_norm": 0.9184301495552063, |
| "learning_rate": 2.2209046363995464e-06, |
| "loss": 0.0608, |
| "step": 1099 |
| }, |
| { |
| "epoch": 5.365853658536586, |
| "grad_norm": 1.1575993299484253, |
| "learning_rate": 2.217097704252701e-06, |
| "loss": 0.289, |
| "step": 1100 |
| }, |
| { |
| "epoch": 5.3707317073170735, |
| "grad_norm": 0.869531512260437, |
| "learning_rate": 2.2132914365039993e-06, |
| "loss": 0.1568, |
| "step": 1101 |
| }, |
| { |
| "epoch": 5.375609756097561, |
| "grad_norm": 1.1020842790603638, |
| "learning_rate": 2.2094858420924882e-06, |
| "loss": 0.0779, |
| "step": 1102 |
| }, |
| { |
| "epoch": 5.380487804878049, |
| "grad_norm": 1.4796274900436401, |
| "learning_rate": 2.205680929955635e-06, |
| "loss": 0.3573, |
| "step": 1103 |
| }, |
| { |
| "epoch": 5.385365853658537, |
| "grad_norm": 1.0098387002944946, |
| "learning_rate": 2.201876709029305e-06, |
| "loss": 0.2391, |
| "step": 1104 |
| }, |
| { |
| "epoch": 5.390243902439025, |
| "grad_norm": 1.0524678230285645, |
| "learning_rate": 2.198073188247738e-06, |
| "loss": 0.1946, |
| "step": 1105 |
| }, |
| { |
| "epoch": 5.3951219512195125, |
| "grad_norm": 0.8759175539016724, |
| "learning_rate": 2.1942703765435317e-06, |
| "loss": 0.0508, |
| "step": 1106 |
| }, |
| { |
| "epoch": 5.4, |
| "grad_norm": 1.378225564956665, |
| "learning_rate": 2.190468282847617e-06, |
| "loss": 0.4062, |
| "step": 1107 |
| }, |
| { |
| "epoch": 5.404878048780488, |
| "grad_norm": 1.21510910987854, |
| "learning_rate": 2.186666916089239e-06, |
| "loss": 0.539, |
| "step": 1108 |
| }, |
| { |
| "epoch": 5.409756097560976, |
| "grad_norm": 1.0114099979400635, |
| "learning_rate": 2.1828662851959377e-06, |
| "loss": 0.1577, |
| "step": 1109 |
| }, |
| { |
| "epoch": 5.414634146341464, |
| "grad_norm": 1.0858491659164429, |
| "learning_rate": 2.1790663990935203e-06, |
| "loss": 0.2497, |
| "step": 1110 |
| }, |
| { |
| "epoch": 5.419512195121952, |
| "grad_norm": 1.2288187742233276, |
| "learning_rate": 2.1752672667060488e-06, |
| "loss": 0.1893, |
| "step": 1111 |
| }, |
| { |
| "epoch": 5.424390243902439, |
| "grad_norm": 1.244839072227478, |
| "learning_rate": 2.1714688969558146e-06, |
| "loss": 0.1314, |
| "step": 1112 |
| }, |
| { |
| "epoch": 5.429268292682927, |
| "grad_norm": 1.169716715812683, |
| "learning_rate": 2.167671298763316e-06, |
| "loss": 0.4354, |
| "step": 1113 |
| }, |
| { |
| "epoch": 5.434146341463415, |
| "grad_norm": 1.0661805868148804, |
| "learning_rate": 2.1638744810472414e-06, |
| "loss": 0.3079, |
| "step": 1114 |
| }, |
| { |
| "epoch": 5.439024390243903, |
| "grad_norm": 0.9970805644989014, |
| "learning_rate": 2.1600784527244445e-06, |
| "loss": 0.1573, |
| "step": 1115 |
| }, |
| { |
| "epoch": 5.443902439024391, |
| "grad_norm": 1.458986759185791, |
| "learning_rate": 2.1562832227099266e-06, |
| "loss": 0.5079, |
| "step": 1116 |
| }, |
| { |
| "epoch": 5.4487804878048784, |
| "grad_norm": 1.2743233442306519, |
| "learning_rate": 2.152488799916814e-06, |
| "loss": 0.4, |
| "step": 1117 |
| }, |
| { |
| "epoch": 5.453658536585366, |
| "grad_norm": 1.822080135345459, |
| "learning_rate": 2.148695193256336e-06, |
| "loss": 0.5168, |
| "step": 1118 |
| }, |
| { |
| "epoch": 5.458536585365854, |
| "grad_norm": 0.9681462645530701, |
| "learning_rate": 2.1449024116378064e-06, |
| "loss": 0.2588, |
| "step": 1119 |
| }, |
| { |
| "epoch": 5.463414634146342, |
| "grad_norm": 1.0483061075210571, |
| "learning_rate": 2.1411104639686013e-06, |
| "loss": 0.1395, |
| "step": 1120 |
| }, |
| { |
| "epoch": 5.46829268292683, |
| "grad_norm": 1.3658936023712158, |
| "learning_rate": 2.137319359154138e-06, |
| "loss": 0.2823, |
| "step": 1121 |
| }, |
| { |
| "epoch": 5.473170731707317, |
| "grad_norm": 0.9887218475341797, |
| "learning_rate": 2.133529106097853e-06, |
| "loss": 0.1195, |
| "step": 1122 |
| }, |
| { |
| "epoch": 5.478048780487805, |
| "grad_norm": 1.1662182807922363, |
| "learning_rate": 2.1297397137011862e-06, |
| "loss": 0.2621, |
| "step": 1123 |
| }, |
| { |
| "epoch": 5.482926829268292, |
| "grad_norm": 1.2363375425338745, |
| "learning_rate": 2.125951190863551e-06, |
| "loss": 0.2179, |
| "step": 1124 |
| }, |
| { |
| "epoch": 5.487804878048781, |
| "grad_norm": 0.7923662662506104, |
| "learning_rate": 2.1221635464823237e-06, |
| "loss": 0.1064, |
| "step": 1125 |
| }, |
| { |
| "epoch": 5.492682926829268, |
| "grad_norm": 1.751186728477478, |
| "learning_rate": 2.1183767894528135e-06, |
| "loss": 0.5021, |
| "step": 1126 |
| }, |
| { |
| "epoch": 5.4975609756097565, |
| "grad_norm": 0.9320492148399353, |
| "learning_rate": 2.114590928668249e-06, |
| "loss": 0.1303, |
| "step": 1127 |
| }, |
| { |
| "epoch": 5.5024390243902435, |
| "grad_norm": 0.9334157109260559, |
| "learning_rate": 2.1108059730197517e-06, |
| "loss": 0.1716, |
| "step": 1128 |
| }, |
| { |
| "epoch": 5.507317073170732, |
| "grad_norm": 1.0579321384429932, |
| "learning_rate": 2.1070219313963173e-06, |
| "loss": 0.1431, |
| "step": 1129 |
| }, |
| { |
| "epoch": 5.512195121951219, |
| "grad_norm": 1.2380542755126953, |
| "learning_rate": 2.1032388126847967e-06, |
| "loss": 0.1667, |
| "step": 1130 |
| }, |
| { |
| "epoch": 5.517073170731708, |
| "grad_norm": 0.8622035384178162, |
| "learning_rate": 2.099456625769872e-06, |
| "loss": 0.0722, |
| "step": 1131 |
| }, |
| { |
| "epoch": 5.521951219512195, |
| "grad_norm": 1.125844955444336, |
| "learning_rate": 2.0956753795340376e-06, |
| "loss": 0.1991, |
| "step": 1132 |
| }, |
| { |
| "epoch": 5.526829268292683, |
| "grad_norm": 1.1052132844924927, |
| "learning_rate": 2.091895082857578e-06, |
| "loss": 0.357, |
| "step": 1133 |
| }, |
| { |
| "epoch": 5.53170731707317, |
| "grad_norm": 1.0329958200454712, |
| "learning_rate": 2.0881157446185474e-06, |
| "loss": 0.172, |
| "step": 1134 |
| }, |
| { |
| "epoch": 5.536585365853659, |
| "grad_norm": 1.0203710794448853, |
| "learning_rate": 2.0843373736927506e-06, |
| "loss": 0.1389, |
| "step": 1135 |
| }, |
| { |
| "epoch": 5.541463414634146, |
| "grad_norm": 0.7079288959503174, |
| "learning_rate": 2.08055997895372e-06, |
| "loss": 0.0853, |
| "step": 1136 |
| }, |
| { |
| "epoch": 5.546341463414635, |
| "grad_norm": 1.1098510026931763, |
| "learning_rate": 2.0767835692726944e-06, |
| "loss": 0.1492, |
| "step": 1137 |
| }, |
| { |
| "epoch": 5.5512195121951216, |
| "grad_norm": 1.1874253749847412, |
| "learning_rate": 2.0730081535186e-06, |
| "loss": 0.457, |
| "step": 1138 |
| }, |
| { |
| "epoch": 5.55609756097561, |
| "grad_norm": 1.0629135370254517, |
| "learning_rate": 2.06923374055803e-06, |
| "loss": 0.2759, |
| "step": 1139 |
| }, |
| { |
| "epoch": 5.560975609756097, |
| "grad_norm": 0.9080162048339844, |
| "learning_rate": 2.0654603392552193e-06, |
| "loss": 0.0708, |
| "step": 1140 |
| }, |
| { |
| "epoch": 5.565853658536585, |
| "grad_norm": 1.2526748180389404, |
| "learning_rate": 2.0616879584720305e-06, |
| "loss": 0.2727, |
| "step": 1141 |
| }, |
| { |
| "epoch": 5.570731707317073, |
| "grad_norm": 0.9930171966552734, |
| "learning_rate": 2.057916607067928e-06, |
| "loss": 0.1473, |
| "step": 1142 |
| }, |
| { |
| "epoch": 5.575609756097561, |
| "grad_norm": 0.9729718565940857, |
| "learning_rate": 2.054146293899957e-06, |
| "loss": 0.1325, |
| "step": 1143 |
| }, |
| { |
| "epoch": 5.580487804878048, |
| "grad_norm": 0.9204704761505127, |
| "learning_rate": 2.0503770278227274e-06, |
| "loss": 0.1246, |
| "step": 1144 |
| }, |
| { |
| "epoch": 5.585365853658536, |
| "grad_norm": 1.0536152124404907, |
| "learning_rate": 2.0466088176883876e-06, |
| "loss": 0.1, |
| "step": 1145 |
| }, |
| { |
| "epoch": 5.590243902439024, |
| "grad_norm": 1.211815595626831, |
| "learning_rate": 2.042841672346608e-06, |
| "loss": 0.2188, |
| "step": 1146 |
| }, |
| { |
| "epoch": 5.595121951219512, |
| "grad_norm": 1.4135674238204956, |
| "learning_rate": 2.039075600644557e-06, |
| "loss": 0.1599, |
| "step": 1147 |
| }, |
| { |
| "epoch": 5.6, |
| "grad_norm": 1.0430986881256104, |
| "learning_rate": 2.0353106114268824e-06, |
| "loss": 0.1691, |
| "step": 1148 |
| }, |
| { |
| "epoch": 5.6048780487804875, |
| "grad_norm": 0.8738663196563721, |
| "learning_rate": 2.031546713535688e-06, |
| "loss": 0.1175, |
| "step": 1149 |
| }, |
| { |
| "epoch": 5.609756097560975, |
| "grad_norm": 1.015977144241333, |
| "learning_rate": 2.027783915810518e-06, |
| "loss": 0.1638, |
| "step": 1150 |
| }, |
| { |
| "epoch": 5.614634146341463, |
| "grad_norm": 1.184048056602478, |
| "learning_rate": 2.024022227088329e-06, |
| "loss": 0.4066, |
| "step": 1151 |
| }, |
| { |
| "epoch": 5.619512195121951, |
| "grad_norm": 1.1206215620040894, |
| "learning_rate": 2.020261656203476e-06, |
| "loss": 0.4061, |
| "step": 1152 |
| }, |
| { |
| "epoch": 5.624390243902439, |
| "grad_norm": 1.150066614151001, |
| "learning_rate": 2.016502211987687e-06, |
| "loss": 0.2951, |
| "step": 1153 |
| }, |
| { |
| "epoch": 5.6292682926829265, |
| "grad_norm": 1.465074896812439, |
| "learning_rate": 2.0127439032700446e-06, |
| "loss": 0.2386, |
| "step": 1154 |
| }, |
| { |
| "epoch": 5.634146341463414, |
| "grad_norm": 1.1083053350448608, |
| "learning_rate": 2.0089867388769664e-06, |
| "loss": 0.2346, |
| "step": 1155 |
| }, |
| { |
| "epoch": 5.639024390243902, |
| "grad_norm": 0.8865154981613159, |
| "learning_rate": 2.0052307276321793e-06, |
| "loss": 0.1226, |
| "step": 1156 |
| }, |
| { |
| "epoch": 5.64390243902439, |
| "grad_norm": 1.0869574546813965, |
| "learning_rate": 2.001475878356703e-06, |
| "loss": 0.2506, |
| "step": 1157 |
| }, |
| { |
| "epoch": 5.648780487804878, |
| "grad_norm": 1.066562533378601, |
| "learning_rate": 1.99772219986883e-06, |
| "loss": 0.3113, |
| "step": 1158 |
| }, |
| { |
| "epoch": 5.6536585365853655, |
| "grad_norm": 1.489364504814148, |
| "learning_rate": 1.9939697009841024e-06, |
| "loss": 0.1878, |
| "step": 1159 |
| }, |
| { |
| "epoch": 5.658536585365853, |
| "grad_norm": 1.1426478624343872, |
| "learning_rate": 1.990218390515291e-06, |
| "loss": 0.2903, |
| "step": 1160 |
| }, |
| { |
| "epoch": 5.663414634146341, |
| "grad_norm": 1.1296617984771729, |
| "learning_rate": 1.9864682772723757e-06, |
| "loss": 0.2505, |
| "step": 1161 |
| }, |
| { |
| "epoch": 5.668292682926829, |
| "grad_norm": 0.7900092005729675, |
| "learning_rate": 1.9827193700625274e-06, |
| "loss": 0.1204, |
| "step": 1162 |
| }, |
| { |
| "epoch": 5.673170731707317, |
| "grad_norm": 1.2168926000595093, |
| "learning_rate": 1.978971677690081e-06, |
| "loss": 0.4264, |
| "step": 1163 |
| }, |
| { |
| "epoch": 5.678048780487805, |
| "grad_norm": 1.0507045984268188, |
| "learning_rate": 1.97522520895652e-06, |
| "loss": 0.0916, |
| "step": 1164 |
| }, |
| { |
| "epoch": 5.682926829268292, |
| "grad_norm": 1.5506707429885864, |
| "learning_rate": 1.971479972660454e-06, |
| "loss": 0.2858, |
| "step": 1165 |
| }, |
| { |
| "epoch": 5.68780487804878, |
| "grad_norm": 0.9136821031570435, |
| "learning_rate": 1.967735977597598e-06, |
| "loss": 0.0917, |
| "step": 1166 |
| }, |
| { |
| "epoch": 5.692682926829268, |
| "grad_norm": 0.7896243929862976, |
| "learning_rate": 1.9639932325607538e-06, |
| "loss": 0.1089, |
| "step": 1167 |
| }, |
| { |
| "epoch": 5.697560975609756, |
| "grad_norm": 1.396902084350586, |
| "learning_rate": 1.9602517463397845e-06, |
| "loss": 0.1683, |
| "step": 1168 |
| }, |
| { |
| "epoch": 5.702439024390244, |
| "grad_norm": 1.012985348701477, |
| "learning_rate": 1.9565115277215978e-06, |
| "loss": 0.2019, |
| "step": 1169 |
| }, |
| { |
| "epoch": 5.7073170731707314, |
| "grad_norm": 0.9816607236862183, |
| "learning_rate": 1.952772585490127e-06, |
| "loss": 0.163, |
| "step": 1170 |
| }, |
| { |
| "epoch": 5.712195121951219, |
| "grad_norm": 0.9821074604988098, |
| "learning_rate": 1.9490349284263036e-06, |
| "loss": 0.0972, |
| "step": 1171 |
| }, |
| { |
| "epoch": 5.717073170731707, |
| "grad_norm": 0.9475919604301453, |
| "learning_rate": 1.9452985653080443e-06, |
| "loss": 0.1485, |
| "step": 1172 |
| }, |
| { |
| "epoch": 5.721951219512195, |
| "grad_norm": 0.921571671962738, |
| "learning_rate": 1.9415635049102245e-06, |
| "loss": 0.1658, |
| "step": 1173 |
| }, |
| { |
| "epoch": 5.726829268292683, |
| "grad_norm": 1.297415852546692, |
| "learning_rate": 1.937829756004662e-06, |
| "loss": 0.4056, |
| "step": 1174 |
| }, |
| { |
| "epoch": 5.7317073170731705, |
| "grad_norm": 1.2068060636520386, |
| "learning_rate": 1.9340973273600944e-06, |
| "loss": 0.228, |
| "step": 1175 |
| }, |
| { |
| "epoch": 5.736585365853658, |
| "grad_norm": 1.0924711227416992, |
| "learning_rate": 1.930366227742157e-06, |
| "loss": 0.3379, |
| "step": 1176 |
| }, |
| { |
| "epoch": 5.741463414634146, |
| "grad_norm": 1.2374088764190674, |
| "learning_rate": 1.9266364659133653e-06, |
| "loss": 0.2372, |
| "step": 1177 |
| }, |
| { |
| "epoch": 5.746341463414634, |
| "grad_norm": 0.913871705532074, |
| "learning_rate": 1.922908050633093e-06, |
| "loss": 0.113, |
| "step": 1178 |
| }, |
| { |
| "epoch": 5.751219512195122, |
| "grad_norm": 1.136725902557373, |
| "learning_rate": 1.919180990657551e-06, |
| "loss": 0.2227, |
| "step": 1179 |
| }, |
| { |
| "epoch": 5.7560975609756095, |
| "grad_norm": 0.9916288256645203, |
| "learning_rate": 1.9154552947397668e-06, |
| "loss": 0.1983, |
| "step": 1180 |
| }, |
| { |
| "epoch": 5.760975609756097, |
| "grad_norm": 1.3981820344924927, |
| "learning_rate": 1.9117309716295658e-06, |
| "loss": 0.3343, |
| "step": 1181 |
| }, |
| { |
| "epoch": 5.765853658536585, |
| "grad_norm": 1.008737325668335, |
| "learning_rate": 1.9080080300735478e-06, |
| "loss": 0.1742, |
| "step": 1182 |
| }, |
| { |
| "epoch": 5.770731707317073, |
| "grad_norm": 1.3809062242507935, |
| "learning_rate": 1.9042864788150695e-06, |
| "loss": 0.3167, |
| "step": 1183 |
| }, |
| { |
| "epoch": 5.775609756097561, |
| "grad_norm": 0.7168273329734802, |
| "learning_rate": 1.9005663265942206e-06, |
| "loss": 0.0745, |
| "step": 1184 |
| }, |
| { |
| "epoch": 5.780487804878049, |
| "grad_norm": 0.9125983715057373, |
| "learning_rate": 1.8968475821478066e-06, |
| "loss": 0.1352, |
| "step": 1185 |
| }, |
| { |
| "epoch": 5.785365853658536, |
| "grad_norm": 1.2017624378204346, |
| "learning_rate": 1.8931302542093274e-06, |
| "loss": 0.2144, |
| "step": 1186 |
| }, |
| { |
| "epoch": 5.790243902439024, |
| "grad_norm": 0.902391254901886, |
| "learning_rate": 1.8894143515089539e-06, |
| "loss": 0.1071, |
| "step": 1187 |
| }, |
| { |
| "epoch": 5.795121951219512, |
| "grad_norm": 0.9119297862052917, |
| "learning_rate": 1.8856998827735118e-06, |
| "loss": 0.141, |
| "step": 1188 |
| }, |
| { |
| "epoch": 5.8, |
| "grad_norm": 1.308137059211731, |
| "learning_rate": 1.8819868567264588e-06, |
| "loss": 0.4523, |
| "step": 1189 |
| }, |
| { |
| "epoch": 5.804878048780488, |
| "grad_norm": 1.2294268608093262, |
| "learning_rate": 1.8782752820878636e-06, |
| "loss": 0.1783, |
| "step": 1190 |
| }, |
| { |
| "epoch": 5.809756097560975, |
| "grad_norm": 1.2038543224334717, |
| "learning_rate": 1.8745651675743876e-06, |
| "loss": 0.3132, |
| "step": 1191 |
| }, |
| { |
| "epoch": 5.814634146341463, |
| "grad_norm": 1.4764758348464966, |
| "learning_rate": 1.870856521899261e-06, |
| "loss": 0.2718, |
| "step": 1192 |
| }, |
| { |
| "epoch": 5.819512195121951, |
| "grad_norm": 0.9900771975517273, |
| "learning_rate": 1.867149353772267e-06, |
| "loss": 0.0695, |
| "step": 1193 |
| }, |
| { |
| "epoch": 5.824390243902439, |
| "grad_norm": 1.202021837234497, |
| "learning_rate": 1.863443671899717e-06, |
| "loss": 0.1107, |
| "step": 1194 |
| }, |
| { |
| "epoch": 5.829268292682927, |
| "grad_norm": 1.2088758945465088, |
| "learning_rate": 1.8597394849844319e-06, |
| "loss": 0.2713, |
| "step": 1195 |
| }, |
| { |
| "epoch": 5.8341463414634145, |
| "grad_norm": 0.9177408814430237, |
| "learning_rate": 1.8560368017257229e-06, |
| "loss": 0.1473, |
| "step": 1196 |
| }, |
| { |
| "epoch": 5.839024390243902, |
| "grad_norm": 1.2501375675201416, |
| "learning_rate": 1.8523356308193696e-06, |
| "loss": 0.4487, |
| "step": 1197 |
| }, |
| { |
| "epoch": 5.84390243902439, |
| "grad_norm": 1.118666410446167, |
| "learning_rate": 1.8486359809575977e-06, |
| "loss": 0.2547, |
| "step": 1198 |
| }, |
| { |
| "epoch": 5.848780487804878, |
| "grad_norm": 1.0372276306152344, |
| "learning_rate": 1.8449378608290638e-06, |
| "loss": 0.176, |
| "step": 1199 |
| }, |
| { |
| "epoch": 5.853658536585366, |
| "grad_norm": 1.4009885787963867, |
| "learning_rate": 1.8412412791188306e-06, |
| "loss": 0.2859, |
| "step": 1200 |
| }, |
| { |
| "epoch": 5.8585365853658535, |
| "grad_norm": 1.327064037322998, |
| "learning_rate": 1.8375462445083464e-06, |
| "loss": 0.282, |
| "step": 1201 |
| }, |
| { |
| "epoch": 5.863414634146341, |
| "grad_norm": 1.2236543893814087, |
| "learning_rate": 1.8338527656754285e-06, |
| "loss": 0.138, |
| "step": 1202 |
| }, |
| { |
| "epoch": 5.868292682926829, |
| "grad_norm": 1.3589495420455933, |
| "learning_rate": 1.830160851294239e-06, |
| "loss": 0.2383, |
| "step": 1203 |
| }, |
| { |
| "epoch": 5.873170731707317, |
| "grad_norm": 1.2593518495559692, |
| "learning_rate": 1.8264705100352662e-06, |
| "loss": 0.3166, |
| "step": 1204 |
| }, |
| { |
| "epoch": 5.878048780487805, |
| "grad_norm": 1.3545321226119995, |
| "learning_rate": 1.8227817505653045e-06, |
| "loss": 0.2123, |
| "step": 1205 |
| }, |
| { |
| "epoch": 5.882926829268293, |
| "grad_norm": 1.2606163024902344, |
| "learning_rate": 1.8190945815474323e-06, |
| "loss": 0.3234, |
| "step": 1206 |
| }, |
| { |
| "epoch": 5.88780487804878, |
| "grad_norm": 1.1344720125198364, |
| "learning_rate": 1.8154090116409934e-06, |
| "loss": 0.2473, |
| "step": 1207 |
| }, |
| { |
| "epoch": 5.892682926829268, |
| "grad_norm": 1.3704792261123657, |
| "learning_rate": 1.811725049501577e-06, |
| "loss": 0.2684, |
| "step": 1208 |
| }, |
| { |
| "epoch": 5.897560975609756, |
| "grad_norm": 1.3613022565841675, |
| "learning_rate": 1.8080427037809941e-06, |
| "loss": 0.4087, |
| "step": 1209 |
| }, |
| { |
| "epoch": 5.902439024390244, |
| "grad_norm": 1.125900387763977, |
| "learning_rate": 1.8043619831272623e-06, |
| "loss": 0.2208, |
| "step": 1210 |
| }, |
| { |
| "epoch": 5.907317073170732, |
| "grad_norm": 1.4554516077041626, |
| "learning_rate": 1.8006828961845807e-06, |
| "loss": 0.4104, |
| "step": 1211 |
| }, |
| { |
| "epoch": 5.912195121951219, |
| "grad_norm": 1.507084846496582, |
| "learning_rate": 1.7970054515933124e-06, |
| "loss": 0.4952, |
| "step": 1212 |
| }, |
| { |
| "epoch": 5.917073170731707, |
| "grad_norm": 1.6275372505187988, |
| "learning_rate": 1.793329657989964e-06, |
| "loss": 0.5594, |
| "step": 1213 |
| }, |
| { |
| "epoch": 5.921951219512195, |
| "grad_norm": 1.0565378665924072, |
| "learning_rate": 1.7896555240071627e-06, |
| "loss": 0.1108, |
| "step": 1214 |
| }, |
| { |
| "epoch": 5.926829268292683, |
| "grad_norm": 0.8863050937652588, |
| "learning_rate": 1.7859830582736406e-06, |
| "loss": 0.1669, |
| "step": 1215 |
| }, |
| { |
| "epoch": 5.931707317073171, |
| "grad_norm": 0.9882891774177551, |
| "learning_rate": 1.782312269414211e-06, |
| "loss": 0.2115, |
| "step": 1216 |
| }, |
| { |
| "epoch": 5.9365853658536585, |
| "grad_norm": 1.5111620426177979, |
| "learning_rate": 1.7786431660497474e-06, |
| "loss": 0.5776, |
| "step": 1217 |
| }, |
| { |
| "epoch": 5.941463414634146, |
| "grad_norm": 1.2369654178619385, |
| "learning_rate": 1.7749757567971678e-06, |
| "loss": 0.2491, |
| "step": 1218 |
| }, |
| { |
| "epoch": 5.946341463414634, |
| "grad_norm": 1.2180466651916504, |
| "learning_rate": 1.7713100502694091e-06, |
| "loss": 0.2229, |
| "step": 1219 |
| }, |
| { |
| "epoch": 5.951219512195122, |
| "grad_norm": 0.8823730945587158, |
| "learning_rate": 1.7676460550754104e-06, |
| "loss": 0.0664, |
| "step": 1220 |
| }, |
| { |
| "epoch": 5.95609756097561, |
| "grad_norm": 1.2817288637161255, |
| "learning_rate": 1.7639837798200923e-06, |
| "loss": 0.293, |
| "step": 1221 |
| }, |
| { |
| "epoch": 5.9609756097560975, |
| "grad_norm": 0.871067464351654, |
| "learning_rate": 1.7603232331043346e-06, |
| "loss": 0.1633, |
| "step": 1222 |
| }, |
| { |
| "epoch": 5.965853658536585, |
| "grad_norm": 1.4113450050354004, |
| "learning_rate": 1.7566644235249591e-06, |
| "loss": 0.6671, |
| "step": 1223 |
| }, |
| { |
| "epoch": 5.970731707317073, |
| "grad_norm": 1.4699230194091797, |
| "learning_rate": 1.7530073596747072e-06, |
| "loss": 0.1407, |
| "step": 1224 |
| }, |
| { |
| "epoch": 5.975609756097561, |
| "grad_norm": 0.8759057521820068, |
| "learning_rate": 1.74935205014222e-06, |
| "loss": 0.0762, |
| "step": 1225 |
| }, |
| { |
| "epoch": 5.980487804878049, |
| "grad_norm": 0.9844141006469727, |
| "learning_rate": 1.7456985035120194e-06, |
| "loss": 0.1108, |
| "step": 1226 |
| }, |
| { |
| "epoch": 5.985365853658537, |
| "grad_norm": 0.9976269006729126, |
| "learning_rate": 1.7420467283644877e-06, |
| "loss": 0.2162, |
| "step": 1227 |
| }, |
| { |
| "epoch": 5.990243902439024, |
| "grad_norm": 1.125545620918274, |
| "learning_rate": 1.738396733275844e-06, |
| "loss": 0.1998, |
| "step": 1228 |
| }, |
| { |
| "epoch": 5.995121951219512, |
| "grad_norm": 2.0705325603485107, |
| "learning_rate": 1.7347485268181309e-06, |
| "loss": 0.4108, |
| "step": 1229 |
| }, |
| { |
| "epoch": 6.0, |
| "grad_norm": 0.8654595017433167, |
| "learning_rate": 1.7311021175591868e-06, |
| "loss": 0.1534, |
| "step": 1230 |
| }, |
| { |
| "epoch": 6.004878048780488, |
| "grad_norm": 1.0550576448440552, |
| "learning_rate": 1.7274575140626318e-06, |
| "loss": 0.1718, |
| "step": 1231 |
| }, |
| { |
| "epoch": 6.009756097560976, |
| "grad_norm": 1.057306170463562, |
| "learning_rate": 1.7238147248878444e-06, |
| "loss": 0.1425, |
| "step": 1232 |
| }, |
| { |
| "epoch": 6.014634146341463, |
| "grad_norm": 0.925433337688446, |
| "learning_rate": 1.7201737585899415e-06, |
| "loss": 0.1146, |
| "step": 1233 |
| }, |
| { |
| "epoch": 6.019512195121951, |
| "grad_norm": 0.8379129767417908, |
| "learning_rate": 1.7165346237197594e-06, |
| "loss": 0.1446, |
| "step": 1234 |
| }, |
| { |
| "epoch": 6.024390243902439, |
| "grad_norm": 1.3918683528900146, |
| "learning_rate": 1.7128973288238344e-06, |
| "loss": 0.304, |
| "step": 1235 |
| }, |
| { |
| "epoch": 6.029268292682927, |
| "grad_norm": 0.9713860154151917, |
| "learning_rate": 1.709261882444379e-06, |
| "loss": 0.1576, |
| "step": 1236 |
| }, |
| { |
| "epoch": 6.034146341463415, |
| "grad_norm": 1.0337470769882202, |
| "learning_rate": 1.705628293119268e-06, |
| "loss": 0.1528, |
| "step": 1237 |
| }, |
| { |
| "epoch": 6.0390243902439025, |
| "grad_norm": 2.0387566089630127, |
| "learning_rate": 1.701996569382011e-06, |
| "loss": 0.5301, |
| "step": 1238 |
| }, |
| { |
| "epoch": 6.04390243902439, |
| "grad_norm": 1.2020694017410278, |
| "learning_rate": 1.6983667197617386e-06, |
| "loss": 0.2001, |
| "step": 1239 |
| }, |
| { |
| "epoch": 6.048780487804878, |
| "grad_norm": 1.1894810199737549, |
| "learning_rate": 1.6947387527831813e-06, |
| "loss": 0.068, |
| "step": 1240 |
| }, |
| { |
| "epoch": 6.053658536585366, |
| "grad_norm": 1.3444442749023438, |
| "learning_rate": 1.6911126769666442e-06, |
| "loss": 0.0774, |
| "step": 1241 |
| }, |
| { |
| "epoch": 6.058536585365854, |
| "grad_norm": 2.132267713546753, |
| "learning_rate": 1.6874885008279945e-06, |
| "loss": 0.3761, |
| "step": 1242 |
| }, |
| { |
| "epoch": 6.0634146341463415, |
| "grad_norm": 1.3125851154327393, |
| "learning_rate": 1.683866232878637e-06, |
| "loss": 0.0854, |
| "step": 1243 |
| }, |
| { |
| "epoch": 6.068292682926829, |
| "grad_norm": 1.0103996992111206, |
| "learning_rate": 1.6802458816254941e-06, |
| "loss": 0.0924, |
| "step": 1244 |
| }, |
| { |
| "epoch": 6.073170731707317, |
| "grad_norm": 0.8832839727401733, |
| "learning_rate": 1.676627455570988e-06, |
| "loss": 0.0909, |
| "step": 1245 |
| }, |
| { |
| "epoch": 6.078048780487805, |
| "grad_norm": 1.291757583618164, |
| "learning_rate": 1.6730109632130199e-06, |
| "loss": 0.2173, |
| "step": 1246 |
| }, |
| { |
| "epoch": 6.082926829268293, |
| "grad_norm": 0.8954195976257324, |
| "learning_rate": 1.6693964130449472e-06, |
| "loss": 0.0573, |
| "step": 1247 |
| }, |
| { |
| "epoch": 6.087804878048781, |
| "grad_norm": 0.8903191089630127, |
| "learning_rate": 1.6657838135555696e-06, |
| "loss": 0.0731, |
| "step": 1248 |
| }, |
| { |
| "epoch": 6.092682926829268, |
| "grad_norm": 1.7279207706451416, |
| "learning_rate": 1.6621731732291024e-06, |
| "loss": 0.0727, |
| "step": 1249 |
| }, |
| { |
| "epoch": 6.097560975609756, |
| "grad_norm": 1.3296290636062622, |
| "learning_rate": 1.6585645005451623e-06, |
| "loss": 0.1539, |
| "step": 1250 |
| }, |
| { |
| "epoch": 6.102439024390244, |
| "grad_norm": 0.9061964750289917, |
| "learning_rate": 1.6549578039787436e-06, |
| "loss": 0.1489, |
| "step": 1251 |
| }, |
| { |
| "epoch": 6.107317073170732, |
| "grad_norm": 0.7422329783439636, |
| "learning_rate": 1.6513530920001998e-06, |
| "loss": 0.0494, |
| "step": 1252 |
| }, |
| { |
| "epoch": 6.11219512195122, |
| "grad_norm": 0.8662224411964417, |
| "learning_rate": 1.6477503730752237e-06, |
| "loss": 0.1079, |
| "step": 1253 |
| }, |
| { |
| "epoch": 6.117073170731707, |
| "grad_norm": 2.585952043533325, |
| "learning_rate": 1.6441496556648278e-06, |
| "loss": 0.2317, |
| "step": 1254 |
| }, |
| { |
| "epoch": 6.121951219512195, |
| "grad_norm": 1.4481921195983887, |
| "learning_rate": 1.6405509482253234e-06, |
| "loss": 0.5054, |
| "step": 1255 |
| }, |
| { |
| "epoch": 6.126829268292683, |
| "grad_norm": 0.7095943093299866, |
| "learning_rate": 1.636954259208302e-06, |
| "loss": 0.0931, |
| "step": 1256 |
| }, |
| { |
| "epoch": 6.131707317073171, |
| "grad_norm": 1.049392580986023, |
| "learning_rate": 1.6333595970606143e-06, |
| "loss": 0.1507, |
| "step": 1257 |
| }, |
| { |
| "epoch": 6.136585365853659, |
| "grad_norm": 1.1488687992095947, |
| "learning_rate": 1.62976697022435e-06, |
| "loss": 0.2695, |
| "step": 1258 |
| }, |
| { |
| "epoch": 6.1414634146341465, |
| "grad_norm": 0.9358264803886414, |
| "learning_rate": 1.6261763871368225e-06, |
| "loss": 0.1395, |
| "step": 1259 |
| }, |
| { |
| "epoch": 6.146341463414634, |
| "grad_norm": 1.0877493619918823, |
| "learning_rate": 1.6225878562305403e-06, |
| "loss": 0.2465, |
| "step": 1260 |
| }, |
| { |
| "epoch": 6.151219512195122, |
| "grad_norm": 1.1635690927505493, |
| "learning_rate": 1.6190013859331958e-06, |
| "loss": 0.1768, |
| "step": 1261 |
| }, |
| { |
| "epoch": 6.15609756097561, |
| "grad_norm": 0.9073989391326904, |
| "learning_rate": 1.6154169846676415e-06, |
| "loss": 0.1429, |
| "step": 1262 |
| }, |
| { |
| "epoch": 6.160975609756098, |
| "grad_norm": 1.2221269607543945, |
| "learning_rate": 1.6118346608518698e-06, |
| "loss": 0.171, |
| "step": 1263 |
| }, |
| { |
| "epoch": 6.1658536585365855, |
| "grad_norm": 0.8346547484397888, |
| "learning_rate": 1.6082544228989958e-06, |
| "loss": 0.0594, |
| "step": 1264 |
| }, |
| { |
| "epoch": 6.170731707317073, |
| "grad_norm": 1.4623945951461792, |
| "learning_rate": 1.6046762792172336e-06, |
| "loss": 0.1183, |
| "step": 1265 |
| }, |
| { |
| "epoch": 6.175609756097561, |
| "grad_norm": 1.2494970560073853, |
| "learning_rate": 1.6011002382098806e-06, |
| "loss": 0.303, |
| "step": 1266 |
| }, |
| { |
| "epoch": 6.180487804878049, |
| "grad_norm": 0.8755192756652832, |
| "learning_rate": 1.5975263082752968e-06, |
| "loss": 0.0745, |
| "step": 1267 |
| }, |
| { |
| "epoch": 6.185365853658537, |
| "grad_norm": 0.8409280776977539, |
| "learning_rate": 1.5939544978068816e-06, |
| "loss": 0.0979, |
| "step": 1268 |
| }, |
| { |
| "epoch": 6.190243902439025, |
| "grad_norm": 1.0874266624450684, |
| "learning_rate": 1.590384815193059e-06, |
| "loss": 0.2561, |
| "step": 1269 |
| }, |
| { |
| "epoch": 6.195121951219512, |
| "grad_norm": 1.4487617015838623, |
| "learning_rate": 1.5868172688172559e-06, |
| "loss": 0.2705, |
| "step": 1270 |
| }, |
| { |
| "epoch": 6.2, |
| "grad_norm": 1.1141877174377441, |
| "learning_rate": 1.5832518670578802e-06, |
| "loss": 0.3218, |
| "step": 1271 |
| }, |
| { |
| "epoch": 6.204878048780488, |
| "grad_norm": 1.1559665203094482, |
| "learning_rate": 1.5796886182883053e-06, |
| "loss": 0.3038, |
| "step": 1272 |
| }, |
| { |
| "epoch": 6.209756097560976, |
| "grad_norm": 0.7617695927619934, |
| "learning_rate": 1.5761275308768476e-06, |
| "loss": 0.1078, |
| "step": 1273 |
| }, |
| { |
| "epoch": 6.214634146341464, |
| "grad_norm": 0.7606493234634399, |
| "learning_rate": 1.5725686131867462e-06, |
| "loss": 0.0702, |
| "step": 1274 |
| }, |
| { |
| "epoch": 6.219512195121951, |
| "grad_norm": 1.153968334197998, |
| "learning_rate": 1.569011873576147e-06, |
| "loss": 0.1627, |
| "step": 1275 |
| }, |
| { |
| "epoch": 6.224390243902439, |
| "grad_norm": 1.0581713914871216, |
| "learning_rate": 1.5654573203980782e-06, |
| "loss": 0.1538, |
| "step": 1276 |
| }, |
| { |
| "epoch": 6.229268292682927, |
| "grad_norm": 1.156667709350586, |
| "learning_rate": 1.5619049620004354e-06, |
| "loss": 0.2563, |
| "step": 1277 |
| }, |
| { |
| "epoch": 6.234146341463415, |
| "grad_norm": 0.8070971369743347, |
| "learning_rate": 1.5583548067259584e-06, |
| "loss": 0.1003, |
| "step": 1278 |
| }, |
| { |
| "epoch": 6.239024390243903, |
| "grad_norm": 1.0131428241729736, |
| "learning_rate": 1.5548068629122126e-06, |
| "loss": 0.2334, |
| "step": 1279 |
| }, |
| { |
| "epoch": 6.2439024390243905, |
| "grad_norm": 0.9818571209907532, |
| "learning_rate": 1.5512611388915711e-06, |
| "loss": 0.2086, |
| "step": 1280 |
| }, |
| { |
| "epoch": 6.248780487804878, |
| "grad_norm": 1.344126582145691, |
| "learning_rate": 1.5477176429911934e-06, |
| "loss": 0.4732, |
| "step": 1281 |
| }, |
| { |
| "epoch": 6.253658536585366, |
| "grad_norm": 0.8236895203590393, |
| "learning_rate": 1.5441763835330048e-06, |
| "loss": 0.0692, |
| "step": 1282 |
| }, |
| { |
| "epoch": 6.258536585365854, |
| "grad_norm": 1.0422422885894775, |
| "learning_rate": 1.5406373688336807e-06, |
| "loss": 0.084, |
| "step": 1283 |
| }, |
| { |
| "epoch": 6.263414634146342, |
| "grad_norm": 0.9542838335037231, |
| "learning_rate": 1.5371006072046225e-06, |
| "loss": 0.1759, |
| "step": 1284 |
| }, |
| { |
| "epoch": 6.2682926829268295, |
| "grad_norm": 0.7214556932449341, |
| "learning_rate": 1.5335661069519408e-06, |
| "loss": 0.089, |
| "step": 1285 |
| }, |
| { |
| "epoch": 6.273170731707317, |
| "grad_norm": 1.2626168727874756, |
| "learning_rate": 1.5300338763764371e-06, |
| "loss": 0.0866, |
| "step": 1286 |
| }, |
| { |
| "epoch": 6.278048780487805, |
| "grad_norm": 1.2875781059265137, |
| "learning_rate": 1.5265039237735804e-06, |
| "loss": 0.1159, |
| "step": 1287 |
| }, |
| { |
| "epoch": 6.282926829268293, |
| "grad_norm": 1.00115966796875, |
| "learning_rate": 1.5229762574334903e-06, |
| "loss": 0.0688, |
| "step": 1288 |
| }, |
| { |
| "epoch": 6.287804878048781, |
| "grad_norm": 1.2635149955749512, |
| "learning_rate": 1.5194508856409181e-06, |
| "loss": 0.2394, |
| "step": 1289 |
| }, |
| { |
| "epoch": 6.2926829268292686, |
| "grad_norm": 1.0395175218582153, |
| "learning_rate": 1.515927816675225e-06, |
| "loss": 0.1104, |
| "step": 1290 |
| }, |
| { |
| "epoch": 6.297560975609756, |
| "grad_norm": 2.234849452972412, |
| "learning_rate": 1.5124070588103648e-06, |
| "loss": 0.0526, |
| "step": 1291 |
| }, |
| { |
| "epoch": 6.302439024390244, |
| "grad_norm": 0.791607677936554, |
| "learning_rate": 1.5088886203148643e-06, |
| "loss": 0.1148, |
| "step": 1292 |
| }, |
| { |
| "epoch": 6.307317073170732, |
| "grad_norm": 1.2630048990249634, |
| "learning_rate": 1.505372509451801e-06, |
| "loss": 0.3476, |
| "step": 1293 |
| }, |
| { |
| "epoch": 6.31219512195122, |
| "grad_norm": 0.9735398888587952, |
| "learning_rate": 1.5018587344787888e-06, |
| "loss": 0.0946, |
| "step": 1294 |
| }, |
| { |
| "epoch": 6.317073170731708, |
| "grad_norm": 1.4363349676132202, |
| "learning_rate": 1.498347303647953e-06, |
| "loss": 0.1628, |
| "step": 1295 |
| }, |
| { |
| "epoch": 6.321951219512195, |
| "grad_norm": 1.1442455053329468, |
| "learning_rate": 1.4948382252059158e-06, |
| "loss": 0.1958, |
| "step": 1296 |
| }, |
| { |
| "epoch": 6.326829268292683, |
| "grad_norm": 0.9576900601387024, |
| "learning_rate": 1.4913315073937742e-06, |
| "loss": 0.1721, |
| "step": 1297 |
| }, |
| { |
| "epoch": 6.331707317073171, |
| "grad_norm": 1.2679523229599, |
| "learning_rate": 1.4878271584470805e-06, |
| "loss": 0.2193, |
| "step": 1298 |
| }, |
| { |
| "epoch": 6.336585365853659, |
| "grad_norm": 1.2026294469833374, |
| "learning_rate": 1.4843251865958242e-06, |
| "loss": 0.1086, |
| "step": 1299 |
| }, |
| { |
| "epoch": 6.341463414634147, |
| "grad_norm": 1.2589906454086304, |
| "learning_rate": 1.4808256000644128e-06, |
| "loss": 0.1949, |
| "step": 1300 |
| }, |
| { |
| "epoch": 6.3463414634146345, |
| "grad_norm": 1.1509987115859985, |
| "learning_rate": 1.4773284070716504e-06, |
| "loss": 0.2124, |
| "step": 1301 |
| }, |
| { |
| "epoch": 6.351219512195122, |
| "grad_norm": 1.9435495138168335, |
| "learning_rate": 1.473833615830722e-06, |
| "loss": 0.1059, |
| "step": 1302 |
| }, |
| { |
| "epoch": 6.35609756097561, |
| "grad_norm": 1.1303460597991943, |
| "learning_rate": 1.4703412345491692e-06, |
| "loss": 0.1562, |
| "step": 1303 |
| }, |
| { |
| "epoch": 6.360975609756098, |
| "grad_norm": 1.0799435377120972, |
| "learning_rate": 1.4668512714288763e-06, |
| "loss": 0.1182, |
| "step": 1304 |
| }, |
| { |
| "epoch": 6.365853658536586, |
| "grad_norm": 0.8857361674308777, |
| "learning_rate": 1.4633637346660478e-06, |
| "loss": 0.0652, |
| "step": 1305 |
| }, |
| { |
| "epoch": 6.3707317073170735, |
| "grad_norm": 1.1127703189849854, |
| "learning_rate": 1.4598786324511892e-06, |
| "loss": 0.099, |
| "step": 1306 |
| }, |
| { |
| "epoch": 6.375609756097561, |
| "grad_norm": 1.0147268772125244, |
| "learning_rate": 1.456395972969089e-06, |
| "loss": 0.0864, |
| "step": 1307 |
| }, |
| { |
| "epoch": 6.380487804878049, |
| "grad_norm": 1.2551109790802002, |
| "learning_rate": 1.4529157643987995e-06, |
| "loss": 0.2552, |
| "step": 1308 |
| }, |
| { |
| "epoch": 6.385365853658537, |
| "grad_norm": 1.044129490852356, |
| "learning_rate": 1.4494380149136162e-06, |
| "loss": 0.1989, |
| "step": 1309 |
| }, |
| { |
| "epoch": 6.390243902439025, |
| "grad_norm": 1.366299033164978, |
| "learning_rate": 1.4459627326810576e-06, |
| "loss": 0.1694, |
| "step": 1310 |
| }, |
| { |
| "epoch": 6.3951219512195125, |
| "grad_norm": 1.2774457931518555, |
| "learning_rate": 1.4424899258628533e-06, |
| "loss": 0.1415, |
| "step": 1311 |
| }, |
| { |
| "epoch": 6.4, |
| "grad_norm": 1.07902991771698, |
| "learning_rate": 1.439019602614914e-06, |
| "loss": 0.0497, |
| "step": 1312 |
| }, |
| { |
| "epoch": 6.404878048780488, |
| "grad_norm": 1.1242656707763672, |
| "learning_rate": 1.4355517710873184e-06, |
| "loss": 0.2435, |
| "step": 1313 |
| }, |
| { |
| "epoch": 6.409756097560976, |
| "grad_norm": 1.9656187295913696, |
| "learning_rate": 1.432086439424297e-06, |
| "loss": 0.3044, |
| "step": 1314 |
| }, |
| { |
| "epoch": 6.414634146341464, |
| "grad_norm": 1.5738033056259155, |
| "learning_rate": 1.428623615764206e-06, |
| "loss": 0.4497, |
| "step": 1315 |
| }, |
| { |
| "epoch": 6.419512195121952, |
| "grad_norm": 0.8496007919311523, |
| "learning_rate": 1.4251633082395117e-06, |
| "loss": 0.142, |
| "step": 1316 |
| }, |
| { |
| "epoch": 6.424390243902439, |
| "grad_norm": 1.3462088108062744, |
| "learning_rate": 1.4217055249767734e-06, |
| "loss": 0.177, |
| "step": 1317 |
| }, |
| { |
| "epoch": 6.429268292682927, |
| "grad_norm": 0.9757170677185059, |
| "learning_rate": 1.4182502740966203e-06, |
| "loss": 0.0472, |
| "step": 1318 |
| }, |
| { |
| "epoch": 6.434146341463415, |
| "grad_norm": 1.0830655097961426, |
| "learning_rate": 1.4147975637137334e-06, |
| "loss": 0.1507, |
| "step": 1319 |
| }, |
| { |
| "epoch": 6.439024390243903, |
| "grad_norm": 1.2118395566940308, |
| "learning_rate": 1.411347401936831e-06, |
| "loss": 0.1994, |
| "step": 1320 |
| }, |
| { |
| "epoch": 6.443902439024391, |
| "grad_norm": 1.5108318328857422, |
| "learning_rate": 1.4078997968686425e-06, |
| "loss": 0.1086, |
| "step": 1321 |
| }, |
| { |
| "epoch": 6.4487804878048784, |
| "grad_norm": 1.0448169708251953, |
| "learning_rate": 1.404454756605893e-06, |
| "loss": 0.1801, |
| "step": 1322 |
| }, |
| { |
| "epoch": 6.453658536585366, |
| "grad_norm": 1.2599785327911377, |
| "learning_rate": 1.4010122892392872e-06, |
| "loss": 0.3254, |
| "step": 1323 |
| }, |
| { |
| "epoch": 6.458536585365854, |
| "grad_norm": 1.0006107091903687, |
| "learning_rate": 1.3975724028534842e-06, |
| "loss": 0.2379, |
| "step": 1324 |
| }, |
| { |
| "epoch": 6.463414634146342, |
| "grad_norm": 1.205500602722168, |
| "learning_rate": 1.394135105527083e-06, |
| "loss": 0.1195, |
| "step": 1325 |
| }, |
| { |
| "epoch": 6.46829268292683, |
| "grad_norm": 1.282423973083496, |
| "learning_rate": 1.3907004053326006e-06, |
| "loss": 0.1627, |
| "step": 1326 |
| }, |
| { |
| "epoch": 6.473170731707317, |
| "grad_norm": 1.0242518186569214, |
| "learning_rate": 1.387268310336458e-06, |
| "loss": 0.1358, |
| "step": 1327 |
| }, |
| { |
| "epoch": 6.478048780487805, |
| "grad_norm": 1.1453756093978882, |
| "learning_rate": 1.3838388285989552e-06, |
| "loss": 0.1715, |
| "step": 1328 |
| }, |
| { |
| "epoch": 6.482926829268292, |
| "grad_norm": 0.9189904928207397, |
| "learning_rate": 1.380411968174254e-06, |
| "loss": 0.1148, |
| "step": 1329 |
| }, |
| { |
| "epoch": 6.487804878048781, |
| "grad_norm": 1.40865159034729, |
| "learning_rate": 1.3769877371103635e-06, |
| "loss": 0.2742, |
| "step": 1330 |
| }, |
| { |
| "epoch": 6.492682926829268, |
| "grad_norm": 1.5062743425369263, |
| "learning_rate": 1.373566143449115e-06, |
| "loss": 0.418, |
| "step": 1331 |
| }, |
| { |
| "epoch": 6.4975609756097565, |
| "grad_norm": 0.7628481388092041, |
| "learning_rate": 1.3701471952261457e-06, |
| "loss": 0.069, |
| "step": 1332 |
| }, |
| { |
| "epoch": 6.5024390243902435, |
| "grad_norm": 1.154660701751709, |
| "learning_rate": 1.3667309004708832e-06, |
| "loss": 0.1043, |
| "step": 1333 |
| }, |
| { |
| "epoch": 6.507317073170732, |
| "grad_norm": 1.6828221082687378, |
| "learning_rate": 1.3633172672065195e-06, |
| "loss": 0.2613, |
| "step": 1334 |
| }, |
| { |
| "epoch": 6.512195121951219, |
| "grad_norm": 1.1095471382141113, |
| "learning_rate": 1.359906303449997e-06, |
| "loss": 0.1142, |
| "step": 1335 |
| }, |
| { |
| "epoch": 6.517073170731708, |
| "grad_norm": 0.9421728849411011, |
| "learning_rate": 1.3564980172119913e-06, |
| "loss": 0.0699, |
| "step": 1336 |
| }, |
| { |
| "epoch": 6.521951219512195, |
| "grad_norm": 1.3236054182052612, |
| "learning_rate": 1.3530924164968873e-06, |
| "loss": 0.4213, |
| "step": 1337 |
| }, |
| { |
| "epoch": 6.526829268292683, |
| "grad_norm": 0.973343551158905, |
| "learning_rate": 1.3496895093027617e-06, |
| "loss": 0.1617, |
| "step": 1338 |
| }, |
| { |
| "epoch": 6.53170731707317, |
| "grad_norm": 1.0116113424301147, |
| "learning_rate": 1.3462893036213706e-06, |
| "loss": 0.1201, |
| "step": 1339 |
| }, |
| { |
| "epoch": 6.536585365853659, |
| "grad_norm": 1.2368037700653076, |
| "learning_rate": 1.3428918074381203e-06, |
| "loss": 0.127, |
| "step": 1340 |
| }, |
| { |
| "epoch": 6.541463414634146, |
| "grad_norm": 1.0005751848220825, |
| "learning_rate": 1.3394970287320553e-06, |
| "loss": 0.2255, |
| "step": 1341 |
| }, |
| { |
| "epoch": 6.546341463414635, |
| "grad_norm": 0.7819541096687317, |
| "learning_rate": 1.3361049754758404e-06, |
| "loss": 0.1052, |
| "step": 1342 |
| }, |
| { |
| "epoch": 6.5512195121951216, |
| "grad_norm": 0.8330570459365845, |
| "learning_rate": 1.3327156556357369e-06, |
| "loss": 0.0622, |
| "step": 1343 |
| }, |
| { |
| "epoch": 6.55609756097561, |
| "grad_norm": 1.335412859916687, |
| "learning_rate": 1.3293290771715875e-06, |
| "loss": 0.2166, |
| "step": 1344 |
| }, |
| { |
| "epoch": 6.560975609756097, |
| "grad_norm": 0.9714047312736511, |
| "learning_rate": 1.3259452480367963e-06, |
| "loss": 0.1613, |
| "step": 1345 |
| }, |
| { |
| "epoch": 6.565853658536585, |
| "grad_norm": 1.260328769683838, |
| "learning_rate": 1.3225641761783126e-06, |
| "loss": 0.2404, |
| "step": 1346 |
| }, |
| { |
| "epoch": 6.570731707317073, |
| "grad_norm": 1.480240821838379, |
| "learning_rate": 1.3191858695366084e-06, |
| "loss": 0.3174, |
| "step": 1347 |
| }, |
| { |
| "epoch": 6.575609756097561, |
| "grad_norm": 1.1139479875564575, |
| "learning_rate": 1.3158103360456603e-06, |
| "loss": 0.1937, |
| "step": 1348 |
| }, |
| { |
| "epoch": 6.580487804878048, |
| "grad_norm": 1.135933518409729, |
| "learning_rate": 1.3124375836329362e-06, |
| "loss": 0.183, |
| "step": 1349 |
| }, |
| { |
| "epoch": 6.585365853658536, |
| "grad_norm": 1.24773108959198, |
| "learning_rate": 1.3090676202193692e-06, |
| "loss": 0.0279, |
| "step": 1350 |
| }, |
| { |
| "epoch": 6.590243902439024, |
| "grad_norm": 1.0175641775131226, |
| "learning_rate": 1.3057004537193424e-06, |
| "loss": 0.109, |
| "step": 1351 |
| }, |
| { |
| "epoch": 6.595121951219512, |
| "grad_norm": 0.8159235119819641, |
| "learning_rate": 1.302336092040673e-06, |
| "loss": 0.0878, |
| "step": 1352 |
| }, |
| { |
| "epoch": 6.6, |
| "grad_norm": 1.0272810459136963, |
| "learning_rate": 1.298974543084589e-06, |
| "loss": 0.116, |
| "step": 1353 |
| }, |
| { |
| "epoch": 6.6048780487804875, |
| "grad_norm": 1.0128616094589233, |
| "learning_rate": 1.2956158147457116e-06, |
| "loss": 0.1006, |
| "step": 1354 |
| }, |
| { |
| "epoch": 6.609756097560975, |
| "grad_norm": 0.9154186248779297, |
| "learning_rate": 1.2922599149120412e-06, |
| "loss": 0.1024, |
| "step": 1355 |
| }, |
| { |
| "epoch": 6.614634146341463, |
| "grad_norm": 0.941557765007019, |
| "learning_rate": 1.2889068514649328e-06, |
| "loss": 0.1577, |
| "step": 1356 |
| }, |
| { |
| "epoch": 6.619512195121951, |
| "grad_norm": 1.0502948760986328, |
| "learning_rate": 1.2855566322790796e-06, |
| "loss": 0.0702, |
| "step": 1357 |
| }, |
| { |
| "epoch": 6.624390243902439, |
| "grad_norm": 1.050806999206543, |
| "learning_rate": 1.2822092652224989e-06, |
| "loss": 0.1168, |
| "step": 1358 |
| }, |
| { |
| "epoch": 6.6292682926829265, |
| "grad_norm": 1.0025922060012817, |
| "learning_rate": 1.2788647581565048e-06, |
| "loss": 0.0951, |
| "step": 1359 |
| }, |
| { |
| "epoch": 6.634146341463414, |
| "grad_norm": 1.266708254814148, |
| "learning_rate": 1.275523118935697e-06, |
| "loss": 0.0827, |
| "step": 1360 |
| }, |
| { |
| "epoch": 6.639024390243902, |
| "grad_norm": 1.0859824419021606, |
| "learning_rate": 1.2721843554079418e-06, |
| "loss": 0.1783, |
| "step": 1361 |
| }, |
| { |
| "epoch": 6.64390243902439, |
| "grad_norm": 1.2870323657989502, |
| "learning_rate": 1.2688484754143493e-06, |
| "loss": 0.4135, |
| "step": 1362 |
| }, |
| { |
| "epoch": 6.648780487804878, |
| "grad_norm": 1.2668397426605225, |
| "learning_rate": 1.2655154867892577e-06, |
| "loss": 0.2108, |
| "step": 1363 |
| }, |
| { |
| "epoch": 6.6536585365853655, |
| "grad_norm": 1.4955235719680786, |
| "learning_rate": 1.2621853973602158e-06, |
| "loss": 0.2235, |
| "step": 1364 |
| }, |
| { |
| "epoch": 6.658536585365853, |
| "grad_norm": 1.0341984033584595, |
| "learning_rate": 1.2588582149479645e-06, |
| "loss": 0.0549, |
| "step": 1365 |
| }, |
| { |
| "epoch": 6.663414634146341, |
| "grad_norm": 1.038429617881775, |
| "learning_rate": 1.2555339473664151e-06, |
| "loss": 0.1321, |
| "step": 1366 |
| }, |
| { |
| "epoch": 6.668292682926829, |
| "grad_norm": 1.2604671716690063, |
| "learning_rate": 1.2522126024226347e-06, |
| "loss": 0.2104, |
| "step": 1367 |
| }, |
| { |
| "epoch": 6.673170731707317, |
| "grad_norm": 1.0938949584960938, |
| "learning_rate": 1.2488941879168278e-06, |
| "loss": 0.0685, |
| "step": 1368 |
| }, |
| { |
| "epoch": 6.678048780487805, |
| "grad_norm": 0.8830887079238892, |
| "learning_rate": 1.2455787116423148e-06, |
| "loss": 0.1789, |
| "step": 1369 |
| }, |
| { |
| "epoch": 6.682926829268292, |
| "grad_norm": 0.9735605120658875, |
| "learning_rate": 1.2422661813855158e-06, |
| "loss": 0.1276, |
| "step": 1370 |
| }, |
| { |
| "epoch": 6.68780487804878, |
| "grad_norm": 1.117775559425354, |
| "learning_rate": 1.238956604925934e-06, |
| "loss": 0.106, |
| "step": 1371 |
| }, |
| { |
| "epoch": 6.692682926829268, |
| "grad_norm": 1.5626012086868286, |
| "learning_rate": 1.2356499900361333e-06, |
| "loss": 0.2649, |
| "step": 1372 |
| }, |
| { |
| "epoch": 6.697560975609756, |
| "grad_norm": 0.8927910327911377, |
| "learning_rate": 1.2323463444817227e-06, |
| "loss": 0.1202, |
| "step": 1373 |
| }, |
| { |
| "epoch": 6.702439024390244, |
| "grad_norm": 1.393783450126648, |
| "learning_rate": 1.2290456760213405e-06, |
| "loss": 0.2409, |
| "step": 1374 |
| }, |
| { |
| "epoch": 6.7073170731707314, |
| "grad_norm": 1.1260027885437012, |
| "learning_rate": 1.2257479924066296e-06, |
| "loss": 0.229, |
| "step": 1375 |
| }, |
| { |
| "epoch": 6.712195121951219, |
| "grad_norm": 2.048091411590576, |
| "learning_rate": 1.2224533013822237e-06, |
| "loss": 0.2282, |
| "step": 1376 |
| }, |
| { |
| "epoch": 6.717073170731707, |
| "grad_norm": 1.2933448553085327, |
| "learning_rate": 1.2191616106857312e-06, |
| "loss": 0.2226, |
| "step": 1377 |
| }, |
| { |
| "epoch": 6.721951219512195, |
| "grad_norm": 1.0582860708236694, |
| "learning_rate": 1.2158729280477112e-06, |
| "loss": 0.1759, |
| "step": 1378 |
| }, |
| { |
| "epoch": 6.726829268292683, |
| "grad_norm": 1.248373031616211, |
| "learning_rate": 1.2125872611916578e-06, |
| "loss": 0.2021, |
| "step": 1379 |
| }, |
| { |
| "epoch": 6.7317073170731705, |
| "grad_norm": 0.9428868889808655, |
| "learning_rate": 1.2093046178339869e-06, |
| "loss": 0.0795, |
| "step": 1380 |
| }, |
| { |
| "epoch": 6.736585365853658, |
| "grad_norm": 0.8160692453384399, |
| "learning_rate": 1.206025005684009e-06, |
| "loss": 0.1338, |
| "step": 1381 |
| }, |
| { |
| "epoch": 6.741463414634146, |
| "grad_norm": 0.7535449862480164, |
| "learning_rate": 1.202748432443918e-06, |
| "loss": 0.0606, |
| "step": 1382 |
| }, |
| { |
| "epoch": 6.746341463414634, |
| "grad_norm": 1.2124066352844238, |
| "learning_rate": 1.1994749058087695e-06, |
| "loss": 0.2288, |
| "step": 1383 |
| }, |
| { |
| "epoch": 6.751219512195122, |
| "grad_norm": 1.502294659614563, |
| "learning_rate": 1.196204433466467e-06, |
| "loss": 0.3443, |
| "step": 1384 |
| }, |
| { |
| "epoch": 6.7560975609756095, |
| "grad_norm": 1.232467770576477, |
| "learning_rate": 1.192937023097738e-06, |
| "loss": 0.2051, |
| "step": 1385 |
| }, |
| { |
| "epoch": 6.760975609756097, |
| "grad_norm": 0.8221524953842163, |
| "learning_rate": 1.1896726823761195e-06, |
| "loss": 0.0482, |
| "step": 1386 |
| }, |
| { |
| "epoch": 6.765853658536585, |
| "grad_norm": 1.254210114479065, |
| "learning_rate": 1.1864114189679413e-06, |
| "loss": 0.0872, |
| "step": 1387 |
| }, |
| { |
| "epoch": 6.770731707317073, |
| "grad_norm": 1.212073802947998, |
| "learning_rate": 1.183153240532304e-06, |
| "loss": 0.0943, |
| "step": 1388 |
| }, |
| { |
| "epoch": 6.775609756097561, |
| "grad_norm": 1.2102311849594116, |
| "learning_rate": 1.179898154721063e-06, |
| "loss": 0.1226, |
| "step": 1389 |
| }, |
| { |
| "epoch": 6.780487804878049, |
| "grad_norm": 1.673811912536621, |
| "learning_rate": 1.1766461691788137e-06, |
| "loss": 0.0617, |
| "step": 1390 |
| }, |
| { |
| "epoch": 6.785365853658536, |
| "grad_norm": 1.4431748390197754, |
| "learning_rate": 1.1733972915428665e-06, |
| "loss": 0.2783, |
| "step": 1391 |
| }, |
| { |
| "epoch": 6.790243902439024, |
| "grad_norm": 1.1467320919036865, |
| "learning_rate": 1.1701515294432348e-06, |
| "loss": 0.1642, |
| "step": 1392 |
| }, |
| { |
| "epoch": 6.795121951219512, |
| "grad_norm": 1.7306759357452393, |
| "learning_rate": 1.1669088905026156e-06, |
| "loss": 0.3577, |
| "step": 1393 |
| }, |
| { |
| "epoch": 6.8, |
| "grad_norm": 1.0853081941604614, |
| "learning_rate": 1.163669382336371e-06, |
| "loss": 0.1911, |
| "step": 1394 |
| }, |
| { |
| "epoch": 6.804878048780488, |
| "grad_norm": 0.7488344311714172, |
| "learning_rate": 1.160433012552508e-06, |
| "loss": 0.0403, |
| "step": 1395 |
| }, |
| { |
| "epoch": 6.809756097560975, |
| "grad_norm": 1.2400141954421997, |
| "learning_rate": 1.1571997887516672e-06, |
| "loss": 0.3637, |
| "step": 1396 |
| }, |
| { |
| "epoch": 6.814634146341463, |
| "grad_norm": 1.2724562883377075, |
| "learning_rate": 1.1539697185270982e-06, |
| "loss": 0.1827, |
| "step": 1397 |
| }, |
| { |
| "epoch": 6.819512195121951, |
| "grad_norm": 1.0190290212631226, |
| "learning_rate": 1.1507428094646448e-06, |
| "loss": 0.1489, |
| "step": 1398 |
| }, |
| { |
| "epoch": 6.824390243902439, |
| "grad_norm": 0.8387356400489807, |
| "learning_rate": 1.1475190691427255e-06, |
| "loss": 0.0689, |
| "step": 1399 |
| }, |
| { |
| "epoch": 6.829268292682927, |
| "grad_norm": 0.5320870876312256, |
| "learning_rate": 1.1442985051323205e-06, |
| "loss": 0.0203, |
| "step": 1400 |
| }, |
| { |
| "epoch": 6.8341463414634145, |
| "grad_norm": 1.9445618391036987, |
| "learning_rate": 1.1410811249969475e-06, |
| "loss": 0.5178, |
| "step": 1401 |
| }, |
| { |
| "epoch": 6.839024390243902, |
| "grad_norm": 1.3419086933135986, |
| "learning_rate": 1.1378669362926468e-06, |
| "loss": 0.284, |
| "step": 1402 |
| }, |
| { |
| "epoch": 6.84390243902439, |
| "grad_norm": 1.31820809841156, |
| "learning_rate": 1.1346559465679656e-06, |
| "loss": 0.2732, |
| "step": 1403 |
| }, |
| { |
| "epoch": 6.848780487804878, |
| "grad_norm": 0.6823004484176636, |
| "learning_rate": 1.1314481633639374e-06, |
| "loss": 0.0371, |
| "step": 1404 |
| }, |
| { |
| "epoch": 6.853658536585366, |
| "grad_norm": 1.3075387477874756, |
| "learning_rate": 1.1282435942140632e-06, |
| "loss": 0.2317, |
| "step": 1405 |
| }, |
| { |
| "epoch": 6.8585365853658535, |
| "grad_norm": 1.0345687866210938, |
| "learning_rate": 1.1250422466442992e-06, |
| "loss": 0.1354, |
| "step": 1406 |
| }, |
| { |
| "epoch": 6.863414634146341, |
| "grad_norm": 0.8593140840530396, |
| "learning_rate": 1.1218441281730334e-06, |
| "loss": 0.1287, |
| "step": 1407 |
| }, |
| { |
| "epoch": 6.868292682926829, |
| "grad_norm": 1.0140306949615479, |
| "learning_rate": 1.1186492463110696e-06, |
| "loss": 0.1075, |
| "step": 1408 |
| }, |
| { |
| "epoch": 6.873170731707317, |
| "grad_norm": 1.056603193283081, |
| "learning_rate": 1.1154576085616135e-06, |
| "loss": 0.0877, |
| "step": 1409 |
| }, |
| { |
| "epoch": 6.878048780487805, |
| "grad_norm": 0.9203841686248779, |
| "learning_rate": 1.1122692224202491e-06, |
| "loss": 0.0866, |
| "step": 1410 |
| }, |
| { |
| "epoch": 6.882926829268293, |
| "grad_norm": 1.1666905879974365, |
| "learning_rate": 1.1090840953749253e-06, |
| "loss": 0.2371, |
| "step": 1411 |
| }, |
| { |
| "epoch": 6.88780487804878, |
| "grad_norm": 0.806583046913147, |
| "learning_rate": 1.1059022349059362e-06, |
| "loss": 0.058, |
| "step": 1412 |
| }, |
| { |
| "epoch": 6.892682926829268, |
| "grad_norm": 1.529600977897644, |
| "learning_rate": 1.102723648485905e-06, |
| "loss": 0.4139, |
| "step": 1413 |
| }, |
| { |
| "epoch": 6.897560975609756, |
| "grad_norm": 1.5345758199691772, |
| "learning_rate": 1.0995483435797643e-06, |
| "loss": 0.2383, |
| "step": 1414 |
| }, |
| { |
| "epoch": 6.902439024390244, |
| "grad_norm": 0.9218557476997375, |
| "learning_rate": 1.0963763276447435e-06, |
| "loss": 0.0451, |
| "step": 1415 |
| }, |
| { |
| "epoch": 6.907317073170732, |
| "grad_norm": 1.0400121212005615, |
| "learning_rate": 1.0932076081303442e-06, |
| "loss": 0.0984, |
| "step": 1416 |
| }, |
| { |
| "epoch": 6.912195121951219, |
| "grad_norm": 0.9838992953300476, |
| "learning_rate": 1.0900421924783272e-06, |
| "loss": 0.1202, |
| "step": 1417 |
| }, |
| { |
| "epoch": 6.917073170731707, |
| "grad_norm": 0.9632418751716614, |
| "learning_rate": 1.0868800881226962e-06, |
| "loss": 0.1938, |
| "step": 1418 |
| }, |
| { |
| "epoch": 6.921951219512195, |
| "grad_norm": 1.308287501335144, |
| "learning_rate": 1.0837213024896764e-06, |
| "loss": 0.1664, |
| "step": 1419 |
| }, |
| { |
| "epoch": 6.926829268292683, |
| "grad_norm": 1.2088172435760498, |
| "learning_rate": 1.080565842997698e-06, |
| "loss": 0.2382, |
| "step": 1420 |
| }, |
| { |
| "epoch": 6.931707317073171, |
| "grad_norm": 0.6009799838066101, |
| "learning_rate": 1.0774137170573826e-06, |
| "loss": 0.0705, |
| "step": 1421 |
| }, |
| { |
| "epoch": 6.9365853658536585, |
| "grad_norm": 1.1284160614013672, |
| "learning_rate": 1.074264932071521e-06, |
| "loss": 0.2337, |
| "step": 1422 |
| }, |
| { |
| "epoch": 6.941463414634146, |
| "grad_norm": 1.1777656078338623, |
| "learning_rate": 1.0711194954350568e-06, |
| "loss": 0.1266, |
| "step": 1423 |
| }, |
| { |
| "epoch": 6.946341463414634, |
| "grad_norm": 1.0326296091079712, |
| "learning_rate": 1.0679774145350735e-06, |
| "loss": 0.1498, |
| "step": 1424 |
| }, |
| { |
| "epoch": 6.951219512195122, |
| "grad_norm": 1.2106353044509888, |
| "learning_rate": 1.0648386967507703e-06, |
| "loss": 0.3686, |
| "step": 1425 |
| }, |
| { |
| "epoch": 6.95609756097561, |
| "grad_norm": 0.9451847672462463, |
| "learning_rate": 1.0617033494534486e-06, |
| "loss": 0.1227, |
| "step": 1426 |
| }, |
| { |
| "epoch": 6.9609756097560975, |
| "grad_norm": 0.9187631011009216, |
| "learning_rate": 1.0585713800064964e-06, |
| "loss": 0.0725, |
| "step": 1427 |
| }, |
| { |
| "epoch": 6.965853658536585, |
| "grad_norm": 1.0765812397003174, |
| "learning_rate": 1.0554427957653663e-06, |
| "loss": 0.2055, |
| "step": 1428 |
| }, |
| { |
| "epoch": 6.970731707317073, |
| "grad_norm": 1.2303036451339722, |
| "learning_rate": 1.0523176040775615e-06, |
| "loss": 0.3308, |
| "step": 1429 |
| }, |
| { |
| "epoch": 6.975609756097561, |
| "grad_norm": 1.1930241584777832, |
| "learning_rate": 1.0491958122826173e-06, |
| "loss": 0.2065, |
| "step": 1430 |
| }, |
| { |
| "epoch": 6.980487804878049, |
| "grad_norm": 0.8994832038879395, |
| "learning_rate": 1.0460774277120866e-06, |
| "loss": 0.0904, |
| "step": 1431 |
| }, |
| { |
| "epoch": 6.985365853658537, |
| "grad_norm": 0.9850203990936279, |
| "learning_rate": 1.0429624576895177e-06, |
| "loss": 0.0591, |
| "step": 1432 |
| }, |
| { |
| "epoch": 6.990243902439024, |
| "grad_norm": 1.204479455947876, |
| "learning_rate": 1.03985090953044e-06, |
| "loss": 0.1802, |
| "step": 1433 |
| }, |
| { |
| "epoch": 6.995121951219512, |
| "grad_norm": 1.073361873626709, |
| "learning_rate": 1.0367427905423497e-06, |
| "loss": 0.2169, |
| "step": 1434 |
| }, |
| { |
| "epoch": 7.0, |
| "grad_norm": 0.8216325044631958, |
| "learning_rate": 1.0336381080246858e-06, |
| "loss": 0.0718, |
| "step": 1435 |
| }, |
| { |
| "epoch": 7.004878048780488, |
| "grad_norm": 1.052810788154602, |
| "learning_rate": 1.0305368692688175e-06, |
| "loss": 0.1574, |
| "step": 1436 |
| }, |
| { |
| "epoch": 7.009756097560976, |
| "grad_norm": 0.9019646048545837, |
| "learning_rate": 1.027439081558029e-06, |
| "loss": 0.1102, |
| "step": 1437 |
| }, |
| { |
| "epoch": 7.014634146341463, |
| "grad_norm": 1.0521090030670166, |
| "learning_rate": 1.0243447521674967e-06, |
| "loss": 0.1744, |
| "step": 1438 |
| }, |
| { |
| "epoch": 7.019512195121951, |
| "grad_norm": 1.2154330015182495, |
| "learning_rate": 1.021253888364276e-06, |
| "loss": 0.4529, |
| "step": 1439 |
| }, |
| { |
| "epoch": 7.024390243902439, |
| "grad_norm": 0.750938892364502, |
| "learning_rate": 1.018166497407284e-06, |
| "loss": 0.0773, |
| "step": 1440 |
| }, |
| { |
| "epoch": 7.029268292682927, |
| "grad_norm": 0.6538183093070984, |
| "learning_rate": 1.0150825865472813e-06, |
| "loss": 0.0633, |
| "step": 1441 |
| }, |
| { |
| "epoch": 7.034146341463415, |
| "grad_norm": 0.7504788041114807, |
| "learning_rate": 1.0120021630268542e-06, |
| "loss": 0.0664, |
| "step": 1442 |
| }, |
| { |
| "epoch": 7.0390243902439025, |
| "grad_norm": 1.1257810592651367, |
| "learning_rate": 1.0089252340804025e-06, |
| "loss": 0.1104, |
| "step": 1443 |
| }, |
| { |
| "epoch": 7.04390243902439, |
| "grad_norm": 1.3814443349838257, |
| "learning_rate": 1.0058518069341152e-06, |
| "loss": 0.2337, |
| "step": 1444 |
| }, |
| { |
| "epoch": 7.048780487804878, |
| "grad_norm": 0.7977355718612671, |
| "learning_rate": 1.002781888805958e-06, |
| "loss": 0.0632, |
| "step": 1445 |
| }, |
| { |
| "epoch": 7.053658536585366, |
| "grad_norm": 0.9505071043968201, |
| "learning_rate": 9.997154869056588e-07, |
| "loss": 0.079, |
| "step": 1446 |
| }, |
| { |
| "epoch": 7.058536585365854, |
| "grad_norm": 1.8017224073410034, |
| "learning_rate": 9.966526084346837e-07, |
| "loss": 0.0992, |
| "step": 1447 |
| }, |
| { |
| "epoch": 7.0634146341463415, |
| "grad_norm": 1.6866507530212402, |
| "learning_rate": 9.935932605862258e-07, |
| "loss": 0.3274, |
| "step": 1448 |
| }, |
| { |
| "epoch": 7.068292682926829, |
| "grad_norm": 1.0027443170547485, |
| "learning_rate": 9.905374505451853e-07, |
| "loss": 0.152, |
| "step": 1449 |
| }, |
| { |
| "epoch": 7.073170731707317, |
| "grad_norm": 1.3855262994766235, |
| "learning_rate": 9.874851854881565e-07, |
| "loss": 0.2713, |
| "step": 1450 |
| }, |
| { |
| "epoch": 7.078048780487805, |
| "grad_norm": 1.0189032554626465, |
| "learning_rate": 9.844364725834058e-07, |
| "loss": 0.1553, |
| "step": 1451 |
| }, |
| { |
| "epoch": 7.082926829268293, |
| "grad_norm": 1.272226095199585, |
| "learning_rate": 9.813913189908571e-07, |
| "loss": 0.1665, |
| "step": 1452 |
| }, |
| { |
| "epoch": 7.087804878048781, |
| "grad_norm": 0.7456101775169373, |
| "learning_rate": 9.783497318620783e-07, |
| "loss": 0.0652, |
| "step": 1453 |
| }, |
| { |
| "epoch": 7.092682926829268, |
| "grad_norm": 0.7051019072532654, |
| "learning_rate": 9.75311718340258e-07, |
| "loss": 0.0751, |
| "step": 1454 |
| }, |
| { |
| "epoch": 7.097560975609756, |
| "grad_norm": 1.129547357559204, |
| "learning_rate": 9.722772855601927e-07, |
| "loss": 0.2222, |
| "step": 1455 |
| }, |
| { |
| "epoch": 7.102439024390244, |
| "grad_norm": 0.8566828370094299, |
| "learning_rate": 9.692464406482727e-07, |
| "loss": 0.0828, |
| "step": 1456 |
| }, |
| { |
| "epoch": 7.107317073170732, |
| "grad_norm": 0.7901312708854675, |
| "learning_rate": 9.662191907224582e-07, |
| "loss": 0.0483, |
| "step": 1457 |
| }, |
| { |
| "epoch": 7.11219512195122, |
| "grad_norm": 0.8374278545379639, |
| "learning_rate": 9.63195542892268e-07, |
| "loss": 0.0666, |
| "step": 1458 |
| }, |
| { |
| "epoch": 7.117073170731707, |
| "grad_norm": 0.8921765089035034, |
| "learning_rate": 9.601755042587624e-07, |
| "loss": 0.0972, |
| "step": 1459 |
| }, |
| { |
| "epoch": 7.121951219512195, |
| "grad_norm": 0.9732463955879211, |
| "learning_rate": 9.571590819145244e-07, |
| "loss": 0.0476, |
| "step": 1460 |
| }, |
| { |
| "epoch": 7.126829268292683, |
| "grad_norm": 1.284700632095337, |
| "learning_rate": 9.541462829436426e-07, |
| "loss": 0.1645, |
| "step": 1461 |
| }, |
| { |
| "epoch": 7.131707317073171, |
| "grad_norm": 1.2691322565078735, |
| "learning_rate": 9.511371144217005e-07, |
| "loss": 0.1717, |
| "step": 1462 |
| }, |
| { |
| "epoch": 7.136585365853659, |
| "grad_norm": 1.72730553150177, |
| "learning_rate": 9.481315834157512e-07, |
| "loss": 0.2492, |
| "step": 1463 |
| }, |
| { |
| "epoch": 7.1414634146341465, |
| "grad_norm": 1.1432853937149048, |
| "learning_rate": 9.451296969843058e-07, |
| "loss": 0.143, |
| "step": 1464 |
| }, |
| { |
| "epoch": 7.146341463414634, |
| "grad_norm": 1.0376734733581543, |
| "learning_rate": 9.42131462177319e-07, |
| "loss": 0.1186, |
| "step": 1465 |
| }, |
| { |
| "epoch": 7.151219512195122, |
| "grad_norm": 1.0639046430587769, |
| "learning_rate": 9.39136886036166e-07, |
| "loss": 0.1073, |
| "step": 1466 |
| }, |
| { |
| "epoch": 7.15609756097561, |
| "grad_norm": 0.8768961429595947, |
| "learning_rate": 9.361459755936316e-07, |
| "loss": 0.0595, |
| "step": 1467 |
| }, |
| { |
| "epoch": 7.160975609756098, |
| "grad_norm": 0.8885585069656372, |
| "learning_rate": 9.331587378738902e-07, |
| "loss": 0.1289, |
| "step": 1468 |
| }, |
| { |
| "epoch": 7.1658536585365855, |
| "grad_norm": 2.7977395057678223, |
| "learning_rate": 9.301751798924935e-07, |
| "loss": 0.1762, |
| "step": 1469 |
| }, |
| { |
| "epoch": 7.170731707317073, |
| "grad_norm": 0.7919803857803345, |
| "learning_rate": 9.27195308656349e-07, |
| "loss": 0.0919, |
| "step": 1470 |
| }, |
| { |
| "epoch": 7.175609756097561, |
| "grad_norm": 1.2924004793167114, |
| "learning_rate": 9.24219131163705e-07, |
| "loss": 0.2055, |
| "step": 1471 |
| }, |
| { |
| "epoch": 7.180487804878049, |
| "grad_norm": 0.9636824727058411, |
| "learning_rate": 9.212466544041385e-07, |
| "loss": 0.0541, |
| "step": 1472 |
| }, |
| { |
| "epoch": 7.185365853658537, |
| "grad_norm": 1.1815683841705322, |
| "learning_rate": 9.182778853585325e-07, |
| "loss": 0.1983, |
| "step": 1473 |
| }, |
| { |
| "epoch": 7.190243902439025, |
| "grad_norm": 1.063944935798645, |
| "learning_rate": 9.153128309990622e-07, |
| "loss": 0.19, |
| "step": 1474 |
| }, |
| { |
| "epoch": 7.195121951219512, |
| "grad_norm": 1.3285342454910278, |
| "learning_rate": 9.123514982891813e-07, |
| "loss": 0.1505, |
| "step": 1475 |
| }, |
| { |
| "epoch": 7.2, |
| "grad_norm": 0.9774877429008484, |
| "learning_rate": 9.093938941836012e-07, |
| "loss": 0.1066, |
| "step": 1476 |
| }, |
| { |
| "epoch": 7.204878048780488, |
| "grad_norm": 0.9382181763648987, |
| "learning_rate": 9.064400256282757e-07, |
| "loss": 0.0863, |
| "step": 1477 |
| }, |
| { |
| "epoch": 7.209756097560976, |
| "grad_norm": 1.0966013669967651, |
| "learning_rate": 9.034898995603894e-07, |
| "loss": 0.1533, |
| "step": 1478 |
| }, |
| { |
| "epoch": 7.214634146341464, |
| "grad_norm": 0.8254204392433167, |
| "learning_rate": 9.00543522908334e-07, |
| "loss": 0.0917, |
| "step": 1479 |
| }, |
| { |
| "epoch": 7.219512195121951, |
| "grad_norm": 0.9348810315132141, |
| "learning_rate": 8.976009025916962e-07, |
| "loss": 0.065, |
| "step": 1480 |
| }, |
| { |
| "epoch": 7.224390243902439, |
| "grad_norm": 1.166670560836792, |
| "learning_rate": 8.946620455212438e-07, |
| "loss": 0.0884, |
| "step": 1481 |
| }, |
| { |
| "epoch": 7.229268292682927, |
| "grad_norm": 1.373329758644104, |
| "learning_rate": 8.917269585989027e-07, |
| "loss": 0.3353, |
| "step": 1482 |
| }, |
| { |
| "epoch": 7.234146341463415, |
| "grad_norm": 0.8657915592193604, |
| "learning_rate": 8.887956487177462e-07, |
| "loss": 0.0897, |
| "step": 1483 |
| }, |
| { |
| "epoch": 7.239024390243903, |
| "grad_norm": 0.8597664833068848, |
| "learning_rate": 8.858681227619789e-07, |
| "loss": 0.1181, |
| "step": 1484 |
| }, |
| { |
| "epoch": 7.2439024390243905, |
| "grad_norm": 0.839962899684906, |
| "learning_rate": 8.829443876069163e-07, |
| "loss": 0.0599, |
| "step": 1485 |
| }, |
| { |
| "epoch": 7.248780487804878, |
| "grad_norm": 0.8345217108726501, |
| "learning_rate": 8.800244501189722e-07, |
| "loss": 0.0387, |
| "step": 1486 |
| }, |
| { |
| "epoch": 7.253658536585366, |
| "grad_norm": 1.267763614654541, |
| "learning_rate": 8.771083171556407e-07, |
| "loss": 0.2695, |
| "step": 1487 |
| }, |
| { |
| "epoch": 7.258536585365854, |
| "grad_norm": 0.8838726878166199, |
| "learning_rate": 8.741959955654833e-07, |
| "loss": 0.0838, |
| "step": 1488 |
| }, |
| { |
| "epoch": 7.263414634146342, |
| "grad_norm": 0.7908660173416138, |
| "learning_rate": 8.712874921881082e-07, |
| "loss": 0.1355, |
| "step": 1489 |
| }, |
| { |
| "epoch": 7.2682926829268295, |
| "grad_norm": 1.2876062393188477, |
| "learning_rate": 8.683828138541559e-07, |
| "loss": 0.3281, |
| "step": 1490 |
| }, |
| { |
| "epoch": 7.273170731707317, |
| "grad_norm": 1.1086186170578003, |
| "learning_rate": 8.654819673852874e-07, |
| "loss": 0.1662, |
| "step": 1491 |
| }, |
| { |
| "epoch": 7.278048780487805, |
| "grad_norm": 0.7914115190505981, |
| "learning_rate": 8.625849595941608e-07, |
| "loss": 0.0565, |
| "step": 1492 |
| }, |
| { |
| "epoch": 7.282926829268293, |
| "grad_norm": 0.6734137535095215, |
| "learning_rate": 8.596917972844199e-07, |
| "loss": 0.0334, |
| "step": 1493 |
| }, |
| { |
| "epoch": 7.287804878048781, |
| "grad_norm": 0.9427139163017273, |
| "learning_rate": 8.568024872506792e-07, |
| "loss": 0.1858, |
| "step": 1494 |
| }, |
| { |
| "epoch": 7.2926829268292686, |
| "grad_norm": 0.8241920471191406, |
| "learning_rate": 8.539170362785043e-07, |
| "loss": 0.0419, |
| "step": 1495 |
| }, |
| { |
| "epoch": 7.297560975609756, |
| "grad_norm": 0.8029516935348511, |
| "learning_rate": 8.510354511443975e-07, |
| "loss": 0.0793, |
| "step": 1496 |
| }, |
| { |
| "epoch": 7.302439024390244, |
| "grad_norm": 0.7739546895027161, |
| "learning_rate": 8.48157738615784e-07, |
| "loss": 0.0646, |
| "step": 1497 |
| }, |
| { |
| "epoch": 7.307317073170732, |
| "grad_norm": 0.6402938961982727, |
| "learning_rate": 8.452839054509926e-07, |
| "loss": 0.0686, |
| "step": 1498 |
| }, |
| { |
| "epoch": 7.31219512195122, |
| "grad_norm": 2.102001667022705, |
| "learning_rate": 8.42413958399241e-07, |
| "loss": 0.089, |
| "step": 1499 |
| }, |
| { |
| "epoch": 7.317073170731708, |
| "grad_norm": 0.9000923037528992, |
| "learning_rate": 8.39547904200623e-07, |
| "loss": 0.0704, |
| "step": 1500 |
| }, |
| { |
| "epoch": 7.321951219512195, |
| "grad_norm": 0.7872455716133118, |
| "learning_rate": 8.366857495860869e-07, |
| "loss": 0.0642, |
| "step": 1501 |
| }, |
| { |
| "epoch": 7.326829268292683, |
| "grad_norm": 1.1488183736801147, |
| "learning_rate": 8.338275012774247e-07, |
| "loss": 0.0603, |
| "step": 1502 |
| }, |
| { |
| "epoch": 7.331707317073171, |
| "grad_norm": 0.929627001285553, |
| "learning_rate": 8.309731659872522e-07, |
| "loss": 0.1138, |
| "step": 1503 |
| }, |
| { |
| "epoch": 7.336585365853659, |
| "grad_norm": 0.9145927429199219, |
| "learning_rate": 8.281227504189992e-07, |
| "loss": 0.1613, |
| "step": 1504 |
| }, |
| { |
| "epoch": 7.341463414634147, |
| "grad_norm": 0.7761052846908569, |
| "learning_rate": 8.252762612668869e-07, |
| "loss": 0.0843, |
| "step": 1505 |
| }, |
| { |
| "epoch": 7.3463414634146345, |
| "grad_norm": 1.2363475561141968, |
| "learning_rate": 8.224337052159154e-07, |
| "loss": 0.0786, |
| "step": 1506 |
| }, |
| { |
| "epoch": 7.351219512195122, |
| "grad_norm": 1.2809274196624756, |
| "learning_rate": 8.195950889418503e-07, |
| "loss": 0.1432, |
| "step": 1507 |
| }, |
| { |
| "epoch": 7.35609756097561, |
| "grad_norm": 1.4146350622177124, |
| "learning_rate": 8.167604191112021e-07, |
| "loss": 0.2068, |
| "step": 1508 |
| }, |
| { |
| "epoch": 7.360975609756098, |
| "grad_norm": 1.1985394954681396, |
| "learning_rate": 8.139297023812131e-07, |
| "loss": 0.1531, |
| "step": 1509 |
| }, |
| { |
| "epoch": 7.365853658536586, |
| "grad_norm": 0.9208254218101501, |
| "learning_rate": 8.111029453998448e-07, |
| "loss": 0.0809, |
| "step": 1510 |
| }, |
| { |
| "epoch": 7.3707317073170735, |
| "grad_norm": 1.5169329643249512, |
| "learning_rate": 8.082801548057553e-07, |
| "loss": 0.122, |
| "step": 1511 |
| }, |
| { |
| "epoch": 7.375609756097561, |
| "grad_norm": 0.702643632888794, |
| "learning_rate": 8.05461337228289e-07, |
| "loss": 0.0567, |
| "step": 1512 |
| }, |
| { |
| "epoch": 7.380487804878049, |
| "grad_norm": 0.833108127117157, |
| "learning_rate": 8.026464992874617e-07, |
| "loss": 0.0791, |
| "step": 1513 |
| }, |
| { |
| "epoch": 7.385365853658537, |
| "grad_norm": 0.5722957253456116, |
| "learning_rate": 7.998356475939398e-07, |
| "loss": 0.0135, |
| "step": 1514 |
| }, |
| { |
| "epoch": 7.390243902439025, |
| "grad_norm": 0.6496143341064453, |
| "learning_rate": 7.970287887490289e-07, |
| "loss": 0.0336, |
| "step": 1515 |
| }, |
| { |
| "epoch": 7.3951219512195125, |
| "grad_norm": 0.5775876641273499, |
| "learning_rate": 7.942259293446594e-07, |
| "loss": 0.0192, |
| "step": 1516 |
| }, |
| { |
| "epoch": 7.4, |
| "grad_norm": 0.8442699909210205, |
| "learning_rate": 7.914270759633669e-07, |
| "loss": 0.073, |
| "step": 1517 |
| }, |
| { |
| "epoch": 7.404878048780488, |
| "grad_norm": 0.5718724131584167, |
| "learning_rate": 7.886322351782782e-07, |
| "loss": 0.0201, |
| "step": 1518 |
| }, |
| { |
| "epoch": 7.409756097560976, |
| "grad_norm": 1.0626249313354492, |
| "learning_rate": 7.858414135530995e-07, |
| "loss": 0.138, |
| "step": 1519 |
| }, |
| { |
| "epoch": 7.414634146341464, |
| "grad_norm": 1.07315993309021, |
| "learning_rate": 7.83054617642095e-07, |
| "loss": 0.092, |
| "step": 1520 |
| }, |
| { |
| "epoch": 7.419512195121952, |
| "grad_norm": 1.1001923084259033, |
| "learning_rate": 7.802718539900761e-07, |
| "loss": 0.1063, |
| "step": 1521 |
| }, |
| { |
| "epoch": 7.424390243902439, |
| "grad_norm": 0.6773379445075989, |
| "learning_rate": 7.774931291323826e-07, |
| "loss": 0.0586, |
| "step": 1522 |
| }, |
| { |
| "epoch": 7.429268292682927, |
| "grad_norm": 1.2498868703842163, |
| "learning_rate": 7.747184495948723e-07, |
| "loss": 0.3014, |
| "step": 1523 |
| }, |
| { |
| "epoch": 7.434146341463415, |
| "grad_norm": 1.6956262588500977, |
| "learning_rate": 7.719478218939e-07, |
| "loss": 0.1645, |
| "step": 1524 |
| }, |
| { |
| "epoch": 7.439024390243903, |
| "grad_norm": 0.733025848865509, |
| "learning_rate": 7.691812525363044e-07, |
| "loss": 0.0463, |
| "step": 1525 |
| }, |
| { |
| "epoch": 7.443902439024391, |
| "grad_norm": 0.8861591219902039, |
| "learning_rate": 7.66418748019396e-07, |
| "loss": 0.0616, |
| "step": 1526 |
| }, |
| { |
| "epoch": 7.4487804878048784, |
| "grad_norm": 0.6299625039100647, |
| "learning_rate": 7.636603148309363e-07, |
| "loss": 0.0386, |
| "step": 1527 |
| }, |
| { |
| "epoch": 7.453658536585366, |
| "grad_norm": 1.0957764387130737, |
| "learning_rate": 7.609059594491253e-07, |
| "loss": 0.1015, |
| "step": 1528 |
| }, |
| { |
| "epoch": 7.458536585365854, |
| "grad_norm": 1.3109453916549683, |
| "learning_rate": 7.581556883425886e-07, |
| "loss": 0.3966, |
| "step": 1529 |
| }, |
| { |
| "epoch": 7.463414634146342, |
| "grad_norm": 1.3401238918304443, |
| "learning_rate": 7.55409507970358e-07, |
| "loss": 0.2687, |
| "step": 1530 |
| }, |
| { |
| "epoch": 7.46829268292683, |
| "grad_norm": 0.6099500060081482, |
| "learning_rate": 7.526674247818569e-07, |
| "loss": 0.0355, |
| "step": 1531 |
| }, |
| { |
| "epoch": 7.473170731707317, |
| "grad_norm": 1.170320987701416, |
| "learning_rate": 7.499294452168904e-07, |
| "loss": 0.1753, |
| "step": 1532 |
| }, |
| { |
| "epoch": 7.478048780487805, |
| "grad_norm": 0.9354384541511536, |
| "learning_rate": 7.471955757056227e-07, |
| "loss": 0.0812, |
| "step": 1533 |
| }, |
| { |
| "epoch": 7.482926829268292, |
| "grad_norm": 0.9029474854469299, |
| "learning_rate": 7.444658226685656e-07, |
| "loss": 0.055, |
| "step": 1534 |
| }, |
| { |
| "epoch": 7.487804878048781, |
| "grad_norm": 1.1110976934432983, |
| "learning_rate": 7.417401925165666e-07, |
| "loss": 0.1035, |
| "step": 1535 |
| }, |
| { |
| "epoch": 7.492682926829268, |
| "grad_norm": 0.7902478575706482, |
| "learning_rate": 7.390186916507869e-07, |
| "loss": 0.0745, |
| "step": 1536 |
| }, |
| { |
| "epoch": 7.4975609756097565, |
| "grad_norm": 0.5375012159347534, |
| "learning_rate": 7.363013264626914e-07, |
| "loss": 0.0206, |
| "step": 1537 |
| }, |
| { |
| "epoch": 7.5024390243902435, |
| "grad_norm": 1.016384243965149, |
| "learning_rate": 7.335881033340334e-07, |
| "loss": 0.1809, |
| "step": 1538 |
| }, |
| { |
| "epoch": 7.507317073170732, |
| "grad_norm": 1.161800742149353, |
| "learning_rate": 7.308790286368373e-07, |
| "loss": 0.1462, |
| "step": 1539 |
| }, |
| { |
| "epoch": 7.512195121951219, |
| "grad_norm": 1.2205479145050049, |
| "learning_rate": 7.281741087333846e-07, |
| "loss": 0.2044, |
| "step": 1540 |
| }, |
| { |
| "epoch": 7.517073170731708, |
| "grad_norm": 1.619023323059082, |
| "learning_rate": 7.254733499761993e-07, |
| "loss": 0.1539, |
| "step": 1541 |
| }, |
| { |
| "epoch": 7.521951219512195, |
| "grad_norm": 1.1716769933700562, |
| "learning_rate": 7.22776758708035e-07, |
| "loss": 0.2405, |
| "step": 1542 |
| }, |
| { |
| "epoch": 7.526829268292683, |
| "grad_norm": 1.1175401210784912, |
| "learning_rate": 7.200843412618555e-07, |
| "loss": 0.1471, |
| "step": 1543 |
| }, |
| { |
| "epoch": 7.53170731707317, |
| "grad_norm": 0.9348238706588745, |
| "learning_rate": 7.173961039608227e-07, |
| "loss": 0.0995, |
| "step": 1544 |
| }, |
| { |
| "epoch": 7.536585365853659, |
| "grad_norm": 0.9532445669174194, |
| "learning_rate": 7.147120531182828e-07, |
| "loss": 0.1762, |
| "step": 1545 |
| }, |
| { |
| "epoch": 7.541463414634146, |
| "grad_norm": 0.821707010269165, |
| "learning_rate": 7.120321950377487e-07, |
| "loss": 0.0363, |
| "step": 1546 |
| }, |
| { |
| "epoch": 7.546341463414635, |
| "grad_norm": 0.8796511292457581, |
| "learning_rate": 7.093565360128863e-07, |
| "loss": 0.0725, |
| "step": 1547 |
| }, |
| { |
| "epoch": 7.5512195121951216, |
| "grad_norm": 0.9664599299430847, |
| "learning_rate": 7.066850823275024e-07, |
| "loss": 0.098, |
| "step": 1548 |
| }, |
| { |
| "epoch": 7.55609756097561, |
| "grad_norm": 1.4284769296646118, |
| "learning_rate": 7.040178402555245e-07, |
| "loss": 0.0962, |
| "step": 1549 |
| }, |
| { |
| "epoch": 7.560975609756097, |
| "grad_norm": 0.8095406293869019, |
| "learning_rate": 7.013548160609901e-07, |
| "loss": 0.0726, |
| "step": 1550 |
| }, |
| { |
| "epoch": 7.565853658536585, |
| "grad_norm": 0.920695960521698, |
| "learning_rate": 6.986960159980327e-07, |
| "loss": 0.1442, |
| "step": 1551 |
| }, |
| { |
| "epoch": 7.570731707317073, |
| "grad_norm": 1.2659902572631836, |
| "learning_rate": 6.960414463108631e-07, |
| "loss": 0.2081, |
| "step": 1552 |
| }, |
| { |
| "epoch": 7.575609756097561, |
| "grad_norm": 0.9657885432243347, |
| "learning_rate": 6.933911132337575e-07, |
| "loss": 0.0971, |
| "step": 1553 |
| }, |
| { |
| "epoch": 7.580487804878048, |
| "grad_norm": 0.8895794153213501, |
| "learning_rate": 6.907450229910443e-07, |
| "loss": 0.161, |
| "step": 1554 |
| }, |
| { |
| "epoch": 7.585365853658536, |
| "grad_norm": 0.7729442119598389, |
| "learning_rate": 6.881031817970848e-07, |
| "loss": 0.0582, |
| "step": 1555 |
| }, |
| { |
| "epoch": 7.590243902439024, |
| "grad_norm": 1.2856751680374146, |
| "learning_rate": 6.854655958562625e-07, |
| "loss": 0.3543, |
| "step": 1556 |
| }, |
| { |
| "epoch": 7.595121951219512, |
| "grad_norm": 0.7980842590332031, |
| "learning_rate": 6.82832271362969e-07, |
| "loss": 0.0522, |
| "step": 1557 |
| }, |
| { |
| "epoch": 7.6, |
| "grad_norm": 0.8392421007156372, |
| "learning_rate": 6.802032145015855e-07, |
| "loss": 0.0934, |
| "step": 1558 |
| }, |
| { |
| "epoch": 7.6048780487804875, |
| "grad_norm": 1.5677391290664673, |
| "learning_rate": 6.775784314464717e-07, |
| "loss": 0.1563, |
| "step": 1559 |
| }, |
| { |
| "epoch": 7.609756097560975, |
| "grad_norm": 0.9202232956886292, |
| "learning_rate": 6.749579283619492e-07, |
| "loss": 0.1134, |
| "step": 1560 |
| }, |
| { |
| "epoch": 7.614634146341463, |
| "grad_norm": 0.761900007724762, |
| "learning_rate": 6.723417114022907e-07, |
| "loss": 0.0782, |
| "step": 1561 |
| }, |
| { |
| "epoch": 7.619512195121951, |
| "grad_norm": 1.2099084854125977, |
| "learning_rate": 6.697297867117e-07, |
| "loss": 0.1507, |
| "step": 1562 |
| }, |
| { |
| "epoch": 7.624390243902439, |
| "grad_norm": 1.1177504062652588, |
| "learning_rate": 6.671221604243014e-07, |
| "loss": 0.1404, |
| "step": 1563 |
| }, |
| { |
| "epoch": 7.6292682926829265, |
| "grad_norm": 1.752407431602478, |
| "learning_rate": 6.645188386641257e-07, |
| "loss": 0.3834, |
| "step": 1564 |
| }, |
| { |
| "epoch": 7.634146341463414, |
| "grad_norm": 0.5573866367340088, |
| "learning_rate": 6.61919827545093e-07, |
| "loss": 0.0228, |
| "step": 1565 |
| }, |
| { |
| "epoch": 7.639024390243902, |
| "grad_norm": 1.4471404552459717, |
| "learning_rate": 6.593251331709993e-07, |
| "loss": 0.3744, |
| "step": 1566 |
| }, |
| { |
| "epoch": 7.64390243902439, |
| "grad_norm": 1.831053376197815, |
| "learning_rate": 6.567347616355049e-07, |
| "loss": 0.3253, |
| "step": 1567 |
| }, |
| { |
| "epoch": 7.648780487804878, |
| "grad_norm": 0.6713041663169861, |
| "learning_rate": 6.541487190221163e-07, |
| "loss": 0.0339, |
| "step": 1568 |
| }, |
| { |
| "epoch": 7.6536585365853655, |
| "grad_norm": 0.6553516387939453, |
| "learning_rate": 6.515670114041725e-07, |
| "loss": 0.0308, |
| "step": 1569 |
| }, |
| { |
| "epoch": 7.658536585365853, |
| "grad_norm": 0.8251855373382568, |
| "learning_rate": 6.489896448448349e-07, |
| "loss": 0.0938, |
| "step": 1570 |
| }, |
| { |
| "epoch": 7.663414634146341, |
| "grad_norm": 1.358507752418518, |
| "learning_rate": 6.464166253970672e-07, |
| "loss": 0.2063, |
| "step": 1571 |
| }, |
| { |
| "epoch": 7.668292682926829, |
| "grad_norm": 0.5880959033966064, |
| "learning_rate": 6.43847959103624e-07, |
| "loss": 0.0353, |
| "step": 1572 |
| }, |
| { |
| "epoch": 7.673170731707317, |
| "grad_norm": 1.2899762392044067, |
| "learning_rate": 6.412836519970383e-07, |
| "loss": 0.1629, |
| "step": 1573 |
| }, |
| { |
| "epoch": 7.678048780487805, |
| "grad_norm": 0.645285427570343, |
| "learning_rate": 6.387237100996041e-07, |
| "loss": 0.029, |
| "step": 1574 |
| }, |
| { |
| "epoch": 7.682926829268292, |
| "grad_norm": 0.8590375185012817, |
| "learning_rate": 6.361681394233631e-07, |
| "loss": 0.1296, |
| "step": 1575 |
| }, |
| { |
| "epoch": 7.68780487804878, |
| "grad_norm": 0.7912958264350891, |
| "learning_rate": 6.336169459700933e-07, |
| "loss": 0.0744, |
| "step": 1576 |
| }, |
| { |
| "epoch": 7.692682926829268, |
| "grad_norm": 1.0610671043395996, |
| "learning_rate": 6.310701357312909e-07, |
| "loss": 0.1001, |
| "step": 1577 |
| }, |
| { |
| "epoch": 7.697560975609756, |
| "grad_norm": 0.9412058591842651, |
| "learning_rate": 6.285277146881588e-07, |
| "loss": 0.045, |
| "step": 1578 |
| }, |
| { |
| "epoch": 7.702439024390244, |
| "grad_norm": 0.9780194759368896, |
| "learning_rate": 6.259896888115904e-07, |
| "loss": 0.1099, |
| "step": 1579 |
| }, |
| { |
| "epoch": 7.7073170731707314, |
| "grad_norm": 1.0854240655899048, |
| "learning_rate": 6.234560640621606e-07, |
| "loss": 0.1211, |
| "step": 1580 |
| }, |
| { |
| "epoch": 7.712195121951219, |
| "grad_norm": 0.7268218994140625, |
| "learning_rate": 6.209268463901047e-07, |
| "loss": 0.029, |
| "step": 1581 |
| }, |
| { |
| "epoch": 7.717073170731707, |
| "grad_norm": 0.802086353302002, |
| "learning_rate": 6.184020417353084e-07, |
| "loss": 0.0714, |
| "step": 1582 |
| }, |
| { |
| "epoch": 7.721951219512195, |
| "grad_norm": 1.4602060317993164, |
| "learning_rate": 6.158816560272962e-07, |
| "loss": 0.2754, |
| "step": 1583 |
| }, |
| { |
| "epoch": 7.726829268292683, |
| "grad_norm": 1.6787465810775757, |
| "learning_rate": 6.133656951852113e-07, |
| "loss": 0.3757, |
| "step": 1584 |
| }, |
| { |
| "epoch": 7.7317073170731705, |
| "grad_norm": 0.8407995700836182, |
| "learning_rate": 6.10854165117806e-07, |
| "loss": 0.0656, |
| "step": 1585 |
| }, |
| { |
| "epoch": 7.736585365853658, |
| "grad_norm": 0.863117516040802, |
| "learning_rate": 6.083470717234285e-07, |
| "loss": 0.059, |
| "step": 1586 |
| }, |
| { |
| "epoch": 7.741463414634146, |
| "grad_norm": 0.8231815099716187, |
| "learning_rate": 6.058444208900061e-07, |
| "loss": 0.0688, |
| "step": 1587 |
| }, |
| { |
| "epoch": 7.746341463414634, |
| "grad_norm": 0.5683103203773499, |
| "learning_rate": 6.033462184950317e-07, |
| "loss": 0.0206, |
| "step": 1588 |
| }, |
| { |
| "epoch": 7.751219512195122, |
| "grad_norm": 0.9569870233535767, |
| "learning_rate": 6.008524704055535e-07, |
| "loss": 0.0697, |
| "step": 1589 |
| }, |
| { |
| "epoch": 7.7560975609756095, |
| "grad_norm": 0.93723064661026, |
| "learning_rate": 5.983631824781572e-07, |
| "loss": 0.1364, |
| "step": 1590 |
| }, |
| { |
| "epoch": 7.760975609756097, |
| "grad_norm": 0.9307323098182678, |
| "learning_rate": 5.95878360558953e-07, |
| "loss": 0.0888, |
| "step": 1591 |
| }, |
| { |
| "epoch": 7.765853658536585, |
| "grad_norm": 0.9898451566696167, |
| "learning_rate": 5.933980104835652e-07, |
| "loss": 0.1927, |
| "step": 1592 |
| }, |
| { |
| "epoch": 7.770731707317073, |
| "grad_norm": 1.1425596475601196, |
| "learning_rate": 5.909221380771132e-07, |
| "loss": 0.1798, |
| "step": 1593 |
| }, |
| { |
| "epoch": 7.775609756097561, |
| "grad_norm": 1.2097892761230469, |
| "learning_rate": 5.884507491542024e-07, |
| "loss": 0.167, |
| "step": 1594 |
| }, |
| { |
| "epoch": 7.780487804878049, |
| "grad_norm": 0.7511147856712341, |
| "learning_rate": 5.859838495189068e-07, |
| "loss": 0.052, |
| "step": 1595 |
| }, |
| { |
| "epoch": 7.785365853658536, |
| "grad_norm": 1.0986175537109375, |
| "learning_rate": 5.835214449647602e-07, |
| "loss": 0.1336, |
| "step": 1596 |
| }, |
| { |
| "epoch": 7.790243902439024, |
| "grad_norm": 0.8146430850028992, |
| "learning_rate": 5.810635412747373e-07, |
| "loss": 0.0657, |
| "step": 1597 |
| }, |
| { |
| "epoch": 7.795121951219512, |
| "grad_norm": 0.6946802735328674, |
| "learning_rate": 5.786101442212422e-07, |
| "loss": 0.0503, |
| "step": 1598 |
| }, |
| { |
| "epoch": 7.8, |
| "grad_norm": 1.2120875120162964, |
| "learning_rate": 5.761612595660979e-07, |
| "loss": 0.2715, |
| "step": 1599 |
| }, |
| { |
| "epoch": 7.804878048780488, |
| "grad_norm": 0.6051833629608154, |
| "learning_rate": 5.737168930605272e-07, |
| "loss": 0.0212, |
| "step": 1600 |
| }, |
| { |
| "epoch": 7.809756097560975, |
| "grad_norm": 0.7571241855621338, |
| "learning_rate": 5.712770504451426e-07, |
| "loss": 0.0686, |
| "step": 1601 |
| }, |
| { |
| "epoch": 7.814634146341463, |
| "grad_norm": 1.5256556272506714, |
| "learning_rate": 5.688417374499336e-07, |
| "loss": 0.1827, |
| "step": 1602 |
| }, |
| { |
| "epoch": 7.819512195121951, |
| "grad_norm": 1.1259573698043823, |
| "learning_rate": 5.664109597942504e-07, |
| "loss": 0.0728, |
| "step": 1603 |
| }, |
| { |
| "epoch": 7.824390243902439, |
| "grad_norm": 1.527320146560669, |
| "learning_rate": 5.639847231867917e-07, |
| "loss": 0.3322, |
| "step": 1604 |
| }, |
| { |
| "epoch": 7.829268292682927, |
| "grad_norm": 1.0056272745132446, |
| "learning_rate": 5.61563033325594e-07, |
| "loss": 0.1727, |
| "step": 1605 |
| }, |
| { |
| "epoch": 7.8341463414634145, |
| "grad_norm": 1.0396225452423096, |
| "learning_rate": 5.591458958980123e-07, |
| "loss": 0.0781, |
| "step": 1606 |
| }, |
| { |
| "epoch": 7.839024390243902, |
| "grad_norm": 1.085661768913269, |
| "learning_rate": 5.567333165807115e-07, |
| "loss": 0.144, |
| "step": 1607 |
| }, |
| { |
| "epoch": 7.84390243902439, |
| "grad_norm": 0.5520138740539551, |
| "learning_rate": 5.543253010396538e-07, |
| "loss": 0.038, |
| "step": 1608 |
| }, |
| { |
| "epoch": 7.848780487804878, |
| "grad_norm": 1.343543291091919, |
| "learning_rate": 5.519218549300806e-07, |
| "loss": 0.1562, |
| "step": 1609 |
| }, |
| { |
| "epoch": 7.853658536585366, |
| "grad_norm": 1.2264282703399658, |
| "learning_rate": 5.495229838965021e-07, |
| "loss": 0.1926, |
| "step": 1610 |
| }, |
| { |
| "epoch": 7.8585365853658535, |
| "grad_norm": 0.854395866394043, |
| "learning_rate": 5.471286935726866e-07, |
| "loss": 0.0471, |
| "step": 1611 |
| }, |
| { |
| "epoch": 7.863414634146341, |
| "grad_norm": 1.2901803255081177, |
| "learning_rate": 5.447389895816416e-07, |
| "loss": 0.2489, |
| "step": 1612 |
| }, |
| { |
| "epoch": 7.868292682926829, |
| "grad_norm": 1.7009903192520142, |
| "learning_rate": 5.423538775356049e-07, |
| "loss": 0.335, |
| "step": 1613 |
| }, |
| { |
| "epoch": 7.873170731707317, |
| "grad_norm": 0.8392524719238281, |
| "learning_rate": 5.399733630360287e-07, |
| "loss": 0.0951, |
| "step": 1614 |
| }, |
| { |
| "epoch": 7.878048780487805, |
| "grad_norm": 1.0800468921661377, |
| "learning_rate": 5.375974516735713e-07, |
| "loss": 0.1884, |
| "step": 1615 |
| }, |
| { |
| "epoch": 7.882926829268293, |
| "grad_norm": 0.9787384271621704, |
| "learning_rate": 5.352261490280767e-07, |
| "loss": 0.0705, |
| "step": 1616 |
| }, |
| { |
| "epoch": 7.88780487804878, |
| "grad_norm": 1.0949972867965698, |
| "learning_rate": 5.328594606685661e-07, |
| "loss": 0.0589, |
| "step": 1617 |
| }, |
| { |
| "epoch": 7.892682926829268, |
| "grad_norm": 0.8980702757835388, |
| "learning_rate": 5.304973921532264e-07, |
| "loss": 0.1445, |
| "step": 1618 |
| }, |
| { |
| "epoch": 7.897560975609756, |
| "grad_norm": 0.945303738117218, |
| "learning_rate": 5.281399490293923e-07, |
| "loss": 0.0972, |
| "step": 1619 |
| }, |
| { |
| "epoch": 7.902439024390244, |
| "grad_norm": 1.1684542894363403, |
| "learning_rate": 5.257871368335357e-07, |
| "loss": 0.123, |
| "step": 1620 |
| }, |
| { |
| "epoch": 7.907317073170732, |
| "grad_norm": 0.9429066181182861, |
| "learning_rate": 5.234389610912552e-07, |
| "loss": 0.0194, |
| "step": 1621 |
| }, |
| { |
| "epoch": 7.912195121951219, |
| "grad_norm": 1.167583703994751, |
| "learning_rate": 5.210954273172578e-07, |
| "loss": 0.1536, |
| "step": 1622 |
| }, |
| { |
| "epoch": 7.917073170731707, |
| "grad_norm": 0.9684360027313232, |
| "learning_rate": 5.187565410153497e-07, |
| "loss": 0.1505, |
| "step": 1623 |
| }, |
| { |
| "epoch": 7.921951219512195, |
| "grad_norm": 1.0447014570236206, |
| "learning_rate": 5.164223076784239e-07, |
| "loss": 0.1055, |
| "step": 1624 |
| }, |
| { |
| "epoch": 7.926829268292683, |
| "grad_norm": 1.1968408823013306, |
| "learning_rate": 5.14092732788444e-07, |
| "loss": 0.2419, |
| "step": 1625 |
| }, |
| { |
| "epoch": 7.931707317073171, |
| "grad_norm": 1.3599355220794678, |
| "learning_rate": 5.117678218164337e-07, |
| "loss": 0.0725, |
| "step": 1626 |
| }, |
| { |
| "epoch": 7.9365853658536585, |
| "grad_norm": 0.838928759098053, |
| "learning_rate": 5.094475802224644e-07, |
| "loss": 0.1036, |
| "step": 1627 |
| }, |
| { |
| "epoch": 7.941463414634146, |
| "grad_norm": 0.7429474592208862, |
| "learning_rate": 5.071320134556404e-07, |
| "loss": 0.0374, |
| "step": 1628 |
| }, |
| { |
| "epoch": 7.946341463414634, |
| "grad_norm": 0.8248745203018188, |
| "learning_rate": 5.048211269540868e-07, |
| "loss": 0.0732, |
| "step": 1629 |
| }, |
| { |
| "epoch": 7.951219512195122, |
| "grad_norm": 0.979279100894928, |
| "learning_rate": 5.025149261449391e-07, |
| "loss": 0.1127, |
| "step": 1630 |
| }, |
| { |
| "epoch": 7.95609756097561, |
| "grad_norm": 1.0153231620788574, |
| "learning_rate": 5.002134164443262e-07, |
| "loss": 0.0743, |
| "step": 1631 |
| }, |
| { |
| "epoch": 7.9609756097560975, |
| "grad_norm": 0.6215126514434814, |
| "learning_rate": 4.979166032573607e-07, |
| "loss": 0.0202, |
| "step": 1632 |
| }, |
| { |
| "epoch": 7.965853658536585, |
| "grad_norm": 1.2080820798873901, |
| "learning_rate": 4.956244919781247e-07, |
| "loss": 0.2897, |
| "step": 1633 |
| }, |
| { |
| "epoch": 7.970731707317073, |
| "grad_norm": 0.7493009567260742, |
| "learning_rate": 4.933370879896604e-07, |
| "loss": 0.0412, |
| "step": 1634 |
| }, |
| { |
| "epoch": 7.975609756097561, |
| "grad_norm": 0.9477584958076477, |
| "learning_rate": 4.91054396663952e-07, |
| "loss": 0.0618, |
| "step": 1635 |
| }, |
| { |
| "epoch": 7.980487804878049, |
| "grad_norm": 1.0992841720581055, |
| "learning_rate": 4.887764233619163e-07, |
| "loss": 0.0868, |
| "step": 1636 |
| }, |
| { |
| "epoch": 7.985365853658537, |
| "grad_norm": 0.7600829601287842, |
| "learning_rate": 4.865031734333919e-07, |
| "loss": 0.0511, |
| "step": 1637 |
| }, |
| { |
| "epoch": 7.990243902439024, |
| "grad_norm": 1.106069564819336, |
| "learning_rate": 4.842346522171226e-07, |
| "loss": 0.0927, |
| "step": 1638 |
| }, |
| { |
| "epoch": 7.995121951219512, |
| "grad_norm": 0.8861885070800781, |
| "learning_rate": 4.819708650407467e-07, |
| "loss": 0.1585, |
| "step": 1639 |
| }, |
| { |
| "epoch": 8.0, |
| "grad_norm": 0.8401235938072205, |
| "learning_rate": 4.797118172207863e-07, |
| "loss": 0.0683, |
| "step": 1640 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2050, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 10, |
| "save_steps": 208, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.837735309078692e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |