| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.0, |
| "eval_steps": 500, |
| "global_step": 6828, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.005858230814294083, |
| "grad_norm": 10.993535995483398, |
| "learning_rate": 1.9978910369068542e-05, |
| "loss": 3.7814, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.011716461628588167, |
| "grad_norm": 6.300055980682373, |
| "learning_rate": 1.9955477445811366e-05, |
| "loss": 3.9016, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.01757469244288225, |
| "grad_norm": 14.147106170654297, |
| "learning_rate": 1.993204452255419e-05, |
| "loss": 3.6143, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.023432923257176334, |
| "grad_norm": 9.435710906982422, |
| "learning_rate": 1.9908611599297014e-05, |
| "loss": 2.4102, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.029291154071470416, |
| "grad_norm": 10.944840431213379, |
| "learning_rate": 1.988517867603984e-05, |
| "loss": 2.179, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0351493848857645, |
| "grad_norm": 5.027509689331055, |
| "learning_rate": 1.9861745752782663e-05, |
| "loss": 2.0129, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.041007615700058585, |
| "grad_norm": 7.124016761779785, |
| "learning_rate": 1.9838312829525487e-05, |
| "loss": 1.3423, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.04686584651435267, |
| "grad_norm": 4.878211498260498, |
| "learning_rate": 1.981487990626831e-05, |
| "loss": 1.5739, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.05272407732864675, |
| "grad_norm": 2.8620986938476562, |
| "learning_rate": 1.9791446983011135e-05, |
| "loss": 1.3344, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.05858230814294083, |
| "grad_norm": 3.641845226287842, |
| "learning_rate": 1.9768014059753956e-05, |
| "loss": 1.3627, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.06444053895723492, |
| "grad_norm": 3.619720935821533, |
| "learning_rate": 1.974458113649678e-05, |
| "loss": 0.9266, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.070298769771529, |
| "grad_norm": 1.5297939777374268, |
| "learning_rate": 1.9721148213239604e-05, |
| "loss": 0.8823, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.07615700058582309, |
| "grad_norm": 1.3193559646606445, |
| "learning_rate": 1.9697715289982428e-05, |
| "loss": 0.724, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.08201523140011717, |
| "grad_norm": 3.3446288108825684, |
| "learning_rate": 1.967428236672525e-05, |
| "loss": 0.7263, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.08787346221441125, |
| "grad_norm": 4.514213562011719, |
| "learning_rate": 1.9650849443468073e-05, |
| "loss": 0.7383, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.09373169302870533, |
| "grad_norm": 1.2520763874053955, |
| "learning_rate": 1.9627416520210897e-05, |
| "loss": 0.658, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.09958992384299942, |
| "grad_norm": 1.4183530807495117, |
| "learning_rate": 1.960398359695372e-05, |
| "loss": 0.6493, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.1054481546572935, |
| "grad_norm": 2.3520781993865967, |
| "learning_rate": 1.9580550673696545e-05, |
| "loss": 0.5405, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.11130638547158758, |
| "grad_norm": 2.6773362159729004, |
| "learning_rate": 1.955711775043937e-05, |
| "loss": 0.6707, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.11716461628588166, |
| "grad_norm": 3.6037440299987793, |
| "learning_rate": 1.9533684827182193e-05, |
| "loss": 0.5618, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.12302284710017575, |
| "grad_norm": 4.3400702476501465, |
| "learning_rate": 1.9510251903925017e-05, |
| "loss": 0.5424, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.12888107791446984, |
| "grad_norm": 1.572972059249878, |
| "learning_rate": 1.948681898066784e-05, |
| "loss": 0.5249, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.1347393087287639, |
| "grad_norm": 1.511702537536621, |
| "learning_rate": 1.9463386057410662e-05, |
| "loss": 0.5116, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.140597539543058, |
| "grad_norm": 1.3979424238204956, |
| "learning_rate": 1.9439953134153486e-05, |
| "loss": 0.5342, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.14645577035735208, |
| "grad_norm": 2.983322858810425, |
| "learning_rate": 1.941652021089631e-05, |
| "loss": 0.3911, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.15231400117164617, |
| "grad_norm": 1.372771143913269, |
| "learning_rate": 1.9393087287639135e-05, |
| "loss": 0.3779, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.15817223198594024, |
| "grad_norm": 1.926331639289856, |
| "learning_rate": 1.936965436438196e-05, |
| "loss": 0.3879, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.16403046280023434, |
| "grad_norm": 1.2269700765609741, |
| "learning_rate": 1.9346221441124783e-05, |
| "loss": 0.4142, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.1698886936145284, |
| "grad_norm": 4.581326961517334, |
| "learning_rate": 1.9322788517867607e-05, |
| "loss": 0.3587, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.1757469244288225, |
| "grad_norm": 1.6644238233566284, |
| "learning_rate": 1.9299355594610428e-05, |
| "loss": 0.2604, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.18160515524311657, |
| "grad_norm": 1.3686414957046509, |
| "learning_rate": 1.9275922671353252e-05, |
| "loss": 0.2254, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.18746338605741067, |
| "grad_norm": 1.5756723880767822, |
| "learning_rate": 1.9252489748096076e-05, |
| "loss": 0.297, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.19332161687170474, |
| "grad_norm": 1.3246747255325317, |
| "learning_rate": 1.92290568248389e-05, |
| "loss": 0.2594, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.19917984768599883, |
| "grad_norm": 2.5869126319885254, |
| "learning_rate": 1.9205623901581724e-05, |
| "loss": 0.1844, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.2050380785002929, |
| "grad_norm": 1.37604558467865, |
| "learning_rate": 1.9182190978324548e-05, |
| "loss": 0.2867, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.210896309314587, |
| "grad_norm": 2.2903075218200684, |
| "learning_rate": 1.915875805506737e-05, |
| "loss": 0.2371, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.21675454012888107, |
| "grad_norm": 0.9721745252609253, |
| "learning_rate": 1.9135325131810193e-05, |
| "loss": 0.238, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.22261277094317516, |
| "grad_norm": 0.6926158666610718, |
| "learning_rate": 1.9111892208553017e-05, |
| "loss": 0.2084, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.22847100175746923, |
| "grad_norm": 3.8920700550079346, |
| "learning_rate": 1.908845928529584e-05, |
| "loss": 0.313, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.23432923257176333, |
| "grad_norm": 1.1392645835876465, |
| "learning_rate": 1.9065026362038665e-05, |
| "loss": 0.2973, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2401874633860574, |
| "grad_norm": 0.7737612128257751, |
| "learning_rate": 1.904159343878149e-05, |
| "loss": 0.1771, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2460456942003515, |
| "grad_norm": 0.5699288845062256, |
| "learning_rate": 1.9018160515524314e-05, |
| "loss": 0.1408, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.2519039250146456, |
| "grad_norm": 1.6235454082489014, |
| "learning_rate": 1.8994727592267138e-05, |
| "loss": 0.1686, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.2577621558289397, |
| "grad_norm": 3.0851707458496094, |
| "learning_rate": 1.8971294669009962e-05, |
| "loss": 0.1298, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.26362038664323373, |
| "grad_norm": 1.0552427768707275, |
| "learning_rate": 1.8947861745752786e-05, |
| "loss": 0.1288, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.2694786174575278, |
| "grad_norm": 1.6423840522766113, |
| "learning_rate": 1.892442882249561e-05, |
| "loss": 0.1995, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.2753368482718219, |
| "grad_norm": 0.3003721535205841, |
| "learning_rate": 1.890099589923843e-05, |
| "loss": 0.1002, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.281195079086116, |
| "grad_norm": 0.5576034784317017, |
| "learning_rate": 1.8877562975981255e-05, |
| "loss": 0.1311, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.28705330990041006, |
| "grad_norm": 0.36932089924812317, |
| "learning_rate": 1.885413005272408e-05, |
| "loss": 0.2413, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.29291154071470415, |
| "grad_norm": 0.4237634241580963, |
| "learning_rate": 1.8830697129466903e-05, |
| "loss": 0.0527, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.29876977152899825, |
| "grad_norm": 3.5395658016204834, |
| "learning_rate": 1.8807264206209724e-05, |
| "loss": 0.1513, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.30462800234329235, |
| "grad_norm": 0.28212159872055054, |
| "learning_rate": 1.8783831282952548e-05, |
| "loss": 0.0912, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.3104862331575864, |
| "grad_norm": 3.4626212120056152, |
| "learning_rate": 1.8760398359695372e-05, |
| "loss": 0.1459, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.3163444639718805, |
| "grad_norm": 1.6932200193405151, |
| "learning_rate": 1.8736965436438196e-05, |
| "loss": 0.1388, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.3222026947861746, |
| "grad_norm": 0.14610934257507324, |
| "learning_rate": 1.871353251318102e-05, |
| "loss": 0.1426, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.3280609256004687, |
| "grad_norm": 0.2995212972164154, |
| "learning_rate": 1.8690099589923845e-05, |
| "loss": 0.1387, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.3339191564147627, |
| "grad_norm": 0.7743491530418396, |
| "learning_rate": 1.866666666666667e-05, |
| "loss": 0.0986, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.3397773872290568, |
| "grad_norm": 4.253698825836182, |
| "learning_rate": 1.8643233743409493e-05, |
| "loss": 0.1312, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.3456356180433509, |
| "grad_norm": 1.1342084407806396, |
| "learning_rate": 1.8619800820152317e-05, |
| "loss": 0.0792, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.351493848857645, |
| "grad_norm": 0.500845730304718, |
| "learning_rate": 1.859636789689514e-05, |
| "loss": 0.2464, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.35735207967193905, |
| "grad_norm": 7.179585933685303, |
| "learning_rate": 1.8572934973637965e-05, |
| "loss": 0.1881, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.36321031048623315, |
| "grad_norm": 3.4145667552948, |
| "learning_rate": 1.8549502050380786e-05, |
| "loss": 0.2294, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.36906854130052724, |
| "grad_norm": 4.951934337615967, |
| "learning_rate": 1.852606912712361e-05, |
| "loss": 0.0984, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.37492677211482134, |
| "grad_norm": 3.63204288482666, |
| "learning_rate": 1.8502636203866434e-05, |
| "loss": 0.3341, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.38078500292911543, |
| "grad_norm": 3.267408609390259, |
| "learning_rate": 1.8479203280609258e-05, |
| "loss": 0.1523, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.3866432337434095, |
| "grad_norm": 3.1641011238098145, |
| "learning_rate": 1.8455770357352082e-05, |
| "loss": 0.286, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.39250146455770357, |
| "grad_norm": 1.006550669670105, |
| "learning_rate": 1.8432337434094903e-05, |
| "loss": 0.1675, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.39835969537199767, |
| "grad_norm": 0.17614607512950897, |
| "learning_rate": 1.8408904510837727e-05, |
| "loss": 0.1466, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.40421792618629176, |
| "grad_norm": 0.142693430185318, |
| "learning_rate": 1.838547158758055e-05, |
| "loss": 0.2919, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.4100761570005858, |
| "grad_norm": 3.1687896251678467, |
| "learning_rate": 1.8362038664323375e-05, |
| "loss": 0.2602, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.4159343878148799, |
| "grad_norm": 0.22907620668411255, |
| "learning_rate": 1.83386057410662e-05, |
| "loss": 0.1003, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.421792618629174, |
| "grad_norm": 0.19047817587852478, |
| "learning_rate": 1.8315172817809024e-05, |
| "loss": 0.1336, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.4276508494434681, |
| "grad_norm": 4.431445121765137, |
| "learning_rate": 1.8291739894551848e-05, |
| "loss": 0.1076, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.43350908025776214, |
| "grad_norm": 1.916579008102417, |
| "learning_rate": 1.8268306971294672e-05, |
| "loss": 0.1822, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.43936731107205623, |
| "grad_norm": 3.521374225616455, |
| "learning_rate": 1.8244874048037493e-05, |
| "loss": 0.0826, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.44522554188635033, |
| "grad_norm": 2.703446865081787, |
| "learning_rate": 1.8221441124780317e-05, |
| "loss": 0.18, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.4510837727006444, |
| "grad_norm": 0.11092668026685715, |
| "learning_rate": 1.819800820152314e-05, |
| "loss": 0.2272, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.45694200351493847, |
| "grad_norm": 0.1550651639699936, |
| "learning_rate": 1.8174575278265965e-05, |
| "loss": 0.2156, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.46280023432923256, |
| "grad_norm": 0.18351873755455017, |
| "learning_rate": 1.815114235500879e-05, |
| "loss": 0.0512, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.46865846514352666, |
| "grad_norm": 0.20050418376922607, |
| "learning_rate": 1.8127709431751613e-05, |
| "loss": 0.0394, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.47451669595782076, |
| "grad_norm": 3.0404205322265625, |
| "learning_rate": 1.8104276508494437e-05, |
| "loss": 0.1457, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.4803749267721148, |
| "grad_norm": 0.16257639229297638, |
| "learning_rate": 1.808084358523726e-05, |
| "loss": 0.173, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.4862331575864089, |
| "grad_norm": 0.5828198194503784, |
| "learning_rate": 1.8057410661980082e-05, |
| "loss": 0.094, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.492091388400703, |
| "grad_norm": 0.1483214646577835, |
| "learning_rate": 1.8033977738722906e-05, |
| "loss": 0.0408, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.4979496192149971, |
| "grad_norm": 0.07068105041980743, |
| "learning_rate": 1.801054481546573e-05, |
| "loss": 0.0934, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.5038078500292912, |
| "grad_norm": 0.6805324554443359, |
| "learning_rate": 1.7987111892208554e-05, |
| "loss": 0.0113, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.5096660808435852, |
| "grad_norm": 0.09847233444452286, |
| "learning_rate": 1.796367896895138e-05, |
| "loss": 0.2669, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.5155243116578794, |
| "grad_norm": 0.3944021761417389, |
| "learning_rate": 1.7940246045694203e-05, |
| "loss": 0.1015, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.5213825424721734, |
| "grad_norm": 0.17011871933937073, |
| "learning_rate": 1.7916813122437023e-05, |
| "loss": 0.1127, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.5272407732864675, |
| "grad_norm": 2.945100784301758, |
| "learning_rate": 1.7893380199179847e-05, |
| "loss": 0.1913, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.5330990041007616, |
| "grad_norm": 0.25906553864479065, |
| "learning_rate": 1.786994727592267e-05, |
| "loss": 0.0835, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.5389572349150556, |
| "grad_norm": 0.30626562237739563, |
| "learning_rate": 1.7846514352665496e-05, |
| "loss": 0.0305, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.5448154657293497, |
| "grad_norm": 0.1143951565027237, |
| "learning_rate": 1.782308142940832e-05, |
| "loss": 0.0216, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.5506736965436438, |
| "grad_norm": 0.19745256006717682, |
| "learning_rate": 1.7799648506151144e-05, |
| "loss": 0.1507, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.5565319273579379, |
| "grad_norm": 7.011041164398193, |
| "learning_rate": 1.7776215582893968e-05, |
| "loss": 0.3032, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.562390158172232, |
| "grad_norm": 3.46498966217041, |
| "learning_rate": 1.7752782659636792e-05, |
| "loss": 0.2152, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.5682483889865261, |
| "grad_norm": 0.17841269075870514, |
| "learning_rate": 1.7729349736379616e-05, |
| "loss": 0.0435, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.5741066198008201, |
| "grad_norm": 0.3201232850551605, |
| "learning_rate": 1.770591681312244e-05, |
| "loss": 0.0959, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.5799648506151143, |
| "grad_norm": 3.194690704345703, |
| "learning_rate": 1.7682483889865264e-05, |
| "loss": 0.1778, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.5858230814294083, |
| "grad_norm": 3.6589221954345703, |
| "learning_rate": 1.7659050966608085e-05, |
| "loss": 0.1359, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.5916813122437024, |
| "grad_norm": 8.663952827453613, |
| "learning_rate": 1.763561804335091e-05, |
| "loss": 0.1705, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.5975395430579965, |
| "grad_norm": 0.9029685854911804, |
| "learning_rate": 1.7612185120093733e-05, |
| "loss": 0.1045, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.6033977738722905, |
| "grad_norm": 0.06549205631017685, |
| "learning_rate": 1.7588752196836558e-05, |
| "loss": 0.0432, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.6092560046865847, |
| "grad_norm": 0.2381201982498169, |
| "learning_rate": 1.7565319273579378e-05, |
| "loss": 0.0923, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.6151142355008787, |
| "grad_norm": 0.07783389836549759, |
| "learning_rate": 1.7541886350322202e-05, |
| "loss": 0.0385, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.6209724663151728, |
| "grad_norm": 0.1266435831785202, |
| "learning_rate": 1.7518453427065027e-05, |
| "loss": 0.0282, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.6268306971294669, |
| "grad_norm": 0.20830880105495453, |
| "learning_rate": 1.749502050380785e-05, |
| "loss": 0.0758, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.632688927943761, |
| "grad_norm": 1.412645936012268, |
| "learning_rate": 1.7471587580550675e-05, |
| "loss": 0.1091, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.6385471587580551, |
| "grad_norm": 0.6468214988708496, |
| "learning_rate": 1.74481546572935e-05, |
| "loss": 0.1342, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.6444053895723492, |
| "grad_norm": 0.05657221004366875, |
| "learning_rate": 1.7424721734036323e-05, |
| "loss": 0.0898, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.6502636203866432, |
| "grad_norm": 0.18487469851970673, |
| "learning_rate": 1.7401288810779147e-05, |
| "loss": 0.0827, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.6561218512009374, |
| "grad_norm": 0.06991606205701828, |
| "learning_rate": 1.737785588752197e-05, |
| "loss": 0.147, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.6619800820152314, |
| "grad_norm": 0.3615979254245758, |
| "learning_rate": 1.7354422964264795e-05, |
| "loss": 0.0985, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.6678383128295254, |
| "grad_norm": 16.712085723876953, |
| "learning_rate": 1.733099004100762e-05, |
| "loss": 0.1162, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.6736965436438196, |
| "grad_norm": 0.044244520366191864, |
| "learning_rate": 1.730755711775044e-05, |
| "loss": 0.0973, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.6795547744581136, |
| "grad_norm": 2.611173391342163, |
| "learning_rate": 1.7284124194493264e-05, |
| "loss": 0.131, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.6854130052724078, |
| "grad_norm": 9.249836921691895, |
| "learning_rate": 1.726069127123609e-05, |
| "loss": 0.1265, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.6912712360867018, |
| "grad_norm": 4.1363301277160645, |
| "learning_rate": 1.7237258347978912e-05, |
| "loss": 0.1003, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.6971294669009959, |
| "grad_norm": 3.4619572162628174, |
| "learning_rate": 1.7213825424721737e-05, |
| "loss": 0.1756, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.70298769771529, |
| "grad_norm": 3.285961627960205, |
| "learning_rate": 1.7190392501464557e-05, |
| "loss": 0.2226, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.7088459285295841, |
| "grad_norm": 0.13242599368095398, |
| "learning_rate": 1.716695957820738e-05, |
| "loss": 0.1064, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.7147041593438781, |
| "grad_norm": 0.39351117610931396, |
| "learning_rate": 1.7143526654950206e-05, |
| "loss": 0.0712, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.7205623901581723, |
| "grad_norm": 3.50242018699646, |
| "learning_rate": 1.712009373169303e-05, |
| "loss": 0.1174, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.7264206209724663, |
| "grad_norm": 1.7381882667541504, |
| "learning_rate": 1.7096660808435854e-05, |
| "loss": 0.3288, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.7322788517867604, |
| "grad_norm": 0.6413008570671082, |
| "learning_rate": 1.7073227885178678e-05, |
| "loss": 0.2029, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.7381370826010545, |
| "grad_norm": 11.778146743774414, |
| "learning_rate": 1.7049794961921502e-05, |
| "loss": 0.1279, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.7439953134153485, |
| "grad_norm": 0.32977521419525146, |
| "learning_rate": 1.7026362038664326e-05, |
| "loss": 0.1154, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.7498535442296427, |
| "grad_norm": 0.11577194184064865, |
| "learning_rate": 1.7002929115407147e-05, |
| "loss": 0.1317, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.7557117750439367, |
| "grad_norm": 0.08130369335412979, |
| "learning_rate": 1.697949619214997e-05, |
| "loss": 0.0718, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.7615700058582309, |
| "grad_norm": 4.955254554748535, |
| "learning_rate": 1.6956063268892795e-05, |
| "loss": 0.1281, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.7674282366725249, |
| "grad_norm": 0.06785889714956284, |
| "learning_rate": 1.693263034563562e-05, |
| "loss": 0.1004, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.773286467486819, |
| "grad_norm": 0.1323424130678177, |
| "learning_rate": 1.6909197422378443e-05, |
| "loss": 0.1207, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.7791446983011131, |
| "grad_norm": 8.72974681854248, |
| "learning_rate": 1.6885764499121267e-05, |
| "loss": 0.1181, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.7850029291154071, |
| "grad_norm": 0.08895012736320496, |
| "learning_rate": 1.686233157586409e-05, |
| "loss": 0.0098, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.7908611599297012, |
| "grad_norm": 0.3387869894504547, |
| "learning_rate": 1.6838898652606916e-05, |
| "loss": 0.0319, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.7967193907439953, |
| "grad_norm": 0.06218031048774719, |
| "learning_rate": 1.681546572934974e-05, |
| "loss": 0.1521, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.8025776215582894, |
| "grad_norm": 0.0525616817176342, |
| "learning_rate": 1.679203280609256e-05, |
| "loss": 0.032, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.8084358523725835, |
| "grad_norm": 0.33692577481269836, |
| "learning_rate": 1.6768599882835385e-05, |
| "loss": 0.0106, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.8142940831868776, |
| "grad_norm": 3.903052806854248, |
| "learning_rate": 1.674516695957821e-05, |
| "loss": 0.1573, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.8201523140011716, |
| "grad_norm": 0.07920405268669128, |
| "learning_rate": 1.6721734036321033e-05, |
| "loss": 0.0089, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.8260105448154658, |
| "grad_norm": 2.7212400436401367, |
| "learning_rate": 1.6698301113063854e-05, |
| "loss": 0.2825, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.8318687756297598, |
| "grad_norm": 0.2885132133960724, |
| "learning_rate": 1.6674868189806678e-05, |
| "loss": 0.0744, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.8377270064440538, |
| "grad_norm": 0.04495526850223541, |
| "learning_rate": 1.6651435266549502e-05, |
| "loss": 0.0115, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.843585237258348, |
| "grad_norm": 0.15983568131923676, |
| "learning_rate": 1.6628002343292326e-05, |
| "loss": 0.03, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.849443468072642, |
| "grad_norm": 0.054607000201940536, |
| "learning_rate": 1.660456942003515e-05, |
| "loss": 0.0691, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.8553016988869362, |
| "grad_norm": 0.06109416112303734, |
| "learning_rate": 1.6581136496777974e-05, |
| "loss": 0.217, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.8611599297012302, |
| "grad_norm": 11.838920593261719, |
| "learning_rate": 1.6557703573520798e-05, |
| "loss": 0.2354, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.8670181605155243, |
| "grad_norm": 5.60699462890625, |
| "learning_rate": 1.6534270650263622e-05, |
| "loss": 0.1286, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.8728763913298184, |
| "grad_norm": 4.943994998931885, |
| "learning_rate": 1.6510837727006446e-05, |
| "loss": 0.1325, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.8787346221441125, |
| "grad_norm": 0.08744335919618607, |
| "learning_rate": 1.648740480374927e-05, |
| "loss": 0.1518, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.8845928529584065, |
| "grad_norm": 5.690299987792969, |
| "learning_rate": 1.6463971880492095e-05, |
| "loss": 0.1634, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.8904510837727007, |
| "grad_norm": 0.06222234293818474, |
| "learning_rate": 1.644053895723492e-05, |
| "loss": 0.1902, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.8963093145869947, |
| "grad_norm": 0.13232512772083282, |
| "learning_rate": 1.641710603397774e-05, |
| "loss": 0.0997, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.9021675454012889, |
| "grad_norm": 0.09595705568790436, |
| "learning_rate": 1.6393673110720564e-05, |
| "loss": 0.1137, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.9080257762155829, |
| "grad_norm": 3.798790693283081, |
| "learning_rate": 1.6370240187463388e-05, |
| "loss": 0.2348, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.9138840070298769, |
| "grad_norm": 6.075992107391357, |
| "learning_rate": 1.6346807264206212e-05, |
| "loss": 0.083, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.9197422378441711, |
| "grad_norm": 0.1521161049604416, |
| "learning_rate": 1.6323374340949033e-05, |
| "loss": 0.1863, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.9256004686584651, |
| "grad_norm": 4.295522689819336, |
| "learning_rate": 1.6299941417691857e-05, |
| "loss": 0.0441, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.9314586994727593, |
| "grad_norm": 11.862258911132812, |
| "learning_rate": 1.627650849443468e-05, |
| "loss": 0.1057, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.9373169302870533, |
| "grad_norm": 0.19827328622341156, |
| "learning_rate": 1.6253075571177505e-05, |
| "loss": 0.0422, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.9431751611013474, |
| "grad_norm": 0.05980195105075836, |
| "learning_rate": 1.622964264792033e-05, |
| "loss": 0.0119, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.9490333919156415, |
| "grad_norm": 5.003294944763184, |
| "learning_rate": 1.6206209724663153e-05, |
| "loss": 0.0994, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.9548916227299356, |
| "grad_norm": 2.8840978145599365, |
| "learning_rate": 1.6182776801405977e-05, |
| "loss": 0.1385, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.9607498535442296, |
| "grad_norm": 0.07565687596797943, |
| "learning_rate": 1.61593438781488e-05, |
| "loss": 0.0457, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.9666080843585237, |
| "grad_norm": 16.399011611938477, |
| "learning_rate": 1.6135910954891626e-05, |
| "loss": 0.1724, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.9724663151728178, |
| "grad_norm": 0.08199520409107208, |
| "learning_rate": 1.611247803163445e-05, |
| "loss": 0.1122, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.9783245459871119, |
| "grad_norm": 0.07179383933544159, |
| "learning_rate": 1.608904510837727e-05, |
| "loss": 0.1905, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.984182776801406, |
| "grad_norm": 3.4531142711639404, |
| "learning_rate": 1.6065612185120094e-05, |
| "loss": 0.2529, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.9900410076157, |
| "grad_norm": 3.2887964248657227, |
| "learning_rate": 1.604217926186292e-05, |
| "loss": 0.2152, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.9958992384299942, |
| "grad_norm": 0.08914665877819061, |
| "learning_rate": 1.6018746338605743e-05, |
| "loss": 0.0181, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.0017574692442883, |
| "grad_norm": 0.09250498563051224, |
| "learning_rate": 1.5995313415348567e-05, |
| "loss": 0.0127, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.0076157000585824, |
| "grad_norm": 3.05525541305542, |
| "learning_rate": 1.597188049209139e-05, |
| "loss": 0.1035, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.0134739308728764, |
| "grad_norm": 7.614686489105225, |
| "learning_rate": 1.594844756883421e-05, |
| "loss": 0.1496, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.0193321616871704, |
| "grad_norm": 0.10618708282709122, |
| "learning_rate": 1.5925014645577036e-05, |
| "loss": 0.0079, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.0251903925014645, |
| "grad_norm": 6.923569202423096, |
| "learning_rate": 1.590158172231986e-05, |
| "loss": 0.0851, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.0310486233157587, |
| "grad_norm": 5.463113307952881, |
| "learning_rate": 1.5878148799062684e-05, |
| "loss": 0.1242, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.0369068541300528, |
| "grad_norm": 0.06814823299646378, |
| "learning_rate": 1.5854715875805508e-05, |
| "loss": 0.1508, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.0427650849443468, |
| "grad_norm": 0.18510116636753082, |
| "learning_rate": 1.5831282952548332e-05, |
| "loss": 0.1044, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.0486233157586409, |
| "grad_norm": 7.552770614624023, |
| "learning_rate": 1.5807850029291156e-05, |
| "loss": 0.0893, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.054481546572935, |
| "grad_norm": 0.9201821684837341, |
| "learning_rate": 1.5784417106033977e-05, |
| "loss": 0.0783, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.060339777387229, |
| "grad_norm": 8.554439544677734, |
| "learning_rate": 1.57609841827768e-05, |
| "loss": 0.2055, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.0661980082015232, |
| "grad_norm": 0.0478215292096138, |
| "learning_rate": 1.5737551259519625e-05, |
| "loss": 0.051, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.0720562390158173, |
| "grad_norm": 3.4328463077545166, |
| "learning_rate": 1.571411833626245e-05, |
| "loss": 0.1924, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.0779144698301113, |
| "grad_norm": 0.7175261974334717, |
| "learning_rate": 1.5690685413005274e-05, |
| "loss": 0.0065, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.0837727006444053, |
| "grad_norm": 0.20879748463630676, |
| "learning_rate": 1.5667252489748098e-05, |
| "loss": 0.006, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.0896309314586994, |
| "grad_norm": 0.04610930755734444, |
| "learning_rate": 1.5643819566490922e-05, |
| "loss": 0.0084, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.0954891622729936, |
| "grad_norm": 0.023400362581014633, |
| "learning_rate": 1.5620386643233746e-05, |
| "loss": 0.0703, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.1013473930872877, |
| "grad_norm": 0.007849013432860374, |
| "learning_rate": 1.559695371997657e-05, |
| "loss": 0.0054, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.1072056239015817, |
| "grad_norm": 0.025371093302965164, |
| "learning_rate": 1.5573520796719394e-05, |
| "loss": 0.0773, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.1130638547158758, |
| "grad_norm": 0.06657887995243073, |
| "learning_rate": 1.5550087873462215e-05, |
| "loss": 0.0375, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.1189220855301698, |
| "grad_norm": 2.8749732971191406, |
| "learning_rate": 1.552665495020504e-05, |
| "loss": 0.1956, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.124780316344464, |
| "grad_norm": 0.04028751328587532, |
| "learning_rate": 1.5503222026947863e-05, |
| "loss": 0.1195, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.1306385471587581, |
| "grad_norm": 0.045628052204847336, |
| "learning_rate": 1.5479789103690687e-05, |
| "loss": 0.1612, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.1364967779730522, |
| "grad_norm": 2.970036506652832, |
| "learning_rate": 1.5456356180433508e-05, |
| "loss": 0.0281, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.1423550087873462, |
| "grad_norm": 0.28056764602661133, |
| "learning_rate": 1.5432923257176332e-05, |
| "loss": 0.0254, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.1482132396016402, |
| "grad_norm": 0.044658973813056946, |
| "learning_rate": 1.5409490333919156e-05, |
| "loss": 0.1445, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.1540714704159343, |
| "grad_norm": 0.12443964928388596, |
| "learning_rate": 1.538605741066198e-05, |
| "loss": 0.2513, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.1599297012302285, |
| "grad_norm": 0.06219332292675972, |
| "learning_rate": 1.5362624487404804e-05, |
| "loss": 0.0266, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.1657879320445226, |
| "grad_norm": 0.09316384792327881, |
| "learning_rate": 1.533919156414763e-05, |
| "loss": 0.0737, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.1716461628588166, |
| "grad_norm": 7.367013931274414, |
| "learning_rate": 1.5315758640890453e-05, |
| "loss": 0.0688, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.1775043936731107, |
| "grad_norm": 0.04027366265654564, |
| "learning_rate": 1.5292325717633277e-05, |
| "loss": 0.0572, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.1833626244874047, |
| "grad_norm": 0.36168113350868225, |
| "learning_rate": 1.52688927943761e-05, |
| "loss": 0.1153, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.189220855301699, |
| "grad_norm": 0.10165631026029587, |
| "learning_rate": 1.5245459871118923e-05, |
| "loss": 0.0672, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.195079086115993, |
| "grad_norm": 1.8676950931549072, |
| "learning_rate": 1.5222026947861747e-05, |
| "loss": 0.1765, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.200937316930287, |
| "grad_norm": 0.0685826912522316, |
| "learning_rate": 1.5198594024604571e-05, |
| "loss": 0.0532, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.206795547744581, |
| "grad_norm": 0.09834755212068558, |
| "learning_rate": 1.5175161101347396e-05, |
| "loss": 0.0509, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.2126537785588751, |
| "grad_norm": 0.26971983909606934, |
| "learning_rate": 1.5151728178090216e-05, |
| "loss": 0.0384, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.2185120093731694, |
| "grad_norm": 0.042703777551651, |
| "learning_rate": 1.512829525483304e-05, |
| "loss": 0.0825, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.2243702401874634, |
| "grad_norm": 0.02655109018087387, |
| "learning_rate": 1.5104862331575865e-05, |
| "loss": 0.1039, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.2302284710017575, |
| "grad_norm": 19.35833740234375, |
| "learning_rate": 1.5081429408318689e-05, |
| "loss": 0.1339, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.2360867018160515, |
| "grad_norm": 0.0528317391872406, |
| "learning_rate": 1.5057996485061513e-05, |
| "loss": 0.1107, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.2419449326303456, |
| "grad_norm": 0.06290628761053085, |
| "learning_rate": 1.5034563561804337e-05, |
| "loss": 0.0576, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.2478031634446398, |
| "grad_norm": 0.049718987196683884, |
| "learning_rate": 1.501113063854716e-05, |
| "loss": 0.1598, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.2536613942589339, |
| "grad_norm": 1.1866627931594849, |
| "learning_rate": 1.4987697715289983e-05, |
| "loss": 0.2636, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.259519625073228, |
| "grad_norm": 13.535921096801758, |
| "learning_rate": 1.4964264792032807e-05, |
| "loss": 0.0952, |
| "step": 2150 |
| }, |
| { |
| "epoch": 1.265377855887522, |
| "grad_norm": 3.8203511238098145, |
| "learning_rate": 1.4940831868775632e-05, |
| "loss": 0.08, |
| "step": 2160 |
| }, |
| { |
| "epoch": 1.271236086701816, |
| "grad_norm": 0.06578367948532104, |
| "learning_rate": 1.4917398945518456e-05, |
| "loss": 0.0749, |
| "step": 2170 |
| }, |
| { |
| "epoch": 1.2770943175161102, |
| "grad_norm": 0.025391127914190292, |
| "learning_rate": 1.489396602226128e-05, |
| "loss": 0.1329, |
| "step": 2180 |
| }, |
| { |
| "epoch": 1.2829525483304043, |
| "grad_norm": 4.095520973205566, |
| "learning_rate": 1.4870533099004102e-05, |
| "loss": 0.116, |
| "step": 2190 |
| }, |
| { |
| "epoch": 1.2888107791446983, |
| "grad_norm": 0.11842747032642365, |
| "learning_rate": 1.4847100175746925e-05, |
| "loss": 0.0872, |
| "step": 2200 |
| }, |
| { |
| "epoch": 1.2946690099589924, |
| "grad_norm": 0.24338534474372864, |
| "learning_rate": 1.4823667252489749e-05, |
| "loss": 0.0083, |
| "step": 2210 |
| }, |
| { |
| "epoch": 1.3005272407732864, |
| "grad_norm": 0.7529749274253845, |
| "learning_rate": 1.4800234329232573e-05, |
| "loss": 0.1073, |
| "step": 2220 |
| }, |
| { |
| "epoch": 1.3063854715875807, |
| "grad_norm": 0.021239090710878372, |
| "learning_rate": 1.4776801405975395e-05, |
| "loss": 0.0095, |
| "step": 2230 |
| }, |
| { |
| "epoch": 1.3122437024018747, |
| "grad_norm": 0.21038176119327545, |
| "learning_rate": 1.475336848271822e-05, |
| "loss": 0.0802, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.3181019332161688, |
| "grad_norm": 4.547256946563721, |
| "learning_rate": 1.4729935559461044e-05, |
| "loss": 0.0826, |
| "step": 2250 |
| }, |
| { |
| "epoch": 1.3239601640304628, |
| "grad_norm": 0.02918773703277111, |
| "learning_rate": 1.4706502636203868e-05, |
| "loss": 0.0776, |
| "step": 2260 |
| }, |
| { |
| "epoch": 1.3298183948447568, |
| "grad_norm": 0.1411774605512619, |
| "learning_rate": 1.4683069712946692e-05, |
| "loss": 0.0842, |
| "step": 2270 |
| }, |
| { |
| "epoch": 1.335676625659051, |
| "grad_norm": 6.09588098526001, |
| "learning_rate": 1.4659636789689516e-05, |
| "loss": 0.1478, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.341534856473345, |
| "grad_norm": 0.08673793077468872, |
| "learning_rate": 1.4636203866432338e-05, |
| "loss": 0.0364, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.3473930872876392, |
| "grad_norm": 0.020423686131834984, |
| "learning_rate": 1.4612770943175162e-05, |
| "loss": 0.0088, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.3532513181019332, |
| "grad_norm": 0.07648295164108276, |
| "learning_rate": 1.4589338019917987e-05, |
| "loss": 0.0018, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.3591095489162273, |
| "grad_norm": 0.04958697408437729, |
| "learning_rate": 1.456590509666081e-05, |
| "loss": 0.0842, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.3649677797305213, |
| "grad_norm": 0.1480845808982849, |
| "learning_rate": 1.4542472173403633e-05, |
| "loss": 0.1083, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.3708260105448153, |
| "grad_norm": 4.45640754699707, |
| "learning_rate": 1.4519039250146455e-05, |
| "loss": 0.1541, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.3766842413591096, |
| "grad_norm": 3.787076711654663, |
| "learning_rate": 1.449560632688928e-05, |
| "loss": 0.0705, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.3825424721734036, |
| "grad_norm": 0.051576077938079834, |
| "learning_rate": 1.4472173403632104e-05, |
| "loss": 0.0039, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.3884007029876977, |
| "grad_norm": 0.5413435697555542, |
| "learning_rate": 1.4448740480374928e-05, |
| "loss": 0.1665, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.3942589338019917, |
| "grad_norm": 0.40767887234687805, |
| "learning_rate": 1.4425307557117752e-05, |
| "loss": 0.0356, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.4001171646162858, |
| "grad_norm": 0.08306195586919785, |
| "learning_rate": 1.4401874633860576e-05, |
| "loss": 0.1033, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.40597539543058, |
| "grad_norm": 0.18400411307811737, |
| "learning_rate": 1.4378441710603398e-05, |
| "loss": 0.0393, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.411833626244874, |
| "grad_norm": 0.050293222069740295, |
| "learning_rate": 1.4355008787346223e-05, |
| "loss": 0.0709, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.4176918570591681, |
| "grad_norm": 0.1496008336544037, |
| "learning_rate": 1.4331575864089047e-05, |
| "loss": 0.0617, |
| "step": 2420 |
| }, |
| { |
| "epoch": 1.4235500878734622, |
| "grad_norm": 0.05141230672597885, |
| "learning_rate": 1.430814294083187e-05, |
| "loss": 0.1249, |
| "step": 2430 |
| }, |
| { |
| "epoch": 1.4294083186877562, |
| "grad_norm": 0.012737499549984932, |
| "learning_rate": 1.4284710017574695e-05, |
| "loss": 0.0873, |
| "step": 2440 |
| }, |
| { |
| "epoch": 1.4352665495020505, |
| "grad_norm": 0.03204461559653282, |
| "learning_rate": 1.4261277094317519e-05, |
| "loss": 0.0055, |
| "step": 2450 |
| }, |
| { |
| "epoch": 1.4411247803163445, |
| "grad_norm": 0.01898861490190029, |
| "learning_rate": 1.423784417106034e-05, |
| "loss": 0.0796, |
| "step": 2460 |
| }, |
| { |
| "epoch": 1.4469830111306385, |
| "grad_norm": 3.666001796722412, |
| "learning_rate": 1.4214411247803164e-05, |
| "loss": 0.1088, |
| "step": 2470 |
| }, |
| { |
| "epoch": 1.4528412419449326, |
| "grad_norm": 3.432924270629883, |
| "learning_rate": 1.4190978324545988e-05, |
| "loss": 0.1513, |
| "step": 2480 |
| }, |
| { |
| "epoch": 1.4586994727592266, |
| "grad_norm": 0.04218652471899986, |
| "learning_rate": 1.4167545401288812e-05, |
| "loss": 0.0185, |
| "step": 2490 |
| }, |
| { |
| "epoch": 1.4645577035735209, |
| "grad_norm": 0.025178318843245506, |
| "learning_rate": 1.4144112478031635e-05, |
| "loss": 0.0038, |
| "step": 2500 |
| }, |
| { |
| "epoch": 1.470415934387815, |
| "grad_norm": 4.099980354309082, |
| "learning_rate": 1.4120679554774459e-05, |
| "loss": 0.0935, |
| "step": 2510 |
| }, |
| { |
| "epoch": 1.476274165202109, |
| "grad_norm": 5.700379848480225, |
| "learning_rate": 1.4097246631517283e-05, |
| "loss": 0.0138, |
| "step": 2520 |
| }, |
| { |
| "epoch": 1.482132396016403, |
| "grad_norm": 12.581140518188477, |
| "learning_rate": 1.4073813708260107e-05, |
| "loss": 0.0832, |
| "step": 2530 |
| }, |
| { |
| "epoch": 1.487990626830697, |
| "grad_norm": 0.24273565411567688, |
| "learning_rate": 1.4050380785002931e-05, |
| "loss": 0.0639, |
| "step": 2540 |
| }, |
| { |
| "epoch": 1.4938488576449913, |
| "grad_norm": 0.04139144718647003, |
| "learning_rate": 1.4026947861745755e-05, |
| "loss": 0.0265, |
| "step": 2550 |
| }, |
| { |
| "epoch": 1.4997070884592854, |
| "grad_norm": 0.05344259366393089, |
| "learning_rate": 1.4003514938488578e-05, |
| "loss": 0.1068, |
| "step": 2560 |
| }, |
| { |
| "epoch": 1.5055653192735794, |
| "grad_norm": 0.015608408488333225, |
| "learning_rate": 1.3980082015231402e-05, |
| "loss": 0.2022, |
| "step": 2570 |
| }, |
| { |
| "epoch": 1.5114235500878734, |
| "grad_norm": 0.023171085864305496, |
| "learning_rate": 1.3956649091974226e-05, |
| "loss": 0.0281, |
| "step": 2580 |
| }, |
| { |
| "epoch": 1.5172817809021675, |
| "grad_norm": 6.981873512268066, |
| "learning_rate": 1.3933216168717048e-05, |
| "loss": 0.0813, |
| "step": 2590 |
| }, |
| { |
| "epoch": 1.5231400117164617, |
| "grad_norm": 0.06565181910991669, |
| "learning_rate": 1.390978324545987e-05, |
| "loss": 0.0602, |
| "step": 2600 |
| }, |
| { |
| "epoch": 1.5289982425307556, |
| "grad_norm": 0.20862698554992676, |
| "learning_rate": 1.3886350322202695e-05, |
| "loss": 0.0693, |
| "step": 2610 |
| }, |
| { |
| "epoch": 1.5348564733450498, |
| "grad_norm": 0.02823842503130436, |
| "learning_rate": 1.3862917398945519e-05, |
| "loss": 0.03, |
| "step": 2620 |
| }, |
| { |
| "epoch": 1.5407147041593439, |
| "grad_norm": 0.7352235913276672, |
| "learning_rate": 1.3839484475688343e-05, |
| "loss": 0.0442, |
| "step": 2630 |
| }, |
| { |
| "epoch": 1.546572934973638, |
| "grad_norm": 0.02312728948891163, |
| "learning_rate": 1.3816051552431167e-05, |
| "loss": 0.0351, |
| "step": 2640 |
| }, |
| { |
| "epoch": 1.5524311657879322, |
| "grad_norm": 0.07476960122585297, |
| "learning_rate": 1.3792618629173991e-05, |
| "loss": 0.0387, |
| "step": 2650 |
| }, |
| { |
| "epoch": 1.558289396602226, |
| "grad_norm": 0.14400868117809296, |
| "learning_rate": 1.3769185705916814e-05, |
| "loss": 0.0602, |
| "step": 2660 |
| }, |
| { |
| "epoch": 1.5641476274165202, |
| "grad_norm": 0.03687351942062378, |
| "learning_rate": 1.3745752782659638e-05, |
| "loss": 0.0235, |
| "step": 2670 |
| }, |
| { |
| "epoch": 1.5700058582308143, |
| "grad_norm": 0.02022375538945198, |
| "learning_rate": 1.3722319859402462e-05, |
| "loss": 0.1263, |
| "step": 2680 |
| }, |
| { |
| "epoch": 1.5758640890451083, |
| "grad_norm": 9.280242919921875, |
| "learning_rate": 1.3698886936145286e-05, |
| "loss": 0.0292, |
| "step": 2690 |
| }, |
| { |
| "epoch": 1.5817223198594026, |
| "grad_norm": 0.02085457742214203, |
| "learning_rate": 1.367545401288811e-05, |
| "loss": 0.0104, |
| "step": 2700 |
| }, |
| { |
| "epoch": 1.5875805506736964, |
| "grad_norm": 5.943541526794434, |
| "learning_rate": 1.3652021089630934e-05, |
| "loss": 0.0689, |
| "step": 2710 |
| }, |
| { |
| "epoch": 1.5934387814879907, |
| "grad_norm": 0.07496699690818787, |
| "learning_rate": 1.3628588166373755e-05, |
| "loss": 0.0393, |
| "step": 2720 |
| }, |
| { |
| "epoch": 1.5992970123022847, |
| "grad_norm": 0.05785732343792915, |
| "learning_rate": 1.3605155243116579e-05, |
| "loss": 0.0791, |
| "step": 2730 |
| }, |
| { |
| "epoch": 1.6051552431165788, |
| "grad_norm": 0.026315512135624886, |
| "learning_rate": 1.3581722319859403e-05, |
| "loss": 0.0734, |
| "step": 2740 |
| }, |
| { |
| "epoch": 1.611013473930873, |
| "grad_norm": 10.757952690124512, |
| "learning_rate": 1.3558289396602227e-05, |
| "loss": 0.019, |
| "step": 2750 |
| }, |
| { |
| "epoch": 1.6168717047451668, |
| "grad_norm": 0.1766202300786972, |
| "learning_rate": 1.353485647334505e-05, |
| "loss": 0.0712, |
| "step": 2760 |
| }, |
| { |
| "epoch": 1.622729935559461, |
| "grad_norm": 0.023655325174331665, |
| "learning_rate": 1.3511423550087874e-05, |
| "loss": 0.031, |
| "step": 2770 |
| }, |
| { |
| "epoch": 1.6285881663737551, |
| "grad_norm": 13.143453598022461, |
| "learning_rate": 1.3487990626830698e-05, |
| "loss": 0.0238, |
| "step": 2780 |
| }, |
| { |
| "epoch": 1.6344463971880492, |
| "grad_norm": 0.2318425476551056, |
| "learning_rate": 1.3464557703573522e-05, |
| "loss": 0.0077, |
| "step": 2790 |
| }, |
| { |
| "epoch": 1.6403046280023434, |
| "grad_norm": 0.032312728464603424, |
| "learning_rate": 1.3441124780316346e-05, |
| "loss": 0.1002, |
| "step": 2800 |
| }, |
| { |
| "epoch": 1.6461628588166373, |
| "grad_norm": 0.021946435794234276, |
| "learning_rate": 1.341769185705917e-05, |
| "loss": 0.0765, |
| "step": 2810 |
| }, |
| { |
| "epoch": 1.6520210896309315, |
| "grad_norm": 4.089444637298584, |
| "learning_rate": 1.3394258933801994e-05, |
| "loss": 0.0796, |
| "step": 2820 |
| }, |
| { |
| "epoch": 1.6578793204452256, |
| "grad_norm": 6.655864715576172, |
| "learning_rate": 1.3370826010544817e-05, |
| "loss": 0.1206, |
| "step": 2830 |
| }, |
| { |
| "epoch": 1.6637375512595196, |
| "grad_norm": 1.4402270317077637, |
| "learning_rate": 1.3347393087287641e-05, |
| "loss": 0.0079, |
| "step": 2840 |
| }, |
| { |
| "epoch": 1.6695957820738139, |
| "grad_norm": 0.031677696853876114, |
| "learning_rate": 1.3323960164030465e-05, |
| "loss": 0.0024, |
| "step": 2850 |
| }, |
| { |
| "epoch": 1.6754540128881077, |
| "grad_norm": 0.21266481280326843, |
| "learning_rate": 1.3300527240773287e-05, |
| "loss": 0.1047, |
| "step": 2860 |
| }, |
| { |
| "epoch": 1.681312243702402, |
| "grad_norm": 0.08722619712352753, |
| "learning_rate": 1.327709431751611e-05, |
| "loss": 0.1248, |
| "step": 2870 |
| }, |
| { |
| "epoch": 1.687170474516696, |
| "grad_norm": 13.05366039276123, |
| "learning_rate": 1.3253661394258934e-05, |
| "loss": 0.0397, |
| "step": 2880 |
| }, |
| { |
| "epoch": 1.69302870533099, |
| "grad_norm": 0.050381001085042953, |
| "learning_rate": 1.3230228471001758e-05, |
| "loss": 0.0436, |
| "step": 2890 |
| }, |
| { |
| "epoch": 1.698886936145284, |
| "grad_norm": 0.19227945804595947, |
| "learning_rate": 1.3206795547744582e-05, |
| "loss": 0.1769, |
| "step": 2900 |
| }, |
| { |
| "epoch": 1.7047451669595781, |
| "grad_norm": 8.314407348632812, |
| "learning_rate": 1.3183362624487406e-05, |
| "loss": 0.0578, |
| "step": 2910 |
| }, |
| { |
| "epoch": 1.7106033977738724, |
| "grad_norm": 4.961170673370361, |
| "learning_rate": 1.315992970123023e-05, |
| "loss": 0.0179, |
| "step": 2920 |
| }, |
| { |
| "epoch": 1.7164616285881664, |
| "grad_norm": 0.01738910935819149, |
| "learning_rate": 1.3136496777973053e-05, |
| "loss": 0.2876, |
| "step": 2930 |
| }, |
| { |
| "epoch": 1.7223198594024605, |
| "grad_norm": 0.026022635400295258, |
| "learning_rate": 1.3113063854715877e-05, |
| "loss": 0.1407, |
| "step": 2940 |
| }, |
| { |
| "epoch": 1.7281780902167545, |
| "grad_norm": 4.809380054473877, |
| "learning_rate": 1.3089630931458701e-05, |
| "loss": 0.1144, |
| "step": 2950 |
| }, |
| { |
| "epoch": 1.7340363210310485, |
| "grad_norm": 12.418693542480469, |
| "learning_rate": 1.3066198008201525e-05, |
| "loss": 0.0545, |
| "step": 2960 |
| }, |
| { |
| "epoch": 1.7398945518453428, |
| "grad_norm": 0.03380923718214035, |
| "learning_rate": 1.304276508494435e-05, |
| "loss": 0.0892, |
| "step": 2970 |
| }, |
| { |
| "epoch": 1.7457527826596366, |
| "grad_norm": 0.022299950942397118, |
| "learning_rate": 1.3019332161687173e-05, |
| "loss": 0.2233, |
| "step": 2980 |
| }, |
| { |
| "epoch": 1.751611013473931, |
| "grad_norm": 0.03865174576640129, |
| "learning_rate": 1.2995899238429994e-05, |
| "loss": 0.1028, |
| "step": 2990 |
| }, |
| { |
| "epoch": 1.757469244288225, |
| "grad_norm": 6.527011871337891, |
| "learning_rate": 1.2972466315172818e-05, |
| "loss": 0.0616, |
| "step": 3000 |
| }, |
| { |
| "epoch": 1.763327475102519, |
| "grad_norm": 2.869323492050171, |
| "learning_rate": 1.2949033391915642e-05, |
| "loss": 0.0057, |
| "step": 3010 |
| }, |
| { |
| "epoch": 1.7691857059168132, |
| "grad_norm": 7.389009475708008, |
| "learning_rate": 1.2925600468658466e-05, |
| "loss": 0.0454, |
| "step": 3020 |
| }, |
| { |
| "epoch": 1.775043936731107, |
| "grad_norm": 0.01063600555062294, |
| "learning_rate": 1.2902167545401289e-05, |
| "loss": 0.0059, |
| "step": 3030 |
| }, |
| { |
| "epoch": 1.7809021675454013, |
| "grad_norm": 0.018307434394955635, |
| "learning_rate": 1.2878734622144113e-05, |
| "loss": 0.0535, |
| "step": 3040 |
| }, |
| { |
| "epoch": 1.7867603983596954, |
| "grad_norm": 7.301026344299316, |
| "learning_rate": 1.2855301698886937e-05, |
| "loss": 0.1781, |
| "step": 3050 |
| }, |
| { |
| "epoch": 1.7926186291739894, |
| "grad_norm": 0.01443097461014986, |
| "learning_rate": 1.2831868775629761e-05, |
| "loss": 0.0498, |
| "step": 3060 |
| }, |
| { |
| "epoch": 1.7984768599882837, |
| "grad_norm": 0.023425087332725525, |
| "learning_rate": 1.2808435852372585e-05, |
| "loss": 0.0193, |
| "step": 3070 |
| }, |
| { |
| "epoch": 1.8043350908025775, |
| "grad_norm": 0.014292699284851551, |
| "learning_rate": 1.278500292911541e-05, |
| "loss": 0.1091, |
| "step": 3080 |
| }, |
| { |
| "epoch": 1.8101933216168717, |
| "grad_norm": 10.170490264892578, |
| "learning_rate": 1.2761570005858232e-05, |
| "loss": 0.0909, |
| "step": 3090 |
| }, |
| { |
| "epoch": 1.8160515524311658, |
| "grad_norm": 0.008421896025538445, |
| "learning_rate": 1.2738137082601056e-05, |
| "loss": 0.0414, |
| "step": 3100 |
| }, |
| { |
| "epoch": 1.8219097832454598, |
| "grad_norm": 3.434213638305664, |
| "learning_rate": 1.271470415934388e-05, |
| "loss": 0.1271, |
| "step": 3110 |
| }, |
| { |
| "epoch": 1.827768014059754, |
| "grad_norm": 3.279567241668701, |
| "learning_rate": 1.2691271236086702e-05, |
| "loss": 0.194, |
| "step": 3120 |
| }, |
| { |
| "epoch": 1.833626244874048, |
| "grad_norm": 0.016901379451155663, |
| "learning_rate": 1.2667838312829525e-05, |
| "loss": 0.0084, |
| "step": 3130 |
| }, |
| { |
| "epoch": 1.8394844756883422, |
| "grad_norm": 0.023041723296046257, |
| "learning_rate": 1.2644405389572349e-05, |
| "loss": 0.0146, |
| "step": 3140 |
| }, |
| { |
| "epoch": 1.8453427065026362, |
| "grad_norm": 0.013225620612502098, |
| "learning_rate": 1.2620972466315173e-05, |
| "loss": 0.0693, |
| "step": 3150 |
| }, |
| { |
| "epoch": 1.8512009373169303, |
| "grad_norm": 0.3961861729621887, |
| "learning_rate": 1.2597539543057997e-05, |
| "loss": 0.1117, |
| "step": 3160 |
| }, |
| { |
| "epoch": 1.8570591681312245, |
| "grad_norm": 3.4199657440185547, |
| "learning_rate": 1.2574106619800821e-05, |
| "loss": 0.1389, |
| "step": 3170 |
| }, |
| { |
| "epoch": 1.8629173989455183, |
| "grad_norm": 0.012990470044314861, |
| "learning_rate": 1.2550673696543645e-05, |
| "loss": 0.0668, |
| "step": 3180 |
| }, |
| { |
| "epoch": 1.8687756297598126, |
| "grad_norm": 0.05180348455905914, |
| "learning_rate": 1.2527240773286468e-05, |
| "loss": 0.0596, |
| "step": 3190 |
| }, |
| { |
| "epoch": 1.8746338605741066, |
| "grad_norm": 0.1156950443983078, |
| "learning_rate": 1.2503807850029292e-05, |
| "loss": 0.1105, |
| "step": 3200 |
| }, |
| { |
| "epoch": 1.8804920913884007, |
| "grad_norm": 0.14457456767559052, |
| "learning_rate": 1.2480374926772116e-05, |
| "loss": 0.0467, |
| "step": 3210 |
| }, |
| { |
| "epoch": 1.886350322202695, |
| "grad_norm": 0.04957001283764839, |
| "learning_rate": 1.245694200351494e-05, |
| "loss": 0.0851, |
| "step": 3220 |
| }, |
| { |
| "epoch": 1.8922085530169888, |
| "grad_norm": 0.01281155925244093, |
| "learning_rate": 1.2433509080257764e-05, |
| "loss": 0.0945, |
| "step": 3230 |
| }, |
| { |
| "epoch": 1.898066783831283, |
| "grad_norm": 0.02651827037334442, |
| "learning_rate": 1.2410076157000588e-05, |
| "loss": 0.0506, |
| "step": 3240 |
| }, |
| { |
| "epoch": 1.903925014645577, |
| "grad_norm": 0.329218327999115, |
| "learning_rate": 1.238664323374341e-05, |
| "loss": 0.0122, |
| "step": 3250 |
| }, |
| { |
| "epoch": 1.909783245459871, |
| "grad_norm": 0.02343546599149704, |
| "learning_rate": 1.2363210310486233e-05, |
| "loss": 0.0122, |
| "step": 3260 |
| }, |
| { |
| "epoch": 1.9156414762741654, |
| "grad_norm": 0.37970277667045593, |
| "learning_rate": 1.2339777387229057e-05, |
| "loss": 0.0808, |
| "step": 3270 |
| }, |
| { |
| "epoch": 1.9214997070884592, |
| "grad_norm": 0.027905110269784927, |
| "learning_rate": 1.2316344463971882e-05, |
| "loss": 0.0453, |
| "step": 3280 |
| }, |
| { |
| "epoch": 1.9273579379027534, |
| "grad_norm": 0.021370578557252884, |
| "learning_rate": 1.2292911540714706e-05, |
| "loss": 0.1797, |
| "step": 3290 |
| }, |
| { |
| "epoch": 1.9332161687170475, |
| "grad_norm": 0.02419857680797577, |
| "learning_rate": 1.2269478617457528e-05, |
| "loss": 0.1422, |
| "step": 3300 |
| }, |
| { |
| "epoch": 1.9390743995313415, |
| "grad_norm": 0.0072282287292182446, |
| "learning_rate": 1.2246045694200352e-05, |
| "loss": 0.0327, |
| "step": 3310 |
| }, |
| { |
| "epoch": 1.9449326303456356, |
| "grad_norm": 0.10726092755794525, |
| "learning_rate": 1.2222612770943176e-05, |
| "loss": 0.062, |
| "step": 3320 |
| }, |
| { |
| "epoch": 1.9507908611599296, |
| "grad_norm": 0.08492777496576309, |
| "learning_rate": 1.2199179847686e-05, |
| "loss": 0.0247, |
| "step": 3330 |
| }, |
| { |
| "epoch": 1.9566490919742239, |
| "grad_norm": 0.03214435651898384, |
| "learning_rate": 1.2175746924428825e-05, |
| "loss": 0.0945, |
| "step": 3340 |
| }, |
| { |
| "epoch": 1.962507322788518, |
| "grad_norm": 0.05654756724834442, |
| "learning_rate": 1.2152314001171649e-05, |
| "loss": 0.0385, |
| "step": 3350 |
| }, |
| { |
| "epoch": 1.968365553602812, |
| "grad_norm": 0.08512037247419357, |
| "learning_rate": 1.2128881077914471e-05, |
| "loss": 0.0031, |
| "step": 3360 |
| }, |
| { |
| "epoch": 1.974223784417106, |
| "grad_norm": 0.040594056248664856, |
| "learning_rate": 1.2105448154657295e-05, |
| "loss": 0.0454, |
| "step": 3370 |
| }, |
| { |
| "epoch": 1.9800820152314, |
| "grad_norm": 1.7671189308166504, |
| "learning_rate": 1.2082015231400118e-05, |
| "loss": 0.0803, |
| "step": 3380 |
| }, |
| { |
| "epoch": 1.9859402460456943, |
| "grad_norm": 0.11120552569627762, |
| "learning_rate": 1.2058582308142942e-05, |
| "loss": 0.0028, |
| "step": 3390 |
| }, |
| { |
| "epoch": 1.9917984768599881, |
| "grad_norm": 1.060610294342041, |
| "learning_rate": 1.2035149384885764e-05, |
| "loss": 0.0918, |
| "step": 3400 |
| }, |
| { |
| "epoch": 1.9976567076742824, |
| "grad_norm": 0.03006490133702755, |
| "learning_rate": 1.2011716461628588e-05, |
| "loss": 0.0126, |
| "step": 3410 |
| }, |
| { |
| "epoch": 2.0035149384885766, |
| "grad_norm": 0.2244044691324234, |
| "learning_rate": 1.1988283538371412e-05, |
| "loss": 0.0445, |
| "step": 3420 |
| }, |
| { |
| "epoch": 2.0093731693028705, |
| "grad_norm": 2.045053005218506, |
| "learning_rate": 1.1964850615114236e-05, |
| "loss": 0.0151, |
| "step": 3430 |
| }, |
| { |
| "epoch": 2.0152314001171647, |
| "grad_norm": 0.3408319652080536, |
| "learning_rate": 1.194141769185706e-05, |
| "loss": 0.0399, |
| "step": 3440 |
| }, |
| { |
| "epoch": 2.0210896309314585, |
| "grad_norm": 7.308938980102539, |
| "learning_rate": 1.1917984768599885e-05, |
| "loss": 0.0922, |
| "step": 3450 |
| }, |
| { |
| "epoch": 2.026947861745753, |
| "grad_norm": 0.013798482716083527, |
| "learning_rate": 1.1894551845342707e-05, |
| "loss": 0.0645, |
| "step": 3460 |
| }, |
| { |
| "epoch": 2.032806092560047, |
| "grad_norm": 0.04501907154917717, |
| "learning_rate": 1.1871118922085531e-05, |
| "loss": 0.0255, |
| "step": 3470 |
| }, |
| { |
| "epoch": 2.038664323374341, |
| "grad_norm": 0.6319682002067566, |
| "learning_rate": 1.1847685998828355e-05, |
| "loss": 0.0731, |
| "step": 3480 |
| }, |
| { |
| "epoch": 2.044522554188635, |
| "grad_norm": 0.7583388090133667, |
| "learning_rate": 1.182425307557118e-05, |
| "loss": 0.067, |
| "step": 3490 |
| }, |
| { |
| "epoch": 2.050380785002929, |
| "grad_norm": 0.009900501929223537, |
| "learning_rate": 1.1800820152314004e-05, |
| "loss": 0.0668, |
| "step": 3500 |
| }, |
| { |
| "epoch": 2.0562390158172232, |
| "grad_norm": 0.04583236575126648, |
| "learning_rate": 1.1777387229056824e-05, |
| "loss": 0.096, |
| "step": 3510 |
| }, |
| { |
| "epoch": 2.0620972466315175, |
| "grad_norm": 0.0209378469735384, |
| "learning_rate": 1.1753954305799648e-05, |
| "loss": 0.0497, |
| "step": 3520 |
| }, |
| { |
| "epoch": 2.0679554774458113, |
| "grad_norm": 0.8492453098297119, |
| "learning_rate": 1.1730521382542473e-05, |
| "loss": 0.0916, |
| "step": 3530 |
| }, |
| { |
| "epoch": 2.0738137082601056, |
| "grad_norm": 0.03321617469191551, |
| "learning_rate": 1.1707088459285297e-05, |
| "loss": 0.083, |
| "step": 3540 |
| }, |
| { |
| "epoch": 2.0796719390743994, |
| "grad_norm": 5.165300369262695, |
| "learning_rate": 1.168365553602812e-05, |
| "loss": 0.1411, |
| "step": 3550 |
| }, |
| { |
| "epoch": 2.0855301698886937, |
| "grad_norm": 0.02975725382566452, |
| "learning_rate": 1.1660222612770943e-05, |
| "loss": 0.0023, |
| "step": 3560 |
| }, |
| { |
| "epoch": 2.0913884007029875, |
| "grad_norm": 0.08700698614120483, |
| "learning_rate": 1.1636789689513767e-05, |
| "loss": 0.0051, |
| "step": 3570 |
| }, |
| { |
| "epoch": 2.0972466315172817, |
| "grad_norm": 1.9142591953277588, |
| "learning_rate": 1.1613356766256591e-05, |
| "loss": 0.1711, |
| "step": 3580 |
| }, |
| { |
| "epoch": 2.103104862331576, |
| "grad_norm": 0.09102124720811844, |
| "learning_rate": 1.1589923842999416e-05, |
| "loss": 0.1587, |
| "step": 3590 |
| }, |
| { |
| "epoch": 2.10896309314587, |
| "grad_norm": 0.1819785088300705, |
| "learning_rate": 1.156649091974224e-05, |
| "loss": 0.0042, |
| "step": 3600 |
| }, |
| { |
| "epoch": 2.114821323960164, |
| "grad_norm": 0.012282956391572952, |
| "learning_rate": 1.1543057996485064e-05, |
| "loss": 0.1206, |
| "step": 3610 |
| }, |
| { |
| "epoch": 2.120679554774458, |
| "grad_norm": 15.593123435974121, |
| "learning_rate": 1.1519625073227886e-05, |
| "loss": 0.0587, |
| "step": 3620 |
| }, |
| { |
| "epoch": 2.126537785588752, |
| "grad_norm": 0.027000226080417633, |
| "learning_rate": 1.149619214997071e-05, |
| "loss": 0.0625, |
| "step": 3630 |
| }, |
| { |
| "epoch": 2.1323960164030464, |
| "grad_norm": 0.012658847495913506, |
| "learning_rate": 1.1472759226713533e-05, |
| "loss": 0.0097, |
| "step": 3640 |
| }, |
| { |
| "epoch": 2.1382542472173403, |
| "grad_norm": 0.021828286349773407, |
| "learning_rate": 1.1449326303456357e-05, |
| "loss": 0.0117, |
| "step": 3650 |
| }, |
| { |
| "epoch": 2.1441124780316345, |
| "grad_norm": 0.022427566349506378, |
| "learning_rate": 1.142589338019918e-05, |
| "loss": 0.0761, |
| "step": 3660 |
| }, |
| { |
| "epoch": 2.1499707088459283, |
| "grad_norm": 0.010592641308903694, |
| "learning_rate": 1.1402460456942003e-05, |
| "loss": 0.0641, |
| "step": 3670 |
| }, |
| { |
| "epoch": 2.1558289396602226, |
| "grad_norm": 0.01156581286340952, |
| "learning_rate": 1.1379027533684827e-05, |
| "loss": 0.0455, |
| "step": 3680 |
| }, |
| { |
| "epoch": 2.161687170474517, |
| "grad_norm": 2.198169708251953, |
| "learning_rate": 1.1355594610427652e-05, |
| "loss": 0.0373, |
| "step": 3690 |
| }, |
| { |
| "epoch": 2.1675454012888107, |
| "grad_norm": 0.035707347095012665, |
| "learning_rate": 1.1332161687170476e-05, |
| "loss": 0.0033, |
| "step": 3700 |
| }, |
| { |
| "epoch": 2.173403632103105, |
| "grad_norm": 18.409835815429688, |
| "learning_rate": 1.13087287639133e-05, |
| "loss": 0.0505, |
| "step": 3710 |
| }, |
| { |
| "epoch": 2.1792618629173988, |
| "grad_norm": 7.633121967315674, |
| "learning_rate": 1.1285295840656122e-05, |
| "loss": 0.1812, |
| "step": 3720 |
| }, |
| { |
| "epoch": 2.185120093731693, |
| "grad_norm": 7.193730354309082, |
| "learning_rate": 1.1261862917398946e-05, |
| "loss": 0.0805, |
| "step": 3730 |
| }, |
| { |
| "epoch": 2.1909783245459873, |
| "grad_norm": 7.592154502868652, |
| "learning_rate": 1.123842999414177e-05, |
| "loss": 0.0256, |
| "step": 3740 |
| }, |
| { |
| "epoch": 2.196836555360281, |
| "grad_norm": 0.02322685346007347, |
| "learning_rate": 1.1214997070884595e-05, |
| "loss": 0.0057, |
| "step": 3750 |
| }, |
| { |
| "epoch": 2.2026947861745754, |
| "grad_norm": 0.02266596630215645, |
| "learning_rate": 1.1191564147627419e-05, |
| "loss": 0.0783, |
| "step": 3760 |
| }, |
| { |
| "epoch": 2.208553016988869, |
| "grad_norm": 0.01605592481791973, |
| "learning_rate": 1.116813122437024e-05, |
| "loss": 0.0818, |
| "step": 3770 |
| }, |
| { |
| "epoch": 2.2144112478031635, |
| "grad_norm": 0.011953979730606079, |
| "learning_rate": 1.1144698301113064e-05, |
| "loss": 0.0044, |
| "step": 3780 |
| }, |
| { |
| "epoch": 2.2202694786174577, |
| "grad_norm": 4.54928731918335, |
| "learning_rate": 1.1121265377855888e-05, |
| "loss": 0.0757, |
| "step": 3790 |
| }, |
| { |
| "epoch": 2.2261277094317515, |
| "grad_norm": 0.05169807747006416, |
| "learning_rate": 1.1097832454598712e-05, |
| "loss": 0.0247, |
| "step": 3800 |
| }, |
| { |
| "epoch": 2.231985940246046, |
| "grad_norm": 0.11785856634378433, |
| "learning_rate": 1.1074399531341536e-05, |
| "loss": 0.0026, |
| "step": 3810 |
| }, |
| { |
| "epoch": 2.2378441710603396, |
| "grad_norm": 0.06222229450941086, |
| "learning_rate": 1.105096660808436e-05, |
| "loss": 0.1561, |
| "step": 3820 |
| }, |
| { |
| "epoch": 2.243702401874634, |
| "grad_norm": 0.05662352964282036, |
| "learning_rate": 1.1027533684827182e-05, |
| "loss": 0.002, |
| "step": 3830 |
| }, |
| { |
| "epoch": 2.249560632688928, |
| "grad_norm": 0.011579878628253937, |
| "learning_rate": 1.1004100761570007e-05, |
| "loss": 0.0117, |
| "step": 3840 |
| }, |
| { |
| "epoch": 2.255418863503222, |
| "grad_norm": 0.16005638241767883, |
| "learning_rate": 1.098066783831283e-05, |
| "loss": 0.0039, |
| "step": 3850 |
| }, |
| { |
| "epoch": 2.2612770943175162, |
| "grad_norm": 4.731288433074951, |
| "learning_rate": 1.0957234915055655e-05, |
| "loss": 0.0357, |
| "step": 3860 |
| }, |
| { |
| "epoch": 2.26713532513181, |
| "grad_norm": 0.20386534929275513, |
| "learning_rate": 1.0933801991798479e-05, |
| "loss": 0.0775, |
| "step": 3870 |
| }, |
| { |
| "epoch": 2.2729935559461043, |
| "grad_norm": 0.030687009915709496, |
| "learning_rate": 1.0910369068541303e-05, |
| "loss": 0.066, |
| "step": 3880 |
| }, |
| { |
| "epoch": 2.2788517867603986, |
| "grad_norm": 0.07073375582695007, |
| "learning_rate": 1.0886936145284125e-05, |
| "loss": 0.0295, |
| "step": 3890 |
| }, |
| { |
| "epoch": 2.2847100175746924, |
| "grad_norm": 0.01278142724186182, |
| "learning_rate": 1.086350322202695e-05, |
| "loss": 0.2097, |
| "step": 3900 |
| }, |
| { |
| "epoch": 2.2905682483889866, |
| "grad_norm": 0.010042221285402775, |
| "learning_rate": 1.0840070298769772e-05, |
| "loss": 0.0785, |
| "step": 3910 |
| }, |
| { |
| "epoch": 2.2964264792032805, |
| "grad_norm": 0.04149511829018593, |
| "learning_rate": 1.0816637375512596e-05, |
| "loss": 0.1287, |
| "step": 3920 |
| }, |
| { |
| "epoch": 2.3022847100175747, |
| "grad_norm": 0.016597773879766464, |
| "learning_rate": 1.0793204452255418e-05, |
| "loss": 0.0506, |
| "step": 3930 |
| }, |
| { |
| "epoch": 2.3081429408318686, |
| "grad_norm": 0.022845905274152756, |
| "learning_rate": 1.0769771528998243e-05, |
| "loss": 0.0809, |
| "step": 3940 |
| }, |
| { |
| "epoch": 2.314001171646163, |
| "grad_norm": 8.821196556091309, |
| "learning_rate": 1.0746338605741067e-05, |
| "loss": 0.0733, |
| "step": 3950 |
| }, |
| { |
| "epoch": 2.319859402460457, |
| "grad_norm": 0.053114939481019974, |
| "learning_rate": 1.072290568248389e-05, |
| "loss": 0.0605, |
| "step": 3960 |
| }, |
| { |
| "epoch": 2.325717633274751, |
| "grad_norm": 0.04653681442141533, |
| "learning_rate": 1.0699472759226715e-05, |
| "loss": 0.0016, |
| "step": 3970 |
| }, |
| { |
| "epoch": 2.331575864089045, |
| "grad_norm": 0.0594046413898468, |
| "learning_rate": 1.0676039835969539e-05, |
| "loss": 0.0522, |
| "step": 3980 |
| }, |
| { |
| "epoch": 2.3374340949033394, |
| "grad_norm": 0.7372789978981018, |
| "learning_rate": 1.0652606912712361e-05, |
| "loss": 0.0026, |
| "step": 3990 |
| }, |
| { |
| "epoch": 2.3432923257176332, |
| "grad_norm": 0.02045411616563797, |
| "learning_rate": 1.0629173989455186e-05, |
| "loss": 0.0019, |
| "step": 4000 |
| }, |
| { |
| "epoch": 2.3491505565319275, |
| "grad_norm": 0.01363250333815813, |
| "learning_rate": 1.060574106619801e-05, |
| "loss": 0.0497, |
| "step": 4010 |
| }, |
| { |
| "epoch": 2.3550087873462213, |
| "grad_norm": 0.03560090810060501, |
| "learning_rate": 1.0582308142940834e-05, |
| "loss": 0.067, |
| "step": 4020 |
| }, |
| { |
| "epoch": 2.3608670181605156, |
| "grad_norm": 0.016359740868210793, |
| "learning_rate": 1.0558875219683658e-05, |
| "loss": 0.0019, |
| "step": 4030 |
| }, |
| { |
| "epoch": 2.3667252489748094, |
| "grad_norm": 0.21759074926376343, |
| "learning_rate": 1.0535442296426479e-05, |
| "loss": 0.004, |
| "step": 4040 |
| }, |
| { |
| "epoch": 2.3725834797891037, |
| "grad_norm": 0.008675262331962585, |
| "learning_rate": 1.0512009373169303e-05, |
| "loss": 0.0216, |
| "step": 4050 |
| }, |
| { |
| "epoch": 2.378441710603398, |
| "grad_norm": 0.018076395615935326, |
| "learning_rate": 1.0488576449912127e-05, |
| "loss": 0.0076, |
| "step": 4060 |
| }, |
| { |
| "epoch": 2.3842999414176917, |
| "grad_norm": 0.024144655093550682, |
| "learning_rate": 1.0465143526654951e-05, |
| "loss": 0.0012, |
| "step": 4070 |
| }, |
| { |
| "epoch": 2.390158172231986, |
| "grad_norm": 0.011699177324771881, |
| "learning_rate": 1.0441710603397775e-05, |
| "loss": 0.0005, |
| "step": 4080 |
| }, |
| { |
| "epoch": 2.39601640304628, |
| "grad_norm": 6.62136697769165, |
| "learning_rate": 1.0418277680140597e-05, |
| "loss": 0.1261, |
| "step": 4090 |
| }, |
| { |
| "epoch": 2.401874633860574, |
| "grad_norm": 8.810157775878906, |
| "learning_rate": 1.0394844756883422e-05, |
| "loss": 0.2643, |
| "step": 4100 |
| }, |
| { |
| "epoch": 2.4077328646748684, |
| "grad_norm": 0.032008685171604156, |
| "learning_rate": 1.0371411833626246e-05, |
| "loss": 0.0065, |
| "step": 4110 |
| }, |
| { |
| "epoch": 2.413591095489162, |
| "grad_norm": 0.01840485818684101, |
| "learning_rate": 1.034797891036907e-05, |
| "loss": 0.0225, |
| "step": 4120 |
| }, |
| { |
| "epoch": 2.4194493263034564, |
| "grad_norm": 20.19922637939453, |
| "learning_rate": 1.0324545987111894e-05, |
| "loss": 0.0457, |
| "step": 4130 |
| }, |
| { |
| "epoch": 2.4253075571177503, |
| "grad_norm": 0.062382347881793976, |
| "learning_rate": 1.0301113063854718e-05, |
| "loss": 0.0626, |
| "step": 4140 |
| }, |
| { |
| "epoch": 2.4311657879320445, |
| "grad_norm": 0.03572423383593559, |
| "learning_rate": 1.027768014059754e-05, |
| "loss": 0.1125, |
| "step": 4150 |
| }, |
| { |
| "epoch": 2.437024018746339, |
| "grad_norm": 0.011173143982887268, |
| "learning_rate": 1.0254247217340365e-05, |
| "loss": 0.1037, |
| "step": 4160 |
| }, |
| { |
| "epoch": 2.4428822495606326, |
| "grad_norm": 0.01709819957613945, |
| "learning_rate": 1.0230814294083187e-05, |
| "loss": 0.0461, |
| "step": 4170 |
| }, |
| { |
| "epoch": 2.448740480374927, |
| "grad_norm": 9.399250030517578, |
| "learning_rate": 1.0207381370826011e-05, |
| "loss": 0.0471, |
| "step": 4180 |
| }, |
| { |
| "epoch": 2.4545987111892207, |
| "grad_norm": 0.2731788754463196, |
| "learning_rate": 1.0183948447568834e-05, |
| "loss": 0.0299, |
| "step": 4190 |
| }, |
| { |
| "epoch": 2.460456942003515, |
| "grad_norm": 0.026814190670847893, |
| "learning_rate": 1.0160515524311658e-05, |
| "loss": 0.0272, |
| "step": 4200 |
| }, |
| { |
| "epoch": 2.466315172817809, |
| "grad_norm": 0.011844445019960403, |
| "learning_rate": 1.0137082601054482e-05, |
| "loss": 0.0022, |
| "step": 4210 |
| }, |
| { |
| "epoch": 2.472173403632103, |
| "grad_norm": 5.839965343475342, |
| "learning_rate": 1.0113649677797306e-05, |
| "loss": 0.1149, |
| "step": 4220 |
| }, |
| { |
| "epoch": 2.4780316344463973, |
| "grad_norm": 0.023594753816723824, |
| "learning_rate": 1.009021675454013e-05, |
| "loss": 0.0551, |
| "step": 4230 |
| }, |
| { |
| "epoch": 2.483889865260691, |
| "grad_norm": 1.1899319887161255, |
| "learning_rate": 1.0066783831282954e-05, |
| "loss": 0.0286, |
| "step": 4240 |
| }, |
| { |
| "epoch": 2.4897480960749854, |
| "grad_norm": 0.03600088879466057, |
| "learning_rate": 1.0043350908025777e-05, |
| "loss": 0.0567, |
| "step": 4250 |
| }, |
| { |
| "epoch": 2.4956063268892796, |
| "grad_norm": 8.555127143859863, |
| "learning_rate": 1.00199179847686e-05, |
| "loss": 0.1053, |
| "step": 4260 |
| }, |
| { |
| "epoch": 2.5014645577035735, |
| "grad_norm": 0.011175552383065224, |
| "learning_rate": 9.996485061511425e-06, |
| "loss": 0.0026, |
| "step": 4270 |
| }, |
| { |
| "epoch": 2.5073227885178677, |
| "grad_norm": 0.03444055840373039, |
| "learning_rate": 9.973052138254247e-06, |
| "loss": 0.1587, |
| "step": 4280 |
| }, |
| { |
| "epoch": 2.5131810193321615, |
| "grad_norm": 0.018991222605109215, |
| "learning_rate": 9.949619214997071e-06, |
| "loss": 0.0993, |
| "step": 4290 |
| }, |
| { |
| "epoch": 2.519039250146456, |
| "grad_norm": 0.5050359964370728, |
| "learning_rate": 9.926186291739895e-06, |
| "loss": 0.0029, |
| "step": 4300 |
| }, |
| { |
| "epoch": 2.5248974809607496, |
| "grad_norm": 0.05206138268113136, |
| "learning_rate": 9.90275336848272e-06, |
| "loss": 0.0015, |
| "step": 4310 |
| }, |
| { |
| "epoch": 2.530755711775044, |
| "grad_norm": 0.009325658902525902, |
| "learning_rate": 9.879320445225544e-06, |
| "loss": 0.0499, |
| "step": 4320 |
| }, |
| { |
| "epoch": 2.536613942589338, |
| "grad_norm": 1.3305965662002563, |
| "learning_rate": 9.855887521968366e-06, |
| "loss": 0.0027, |
| "step": 4330 |
| }, |
| { |
| "epoch": 2.542472173403632, |
| "grad_norm": 0.016510557383298874, |
| "learning_rate": 9.83245459871119e-06, |
| "loss": 0.0434, |
| "step": 4340 |
| }, |
| { |
| "epoch": 2.5483304042179262, |
| "grad_norm": 0.019491681829094887, |
| "learning_rate": 9.809021675454014e-06, |
| "loss": 0.1281, |
| "step": 4350 |
| }, |
| { |
| "epoch": 2.5541886350322205, |
| "grad_norm": 0.01898523047566414, |
| "learning_rate": 9.785588752196837e-06, |
| "loss": 0.028, |
| "step": 4360 |
| }, |
| { |
| "epoch": 2.5600468658465143, |
| "grad_norm": 0.02802233025431633, |
| "learning_rate": 9.76215582893966e-06, |
| "loss": 0.0589, |
| "step": 4370 |
| }, |
| { |
| "epoch": 2.5659050966608086, |
| "grad_norm": 1.6262356042861938, |
| "learning_rate": 9.738722905682485e-06, |
| "loss": 0.0604, |
| "step": 4380 |
| }, |
| { |
| "epoch": 2.5717633274751024, |
| "grad_norm": 3.5327939987182617, |
| "learning_rate": 9.715289982425309e-06, |
| "loss": 0.1679, |
| "step": 4390 |
| }, |
| { |
| "epoch": 2.5776215582893967, |
| "grad_norm": 0.00650936970487237, |
| "learning_rate": 9.691857059168131e-06, |
| "loss": 0.0189, |
| "step": 4400 |
| }, |
| { |
| "epoch": 2.5834797891036905, |
| "grad_norm": 0.01874629780650139, |
| "learning_rate": 9.668424135910956e-06, |
| "loss": 0.2578, |
| "step": 4410 |
| }, |
| { |
| "epoch": 2.5893380199179847, |
| "grad_norm": 0.012205085717141628, |
| "learning_rate": 9.64499121265378e-06, |
| "loss": 0.0441, |
| "step": 4420 |
| }, |
| { |
| "epoch": 2.595196250732279, |
| "grad_norm": 5.673819541931152, |
| "learning_rate": 9.621558289396604e-06, |
| "loss": 0.0822, |
| "step": 4430 |
| }, |
| { |
| "epoch": 2.601054481546573, |
| "grad_norm": 0.14531837403774261, |
| "learning_rate": 9.598125366139426e-06, |
| "loss": 0.0222, |
| "step": 4440 |
| }, |
| { |
| "epoch": 2.606912712360867, |
| "grad_norm": 4.667191505432129, |
| "learning_rate": 9.57469244288225e-06, |
| "loss": 0.0781, |
| "step": 4450 |
| }, |
| { |
| "epoch": 2.6127709431751613, |
| "grad_norm": 0.022902924567461014, |
| "learning_rate": 9.551259519625073e-06, |
| "loss": 0.0011, |
| "step": 4460 |
| }, |
| { |
| "epoch": 2.618629173989455, |
| "grad_norm": 0.01694462075829506, |
| "learning_rate": 9.527826596367897e-06, |
| "loss": 0.1432, |
| "step": 4470 |
| }, |
| { |
| "epoch": 2.6244874048037494, |
| "grad_norm": 0.01189732737839222, |
| "learning_rate": 9.504393673110721e-06, |
| "loss": 0.0016, |
| "step": 4480 |
| }, |
| { |
| "epoch": 2.6303456356180432, |
| "grad_norm": 0.05359295755624771, |
| "learning_rate": 9.480960749853545e-06, |
| "loss": 0.0014, |
| "step": 4490 |
| }, |
| { |
| "epoch": 2.6362038664323375, |
| "grad_norm": 0.01870916783809662, |
| "learning_rate": 9.45752782659637e-06, |
| "loss": 0.0008, |
| "step": 4500 |
| }, |
| { |
| "epoch": 2.6420620972466313, |
| "grad_norm": 0.028297973796725273, |
| "learning_rate": 9.434094903339193e-06, |
| "loss": 0.0261, |
| "step": 4510 |
| }, |
| { |
| "epoch": 2.6479203280609256, |
| "grad_norm": 10.352713584899902, |
| "learning_rate": 9.410661980082016e-06, |
| "loss": 0.0238, |
| "step": 4520 |
| }, |
| { |
| "epoch": 2.65377855887522, |
| "grad_norm": 0.010723591782152653, |
| "learning_rate": 9.38722905682484e-06, |
| "loss": 0.006, |
| "step": 4530 |
| }, |
| { |
| "epoch": 2.6596367896895137, |
| "grad_norm": 0.012386606074869633, |
| "learning_rate": 9.363796133567662e-06, |
| "loss": 0.0008, |
| "step": 4540 |
| }, |
| { |
| "epoch": 2.665495020503808, |
| "grad_norm": 0.01021635066717863, |
| "learning_rate": 9.340363210310486e-06, |
| "loss": 0.0811, |
| "step": 4550 |
| }, |
| { |
| "epoch": 2.671353251318102, |
| "grad_norm": 0.03580183908343315, |
| "learning_rate": 9.31693028705331e-06, |
| "loss": 0.0029, |
| "step": 4560 |
| }, |
| { |
| "epoch": 2.677211482132396, |
| "grad_norm": 0.004673232790082693, |
| "learning_rate": 9.293497363796135e-06, |
| "loss": 0.0131, |
| "step": 4570 |
| }, |
| { |
| "epoch": 2.68306971294669, |
| "grad_norm": 0.01355653628706932, |
| "learning_rate": 9.270064440538959e-06, |
| "loss": 0.1425, |
| "step": 4580 |
| }, |
| { |
| "epoch": 2.688927943760984, |
| "grad_norm": 0.0050459071062505245, |
| "learning_rate": 9.246631517281783e-06, |
| "loss": 0.0011, |
| "step": 4590 |
| }, |
| { |
| "epoch": 2.6947861745752784, |
| "grad_norm": 0.02213786356151104, |
| "learning_rate": 9.223198594024605e-06, |
| "loss": 0.0449, |
| "step": 4600 |
| }, |
| { |
| "epoch": 2.700644405389572, |
| "grad_norm": 8.822415351867676, |
| "learning_rate": 9.19976567076743e-06, |
| "loss": 0.0382, |
| "step": 4610 |
| }, |
| { |
| "epoch": 2.7065026362038664, |
| "grad_norm": 5.51662015914917, |
| "learning_rate": 9.176332747510252e-06, |
| "loss": 0.0559, |
| "step": 4620 |
| }, |
| { |
| "epoch": 2.7123608670181607, |
| "grad_norm": 0.012278486043214798, |
| "learning_rate": 9.152899824253076e-06, |
| "loss": 0.033, |
| "step": 4630 |
| }, |
| { |
| "epoch": 2.7182190978324545, |
| "grad_norm": 0.0068341088481247425, |
| "learning_rate": 9.1294669009959e-06, |
| "loss": 0.0239, |
| "step": 4640 |
| }, |
| { |
| "epoch": 2.724077328646749, |
| "grad_norm": 0.006046035327017307, |
| "learning_rate": 9.106033977738724e-06, |
| "loss": 0.0207, |
| "step": 4650 |
| }, |
| { |
| "epoch": 2.7299355594610426, |
| "grad_norm": 0.04954253509640694, |
| "learning_rate": 9.082601054481547e-06, |
| "loss": 0.0037, |
| "step": 4660 |
| }, |
| { |
| "epoch": 2.735793790275337, |
| "grad_norm": 0.05820884928107262, |
| "learning_rate": 9.05916813122437e-06, |
| "loss": 0.1181, |
| "step": 4670 |
| }, |
| { |
| "epoch": 2.7416520210896307, |
| "grad_norm": 1.0971448421478271, |
| "learning_rate": 9.035735207967195e-06, |
| "loss": 0.0029, |
| "step": 4680 |
| }, |
| { |
| "epoch": 2.747510251903925, |
| "grad_norm": 0.011835920624434948, |
| "learning_rate": 9.012302284710019e-06, |
| "loss": 0.0419, |
| "step": 4690 |
| }, |
| { |
| "epoch": 2.753368482718219, |
| "grad_norm": 0.23561663925647736, |
| "learning_rate": 8.988869361452841e-06, |
| "loss": 0.0589, |
| "step": 4700 |
| }, |
| { |
| "epoch": 2.759226713532513, |
| "grad_norm": 0.005294264294207096, |
| "learning_rate": 8.965436438195665e-06, |
| "loss": 0.0392, |
| "step": 4710 |
| }, |
| { |
| "epoch": 2.7650849443468073, |
| "grad_norm": 0.018343601375818253, |
| "learning_rate": 8.94200351493849e-06, |
| "loss": 0.0765, |
| "step": 4720 |
| }, |
| { |
| "epoch": 2.7709431751611016, |
| "grad_norm": 0.01226333249360323, |
| "learning_rate": 8.918570591681312e-06, |
| "loss": 0.0454, |
| "step": 4730 |
| }, |
| { |
| "epoch": 2.7768014059753954, |
| "grad_norm": 0.02732853591442108, |
| "learning_rate": 8.895137668424136e-06, |
| "loss": 0.0613, |
| "step": 4740 |
| }, |
| { |
| "epoch": 2.7826596367896896, |
| "grad_norm": 0.009565740823745728, |
| "learning_rate": 8.87170474516696e-06, |
| "loss": 0.1543, |
| "step": 4750 |
| }, |
| { |
| "epoch": 2.7885178676039835, |
| "grad_norm": 0.005303303245455027, |
| "learning_rate": 8.848271821909784e-06, |
| "loss": 0.0008, |
| "step": 4760 |
| }, |
| { |
| "epoch": 2.7943760984182777, |
| "grad_norm": 6.742043495178223, |
| "learning_rate": 8.824838898652608e-06, |
| "loss": 0.1279, |
| "step": 4770 |
| }, |
| { |
| "epoch": 2.8002343292325715, |
| "grad_norm": 0.017538409680128098, |
| "learning_rate": 8.801405975395433e-06, |
| "loss": 0.0303, |
| "step": 4780 |
| }, |
| { |
| "epoch": 2.806092560046866, |
| "grad_norm": 0.5058709383010864, |
| "learning_rate": 8.777973052138255e-06, |
| "loss": 0.0032, |
| "step": 4790 |
| }, |
| { |
| "epoch": 2.81195079086116, |
| "grad_norm": 0.025335755199193954, |
| "learning_rate": 8.754540128881079e-06, |
| "loss": 0.0009, |
| "step": 4800 |
| }, |
| { |
| "epoch": 2.817809021675454, |
| "grad_norm": 0.03526374325156212, |
| "learning_rate": 8.731107205623902e-06, |
| "loss": 0.0523, |
| "step": 4810 |
| }, |
| { |
| "epoch": 2.823667252489748, |
| "grad_norm": 0.17080160975456238, |
| "learning_rate": 8.707674282366726e-06, |
| "loss": 0.1355, |
| "step": 4820 |
| }, |
| { |
| "epoch": 2.8295254833040424, |
| "grad_norm": 0.051808152347803116, |
| "learning_rate": 8.68424135910955e-06, |
| "loss": 0.0025, |
| "step": 4830 |
| }, |
| { |
| "epoch": 2.8353837141183362, |
| "grad_norm": 0.02091473899781704, |
| "learning_rate": 8.660808435852374e-06, |
| "loss": 0.001, |
| "step": 4840 |
| }, |
| { |
| "epoch": 2.8412419449326305, |
| "grad_norm": 0.011587745510041714, |
| "learning_rate": 8.637375512595198e-06, |
| "loss": 0.0477, |
| "step": 4850 |
| }, |
| { |
| "epoch": 2.8471001757469243, |
| "grad_norm": 0.019507162272930145, |
| "learning_rate": 8.61394258933802e-06, |
| "loss": 0.0502, |
| "step": 4860 |
| }, |
| { |
| "epoch": 2.8529584065612186, |
| "grad_norm": 0.06052269786596298, |
| "learning_rate": 8.590509666080845e-06, |
| "loss": 0.0079, |
| "step": 4870 |
| }, |
| { |
| "epoch": 2.8588166373755124, |
| "grad_norm": 0.035398270934820175, |
| "learning_rate": 8.567076742823669e-06, |
| "loss": 0.0665, |
| "step": 4880 |
| }, |
| { |
| "epoch": 2.8646748681898067, |
| "grad_norm": 0.5212422609329224, |
| "learning_rate": 8.543643819566491e-06, |
| "loss": 0.0596, |
| "step": 4890 |
| }, |
| { |
| "epoch": 2.870533099004101, |
| "grad_norm": 20.980255126953125, |
| "learning_rate": 8.520210896309315e-06, |
| "loss": 0.0332, |
| "step": 4900 |
| }, |
| { |
| "epoch": 2.8763913298183947, |
| "grad_norm": 0.005666888318955898, |
| "learning_rate": 8.49677797305214e-06, |
| "loss": 0.0809, |
| "step": 4910 |
| }, |
| { |
| "epoch": 2.882249560632689, |
| "grad_norm": 0.008367589674890041, |
| "learning_rate": 8.473345049794962e-06, |
| "loss": 0.2048, |
| "step": 4920 |
| }, |
| { |
| "epoch": 2.8881077914469833, |
| "grad_norm": 0.004735939204692841, |
| "learning_rate": 8.449912126537786e-06, |
| "loss": 0.0793, |
| "step": 4930 |
| }, |
| { |
| "epoch": 2.893966022261277, |
| "grad_norm": 0.008181789889931679, |
| "learning_rate": 8.42647920328061e-06, |
| "loss": 0.0646, |
| "step": 4940 |
| }, |
| { |
| "epoch": 2.899824253075571, |
| "grad_norm": 0.00998101569712162, |
| "learning_rate": 8.403046280023434e-06, |
| "loss": 0.0014, |
| "step": 4950 |
| }, |
| { |
| "epoch": 2.905682483889865, |
| "grad_norm": 4.3682756423950195, |
| "learning_rate": 8.379613356766258e-06, |
| "loss": 0.0846, |
| "step": 4960 |
| }, |
| { |
| "epoch": 2.9115407147041594, |
| "grad_norm": 0.23237890005111694, |
| "learning_rate": 8.35618043350908e-06, |
| "loss": 0.0053, |
| "step": 4970 |
| }, |
| { |
| "epoch": 2.9173989455184532, |
| "grad_norm": 0.01949893683195114, |
| "learning_rate": 8.332747510251905e-06, |
| "loss": 0.1204, |
| "step": 4980 |
| }, |
| { |
| "epoch": 2.9232571763327475, |
| "grad_norm": 0.01860973611474037, |
| "learning_rate": 8.309314586994727e-06, |
| "loss": 0.1016, |
| "step": 4990 |
| }, |
| { |
| "epoch": 2.9291154071470418, |
| "grad_norm": 0.029606563970446587, |
| "learning_rate": 8.285881663737551e-06, |
| "loss": 0.0556, |
| "step": 5000 |
| }, |
| { |
| "epoch": 2.9349736379613356, |
| "grad_norm": 0.08157221972942352, |
| "learning_rate": 8.262448740480375e-06, |
| "loss": 0.0051, |
| "step": 5010 |
| }, |
| { |
| "epoch": 2.94083186877563, |
| "grad_norm": 5.128834247589111, |
| "learning_rate": 8.2390158172232e-06, |
| "loss": 0.0766, |
| "step": 5020 |
| }, |
| { |
| "epoch": 2.946690099589924, |
| "grad_norm": 0.009375077672302723, |
| "learning_rate": 8.215582893966024e-06, |
| "loss": 0.0119, |
| "step": 5030 |
| }, |
| { |
| "epoch": 2.952548330404218, |
| "grad_norm": 0.022684168070554733, |
| "learning_rate": 8.192149970708848e-06, |
| "loss": 0.065, |
| "step": 5040 |
| }, |
| { |
| "epoch": 2.9584065612185118, |
| "grad_norm": 0.009624495171010494, |
| "learning_rate": 8.16871704745167e-06, |
| "loss": 0.0009, |
| "step": 5050 |
| }, |
| { |
| "epoch": 2.964264792032806, |
| "grad_norm": 0.012474278919398785, |
| "learning_rate": 8.145284124194494e-06, |
| "loss": 0.0695, |
| "step": 5060 |
| }, |
| { |
| "epoch": 2.9701230228471003, |
| "grad_norm": 0.015769358724355698, |
| "learning_rate": 8.121851200937317e-06, |
| "loss": 0.002, |
| "step": 5070 |
| }, |
| { |
| "epoch": 2.975981253661394, |
| "grad_norm": 0.028395146131515503, |
| "learning_rate": 8.09841827768014e-06, |
| "loss": 0.0201, |
| "step": 5080 |
| }, |
| { |
| "epoch": 2.9818394844756884, |
| "grad_norm": 0.015639053657650948, |
| "learning_rate": 8.074985354422965e-06, |
| "loss": 0.0012, |
| "step": 5090 |
| }, |
| { |
| "epoch": 2.9876977152899826, |
| "grad_norm": 0.0030274316668510437, |
| "learning_rate": 8.051552431165789e-06, |
| "loss": 0.0005, |
| "step": 5100 |
| }, |
| { |
| "epoch": 2.9935559461042764, |
| "grad_norm": 0.0040931059047579765, |
| "learning_rate": 8.028119507908613e-06, |
| "loss": 0.116, |
| "step": 5110 |
| }, |
| { |
| "epoch": 2.9994141769185707, |
| "grad_norm": 0.009678595699369907, |
| "learning_rate": 8.004686584651435e-06, |
| "loss": 0.0424, |
| "step": 5120 |
| }, |
| { |
| "epoch": 3.0052724077328645, |
| "grad_norm": 0.011326028034090996, |
| "learning_rate": 7.98125366139426e-06, |
| "loss": 0.0055, |
| "step": 5130 |
| }, |
| { |
| "epoch": 3.011130638547159, |
| "grad_norm": 0.039426740258932114, |
| "learning_rate": 7.957820738137084e-06, |
| "loss": 0.0352, |
| "step": 5140 |
| }, |
| { |
| "epoch": 3.016988869361453, |
| "grad_norm": 0.006824046839028597, |
| "learning_rate": 7.934387814879906e-06, |
| "loss": 0.0763, |
| "step": 5150 |
| }, |
| { |
| "epoch": 3.022847100175747, |
| "grad_norm": 0.006939908489584923, |
| "learning_rate": 7.91095489162273e-06, |
| "loss": 0.0226, |
| "step": 5160 |
| }, |
| { |
| "epoch": 3.028705330990041, |
| "grad_norm": 4.174925327301025, |
| "learning_rate": 7.887521968365554e-06, |
| "loss": 0.0047, |
| "step": 5170 |
| }, |
| { |
| "epoch": 3.034563561804335, |
| "grad_norm": 7.989070415496826, |
| "learning_rate": 7.864089045108378e-06, |
| "loss": 0.1392, |
| "step": 5180 |
| }, |
| { |
| "epoch": 3.040421792618629, |
| "grad_norm": 0.038751065731048584, |
| "learning_rate": 7.840656121851201e-06, |
| "loss": 0.0339, |
| "step": 5190 |
| }, |
| { |
| "epoch": 3.0462800234329235, |
| "grad_norm": 0.2962633967399597, |
| "learning_rate": 7.817223198594025e-06, |
| "loss": 0.0638, |
| "step": 5200 |
| }, |
| { |
| "epoch": 3.0521382542472173, |
| "grad_norm": 0.006989763118326664, |
| "learning_rate": 7.793790275336849e-06, |
| "loss": 0.0402, |
| "step": 5210 |
| }, |
| { |
| "epoch": 3.0579964850615116, |
| "grad_norm": 0.01909850351512432, |
| "learning_rate": 7.770357352079673e-06, |
| "loss": 0.0014, |
| "step": 5220 |
| }, |
| { |
| "epoch": 3.0638547158758054, |
| "grad_norm": 0.02757435478270054, |
| "learning_rate": 7.746924428822497e-06, |
| "loss": 0.0057, |
| "step": 5230 |
| }, |
| { |
| "epoch": 3.0697129466900996, |
| "grad_norm": 0.007305964361876249, |
| "learning_rate": 7.72349150556532e-06, |
| "loss": 0.0185, |
| "step": 5240 |
| }, |
| { |
| "epoch": 3.0755711775043935, |
| "grad_norm": 0.07572151720523834, |
| "learning_rate": 7.700058582308144e-06, |
| "loss": 0.008, |
| "step": 5250 |
| }, |
| { |
| "epoch": 3.0814294083186877, |
| "grad_norm": 0.02649087645113468, |
| "learning_rate": 7.676625659050966e-06, |
| "loss": 0.0367, |
| "step": 5260 |
| }, |
| { |
| "epoch": 3.087287639132982, |
| "grad_norm": 6.918787479400635, |
| "learning_rate": 7.65319273579379e-06, |
| "loss": 0.056, |
| "step": 5270 |
| }, |
| { |
| "epoch": 3.093145869947276, |
| "grad_norm": 0.018537085503339767, |
| "learning_rate": 7.629759812536615e-06, |
| "loss": 0.0009, |
| "step": 5280 |
| }, |
| { |
| "epoch": 3.09900410076157, |
| "grad_norm": 0.03876962885260582, |
| "learning_rate": 7.606326889279439e-06, |
| "loss": 0.1119, |
| "step": 5290 |
| }, |
| { |
| "epoch": 3.104862331575864, |
| "grad_norm": 0.011184383183717728, |
| "learning_rate": 7.582893966022262e-06, |
| "loss": 0.0459, |
| "step": 5300 |
| }, |
| { |
| "epoch": 3.110720562390158, |
| "grad_norm": 0.043455854058265686, |
| "learning_rate": 7.559461042765086e-06, |
| "loss": 0.0941, |
| "step": 5310 |
| }, |
| { |
| "epoch": 3.1165787932044524, |
| "grad_norm": 0.007882621139287949, |
| "learning_rate": 7.5360281195079085e-06, |
| "loss": 0.0465, |
| "step": 5320 |
| }, |
| { |
| "epoch": 3.1224370240187462, |
| "grad_norm": 0.019092563539743423, |
| "learning_rate": 7.5125951962507326e-06, |
| "loss": 0.0769, |
| "step": 5330 |
| }, |
| { |
| "epoch": 3.1282952548330405, |
| "grad_norm": 0.0035473741590976715, |
| "learning_rate": 7.489162272993557e-06, |
| "loss": 0.0005, |
| "step": 5340 |
| }, |
| { |
| "epoch": 3.1341534856473343, |
| "grad_norm": 11.695319175720215, |
| "learning_rate": 7.46572934973638e-06, |
| "loss": 0.1004, |
| "step": 5350 |
| }, |
| { |
| "epoch": 3.1400117164616286, |
| "grad_norm": 0.024623600766062737, |
| "learning_rate": 7.442296426479204e-06, |
| "loss": 0.0007, |
| "step": 5360 |
| }, |
| { |
| "epoch": 3.145869947275923, |
| "grad_norm": 0.011481067165732384, |
| "learning_rate": 7.418863503222028e-06, |
| "loss": 0.0835, |
| "step": 5370 |
| }, |
| { |
| "epoch": 3.1517281780902167, |
| "grad_norm": 0.008594786748290062, |
| "learning_rate": 7.395430579964851e-06, |
| "loss": 0.0006, |
| "step": 5380 |
| }, |
| { |
| "epoch": 3.157586408904511, |
| "grad_norm": 0.17031900584697723, |
| "learning_rate": 7.371997656707675e-06, |
| "loss": 0.001, |
| "step": 5390 |
| }, |
| { |
| "epoch": 3.1634446397188047, |
| "grad_norm": 0.013221434317529202, |
| "learning_rate": 7.348564733450498e-06, |
| "loss": 0.0677, |
| "step": 5400 |
| }, |
| { |
| "epoch": 3.169302870533099, |
| "grad_norm": 0.014631446450948715, |
| "learning_rate": 7.325131810193322e-06, |
| "loss": 0.0011, |
| "step": 5410 |
| }, |
| { |
| "epoch": 3.1751611013473933, |
| "grad_norm": 1.4008026123046875, |
| "learning_rate": 7.301698886936146e-06, |
| "loss": 0.0021, |
| "step": 5420 |
| }, |
| { |
| "epoch": 3.181019332161687, |
| "grad_norm": 0.018001943826675415, |
| "learning_rate": 7.2782659636789695e-06, |
| "loss": 0.004, |
| "step": 5430 |
| }, |
| { |
| "epoch": 3.1868775629759813, |
| "grad_norm": 0.01583823189139366, |
| "learning_rate": 7.254833040421794e-06, |
| "loss": 0.062, |
| "step": 5440 |
| }, |
| { |
| "epoch": 3.192735793790275, |
| "grad_norm": 0.003109519137069583, |
| "learning_rate": 7.231400117164616e-06, |
| "loss": 0.0125, |
| "step": 5450 |
| }, |
| { |
| "epoch": 3.1985940246045694, |
| "grad_norm": 9.15262222290039, |
| "learning_rate": 7.20796719390744e-06, |
| "loss": 0.0794, |
| "step": 5460 |
| }, |
| { |
| "epoch": 3.2044522554188637, |
| "grad_norm": 0.016906848177313805, |
| "learning_rate": 7.184534270650264e-06, |
| "loss": 0.0019, |
| "step": 5470 |
| }, |
| { |
| "epoch": 3.2103104862331575, |
| "grad_norm": 0.016455991193652153, |
| "learning_rate": 7.161101347393088e-06, |
| "loss": 0.0014, |
| "step": 5480 |
| }, |
| { |
| "epoch": 3.2161687170474518, |
| "grad_norm": 23.191343307495117, |
| "learning_rate": 7.137668424135912e-06, |
| "loss": 0.1007, |
| "step": 5490 |
| }, |
| { |
| "epoch": 3.2220269478617456, |
| "grad_norm": 1.549330234527588, |
| "learning_rate": 7.114235500878736e-06, |
| "loss": 0.0032, |
| "step": 5500 |
| }, |
| { |
| "epoch": 3.22788517867604, |
| "grad_norm": 0.004654415417462587, |
| "learning_rate": 7.090802577621558e-06, |
| "loss": 0.053, |
| "step": 5510 |
| }, |
| { |
| "epoch": 3.233743409490334, |
| "grad_norm": 0.040500707924366, |
| "learning_rate": 7.067369654364382e-06, |
| "loss": 0.0008, |
| "step": 5520 |
| }, |
| { |
| "epoch": 3.239601640304628, |
| "grad_norm": 0.0040084077045321465, |
| "learning_rate": 7.043936731107206e-06, |
| "loss": 0.1087, |
| "step": 5530 |
| }, |
| { |
| "epoch": 3.245459871118922, |
| "grad_norm": 0.009112117812037468, |
| "learning_rate": 7.02050380785003e-06, |
| "loss": 0.0138, |
| "step": 5540 |
| }, |
| { |
| "epoch": 3.251318101933216, |
| "grad_norm": 0.004673032555729151, |
| "learning_rate": 6.997070884592854e-06, |
| "loss": 0.0546, |
| "step": 5550 |
| }, |
| { |
| "epoch": 3.2571763327475103, |
| "grad_norm": 0.014100752770900726, |
| "learning_rate": 6.973637961335678e-06, |
| "loss": 0.1029, |
| "step": 5560 |
| }, |
| { |
| "epoch": 3.2630345635618045, |
| "grad_norm": 0.019094625487923622, |
| "learning_rate": 6.950205038078501e-06, |
| "loss": 0.0014, |
| "step": 5570 |
| }, |
| { |
| "epoch": 3.2688927943760984, |
| "grad_norm": 0.014180914498865604, |
| "learning_rate": 6.926772114821324e-06, |
| "loss": 0.0015, |
| "step": 5580 |
| }, |
| { |
| "epoch": 3.2747510251903926, |
| "grad_norm": 12.107597351074219, |
| "learning_rate": 6.903339191564148e-06, |
| "loss": 0.1002, |
| "step": 5590 |
| }, |
| { |
| "epoch": 3.2806092560046864, |
| "grad_norm": 0.009043432772159576, |
| "learning_rate": 6.879906268306972e-06, |
| "loss": 0.002, |
| "step": 5600 |
| }, |
| { |
| "epoch": 3.2864674868189807, |
| "grad_norm": 0.03756948560476303, |
| "learning_rate": 6.856473345049796e-06, |
| "loss": 0.0593, |
| "step": 5610 |
| }, |
| { |
| "epoch": 3.2923257176332745, |
| "grad_norm": 0.003288848791271448, |
| "learning_rate": 6.833040421792619e-06, |
| "loss": 0.0003, |
| "step": 5620 |
| }, |
| { |
| "epoch": 3.298183948447569, |
| "grad_norm": 0.14631402492523193, |
| "learning_rate": 6.809607498535443e-06, |
| "loss": 0.1386, |
| "step": 5630 |
| }, |
| { |
| "epoch": 3.304042179261863, |
| "grad_norm": 0.5870568156242371, |
| "learning_rate": 6.786174575278267e-06, |
| "loss": 0.0011, |
| "step": 5640 |
| }, |
| { |
| "epoch": 3.309900410076157, |
| "grad_norm": 0.06321928650140762, |
| "learning_rate": 6.76274165202109e-06, |
| "loss": 0.0016, |
| "step": 5650 |
| }, |
| { |
| "epoch": 3.315758640890451, |
| "grad_norm": 0.01610243320465088, |
| "learning_rate": 6.739308728763914e-06, |
| "loss": 0.0012, |
| "step": 5660 |
| }, |
| { |
| "epoch": 3.3216168717047454, |
| "grad_norm": 0.01371396891772747, |
| "learning_rate": 6.715875805506737e-06, |
| "loss": 0.0007, |
| "step": 5670 |
| }, |
| { |
| "epoch": 3.327475102519039, |
| "grad_norm": 0.010886987671256065, |
| "learning_rate": 6.692442882249561e-06, |
| "loss": 0.0055, |
| "step": 5680 |
| }, |
| { |
| "epoch": 3.3333333333333335, |
| "grad_norm": 0.021596791222691536, |
| "learning_rate": 6.669009958992385e-06, |
| "loss": 0.0634, |
| "step": 5690 |
| }, |
| { |
| "epoch": 3.3391915641476273, |
| "grad_norm": 0.005040355958044529, |
| "learning_rate": 6.645577035735209e-06, |
| "loss": 0.0016, |
| "step": 5700 |
| }, |
| { |
| "epoch": 3.3450497949619216, |
| "grad_norm": 0.013297148048877716, |
| "learning_rate": 6.622144112478032e-06, |
| "loss": 0.0018, |
| "step": 5710 |
| }, |
| { |
| "epoch": 3.3509080257762154, |
| "grad_norm": 1.7069166898727417, |
| "learning_rate": 6.598711189220855e-06, |
| "loss": 0.0047, |
| "step": 5720 |
| }, |
| { |
| "epoch": 3.3567662565905096, |
| "grad_norm": 0.018289022147655487, |
| "learning_rate": 6.575278265963679e-06, |
| "loss": 0.1117, |
| "step": 5730 |
| }, |
| { |
| "epoch": 3.362624487404804, |
| "grad_norm": 0.0036156855057924986, |
| "learning_rate": 6.5518453427065035e-06, |
| "loss": 0.0294, |
| "step": 5740 |
| }, |
| { |
| "epoch": 3.3684827182190977, |
| "grad_norm": 0.02520505152642727, |
| "learning_rate": 6.528412419449327e-06, |
| "loss": 0.1358, |
| "step": 5750 |
| }, |
| { |
| "epoch": 3.374340949033392, |
| "grad_norm": 0.02904174104332924, |
| "learning_rate": 6.504979496192151e-06, |
| "loss": 0.0013, |
| "step": 5760 |
| }, |
| { |
| "epoch": 3.380199179847686, |
| "grad_norm": 0.009434922598302364, |
| "learning_rate": 6.481546572934975e-06, |
| "loss": 0.0488, |
| "step": 5770 |
| }, |
| { |
| "epoch": 3.38605741066198, |
| "grad_norm": 0.0074505978263914585, |
| "learning_rate": 6.458113649677797e-06, |
| "loss": 0.0813, |
| "step": 5780 |
| }, |
| { |
| "epoch": 3.3919156414762743, |
| "grad_norm": 0.04690678417682648, |
| "learning_rate": 6.4346807264206215e-06, |
| "loss": 0.0007, |
| "step": 5790 |
| }, |
| { |
| "epoch": 3.397773872290568, |
| "grad_norm": 0.00975983776152134, |
| "learning_rate": 6.411247803163445e-06, |
| "loss": 0.0011, |
| "step": 5800 |
| }, |
| { |
| "epoch": 3.4036321031048624, |
| "grad_norm": 0.007019174285233021, |
| "learning_rate": 6.387814879906269e-06, |
| "loss": 0.0174, |
| "step": 5810 |
| }, |
| { |
| "epoch": 3.4094903339191562, |
| "grad_norm": 0.2021629959344864, |
| "learning_rate": 6.364381956649093e-06, |
| "loss": 0.0034, |
| "step": 5820 |
| }, |
| { |
| "epoch": 3.4153485647334505, |
| "grad_norm": 5.6091694831848145, |
| "learning_rate": 6.340949033391916e-06, |
| "loss": 0.1777, |
| "step": 5830 |
| }, |
| { |
| "epoch": 3.4212067955477448, |
| "grad_norm": 0.01964072696864605, |
| "learning_rate": 6.3175161101347395e-06, |
| "loss": 0.0405, |
| "step": 5840 |
| }, |
| { |
| "epoch": 3.4270650263620386, |
| "grad_norm": 0.08962537348270416, |
| "learning_rate": 6.294083186877563e-06, |
| "loss": 0.0137, |
| "step": 5850 |
| }, |
| { |
| "epoch": 3.432923257176333, |
| "grad_norm": 0.04991769418120384, |
| "learning_rate": 6.270650263620387e-06, |
| "loss": 0.0265, |
| "step": 5860 |
| }, |
| { |
| "epoch": 3.4387814879906267, |
| "grad_norm": 0.009561276063323021, |
| "learning_rate": 6.247217340363211e-06, |
| "loss": 0.0042, |
| "step": 5870 |
| }, |
| { |
| "epoch": 3.444639718804921, |
| "grad_norm": 0.021691441535949707, |
| "learning_rate": 6.223784417106034e-06, |
| "loss": 0.0006, |
| "step": 5880 |
| }, |
| { |
| "epoch": 3.450497949619215, |
| "grad_norm": 0.03954236954450607, |
| "learning_rate": 6.200351493848858e-06, |
| "loss": 0.0705, |
| "step": 5890 |
| }, |
| { |
| "epoch": 3.456356180433509, |
| "grad_norm": 0.048457127064466476, |
| "learning_rate": 6.1769185705916825e-06, |
| "loss": 0.0015, |
| "step": 5900 |
| }, |
| { |
| "epoch": 3.4622144112478033, |
| "grad_norm": 0.21364781260490417, |
| "learning_rate": 6.153485647334505e-06, |
| "loss": 0.0015, |
| "step": 5910 |
| }, |
| { |
| "epoch": 3.468072642062097, |
| "grad_norm": 11.668537139892578, |
| "learning_rate": 6.130052724077329e-06, |
| "loss": 0.1273, |
| "step": 5920 |
| }, |
| { |
| "epoch": 3.4739308728763914, |
| "grad_norm": 6.336328506469727, |
| "learning_rate": 6.106619800820152e-06, |
| "loss": 0.1067, |
| "step": 5930 |
| }, |
| { |
| "epoch": 3.4797891036906856, |
| "grad_norm": 0.013874629512429237, |
| "learning_rate": 6.083186877562976e-06, |
| "loss": 0.0825, |
| "step": 5940 |
| }, |
| { |
| "epoch": 3.4856473345049794, |
| "grad_norm": 0.019092243164777756, |
| "learning_rate": 6.0597539543058005e-06, |
| "loss": 0.0014, |
| "step": 5950 |
| }, |
| { |
| "epoch": 3.4915055653192737, |
| "grad_norm": 0.0071174767799675465, |
| "learning_rate": 6.036321031048624e-06, |
| "loss": 0.0776, |
| "step": 5960 |
| }, |
| { |
| "epoch": 3.4973637961335675, |
| "grad_norm": 0.011898105032742023, |
| "learning_rate": 6.012888107791447e-06, |
| "loss": 0.0421, |
| "step": 5970 |
| }, |
| { |
| "epoch": 3.503222026947862, |
| "grad_norm": 0.005896019283682108, |
| "learning_rate": 5.989455184534271e-06, |
| "loss": 0.0042, |
| "step": 5980 |
| }, |
| { |
| "epoch": 3.5090802577621556, |
| "grad_norm": 5.650592803955078, |
| "learning_rate": 5.9660222612770944e-06, |
| "loss": 0.0315, |
| "step": 5990 |
| }, |
| { |
| "epoch": 3.51493848857645, |
| "grad_norm": 0.0065024252980947495, |
| "learning_rate": 5.9425893380199186e-06, |
| "loss": 0.1116, |
| "step": 6000 |
| }, |
| { |
| "epoch": 3.520796719390744, |
| "grad_norm": 0.011170946061611176, |
| "learning_rate": 5.919156414762743e-06, |
| "loss": 0.016, |
| "step": 6010 |
| }, |
| { |
| "epoch": 3.526654950205038, |
| "grad_norm": 0.008527078665792942, |
| "learning_rate": 5.895723491505566e-06, |
| "loss": 0.0875, |
| "step": 6020 |
| }, |
| { |
| "epoch": 3.532513181019332, |
| "grad_norm": 6.157717704772949, |
| "learning_rate": 5.87229056824839e-06, |
| "loss": 0.0056, |
| "step": 6030 |
| }, |
| { |
| "epoch": 3.5383714118336265, |
| "grad_norm": 0.02608925849199295, |
| "learning_rate": 5.8488576449912125e-06, |
| "loss": 0.1613, |
| "step": 6040 |
| }, |
| { |
| "epoch": 3.5442296426479203, |
| "grad_norm": 0.0074567231349647045, |
| "learning_rate": 5.825424721734037e-06, |
| "loss": 0.0732, |
| "step": 6050 |
| }, |
| { |
| "epoch": 3.5500878734622145, |
| "grad_norm": 0.004207393154501915, |
| "learning_rate": 5.801991798476861e-06, |
| "loss": 0.0088, |
| "step": 6060 |
| }, |
| { |
| "epoch": 3.5559461042765084, |
| "grad_norm": 0.03752859681844711, |
| "learning_rate": 5.778558875219684e-06, |
| "loss": 0.0189, |
| "step": 6070 |
| }, |
| { |
| "epoch": 3.5618043350908026, |
| "grad_norm": 0.21637430787086487, |
| "learning_rate": 5.755125951962508e-06, |
| "loss": 0.0853, |
| "step": 6080 |
| }, |
| { |
| "epoch": 3.5676625659050965, |
| "grad_norm": 0.00733395479619503, |
| "learning_rate": 5.731693028705332e-06, |
| "loss": 0.0009, |
| "step": 6090 |
| }, |
| { |
| "epoch": 3.5735207967193907, |
| "grad_norm": 0.11012960225343704, |
| "learning_rate": 5.7082601054481555e-06, |
| "loss": 0.0006, |
| "step": 6100 |
| }, |
| { |
| "epoch": 3.579379027533685, |
| "grad_norm": 0.004387567285448313, |
| "learning_rate": 5.684827182190979e-06, |
| "loss": 0.045, |
| "step": 6110 |
| }, |
| { |
| "epoch": 3.585237258347979, |
| "grad_norm": 0.0044116429053246975, |
| "learning_rate": 5.661394258933802e-06, |
| "loss": 0.003, |
| "step": 6120 |
| }, |
| { |
| "epoch": 3.591095489162273, |
| "grad_norm": 0.0144761111587286, |
| "learning_rate": 5.637961335676626e-06, |
| "loss": 0.015, |
| "step": 6130 |
| }, |
| { |
| "epoch": 3.5969537199765673, |
| "grad_norm": 0.037230703979730606, |
| "learning_rate": 5.61452841241945e-06, |
| "loss": 0.0015, |
| "step": 6140 |
| }, |
| { |
| "epoch": 3.602811950790861, |
| "grad_norm": 0.002905528526753187, |
| "learning_rate": 5.5910954891622735e-06, |
| "loss": 0.0701, |
| "step": 6150 |
| }, |
| { |
| "epoch": 3.6086701816051554, |
| "grad_norm": 0.023759884759783745, |
| "learning_rate": 5.567662565905098e-06, |
| "loss": 0.0009, |
| "step": 6160 |
| }, |
| { |
| "epoch": 3.614528412419449, |
| "grad_norm": 0.028644567355513573, |
| "learning_rate": 5.54422964264792e-06, |
| "loss": 0.0111, |
| "step": 6170 |
| }, |
| { |
| "epoch": 3.6203866432337435, |
| "grad_norm": 8.238365173339844, |
| "learning_rate": 5.520796719390744e-06, |
| "loss": 0.0121, |
| "step": 6180 |
| }, |
| { |
| "epoch": 3.6262448740480373, |
| "grad_norm": 0.018764765933156013, |
| "learning_rate": 5.497363796133568e-06, |
| "loss": 0.0021, |
| "step": 6190 |
| }, |
| { |
| "epoch": 3.6321031048623316, |
| "grad_norm": 0.008327108807861805, |
| "learning_rate": 5.4739308728763915e-06, |
| "loss": 0.0454, |
| "step": 6200 |
| }, |
| { |
| "epoch": 3.637961335676626, |
| "grad_norm": 14.486366271972656, |
| "learning_rate": 5.450497949619216e-06, |
| "loss": 0.0652, |
| "step": 6210 |
| }, |
| { |
| "epoch": 3.6438195664909196, |
| "grad_norm": 0.003859069664031267, |
| "learning_rate": 5.42706502636204e-06, |
| "loss": 0.0061, |
| "step": 6220 |
| }, |
| { |
| "epoch": 3.649677797305214, |
| "grad_norm": 0.008242030628025532, |
| "learning_rate": 5.403632103104863e-06, |
| "loss": 0.0894, |
| "step": 6230 |
| }, |
| { |
| "epoch": 3.655536028119508, |
| "grad_norm": 0.003433211473748088, |
| "learning_rate": 5.380199179847686e-06, |
| "loss": 0.0657, |
| "step": 6240 |
| }, |
| { |
| "epoch": 3.661394258933802, |
| "grad_norm": 0.027050454169511795, |
| "learning_rate": 5.3567662565905095e-06, |
| "loss": 0.0009, |
| "step": 6250 |
| }, |
| { |
| "epoch": 3.667252489748096, |
| "grad_norm": 0.058935631066560745, |
| "learning_rate": 5.333333333333334e-06, |
| "loss": 0.0012, |
| "step": 6260 |
| }, |
| { |
| "epoch": 3.67311072056239, |
| "grad_norm": 1.1793447732925415, |
| "learning_rate": 5.309900410076158e-06, |
| "loss": 0.0981, |
| "step": 6270 |
| }, |
| { |
| "epoch": 3.6789689513766843, |
| "grad_norm": 0.008707714267075062, |
| "learning_rate": 5.286467486818981e-06, |
| "loss": 0.0514, |
| "step": 6280 |
| }, |
| { |
| "epoch": 3.684827182190978, |
| "grad_norm": 0.015372232533991337, |
| "learning_rate": 5.263034563561805e-06, |
| "loss": 0.0492, |
| "step": 6290 |
| }, |
| { |
| "epoch": 3.6906854130052724, |
| "grad_norm": 0.042791515588760376, |
| "learning_rate": 5.2396016403046276e-06, |
| "loss": 0.0943, |
| "step": 6300 |
| }, |
| { |
| "epoch": 3.6965436438195667, |
| "grad_norm": 0.016382984817028046, |
| "learning_rate": 5.216168717047452e-06, |
| "loss": 0.0488, |
| "step": 6310 |
| }, |
| { |
| "epoch": 3.7024018746338605, |
| "grad_norm": 0.27941107749938965, |
| "learning_rate": 5.192735793790276e-06, |
| "loss": 0.0018, |
| "step": 6320 |
| }, |
| { |
| "epoch": 3.7082601054481548, |
| "grad_norm": 1.3604308366775513, |
| "learning_rate": 5.169302870533099e-06, |
| "loss": 0.0513, |
| "step": 6330 |
| }, |
| { |
| "epoch": 3.7141183362624486, |
| "grad_norm": 0.014508064836263657, |
| "learning_rate": 5.145869947275923e-06, |
| "loss": 0.109, |
| "step": 6340 |
| }, |
| { |
| "epoch": 3.719976567076743, |
| "grad_norm": 0.002610184019431472, |
| "learning_rate": 5.122437024018747e-06, |
| "loss": 0.0587, |
| "step": 6350 |
| }, |
| { |
| "epoch": 3.7258347978910367, |
| "grad_norm": 0.097575843334198, |
| "learning_rate": 5.0990041007615706e-06, |
| "loss": 0.0074, |
| "step": 6360 |
| }, |
| { |
| "epoch": 3.731693028705331, |
| "grad_norm": 0.004868449177592993, |
| "learning_rate": 5.075571177504394e-06, |
| "loss": 0.0429, |
| "step": 6370 |
| }, |
| { |
| "epoch": 3.737551259519625, |
| "grad_norm": 0.018078980967402458, |
| "learning_rate": 5.052138254247217e-06, |
| "loss": 0.0255, |
| "step": 6380 |
| }, |
| { |
| "epoch": 3.743409490333919, |
| "grad_norm": 0.004908588249236345, |
| "learning_rate": 5.028705330990041e-06, |
| "loss": 0.003, |
| "step": 6390 |
| }, |
| { |
| "epoch": 3.7492677211482133, |
| "grad_norm": 0.38909196853637695, |
| "learning_rate": 5.005272407732865e-06, |
| "loss": 0.0012, |
| "step": 6400 |
| }, |
| { |
| "epoch": 3.7551259519625075, |
| "grad_norm": 0.009000121615827084, |
| "learning_rate": 4.981839484475689e-06, |
| "loss": 0.1188, |
| "step": 6410 |
| }, |
| { |
| "epoch": 3.7609841827768014, |
| "grad_norm": 19.085678100585938, |
| "learning_rate": 4.958406561218512e-06, |
| "loss": 0.0977, |
| "step": 6420 |
| }, |
| { |
| "epoch": 3.7668424135910956, |
| "grad_norm": 0.1372321993112564, |
| "learning_rate": 4.934973637961336e-06, |
| "loss": 0.0021, |
| "step": 6430 |
| }, |
| { |
| "epoch": 3.7727006444053894, |
| "grad_norm": 0.018704118207097054, |
| "learning_rate": 4.91154071470416e-06, |
| "loss": 0.0008, |
| "step": 6440 |
| }, |
| { |
| "epoch": 3.7785588752196837, |
| "grad_norm": 0.009288474917411804, |
| "learning_rate": 4.888107791446983e-06, |
| "loss": 0.0373, |
| "step": 6450 |
| }, |
| { |
| "epoch": 3.7844171060339775, |
| "grad_norm": 0.003785450244322419, |
| "learning_rate": 4.8646748681898075e-06, |
| "loss": 0.0143, |
| "step": 6460 |
| }, |
| { |
| "epoch": 3.790275336848272, |
| "grad_norm": 0.0340275801718235, |
| "learning_rate": 4.841241944932631e-06, |
| "loss": 0.0006, |
| "step": 6470 |
| }, |
| { |
| "epoch": 3.796133567662566, |
| "grad_norm": 0.0029304090421646833, |
| "learning_rate": 4.817809021675454e-06, |
| "loss": 0.0692, |
| "step": 6480 |
| }, |
| { |
| "epoch": 3.80199179847686, |
| "grad_norm": 0.002915244782343507, |
| "learning_rate": 4.794376098418278e-06, |
| "loss": 0.0648, |
| "step": 6490 |
| }, |
| { |
| "epoch": 3.807850029291154, |
| "grad_norm": 0.015850398689508438, |
| "learning_rate": 4.770943175161102e-06, |
| "loss": 0.2175, |
| "step": 6500 |
| }, |
| { |
| "epoch": 3.8137082601054484, |
| "grad_norm": 7.541270732879639, |
| "learning_rate": 4.7475102519039255e-06, |
| "loss": 0.0332, |
| "step": 6510 |
| }, |
| { |
| "epoch": 3.819566490919742, |
| "grad_norm": 0.06860964745283127, |
| "learning_rate": 4.724077328646749e-06, |
| "loss": 0.0722, |
| "step": 6520 |
| }, |
| { |
| "epoch": 3.8254247217340365, |
| "grad_norm": 0.013454129919409752, |
| "learning_rate": 4.700644405389573e-06, |
| "loss": 0.0136, |
| "step": 6530 |
| }, |
| { |
| "epoch": 3.8312829525483303, |
| "grad_norm": 15.519331932067871, |
| "learning_rate": 4.677211482132397e-06, |
| "loss": 0.1096, |
| "step": 6540 |
| }, |
| { |
| "epoch": 3.8371411833626246, |
| "grad_norm": 0.2657744884490967, |
| "learning_rate": 4.65377855887522e-06, |
| "loss": 0.0208, |
| "step": 6550 |
| }, |
| { |
| "epoch": 3.8429994141769184, |
| "grad_norm": 0.15025635063648224, |
| "learning_rate": 4.6303456356180435e-06, |
| "loss": 0.0053, |
| "step": 6560 |
| }, |
| { |
| "epoch": 3.8488576449912126, |
| "grad_norm": 3.5955233573913574, |
| "learning_rate": 4.606912712360868e-06, |
| "loss": 0.165, |
| "step": 6570 |
| }, |
| { |
| "epoch": 3.854715875805507, |
| "grad_norm": 0.007440212648361921, |
| "learning_rate": 4.583479789103691e-06, |
| "loss": 0.0355, |
| "step": 6580 |
| }, |
| { |
| "epoch": 3.8605741066198007, |
| "grad_norm": 0.011499539948999882, |
| "learning_rate": 4.560046865846515e-06, |
| "loss": 0.0562, |
| "step": 6590 |
| }, |
| { |
| "epoch": 3.866432337434095, |
| "grad_norm": 0.007440892513841391, |
| "learning_rate": 4.536613942589338e-06, |
| "loss": 0.0007, |
| "step": 6600 |
| }, |
| { |
| "epoch": 3.8722905682483892, |
| "grad_norm": 0.04684402793645859, |
| "learning_rate": 4.5131810193321615e-06, |
| "loss": 0.0627, |
| "step": 6610 |
| }, |
| { |
| "epoch": 3.878148799062683, |
| "grad_norm": 0.0034894447308033705, |
| "learning_rate": 4.489748096074986e-06, |
| "loss": 0.0025, |
| "step": 6620 |
| }, |
| { |
| "epoch": 3.884007029876977, |
| "grad_norm": 8.962274551391602, |
| "learning_rate": 4.46631517281781e-06, |
| "loss": 0.0395, |
| "step": 6630 |
| }, |
| { |
| "epoch": 3.889865260691271, |
| "grad_norm": 0.004020276945084333, |
| "learning_rate": 4.442882249560633e-06, |
| "loss": 0.0207, |
| "step": 6640 |
| }, |
| { |
| "epoch": 3.8957234915055654, |
| "grad_norm": 0.07267985492944717, |
| "learning_rate": 4.419449326303456e-06, |
| "loss": 0.0442, |
| "step": 6650 |
| }, |
| { |
| "epoch": 3.9015817223198592, |
| "grad_norm": 0.09898925572633743, |
| "learning_rate": 4.3960164030462804e-06, |
| "loss": 0.0411, |
| "step": 6660 |
| }, |
| { |
| "epoch": 3.9074399531341535, |
| "grad_norm": 1.5570751428604126, |
| "learning_rate": 4.3725834797891045e-06, |
| "loss": 0.0262, |
| "step": 6670 |
| }, |
| { |
| "epoch": 3.9132981839484478, |
| "grad_norm": 0.002429471118375659, |
| "learning_rate": 4.349150556531928e-06, |
| "loss": 0.0075, |
| "step": 6680 |
| }, |
| { |
| "epoch": 3.9191564147627416, |
| "grad_norm": 0.002811690792441368, |
| "learning_rate": 4.325717633274751e-06, |
| "loss": 0.0584, |
| "step": 6690 |
| }, |
| { |
| "epoch": 3.925014645577036, |
| "grad_norm": 0.0018692004960030317, |
| "learning_rate": 4.302284710017575e-06, |
| "loss": 0.0009, |
| "step": 6700 |
| }, |
| { |
| "epoch": 3.93087287639133, |
| "grad_norm": 9.163466453552246, |
| "learning_rate": 4.2788517867603985e-06, |
| "loss": 0.1206, |
| "step": 6710 |
| }, |
| { |
| "epoch": 3.936731107205624, |
| "grad_norm": 0.007258796598762274, |
| "learning_rate": 4.2554188635032226e-06, |
| "loss": 0.1337, |
| "step": 6720 |
| }, |
| { |
| "epoch": 3.9425893380199177, |
| "grad_norm": 0.016324525699019432, |
| "learning_rate": 4.231985940246046e-06, |
| "loss": 0.0077, |
| "step": 6730 |
| }, |
| { |
| "epoch": 3.948447568834212, |
| "grad_norm": 0.012507823295891285, |
| "learning_rate": 4.20855301698887e-06, |
| "loss": 0.0357, |
| "step": 6740 |
| }, |
| { |
| "epoch": 3.9543057996485063, |
| "grad_norm": 1.1951687335968018, |
| "learning_rate": 4.185120093731693e-06, |
| "loss": 0.0739, |
| "step": 6750 |
| }, |
| { |
| "epoch": 3.9601640304628, |
| "grad_norm": 0.051935408264398575, |
| "learning_rate": 4.161687170474517e-06, |
| "loss": 0.0717, |
| "step": 6760 |
| }, |
| { |
| "epoch": 3.9660222612770943, |
| "grad_norm": 0.08532533049583435, |
| "learning_rate": 4.138254247217341e-06, |
| "loss": 0.0472, |
| "step": 6770 |
| }, |
| { |
| "epoch": 3.9718804920913886, |
| "grad_norm": 0.022381288930773735, |
| "learning_rate": 4.114821323960164e-06, |
| "loss": 0.0007, |
| "step": 6780 |
| }, |
| { |
| "epoch": 3.9777387229056824, |
| "grad_norm": 0.008231847546994686, |
| "learning_rate": 4.091388400702988e-06, |
| "loss": 0.0264, |
| "step": 6790 |
| }, |
| { |
| "epoch": 3.9835969537199767, |
| "grad_norm": 0.03306123986840248, |
| "learning_rate": 4.067955477445812e-06, |
| "loss": 0.0664, |
| "step": 6800 |
| }, |
| { |
| "epoch": 3.9894551845342705, |
| "grad_norm": 0.08156836032867432, |
| "learning_rate": 4.044522554188635e-06, |
| "loss": 0.0415, |
| "step": 6810 |
| }, |
| { |
| "epoch": 3.9953134153485648, |
| "grad_norm": 0.23798277974128723, |
| "learning_rate": 4.021089630931459e-06, |
| "loss": 0.0084, |
| "step": 6820 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 8535, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 7383216334848000.0, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |