{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9988649262202043,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003026863412788498,
      "grad_norm": 3.235436458649722,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 0.6633,
      "step": 1
    },
    {
      "epoch": 0.006053726825576996,
      "grad_norm": 3.5645935283996213,
      "learning_rate": 6.060606060606061e-07,
      "loss": 0.7281,
      "step": 2
    },
    {
      "epoch": 0.009080590238365494,
      "grad_norm": 3.093392616778869,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.6619,
      "step": 3
    },
    {
      "epoch": 0.012107453651153992,
      "grad_norm": 3.179784613260367,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 0.7157,
      "step": 4
    },
    {
      "epoch": 0.01513431706394249,
      "grad_norm": 3.46258699147839,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.6966,
      "step": 5
    },
    {
      "epoch": 0.018161180476730987,
      "grad_norm": 2.9974449421523177,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.69,
      "step": 6
    },
    {
      "epoch": 0.021188043889519486,
      "grad_norm": 2.927738436003254,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 0.6768,
      "step": 7
    },
    {
      "epoch": 0.024214907302307985,
      "grad_norm": 2.5627230161070766,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 0.6258,
      "step": 8
    },
    {
      "epoch": 0.02724177071509648,
      "grad_norm": 1.9533304515860135,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6542,
      "step": 9
    },
    {
      "epoch": 0.03026863412788498,
      "grad_norm": 1.6031670956646584,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6031,
      "step": 10
    },
    {
      "epoch": 0.03329549754067348,
      "grad_norm": 1.582263600875127,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.5973,
      "step": 11
    },
    {
      "epoch": 0.036322360953461974,
      "grad_norm": 1.8187361836534968,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.56,
      "step": 12
    },
    {
      "epoch": 0.03934922436625047,
      "grad_norm": 2.6328924517759726,
      "learning_rate": 3.93939393939394e-06,
      "loss": 0.5889,
      "step": 13
    },
    {
      "epoch": 0.04237608777903897,
      "grad_norm": 2.297455235478784,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.567,
      "step": 14
    },
    {
      "epoch": 0.04540295119182747,
      "grad_norm": 1.6940276525975617,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.539,
      "step": 15
    },
    {
      "epoch": 0.04842981460461597,
      "grad_norm": 1.557100492365704,
      "learning_rate": 4.848484848484849e-06,
      "loss": 0.551,
      "step": 16
    },
    {
      "epoch": 0.051456678017404466,
      "grad_norm": 1.484912717744596,
      "learning_rate": 5.151515151515152e-06,
      "loss": 0.561,
      "step": 17
    },
    {
      "epoch": 0.05448354143019296,
      "grad_norm": 1.5377546874099912,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.5402,
      "step": 18
    },
    {
      "epoch": 0.057510404842981463,
      "grad_norm": 1.3623070346388788,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 0.5315,
      "step": 19
    },
    {
      "epoch": 0.06053726825576996,
      "grad_norm": 1.194648507210797,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.5457,
      "step": 20
    },
    {
      "epoch": 0.06356413166855845,
      "grad_norm": 1.1096469526390318,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.5437,
      "step": 21
    },
    {
      "epoch": 0.06659099508134696,
      "grad_norm": 1.1488715164216095,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5363,
      "step": 22
    },
    {
      "epoch": 0.06961785849413545,
      "grad_norm": 1.08655716021847,
      "learning_rate": 6.969696969696971e-06,
      "loss": 0.5024,
      "step": 23
    },
    {
      "epoch": 0.07264472190692395,
      "grad_norm": 0.9854350717024366,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.527,
      "step": 24
    },
    {
      "epoch": 0.07567158531971245,
      "grad_norm": 0.9343934321342595,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.5013,
      "step": 25
    },
    {
      "epoch": 0.07869844873250094,
      "grad_norm": 0.9357695211108015,
      "learning_rate": 7.87878787878788e-06,
      "loss": 0.4887,
      "step": 26
    },
    {
      "epoch": 0.08172531214528944,
      "grad_norm": 0.9545589291967199,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.4864,
      "step": 27
    },
    {
      "epoch": 0.08475217555807794,
      "grad_norm": 0.9992711933908768,
      "learning_rate": 8.484848484848486e-06,
      "loss": 0.5016,
      "step": 28
    },
    {
      "epoch": 0.08777903897086645,
      "grad_norm": 0.8724761296961243,
      "learning_rate": 8.787878787878788e-06,
      "loss": 0.4776,
      "step": 29
    },
    {
      "epoch": 0.09080590238365494,
      "grad_norm": 0.9939969896946891,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5119,
      "step": 30
    },
    {
      "epoch": 0.09383276579644344,
      "grad_norm": 0.9721951413329807,
      "learning_rate": 9.393939393939396e-06,
      "loss": 0.4876,
      "step": 31
    },
    {
      "epoch": 0.09685962920923194,
      "grad_norm": 0.9605804902651769,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.4914,
      "step": 32
    },
    {
      "epoch": 0.09988649262202043,
      "grad_norm": 0.8933282406320995,
      "learning_rate": 1e-05,
      "loss": 0.4692,
      "step": 33
    },
    {
      "epoch": 0.10291335603480893,
      "grad_norm": 0.9103939718766416,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 0.4705,
      "step": 34
    },
    {
      "epoch": 0.10594021944759743,
      "grad_norm": 0.9089186839038462,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 0.4876,
      "step": 35
    },
    {
      "epoch": 0.10896708286038592,
      "grad_norm": 0.8808305054441777,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5064,
      "step": 36
    },
    {
      "epoch": 0.11199394627317442,
      "grad_norm": 0.9202133707568927,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 0.5072,
      "step": 37
    },
    {
      "epoch": 0.11502080968596293,
      "grad_norm": 0.8482614015957081,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 0.475,
      "step": 38
    },
    {
      "epoch": 0.11804767309875142,
      "grad_norm": 0.8474539666938257,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.4812,
      "step": 39
    },
    {
      "epoch": 0.12107453651153992,
      "grad_norm": 0.8905684341419864,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.472,
      "step": 40
    },
    {
      "epoch": 0.12410139992432842,
      "grad_norm": 0.8984945379550433,
      "learning_rate": 1.2424242424242425e-05,
      "loss": 0.4645,
      "step": 41
    },
    {
      "epoch": 0.1271282633371169,
      "grad_norm": 1.011380245894981,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.4644,
      "step": 42
    },
    {
      "epoch": 0.1301551267499054,
      "grad_norm": 0.9490157445069504,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 0.4849,
      "step": 43
    },
    {
      "epoch": 0.13318199016269391,
      "grad_norm": 0.9771201657720733,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4884,
      "step": 44
    },
    {
      "epoch": 0.1362088535754824,
      "grad_norm": 0.9590703124374225,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.4535,
      "step": 45
    },
    {
      "epoch": 0.1392357169882709,
      "grad_norm": 0.8898615991271187,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 0.4838,
      "step": 46
    },
    {
      "epoch": 0.1422625804010594,
      "grad_norm": 1.0535049541973271,
      "learning_rate": 1.4242424242424245e-05,
      "loss": 0.4903,
      "step": 47
    },
    {
      "epoch": 0.1452894438138479,
      "grad_norm": 0.9437094791917945,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.478,
      "step": 48
    },
    {
      "epoch": 0.14831630722663638,
      "grad_norm": 0.8997776335801163,
      "learning_rate": 1.484848484848485e-05,
      "loss": 0.4797,
      "step": 49
    },
    {
      "epoch": 0.1513431706394249,
      "grad_norm": 1.0086849690587059,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.4831,
      "step": 50
    },
    {
      "epoch": 0.1543700340522134,
      "grad_norm": 1.134307357014477,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.489,
      "step": 51
    },
    {
      "epoch": 0.15739689746500188,
      "grad_norm": 1.0213818056555448,
      "learning_rate": 1.575757575757576e-05,
      "loss": 0.4723,
      "step": 52
    },
    {
      "epoch": 0.1604237608777904,
      "grad_norm": 0.9374569620036011,
      "learning_rate": 1.606060606060606e-05,
      "loss": 0.4737,
      "step": 53
    },
    {
      "epoch": 0.16345062429057888,
      "grad_norm": 0.9877869443702924,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4814,
      "step": 54
    },
    {
      "epoch": 0.16647748770336737,
      "grad_norm": 0.8689881305911301,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.4623,
      "step": 55
    },
    {
      "epoch": 0.1695043511161559,
      "grad_norm": 0.9661969974601068,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.4679,
      "step": 56
    },
    {
      "epoch": 0.17253121452894438,
      "grad_norm": 0.9232526663906674,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.4993,
      "step": 57
    },
    {
      "epoch": 0.1755580779417329,
      "grad_norm": 0.9442789679228032,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 0.4951,
      "step": 58
    },
    {
      "epoch": 0.17858494135452138,
      "grad_norm": 0.9631389402769466,
      "learning_rate": 1.787878787878788e-05,
      "loss": 0.4912,
      "step": 59
    },
    {
      "epoch": 0.18161180476730987,
      "grad_norm": 0.9990868712008387,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.4836,
      "step": 60
    },
    {
      "epoch": 0.1846386681800984,
      "grad_norm": 0.9553625858589984,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 0.4647,
      "step": 61
    },
    {
      "epoch": 0.18766553159288688,
      "grad_norm": 1.0539161489638034,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 0.4874,
      "step": 62
    },
    {
      "epoch": 0.19069239500567536,
      "grad_norm": 0.945529567190985,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4724,
      "step": 63
    },
    {
      "epoch": 0.19371925841846388,
      "grad_norm": 1.018363798045299,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.4746,
      "step": 64
    },
    {
      "epoch": 0.19674612183125237,
      "grad_norm": 0.9043620095709098,
      "learning_rate": 1.96969696969697e-05,
      "loss": 0.4596,
      "step": 65
    },
    {
      "epoch": 0.19977298524404086,
      "grad_norm": 0.8644661702492273,
      "learning_rate": 2e-05,
      "loss": 0.4551,
      "step": 66
    },
    {
      "epoch": 0.20279984865682937,
      "grad_norm": 1.0451189341512501,
      "learning_rate": 1.9999860139251737e-05,
      "loss": 0.4714,
      "step": 67
    },
    {
      "epoch": 0.20582671206961786,
      "grad_norm": 1.002626602774765,
      "learning_rate": 1.9999440560919153e-05,
      "loss": 0.4781,
      "step": 68
    },
    {
      "epoch": 0.20885357548240635,
      "grad_norm": 0.9545348999807897,
      "learning_rate": 1.9998741276738753e-05,
      "loss": 0.4592,
      "step": 69
    },
    {
      "epoch": 0.21188043889519487,
      "grad_norm": 1.030593546665657,
      "learning_rate": 1.999776230627102e-05,
      "loss": 0.4787,
      "step": 70
    },
    {
      "epoch": 0.21490730230798336,
      "grad_norm": 0.9634420246177808,
      "learning_rate": 1.9996503676899863e-05,
      "loss": 0.4846,
      "step": 71
    },
    {
      "epoch": 0.21793416572077184,
      "grad_norm": 0.9467386587915338,
      "learning_rate": 1.9994965423831853e-05,
      "loss": 0.4714,
      "step": 72
    },
    {
      "epoch": 0.22096102913356036,
      "grad_norm": 0.8924293788346243,
      "learning_rate": 1.9993147590095232e-05,
      "loss": 0.4602,
      "step": 73
    },
    {
      "epoch": 0.22398789254634885,
      "grad_norm": 0.9380565748757239,
      "learning_rate": 1.999105022653872e-05,
      "loss": 0.4574,
      "step": 74
    },
    {
      "epoch": 0.22701475595913734,
      "grad_norm": 0.9518581092321344,
      "learning_rate": 1.9988673391830082e-05,
      "loss": 0.4839,
      "step": 75
    },
    {
      "epoch": 0.23004161937192585,
      "grad_norm": 0.9538524503816821,
      "learning_rate": 1.9986017152454497e-05,
      "loss": 0.4733,
      "step": 76
    },
    {
      "epoch": 0.23306848278471434,
      "grad_norm": 0.9234948362334984,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.4584,
      "step": 77
    },
    {
      "epoch": 0.23609534619750283,
      "grad_norm": 1.0118006052373039,
      "learning_rate": 1.9979866764718846e-05,
      "loss": 0.4352,
      "step": 78
    },
    {
      "epoch": 0.23912220961029135,
      "grad_norm": 1.0145236646947924,
      "learning_rate": 1.997637278839835e-05,
      "loss": 0.4852,
      "step": 79
    },
    {
      "epoch": 0.24214907302307984,
      "grad_norm": 0.8513391984670292,
      "learning_rate": 1.9972599751485225e-05,
      "loss": 0.4563,
      "step": 80
    },
    {
      "epoch": 0.24517593643586832,
      "grad_norm": 1.251985454014456,
      "learning_rate": 1.9968547759519426e-05,
      "loss": 0.4585,
      "step": 81
    },
    {
      "epoch": 0.24820279984865684,
      "grad_norm": 1.110770916578026,
      "learning_rate": 1.9964216925843876e-05,
      "loss": 0.4718,
      "step": 82
    },
    {
      "epoch": 0.2512296632614453,
      "grad_norm": 0.9977711604302626,
      "learning_rate": 1.9959607371601303e-05,
      "loss": 0.4456,
      "step": 83
    },
    {
      "epoch": 0.2542565266742338,
      "grad_norm": 1.103736062284778,
      "learning_rate": 1.9954719225730847e-05,
      "loss": 0.4712,
      "step": 84
    },
    {
      "epoch": 0.25728339008702233,
      "grad_norm": 0.9312002516164151,
      "learning_rate": 1.994955262496446e-05,
      "loss": 0.4631,
      "step": 85
    },
    {
      "epoch": 0.2603102534998108,
      "grad_norm": 0.9678544247788788,
      "learning_rate": 1.9944107713823068e-05,
      "loss": 0.472,
      "step": 86
    },
    {
      "epoch": 0.2633371169125993,
      "grad_norm": 0.8587164979946262,
      "learning_rate": 1.9938384644612542e-05,
      "loss": 0.4728,
      "step": 87
    },
    {
      "epoch": 0.26636398032538783,
      "grad_norm": 0.9493569562876263,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.46,
      "step": 88
    },
    {
      "epoch": 0.2693908437381763,
      "grad_norm": 0.9528631855943044,
      "learning_rate": 1.9926104680106484e-05,
      "loss": 0.4746,
      "step": 89
    },
    {
      "epoch": 0.2724177071509648,
      "grad_norm": 1.0221463667936372,
      "learning_rate": 1.9919548128307954e-05,
      "loss": 0.4841,
      "step": 90
    },
    {
      "epoch": 0.2754445705637533,
      "grad_norm": 0.880456179465352,
      "learning_rate": 1.9912714105424694e-05,
      "loss": 0.456,
      "step": 91
    },
    {
      "epoch": 0.2784714339765418,
      "grad_norm": 1.007384572340114,
      "learning_rate": 1.990560280261901e-05,
      "loss": 0.4884,
      "step": 92
    },
    {
      "epoch": 0.2814982973893303,
      "grad_norm": 0.8533142414315109,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4322,
      "step": 93
    },
    {
      "epoch": 0.2845251608021188,
      "grad_norm": 0.9114622713395024,
      "learning_rate": 1.9890549160664633e-05,
      "loss": 0.4751,
      "step": 94
    },
    {
      "epoch": 0.2875520242149073,
      "grad_norm": 0.9326624170732905,
      "learning_rate": 1.9882607242598663e-05,
      "loss": 0.4507,
      "step": 95
    },
    {
      "epoch": 0.2905788876276958,
      "grad_norm": 0.8529005087231366,
      "learning_rate": 1.9874388886763944e-05,
      "loss": 0.4514,
      "step": 96
    },
    {
      "epoch": 0.2936057510404843,
      "grad_norm": 8.965430832156894,
      "learning_rate": 1.9865894323045558e-05,
      "loss": 0.4974,
      "step": 97
    },
    {
      "epoch": 0.29663261445327277,
      "grad_norm": 12.904180683674056,
      "learning_rate": 1.9857123789054707e-05,
      "loss": 0.5255,
      "step": 98
    },
    {
      "epoch": 0.2996594778660613,
      "grad_norm": 1.3225451438349816,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.4819,
      "step": 99
    },
    {
      "epoch": 0.3026863412788498,
      "grad_norm": 1.0162073741038125,
      "learning_rate": 1.9838755799290993e-05,
      "loss": 0.475,
      "step": 100
    },
    {
      "epoch": 0.30571320469163826,
      "grad_norm": 1.0484189507403519,
      "learning_rate": 1.9829158857310288e-05,
      "loss": 0.4663,
      "step": 101
    },
    {
      "epoch": 0.3087400681044268,
      "grad_norm": 1.0536659287160743,
      "learning_rate": 1.9819286972627066e-05,
      "loss": 0.4969,
      "step": 102
    },
    {
      "epoch": 0.3117669315172153,
      "grad_norm": 1.1267796297604018,
      "learning_rate": 1.9809140421379168e-05,
      "loss": 0.4808,
      "step": 103
    },
    {
      "epoch": 0.31479379493000376,
      "grad_norm": 0.9124226717828057,
      "learning_rate": 1.979871948738743e-05,
      "loss": 0.4632,
      "step": 104
    },
    {
      "epoch": 0.3178206583427923,
      "grad_norm": 1.0202922501989256,
      "learning_rate": 1.978802446214779e-05,
      "loss": 0.4712,
      "step": 105
    },
    {
      "epoch": 0.3208475217555808,
      "grad_norm": 0.9541637843124078,
      "learning_rate": 1.9777055644823087e-05,
      "loss": 0.448,
      "step": 106
    },
    {
      "epoch": 0.32387438516836925,
      "grad_norm": 0.9904713396707905,
      "learning_rate": 1.9765813342234726e-05,
      "loss": 0.4805,
      "step": 107
    },
    {
      "epoch": 0.32690124858115777,
      "grad_norm": 0.9698707772276001,
      "learning_rate": 1.9754297868854075e-05,
      "loss": 0.476,
      "step": 108
    },
    {
      "epoch": 0.3299281119939463,
      "grad_norm": 0.9354237855295647,
      "learning_rate": 1.9742509546793673e-05,
      "loss": 0.4468,
      "step": 109
    },
    {
      "epoch": 0.33295497540673474,
      "grad_norm": 0.9230396561919654,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.4589,
      "step": 110
    },
    {
      "epoch": 0.33598183881952326,
      "grad_norm": 1.0501558478916118,
      "learning_rate": 1.9718115683235418e-05,
      "loss": 0.4458,
      "step": 111
    },
    {
      "epoch": 0.3390087022323118,
      "grad_norm": 0.8140014657502624,
      "learning_rate": 1.970551082408636e-05,
      "loss": 0.4456,
      "step": 112
    },
    {
      "epoch": 0.3420355656451003,
      "grad_norm": 0.9197656914259078,
      "learning_rate": 1.969263448093608e-05,
      "loss": 0.4574,
      "step": 113
    },
    {
      "epoch": 0.34506242905788875,
      "grad_norm": 0.9466028010229838,
      "learning_rate": 1.9679487013963566e-05,
      "loss": 0.4562,
      "step": 114
    },
    {
      "epoch": 0.34808929247067727,
      "grad_norm": 0.7859343506423083,
      "learning_rate": 1.9666068790931733e-05,
      "loss": 0.4652,
      "step": 115
    },
    {
      "epoch": 0.3511161558834658,
      "grad_norm": 0.842199823964043,
      "learning_rate": 1.9652380187177128e-05,
      "loss": 0.4511,
      "step": 116
    },
    {
      "epoch": 0.35414301929625425,
      "grad_norm": 0.8678047904641017,
      "learning_rate": 1.9638421585599422e-05,
      "loss": 0.4685,
      "step": 117
    },
    {
      "epoch": 0.35716988270904276,
      "grad_norm": 0.8935927032846372,
      "learning_rate": 1.9624193376650708e-05,
      "loss": 0.4547,
      "step": 118
    },
    {
      "epoch": 0.3601967461218313,
      "grad_norm": 0.8090398363404913,
      "learning_rate": 1.960969595832457e-05,
      "loss": 0.4489,
      "step": 119
    },
    {
      "epoch": 0.36322360953461974,
      "grad_norm": 0.8571063903270532,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4687,
      "step": 120
    },
    {
      "epoch": 0.36625047294740826,
      "grad_norm": 0.8347566205280457,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.4362,
      "step": 121
    },
    {
      "epoch": 0.3692773363601968,
      "grad_norm": 0.8496125169402892,
      "learning_rate": 1.956459253990476e-05,
      "loss": 0.4494,
      "step": 122
    },
    {
      "epoch": 0.37230419977298523,
      "grad_norm": 0.8607909235741252,
      "learning_rate": 1.9549022414440738e-05,
      "loss": 0.4587,
      "step": 123
    },
    {
      "epoch": 0.37533106318577375,
      "grad_norm": 0.8377688613673809,
      "learning_rate": 1.9533185182292705e-05,
      "loss": 0.4649,
      "step": 124
    },
    {
      "epoch": 0.37835792659856227,
      "grad_norm": 0.8956718022996314,
      "learning_rate": 1.9517081286462082e-05,
      "loss": 0.4691,
      "step": 125
    },
    {
      "epoch": 0.3813847900113507,
      "grad_norm": 0.8619607962579379,
      "learning_rate": 1.9500711177409456e-05,
      "loss": 0.4632,
      "step": 126
    },
    {
      "epoch": 0.38441165342413924,
      "grad_norm": 0.7922147893885945,
      "learning_rate": 1.9484075313041968e-05,
      "loss": 0.4493,
      "step": 127
    },
    {
      "epoch": 0.38743851683692776,
      "grad_norm": 0.8228714306966575,
      "learning_rate": 1.9467174158700507e-05,
      "loss": 0.4371,
      "step": 128
    },
    {
      "epoch": 0.3904653802497162,
      "grad_norm": 0.8104019279961472,
      "learning_rate": 1.9450008187146685e-05,
      "loss": 0.4391,
      "step": 129
    },
    {
      "epoch": 0.39349224366250474,
      "grad_norm": 0.7838993189333445,
      "learning_rate": 1.9432577878549635e-05,
      "loss": 0.4688,
      "step": 130
    },
    {
      "epoch": 0.39651910707529325,
      "grad_norm": 0.8756965482353155,
      "learning_rate": 1.9414883720472557e-05,
      "loss": 0.4598,
      "step": 131
    },
    {
      "epoch": 0.3995459704880817,
      "grad_norm": 0.953360061737771,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.4733,
      "step": 132
    },
    {
      "epoch": 0.40257283390087023,
      "grad_norm": 0.8308443396762537,
      "learning_rate": 1.937870584301945e-05,
      "loss": 0.4671,
      "step": 133
    },
    {
      "epoch": 0.40559969731365875,
      "grad_norm": 0.817588513292344,
      "learning_rate": 1.9360223135616423e-05,
      "loss": 0.4617,
      "step": 134
    },
    {
      "epoch": 0.4086265607264472,
      "grad_norm": 0.8327012262765101,
      "learning_rate": 1.9341478602651068e-05,
      "loss": 0.4564,
      "step": 135
    },
    {
      "epoch": 0.4116534241392357,
      "grad_norm": 0.8538273356717329,
      "learning_rate": 1.932247276844826e-05,
      "loss": 0.4402,
      "step": 136
    },
    {
      "epoch": 0.41468028755202424,
      "grad_norm": 0.759617785498127,
      "learning_rate": 1.9303206164642037e-05,
      "loss": 0.4601,
      "step": 137
    },
    {
      "epoch": 0.4177071509648127,
      "grad_norm": 0.8194134193241404,
      "learning_rate": 1.9283679330160726e-05,
      "loss": 0.46,
      "step": 138
    },
    {
      "epoch": 0.4207340143776012,
      "grad_norm": 0.8739952351533794,
      "learning_rate": 1.9263892811211865e-05,
      "loss": 0.4624,
      "step": 139
    },
    {
      "epoch": 0.42376087779038973,
      "grad_norm": 0.7767141630388571,
      "learning_rate": 1.9243847161266924e-05,
      "loss": 0.4507,
      "step": 140
    },
    {
      "epoch": 0.4267877412031782,
      "grad_norm": 0.9201821660105389,
      "learning_rate": 1.9223542941045817e-05,
      "loss": 0.4676,
      "step": 141
    },
    {
      "epoch": 0.4298146046159667,
      "grad_norm": 0.7722345893954266,
      "learning_rate": 1.920298071850123e-05,
      "loss": 0.4478,
      "step": 142
    },
    {
      "epoch": 0.4328414680287552,
      "grad_norm": 0.7741229952829096,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.4431,
      "step": 143
    },
    {
      "epoch": 0.4358683314415437,
      "grad_norm": 0.7981544009870728,
      "learning_rate": 1.9161084574320696e-05,
      "loss": 0.4436,
      "step": 144
    },
    {
      "epoch": 0.4388951948543322,
      "grad_norm": 0.8242168829362365,
      "learning_rate": 1.913975182460996e-05,
      "loss": 0.4557,
      "step": 145
    },
    {
      "epoch": 0.4419220582671207,
      "grad_norm": 0.7706535357047114,
      "learning_rate": 1.9118163416393392e-05,
      "loss": 0.4422,
      "step": 146
    },
    {
      "epoch": 0.4449489216799092,
      "grad_norm": 0.8419257040272804,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.4554,
      "step": 147
    },
    {
      "epoch": 0.4479757850926977,
      "grad_norm": 0.8378837909589202,
      "learning_rate": 1.9074222047073945e-05,
      "loss": 0.4762,
      "step": 148
    },
    {
      "epoch": 0.4510026485054862,
      "grad_norm": 0.8385547498874574,
      "learning_rate": 1.9051870315105626e-05,
      "loss": 0.459,
      "step": 149
    },
    {
      "epoch": 0.4540295119182747,
      "grad_norm": 0.7759374570558363,
      "learning_rate": 1.9029265382866216e-05,
      "loss": 0.4522,
      "step": 150
    },
    {
      "epoch": 0.4570563753310632,
      "grad_norm": 0.7759289530084589,
      "learning_rate": 1.9006407882664256e-05,
      "loss": 0.4214,
      "step": 151
    },
    {
      "epoch": 0.4600832387438517,
      "grad_norm": 0.7709877456302019,
      "learning_rate": 1.8983298453873172e-05,
      "loss": 0.4507,
      "step": 152
    },
    {
      "epoch": 0.46311010215664017,
      "grad_norm": 0.8010872699905442,
      "learning_rate": 1.895993774291336e-05,
      "loss": 0.4583,
      "step": 153
    },
    {
      "epoch": 0.4661369655694287,
      "grad_norm": 0.7442741212452282,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.4357,
      "step": 154
    },
    {
      "epoch": 0.4691638289822172,
      "grad_norm": 0.8124936265412964,
      "learning_rate": 1.891246509529539e-05,
      "loss": 0.4632,
      "step": 155
    },
    {
      "epoch": 0.47219069239500566,
      "grad_norm": 0.7966106026514826,
      "learning_rate": 1.8888354486549238e-05,
      "loss": 0.4562,
      "step": 156
    },
    {
      "epoch": 0.4752175558077942,
      "grad_norm": 0.7676758497760634,
      "learning_rate": 1.886399525142122e-05,
      "loss": 0.4343,
      "step": 157
    },
    {
      "epoch": 0.4782444192205827,
      "grad_norm": 0.7601518615733768,
      "learning_rate": 1.8839388071291506e-05,
      "loss": 0.4454,
      "step": 158
    },
    {
      "epoch": 0.48127128263337116,
      "grad_norm": 0.7744639740374917,
      "learning_rate": 1.881453363447582e-05,
      "loss": 0.4688,
      "step": 159
    },
    {
      "epoch": 0.48429814604615967,
      "grad_norm": 0.8663350945527193,
      "learning_rate": 1.8789432636206197e-05,
      "loss": 0.4383,
      "step": 160
    },
    {
      "epoch": 0.4873250094589482,
      "grad_norm": 0.8573400399489379,
      "learning_rate": 1.8764085778611507e-05,
      "loss": 0.477,
      "step": 161
    },
    {
      "epoch": 0.49035187287173665,
      "grad_norm": 0.776800144650827,
      "learning_rate": 1.873849377069785e-05,
      "loss": 0.4279,
      "step": 162
    },
    {
      "epoch": 0.49337873628452517,
      "grad_norm": 0.8162914307039698,
      "learning_rate": 1.87126573283287e-05,
      "loss": 0.4498,
      "step": 163
    },
    {
      "epoch": 0.4964055996973137,
      "grad_norm": 0.7793488477804008,
      "learning_rate": 1.8686577174204887e-05,
      "loss": 0.4507,
      "step": 164
    },
    {
      "epoch": 0.49943246311010214,
      "grad_norm": 0.7581955869362403,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.436,
      "step": 165
    },
    {
      "epoch": 0.5024593265228906,
      "grad_norm": 0.8428076101491118,
      "learning_rate": 1.863368865556191e-05,
      "loss": 0.4412,
      "step": 166
    },
    {
      "epoch": 0.5054861899356792,
      "grad_norm": 0.9637638201950827,
      "learning_rate": 1.8606881770448305e-05,
      "loss": 0.4351,
      "step": 167
    },
    {
      "epoch": 0.5085130533484676,
      "grad_norm": 0.8169262851281399,
      "learning_rate": 1.8579834132349773e-05,
      "loss": 0.4442,
      "step": 168
    },
    {
      "epoch": 0.5115399167612561,
      "grad_norm": 0.8167322292597452,
      "learning_rate": 1.8552546497846893e-05,
      "loss": 0.4341,
      "step": 169
    },
    {
      "epoch": 0.5145667801740447,
      "grad_norm": 0.8770780917772608,
      "learning_rate": 1.8525019630233463e-05,
      "loss": 0.4743,
      "step": 170
    },
    {
      "epoch": 0.5175936435868331,
      "grad_norm": 1.0567304218943345,
      "learning_rate": 1.8497254299495147e-05,
      "loss": 0.4371,
      "step": 171
    },
    {
      "epoch": 0.5206205069996216,
      "grad_norm": 0.8148740362632707,
      "learning_rate": 1.8469251282287925e-05,
      "loss": 0.4435,
      "step": 172
    },
    {
      "epoch": 0.5236473704124102,
      "grad_norm": 0.7933230558237211,
      "learning_rate": 1.8441011361916387e-05,
      "loss": 0.4423,
      "step": 173
    },
    {
      "epoch": 0.5266742338251986,
      "grad_norm": 0.8256694445635196,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4433,
      "step": 174
    },
    {
      "epoch": 0.5297010972379871,
      "grad_norm": 0.7420630428123556,
      "learning_rate": 1.8383823978010077e-05,
      "loss": 0.4519,
      "step": 175
    },
    {
      "epoch": 0.5327279606507757,
      "grad_norm": 0.7933055150071372,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.417,
      "step": 176
    },
    {
      "epoch": 0.5357548240635641,
      "grad_norm": 0.7520339336260935,
      "learning_rate": 1.8325698546347714e-05,
      "loss": 0.4623,
      "step": 177
    },
    {
      "epoch": 0.5387816874763526,
      "grad_norm": 0.7619546315139389,
      "learning_rate": 1.8296286090880362e-05,
      "loss": 0.4408,
      "step": 178
    },
    {
      "epoch": 0.5418085508891411,
      "grad_norm": 0.7958562166380587,
      "learning_rate": 1.8266641570456915e-05,
      "loss": 0.4473,
      "step": 179
    },
    {
      "epoch": 0.5448354143019296,
      "grad_norm": 0.8414500343837285,
      "learning_rate": 1.8236765814298328e-05,
      "loss": 0.4447,
      "step": 180
    },
    {
      "epoch": 0.5478622777147181,
      "grad_norm": 0.9080616373812107,
      "learning_rate": 1.820665965809373e-05,
      "loss": 0.4456,
      "step": 181
    },
    {
      "epoch": 0.5508891411275066,
      "grad_norm": 0.754258431179921,
      "learning_rate": 1.8176323943977034e-05,
      "loss": 0.4351,
      "step": 182
    },
    {
      "epoch": 0.5539160045402951,
      "grad_norm": 0.8098503106780682,
      "learning_rate": 1.814575952050336e-05,
      "loss": 0.441,
      "step": 183
    },
    {
      "epoch": 0.5569428679530836,
      "grad_norm": 0.8352289414660538,
      "learning_rate": 1.8114967242625342e-05,
      "loss": 0.4494,
      "step": 184
    },
    {
      "epoch": 0.5599697313658721,
      "grad_norm": 0.7681163692296273,
      "learning_rate": 1.808394797166919e-05,
      "loss": 0.4384,
      "step": 185
    },
    {
      "epoch": 0.5629965947786606,
      "grad_norm": 0.747127957915616,
      "learning_rate": 1.8052702575310588e-05,
      "loss": 0.4121,
      "step": 186
    },
    {
      "epoch": 0.5660234581914491,
      "grad_norm": 0.8218478679852107,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.4409,
      "step": 187
    },
    {
      "epoch": 0.5690503216042376,
      "grad_norm": 0.8152328898758183,
      "learning_rate": 1.7989536908690413e-05,
      "loss": 0.4327,
      "step": 188
    },
    {
      "epoch": 0.5720771850170261,
      "grad_norm": 0.8711371168887442,
      "learning_rate": 1.7957618405308323e-05,
      "loss": 0.4378,
      "step": 189
    },
    {
      "epoch": 0.5751040484298146,
      "grad_norm": 0.8638263265536331,
      "learning_rate": 1.792547731023332e-05,
      "loss": 0.4146,
      "step": 190
    },
    {
      "epoch": 0.5781309118426031,
      "grad_norm": 0.8002603800846039,
      "learning_rate": 1.789311452252092e-05,
      "loss": 0.4268,
      "step": 191
    },
    {
      "epoch": 0.5811577752553916,
      "grad_norm": 0.8573803414316463,
      "learning_rate": 1.7860530947427878e-05,
      "loss": 0.4385,
      "step": 192
    },
    {
      "epoch": 0.58418463866818,
      "grad_norm": 0.8073779924473132,
      "learning_rate": 1.782772749638682e-05,
      "loss": 0.4336,
      "step": 193
    },
    {
      "epoch": 0.5872115020809686,
      "grad_norm": 0.7608108526849635,
      "learning_rate": 1.779470508698079e-05,
      "loss": 0.4255,
      "step": 194
    },
    {
      "epoch": 0.5902383654937571,
      "grad_norm": 0.8482995521517027,
      "learning_rate": 1.776146464291757e-05,
      "loss": 0.425,
      "step": 195
    },
    {
      "epoch": 0.5932652289065455,
      "grad_norm": 0.8916144337271275,
      "learning_rate": 1.772800709400383e-05,
      "loss": 0.4411,
      "step": 196
    },
    {
      "epoch": 0.5962920923193341,
      "grad_norm": 0.7417144652621136,
      "learning_rate": 1.7694333376119144e-05,
      "loss": 0.4382,
      "step": 197
    },
    {
      "epoch": 0.5993189557321226,
      "grad_norm": 0.8300238664862641,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4334,
      "step": 198
    },
    {
      "epoch": 0.602345819144911,
      "grad_norm": 0.7299705378025234,
      "learning_rate": 1.762634120716238e-05,
      "loss": 0.4235,
      "step": 199
    },
    {
      "epoch": 0.6053726825576996,
      "grad_norm": 0.7044753738932356,
      "learning_rate": 1.7592024657977432e-05,
      "loss": 0.4299,
      "step": 200
    },
    {
      "epoch": 0.6083995459704881,
      "grad_norm": 0.7642157289476394,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4458,
      "step": 201
    },
    {
      "epoch": 0.6114264093832765,
      "grad_norm": 0.8428239024660611,
      "learning_rate": 1.75227554297058e-05,
      "loss": 0.4304,
      "step": 202
    },
    {
      "epoch": 0.6144532727960651,
      "grad_norm": 0.8077485956789754,
      "learning_rate": 1.7487804688228327e-05,
      "loss": 0.4367,
      "step": 203
    },
    {
      "epoch": 0.6174801362088536,
      "grad_norm": 0.9063559367377406,
      "learning_rate": 1.745264449675755e-05,
      "loss": 0.4453,
      "step": 204
    },
    {
      "epoch": 0.620506999621642,
      "grad_norm": 0.8043034850106837,
      "learning_rate": 1.7417275838799596e-05,
      "loss": 0.4514,
      "step": 205
    },
    {
      "epoch": 0.6235338630344306,
      "grad_norm": 0.8144260157371154,
      "learning_rate": 1.7381699703691866e-05,
      "loss": 0.4459,
      "step": 206
    },
    {
      "epoch": 0.626560726447219,
      "grad_norm": 0.9563709608799079,
      "learning_rate": 1.734591708657533e-05,
      "loss": 0.4258,
      "step": 207
    },
    {
      "epoch": 0.6295875898600075,
      "grad_norm": 0.7479430675820818,
      "learning_rate": 1.730992898836672e-05,
      "loss": 0.4352,
      "step": 208
    },
    {
      "epoch": 0.6326144532727961,
      "grad_norm": 1.092865824271307,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.4365,
      "step": 209
    },
    {
      "epoch": 0.6356413166855845,
      "grad_norm": 0.8375699585075692,
      "learning_rate": 1.72373403810507e-05,
      "loss": 0.4599,
      "step": 210
    },
    {
      "epoch": 0.638668180098373,
      "grad_norm": 0.8254247763199031,
      "learning_rate": 1.720074190240269e-05,
      "loss": 0.4331,
      "step": 211
    },
    {
      "epoch": 0.6416950435111616,
      "grad_norm": 0.8155654342341622,
      "learning_rate": 1.7163942003524574e-05,
      "loss": 0.441,
      "step": 212
    },
    {
      "epoch": 0.64472190692395,
      "grad_norm": 0.8363163686812076,
      "learning_rate": 1.7126941713788633e-05,
      "loss": 0.4572,
      "step": 213
    },
    {
      "epoch": 0.6477487703367385,
      "grad_norm": 0.8065614536681267,
      "learning_rate": 1.70897420681725e-05,
      "loss": 0.4229,
      "step": 214
    },
    {
      "epoch": 0.6507756337495271,
      "grad_norm": 0.8798391293864685,
      "learning_rate": 1.7052344107230244e-05,
      "loss": 0.4375,
      "step": 215
    },
    {
      "epoch": 0.6538024971623155,
      "grad_norm": 0.816194442972985,
      "learning_rate": 1.7014748877063212e-05,
      "loss": 0.4505,
      "step": 216
    },
    {
      "epoch": 0.656829360575104,
      "grad_norm": 0.763640038718994,
      "learning_rate": 1.697695742929082e-05,
      "loss": 0.4285,
      "step": 217
    },
    {
      "epoch": 0.6598562239878926,
      "grad_norm": 0.8665799367816605,
      "learning_rate": 1.693897082102109e-05,
      "loss": 0.4327,
      "step": 218
    },
    {
      "epoch": 0.662883087400681,
      "grad_norm": 0.7632749371911016,
      "learning_rate": 1.6900790114821122e-05,
      "loss": 0.419,
      "step": 219
    },
    {
      "epoch": 0.6659099508134695,
      "grad_norm": 0.82972578035047,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.4227,
      "step": 220
    },
    {
      "epoch": 0.6689368142262581,
      "grad_norm": 0.726025867928948,
      "learning_rate": 1.682385068601563e-05,
      "loss": 0.4299,
      "step": 221
    },
    {
      "epoch": 0.6719636776390465,
      "grad_norm": 0.8050613150824316,
      "learning_rate": 1.6785094115571323e-05,
      "loss": 0.4341,
      "step": 222
    },
    {
      "epoch": 0.6749905410518351,
      "grad_norm": 0.7668645524154176,
      "learning_rate": 1.674614775145901e-05,
      "loss": 0.434,
      "step": 223
    },
    {
      "epoch": 0.6780174044646236,
      "grad_norm": 0.8473114078768379,
      "learning_rate": 1.670701268309221e-05,
      "loss": 0.4166,
      "step": 224
    },
    {
      "epoch": 0.681044267877412,
      "grad_norm": 0.8383790645003683,
      "learning_rate": 1.666769000516292e-05,
      "loss": 0.428,
      "step": 225
    },
    {
      "epoch": 0.6840711312902006,
      "grad_norm": 0.7693649578515219,
      "learning_rate": 1.6628180817610963e-05,
      "loss": 0.4309,
      "step": 226
    },
    {
      "epoch": 0.687097994702989,
      "grad_norm": 0.8320533148447656,
      "learning_rate": 1.658848622559325e-05,
      "loss": 0.4212,
      "step": 227
    },
    {
      "epoch": 0.6901248581157775,
      "grad_norm": 0.7260198892087389,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4251,
      "step": 228
    },
    {
      "epoch": 0.6931517215285661,
      "grad_norm": 0.9029577261157358,
      "learning_rate": 1.6508545274687936e-05,
      "loss": 0.4572,
      "step": 229
    },
    {
      "epoch": 0.6961785849413545,
      "grad_norm": 0.7197466238499969,
      "learning_rate": 1.6468301151920576e-05,
      "loss": 0.4242,
      "step": 230
    },
    {
      "epoch": 0.699205448354143,
      "grad_norm": 0.7742847644397802,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.4394,
      "step": 231
    },
    {
      "epoch": 0.7022323117669316,
      "grad_norm": 0.8380011329459673,
      "learning_rate": 1.6387271240298082e-05,
      "loss": 0.4371,
      "step": 232
    },
    {
      "epoch": 0.70525917517972,
      "grad_norm": 0.7107929674639848,
      "learning_rate": 1.6346487718023762e-05,
      "loss": 0.4366,
      "step": 233
    },
    {
      "epoch": 0.7082860385925085,
      "grad_norm": 0.9619734201764684,
      "learning_rate": 1.6305526670845225e-05,
      "loss": 0.4353,
      "step": 234
    },
    {
      "epoch": 0.7113129020052971,
      "grad_norm": 0.751949350016426,
      "learning_rate": 1.6264389244531015e-05,
      "loss": 0.4483,
      "step": 235
    },
    {
      "epoch": 0.7143397654180855,
      "grad_norm": 0.9579595471430041,
      "learning_rate": 1.6223076589783368e-05,
      "loss": 0.4419,
      "step": 236
    },
    {
      "epoch": 0.717366628830874,
      "grad_norm": 0.8814789311643189,
      "learning_rate": 1.6181589862206053e-05,
      "loss": 0.4412,
      "step": 237
    },
    {
      "epoch": 0.7203934922436626,
      "grad_norm": 0.7890367977878839,
      "learning_rate": 1.613993022227202e-05,
      "loss": 0.4711,
      "step": 238
    },
    {
      "epoch": 0.723420355656451,
      "grad_norm": 0.8725786601277864,
      "learning_rate": 1.6098098835290955e-05,
      "loss": 0.4097,
      "step": 239
    },
    {
      "epoch": 0.7264472190692395,
      "grad_norm": 0.7535459533804848,
      "learning_rate": 1.6056096871376667e-05,
      "loss": 0.405,
      "step": 240
    },
    {
      "epoch": 0.729474082482028,
      "grad_norm": 0.8151725329306759,
      "learning_rate": 1.6013925505414386e-05,
      "loss": 0.4443,
      "step": 241
    },
    {
      "epoch": 0.7325009458948165,
      "grad_norm": 0.6986284239125755,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.4387,
      "step": 242
    },
    {
      "epoch": 0.735527809307605,
      "grad_norm": 0.711160824465759,
      "learning_rate": 1.5929079290546408e-05,
      "loss": 0.4313,
      "step": 243
    },
    {
      "epoch": 0.7385546727203935,
      "grad_norm": 0.7747993590917854,
      "learning_rate": 1.5886406814971728e-05,
      "loss": 0.4345,
      "step": 244
    },
    {
      "epoch": 0.741581536133182,
      "grad_norm": 0.6998573640035735,
      "learning_rate": 1.584356968394471e-05,
      "loss": 0.4191,
      "step": 245
    },
    {
      "epoch": 0.7446083995459705,
      "grad_norm": 0.7828113851190492,
      "learning_rate": 1.5800569095711983e-05,
      "loss": 0.4584,
      "step": 246
    },
    {
      "epoch": 0.747635262958759,
      "grad_norm": 0.8208386890894316,
      "learning_rate": 1.575740625309244e-05,
      "loss": 0.4687,
      "step": 247
    },
    {
      "epoch": 0.7506621263715475,
      "grad_norm": 0.6749431645261781,
      "learning_rate": 1.5714082363443576e-05,
      "loss": 0.4134,
      "step": 248
    },
    {
      "epoch": 0.753688989784336,
      "grad_norm": 0.7318896821137213,
      "learning_rate": 1.5670598638627707e-05,
      "loss": 0.4495,
      "step": 249
    },
    {
      "epoch": 0.7567158531971245,
      "grad_norm": 0.6740851031790088,
      "learning_rate": 1.5626956294978103e-05,
      "loss": 0.416,
      "step": 250
    },
    {
      "epoch": 0.759742716609913,
      "grad_norm": 0.7420662374601229,
      "learning_rate": 1.5583156553264923e-05,
      "loss": 0.4279,
      "step": 251
    },
    {
      "epoch": 0.7627695800227015,
      "grad_norm": 0.7244466694674909,
      "learning_rate": 1.5539200638661106e-05,
      "loss": 0.4176,
      "step": 252
    },
    {
      "epoch": 0.76579644343549,
      "grad_norm": 0.699452393476543,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.4311,
      "step": 253
    },
    {
      "epoch": 0.7688233068482785,
      "grad_norm": 0.7131583405325259,
      "learning_rate": 1.5450825213281317e-05,
      "loss": 0.4377,
      "step": 254
    },
    {
      "epoch": 0.771850170261067,
      "grad_norm": 0.7998788659541156,
      "learning_rate": 1.5406408174555978e-05,
      "loss": 0.4218,
      "step": 255
    },
    {
      "epoch": 0.7748770336738555,
      "grad_norm": 0.7187304321390485,
      "learning_rate": 1.5361839906972095e-05,
      "loss": 0.4113,
      "step": 256
    },
    {
      "epoch": 0.777903897086644,
      "grad_norm": 0.7189215644246775,
      "learning_rate": 1.531712165719992e-05,
      "loss": 0.4351,
      "step": 257
    },
    {
      "epoch": 0.7809307604994324,
      "grad_norm": 0.912368504828945,
      "learning_rate": 1.5272254676105026e-05,
      "loss": 0.4279,
      "step": 258
    },
    {
      "epoch": 0.783957623912221,
      "grad_norm": 0.7991126185266673,
      "learning_rate": 1.5227240218713326e-05,
      "loss": 0.4158,
      "step": 259
    },
    {
      "epoch": 0.7869844873250095,
      "grad_norm": 0.9009696818932866,
      "learning_rate": 1.5182079544175957e-05,
      "loss": 0.429,
      "step": 260
    },
    {
      "epoch": 0.7900113507377979,
      "grad_norm": 0.7407945230185996,
      "learning_rate": 1.5136773915734067e-05,
      "loss": 0.4336,
      "step": 261
    },
    {
      "epoch": 0.7930382141505865,
      "grad_norm": 0.7883924763371828,
      "learning_rate": 1.5091324600683472e-05,
      "loss": 0.4244,
      "step": 262
    },
    {
      "epoch": 0.796065077563375,
      "grad_norm": 0.6813004638104684,
      "learning_rate": 1.5045732870339213e-05,
      "loss": 0.3992,
      "step": 263
    },
    {
      "epoch": 0.7990919409761634,
      "grad_norm": 0.778562345968094,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4418,
      "step": 264
    },
    {
      "epoch": 0.802118804388952,
      "grad_norm": 0.8114511723746026,
      "learning_rate": 1.4954127268912525e-05,
      "loss": 0.4168,
      "step": 265
    },
    {
      "epoch": 0.8051456678017405,
      "grad_norm": 0.7723328828458176,
      "learning_rate": 1.4908115960235683e-05,
      "loss": 0.4449,
      "step": 266
    },
    {
      "epoch": 0.8081725312145289,
      "grad_norm": 0.7979142117941048,
      "learning_rate": 1.4861967361004687e-05,
      "loss": 0.449,
      "step": 267
    },
    {
      "epoch": 0.8111993946273175,
      "grad_norm": 0.7295039417962921,
      "learning_rate": 1.4815682762095065e-05,
      "loss": 0.4129,
      "step": 268
    },
    {
      "epoch": 0.814226258040106,
      "grad_norm": 0.7664832516504386,
      "learning_rate": 1.476926345818654e-05,
      "loss": 0.4387,
      "step": 269
    },
    {
      "epoch": 0.8172531214528944,
      "grad_norm": 0.756152513707808,
      "learning_rate": 1.472271074772683e-05,
      "loss": 0.4185,
      "step": 270
    },
    {
      "epoch": 0.820279984865683,
      "grad_norm": 0.6790362026590407,
      "learning_rate": 1.4676025932895315e-05,
      "loss": 0.4066,
      "step": 271
    },
    {
      "epoch": 0.8233068482784714,
      "grad_norm": 0.7739279330664569,
      "learning_rate": 1.4629210319566626e-05,
      "loss": 0.408,
      "step": 272
    },
    {
      "epoch": 0.8263337116912599,
      "grad_norm": 0.6958729561966952,
      "learning_rate": 1.4582265217274105e-05,
      "loss": 0.4217,
      "step": 273
    },
    {
      "epoch": 0.8293605751040485,
      "grad_norm": 0.7218792900005002,
      "learning_rate": 1.4535191939173179e-05,
      "loss": 0.4152,
      "step": 274
    },
    {
      "epoch": 0.8323874385168369,
      "grad_norm": 0.7056662139566339,
      "learning_rate": 1.4487991802004625e-05,
      "loss": 0.419,
      "step": 275
    },
    {
      "epoch": 0.8354143019296254,
      "grad_norm": 0.7243290267095895,
      "learning_rate": 1.4440666126057743e-05,
      "loss": 0.4226,
      "step": 276
    },
    {
      "epoch": 0.838441165342414,
      "grad_norm": 0.9116703642860973,
      "learning_rate": 1.4393216235133427e-05,
      "loss": 0.4167,
      "step": 277
    },
    {
      "epoch": 0.8414680287552024,
      "grad_norm": 0.6913104899347299,
      "learning_rate": 1.4345643456507126e-05,
      "loss": 0.4166,
      "step": 278
    },
    {
      "epoch": 0.8444948921679909,
      "grad_norm": 0.7780194594315958,
      "learning_rate": 1.4297949120891718e-05,
      "loss": 0.4017,
      "step": 279
    },
    {
      "epoch": 0.8475217555807795,
      "grad_norm": 0.7135874824236577,
      "learning_rate": 1.4250134562400301e-05,
      "loss": 0.4296,
      "step": 280
    },
    {
      "epoch": 0.8505486189935679,
      "grad_norm": 0.6650307696736724,
      "learning_rate": 1.4202201118508863e-05,
      "loss": 0.4106,
      "step": 281
    },
    {
      "epoch": 0.8535754824063564,
      "grad_norm": 0.7230912742869076,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.4265,
      "step": 282
    },
    {
      "epoch": 0.856602345819145,
      "grad_norm": 0.7872017402026339,
      "learning_rate": 1.4105982941019751e-05,
      "loss": 0.4474,
      "step": 283
    },
    {
      "epoch": 0.8596292092319334,
      "grad_norm": 0.8170141175509318,
      "learning_rate": 1.405770089885134e-05,
      "loss": 0.4351,
      "step": 284
    },
    {
      "epoch": 0.8626560726447219,
      "grad_norm": 0.9076333196650805,
      "learning_rate": 1.4009305354066138e-05,
      "loss": 0.4433,
      "step": 285
    },
    {
      "epoch": 0.8656829360575105,
      "grad_norm": 0.7652824667105684,
      "learning_rate": 1.396079766039157e-05,
      "loss": 0.3869,
      "step": 286
    },
    {
      "epoch": 0.8687097994702989,
      "grad_norm": 0.9185459500767443,
      "learning_rate": 1.39121791746921e-05,
      "loss": 0.4398,
      "step": 287
    },
    {
      "epoch": 0.8717366628830874,
      "grad_norm": 0.683856092519685,
      "learning_rate": 1.3863451256931286e-05,
      "loss": 0.3957,
      "step": 288
    },
    {
      "epoch": 0.874763526295876,
      "grad_norm": 0.87356169351691,
      "learning_rate": 1.381461527013374e-05,
      "loss": 0.4219,
      "step": 289
    },
    {
      "epoch": 0.8777903897086644,
      "grad_norm": 0.7629226280417597,
      "learning_rate": 1.3765672580346986e-05,
      "loss": 0.4352,
      "step": 290
    },
    {
      "epoch": 0.8808172531214529,
      "grad_norm": 0.8631834212542296,
      "learning_rate": 1.3716624556603275e-05,
      "loss": 0.4347,
      "step": 291
    },
    {
      "epoch": 0.8838441165342414,
      "grad_norm": 0.8010682582248608,
      "learning_rate": 1.3667472570881264e-05,
      "loss": 0.4358,
      "step": 292
    },
    {
      "epoch": 0.8868709799470299,
      "grad_norm": 0.6976119432685854,
      "learning_rate": 1.361821799806765e-05,
      "loss": 0.4219,
      "step": 293
    },
    {
      "epoch": 0.8898978433598184,
      "grad_norm": 0.8420218111264165,
      "learning_rate": 1.356886221591872e-05,
      "loss": 0.4229,
      "step": 294
    },
    {
      "epoch": 0.8929247067726069,
      "grad_norm": 0.7605670845613539,
      "learning_rate": 1.3519406605021797e-05,
      "loss": 0.413,
      "step": 295
    },
    {
      "epoch": 0.8959515701853954,
      "grad_norm": 0.7083126933963982,
      "learning_rate": 1.3469852548756626e-05,
      "loss": 0.4132,
      "step": 296
    },
    {
      "epoch": 0.8989784335981839,
      "grad_norm": 0.726850081451116,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4459,
      "step": 297
    },
    {
      "epoch": 0.9020052970109724,
      "grad_norm": 0.7566549446693742,
      "learning_rate": 1.3370454647370418e-05,
      "loss": 0.4283,
      "step": 298
    },
    {
      "epoch": 0.9050321604237609,
      "grad_norm": 0.7463337161504136,
      "learning_rate": 1.3320613582622354e-05,
      "loss": 0.4204,
      "step": 299
    },
    {
      "epoch": 0.9080590238365494,
      "grad_norm": 0.7097766877131763,
      "learning_rate": 1.3270679633174219e-05,
      "loss": 0.4277,
      "step": 300
    },
    {
      "epoch": 0.9110858872493379,
      "grad_norm": 0.8176773766544522,
      "learning_rate": 1.3220654195785917e-05,
      "loss": 0.4575,
      "step": 301
    },
    {
      "epoch": 0.9141127506621264,
      "grad_norm": 0.9219923771505071,
      "learning_rate": 1.3170538669776469e-05,
      "loss": 0.4334,
      "step": 302
    },
    {
      "epoch": 0.9171396140749148,
      "grad_norm": 0.7351748712320839,
      "learning_rate": 1.3120334456984871e-05,
      "loss": 0.413,
      "step": 303
    },
    {
      "epoch": 0.9201664774877034,
      "grad_norm": 0.7825100363347536,
      "learning_rate": 1.3070042961730878e-05,
      "loss": 0.4206,
      "step": 304
    },
    {
      "epoch": 0.9231933409004919,
      "grad_norm": 0.6735083502158334,
      "learning_rate": 1.3019665590775717e-05,
      "loss": 0.3941,
      "step": 305
    },
    {
      "epoch": 0.9262202043132803,
      "grad_norm": 0.8690681440679482,
      "learning_rate": 1.296920375328275e-05,
      "loss": 0.4347,
      "step": 306
    },
    {
      "epoch": 0.9292470677260689,
      "grad_norm": 0.752863424640265,
      "learning_rate": 1.2918658860778046e-05,
      "loss": 0.4191,
      "step": 307
    },
    {
      "epoch": 0.9322739311388574,
      "grad_norm": 0.7336037657454644,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.4135,
      "step": 308
    },
    {
      "epoch": 0.9353007945516458,
      "grad_norm": 0.8033102578684338,
      "learning_rate": 1.2817325568414299e-05,
      "loss": 0.4162,
      "step": 309
    },
    {
      "epoch": 0.9383276579644344,
      "grad_norm": 0.6947098316627842,
      "learning_rate": 1.2766540003065272e-05,
      "loss": 0.4062,
      "step": 310
    },
    {
      "epoch": 0.9413545213772229,
      "grad_norm": 0.85057880337841,
      "learning_rate": 1.2715677051645259e-05,
      "loss": 0.422,
      "step": 311
    },
    {
      "epoch": 0.9443813847900113,
      "grad_norm": 0.7118721401571699,
      "learning_rate": 1.266473813690035e-05,
      "loss": 0.4241,
      "step": 312
    },
    {
      "epoch": 0.9474082482027999,
      "grad_norm": 0.7197445289742042,
      "learning_rate": 1.2613724683701491e-05,
      "loss": 0.4244,
      "step": 313
    },
    {
      "epoch": 0.9504351116155884,
      "grad_norm": 0.8341867578235612,
      "learning_rate": 1.2562638119004627e-05,
      "loss": 0.4172,
      "step": 314
    },
    {
      "epoch": 0.9534619750283768,
      "grad_norm": 0.7179575929786518,
      "learning_rate": 1.2511479871810792e-05,
      "loss": 0.4352,
      "step": 315
    },
    {
      "epoch": 0.9564888384411654,
      "grad_norm": 0.746800585578556,
      "learning_rate": 1.2460251373126136e-05,
      "loss": 0.4001,
      "step": 316
    },
    {
      "epoch": 0.9595157018539539,
      "grad_norm": 0.7480516491648973,
      "learning_rate": 1.2408954055921884e-05,
      "loss": 0.4219,
      "step": 317
    },
    {
      "epoch": 0.9625425652667423,
      "grad_norm": 0.6947227466601075,
      "learning_rate": 1.2357589355094275e-05,
      "loss": 0.4088,
      "step": 318
    },
    {
      "epoch": 0.9655694286795309,
      "grad_norm": 0.6802161432755544,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.4126,
      "step": 319
    },
    {
      "epoch": 0.9685962920923193,
      "grad_norm": 0.6559511391299027,
      "learning_rate": 1.2254663551538047e-05,
      "loss": 0.4069,
      "step": 320
    },
    {
      "epoch": 0.9716231555051078,
      "grad_norm": 0.6904309448352038,
      "learning_rate": 1.2203105327865407e-05,
      "loss": 0.4434,
      "step": 321
    },
    {
      "epoch": 0.9746500189178964,
      "grad_norm": 0.7182868452223119,
      "learning_rate": 1.215148547860084e-05,
      "loss": 0.4349,
      "step": 322
    },
    {
      "epoch": 0.9776768823306848,
      "grad_norm": 0.7814920803954862,
      "learning_rate": 1.2099805447662485e-05,
      "loss": 0.393,
      "step": 323
    },
    {
      "epoch": 0.9807037457434733,
      "grad_norm": 0.6949386704179543,
      "learning_rate": 1.2048066680651908e-05,
      "loss": 0.411,
      "step": 324
    },
    {
      "epoch": 0.9837306091562619,
      "grad_norm": 0.6381648381346965,
      "learning_rate": 1.1996270624813642e-05,
      "loss": 0.4098,
      "step": 325
    },
    {
      "epoch": 0.9867574725690503,
      "grad_norm": 0.7427466598485836,
      "learning_rate": 1.194441872899471e-05,
      "loss": 0.4043,
      "step": 326
    },
    {
      "epoch": 0.9897843359818388,
      "grad_norm": 0.6621844709178613,
      "learning_rate": 1.1892512443604103e-05,
      "loss": 0.4002,
      "step": 327
    },
    {
      "epoch": 0.9928111993946274,
      "grad_norm": 0.7912473266303484,
      "learning_rate": 1.1840553220572204e-05,
      "loss": 0.4286,
      "step": 328
    },
    {
      "epoch": 0.9958380628074158,
      "grad_norm": 0.7011746504431725,
      "learning_rate": 1.1788542513310178e-05,
      "loss": 0.4158,
      "step": 329
    },
    {
      "epoch": 0.9988649262202043,
      "grad_norm": 0.6704473383696113,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4317,
      "step": 330
    },
    {
      "epoch": 1.0030268634127886,
      "grad_norm": 0.9056114324326852,
      "learning_rate": 1.1684372466900306e-05,
      "loss": 0.3097,
      "step": 331
    },
    {
      "epoch": 1.006053726825577,
      "grad_norm": 0.8530657966696973,
      "learning_rate": 1.1632216041612595e-05,
      "loss": 0.3115,
      "step": 332
    },
    {
      "epoch": 1.0090805902383655,
      "grad_norm": 0.7101116681619808,
      "learning_rate": 1.15800139597335e-05,
      "loss": 0.3154,
      "step": 333
    },
    {
      "epoch": 1.012107453651154,
      "grad_norm": 0.6488846749256443,
      "learning_rate": 1.1527767681467472e-05,
      "loss": 0.3083,
      "step": 334
    },
    {
      "epoch": 1.0151343170639424,
      "grad_norm": 0.7966833609718857,
      "learning_rate": 1.1475478668255223e-05,
      "loss": 0.3025,
      "step": 335
    },
    {
      "epoch": 1.018161180476731,
      "grad_norm": 0.7997274017116879,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.3016,
      "step": 336
    },
    {
      "epoch": 1.0211880438895196,
      "grad_norm": 0.8706465996902483,
      "learning_rate": 1.1370778288690947e-05,
      "loss": 0.3078,
      "step": 337
    },
    {
      "epoch": 1.024214907302308,
      "grad_norm": 0.8388383356557727,
      "learning_rate": 1.1318369851033604e-05,
      "loss": 0.2942,
      "step": 338
    },
    {
      "epoch": 1.0272417707150965,
      "grad_norm": 0.7743256303109989,
      "learning_rate": 1.1265924535737494e-05,
      "loss": 0.3026,
      "step": 339
    },
    {
      "epoch": 1.030268634127885,
      "grad_norm": 0.8122251757192691,
      "learning_rate": 1.121344380981082e-05,
| "loss": 0.3125, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0332954975406734, | |
| "grad_norm": 0.8351870356710823, | |
| "learning_rate": 1.1160929141252303e-05, | |
| "loss": 0.3037, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.036322360953462, | |
| "grad_norm": 0.7582036188225919, | |
| "learning_rate": 1.1108381999010111e-05, | |
| "loss": 0.3067, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.0393492243662505, | |
| "grad_norm": 0.945337804023826, | |
| "learning_rate": 1.1055803852940772e-05, | |
| "loss": 0.2861, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.042376087779039, | |
| "grad_norm": 0.8048903461691072, | |
| "learning_rate": 1.1003196173768051e-05, | |
| "loss": 0.2979, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.0454029511918275, | |
| "grad_norm": 0.8167878527427705, | |
| "learning_rate": 1.0950560433041825e-05, | |
| "loss": 0.3077, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.048429814604616, | |
| "grad_norm": 0.8037778021136479, | |
| "learning_rate": 1.0897898103096917e-05, | |
| "loss": 0.3012, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.0514566780174044, | |
| "grad_norm": 0.7839978759654623, | |
| "learning_rate": 1.0845210657011893e-05, | |
| "loss": 0.3031, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.054483541430193, | |
| "grad_norm": 0.8173483126490009, | |
| "learning_rate": 1.0792499568567885e-05, | |
| "loss": 0.303, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.0575104048429815, | |
| "grad_norm": 0.7894527703382768, | |
| "learning_rate": 1.0739766312207344e-05, | |
| "loss": 0.3112, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.0605372682557699, | |
| "grad_norm": 0.7667546212318844, | |
| "learning_rate": 1.068701236299281e-05, | |
| "loss": 0.2999, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.0635641316685585, | |
| "grad_norm": 0.7777270351047659, | |
| "learning_rate": 1.0634239196565646e-05, | |
| "loss": 0.3059, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.066590995081347, | |
| "grad_norm": 0.7337030677894119, | |
| "learning_rate": 1.0581448289104759e-05, | |
| "loss": 0.3019, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.0696178584941354, | |
| "grad_norm": 0.744014283833164, | |
| "learning_rate": 1.0528641117285315e-05, | |
| "loss": 0.3085, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.072644721906924, | |
| "grad_norm": 0.7307665672362731, | |
| "learning_rate": 1.0475819158237426e-05, | |
| "loss": 0.3031, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.0756715853197125, | |
| "grad_norm": 0.721771354976752, | |
| "learning_rate": 1.0422983889504831e-05, | |
| "loss": 0.3006, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.0786984487325009, | |
| "grad_norm": 0.7197419718483739, | |
| "learning_rate": 1.0370136789003582e-05, | |
| "loss": 0.3154, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.0817253121452894, | |
| "grad_norm": 0.6930720363551771, | |
| "learning_rate": 1.031727933498068e-05, | |
| "loss": 0.292, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.084752175558078, | |
| "grad_norm": 0.756567737097623, | |
| "learning_rate": 1.0264413005972736e-05, | |
| "loss": 0.302, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.0877790389708664, | |
| "grad_norm": 0.7287513135304579, | |
| "learning_rate": 1.0211539280764617e-05, | |
| "loss": 0.3097, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.090805902383655, | |
| "grad_norm": 0.7545678474652777, | |
| "learning_rate": 1.015865963834808e-05, | |
| "loss": 0.3018, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.0938327657964435, | |
| "grad_norm": 0.7362187870202994, | |
| "learning_rate": 1.0105775557880398e-05, | |
| "loss": 0.3028, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.0968596292092319, | |
| "grad_norm": 0.7738766420819987, | |
| "learning_rate": 1.0052888518642978e-05, | |
| "loss": 0.3031, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.0998864926220204, | |
| "grad_norm": 0.6483276863743285, | |
| "learning_rate": 1e-05, | |
| "loss": 0.2788, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.102913356034809, | |
| "grad_norm": 0.7085221964890713, | |
| "learning_rate": 9.947111481357023e-06, | |
| "loss": 0.2918, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1059402194475974, | |
| "grad_norm": 0.7625098354588125, | |
| "learning_rate": 9.894224442119606e-06, | |
| "loss": 0.3123, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.108967082860386, | |
| "grad_norm": 0.719688121865531, | |
| "learning_rate": 9.841340361651921e-06, | |
| "loss": 0.2933, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.1119939462731745, | |
| "grad_norm": 0.750560822944283, | |
| "learning_rate": 9.788460719235386e-06, | |
| "loss": 0.2966, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1150208096859628, | |
| "grad_norm": 0.7447605168139636, | |
| "learning_rate": 9.735586994027267e-06, | |
| "loss": 0.311, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1180476730987514, | |
| "grad_norm": 0.7384100418059716, | |
| "learning_rate": 9.682720665019325e-06, | |
| "loss": 0.2982, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.12107453651154, | |
| "grad_norm": 0.7413671197529755, | |
| "learning_rate": 9.62986321099642e-06, | |
| "loss": 0.2941, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1241013999243283, | |
| "grad_norm": 0.7589280728066173, | |
| "learning_rate": 9.57701611049517e-06, | |
| "loss": 0.2944, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.127128263337117, | |
| "grad_norm": 0.763299391635082, | |
| "learning_rate": 9.524180841762577e-06, | |
| "loss": 0.306, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1301551267499055, | |
| "grad_norm": 0.8110476425603581, | |
| "learning_rate": 9.471358882714687e-06, | |
| "loss": 0.2916, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.1331819901626938, | |
| "grad_norm": 0.754620993409508, | |
| "learning_rate": 9.418551710895243e-06, | |
| "loss": 0.3094, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.1362088535754824, | |
| "grad_norm": 0.687571159798026, | |
| "learning_rate": 9.365760803434356e-06, | |
| "loss": 0.2881, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.139235716988271, | |
| "grad_norm": 0.8403887107925709, | |
| "learning_rate": 9.312987637007191e-06, | |
| "loss": 0.3022, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.1422625804010593, | |
| "grad_norm": 0.7279418281704557, | |
| "learning_rate": 9.260233687792657e-06, | |
| "loss": 0.2907, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.145289443813848, | |
| "grad_norm": 0.8223954170263438, | |
| "learning_rate": 9.207500431432115e-06, | |
| "loss": 0.2989, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.1483163072266365, | |
| "grad_norm": 0.792893618619976, | |
| "learning_rate": 9.154789342988108e-06, | |
| "loss": 0.3071, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.1513431706394248, | |
| "grad_norm": 0.7358869242349388, | |
| "learning_rate": 9.102101896903084e-06, | |
| "loss": 0.3098, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.1543700340522134, | |
| "grad_norm": 0.8298249010965482, | |
| "learning_rate": 9.049439566958176e-06, | |
| "loss": 0.3116, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.157396897465002, | |
| "grad_norm": 0.8138247056909561, | |
| "learning_rate": 8.99680382623195e-06, | |
| "loss": 0.2824, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.1604237608777903, | |
| "grad_norm": 0.7690523126623674, | |
| "learning_rate": 8.944196147059233e-06, | |
| "loss": 0.3018, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.1634506242905789, | |
| "grad_norm": 0.8008296656488894, | |
| "learning_rate": 8.89161800098989e-06, | |
| "loss": 0.3003, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.1664774877033675, | |
| "grad_norm": 0.7153608484140768, | |
| "learning_rate": 8.839070858747697e-06, | |
| "loss": 0.305, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.1695043511161558, | |
| "grad_norm": 0.6816796706501214, | |
| "learning_rate": 8.786556190189183e-06, | |
| "loss": 0.2788, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.1725312145289444, | |
| "grad_norm": 0.7823169151002916, | |
| "learning_rate": 8.734075464262507e-06, | |
| "loss": 0.2977, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.175558077941733, | |
| "grad_norm": 0.7261528305196607, | |
| "learning_rate": 8.681630148966397e-06, | |
| "loss": 0.2886, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.1785849413545213, | |
| "grad_norm": 0.7365784987639206, | |
| "learning_rate": 8.629221711309056e-06, | |
| "loss": 0.2848, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.1816118047673099, | |
| "grad_norm": 0.7499747278448416, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.2888, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.1846386681800984, | |
| "grad_norm": 0.8219867361742994, | |
| "learning_rate": 8.52452133174478e-06, | |
| "loss": 0.2762, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.1876655315928868, | |
| "grad_norm": 0.7554920110713274, | |
| "learning_rate": 8.472232318532531e-06, | |
| "loss": 0.3025, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.1906923950056754, | |
| "grad_norm": 0.8195298248798789, | |
| "learning_rate": 8.419986040266502e-06, | |
| "loss": 0.291, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.193719258418464, | |
| "grad_norm": 0.7562126836406086, | |
| "learning_rate": 8.367783958387407e-06, | |
| "loss": 0.2875, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.1967461218312523, | |
| "grad_norm": 0.7394157348514436, | |
| "learning_rate": 8.315627533099697e-06, | |
| "loss": 0.2943, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.1997729852440409, | |
| "grad_norm": 0.7534833583318963, | |
| "learning_rate": 8.263518223330698e-06, | |
| "loss": 0.2979, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.2027998486568294, | |
| "grad_norm": 0.7693687927357046, | |
| "learning_rate": 8.211457486689829e-06, | |
| "loss": 0.3063, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.2058267120696178, | |
| "grad_norm": 0.7067736938862111, | |
| "learning_rate": 8.159446779427798e-06, | |
| "loss": 0.2946, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.2088535754824064, | |
| "grad_norm": 0.7363955653846156, | |
| "learning_rate": 8.107487556395902e-06, | |
| "loss": 0.2936, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.211880438895195, | |
| "grad_norm": 0.8480638019377852, | |
| "learning_rate": 8.055581271005292e-06, | |
| "loss": 0.3138, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.2149073023079833, | |
| "grad_norm": 0.7245448626432517, | |
| "learning_rate": 8.00372937518636e-06, | |
| "loss": 0.2892, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2179341657207718, | |
| "grad_norm": 0.7994323884630878, | |
| "learning_rate": 7.951933319348095e-06, | |
| "loss": 0.2991, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2209610291335604, | |
| "grad_norm": 0.723140744868228, | |
| "learning_rate": 7.900194552337516e-06, | |
| "loss": 0.2917, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.2239878925463488, | |
| "grad_norm": 0.7004181106477327, | |
| "learning_rate": 7.848514521399167e-06, | |
| "loss": 0.2944, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.2270147559591373, | |
| "grad_norm": 0.6786102420677808, | |
| "learning_rate": 7.796894672134594e-06, | |
| "loss": 0.2844, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.230041619371926, | |
| "grad_norm": 0.7292211657839189, | |
| "learning_rate": 7.745336448461958e-06, | |
| "loss": 0.2964, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.2330684827847143, | |
| "grad_norm": 0.7673864766297774, | |
| "learning_rate": 7.6938412925756e-06, | |
| "loss": 0.2938, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.2360953461975028, | |
| "grad_norm": 0.7583643773575292, | |
| "learning_rate": 7.642410644905726e-06, | |
| "loss": 0.2886, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.2391222096102914, | |
| "grad_norm": 0.7188229966634913, | |
| "learning_rate": 7.591045944078119e-06, | |
| "loss": 0.283, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.2421490730230798, | |
| "grad_norm": 0.7760179689777242, | |
| "learning_rate": 7.539748626873866e-06, | |
| "loss": 0.3055, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.2451759364358683, | |
| "grad_norm": 0.8005488545888585, | |
| "learning_rate": 7.488520128189209e-06, | |
| "loss": 0.3137, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.248202799848657, | |
| "grad_norm": 0.6956489887848328, | |
| "learning_rate": 7.4373618809953755e-06, | |
| "loss": 0.2856, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.2512296632614452, | |
| "grad_norm": 0.7985830014430589, | |
| "learning_rate": 7.386275316298513e-06, | |
| "loss": 0.3008, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.2542565266742338, | |
| "grad_norm": 0.7086171033345939, | |
| "learning_rate": 7.335261863099652e-06, | |
| "loss": 0.2969, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.2572833900870224, | |
| "grad_norm": 0.7824473459075301, | |
| "learning_rate": 7.2843229483547405e-06, | |
| "loss": 0.303, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.2603102534998107, | |
| "grad_norm": 0.709514387223654, | |
| "learning_rate": 7.233459996934731e-06, | |
| "loss": 0.3077, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.2633371169125993, | |
| "grad_norm": 0.7530320837964829, | |
| "learning_rate": 7.182674431585703e-06, | |
| "loss": 0.3034, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.2663639803253879, | |
| "grad_norm": 0.7315529183474975, | |
| "learning_rate": 7.131967672889101e-06, | |
| "loss": 0.2897, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.2693908437381762, | |
| "grad_norm": 0.7420913112141836, | |
| "learning_rate": 7.081341139221955e-06, | |
| "loss": 0.3221, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.2724177071509648, | |
| "grad_norm": 0.7103051952108638, | |
| "learning_rate": 7.0307962467172555e-06, | |
| "loss": 0.288, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.2754445705637534, | |
| "grad_norm": 0.8047381899219737, | |
| "learning_rate": 6.9803344092242855e-06, | |
| "loss": 0.3275, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.2784714339765417, | |
| "grad_norm": 0.7104822153733231, | |
| "learning_rate": 6.929957038269123e-06, | |
| "loss": 0.297, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.2814982973893303, | |
| "grad_norm": 0.7231783380360892, | |
| "learning_rate": 6.87966554301513e-06, | |
| "loss": 0.2891, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.2845251608021189, | |
| "grad_norm": 0.7518467188516564, | |
| "learning_rate": 6.8294613302235325e-06, | |
| "loss": 0.2901, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.2875520242149072, | |
| "grad_norm": 0.7713572679487606, | |
| "learning_rate": 6.779345804214088e-06, | |
| "loss": 0.3006, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.2905788876276958, | |
| "grad_norm": 0.7859497128457842, | |
| "learning_rate": 6.729320366825785e-06, | |
| "loss": 0.2844, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.2936057510404844, | |
| "grad_norm": 0.7411935925908989, | |
| "learning_rate": 6.679386417377649e-06, | |
| "loss": 0.3014, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.2966326144532727, | |
| "grad_norm": 0.7337313145909549, | |
| "learning_rate": 6.629545352629583e-06, | |
| "loss": 0.2895, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.2996594778660613, | |
| "grad_norm": 0.7020629155983285, | |
| "learning_rate": 6.579798566743314e-06, | |
| "loss": 0.2897, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.3026863412788499, | |
| "grad_norm": 0.7690957535592647, | |
| "learning_rate": 6.530147451243377e-06, | |
| "loss": 0.3076, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.3057132046916382, | |
| "grad_norm": 0.7363293747525321, | |
| "learning_rate": 6.480593394978208e-06, | |
| "loss": 0.2953, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.3087400681044268, | |
| "grad_norm": 0.7428197272387507, | |
| "learning_rate": 6.431137784081283e-06, | |
| "loss": 0.2945, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.3117669315172154, | |
| "grad_norm": 0.6998116149373447, | |
| "learning_rate": 6.381782001932352e-06, | |
| "loss": 0.2873, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.3147937949300037, | |
| "grad_norm": 0.737160826561709, | |
| "learning_rate": 6.33252742911874e-06, | |
| "loss": 0.3003, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.3178206583427923, | |
| "grad_norm": 0.7461756730788723, | |
| "learning_rate": 6.283375443396726e-06, | |
| "loss": 0.3067, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3208475217555808, | |
| "grad_norm": 0.7381979741046565, | |
| "learning_rate": 6.234327419653013e-06, | |
| "loss": 0.2998, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3238743851683692, | |
| "grad_norm": 0.8323458597686157, | |
| "learning_rate": 6.185384729866264e-06, | |
| "loss": 0.2869, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.3269012485811578, | |
| "grad_norm": 0.7505741136765467, | |
| "learning_rate": 6.136548743068713e-06, | |
| "loss": 0.3058, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.3299281119939463, | |
| "grad_norm": 0.7255593477792327, | |
| "learning_rate": 6.087820825307904e-06, | |
| "loss": 0.2913, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.3329549754067347, | |
| "grad_norm": 0.8552522766982229, | |
| "learning_rate": 6.039202339608432e-06, | |
| "loss": 0.2954, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.3359818388195233, | |
| "grad_norm": 0.7278177679141681, | |
| "learning_rate": 5.990694645933866e-06, | |
| "loss": 0.2894, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.3390087022323118, | |
| "grad_norm": 0.7286476935215261, | |
| "learning_rate": 5.9422991011486635e-06, | |
| "loss": 0.2773, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.3420355656451002, | |
| "grad_norm": 1.0203950174650196, | |
| "learning_rate": 5.894017058980249e-06, | |
| "loss": 0.2998, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.3450624290578888, | |
| "grad_norm": 0.6851268629938885, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.2733, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.3480892924706773, | |
| "grad_norm": 0.7329561855268578, | |
| "learning_rate": 5.797798881491138e-06, | |
| "loss": 0.2938, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.351116155883466, | |
| "grad_norm": 0.8008789233196687, | |
| "learning_rate": 5.749865437599703e-06, | |
| "loss": 0.3056, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.3541430192962542, | |
| "grad_norm": 0.7333658987533364, | |
| "learning_rate": 5.702050879108284e-06, | |
| "loss": 0.2877, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.3571698827090428, | |
| "grad_norm": 0.7118878966067858, | |
| "learning_rate": 5.654356543492883e-06, | |
| "loss": 0.2854, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.3601967461218312, | |
| "grad_norm": 0.7165509071187739, | |
| "learning_rate": 5.606783764866576e-06, | |
| "loss": 0.2859, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.3632236095346197, | |
| "grad_norm": 0.8456937277267329, | |
| "learning_rate": 5.559333873942259e-06, | |
| "loss": 0.3045, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.3662504729474083, | |
| "grad_norm": 0.7800574945408677, | |
| "learning_rate": 5.512008197995379e-06, | |
| "loss": 0.2729, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.3692773363601969, | |
| "grad_norm": 0.733597268236208, | |
| "learning_rate": 5.464808060826825e-06, | |
| "loss": 0.3001, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.3723041997729852, | |
| "grad_norm": 0.7234405055039879, | |
| "learning_rate": 5.417734782725896e-06, | |
| "loss": 0.2889, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.3753310631857738, | |
| "grad_norm": 0.8571673435578278, | |
| "learning_rate": 5.370789680433376e-06, | |
| "loss": 0.2964, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.3783579265985622, | |
| "grad_norm": 0.7100899160075168, | |
| "learning_rate": 5.323974067104687e-06, | |
| "loss": 0.2877, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.3813847900113507, | |
| "grad_norm": 0.715614073428981, | |
| "learning_rate": 5.277289252273175e-06, | |
| "loss": 0.2961, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.3844116534241393, | |
| "grad_norm": 0.7120853571127941, | |
| "learning_rate": 5.230736541813463e-06, | |
| "loss": 0.2873, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.3874385168369279, | |
| "grad_norm": 0.8162581998562425, | |
| "learning_rate": 5.184317237904939e-06, | |
| "loss": 0.2966, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.3904653802497162, | |
| "grad_norm": 0.7224629289945345, | |
| "learning_rate": 5.138032638995315e-06, | |
| "loss": 0.2832, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.3934922436625048, | |
| "grad_norm": 0.7181607300724704, | |
| "learning_rate": 5.091884039764321e-06, | |
| "loss": 0.2787, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.3965191070752931, | |
| "grad_norm": 0.745235460337963, | |
| "learning_rate": 5.045872731087479e-06, | |
| "loss": 0.3076, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.3995459704880817, | |
| "grad_norm": 0.7059274832329271, | |
| "learning_rate": 5.000000000000003e-06, | |
| "loss": 0.2812, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.4025728339008703, | |
| "grad_norm": 0.7261226530275093, | |
| "learning_rate": 4.954267129660789e-06, | |
| "loss": 0.293, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.4055996973136589, | |
| "grad_norm": 0.7441870710723584, | |
| "learning_rate": 4.908675399316534e-06, | |
| "loss": 0.2915, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.4086265607264472, | |
| "grad_norm": 0.7016763070099846, | |
| "learning_rate": 4.863226084265939e-06, | |
| "loss": 0.2888, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.4116534241392358, | |
| "grad_norm": 0.7271051268722979, | |
| "learning_rate": 4.817920455824045e-06, | |
| "loss": 0.2917, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.4146802875520241, | |
| "grad_norm": 0.8513329715776123, | |
| "learning_rate": 4.772759781286679e-06, | |
| "loss": 0.2855, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.4177071509648127, | |
| "grad_norm": 0.7377943919805398, | |
| "learning_rate": 4.727745323894976e-06, | |
| "loss": 0.2752, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4207340143776013, | |
| "grad_norm": 0.7599910722742809, | |
| "learning_rate": 4.682878342800087e-06, | |
| "loss": 0.276, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.4237608777903898, | |
| "grad_norm": 0.7456135123769635, | |
| "learning_rate": 4.638160093027908e-06, | |
| "loss": 0.2943, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.4267877412031782, | |
| "grad_norm": 0.7247590545628322, | |
| "learning_rate": 4.593591825444028e-06, | |
| "loss": 0.2832, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.4298146046159668, | |
| "grad_norm": 0.8152582694794308, | |
| "learning_rate": 4.549174786718684e-06, | |
| "loss": 0.2842, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.4328414680287551, | |
| "grad_norm": 0.7628957083900787, | |
| "learning_rate": 4.504910219291941e-06, | |
| "loss": 0.2986, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.4358683314415437, | |
| "grad_norm": 0.7097385828078482, | |
| "learning_rate": 4.460799361338898e-06, | |
| "loss": 0.2782, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.4388951948543323, | |
| "grad_norm": 0.6939480313334291, | |
| "learning_rate": 4.416843446735077e-06, | |
| "loss": 0.2899, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.4419220582671208, | |
| "grad_norm": 0.7601419650428849, | |
| "learning_rate": 4.373043705021899e-06, | |
| "loss": 0.2733, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.4449489216799092, | |
| "grad_norm": 0.747700547424619, | |
| "learning_rate": 4.3294013613722944e-06, | |
| "loss": 0.2898, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.4479757850926978, | |
| "grad_norm": 0.7321534176052829, | |
| "learning_rate": 4.2859176365564294e-06, | |
| "loss": 0.2989, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.451002648505486, | |
| "grad_norm": 0.6988411415930715, | |
| "learning_rate": 4.2425937469075626e-06, | |
| "loss": 0.2886, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.4540295119182747, | |
| "grad_norm": 0.7761365319117365, | |
| "learning_rate": 4.19943090428802e-06, | |
| "loss": 0.3139, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.4570563753310632, | |
| "grad_norm": 0.7065579979784431, | |
| "learning_rate": 4.1564303160552935e-06, | |
| "loss": 0.2917, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.4600832387438518, | |
| "grad_norm": 0.7570160815598899, | |
| "learning_rate": 4.113593185028273e-06, | |
| "loss": 0.2765, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.4631101021566402, | |
| "grad_norm": 0.7795028589365341, | |
| "learning_rate": 4.070920709453597e-06, | |
| "loss": 0.2941, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.4661369655694287, | |
| "grad_norm": 0.6828977534724469, | |
| "learning_rate": 4.028414082972141e-06, | |
| "loss": 0.2768, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.469163828982217, | |
| "grad_norm": 0.7548357869538868, | |
| "learning_rate": 3.986074494585619e-06, | |
| "loss": 0.3045, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.4721906923950057, | |
| "grad_norm": 0.7010855131732908, | |
| "learning_rate": 3.943903128623336e-06, | |
| "loss": 0.2759, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.4752175558077942, | |
| "grad_norm": 0.7113138520875217, | |
| "learning_rate": 3.9019011647090465e-06, | |
| "loss": 0.2968, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.4782444192205828, | |
| "grad_norm": 0.8304273303189595, | |
| "learning_rate": 3.860069777727983e-06, | |
| "loss": 0.2816, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.4812712826333712, | |
| "grad_norm": 0.6854195217020267, | |
| "learning_rate": 3.818410137793947e-06, | |
| "loss": 0.2874, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.4842981460461597, | |
| "grad_norm": 0.7782139117998569, | |
| "learning_rate": 3.7769234102166365e-06, | |
| "loss": 0.2908, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.487325009458948, | |
| "grad_norm": 0.7151336456391801, | |
| "learning_rate": 3.735610755468988e-06, | |
| "loss": 0.2947, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.4903518728717366, | |
| "grad_norm": 0.7493511967942955, | |
| "learning_rate": 3.6944733291547784e-06, | |
| "loss": 0.2905, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.4933787362845252, | |
| "grad_norm": 0.7999001632157942, | |
| "learning_rate": 3.653512281976238e-06, | |
| "loss": 0.2813, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.4964055996973138, | |
| "grad_norm": 0.7780555119581226, | |
| "learning_rate": 3.612728759701919e-06, | |
| "loss": 0.287, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.4994324631101021, | |
| "grad_norm": 0.6980813019684152, | |
| "learning_rate": 3.5721239031346067e-06, | |
| "loss": 0.271, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.5024593265228905, | |
| "grad_norm": 0.7924304836800483, | |
| "learning_rate": 3.5316988480794255e-06, | |
| "loss": 0.3006, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.505486189935679, | |
| "grad_norm": 0.7764118578876771, | |
| "learning_rate": 3.4914547253120655e-06, | |
| "loss": 0.3003, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.5085130533484676, | |
| "grad_norm": 0.7233571636448777, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.2775, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.5115399167612562, | |
| "grad_norm": 0.7715212275906694, | |
| "learning_rate": 3.4115137744067516e-06, | |
| "loss": 0.2921, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.5145667801740448, | |
| "grad_norm": 0.7585703472864148, | |
| "learning_rate": 3.37181918238904e-06, | |
| "loss": 0.2988, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.5175936435868331, | |
| "grad_norm": 0.7373584233885836, | |
| "learning_rate": 3.3323099948370853e-06, | |
| "loss": 0.2947, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.5206205069996215, | |
| "grad_norm": 0.7024793202535262, | |
| "learning_rate": 3.292987316907792e-06, | |
| "loss": 0.2866, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.52364737041241, | |
| "grad_norm": 0.6866934173104662, | |
| "learning_rate": 3.253852248540994e-06, | |
| "loss": 0.288, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.5266742338251986, | |
| "grad_norm": 0.7446535452898911, | |
| "learning_rate": 3.2149058844286796e-06, | |
| "loss": 0.2857, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.5297010972379872, | |
| "grad_norm": 0.7058024107660282, | |
| "learning_rate": 3.1761493139843734e-06, | |
| "loss": 0.2746, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.5327279606507758, | |
| "grad_norm": 0.7152314604142057, | |
| "learning_rate": 3.1375836213126653e-06, | |
| "loss": 0.275, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.5357548240635641, | |
| "grad_norm": 0.7361499347573892, | |
| "learning_rate": 3.099209885178882e-06, | |
| "loss": 0.3049, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.5387816874763525, | |
| "grad_norm": 0.7070178398962562, | |
| "learning_rate": 3.0610291789789094e-06, | |
| "loss": 0.2754, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.541808550889141, | |
| "grad_norm": 0.7056929264345234, | |
| "learning_rate": 3.023042570709185e-06, | |
| "loss": 0.2834, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.5448354143019296, | |
| "grad_norm": 0.7358822822235239, | |
| "learning_rate": 2.9852511229367862e-06, | |
| "loss": 0.2769, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.5478622777147182, | |
| "grad_norm": 0.7269462023969843, | |
| "learning_rate": 2.9476558927697605e-06, | |
| "loss": 0.2891, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.5508891411275068, | |
| "grad_norm": 0.7267648816561406, | |
| "learning_rate": 2.9102579318274994e-06, | |
| "loss": 0.284, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.553916004540295, | |
| "grad_norm": 0.81082083342321, | |
| "learning_rate": 2.8730582862113743e-06, | |
| "loss": 0.29, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.5569428679530835, | |
| "grad_norm": 0.7349299651358615, | |
| "learning_rate": 2.8360579964754277e-06, | |
| "loss": 0.2956, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.559969731365872, | |
| "grad_norm": 0.7347955385843173, | |
| "learning_rate": 2.7992580975973136e-06, | |
| "loss": 0.2898, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.5629965947786606, | |
| "grad_norm": 0.7741223951636901, | |
| "learning_rate": 2.7626596189492983e-06, | |
| "loss": 0.2842, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.5660234581914492, | |
| "grad_norm": 0.7302759349378197, | |
| "learning_rate": 2.726263584269513e-06, | |
| "loss": 0.2872, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.5690503216042377, | |
| "grad_norm": 0.7068378693126671, | |
| "learning_rate": 2.690071011633284e-06, | |
| "loss": 0.2691, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.572077185017026, | |
| "grad_norm": 0.763210419698676, | |
| "learning_rate": 2.6540829134246683e-06, | |
| "loss": 0.2985, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.5751040484298144, | |
| "grad_norm": 0.7458387625331496, | |
| "learning_rate": 2.618300296308135e-06, | |
| "loss": 0.2921, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.578130911842603, | |
| "grad_norm": 0.7549843645214687, | |
| "learning_rate": 2.582724161200405e-06, | |
| "loss": 0.2952, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.5811577752553916, | |
| "grad_norm": 0.6708451309727878, | |
| "learning_rate": 2.5473555032424534e-06, | |
| "loss": 0.2707, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.5841846386681802, | |
| "grad_norm": 0.7921423765859591, | |
| "learning_rate": 2.5121953117716744e-06, | |
| "loss": 0.3005, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.5872115020809687, | |
| "grad_norm": 0.7156686993032988, | |
| "learning_rate": 2.477244570294206e-06, | |
| "loss": 0.2928, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.590238365493757, | |
| "grad_norm": 0.741438268896103, | |
| "learning_rate": 2.4425042564574186e-06, | |
| "loss": 0.2909, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.5932652289065454, | |
| "grad_norm": 0.7126420131911106, | |
| "learning_rate": 2.4079753420225694e-06, | |
| "loss": 0.2893, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.596292092319334, | |
| "grad_norm": 0.6945180976922313, | |
| "learning_rate": 2.3736587928376197e-06, | |
| "loss": 0.2717, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.5993189557321226, | |
| "grad_norm": 0.7417501745321297, | |
| "learning_rate": 2.339555568810221e-06, | |
| "loss": 0.2789, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.6023458191449111, | |
| "grad_norm": 0.7839953653490982, | |
| "learning_rate": 2.305666623880858e-06, | |
| "loss": 0.2839, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.6053726825576997, | |
| "grad_norm": 0.7551774729545742, | |
| "learning_rate": 2.27199290599617e-06, | |
| "loss": 0.2976, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.608399545970488, | |
| "grad_norm": 0.7850882748573174, | |
| "learning_rate": 2.2385353570824308e-06, | |
| "loss": 0.3143, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.6114264093832764, | |
| "grad_norm": 0.6912849718601763, | |
| "learning_rate": 2.2052949130192136e-06, | |
| "loss": 0.2879, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.614453272796065, | |
| "grad_norm": 0.69305922593856, | |
| "learning_rate": 2.172272503613183e-06, | |
| "loss": 0.2769, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.6174801362088536, | |
| "grad_norm": 0.7498482055488955, | |
| "learning_rate": 2.1394690525721275e-06, | |
| "loss": 0.2975, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.6205069996216421, | |
| "grad_norm": 0.7029708904978335, | |
| "learning_rate": 2.1068854774790783e-06, | |
| "loss": 0.2882, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.6235338630344307, | |
| "grad_norm": 0.7267016274862943, | |
| "learning_rate": 2.0745226897666858e-06, | |
| "loss": 0.2847, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.626560726447219, | |
| "grad_norm": 0.7343301279256917, | |
| "learning_rate": 2.0423815946916783e-06, | |
| "loss": 0.2729, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.6295875898600074, | |
| "grad_norm": 0.6933712288775056, | |
| "learning_rate": 2.010463091309587e-06, | |
| "loss": 0.29, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.632614453272796, | |
| "grad_norm": 0.7128394414147942, | |
| "learning_rate": 1.9787680724495617e-06, | |
| "loss": 0.2789, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.6356413166855845, | |
| "grad_norm": 0.6806063171358394, | |
| "learning_rate": 1.947297424689414e-06, | |
| "loss": 0.2845, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.6386681800983731, | |
| "grad_norm": 0.7616271314644175, | |
| "learning_rate": 1.9160520283308115e-06, | |
| "loss": 0.3032, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.6416950435111617, | |
| "grad_norm": 0.7434964202112795, | |
| "learning_rate": 1.8850327573746584e-06, | |
| "loss": 0.2892, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.64472190692395, | |
| "grad_norm": 0.6899606630068712, | |
| "learning_rate": 1.854240479496643e-06, | |
| "loss": 0.2754, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.6477487703367384, | |
| "grad_norm": 0.7403122570498195, | |
| "learning_rate": 1.8236760560229715e-06, | |
| "loss": 0.3013, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.650775633749527, | |
| "grad_norm": 0.6980924495315326, | |
| "learning_rate": 1.7933403419062689e-06, | |
| "loss": 0.2954, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.6538024971623155, | |
| "grad_norm": 0.7506213925850654, | |
| "learning_rate": 1.7632341857016733e-06, | |
| "loss": 0.2967, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.656829360575104, | |
| "grad_norm": 0.7082781511509584, | |
| "learning_rate": 1.7333584295430894e-06, | |
| "loss": 0.282, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.6598562239878927, | |
| "grad_norm": 0.7245149393135567, | |
| "learning_rate": 1.7037139091196396e-06, | |
| "loss": 0.285, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.662883087400681, | |
| "grad_norm": 0.7430794268067457, | |
| "learning_rate": 1.6743014536522872e-06, | |
| "loss": 0.2975, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.6659099508134694, | |
| "grad_norm": 0.7352819171956158, | |
| "learning_rate": 1.6451218858706374e-06, | |
| "loss": 0.2836, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.668936814226258, | |
| "grad_norm": 0.6899265091515417, | |
| "learning_rate": 1.616176021989926e-06, | |
| "loss": 0.2731, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.6719636776390465, | |
| "grad_norm": 0.7306946981845605, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.2735, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.674990541051835, | |
| "grad_norm": 0.7537542357900331, | |
| "learning_rate": 1.558988638083616e-06, | |
| "loss": 0.2889, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.6780174044646237, | |
| "grad_norm": 0.769024927653227, | |
| "learning_rate": 1.5307487177120773e-06, | |
| "loss": 0.3034, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.681044267877412, | |
| "grad_norm": 0.7969651911826424, | |
| "learning_rate": 1.5027457005048573e-06, | |
| "loss": 0.2867, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.6840711312902006, | |
| "grad_norm": 0.7495097659385466, | |
| "learning_rate": 1.4749803697665366e-06, | |
| "loss": 0.2823, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.687097994702989, | |
| "grad_norm": 0.742092530785966, | |
| "learning_rate": 1.4474535021531099e-06, | |
| "loss": 0.2737, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.6901248581157775, | |
| "grad_norm": 0.7408088895223955, | |
| "learning_rate": 1.4201658676502294e-06, | |
| "loss": 0.2857, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.693151721528566, | |
| "grad_norm": 0.746135322244573, | |
| "learning_rate": 1.3931182295516965e-06, | |
| "loss": 0.2818, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.6961785849413547, | |
| "grad_norm": 0.7303274415674746, | |
| "learning_rate": 1.3663113444380905e-06, | |
| "loss": 0.2842, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.699205448354143, | |
| "grad_norm": 0.7421086064710165, | |
| "learning_rate": 1.339745962155613e-06, | |
| "loss": 0.2746, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.7022323117669316, | |
| "grad_norm": 0.738010247003656, | |
| "learning_rate": 1.3134228257951142e-06, | |
| "loss": 0.2816, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.70525917517972, | |
| "grad_norm": 0.653930048735564, | |
| "learning_rate": 1.2873426716713012e-06, | |
| "loss": 0.2731, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.7082860385925085, | |
| "grad_norm": 0.7291415886800213, | |
| "learning_rate": 1.2615062293021508e-06, | |
| "loss": 0.2833, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.711312902005297, | |
| "grad_norm": 0.7429149069294742, | |
| "learning_rate": 1.2359142213884933e-06, | |
| "loss": 0.2898, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.7143397654180856, | |
| "grad_norm": 0.7468563459361297, | |
| "learning_rate": 1.2105673637938054e-06, | |
| "loss": 0.282, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.717366628830874, | |
| "grad_norm": 0.748235390000226, | |
| "learning_rate": 1.1854663655241804e-06, | |
| "loss": 0.2868, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.7203934922436626, | |
| "grad_norm": 0.7399831789304407, | |
| "learning_rate": 1.1606119287084982e-06, | |
| "loss": 0.2995, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.723420355656451, | |
| "grad_norm": 0.70530159421794, | |
| "learning_rate": 1.136004748578785e-06, | |
| "loss": 0.2901, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.7264472190692395, | |
| "grad_norm": 0.7263729785065444, | |
| "learning_rate": 1.1116455134507665e-06, | |
| "loss": 0.2804, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.729474082482028, | |
| "grad_norm": 0.7400879965869811, | |
| "learning_rate": 1.0875349047046113e-06, | |
| "loss": 0.2832, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.7325009458948166, | |
| "grad_norm": 0.6939441719805616, | |
| "learning_rate": 1.0636735967658785e-06, | |
| "loss": 0.2777, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.735527809307605, | |
| "grad_norm": 0.7187457254469004, | |
| "learning_rate": 1.0400622570866426e-06, | |
| "loss": 0.2882, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.7385546727203935, | |
| "grad_norm": 0.7129927768099702, | |
| "learning_rate": 1.0167015461268303e-06, | |
| "loss": 0.2965, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.741581536133182, | |
| "grad_norm": 0.7377020049375957, | |
| "learning_rate": 9.935921173357444e-07, | |
| "loss": 0.2767, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.7446083995459705, | |
| "grad_norm": 0.7171897972465745, | |
| "learning_rate": 9.707346171337895e-07, | |
| "loss": 0.2843, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.747635262958759, | |
| "grad_norm": 0.7470525113713176, | |
| "learning_rate": 9.481296848943744e-07, | |
| "loss": 0.2792, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.7506621263715476, | |
| "grad_norm": 0.6835454968910495, | |
| "learning_rate": 9.257779529260558e-07, | |
| "loss": 0.2836, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.753688989784336, | |
| "grad_norm": 0.7331058996787356, | |
| "learning_rate": 9.036800464548157e-07, | |
| "loss": 0.2755, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.7567158531971245, | |
| "grad_norm": 0.7002489691833005, | |
| "learning_rate": 8.818365836066101e-07, | |
| "loss": 0.2877, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.7597427166099129, | |
| "grad_norm": 0.676411565349038, | |
| "learning_rate": 8.602481753900427e-07, | |
| "loss": 0.2689, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.7627695800227015, | |
| "grad_norm": 0.7212299819251898, | |
| "learning_rate": 8.389154256793042e-07, | |
| "loss": 0.2921, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.76579644343549, | |
| "grad_norm": 0.6871888810542574, | |
| "learning_rate": 8.178389311972612e-07, | |
| "loss": 0.2816, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.7688233068482786, | |
| "grad_norm": 0.7189354803567105, | |
| "learning_rate": 7.970192814987676e-07, | |
| "loss": 0.2876, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.771850170261067, | |
| "grad_norm": 0.6980741561133377, | |
| "learning_rate": 7.764570589541876e-07, | |
| "loss": 0.289, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.7748770336738555, | |
| "grad_norm": 0.741741361276942, | |
| "learning_rate": 7.561528387330797e-07, | |
| "loss": 0.2769, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.7779038970866439, | |
| "grad_norm": 0.7323169677735252, | |
| "learning_rate": 7.361071887881376e-07, | |
| "loss": 0.2805, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.7809307604994324, | |
| "grad_norm": 0.6892002504883342, | |
| "learning_rate": 7.163206698392744e-07, | |
| "loss": 0.2761, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.783957623912221, | |
| "grad_norm": 0.7653224993989634, | |
| "learning_rate": 6.96793835357964e-07, | |
| "loss": 0.2831, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.7869844873250096, | |
| "grad_norm": 0.699940441666469, | |
| "learning_rate": 6.775272315517423e-07, | |
| "loss": 0.2803, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.790011350737798, | |
| "grad_norm": 0.7612862245089655, | |
| "learning_rate": 6.585213973489335e-07, | |
| "loss": 0.2906, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.7930382141505865, | |
| "grad_norm": 0.802610549538578, | |
| "learning_rate": 6.397768643835755e-07, | |
| "loss": 0.2908, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.7960650775633749, | |
| "grad_norm": 0.7606915724821676, | |
| "learning_rate": 6.212941569805508e-07, | |
| "loss": 0.2972, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.7990919409761634, | |
| "grad_norm": 0.7580447676780981, | |
| "learning_rate": 6.030737921409169e-07, | |
| "loss": 0.306, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.802118804388952, | |
| "grad_norm": 0.7217683294734949, | |
| "learning_rate": 5.851162795274445e-07, | |
| "loss": 0.2851, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.8051456678017406, | |
| "grad_norm": 0.6646661253396376, | |
| "learning_rate": 5.674221214503639e-07, | |
| "loss": 0.2622, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.808172531214529, | |
| "grad_norm": 0.7527591491824924, | |
| "learning_rate": 5.499918128533155e-07, | |
| "loss": 0.2845, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.8111993946273175, | |
| "grad_norm": 0.7093218838484242, | |
| "learning_rate": 5.328258412994958e-07, | |
| "loss": 0.2813, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.8142262580401058, | |
| "grad_norm": 0.7583654632717032, | |
| "learning_rate": 5.159246869580348e-07, | |
| "loss": 0.2955, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.8172531214528944, | |
| "grad_norm": 0.7241449879881268, | |
| "learning_rate": 4.992888225905467e-07, | |
| "loss": 0.2809, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.820279984865683, | |
| "grad_norm": 0.7326374792225369, | |
| "learning_rate": 4.829187135379221e-07, | |
| "loss": 0.2794, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.8233068482784716, | |
| "grad_norm": 0.7011079746347699, | |
| "learning_rate": 4.6681481770729844e-07, | |
| "loss": 0.2851, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.82633371169126, | |
| "grad_norm": 0.7339175136770655, | |
| "learning_rate": 4.509775855592613e-07, | |
| "loss": 0.2932, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.8293605751040485, | |
| "grad_norm": 0.742366044242703, | |
| "learning_rate": 4.354074600952407e-07, | |
| "loss": 0.2817, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.8323874385168368, | |
| "grad_norm": 0.6960508964119638, | |
| "learning_rate": 4.2010487684511105e-07, | |
| "loss": 0.2749, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.8354143019296254, | |
| "grad_norm": 0.7177484864169353, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.2669, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.838441165342414, | |
| "grad_norm": 0.698412561144052, | |
| "learning_rate": 3.9030404167542777e-07, | |
| "loss": 0.2891, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.8414680287552025, | |
| "grad_norm": 0.7134931269829115, | |
| "learning_rate": 3.7580662334929517e-07, | |
| "loss": 0.2796, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.844494892167991, | |
| "grad_norm": 0.655839335813593, | |
| "learning_rate": 3.615784144005796e-07, | |
| "loss": 0.2694, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.8475217555807795, | |
| "grad_norm": 0.8102919212572529, | |
| "learning_rate": 3.476198128228736e-07, | |
| "loss": 0.2998, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.8505486189935678, | |
| "grad_norm": 0.758533730707042, | |
| "learning_rate": 3.339312090682689e-07, | |
| "loss": 0.2919, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.8535754824063564, | |
| "grad_norm": 0.7257975043920856, | |
| "learning_rate": 3.2051298603643754e-07, | |
| "loss": 0.2782, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.856602345819145, | |
| "grad_norm": 0.7243900104948473, | |
| "learning_rate": 3.0736551906392354e-07, | |
| "loss": 0.2841, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.8596292092319335, | |
| "grad_norm": 0.7172858690647557, | |
| "learning_rate": 2.9448917591363923e-07, | |
| "loss": 0.2802, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.8626560726447219, | |
| "grad_norm": 0.6812343387762557, | |
| "learning_rate": 2.818843167645835e-07, | |
| "loss": 0.2799, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.8656829360575105, | |
| "grad_norm": 0.7377461951855622, | |
| "learning_rate": 2.6955129420176193e-07, | |
| "loss": 0.284, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.8687097994702988, | |
| "grad_norm": 0.6826023677217762, | |
| "learning_rate": 2.5749045320632824e-07, | |
| "loss": 0.268, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.8717366628830874, | |
| "grad_norm": 0.7177902190604383, | |
| "learning_rate": 2.4570213114592957e-07, | |
| "loss": 0.2919, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.874763526295876, | |
| "grad_norm": 0.7630130041934038, | |
| "learning_rate": 2.3418665776527738e-07, | |
| "loss": 0.2872, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.8777903897086645, | |
| "grad_norm": 0.6598327695164433, | |
| "learning_rate": 2.2294435517691504e-07, | |
| "loss": 0.2702, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.8808172531214529, | |
| "grad_norm": 0.6955688252731471, | |
| "learning_rate": 2.119755378522137e-07, | |
| "loss": 0.2698, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.8838441165342414, | |
| "grad_norm": 0.6719447189782813, | |
| "learning_rate": 2.0128051261257165e-07, | |
| "loss": 0.2821, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.8868709799470298, | |
| "grad_norm": 0.6896399440887028, | |
| "learning_rate": 1.908595786208367e-07, | |
| "loss": 0.289, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.8898978433598184, | |
| "grad_norm": 0.7462970074930224, | |
| "learning_rate": 1.8071302737293294e-07, | |
| "loss": 0.2879, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.892924706772607, | |
| "grad_norm": 0.7185170588257133, | |
| "learning_rate": 1.7084114268971275e-07, | |
| "loss": 0.2711, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.8959515701853955, | |
| "grad_norm": 0.6975099789039778, | |
| "learning_rate": 1.612442007090076e-07, | |
| "loss": 0.2738, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.8989784335981839, | |
| "grad_norm": 0.7383859590727406, | |
| "learning_rate": 1.519224698779198e-07, | |
| "loss": 0.279, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.9020052970109724, | |
| "grad_norm": 0.7351693534254338, | |
| "learning_rate": 1.4287621094529524e-07, | |
| "loss": 0.3017, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.9050321604237608, | |
| "grad_norm": 0.7343494928664471, | |
| "learning_rate": 1.3410567695444576e-07, | |
| "loss": 0.3005, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.9080590238365494, | |
| "grad_norm": 0.6962622472427729, | |
| "learning_rate": 1.2561111323605714e-07, | |
| "loss": 0.2843, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.911085887249338, | |
| "grad_norm": 0.6879715684560946, | |
| "learning_rate": 1.1739275740134004e-07, | |
| "loss": 0.2798, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.9141127506621265, | |
| "grad_norm": 0.7211784347702246, | |
| "learning_rate": 1.0945083933537104e-07, | |
| "loss": 0.2871, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.9171396140749148, | |
| "grad_norm": 0.6920713687831043, | |
| "learning_rate": 1.0178558119067316e-07, | |
| "loss": 0.2685, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.9201664774877034, | |
| "grad_norm": 0.7113552568049702, | |
| "learning_rate": 9.439719738099318e-08, | |
| "loss": 0.273, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.9231933409004918, | |
| "grad_norm": 0.7230670643939646, | |
| "learning_rate": 8.728589457530857e-08, | |
| "loss": 0.281, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.9262202043132803, | |
| "grad_norm": 0.67956434417701, | |
| "learning_rate": 8.04518716920466e-08, | |
| "loss": 0.2726, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.929247067726069, | |
| "grad_norm": 0.686099801645611, | |
| "learning_rate": 7.389531989351773e-08, | |
| "loss": 0.2735, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.9322739311388575, | |
| "grad_norm": 0.6966486340782885, | |
| "learning_rate": 6.761642258056977e-08, | |
| "loss": 0.2764, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.9353007945516458, | |
| "grad_norm": 0.7164865927215468, | |
| "learning_rate": 6.161535538745877e-08, | |
| "loss": 0.2923, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.9383276579644344, | |
| "grad_norm": 0.7439418958100256, | |
| "learning_rate": 5.5892286176932875e-08, | |
| "loss": 0.2813, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.9413545213772228, | |
| "grad_norm": 0.7288911553256673, | |
| "learning_rate": 5.044737503554165e-08, | |
| "loss": 0.2937, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.9443813847900113, | |
| "grad_norm": 0.6994801424222791, | |
| "learning_rate": 4.528077426915412e-08, | |
| "loss": 0.2912, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.9474082482028, | |
| "grad_norm": 0.7341432707743182, | |
| "learning_rate": 4.0392628398699954e-08, | |
| "loss": 0.2832, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.9504351116155885, | |
| "grad_norm": 0.6979419114448084, | |
| "learning_rate": 3.578307415612714e-08, | |
| "loss": 0.2701, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.9534619750283768, | |
| "grad_norm": 0.750803155187425, | |
| "learning_rate": 3.1452240480577265e-08, | |
| "loss": 0.2926, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.9564888384411654, | |
| "grad_norm": 0.6838855971162886, | |
| "learning_rate": 2.7400248514776184e-08, | |
| "loss": 0.2823, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.9595157018539537, | |
| "grad_norm": 0.7356635176578719, | |
| "learning_rate": 2.3627211601651157e-08, | |
| "loss": 0.2894, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.9625425652667423, | |
| "grad_norm": 0.7016083406103137, | |
| "learning_rate": 2.013323528115674e-08, | |
| "loss": 0.2764, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.9655694286795309, | |
| "grad_norm": 0.6923939069789969, | |
| "learning_rate": 1.6918417287318245e-08, | |
| "loss": 0.273, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.9685962920923195, | |
| "grad_norm": 0.754724684231685, | |
| "learning_rate": 1.3982847545507271e-08, | |
| "loss": 0.2842, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.9716231555051078, | |
| "grad_norm": 0.7434440805750054, | |
| "learning_rate": 1.1326608169920373e-08, | |
| "loss": 0.2802, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.9746500189178964, | |
| "grad_norm": 0.7231237117817423, | |
| "learning_rate": 8.949773461282008e-09, | |
| "loss": 0.288, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.9776768823306847, | |
| "grad_norm": 0.6985415731193907, | |
| "learning_rate": 6.8524099047695415e-09, | |
| "loss": 0.2867, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.9807037457434733, | |
| "grad_norm": 0.7055975612573275, | |
| "learning_rate": 5.034576168149175e-09, | |
| "loss": 0.2727, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.9837306091562619, | |
| "grad_norm": 0.6909272774853085, | |
| "learning_rate": 3.4963231001383657e-09, | |
| "loss": 0.2769, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.9867574725690504, | |
| "grad_norm": 0.7060592973015987, | |
| "learning_rate": 2.237693728981416e-09, | |
| "loss": 0.2773, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.9897843359818388, | |
| "grad_norm": 0.7319430087975928, | |
| "learning_rate": 1.2587232612493172e-09, | |
| "loss": 0.3011, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.9928111993946274, | |
| "grad_norm": 0.7167491228344711, | |
| "learning_rate": 5.594390808494332e-10, | |
| "loss": 0.273, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.9958380628074157, | |
| "grad_norm": 0.7035027842122591, | |
| "learning_rate": 1.3986074826388697e-10, | |
| "loss": 0.2865, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.9988649262202043, | |
| "grad_norm": 0.7201409937608538, | |
| "learning_rate": 0.0, | |
| "loss": 0.273, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.9988649262202043, | |
| "step": 660, | |
| "total_flos": 64959092490240.0, | |
| "train_loss": 0.14502406124815798, | |
| "train_runtime": 1228.9078, | |
| "train_samples_per_second": 68.799, | |
| "train_steps_per_second": 0.537 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 660, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 64959092490240.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
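A minimal sketch of how a state file like the one above can be inspected programmatically. It assumes the JSON is saved under the name the Hugging Face `Trainer` uses when checkpointing, `trainer_state.json`. The peak learning rate (2e-5) and warmup length (66 steps) in the helper below are not recorded in the file; they are inferred from the logged `learning_rate` values, whose tail matches a cosine decay with linear warmup to within floating-point noise, so treat them as assumptions rather than ground truth.

```python
# Sketch: load a Hugging Face trainer_state.json and sanity-check its
# learning-rate schedule. Only the standard library is required.
import json
import math

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the trailing summary record does not.
steps = [rec for rec in state["log_history"] if "loss" in rec]
summary = state["log_history"][-1]

print(f"logged steps: {len(steps)}, final loss: {steps[-1]['loss']}")
print(f"runtime: {summary['train_runtime']:.1f}s, "
      f"throughput: {summary['train_samples_per_second']} samples/s")

def cosine_with_warmup(step, peak=2e-5, warmup=66, total=660):
    """Cosine decay with linear warmup -- the shape the logged LRs follow.

    peak/warmup are inferred from the data, not stored in the file."""
    if step < warmup:
        return peak * step / warmup
    progress = (step - warmup) / (total - warmup)
    return peak * 0.5 * (1.0 + math.cos(math.pi * progress))

# Spot-check the reconstruction against the last few logged values;
# they should agree to several significant digits.
for rec in steps[-3:]:
    print(rec["step"], rec["learning_rate"], cosine_with_warmup(rec["step"]))
```

Filtering on the `"loss"` key is the simplest way to separate per-step records from the final summary record, since both live in the same `log_history` array.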