{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9977298524404086,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003026863412788498,
      "grad_norm": 3.2445129100487082,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 0.6711,
      "step": 1
    },
    {
      "epoch": 0.006053726825576996,
      "grad_norm": 3.5335666152674943,
      "learning_rate": 6.060606060606061e-07,
      "loss": 0.7432,
      "step": 2
    },
    {
      "epoch": 0.009080590238365494,
      "grad_norm": 3.092373820607628,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.6732,
      "step": 3
    },
    {
      "epoch": 0.012107453651153992,
      "grad_norm": 3.094844032193639,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 0.7014,
      "step": 4
    },
    {
      "epoch": 0.01513431706394249,
      "grad_norm": 3.4115191876182527,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.6961,
      "step": 5
    },
    {
      "epoch": 0.018161180476730987,
      "grad_norm": 3.072690992638549,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.6827,
      "step": 6
    },
    {
      "epoch": 0.021188043889519486,
      "grad_norm": 2.860518471953025,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 0.6606,
      "step": 7
    },
    {
      "epoch": 0.024214907302307985,
      "grad_norm": 2.4324658565927293,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 0.6336,
      "step": 8
    },
    {
      "epoch": 0.02724177071509648,
      "grad_norm": 1.8804279234560397,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6493,
      "step": 9
    },
    {
      "epoch": 0.03026863412788498,
      "grad_norm": 1.592877158659705,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6191,
      "step": 10
    },
    {
      "epoch": 0.03329549754067348,
      "grad_norm": 1.564927107732014,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.5899,
      "step": 11
    },
    {
      "epoch": 0.036322360953461974,
      "grad_norm": 1.7171119534875026,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.5677,
      "step": 12
    },
    {
      "epoch": 0.03934922436625047,
      "grad_norm": 2.6341688392176477,
      "learning_rate": 3.93939393939394e-06,
      "loss": 0.5974,
      "step": 13
    },
    {
      "epoch": 0.04237608777903897,
      "grad_norm": 2.2530347119007184,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.5729,
      "step": 14
    },
    {
      "epoch": 0.04540295119182747,
      "grad_norm": 1.6857532767524732,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.55,
      "step": 15
    },
    {
      "epoch": 0.04842981460461597,
      "grad_norm": 1.51981530198523,
      "learning_rate": 4.848484848484849e-06,
      "loss": 0.5597,
      "step": 16
    },
    {
      "epoch": 0.051456678017404466,
      "grad_norm": 1.509048633791455,
      "learning_rate": 5.151515151515152e-06,
      "loss": 0.5725,
      "step": 17
    },
    {
      "epoch": 0.05448354143019296,
      "grad_norm": 1.4753443619493487,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.5445,
      "step": 18
    },
    {
      "epoch": 0.057510404842981463,
      "grad_norm": 1.3476278926685936,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 0.5433,
      "step": 19
    },
    {
      "epoch": 0.06053726825576996,
      "grad_norm": 1.1764510325447552,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.566,
      "step": 20
    },
    {
      "epoch": 0.06356413166855845,
      "grad_norm": 1.0557249742490764,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.5302,
      "step": 21
    },
    {
      "epoch": 0.06659099508134696,
      "grad_norm": 1.2019113587353751,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5396,
      "step": 22
    },
    {
      "epoch": 0.06961785849413545,
      "grad_norm": 1.091157484250839,
      "learning_rate": 6.969696969696971e-06,
      "loss": 0.4999,
      "step": 23
    },
    {
      "epoch": 0.07264472190692395,
      "grad_norm": 0.9613726660768409,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.5139,
      "step": 24
    },
    {
      "epoch": 0.07567158531971245,
      "grad_norm": 1.0095502687317122,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.4973,
      "step": 25
    },
    {
      "epoch": 0.07869844873250094,
      "grad_norm": 0.9125554159795735,
      "learning_rate": 7.87878787878788e-06,
      "loss": 0.5028,
      "step": 26
    },
    {
      "epoch": 0.08172531214528944,
      "grad_norm": 0.988920035496659,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.4818,
      "step": 27
    },
    {
      "epoch": 0.08475217555807794,
      "grad_norm": 0.9670632819551506,
      "learning_rate": 8.484848484848486e-06,
      "loss": 0.5073,
      "step": 28
    },
    {
      "epoch": 0.08777903897086645,
      "grad_norm": 0.8924722926760835,
      "learning_rate": 8.787878787878788e-06,
      "loss": 0.4783,
      "step": 29
    },
    {
      "epoch": 0.09080590238365494,
      "grad_norm": 1.0464942369787262,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5139,
      "step": 30
    },
    {
      "epoch": 0.09383276579644344,
      "grad_norm": 0.899009938189217,
      "learning_rate": 9.393939393939396e-06,
      "loss": 0.4871,
      "step": 31
    },
    {
      "epoch": 0.09685962920923194,
      "grad_norm": 0.9355323632271628,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.4829,
      "step": 32
    },
    {
      "epoch": 0.09988649262202043,
      "grad_norm": 0.8667180999417167,
      "learning_rate": 1e-05,
      "loss": 0.4694,
      "step": 33
    },
    {
      "epoch": 0.10291335603480893,
      "grad_norm": 0.9646276471726137,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 0.4956,
      "step": 34
    },
    {
      "epoch": 0.10594021944759743,
      "grad_norm": 0.894710782226242,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 0.4815,
      "step": 35
    },
    {
      "epoch": 0.10896708286038592,
      "grad_norm": 0.9144901998449186,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5131,
      "step": 36
    },
    {
      "epoch": 0.11199394627317442,
      "grad_norm": 0.9980140591786194,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 0.507,
      "step": 37
    },
    {
      "epoch": 0.11502080968596293,
      "grad_norm": 0.8537135855690867,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 0.4722,
      "step": 38
    },
    {
      "epoch": 0.11804767309875142,
      "grad_norm": 0.8468653854745745,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5081,
      "step": 39
    },
    {
      "epoch": 0.12107453651153992,
      "grad_norm": 0.8646512841767032,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.4666,
      "step": 40
    },
    {
      "epoch": 0.12410139992432842,
      "grad_norm": 0.9327466910002606,
      "learning_rate": 1.2424242424242425e-05,
      "loss": 0.4639,
      "step": 41
    },
    {
      "epoch": 0.1271282633371169,
      "grad_norm": 0.925137481295361,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.4597,
      "step": 42
    },
    {
      "epoch": 0.1301551267499054,
      "grad_norm": 0.8850637498590468,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 0.4911,
      "step": 43
    },
    {
      "epoch": 0.13318199016269391,
      "grad_norm": 0.9155011433413445,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4892,
      "step": 44
    },
    {
      "epoch": 0.1362088535754824,
      "grad_norm": 0.8638104778338866,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.4545,
      "step": 45
    },
    {
      "epoch": 0.1392357169882709,
      "grad_norm": 0.9075107686348386,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 0.4851,
      "step": 46
    },
    {
      "epoch": 0.1422625804010594,
      "grad_norm": 0.960685521555281,
      "learning_rate": 1.4242424242424245e-05,
      "loss": 0.4643,
      "step": 47
    },
    {
      "epoch": 0.1452894438138479,
      "grad_norm": 0.9707385662399042,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.4773,
      "step": 48
    },
    {
      "epoch": 0.14831630722663638,
      "grad_norm": 0.9926311987676993,
      "learning_rate": 1.484848484848485e-05,
      "loss": 0.4866,
      "step": 49
    },
    {
      "epoch": 0.1513431706394249,
      "grad_norm": 1.0779151883172813,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.4946,
      "step": 50
    },
    {
      "epoch": 0.1543700340522134,
      "grad_norm": 0.9930943210936318,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.4917,
      "step": 51
    },
    {
      "epoch": 0.15739689746500188,
      "grad_norm": 0.9649781985203033,
      "learning_rate": 1.575757575757576e-05,
      "loss": 0.4625,
      "step": 52
    },
    {
      "epoch": 0.1604237608777904,
      "grad_norm": 1.0979714118040722,
      "learning_rate": 1.606060606060606e-05,
      "loss": 0.4943,
      "step": 53
    },
    {
      "epoch": 0.16345062429057888,
      "grad_norm": 0.9352479565324687,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4744,
      "step": 54
    },
    {
      "epoch": 0.16647748770336737,
      "grad_norm": 0.8399589894060994,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.4465,
      "step": 55
    },
    {
      "epoch": 0.1695043511161559,
      "grad_norm": 0.8733760142688252,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.5041,
      "step": 56
    },
    {
      "epoch": 0.17253121452894438,
      "grad_norm": 1.029513199786962,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.506,
      "step": 57
    },
    {
      "epoch": 0.1755580779417329,
      "grad_norm": 0.9120620033143617,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 0.5042,
      "step": 58
    },
    {
      "epoch": 0.17858494135452138,
      "grad_norm": 0.978353958073019,
      "learning_rate": 1.787878787878788e-05,
      "loss": 0.4951,
      "step": 59
    },
    {
      "epoch": 0.18161180476730987,
      "grad_norm": 1.0592658860036697,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.491,
      "step": 60
    },
    {
      "epoch": 0.1846386681800984,
      "grad_norm": 1.037197308373934,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 0.4817,
      "step": 61
    },
    {
      "epoch": 0.18766553159288688,
      "grad_norm": 1.1230553305690367,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 0.4938,
      "step": 62
    },
    {
      "epoch": 0.19069239500567536,
      "grad_norm": 0.9055731792554766,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4712,
      "step": 63
    },
    {
      "epoch": 0.19371925841846388,
      "grad_norm": 1.0669310273644705,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.481,
      "step": 64
    },
    {
      "epoch": 0.19674612183125237,
      "grad_norm": 0.8951824930846698,
      "learning_rate": 1.96969696969697e-05,
      "loss": 0.4626,
      "step": 65
    },
    {
      "epoch": 0.19977298524404086,
      "grad_norm": 0.8700267036148249,
      "learning_rate": 2e-05,
      "loss": 0.4563,
      "step": 66
    },
    {
      "epoch": 0.20279984865682937,
      "grad_norm": 0.9541105398406964,
      "learning_rate": 1.9999860139251737e-05,
      "loss": 0.4824,
      "step": 67
    },
    {
      "epoch": 0.20582671206961786,
      "grad_norm": 0.8584759999204346,
      "learning_rate": 1.9999440560919153e-05,
      "loss": 0.467,
      "step": 68
    },
    {
      "epoch": 0.20885357548240635,
      "grad_norm": 0.9741080463779722,
      "learning_rate": 1.9998741276738753e-05,
      "loss": 0.4811,
      "step": 69
    },
    {
      "epoch": 0.21188043889519487,
      "grad_norm": 0.9021808835478438,
      "learning_rate": 1.999776230627102e-05,
      "loss": 0.4748,
      "step": 70
    },
    {
      "epoch": 0.21490730230798336,
      "grad_norm": 0.9305503804577565,
      "learning_rate": 1.9996503676899863e-05,
      "loss": 0.4844,
      "step": 71
    },
    {
      "epoch": 0.21793416572077184,
      "grad_norm": 0.994363580189972,
      "learning_rate": 1.9994965423831853e-05,
      "loss": 0.48,
      "step": 72
    },
    {
      "epoch": 0.22096102913356036,
      "grad_norm": 0.8870850580834687,
      "learning_rate": 1.9993147590095232e-05,
      "loss": 0.4797,
      "step": 73
    },
    {
      "epoch": 0.22398789254634885,
      "grad_norm": 0.8905020698570876,
      "learning_rate": 1.999105022653872e-05,
      "loss": 0.47,
      "step": 74
    },
    {
      "epoch": 0.22701475595913734,
      "grad_norm": 0.9158534013606188,
      "learning_rate": 1.9988673391830082e-05,
      "loss": 0.4893,
      "step": 75
    },
    {
      "epoch": 0.23004161937192585,
      "grad_norm": 0.8750476523807099,
      "learning_rate": 1.9986017152454497e-05,
      "loss": 0.4787,
      "step": 76
    },
    {
      "epoch": 0.23306848278471434,
      "grad_norm": 0.937870383089102,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.4638,
      "step": 77
    },
    {
      "epoch": 0.23609534619750283,
      "grad_norm": 0.8507339840877179,
      "learning_rate": 1.9979866764718846e-05,
      "loss": 0.4459,
      "step": 78
    },
    {
      "epoch": 0.23912220961029135,
      "grad_norm": 0.9739601029755169,
      "learning_rate": 1.997637278839835e-05,
      "loss": 0.4722,
      "step": 79
    },
    {
      "epoch": 0.24214907302307984,
      "grad_norm": 0.8229530979222659,
      "learning_rate": 1.9972599751485225e-05,
      "loss": 0.4473,
      "step": 80
    },
    {
      "epoch": 0.24517593643586832,
      "grad_norm": 1.0167556493449679,
      "learning_rate": 1.9968547759519426e-05,
      "loss": 0.4688,
      "step": 81
    },
    {
      "epoch": 0.24820279984865684,
      "grad_norm": 0.9565038121513104,
      "learning_rate": 1.9964216925843876e-05,
      "loss": 0.4832,
      "step": 82
    },
    {
      "epoch": 0.2512296632614453,
      "grad_norm": 0.8323632438514111,
      "learning_rate": 1.9959607371601303e-05,
      "loss": 0.4444,
      "step": 83
    },
    {
      "epoch": 0.2542565266742338,
      "grad_norm": 1.013176239563786,
      "learning_rate": 1.9954719225730847e-05,
      "loss": 0.4668,
      "step": 84
    },
    {
      "epoch": 0.25728339008702233,
      "grad_norm": 0.8740547627329837,
      "learning_rate": 1.994955262496446e-05,
      "loss": 0.4913,
      "step": 85
    },
    {
      "epoch": 0.2603102534998108,
      "grad_norm": 0.895480207542129,
      "learning_rate": 1.9944107713823068e-05,
      "loss": 0.4713,
      "step": 86
    },
    {
      "epoch": 0.2633371169125993,
      "grad_norm": 0.8801376054145112,
      "learning_rate": 1.9938384644612542e-05,
      "loss": 0.4688,
      "step": 87
    },
    {
      "epoch": 0.26636398032538783,
      "grad_norm": 0.9743625141179318,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.4717,
      "step": 88
    },
    {
      "epoch": 0.2693908437381763,
      "grad_norm": 1.1371030984694044,
      "learning_rate": 1.9926104680106484e-05,
      "loss": 0.4795,
      "step": 89
    },
    {
      "epoch": 0.2724177071509648,
      "grad_norm": 0.840713640167593,
      "learning_rate": 1.9919548128307954e-05,
      "loss": 0.4757,
      "step": 90
    },
    {
      "epoch": 0.2754445705637533,
      "grad_norm": 0.9551337851479497,
      "learning_rate": 1.9912714105424694e-05,
      "loss": 0.4585,
      "step": 91
    },
    {
      "epoch": 0.2784714339765418,
      "grad_norm": 1.0185211027080534,
      "learning_rate": 1.990560280261901e-05,
      "loss": 0.4885,
      "step": 92
    },
    {
      "epoch": 0.2814982973893303,
      "grad_norm": 1.027049016721632,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4338,
      "step": 93
    },
    {
      "epoch": 0.2845251608021188,
      "grad_norm": 0.9580916770351323,
      "learning_rate": 1.9890549160664633e-05,
      "loss": 0.4666,
      "step": 94
    },
    {
      "epoch": 0.2875520242149073,
      "grad_norm": 1.1222282915549917,
      "learning_rate": 1.9882607242598663e-05,
      "loss": 0.4668,
      "step": 95
    },
    {
      "epoch": 0.2905788876276958,
      "grad_norm": 0.9191834329973493,
      "learning_rate": 1.9874388886763944e-05,
      "loss": 0.4499,
      "step": 96
    },
    {
      "epoch": 0.2936057510404843,
      "grad_norm": 0.8698226745554651,
      "learning_rate": 1.9865894323045558e-05,
      "loss": 0.4835,
      "step": 97
    },
    {
      "epoch": 0.29663261445327277,
      "grad_norm": 1.3598009583025596,
      "learning_rate": 1.9857123789054707e-05,
      "loss": 0.4792,
      "step": 98
    },
    {
      "epoch": 0.2996594778660613,
      "grad_norm": 0.8506943195836284,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.4648,
      "step": 99
    },
    {
      "epoch": 0.3026863412788498,
      "grad_norm": 1.227984015211783,
      "learning_rate": 1.9838755799290993e-05,
      "loss": 0.4634,
      "step": 100
    },
    {
      "epoch": 0.30571320469163826,
      "grad_norm": 1.0092726577715512,
      "learning_rate": 1.9829158857310288e-05,
      "loss": 0.4792,
      "step": 101
    },
    {
      "epoch": 0.3087400681044268,
      "grad_norm": 0.9604765788895034,
      "learning_rate": 1.9819286972627066e-05,
      "loss": 0.4894,
      "step": 102
    },
    {
      "epoch": 0.3117669315172153,
      "grad_norm": 1.2462461474877684,
      "learning_rate": 1.9809140421379168e-05,
      "loss": 0.4841,
      "step": 103
    },
    {
      "epoch": 0.31479379493000376,
      "grad_norm": 1.0352197859224184,
      "learning_rate": 1.979871948738743e-05,
      "loss": 0.4563,
      "step": 104
    },
    {
      "epoch": 0.3178206583427923,
      "grad_norm": 0.9167812336916815,
      "learning_rate": 1.978802446214779e-05,
      "loss": 0.4596,
      "step": 105
    },
    {
      "epoch": 0.3208475217555808,
      "grad_norm": 0.8539429836107758,
      "learning_rate": 1.9777055644823087e-05,
      "loss": 0.4591,
      "step": 106
    },
    {
      "epoch": 0.32387438516836925,
      "grad_norm": 1.01179694338569,
      "learning_rate": 1.9765813342234726e-05,
      "loss": 0.4819,
      "step": 107
    },
    {
      "epoch": 0.32690124858115777,
      "grad_norm": 0.8208952592634129,
      "learning_rate": 1.9754297868854075e-05,
      "loss": 0.4637,
      "step": 108
    },
    {
      "epoch": 0.3299281119939463,
      "grad_norm": 0.98351372794355,
      "learning_rate": 1.9742509546793673e-05,
      "loss": 0.4478,
      "step": 109
    },
    {
      "epoch": 0.33295497540673474,
      "grad_norm": 0.7751901728196992,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.4663,
      "step": 110
    },
    {
      "epoch": 0.33598183881952326,
      "grad_norm": 0.8472625629933997,
      "learning_rate": 1.9718115683235418e-05,
      "loss": 0.4508,
      "step": 111
    },
    {
      "epoch": 0.3390087022323118,
      "grad_norm": 0.7829836426864378,
      "learning_rate": 1.970551082408636e-05,
      "loss": 0.4498,
      "step": 112
    },
    {
      "epoch": 0.3420355656451003,
      "grad_norm": 0.8141280950541406,
      "learning_rate": 1.969263448093608e-05,
      "loss": 0.4464,
      "step": 113
    },
    {
      "epoch": 0.34506242905788875,
      "grad_norm": 0.8776392964966433,
      "learning_rate": 1.9679487013963566e-05,
      "loss": 0.4623,
      "step": 114
    },
    {
      "epoch": 0.34808929247067727,
      "grad_norm": 0.8526833246856463,
      "learning_rate": 1.9666068790931733e-05,
      "loss": 0.4748,
      "step": 115
    },
    {
      "epoch": 0.3511161558834658,
      "grad_norm": 0.8828178190838567,
      "learning_rate": 1.9652380187177128e-05,
      "loss": 0.4635,
      "step": 116
    },
    {
      "epoch": 0.35414301929625425,
      "grad_norm": 0.9025241258658163,
      "learning_rate": 1.9638421585599422e-05,
      "loss": 0.4769,
      "step": 117
    },
    {
      "epoch": 0.35716988270904276,
      "grad_norm": 0.8501338237756111,
      "learning_rate": 1.9624193376650708e-05,
      "loss": 0.4557,
      "step": 118
    },
    {
      "epoch": 0.3601967461218313,
      "grad_norm": 0.9500429777041476,
      "learning_rate": 1.960969595832457e-05,
      "loss": 0.4591,
      "step": 119
    },
    {
      "epoch": 0.36322360953461974,
      "grad_norm": 0.8364296439579625,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4729,
      "step": 120
    },
    {
      "epoch": 0.36625047294740826,
      "grad_norm": 0.823521629755321,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.4476,
      "step": 121
    },
    {
      "epoch": 0.3692773363601968,
      "grad_norm": 0.8949188369225515,
      "learning_rate": 1.956459253990476e-05,
      "loss": 0.4622,
      "step": 122
    },
    {
      "epoch": 0.37230419977298523,
      "grad_norm": 0.8156250418957853,
      "learning_rate": 1.9549022414440738e-05,
      "loss": 0.469,
      "step": 123
    },
    {
      "epoch": 0.37533106318577375,
      "grad_norm": 0.9989690435523535,
      "learning_rate": 1.9533185182292705e-05,
      "loss": 0.4666,
      "step": 124
    },
    {
      "epoch": 0.37835792659856227,
      "grad_norm": 0.8376945378854798,
      "learning_rate": 1.9517081286462082e-05,
      "loss": 0.4717,
      "step": 125
    },
    {
      "epoch": 0.3813847900113507,
      "grad_norm": 0.9369698932376678,
      "learning_rate": 1.9500711177409456e-05,
      "loss": 0.4711,
      "step": 126
    },
    {
      "epoch": 0.38441165342413924,
      "grad_norm": 0.9056128516644173,
      "learning_rate": 1.9484075313041968e-05,
      "loss": 0.4601,
      "step": 127
    },
    {
      "epoch": 0.38743851683692776,
      "grad_norm": 0.8207081182094544,
      "learning_rate": 1.9467174158700507e-05,
      "loss": 0.4602,
      "step": 128
    },
    {
      "epoch": 0.3904653802497162,
      "grad_norm": 0.9846163287075285,
      "learning_rate": 1.9450008187146685e-05,
      "loss": 0.4412,
      "step": 129
    },
    {
      "epoch": 0.39349224366250474,
      "grad_norm": 0.8771774346973058,
      "learning_rate": 1.9432577878549635e-05,
      "loss": 0.4703,
      "step": 130
    },
    {
      "epoch": 0.39651910707529325,
      "grad_norm": 0.8670421805140147,
      "learning_rate": 1.9414883720472557e-05,
      "loss": 0.4649,
      "step": 131
    },
    {
      "epoch": 0.3995459704880817,
      "grad_norm": 0.8582373912287916,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.4752,
      "step": 132
    },
    {
      "epoch": 0.40257283390087023,
      "grad_norm": 0.97590006260324,
      "learning_rate": 1.937870584301945e-05,
      "loss": 0.473,
      "step": 133
    },
    {
      "epoch": 0.40559969731365875,
      "grad_norm": 0.8210848567366091,
      "learning_rate": 1.9360223135616423e-05,
      "loss": 0.467,
      "step": 134
    },
    {
      "epoch": 0.4086265607264472,
      "grad_norm": 1.0416938932849331,
      "learning_rate": 1.9341478602651068e-05,
      "loss": 0.4733,
      "step": 135
    },
    {
      "epoch": 0.4116534241392357,
      "grad_norm": 0.7321649496422883,
      "learning_rate": 1.932247276844826e-05,
      "loss": 0.4308,
      "step": 136
    },
    {
      "epoch": 0.41468028755202424,
      "grad_norm": 0.9088724515580408,
      "learning_rate": 1.9303206164642037e-05,
      "loss": 0.463,
      "step": 137
    },
    {
      "epoch": 0.4177071509648127,
      "grad_norm": 0.8014920281084885,
      "learning_rate": 1.9283679330160726e-05,
      "loss": 0.4597,
      "step": 138
    },
    {
      "epoch": 0.4207340143776012,
      "grad_norm": 0.9147880071075603,
      "learning_rate": 1.9263892811211865e-05,
      "loss": 0.4574,
      "step": 139
    },
    {
      "epoch": 0.42376087779038973,
      "grad_norm": 0.7668955269191813,
      "learning_rate": 1.9243847161266924e-05,
      "loss": 0.4517,
      "step": 140
    },
    {
      "epoch": 0.4267877412031782,
      "grad_norm": 0.9054622580780235,
      "learning_rate": 1.9223542941045817e-05,
      "loss": 0.4637,
      "step": 141
    },
    {
      "epoch": 0.4298146046159667,
      "grad_norm": 0.8529521919275123,
      "learning_rate": 1.920298071850123e-05,
      "loss": 0.4509,
      "step": 142
    },
    {
      "epoch": 0.4328414680287552,
      "grad_norm": 0.8313332472341063,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.4223,
      "step": 143
    },
    {
      "epoch": 0.4358683314415437,
      "grad_norm": 0.8678013973083016,
      "learning_rate": 1.9161084574320696e-05,
      "loss": 0.4552,
      "step": 144
    },
    {
      "epoch": 0.4388951948543322,
      "grad_norm": 0.8306481729754955,
      "learning_rate": 1.913975182460996e-05,
      "loss": 0.4528,
      "step": 145
    },
    {
      "epoch": 0.4419220582671207,
      "grad_norm": 0.7905119162082933,
      "learning_rate": 1.9118163416393392e-05,
      "loss": 0.456,
      "step": 146
    },
    {
      "epoch": 0.4449489216799092,
      "grad_norm": 0.8533900197119348,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.4519,
      "step": 147
    },
    {
      "epoch": 0.4479757850926977,
      "grad_norm": 0.808323929953835,
      "learning_rate": 1.9074222047073945e-05,
      "loss": 0.4744,
      "step": 148
    },
    {
      "epoch": 0.4510026485054862,
      "grad_norm": 0.9703068755267475,
      "learning_rate": 1.9051870315105626e-05,
      "loss": 0.4728,
      "step": 149
    },
    {
      "epoch": 0.4540295119182747,
      "grad_norm": 0.8511396826229641,
      "learning_rate": 1.9029265382866216e-05,
      "loss": 0.4642,
      "step": 150
    },
    {
      "epoch": 0.4570563753310632,
      "grad_norm": 0.9028572337726332,
      "learning_rate": 1.9006407882664256e-05,
      "loss": 0.4347,
      "step": 151
    },
    {
      "epoch": 0.4600832387438517,
      "grad_norm": 0.9111011901342507,
      "learning_rate": 1.8983298453873172e-05,
      "loss": 0.4521,
      "step": 152
    },
    {
      "epoch": 0.46311010215664017,
      "grad_norm": 0.9828422465088805,
      "learning_rate": 1.895993774291336e-05,
      "loss": 0.4632,
      "step": 153
    },
    {
      "epoch": 0.4661369655694287,
      "grad_norm": 0.7252721012263067,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.4351,
      "step": 154
    },
    {
      "epoch": 0.4691638289822172,
      "grad_norm": 0.9439245973180166,
      "learning_rate": 1.891246509529539e-05,
      "loss": 0.4621,
      "step": 155
    },
    {
      "epoch": 0.47219069239500566,
      "grad_norm": 0.8011462306286947,
      "learning_rate": 1.8888354486549238e-05,
      "loss": 0.4634,
      "step": 156
    },
    {
      "epoch": 0.4752175558077942,
      "grad_norm": 0.8441591333711804,
      "learning_rate": 1.886399525142122e-05,
      "loss": 0.4495,
      "step": 157
    },
    {
      "epoch": 0.4782444192205827,
      "grad_norm": 0.8218002513163581,
      "learning_rate": 1.8839388071291506e-05,
      "loss": 0.4482,
      "step": 158
    },
    {
      "epoch": 0.48127128263337116,
      "grad_norm": 0.7758566060258302,
      "learning_rate": 1.881453363447582e-05,
      "loss": 0.4504,
      "step": 159
    },
    {
      "epoch": 0.48429814604615967,
      "grad_norm": 0.7810909589269536,
      "learning_rate": 1.8789432636206197e-05,
      "loss": 0.4281,
      "step": 160
    },
    {
      "epoch": 0.4873250094589482,
      "grad_norm": 0.8639112995064352,
      "learning_rate": 1.8764085778611507e-05,
      "loss": 0.4612,
      "step": 161
    },
    {
      "epoch": 0.49035187287173665,
      "grad_norm": 0.7873206618367643,
      "learning_rate": 1.873849377069785e-05,
      "loss": 0.4367,
      "step": 162
    },
    {
      "epoch": 0.49337873628452517,
      "grad_norm": 0.7954108515704802,
      "learning_rate": 1.87126573283287e-05,
      "loss": 0.4469,
      "step": 163
    },
    {
      "epoch": 0.4964055996973137,
      "grad_norm": 0.7909641050699863,
      "learning_rate": 1.8686577174204887e-05,
      "loss": 0.4656,
      "step": 164
    },
    {
      "epoch": 0.49943246311010214,
      "grad_norm": 0.7559529381563198,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.4597,
      "step": 165
    },
    {
      "epoch": 0.5024593265228906,
      "grad_norm": 0.7977246999908352,
      "learning_rate": 1.863368865556191e-05,
      "loss": 0.4608,
      "step": 166
    },
    {
      "epoch": 0.5054861899356792,
      "grad_norm": 0.7747875059538782,
      "learning_rate": 1.8606881770448305e-05,
      "loss": 0.4425,
      "step": 167
    },
    {
      "epoch": 0.5085130533484676,
      "grad_norm": 0.8186671479985753,
      "learning_rate": 1.8579834132349773e-05,
      "loss": 0.4501,
      "step": 168
    },
    {
      "epoch": 0.5115399167612561,
      "grad_norm": 0.7913498246423268,
      "learning_rate": 1.8552546497846893e-05,
      "loss": 0.4385,
      "step": 169
    },
    {
      "epoch": 0.5145667801740447,
      "grad_norm": 0.7922245207341021,
      "learning_rate": 1.8525019630233463e-05,
      "loss": 0.4715,
      "step": 170
    },
    {
      "epoch": 0.5175936435868331,
      "grad_norm": 0.8173339816814198,
      "learning_rate": 1.8497254299495147e-05,
      "loss": 0.4322,
      "step": 171
    },
    {
      "epoch": 0.5206205069996216,
      "grad_norm": 0.7846047182772484,
      "learning_rate": 1.8469251282287925e-05,
      "loss": 0.4532,
      "step": 172
    },
    {
      "epoch": 0.5236473704124102,
      "grad_norm": 0.8082443855275164,
      "learning_rate": 1.8441011361916387e-05,
      "loss": 0.4427,
      "step": 173
    },
    {
      "epoch": 0.5266742338251986,
      "grad_norm": 0.8686412708736564,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4377,
      "step": 174
    },
    {
      "epoch": 0.5297010972379871,
      "grad_norm": 0.7656408070360102,
      "learning_rate": 1.8383823978010077e-05,
      "loss": 0.4632,
      "step": 175
    },
    {
      "epoch": 0.5327279606507757,
      "grad_norm": 0.7754106766463631,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.4221,
      "step": 176
    },
    {
      "epoch": 0.5357548240635641,
      "grad_norm": 0.7594400391558458,
      "learning_rate": 1.8325698546347714e-05,
      "loss": 0.4492,
      "step": 177
    },
    {
      "epoch": 0.5387816874763526,
      "grad_norm": 0.8543676943589079,
      "learning_rate": 1.8296286090880362e-05,
      "loss": 0.4455,
      "step": 178
    },
    {
      "epoch": 0.5418085508891411,
      "grad_norm": 0.7187011555633529,
      "learning_rate": 1.8266641570456915e-05,
      "loss": 0.4393,
      "step": 179
    },
    {
      "epoch": 0.5448354143019296,
      "grad_norm": 0.7562560344545153,
      "learning_rate": 1.8236765814298328e-05,
      "loss": 0.4539,
      "step": 180
    },
    {
      "epoch": 0.5478622777147181,
      "grad_norm": 0.8302478215117404,
      "learning_rate": 1.820665965809373e-05,
      "loss": 0.4448,
      "step": 181
    },
    {
      "epoch": 0.5508891411275066,
      "grad_norm": 0.7705023152683942,
      "learning_rate": 1.8176323943977034e-05,
      "loss": 0.4384,
      "step": 182
    },
    {
      "epoch": 0.5539160045402951,
      "grad_norm": 0.8702415564775692,
      "learning_rate": 1.814575952050336e-05,
      "loss": 0.445,
      "step": 183
    },
    {
      "epoch": 0.5569428679530836,
      "grad_norm": 0.7443062541841527,
      "learning_rate": 1.8114967242625342e-05,
      "loss": 0.4409,
      "step": 184
    },
    {
      "epoch": 0.5599697313658721,
      "grad_norm": 0.7808533638919993,
      "learning_rate": 1.808394797166919e-05,
      "loss": 0.4466,
      "step": 185
    },
    {
      "epoch": 0.5629965947786606,
      "grad_norm": 0.780132221914014,
      "learning_rate": 1.8052702575310588e-05,
      "loss": 0.4286,
      "step": 186
    },
    {
      "epoch": 0.5660234581914491,
      "grad_norm": 0.8340939662946734,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.4413,
      "step": 187
    },
    {
      "epoch": 0.5690503216042376,
      "grad_norm": 0.7475644022293316,
      "learning_rate": 1.7989536908690413e-05,
      "loss": 0.4207,
      "step": 188
    },
    {
      "epoch": 0.5720771850170261,
      "grad_norm": 0.8873261488159166,
      "learning_rate": 1.7957618405308323e-05,
      "loss": 0.4468,
      "step": 189
    },
    {
      "epoch": 0.5751040484298146,
      "grad_norm": 0.6915412291902805,
      "learning_rate": 1.792547731023332e-05,
      "loss": 0.4134,
      "step": 190
    },
    {
      "epoch": 0.5781309118426031,
      "grad_norm": 0.7477004872730207,
      "learning_rate": 1.789311452252092e-05,
      "loss": 0.426,
      "step": 191
    },
    {
      "epoch": 0.5811577752553916,
      "grad_norm": 0.7843693086188022,
      "learning_rate": 1.7860530947427878e-05,
      "loss": 0.4314,
      "step": 192
    },
    {
      "epoch": 0.58418463866818,
      "grad_norm": 0.7725251774669426,
      "learning_rate": 1.782772749638682e-05,
      "loss": 0.4341,
      "step": 193
    },
    {
      "epoch": 0.5872115020809686,
      "grad_norm": 0.8617102201098283,
      "learning_rate": 1.779470508698079e-05,
      "loss": 0.4423,
      "step": 194
    },
    {
      "epoch": 0.5902383654937571,
      "grad_norm": 0.812619220356799,
      "learning_rate": 1.776146464291757e-05,
      "loss": 0.4338,
      "step": 195
    },
    {
      "epoch": 0.5932652289065455,
      "grad_norm": 0.8655244857076875,
      "learning_rate": 1.772800709400383e-05,
      "loss": 0.4593,
      "step": 196
    },
    {
      "epoch": 0.5962920923193341,
      "grad_norm": 0.7471758222242877,
      "learning_rate": 1.7694333376119144e-05,
      "loss": 0.4295,
      "step": 197
    },
    {
      "epoch": 0.5993189557321226,
      "grad_norm": 0.7898097866810851,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4401,
      "step": 198
    },
    {
      "epoch": 0.602345819144911,
      "grad_norm": 0.7274314257409898,
      "learning_rate": 1.762634120716238e-05,
      "loss": 0.4385,
      "step": 199
    },
    {
      "epoch": 0.6053726825576996,
      "grad_norm": 0.7609899668854222,
      "learning_rate": 1.7592024657977432e-05,
      "loss": 0.452,
      "step": 200
    },
    {
      "epoch": 0.6083995459704881,
      "grad_norm": 0.7609792156485585,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4424,
      "step": 201
    },
    {
      "epoch": 0.6114264093832765,
      "grad_norm": 0.7901020762714905,
      "learning_rate": 1.75227554297058e-05,
      "loss": 0.4446,
      "step": 202
    },
    {
      "epoch": 0.6144532727960651,
      "grad_norm": 0.7287220424323192,
      "learning_rate": 1.7487804688228327e-05,
      "loss": 0.4505,
      "step": 203
    },
    {
      "epoch": 0.6174801362088536,
      "grad_norm": 0.7425917367537703,
      "learning_rate": 1.745264449675755e-05,
      "loss": 0.444,
      "step": 204
    },
    {
      "epoch": 0.620506999621642,
      "grad_norm": 0.7785518297776105,
      "learning_rate": 1.7417275838799596e-05,
      "loss": 0.4488,
      "step": 205
    },
    {
      "epoch": 0.6235338630344306,
      "grad_norm": 0.7232707435063455,
      "learning_rate": 1.7381699703691866e-05,
      "loss": 0.4403,
      "step": 206
    },
    {
      "epoch": 0.626560726447219,
      "grad_norm": 0.8205081714061198,
      "learning_rate": 1.734591708657533e-05,
      "loss": 0.4175,
      "step": 207
    },
    {
      "epoch": 0.6295875898600075,
      "grad_norm": 0.7393984140559844,
      "learning_rate": 1.730992898836672e-05,
      "loss": 0.4346,
      "step": 208
    },
    {
      "epoch": 0.6326144532727961,
      "grad_norm": 0.7448665743170344,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.4252,
      "step": 209
    },
    {
      "epoch": 0.6356413166855845,
      "grad_norm": 0.7398638920730205,
      "learning_rate": 1.72373403810507e-05,
      "loss": 0.461,
      "step": 210
    },
    {
      "epoch": 0.638668180098373,
      "grad_norm": 0.7163303690335373,
      "learning_rate": 1.720074190240269e-05,
      "loss": 0.4372,
      "step": 211
    },
    {
      "epoch": 0.6416950435111616,
      "grad_norm": 0.8282356636189698,
      "learning_rate": 1.7163942003524574e-05,
      "loss": 0.4516,
      "step": 212
    },
    {
      "epoch": 0.64472190692395,
      "grad_norm": 0.7466563059507493,
      "learning_rate": 1.7126941713788633e-05,
      "loss": 0.457,
      "step": 213
    },
    {
      "epoch": 0.6477487703367385,
      "grad_norm": 0.9553636132484358,
      "learning_rate": 1.70897420681725e-05,
      "loss": 0.4341,
      "step": 214
    },
    {
      "epoch": 0.6507756337495271,
      "grad_norm": 0.7863173922892727,
      "learning_rate": 1.7052344107230244e-05,
      "loss": 0.4299,
      "step": 215
    },
    {
      "epoch": 0.6538024971623155,
      "grad_norm": 0.9602196901334236,
      "learning_rate": 1.7014748877063212e-05,
      "loss": 0.4489,
      "step": 216
    },
    {
      "epoch": 0.656829360575104,
      "grad_norm": 0.7531015281248331,
      "learning_rate": 1.697695742929082e-05,
      "loss": 0.4258,
      "step": 217
    },
    {
      "epoch": 0.6598562239878926,
      "grad_norm": 0.8319469104866558,
      "learning_rate": 1.693897082102109e-05,
      "loss": 0.4274,
      "step": 218
    },
    {
      "epoch": 0.662883087400681,
      "grad_norm": 0.7526825441858571,
      "learning_rate": 1.6900790114821122e-05,
      "loss": 0.4332,
      "step": 219
    },
    {
      "epoch": 0.6659099508134695,
      "grad_norm": 0.7323843442326259,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.4179,
      "step": 220
    },
    {
      "epoch": 0.6689368142262581,
      "grad_norm": 0.804469185879843,
      "learning_rate": 1.682385068601563e-05,
      "loss": 0.4312,
      "step": 221
    },
    {
      "epoch": 0.6719636776390465,
      "grad_norm": 0.7669291115008096,
      "learning_rate": 1.6785094115571323e-05,
      "loss": 0.4131,
      "step": 222
    },
    {
      "epoch": 0.6749905410518351,
      "grad_norm": 0.7755273822685601,
      "learning_rate": 1.674614775145901e-05,
      "loss": 0.435,
      "step": 223
    },
    {
      "epoch": 0.6780174044646236,
      "grad_norm": 0.8398095320173529,
      "learning_rate": 1.670701268309221e-05,
      "loss": 0.4225,
      "step": 224
    },
    {
      "epoch": 0.681044267877412,
      "grad_norm": 0.722043863118035,
      "learning_rate": 1.666769000516292e-05,
      "loss": 0.4333,
      "step": 225
    },
    {
      "epoch": 0.6840711312902006,
      "grad_norm": 0.9029354872383214,
      "learning_rate": 1.6628180817610963e-05,
      "loss": 0.4259,
      "step": 226
    },
    {
      "epoch": 0.687097994702989,
      "grad_norm": 0.7117467937620462,
      "learning_rate": 1.658848622559325e-05,
      "loss": 0.4128,
      "step": 227
    },
    {
      "epoch": 0.6901248581157775,
      "grad_norm": 0.978293173561035,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4337,
      "step": 228
    },
    {
      "epoch": 0.6931517215285661,
      "grad_norm": 0.8211155470921585,
      "learning_rate": 1.6508545274687936e-05,
      "loss": 0.4443,
      "step": 229
    },
    {
      "epoch": 0.6961785849413545,
      "grad_norm": 0.8367961657083118,
      "learning_rate": 1.6468301151920576e-05,
      "loss": 0.4162,
      "step": 230
    },
    {
      "epoch": 0.699205448354143,
      "grad_norm": 0.8872631962269948,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.4439,
      "step": 231
    },
    {
      "epoch": 0.7022323117669316,
      "grad_norm": 0.7806948001083459,
      "learning_rate": 1.6387271240298082e-05,
      "loss": 0.4548,
      "step": 232
    },
    {
      "epoch": 0.70525917517972,
      "grad_norm": 0.90877273137771,
      "learning_rate": 1.6346487718023762e-05,
      "loss": 0.4465,
      "step": 233
    },
    {
      "epoch": 0.7082860385925085,
      "grad_norm": 0.7729732322096745,
      "learning_rate": 1.6305526670845225e-05,
      "loss": 0.4464,
      "step": 234
    },
    {
      "epoch": 0.7113129020052971,
      "grad_norm": 0.9247974158762133,
      "learning_rate": 1.6264389244531015e-05,
      "loss": 0.4196,
      "step": 235
    },
    {
      "epoch": 0.7143397654180855,
      "grad_norm": 0.7679544226082441,
      "learning_rate": 1.6223076589783368e-05,
      "loss": 0.433,
      "step": 236
    },
    {
      "epoch": 0.717366628830874,
      "grad_norm": 0.8025167758267522,
      "learning_rate": 1.6181589862206053e-05,
      "loss": 0.4557,
      "step": 237
    },
    {
      "epoch": 0.7203934922436626,
      "grad_norm": 0.8430018718413385,
      "learning_rate": 1.613993022227202e-05,
      "loss": 0.4826,
      "step": 238
    },
    {
      "epoch": 0.723420355656451,
      "grad_norm": 0.7036786364492591,
      "learning_rate": 1.6098098835290955e-05,
      "loss": 0.4082,
      "step": 239
    },
    {
      "epoch": 0.7264472190692395,
      "grad_norm": 0.8139194775992823,
      "learning_rate": 1.6056096871376667e-05,
      "loss": 0.4166,
      "step": 240
    },
    {
      "epoch": 0.729474082482028,
      "grad_norm": 0.7940945640115459,
      "learning_rate": 1.6013925505414386e-05,
      "loss": 0.4515,
      "step": 241
    },
    {
      "epoch": 0.7325009458948165,
      "grad_norm": 11.813112911774754,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.4408,
      "step": 242
    },
    {
      "epoch": 0.735527809307605,
      "grad_norm": 4.9486225172973155,
      "learning_rate": 1.5929079290546408e-05,
      "loss": 0.4457,
      "step": 243
    },
    {
      "epoch": 0.7385546727203935,
      "grad_norm": 0.9518023071731383,
      "learning_rate": 1.5886406814971728e-05,
      "loss": 0.451,
      "step": 244
    },
    {
      "epoch": 0.741581536133182,
      "grad_norm": 0.8009030035880538,
      "learning_rate": 1.584356968394471e-05,
      "loss": 0.4292,
      "step": 245
    },
    {
      "epoch": 0.7446083995459705,
      "grad_norm": 9.006259268865023,
      "learning_rate": 1.5800569095711983e-05,
      "loss": 0.47,
      "step": 246
    },
    {
      "epoch": 0.747635262958759,
      "grad_norm": 1.0629368859908026,
      "learning_rate": 1.575740625309244e-05,
      "loss": 0.4796,
      "step": 247
    },
    {
      "epoch": 0.7506621263715475,
      "grad_norm": 15.883092463968385,
      "learning_rate": 1.5714082363443576e-05,
      "loss": 0.4355,
      "step": 248
    },
    {
      "epoch": 0.753688989784336,
      "grad_norm": 0.8958288433788306,
      "learning_rate": 1.5670598638627707e-05,
      "loss": 0.4549,
      "step": 249
    },
    {
      "epoch": 0.7567158531971245,
      "grad_norm": 0.8042583317851038,
      "learning_rate": 1.5626956294978103e-05,
      "loss": 0.4301,
      "step": 250
    },
    {
      "epoch": 0.759742716609913,
      "grad_norm": 0.7828586887308195,
      "learning_rate": 1.5583156553264923e-05,
      "loss": 0.432,
      "step": 251
    },
    {
      "epoch": 0.7627695800227015,
      "grad_norm": 0.8603600018771175,
      "learning_rate": 1.5539200638661106e-05,
      "loss": 0.4588,
      "step": 252
    },
    {
      "epoch": 0.76579644343549,
      "grad_norm": 0.8014001258938823,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.458,
      "step": 253
    },
    {
      "epoch": 0.7688233068482785,
      "grad_norm": 0.7403770064840077,
      "learning_rate": 1.5450825213281317e-05,
      "loss": 0.4433,
      "step": 254
    },
    {
      "epoch": 0.771850170261067,
      "grad_norm": 0.8552046439270082,
      "learning_rate": 1.5406408174555978e-05,
      "loss": 0.4274,
      "step": 255
    },
    {
      "epoch": 0.7748770336738555,
      "grad_norm": 0.8212561343905351,
      "learning_rate": 1.5361839906972095e-05,
      "loss": 0.4219,
      "step": 256
    },
    {
      "epoch": 0.777903897086644,
      "grad_norm": 0.7846922073617575,
      "learning_rate": 1.531712165719992e-05,
      "loss": 0.4433,
      "step": 257
    },
    {
      "epoch": 0.7809307604994324,
      "grad_norm": 0.7279909114849502,
      "learning_rate": 1.5272254676105026e-05,
      "loss": 0.4142,
      "step": 258
    },
    {
      "epoch": 0.783957623912221,
      "grad_norm": 0.8354198525400404,
      "learning_rate": 1.5227240218713326e-05,
      "loss": 0.4413,
      "step": 259
    },
    {
      "epoch": 0.7869844873250095,
      "grad_norm": 0.7950284750919131,
      "learning_rate": 1.5182079544175957e-05,
      "loss": 0.4279,
      "step": 260
    },
    {
      "epoch": 0.7900113507377979,
      "grad_norm": 0.7459000933127606,
      "learning_rate": 1.5136773915734067e-05,
      "loss": 0.4331,
      "step": 261
    },
    {
      "epoch": 0.7930382141505865,
      "grad_norm": 0.7776451343286253,
      "learning_rate": 1.5091324600683472e-05,
      "loss": 0.4215,
      "step": 262
    },
    {
      "epoch": 0.796065077563375,
      "grad_norm": 0.8109873696758336,
      "learning_rate": 1.5045732870339213e-05,
      "loss": 0.4071,
      "step": 263
    },
    {
      "epoch": 0.7990919409761634,
      "grad_norm": 0.8305716846456163,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4471,
      "step": 264
    },
    {
      "epoch": 0.802118804388952,
      "grad_norm": 0.8263542192608007,
      "learning_rate": 1.4954127268912525e-05,
      "loss": 0.4237,
      "step": 265
    },
    {
      "epoch": 0.8051456678017405,
      "grad_norm": 0.8018779388088998,
      "learning_rate": 1.4908115960235683e-05,
      "loss": 0.4399,
      "step": 266
    },
    {
      "epoch": 0.8081725312145289,
      "grad_norm": 0.8467724070828737,
      "learning_rate": 1.4861967361004687e-05,
      "loss": 0.4489,
      "step": 267
    },
    {
      "epoch": 0.8111993946273175,
      "grad_norm": 0.73512921398508,
      "learning_rate": 1.4815682762095065e-05,
      "loss": 0.4091,
      "step": 268
    },
    {
      "epoch": 0.814226258040106,
      "grad_norm": 0.7671287240082943,
      "learning_rate": 1.476926345818654e-05,
      "loss": 0.449,
      "step": 269
    },
    {
      "epoch": 0.8172531214528944,
      "grad_norm": 0.8283498715195778,
      "learning_rate": 1.472271074772683e-05,
      "loss": 0.4272,
      "step": 270
    },
    {
      "epoch": 0.820279984865683,
      "grad_norm": 0.68137401967818,
      "learning_rate": 1.4676025932895315e-05,
      "loss": 0.4149,
      "step": 271
    },
    {
      "epoch": 0.8233068482784714,
      "grad_norm": 0.6954898349910011,
      "learning_rate": 1.4629210319566626e-05,
      "loss": 0.4216,
      "step": 272
    },
    {
      "epoch": 0.8263337116912599,
      "grad_norm": 0.858586878675773,
      "learning_rate": 1.4582265217274105e-05,
      "loss": 0.4283,
      "step": 273
    },
    {
      "epoch": 0.8293605751040485,
      "grad_norm": 0.7366977655257433,
      "learning_rate": 1.4535191939173179e-05,
      "loss": 0.4218,
      "step": 274
    },
    {
      "epoch": 0.8323874385168369,
      "grad_norm": 0.8455964285412637,
      "learning_rate": 1.4487991802004625e-05,
      "loss": 0.4135,
      "step": 275
    },
    {
      "epoch": 0.8354143019296254,
      "grad_norm": 0.712444886094419,
      "learning_rate": 1.4440666126057743e-05,
      "loss": 0.4355,
      "step": 276
    },
    {
      "epoch": 0.838441165342414,
      "grad_norm": 0.7717779848786924,
      "learning_rate": 1.4393216235133427e-05,
      "loss": 0.4278,
      "step": 277
    },
    {
      "epoch": 0.8414680287552024,
      "grad_norm": 0.7057545727349501,
      "learning_rate": 1.4345643456507126e-05,
      "loss": 0.4122,
      "step": 278
    },
    {
      "epoch": 0.8444948921679909,
      "grad_norm": 0.6738789957266199,
      "learning_rate": 1.4297949120891718e-05,
      "loss": 0.3988,
      "step": 279
    },
    {
      "epoch": 0.8475217555807795,
      "grad_norm": 0.7484537307643594,
      "learning_rate": 1.4250134562400301e-05,
      "loss": 0.4268,
      "step": 280
    },
    {
      "epoch": 0.8505486189935679,
      "grad_norm": 0.6836680948080401,
      "learning_rate": 1.4202201118508863e-05,
      "loss": 0.4216,
      "step": 281
    },
    {
      "epoch": 0.8535754824063564,
      "grad_norm": 0.7006225160892514,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.4211,
      "step": 282
    },
    {
      "epoch": 0.856602345819145,
      "grad_norm": 0.7722468328399257,
      "learning_rate": 1.4105982941019751e-05,
      "loss": 0.4547,
      "step": 283
    },
    {
      "epoch": 0.8596292092319334,
      "grad_norm": 0.7467697472195512,
      "learning_rate": 1.405770089885134e-05,
      "loss": 0.4351,
      "step": 284
    },
    {
      "epoch": 0.8626560726447219,
      "grad_norm": 0.7321769953046323,
      "learning_rate": 1.4009305354066138e-05,
      "loss": 0.4409,
      "step": 285
    },
    {
      "epoch": 0.8656829360575105,
      "grad_norm": 0.7123200085179694,
      "learning_rate": 1.396079766039157e-05,
      "loss": 0.4066,
      "step": 286
    },
    {
      "epoch": 0.8687097994702989,
      "grad_norm": 0.7153653927284432,
      "learning_rate": 1.39121791746921e-05,
      "loss": 0.4312,
      "step": 287
    },
    {
      "epoch": 0.8717366628830874,
      "grad_norm": 0.741268136443869,
      "learning_rate": 1.3863451256931286e-05,
      "loss": 0.3994,
      "step": 288
    },
    {
      "epoch": 0.874763526295876,
      "grad_norm": 0.7059241329455294,
      "learning_rate": 1.381461527013374e-05,
      "loss": 0.4157,
      "step": 289
    },
    {
      "epoch": 0.8777903897086644,
      "grad_norm": 0.8132308400940682,
      "learning_rate": 1.3765672580346986e-05,
      "loss": 0.4295,
      "step": 290
    },
    {
      "epoch": 0.8808172531214529,
      "grad_norm": 0.6844522247101862,
      "learning_rate": 1.3716624556603275e-05,
      "loss": 0.4367,
      "step": 291
    },
    {
      "epoch": 0.8838441165342414,
      "grad_norm": 0.7646780854084878,
      "learning_rate": 1.3667472570881264e-05,
      "loss": 0.4339,
      "step": 292
    },
    {
      "epoch": 0.8868709799470299,
      "grad_norm": 0.6961419768425183,
      "learning_rate": 1.361821799806765e-05,
      "loss": 0.4355,
      "step": 293
    },
    {
      "epoch": 0.8898978433598184,
      "grad_norm": 0.7076216348243808,
      "learning_rate": 1.356886221591872e-05,
      "loss": 0.4182,
      "step": 294
    },
    {
      "epoch": 0.8929247067726069,
      "grad_norm": 0.658453432924013,
      "learning_rate": 1.3519406605021797e-05,
      "loss": 0.4153,
      "step": 295
    },
    {
      "epoch": 0.8959515701853954,
      "grad_norm": 0.6959621108824917,
      "learning_rate": 1.3469852548756626e-05,
      "loss": 0.4132,
      "step": 296
    },
    {
      "epoch": 0.8989784335981839,
      "grad_norm": 0.7270145855005942,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4326,
      "step": 297
    },
    {
      "epoch": 0.9020052970109724,
      "grad_norm": 0.7150354935873843,
      "learning_rate": 1.3370454647370418e-05,
      "loss": 0.4491,
      "step": 298
    },
    {
      "epoch": 0.9050321604237609,
      "grad_norm": 0.7359095723013732,
      "learning_rate": 1.3320613582622354e-05,
      "loss": 0.4173,
      "step": 299
    },
    {
      "epoch": 0.9080590238365494,
      "grad_norm": 0.7201469095002386,
      "learning_rate": 1.3270679633174219e-05,
      "loss": 0.4439,
      "step": 300
    },
    {
      "epoch": 0.9110858872493379,
      "grad_norm": 0.7444102169762538,
      "learning_rate": 1.3220654195785917e-05,
      "loss": 0.4602,
      "step": 301
    },
    {
      "epoch": 0.9141127506621264,
      "grad_norm": 0.6863401765610452,
      "learning_rate": 1.3170538669776469e-05,
      "loss": 0.4364,
      "step": 302
    },
    {
      "epoch": 0.9171396140749148,
      "grad_norm": 0.7915563572196381,
      "learning_rate": 1.3120334456984871e-05,
      "loss": 0.4237,
      "step": 303
    },
    {
      "epoch": 0.9201664774877034,
      "grad_norm": 0.6849523873302381,
      "learning_rate": 1.3070042961730878e-05,
      "loss": 0.4241,
      "step": 304
    },
    {
      "epoch": 0.9231933409004919,
      "grad_norm": 0.6569270847011346,
      "learning_rate": 1.3019665590775717e-05,
      "loss": 0.3956,
      "step": 305
    },
    {
      "epoch": 0.9262202043132803,
      "grad_norm": 0.8195874261952539,
      "learning_rate": 1.296920375328275e-05,
      "loss": 0.4406,
      "step": 306
    },
    {
      "epoch": 0.9292470677260689,
      "grad_norm": 0.7039704196977338,
      "learning_rate": 1.2918658860778046e-05,
      "loss": 0.4165,
      "step": 307
    },
    {
      "epoch": 0.9322739311388574,
      "grad_norm": 0.8669137753589233,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.412,
      "step": 308
    },
    {
      "epoch": 0.9353007945516458,
      "grad_norm": 0.7340024772245138,
      "learning_rate": 1.2817325568414299e-05,
      "loss": 0.4353,
      "step": 309
    },
    {
      "epoch": 0.9383276579644344,
      "grad_norm": 0.7497556131673987,
      "learning_rate": 1.2766540003065272e-05,
      "loss": 0.4019,
      "step": 310
    },
    {
      "epoch": 0.9413545213772229,
      "grad_norm": 0.7639804389634129,
      "learning_rate": 1.2715677051645259e-05,
      "loss": 0.4241,
      "step": 311
    },
    {
      "epoch": 0.9443813847900113,
      "grad_norm": 0.6975292895746819,
      "learning_rate": 1.266473813690035e-05,
      "loss": 0.4208,
      "step": 312
    },
    {
      "epoch": 0.9474082482027999,
      "grad_norm": 0.787420761100616,
      "learning_rate": 1.2613724683701491e-05,
      "loss": 0.4209,
      "step": 313
    },
    {
      "epoch": 0.9504351116155884,
      "grad_norm": 0.731389997188442,
      "learning_rate": 1.2562638119004627e-05,
      "loss": 0.4297,
      "step": 314
    },
    {
      "epoch": 0.9534619750283768,
      "grad_norm": 0.7135765581176838,
      "learning_rate": 1.2511479871810792e-05,
      "loss": 0.4381,
      "step": 315
    },
    {
      "epoch": 0.9564888384411654,
      "grad_norm": 0.6870333536324249,
      "learning_rate": 1.2460251373126136e-05,
      "loss": 0.4049,
      "step": 316
    },
    {
      "epoch": 0.9595157018539539,
      "grad_norm": 0.6636912625994247,
      "learning_rate": 1.2408954055921884e-05,
      "loss": 0.4256,
      "step": 317
    },
    {
      "epoch": 0.9625425652667423,
      "grad_norm": 0.6964773990839926,
      "learning_rate": 1.2357589355094275e-05,
      "loss": 0.416,
      "step": 318
    },
    {
      "epoch": 0.9655694286795309,
      "grad_norm": 0.6758409819915321,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.4264,
      "step": 319
    },
    {
      "epoch": 0.9685962920923193,
      "grad_norm": 0.6631953804050803,
      "learning_rate": 1.2254663551538047e-05,
      "loss": 0.4057,
      "step": 320
    },
    {
      "epoch": 0.9716231555051078,
      "grad_norm": 0.7272860789154645,
      "learning_rate": 1.2203105327865407e-05,
      "loss": 0.4311,
      "step": 321
    },
    {
      "epoch": 0.9746500189178964,
      "grad_norm": 0.7063650008959375,
      "learning_rate": 1.215148547860084e-05,
      "loss": 0.427,
      "step": 322
    },
    {
      "epoch": 0.9776768823306848,
      "grad_norm": 0.6450036863535319,
      "learning_rate": 1.2099805447662485e-05,
      "loss": 0.3926,
      "step": 323
    },
    {
      "epoch": 0.9807037457434733,
      "grad_norm": 0.7026674576023975,
      "learning_rate": 1.2048066680651908e-05,
      "loss": 0.422,
      "step": 324
    },
    {
      "epoch": 0.9837306091562619,
      "grad_norm": 0.6436345012843648,
      "learning_rate": 1.1996270624813642e-05,
      "loss": 0.4226,
      "step": 325
    },
    {
      "epoch": 0.9867574725690503,
      "grad_norm": 0.6961886160652397,
      "learning_rate": 1.194441872899471e-05,
      "loss": 0.4134,
      "step": 326
    },
    {
      "epoch": 0.9897843359818388,
      "grad_norm": 0.6447841319960351,
      "learning_rate": 1.1892512443604103e-05,
      "loss": 0.402,
      "step": 327
    },
    {
      "epoch": 0.9928111993946274,
      "grad_norm": 0.6756297490836396,
      "learning_rate": 1.1840553220572204e-05,
      "loss": 0.4159,
      "step": 328
    },
    {
      "epoch": 0.9958380628074158,
      "grad_norm": 0.6949717794724873,
      "learning_rate": 1.1788542513310178e-05,
      "loss": 0.4356,
      "step": 329
    },
    {
      "epoch": 0.9988649262202043,
      "grad_norm": 0.7195133511776262,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4392,
      "step": 330
    },
    {
      "epoch": 1.0018917896329929,
      "grad_norm": 0.6763780280850585,
      "learning_rate": 1.1684372466900306e-05,
      "loss": 0.363,
      "step": 331
    },
    {
      "epoch": 1.0049186530457812,
      "grad_norm": 0.8257062195960014,
      "learning_rate": 1.1632216041612595e-05,
      "loss": 0.3127,
      "step": 332
    },
    {
      "epoch": 1.0079455164585698,
      "grad_norm": 0.7540209344945445,
      "learning_rate": 1.15800139597335e-05,
      "loss": 0.3199,
      "step": 333
    },
    {
      "epoch": 1.0109723798713584,
      "grad_norm": 0.7210943962860681,
      "learning_rate": 1.1527767681467472e-05,
      "loss": 0.3221,
      "step": 334
    },
    {
      "epoch": 1.0139992432841467,
      "grad_norm": 0.7239933012303479,
      "learning_rate": 1.1475478668255223e-05,
      "loss": 0.3203,
      "step": 335
    },
    {
      "epoch": 1.0170261066969353,
      "grad_norm": 0.7350007090490946,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.308,
      "step": 336
    },
    {
      "epoch": 1.0200529701097238,
      "grad_norm": 0.7934937292490568,
      "learning_rate": 1.1370778288690947e-05,
      "loss": 0.3089,
      "step": 337
    },
    {
      "epoch": 1.0230798335225122,
      "grad_norm": 0.8849446251067858,
      "learning_rate": 1.1318369851033604e-05,
      "loss": 0.3127,
      "step": 338
    },
    {
      "epoch": 1.0261066969353008,
      "grad_norm": 0.731792385877625,
      "learning_rate": 1.1265924535737494e-05,
      "loss": 0.3027,
      "step": 339
    },
    {
      "epoch": 1.0291335603480893,
      "grad_norm": 0.762262504327284,
      "learning_rate": 1.121344380981082e-05,
| "loss": 0.3219, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0321604237608777, | |
| "grad_norm": 0.7711490218158292, | |
| "learning_rate": 1.1160929141252303e-05, | |
| "loss": 0.3079, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.0351872871736663, | |
| "grad_norm": 0.9350175522772958, | |
| "learning_rate": 1.1108381999010111e-05, | |
| "loss": 0.3069, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.0382141505864548, | |
| "grad_norm": 0.7426494910114684, | |
| "learning_rate": 1.1055803852940772e-05, | |
| "loss": 0.3141, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.0412410139992432, | |
| "grad_norm": 0.8004676205505588, | |
| "learning_rate": 1.1003196173768051e-05, | |
| "loss": 0.2904, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.0442678774120318, | |
| "grad_norm": 0.834018083221305, | |
| "learning_rate": 1.0950560433041825e-05, | |
| "loss": 0.3109, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.0472947408248203, | |
| "grad_norm": 0.7479019853031549, | |
| "learning_rate": 1.0897898103096917e-05, | |
| "loss": 0.3139, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.0503216042376087, | |
| "grad_norm": 0.9502524535205459, | |
| "learning_rate": 1.0845210657011893e-05, | |
| "loss": 0.301, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.0533484676503972, | |
| "grad_norm": 0.8854562986516674, | |
| "learning_rate": 1.0792499568567885e-05, | |
| "loss": 0.3287, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.0563753310631858, | |
| "grad_norm": 0.780965524524189, | |
| "learning_rate": 1.0739766312207344e-05, | |
| "loss": 0.2999, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.0594021944759742, | |
| "grad_norm": 0.7971661382741867, | |
| "learning_rate": 1.068701236299281e-05, | |
| "loss": 0.3142, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.0624290578887627, | |
| "grad_norm": 0.8168288684903563, | |
| "learning_rate": 1.0634239196565646e-05, | |
| "loss": 0.2932, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.0654559213015513, | |
| "grad_norm": 0.7973032185142181, | |
| "learning_rate": 1.0581448289104759e-05, | |
| "loss": 0.3057, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.0684827847143397, | |
| "grad_norm": 0.7700709657396392, | |
| "learning_rate": 1.0528641117285315e-05, | |
| "loss": 0.3173, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.0715096481271282, | |
| "grad_norm": 0.732002458870274, | |
| "learning_rate": 1.0475819158237426e-05, | |
| "loss": 0.2879, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.0745365115399168, | |
| "grad_norm": 0.7560593244130787, | |
| "learning_rate": 1.0422983889504831e-05, | |
| "loss": 0.3166, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.0775633749527052, | |
| "grad_norm": 0.8514135455913109, | |
| "learning_rate": 1.0370136789003582e-05, | |
| "loss": 0.3141, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.0805902383654937, | |
| "grad_norm": 0.7634618119917063, | |
| "learning_rate": 1.031727933498068e-05, | |
| "loss": 0.3007, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.0836171017782823, | |
| "grad_norm": 0.7136080167180243, | |
| "learning_rate": 1.0264413005972736e-05, | |
| "loss": 0.3098, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.0866439651910706, | |
| "grad_norm": 0.7641965676584433, | |
| "learning_rate": 1.0211539280764617e-05, | |
| "loss": 0.3006, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.0896708286038592, | |
| "grad_norm": 0.8367039686868846, | |
| "learning_rate": 1.015865963834808e-05, | |
| "loss": 0.3005, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.0926976920166478, | |
| "grad_norm": 0.7554211192560765, | |
| "learning_rate": 1.0105775557880398e-05, | |
| "loss": 0.3121, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.0957245554294364, | |
| "grad_norm": 0.8842122518496518, | |
| "learning_rate": 1.0052888518642978e-05, | |
| "loss": 0.3038, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.0987514188422247, | |
| "grad_norm": 0.8083474207370819, | |
| "learning_rate": 1e-05, | |
| "loss": 0.2852, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.1017782822550133, | |
| "grad_norm": 0.7007484654949582, | |
| "learning_rate": 9.947111481357023e-06, | |
| "loss": 0.2977, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1048051456678016, | |
| "grad_norm": 0.9944539277137213, | |
| "learning_rate": 9.894224442119606e-06, | |
| "loss": 0.2913, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.1078320090805902, | |
| "grad_norm": 0.7959856607836462, | |
| "learning_rate": 9.841340361651921e-06, | |
| "loss": 0.2979, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.1108588724933788, | |
| "grad_norm": 0.7507145405929788, | |
| "learning_rate": 9.788460719235386e-06, | |
| "loss": 0.2987, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1138857359061674, | |
| "grad_norm": 0.7979976158497915, | |
| "learning_rate": 9.735586994027267e-06, | |
| "loss": 0.3052, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1169125993189557, | |
| "grad_norm": 0.895505453968812, | |
| "learning_rate": 9.682720665019325e-06, | |
| "loss": 0.3225, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.1199394627317443, | |
| "grad_norm": 0.7606652923735153, | |
| "learning_rate": 9.62986321099642e-06, | |
| "loss": 0.2937, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1229663261445326, | |
| "grad_norm": 0.7877302491111315, | |
| "learning_rate": 9.57701611049517e-06, | |
| "loss": 0.3027, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1259931895573212, | |
| "grad_norm": 0.8183803187920982, | |
| "learning_rate": 9.524180841762577e-06, | |
| "loss": 0.3047, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1290200529701098, | |
| "grad_norm": 0.7800060025803817, | |
| "learning_rate": 9.471358882714687e-06, | |
| "loss": 0.3026, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.1320469163828983, | |
| "grad_norm": 0.749746589784754, | |
| "learning_rate": 9.418551710895243e-06, | |
| "loss": 0.3138, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.1350737797956867, | |
| "grad_norm": 0.7170578044175413, | |
| "learning_rate": 9.365760803434356e-06, | |
| "loss": 0.3057, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.1381006432084753, | |
| "grad_norm": 0.7153778845623648, | |
| "learning_rate": 9.312987637007191e-06, | |
| "loss": 0.2998, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.1411275066212636, | |
| "grad_norm": 0.6990019728557185, | |
| "learning_rate": 9.260233687792657e-06, | |
| "loss": 0.3014, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.1441543700340522, | |
| "grad_norm": 0.735486085225637, | |
| "learning_rate": 9.207500431432115e-06, | |
| "loss": 0.3077, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.1471812334468408, | |
| "grad_norm": 0.7936654903048778, | |
| "learning_rate": 9.154789342988108e-06, | |
| "loss": 0.3074, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.1502080968596293, | |
| "grad_norm": 0.7660427483004251, | |
| "learning_rate": 9.102101896903084e-06, | |
| "loss": 0.3526, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.1532349602724177, | |
| "grad_norm": 0.7133079478198697, | |
| "learning_rate": 9.049439566958176e-06, | |
| "loss": 0.3146, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.1562618236852062, | |
| "grad_norm": 0.738392812814898, | |
| "learning_rate": 8.99680382623195e-06, | |
| "loss": 0.3046, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.1592886870979946, | |
| "grad_norm": 0.6848417792627729, | |
| "learning_rate": 8.944196147059233e-06, | |
| "loss": 0.2801, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.1623155505107832, | |
| "grad_norm": 0.7326633373710211, | |
| "learning_rate": 8.89161800098989e-06, | |
| "loss": 0.3062, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.1653424139235717, | |
| "grad_norm": 0.7677777889336196, | |
| "learning_rate": 8.839070858747697e-06, | |
| "loss": 0.3217, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.1683692773363603, | |
| "grad_norm": 0.6807379668256043, | |
| "learning_rate": 8.786556190189183e-06, | |
| "loss": 0.2881, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.1713961407491487, | |
| "grad_norm": 0.7094729897101273, | |
| "learning_rate": 8.734075464262507e-06, | |
| "loss": 0.3042, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.1744230041619372, | |
| "grad_norm": 0.743134079206942, | |
| "learning_rate": 8.681630148966397e-06, | |
| "loss": 0.292, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.1774498675747256, | |
| "grad_norm": 0.693942262898779, | |
| "learning_rate": 8.629221711309056e-06, | |
| "loss": 0.3021, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.1804767309875142, | |
| "grad_norm": 0.7062729189281528, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.2988, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.1835035944003027, | |
| "grad_norm": 0.7029179922652952, | |
| "learning_rate": 8.52452133174478e-06, | |
| "loss": 0.2873, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.1865304578130913, | |
| "grad_norm": 0.7052583963977933, | |
| "learning_rate": 8.472232318532531e-06, | |
| "loss": 0.2956, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.1895573212258796, | |
| "grad_norm": 0.7479415586333562, | |
| "learning_rate": 8.419986040266502e-06, | |
| "loss": 0.3002, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.1925841846386682, | |
| "grad_norm": 0.7200349365358958, | |
| "learning_rate": 8.367783958387407e-06, | |
| "loss": 0.2854, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.1956110480514566, | |
| "grad_norm": 0.7032934219532944, | |
| "learning_rate": 8.315627533099697e-06, | |
| "loss": 0.2997, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.1986379114642451, | |
| "grad_norm": 0.7483544332279294, | |
| "learning_rate": 8.263518223330698e-06, | |
| "loss": 0.3075, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.2016647748770337, | |
| "grad_norm": 0.7096885812603374, | |
| "learning_rate": 8.211457486689829e-06, | |
| "loss": 0.2881, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.2046916382898223, | |
| "grad_norm": 0.7398365948665891, | |
| "learning_rate": 8.159446779427798e-06, | |
| "loss": 0.2984, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.2077185017026106, | |
| "grad_norm": 0.715181490758838, | |
| "learning_rate": 8.107487556395902e-06, | |
| "loss": 0.2899, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.2107453651153992, | |
| "grad_norm": 0.7234707209860036, | |
| "learning_rate": 8.055581271005292e-06, | |
| "loss": 0.3039, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.2137722285281876, | |
| "grad_norm": 0.7277406746320897, | |
| "learning_rate": 8.00372937518636e-06, | |
| "loss": 0.2986, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2167990919409761, | |
| "grad_norm": 0.6827017175409712, | |
| "learning_rate": 7.951933319348095e-06, | |
| "loss": 0.3004, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2198259553537647, | |
| "grad_norm": 0.7302362128081412, | |
| "learning_rate": 7.900194552337516e-06, | |
| "loss": 0.2956, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.2228528187665533, | |
| "grad_norm": 0.7524258709589485, | |
| "learning_rate": 7.848514521399167e-06, | |
| "loss": 0.2996, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.2258796821793416, | |
| "grad_norm": 0.7084823404602297, | |
| "learning_rate": 7.796894672134594e-06, | |
| "loss": 0.298, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.2289065455921302, | |
| "grad_norm": 0.7012947459884271, | |
| "learning_rate": 7.745336448461958e-06, | |
| "loss": 0.2921, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.2319334090049185, | |
| "grad_norm": 0.7189483799260806, | |
| "learning_rate": 7.6938412925756e-06, | |
| "loss": 0.2914, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.2349602724177071, | |
| "grad_norm": 0.7188043781519954, | |
| "learning_rate": 7.642410644905726e-06, | |
| "loss": 0.3017, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.2379871358304957, | |
| "grad_norm": 0.7223180728377869, | |
| "learning_rate": 7.591045944078119e-06, | |
| "loss": 0.2884, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.2410139992432843, | |
| "grad_norm": 0.7487261938197093, | |
| "learning_rate": 7.539748626873866e-06, | |
| "loss": 0.3069, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.2440408626560726, | |
| "grad_norm": 0.7412428496470427, | |
| "learning_rate": 7.488520128189209e-06, | |
| "loss": 0.317, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.2470677260688612, | |
| "grad_norm": 0.7189695597014218, | |
| "learning_rate": 7.4373618809953755e-06, | |
| "loss": 0.295, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.2500945894816495, | |
| "grad_norm": 0.7569410197229501, | |
| "learning_rate": 7.386275316298513e-06, | |
| "loss": 0.2986, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.253121452894438, | |
| "grad_norm": 0.6900877579163265, | |
| "learning_rate": 7.335261863099652e-06, | |
| "loss": 0.2867, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.2561483163072267, | |
| "grad_norm": 0.7568328414055138, | |
| "learning_rate": 7.2843229483547405e-06, | |
| "loss": 0.3026, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.2591751797200152, | |
| "grad_norm": 0.7180518777268041, | |
| "learning_rate": 7.233459996934731e-06, | |
| "loss": 0.2946, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.2622020431328036, | |
| "grad_norm": 0.7612614912044211, | |
| "learning_rate": 7.182674431585703e-06, | |
| "loss": 0.3016, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.2652289065455922, | |
| "grad_norm": 0.7534743406280243, | |
| "learning_rate": 7.131967672889101e-06, | |
| "loss": 0.2998, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.2682557699583805, | |
| "grad_norm": 0.7730795083632603, | |
| "learning_rate": 7.081341139221955e-06, | |
| "loss": 0.3022, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.271282633371169, | |
| "grad_norm": 0.686434304285361, | |
| "learning_rate": 7.0307962467172555e-06, | |
| "loss": 0.3013, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.2743094967839577, | |
| "grad_norm": 0.8114724040363707, | |
| "learning_rate": 6.9803344092242855e-06, | |
| "loss": 0.3167, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.2773363601967462, | |
| "grad_norm": 0.7575703536791668, | |
| "learning_rate": 6.929957038269123e-06, | |
| "loss": 0.3025, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.2803632236095346, | |
| "grad_norm": 0.7107392617571183, | |
| "learning_rate": 6.87966554301513e-06, | |
| "loss": 0.2987, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.2833900870223232, | |
| "grad_norm": 0.7120932275744412, | |
| "learning_rate": 6.8294613302235325e-06, | |
| "loss": 0.2823, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.2864169504351115, | |
| "grad_norm": 0.7355058077037857, | |
| "learning_rate": 6.779345804214088e-06, | |
| "loss": 0.3049, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.2894438138479, | |
| "grad_norm": 0.7404295585043179, | |
| "learning_rate": 6.729320366825785e-06, | |
| "loss": 0.2964, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.2924706772606886, | |
| "grad_norm": 0.7205754897016723, | |
| "learning_rate": 6.679386417377649e-06, | |
| "loss": 0.3053, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.2954975406734772, | |
| "grad_norm": 0.7584778772928888, | |
| "learning_rate": 6.629545352629583e-06, | |
| "loss": 0.3053, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.2985244040862656, | |
| "grad_norm": 0.7100282503883792, | |
| "learning_rate": 6.579798566743314e-06, | |
| "loss": 0.2897, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.3015512674990541, | |
| "grad_norm": 0.7301479981693604, | |
| "learning_rate": 6.530147451243377e-06, | |
| "loss": 0.3063, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.3045781309118425, | |
| "grad_norm": 0.802818384620131, | |
| "learning_rate": 6.480593394978208e-06, | |
| "loss": 0.3136, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.307604994324631, | |
| "grad_norm": 0.7456739270669519, | |
| "learning_rate": 6.431137784081283e-06, | |
| "loss": 0.303, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.3106318577374196, | |
| "grad_norm": 0.7061596313419777, | |
| "learning_rate": 6.381782001932352e-06, | |
| "loss": 0.2854, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.3136587211502082, | |
| "grad_norm": 0.7288506117926064, | |
| "learning_rate": 6.33252742911874e-06, | |
| "loss": 0.2896, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.3166855845629966, | |
| "grad_norm": 0.771300337845044, | |
| "learning_rate": 6.283375443396726e-06, | |
| "loss": 0.3113, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3197124479757851, | |
| "grad_norm": 0.7756827338463533, | |
| "learning_rate": 6.234327419653013e-06, | |
| "loss": 0.3184, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3227393113885735, | |
| "grad_norm": 0.7044259092342111, | |
| "learning_rate": 6.185384729866264e-06, | |
| "loss": 0.2981, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.325766174801362, | |
| "grad_norm": 0.756123562076688, | |
| "learning_rate": 6.136548743068713e-06, | |
| "loss": 0.3153, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.3287930382141506, | |
| "grad_norm": 0.7257908826330434, | |
| "learning_rate": 6.087820825307904e-06, | |
| "loss": 0.2914, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.3318199016269392, | |
| "grad_norm": 0.7599610052647437, | |
| "learning_rate": 6.039202339608432e-06, | |
| "loss": 0.2987, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.3348467650397275, | |
| "grad_norm": 0.7319510470423332, | |
| "learning_rate": 5.990694645933866e-06, | |
| "loss": 0.2819, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.3378736284525161, | |
| "grad_norm": 0.7227280961896572, | |
| "learning_rate": 5.9422991011486635e-06, | |
| "loss": 0.2872, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.3409004918653045, | |
| "grad_norm": 0.6809805175929268, | |
| "learning_rate": 5.894017058980249e-06, | |
| "loss": 0.2821, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.343927355278093, | |
| "grad_norm": 0.743558289754542, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.2916, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.3469542186908816, | |
| "grad_norm": 0.7012263636530582, | |
| "learning_rate": 5.797798881491138e-06, | |
| "loss": 0.2959, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.3499810821036702, | |
| "grad_norm": 0.7275863576427302, | |
| "learning_rate": 5.749865437599703e-06, | |
| "loss": 0.3087, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.3530079455164585, | |
| "grad_norm": 0.729261216784672, | |
| "learning_rate": 5.702050879108284e-06, | |
| "loss": 0.2805, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.356034808929247, | |
| "grad_norm": 0.7071136038454546, | |
| "learning_rate": 5.654356543492883e-06, | |
| "loss": 0.2898, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.3590616723420355, | |
| "grad_norm": 0.7190997337403445, | |
| "learning_rate": 5.606783764866576e-06, | |
| "loss": 0.2835, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.362088535754824, | |
| "grad_norm": 0.7728953204300376, | |
| "learning_rate": 5.559333873942259e-06, | |
| "loss": 0.2858, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.3651153991676126, | |
| "grad_norm": 0.6945150828592328, | |
| "learning_rate": 5.512008197995379e-06, | |
| "loss": 0.2962, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.3681422625804012, | |
| "grad_norm": 0.735633050467111, | |
| "learning_rate": 5.464808060826825e-06, | |
| "loss": 0.2872, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.3711691259931895, | |
| "grad_norm": 0.7090495949246177, | |
| "learning_rate": 5.417734782725896e-06, | |
| "loss": 0.2883, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.374195989405978, | |
| "grad_norm": 0.763495794163854, | |
| "learning_rate": 5.370789680433376e-06, | |
| "loss": 0.3002, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.3772228528187664, | |
| "grad_norm": 0.7175060132812318, | |
| "learning_rate": 5.323974067104687e-06, | |
| "loss": 0.2863, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.380249716231555, | |
| "grad_norm": 0.697836229805869, | |
| "learning_rate": 5.277289252273175e-06, | |
| "loss": 0.2964, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.3832765796443436, | |
| "grad_norm": 0.7116312155587158, | |
| "learning_rate": 5.230736541813463e-06, | |
| "loss": 0.2957, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.3863034430571322, | |
| "grad_norm": 0.7111139684716851, | |
| "learning_rate": 5.184317237904939e-06, | |
| "loss": 0.2848, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.3893303064699205, | |
| "grad_norm": 0.7377350661242721, | |
| "learning_rate": 5.138032638995315e-06, | |
| "loss": 0.2982, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.392357169882709, | |
| "grad_norm": 0.6753776171195766, | |
| "learning_rate": 5.091884039764321e-06, | |
| "loss": 0.2782, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.3953840332954974, | |
| "grad_norm": 0.7048805471281395, | |
| "learning_rate": 5.045872731087479e-06, | |
| "loss": 0.3011, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.398410896708286, | |
| "grad_norm": 0.7066458532724698, | |
| "learning_rate": 5.000000000000003e-06, | |
| "loss": 0.3076, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.4014377601210746, | |
| "grad_norm": 0.6657612046388043, | |
| "learning_rate": 4.954267129660789e-06, | |
| "loss": 0.2776, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.4044646235338631, | |
| "grad_norm": 0.6882409984310152, | |
| "learning_rate": 4.908675399316534e-06, | |
| "loss": 0.2902, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.4074914869466515, | |
| "grad_norm": 0.7434756784325893, | |
| "learning_rate": 4.863226084265939e-06, | |
| "loss": 0.2975, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.41051835035944, | |
| "grad_norm": 0.6997881718969922, | |
| "learning_rate": 4.817920455824045e-06, | |
| "loss": 0.2996, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.4135452137722284, | |
| "grad_norm": 0.773193010936456, | |
| "learning_rate": 4.772759781286679e-06, | |
| "loss": 0.3106, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.416572077185017, | |
| "grad_norm": 0.7172354036607921, | |
| "learning_rate": 4.727745323894976e-06, | |
| "loss": 0.2865, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4195989405978056, | |
| "grad_norm": 0.6981887514470826, | |
| "learning_rate": 4.682878342800087e-06, | |
| "loss": 0.2834, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.4226258040105941, | |
| "grad_norm": 0.833589487871422, | |
| "learning_rate": 4.638160093027908e-06, | |
| "loss": 0.3012, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.4256526674233825, | |
| "grad_norm": 0.7057918685028615, | |
| "learning_rate": 4.593591825444028e-06, | |
| "loss": 0.3014, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.428679530836171, | |
| "grad_norm": 0.7353070943536116, | |
| "learning_rate": 4.549174786718684e-06, | |
| "loss": 0.2936, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.4317063942489594, | |
| "grad_norm": 0.7199743098327781, | |
| "learning_rate": 4.504910219291941e-06, | |
| "loss": 0.3062, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.434733257661748, | |
| "grad_norm": 0.7022127311213061, | |
| "learning_rate": 4.460799361338898e-06, | |
| "loss": 0.2944, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.4377601210745365, | |
| "grad_norm": 0.6925961776413392, | |
| "learning_rate": 4.416843446735077e-06, | |
| "loss": 0.2894, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.4407869844873251, | |
| "grad_norm": 0.7106833934913287, | |
| "learning_rate": 4.373043705021899e-06, | |
| "loss": 0.2806, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.4438138479001135, | |
| "grad_norm": 0.6997589097918275, | |
| "learning_rate": 4.3294013613722944e-06, | |
| "loss": 0.294, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.446840711312902, | |
| "grad_norm": 0.7042914532159267, | |
| "learning_rate": 4.2859176365564294e-06, | |
| "loss": 0.2868, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.4498675747256904, | |
| "grad_norm": 0.7108709381980339, | |
| "learning_rate": 4.2425937469075626e-06, | |
| "loss": 0.2872, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.452894438138479, | |
| "grad_norm": 0.7414605290018282, | |
| "learning_rate": 4.19943090428802e-06, | |
| "loss": 0.2979, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.4559213015512675, | |
| "grad_norm": 0.7262992166625679, | |
| "learning_rate": 4.1564303160552935e-06, | |
| "loss": 0.2936, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.458948164964056, | |
| "grad_norm": 0.7275574362106993, | |
| "learning_rate": 4.113593185028273e-06, | |
| "loss": 0.298, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.4619750283768445, | |
| "grad_norm": 0.6870636311203779, | |
| "learning_rate": 4.070920709453597e-06, | |
| "loss": 0.2849, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.465001891789633, | |
| "grad_norm": 0.6829564102290653, | |
| "learning_rate": 4.028414082972141e-06, | |
| "loss": 0.2841, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.4680287552024214, | |
| "grad_norm": 0.7510119674750081, | |
| "learning_rate": 3.986074494585619e-06, | |
| "loss": 0.3062, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.47105561861521, | |
| "grad_norm": 0.6981622598487627, | |
| "learning_rate": 3.943903128623336e-06, | |
| "loss": 0.2869, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.4740824820279985, | |
| "grad_norm": 0.7331301527060915, | |
| "learning_rate": 3.9019011647090465e-06, | |
| "loss": 0.3002, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.477109345440787, | |
| "grad_norm": 0.7346183560184907, | |
| "learning_rate": 3.860069777727983e-06, | |
| "loss": 0.2951, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.4801362088535754, | |
| "grad_norm": 0.6896143633077928, | |
| "learning_rate": 3.818410137793947e-06, | |
| "loss": 0.2953, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.483163072266364, | |
| "grad_norm": 0.7115957374328798, | |
| "learning_rate": 3.7769234102166365e-06, | |
| "loss": 0.2964, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.4861899356791524, | |
| "grad_norm": 0.7012432250429187, | |
| "learning_rate": 3.735610755468988e-06, | |
| "loss": 0.2845, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.489216799091941, | |
| "grad_norm": 0.7426631115729697, | |
| "learning_rate": 3.6944733291547784e-06, | |
| "loss": 0.3115, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.4922436625047295, | |
| "grad_norm": 0.707946621551838, | |
| "learning_rate": 3.653512281976238e-06, | |
| "loss": 0.2837, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.495270525917518, | |
| "grad_norm": 0.7416798888955621, | |
| "learning_rate": 3.612728759701919e-06, | |
| "loss": 0.3037, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.4982973893303064, | |
| "grad_norm": 0.707422601897051, | |
| "learning_rate": 3.5721239031346067e-06, | |
| "loss": 0.293, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.501324252743095, | |
| "grad_norm": 0.7824031241464112, | |
| "learning_rate": 3.5316988480794255e-06, | |
| "loss": 0.2849, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.5043511161558833, | |
| "grad_norm": 0.7706494271424782, | |
| "learning_rate": 3.4914547253120655e-06, | |
| "loss": 0.3106, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.507377979568672, | |
| "grad_norm": 0.7070022382658396, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.2891, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.5104048429814605, | |
| "grad_norm": 0.7065278695179165, | |
| "learning_rate": 3.4115137744067516e-06, | |
| "loss": 0.2883, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.513431706394249, | |
| "grad_norm": 0.7553723369135225, | |
| "learning_rate": 3.37181918238904e-06, | |
| "loss": 0.2967, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.5164585698070374, | |
| "grad_norm": 0.7315076317324215, | |
| "learning_rate": 3.3323099948370853e-06, | |
| "loss": 0.2977, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.519485433219826, | |
| "grad_norm": 0.7591720771018126, | |
| "learning_rate": 3.292987316907792e-06, | |
| "loss": 0.2981, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.5225122966326143, | |
| "grad_norm": 0.6835302376263906, | |
| "learning_rate": 3.253852248540994e-06, | |
| "loss": 0.2883, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.525539160045403, | |
| "grad_norm": 0.6878575475377292, | |
| "learning_rate": 3.2149058844286796e-06, | |
| "loss": 0.2849, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.5285660234581915, | |
| "grad_norm": 0.7036079683871901, | |
| "learning_rate": 3.1761493139843734e-06, | |
| "loss": 0.2867, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.53159288687098, | |
| "grad_norm": 0.72440605384167, | |
| "learning_rate": 3.1375836213126653e-06, | |
| "loss": 0.2725, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.5346197502837684, | |
| "grad_norm": 0.7475588570206838, | |
| "learning_rate": 3.099209885178882e-06, | |
| "loss": 0.3016, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.537646613696557, | |
| "grad_norm": 0.7011456699390767, | |
| "learning_rate": 3.0610291789789094e-06, | |
| "loss": 0.2931, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.5406734771093453, | |
| "grad_norm": 0.7060143576279827, | |
| "learning_rate": 3.023042570709185e-06, | |
| "loss": 0.2918, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.543700340522134, | |
| "grad_norm": 0.7519291284192683, | |
| "learning_rate": 2.9852511229367862e-06, | |
| "loss": 0.2884, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.5467272039349225, | |
| "grad_norm": 0.7301244064400944, | |
| "learning_rate": 2.9476558927697605e-06, | |
| "loss": 0.2972, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.549754067347711, | |
| "grad_norm": 0.7288307011591838, | |
| "learning_rate": 2.9102579318274994e-06, | |
| "loss": 0.2886, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.5527809307604994, | |
| "grad_norm": 0.7840510486162967, | |
| "learning_rate": 2.8730582862113743e-06, | |
| "loss": 0.3147, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.555807794173288, | |
| "grad_norm": 0.7233483593976119, | |
| "learning_rate": 2.8360579964754277e-06, | |
| "loss": 0.2877, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.5588346575860763, | |
| "grad_norm": 0.7455911352391579, | |
| "learning_rate": 2.7992580975973136e-06, | |
| "loss": 0.3045, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.5618615209988649, | |
| "grad_norm": 0.732665293413995, | |
| "learning_rate": 2.7626596189492983e-06, | |
| "loss": 0.2876, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.5648883844116535, | |
| "grad_norm": 0.758174239017253, | |
| "learning_rate": 2.726263584269513e-06, | |
| "loss": 0.303, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.567915247824442, | |
| "grad_norm": 0.7219067576668127, | |
| "learning_rate": 2.690071011633284e-06, | |
| "loss": 0.2787, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.5709421112372304, | |
| "grad_norm": 0.7185569256593365, | |
| "learning_rate": 2.6540829134246683e-06, | |
| "loss": 0.2822, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.573968974650019, | |
| "grad_norm": 0.7225051519530642, | |
| "learning_rate": 2.618300296308135e-06, | |
| "loss": 0.2972, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.5769958380628073, | |
| "grad_norm": 0.744481485120923, | |
| "learning_rate": 2.582724161200405e-06, | |
| "loss": 0.2898, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.5800227014755959, | |
| "grad_norm": 0.7075632028198592, | |
| "learning_rate": 2.5473555032424534e-06, | |
| "loss": 0.2992, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.5830495648883844, | |
| "grad_norm": 0.7609945626796282, | |
| "learning_rate": 2.5121953117716744e-06, | |
| "loss": 0.2941, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.586076428301173, | |
| "grad_norm": 0.6975002999831483, | |
| "learning_rate": 2.477244570294206e-06, | |
| "loss": 0.2837, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.5891032917139614, | |
| "grad_norm": 0.69708811541583, | |
| "learning_rate": 2.4425042564574186e-06, | |
| "loss": 0.2821, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.59213015512675, | |
| "grad_norm": 0.76374413717087, | |
| "learning_rate": 2.4079753420225694e-06, | |
| "loss": 0.2888, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.5951570185395383, | |
| "grad_norm": 0.7159668784183697, | |
| "learning_rate": 2.3736587928376197e-06, | |
| "loss": 0.2849, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.5981838819523269, | |
| "grad_norm": 0.7175278111636298, | |
| "learning_rate": 2.339555568810221e-06, | |
| "loss": 0.2921, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.6012107453651154, | |
| "grad_norm": 0.7300706052047552, | |
| "learning_rate": 2.305666623880858e-06, | |
| "loss": 0.2925, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.604237608777904, | |
| "grad_norm": 0.6989844479478865, | |
| "learning_rate": 2.27199290599617e-06, | |
| "loss": 0.2932, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.6072644721906924, | |
| "grad_norm": 0.8294195496991809, | |
| "learning_rate": 2.2385353570824308e-06, | |
| "loss": 0.2966, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.610291335603481, | |
| "grad_norm": 0.746836147125782, | |
| "learning_rate": 2.2052949130192136e-06, | |
| "loss": 0.3083, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.6133181990162693, | |
| "grad_norm": 0.6912313095540511, | |
| "learning_rate": 2.172272503613183e-06, | |
| "loss": 0.2828, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.6163450624290578, | |
| "grad_norm": 0.678097703678436, | |
| "learning_rate": 2.1394690525721275e-06, | |
| "loss": 0.2851, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.6193719258418464, | |
| "grad_norm": 0.7704918992858485, | |
| "learning_rate": 2.1068854774790783e-06, | |
| "loss": 0.2901, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.622398789254635, | |
| "grad_norm": 0.7046461493959828, | |
| "learning_rate": 2.0745226897666858e-06, | |
| "loss": 0.2935, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.6254256526674233, | |
| "grad_norm": 0.6922998137450477, | |
| "learning_rate": 2.0423815946916783e-06, | |
| "loss": 0.2843, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.628452516080212, | |
| "grad_norm": 0.6888534671759411, | |
| "learning_rate": 2.010463091309587e-06, | |
| "loss": 0.2735, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.6314793794930003, | |
| "grad_norm": 0.7080277079797282, | |
| "learning_rate": 1.9787680724495617e-06, | |
| "loss": 0.2882, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.6345062429057888, | |
| "grad_norm": 0.6710409154459778, | |
| "learning_rate": 1.947297424689414e-06, | |
| "loss": 0.275, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.6375331063185774, | |
| "grad_norm": 0.7214519415811514, | |
| "learning_rate": 1.9160520283308115e-06, | |
| "loss": 0.3003, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.640559969731366, | |
| "grad_norm": 0.7393158906798757, | |
| "learning_rate": 1.8850327573746584e-06, | |
| "loss": 0.3011, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.6435868331441543, | |
| "grad_norm": 0.7004116739320664, | |
| "learning_rate": 1.854240479496643e-06, | |
| "loss": 0.2925, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.646613696556943, | |
| "grad_norm": 0.7604605727882595, | |
| "learning_rate": 1.8236760560229715e-06, | |
| "loss": 0.3051, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.6496405599697312, | |
| "grad_norm": 0.6738231989448779, | |
| "learning_rate": 1.7933403419062689e-06, | |
| "loss": 0.2844, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.6526674233825198, | |
| "grad_norm": 0.7152054081926045, | |
| "learning_rate": 1.7632341857016733e-06, | |
| "loss": 0.2902, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.6556942867953084, | |
| "grad_norm": 0.7471003456399699, | |
| "learning_rate": 1.7333584295430894e-06, | |
| "loss": 0.2927, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.658721150208097, | |
| "grad_norm": 0.7010122041593257, | |
| "learning_rate": 1.7037139091196396e-06, | |
| "loss": 0.2984, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.6617480136208853, | |
| "grad_norm": 0.7505449527404975, | |
| "learning_rate": 1.6743014536522872e-06, | |
| "loss": 0.3081, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.6647748770336739, | |
| "grad_norm": 0.7307207396871118, | |
| "learning_rate": 1.6451218858706374e-06, | |
| "loss": 0.2926, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.6678017404464622, | |
| "grad_norm": 0.706363501468216, | |
| "learning_rate": 1.616176021989926e-06, | |
| "loss": 0.2761, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.6708286038592508, | |
| "grad_norm": 0.6913418644659586, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.2805, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.6738554672720394, | |
| "grad_norm": 0.7047790614575827, | |
| "learning_rate": 1.558988638083616e-06, | |
| "loss": 0.2926, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.676882330684828, | |
| "grad_norm": 0.6865856498315134, | |
| "learning_rate": 1.5307487177120773e-06, | |
| "loss": 0.289, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.6799091940976163, | |
| "grad_norm": 0.7922329104574141, | |
| "learning_rate": 1.5027457005048573e-06, | |
| "loss": 0.3004, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.6829360575104049, | |
| "grad_norm": 0.7330882003375084, | |
| "learning_rate": 1.4749803697665366e-06, | |
| "loss": 0.2869, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.6859629209231932, | |
| "grad_norm": 0.6706639418626685, | |
| "learning_rate": 1.4474535021531099e-06, | |
| "loss": 0.2754, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.6889897843359818, | |
| "grad_norm": 0.6858066646951181, | |
| "learning_rate": 1.4201658676502294e-06, | |
| "loss": 0.2737, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.6920166477487704, | |
| "grad_norm": 0.7037302402178043, | |
| "learning_rate": 1.3931182295516965e-06, | |
| "loss": 0.2798, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.695043511161559, | |
| "grad_norm": 0.7316686988799225, | |
| "learning_rate": 1.3663113444380905e-06, | |
| "loss": 0.295, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.6980703745743473, | |
| "grad_norm": 0.7081619726365443, | |
| "learning_rate": 1.339745962155613e-06, | |
| "loss": 0.2829, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.7010972379871359, | |
| "grad_norm": 0.6688905551033065, | |
| "learning_rate": 1.3134228257951142e-06, | |
| "loss": 0.2716, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.7041241013999242, | |
| "grad_norm": 0.6807121429907115, | |
| "learning_rate": 1.2873426716713012e-06, | |
| "loss": 0.2792, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.7071509648127128, | |
| "grad_norm": 0.7395584808009796, | |
| "learning_rate": 1.2615062293021508e-06, | |
| "loss": 0.2887, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.7101778282255014, | |
| "grad_norm": 0.6859773860865065, | |
| "learning_rate": 1.2359142213884933e-06, | |
| "loss": 0.2802, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.71320469163829, | |
| "grad_norm": 0.7417595335766094, | |
| "learning_rate": 1.2105673637938054e-06, | |
| "loss": 0.2917, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.7162315550510783, | |
| "grad_norm": 0.7307811897560617, | |
| "learning_rate": 1.1854663655241804e-06, | |
| "loss": 0.2906, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.7192584184638668, | |
| "grad_norm": 0.7283019705727332, | |
| "learning_rate": 1.1606119287084982e-06, | |
| "loss": 0.2887, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.7222852818766552, | |
| "grad_norm": 0.7186389974920064, | |
| "learning_rate": 1.136004748578785e-06, | |
| "loss": 0.2861, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.7253121452894438, | |
| "grad_norm": 0.7146020425898192, | |
| "learning_rate": 1.1116455134507665e-06, | |
| "loss": 0.2898, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.7283390087022323, | |
| "grad_norm": 0.718372414898604, | |
| "learning_rate": 1.0875349047046113e-06, | |
| "loss": 0.2963, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.731365872115021, | |
| "grad_norm": 0.7138700130760391, | |
| "learning_rate": 1.0636735967658785e-06, | |
| "loss": 0.2808, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.7343927355278093, | |
| "grad_norm": 0.7354030330526387, | |
| "learning_rate": 1.0400622570866426e-06, | |
| "loss": 0.2872, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.7374195989405978, | |
| "grad_norm": 0.7123848906641325, | |
| "learning_rate": 1.0167015461268303e-06, | |
| "loss": 0.285, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.7404464623533862, | |
| "grad_norm": 0.7021936579897722, | |
| "learning_rate": 9.935921173357444e-07, | |
| "loss": 0.2789, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.7434733257661748, | |
| "grad_norm": 0.7562312769138867, | |
| "learning_rate": 9.707346171337895e-07, | |
| "loss": 0.2952, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.7465001891789633, | |
| "grad_norm": 0.7007681361652561, | |
| "learning_rate": 9.481296848943744e-07, | |
| "loss": 0.2744, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.749527052591752, | |
| "grad_norm": 0.7407196153323605, | |
| "learning_rate": 9.257779529260558e-07, | |
| "loss": 0.2919, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.7525539160045402, | |
| "grad_norm": 0.6829315938574598, | |
| "learning_rate": 9.036800464548157e-07, | |
| "loss": 0.2687, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.7555807794173288, | |
| "grad_norm": 0.6801318616286867, | |
| "learning_rate": 8.818365836066101e-07, | |
| "loss": 0.2774, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.7586076428301172, | |
| "grad_norm": 0.681400464908231, | |
| "learning_rate": 8.602481753900427e-07, | |
| "loss": 0.2842, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.7616345062429057, | |
| "grad_norm": 0.6797452503369867, | |
| "learning_rate": 8.389154256793042e-07, | |
| "loss": 0.2837, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.7646613696556943, | |
| "grad_norm": 0.7139195567908404, | |
| "learning_rate": 8.178389311972612e-07, | |
| "loss": 0.3101, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.7676882330684829, | |
| "grad_norm": 0.73279063580427, | |
| "learning_rate": 7.970192814987676e-07, | |
| "loss": 0.2777, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.7707150964812712, | |
| "grad_norm": 0.6849794391962554, | |
| "learning_rate": 7.764570589541876e-07, | |
| "loss": 0.2752, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.7737419598940598, | |
| "grad_norm": 0.7388977055538356, | |
| "learning_rate": 7.561528387330797e-07, | |
| "loss": 0.2822, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.7767688233068482, | |
| "grad_norm": 0.6974317770750728, | |
| "learning_rate": 7.361071887881376e-07, | |
| "loss": 0.283, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.7797956867196367, | |
| "grad_norm": 0.7160962918807577, | |
| "learning_rate": 7.163206698392744e-07, | |
| "loss": 0.2693, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.7828225501324253, | |
| "grad_norm": 0.73686245901484, | |
| "learning_rate": 6.96793835357964e-07, | |
| "loss": 0.2851, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.7858494135452139, | |
| "grad_norm": 0.7128671703264858, | |
| "learning_rate": 6.775272315517423e-07, | |
| "loss": 0.2839, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.7888762769580022, | |
| "grad_norm": 0.7806509400256381, | |
| "learning_rate": 6.585213973489335e-07, | |
| "loss": 0.3056, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.7919031403707908, | |
| "grad_norm": 0.7211912720595653, | |
| "learning_rate": 6.397768643835755e-07, | |
| "loss": 0.2934, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.7949300037835791, | |
| "grad_norm": 0.7656169540768807, | |
| "learning_rate": 6.212941569805508e-07, | |
| "loss": 0.3102, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.7979568671963677, | |
| "grad_norm": 0.7618646464874628, | |
| "learning_rate": 6.030737921409169e-07, | |
| "loss": 0.2876, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.8009837306091563, | |
| "grad_norm": 0.7538512047371928, | |
| "learning_rate": 5.851162795274445e-07, | |
| "loss": 0.2923, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.8040105940219449, | |
| "grad_norm": 0.7168120545459888, | |
| "learning_rate": 5.674221214503639e-07, | |
| "loss": 0.2724, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.8070374574347332, | |
| "grad_norm": 0.694747256043253, | |
| "learning_rate": 5.499918128533155e-07, | |
| "loss": 0.2747, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.8100643208475218, | |
| "grad_norm": 0.7218803599821014, | |
| "learning_rate": 5.328258412994958e-07, | |
| "loss": 0.2899, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.8130911842603101, | |
| "grad_norm": 0.7420998078552697, | |
| "learning_rate": 5.159246869580348e-07, | |
| "loss": 0.2927, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.8161180476730987, | |
| "grad_norm": 0.7559890299023758, | |
| "learning_rate": 4.992888225905467e-07, | |
| "loss": 0.2882, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.8191449110858873, | |
| "grad_norm": 0.7387064340800056, | |
| "learning_rate": 4.829187135379221e-07, | |
| "loss": 0.2903, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.8221717744986758, | |
| "grad_norm": 0.7431173486567465, | |
| "learning_rate": 4.6681481770729844e-07, | |
| "loss": 0.2905, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.8251986379114642, | |
| "grad_norm": 0.7398853541933428, | |
| "learning_rate": 4.509775855592613e-07, | |
| "loss": 0.2892, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.8282255013242528, | |
| "grad_norm": 0.7182209486950671, | |
| "learning_rate": 4.354074600952407e-07, | |
| "loss": 0.2824, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.8312523647370411, | |
| "grad_norm": 0.7848938076872569, | |
| "learning_rate": 4.2010487684511105e-07, | |
| "loss": 0.2908, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.8342792281498297, | |
| "grad_norm": 0.6732660410246396, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.2854, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.8373060915626183, | |
| "grad_norm": 0.6905828173062015, | |
| "learning_rate": 3.9030404167542777e-07, | |
| "loss": 0.2831, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.8403329549754068, | |
| "grad_norm": 0.7161464176617742, | |
| "learning_rate": 3.7580662334929517e-07, | |
| "loss": 0.2897, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.8433598183881952, | |
| "grad_norm": 0.7191804279173392, | |
| "learning_rate": 3.615784144005796e-07, | |
| "loss": 0.2938, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.8463866818009838, | |
| "grad_norm": 0.7223784033324767, | |
| "learning_rate": 3.476198128228736e-07, | |
| "loss": 0.2903, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.849413545213772, | |
| "grad_norm": 0.7143106543158316, | |
| "learning_rate": 3.339312090682689e-07, | |
| "loss": 0.285, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.8524404086265607, | |
| "grad_norm": 0.7186542132528649, | |
| "learning_rate": 3.2051298603643754e-07, | |
| "loss": 0.2895, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.8554672720393492, | |
| "grad_norm": 0.7197447724536623, | |
| "learning_rate": 3.0736551906392354e-07, | |
| "loss": 0.295, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.8584941354521378, | |
| "grad_norm": 0.707349175980668, | |
| "learning_rate": 2.9448917591363923e-07, | |
| "loss": 0.2834, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.8615209988649262, | |
| "grad_norm": 0.7111422731500202, | |
| "learning_rate": 2.818843167645835e-07, | |
| "loss": 0.2898, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.8645478622777147, | |
| "grad_norm": 0.7459269171209035, | |
| "learning_rate": 2.6955129420176193e-07, | |
| "loss": 0.2887, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.867574725690503, | |
| "grad_norm": 0.7274740473840461, | |
| "learning_rate": 2.5749045320632824e-07, | |
| "loss": 0.288, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.8706015891032917, | |
| "grad_norm": 0.695585392688565, | |
| "learning_rate": 2.4570213114592957e-07, | |
| "loss": 0.2827, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.8736284525160802, | |
| "grad_norm": 0.695015974921577, | |
| "learning_rate": 2.3418665776527738e-07, | |
| "loss": 0.2909, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.8766553159288688, | |
| "grad_norm": 0.7099184145113648, | |
| "learning_rate": 2.2294435517691504e-07, | |
| "loss": 0.2769, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.8796821793416572, | |
| "grad_norm": 0.6945692458573143, | |
| "learning_rate": 2.119755378522137e-07, | |
| "loss": 0.273, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.8827090427544457, | |
| "grad_norm": 0.6624875032488875, | |
| "learning_rate": 2.0128051261257165e-07, | |
| "loss": 0.28, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.885735906167234, | |
| "grad_norm": 0.7311031624068631, | |
| "learning_rate": 1.908595786208367e-07, | |
| "loss": 0.2833, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.8887627695800226, | |
| "grad_norm": 0.6685635022888985, | |
| "learning_rate": 1.8071302737293294e-07, | |
| "loss": 0.2848, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.8917896329928112, | |
| "grad_norm": 0.737380607358807, | |
| "learning_rate": 1.7084114268971275e-07, | |
| "loss": 0.2954, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.8948164964055998, | |
| "grad_norm": 0.6796038571229768, | |
| "learning_rate": 1.612442007090076e-07, | |
| "loss": 0.2709, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.8978433598183881, | |
| "grad_norm": 0.6963944704233772, | |
| "learning_rate": 1.519224698779198e-07, | |
| "loss": 0.2802, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.9008702232311767, | |
| "grad_norm": 0.7243576113946623, | |
| "learning_rate": 1.4287621094529524e-07, | |
| "loss": 0.2987, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.903897086643965, | |
| "grad_norm": 0.7584954652759534, | |
| "learning_rate": 1.3410567695444576e-07, | |
| "loss": 0.3067, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.9069239500567536, | |
| "grad_norm": 0.7016310594901625, | |
| "learning_rate": 1.2561111323605714e-07, | |
| "loss": 0.2883, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.9099508134695422, | |
| "grad_norm": 0.7117525564517762, | |
| "learning_rate": 1.1739275740134004e-07, | |
| "loss": 0.2914, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.9129776768823308, | |
| "grad_norm": 0.7163171199425554, | |
| "learning_rate": 1.0945083933537104e-07, | |
| "loss": 0.2992, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.9160045402951191, | |
| "grad_norm": 0.6703970337469539, | |
| "learning_rate": 1.0178558119067316e-07, | |
| "loss": 0.2663, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.9190314037079077, | |
| "grad_norm": 0.7212287713075038, | |
| "learning_rate": 9.439719738099318e-08, | |
| "loss": 0.2795, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.922058267120696, | |
| "grad_norm": 0.7014024912950563, | |
| "learning_rate": 8.728589457530857e-08, | |
| "loss": 0.2791, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.9250851305334846, | |
| "grad_norm": 0.7066321662168396, | |
| "learning_rate": 8.04518716920466e-08, | |
| "loss": 0.2854, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.9281119939462732, | |
| "grad_norm": 0.7029031888414914, | |
| "learning_rate": 7.389531989351773e-08, | |
| "loss": 0.2753, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.9311388573590618, | |
| "grad_norm": 0.6792330863359597, | |
| "learning_rate": 6.761642258056977e-08, | |
| "loss": 0.2707, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.9341657207718501, | |
| "grad_norm": 0.6937782944155036, | |
| "learning_rate": 6.161535538745877e-08, | |
| "loss": 0.2988, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.9371925841846387, | |
| "grad_norm": 0.7041507612121065, | |
| "learning_rate": 5.5892286176932875e-08, | |
| "loss": 0.28, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.940219447597427, | |
| "grad_norm": 0.7062336311892526, | |
| "learning_rate": 5.044737503554165e-08, | |
| "loss": 0.2984, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.9432463110102156, | |
| "grad_norm": 0.7255074738715077, | |
| "learning_rate": 4.528077426915412e-08, | |
| "loss": 0.3004, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.9462731744230042, | |
| "grad_norm": 0.7289396056146205, | |
| "learning_rate": 4.0392628398699954e-08, | |
| "loss": 0.2906, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.9493000378357928, | |
| "grad_norm": 0.6795060901751642, | |
| "learning_rate": 3.578307415612714e-08, | |
| "loss": 0.2692, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.952326901248581, | |
| "grad_norm": 0.7126840978489811, | |
| "learning_rate": 3.1452240480577265e-08, | |
| "loss": 0.2787, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.9553537646613697, | |
| "grad_norm": 0.7304533008803592, | |
| "learning_rate": 2.7400248514776184e-08, | |
| "loss": 0.2913, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.958380628074158, | |
| "grad_norm": 0.7268061528119466, | |
| "learning_rate": 2.3627211601651157e-08, | |
| "loss": 0.2936, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.9614074914869466, | |
| "grad_norm": 0.7086629587665598, | |
| "learning_rate": 2.013323528115674e-08, | |
| "loss": 0.2876, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.9644343548997352, | |
| "grad_norm": 0.7344656338299671, | |
| "learning_rate": 1.6918417287318245e-08, | |
| "loss": 0.2832, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.9674612183125237, | |
| "grad_norm": 0.7689805869442997, | |
| "learning_rate": 1.3982847545507271e-08, | |
| "loss": 0.2899, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.970488081725312, | |
| "grad_norm": 0.7217682051285186, | |
| "learning_rate": 1.1326608169920373e-08, | |
| "loss": 0.2872, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.9735149451381007, | |
| "grad_norm": 0.7129689049020616, | |
| "learning_rate": 8.949773461282008e-09, | |
| "loss": 0.282, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.976541808550889, | |
| "grad_norm": 0.7086532729970971, | |
| "learning_rate": 6.8524099047695415e-09, | |
| "loss": 0.2936, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.9795686719636776, | |
| "grad_norm": 0.723633993464164, | |
| "learning_rate": 5.034576168149175e-09, | |
| "loss": 0.2796, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.9825955353764662, | |
| "grad_norm": 0.6714025697517155, | |
| "learning_rate": 3.4963231001383657e-09, | |
| "loss": 0.2731, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.9856223987892547, | |
| "grad_norm": 0.6893485867973999, | |
| "learning_rate": 2.237693728981416e-09, | |
| "loss": 0.2767, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.988649262202043, | |
| "grad_norm": 0.7098013161957313, | |
| "learning_rate": 1.2587232612493172e-09, | |
| "loss": 0.2851, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.9916761256148316, | |
| "grad_norm": 0.7302256905092277, | |
| "learning_rate": 5.594390808494332e-10, | |
| "loss": 0.2874, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.99470298902762, | |
| "grad_norm": 0.7005790261149879, | |
| "learning_rate": 1.3986074826388697e-10, | |
| "loss": 0.279, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.9977298524404086, | |
| "grad_norm": 0.7230422984346807, | |
| "learning_rate": 0.0, | |
| "loss": 0.2994, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.9977298524404086, | |
| "step": 660, | |
| "total_flos": 67084837847040.0, | |
| "train_loss": 0.3775221068750728, | |
| "train_runtime": 2377.8446, | |
| "train_samples_per_second": 35.557, | |
| "train_steps_per_second": 0.278 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 660, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 67084837847040.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |