{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9977298524404086,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003026863412788498,
      "grad_norm": 2.653642309001846,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 0.7607,
      "step": 1
    },
    {
      "epoch": 0.006053726825576996,
      "grad_norm": 2.5922112333202088,
      "learning_rate": 6.060606060606061e-07,
      "loss": 0.7518,
      "step": 2
    },
    {
      "epoch": 0.009080590238365494,
      "grad_norm": 2.554194759996629,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.7257,
      "step": 3
    },
    {
      "epoch": 0.012107453651153992,
      "grad_norm": 2.63438925636528,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 0.7808,
      "step": 4
    },
    {
      "epoch": 0.01513431706394249,
      "grad_norm": 2.462685042118507,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.7046,
      "step": 5
    },
    {
      "epoch": 0.018161180476730987,
      "grad_norm": 2.352286315669494,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.7208,
      "step": 6
    },
    {
      "epoch": 0.021188043889519486,
      "grad_norm": 2.063556901218873,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 0.7149,
      "step": 7
    },
    {
      "epoch": 0.024214907302307985,
      "grad_norm": 1.8154847790671955,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 0.6994,
      "step": 8
    },
    {
      "epoch": 0.02724177071509648,
      "grad_norm": 1.2076222176550426,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6912,
      "step": 9
    },
    {
      "epoch": 0.03026863412788498,
      "grad_norm": 1.214170698873997,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6898,
      "step": 10
    },
    {
      "epoch": 0.03329549754067348,
      "grad_norm": 1.249393928614815,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.659,
      "step": 11
    },
    {
      "epoch": 0.036322360953461974,
      "grad_norm": 1.9156334051480104,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.6679,
      "step": 12
    },
    {
      "epoch": 0.03934922436625047,
      "grad_norm": 3.0417637453190634,
      "learning_rate": 3.93939393939394e-06,
      "loss": 0.6902,
      "step": 13
    },
    {
      "epoch": 0.04237608777903897,
      "grad_norm": 2.459496434867318,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.6584,
      "step": 14
    },
    {
      "epoch": 0.04540295119182747,
      "grad_norm": 2.0017984414461925,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.662,
      "step": 15
    },
    {
      "epoch": 0.04842981460461597,
      "grad_norm": 1.3619786408678614,
      "learning_rate": 4.848484848484849e-06,
      "loss": 0.5831,
      "step": 16
    },
    {
      "epoch": 0.051456678017404466,
      "grad_norm": 1.1123523922758016,
      "learning_rate": 5.151515151515152e-06,
      "loss": 0.6011,
      "step": 17
    },
    {
      "epoch": 0.05448354143019296,
      "grad_norm": 1.2313268742997454,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.6159,
      "step": 18
    },
    {
      "epoch": 0.057510404842981463,
      "grad_norm": 1.1775618980909193,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 0.6232,
      "step": 19
    },
    {
      "epoch": 0.06053726825576996,
      "grad_norm": 1.0161247742343753,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.5972,
      "step": 20
    },
    {
      "epoch": 0.06356413166855845,
      "grad_norm": 0.8477184006555634,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.589,
      "step": 21
    },
    {
      "epoch": 0.06659099508134696,
      "grad_norm": 0.7581805666749508,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5371,
      "step": 22
    },
    {
      "epoch": 0.06961785849413545,
      "grad_norm": 0.8705917577466252,
      "learning_rate": 6.969696969696971e-06,
      "loss": 0.5591,
      "step": 23
    },
    {
      "epoch": 0.07264472190692395,
      "grad_norm": 0.8522831911904432,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.6045,
      "step": 24
    },
    {
      "epoch": 0.07567158531971245,
      "grad_norm": 0.7352647897854065,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.5433,
      "step": 25
    },
    {
      "epoch": 0.07869844873250094,
      "grad_norm": 0.7194423582606616,
      "learning_rate": 7.87878787878788e-06,
      "loss": 0.5745,
      "step": 26
    },
    {
      "epoch": 0.08172531214528944,
      "grad_norm": 0.7178198575109902,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.5365,
      "step": 27
    },
    {
      "epoch": 0.08475217555807794,
      "grad_norm": 0.7287566186498791,
      "learning_rate": 8.484848484848486e-06,
      "loss": 0.5047,
      "step": 28
    },
    {
      "epoch": 0.08777903897086645,
      "grad_norm": 0.795016826843346,
      "learning_rate": 8.787878787878788e-06,
      "loss": 0.5747,
      "step": 29
    },
    {
      "epoch": 0.09080590238365494,
      "grad_norm": 0.7070438081150247,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5558,
      "step": 30
    },
    {
      "epoch": 0.09383276579644344,
      "grad_norm": 0.7189224373814807,
      "learning_rate": 9.393939393939396e-06,
      "loss": 0.5253,
      "step": 31
    },
    {
      "epoch": 0.09685962920923194,
      "grad_norm": 0.708188733507097,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.5357,
      "step": 32
    },
    {
      "epoch": 0.09988649262202043,
      "grad_norm": 0.709892169548937,
      "learning_rate": 1e-05,
      "loss": 0.5542,
      "step": 33
    },
    {
      "epoch": 0.10291335603480893,
      "grad_norm": 0.6689205883594461,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 0.5251,
      "step": 34
    },
    {
      "epoch": 0.10594021944759743,
      "grad_norm": 0.7708338817589518,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 0.5624,
      "step": 35
    },
    {
      "epoch": 0.10896708286038592,
      "grad_norm": 0.6607276883497549,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5277,
      "step": 36
    },
    {
      "epoch": 0.11199394627317442,
      "grad_norm": 0.6782181867807554,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 0.5471,
      "step": 37
    },
    {
      "epoch": 0.11502080968596293,
      "grad_norm": 0.6633295184018997,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 0.5076,
      "step": 38
    },
    {
      "epoch": 0.11804767309875142,
      "grad_norm": 0.6831770809175276,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5085,
      "step": 39
    },
    {
      "epoch": 0.12107453651153992,
      "grad_norm": 0.7163772729536908,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.5025,
      "step": 40
    },
    {
      "epoch": 0.12410139992432842,
      "grad_norm": 0.673621628700286,
      "learning_rate": 1.2424242424242425e-05,
      "loss": 0.5404,
      "step": 41
    },
    {
      "epoch": 0.1271282633371169,
      "grad_norm": 0.6978798166145279,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.5206,
      "step": 42
    },
    {
      "epoch": 0.1301551267499054,
      "grad_norm": 0.6758251776071441,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 0.5151,
      "step": 43
    },
    {
      "epoch": 0.13318199016269391,
      "grad_norm": 0.6320608089101,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.5084,
      "step": 44
    },
    {
      "epoch": 0.1362088535754824,
      "grad_norm": 0.6920400003814712,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.5215,
      "step": 45
    },
    {
      "epoch": 0.1392357169882709,
      "grad_norm": 0.6426589394479636,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 0.5478,
      "step": 46
    },
    {
      "epoch": 0.1422625804010594,
      "grad_norm": 0.6592544978872925,
      "learning_rate": 1.4242424242424245e-05,
      "loss": 0.5363,
      "step": 47
    },
    {
      "epoch": 0.1452894438138479,
      "grad_norm": 0.6303886033476793,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.4778,
      "step": 48
    },
    {
      "epoch": 0.14831630722663638,
      "grad_norm": 0.6825469178984898,
      "learning_rate": 1.484848484848485e-05,
      "loss": 0.5313,
      "step": 49
    },
    {
      "epoch": 0.1513431706394249,
      "grad_norm": 0.6511075065453922,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.5037,
      "step": 50
    },
    {
      "epoch": 0.1543700340522134,
      "grad_norm": 0.6783685695103729,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.4632,
      "step": 51
    },
    {
      "epoch": 0.15739689746500188,
      "grad_norm": 0.6778524173791399,
      "learning_rate": 1.575757575757576e-05,
      "loss": 0.5347,
      "step": 52
    },
    {
      "epoch": 0.1604237608777904,
      "grad_norm": 0.6713616058177643,
      "learning_rate": 1.606060606060606e-05,
      "loss": 0.4869,
      "step": 53
    },
    {
      "epoch": 0.16345062429057888,
      "grad_norm": 0.7146450729124729,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4758,
      "step": 54
    },
    {
      "epoch": 0.16647748770336737,
      "grad_norm": 0.6877829806421919,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.5339,
      "step": 55
    },
    {
      "epoch": 0.1695043511161559,
      "grad_norm": 0.638685196422612,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.4868,
      "step": 56
    },
    {
      "epoch": 0.17253121452894438,
      "grad_norm": 0.7094289181124012,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.5111,
      "step": 57
    },
    {
      "epoch": 0.1755580779417329,
      "grad_norm": 0.6714538266612285,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 0.5121,
      "step": 58
    },
    {
      "epoch": 0.17858494135452138,
      "grad_norm": 0.6969505485711796,
      "learning_rate": 1.787878787878788e-05,
      "loss": 0.4897,
      "step": 59
    },
    {
      "epoch": 0.18161180476730987,
      "grad_norm": 0.7349628866367,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.553,
      "step": 60
    },
    {
      "epoch": 0.1846386681800984,
      "grad_norm": 0.6696445466750143,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 0.4951,
      "step": 61
    },
    {
      "epoch": 0.18766553159288688,
      "grad_norm": 0.6617587898318434,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 0.5284,
      "step": 62
    },
    {
      "epoch": 0.19069239500567536,
      "grad_norm": 0.706221486781084,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4885,
      "step": 63
    },
    {
      "epoch": 0.19371925841846388,
      "grad_norm": 0.6614213463731939,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.5159,
      "step": 64
    },
    {
      "epoch": 0.19674612183125237,
      "grad_norm": 0.726559409699496,
      "learning_rate": 1.96969696969697e-05,
      "loss": 0.5148,
      "step": 65
    },
    {
      "epoch": 0.19977298524404086,
      "grad_norm": 0.703206497838019,
      "learning_rate": 2e-05,
      "loss": 0.5015,
      "step": 66
    },
    {
      "epoch": 0.20279984865682937,
      "grad_norm": 0.8434838159306466,
      "learning_rate": 1.9999860139251737e-05,
      "loss": 0.527,
      "step": 67
    },
    {
      "epoch": 0.20582671206961786,
      "grad_norm": 0.6132658265405436,
      "learning_rate": 1.9999440560919153e-05,
      "loss": 0.4921,
      "step": 68
    },
    {
      "epoch": 0.20885357548240635,
      "grad_norm": 0.7505502804477754,
      "learning_rate": 1.9998741276738753e-05,
      "loss": 0.469,
      "step": 69
    },
    {
      "epoch": 0.21188043889519487,
      "grad_norm": 0.7115606662498123,
      "learning_rate": 1.999776230627102e-05,
      "loss": 0.5244,
      "step": 70
    },
    {
      "epoch": 0.21490730230798336,
      "grad_norm": 0.7570799848249147,
      "learning_rate": 1.9996503676899863e-05,
      "loss": 0.4871,
      "step": 71
    },
    {
      "epoch": 0.21793416572077184,
      "grad_norm": 0.6745168914567232,
      "learning_rate": 1.9994965423831853e-05,
      "loss": 0.5216,
      "step": 72
    },
    {
      "epoch": 0.22096102913356036,
      "grad_norm": 0.8026032876428376,
      "learning_rate": 1.9993147590095232e-05,
      "loss": 0.4818,
      "step": 73
    },
    {
      "epoch": 0.22398789254634885,
      "grad_norm": 0.7703357098063327,
      "learning_rate": 1.999105022653872e-05,
      "loss": 0.5289,
      "step": 74
    },
    {
      "epoch": 0.22701475595913734,
      "grad_norm": 0.6884525063015393,
      "learning_rate": 1.9988673391830082e-05,
      "loss": 0.4997,
      "step": 75
    },
    {
      "epoch": 0.23004161937192585,
      "grad_norm": 0.7908440606495958,
      "learning_rate": 1.9986017152454497e-05,
      "loss": 0.5035,
      "step": 76
    },
    {
      "epoch": 0.23306848278471434,
      "grad_norm": 0.6217642015366157,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.4673,
      "step": 77
    },
    {
      "epoch": 0.23609534619750283,
      "grad_norm": 0.8086567400881232,
      "learning_rate": 1.9979866764718846e-05,
      "loss": 0.4936,
      "step": 78
    },
    {
      "epoch": 0.23912220961029135,
      "grad_norm": 0.6748553548070318,
      "learning_rate": 1.997637278839835e-05,
      "loss": 0.5007,
      "step": 79
    },
    {
      "epoch": 0.24214907302307984,
      "grad_norm": 0.7909118323454936,
      "learning_rate": 1.9972599751485225e-05,
      "loss": 0.4929,
      "step": 80
    },
    {
      "epoch": 0.24517593643586832,
      "grad_norm": 0.6761702566000428,
      "learning_rate": 1.9968547759519426e-05,
      "loss": 0.5018,
      "step": 81
    },
    {
      "epoch": 0.24820279984865684,
      "grad_norm": 0.7713095553128116,
      "learning_rate": 1.9964216925843876e-05,
      "loss": 0.4794,
      "step": 82
    },
    {
      "epoch": 0.2512296632614453,
      "grad_norm": 0.6765563475808045,
      "learning_rate": 1.9959607371601303e-05,
      "loss": 0.4839,
      "step": 83
    },
    {
      "epoch": 0.2542565266742338,
      "grad_norm": 0.7848989238135033,
      "learning_rate": 1.9954719225730847e-05,
      "loss": 0.503,
      "step": 84
    },
    {
      "epoch": 0.25728339008702233,
      "grad_norm": 0.6752303325964191,
      "learning_rate": 1.994955262496446e-05,
      "loss": 0.4929,
      "step": 85
    },
    {
      "epoch": 0.2603102534998108,
      "grad_norm": 0.8090157427997055,
      "learning_rate": 1.9944107713823068e-05,
      "loss": 0.5074,
      "step": 86
    },
    {
      "epoch": 0.2633371169125993,
      "grad_norm": 0.6148199872593046,
      "learning_rate": 1.9938384644612542e-05,
      "loss": 0.4761,
      "step": 87
    },
    {
      "epoch": 0.26636398032538783,
      "grad_norm": 0.6818622789453987,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.4856,
      "step": 88
    },
    {
      "epoch": 0.2693908437381763,
      "grad_norm": 0.6706670307612664,
      "learning_rate": 1.9926104680106484e-05,
      "loss": 0.5031,
      "step": 89
    },
    {
      "epoch": 0.2724177071509648,
      "grad_norm": 0.6707470958888189,
      "learning_rate": 1.9919548128307954e-05,
      "loss": 0.4713,
      "step": 90
    },
    {
      "epoch": 0.2754445705637533,
      "grad_norm": 0.7024692529494156,
      "learning_rate": 1.9912714105424694e-05,
      "loss": 0.4836,
      "step": 91
    },
    {
      "epoch": 0.2784714339765418,
      "grad_norm": 0.6826575867532084,
      "learning_rate": 1.990560280261901e-05,
      "loss": 0.4818,
      "step": 92
    },
    {
      "epoch": 0.2814982973893303,
      "grad_norm": 0.6875484519608218,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4707,
      "step": 93
    },
    {
      "epoch": 0.2845251608021188,
      "grad_norm": 0.6989815243455156,
      "learning_rate": 1.9890549160664633e-05,
      "loss": 0.4555,
      "step": 94
    },
    {
      "epoch": 0.2875520242149073,
      "grad_norm": 0.6881951073889534,
      "learning_rate": 1.9882607242598663e-05,
      "loss": 0.5173,
      "step": 95
    },
    {
      "epoch": 0.2905788876276958,
      "grad_norm": 0.7746163150218435,
      "learning_rate": 1.9874388886763944e-05,
      "loss": 0.4493,
      "step": 96
    },
    {
      "epoch": 0.2936057510404843,
      "grad_norm": 0.7874564596528957,
      "learning_rate": 1.9865894323045558e-05,
      "loss": 0.5304,
      "step": 97
    },
    {
      "epoch": 0.29663261445327277,
      "grad_norm": 0.701524225555686,
      "learning_rate": 1.9857123789054707e-05,
      "loss": 0.5377,
      "step": 98
    },
    {
      "epoch": 0.2996594778660613,
      "grad_norm": 0.8145646846456013,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.502,
      "step": 99
    },
    {
      "epoch": 0.3026863412788498,
      "grad_norm": 0.6768328450225374,
      "learning_rate": 1.9838755799290993e-05,
      "loss": 0.4821,
      "step": 100
    },
    {
      "epoch": 0.30571320469163826,
      "grad_norm": 0.8263833925233237,
      "learning_rate": 1.9829158857310288e-05,
      "loss": 0.513,
      "step": 101
    },
    {
      "epoch": 0.3087400681044268,
      "grad_norm": 0.705647528914653,
      "learning_rate": 1.9819286972627066e-05,
      "loss": 0.4954,
      "step": 102
    },
    {
      "epoch": 0.3117669315172153,
      "grad_norm": 1.112583589601411,
      "learning_rate": 1.9809140421379168e-05,
      "loss": 0.4735,
      "step": 103
    },
    {
      "epoch": 0.31479379493000376,
      "grad_norm": 1.0573008413710339,
      "learning_rate": 1.979871948738743e-05,
      "loss": 0.4988,
      "step": 104
    },
    {
      "epoch": 0.3178206583427923,
      "grad_norm": 4.563156628451907,
      "learning_rate": 1.978802446214779e-05,
      "loss": 0.5498,
      "step": 105
    },
    {
      "epoch": 0.3208475217555808,
      "grad_norm": 1.5419963879262906,
      "learning_rate": 1.9777055644823087e-05,
      "loss": 0.518,
      "step": 106
    },
    {
      "epoch": 0.32387438516836925,
      "grad_norm": 0.9957849878521595,
      "learning_rate": 1.9765813342234726e-05,
      "loss": 0.5223,
      "step": 107
    },
    {
      "epoch": 0.32690124858115777,
      "grad_norm": 0.9918631792361824,
      "learning_rate": 1.9754297868854075e-05,
      "loss": 0.491,
      "step": 108
    },
    {
      "epoch": 0.3299281119939463,
      "grad_norm": 0.7501498300964237,
      "learning_rate": 1.9742509546793673e-05,
      "loss": 0.527,
      "step": 109
    },
    {
      "epoch": 0.33295497540673474,
      "grad_norm": 0.8475448493105577,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.5106,
      "step": 110
    },
    {
      "epoch": 0.33598183881952326,
      "grad_norm": 0.6097368728455299,
      "learning_rate": 1.9718115683235418e-05,
      "loss": 0.4682,
      "step": 111
    },
    {
      "epoch": 0.3390087022323118,
      "grad_norm": 0.7885205586754842,
      "learning_rate": 1.970551082408636e-05,
      "loss": 0.4548,
      "step": 112
    },
    {
      "epoch": 0.3420355656451003,
      "grad_norm": 0.6172104545055264,
      "learning_rate": 1.969263448093608e-05,
      "loss": 0.4661,
      "step": 113
    },
    {
      "epoch": 0.34506242905788875,
      "grad_norm": 0.719254702740748,
      "learning_rate": 1.9679487013963566e-05,
      "loss": 0.5002,
      "step": 114
    },
    {
      "epoch": 0.34808929247067727,
      "grad_norm": 0.6117151881946589,
      "learning_rate": 1.9666068790931733e-05,
      "loss": 0.4877,
      "step": 115
    },
    {
      "epoch": 0.3511161558834658,
      "grad_norm": 0.7027132398898194,
      "learning_rate": 1.9652380187177128e-05,
      "loss": 0.4677,
      "step": 116
    },
    {
      "epoch": 0.35414301929625425,
      "grad_norm": 0.6054380731971206,
      "learning_rate": 1.9638421585599422e-05,
      "loss": 0.4619,
      "step": 117
    },
    {
      "epoch": 0.35716988270904276,
      "grad_norm": 0.6697458443391867,
      "learning_rate": 1.9624193376650708e-05,
      "loss": 0.4725,
      "step": 118
    },
    {
      "epoch": 0.3601967461218313,
      "grad_norm": 0.6320885158836743,
      "learning_rate": 1.960969595832457e-05,
      "loss": 0.4735,
      "step": 119
    },
    {
      "epoch": 0.36322360953461974,
      "grad_norm": 0.7297143353827921,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4995,
      "step": 120
    },
    {
      "epoch": 0.36625047294740826,
      "grad_norm": 0.6882788090108111,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.4802,
      "step": 121
    },
    {
      "epoch": 0.3692773363601968,
      "grad_norm": 0.7103992968091178,
      "learning_rate": 1.956459253990476e-05,
      "loss": 0.4671,
      "step": 122
    },
    {
      "epoch": 0.37230419977298523,
      "grad_norm": 0.6596711674879402,
      "learning_rate": 1.9549022414440738e-05,
      "loss": 0.4845,
      "step": 123
    },
    {
      "epoch": 0.37533106318577375,
      "grad_norm": 0.7056786527520791,
      "learning_rate": 1.9533185182292705e-05,
      "loss": 0.5066,
      "step": 124
    },
    {
      "epoch": 0.37835792659856227,
      "grad_norm": 0.6463010214185686,
      "learning_rate": 1.9517081286462082e-05,
      "loss": 0.4658,
      "step": 125
    },
    {
      "epoch": 0.3813847900113507,
      "grad_norm": 0.6449317177074809,
      "learning_rate": 1.9500711177409456e-05,
      "loss": 0.4439,
      "step": 126
    },
    {
      "epoch": 0.38441165342413924,
      "grad_norm": 0.6711182804100264,
      "learning_rate": 1.9484075313041968e-05,
      "loss": 0.4639,
      "step": 127
    },
    {
      "epoch": 0.38743851683692776,
      "grad_norm": 0.6216750305908364,
      "learning_rate": 1.9467174158700507e-05,
      "loss": 0.4639,
      "step": 128
    },
    {
      "epoch": 0.3904653802497162,
      "grad_norm": 0.6715332804368,
      "learning_rate": 1.9450008187146685e-05,
      "loss": 0.5023,
      "step": 129
    },
    {
      "epoch": 0.39349224366250474,
      "grad_norm": 0.7147058467691788,
      "learning_rate": 1.9432577878549635e-05,
      "loss": 0.468,
      "step": 130
    },
    {
      "epoch": 0.39651910707529325,
      "grad_norm": 0.6787351073466207,
      "learning_rate": 1.9414883720472557e-05,
      "loss": 0.4886,
      "step": 131
    },
    {
      "epoch": 0.3995459704880817,
      "grad_norm": 0.6542651634964172,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.4928,
      "step": 132
    },
    {
      "epoch": 0.40257283390087023,
      "grad_norm": 0.6730854186948818,
      "learning_rate": 1.937870584301945e-05,
      "loss": 0.4568,
      "step": 133
    },
    {
      "epoch": 0.40559969731365875,
      "grad_norm": 0.6525479754112047,
      "learning_rate": 1.9360223135616423e-05,
      "loss": 0.4742,
      "step": 134
    },
    {
      "epoch": 0.4086265607264472,
      "grad_norm": 0.6176641561031895,
      "learning_rate": 1.9341478602651068e-05,
      "loss": 0.4674,
      "step": 135
    },
    {
      "epoch": 0.4116534241392357,
      "grad_norm": 0.6178984085617434,
      "learning_rate": 1.932247276844826e-05,
      "loss": 0.4748,
      "step": 136
    },
    {
      "epoch": 0.41468028755202424,
      "grad_norm": 0.6321251612378418,
      "learning_rate": 1.9303206164642037e-05,
      "loss": 0.4763,
      "step": 137
    },
    {
      "epoch": 0.4177071509648127,
      "grad_norm": 0.6539134673370697,
      "learning_rate": 1.9283679330160726e-05,
      "loss": 0.5008,
      "step": 138
    },
    {
      "epoch": 0.4207340143776012,
      "grad_norm": 0.629247203087845,
      "learning_rate": 1.9263892811211865e-05,
      "loss": 0.4959,
      "step": 139
    },
    {
      "epoch": 0.42376087779038973,
      "grad_norm": 0.6615716325218832,
      "learning_rate": 1.9243847161266924e-05,
      "loss": 0.4781,
      "step": 140
    },
    {
      "epoch": 0.4267877412031782,
      "grad_norm": 0.6807124736623016,
      "learning_rate": 1.9223542941045817e-05,
      "loss": 0.4456,
      "step": 141
    },
    {
      "epoch": 0.4298146046159667,
      "grad_norm": 0.5781124762880367,
      "learning_rate": 1.920298071850123e-05,
      "loss": 0.4461,
      "step": 142
    },
    {
      "epoch": 0.4328414680287552,
      "grad_norm": 0.6966598711355443,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.446,
      "step": 143
    },
    {
      "epoch": 0.4358683314415437,
      "grad_norm": 0.6567319123851109,
      "learning_rate": 1.9161084574320696e-05,
      "loss": 0.4751,
      "step": 144
    },
    {
      "epoch": 0.4388951948543322,
      "grad_norm": 0.7402740815269796,
      "learning_rate": 1.913975182460996e-05,
      "loss": 0.4812,
      "step": 145
    },
    {
      "epoch": 0.4419220582671207,
      "grad_norm": 0.611532484278698,
      "learning_rate": 1.9118163416393392e-05,
      "loss": 0.4636,
      "step": 146
    },
    {
      "epoch": 0.4449489216799092,
      "grad_norm": 0.7063333694418584,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.492,
      "step": 147
    },
    {
      "epoch": 0.4479757850926977,
      "grad_norm": 0.6347556617125097,
      "learning_rate": 1.9074222047073945e-05,
      "loss": 0.4441,
      "step": 148
    },
    {
      "epoch": 0.4510026485054862,
      "grad_norm": 0.7009661095563158,
      "learning_rate": 1.9051870315105626e-05,
      "loss": 0.4595,
      "step": 149
    },
    {
      "epoch": 0.4540295119182747,
      "grad_norm": 0.6155698353426865,
      "learning_rate": 1.9029265382866216e-05,
      "loss": 0.5157,
      "step": 150
    },
    {
      "epoch": 0.4570563753310632,
      "grad_norm": 0.7565512880981403,
      "learning_rate": 1.9006407882664256e-05,
      "loss": 0.4718,
      "step": 151
    },
    {
      "epoch": 0.4600832387438517,
      "grad_norm": 0.6377798494614118,
      "learning_rate": 1.8983298453873172e-05,
      "loss": 0.4734,
      "step": 152
    },
    {
      "epoch": 0.46311010215664017,
      "grad_norm": 0.7115285069601067,
      "learning_rate": 1.895993774291336e-05,
      "loss": 0.4446,
      "step": 153
    },
    {
      "epoch": 0.4661369655694287,
      "grad_norm": 0.626546307492942,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.4235,
      "step": 154
    },
    {
      "epoch": 0.4691638289822172,
      "grad_norm": 0.6989309565590944,
      "learning_rate": 1.891246509529539e-05,
      "loss": 0.4648,
      "step": 155
    },
    {
      "epoch": 0.47219069239500566,
      "grad_norm": 0.6264462766886602,
      "learning_rate": 1.8888354486549238e-05,
      "loss": 0.4734,
      "step": 156
    },
    {
      "epoch": 0.4752175558077942,
      "grad_norm": 0.6586007898441965,
      "learning_rate": 1.886399525142122e-05,
      "loss": 0.4606,
      "step": 157
    },
    {
      "epoch": 0.4782444192205827,
      "grad_norm": 0.6762512929813792,
      "learning_rate": 1.8839388071291506e-05,
      "loss": 0.4673,
      "step": 158
    },
    {
      "epoch": 0.48127128263337116,
      "grad_norm": 0.6520802178788475,
      "learning_rate": 1.881453363447582e-05,
      "loss": 0.4399,
      "step": 159
    },
    {
      "epoch": 0.48429814604615967,
      "grad_norm": 0.6315321840707008,
      "learning_rate": 1.8789432636206197e-05,
      "loss": 0.4565,
      "step": 160
    },
    {
      "epoch": 0.4873250094589482,
      "grad_norm": 0.6173557754039471,
      "learning_rate": 1.8764085778611507e-05,
      "loss": 0.4882,
      "step": 161
    },
    {
      "epoch": 0.49035187287173665,
      "grad_norm": 0.6632428987221951,
      "learning_rate": 1.873849377069785e-05,
      "loss": 0.4563,
      "step": 162
    },
    {
      "epoch": 0.49337873628452517,
      "grad_norm": 0.6173524446577745,
      "learning_rate": 1.87126573283287e-05,
      "loss": 0.4737,
      "step": 163
    },
    {
      "epoch": 0.4964055996973137,
      "grad_norm": 0.7448052847307424,
      "learning_rate": 1.8686577174204887e-05,
      "loss": 0.4581,
      "step": 164
    },
    {
      "epoch": 0.49943246311010214,
      "grad_norm": 0.6162007595348211,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.4597,
      "step": 165
    },
    {
      "epoch": 0.5024593265228906,
      "grad_norm": 0.644579592262543,
      "learning_rate": 1.863368865556191e-05,
      "loss": 0.4656,
      "step": 166
    },
    {
      "epoch": 0.5054861899356792,
      "grad_norm": 0.6081483512075876,
      "learning_rate": 1.8606881770448305e-05,
      "loss": 0.446,
      "step": 167
    },
    {
      "epoch": 0.5085130533484676,
      "grad_norm": 0.6164087468455623,
      "learning_rate": 1.8579834132349773e-05,
      "loss": 0.4597,
      "step": 168
    },
    {
      "epoch": 0.5115399167612561,
      "grad_norm": 0.5778923434429061,
      "learning_rate": 1.8552546497846893e-05,
      "loss": 0.475,
      "step": 169
    },
    {
      "epoch": 0.5145667801740447,
      "grad_norm": 0.6904024690925634,
      "learning_rate": 1.8525019630233463e-05,
      "loss": 0.4591,
      "step": 170
    },
    {
      "epoch": 0.5175936435868331,
      "grad_norm": 0.5733852004940836,
      "learning_rate": 1.8497254299495147e-05,
      "loss": 0.454,
      "step": 171
    },
    {
      "epoch": 0.5206205069996216,
      "grad_norm": 0.7226227274606795,
      "learning_rate": 1.8469251282287925e-05,
      "loss": 0.4801,
      "step": 172
    },
    {
      "epoch": 0.5236473704124102,
      "grad_norm": 0.6999327344798008,
      "learning_rate": 1.8441011361916387e-05,
      "loss": 0.4891,
      "step": 173
    },
    {
      "epoch": 0.5266742338251986,
      "grad_norm": 0.6731236040321812,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4923,
      "step": 174
    },
    {
      "epoch": 0.5297010972379871,
      "grad_norm": 0.7284066217196885,
      "learning_rate": 1.8383823978010077e-05,
      "loss": 0.4535,
      "step": 175
    },
    {
      "epoch": 0.5327279606507757,
      "grad_norm": 0.6675316294269169,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.4691,
      "step": 176
    },
    {
      "epoch": 0.5357548240635641,
      "grad_norm": 0.6430468297631092,
      "learning_rate": 1.8325698546347714e-05,
      "loss": 0.438,
      "step": 177
    },
    {
      "epoch": 0.5387816874763526,
      "grad_norm": 0.6236167988296556,
      "learning_rate": 1.8296286090880362e-05,
      "loss": 0.4764,
      "step": 178
    },
    {
      "epoch": 0.5418085508891411,
      "grad_norm": 0.6833528437786831,
      "learning_rate": 1.8266641570456915e-05,
      "loss": 0.4594,
      "step": 179
    },
    {
      "epoch": 0.5448354143019296,
      "grad_norm": 0.6191943776744446,
      "learning_rate": 1.8236765814298328e-05,
      "loss": 0.4311,
      "step": 180
    },
    {
      "epoch": 0.5478622777147181,
      "grad_norm": 0.7094813750809941,
      "learning_rate": 1.820665965809373e-05,
      "loss": 0.4569,
      "step": 181
    },
    {
      "epoch": 0.5508891411275066,
      "grad_norm": 0.6172794257535029,
      "learning_rate": 1.8176323943977034e-05,
      "loss": 0.4462,
      "step": 182
    },
    {
      "epoch": 0.5539160045402951,
      "grad_norm": 0.6970614533926457,
      "learning_rate": 1.814575952050336e-05,
      "loss": 0.5002,
      "step": 183
    },
    {
      "epoch": 0.5569428679530836,
      "grad_norm": 0.6134218686673267,
      "learning_rate": 1.8114967242625342e-05,
      "loss": 0.4541,
      "step": 184
    },
    {
      "epoch": 0.5599697313658721,
      "grad_norm": 0.6279799927111867,
      "learning_rate": 1.808394797166919e-05,
      "loss": 0.4619,
      "step": 185
    },
    {
      "epoch": 0.5629965947786606,
      "grad_norm": 0.6062609988486924,
      "learning_rate": 1.8052702575310588e-05,
      "loss": 0.4572,
      "step": 186
    },
    {
      "epoch": 0.5660234581914491,
      "grad_norm": 0.6765594721594622,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.4923,
      "step": 187
    },
    {
      "epoch": 0.5690503216042376,
      "grad_norm": 0.5989714922185403,
      "learning_rate": 1.7989536908690413e-05,
      "loss": 0.4566,
      "step": 188
    },
    {
      "epoch": 0.5720771850170261,
      "grad_norm": 0.5654098894953311,
      "learning_rate": 1.7957618405308323e-05,
      "loss": 0.4383,
      "step": 189
    },
    {
      "epoch": 0.5751040484298146,
      "grad_norm": 0.6094404762659081,
      "learning_rate": 1.792547731023332e-05,
      "loss": 0.4665,
      "step": 190
    },
    {
      "epoch": 0.5781309118426031,
      "grad_norm": 0.578404340650778,
      "learning_rate": 1.789311452252092e-05,
      "loss": 0.4566,
      "step": 191
    },
    {
      "epoch": 0.5811577752553916,
      "grad_norm": 0.5887549251095974,
      "learning_rate": 1.7860530947427878e-05,
      "loss": 0.4377,
      "step": 192
    },
    {
      "epoch": 0.58418463866818,
      "grad_norm": 0.5878909756117219,
      "learning_rate": 1.782772749638682e-05,
      "loss": 0.4443,
      "step": 193
    },
    {
      "epoch": 0.5872115020809686,
      "grad_norm": 0.6230290972876099,
      "learning_rate": 1.779470508698079e-05,
      "loss": 0.4407,
      "step": 194
    },
    {
      "epoch": 0.5902383654937571,
      "grad_norm": 0.5883712349316433,
      "learning_rate": 1.776146464291757e-05,
      "loss": 0.4586,
      "step": 195
    },
    {
      "epoch": 0.5932652289065455,
      "grad_norm": 0.5818914639555242,
      "learning_rate": 1.772800709400383e-05,
      "loss": 0.4422,
      "step": 196
    },
    {
      "epoch": 0.5962920923193341,
      "grad_norm": 0.5925425243464446,
      "learning_rate": 1.7694333376119144e-05,
      "loss": 0.4741,
      "step": 197
    },
    {
      "epoch": 0.5993189557321226,
      "grad_norm": 0.5904170861473932,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4431,
      "step": 198
    },
    {
      "epoch": 0.602345819144911,
      "grad_norm": 0.6806471413231173,
      "learning_rate": 1.762634120716238e-05,
      "loss": 0.5139,
      "step": 199
    },
    {
      "epoch": 0.6053726825576996,
      "grad_norm": 0.6286910641028791,
      "learning_rate": 1.7592024657977432e-05,
      "loss": 0.4989,
      "step": 200
    },
    {
      "epoch": 0.6083995459704881,
      "grad_norm": 0.6245618264528192,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4511,
      "step": 201
    },
    {
      "epoch": 0.6114264093832765,
      "grad_norm": 0.6132991964860224,
      "learning_rate": 1.75227554297058e-05,
      "loss": 0.4676,
      "step": 202
    },
    {
      "epoch": 0.6144532727960651,
      "grad_norm": 0.6154910007964906,
      "learning_rate": 1.7487804688228327e-05,
      "loss": 0.5012,
      "step": 203
    },
    {
      "epoch": 0.6174801362088536,
      "grad_norm": 0.6020062397729947,
      "learning_rate": 1.745264449675755e-05,
      "loss": 0.4303,
      "step": 204
    },
    {
      "epoch": 0.620506999621642,
      "grad_norm": 0.6210587387537155,
      "learning_rate": 1.7417275838799596e-05,
      "loss": 0.4502,
      "step": 205
    },
    {
      "epoch": 0.6235338630344306,
      "grad_norm": 0.5997066355321784,
      "learning_rate": 1.7381699703691866e-05,
      "loss": 0.4353,
      "step": 206
    },
    {
      "epoch": 0.626560726447219,
      "grad_norm": 0.5985750220470296,
      "learning_rate": 1.734591708657533e-05,
      "loss": 0.4601,
      "step": 207
    },
    {
      "epoch": 0.6295875898600075,
      "grad_norm": 0.6082984419772222,
      "learning_rate": 1.730992898836672e-05,
      "loss": 0.458,
      "step": 208
    },
    {
      "epoch": 0.6326144532727961,
      "grad_norm": 0.6288608594750572,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.4866,
      "step": 209
    },
    {
      "epoch": 0.6356413166855845,
      "grad_norm": 0.624761341133163,
      "learning_rate": 1.72373403810507e-05,
      "loss": 0.5011,
      "step": 210
    },
    {
      "epoch": 0.638668180098373,
      "grad_norm": 0.5636976950324082,
      "learning_rate": 1.720074190240269e-05,
      "loss": 0.455,
      "step": 211
    },
    {
      "epoch": 0.6416950435111616,
      "grad_norm": 0.6045340371994375,
      "learning_rate": 1.7163942003524574e-05,
      "loss": 0.4496,
      "step": 212
    },
    {
      "epoch": 0.64472190692395,
      "grad_norm": 0.6246910650251282,
      "learning_rate": 1.7126941713788633e-05,
      "loss": 0.4687,
      "step": 213
    },
    {
      "epoch": 0.6477487703367385,
      "grad_norm": 0.6048947001058114,
      "learning_rate": 1.70897420681725e-05,
      "loss": 0.4829,
      "step": 214
    },
    {
      "epoch": 0.6507756337495271,
      "grad_norm": 0.6363021906243671,
      "learning_rate": 1.7052344107230244e-05,
      "loss": 0.4543,
      "step": 215
    },
    {
      "epoch": 0.6538024971623155,
      "grad_norm": 0.5683959474931025,
      "learning_rate": 1.7014748877063212e-05,
      "loss": 0.4512,
      "step": 216
    },
    {
      "epoch": 0.656829360575104,
      "grad_norm": 0.590705297248844,
      "learning_rate": 1.697695742929082e-05,
      "loss": 0.4765,
      "step": 217
    },
    {
      "epoch": 0.6598562239878926,
      "grad_norm": 0.6162427119097076,
      "learning_rate": 1.693897082102109e-05,
      "loss": 0.4874,
      "step": 218
    },
    {
      "epoch": 0.662883087400681,
      "grad_norm": 0.5762169004257623,
      "learning_rate": 1.6900790114821122e-05,
      "loss": 0.4632,
      "step": 219
    },
    {
      "epoch": 0.6659099508134695,
      "grad_norm": 0.6012524947073945,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.4125,
      "step": 220
    },
    {
      "epoch": 0.6689368142262581,
      "grad_norm": 0.5213993348344866,
      "learning_rate": 1.682385068601563e-05,
      "loss": 0.4436,
      "step": 221
    },
    {
      "epoch": 0.6719636776390465,
      "grad_norm": 0.6391231575011538,
      "learning_rate": 1.6785094115571323e-05,
      "loss": 0.4496,
      "step": 222
    },
    {
      "epoch": 0.6749905410518351,
      "grad_norm": 0.5860781493940427,
      "learning_rate": 1.674614775145901e-05,
      "loss": 0.4325,
      "step": 223
    },
    {
      "epoch": 0.6780174044646236,
      "grad_norm": 0.6177729967982134,
      "learning_rate": 1.670701268309221e-05,
      "loss": 0.4535,
      "step": 224
    },
    {
      "epoch": 0.681044267877412,
      "grad_norm": 0.592088195171676,
      "learning_rate": 1.666769000516292e-05,
      "loss": 0.4445,
      "step": 225
    },
    {
      "epoch": 0.6840711312902006,
      "grad_norm": 0.6032920039386354,
      "learning_rate": 1.6628180817610963e-05,
      "loss": 0.4652,
      "step": 226
    },
    {
      "epoch": 0.687097994702989,
      "grad_norm": 0.584142199464464,
      "learning_rate": 1.658848622559325e-05,
      "loss": 0.4672,
      "step": 227
    },
    {
      "epoch": 0.6901248581157775,
      "grad_norm": 0.5141795455869701,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4346,
      "step": 228
    },
    {
      "epoch": 0.6931517215285661,
      "grad_norm": 0.620751280657942,
      "learning_rate": 1.6508545274687936e-05,
      "loss": 0.4902,
      "step": 229
    },
    {
      "epoch": 0.6961785849413545,
      "grad_norm": 0.5307868762464378,
      "learning_rate": 1.6468301151920576e-05,
      "loss": 0.4298,
      "step": 230
    },
    {
      "epoch": 0.699205448354143,
      "grad_norm": 0.5693977942565753,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.476,
      "step": 231
    },
    {
      "epoch": 0.7022323117669316,
      "grad_norm": 0.5976434354939867,
      "learning_rate": 1.6387271240298082e-05,
      "loss": 0.4648,
      "step": 232
    },
    {
      "epoch": 0.70525917517972,
      "grad_norm": 0.5338202178939262,
      "learning_rate": 1.6346487718023762e-05,
      "loss": 0.4324,
      "step": 233
    },
    {
      "epoch": 0.7082860385925085,
      "grad_norm": 0.5911800531857866,
      "learning_rate": 1.6305526670845225e-05,
      "loss": 0.4827,
      "step": 234
    },
    {
      "epoch": 0.7113129020052971,
      "grad_norm": 0.5396182502659621,
      "learning_rate": 1.6264389244531015e-05,
      "loss": 0.4478,
      "step": 235
    },
    {
      "epoch": 0.7143397654180855,
      "grad_norm": 0.5323330525505672,
      "learning_rate": 1.6223076589783368e-05,
      "loss": 0.4096,
      "step": 236
    },
    {
      "epoch": 0.717366628830874,
      "grad_norm": 0.5701117874434086,
      "learning_rate": 1.6181589862206053e-05,
      "loss": 0.4736,
      "step": 237
    },
    {
      "epoch": 0.7203934922436626,
      "grad_norm": 0.5840093135240358,
      "learning_rate": 1.613993022227202e-05,
      "loss": 0.4528,
      "step": 238
    },
    {
      "epoch": 0.723420355656451,
      "grad_norm": 0.6024058976210077,
      "learning_rate": 1.6098098835290955e-05,
      "loss": 0.4756,
      "step": 239
    },
    {
      "epoch": 0.7264472190692395,
      "grad_norm": 0.6168880034198778,
      "learning_rate": 1.6056096871376667e-05,
      "loss": 0.4184,
      "step": 240
    },
    {
      "epoch": 0.729474082482028,
      "grad_norm": 0.550641358597231,
      "learning_rate": 1.6013925505414386e-05,
      "loss": 0.4439,
      "step": 241
    },
    {
      "epoch": 0.7325009458948165,
      "grad_norm": 0.6370378803561358,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.4515,
      "step": 242
    },
    {
      "epoch": 0.735527809307605,
      "grad_norm": 0.5204395799064176,
      "learning_rate": 1.5929079290546408e-05,
      "loss": 0.4355,
      "step": 243
    },
    {
      "epoch": 0.7385546727203935,
      "grad_norm": 0.6046579886324439,
      "learning_rate": 1.5886406814971728e-05,
      "loss": 0.461,
      "step": 244
    },
    {
      "epoch": 0.741581536133182,
      "grad_norm": 0.5580701920683901,
      "learning_rate": 1.584356968394471e-05,
      "loss": 0.4376,
      "step": 245
    },
    {
      "epoch": 0.7446083995459705,
      "grad_norm": 0.5613850747261606,
      "learning_rate": 1.5800569095711983e-05,
      "loss": 0.4807,
      "step": 246
    },
    {
      "epoch": 0.747635262958759,
      "grad_norm": 0.5525638182055364,
      "learning_rate": 1.575740625309244e-05,
      "loss": 0.4687,
      "step": 247
    },
    {
      "epoch": 0.7506621263715475,
      "grad_norm": 0.581489547733923,
      "learning_rate": 1.5714082363443576e-05,
      "loss": 0.4757,
      "step": 248
    },
    {
      "epoch": 0.753688989784336,
      "grad_norm": 0.5665898871787093,
      "learning_rate": 1.5670598638627707e-05,
      "loss": 0.4571,
      "step": 249
    },
    {
      "epoch": 0.7567158531971245,
      "grad_norm": 0.5719018697005243,
      "learning_rate": 1.5626956294978103e-05,
      "loss": 0.4319,
      "step": 250
    },
    {
      "epoch": 0.759742716609913,
      "grad_norm": 0.5955991048110572,
      "learning_rate": 1.5583156553264923e-05,
      "loss": 0.4292,
      "step": 251
    },
    {
      "epoch": 0.7627695800227015,
      "grad_norm": 0.6602276921957603,
      "learning_rate": 1.5539200638661106e-05,
      "loss": 0.4525,
      "step": 252
    },
    {
      "epoch": 0.76579644343549,
      "grad_norm": 0.5647884583743515,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.4626,
      "step": 253
    },
    {
      "epoch": 0.7688233068482785,
      "grad_norm": 0.7054424845478967,
      "learning_rate": 1.5450825213281317e-05,
      "loss": 0.4577,
      "step": 254
    },
    {
      "epoch": 0.771850170261067,
      "grad_norm": 0.6207669242248656,
      "learning_rate": 1.5406408174555978e-05,
      "loss": 0.4207,
      "step": 255
    },
    {
      "epoch": 0.7748770336738555,
      "grad_norm": 0.5521778957471621,
      "learning_rate": 1.5361839906972095e-05,
      "loss": 0.4103,
      "step": 256
    },
    {
      "epoch": 0.777903897086644,
      "grad_norm": 0.7317913688406207,
      "learning_rate": 1.531712165719992e-05,
      "loss": 0.437,
      "step": 257
    },
    {
      "epoch": 0.7809307604994324,
      "grad_norm": 0.5188816297108134,
      "learning_rate": 1.5272254676105026e-05,
      "loss": 0.4605,
      "step": 258
    },
    {
      "epoch": 0.783957623912221,
      "grad_norm": 0.6252471165524401,
      "learning_rate": 1.5227240218713326e-05,
      "loss": 0.4353,
      "step": 259
    },
    {
      "epoch": 0.7869844873250095,
      "grad_norm": 0.6034710012703107,
      "learning_rate": 1.5182079544175957e-05,
      "loss": 0.4715,
      "step": 260
    },
    {
      "epoch": 0.7900113507377979,
      "grad_norm": 0.5758162063303118,
      "learning_rate": 1.5136773915734067e-05,
      "loss": 0.4489,
      "step": 261
    },
    {
      "epoch": 0.7930382141505865,
      "grad_norm": 0.6351383122797396,
      "learning_rate": 1.5091324600683472e-05,
      "loss": 0.4473,
      "step": 262
    },
    {
      "epoch": 0.796065077563375,
      "grad_norm": 0.5791080706096535,
      "learning_rate": 1.5045732870339213e-05,
      "loss": 0.4865,
      "step": 263
    },
    {
      "epoch": 0.7990919409761634,
      "grad_norm": 0.5833343821256102,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4451,
      "step": 264
    },
    {
      "epoch": 0.802118804388952,
      "grad_norm": 0.6667205638269371,
      "learning_rate": 1.4954127268912525e-05,
      "loss": 0.4564,
      "step": 265
    },
    {
      "epoch": 0.8051456678017405,
      "grad_norm": 0.5507926227382565,
      "learning_rate": 1.4908115960235683e-05,
      "loss": 0.4678,
      "step": 266
    },
    {
      "epoch": 0.8081725312145289,
      "grad_norm": 0.6280136813805461,
      "learning_rate": 1.4861967361004687e-05,
      "loss": 0.4628,
      "step": 267
    },
    {
      "epoch": 0.8111993946273175,
      "grad_norm": 0.5548303535061571,
      "learning_rate": 1.4815682762095065e-05,
      "loss": 0.4328,
      "step": 268
    },
    {
      "epoch": 0.814226258040106,
      "grad_norm": 0.6167752775833147,
      "learning_rate": 1.476926345818654e-05,
      "loss": 0.4505,
      "step": 269
    },
    {
      "epoch": 0.8172531214528944,
      "grad_norm": 0.5453107837103142,
      "learning_rate": 1.472271074772683e-05,
      "loss": 0.4374,
      "step": 270
    },
    {
      "epoch": 0.820279984865683,
      "grad_norm": 0.5661867840834097,
      "learning_rate": 1.4676025932895315e-05,
      "loss": 0.4416,
      "step": 271
    },
    {
      "epoch": 0.8233068482784714,
      "grad_norm": 0.5744750357185142,
      "learning_rate": 1.4629210319566626e-05,
      "loss": 0.4414,
      "step": 272
    },
    {
      "epoch": 0.8263337116912599,
      "grad_norm": 0.557043473985475,
      "learning_rate": 1.4582265217274105e-05,
      "loss": 0.4482,
      "step": 273
    },
    {
      "epoch": 0.8293605751040485,
      "grad_norm": 0.5129476555713144,
      "learning_rate": 1.4535191939173179e-05,
      "loss": 0.4402,
      "step": 274
    },
    {
      "epoch": 0.8323874385168369,
      "grad_norm": 0.5663442978416631,
      "learning_rate": 1.4487991802004625e-05,
      "loss": 0.4488,
      "step": 275
    },
    {
      "epoch": 0.8354143019296254,
      "grad_norm": 0.5477943321049938,
      "learning_rate": 1.4440666126057743e-05,
      "loss": 0.4286,
      "step": 276
    },
    {
      "epoch": 0.838441165342414,
      "grad_norm": 0.5941854410561181,
      "learning_rate": 1.4393216235133427e-05,
      "loss": 0.4554,
      "step": 277
    },
    {
      "epoch": 0.8414680287552024,
      "grad_norm": 0.5235597813977861,
      "learning_rate": 1.4345643456507126e-05,
      "loss": 0.4345,
      "step": 278
    },
    {
      "epoch": 0.8444948921679909,
      "grad_norm": 0.5950903975671218,
      "learning_rate": 1.4297949120891718e-05,
      "loss": 0.4396,
      "step": 279
    },
    {
      "epoch": 0.8475217555807795,
      "grad_norm": 0.5710408902833406,
      "learning_rate": 1.4250134562400301e-05,
      "loss": 0.4877,
      "step": 280
    },
    {
      "epoch": 0.8505486189935679,
      "grad_norm": 0.5638725014761413,
      "learning_rate": 1.4202201118508863e-05,
      "loss": 0.4267,
      "step": 281
    },
    {
      "epoch": 0.8535754824063564,
      "grad_norm": 0.6225407085931399,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.455,
      "step": 282
    },
    {
      "epoch": 0.856602345819145,
      "grad_norm": 0.603119894922715,
      "learning_rate": 1.4105982941019751e-05,
      "loss": 0.4414,
      "step": 283
    },
    {
      "epoch": 0.8596292092319334,
      "grad_norm": 0.6460924211895752,
      "learning_rate": 1.405770089885134e-05,
      "loss": 0.5052,
      "step": 284
    },
    {
      "epoch": 0.8626560726447219,
      "grad_norm": 0.5549576920365494,
      "learning_rate": 1.4009305354066138e-05,
      "loss": 0.4415,
      "step": 285
    },
    {
      "epoch": 0.8656829360575105,
      "grad_norm": 0.6062500402242622,
      "learning_rate": 1.396079766039157e-05,
      "loss": 0.4162,
      "step": 286
    },
    {
      "epoch": 0.8687097994702989,
      "grad_norm": 0.598862738611488,
      "learning_rate": 1.39121791746921e-05,
      "loss": 0.4469,
      "step": 287
    },
    {
      "epoch": 0.8717366628830874,
      "grad_norm": 0.5987421250819631,
      "learning_rate": 1.3863451256931286e-05,
      "loss": 0.438,
      "step": 288
    },
    {
      "epoch": 0.874763526295876,
      "grad_norm": 0.6379362856437264,
      "learning_rate": 1.381461527013374e-05,
      "loss": 0.4386,
      "step": 289
    },
    {
      "epoch": 0.8777903897086644,
      "grad_norm": 0.5743854647009672,
      "learning_rate": 1.3765672580346986e-05,
      "loss": 0.4506,
      "step": 290
    },
    {
      "epoch": 0.8808172531214529,
      "grad_norm": 0.6954210871152465,
      "learning_rate": 1.3716624556603275e-05,
      "loss": 0.437,
      "step": 291
    },
    {
      "epoch": 0.8838441165342414,
      "grad_norm": 0.6224537750665278,
      "learning_rate": 1.3667472570881264e-05,
      "loss": 0.4519,
      "step": 292
    },
    {
      "epoch": 0.8868709799470299,
      "grad_norm": 0.569215588097282,
      "learning_rate": 1.361821799806765e-05,
      "loss": 0.4227,
      "step": 293
    },
    {
      "epoch": 0.8898978433598184,
      "grad_norm": 0.7020677259891825,
      "learning_rate": 1.356886221591872e-05,
      "loss": 0.4238,
      "step": 294
    },
    {
      "epoch": 0.8929247067726069,
      "grad_norm": 0.6613381233174982,
      "learning_rate": 1.3519406605021797e-05,
      "loss": 0.4671,
      "step": 295
    },
    {
      "epoch": 0.8959515701853954,
      "grad_norm": 0.6658023274346871,
      "learning_rate": 1.3469852548756626e-05,
      "loss": 0.4282,
      "step": 296
    },
    {
      "epoch": 0.8989784335981839,
      "grad_norm": 0.6039718483326141,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4487,
      "step": 297
    },
    {
      "epoch": 0.9020052970109724,
      "grad_norm": 0.5928115354710685,
      "learning_rate": 1.3370454647370418e-05,
      "loss": 0.4306,
      "step": 298
    },
    {
      "epoch": 0.9050321604237609,
      "grad_norm": 0.6297118136475328,
      "learning_rate": 1.3320613582622354e-05,
      "loss": 0.408,
      "step": 299
    },
    {
      "epoch": 0.9080590238365494,
      "grad_norm": 0.5631704790574225,
      "learning_rate": 1.3270679633174219e-05,
      "loss": 0.4348,
      "step": 300
    },
    {
      "epoch": 0.9110858872493379,
      "grad_norm": 0.5601372412079054,
      "learning_rate": 1.3220654195785917e-05,
      "loss": 0.4384,
      "step": 301
    },
    {
      "epoch": 0.9141127506621264,
      "grad_norm": 0.6116931393291521,
      "learning_rate": 1.3170538669776469e-05,
      "loss": 0.4372,
      "step": 302
    },
    {
      "epoch": 0.9171396140749148,
      "grad_norm": 0.5785602434152506,
      "learning_rate": 1.3120334456984871e-05,
      "loss": 0.4298,
      "step": 303
    },
    {
      "epoch": 0.9201664774877034,
      "grad_norm": 0.5864172787075979,
      "learning_rate": 1.3070042961730878e-05,
      "loss": 0.4419,
      "step": 304
    },
    {
      "epoch": 0.9231933409004919,
      "grad_norm": 0.5777144316756396,
      "learning_rate": 1.3019665590775717e-05,
      "loss": 0.4395,
      "step": 305
    },
    {
      "epoch": 0.9262202043132803,
      "grad_norm": 0.6290443229794741,
      "learning_rate": 1.296920375328275e-05,
      "loss": 0.415,
      "step": 306
    },
    {
      "epoch": 0.9292470677260689,
      "grad_norm": 0.604212434657053,
      "learning_rate": 1.2918658860778046e-05,
      "loss": 0.4586,
      "step": 307
    },
    {
      "epoch": 0.9322739311388574,
      "grad_norm": 0.5768569815666824,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.4267,
      "step": 308
    },
    {
      "epoch": 0.9353007945516458,
      "grad_norm": 0.6080643887747446,
      "learning_rate": 1.2817325568414299e-05,
      "loss": 0.4304,
      "step": 309
    },
    {
      "epoch": 0.9383276579644344,
      "grad_norm": 0.5949587703622018,
      "learning_rate": 1.2766540003065272e-05,
      "loss": 0.438,
      "step": 310
    },
    {
      "epoch": 0.9413545213772229,
      "grad_norm": 0.6020334120966452,
      "learning_rate": 1.2715677051645259e-05,
      "loss": 0.4111,
      "step": 311
    },
    {
      "epoch": 0.9443813847900113,
      "grad_norm": 0.6333161140126744,
      "learning_rate": 1.266473813690035e-05,
      "loss": 0.4273,
      "step": 312
    },
    {
      "epoch": 0.9474082482027999,
      "grad_norm": 0.5351338292144018,
      "learning_rate": 1.2613724683701491e-05,
      "loss": 0.415,
      "step": 313
    },
    {
      "epoch": 0.9504351116155884,
      "grad_norm": 0.5748297206760773,
      "learning_rate": 1.2562638119004627e-05,
      "loss": 0.4389,
      "step": 314
    },
    {
      "epoch": 0.9534619750283768,
      "grad_norm": 0.6118311897287021,
      "learning_rate": 1.2511479871810792e-05,
      "loss": 0.4258,
      "step": 315
    },
    {
      "epoch": 0.9564888384411654,
      "grad_norm": 0.5724280253975008,
      "learning_rate": 1.2460251373126136e-05,
      "loss": 0.4368,
      "step": 316
    },
    {
      "epoch": 0.9595157018539539,
      "grad_norm": 0.5419387606143478,
      "learning_rate": 1.2408954055921884e-05,
      "loss": 0.454,
      "step": 317
    },
    {
      "epoch": 0.9625425652667423,
      "grad_norm": 0.6271287805897796,
      "learning_rate": 1.2357589355094275e-05,
      "loss": 0.4519,
      "step": 318
    },
    {
      "epoch": 0.9655694286795309,
      "grad_norm": 0.5450391400283195,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.4393,
      "step": 319
    },
    {
      "epoch": 0.9685962920923193,
      "grad_norm": 0.5938246198781107,
      "learning_rate": 1.2254663551538047e-05,
      "loss": 0.4359,
      "step": 320
    },
    {
      "epoch": 0.9716231555051078,
      "grad_norm": 0.6458508616085348,
      "learning_rate": 1.2203105327865407e-05,
      "loss": 0.4555,
      "step": 321
    },
    {
      "epoch": 0.9746500189178964,
      "grad_norm": 0.5392315001442343,
      "learning_rate": 1.215148547860084e-05,
      "loss": 0.4356,
      "step": 322
    },
    {
      "epoch": 0.9776768823306848,
      "grad_norm": 0.555583837981962,
      "learning_rate": 1.2099805447662485e-05,
      "loss": 0.4331,
      "step": 323
    },
    {
      "epoch": 0.9807037457434733,
      "grad_norm": 0.6233182145710846,
      "learning_rate": 1.2048066680651908e-05,
      "loss": 0.4457,
      "step": 324
    },
    {
      "epoch": 0.9837306091562619,
      "grad_norm": 0.5706575010137057,
      "learning_rate": 1.1996270624813642e-05,
      "loss": 0.4809,
      "step": 325
    },
    {
      "epoch": 0.9867574725690503,
      "grad_norm": 0.5607058562617756,
      "learning_rate": 1.194441872899471e-05,
      "loss": 0.433,
      "step": 326
    },
    {
      "epoch": 0.9897843359818388,
      "grad_norm": 0.6337675952394343,
      "learning_rate": 1.1892512443604103e-05,
      "loss": 0.4092,
      "step": 327
    },
    {
      "epoch": 0.9928111993946274,
      "grad_norm": 0.5601593624177207,
      "learning_rate": 1.1840553220572204e-05,
      "loss": 0.4389,
      "step": 328
    },
    {
      "epoch": 0.9958380628074158,
      "grad_norm": 0.5648069788997735,
      "learning_rate": 1.1788542513310178e-05,
      "loss": 0.4145,
      "step": 329
    },
    {
      "epoch": 0.9988649262202043,
      "grad_norm": 0.6295747555116963,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4647,
      "step": 330
    },
    {
      "epoch": 1.0018917896329929,
      "grad_norm": 0.6763440004521155,
      "learning_rate": 1.1684372466900306e-05,
      "loss": 0.3761,
      "step": 331
    },
    {
      "epoch": 1.0049186530457812,
      "grad_norm": 0.7458101987680528,
      "learning_rate": 1.1632216041612595e-05,
      "loss": 0.387,
      "step": 332
    },
    {
      "epoch": 1.0079455164585698,
      "grad_norm": 0.7357311079701176,
      "learning_rate": 1.15800139597335e-05,
      "loss": 0.3764,
      "step": 333
    },
    {
      "epoch": 1.0109723798713584,
      "grad_norm": 0.7717686853169455,
      "learning_rate": 1.1527767681467472e-05,
      "loss": 0.3635,
      "step": 334
    },
    {
      "epoch": 1.0139992432841467,
      "grad_norm": 0.7065795662400062,
      "learning_rate": 1.1475478668255223e-05,
      "loss": 0.383,
      "step": 335
    },
    {
      "epoch": 1.0170261066969353,
      "grad_norm": 0.9177552875156756,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.3348,
      "step": 336
    },
    {
      "epoch": 1.0200529701097238,
      "grad_norm": 0.6321450925810481,
      "learning_rate": 1.1370778288690947e-05,
      "loss": 0.3591,
      "step": 337
    },
    {
      "epoch": 1.0230798335225122,
      "grad_norm": 0.6289265068415445,
      "learning_rate": 1.1318369851033604e-05,
      "loss": 0.3671,
      "step": 338
    },
    {
      "epoch": 1.0261066969353008,
      "grad_norm": 0.6840203382918636,
      "learning_rate": 1.1265924535737494e-05,
      "loss": 0.3454,
      "step": 339
    },
    {
      "epoch": 1.0291335603480893,
      "grad_norm": 0.6241092308430238,
      "learning_rate": 1.121344380981082e-05,
      "loss": 0.3334,
      "step": 340
    },
    {
      "epoch": 1.0321604237608777,
      "grad_norm": 0.6165040083323652,
      "learning_rate": 1.1160929141252303e-05,
      "loss": 0.3286,
      "step": 341
    },
    {
      "epoch": 1.0351872871736663,
      "grad_norm": 0.6540210711282685,
      "learning_rate": 1.1108381999010111e-05,
      "loss": 0.3532,
      "step": 342
    },
    {
      "epoch": 1.0382141505864548,
      "grad_norm": 0.6361004350122359,
      "learning_rate": 1.1055803852940772e-05,
      "loss": 0.3448,
      "step": 343
    },
    {
      "epoch": 1.0412410139992432,
      "grad_norm": 0.5995394272088068,
      "learning_rate": 1.1003196173768051e-05,
      "loss": 0.3395,
      "step": 344
    },
    {
      "epoch": 1.0442678774120318,
      "grad_norm": 0.6575253459959216,
      "learning_rate": 1.0950560433041825e-05,
      "loss": 0.3384,
      "step": 345
    },
    {
      "epoch": 1.0472947408248203,
      "grad_norm": 0.5986410573344175,
      "learning_rate": 1.0897898103096917e-05,
      "loss": 0.3403,
      "step": 346
    },
    {
      "epoch": 1.0503216042376087,
      "grad_norm": 0.6059712807992255,
      "learning_rate": 1.0845210657011893e-05,
      "loss": 0.3469,
      "step": 347
    },
    {
      "epoch": 1.0533484676503972,
      "grad_norm": 0.5856765463404571,
      "learning_rate": 1.0792499568567885e-05,
      "loss": 0.3432,
      "step": 348
    },
    {
      "epoch": 1.0563753310631858,
      "grad_norm": 0.6060734395289886,
      "learning_rate": 1.0739766312207344e-05,
      "loss": 0.346,
      "step": 349
    },
    {
      "epoch": 1.0594021944759742,
      "grad_norm": 0.594419760076603,
      "learning_rate": 1.068701236299281e-05,
      "loss": 0.3376,
      "step": 350
    },
    {
      "epoch": 1.0624290578887627,
      "grad_norm": 0.6045655187641306,
      "learning_rate": 1.0634239196565646e-05,
      "loss": 0.3369,
      "step": 351
    },
    {
      "epoch": 1.0654559213015513,
      "grad_norm": 0.6392684858621426,
      "learning_rate": 1.0581448289104759e-05,
      "loss": 0.3431,
      "step": 352
    },
    {
      "epoch": 1.0684827847143397,
      "grad_norm": 0.5855656032638442,
      "learning_rate": 1.0528641117285315e-05,
      "loss": 0.3544,
      "step": 353
    },
    {
      "epoch": 1.0715096481271282,
      "grad_norm": 0.5920665399355434,
      "learning_rate": 1.0475819158237426e-05,
      "loss": 0.3562,
      "step": 354
    },
    {
      "epoch": 1.0745365115399168,
      "grad_norm": 0.5693121413786889,
      "learning_rate": 1.0422983889504831e-05,
      "loss": 0.3339,
      "step": 355
    },
    {
      "epoch": 1.0775633749527052,
      "grad_norm": 0.6223813639455659,
      "learning_rate": 1.0370136789003582e-05,
      "loss": 0.3674,
      "step": 356
    },
    {
      "epoch": 1.0805902383654937,
      "grad_norm": 0.5983549752495387,
      "learning_rate": 1.031727933498068e-05,
      "loss": 0.3379,
      "step": 357
    },
    {
      "epoch": 1.0836171017782823,
      "grad_norm": 0.5698102905876657,
      "learning_rate": 1.0264413005972736e-05,
      "loss": 0.3404,
      "step": 358
    },
    {
      "epoch": 1.0866439651910706,
      "grad_norm": 0.5628619535872136,
      "learning_rate": 1.0211539280764617e-05,
      "loss": 0.3228,
      "step": 359
    },
    {
      "epoch": 1.0896708286038592,
      "grad_norm": 0.5731047846086312,
      "learning_rate": 1.015865963834808e-05,
      "loss": 0.3421,
      "step": 360
    },
    {
      "epoch": 1.0926976920166478,
      "grad_norm": 0.6109810528662306,
      "learning_rate": 1.0105775557880398e-05,
      "loss": 0.3647,
      "step": 361
    },
    {
      "epoch": 1.0957245554294364,
      "grad_norm": 0.5621973962374506,
      "learning_rate": 1.0052888518642978e-05,
      "loss": 0.3575,
      "step": 362
    },
    {
      "epoch": 1.0987514188422247,
      "grad_norm": 0.5870593645318578,
      "learning_rate": 1e-05,
      "loss": 0.3639,
      "step": 363
    },
    {
      "epoch": 1.1017782822550133,
      "grad_norm": 0.6192841510938353,
      "learning_rate": 9.947111481357023e-06,
      "loss": 0.3481,
      "step": 364
    },
    {
      "epoch": 1.1048051456678016,
      "grad_norm": 0.5708840092102739,
      "learning_rate": 9.894224442119606e-06,
      "loss": 0.3674,
      "step": 365
    },
    {
      "epoch": 1.1078320090805902,
      "grad_norm": 0.6270214395862005,
      "learning_rate": 9.841340361651921e-06,
      "loss": 0.3454,
      "step": 366
    },
    {
      "epoch": 1.1108588724933788,
      "grad_norm": 0.5751327114907606,
      "learning_rate": 9.788460719235386e-06,
      "loss": 0.3644,
      "step": 367
    },
    {
      "epoch": 1.1138857359061674,
      "grad_norm": 0.6593656572115276,
      "learning_rate": 9.735586994027267e-06,
      "loss": 0.3189,
      "step": 368
    },
    {
      "epoch": 1.1169125993189557,
      "grad_norm": 0.5697961927811549,
      "learning_rate": 9.682720665019325e-06,
| "loss": 0.3351, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.1199394627317443, |
| "grad_norm": 0.5783005136573245, |
| "learning_rate": 9.62986321099642e-06, |
| "loss": 0.3172, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.1229663261445326, |
| "grad_norm": 0.6602814212616022, |
| "learning_rate": 9.57701611049517e-06, |
| "loss": 0.3477, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.1259931895573212, |
| "grad_norm": 0.5466495486897278, |
| "learning_rate": 9.524180841762577e-06, |
| "loss": 0.3449, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.1290200529701098, |
| "grad_norm": 0.5898747145363605, |
| "learning_rate": 9.471358882714687e-06, |
| "loss": 0.3538, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.1320469163828983, |
| "grad_norm": 0.6055038084990242, |
| "learning_rate": 9.418551710895243e-06, |
| "loss": 0.3356, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.1350737797956867, |
| "grad_norm": 0.5711419608727597, |
| "learning_rate": 9.365760803434356e-06, |
| "loss": 0.3374, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.1381006432084753, |
| "grad_norm": 0.5582308120665458, |
| "learning_rate": 9.312987637007191e-06, |
| "loss": 0.3457, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.1411275066212636, |
| "grad_norm": 0.6228675184029376, |
| "learning_rate": 9.260233687792657e-06, |
| "loss": 0.3437, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.1441543700340522, |
| "grad_norm": 0.571541793087105, |
| "learning_rate": 9.207500431432115e-06, |
| "loss": 0.3417, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.1471812334468408, |
| "grad_norm": 0.5827669475909949, |
| "learning_rate": 9.154789342988108e-06, |
| "loss": 0.3511, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.1502080968596293, |
| "grad_norm": 0.5997040669168835, |
| "learning_rate": 9.102101896903084e-06, |
| "loss": 0.3495, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.1532349602724177, |
| "grad_norm": 0.5846826538547297, |
| "learning_rate": 9.049439566958176e-06, |
| "loss": 0.3437, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.1562618236852062, |
| "grad_norm": 0.6170682848154051, |
| "learning_rate": 8.99680382623195e-06, |
| "loss": 0.35, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.1592886870979946, |
| "grad_norm": 0.5409605082135953, |
| "learning_rate": 8.944196147059233e-06, |
| "loss": 0.3397, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.1623155505107832, |
| "grad_norm": 0.5422397824189615, |
| "learning_rate": 8.89161800098989e-06, |
| "loss": 0.3198, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.1653424139235717, |
| "grad_norm": 0.6709624091575735, |
| "learning_rate": 8.839070858747697e-06, |
| "loss": 0.3709, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.1683692773363603, |
| "grad_norm": 0.5991988690501839, |
| "learning_rate": 8.786556190189183e-06, |
| "loss": 0.3378, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.1713961407491487, |
| "grad_norm": 0.5844740298871881, |
| "learning_rate": 8.734075464262507e-06, |
| "loss": 0.3385, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.1744230041619372, |
| "grad_norm": 0.5787295522146091, |
| "learning_rate": 8.681630148966397e-06, |
| "loss": 0.3504, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.1774498675747256, |
| "grad_norm": 0.5705869702784712, |
| "learning_rate": 8.629221711309056e-06, |
| "loss": 0.3472, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.1804767309875142, |
| "grad_norm": 0.5469340675580189, |
| "learning_rate": 8.576851617267151e-06, |
| "loss": 0.3262, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.1835035944003027, |
| "grad_norm": 0.6390614574325132, |
| "learning_rate": 8.52452133174478e-06, |
| "loss": 0.3338, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.1865304578130913, |
| "grad_norm": 0.5626809617022358, |
| "learning_rate": 8.472232318532531e-06, |
| "loss": 0.3522, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.1895573212258796, |
| "grad_norm": 0.6074343592856284, |
| "learning_rate": 8.419986040266502e-06, |
| "loss": 0.3335, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.1925841846386682, |
| "grad_norm": 0.6108834302902758, |
| "learning_rate": 8.367783958387407e-06, |
| "loss": 0.3634, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.1956110480514566, |
| "grad_norm": 0.5924041613120298, |
| "learning_rate": 8.315627533099697e-06, |
| "loss": 0.3352, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.1986379114642451, |
| "grad_norm": 0.5537237413168906, |
| "learning_rate": 8.263518223330698e-06, |
| "loss": 0.3397, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.2016647748770337, |
| "grad_norm": 0.5995259959826205, |
| "learning_rate": 8.211457486689829e-06, |
| "loss": 0.3288, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.2046916382898223, |
| "grad_norm": 0.586227691301758, |
| "learning_rate": 8.159446779427798e-06, |
| "loss": 0.3469, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.2077185017026106, |
| "grad_norm": 0.5721294421911566, |
| "learning_rate": 8.107487556395902e-06, |
| "loss": 0.349, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.2107453651153992, |
| "grad_norm": 0.5335405620365062, |
| "learning_rate": 8.055581271005292e-06, |
| "loss": 0.3185, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.2137722285281876, |
| "grad_norm": 0.6405685814283789, |
| "learning_rate": 8.00372937518636e-06, |
| "loss": 0.3947, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.2167990919409761, |
| "grad_norm": 0.5643907919650019, |
| "learning_rate": 7.951933319348095e-06, |
| "loss": 0.335, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.2198259553537647, |
| "grad_norm": 0.5467062061450758, |
| "learning_rate": 7.900194552337516e-06, |
| "loss": 0.3385, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.2228528187665533, |
| "grad_norm": 0.618770713655019, |
| "learning_rate": 7.848514521399167e-06, |
| "loss": 0.361, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.2258796821793416, |
| "grad_norm": 0.5842007696391234, |
| "learning_rate": 7.796894672134594e-06, |
| "loss": 0.3379, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.2289065455921302, |
| "grad_norm": 0.5266206356487769, |
| "learning_rate": 7.745336448461958e-06, |
| "loss": 0.3362, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.2319334090049185, |
| "grad_norm": 0.5513791529472379, |
| "learning_rate": 7.6938412925756e-06, |
| "loss": 0.3617, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.2349602724177071, |
| "grad_norm": 0.5899581433906347, |
| "learning_rate": 7.642410644905726e-06, |
| "loss": 0.347, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.2379871358304957, |
| "grad_norm": 0.5632085415653629, |
| "learning_rate": 7.591045944078119e-06, |
| "loss": 0.3227, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.2410139992432843, |
| "grad_norm": 0.646688847920783, |
| "learning_rate": 7.539748626873866e-06, |
| "loss": 0.3835, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.2440408626560726, |
| "grad_norm": 0.565815570373247, |
| "learning_rate": 7.488520128189209e-06, |
| "loss": 0.3381, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.2470677260688612, |
| "grad_norm": 0.5508218814977722, |
| "learning_rate": 7.4373618809953755e-06, |
| "loss": 0.3254, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.2500945894816495, |
| "grad_norm": 0.5775285820935306, |
| "learning_rate": 7.386275316298513e-06, |
| "loss": 0.333, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.253121452894438, |
| "grad_norm": 0.5419978291783134, |
| "learning_rate": 7.335261863099652e-06, |
| "loss": 0.3254, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.2561483163072267, |
| "grad_norm": 0.5484080196401268, |
| "learning_rate": 7.2843229483547405e-06, |
| "loss": 0.3291, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.2591751797200152, |
| "grad_norm": 0.5923578982303893, |
| "learning_rate": 7.233459996934731e-06, |
| "loss": 0.3165, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.2622020431328036, |
| "grad_norm": 0.5323464300979155, |
| "learning_rate": 7.182674431585703e-06, |
| "loss": 0.3225, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.2652289065455922, |
| "grad_norm": 0.597156918147518, |
| "learning_rate": 7.131967672889101e-06, |
| "loss": 0.3452, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.2682557699583805, |
| "grad_norm": 0.5903084563181314, |
| "learning_rate": 7.081341139221955e-06, |
| "loss": 0.3604, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.271282633371169, |
| "grad_norm": 0.593569173911432, |
| "learning_rate": 7.0307962467172555e-06, |
| "loss": 0.3192, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.2743094967839577, |
| "grad_norm": 0.553599303532681, |
| "learning_rate": 6.9803344092242855e-06, |
| "loss": 0.3233, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.2773363601967462, |
| "grad_norm": 0.5722066857943819, |
| "learning_rate": 6.929957038269123e-06, |
| "loss": 0.3656, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.2803632236095346, |
| "grad_norm": 0.6229815969094524, |
| "learning_rate": 6.87966554301513e-06, |
| "loss": 0.3466, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.2833900870223232, |
| "grad_norm": 0.5726929444576085, |
| "learning_rate": 6.8294613302235325e-06, |
| "loss": 0.3389, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.2864169504351115, |
| "grad_norm": 0.5555540574035858, |
| "learning_rate": 6.779345804214088e-06, |
| "loss": 0.3345, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.2894438138479, |
| "grad_norm": 0.5553338955130577, |
| "learning_rate": 6.729320366825785e-06, |
| "loss": 0.3514, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.2924706772606886, |
| "grad_norm": 0.6243720745316447, |
| "learning_rate": 6.679386417377649e-06, |
| "loss": 0.3635, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.2954975406734772, |
| "grad_norm": 0.5531397142233633, |
| "learning_rate": 6.629545352629583e-06, |
| "loss": 0.3638, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.2985244040862656, |
| "grad_norm": 0.5613951944973024, |
| "learning_rate": 6.579798566743314e-06, |
| "loss": 0.3458, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.3015512674990541, |
| "grad_norm": 0.5100453016571512, |
| "learning_rate": 6.530147451243377e-06, |
| "loss": 0.3144, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.3045781309118425, |
| "grad_norm": 0.5686461287674612, |
| "learning_rate": 6.480593394978208e-06, |
| "loss": 0.3516, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.307604994324631, |
| "grad_norm": 0.5724419417242387, |
| "learning_rate": 6.431137784081283e-06, |
| "loss": 0.3434, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.3106318577374196, |
| "grad_norm": 0.5547824812824002, |
| "learning_rate": 6.381782001932352e-06, |
| "loss": 0.3521, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.3136587211502082, |
| "grad_norm": 0.5399423116097835, |
| "learning_rate": 6.33252742911874e-06, |
| "loss": 0.333, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.3166855845629966, |
| "grad_norm": 0.5874679725515185, |
| "learning_rate": 6.283375443396726e-06, |
| "loss": 0.3382, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.3197124479757851, |
| "grad_norm": 0.5737078845333226, |
| "learning_rate": 6.234327419653013e-06, |
| "loss": 0.3577, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.3227393113885735, |
| "grad_norm": 0.5601875807415152, |
| "learning_rate": 6.185384729866264e-06, |
| "loss": 0.3342, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.325766174801362, |
| "grad_norm": 0.538551682042874, |
| "learning_rate": 6.136548743068713e-06, |
| "loss": 0.324, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.3287930382141506, |
| "grad_norm": 0.5579579164805096, |
| "learning_rate": 6.087820825307904e-06, |
| "loss": 0.3477, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.3318199016269392, |
| "grad_norm": 0.5572643172617712, |
| "learning_rate": 6.039202339608432e-06, |
| "loss": 0.3391, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.3348467650397275, |
| "grad_norm": 0.5681301827042138, |
| "learning_rate": 5.990694645933866e-06, |
| "loss": 0.3646, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.3378736284525161, |
| "grad_norm": 0.5367557097686797, |
| "learning_rate": 5.9422991011486635e-06, |
| "loss": 0.3385, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.3409004918653045, |
| "grad_norm": 0.5426137109937247, |
| "learning_rate": 5.894017058980249e-06, |
| "loss": 0.3176, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.343927355278093, |
| "grad_norm": 0.5477836510987404, |
| "learning_rate": 5.845849869981137e-06, |
| "loss": 0.3138, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.3469542186908816, |
| "grad_norm": 0.5456791910076206, |
| "learning_rate": 5.797798881491138e-06, |
| "loss": 0.344, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.3499810821036702, |
| "grad_norm": 0.5520282720983019, |
| "learning_rate": 5.749865437599703e-06, |
| "loss": 0.3485, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.3530079455164585, |
| "grad_norm": 0.5521474031345254, |
| "learning_rate": 5.702050879108284e-06, |
| "loss": 0.3408, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.356034808929247, |
| "grad_norm": 0.548564658972675, |
| "learning_rate": 5.654356543492883e-06, |
| "loss": 0.3433, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.3590616723420355, |
| "grad_norm": 0.5438952579132498, |
| "learning_rate": 5.606783764866576e-06, |
| "loss": 0.3551, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.362088535754824, |
| "grad_norm": 0.5318392433791388, |
| "learning_rate": 5.559333873942259e-06, |
| "loss": 0.348, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.3651153991676126, |
| "grad_norm": 0.5061705585504341, |
| "learning_rate": 5.512008197995379e-06, |
| "loss": 0.3199, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.3681422625804012, |
| "grad_norm": 0.5393172856391282, |
| "learning_rate": 5.464808060826825e-06, |
| "loss": 0.3497, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.3711691259931895, |
| "grad_norm": 0.5668109293418759, |
| "learning_rate": 5.417734782725896e-06, |
| "loss": 0.3482, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.374195989405978, |
| "grad_norm": 0.544311693084174, |
| "learning_rate": 5.370789680433376e-06, |
| "loss": 0.3421, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.3772228528187664, |
| "grad_norm": 0.5157258721726508, |
| "learning_rate": 5.323974067104687e-06, |
| "loss": 0.3149, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.380249716231555, |
| "grad_norm": 0.5551022028976371, |
| "learning_rate": 5.277289252273175e-06, |
| "loss": 0.3421, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.3832765796443436, |
| "grad_norm": 0.5795019652706563, |
| "learning_rate": 5.230736541813463e-06, |
| "loss": 0.3237, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.3863034430571322, |
| "grad_norm": 0.5555972127211857, |
| "learning_rate": 5.184317237904939e-06, |
| "loss": 0.3614, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.3893303064699205, |
| "grad_norm": 0.5429781404014025, |
| "learning_rate": 5.138032638995315e-06, |
| "loss": 0.3364, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.392357169882709, |
| "grad_norm": 0.5428787651268926, |
| "learning_rate": 5.091884039764321e-06, |
| "loss": 0.3308, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.3953840332954974, |
| "grad_norm": 0.546411228561912, |
| "learning_rate": 5.045872731087479e-06, |
| "loss": 0.3303, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.398410896708286, |
| "grad_norm": 0.5776370787405458, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.3604, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.4014377601210746, |
| "grad_norm": 0.5412848296262482, |
| "learning_rate": 4.954267129660789e-06, |
| "loss": 0.3378, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.4044646235338631, |
| "grad_norm": 0.5193209876471725, |
| "learning_rate": 4.908675399316534e-06, |
| "loss": 0.3462, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.4074914869466515, |
| "grad_norm": 0.5585630158275061, |
| "learning_rate": 4.863226084265939e-06, |
| "loss": 0.3587, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.41051835035944, |
| "grad_norm": 0.534922999597849, |
| "learning_rate": 4.817920455824045e-06, |
| "loss": 0.3539, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.4135452137722284, |
| "grad_norm": 0.5603280075048959, |
| "learning_rate": 4.772759781286679e-06, |
| "loss": 0.3188, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.416572077185017, |
| "grad_norm": 0.5547404479145099, |
| "learning_rate": 4.727745323894976e-06, |
| "loss": 0.3536, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.4195989405978056, |
| "grad_norm": 0.5360397333868736, |
| "learning_rate": 4.682878342800087e-06, |
| "loss": 0.3295, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.4226258040105941, |
| "grad_norm": 0.5594639544278357, |
| "learning_rate": 4.638160093027908e-06, |
| "loss": 0.3365, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.4256526674233825, |
| "grad_norm": 0.5569096723075034, |
| "learning_rate": 4.593591825444028e-06, |
| "loss": 0.3454, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.428679530836171, |
| "grad_norm": 0.5599723522714323, |
| "learning_rate": 4.549174786718684e-06, |
| "loss": 0.3337, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.4317063942489594, |
| "grad_norm": 0.5747715584978174, |
| "learning_rate": 4.504910219291941e-06, |
| "loss": 0.3443, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.434733257661748, |
| "grad_norm": 0.5693242747624353, |
| "learning_rate": 4.460799361338898e-06, |
| "loss": 0.3408, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.4377601210745365, |
| "grad_norm": 0.5951141720172414, |
| "learning_rate": 4.416843446735077e-06, |
| "loss": 0.3485, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.4407869844873251, |
| "grad_norm": 0.5369277042296827, |
| "learning_rate": 4.373043705021899e-06, |
| "loss": 0.3361, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.4438138479001135, |
| "grad_norm": 0.5363526804804581, |
| "learning_rate": 4.3294013613722944e-06, |
| "loss": 0.3615, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.446840711312902, |
| "grad_norm": 0.5620570177865998, |
| "learning_rate": 4.2859176365564294e-06, |
| "loss": 0.3358, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.4498675747256904, |
| "grad_norm": 0.5518198256530519, |
| "learning_rate": 4.2425937469075626e-06, |
| "loss": 0.3328, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.452894438138479, |
| "grad_norm": 0.5556520541778592, |
| "learning_rate": 4.19943090428802e-06, |
| "loss": 0.3376, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.4559213015512675, |
| "grad_norm": 0.5303514005471629, |
| "learning_rate": 4.1564303160552935e-06, |
| "loss": 0.3305, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.458948164964056, |
| "grad_norm": 0.533081192300205, |
| "learning_rate": 4.113593185028273e-06, |
| "loss": 0.3296, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.4619750283768445, |
| "grad_norm": 0.5219490519033269, |
| "learning_rate": 4.070920709453597e-06, |
| "loss": 0.3104, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.465001891789633, |
| "grad_norm": 0.5426105571932429, |
| "learning_rate": 4.028414082972141e-06, |
| "loss": 0.3219, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.4680287552024214, |
| "grad_norm": 0.5497656570570758, |
| "learning_rate": 3.986074494585619e-06, |
| "loss": 0.3184, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.47105561861521, |
| "grad_norm": 0.5239164687528085, |
| "learning_rate": 3.943903128623336e-06, |
| "loss": 0.3188, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.4740824820279985, |
| "grad_norm": 0.516373792095981, |
| "learning_rate": 3.9019011647090465e-06, |
| "loss": 0.3219, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.477109345440787, |
| "grad_norm": 0.5367408204940652, |
| "learning_rate": 3.860069777727983e-06, |
| "loss": 0.3231, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.4801362088535754, |
| "grad_norm": 0.5496329286629653, |
| "learning_rate": 3.818410137793947e-06, |
| "loss": 0.3301, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.483163072266364, |
| "grad_norm": 0.5334420863096035, |
| "learning_rate": 3.7769234102166365e-06, |
| "loss": 0.3245, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.4861899356791524, |
| "grad_norm": 0.5323076960619461, |
| "learning_rate": 3.735610755468988e-06, |
| "loss": 0.3405, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.489216799091941, |
| "grad_norm": 0.5380616673054802, |
| "learning_rate": 3.6944733291547784e-06, |
| "loss": 0.3523, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.4922436625047295, |
| "grad_norm": 0.5817667007710551, |
| "learning_rate": 3.653512281976238e-06, |
| "loss": 0.3433, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.495270525917518, |
| "grad_norm": 0.5452858852329028, |
| "learning_rate": 3.612728759701919e-06, |
| "loss": 0.3202, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.4982973893303064, |
| "grad_norm": 0.5499290913033745, |
| "learning_rate": 3.5721239031346067e-06, |
| "loss": 0.2938, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.501324252743095, |
| "grad_norm": 0.5732273600347847, |
| "learning_rate": 3.5316988480794255e-06, |
| "loss": 0.3386, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.5043511161558833, |
| "grad_norm": 0.5669908963512301, |
| "learning_rate": 3.4914547253120655e-06, |
| "loss": 0.3552, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.507377979568672, |
| "grad_norm": 0.5344679664912712, |
| "learning_rate": 3.4513926605471504e-06, |
| "loss": 0.3259, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.5104048429814605, |
| "grad_norm": 0.5447672402613514, |
| "learning_rate": 3.4115137744067516e-06, |
| "loss": 0.3481, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.513431706394249, |
| "grad_norm": 0.5625756836572919, |
| "learning_rate": 3.37181918238904e-06, |
| "loss": 0.3693, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.5164585698070374, |
| "grad_norm": 0.5544132310528578, |
| "learning_rate": 3.3323099948370853e-06, |
| "loss": 0.3294, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.519485433219826, |
| "grad_norm": 0.5510107028882012, |
| "learning_rate": 3.292987316907792e-06, |
| "loss": 0.346, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.5225122966326143, |
| "grad_norm": 0.5397126036092406, |
| "learning_rate": 3.253852248540994e-06, |
| "loss": 0.3445, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.525539160045403, |
| "grad_norm": 0.5439152626942458, |
| "learning_rate": 3.2149058844286796e-06, |
| "loss": 0.3422, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.5285660234581915, |
| "grad_norm": 0.5424825124147472, |
| "learning_rate": 3.1761493139843734e-06, |
| "loss": 0.347, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.53159288687098, |
| "grad_norm": 0.5153711897693964, |
| "learning_rate": 3.1375836213126653e-06, |
| "loss": 0.3096, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.5346197502837684, |
| "grad_norm": 0.5469298988447148, |
| "learning_rate": 3.099209885178882e-06, |
| "loss": 0.3231, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.537646613696557, |
| "grad_norm": 0.5343635050441716, |
| "learning_rate": 3.0610291789789094e-06, |
| "loss": 0.339, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.5406734771093453, |
| "grad_norm": 0.5412668739465106, |
| "learning_rate": 3.023042570709185e-06, |
| "loss": 0.3427, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.543700340522134, |
| "grad_norm": 0.5545585816484584, |
| "learning_rate": 2.9852511229367862e-06, |
| "loss": 0.3532, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.5467272039349225, |
| "grad_norm": 0.5309574182255647, |
| "learning_rate": 2.9476558927697605e-06, |
| "loss": 0.3346, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.549754067347711, |
| "grad_norm": 0.5337453389866483, |
| "learning_rate": 2.9102579318274994e-06, |
| "loss": 0.3209, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.5527809307604994, |
| "grad_norm": 0.5909095579929825, |
| "learning_rate": 2.8730582862113743e-06, |
| "loss": 0.3577, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.555807794173288, |
| "grad_norm": 0.5313817380647149, |
| "learning_rate": 2.8360579964754277e-06, |
| "loss": 0.3166, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.5588346575860763, |
| "grad_norm": 0.5281743372920349, |
| "learning_rate": 2.7992580975973136e-06, |
| "loss": 0.3304, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.5618615209988649, |
| "grad_norm": 0.5494324841890594, |
| "learning_rate": 2.7626596189492983e-06, |
| "loss": 0.3332, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.5648883844116535, |
| "grad_norm": 0.5344538032398312, |
| "learning_rate": 2.726263584269513e-06, |
| "loss": 0.3546, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.567915247824442, |
| "grad_norm": 0.574286734188576, |
| "learning_rate": 2.690071011633284e-06, |
| "loss": 0.3393, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.5709421112372304, |
| "grad_norm": 0.5768349285826244, |
| "learning_rate": 2.6540829134246683e-06, |
| "loss": 0.3315, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.573968974650019, |
| "grad_norm": 0.5625024418230359, |
| "learning_rate": 2.618300296308135e-06, |
| "loss": 0.3509, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.5769958380628073, |
| "grad_norm": 0.5327769230849426, |
| "learning_rate": 2.582724161200405e-06, |
| "loss": 0.3344, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.5800227014755959, |
| "grad_norm": 0.5421050697925652, |
| "learning_rate": 2.5473555032424534e-06, |
| "loss": 0.3229, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.5830495648883844, |
| "grad_norm": 0.5387385790080916, |
| "learning_rate": 2.5121953117716744e-06, |
| "loss": 0.3445, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.586076428301173, |
| "grad_norm": 0.5191243783724272, |
| "learning_rate": 2.477244570294206e-06, |
| "loss": 0.3227, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.5891032917139614, |
| "grad_norm": 0.545568450389557, |
| "learning_rate": 2.4425042564574186e-06, |
| "loss": 0.3357, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.59213015512675, |
| "grad_norm": 0.5473005411978172, |
| "learning_rate": 2.4079753420225694e-06, |
| "loss": 0.3341, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.5951570185395383, |
| "grad_norm": 0.5214560020541606, |
| "learning_rate": 2.3736587928376197e-06, |
| "loss": 0.3311, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.5981838819523269, |
| "grad_norm": 0.5121042197817159, |
| "learning_rate": 2.339555568810221e-06, |
| "loss": 0.3145, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.6012107453651154, |
| "grad_norm": 0.5354363863049165, |
| "learning_rate": 2.305666623880858e-06, |
| "loss": 0.3319, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.604237608777904, |
| "grad_norm": 0.5531462253765186, |
| "learning_rate": 2.27199290599617e-06, |
| "loss": 0.3435, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.6072644721906924, |
| "grad_norm": 0.5433550687181965, |
| "learning_rate": 2.2385353570824308e-06, |
| "loss": 0.337, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.610291335603481, |
| "grad_norm": 0.5307937589601727, |
| "learning_rate": 2.2052949130192136e-06, |
| "loss": 0.3361, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.6133181990162693, |
| "grad_norm": 0.5349935044913042, |
| "learning_rate": 2.172272503613183e-06, |
| "loss": 0.3347, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.6163450624290578, |
| "grad_norm": 0.5196297934178653, |
| "learning_rate": 2.1394690525721275e-06, |
| "loss": 0.3399, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.6193719258418464, |
| "grad_norm": 0.5297545299951213, |
| "learning_rate": 2.1068854774790783e-06, |
| "loss": 0.3423, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.622398789254635, |
| "grad_norm": 0.5514786858614504, |
| "learning_rate": 2.0745226897666858e-06, |
| "loss": 0.3441, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.6254256526674233, |
| "grad_norm": 0.5369609797548685, |
| "learning_rate": 2.0423815946916783e-06, |
| "loss": 0.3412, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.628452516080212, |
| "grad_norm": 0.5626768055990692, |
| "learning_rate": 2.010463091309587e-06, |
| "loss": 0.3312, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.6314793794930003, |
| "grad_norm": 0.5409337306023044, |
| "learning_rate": 1.9787680724495617e-06, |
| "loss": 0.3626, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.6345062429057888, |
| "grad_norm": 0.5694383205006677, |
| "learning_rate": 1.947297424689414e-06, |
| "loss": 0.3339, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.6375331063185774, |
| "grad_norm": 0.5350701082959706, |
| "learning_rate": 1.9160520283308115e-06, |
| "loss": 0.3256, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.640559969731366, |
| "grad_norm": 0.5271503018364957, |
| "learning_rate": 1.8850327573746584e-06, |
| "loss": 0.3321, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.6435868331441543, |
| "grad_norm": 0.5326263939812926, |
| "learning_rate": 1.854240479496643e-06, |
| "loss": 0.3256, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.646613696556943, |
| "grad_norm": 0.5895426836301729, |
| "learning_rate": 1.8236760560229715e-06, |
| "loss": 0.338, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.6496405599697312, |
| "grad_norm": 0.5484900166169228, |
| "learning_rate": 1.7933403419062689e-06, |
| "loss": 0.3459, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.6526674233825198, |
| "grad_norm": 0.5531195417760699, |
| "learning_rate": 1.7632341857016733e-06, |
| "loss": 0.3348, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.6556942867953084, |
| "grad_norm": 0.5226837919983212, |
| "learning_rate": 1.7333584295430894e-06, |
| "loss": 0.315, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.658721150208097, |
| "grad_norm": 0.545918979119333, |
| "learning_rate": 1.7037139091196396e-06, |
| "loss": 0.334, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.6617480136208853, |
| "grad_norm": 0.5111664636037931, |
| "learning_rate": 1.6743014536522872e-06, |
| "loss": 0.3535, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.6647748770336739, |
| "grad_norm": 0.5368881403465131, |
| "learning_rate": 1.6451218858706374e-06, |
| "loss": 0.3339, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.6678017404464622, |
| "grad_norm": 0.5576209299099644, |
| "learning_rate": 1.616176021989926e-06, |
| "loss": 0.3376, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.6708286038592508, |
| "grad_norm": 0.5089944495209637, |
| "learning_rate": 1.587464671688187e-06, |
| "loss": 0.3377, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.6738554672720394, |
| "grad_norm": 0.5714316099941706, |
| "learning_rate": 1.558988638083616e-06, |
| "loss": 0.3445, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.676882330684828, |
| "grad_norm": 0.5135280939181603, |
| "learning_rate": 1.5307487177120773e-06, |
| "loss": 0.3438, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.6799091940976163, |
| "grad_norm": 0.5642811750118364, |
| "learning_rate": 1.5027457005048573e-06, |
| "loss": 0.3567, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.6829360575104049, |
| "grad_norm": 0.5199707102235239, |
| "learning_rate": 1.4749803697665366e-06, |
| "loss": 0.3441, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.6859629209231932, |
| "grad_norm": 0.5205676338074903, |
| "learning_rate": 1.4474535021531099e-06, |
| "loss": 0.3358, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.6889897843359818, |
| "grad_norm": 0.5362996155339582, |
| "learning_rate": 1.4201658676502294e-06, |
| "loss": 0.3362, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.6920166477487704, |
| "grad_norm": 0.548955323984696, |
| "learning_rate": 1.3931182295516965e-06, |
| "loss": 0.3464, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.695043511161559, |
| "grad_norm": 0.5289697187345612, |
| "learning_rate": 1.3663113444380905e-06, |
| "loss": 0.3184, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.6980703745743473, |
| "grad_norm": 0.5298888577780704, |
| "learning_rate": 1.339745962155613e-06, |
| "loss": 0.3235, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.7010972379871359, |
| "grad_norm": 0.541301268416102, |
| "learning_rate": 1.3134228257951142e-06, |
| "loss": 0.3415, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.7041241013999242, |
| "grad_norm": 0.531286474847516, |
| "learning_rate": 1.2873426716713012e-06, |
| "loss": 0.3332, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.7071509648127128, |
| "grad_norm": 0.5523472234916659, |
| "learning_rate": 1.2615062293021508e-06, |
| "loss": 0.3318, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.7101778282255014, |
| "grad_norm": 0.5095391750771804, |
| "learning_rate": 1.2359142213884933e-06, |
| "loss": 0.3148, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.71320469163829, |
| "grad_norm": 0.5066186192515475, |
| "learning_rate": 1.2105673637938054e-06, |
| "loss": 0.3304, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.7162315550510783, |
| "grad_norm": 0.516379881184284, |
| "learning_rate": 1.1854663655241804e-06, |
| "loss": 0.3389, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.7192584184638668, |
| "grad_norm": 0.5194691989074978, |
| "learning_rate": 1.1606119287084982e-06, |
| "loss": 0.3227, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.7222852818766552, |
| "grad_norm": 0.5234492179327196, |
| "learning_rate": 1.136004748578785e-06, |
| "loss": 0.3329, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.7253121452894438, |
| "grad_norm": 0.5419676642514104, |
| "learning_rate": 1.1116455134507665e-06, |
| "loss": 0.326, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.7283390087022323, |
| "grad_norm": 0.5034568150745946, |
| "learning_rate": 1.0875349047046113e-06, |
| "loss": 0.323, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.731365872115021, |
| "grad_norm": 0.5255454997274771, |
| "learning_rate": 1.0636735967658785e-06, |
| "loss": 0.3419, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.7343927355278093, |
| "grad_norm": 0.5240983345143586, |
| "learning_rate": 1.0400622570866426e-06, |
| "loss": 0.3359, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.7374195989405978, |
| "grad_norm": 0.5211210443855394, |
| "learning_rate": 1.0167015461268303e-06, |
| "loss": 0.3489, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.7404464623533862, |
| "grad_norm": 0.5025003403433331, |
| "learning_rate": 9.935921173357444e-07, |
| "loss": 0.331, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.7434733257661748, |
| "grad_norm": 0.535412370103588, |
| "learning_rate": 9.707346171337895e-07, |
| "loss": 0.3455, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.7465001891789633, |
| "grad_norm": 0.5405804985692432, |
| "learning_rate": 9.481296848943744e-07, |
| "loss": 0.3613, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.749527052591752, |
| "grad_norm": 0.5171554201549999, |
| "learning_rate": 9.257779529260558e-07, |
| "loss": 0.3374, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.7525539160045402, |
| "grad_norm": 0.541760899431086, |
| "learning_rate": 9.036800464548157e-07, |
| "loss": 0.3363, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.7555807794173288, |
| "grad_norm": 0.5284347732678949, |
| "learning_rate": 8.818365836066101e-07, |
| "loss": 0.3371, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.7586076428301172, |
| "grad_norm": 0.515647569313736, |
| "learning_rate": 8.602481753900427e-07, |
| "loss": 0.3343, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.7616345062429057, |
| "grad_norm": 0.5493374263487832, |
| "learning_rate": 8.389154256793042e-07, |
| "loss": 0.3372, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.7646613696556943, |
| "grad_norm": 0.520056067438565, |
| "learning_rate": 8.178389311972612e-07, |
| "loss": 0.3076, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.7676882330684829, |
| "grad_norm": 0.515783537561946, |
| "learning_rate": 7.970192814987676e-07, |
| "loss": 0.3013, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.7707150964812712, |
| "grad_norm": 0.511263622854028, |
| "learning_rate": 7.764570589541876e-07, |
| "loss": 0.3291, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.7737419598940598, |
| "grad_norm": 0.5022716662793987, |
| "learning_rate": 7.561528387330797e-07, |
| "loss": 0.3165, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.7767688233068482, |
| "grad_norm": 0.49706334384218254, |
| "learning_rate": 7.361071887881376e-07, |
| "loss": 0.3069, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.7797956867196367, |
| "grad_norm": 0.5147167557332147, |
| "learning_rate": 7.163206698392744e-07, |
| "loss": 0.323, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.7828225501324253, |
| "grad_norm": 0.5694318054287043, |
| "learning_rate": 6.96793835357964e-07, |
| "loss": 0.3546, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.7858494135452139, |
| "grad_norm": 0.5114207026118898, |
| "learning_rate": 6.775272315517423e-07, |
| "loss": 0.319, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.7888762769580022, |
| "grad_norm": 0.5176754574147087, |
| "learning_rate": 6.585213973489335e-07, |
| "loss": 0.3323, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.7919031403707908, |
| "grad_norm": 0.5161157484947948, |
| "learning_rate": 6.397768643835755e-07, |
| "loss": 0.3291, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.7949300037835791, |
| "grad_norm": 0.5447678048296611, |
| "learning_rate": 6.212941569805508e-07, |
| "loss": 0.3609, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.7979568671963677, |
| "grad_norm": 0.4946458588220733, |
| "learning_rate": 6.030737921409169e-07, |
| "loss": 0.3171, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.8009837306091563, |
| "grad_norm": 0.5234463435365085, |
| "learning_rate": 5.851162795274445e-07, |
| "loss": 0.3178, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.8040105940219449, |
| "grad_norm": 0.5293792192577068, |
| "learning_rate": 5.674221214503639e-07, |
| "loss": 0.3126, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.8070374574347332, |
| "grad_norm": 0.5277044034885998, |
| "learning_rate": 5.499918128533155e-07, |
| "loss": 0.3313, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.8100643208475218, |
| "grad_norm": 0.5092427859911447, |
| "learning_rate": 5.328258412994958e-07, |
| "loss": 0.3239, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.8130911842603101, |
| "grad_norm": 0.5149352616221561, |
| "learning_rate": 5.159246869580348e-07, |
| "loss": 0.3279, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.8161180476730987, |
| "grad_norm": 0.5419286189342575, |
| "learning_rate": 4.992888225905467e-07, |
| "loss": 0.3437, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.8191449110858873, |
| "grad_norm": 0.5324093053643671, |
| "learning_rate": 4.829187135379221e-07, |
| "loss": 0.359, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.8221717744986758, |
| "grad_norm": 0.5201206845094479, |
| "learning_rate": 4.6681481770729844e-07, |
| "loss": 0.3346, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.8251986379114642, |
| "grad_norm": 0.5521218832839953, |
| "learning_rate": 4.509775855592613e-07, |
| "loss": 0.3448, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.8282255013242528, |
| "grad_norm": 0.5015868720849281, |
| "learning_rate": 4.354074600952407e-07, |
| "loss": 0.3078, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.8312523647370411, |
| "grad_norm": 0.5243157913445379, |
| "learning_rate": 4.2010487684511105e-07, |
| "loss": 0.338, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.8342792281498297, |
| "grad_norm": 0.49840216223213707, |
| "learning_rate": 4.0507026385502747e-07, |
| "loss": 0.3216, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.8373060915626183, |
| "grad_norm": 0.5213845291767036, |
| "learning_rate": 3.9030404167542777e-07, |
| "loss": 0.3222, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.8403329549754068, |
| "grad_norm": 0.5403603786906672, |
| "learning_rate": 3.7580662334929517e-07, |
| "loss": 0.3265, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.8433598183881952, |
| "grad_norm": 0.49776468943532864, |
| "learning_rate": 3.615784144005796e-07, |
| "loss": 0.3171, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.8463866818009838, |
| "grad_norm": 0.5257081748863395, |
| "learning_rate": 3.476198128228736e-07, |
| "loss": 0.3397, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.849413545213772, |
| "grad_norm": 0.5539772514631744, |
| "learning_rate": 3.339312090682689e-07, |
| "loss": 0.3204, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.8524404086265607, |
| "grad_norm": 0.4977270342121302, |
| "learning_rate": 3.2051298603643754e-07, |
| "loss": 0.3242, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.8554672720393492, |
| "grad_norm": 0.5067179030194061, |
| "learning_rate": 3.0736551906392354e-07, |
| "loss": 0.3181, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.8584941354521378, |
| "grad_norm": 0.5212549687159126, |
| "learning_rate": 2.9448917591363923e-07, |
| "loss": 0.3353, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.8615209988649262, |
| "grad_norm": 0.4955350740015368, |
| "learning_rate": 2.818843167645835e-07, |
| "loss": 0.3247, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.8645478622777147, |
| "grad_norm": 0.5062996754742579, |
| "learning_rate": 2.6955129420176193e-07, |
| "loss": 0.3144, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.867574725690503, |
| "grad_norm": 0.5125940645385597, |
| "learning_rate": 2.5749045320632824e-07, |
| "loss": 0.3163, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.8706015891032917, |
| "grad_norm": 0.5141111700527322, |
| "learning_rate": 2.4570213114592957e-07, |
| "loss": 0.3114, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.8736284525160802, |
| "grad_norm": 0.5121904268534225, |
| "learning_rate": 2.3418665776527738e-07, |
| "loss": 0.3541, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.8766553159288688, |
| "grad_norm": 0.5231636218499177, |
| "learning_rate": 2.2294435517691504e-07, |
| "loss": 0.3407, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.8796821793416572, |
| "grad_norm": 0.5046178923309044, |
| "learning_rate": 2.119755378522137e-07, |
| "loss": 0.3301, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.8827090427544457, |
| "grad_norm": 0.5401514722744142, |
| "learning_rate": 2.0128051261257165e-07, |
| "loss": 0.3626, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.885735906167234, |
| "grad_norm": 0.48001410503161174, |
| "learning_rate": 1.908595786208367e-07, |
| "loss": 0.2978, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.8887627695800226, |
| "grad_norm": 0.5086952825902071, |
| "learning_rate": 1.8071302737293294e-07, |
| "loss": 0.3226, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.8917896329928112, |
| "grad_norm": 0.51829699327428, |
| "learning_rate": 1.7084114268971275e-07, |
| "loss": 0.3333, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.8948164964055998, |
| "grad_norm": 0.5148122297877614, |
| "learning_rate": 1.612442007090076e-07, |
| "loss": 0.3305, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.8978433598183881, |
| "grad_norm": 0.5051932238631632, |
| "learning_rate": 1.519224698779198e-07, |
| "loss": 0.3167, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.9008702232311767, |
| "grad_norm": 0.5213823161183292, |
| "learning_rate": 1.4287621094529524e-07, |
| "loss": 0.3319, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.903897086643965, |
| "grad_norm": 0.5407521383797909, |
| "learning_rate": 1.3410567695444576e-07, |
| "loss": 0.3408, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.9069239500567536, |
| "grad_norm": 0.5013542586668778, |
| "learning_rate": 1.2561111323605714e-07, |
| "loss": 0.3196, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.9099508134695422, |
| "grad_norm": 0.5235081553246185, |
| "learning_rate": 1.1739275740134004e-07, |
| "loss": 0.3381, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.9129776768823308, |
| "grad_norm": 0.5476725963638974, |
| "learning_rate": 1.0945083933537104e-07, |
| "loss": 0.3532, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.9160045402951191, |
| "grad_norm": 0.5373293708500385, |
| "learning_rate": 1.0178558119067316e-07, |
| "loss": 0.3428, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.9190314037079077, |
| "grad_norm": 0.5173819725119972, |
| "learning_rate": 9.439719738099318e-08, |
| "loss": 0.3557, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.922058267120696, |
| "grad_norm": 0.4929227055067737, |
| "learning_rate": 8.728589457530857e-08, |
| "loss": 0.3093, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.9250851305334846, |
| "grad_norm": 0.5057133944809219, |
| "learning_rate": 8.04518716920466e-08, |
| "loss": 0.3304, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.9281119939462732, |
| "grad_norm": 0.5236307503553929, |
| "learning_rate": 7.389531989351773e-08, |
| "loss": 0.3395, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.9311388573590618, |
| "grad_norm": 0.5118149547852485, |
| "learning_rate": 6.761642258056977e-08, |
| "loss": 0.3097, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.9341657207718501, |
| "grad_norm": 0.5293203250244016, |
| "learning_rate": 6.161535538745877e-08, |
| "loss": 0.3147, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.9371925841846387, |
| "grad_norm": 0.5243005861352074, |
| "learning_rate": 5.5892286176932875e-08, |
| "loss": 0.311, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.940219447597427, |
| "grad_norm": 0.5025317767629421, |
| "learning_rate": 5.044737503554165e-08, |
| "loss": 0.3321, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.9432463110102156, |
| "grad_norm": 0.510055518279952, |
| "learning_rate": 4.528077426915412e-08, |
| "loss": 0.3429, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.9462731744230042, |
| "grad_norm": 0.4983351520579858, |
| "learning_rate": 4.0392628398699954e-08, |
| "loss": 0.316, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.9493000378357928, |
| "grad_norm": 0.51961463710477, |
| "learning_rate": 3.578307415612714e-08, |
| "loss": 0.3306, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.952326901248581, |
| "grad_norm": 0.5241069831320792, |
| "learning_rate": 3.1452240480577265e-08, |
| "loss": 0.3366, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.9553537646613697, |
| "grad_norm": 0.5076726777197932, |
| "learning_rate": 2.7400248514776184e-08, |
| "loss": 0.311, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.958380628074158, |
| "grad_norm": 0.5270372365001028, |
| "learning_rate": 2.3627211601651157e-08, |
| "loss": 0.327, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.9614074914869466, |
| "grad_norm": 0.5345706651089321, |
| "learning_rate": 2.013323528115674e-08, |
| "loss": 0.3305, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.9644343548997352, |
| "grad_norm": 0.5701176651055055, |
| "learning_rate": 1.6918417287318245e-08, |
| "loss": 0.3597, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.9674612183125237, |
| "grad_norm": 0.5096275397795988, |
| "learning_rate": 1.3982847545507271e-08, |
| "loss": 0.3191, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.970488081725312, |
| "grad_norm": 0.5409876712500923, |
| "learning_rate": 1.1326608169920373e-08, |
| "loss": 0.3404, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.9735149451381007, |
| "grad_norm": 0.5203072840312353, |
| "learning_rate": 8.949773461282008e-09, |
| "loss": 0.3225, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.976541808550889, |
| "grad_norm": 0.5320993369980339, |
| "learning_rate": 6.8524099047695415e-09, |
| "loss": 0.348, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.9795686719636776, |
| "grad_norm": 0.5094907409675443, |
| "learning_rate": 5.034576168149175e-09, |
| "loss": 0.3287, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.9825955353764662, |
| "grad_norm": 0.507089285164817, |
| "learning_rate": 3.4963231001383657e-09, |
| "loss": 0.336, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.9856223987892547, |
| "grad_norm": 0.5138496310370845, |
| "learning_rate": 2.237693728981416e-09, |
| "loss": 0.3508, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.988649262202043, |
| "grad_norm": 0.5325962366267458, |
| "learning_rate": 1.2587232612493172e-09, |
| "loss": 0.338, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.9916761256148316, |
| "grad_norm": 0.5281655407968793, |
| "learning_rate": 5.594390808494332e-10, |
| "loss": 0.3279, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.99470298902762, |
| "grad_norm": 0.5236655600734001, |
| "learning_rate": 1.3986074826388697e-10, |
| "loss": 0.3421, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.9977298524404086, |
| "grad_norm": 0.49841312880749544, |
| "learning_rate": 0.0, |
| "loss": 0.3118, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.9977298524404086, |
| "step": 660, |
| "total_flos": 2.708198372933632e+17, |
| "train_loss": 0.41113431255022687, |
| "train_runtime": 20533.8574, |
| "train_samples_per_second": 4.117, |
| "train_steps_per_second": 0.032 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 660, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.708198372933632e+17, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |