| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.9988956377691882, |
| "eval_steps": 500, |
| "global_step": 2414, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0008282716731087797, |
| "grad_norm": 2.470858573913574, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 1.1215, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0016565433462175593, |
| "grad_norm": 2.5991060733795166, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 1.1219, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.002484815019326339, |
| "grad_norm": 2.7714691162109375, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 1.1248, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0033130866924351186, |
| "grad_norm": 2.732624053955078, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 1.1168, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0041413583655438985, |
| "grad_norm": 2.796525239944458, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 1.1265, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.004969630038652678, |
| "grad_norm": 2.6405341625213623, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 1.1092, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.005797901711761457, |
| "grad_norm": 2.7414848804473877, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 1.0995, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.006626173384870237, |
| "grad_norm": 2.596143960952759, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 1.112, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.007454445057979017, |
| "grad_norm": 2.3128225803375244, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 1.1044, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.008282716731087797, |
| "grad_norm": 2.6040737628936768, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 1.096, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.009110988404196576, |
| "grad_norm": 2.242492914199829, |
| "learning_rate": 5.5e-07, |
| "loss": 1.1244, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.009939260077305357, |
| "grad_norm": 2.436877965927124, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 1.1101, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.010767531750414136, |
| "grad_norm": 2.2004475593566895, |
| "learning_rate": 6.5e-07, |
| "loss": 1.0876, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.011595803423522915, |
| "grad_norm": 2.0984935760498047, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 1.0862, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.012424075096631695, |
| "grad_norm": 2.1312928199768066, |
| "learning_rate": 7.5e-07, |
| "loss": 1.0734, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.013252346769740474, |
| "grad_norm": 1.986462116241455, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 1.0878, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.014080618442849255, |
| "grad_norm": 1.9178447723388672, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 1.1041, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.014908890115958034, |
| "grad_norm": 1.773003101348877, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 1.082, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.015737161789066815, |
| "grad_norm": 1.7757107019424438, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 1.0938, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.016565433462175594, |
| "grad_norm": 1.6614820957183838, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.0883, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.017393705135284373, |
| "grad_norm": 1.5726022720336914, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 1.0699, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.018221976808393152, |
| "grad_norm": 1.5645663738250732, |
| "learning_rate": 1.1e-06, |
| "loss": 1.0667, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.019050248481501934, |
| "grad_norm": 1.4950520992279053, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 1.054, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.019878520154610713, |
| "grad_norm": 1.429142951965332, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 1.0366, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.020706791827719492, |
| "grad_norm": 1.3532471656799316, |
| "learning_rate": 1.25e-06, |
| "loss": 1.0147, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.02153506350082827, |
| "grad_norm": 1.3407552242279053, |
| "learning_rate": 1.3e-06, |
| "loss": 1.0251, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.02236333517393705, |
| "grad_norm": 1.2801262140274048, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 1.023, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.02319160684704583, |
| "grad_norm": 1.2124438285827637, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 1.0164, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.024019878520154612, |
| "grad_norm": 1.169960856437683, |
| "learning_rate": 1.45e-06, |
| "loss": 1.0036, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.02484815019326339, |
| "grad_norm": 1.1589689254760742, |
| "learning_rate": 1.5e-06, |
| "loss": 0.9973, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.02567642186637217, |
| "grad_norm": 1.1903630495071411, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 1.0178, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.02650469353948095, |
| "grad_norm": 1.1258587837219238, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 1.0115, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.027332965212589728, |
| "grad_norm": 1.1103761196136475, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.9974, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.02816123688569851, |
| "grad_norm": 1.0767805576324463, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.9997, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.02898950855880729, |
| "grad_norm": 1.0786221027374268, |
| "learning_rate": 1.75e-06, |
| "loss": 0.9787, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.02981778023191607, |
| "grad_norm": 1.0584348440170288, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.9828, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.030646051905024847, |
| "grad_norm": 1.034424066543579, |
| "learning_rate": 1.85e-06, |
| "loss": 0.9705, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.03147432357813363, |
| "grad_norm": 1.0167292356491089, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.9552, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.032302595251242405, |
| "grad_norm": 0.9866489171981812, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.9765, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.03313086692435119, |
| "grad_norm": 0.996547281742096, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.9694, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.03395913859745997, |
| "grad_norm": 0.9386445879936218, |
| "learning_rate": 2.05e-06, |
| "loss": 0.9559, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.034787410270568746, |
| "grad_norm": 0.9441668391227722, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.9419, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.03561568194367753, |
| "grad_norm": 0.9332805275917053, |
| "learning_rate": 2.15e-06, |
| "loss": 0.959, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.036443953616786304, |
| "grad_norm": 0.8953099250793457, |
| "learning_rate": 2.2e-06, |
| "loss": 0.923, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.037272225289895086, |
| "grad_norm": 0.8724915385246277, |
| "learning_rate": 2.25e-06, |
| "loss": 0.9277, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.03810049696300387, |
| "grad_norm": 0.9006755352020264, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.9413, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.038928768636112644, |
| "grad_norm": 0.8761175870895386, |
| "learning_rate": 2.35e-06, |
| "loss": 0.9254, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.03975704030922143, |
| "grad_norm": 0.8506019711494446, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.933, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.0405853119823302, |
| "grad_norm": 0.8446137309074402, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.9241, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.041413583655438985, |
| "grad_norm": 0.9333141446113586, |
| "learning_rate": 2.5e-06, |
| "loss": 0.8842, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.04224185532854776, |
| "grad_norm": 0.8349485993385315, |
| "learning_rate": 2.55e-06, |
| "loss": 0.9062, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.04307012700165654, |
| "grad_norm": 0.8209546208381653, |
| "learning_rate": 2.6e-06, |
| "loss": 0.8754, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.043898398674765325, |
| "grad_norm": 0.8063066005706787, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.9051, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.0447266703478741, |
| "grad_norm": 0.8150295615196228, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.8993, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.04555494202098288, |
| "grad_norm": 0.8151445388793945, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.9286, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.04638321369409166, |
| "grad_norm": 0.8064457774162292, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.895, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.04721148536720044, |
| "grad_norm": 0.814802885055542, |
| "learning_rate": 2.85e-06, |
| "loss": 0.897, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.048039757040309224, |
| "grad_norm": 0.8112044930458069, |
| "learning_rate": 2.9e-06, |
| "loss": 0.8848, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.048868028713418, |
| "grad_norm": 0.7848402857780457, |
| "learning_rate": 2.95e-06, |
| "loss": 0.8845, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.04969630038652678, |
| "grad_norm": 0.8031050562858582, |
| "learning_rate": 3e-06, |
| "loss": 0.8721, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.05052457205963556, |
| "grad_norm": 0.7847040295600891, |
| "learning_rate": 3.05e-06, |
| "loss": 0.8821, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.05135284373274434, |
| "grad_norm": 0.8053078055381775, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.8662, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.05218111540585312, |
| "grad_norm": 0.7876397967338562, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.8711, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.0530093870789619, |
| "grad_norm": 0.7634557485580444, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.8789, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.05383765875207068, |
| "grad_norm": 0.7829545140266418, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.8811, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.054665930425179456, |
| "grad_norm": 0.7707326412200928, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.8701, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.05549420209828824, |
| "grad_norm": 0.7840001583099365, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.8931, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.05632247377139702, |
| "grad_norm": 0.7522594928741455, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.8356, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.057150745444505796, |
| "grad_norm": 0.7703195214271545, |
| "learning_rate": 3.45e-06, |
| "loss": 0.8766, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.05797901711761458, |
| "grad_norm": 0.7738405466079712, |
| "learning_rate": 3.5e-06, |
| "loss": 0.8668, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.058807288790723354, |
| "grad_norm": 0.7778448462486267, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.8406, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.05963556046383214, |
| "grad_norm": 0.7515988945960999, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.879, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.06046383213694092, |
| "grad_norm": 0.7842773199081421, |
| "learning_rate": 3.65e-06, |
| "loss": 0.8514, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.061292103810049695, |
| "grad_norm": 0.7790472507476807, |
| "learning_rate": 3.7e-06, |
| "loss": 0.8263, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.06212037548315848, |
| "grad_norm": 0.7859238386154175, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.8568, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.06294864715626726, |
| "grad_norm": 0.7896199822425842, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.8638, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.06377691882937604, |
| "grad_norm": 0.7775096297264099, |
| "learning_rate": 3.85e-06, |
| "loss": 0.8448, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.06460519050248481, |
| "grad_norm": 0.7505279183387756, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.8773, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.06543346217559359, |
| "grad_norm": 0.7714847326278687, |
| "learning_rate": 3.95e-06, |
| "loss": 0.8574, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.06626173384870238, |
| "grad_norm": 0.7867633700370789, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.8644, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.06709000552181116, |
| "grad_norm": 0.7860574126243591, |
| "learning_rate": 4.05e-06, |
| "loss": 0.8522, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.06791827719491994, |
| "grad_norm": 0.7739796042442322, |
| "learning_rate": 4.1e-06, |
| "loss": 0.8151, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.06874654886802871, |
| "grad_norm": 0.8125103712081909, |
| "learning_rate": 4.15e-06, |
| "loss": 0.8723, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.06957482054113749, |
| "grad_norm": 0.801737904548645, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.8666, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.07040309221424627, |
| "grad_norm": 0.7941572070121765, |
| "learning_rate": 4.25e-06, |
| "loss": 0.8491, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.07123136388735506, |
| "grad_norm": 0.82698655128479, |
| "learning_rate": 4.3e-06, |
| "loss": 0.8547, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.07205963556046384, |
| "grad_norm": 0.7598469853401184, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.8426, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.07288790723357261, |
| "grad_norm": 0.7789424657821655, |
| "learning_rate": 4.4e-06, |
| "loss": 0.8549, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.07371617890668139, |
| "grad_norm": 0.7676123976707458, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.8471, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.07454445057979017, |
| "grad_norm": 0.8137206435203552, |
| "learning_rate": 4.5e-06, |
| "loss": 0.8318, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.07537272225289895, |
| "grad_norm": 0.8054167628288269, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.8411, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.07620099392600774, |
| "grad_norm": 0.7785388231277466, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.8151, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.0770292655991165, |
| "grad_norm": 0.7927106618881226, |
| "learning_rate": 4.65e-06, |
| "loss": 0.8513, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.07785753727222529, |
| "grad_norm": 0.8169398903846741, |
| "learning_rate": 4.7e-06, |
| "loss": 0.84, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.07868580894533407, |
| "grad_norm": 0.8014684319496155, |
| "learning_rate": 4.75e-06, |
| "loss": 0.8278, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.07951408061844285, |
| "grad_norm": 0.8068202137947083, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.8335, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.08034235229155164, |
| "grad_norm": 0.8052696585655212, |
| "learning_rate": 4.85e-06, |
| "loss": 0.8454, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.0811706239646604, |
| "grad_norm": 0.8110672235488892, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.8303, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.08199889563776919, |
| "grad_norm": 0.8089601993560791, |
| "learning_rate": 4.95e-06, |
| "loss": 0.8202, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.08282716731087797, |
| "grad_norm": 0.7949985861778259, |
| "learning_rate": 5e-06, |
| "loss": 0.8236, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.08365543898398675, |
| "grad_norm": 0.8146095275878906, |
| "learning_rate": 4.999999758136652e-06, |
| "loss": 0.8214, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.08448371065709552, |
| "grad_norm": 0.7617336511611938, |
| "learning_rate": 4.999999032546657e-06, |
| "loss": 0.8303, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.0853119823302043, |
| "grad_norm": 0.7704808712005615, |
| "learning_rate": 4.999997823230153e-06, |
| "loss": 0.8302, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.08614025400331309, |
| "grad_norm": 0.8084157109260559, |
| "learning_rate": 4.9999961301873736e-06, |
| "loss": 0.8072, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.08696852567642187, |
| "grad_norm": 0.7967061400413513, |
| "learning_rate": 4.999993953418649e-06, |
| "loss": 0.8619, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.08779679734953065, |
| "grad_norm": 0.7868252992630005, |
| "learning_rate": 4.999991292924399e-06, |
| "loss": 0.8352, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.08862506902263942, |
| "grad_norm": 0.7917938232421875, |
| "learning_rate": 4.999988148705138e-06, |
| "loss": 0.8154, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.0894533406957482, |
| "grad_norm": 0.8016756772994995, |
| "learning_rate": 4.999984520761475e-06, |
| "loss": 0.8118, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.09028161236885698, |
| "grad_norm": 0.7837339043617249, |
| "learning_rate": 4.999980409094112e-06, |
| "loss": 0.8411, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.09110988404196577, |
| "grad_norm": 0.8003127574920654, |
| "learning_rate": 4.999975813703844e-06, |
| "loss": 0.7872, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.09193815571507455, |
| "grad_norm": 0.7807706594467163, |
| "learning_rate": 4.9999707345915605e-06, |
| "loss": 0.813, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.09276642738818332, |
| "grad_norm": 0.8342602252960205, |
| "learning_rate": 4.999965171758245e-06, |
| "loss": 0.8146, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.0935946990612921, |
| "grad_norm": 0.8075517416000366, |
| "learning_rate": 4.999959125204973e-06, |
| "loss": 0.8091, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.09442297073440088, |
| "grad_norm": 0.8471419811248779, |
| "learning_rate": 4.999952594932914e-06, |
| "loss": 0.8081, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.09525124240750966, |
| "grad_norm": 0.802861750125885, |
| "learning_rate": 4.999945580943332e-06, |
| "loss": 0.825, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.09607951408061845, |
| "grad_norm": 0.8542435169219971, |
| "learning_rate": 4.999938083237585e-06, |
| "loss": 0.7984, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.09690778575372722, |
| "grad_norm": 0.8055168986320496, |
| "learning_rate": 4.9999301018171225e-06, |
| "loss": 0.8225, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.097736057426836, |
| "grad_norm": 0.8102522492408752, |
| "learning_rate": 4.99992163668349e-06, |
| "loss": 0.8251, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.09856432909994478, |
| "grad_norm": 0.8410570621490479, |
| "learning_rate": 4.999912687838324e-06, |
| "loss": 0.8291, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.09939260077305356, |
| "grad_norm": 0.8076474070549011, |
| "learning_rate": 4.999903255283357e-06, |
| "loss": 0.808, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.10022087244616235, |
| "grad_norm": 0.8374700546264648, |
| "learning_rate": 4.999893339020414e-06, |
| "loss": 0.8295, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.10104914411927111, |
| "grad_norm": 0.8258488774299622, |
| "learning_rate": 4.9998829390514134e-06, |
| "loss": 0.8217, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.1018774157923799, |
| "grad_norm": 0.8309991955757141, |
| "learning_rate": 4.999872055378368e-06, |
| "loss": 0.8245, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.10270568746548868, |
| "grad_norm": 0.9097660779953003, |
| "learning_rate": 4.999860688003384e-06, |
| "loss": 0.8126, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.10353395913859746, |
| "grad_norm": 0.8510633111000061, |
| "learning_rate": 4.9998488369286595e-06, |
| "loss": 0.8318, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.10436223081170624, |
| "grad_norm": 0.8608652353286743, |
| "learning_rate": 4.999836502156487e-06, |
| "loss": 0.8337, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.10519050248481501, |
| "grad_norm": 0.8540942668914795, |
| "learning_rate": 4.9998236836892566e-06, |
| "loss": 0.8074, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.1060187741579238, |
| "grad_norm": 0.8716321587562561, |
| "learning_rate": 4.999810381529447e-06, |
| "loss": 0.8008, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.10684704583103258, |
| "grad_norm": 0.8254547119140625, |
| "learning_rate": 4.999796595679629e-06, |
| "loss": 0.7913, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.10767531750414136, |
| "grad_norm": 0.8553611040115356, |
| "learning_rate": 4.999782326142474e-06, |
| "loss": 0.796, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.10850358917725014, |
| "grad_norm": 0.8653231263160706, |
| "learning_rate": 4.999767572920741e-06, |
| "loss": 0.8074, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.10933186085035891, |
| "grad_norm": 0.826354444026947, |
| "learning_rate": 4.999752336017286e-06, |
| "loss": 0.7975, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.1101601325234677, |
| "grad_norm": 0.8609683513641357, |
| "learning_rate": 4.9997366154350566e-06, |
| "loss": 0.8167, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.11098840419657648, |
| "grad_norm": 0.788241982460022, |
| "learning_rate": 4.999720411177094e-06, |
| "loss": 0.806, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.11181667586968526, |
| "grad_norm": 0.8463270664215088, |
| "learning_rate": 4.999703723246533e-06, |
| "loss": 0.8108, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.11264494754279404, |
| "grad_norm": 0.8122232556343079, |
| "learning_rate": 4.999686551646604e-06, |
| "loss": 0.8096, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.11347321921590281, |
| "grad_norm": 0.8305553793907166, |
| "learning_rate": 4.999668896380629e-06, |
| "loss": 0.8036, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.11430149088901159, |
| "grad_norm": 0.8326082229614258, |
| "learning_rate": 4.999650757452025e-06, |
| "loss": 0.8029, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.11512976256212037, |
| "grad_norm": 0.8478496670722961, |
| "learning_rate": 4.9996321348643e-06, |
| "loss": 0.8006, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.11595803423522916, |
| "grad_norm": 0.8346283435821533, |
| "learning_rate": 4.999613028621059e-06, |
| "loss": 0.8091, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.11678630590833794, |
| "grad_norm": 0.8181941509246826, |
| "learning_rate": 4.999593438725997e-06, |
| "loss": 0.8106, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.11761457758144671, |
| "grad_norm": 0.8323103189468384, |
| "learning_rate": 4.999573365182906e-06, |
| "loss": 0.7757, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.11844284925455549, |
| "grad_norm": 0.8319541811943054, |
| "learning_rate": 4.999552807995669e-06, |
| "loss": 0.7804, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.11927112092766427, |
| "grad_norm": 0.8478372693061829, |
| "learning_rate": 4.9995317671682655e-06, |
| "loss": 0.7576, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.12009939260077306, |
| "grad_norm": 0.8389990329742432, |
| "learning_rate": 4.999510242704765e-06, |
| "loss": 0.8037, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.12092766427388184, |
| "grad_norm": 0.8425178527832031, |
| "learning_rate": 4.999488234609332e-06, |
| "loss": 0.793, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.1217559359469906, |
| "grad_norm": 0.8828648924827576, |
| "learning_rate": 4.999465742886226e-06, |
| "loss": 0.8188, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.12258420762009939, |
| "grad_norm": 0.8396442532539368, |
| "learning_rate": 4.999442767539799e-06, |
| "loss": 0.7897, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.12341247929320817, |
| "grad_norm": 0.8590951561927795, |
| "learning_rate": 4.999419308574494e-06, |
| "loss": 0.7765, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.12424075096631695, |
| "grad_norm": 0.8167644739151001, |
| "learning_rate": 4.999395365994854e-06, |
| "loss": 0.7942, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.12506902263942574, |
| "grad_norm": 0.8681987524032593, |
| "learning_rate": 4.999370939805509e-06, |
| "loss": 0.7925, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.12589729431253452, |
| "grad_norm": 0.8093082904815674, |
| "learning_rate": 4.999346030011186e-06, |
| "loss": 0.774, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.1267255659856433, |
| "grad_norm": 0.8155264854431152, |
| "learning_rate": 4.999320636616705e-06, |
| "loss": 0.7988, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.12755383765875208, |
| "grad_norm": 0.8219082355499268, |
| "learning_rate": 4.99929475962698e-06, |
| "loss": 0.8059, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.12838210933186084, |
| "grad_norm": 0.841028094291687, |
| "learning_rate": 4.999268399047016e-06, |
| "loss": 0.8006, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.12921038100496962, |
| "grad_norm": 0.8428634405136108, |
| "learning_rate": 4.999241554881915e-06, |
| "loss": 0.7756, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.1300386526780784, |
| "grad_norm": 0.8274707794189453, |
| "learning_rate": 4.999214227136871e-06, |
| "loss": 0.7818, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.13086692435118719, |
| "grad_norm": 0.8101240992546082, |
| "learning_rate": 4.99918641581717e-06, |
| "loss": 0.776, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.13169519602429597, |
| "grad_norm": 0.845748782157898, |
| "learning_rate": 4.999158120928196e-06, |
| "loss": 0.8008, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.13252346769740475, |
| "grad_norm": 0.8261645436286926, |
| "learning_rate": 4.999129342475422e-06, |
| "loss": 0.7714, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.13335173937051353, |
| "grad_norm": 0.82314532995224, |
| "learning_rate": 4.9991000804644176e-06, |
| "loss": 0.7907, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.13418001104362232, |
| "grad_norm": 0.8381524085998535, |
| "learning_rate": 4.999070334900843e-06, |
| "loss": 0.8178, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.1350082827167311, |
| "grad_norm": 0.8288442492485046, |
| "learning_rate": 4.999040105790455e-06, |
| "loss": 0.7974, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.13583655438983988, |
| "grad_norm": 0.8183148503303528, |
| "learning_rate": 4.999009393139103e-06, |
| "loss": 0.7948, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.13666482606294864, |
| "grad_norm": 0.8139241933822632, |
| "learning_rate": 4.9989781969527274e-06, |
| "loss": 0.7767, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.13749309773605742, |
| "grad_norm": 0.8243563771247864, |
| "learning_rate": 4.998946517237367e-06, |
| "loss": 0.7742, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.1383213694091662, |
| "grad_norm": 0.8375628590583801, |
| "learning_rate": 4.99891435399915e-06, |
| "loss": 0.7745, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.13914964108227498, |
| "grad_norm": 0.847832202911377, |
| "learning_rate": 4.9988817072443e-06, |
| "loss": 0.7761, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.13997791275538377, |
| "grad_norm": 0.8581261038780212, |
| "learning_rate": 4.998848576979135e-06, |
| "loss": 0.7937, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.14080618442849255, |
| "grad_norm": 0.8112267851829529, |
| "learning_rate": 4.998814963210062e-06, |
| "loss": 0.7604, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.14163445610160133, |
| "grad_norm": 0.8231601119041443, |
| "learning_rate": 4.99878086594359e-06, |
| "loss": 0.768, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.1424627277747101, |
| "grad_norm": 0.8767548203468323, |
| "learning_rate": 4.998746285186312e-06, |
| "loss": 0.7887, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.1432909994478189, |
| "grad_norm": 0.8661550879478455, |
| "learning_rate": 4.998711220944921e-06, |
| "loss": 0.78, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.14411927112092768, |
| "grad_norm": 0.8687271475791931, |
| "learning_rate": 4.998675673226202e-06, |
| "loss": 0.7708, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.14494754279403643, |
| "grad_norm": 0.8159292340278625, |
| "learning_rate": 4.998639642037032e-06, |
| "loss": 0.7821, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.14577581446714521, |
| "grad_norm": 0.8488919138908386, |
| "learning_rate": 4.9986031273843834e-06, |
| "loss": 0.7835, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.146604086140254, |
| "grad_norm": 0.8869878649711609, |
| "learning_rate": 4.9985661292753214e-06, |
| "loss": 0.7912, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.14743235781336278, |
| "grad_norm": 0.8855004906654358, |
| "learning_rate": 4.998528647717006e-06, |
| "loss": 0.7972, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.14826062948647156, |
| "grad_norm": 0.8677574992179871, |
| "learning_rate": 4.998490682716687e-06, |
| "loss": 0.7578, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.14908890115958034, |
| "grad_norm": 0.8483648300170898, |
| "learning_rate": 4.998452234281712e-06, |
| "loss": 0.7741, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.14991717283268913, |
| "grad_norm": 0.8784208297729492, |
| "learning_rate": 4.998413302419519e-06, |
| "loss": 0.7862, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.1507454445057979, |
| "grad_norm": 0.8558729290962219, |
| "learning_rate": 4.998373887137642e-06, |
| "loss": 0.7587, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.1515737161789067, |
| "grad_norm": 0.8496033549308777, |
| "learning_rate": 4.998333988443709e-06, |
| "loss": 0.777, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.15240198785201547, |
| "grad_norm": 0.8854474425315857, |
| "learning_rate": 4.998293606345437e-06, |
| "loss": 0.7839, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.15323025952512423, |
| "grad_norm": 0.8627013564109802, |
| "learning_rate": 4.998252740850641e-06, |
| "loss": 0.7792, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.154058531198233, |
| "grad_norm": 0.8393740653991699, |
| "learning_rate": 4.998211391967228e-06, |
| "loss": 0.7691, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.1548868028713418, |
| "grad_norm": 0.8359440565109253, |
| "learning_rate": 4.998169559703199e-06, |
| "loss": 0.7752, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.15571507454445058, |
| "grad_norm": 0.8436606526374817, |
| "learning_rate": 4.9981272440666486e-06, |
| "loss": 0.7913, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.15654334621755936, |
| "grad_norm": 0.8460832834243774, |
| "learning_rate": 4.998084445065763e-06, |
| "loss": 0.7823, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.15737161789066814, |
| "grad_norm": 0.8450286984443665, |
| "learning_rate": 4.998041162708823e-06, |
| "loss": 0.7817, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.15819988956377692, |
| "grad_norm": 0.8625702857971191, |
| "learning_rate": 4.997997397004206e-06, |
| "loss": 0.7828, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.1590281612368857, |
| "grad_norm": 0.8673499822616577, |
| "learning_rate": 4.9979531479603785e-06, |
| "loss": 0.7558, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.1598564329099945, |
| "grad_norm": 0.838716983795166, |
| "learning_rate": 4.997908415585902e-06, |
| "loss": 0.7695, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.16068470458310327, |
| "grad_norm": 0.8854050636291504, |
| "learning_rate": 4.997863199889433e-06, |
| "loss": 0.7536, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.16151297625621203, |
| "grad_norm": 0.8786794543266296, |
| "learning_rate": 4.997817500879719e-06, |
| "loss": 0.7501, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1623412479293208, |
| "grad_norm": 0.8861957788467407, |
| "learning_rate": 4.997771318565603e-06, |
| "loss": 0.7973, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.1631695196024296, |
| "grad_norm": 0.8832266926765442, |
| "learning_rate": 4.997724652956021e-06, |
| "loss": 0.7756, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.16399779127553837, |
| "grad_norm": 0.8775597810745239, |
| "learning_rate": 4.9976775040600014e-06, |
| "loss": 0.7576, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.16482606294864716, |
| "grad_norm": 0.858241856098175, |
| "learning_rate": 4.997629871886669e-06, |
| "loss": 0.7635, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.16565433462175594, |
| "grad_norm": 0.8340108394622803, |
| "learning_rate": 4.997581756445238e-06, |
| "loss": 0.7793, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.16648260629486472, |
| "grad_norm": 0.8437999486923218, |
| "learning_rate": 4.9975331577450204e-06, |
| "loss": 0.7598, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.1673108779679735, |
| "grad_norm": 0.8743915557861328, |
| "learning_rate": 4.997484075795417e-06, |
| "loss": 0.7586, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.16813914964108229, |
| "grad_norm": 0.8868902921676636, |
| "learning_rate": 4.997434510605927e-06, |
| "loss": 0.7711, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.16896742131419104, |
| "grad_norm": 0.9128987193107605, |
| "learning_rate": 4.99738446218614e-06, |
| "loss": 0.7319, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.16979569298729982, |
| "grad_norm": 0.8828006386756897, |
| "learning_rate": 4.99733393054574e-06, |
| "loss": 0.7816, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1706239646604086, |
| "grad_norm": 0.8706868886947632, |
| "learning_rate": 4.997282915694505e-06, |
| "loss": 0.7525, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.1714522363335174, |
| "grad_norm": 0.8714260458946228, |
| "learning_rate": 4.9972314176423035e-06, |
| "loss": 0.7796, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.17228050800662617, |
| "grad_norm": 0.9266532063484192, |
| "learning_rate": 4.997179436399103e-06, |
| "loss": 0.7556, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.17310877967973495, |
| "grad_norm": 0.8408361673355103, |
| "learning_rate": 4.997126971974959e-06, |
| "loss": 0.7607, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.17393705135284374, |
| "grad_norm": 0.8225672245025635, |
| "learning_rate": 4.9970740243800245e-06, |
| "loss": 0.7622, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.17476532302595252, |
| "grad_norm": 0.8456878066062927, |
| "learning_rate": 4.997020593624543e-06, |
| "loss": 0.7658, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.1755935946990613, |
| "grad_norm": 0.8553061485290527, |
| "learning_rate": 4.9969666797188545e-06, |
| "loss": 0.7693, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.17642186637217008, |
| "grad_norm": 0.8672494888305664, |
| "learning_rate": 4.996912282673389e-06, |
| "loss": 0.77, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.17725013804527884, |
| "grad_norm": 0.8420187830924988, |
| "learning_rate": 4.9968574024986735e-06, |
| "loss": 0.7436, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.17807840971838762, |
| "grad_norm": 0.8541450500488281, |
| "learning_rate": 4.9968020392053255e-06, |
| "loss": 0.7303, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.1789066813914964, |
| "grad_norm": 0.871155321598053, |
| "learning_rate": 4.996746192804058e-06, |
| "loss": 0.7647, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.17973495306460519, |
| "grad_norm": 0.8463776707649231, |
| "learning_rate": 4.9966898633056765e-06, |
| "loss": 0.7676, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.18056322473771397, |
| "grad_norm": 0.864533007144928, |
| "learning_rate": 4.99663305072108e-06, |
| "loss": 0.7759, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.18139149641082275, |
| "grad_norm": 0.9121000170707703, |
| "learning_rate": 4.996575755061262e-06, |
| "loss": 0.7513, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.18221976808393153, |
| "grad_norm": 0.8512037992477417, |
| "learning_rate": 4.996517976337308e-06, |
| "loss": 0.7681, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.18304803975704032, |
| "grad_norm": 0.8548815846443176, |
| "learning_rate": 4.9964597145603975e-06, |
| "loss": 0.7723, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.1838763114301491, |
| "grad_norm": 0.8747566342353821, |
| "learning_rate": 4.996400969741805e-06, |
| "loss": 0.7735, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.18470458310325788, |
| "grad_norm": 0.8502812385559082, |
| "learning_rate": 4.996341741892895e-06, |
| "loss": 0.744, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.18553285477636663, |
| "grad_norm": 0.852314293384552, |
| "learning_rate": 4.9962820310251286e-06, |
| "loss": 0.7607, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.18636112644947542, |
| "grad_norm": 0.8693923354148865, |
| "learning_rate": 4.9962218371500595e-06, |
| "loss": 0.7651, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.1871893981225842, |
| "grad_norm": 0.8899632692337036, |
| "learning_rate": 4.996161160279335e-06, |
| "loss": 0.7473, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.18801766979569298, |
| "grad_norm": 0.8715031743049622, |
| "learning_rate": 4.996100000424694e-06, |
| "loss": 0.7469, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.18884594146880176, |
| "grad_norm": 0.9185702204704285, |
| "learning_rate": 4.996038357597971e-06, |
| "loss": 0.7637, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.18967421314191055, |
| "grad_norm": 0.8860998749732971, |
| "learning_rate": 4.995976231811094e-06, |
| "loss": 0.7539, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.19050248481501933, |
| "grad_norm": 0.8578858971595764, |
| "learning_rate": 4.995913623076084e-06, |
| "loss": 0.7545, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.1913307564881281, |
| "grad_norm": 0.9049569368362427, |
| "learning_rate": 4.995850531405054e-06, |
| "loss": 0.7738, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.1921590281612369, |
| "grad_norm": 0.8534235954284668, |
| "learning_rate": 4.995786956810212e-06, |
| "loss": 0.7663, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.19298729983434568, |
| "grad_norm": 0.8139100074768066, |
| "learning_rate": 4.995722899303859e-06, |
| "loss": 0.7642, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.19381557150745443, |
| "grad_norm": 0.8574815392494202, |
| "learning_rate": 4.995658358898391e-06, |
| "loss": 0.7542, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.19464384318056321, |
| "grad_norm": 0.8759371042251587, |
| "learning_rate": 4.995593335606294e-06, |
| "loss": 0.7627, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.195472114853672, |
| "grad_norm": 0.8267927169799805, |
| "learning_rate": 4.9955278294401496e-06, |
| "loss": 0.753, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.19630038652678078, |
| "grad_norm": 0.8891980051994324, |
| "learning_rate": 4.995461840412634e-06, |
| "loss": 0.765, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.19712865819988956, |
| "grad_norm": 0.8700340986251831, |
| "learning_rate": 4.995395368536515e-06, |
| "loss": 0.7882, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.19795692987299834, |
| "grad_norm": 0.8467540740966797, |
| "learning_rate": 4.995328413824653e-06, |
| "loss": 0.765, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.19878520154610713, |
| "grad_norm": 0.8316713571548462, |
| "learning_rate": 4.995260976290005e-06, |
| "loss": 0.7588, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.1996134732192159, |
| "grad_norm": 0.8740516304969788, |
| "learning_rate": 4.995193055945618e-06, |
| "loss": 0.7767, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.2004417448923247, |
| "grad_norm": 0.836982011795044, |
| "learning_rate": 4.995124652804635e-06, |
| "loss": 0.7657, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.20127001656543347, |
| "grad_norm": 0.8811899423599243, |
| "learning_rate": 4.9950557668802905e-06, |
| "loss": 0.7429, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.20209828823854223, |
| "grad_norm": 0.8358396887779236, |
| "learning_rate": 4.994986398185914e-06, |
| "loss": 0.7519, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.202926559911651, |
| "grad_norm": 0.8353927135467529, |
| "learning_rate": 4.994916546734927e-06, |
| "loss": 0.7585, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.2037548315847598, |
| "grad_norm": 0.8592031598091125, |
| "learning_rate": 4.994846212540846e-06, |
| "loss": 0.7529, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.20458310325786858, |
| "grad_norm": 0.889828622341156, |
| "learning_rate": 4.994775395617279e-06, |
| "loss": 0.7707, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.20541137493097736, |
| "grad_norm": 0.9039833545684814, |
| "learning_rate": 4.99470409597793e-06, |
| "loss": 0.7772, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.20623964660408614, |
| "grad_norm": 0.8434988856315613, |
| "learning_rate": 4.994632313636593e-06, |
| "loss": 0.756, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.20706791827719492, |
| "grad_norm": 0.8357129693031311, |
| "learning_rate": 4.994560048607159e-06, |
| "loss": 0.7495, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.2078961899503037, |
| "grad_norm": 0.8626013398170471, |
| "learning_rate": 4.9944873009036074e-06, |
| "loss": 0.7366, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.2087244616234125, |
| "grad_norm": 0.849981427192688, |
| "learning_rate": 4.994414070540018e-06, |
| "loss": 0.7626, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.20955273329652127, |
| "grad_norm": 0.8248019218444824, |
| "learning_rate": 4.994340357530558e-06, |
| "loss": 0.7469, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.21038100496963003, |
| "grad_norm": 0.8796818256378174, |
| "learning_rate": 4.9942661618894915e-06, |
| "loss": 0.7629, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.2112092766427388, |
| "grad_norm": 0.8798612952232361, |
| "learning_rate": 4.9941914836311735e-06, |
| "loss": 0.7749, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.2120375483158476, |
| "grad_norm": 0.8457095623016357, |
| "learning_rate": 4.994116322770054e-06, |
| "loss": 0.749, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.21286581998895637, |
| "grad_norm": 0.9132838249206543, |
| "learning_rate": 4.9940406793206755e-06, |
| "loss": 0.7552, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.21369409166206516, |
| "grad_norm": 0.874715268611908, |
| "learning_rate": 4.993964553297676e-06, |
| "loss": 0.7417, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.21452236333517394, |
| "grad_norm": 0.843243420124054, |
| "learning_rate": 4.993887944715783e-06, |
| "loss": 0.7784, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.21535063500828272, |
| "grad_norm": 0.8927919268608093, |
| "learning_rate": 4.993810853589819e-06, |
| "loss": 0.7314, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.2161789066813915, |
| "grad_norm": 0.8588681817054749, |
| "learning_rate": 4.993733279934704e-06, |
| "loss": 0.7571, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.21700717835450029, |
| "grad_norm": 0.8320103287696838, |
| "learning_rate": 4.993655223765444e-06, |
| "loss": 0.7635, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.21783545002760907, |
| "grad_norm": 0.847937285900116, |
| "learning_rate": 4.993576685097145e-06, |
| "loss": 0.7618, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.21866372170071782, |
| "grad_norm": 0.8549036979675293, |
| "learning_rate": 4.993497663945002e-06, |
| "loss": 0.7534, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.2194919933738266, |
| "grad_norm": 0.8866848349571228, |
| "learning_rate": 4.9934181603243045e-06, |
| "loss": 0.7518, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.2203202650469354, |
| "grad_norm": 0.8868260383605957, |
| "learning_rate": 4.993338174250437e-06, |
| "loss": 0.7504, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.22114853672004417, |
| "grad_norm": 0.8574029207229614, |
| "learning_rate": 4.993257705738876e-06, |
| "loss": 0.7499, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.22197680839315295, |
| "grad_norm": 0.8559240698814392, |
| "learning_rate": 4.993176754805189e-06, |
| "loss": 0.7453, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.22280508006626173, |
| "grad_norm": 0.8979408144950867, |
| "learning_rate": 4.993095321465042e-06, |
| "loss": 0.7398, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.22363335173937052, |
| "grad_norm": 0.8362364768981934, |
| "learning_rate": 4.993013405734191e-06, |
| "loss": 0.7381, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.2244616234124793, |
| "grad_norm": 0.9468829035758972, |
| "learning_rate": 4.9929310076284845e-06, |
| "loss": 0.7575, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.22528989508558808, |
| "grad_norm": 0.8521276712417603, |
| "learning_rate": 4.992848127163868e-06, |
| "loss": 0.7623, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.22611816675869686, |
| "grad_norm": 0.8890922665596008, |
| "learning_rate": 4.992764764356376e-06, |
| "loss": 0.7636, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.22694643843180562, |
| "grad_norm": 0.8513396978378296, |
| "learning_rate": 4.992680919222141e-06, |
| "loss": 0.7509, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.2277747101049144, |
| "grad_norm": 0.8549479842185974, |
| "learning_rate": 4.9925965917773826e-06, |
| "loss": 0.7572, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.22860298177802318, |
| "grad_norm": 0.8640429973602295, |
| "learning_rate": 4.99251178203842e-06, |
| "loss": 0.7352, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.22943125345113197, |
| "grad_norm": 0.8560553193092346, |
| "learning_rate": 4.992426490021662e-06, |
| "loss": 0.7437, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.23025952512424075, |
| "grad_norm": 0.8709871768951416, |
| "learning_rate": 4.992340715743614e-06, |
| "loss": 0.7758, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.23108779679734953, |
| "grad_norm": 0.9014648795127869, |
| "learning_rate": 4.9922544592208695e-06, |
| "loss": 0.7559, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.23191606847045831, |
| "grad_norm": 0.8632292747497559, |
| "learning_rate": 4.992167720470119e-06, |
| "loss": 0.7495, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2327443401435671, |
| "grad_norm": 0.8730666637420654, |
| "learning_rate": 4.9920804995081475e-06, |
| "loss": 0.7495, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.23357261181667588, |
| "grad_norm": 0.8813666701316833, |
| "learning_rate": 4.99199279635183e-06, |
| "loss": 0.7796, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.23440088348978466, |
| "grad_norm": 0.8753558397293091, |
| "learning_rate": 4.991904611018137e-06, |
| "loss": 0.7478, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.23522915516289342, |
| "grad_norm": 0.866809070110321, |
| "learning_rate": 4.99181594352413e-06, |
| "loss": 0.7406, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.2360574268360022, |
| "grad_norm": 0.9753585457801819, |
| "learning_rate": 4.991726793886966e-06, |
| "loss": 0.7286, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.23688569850911098, |
| "grad_norm": 0.8441777229309082, |
| "learning_rate": 4.991637162123896e-06, |
| "loss": 0.7379, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.23771397018221976, |
| "grad_norm": 0.8535275459289551, |
| "learning_rate": 4.9915470482522625e-06, |
| "loss": 0.7247, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.23854224185532855, |
| "grad_norm": 0.847791850566864, |
| "learning_rate": 4.991456452289499e-06, |
| "loss": 0.7313, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.23937051352843733, |
| "grad_norm": 0.908484697341919, |
| "learning_rate": 4.9913653742531385e-06, |
| "loss": 0.7817, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.2401987852015461, |
| "grad_norm": 0.9068508744239807, |
| "learning_rate": 4.991273814160802e-06, |
| "loss": 0.7389, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.2410270568746549, |
| "grad_norm": 0.8554396629333496, |
| "learning_rate": 4.991181772030206e-06, |
| "loss": 0.7396, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.24185532854776368, |
| "grad_norm": 0.8606077432632446, |
| "learning_rate": 4.991089247879159e-06, |
| "loss": 0.7298, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.24268360022087246, |
| "grad_norm": 0.8490887880325317, |
| "learning_rate": 4.990996241725565e-06, |
| "loss": 0.7502, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.2435118718939812, |
| "grad_norm": 0.815514862537384, |
| "learning_rate": 4.990902753587418e-06, |
| "loss": 0.7316, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.24434014356709, |
| "grad_norm": 0.8659678101539612, |
| "learning_rate": 4.990808783482809e-06, |
| "loss": 0.7172, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.24516841524019878, |
| "grad_norm": 0.8422633409500122, |
| "learning_rate": 4.990714331429919e-06, |
| "loss": 0.7353, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.24599668691330756, |
| "grad_norm": 0.868891179561615, |
| "learning_rate": 4.990619397447024e-06, |
| "loss": 0.7442, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.24682495858641634, |
| "grad_norm": 0.8917765021324158, |
| "learning_rate": 4.990523981552493e-06, |
| "loss": 0.7653, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.24765323025952513, |
| "grad_norm": 0.9119652509689331, |
| "learning_rate": 4.990428083764788e-06, |
| "loss": 0.7594, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.2484815019326339, |
| "grad_norm": 0.9082207083702087, |
| "learning_rate": 4.990331704102464e-06, |
| "loss": 0.7202, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.2493097736057427, |
| "grad_norm": 0.900722861289978, |
| "learning_rate": 4.99023484258417e-06, |
| "loss": 0.7413, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.2501380452788515, |
| "grad_norm": 0.8688990473747253, |
| "learning_rate": 4.990137499228648e-06, |
| "loss": 0.7331, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.25096631695196026, |
| "grad_norm": 0.8912292718887329, |
| "learning_rate": 4.990039674054732e-06, |
| "loss": 0.7574, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.25179458862506904, |
| "grad_norm": 0.9172481298446655, |
| "learning_rate": 4.989941367081351e-06, |
| "loss": 0.725, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.2526228602981778, |
| "grad_norm": 0.9051876068115234, |
| "learning_rate": 4.989842578327526e-06, |
| "loss": 0.7576, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.2534511319712866, |
| "grad_norm": 0.8844485282897949, |
| "learning_rate": 4.989743307812373e-06, |
| "loss": 0.7131, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.2542794036443954, |
| "grad_norm": 0.862833559513092, |
| "learning_rate": 4.989643555555099e-06, |
| "loss": 0.7442, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.25510767531750417, |
| "grad_norm": 0.8755072355270386, |
| "learning_rate": 4.989543321575004e-06, |
| "loss": 0.7323, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.2559359469906129, |
| "grad_norm": 0.8783672451972961, |
| "learning_rate": 4.9894426058914856e-06, |
| "loss": 0.7541, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.2567642186637217, |
| "grad_norm": 0.8187089562416077, |
| "learning_rate": 4.989341408524028e-06, |
| "loss": 0.7343, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.25759249033683046, |
| "grad_norm": 0.8496488332748413, |
| "learning_rate": 4.9892397294922126e-06, |
| "loss": 0.7392, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.25842076200993924, |
| "grad_norm": 0.8765910267829895, |
| "learning_rate": 4.989137568815715e-06, |
| "loss": 0.7129, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.259249033683048, |
| "grad_norm": 0.8490382432937622, |
| "learning_rate": 4.9890349265143005e-06, |
| "loss": 0.7468, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.2600773053561568, |
| "grad_norm": 0.9740565419197083, |
| "learning_rate": 4.988931802607831e-06, |
| "loss": 0.7592, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.2609055770292656, |
| "grad_norm": 0.9038767218589783, |
| "learning_rate": 4.988828197116259e-06, |
| "loss": 0.7611, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.26173384870237437, |
| "grad_norm": 0.9058436155319214, |
| "learning_rate": 4.988724110059631e-06, |
| "loss": 0.7652, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.26256212037548315, |
| "grad_norm": 0.8804724812507629, |
| "learning_rate": 4.988619541458088e-06, |
| "loss": 0.7528, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.26339039204859194, |
| "grad_norm": 0.8618598580360413, |
| "learning_rate": 4.988514491331861e-06, |
| "loss": 0.7438, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.2642186637217007, |
| "grad_norm": 0.8518510460853577, |
| "learning_rate": 4.9884089597012784e-06, |
| "loss": 0.7407, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.2650469353948095, |
| "grad_norm": 0.8606120944023132, |
| "learning_rate": 4.988302946586759e-06, |
| "loss": 0.7429, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.2658752070679183, |
| "grad_norm": 0.875901997089386, |
| "learning_rate": 4.9881964520088144e-06, |
| "loss": 0.7231, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.26670347874102707, |
| "grad_norm": 0.8722794055938721, |
| "learning_rate": 4.988089475988052e-06, |
| "loss": 0.7516, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.26753175041413585, |
| "grad_norm": 0.9031125903129578, |
| "learning_rate": 4.987982018545169e-06, |
| "loss": 0.7148, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.26836002208724463, |
| "grad_norm": 0.88953697681427, |
| "learning_rate": 4.9878740797009585e-06, |
| "loss": 0.7396, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.2691882937603534, |
| "grad_norm": 0.8889352083206177, |
| "learning_rate": 4.987765659476304e-06, |
| "loss": 0.7405, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.2700165654334622, |
| "grad_norm": 0.8522500991821289, |
| "learning_rate": 4.987656757892186e-06, |
| "loss": 0.7217, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.270844837106571, |
| "grad_norm": 0.8650013208389282, |
| "learning_rate": 4.987547374969676e-06, |
| "loss": 0.7471, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.27167310877967976, |
| "grad_norm": 0.8686767816543579, |
| "learning_rate": 4.987437510729936e-06, |
| "loss": 0.7451, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.2725013804527885, |
| "grad_norm": 0.8780227899551392, |
| "learning_rate": 4.987327165194225e-06, |
| "loss": 0.7321, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.27332965212589727, |
| "grad_norm": 0.8709607720375061, |
| "learning_rate": 4.987216338383896e-06, |
| "loss": 0.7334, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.27415792379900605, |
| "grad_norm": 0.8750120997428894, |
| "learning_rate": 4.98710503032039e-06, |
| "loss": 0.7449, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.27498619547211484, |
| "grad_norm": 0.9142486453056335, |
| "learning_rate": 4.986993241025245e-06, |
| "loss": 0.7381, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.2758144671452236, |
| "grad_norm": 0.9027116894721985, |
| "learning_rate": 4.986880970520092e-06, |
| "loss": 0.726, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.2766427388183324, |
| "grad_norm": 0.8772771954536438, |
| "learning_rate": 4.9867682188266524e-06, |
| "loss": 0.7399, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.2774710104914412, |
| "grad_norm": 0.9331325888633728, |
| "learning_rate": 4.986654985966745e-06, |
| "loss": 0.713, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.27829928216454997, |
| "grad_norm": 0.8578592538833618, |
| "learning_rate": 4.986541271962278e-06, |
| "loss": 0.7404, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.27912755383765875, |
| "grad_norm": 0.8695614337921143, |
| "learning_rate": 4.986427076835253e-06, |
| "loss": 0.7252, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.27995582551076753, |
| "grad_norm": 0.8932336568832397, |
| "learning_rate": 4.986312400607767e-06, |
| "loss": 0.7147, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.2807840971838763, |
| "grad_norm": 0.8598549365997314, |
| "learning_rate": 4.98619724330201e-06, |
| "loss": 0.7133, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.2816123688569851, |
| "grad_norm": 0.980992317199707, |
| "learning_rate": 4.986081604940261e-06, |
| "loss": 0.7187, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.2824406405300939, |
| "grad_norm": 0.9004166722297668, |
| "learning_rate": 4.9859654855448966e-06, |
| "loss": 0.7172, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.28326891220320266, |
| "grad_norm": 0.8636890053749084, |
| "learning_rate": 4.985848885138386e-06, |
| "loss": 0.7307, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.28409718387631144, |
| "grad_norm": 0.886384904384613, |
| "learning_rate": 4.985731803743287e-06, |
| "loss": 0.7394, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.2849254555494202, |
| "grad_norm": 0.8931464552879333, |
| "learning_rate": 4.985614241382257e-06, |
| "loss": 0.7435, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.285753727222529, |
| "grad_norm": 0.8984655737876892, |
| "learning_rate": 4.985496198078041e-06, |
| "loss": 0.7224, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.2865819988956378, |
| "grad_norm": 0.8933436870574951, |
| "learning_rate": 4.985377673853481e-06, |
| "loss": 0.7436, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.2874102705687466, |
| "grad_norm": 0.8796603083610535, |
| "learning_rate": 4.9852586687315094e-06, |
| "loss": 0.7382, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.28823854224185536, |
| "grad_norm": 0.9310036301612854, |
| "learning_rate": 4.9851391827351526e-06, |
| "loss": 0.7167, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.2890668139149641, |
| "grad_norm": 0.8710126876831055, |
| "learning_rate": 4.98501921588753e-06, |
| "loss": 0.7472, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.28989508558807286, |
| "grad_norm": 0.8849478363990784, |
| "learning_rate": 4.984898768211853e-06, |
| "loss": 0.7601, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.29072335726118165, |
| "grad_norm": 0.9195541739463806, |
| "learning_rate": 4.98477783973143e-06, |
| "loss": 0.7363, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.29155162893429043, |
| "grad_norm": 0.872543215751648, |
| "learning_rate": 4.984656430469657e-06, |
| "loss": 0.72, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.2923799006073992, |
| "grad_norm": 0.8591517210006714, |
| "learning_rate": 4.984534540450027e-06, |
| "loss": 0.715, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.293208172280508, |
| "grad_norm": 0.8657150268554688, |
| "learning_rate": 4.9844121696961235e-06, |
| "loss": 0.7201, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.2940364439536168, |
| "grad_norm": 0.963168203830719, |
| "learning_rate": 4.984289318231624e-06, |
| "loss": 0.7316, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.29486471562672556, |
| "grad_norm": 0.8789074420928955, |
| "learning_rate": 4.9841659860803e-06, |
| "loss": 0.7375, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.29569298729983434, |
| "grad_norm": 0.8537085652351379, |
| "learning_rate": 4.984042173266016e-06, |
| "loss": 0.7184, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.2965212589729431, |
| "grad_norm": 0.9043181538581848, |
| "learning_rate": 4.983917879812726e-06, |
| "loss": 0.7301, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.2973495306460519, |
| "grad_norm": 0.883391797542572, |
| "learning_rate": 4.983793105744482e-06, |
| "loss": 0.756, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.2981778023191607, |
| "grad_norm": 0.8713216185569763, |
| "learning_rate": 4.983667851085426e-06, |
| "loss": 0.7473, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.29900607399226947, |
| "grad_norm": 0.8796274065971375, |
| "learning_rate": 4.983542115859792e-06, |
| "loss": 0.7679, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.29983434566537825, |
| "grad_norm": 0.9158849716186523, |
| "learning_rate": 4.98341590009191e-06, |
| "loss": 0.7287, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.30066261733848704, |
| "grad_norm": 0.9413859844207764, |
| "learning_rate": 4.983289203806202e-06, |
| "loss": 0.7145, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.3014908890115958, |
| "grad_norm": 0.9053609371185303, |
| "learning_rate": 4.983162027027182e-06, |
| "loss": 0.7404, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.3023191606847046, |
| "grad_norm": 0.8505028486251831, |
| "learning_rate": 4.983034369779457e-06, |
| "loss": 0.7399, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3031474323578134, |
| "grad_norm": 0.8942279815673828, |
| "learning_rate": 4.982906232087728e-06, |
| "loss": 0.7157, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.30397570403092217, |
| "grad_norm": 0.868363618850708, |
| "learning_rate": 4.982777613976789e-06, |
| "loss": 0.741, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.30480397570403095, |
| "grad_norm": 0.8772525191307068, |
| "learning_rate": 4.982648515471526e-06, |
| "loss": 0.7215, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.3056322473771397, |
| "grad_norm": 0.9214457869529724, |
| "learning_rate": 4.982518936596917e-06, |
| "loss": 0.7281, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.30646051905024846, |
| "grad_norm": 0.9134591817855835, |
| "learning_rate": 4.982388877378037e-06, |
| "loss": 0.7394, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.30728879072335724, |
| "grad_norm": 0.8598564267158508, |
| "learning_rate": 4.982258337840049e-06, |
| "loss": 0.7157, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.308117062396466, |
| "grad_norm": 0.887107789516449, |
| "learning_rate": 4.982127318008212e-06, |
| "loss": 0.7396, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.3089453340695748, |
| "grad_norm": 0.8878049254417419, |
| "learning_rate": 4.9819958179078765e-06, |
| "loss": 0.7387, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.3097736057426836, |
| "grad_norm": 0.8996168375015259, |
| "learning_rate": 4.981863837564488e-06, |
| "loss": 0.7252, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.31060187741579237, |
| "grad_norm": 0.849962592124939, |
| "learning_rate": 4.981731377003583e-06, |
| "loss": 0.7126, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.31143014908890115, |
| "grad_norm": 0.8826934099197388, |
| "learning_rate": 4.98159843625079e-06, |
| "loss": 0.7163, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.31225842076200994, |
| "grad_norm": 0.8865692019462585, |
| "learning_rate": 4.981465015331832e-06, |
| "loss": 0.7392, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.3130866924351187, |
| "grad_norm": 0.9942007064819336, |
| "learning_rate": 4.981331114272527e-06, |
| "loss": 0.7383, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.3139149641082275, |
| "grad_norm": 0.8654554486274719, |
| "learning_rate": 4.9811967330987816e-06, |
| "loss": 0.7429, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.3147432357813363, |
| "grad_norm": 0.8661484718322754, |
| "learning_rate": 4.981061871836597e-06, |
| "loss": 0.7247, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.31557150745444507, |
| "grad_norm": 0.883909285068512, |
| "learning_rate": 4.98092653051207e-06, |
| "loss": 0.7362, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.31639977912755385, |
| "grad_norm": 0.8380483388900757, |
| "learning_rate": 4.980790709151385e-06, |
| "loss": 0.7246, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.31722805080066263, |
| "grad_norm": 0.8815524578094482, |
| "learning_rate": 4.980654407780824e-06, |
| "loss": 0.7332, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.3180563224737714, |
| "grad_norm": 0.8284130692481995, |
| "learning_rate": 4.980517626426758e-06, |
| "loss": 0.7301, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.3188845941468802, |
| "grad_norm": 0.8485239148139954, |
| "learning_rate": 4.980380365115656e-06, |
| "loss": 0.71, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.319712865819989, |
| "grad_norm": 0.9474765658378601, |
| "learning_rate": 4.980242623874075e-06, |
| "loss": 0.7383, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.32054113749309776, |
| "grad_norm": 0.8611152768135071, |
| "learning_rate": 4.980104402728666e-06, |
| "loss": 0.7187, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.32136940916620654, |
| "grad_norm": 0.9033477902412415, |
| "learning_rate": 4.979965701706175e-06, |
| "loss": 0.7356, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.32219768083931527, |
| "grad_norm": 0.8910272717475891, |
| "learning_rate": 4.979826520833439e-06, |
| "loss": 0.7249, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.32302595251242405, |
| "grad_norm": 0.8540544509887695, |
| "learning_rate": 4.979686860137387e-06, |
| "loss": 0.7232, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.32385422418553284, |
| "grad_norm": 0.8533694744110107, |
| "learning_rate": 4.979546719645043e-06, |
| "loss": 0.7222, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.3246824958586416, |
| "grad_norm": 0.861368715763092, |
| "learning_rate": 4.979406099383523e-06, |
| "loss": 0.7284, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.3255107675317504, |
| "grad_norm": 0.8997224569320679, |
| "learning_rate": 4.979264999380035e-06, |
| "loss": 0.7385, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.3263390392048592, |
| "grad_norm": 0.8922696113586426, |
| "learning_rate": 4.979123419661882e-06, |
| "loss": 0.7422, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.32716731087796797, |
| "grad_norm": 0.872469961643219, |
| "learning_rate": 4.978981360256457e-06, |
| "loss": 0.7447, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.32799558255107675, |
| "grad_norm": 0.867304265499115, |
| "learning_rate": 4.978838821191247e-06, |
| "loss": 0.7257, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.32882385422418553, |
| "grad_norm": 0.9332799911499023, |
| "learning_rate": 4.978695802493834e-06, |
| "loss": 0.7269, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.3296521258972943, |
| "grad_norm": 0.8926407694816589, |
| "learning_rate": 4.978552304191888e-06, |
| "loss": 0.7224, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.3304803975704031, |
| "grad_norm": 0.9313523769378662, |
| "learning_rate": 4.9784083263131764e-06, |
| "loss": 0.7524, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.3313086692435119, |
| "grad_norm": 0.8726480007171631, |
| "learning_rate": 4.978263868885557e-06, |
| "loss": 0.7097, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.33213694091662066, |
| "grad_norm": 0.8773100972175598, |
| "learning_rate": 4.978118931936981e-06, |
| "loss": 0.714, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.33296521258972944, |
| "grad_norm": 0.8497999310493469, |
| "learning_rate": 4.9779735154954924e-06, |
| "loss": 0.7276, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.3337934842628382, |
| "grad_norm": 0.867407500743866, |
| "learning_rate": 4.977827619589229e-06, |
| "loss": 0.7217, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.334621755935947, |
| "grad_norm": 0.8989058136940002, |
| "learning_rate": 4.977681244246417e-06, |
| "loss": 0.7065, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.3354500276090558, |
| "grad_norm": 0.8423893451690674, |
| "learning_rate": 4.977534389495382e-06, |
| "loss": 0.7316, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.33627829928216457, |
| "grad_norm": 0.9773634672164917, |
| "learning_rate": 4.9773870553645386e-06, |
| "loss": 0.7401, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.33710657095527335, |
| "grad_norm": 0.876132607460022, |
| "learning_rate": 4.977239241882393e-06, |
| "loss": 0.7157, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.3379348426283821, |
| "grad_norm": 0.8792843222618103, |
| "learning_rate": 4.977090949077547e-06, |
| "loss": 0.7346, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.33876311430149086, |
| "grad_norm": 0.8953642249107361, |
| "learning_rate": 4.9769421769786935e-06, |
| "loss": 0.7267, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.33959138597459965, |
| "grad_norm": 0.8540408611297607, |
| "learning_rate": 4.976792925614619e-06, |
| "loss": 0.7339, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.34041965764770843, |
| "grad_norm": 0.8807007074356079, |
| "learning_rate": 4.9766431950142005e-06, |
| "loss": 0.7245, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.3412479293208172, |
| "grad_norm": 0.9001163840293884, |
| "learning_rate": 4.976492985206412e-06, |
| "loss": 0.715, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.342076200993926, |
| "grad_norm": 0.8746712803840637, |
| "learning_rate": 4.976342296220316e-06, |
| "loss": 0.7064, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.3429044726670348, |
| "grad_norm": 0.8627833127975464, |
| "learning_rate": 4.976191128085069e-06, |
| "loss": 0.7481, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.34373274434014356, |
| "grad_norm": 0.9984755516052246, |
| "learning_rate": 4.976039480829922e-06, |
| "loss": 0.7199, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.34456101601325234, |
| "grad_norm": 0.8609892725944519, |
| "learning_rate": 4.975887354484217e-06, |
| "loss": 0.7284, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.3453892876863611, |
| "grad_norm": 0.9417768716812134, |
| "learning_rate": 4.9757347490773885e-06, |
| "loss": 0.7225, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.3462175593594699, |
| "grad_norm": 0.8718918561935425, |
| "learning_rate": 4.975581664638965e-06, |
| "loss": 0.7285, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.3470458310325787, |
| "grad_norm": 0.9066213369369507, |
| "learning_rate": 4.9754281011985666e-06, |
| "loss": 0.7275, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.34787410270568747, |
| "grad_norm": 0.8322312235832214, |
| "learning_rate": 4.975274058785906e-06, |
| "loss": 0.7406, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.34870237437879625, |
| "grad_norm": 0.9284223318099976, |
| "learning_rate": 4.975119537430789e-06, |
| "loss": 0.7311, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.34953064605190504, |
| "grad_norm": 0.9165488481521606, |
| "learning_rate": 4.974964537163115e-06, |
| "loss": 0.7293, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.3503589177250138, |
| "grad_norm": 0.8896583318710327, |
| "learning_rate": 4.974809058012874e-06, |
| "loss": 0.7274, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.3511871893981226, |
| "grad_norm": 0.8339594602584839, |
| "learning_rate": 4.97465310001015e-06, |
| "loss": 0.7149, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.3520154610712314, |
| "grad_norm": 0.8796001672744751, |
| "learning_rate": 4.97449666318512e-06, |
| "loss": 0.7074, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.35284373274434017, |
| "grad_norm": 0.9246631264686584, |
| "learning_rate": 4.974339747568052e-06, |
| "loss": 0.7349, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.35367200441744895, |
| "grad_norm": 0.9003512859344482, |
| "learning_rate": 4.974182353189309e-06, |
| "loss": 0.7034, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.3545002760905577, |
| "grad_norm": 0.8505542874336243, |
| "learning_rate": 4.974024480079343e-06, |
| "loss": 0.7037, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.35532854776366646, |
| "grad_norm": 0.8567630052566528, |
| "learning_rate": 4.973866128268705e-06, |
| "loss": 0.7099, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.35615681943677524, |
| "grad_norm": 0.8771512508392334, |
| "learning_rate": 4.97370729778803e-06, |
| "loss": 0.71, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.356985091109884, |
| "grad_norm": 0.8831201791763306, |
| "learning_rate": 4.973547988668055e-06, |
| "loss": 0.7297, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.3578133627829928, |
| "grad_norm": 0.9179908633232117, |
| "learning_rate": 4.973388200939601e-06, |
| "loss": 0.7275, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.3586416344561016, |
| "grad_norm": 0.9347992539405823, |
| "learning_rate": 4.9732279346335866e-06, |
| "loss": 0.7217, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.35946990612921037, |
| "grad_norm": 0.9216079115867615, |
| "learning_rate": 4.973067189781022e-06, |
| "loss": 0.7079, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.36029817780231915, |
| "grad_norm": 0.9260441660881042, |
| "learning_rate": 4.97290596641301e-06, |
| "loss": 0.7184, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.36112644947542794, |
| "grad_norm": 0.9058141708374023, |
| "learning_rate": 4.972744264560746e-06, |
| "loss": 0.7327, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.3619547211485367, |
| "grad_norm": 0.8677166700363159, |
| "learning_rate": 4.972582084255517e-06, |
| "loss": 0.7229, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.3627829928216455, |
| "grad_norm": 0.9395062327384949, |
| "learning_rate": 4.972419425528704e-06, |
| "loss": 0.7198, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.3636112644947543, |
| "grad_norm": 0.9541438221931458, |
| "learning_rate": 4.97225628841178e-06, |
| "loss": 0.7381, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.36443953616786307, |
| "grad_norm": 0.9125516414642334, |
| "learning_rate": 4.972092672936309e-06, |
| "loss": 0.7297, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.36526780784097185, |
| "grad_norm": 0.9199169278144836, |
| "learning_rate": 4.971928579133952e-06, |
| "loss": 0.7136, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.36609607951408063, |
| "grad_norm": 0.914042055606842, |
| "learning_rate": 4.971764007036457e-06, |
| "loss": 0.7355, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.3669243511871894, |
| "grad_norm": 0.892221212387085, |
| "learning_rate": 4.971598956675668e-06, |
| "loss": 0.6854, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.3677526228602982, |
| "grad_norm": 0.9111282229423523, |
| "learning_rate": 4.971433428083522e-06, |
| "loss": 0.7114, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.368580894533407, |
| "grad_norm": 0.9543972611427307, |
| "learning_rate": 4.971267421292045e-06, |
| "loss": 0.7133, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.36940916620651576, |
| "grad_norm": 0.8707330226898193, |
| "learning_rate": 4.971100936333359e-06, |
| "loss": 0.7181, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.37023743787962454, |
| "grad_norm": 0.8682164549827576, |
| "learning_rate": 4.970933973239677e-06, |
| "loss": 0.6985, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.37106570955273327, |
| "grad_norm": 0.8785068392753601, |
| "learning_rate": 4.9707665320433055e-06, |
| "loss": 0.7463, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.37189398122584205, |
| "grad_norm": 0.8867245316505432, |
| "learning_rate": 4.970598612776641e-06, |
| "loss": 0.7191, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.37272225289895083, |
| "grad_norm": 0.8420407176017761, |
| "learning_rate": 4.9704302154721776e-06, |
| "loss": 0.7196, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3735505245720596, |
| "grad_norm": 0.8621135354042053, |
| "learning_rate": 4.970261340162495e-06, |
| "loss": 0.7116, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.3743787962451684, |
| "grad_norm": 0.8836446404457092, |
| "learning_rate": 4.970091986880271e-06, |
| "loss": 0.7198, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.3752070679182772, |
| "grad_norm": 0.9435484409332275, |
| "learning_rate": 4.969922155658273e-06, |
| "loss": 0.7141, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.37603533959138596, |
| "grad_norm": 0.8714984655380249, |
| "learning_rate": 4.969751846529363e-06, |
| "loss": 0.7152, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.37686361126449475, |
| "grad_norm": 0.856543242931366, |
| "learning_rate": 4.9695810595264935e-06, |
| "loss": 0.7282, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.37769188293760353, |
| "grad_norm": 0.8874465823173523, |
| "learning_rate": 4.969409794682709e-06, |
| "loss": 0.6776, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.3785201546107123, |
| "grad_norm": 0.866763174533844, |
| "learning_rate": 4.96923805203115e-06, |
| "loss": 0.7359, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.3793484262838211, |
| "grad_norm": 0.9013122916221619, |
| "learning_rate": 4.969065831605045e-06, |
| "loss": 0.7103, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.3801766979569299, |
| "grad_norm": 0.9364981651306152, |
| "learning_rate": 4.968893133437718e-06, |
| "loss": 0.7146, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.38100496963003866, |
| "grad_norm": 0.9234558343887329, |
| "learning_rate": 4.968719957562585e-06, |
| "loss": 0.7452, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.38183324130314744, |
| "grad_norm": 0.8993266224861145, |
| "learning_rate": 4.968546304013153e-06, |
| "loss": 0.7537, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.3826615129762562, |
| "grad_norm": 0.8993638157844543, |
| "learning_rate": 4.968372172823023e-06, |
| "loss": 0.7287, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.383489784649365, |
| "grad_norm": 0.9135621190071106, |
| "learning_rate": 4.968197564025886e-06, |
| "loss": 0.7232, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.3843180563224738, |
| "grad_norm": 0.9207985401153564, |
| "learning_rate": 4.9680224776555305e-06, |
| "loss": 0.7077, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.38514632799558257, |
| "grad_norm": 0.8783428072929382, |
| "learning_rate": 4.9678469137458315e-06, |
| "loss": 0.6804, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.38597459966869135, |
| "grad_norm": 0.8894262909889221, |
| "learning_rate": 4.96767087233076e-06, |
| "loss": 0.7292, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.38680287134180014, |
| "grad_norm": 0.8916664719581604, |
| "learning_rate": 4.967494353444377e-06, |
| "loss": 0.7022, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.38763114301490886, |
| "grad_norm": 0.9578211307525635, |
| "learning_rate": 4.967317357120839e-06, |
| "loss": 0.7313, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.38845941468801765, |
| "grad_norm": 0.9189325571060181, |
| "learning_rate": 4.967139883394392e-06, |
| "loss": 0.7342, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.38928768636112643, |
| "grad_norm": 0.8896855115890503, |
| "learning_rate": 4.966961932299377e-06, |
| "loss": 0.7014, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.3901159580342352, |
| "grad_norm": 0.8862363696098328, |
| "learning_rate": 4.966783503870224e-06, |
| "loss": 0.7315, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.390944229707344, |
| "grad_norm": 0.9190902709960938, |
| "learning_rate": 4.966604598141458e-06, |
| "loss": 0.7246, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.3917725013804528, |
| "grad_norm": 0.8985234498977661, |
| "learning_rate": 4.9664252151476955e-06, |
| "loss": 0.7094, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.39260077305356156, |
| "grad_norm": 0.9143516421318054, |
| "learning_rate": 4.966245354923645e-06, |
| "loss": 0.6964, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.39342904472667034, |
| "grad_norm": 0.8966991901397705, |
| "learning_rate": 4.96606501750411e-06, |
| "loss": 0.7219, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.3942573163997791, |
| "grad_norm": 0.8638371825218201, |
| "learning_rate": 4.965884202923982e-06, |
| "loss": 0.7265, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.3950855880728879, |
| "grad_norm": 0.9054763317108154, |
| "learning_rate": 4.965702911218247e-06, |
| "loss": 0.7014, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.3959138597459967, |
| "grad_norm": 0.8665671944618225, |
| "learning_rate": 4.965521142421984e-06, |
| "loss": 0.711, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.39674213141910547, |
| "grad_norm": 0.8951340913772583, |
| "learning_rate": 4.965338896570363e-06, |
| "loss": 0.7134, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.39757040309221425, |
| "grad_norm": 0.8655356168746948, |
| "learning_rate": 4.965156173698648e-06, |
| "loss": 0.7184, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.39839867476532304, |
| "grad_norm": 0.8981788754463196, |
| "learning_rate": 4.964972973842191e-06, |
| "loss": 0.7006, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.3992269464384318, |
| "grad_norm": 0.8811180591583252, |
| "learning_rate": 4.964789297036444e-06, |
| "loss": 0.7207, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.4000552181115406, |
| "grad_norm": 0.8399938344955444, |
| "learning_rate": 4.964605143316944e-06, |
| "loss": 0.7181, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.4008834897846494, |
| "grad_norm": 0.8909091353416443, |
| "learning_rate": 4.9644205127193225e-06, |
| "loss": 0.7215, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.40171176145775817, |
| "grad_norm": 0.8896529674530029, |
| "learning_rate": 4.964235405279306e-06, |
| "loss": 0.7223, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.40254003313086695, |
| "grad_norm": 0.8548195958137512, |
| "learning_rate": 4.9640498210327095e-06, |
| "loss": 0.7107, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.40336830480397573, |
| "grad_norm": 0.8928537964820862, |
| "learning_rate": 4.963863760015442e-06, |
| "loss": 0.7301, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.40419657647708446, |
| "grad_norm": 0.9150252938270569, |
| "learning_rate": 4.963677222263505e-06, |
| "loss": 0.7391, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.40502484815019324, |
| "grad_norm": 0.9197206497192383, |
| "learning_rate": 4.963490207812991e-06, |
| "loss": 0.7174, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.405853119823302, |
| "grad_norm": 0.8702535033226013, |
| "learning_rate": 4.963302716700087e-06, |
| "loss": 0.709, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4066813914964108, |
| "grad_norm": 0.8834036588668823, |
| "learning_rate": 4.9631147489610695e-06, |
| "loss": 0.707, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.4075096631695196, |
| "grad_norm": 0.9147290587425232, |
| "learning_rate": 4.962926304632309e-06, |
| "loss": 0.7199, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.40833793484262837, |
| "grad_norm": 0.9179362058639526, |
| "learning_rate": 4.962737383750268e-06, |
| "loss": 0.7138, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.40916620651573715, |
| "grad_norm": 0.8796111941337585, |
| "learning_rate": 4.962547986351501e-06, |
| "loss": 0.7317, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.40999447818884593, |
| "grad_norm": 0.9005889296531677, |
| "learning_rate": 4.962358112472653e-06, |
| "loss": 0.7127, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.4108227498619547, |
| "grad_norm": 0.8824546337127686, |
| "learning_rate": 4.962167762150465e-06, |
| "loss": 0.7602, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.4116510215350635, |
| "grad_norm": 0.8728845119476318, |
| "learning_rate": 4.9619769354217665e-06, |
| "loss": 0.7098, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.4124792932081723, |
| "grad_norm": 0.9384852051734924, |
| "learning_rate": 4.961785632323481e-06, |
| "loss": 0.7082, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.41330756488128106, |
| "grad_norm": 0.8769698739051819, |
| "learning_rate": 4.961593852892625e-06, |
| "loss": 0.7308, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.41413583655438985, |
| "grad_norm": 0.8503444194793701, |
| "learning_rate": 4.961401597166304e-06, |
| "loss": 0.7056, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.41496410822749863, |
| "grad_norm": 0.9076066613197327, |
| "learning_rate": 4.96120886518172e-06, |
| "loss": 0.7176, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.4157923799006074, |
| "grad_norm": 0.8844983577728271, |
| "learning_rate": 4.961015656976163e-06, |
| "loss": 0.6945, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.4166206515737162, |
| "grad_norm": 0.8888388872146606, |
| "learning_rate": 4.960821972587018e-06, |
| "loss": 0.6956, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.417448923246825, |
| "grad_norm": 0.8812612295150757, |
| "learning_rate": 4.9606278120517606e-06, |
| "loss": 0.7219, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.41827719491993376, |
| "grad_norm": 0.8816961050033569, |
| "learning_rate": 4.96043317540796e-06, |
| "loss": 0.7132, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.41910546659304254, |
| "grad_norm": 0.8712872266769409, |
| "learning_rate": 4.960238062693274e-06, |
| "loss": 0.7337, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.4199337382661513, |
| "grad_norm": 0.8591468930244446, |
| "learning_rate": 4.960042473945459e-06, |
| "loss": 0.7453, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.42076200993926005, |
| "grad_norm": 0.9169349074363708, |
| "learning_rate": 4.959846409202356e-06, |
| "loss": 0.7318, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.42159028161236883, |
| "grad_norm": 0.8944489359855652, |
| "learning_rate": 4.959649868501903e-06, |
| "loss": 0.7116, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.4224185532854776, |
| "grad_norm": 0.8869928121566772, |
| "learning_rate": 4.9594528518821305e-06, |
| "loss": 0.7166, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.4232468249585864, |
| "grad_norm": 0.8585506081581116, |
| "learning_rate": 4.959255359381158e-06, |
| "loss": 0.7094, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.4240750966316952, |
| "grad_norm": 0.8651400208473206, |
| "learning_rate": 4.959057391037197e-06, |
| "loss": 0.7146, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.42490336830480396, |
| "grad_norm": 0.9317715764045715, |
| "learning_rate": 4.958858946888554e-06, |
| "loss": 0.7343, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.42573163997791275, |
| "grad_norm": 0.9394339919090271, |
| "learning_rate": 4.958660026973626e-06, |
| "loss": 0.7177, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.42655991165102153, |
| "grad_norm": 0.879502534866333, |
| "learning_rate": 4.958460631330903e-06, |
| "loss": 0.7314, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.4273881833241303, |
| "grad_norm": 0.8435805439949036, |
| "learning_rate": 4.958260759998965e-06, |
| "loss": 0.7193, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.4282164549972391, |
| "grad_norm": 0.9267009496688843, |
| "learning_rate": 4.958060413016485e-06, |
| "loss": 0.7134, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.4290447266703479, |
| "grad_norm": 0.8646306395530701, |
| "learning_rate": 4.9578595904222296e-06, |
| "loss": 0.6968, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.42987299834345666, |
| "grad_norm": 0.910114586353302, |
| "learning_rate": 4.957658292255055e-06, |
| "loss": 0.696, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.43070127001656544, |
| "grad_norm": 0.9812292456626892, |
| "learning_rate": 4.95745651855391e-06, |
| "loss": 0.7196, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.4315295416896742, |
| "grad_norm": 0.8782804608345032, |
| "learning_rate": 4.9572542693578375e-06, |
| "loss": 0.6887, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.432357813362783, |
| "grad_norm": 0.867567777633667, |
| "learning_rate": 4.9570515447059705e-06, |
| "loss": 0.7072, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.4331860850358918, |
| "grad_norm": 0.8995822668075562, |
| "learning_rate": 4.9568483446375335e-06, |
| "loss": 0.7255, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.43401435670900057, |
| "grad_norm": 0.8991445302963257, |
| "learning_rate": 4.956644669191845e-06, |
| "loss": 0.7019, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.43484262838210935, |
| "grad_norm": 0.8815371990203857, |
| "learning_rate": 4.9564405184083126e-06, |
| "loss": 0.7158, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.43567090005521814, |
| "grad_norm": 0.8883444666862488, |
| "learning_rate": 4.956235892326438e-06, |
| "loss": 0.706, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.4364991717283269, |
| "grad_norm": 0.9254900813102722, |
| "learning_rate": 4.9560307909858165e-06, |
| "loss": 0.6991, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.43732744340143564, |
| "grad_norm": 0.9335938692092896, |
| "learning_rate": 4.955825214426131e-06, |
| "loss": 0.7378, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.4381557150745444, |
| "grad_norm": 0.8578926920890808, |
| "learning_rate": 4.95561916268716e-06, |
| "loss": 0.7085, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.4389839867476532, |
| "grad_norm": 0.8882257342338562, |
| "learning_rate": 4.955412635808771e-06, |
| "loss": 0.6837, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.439812258420762, |
| "grad_norm": 0.9361854195594788, |
| "learning_rate": 4.955205633830927e-06, |
| "loss": 0.7264, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.4406405300938708, |
| "grad_norm": 0.8939785361289978, |
| "learning_rate": 4.95499815679368e-06, |
| "loss": 0.6938, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.44146880176697956, |
| "grad_norm": 0.900846004486084, |
| "learning_rate": 4.954790204737174e-06, |
| "loss": 0.6908, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.44229707344008834, |
| "grad_norm": 0.8563645482063293, |
| "learning_rate": 4.954581777701647e-06, |
| "loss": 0.7239, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.4431253451131971, |
| "grad_norm": 0.8740293383598328, |
| "learning_rate": 4.954372875727427e-06, |
| "loss": 0.7118, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.4439536167863059, |
| "grad_norm": 0.8898264765739441, |
| "learning_rate": 4.954163498854936e-06, |
| "loss": 0.7145, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.4447818884594147, |
| "grad_norm": 0.8909496068954468, |
| "learning_rate": 4.953953647124684e-06, |
| "loss": 0.6988, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.44561016013252347, |
| "grad_norm": 0.8946428298950195, |
| "learning_rate": 4.953743320577278e-06, |
| "loss": 0.702, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.44643843180563225, |
| "grad_norm": 0.9025486707687378, |
| "learning_rate": 4.953532519253412e-06, |
| "loss": 0.7292, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.44726670347874103, |
| "grad_norm": 0.9060450792312622, |
| "learning_rate": 4.953321243193876e-06, |
| "loss": 0.6981, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.4480949751518498, |
| "grad_norm": 0.9270219206809998, |
| "learning_rate": 4.9531094924395485e-06, |
| "loss": 0.7266, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.4489232468249586, |
| "grad_norm": 0.8969677686691284, |
| "learning_rate": 4.952897267031401e-06, |
| "loss": 0.7018, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.4497515184980674, |
| "grad_norm": 0.8647010326385498, |
| "learning_rate": 4.9526845670104995e-06, |
| "loss": 0.7143, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.45057979017117616, |
| "grad_norm": 0.9207059741020203, |
| "learning_rate": 4.952471392417997e-06, |
| "loss": 0.7015, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.45140806184428495, |
| "grad_norm": 0.8863884806632996, |
| "learning_rate": 4.9522577432951416e-06, |
| "loss": 0.7043, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.45223633351739373, |
| "grad_norm": 0.9071975350379944, |
| "learning_rate": 4.952043619683274e-06, |
| "loss": 0.7098, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.4530646051905025, |
| "grad_norm": 0.88376384973526, |
| "learning_rate": 4.951829021623822e-06, |
| "loss": 0.7196, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.45389287686361124, |
| "grad_norm": 0.8879780173301697, |
| "learning_rate": 4.9516139491583094e-06, |
| "loss": 0.7155, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.45472114853672, |
| "grad_norm": 0.8944398760795593, |
| "learning_rate": 4.951398402328353e-06, |
| "loss": 0.6871, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.4555494202098288, |
| "grad_norm": 0.9110270738601685, |
| "learning_rate": 4.9511823811756565e-06, |
| "loss": 0.711, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.4563776918829376, |
| "grad_norm": 0.855482280254364, |
| "learning_rate": 4.95096588574202e-06, |
| "loss": 0.7163, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.45720596355604637, |
| "grad_norm": 0.869230329990387, |
| "learning_rate": 4.95074891606933e-06, |
| "loss": 0.6994, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.45803423522915515, |
| "grad_norm": 0.8858745098114014, |
| "learning_rate": 4.950531472199572e-06, |
| "loss": 0.6838, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.45886250690226393, |
| "grad_norm": 0.8848612904548645, |
| "learning_rate": 4.950313554174817e-06, |
| "loss": 0.715, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.4596907785753727, |
| "grad_norm": 0.8671236038208008, |
| "learning_rate": 4.950095162037232e-06, |
| "loss": 0.7075, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.4605190502484815, |
| "grad_norm": 0.8726053833961487, |
| "learning_rate": 4.9498762958290715e-06, |
| "loss": 0.6957, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.4613473219215903, |
| "grad_norm": 0.902860701084137, |
| "learning_rate": 4.949656955592685e-06, |
| "loss": 0.7327, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.46217559359469906, |
| "grad_norm": 0.9243369102478027, |
| "learning_rate": 4.949437141370514e-06, |
| "loss": 0.7052, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.46300386526780785, |
| "grad_norm": 0.9099013209342957, |
| "learning_rate": 4.949216853205089e-06, |
| "loss": 0.713, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.46383213694091663, |
| "grad_norm": 0.9089868068695068, |
| "learning_rate": 4.948996091139034e-06, |
| "loss": 0.7088, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.4646604086140254, |
| "grad_norm": 0.9116033911705017, |
| "learning_rate": 4.948774855215065e-06, |
| "loss": 0.7188, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.4654886802871342, |
| "grad_norm": 0.8940712213516235, |
| "learning_rate": 4.948553145475989e-06, |
| "loss": 0.6932, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.466316951960243, |
| "grad_norm": 0.8784950375556946, |
| "learning_rate": 4.948330961964703e-06, |
| "loss": 0.6835, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.46714522363335176, |
| "grad_norm": 0.8853821158409119, |
| "learning_rate": 4.948108304724201e-06, |
| "loss": 0.7128, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.46797349530646054, |
| "grad_norm": 0.8694676160812378, |
| "learning_rate": 4.947885173797561e-06, |
| "loss": 0.7195, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.4688017669795693, |
| "grad_norm": 0.9209294319152832, |
| "learning_rate": 4.94766156922796e-06, |
| "loss": 0.7308, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.4696300386526781, |
| "grad_norm": 0.9047339558601379, |
| "learning_rate": 4.947437491058662e-06, |
| "loss": 0.714, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.47045831032578683, |
| "grad_norm": 0.8653138279914856, |
| "learning_rate": 4.947212939333024e-06, |
| "loss": 0.7158, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.4712865819988956, |
| "grad_norm": 0.8724926114082336, |
| "learning_rate": 4.946987914094495e-06, |
| "loss": 0.6968, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.4721148536720044, |
| "grad_norm": 0.8798236846923828, |
| "learning_rate": 4.946762415386615e-06, |
| "loss": 0.7091, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.4729431253451132, |
| "grad_norm": 0.8758096694946289, |
| "learning_rate": 4.946536443253016e-06, |
| "loss": 0.7155, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.47377139701822196, |
| "grad_norm": 0.8818556070327759, |
| "learning_rate": 4.946309997737422e-06, |
| "loss": 0.7233, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.47459966869133074, |
| "grad_norm": 0.8609803318977356, |
| "learning_rate": 4.946083078883647e-06, |
| "loss": 0.6922, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.4754279403644395, |
| "grad_norm": 0.8874679803848267, |
| "learning_rate": 4.945855686735599e-06, |
| "loss": 0.7082, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.4762562120375483, |
| "grad_norm": 0.9097321629524231, |
| "learning_rate": 4.9456278213372755e-06, |
| "loss": 0.7134, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.4770844837106571, |
| "grad_norm": 0.8790103793144226, |
| "learning_rate": 4.945399482732765e-06, |
| "loss": 0.714, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.4779127553837659, |
| "grad_norm": 0.8857675790786743, |
| "learning_rate": 4.945170670966253e-06, |
| "loss": 0.7133, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.47874102705687466, |
| "grad_norm": 0.9326540231704712, |
| "learning_rate": 4.944941386082008e-06, |
| "loss": 0.7135, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.47956929872998344, |
| "grad_norm": 0.8705143928527832, |
| "learning_rate": 4.944711628124396e-06, |
| "loss": 0.7236, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.4803975704030922, |
| "grad_norm": 0.9149577617645264, |
| "learning_rate": 4.944481397137874e-06, |
| "loss": 0.6886, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.481225842076201, |
| "grad_norm": 0.865691602230072, |
| "learning_rate": 4.944250693166988e-06, |
| "loss": 0.7315, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.4820541137493098, |
| "grad_norm": 0.87641841173172, |
| "learning_rate": 4.94401951625638e-06, |
| "loss": 0.701, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.48288238542241857, |
| "grad_norm": 0.9245815277099609, |
| "learning_rate": 4.943787866450776e-06, |
| "loss": 0.7067, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.48371065709552735, |
| "grad_norm": 0.8809130787849426, |
| "learning_rate": 4.943555743795002e-06, |
| "loss": 0.707, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.48453892876863613, |
| "grad_norm": 0.8952463865280151, |
| "learning_rate": 4.943323148333969e-06, |
| "loss": 0.6875, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.4853672004417449, |
| "grad_norm": 0.9045538902282715, |
| "learning_rate": 4.943090080112684e-06, |
| "loss": 0.7151, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.4861954721148537, |
| "grad_norm": 0.887015700340271, |
| "learning_rate": 4.942856539176243e-06, |
| "loss": 0.6812, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.4870237437879624, |
| "grad_norm": 0.8693497180938721, |
| "learning_rate": 4.942622525569832e-06, |
| "loss": 0.7368, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.4878520154610712, |
| "grad_norm": 0.9049307107925415, |
| "learning_rate": 4.942388039338733e-06, |
| "loss": 0.7007, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.48868028713418, |
| "grad_norm": 0.8627849817276001, |
| "learning_rate": 4.942153080528317e-06, |
| "loss": 0.7199, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.4895085588072888, |
| "grad_norm": 0.9065513014793396, |
| "learning_rate": 4.941917649184045e-06, |
| "loss": 0.6951, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.49033683048039756, |
| "grad_norm": 0.8663044571876526, |
| "learning_rate": 4.941681745351471e-06, |
| "loss": 0.6998, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.49116510215350634, |
| "grad_norm": 0.8759627938270569, |
| "learning_rate": 4.94144536907624e-06, |
| "loss": 0.7089, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.4919933738266151, |
| "grad_norm": 0.8970480561256409, |
| "learning_rate": 4.9412085204040896e-06, |
| "loss": 0.691, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.4928216454997239, |
| "grad_norm": 0.8955320715904236, |
| "learning_rate": 4.940971199380846e-06, |
| "loss": 0.7065, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.4936499171728327, |
| "grad_norm": 0.9119921922683716, |
| "learning_rate": 4.940733406052431e-06, |
| "loss": 0.692, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.49447818884594147, |
| "grad_norm": 0.8964525461196899, |
| "learning_rate": 4.9404951404648535e-06, |
| "loss": 0.6989, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.49530646051905025, |
| "grad_norm": 0.9049882888793945, |
| "learning_rate": 4.940256402664216e-06, |
| "loss": 0.6984, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.49613473219215903, |
| "grad_norm": 0.8992204666137695, |
| "learning_rate": 4.940017192696713e-06, |
| "loss": 0.6761, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.4969630038652678, |
| "grad_norm": 0.8661025166511536, |
| "learning_rate": 4.939777510608628e-06, |
| "loss": 0.7274, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.4977912755383766, |
| "grad_norm": 0.8698391318321228, |
| "learning_rate": 4.939537356446339e-06, |
| "loss": 0.7143, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.4986195472114854, |
| "grad_norm": 0.8934414386749268, |
| "learning_rate": 4.939296730256312e-06, |
| "loss": 0.7023, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.49944781888459416, |
| "grad_norm": 0.9578946232795715, |
| "learning_rate": 4.939055632085107e-06, |
| "loss": 0.72, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.500276090557703, |
| "grad_norm": 0.9371532201766968, |
| "learning_rate": 4.9388140619793735e-06, |
| "loss": 0.6943, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.5011043622308117, |
| "grad_norm": 0.8999021053314209, |
| "learning_rate": 4.938572019985853e-06, |
| "loss": 0.7119, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5019326339039205, |
| "grad_norm": 0.9604562520980835, |
| "learning_rate": 4.93832950615138e-06, |
| "loss": 0.7091, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.5027609055770292, |
| "grad_norm": 0.9451720118522644, |
| "learning_rate": 4.938086520522876e-06, |
| "loss": 0.7125, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.5035891772501381, |
| "grad_norm": 0.9646201729774475, |
| "learning_rate": 4.937843063147359e-06, |
| "loss": 0.7028, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.5044174489232468, |
| "grad_norm": 0.8942106366157532, |
| "learning_rate": 4.937599134071933e-06, |
| "loss": 0.7044, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.5052457205963556, |
| "grad_norm": 0.9242447018623352, |
| "learning_rate": 4.937354733343799e-06, |
| "loss": 0.7169, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5060739922694644, |
| "grad_norm": 0.8950359225273132, |
| "learning_rate": 4.937109861010244e-06, |
| "loss": 0.691, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.5069022639425732, |
| "grad_norm": 0.8618619441986084, |
| "learning_rate": 4.936864517118649e-06, |
| "loss": 0.6993, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.5077305356156819, |
| "grad_norm": 0.842147946357727, |
| "learning_rate": 4.936618701716487e-06, |
| "loss": 0.7082, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.5085588072887908, |
| "grad_norm": 0.932816207408905, |
| "learning_rate": 4.936372414851321e-06, |
| "loss": 0.7303, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.5093870789618995, |
| "grad_norm": 0.8790065050125122, |
| "learning_rate": 4.936125656570802e-06, |
| "loss": 0.7126, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5102153506350083, |
| "grad_norm": 0.8632124662399292, |
| "learning_rate": 4.935878426922679e-06, |
| "loss": 0.7226, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.5110436223081171, |
| "grad_norm": 0.8834452033042908, |
| "learning_rate": 4.935630725954787e-06, |
| "loss": 0.7167, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.5118718939812258, |
| "grad_norm": 0.9084344506263733, |
| "learning_rate": 4.935382553715055e-06, |
| "loss": 0.7047, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.5127001656543346, |
| "grad_norm": 0.8920948505401611, |
| "learning_rate": 4.935133910251499e-06, |
| "loss": 0.6782, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.5135284373274434, |
| "grad_norm": 0.8842042684555054, |
| "learning_rate": 4.934884795612234e-06, |
| "loss": 0.699, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5143567090005522, |
| "grad_norm": 0.8301382660865784, |
| "learning_rate": 4.934635209845458e-06, |
| "loss": 0.6893, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.5151849806736609, |
| "grad_norm": 0.8527718186378479, |
| "learning_rate": 4.934385152999463e-06, |
| "loss": 0.7138, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.5160132523467698, |
| "grad_norm": 0.9009461402893066, |
| "learning_rate": 4.934134625122636e-06, |
| "loss": 0.6976, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.5168415240198785, |
| "grad_norm": 0.8986479640007019, |
| "learning_rate": 4.933883626263448e-06, |
| "loss": 0.7187, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.5176697956929873, |
| "grad_norm": 0.8971537351608276, |
| "learning_rate": 4.933632156470467e-06, |
| "loss": 0.7041, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.518498067366096, |
| "grad_norm": 0.8781216740608215, |
| "learning_rate": 4.93338021579235e-06, |
| "loss": 0.7044, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.5193263390392049, |
| "grad_norm": 0.8780968189239502, |
| "learning_rate": 4.933127804277845e-06, |
| "loss": 0.6821, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.5201546107123136, |
| "grad_norm": 0.9107642769813538, |
| "learning_rate": 4.932874921975792e-06, |
| "loss": 0.684, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.5209828823854225, |
| "grad_norm": 0.882486879825592, |
| "learning_rate": 4.93262156893512e-06, |
| "loss": 0.706, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.5218111540585312, |
| "grad_norm": 0.8533217310905457, |
| "learning_rate": 4.9323677452048505e-06, |
| "loss": 0.7054, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.52263942573164, |
| "grad_norm": 0.8798341155052185, |
| "learning_rate": 4.932113450834097e-06, |
| "loss": 0.6927, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.5234676974047487, |
| "grad_norm": 0.8587237000465393, |
| "learning_rate": 4.931858685872062e-06, |
| "loss": 0.7122, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.5242959690778576, |
| "grad_norm": 0.8657323122024536, |
| "learning_rate": 4.931603450368042e-06, |
| "loss": 0.7149, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.5251242407509663, |
| "grad_norm": 0.8773365616798401, |
| "learning_rate": 4.931347744371421e-06, |
| "loss": 0.7108, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.5259525124240751, |
| "grad_norm": 0.8926152586936951, |
| "learning_rate": 4.931091567931675e-06, |
| "loss": 0.7164, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5267807840971839, |
| "grad_norm": 0.921498715877533, |
| "learning_rate": 4.930834921098374e-06, |
| "loss": 0.7315, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.5276090557702926, |
| "grad_norm": 0.9108750820159912, |
| "learning_rate": 4.930577803921176e-06, |
| "loss": 0.6969, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.5284373274434014, |
| "grad_norm": 0.877741813659668, |
| "learning_rate": 4.93032021644983e-06, |
| "loss": 0.6986, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.5292655991165102, |
| "grad_norm": 0.8624311089515686, |
| "learning_rate": 4.930062158734178e-06, |
| "loss": 0.7016, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.530093870789619, |
| "grad_norm": 0.8883424401283264, |
| "learning_rate": 4.92980363082415e-06, |
| "loss": 0.7045, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.5309221424627277, |
| "grad_norm": 0.8547376990318298, |
| "learning_rate": 4.92954463276977e-06, |
| "loss": 0.6909, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.5317504141358366, |
| "grad_norm": 0.8899771571159363, |
| "learning_rate": 4.929285164621152e-06, |
| "loss": 0.7149, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.5325786858089453, |
| "grad_norm": 0.8723654747009277, |
| "learning_rate": 4.9290252264285e-06, |
| "loss": 0.6878, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.5334069574820541, |
| "grad_norm": 0.9399120807647705, |
| "learning_rate": 4.928764818242109e-06, |
| "loss": 0.7086, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.5342352291551629, |
| "grad_norm": 0.929222583770752, |
| "learning_rate": 4.928503940112367e-06, |
| "loss": 0.6812, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.5350635008282717, |
| "grad_norm": 0.9498836994171143, |
| "learning_rate": 4.928242592089752e-06, |
| "loss": 0.7074, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.5358917725013804, |
| "grad_norm": 0.9123422503471375, |
| "learning_rate": 4.927980774224829e-06, |
| "loss": 0.709, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.5367200441744893, |
| "grad_norm": 0.8931800723075867, |
| "learning_rate": 4.927718486568261e-06, |
| "loss": 0.7208, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.537548315847598, |
| "grad_norm": 0.9118952751159668, |
| "learning_rate": 4.927455729170796e-06, |
| "loss": 0.6956, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.5383765875207068, |
| "grad_norm": 0.9187706708908081, |
| "learning_rate": 4.927192502083276e-06, |
| "loss": 0.7085, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.5392048591938156, |
| "grad_norm": 0.8925501704216003, |
| "learning_rate": 4.9269288053566335e-06, |
| "loss": 0.7019, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.5400331308669244, |
| "grad_norm": 0.8895928263664246, |
| "learning_rate": 4.92666463904189e-06, |
| "loss": 0.6834, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.5408614025400331, |
| "grad_norm": 0.9343291521072388, |
| "learning_rate": 4.926400003190161e-06, |
| "loss": 0.676, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.541689674213142, |
| "grad_norm": 0.9105545878410339, |
| "learning_rate": 4.926134897852648e-06, |
| "loss": 0.6913, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.5425179458862507, |
| "grad_norm": 0.9266970753669739, |
| "learning_rate": 4.92586932308065e-06, |
| "loss": 0.7159, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.5433462175593595, |
| "grad_norm": 0.8691042065620422, |
| "learning_rate": 4.9256032789255505e-06, |
| "loss": 0.6694, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.5441744892324683, |
| "grad_norm": 0.8922919631004333, |
| "learning_rate": 4.925336765438829e-06, |
| "loss": 0.6853, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.545002760905577, |
| "grad_norm": 0.8806491494178772, |
| "learning_rate": 4.9250697826720515e-06, |
| "loss": 0.7036, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.5458310325786858, |
| "grad_norm": 0.875612199306488, |
| "learning_rate": 4.924802330676877e-06, |
| "loss": 0.6907, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.5466593042517945, |
| "grad_norm": 0.8810746669769287, |
| "learning_rate": 4.924534409505055e-06, |
| "loss": 0.6635, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.5474875759249034, |
| "grad_norm": 0.8923916220664978, |
| "learning_rate": 4.924266019208427e-06, |
| "loss": 0.6848, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.5483158475980121, |
| "grad_norm": 0.8986642360687256, |
| "learning_rate": 4.923997159838923e-06, |
| "loss": 0.6846, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.549144119271121, |
| "grad_norm": 0.876979649066925, |
| "learning_rate": 4.923727831448564e-06, |
| "loss": 0.7052, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.5499723909442297, |
| "grad_norm": 0.873212456703186, |
| "learning_rate": 4.923458034089463e-06, |
| "loss": 0.6622, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.5508006626173385, |
| "grad_norm": 0.8741701245307922, |
| "learning_rate": 4.923187767813825e-06, |
| "loss": 0.7344, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.5516289342904472, |
| "grad_norm": 0.8628636598587036, |
| "learning_rate": 4.922917032673941e-06, |
| "loss": 0.7184, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.5524572059635561, |
| "grad_norm": 0.8481374979019165, |
| "learning_rate": 4.922645828722199e-06, |
| "loss": 0.7057, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.5532854776366648, |
| "grad_norm": 0.8607463240623474, |
| "learning_rate": 4.922374156011071e-06, |
| "loss": 0.6947, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.5541137493097736, |
| "grad_norm": 0.8851789832115173, |
| "learning_rate": 4.922102014593126e-06, |
| "loss": 0.7178, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.5549420209828824, |
| "grad_norm": 0.8644749522209167, |
| "learning_rate": 4.921829404521019e-06, |
| "loss": 0.7116, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.5557702926559912, |
| "grad_norm": 0.8965548276901245, |
| "learning_rate": 4.921556325847499e-06, |
| "loss": 0.7028, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.5565985643290999, |
| "grad_norm": 0.8788665533065796, |
| "learning_rate": 4.921282778625403e-06, |
| "loss": 0.6972, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.5574268360022088, |
| "grad_norm": 0.8776395320892334, |
| "learning_rate": 4.921008762907661e-06, |
| "loss": 0.6981, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.5582551076753175, |
| "grad_norm": 0.8957296013832092, |
| "learning_rate": 4.920734278747291e-06, |
| "loss": 0.6982, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.5590833793484263, |
| "grad_norm": 1.0130045413970947, |
| "learning_rate": 4.920459326197403e-06, |
| "loss": 0.7308, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.5599116510215351, |
| "grad_norm": 0.8804963827133179, |
| "learning_rate": 4.9201839053112e-06, |
| "loss": 0.665, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.5607399226946438, |
| "grad_norm": 0.9046277403831482, |
| "learning_rate": 4.919908016141972e-06, |
| "loss": 0.6962, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.5615681943677526, |
| "grad_norm": 0.8929125666618347, |
| "learning_rate": 4.9196316587431e-06, |
| "loss": 0.7128, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.5623964660408614, |
| "grad_norm": 0.8853150010108948, |
| "learning_rate": 4.919354833168059e-06, |
| "loss": 0.7084, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.5632247377139702, |
| "grad_norm": 0.8837444186210632, |
| "learning_rate": 4.919077539470409e-06, |
| "loss": 0.7168, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.5640530093870789, |
| "grad_norm": 0.8787657022476196, |
| "learning_rate": 4.918799777703806e-06, |
| "loss": 0.7123, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.5648812810601878, |
| "grad_norm": 0.8485670685768127, |
| "learning_rate": 4.9185215479219944e-06, |
| "loss": 0.6758, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.5657095527332965, |
| "grad_norm": 0.9010062217712402, |
| "learning_rate": 4.918242850178808e-06, |
| "loss": 0.7045, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.5665378244064053, |
| "grad_norm": 0.8893844485282898, |
| "learning_rate": 4.9179636845281735e-06, |
| "loss": 0.714, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.567366096079514, |
| "grad_norm": 0.9152569770812988, |
| "learning_rate": 4.917684051024104e-06, |
| "loss": 0.7021, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.5681943677526229, |
| "grad_norm": 0.9026959538459778, |
| "learning_rate": 4.917403949720709e-06, |
| "loss": 0.716, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.5690226394257316, |
| "grad_norm": 0.908470094203949, |
| "learning_rate": 4.9171233806721854e-06, |
| "loss": 0.708, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.5698509110988405, |
| "grad_norm": 0.9315148591995239, |
| "learning_rate": 4.91684234393282e-06, |
| "loss": 0.6995, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.5706791827719492, |
| "grad_norm": 0.9262105226516724, |
| "learning_rate": 4.916560839556989e-06, |
| "loss": 0.7241, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.571507454445058, |
| "grad_norm": 0.8946736454963684, |
| "learning_rate": 4.916278867599163e-06, |
| "loss": 0.7243, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.5723357261181667, |
| "grad_norm": 0.9031121134757996, |
| "learning_rate": 4.915996428113901e-06, |
| "loss": 0.7046, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.5731639977912756, |
| "grad_norm": 0.8951583504676819, |
| "learning_rate": 4.91571352115585e-06, |
| "loss": 0.6825, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.5739922694643843, |
| "grad_norm": 0.9284839034080505, |
| "learning_rate": 4.915430146779754e-06, |
| "loss": 0.7024, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.5748205411374931, |
| "grad_norm": 0.887868344783783, |
| "learning_rate": 4.915146305040439e-06, |
| "loss": 0.6993, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.5756488128106019, |
| "grad_norm": 0.9047157764434814, |
| "learning_rate": 4.914861995992829e-06, |
| "loss": 0.6981, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.5764770844837107, |
| "grad_norm": 0.8794386386871338, |
| "learning_rate": 4.9145772196919336e-06, |
| "loss": 0.68, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.5773053561568194, |
| "grad_norm": 0.8789144158363342, |
| "learning_rate": 4.914291976192854e-06, |
| "loss": 0.6893, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.5781336278299282, |
| "grad_norm": 0.9053775072097778, |
| "learning_rate": 4.914006265550783e-06, |
| "loss": 0.7018, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.578961899503037, |
| "grad_norm": 0.9568415880203247, |
| "learning_rate": 4.913720087821003e-06, |
| "loss": 0.7114, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.5797901711761457, |
| "grad_norm": 0.9020230770111084, |
| "learning_rate": 4.913433443058887e-06, |
| "loss": 0.7037, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.5806184428492546, |
| "grad_norm": 0.8782451152801514, |
| "learning_rate": 4.9131463313198964e-06, |
| "loss": 0.6767, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.5814467145223633, |
| "grad_norm": 0.8985885977745056, |
| "learning_rate": 4.912858752659586e-06, |
| "loss": 0.6972, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.5822749861954721, |
| "grad_norm": 0.8828071355819702, |
| "learning_rate": 4.912570707133599e-06, |
| "loss": 0.6992, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.5831032578685809, |
| "grad_norm": 0.8677062392234802, |
| "learning_rate": 4.91228219479767e-06, |
| "loss": 0.6858, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.5839315295416897, |
| "grad_norm": 0.8759146332740784, |
| "learning_rate": 4.911993215707624e-06, |
| "loss": 0.6965, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.5847598012147984, |
| "grad_norm": 0.9072628021240234, |
| "learning_rate": 4.911703769919375e-06, |
| "loss": 0.6742, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.5855880728879073, |
| "grad_norm": 0.9042706489562988, |
| "learning_rate": 4.9114138574889285e-06, |
| "loss": 0.6852, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.586416344561016, |
| "grad_norm": 0.8949633836746216, |
| "learning_rate": 4.9111234784723785e-06, |
| "loss": 0.6891, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.5872446162341248, |
| "grad_norm": 0.915690004825592, |
| "learning_rate": 4.910832632925912e-06, |
| "loss": 0.6744, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.5880728879072336, |
| "grad_norm": 0.887661874294281, |
| "learning_rate": 4.910541320905804e-06, |
| "loss": 0.7016, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.5889011595803424, |
| "grad_norm": 0.889708936214447, |
| "learning_rate": 4.910249542468422e-06, |
| "loss": 0.7169, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.5897294312534511, |
| "grad_norm": 0.8799639344215393, |
| "learning_rate": 4.909957297670222e-06, |
| "loss": 0.7203, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.59055770292656, |
| "grad_norm": 0.9130553007125854, |
| "learning_rate": 4.909664586567749e-06, |
| "loss": 0.6718, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.5913859745996687, |
| "grad_norm": 0.8891394734382629, |
| "learning_rate": 4.909371409217642e-06, |
| "loss": 0.7085, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.5922142462727775, |
| "grad_norm": 0.8986535668373108, |
| "learning_rate": 4.909077765676627e-06, |
| "loss": 0.7061, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.5930425179458862, |
| "grad_norm": 0.9387540221214294, |
| "learning_rate": 4.908783656001521e-06, |
| "loss": 0.6785, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.593870789618995, |
| "grad_norm": 0.9151632189750671, |
| "learning_rate": 4.9084890802492325e-06, |
| "loss": 0.6992, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.5946990612921038, |
| "grad_norm": 0.8982065320014954, |
| "learning_rate": 4.908194038476758e-06, |
| "loss": 0.6962, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.5955273329652125, |
| "grad_norm": 0.8951371908187866, |
| "learning_rate": 4.9078985307411865e-06, |
| "loss": 0.6657, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.5963556046383214, |
| "grad_norm": 0.9000332951545715, |
| "learning_rate": 4.907602557099696e-06, |
| "loss": 0.7077, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.5971838763114301, |
| "grad_norm": 0.9383907318115234, |
| "learning_rate": 4.9073061176095525e-06, |
| "loss": 0.672, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.5980121479845389, |
| "grad_norm": 0.8791319727897644, |
| "learning_rate": 4.907009212328117e-06, |
| "loss": 0.6902, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.5988404196576477, |
| "grad_norm": 0.884758472442627, |
| "learning_rate": 4.906711841312836e-06, |
| "loss": 0.6898, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.5996686913307565, |
| "grad_norm": 0.8955066204071045, |
| "learning_rate": 4.906414004621249e-06, |
| "loss": 0.6746, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.6004969630038652, |
| "grad_norm": 0.8688821196556091, |
| "learning_rate": 4.9061157023109855e-06, |
| "loss": 0.7049, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6013252346769741, |
| "grad_norm": 1.0987242460250854, |
| "learning_rate": 4.905816934439762e-06, |
| "loss": 0.7228, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.6021535063500828, |
| "grad_norm": 0.9010651111602783, |
| "learning_rate": 4.905517701065389e-06, |
| "loss": 0.7029, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.6029817780231916, |
| "grad_norm": 0.8809539079666138, |
| "learning_rate": 4.905218002245764e-06, |
| "loss": 0.6885, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.6038100496963004, |
| "grad_norm": 0.8548507690429688, |
| "learning_rate": 4.904917838038877e-06, |
| "loss": 0.7043, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.6046383213694092, |
| "grad_norm": 0.8917130827903748, |
| "learning_rate": 4.904617208502807e-06, |
| "loss": 0.7007, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.6054665930425179, |
| "grad_norm": 0.9022551774978638, |
| "learning_rate": 4.904316113695723e-06, |
| "loss": 0.7168, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.6062948647156268, |
| "grad_norm": 0.8734942674636841, |
| "learning_rate": 4.904014553675883e-06, |
| "loss": 0.6943, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.6071231363887355, |
| "grad_norm": 0.8766399621963501, |
| "learning_rate": 4.903712528501637e-06, |
| "loss": 0.6888, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.6079514080618443, |
| "grad_norm": 0.8706216216087341, |
| "learning_rate": 4.903410038231425e-06, |
| "loss": 0.6848, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.6087796797349531, |
| "grad_norm": 0.87668776512146, |
| "learning_rate": 4.903107082923774e-06, |
| "loss": 0.6831, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.6096079514080619, |
| "grad_norm": 0.9552613496780396, |
| "learning_rate": 4.902803662637304e-06, |
| "loss": 0.6935, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.6104362230811706, |
| "grad_norm": 0.8920005559921265, |
| "learning_rate": 4.9024997774307235e-06, |
| "loss": 0.683, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.6112644947542794, |
| "grad_norm": 0.8755844235420227, |
| "learning_rate": 4.902195427362832e-06, |
| "loss": 0.6418, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.6120927664273882, |
| "grad_norm": 0.91957026720047, |
| "learning_rate": 4.901890612492519e-06, |
| "loss": 0.667, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.6129210381004969, |
| "grad_norm": 0.9341251850128174, |
| "learning_rate": 4.901585332878762e-06, |
| "loss": 0.692, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6137493097736058, |
| "grad_norm": 0.8738868236541748, |
| "learning_rate": 4.901279588580631e-06, |
| "loss": 0.6872, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.6145775814467145, |
| "grad_norm": 0.9413809776306152, |
| "learning_rate": 4.9009733796572834e-06, |
| "loss": 0.6794, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.6154058531198233, |
| "grad_norm": 0.9214423894882202, |
| "learning_rate": 4.900666706167968e-06, |
| "loss": 0.6853, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.616234124792932, |
| "grad_norm": 0.8955279588699341, |
| "learning_rate": 4.900359568172024e-06, |
| "loss": 0.7165, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.6170623964660409, |
| "grad_norm": 0.9437501430511475, |
| "learning_rate": 4.90005196572888e-06, |
| "loss": 0.707, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6178906681391496, |
| "grad_norm": 0.932046115398407, |
| "learning_rate": 4.899743898898054e-06, |
| "loss": 0.6921, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.6187189398122585, |
| "grad_norm": 0.8929205536842346, |
| "learning_rate": 4.899435367739152e-06, |
| "loss": 0.699, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.6195472114853672, |
| "grad_norm": 0.8796060681343079, |
| "learning_rate": 4.899126372311876e-06, |
| "loss": 0.7162, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.620375483158476, |
| "grad_norm": 0.8730977177619934, |
| "learning_rate": 4.8988169126760085e-06, |
| "loss": 0.6785, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.6212037548315847, |
| "grad_norm": 0.8956276178359985, |
| "learning_rate": 4.898506988891432e-06, |
| "loss": 0.6966, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.6220320265046936, |
| "grad_norm": 0.9401928186416626, |
| "learning_rate": 4.898196601018111e-06, |
| "loss": 0.6814, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.6228602981778023, |
| "grad_norm": 0.9188504219055176, |
| "learning_rate": 4.897885749116104e-06, |
| "loss": 0.6823, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.6236885698509111, |
| "grad_norm": 0.9491416811943054, |
| "learning_rate": 4.897574433245557e-06, |
| "loss": 0.6998, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.6245168415240199, |
| "grad_norm": 0.9337769746780396, |
| "learning_rate": 4.897262653466706e-06, |
| "loss": 0.6918, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.6253451131971287, |
| "grad_norm": 0.8876436948776245, |
| "learning_rate": 4.8969504098398805e-06, |
| "loss": 0.7087, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.6261733848702374, |
| "grad_norm": 0.9014838337898254, |
| "learning_rate": 4.896637702425493e-06, |
| "loss": 0.6825, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.6270016565433462, |
| "grad_norm": 0.9183393716812134, |
| "learning_rate": 4.896324531284053e-06, |
| "loss": 0.6535, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.627829928216455, |
| "grad_norm": 0.8934096693992615, |
| "learning_rate": 4.896010896476153e-06, |
| "loss": 0.686, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.6286581998895637, |
| "grad_norm": 0.9107242226600647, |
| "learning_rate": 4.8956967980624806e-06, |
| "loss": 0.69, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.6294864715626726, |
| "grad_norm": 0.9305768609046936, |
| "learning_rate": 4.895382236103809e-06, |
| "loss": 0.6988, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.6303147432357813, |
| "grad_norm": 1.190079689025879, |
| "learning_rate": 4.895067210661005e-06, |
| "loss": 0.6713, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.6311430149088901, |
| "grad_norm": 0.9005534648895264, |
| "learning_rate": 4.894751721795022e-06, |
| "loss": 0.6875, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.6319712865819989, |
| "grad_norm": 0.9524604678153992, |
| "learning_rate": 4.894435769566905e-06, |
| "loss": 0.6811, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.6327995582551077, |
| "grad_norm": 0.9321557879447937, |
| "learning_rate": 4.894119354037787e-06, |
| "loss": 0.6917, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.6336278299282164, |
| "grad_norm": 0.9099745154380798, |
| "learning_rate": 4.893802475268892e-06, |
| "loss": 0.698, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.6344561016013253, |
| "grad_norm": 0.9420250654220581, |
| "learning_rate": 4.893485133321533e-06, |
| "loss": 0.6793, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.635284373274434, |
| "grad_norm": 0.9231647849082947, |
| "learning_rate": 4.893167328257111e-06, |
| "loss": 0.6989, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.6361126449475428, |
| "grad_norm": 0.908660352230072, |
| "learning_rate": 4.8928490601371216e-06, |
| "loss": 0.6708, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.6369409166206516, |
| "grad_norm": 0.9816997051239014, |
| "learning_rate": 4.892530329023144e-06, |
| "loss": 0.7185, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.6377691882937604, |
| "grad_norm": 0.9923455119132996, |
| "learning_rate": 4.892211134976851e-06, |
| "loss": 0.7033, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.6385974599668691, |
| "grad_norm": 0.9635297060012817, |
| "learning_rate": 4.8918914780600025e-06, |
| "loss": 0.6986, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.639425731639978, |
| "grad_norm": 0.9307644367218018, |
| "learning_rate": 4.8915713583344505e-06, |
| "loss": 0.6733, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.6402540033130867, |
| "grad_norm": 0.9397634267807007, |
| "learning_rate": 4.891250775862134e-06, |
| "loss": 0.6973, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.6410822749861955, |
| "grad_norm": 0.9603179097175598, |
| "learning_rate": 4.890929730705084e-06, |
| "loss": 0.6791, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.6419105466593042, |
| "grad_norm": 0.9150612354278564, |
| "learning_rate": 4.890608222925419e-06, |
| "loss": 0.6889, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.6427388183324131, |
| "grad_norm": 0.8845817446708679, |
| "learning_rate": 4.890286252585348e-06, |
| "loss": 0.6987, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.6435670900055218, |
| "grad_norm": 0.9086291790008545, |
| "learning_rate": 4.8899638197471685e-06, |
| "loss": 0.6779, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.6443953616786305, |
| "grad_norm": 0.8838508129119873, |
| "learning_rate": 4.88964092447327e-06, |
| "loss": 0.6916, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.6452236333517394, |
| "grad_norm": 0.9464887976646423, |
| "learning_rate": 4.889317566826128e-06, |
| "loss": 0.6983, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.6460519050248481, |
| "grad_norm": 0.8804123997688293, |
| "learning_rate": 4.888993746868309e-06, |
| "loss": 0.6729, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.6468801766979569, |
| "grad_norm": 1.0328186750411987, |
| "learning_rate": 4.88866946466247e-06, |
| "loss": 0.6704, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.6477084483710657, |
| "grad_norm": 0.9279816150665283, |
| "learning_rate": 4.888344720271357e-06, |
| "loss": 0.6833, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.6485367200441745, |
| "grad_norm": 0.8946052193641663, |
| "learning_rate": 4.888019513757804e-06, |
| "loss": 0.6488, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.6493649917172832, |
| "grad_norm": 0.8895952701568604, |
| "learning_rate": 4.8876938451847364e-06, |
| "loss": 0.7093, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.6501932633903921, |
| "grad_norm": 0.9124067425727844, |
| "learning_rate": 4.887367714615167e-06, |
| "loss": 0.6929, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.6510215350635008, |
| "grad_norm": 0.9264699816703796, |
| "learning_rate": 4.8870411221122e-06, |
| "loss": 0.6758, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.6518498067366096, |
| "grad_norm": 0.9201008677482605, |
| "learning_rate": 4.886714067739028e-06, |
| "loss": 0.6804, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.6526780784097184, |
| "grad_norm": 0.8645861148834229, |
| "learning_rate": 4.886386551558933e-06, |
| "loss": 0.6835, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.6535063500828272, |
| "grad_norm": 0.9064589738845825, |
| "learning_rate": 4.886058573635284e-06, |
| "loss": 0.6797, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.6543346217559359, |
| "grad_norm": 0.9053468108177185, |
| "learning_rate": 4.885730134031545e-06, |
| "loss": 0.6947, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.6551628934290448, |
| "grad_norm": 0.9159650206565857, |
| "learning_rate": 4.8854012328112645e-06, |
| "loss": 0.678, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.6559911651021535, |
| "grad_norm": 0.8685979843139648, |
| "learning_rate": 4.885071870038082e-06, |
| "loss": 0.6966, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.6568194367752623, |
| "grad_norm": 0.991212785243988, |
| "learning_rate": 4.884742045775725e-06, |
| "loss": 0.6804, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.6576477084483711, |
| "grad_norm": 0.9351918697357178, |
| "learning_rate": 4.884411760088012e-06, |
| "loss": 0.7034, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.6584759801214799, |
| "grad_norm": 0.8826441764831543, |
| "learning_rate": 4.8840810130388526e-06, |
| "loss": 0.6868, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.6593042517945886, |
| "grad_norm": 0.8855264782905579, |
| "learning_rate": 4.88374980469224e-06, |
| "loss": 0.6823, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.6601325234676974, |
| "grad_norm": 0.857950747013092, |
| "learning_rate": 4.8834181351122615e-06, |
| "loss": 0.6707, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.6609607951408062, |
| "grad_norm": 0.9073869585990906, |
| "learning_rate": 4.883086004363092e-06, |
| "loss": 0.703, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.6617890668139149, |
| "grad_norm": 0.9273548722267151, |
| "learning_rate": 4.882753412508996e-06, |
| "loss": 0.6857, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.6626173384870238, |
| "grad_norm": 0.8696101903915405, |
| "learning_rate": 4.882420359614325e-06, |
| "loss": 0.697, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.6634456101601325, |
| "grad_norm": 0.8772857785224915, |
| "learning_rate": 4.882086845743524e-06, |
| "loss": 0.6933, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.6642738818332413, |
| "grad_norm": 0.8889543414115906, |
| "learning_rate": 4.881752870961123e-06, |
| "loss": 0.7141, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.66510215350635, |
| "grad_norm": 0.8802739381790161, |
| "learning_rate": 4.881418435331745e-06, |
| "loss": 0.6645, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.6659304251794589, |
| "grad_norm": 0.9119336009025574, |
| "learning_rate": 4.881083538920098e-06, |
| "loss": 0.6712, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.6667586968525676, |
| "grad_norm": 0.8963311910629272, |
| "learning_rate": 4.880748181790984e-06, |
| "loss": 0.6998, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.6675869685256764, |
| "grad_norm": 0.9043154120445251, |
| "learning_rate": 4.880412364009289e-06, |
| "loss": 0.6745, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.6684152401987852, |
| "grad_norm": 0.9051250219345093, |
| "learning_rate": 4.880076085639991e-06, |
| "loss": 0.7197, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.669243511871894, |
| "grad_norm": 0.8983393311500549, |
| "learning_rate": 4.879739346748158e-06, |
| "loss": 0.6871, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.6700717835450027, |
| "grad_norm": 0.8760300278663635, |
| "learning_rate": 4.879402147398944e-06, |
| "loss": 0.7178, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.6709000552181116, |
| "grad_norm": 0.930756688117981, |
| "learning_rate": 4.879064487657596e-06, |
| "loss": 0.6993, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.6717283268912203, |
| "grad_norm": 0.8934812545776367, |
| "learning_rate": 4.878726367589446e-06, |
| "loss": 0.6932, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.6725565985643291, |
| "grad_norm": 0.8924756646156311, |
| "learning_rate": 4.878387787259919e-06, |
| "loss": 0.6692, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.6733848702374379, |
| "grad_norm": 0.9171481132507324, |
| "learning_rate": 4.878048746734526e-06, |
| "loss": 0.7174, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.6742131419105467, |
| "grad_norm": 0.8740175366401672, |
| "learning_rate": 4.877709246078869e-06, |
| "loss": 0.6661, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.6750414135836554, |
| "grad_norm": 0.9128527641296387, |
| "learning_rate": 4.877369285358637e-06, |
| "loss": 0.6794, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.6758696852567642, |
| "grad_norm": 0.9389971494674683, |
| "learning_rate": 4.877028864639609e-06, |
| "loss": 0.7066, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.676697956929873, |
| "grad_norm": 0.8990627527236938, |
| "learning_rate": 4.876687983987654e-06, |
| "loss": 0.701, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.6775262286029817, |
| "grad_norm": 0.8894726037979126, |
| "learning_rate": 4.876346643468731e-06, |
| "loss": 0.6834, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.6783545002760906, |
| "grad_norm": 0.9184600710868835, |
| "learning_rate": 4.876004843148884e-06, |
| "loss": 0.6963, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.6791827719491993, |
| "grad_norm": 0.8746675252914429, |
| "learning_rate": 4.875662583094249e-06, |
| "loss": 0.6974, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.6800110436223081, |
| "grad_norm": 0.8938384652137756, |
| "learning_rate": 4.875319863371048e-06, |
| "loss": 0.6856, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.6808393152954169, |
| "grad_norm": 0.9065255522727966, |
| "learning_rate": 4.874976684045597e-06, |
| "loss": 0.6834, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.6816675869685257, |
| "grad_norm": 0.895552396774292, |
| "learning_rate": 4.874633045184297e-06, |
| "loss": 0.6933, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.6824958586416344, |
| "grad_norm": 0.9227414727210999, |
| "learning_rate": 4.874288946853639e-06, |
| "loss": 0.6936, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.6833241303147433, |
| "grad_norm": 0.9171680808067322, |
| "learning_rate": 4.8739443891202025e-06, |
| "loss": 0.7142, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.684152401987852, |
| "grad_norm": 0.939964234828949, |
| "learning_rate": 4.873599372050656e-06, |
| "loss": 0.6843, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.6849806736609608, |
| "grad_norm": 0.9056030511856079, |
| "learning_rate": 4.873253895711757e-06, |
| "loss": 0.7, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.6858089453340696, |
| "grad_norm": 0.8730482459068298, |
| "learning_rate": 4.872907960170353e-06, |
| "loss": 0.6883, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.6866372170071784, |
| "grad_norm": 0.9490803480148315, |
| "learning_rate": 4.872561565493379e-06, |
| "loss": 0.6845, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.6874654886802871, |
| "grad_norm": 0.8881271481513977, |
| "learning_rate": 4.872214711747858e-06, |
| "loss": 0.6876, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.688293760353396, |
| "grad_norm": 0.8695152401924133, |
| "learning_rate": 4.8718673990009045e-06, |
| "loss": 0.6825, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.6891220320265047, |
| "grad_norm": 0.8586530685424805, |
| "learning_rate": 4.871519627319719e-06, |
| "loss": 0.6987, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.6899503036996135, |
| "grad_norm": 0.9191843271255493, |
| "learning_rate": 4.871171396771594e-06, |
| "loss": 0.6819, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.6907785753727222, |
| "grad_norm": 0.9126827716827393, |
| "learning_rate": 4.8708227074239064e-06, |
| "loss": 0.7162, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.6916068470458311, |
| "grad_norm": 0.8728350400924683, |
| "learning_rate": 4.8704735593441255e-06, |
| "loss": 0.6712, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.6924351187189398, |
| "grad_norm": 0.9079543352127075, |
| "learning_rate": 4.8701239525998076e-06, |
| "loss": 0.6837, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.6932633903920485, |
| "grad_norm": 0.8758329153060913, |
| "learning_rate": 4.8697738872586e-06, |
| "loss": 0.7159, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.6940916620651574, |
| "grad_norm": 0.8888401985168457, |
| "learning_rate": 4.869423363388235e-06, |
| "loss": 0.6582, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.6949199337382661, |
| "grad_norm": 0.890082061290741, |
| "learning_rate": 4.869072381056537e-06, |
| "loss": 0.6842, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.6957482054113749, |
| "grad_norm": 0.8615689277648926, |
| "learning_rate": 4.8687209403314175e-06, |
| "loss": 0.7054, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.6965764770844837, |
| "grad_norm": 0.8517857193946838, |
| "learning_rate": 4.8683690412808775e-06, |
| "loss": 0.6679, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.6974047487575925, |
| "grad_norm": 0.923627495765686, |
| "learning_rate": 4.868016683973005e-06, |
| "loss": 0.6783, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.6982330204307012, |
| "grad_norm": 0.9232134819030762, |
| "learning_rate": 4.867663868475978e-06, |
| "loss": 0.6774, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.6990612921038101, |
| "grad_norm": 0.88045334815979, |
| "learning_rate": 4.867310594858065e-06, |
| "loss": 0.7099, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.6998895637769188, |
| "grad_norm": 0.9323690533638, |
| "learning_rate": 4.866956863187619e-06, |
| "loss": 0.6864, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.7007178354500276, |
| "grad_norm": 0.926483154296875, |
| "learning_rate": 4.866602673533084e-06, |
| "loss": 0.6782, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.7015461071231364, |
| "grad_norm": 0.9339326620101929, |
| "learning_rate": 4.866248025962993e-06, |
| "loss": 0.6682, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.7023743787962452, |
| "grad_norm": 0.8889521360397339, |
| "learning_rate": 4.865892920545967e-06, |
| "loss": 0.685, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.7032026504693539, |
| "grad_norm": 0.8796456456184387, |
| "learning_rate": 4.865537357350716e-06, |
| "loss": 0.7055, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.7040309221424628, |
| "grad_norm": 0.9078241586685181, |
| "learning_rate": 4.865181336446036e-06, |
| "loss": 0.6805, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7048591938155715, |
| "grad_norm": 0.8957457542419434, |
| "learning_rate": 4.864824857900817e-06, |
| "loss": 0.693, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.7056874654886803, |
| "grad_norm": 0.8897283673286438, |
| "learning_rate": 4.864467921784032e-06, |
| "loss": 0.6957, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.7065157371617891, |
| "grad_norm": 0.9502272009849548, |
| "learning_rate": 4.864110528164746e-06, |
| "loss": 0.7029, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.7073440088348979, |
| "grad_norm": 0.8908940553665161, |
| "learning_rate": 4.86375267711211e-06, |
| "loss": 0.6795, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.7081722805080066, |
| "grad_norm": 0.9122167825698853, |
| "learning_rate": 4.863394368695366e-06, |
| "loss": 0.6823, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.7090005521811154, |
| "grad_norm": 0.9393121004104614, |
| "learning_rate": 4.863035602983843e-06, |
| "loss": 0.6828, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.7098288238542242, |
| "grad_norm": 0.8809435963630676, |
| "learning_rate": 4.862676380046959e-06, |
| "loss": 0.6775, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.7106570955273329, |
| "grad_norm": 0.8546228408813477, |
| "learning_rate": 4.86231669995422e-06, |
| "loss": 0.6574, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.7114853672004418, |
| "grad_norm": 0.917534589767456, |
| "learning_rate": 4.86195656277522e-06, |
| "loss": 0.6887, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.7123136388735505, |
| "grad_norm": 0.900645911693573, |
| "learning_rate": 4.861595968579643e-06, |
| "loss": 0.6714, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.7131419105466593, |
| "grad_norm": 0.8719033598899841, |
| "learning_rate": 4.861234917437261e-06, |
| "loss": 0.6822, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.713970182219768, |
| "grad_norm": 0.8870981931686401, |
| "learning_rate": 4.860873409417934e-06, |
| "loss": 0.6699, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.7147984538928769, |
| "grad_norm": 0.8634423017501831, |
| "learning_rate": 4.860511444591609e-06, |
| "loss": 0.668, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.7156267255659856, |
| "grad_norm": 0.9046266078948975, |
| "learning_rate": 4.860149023028325e-06, |
| "loss": 0.6831, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.7164549972390944, |
| "grad_norm": 0.8841381669044495, |
| "learning_rate": 4.859786144798205e-06, |
| "loss": 0.6696, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.7172832689122032, |
| "grad_norm": 0.8761686682701111, |
| "learning_rate": 4.859422809971464e-06, |
| "loss": 0.6871, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.718111540585312, |
| "grad_norm": 0.9123032689094543, |
| "learning_rate": 4.859059018618405e-06, |
| "loss": 0.6704, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.7189398122584207, |
| "grad_norm": 0.9047552943229675, |
| "learning_rate": 4.858694770809415e-06, |
| "loss": 0.704, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.7197680839315296, |
| "grad_norm": 0.8990033268928528, |
| "learning_rate": 4.858330066614975e-06, |
| "loss": 0.6843, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.7205963556046383, |
| "grad_norm": 0.8743424415588379, |
| "learning_rate": 4.857964906105651e-06, |
| "loss": 0.6883, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.7214246272777471, |
| "grad_norm": 0.8834901452064514, |
| "learning_rate": 4.857599289352098e-06, |
| "loss": 0.6803, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.7222528989508559, |
| "grad_norm": 0.8997126221656799, |
| "learning_rate": 4.857233216425061e-06, |
| "loss": 0.6583, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.7230811706239647, |
| "grad_norm": 0.8654860854148865, |
| "learning_rate": 4.8568666873953696e-06, |
| "loss": 0.6759, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.7239094422970734, |
| "grad_norm": 0.8652336001396179, |
| "learning_rate": 4.856499702333944e-06, |
| "loss": 0.684, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.7247377139701823, |
| "grad_norm": 0.9110811352729797, |
| "learning_rate": 4.8561322613117945e-06, |
| "loss": 0.6714, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.725565985643291, |
| "grad_norm": 0.8914695382118225, |
| "learning_rate": 4.855764364400016e-06, |
| "loss": 0.7113, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.7263942573163997, |
| "grad_norm": 0.8843448162078857, |
| "learning_rate": 4.8553960116697915e-06, |
| "loss": 0.6823, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.7272225289895086, |
| "grad_norm": 0.8957106471061707, |
| "learning_rate": 4.8550272031923965e-06, |
| "loss": 0.6849, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.7280508006626173, |
| "grad_norm": 0.9294857978820801, |
| "learning_rate": 4.85465793903919e-06, |
| "loss": 0.7095, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.7288790723357261, |
| "grad_norm": 0.8942342400550842, |
| "learning_rate": 4.854288219281623e-06, |
| "loss": 0.6877, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.7297073440088349, |
| "grad_norm": 0.8833070993423462, |
| "learning_rate": 4.853918043991232e-06, |
| "loss": 0.6723, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.7305356156819437, |
| "grad_norm": 0.8591980338096619, |
| "learning_rate": 4.853547413239642e-06, |
| "loss": 0.6443, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.7313638873550524, |
| "grad_norm": 0.8719205260276794, |
| "learning_rate": 4.853176327098567e-06, |
| "loss": 0.6642, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.7321921590281613, |
| "grad_norm": 0.9222398996353149, |
| "learning_rate": 4.85280478563981e-06, |
| "loss": 0.7067, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.73302043070127, |
| "grad_norm": 0.891812264919281, |
| "learning_rate": 4.8524327889352585e-06, |
| "loss": 0.6788, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.7338487023743788, |
| "grad_norm": 0.8873123526573181, |
| "learning_rate": 4.852060337056891e-06, |
| "loss": 0.681, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.7346769740474876, |
| "grad_norm": 0.8947358131408691, |
| "learning_rate": 4.851687430076775e-06, |
| "loss": 0.7007, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.7355052457205964, |
| "grad_norm": 0.8815891146659851, |
| "learning_rate": 4.851314068067063e-06, |
| "loss": 0.6732, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.7363335173937051, |
| "grad_norm": 0.8730867505073547, |
| "learning_rate": 4.850940251099997e-06, |
| "loss": 0.6759, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.737161789066814, |
| "grad_norm": 0.8972861766815186, |
| "learning_rate": 4.850565979247908e-06, |
| "loss": 0.6994, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.7379900607399227, |
| "grad_norm": 0.8865854740142822, |
| "learning_rate": 4.850191252583214e-06, |
| "loss": 0.6886, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.7388183324130315, |
| "grad_norm": 0.8712268471717834, |
| "learning_rate": 4.84981607117842e-06, |
| "loss": 0.6949, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.7396466040861402, |
| "grad_norm": 0.8797938823699951, |
| "learning_rate": 4.849440435106122e-06, |
| "loss": 0.6835, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.7404748757592491, |
| "grad_norm": 0.8838709592819214, |
| "learning_rate": 4.849064344439e-06, |
| "loss": 0.6922, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.7413031474323578, |
| "grad_norm": 0.8552179932594299, |
| "learning_rate": 4.848687799249826e-06, |
| "loss": 0.6836, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.7421314191054665, |
| "grad_norm": 0.8721039891242981, |
| "learning_rate": 4.848310799611457e-06, |
| "loss": 0.6929, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.7429596907785754, |
| "grad_norm": 0.8712265491485596, |
| "learning_rate": 4.847933345596838e-06, |
| "loss": 0.6755, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.7437879624516841, |
| "grad_norm": 0.9070659279823303, |
| "learning_rate": 4.847555437279004e-06, |
| "loss": 0.6627, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.7446162341247929, |
| "grad_norm": 0.8865673542022705, |
| "learning_rate": 4.847177074731078e-06, |
| "loss": 0.6875, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.7454445057979017, |
| "grad_norm": 0.8886010646820068, |
| "learning_rate": 4.846798258026267e-06, |
| "loss": 0.6859, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.7462727774710105, |
| "grad_norm": 0.886082112789154, |
| "learning_rate": 4.846418987237868e-06, |
| "loss": 0.6541, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.7471010491441192, |
| "grad_norm": 0.8836507797241211, |
| "learning_rate": 4.846039262439271e-06, |
| "loss": 0.7007, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.7479293208172281, |
| "grad_norm": 0.8699943423271179, |
| "learning_rate": 4.8456590837039445e-06, |
| "loss": 0.7039, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.7487575924903368, |
| "grad_norm": 0.9326562285423279, |
| "learning_rate": 4.845278451105451e-06, |
| "loss": 0.6764, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.7495858641634456, |
| "grad_norm": 0.9258645176887512, |
| "learning_rate": 4.8448973647174405e-06, |
| "loss": 0.6972, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.7504141358365544, |
| "grad_norm": 0.893723726272583, |
| "learning_rate": 4.844515824613649e-06, |
| "loss": 0.6971, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.7512424075096632, |
| "grad_norm": 0.8859586715698242, |
| "learning_rate": 4.8441338308679e-06, |
| "loss": 0.685, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.7520706791827719, |
| "grad_norm": 0.9202778339385986, |
| "learning_rate": 4.843751383554106e-06, |
| "loss": 0.672, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.7528989508558808, |
| "grad_norm": 0.9235085844993591, |
| "learning_rate": 4.843368482746269e-06, |
| "loss": 0.6805, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.7537272225289895, |
| "grad_norm": 0.8933305740356445, |
| "learning_rate": 4.842985128518473e-06, |
| "loss": 0.708, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.7545554942020983, |
| "grad_norm": 0.9081675410270691, |
| "learning_rate": 4.842601320944898e-06, |
| "loss": 0.6902, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.7553837658752071, |
| "grad_norm": 0.928717315196991, |
| "learning_rate": 4.842217060099804e-06, |
| "loss": 0.6706, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.7562120375483159, |
| "grad_norm": 0.9361997246742249, |
| "learning_rate": 4.8418323460575425e-06, |
| "loss": 0.6671, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.7570403092214246, |
| "grad_norm": 0.923000693321228, |
| "learning_rate": 4.841447178892552e-06, |
| "loss": 0.7008, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.7578685808945335, |
| "grad_norm": 0.938662588596344, |
| "learning_rate": 4.84106155867936e-06, |
| "loss": 0.687, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.7586968525676422, |
| "grad_norm": 0.9256168007850647, |
| "learning_rate": 4.8406754854925785e-06, |
| "loss": 0.6698, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.7595251242407509, |
| "grad_norm": 0.9616279602050781, |
| "learning_rate": 4.840288959406911e-06, |
| "loss": 0.6905, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.7603533959138598, |
| "grad_norm": 0.9160107970237732, |
| "learning_rate": 4.839901980497145e-06, |
| "loss": 0.668, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.7611816675869685, |
| "grad_norm": 0.8981633186340332, |
| "learning_rate": 4.839514548838158e-06, |
| "loss": 0.6726, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.7620099392600773, |
| "grad_norm": 0.9086101055145264, |
| "learning_rate": 4.839126664504915e-06, |
| "loss": 0.6837, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.762838210933186, |
| "grad_norm": 0.9187242984771729, |
| "learning_rate": 4.838738327572468e-06, |
| "loss": 0.6822, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.7636664826062949, |
| "grad_norm": 0.9325697422027588, |
| "learning_rate": 4.838349538115955e-06, |
| "loss": 0.6911, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.7644947542794036, |
| "grad_norm": 0.9183008670806885, |
| "learning_rate": 4.837960296210605e-06, |
| "loss": 0.6576, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.7653230259525124, |
| "grad_norm": 0.9704478979110718, |
| "learning_rate": 4.837570601931731e-06, |
| "loss": 0.6777, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.7661512976256212, |
| "grad_norm": 0.9274917244911194, |
| "learning_rate": 4.8371804553547365e-06, |
| "loss": 0.6703, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.76697956929873, |
| "grad_norm": 0.9001563787460327, |
| "learning_rate": 4.836789856555111e-06, |
| "loss": 0.6803, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.7678078409718387, |
| "grad_norm": 0.8527753353118896, |
| "learning_rate": 4.836398805608431e-06, |
| "loss": 0.6829, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.7686361126449476, |
| "grad_norm": 0.9473156929016113, |
| "learning_rate": 4.836007302590362e-06, |
| "loss": 0.6672, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.7694643843180563, |
| "grad_norm": 0.8827821016311646, |
| "learning_rate": 4.8356153475766564e-06, |
| "loss": 0.6821, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.7702926559911651, |
| "grad_norm": 0.9502016305923462, |
| "learning_rate": 4.835222940643153e-06, |
| "loss": 0.691, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.7711209276642739, |
| "grad_norm": 0.9170359969139099, |
| "learning_rate": 4.834830081865779e-06, |
| "loss": 0.665, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.7719491993373827, |
| "grad_norm": 0.9085781574249268, |
| "learning_rate": 4.83443677132055e-06, |
| "loss": 0.6855, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.7727774710104914, |
| "grad_norm": 0.884650468826294, |
| "learning_rate": 4.834043009083566e-06, |
| "loss": 0.7019, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.7736057426836003, |
| "grad_norm": 0.9151795506477356, |
| "learning_rate": 4.833648795231018e-06, |
| "loss": 0.6713, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.774434014356709, |
| "grad_norm": 0.8867859840393066, |
| "learning_rate": 4.8332541298391825e-06, |
| "loss": 0.6828, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.7752622860298177, |
| "grad_norm": 0.9155465960502625, |
| "learning_rate": 4.8328590129844224e-06, |
| "loss": 0.6829, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.7760905577029266, |
| "grad_norm": 0.9172464609146118, |
| "learning_rate": 4.832463444743191e-06, |
| "loss": 0.6733, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.7769188293760353, |
| "grad_norm": 0.9752153754234314, |
| "learning_rate": 4.832067425192024e-06, |
| "loss": 0.6635, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.7777471010491441, |
| "grad_norm": 0.9528539180755615, |
| "learning_rate": 4.831670954407551e-06, |
| "loss": 0.6921, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.7785753727222529, |
| "grad_norm": 0.9094734787940979, |
| "learning_rate": 4.831274032466484e-06, |
| "loss": 0.6967, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.7794036443953617, |
| "grad_norm": 0.9265007376670837, |
| "learning_rate": 4.830876659445623e-06, |
| "loss": 0.6797, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.7802319160684704, |
| "grad_norm": 0.908348560333252, |
| "learning_rate": 4.8304788354218564e-06, |
| "loss": 0.6648, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.7810601877415793, |
| "grad_norm": 0.9144517183303833, |
| "learning_rate": 4.83008056047216e-06, |
| "loss": 0.6811, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.781888459414688, |
| "grad_norm": 0.8861756920814514, |
| "learning_rate": 4.829681834673597e-06, |
| "loss": 0.6907, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.7827167310877968, |
| "grad_norm": 0.8865030407905579, |
| "learning_rate": 4.8292826581033135e-06, |
| "loss": 0.6897, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.7835450027609056, |
| "grad_norm": 0.9059233069419861, |
| "learning_rate": 4.828883030838551e-06, |
| "loss": 0.6802, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.7843732744340144, |
| "grad_norm": 0.9029180407524109, |
| "learning_rate": 4.828482952956631e-06, |
| "loss": 0.6718, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.7852015461071231, |
| "grad_norm": 0.8946165442466736, |
| "learning_rate": 4.828082424534965e-06, |
| "loss": 0.688, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.786029817780232, |
| "grad_norm": 0.8942260146141052, |
| "learning_rate": 4.827681445651052e-06, |
| "loss": 0.6786, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.7868580894533407, |
| "grad_norm": 0.9090398550033569, |
| "learning_rate": 4.8272800163824784e-06, |
| "loss": 0.6875, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.7876863611264495, |
| "grad_norm": 0.9562193155288696, |
| "learning_rate": 4.8268781368069156e-06, |
| "loss": 0.7, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.7885146327995582, |
| "grad_norm": 0.9081215262413025, |
| "learning_rate": 4.826475807002125e-06, |
| "loss": 0.6843, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.7893429044726671, |
| "grad_norm": 0.9093075394630432, |
| "learning_rate": 4.826073027045952e-06, |
| "loss": 0.6705, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.7901711761457758, |
| "grad_norm": 0.8742667436599731, |
| "learning_rate": 4.8256697970163315e-06, |
| "loss": 0.6844, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.7909994478188846, |
| "grad_norm": 0.8954585194587708, |
| "learning_rate": 4.825266116991285e-06, |
| "loss": 0.6839, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.7918277194919934, |
| "grad_norm": 0.8787705302238464, |
| "learning_rate": 4.824861987048922e-06, |
| "loss": 0.7106, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.7926559911651021, |
| "grad_norm": 0.8544998168945312, |
| "learning_rate": 4.824457407267436e-06, |
| "loss": 0.6713, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.7934842628382109, |
| "grad_norm": 0.904699981212616, |
| "learning_rate": 4.824052377725109e-06, |
| "loss": 0.6687, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.7943125345113197, |
| "grad_norm": 0.904390811920166, |
| "learning_rate": 4.823646898500311e-06, |
| "loss": 0.6738, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.7951408061844285, |
| "grad_norm": 0.9181792736053467, |
| "learning_rate": 4.823240969671501e-06, |
| "loss": 0.687, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.7959690778575372, |
| "grad_norm": 0.8957958221435547, |
| "learning_rate": 4.822834591317219e-06, |
| "loss": 0.6741, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.7967973495306461, |
| "grad_norm": 0.9045628905296326, |
| "learning_rate": 4.822427763516098e-06, |
| "loss": 0.6777, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.7976256212037548, |
| "grad_norm": 0.8997766971588135, |
| "learning_rate": 4.822020486346852e-06, |
| "loss": 0.684, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.7984538928768636, |
| "grad_norm": 0.8941488265991211, |
| "learning_rate": 4.821612759888289e-06, |
| "loss": 0.6726, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.7992821645499724, |
| "grad_norm": 0.8892595171928406, |
| "learning_rate": 4.821204584219299e-06, |
| "loss": 0.6674, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.8001104362230812, |
| "grad_norm": 0.874564528465271, |
| "learning_rate": 4.820795959418859e-06, |
| "loss": 0.678, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.8009387078961899, |
| "grad_norm": 0.8871595859527588, |
| "learning_rate": 4.820386885566036e-06, |
| "loss": 0.6691, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.8017669795692988, |
| "grad_norm": 0.8457095623016357, |
| "learning_rate": 4.81997736273998e-06, |
| "loss": 0.6762, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.8025952512424075, |
| "grad_norm": 0.8925356268882751, |
| "learning_rate": 4.819567391019931e-06, |
| "loss": 0.6939, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.8034235229155163, |
| "grad_norm": 0.981640100479126, |
| "learning_rate": 4.819156970485216e-06, |
| "loss": 0.7069, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.8042517945886251, |
| "grad_norm": 0.9570382237434387, |
| "learning_rate": 4.818746101215245e-06, |
| "loss": 0.6861, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.8050800662617339, |
| "grad_norm": 0.897760808467865, |
| "learning_rate": 4.818334783289518e-06, |
| "loss": 0.6684, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.8059083379348426, |
| "grad_norm": 0.9033851623535156, |
| "learning_rate": 4.817923016787623e-06, |
| "loss": 0.6958, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.8067366096079515, |
| "grad_norm": 0.8784357905387878, |
| "learning_rate": 4.81751080178923e-06, |
| "loss": 0.7024, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.8075648812810602, |
| "grad_norm": 0.8697920441627502, |
| "learning_rate": 4.817098138374102e-06, |
| "loss": 0.6723, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.8083931529541689, |
| "grad_norm": 0.9209671020507812, |
| "learning_rate": 4.8166850266220835e-06, |
| "loss": 0.6781, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.8092214246272778, |
| "grad_norm": 0.8963499665260315, |
| "learning_rate": 4.8162714666131074e-06, |
| "loss": 0.692, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.8100496963003865, |
| "grad_norm": 0.9002600908279419, |
| "learning_rate": 4.815857458427195e-06, |
| "loss": 0.6982, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.8108779679734953, |
| "grad_norm": 0.9190880656242371, |
| "learning_rate": 4.815443002144453e-06, |
| "loss": 0.6796, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.811706239646604, |
| "grad_norm": 0.9190476536750793, |
| "learning_rate": 4.815028097845074e-06, |
| "loss": 0.6735, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.8125345113197129, |
| "grad_norm": 0.902758777141571, |
| "learning_rate": 4.814612745609338e-06, |
| "loss": 0.7072, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.8133627829928216, |
| "grad_norm": 0.8825267553329468, |
| "learning_rate": 4.814196945517613e-06, |
| "loss": 0.6415, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.8141910546659304, |
| "grad_norm": 0.8995051383972168, |
| "learning_rate": 4.813780697650351e-06, |
| "loss": 0.6795, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.8150193263390392, |
| "grad_norm": 0.9203552007675171, |
| "learning_rate": 4.813364002088093e-06, |
| "loss": 0.6764, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.815847598012148, |
| "grad_norm": 0.9434835314750671, |
| "learning_rate": 4.8129468589114655e-06, |
| "loss": 0.6849, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.8166758696852567, |
| "grad_norm": 0.9363617300987244, |
| "learning_rate": 4.812529268201183e-06, |
| "loss": 0.6759, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.8175041413583656, |
| "grad_norm": 0.9260227084159851, |
| "learning_rate": 4.812111230038043e-06, |
| "loss": 0.6722, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.8183324130314743, |
| "grad_norm": 0.9484657049179077, |
| "learning_rate": 4.811692744502933e-06, |
| "loss": 0.6684, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.8191606847045831, |
| "grad_norm": 0.888095498085022, |
| "learning_rate": 4.8112738116768276e-06, |
| "loss": 0.6448, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.8199889563776919, |
| "grad_norm": 0.9139562249183655, |
| "learning_rate": 4.810854431640784e-06, |
| "loss": 0.7029, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.8208172280508007, |
| "grad_norm": 0.9293085336685181, |
| "learning_rate": 4.81043460447595e-06, |
| "loss": 0.6663, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.8216454997239094, |
| "grad_norm": 0.9336161017417908, |
| "learning_rate": 4.8100143302635575e-06, |
| "loss": 0.6893, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.8224737713970183, |
| "grad_norm": 0.925926148891449, |
| "learning_rate": 4.809593609084925e-06, |
| "loss": 0.6718, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.823302043070127, |
| "grad_norm": 0.9109163880348206, |
| "learning_rate": 4.8091724410214595e-06, |
| "loss": 0.6654, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.8241303147432358, |
| "grad_norm": 0.9777645468711853, |
| "learning_rate": 4.808750826154652e-06, |
| "loss": 0.6893, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.8249585864163446, |
| "grad_norm": 0.9758613109588623, |
| "learning_rate": 4.808328764566082e-06, |
| "loss": 0.6765, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.8257868580894533, |
| "grad_norm": 0.880034327507019, |
| "learning_rate": 4.807906256337414e-06, |
| "loss": 0.6724, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.8266151297625621, |
| "grad_norm": 0.8813809752464294, |
| "learning_rate": 4.8074833015504e-06, |
| "loss": 0.69, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.8274434014356709, |
| "grad_norm": 0.9450415372848511, |
| "learning_rate": 4.8070599002868756e-06, |
| "loss": 0.691, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.8282716731087797, |
| "grad_norm": 1.0346750020980835, |
| "learning_rate": 4.806636052628768e-06, |
| "loss": 0.6851, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.8290999447818884, |
| "grad_norm": 0.9298142790794373, |
| "learning_rate": 4.806211758658086e-06, |
| "loss": 0.6781, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.8299282164549973, |
| "grad_norm": 0.8722123503684998, |
| "learning_rate": 4.805787018456927e-06, |
| "loss": 0.6909, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.830756488128106, |
| "grad_norm": 0.9205262660980225, |
| "learning_rate": 4.805361832107474e-06, |
| "loss": 0.6639, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.8315847598012148, |
| "grad_norm": 0.9853707551956177, |
| "learning_rate": 4.804936199691997e-06, |
| "loss": 0.6857, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.8324130314743236, |
| "grad_norm": 0.9246019721031189, |
| "learning_rate": 4.804510121292852e-06, |
| "loss": 0.67, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.8332413031474324, |
| "grad_norm": 0.8888720273971558, |
| "learning_rate": 4.80408359699248e-06, |
| "loss": 0.6802, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.8340695748205411, |
| "grad_norm": 0.9342382550239563, |
| "learning_rate": 4.803656626873412e-06, |
| "loss": 0.6602, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.83489784649365, |
| "grad_norm": 0.9380409121513367, |
| "learning_rate": 4.80322921101826e-06, |
| "loss": 0.667, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.8357261181667587, |
| "grad_norm": 0.9002363085746765, |
| "learning_rate": 4.8028013495097255e-06, |
| "loss": 0.6589, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.8365543898398675, |
| "grad_norm": 0.9009696841239929, |
| "learning_rate": 4.802373042430597e-06, |
| "loss": 0.6671, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.8373826615129762, |
| "grad_norm": 0.8897734880447388, |
| "learning_rate": 4.801944289863747e-06, |
| "loss": 0.6739, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.8382109331860851, |
| "grad_norm": 0.8874098062515259, |
| "learning_rate": 4.801515091892136e-06, |
| "loss": 0.6827, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.8390392048591938, |
| "grad_norm": 0.8683866858482361, |
| "learning_rate": 4.801085448598809e-06, |
| "loss": 0.6641, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.8398674765323026, |
| "grad_norm": 0.912205159664154, |
| "learning_rate": 4.800655360066898e-06, |
| "loss": 0.6835, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.8406957482054114, |
| "grad_norm": 0.9083834290504456, |
| "learning_rate": 4.800224826379622e-06, |
| "loss": 0.6878, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.8415240198785201, |
| "grad_norm": 0.8820756673812866, |
| "learning_rate": 4.799793847620283e-06, |
| "loss": 0.6559, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.8423522915516289, |
| "grad_norm": 0.8611956238746643, |
| "learning_rate": 4.799362423872274e-06, |
| "loss": 0.6756, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.8431805632247377, |
| "grad_norm": 0.9487212300300598, |
| "learning_rate": 4.798930555219069e-06, |
| "loss": 0.6639, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.8440088348978465, |
| "grad_norm": 0.9564170837402344, |
| "learning_rate": 4.7984982417442325e-06, |
| "loss": 0.6925, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.8448371065709552, |
| "grad_norm": 0.878726065158844, |
| "learning_rate": 4.798065483531413e-06, |
| "loss": 0.6699, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.8456653782440641, |
| "grad_norm": 0.866711437702179, |
| "learning_rate": 4.797632280664345e-06, |
| "loss": 0.6756, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.8464936499171728, |
| "grad_norm": 0.884544849395752, |
| "learning_rate": 4.797198633226849e-06, |
| "loss": 0.6631, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.8473219215902816, |
| "grad_norm": 0.8774108290672302, |
| "learning_rate": 4.796764541302831e-06, |
| "loss": 0.6999, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.8481501932633904, |
| "grad_norm": 0.8773043751716614, |
| "learning_rate": 4.796330004976285e-06, |
| "loss": 0.658, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.8489784649364992, |
| "grad_norm": 0.8596432209014893, |
| "learning_rate": 4.795895024331289e-06, |
| "loss": 0.665, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.8498067366096079, |
| "grad_norm": 0.8632652163505554, |
| "learning_rate": 4.795459599452007e-06, |
| "loss": 0.6705, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.8506350082827168, |
| "grad_norm": 0.8760238289833069, |
| "learning_rate": 4.795023730422692e-06, |
| "loss": 0.6779, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.8514632799558255, |
| "grad_norm": 0.8805958032608032, |
| "learning_rate": 4.794587417327678e-06, |
| "loss": 0.6594, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.8522915516289343, |
| "grad_norm": 0.893348217010498, |
| "learning_rate": 4.794150660251389e-06, |
| "loss": 0.6847, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.8531198233020431, |
| "grad_norm": 0.875596821308136, |
| "learning_rate": 4.793713459278333e-06, |
| "loss": 0.6582, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.8539480949751519, |
| "grad_norm": 0.8920791149139404, |
| "learning_rate": 4.793275814493104e-06, |
| "loss": 0.6784, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.8547763666482606, |
| "grad_norm": 0.9524851441383362, |
| "learning_rate": 4.792837725980383e-06, |
| "loss": 0.6641, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.8556046383213695, |
| "grad_norm": 0.8868522644042969, |
| "learning_rate": 4.7923991938249355e-06, |
| "loss": 0.6915, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.8564329099944782, |
| "grad_norm": 0.9237116575241089, |
| "learning_rate": 4.791960218111613e-06, |
| "loss": 0.691, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.8572611816675869, |
| "grad_norm": 0.8853458166122437, |
| "learning_rate": 4.791520798925354e-06, |
| "loss": 0.6727, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.8580894533406958, |
| "grad_norm": 0.8717471361160278, |
| "learning_rate": 4.7910809363511825e-06, |
| "loss": 0.6757, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.8589177250138045, |
| "grad_norm": 0.8659184575080872, |
| "learning_rate": 4.790640630474206e-06, |
| "loss": 0.6994, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.8597459966869133, |
| "grad_norm": 0.9128848314285278, |
| "learning_rate": 4.790199881379621e-06, |
| "loss": 0.6658, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.860574268360022, |
| "grad_norm": 0.8942065834999084, |
| "learning_rate": 4.789758689152708e-06, |
| "loss": 0.6662, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.8614025400331309, |
| "grad_norm": 0.8835064172744751, |
| "learning_rate": 4.789317053878833e-06, |
| "loss": 0.6806, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.8622308117062396, |
| "grad_norm": 0.8735736608505249, |
| "learning_rate": 4.788874975643449e-06, |
| "loss": 0.7052, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.8630590833793484, |
| "grad_norm": 0.8776472806930542, |
| "learning_rate": 4.788432454532093e-06, |
| "loss": 0.6545, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.8638873550524572, |
| "grad_norm": 0.9023584127426147, |
| "learning_rate": 4.787989490630391e-06, |
| "loss": 0.6955, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.864715626725566, |
| "grad_norm": 0.890040934085846, |
| "learning_rate": 4.787546084024051e-06, |
| "loss": 0.6592, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.8655438983986747, |
| "grad_norm": 0.9242737889289856, |
| "learning_rate": 4.787102234798867e-06, |
| "loss": 0.6582, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.8663721700717836, |
| "grad_norm": 0.8844568729400635, |
| "learning_rate": 4.786657943040721e-06, |
| "loss": 0.7028, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.8672004417448923, |
| "grad_norm": 0.8407277464866638, |
| "learning_rate": 4.78621320883558e-06, |
| "loss": 0.6648, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.8680287134180011, |
| "grad_norm": 0.8928431868553162, |
| "learning_rate": 4.785768032269493e-06, |
| "loss": 0.6553, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.8688569850911099, |
| "grad_norm": 0.8959507346153259, |
| "learning_rate": 4.785322413428601e-06, |
| "loss": 0.6677, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.8696852567642187, |
| "grad_norm": 0.8826605677604675, |
| "learning_rate": 4.784876352399125e-06, |
| "loss": 0.6862, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.8705135284373274, |
| "grad_norm": 0.858877956867218, |
| "learning_rate": 4.784429849267373e-06, |
| "loss": 0.6654, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.8713418001104363, |
| "grad_norm": 0.9286037087440491, |
| "learning_rate": 4.783982904119743e-06, |
| "loss": 0.6652, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.872170071783545, |
| "grad_norm": 0.9071895480155945, |
| "learning_rate": 4.783535517042709e-06, |
| "loss": 0.6865, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.8729983434566538, |
| "grad_norm": 0.9230369329452515, |
| "learning_rate": 4.783087688122842e-06, |
| "loss": 0.6976, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.8738266151297626, |
| "grad_norm": 0.9384015798568726, |
| "learning_rate": 4.782639417446789e-06, |
| "loss": 0.6788, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.8746548868028713, |
| "grad_norm": 0.8797554969787598, |
| "learning_rate": 4.782190705101286e-06, |
| "loss": 0.692, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.8754831584759801, |
| "grad_norm": 0.8643795251846313, |
| "learning_rate": 4.781741551173157e-06, |
| "loss": 0.6689, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.8763114301490889, |
| "grad_norm": 0.9023616909980774, |
| "learning_rate": 4.781291955749307e-06, |
| "loss": 0.6819, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.8771397018221977, |
| "grad_norm": 0.9468352198600769, |
| "learning_rate": 4.780841918916731e-06, |
| "loss": 0.6624, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.8779679734953064, |
| "grad_norm": 0.8504934310913086, |
| "learning_rate": 4.780391440762505e-06, |
| "loss": 0.6892, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.8787962451684153, |
| "grad_norm": 0.9521695375442505, |
| "learning_rate": 4.779940521373793e-06, |
| "loss": 0.6705, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.879624516841524, |
| "grad_norm": 0.9325176477432251, |
| "learning_rate": 4.779489160837842e-06, |
| "loss": 0.6861, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.8804527885146328, |
| "grad_norm": 0.9324467778205872, |
| "learning_rate": 4.779037359241989e-06, |
| "loss": 0.6494, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.8812810601877415, |
| "grad_norm": 0.9227094650268555, |
| "learning_rate": 4.7785851166736505e-06, |
| "loss": 0.6679, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.8821093318608504, |
| "grad_norm": 0.8776607513427734, |
| "learning_rate": 4.778132433220334e-06, |
| "loss": 0.6884, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.8829376035339591, |
| "grad_norm": 0.8877871036529541, |
| "learning_rate": 4.777679308969627e-06, |
| "loss": 0.6647, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.883765875207068, |
| "grad_norm": 0.8873980045318604, |
| "learning_rate": 4.777225744009207e-06, |
| "loss": 0.6691, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.8845941468801767, |
| "grad_norm": 0.8805034160614014, |
| "learning_rate": 4.776771738426833e-06, |
| "loss": 0.6735, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.8854224185532855, |
| "grad_norm": 0.8923252820968628, |
| "learning_rate": 4.776317292310352e-06, |
| "loss": 0.6843, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.8862506902263942, |
| "grad_norm": 0.8728563785552979, |
| "learning_rate": 4.775862405747694e-06, |
| "loss": 0.6782, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.8870789618995031, |
| "grad_norm": 0.9106407761573792, |
| "learning_rate": 4.775407078826876e-06, |
| "loss": 0.6938, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.8879072335726118, |
| "grad_norm": 0.8907924890518188, |
| "learning_rate": 4.774951311635999e-06, |
| "loss": 0.6551, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.8887355052457206, |
| "grad_norm": 0.8816303610801697, |
| "learning_rate": 4.77449510426325e-06, |
| "loss": 0.6771, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.8895637769188294, |
| "grad_norm": 0.9205693602561951, |
| "learning_rate": 4.774038456796901e-06, |
| "loss": 0.6987, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.8903920485919381, |
| "grad_norm": 0.8785348534584045, |
| "learning_rate": 4.77358136932531e-06, |
| "loss": 0.6734, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.8912203202650469, |
| "grad_norm": 0.8760187029838562, |
| "learning_rate": 4.773123841936916e-06, |
| "loss": 0.6432, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.8920485919381557, |
| "grad_norm": 0.9020200371742249, |
| "learning_rate": 4.7726658747202505e-06, |
| "loss": 0.6756, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.8928768636112645, |
| "grad_norm": 0.8673221468925476, |
| "learning_rate": 4.772207467763923e-06, |
| "loss": 0.6756, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.8937051352843732, |
| "grad_norm": 0.8892212510108948, |
| "learning_rate": 4.771748621156631e-06, |
| "loss": 0.6719, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.8945334069574821, |
| "grad_norm": 0.909163773059845, |
| "learning_rate": 4.7712893349871585e-06, |
| "loss": 0.6721, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.8953616786305908, |
| "grad_norm": 0.9108759164810181, |
| "learning_rate": 4.770829609344372e-06, |
| "loss": 0.6695, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.8961899503036996, |
| "grad_norm": 0.9178847074508667, |
| "learning_rate": 4.770369444317225e-06, |
| "loss": 0.6618, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.8970182219768084, |
| "grad_norm": 0.8813229203224182, |
| "learning_rate": 4.769908839994753e-06, |
| "loss": 0.6882, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.8978464936499172, |
| "grad_norm": 0.872913658618927, |
| "learning_rate": 4.769447796466082e-06, |
| "loss": 0.6637, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.8986747653230259, |
| "grad_norm": 0.9070210456848145, |
| "learning_rate": 4.7689863138204185e-06, |
| "loss": 0.6706, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.8995030369961348, |
| "grad_norm": 0.895195722579956, |
| "learning_rate": 4.768524392147052e-06, |
| "loss": 0.671, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.9003313086692435, |
| "grad_norm": 0.9376201629638672, |
| "learning_rate": 4.768062031535365e-06, |
| "loss": 0.6675, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.9011595803423523, |
| "grad_norm": 0.9005862474441528, |
| "learning_rate": 4.767599232074816e-06, |
| "loss": 0.6803, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.9019878520154611, |
| "grad_norm": 0.9138628244400024, |
| "learning_rate": 4.767135993854955e-06, |
| "loss": 0.6571, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.9028161236885699, |
| "grad_norm": 0.9540771842002869, |
| "learning_rate": 4.766672316965413e-06, |
| "loss": 0.6963, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.9036443953616786, |
| "grad_norm": 0.8940072059631348, |
| "learning_rate": 4.766208201495906e-06, |
| "loss": 0.6779, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.9044726670347875, |
| "grad_norm": 0.909633457660675, |
| "learning_rate": 4.765743647536238e-06, |
| "loss": 0.6807, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.9053009387078962, |
| "grad_norm": 0.8975780010223389, |
| "learning_rate": 4.765278655176296e-06, |
| "loss": 0.687, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.906129210381005, |
| "grad_norm": 0.8824442625045776, |
| "learning_rate": 4.76481322450605e-06, |
| "loss": 0.6583, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.9069574820541138, |
| "grad_norm": 0.9349527359008789, |
| "learning_rate": 4.764347355615558e-06, |
| "loss": 0.6936, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.9077857537272225, |
| "grad_norm": 0.8850695490837097, |
| "learning_rate": 4.76388104859496e-06, |
| "loss": 0.6784, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.9086140254003313, |
| "grad_norm": 0.8885547518730164, |
| "learning_rate": 4.763414303534484e-06, |
| "loss": 0.6632, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.90944229707344, |
| "grad_norm": 0.8795794248580933, |
| "learning_rate": 4.762947120524438e-06, |
| "loss": 0.6609, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.9102705687465489, |
| "grad_norm": 0.9121587872505188, |
| "learning_rate": 4.76247949965522e-06, |
| "loss": 0.656, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.9110988404196576, |
| "grad_norm": 0.9315312504768372, |
| "learning_rate": 4.762011441017309e-06, |
| "loss": 0.7007, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.9119271120927664, |
| "grad_norm": 0.9163249731063843, |
| "learning_rate": 4.7615429447012704e-06, |
| "loss": 0.6666, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.9127553837658752, |
| "grad_norm": 0.8830770254135132, |
| "learning_rate": 4.761074010797754e-06, |
| "loss": 0.6695, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.913583655438984, |
| "grad_norm": 0.8936818838119507, |
| "learning_rate": 4.760604639397493e-06, |
| "loss": 0.6651, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.9144119271120927, |
| "grad_norm": 0.9030522108078003, |
| "learning_rate": 4.7601348305913085e-06, |
| "loss": 0.6757, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.9152401987852016, |
| "grad_norm": 0.8992725014686584, |
| "learning_rate": 4.759664584470103e-06, |
| "loss": 0.6849, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.9160684704583103, |
| "grad_norm": 0.9295501708984375, |
| "learning_rate": 4.759193901124864e-06, |
| "loss": 0.6295, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.9168967421314191, |
| "grad_norm": 0.8760778307914734, |
| "learning_rate": 4.758722780646666e-06, |
| "loss": 0.6766, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.9177250138045279, |
| "grad_norm": 0.8757495880126953, |
| "learning_rate": 4.758251223126665e-06, |
| "loss": 0.6911, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.9185532854776367, |
| "grad_norm": 0.8944259881973267, |
| "learning_rate": 4.757779228656103e-06, |
| "loss": 0.6849, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.9193815571507454, |
| "grad_norm": 0.8586800694465637, |
| "learning_rate": 4.757306797326309e-06, |
| "loss": 0.6407, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.9202098288238543, |
| "grad_norm": 0.891685426235199, |
| "learning_rate": 4.75683392922869e-06, |
| "loss": 0.6424, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.921038100496963, |
| "grad_norm": 0.8983104228973389, |
| "learning_rate": 4.756360624454744e-06, |
| "loss": 0.6739, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.9218663721700718, |
| "grad_norm": 0.8887161016464233, |
| "learning_rate": 4.755886883096051e-06, |
| "loss": 0.667, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.9226946438431806, |
| "grad_norm": 0.9226592779159546, |
| "learning_rate": 4.755412705244274e-06, |
| "loss": 0.6563, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.9235229155162893, |
| "grad_norm": 0.9521664381027222, |
| "learning_rate": 4.754938090991165e-06, |
| "loss": 0.6591, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.9243511871893981, |
| "grad_norm": 0.8717860579490662, |
| "learning_rate": 4.754463040428555e-06, |
| "loss": 0.6882, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.9251794588625069, |
| "grad_norm": 0.8605406880378723, |
| "learning_rate": 4.753987553648363e-06, |
| "loss": 0.6798, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.9260077305356157, |
| "grad_norm": 0.8895637392997742, |
| "learning_rate": 4.75351163074259e-06, |
| "loss": 0.6677, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.9268360022087244, |
| "grad_norm": 0.9749095439910889, |
| "learning_rate": 4.753035271803323e-06, |
| "loss": 0.6786, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.9276642738818333, |
| "grad_norm": 0.9415414929389954, |
| "learning_rate": 4.752558476922735e-06, |
| "loss": 0.6788, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.928492545554942, |
| "grad_norm": 0.885769784450531, |
| "learning_rate": 4.752081246193078e-06, |
| "loss": 0.672, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.9293208172280508, |
| "grad_norm": 0.9199241399765015, |
| "learning_rate": 4.751603579706695e-06, |
| "loss": 0.6618, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.9301490889011595, |
| "grad_norm": 0.9153104424476624, |
| "learning_rate": 4.751125477556008e-06, |
| "loss": 0.6833, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.9309773605742684, |
| "grad_norm": 0.9424253702163696, |
| "learning_rate": 4.750646939833526e-06, |
| "loss": 0.6682, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.9318056322473771, |
| "grad_norm": 0.8794890642166138, |
| "learning_rate": 4.750167966631842e-06, |
| "loss": 0.6633, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.932633903920486, |
| "grad_norm": 0.8534522652626038, |
| "learning_rate": 4.7496885580436305e-06, |
| "loss": 0.6497, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.9334621755935947, |
| "grad_norm": 0.9528769254684448, |
| "learning_rate": 4.749208714161655e-06, |
| "loss": 0.6873, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.9342904472667035, |
| "grad_norm": 0.9181371331214905, |
| "learning_rate": 4.748728435078761e-06, |
| "loss": 0.6972, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.9351187189398122, |
| "grad_norm": 0.864180326461792, |
| "learning_rate": 4.748247720887876e-06, |
| "loss": 0.652, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.9359469906129211, |
| "grad_norm": 0.8931804895401001, |
| "learning_rate": 4.747766571682016e-06, |
| "loss": 0.6527, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.9367752622860298, |
| "grad_norm": 0.9370419383049011, |
| "learning_rate": 4.747284987554277e-06, |
| "loss": 0.6748, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.9376035339591386, |
| "grad_norm": 0.9016710519790649, |
| "learning_rate": 4.746802968597842e-06, |
| "loss": 0.6998, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.9384318056322474, |
| "grad_norm": 0.8572346568107605, |
| "learning_rate": 4.746320514905977e-06, |
| "loss": 0.6354, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.9392600773053562, |
| "grad_norm": 0.8514915704727173, |
| "learning_rate": 4.745837626572033e-06, |
| "loss": 0.6897, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.9400883489784649, |
| "grad_norm": 0.9322717189788818, |
| "learning_rate": 4.745354303689444e-06, |
| "loss": 0.6671, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.9409166206515737, |
| "grad_norm": 0.9024900794029236, |
| "learning_rate": 4.744870546351727e-06, |
| "loss": 0.6751, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.9417448923246825, |
| "grad_norm": 0.8798164129257202, |
| "learning_rate": 4.744386354652486e-06, |
| "loss": 0.6933, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.9425731639977912, |
| "grad_norm": 0.8897552490234375, |
| "learning_rate": 4.743901728685406e-06, |
| "loss": 0.6696, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.9434014356709001, |
| "grad_norm": 0.8993925452232361, |
| "learning_rate": 4.74341666854426e-06, |
| "loss": 0.6731, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.9442297073440088, |
| "grad_norm": 0.9065762758255005, |
| "learning_rate": 4.742931174322902e-06, |
| "loss": 0.6513, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.9450579790171176, |
| "grad_norm": 0.8894189596176147, |
| "learning_rate": 4.742445246115271e-06, |
| "loss": 0.6651, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.9458862506902264, |
| "grad_norm": 0.8946899771690369, |
| "learning_rate": 4.741958884015387e-06, |
| "loss": 0.6678, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.9467145223633352, |
| "grad_norm": 1.3527920246124268, |
| "learning_rate": 4.741472088117358e-06, |
| "loss": 0.682, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.9475427940364439, |
| "grad_norm": 0.9134153127670288, |
| "learning_rate": 4.740984858515376e-06, |
| "loss": 0.6708, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.9483710657095528, |
| "grad_norm": 0.8724541068077087, |
| "learning_rate": 4.740497195303713e-06, |
| "loss": 0.6561, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.9491993373826615, |
| "grad_norm": 0.8734033703804016, |
| "learning_rate": 4.740009098576729e-06, |
| "loss": 0.677, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.9500276090557703, |
| "grad_norm": 0.8965688943862915, |
| "learning_rate": 4.739520568428866e-06, |
| "loss": 0.6695, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.950855880728879, |
| "grad_norm": 0.8910654783248901, |
| "learning_rate": 4.739031604954648e-06, |
| "loss": 0.6598, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.9516841524019879, |
| "grad_norm": 0.8933265209197998, |
| "learning_rate": 4.7385422082486874e-06, |
| "loss": 0.674, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.9525124240750966, |
| "grad_norm": 0.8866152763366699, |
| "learning_rate": 4.738052378405677e-06, |
| "loss": 0.6573, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.9533406957482055, |
| "grad_norm": 0.9387894868850708, |
| "learning_rate": 4.737562115520394e-06, |
| "loss": 0.7162, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.9541689674213142, |
| "grad_norm": 0.9425864815711975, |
| "learning_rate": 4.7370714196877e-06, |
| "loss": 0.6575, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.954997239094423, |
| "grad_norm": 0.9336329698562622, |
| "learning_rate": 4.7365802910025405e-06, |
| "loss": 0.6663, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.9558255107675317, |
| "grad_norm": 0.8798896670341492, |
| "learning_rate": 4.736088729559943e-06, |
| "loss": 0.672, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.9566537824406405, |
| "grad_norm": 0.9108537435531616, |
| "learning_rate": 4.735596735455022e-06, |
| "loss": 0.6714, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.9574820541137493, |
| "grad_norm": 0.959479033946991, |
| "learning_rate": 4.735104308782972e-06, |
| "loss": 0.6783, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.958310325786858, |
| "grad_norm": 0.9587971568107605, |
| "learning_rate": 4.734611449639073e-06, |
| "loss": 0.6614, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.9591385974599669, |
| "grad_norm": 0.861924409866333, |
| "learning_rate": 4.734118158118689e-06, |
| "loss": 0.6849, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.9599668691330756, |
| "grad_norm": 0.8837993741035461, |
| "learning_rate": 4.733624434317269e-06, |
| "loss": 0.673, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.9607951408061844, |
| "grad_norm": 0.9498627185821533, |
| "learning_rate": 4.7331302783303416e-06, |
| "loss": 0.6357, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.9616234124792932, |
| "grad_norm": 0.9200075268745422, |
| "learning_rate": 4.732635690253523e-06, |
| "loss": 0.6614, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.962451684152402, |
| "grad_norm": 0.9394239187240601, |
| "learning_rate": 4.7321406701825105e-06, |
| "loss": 0.6851, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.9632799558255107, |
| "grad_norm": 0.8943734765052795, |
| "learning_rate": 4.731645218213086e-06, |
| "loss": 0.6861, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.9641082274986196, |
| "grad_norm": 0.8939192295074463, |
| "learning_rate": 4.731149334441114e-06, |
| "loss": 0.6747, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.9649364991717283, |
| "grad_norm": 1.0531766414642334, |
| "learning_rate": 4.730653018962545e-06, |
| "loss": 0.6753, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.9657647708448371, |
| "grad_norm": 0.9647698998451233, |
| "learning_rate": 4.730156271873412e-06, |
| "loss": 0.6724, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.9665930425179459, |
| "grad_norm": 0.928109884262085, |
| "learning_rate": 4.729659093269828e-06, |
| "loss": 0.6625, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.9674213141910547, |
| "grad_norm": 0.8921148777008057, |
| "learning_rate": 4.729161483247994e-06, |
| "loss": 0.6904, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.9682495858641634, |
| "grad_norm": 0.9641497731208801, |
| "learning_rate": 4.728663441904194e-06, |
| "loss": 0.6961, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.9690778575372723, |
| "grad_norm": 0.9107950329780579, |
| "learning_rate": 4.7281649693347925e-06, |
| "loss": 0.6762, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.969906129210381, |
| "grad_norm": 0.8863945007324219, |
| "learning_rate": 4.727666065636241e-06, |
| "loss": 0.663, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.9707344008834898, |
| "grad_norm": 0.8782674670219421, |
| "learning_rate": 4.72716673090507e-06, |
| "loss": 0.6664, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.9715626725565986, |
| "grad_norm": 0.9692712426185608, |
| "learning_rate": 4.7266669652379005e-06, |
| "loss": 0.6577, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.9723909442297074, |
| "grad_norm": 0.9157746434211731, |
| "learning_rate": 4.7261667687314285e-06, |
| "loss": 0.6972, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.9732192159028161, |
| "grad_norm": 0.9174308776855469, |
| "learning_rate": 4.725666141482439e-06, |
| "loss": 0.652, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.9740474875759249, |
| "grad_norm": 0.8996846079826355, |
| "learning_rate": 4.725165083587798e-06, |
| "loss": 0.6534, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.9748757592490337, |
| "grad_norm": 0.9376521706581116, |
| "learning_rate": 4.724663595144457e-06, |
| "loss": 0.694, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.9757040309221424, |
| "grad_norm": 0.909335196018219, |
| "learning_rate": 4.724161676249448e-06, |
| "loss": 0.6893, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.9765323025952513, |
| "grad_norm": 0.8955535888671875, |
| "learning_rate": 4.723659326999888e-06, |
| "loss": 0.6655, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.97736057426836, |
| "grad_norm": 0.8697199821472168, |
| "learning_rate": 4.7231565474929765e-06, |
| "loss": 0.6765, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.9781888459414688, |
| "grad_norm": 0.9440643191337585, |
| "learning_rate": 4.722653337825998e-06, |
| "loss": 0.6741, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.9790171176145775, |
| "grad_norm": 0.9056214690208435, |
| "learning_rate": 4.7221496980963175e-06, |
| "loss": 0.6625, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.9798453892876864, |
| "grad_norm": 0.928752601146698, |
| "learning_rate": 4.721645628401385e-06, |
| "loss": 0.6891, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.9806736609607951, |
| "grad_norm": 0.8772798776626587, |
| "learning_rate": 4.721141128838733e-06, |
| "loss": 0.6692, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.981501932633904, |
| "grad_norm": 0.9029695987701416, |
| "learning_rate": 4.7206361995059775e-06, |
| "loss": 0.6679, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.9823302043070127, |
| "grad_norm": 0.955608069896698, |
| "learning_rate": 4.720130840500819e-06, |
| "loss": 0.6504, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.9831584759801215, |
| "grad_norm": 0.9784006476402283, |
| "learning_rate": 4.719625051921038e-06, |
| "loss": 0.6449, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.9839867476532302, |
| "grad_norm": 0.8987936973571777, |
| "learning_rate": 4.719118833864501e-06, |
| "loss": 0.6672, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.9848150193263391, |
| "grad_norm": 0.9036010503768921, |
| "learning_rate": 4.718612186429156e-06, |
| "loss": 0.6665, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.9856432909994478, |
| "grad_norm": 0.9625537991523743, |
| "learning_rate": 4.718105109713034e-06, |
| "loss": 0.6841, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.9864715626725566, |
| "grad_norm": 0.9473212957382202, |
| "learning_rate": 4.7175976038142505e-06, |
| "loss": 0.686, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.9872998343456654, |
| "grad_norm": 0.9359033107757568, |
| "learning_rate": 4.717089668831002e-06, |
| "loss": 0.674, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.9881281060187742, |
| "grad_norm": 0.880176842212677, |
| "learning_rate": 4.716581304861571e-06, |
| "loss": 0.6543, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.9889563776918829, |
| "grad_norm": 0.9304236173629761, |
| "learning_rate": 4.71607251200432e-06, |
| "loss": 0.6662, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.9897846493649917, |
| "grad_norm": 0.9259325861930847, |
| "learning_rate": 4.715563290357696e-06, |
| "loss": 0.6849, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.9906129210381005, |
| "grad_norm": 0.9209221601486206, |
| "learning_rate": 4.715053640020228e-06, |
| "loss": 0.6729, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.9914411927112092, |
| "grad_norm": 0.8723881840705872, |
| "learning_rate": 4.71454356109053e-06, |
| "loss": 0.674, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.9922694643843181, |
| "grad_norm": 0.8875730633735657, |
| "learning_rate": 4.714033053667296e-06, |
| "loss": 0.6634, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.9930977360574268, |
| "grad_norm": 0.9303425550460815, |
| "learning_rate": 4.713522117849305e-06, |
| "loss": 0.6828, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.9939260077305356, |
| "grad_norm": 0.902006983757019, |
| "learning_rate": 4.7130107537354185e-06, |
| "loss": 0.6903, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.9947542794036444, |
| "grad_norm": 0.8688106536865234, |
| "learning_rate": 4.7124989614245805e-06, |
| "loss": 0.6687, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.9955825510767532, |
| "grad_norm": 0.8675205707550049, |
| "learning_rate": 4.711986741015818e-06, |
| "loss": 0.6925, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.9964108227498619, |
| "grad_norm": 0.8795785307884216, |
| "learning_rate": 4.711474092608243e-06, |
| "loss": 0.6831, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.9972390944229708, |
| "grad_norm": 0.8771690726280212, |
| "learning_rate": 4.7109610163010435e-06, |
| "loss": 0.6576, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.9980673660960795, |
| "grad_norm": 0.8708536028862, |
| "learning_rate": 4.710447512193499e-06, |
| "loss": 0.636, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.9988956377691883, |
| "grad_norm": 0.8870919942855835, |
| "learning_rate": 4.709933580384965e-06, |
| "loss": 0.6716, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.999723909442297, |
| "grad_norm": 0.9387498497962952, |
| "learning_rate": 4.709419220974886e-06, |
| "loss": 0.6643, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.9387498497962952, |
| "learning_rate": 4.708904434062783e-06, |
| "loss": 0.6175, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.0008282716731087, |
| "grad_norm": 1.6915117502212524, |
| "learning_rate": 4.708389219748263e-06, |
| "loss": 0.6427, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.0016565433462175, |
| "grad_norm": 0.8601691722869873, |
| "learning_rate": 4.707873578131015e-06, |
| "loss": 0.6466, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.0024848150193264, |
| "grad_norm": 0.8543884754180908, |
| "learning_rate": 4.707357509310811e-06, |
| "loss": 0.6577, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.0033130866924351, |
| "grad_norm": 0.9008343815803528, |
| "learning_rate": 4.7068410133875066e-06, |
| "loss": 0.6367, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.0041413583655439, |
| "grad_norm": 0.8876969218254089, |
| "learning_rate": 4.706324090461037e-06, |
| "loss": 0.6298, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.0049696300386526, |
| "grad_norm": 0.8754454255104065, |
| "learning_rate": 4.7058067406314235e-06, |
| "loss": 0.6342, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.0057979017117615, |
| "grad_norm": 0.8877934813499451, |
| "learning_rate": 4.7052889639987674e-06, |
| "loss": 0.6563, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.0066261733848703, |
| "grad_norm": 0.8910914063453674, |
| "learning_rate": 4.704770760663255e-06, |
| "loss": 0.6478, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.007454445057979, |
| "grad_norm": 0.8597622513771057, |
| "learning_rate": 4.704252130725152e-06, |
| "loss": 0.6483, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.0082827167310877, |
| "grad_norm": 0.8797935843467712, |
| "learning_rate": 4.7037330742848105e-06, |
| "loss": 0.6533, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.0091109884041967, |
| "grad_norm": 0.855843186378479, |
| "learning_rate": 4.703213591442663e-06, |
| "loss": 0.6412, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.0099392600773054, |
| "grad_norm": 0.8675623536109924, |
| "learning_rate": 4.702693682299222e-06, |
| "loss": 0.6331, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.0107675317504141, |
| "grad_norm": 0.8637425303459167, |
| "learning_rate": 4.7021733469550865e-06, |
| "loss": 0.6369, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.0115958034235228, |
| "grad_norm": 0.8803741335868835, |
| "learning_rate": 4.7016525855109376e-06, |
| "loss": 0.637, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.0124240750966318, |
| "grad_norm": 0.8958247900009155, |
| "learning_rate": 4.701131398067537e-06, |
| "loss": 0.6225, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.0132523467697405, |
| "grad_norm": 0.8833504915237427, |
| "learning_rate": 4.70060978472573e-06, |
| "loss": 0.6513, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.0140806184428492, |
| "grad_norm": 0.9138036966323853, |
| "learning_rate": 4.700087745586442e-06, |
| "loss": 0.6328, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.014908890115958, |
| "grad_norm": 0.8690045475959778, |
| "learning_rate": 4.699565280750685e-06, |
| "loss": 0.6568, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.015737161789067, |
| "grad_norm": 0.8909130096435547, |
| "learning_rate": 4.69904239031955e-06, |
| "loss": 0.6506, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.0165654334621756, |
| "grad_norm": 0.8841529488563538, |
| "learning_rate": 4.698519074394212e-06, |
| "loss": 0.6347, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.0173937051352844, |
| "grad_norm": 0.9377810955047607, |
| "learning_rate": 4.697995333075927e-06, |
| "loss": 0.631, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.018221976808393, |
| "grad_norm": 0.9000067114830017, |
| "learning_rate": 4.697471166466035e-06, |
| "loss": 0.6623, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.0190502484815018, |
| "grad_norm": 0.888957142829895, |
| "learning_rate": 4.696946574665957e-06, |
| "loss": 0.6412, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.0198785201546108, |
| "grad_norm": 0.8862919211387634, |
| "learning_rate": 4.696421557777196e-06, |
| "loss": 0.6601, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.0207067918277195, |
| "grad_norm": 0.9067803025245667, |
| "learning_rate": 4.6958961159013386e-06, |
| "loss": 0.6465, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.0215350635008282, |
| "grad_norm": 0.9380660057067871, |
| "learning_rate": 4.695370249140052e-06, |
| "loss": 0.6569, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.022363335173937, |
| "grad_norm": 0.8945363163948059, |
| "learning_rate": 4.6948439575950885e-06, |
| "loss": 0.6277, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.023191606847046, |
| "grad_norm": 0.9212456345558167, |
| "learning_rate": 4.694317241368278e-06, |
| "loss": 0.6259, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.0240198785201546, |
| "grad_norm": 0.9022555947303772, |
| "learning_rate": 4.693790100561537e-06, |
| "loss": 0.6384, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.0248481501932634, |
| "grad_norm": 0.8946983814239502, |
| "learning_rate": 4.693262535276863e-06, |
| "loss": 0.6579, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.025676421866372, |
| "grad_norm": 0.9557907581329346, |
| "learning_rate": 4.692734545616331e-06, |
| "loss": 0.6628, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.026504693539481, |
| "grad_norm": 0.922622799873352, |
| "learning_rate": 4.692206131682106e-06, |
| "loss": 0.6448, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.0273329652125898, |
| "grad_norm": 0.9776833653450012, |
| "learning_rate": 4.691677293576431e-06, |
| "loss": 0.65, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.0281612368856985, |
| "grad_norm": 0.8609880208969116, |
| "learning_rate": 4.69114803140163e-06, |
| "loss": 0.6734, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.0289895085588072, |
| "grad_norm": 0.9334732890129089, |
| "learning_rate": 4.69061834526011e-06, |
| "loss": 0.6727, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.0298177802319162, |
| "grad_norm": 0.9625932574272156, |
| "learning_rate": 4.6900882352543614e-06, |
| "loss": 0.6679, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.030646051905025, |
| "grad_norm": 0.9794696569442749, |
| "learning_rate": 4.689557701486954e-06, |
| "loss": 0.6479, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.0314743235781336, |
| "grad_norm": 0.9065360426902771, |
| "learning_rate": 4.689026744060543e-06, |
| "loss": 0.6389, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.0323025952512423, |
| "grad_norm": 0.9200260639190674, |
| "learning_rate": 4.688495363077863e-06, |
| "loss": 0.6756, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.0331308669243513, |
| "grad_norm": 0.9406585693359375, |
| "learning_rate": 4.687963558641731e-06, |
| "loss": 0.6415, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.03395913859746, |
| "grad_norm": 0.918449342250824, |
| "learning_rate": 4.687431330855046e-06, |
| "loss": 0.6493, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.0347874102705688, |
| "grad_norm": 0.9120164513587952, |
| "learning_rate": 4.686898679820789e-06, |
| "loss": 0.6765, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.0356156819436775, |
| "grad_norm": 0.9046673774719238, |
| "learning_rate": 4.686365605642025e-06, |
| "loss": 0.6194, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.0364439536167862, |
| "grad_norm": 0.9200103878974915, |
| "learning_rate": 4.685832108421896e-06, |
| "loss": 0.6505, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.0372722252898952, |
| "grad_norm": 0.9182124137878418, |
| "learning_rate": 4.685298188263631e-06, |
| "loss": 0.6598, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.0381004969630039, |
| "grad_norm": 0.889716386795044, |
| "learning_rate": 4.684763845270537e-06, |
| "loss": 0.6404, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.0389287686361126, |
| "grad_norm": 0.9016664028167725, |
| "learning_rate": 4.684229079546005e-06, |
| "loss": 0.6293, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.0397570403092213, |
| "grad_norm": 0.9141029715538025, |
| "learning_rate": 4.683693891193508e-06, |
| "loss": 0.6492, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.0405853119823303, |
| "grad_norm": 0.8858665823936462, |
| "learning_rate": 4.683158280316599e-06, |
| "loss": 0.6427, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.041413583655439, |
| "grad_norm": 0.8972795009613037, |
| "learning_rate": 4.682622247018913e-06, |
| "loss": 0.6217, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.0422418553285477, |
| "grad_norm": 0.8855913281440735, |
| "learning_rate": 4.682085791404169e-06, |
| "loss": 0.6521, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.0430701270016565, |
| "grad_norm": 0.8924818634986877, |
| "learning_rate": 4.681548913576165e-06, |
| "loss": 0.6433, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.0438983986747654, |
| "grad_norm": 0.8819104433059692, |
| "learning_rate": 4.6810116136387825e-06, |
| "loss": 0.6726, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.0447266703478741, |
| "grad_norm": 0.9010201692581177, |
| "learning_rate": 4.680473891695985e-06, |
| "loss": 0.6408, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.0455549420209829, |
| "grad_norm": 0.9048166275024414, |
| "learning_rate": 4.6799357478518145e-06, |
| "loss": 0.6443, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.0463832136940916, |
| "grad_norm": 0.9104632139205933, |
| "learning_rate": 4.6793971822103985e-06, |
| "loss": 0.6644, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.0472114853672005, |
| "grad_norm": 0.9136452078819275, |
| "learning_rate": 4.678858194875944e-06, |
| "loss": 0.643, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.0480397570403093, |
| "grad_norm": 0.9014383554458618, |
| "learning_rate": 4.678318785952739e-06, |
| "loss": 0.6391, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.048868028713418, |
| "grad_norm": 0.8964272141456604, |
| "learning_rate": 4.677778955545155e-06, |
| "loss": 0.6569, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.0496963003865267, |
| "grad_norm": 0.9296204447746277, |
| "learning_rate": 4.677238703757644e-06, |
| "loss": 0.6467, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.0505245720596355, |
| "grad_norm": 0.9120078682899475, |
| "learning_rate": 4.676698030694741e-06, |
| "loss": 0.6239, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.0513528437327444, |
| "grad_norm": 0.9010439515113831, |
| "learning_rate": 4.6761569364610585e-06, |
| "loss": 0.6456, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.0521811154058531, |
| "grad_norm": 0.9114722609519958, |
| "learning_rate": 4.675615421161295e-06, |
| "loss": 0.63, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.0530093870789619, |
| "grad_norm": 0.9014853835105896, |
| "learning_rate": 4.675073484900229e-06, |
| "loss": 0.6382, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.0538376587520706, |
| "grad_norm": 0.9215754270553589, |
| "learning_rate": 4.674531127782718e-06, |
| "loss": 0.6471, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.0546659304251795, |
| "grad_norm": 0.8762101531028748, |
| "learning_rate": 4.673988349913707e-06, |
| "loss": 0.6321, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.0554942020982883, |
| "grad_norm": 0.9086922407150269, |
| "learning_rate": 4.673445151398214e-06, |
| "loss": 0.6456, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.056322473771397, |
| "grad_norm": 0.9069847464561462, |
| "learning_rate": 4.672901532341346e-06, |
| "loss": 0.6439, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.0571507454445057, |
| "grad_norm": 0.8990313410758972, |
| "learning_rate": 4.6723574928482865e-06, |
| "loss": 0.6565, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.0579790171176147, |
| "grad_norm": 0.9467563629150391, |
| "learning_rate": 4.6718130330243014e-06, |
| "loss": 0.6512, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.0588072887907234, |
| "grad_norm": 0.8764270544052124, |
| "learning_rate": 4.671268152974742e-06, |
| "loss": 0.6388, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.0596355604638321, |
| "grad_norm": 0.8909838795661926, |
| "learning_rate": 4.670722852805033e-06, |
| "loss": 0.6452, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.0604638321369408, |
| "grad_norm": 0.9503293037414551, |
| "learning_rate": 4.670177132620689e-06, |
| "loss": 0.6544, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.0612921038100498, |
| "grad_norm": 0.9159129858016968, |
| "learning_rate": 4.669630992527299e-06, |
| "loss": 0.6494, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.0621203754831585, |
| "grad_norm": 0.9771336913108826, |
| "learning_rate": 4.6690844326305374e-06, |
| "loss": 0.6418, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.0629486471562672, |
| "grad_norm": 0.9041289687156677, |
| "learning_rate": 4.668537453036159e-06, |
| "loss": 0.6456, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.063776918829376, |
| "grad_norm": 0.934086799621582, |
| "learning_rate": 4.667990053849997e-06, |
| "loss": 0.635, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.0646051905024847, |
| "grad_norm": 0.9609100222587585, |
| "learning_rate": 4.667442235177969e-06, |
| "loss": 0.6479, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.0654334621755936, |
| "grad_norm": 0.9312746524810791, |
| "learning_rate": 4.666893997126074e-06, |
| "loss": 0.6392, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.0662617338487024, |
| "grad_norm": 0.8859624266624451, |
| "learning_rate": 4.66634533980039e-06, |
| "loss": 0.6377, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.067090005521811, |
| "grad_norm": 0.9386303424835205, |
| "learning_rate": 4.6657962633070765e-06, |
| "loss": 0.6421, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.06791827719492, |
| "grad_norm": 0.9250341653823853, |
| "learning_rate": 4.665246767752376e-06, |
| "loss": 0.622, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.0687465488680288, |
| "grad_norm": 0.9207826852798462, |
| "learning_rate": 4.66469685324261e-06, |
| "loss": 0.649, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.0695748205411375, |
| "grad_norm": 0.927555501461029, |
| "learning_rate": 4.664146519884182e-06, |
| "loss": 0.6402, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.0704030922142462, |
| "grad_norm": 0.9016607999801636, |
| "learning_rate": 4.663595767783575e-06, |
| "loss": 0.6423, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.071231363887355, |
| "grad_norm": 0.9201986193656921, |
| "learning_rate": 4.6630445970473576e-06, |
| "loss": 0.6382, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.072059635560464, |
| "grad_norm": 0.8747836351394653, |
| "learning_rate": 4.662493007782174e-06, |
| "loss": 0.6248, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.0728879072335726, |
| "grad_norm": 0.9549235701560974, |
| "learning_rate": 4.661941000094751e-06, |
| "loss": 0.6305, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.0737161789066814, |
| "grad_norm": 0.9476044178009033, |
| "learning_rate": 4.661388574091898e-06, |
| "loss": 0.6304, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.07454445057979, |
| "grad_norm": 0.8923697471618652, |
| "learning_rate": 4.660835729880505e-06, |
| "loss": 0.6368, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.075372722252899, |
| "grad_norm": 0.9301371574401855, |
| "learning_rate": 4.660282467567541e-06, |
| "loss": 0.6331, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.0762009939260078, |
| "grad_norm": 0.964452862739563, |
| "learning_rate": 4.659728787260057e-06, |
| "loss": 0.6216, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.0770292655991165, |
| "grad_norm": 0.9461387395858765, |
| "learning_rate": 4.659174689065185e-06, |
| "loss": 0.6404, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.0778575372722252, |
| "grad_norm": 0.8948859572410583, |
| "learning_rate": 4.6586201730901395e-06, |
| "loss": 0.6488, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.0786858089453342, |
| "grad_norm": 0.8966079354286194, |
| "learning_rate": 4.658065239442212e-06, |
| "loss": 0.6291, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.079514080618443, |
| "grad_norm": 0.9006171226501465, |
| "learning_rate": 4.657509888228779e-06, |
| "loss": 0.6451, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.0803423522915516, |
| "grad_norm": 0.9222291111946106, |
| "learning_rate": 4.656954119557293e-06, |
| "loss": 0.6146, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.0811706239646603, |
| "grad_norm": 0.9588384032249451, |
| "learning_rate": 4.656397933535293e-06, |
| "loss": 0.6373, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.0819988956377693, |
| "grad_norm": 0.9192469120025635, |
| "learning_rate": 4.655841330270393e-06, |
| "loss": 0.636, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.082827167310878, |
| "grad_norm": 0.8987453579902649, |
| "learning_rate": 4.655284309870294e-06, |
| "loss": 0.6372, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.0836554389839868, |
| "grad_norm": 0.9247817397117615, |
| "learning_rate": 4.654726872442771e-06, |
| "loss": 0.6297, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.0844837106570955, |
| "grad_norm": 0.8973072171211243, |
| "learning_rate": 4.654169018095685e-06, |
| "loss": 0.6577, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.0853119823302042, |
| "grad_norm": 0.8974944949150085, |
| "learning_rate": 4.653610746936975e-06, |
| "loss": 0.6387, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.0861402540033132, |
| "grad_norm": 0.9119291305541992, |
| "learning_rate": 4.653052059074661e-06, |
| "loss": 0.6438, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.0869685256764219, |
| "grad_norm": 0.8913506865501404, |
| "learning_rate": 4.652492954616844e-06, |
| "loss": 0.636, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.0877967973495306, |
| "grad_norm": 0.8785920143127441, |
| "learning_rate": 4.6519334336717055e-06, |
| "loss": 0.6393, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.0886250690226393, |
| "grad_norm": 0.9109119176864624, |
| "learning_rate": 4.651373496347508e-06, |
| "loss": 0.6221, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.0894533406957483, |
| "grad_norm": 0.8932192325592041, |
| "learning_rate": 4.650813142752593e-06, |
| "loss": 0.6216, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.090281612368857, |
| "grad_norm": 0.8906638622283936, |
| "learning_rate": 4.6502523729953855e-06, |
| "loss": 0.6439, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.0911098840419657, |
| "grad_norm": 0.9296264052391052, |
| "learning_rate": 4.649691187184387e-06, |
| "loss": 0.666, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.0919381557150745, |
| "grad_norm": 0.9644883871078491, |
| "learning_rate": 4.649129585428184e-06, |
| "loss": 0.6458, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.0927664273881834, |
| "grad_norm": 0.9602929353713989, |
| "learning_rate": 4.64856756783544e-06, |
| "loss": 0.6459, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.0935946990612921, |
| "grad_norm": 0.9335957169532776, |
| "learning_rate": 4.6480051345149e-06, |
| "loss": 0.6357, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.0944229707344009, |
| "grad_norm": 0.8983004689216614, |
| "learning_rate": 4.64744228557539e-06, |
| "loss": 0.6294, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.0952512424075096, |
| "grad_norm": 0.9320319890975952, |
| "learning_rate": 4.646879021125816e-06, |
| "loss": 0.6748, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.0960795140806185, |
| "grad_norm": 0.898932158946991, |
| "learning_rate": 4.646315341275164e-06, |
| "loss": 0.624, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.0969077857537273, |
| "grad_norm": 0.9280585646629333, |
| "learning_rate": 4.645751246132501e-06, |
| "loss": 0.6555, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.097736057426836, |
| "grad_norm": 0.8680840134620667, |
| "learning_rate": 4.645186735806976e-06, |
| "loss": 0.6489, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.0985643290999447, |
| "grad_norm": 0.8832307457923889, |
| "learning_rate": 4.644621810407813e-06, |
| "loss": 0.6461, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.0993926007730535, |
| "grad_norm": 0.8860934376716614, |
| "learning_rate": 4.644056470044323e-06, |
| "loss": 0.6713, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.1002208724461624, |
| "grad_norm": 0.917672336101532, |
| "learning_rate": 4.6434907148258915e-06, |
| "loss": 0.6255, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.1010491441192711, |
| "grad_norm": 0.9062732458114624, |
| "learning_rate": 4.642924544861988e-06, |
| "loss": 0.6526, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.1018774157923799, |
| "grad_norm": 0.9103027582168579, |
| "learning_rate": 4.642357960262162e-06, |
| "loss": 0.6535, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.1027056874654886, |
| "grad_norm": 0.8931050300598145, |
| "learning_rate": 4.641790961136041e-06, |
| "loss": 0.6239, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.1035339591385975, |
| "grad_norm": 0.898331344127655, |
| "learning_rate": 4.641223547593334e-06, |
| "loss": 0.6638, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.1043622308117063, |
| "grad_norm": 0.8908964991569519, |
| "learning_rate": 4.6406557197438324e-06, |
| "loss": 0.6223, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.105190502484815, |
| "grad_norm": 0.9101983904838562, |
| "learning_rate": 4.640087477697403e-06, |
| "loss": 0.6203, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.1060187741579237, |
| "grad_norm": 0.8900510668754578, |
| "learning_rate": 4.639518821563997e-06, |
| "loss": 0.629, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.1068470458310327, |
| "grad_norm": 0.9052078723907471, |
| "learning_rate": 4.6389497514536415e-06, |
| "loss": 0.6242, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.1076753175041414, |
| "grad_norm": 0.9012704491615295, |
| "learning_rate": 4.63838026747645e-06, |
| "loss": 0.6316, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.1085035891772501, |
| "grad_norm": 0.9398553967475891, |
| "learning_rate": 4.637810369742609e-06, |
| "loss": 0.6308, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.1093318608503588, |
| "grad_norm": 0.9674614667892456, |
| "learning_rate": 4.637240058362391e-06, |
| "loss": 0.6575, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.1101601325234678, |
| "grad_norm": 0.9048489332199097, |
| "learning_rate": 4.636669333446145e-06, |
| "loss": 0.6468, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.1109884041965765, |
| "grad_norm": 0.911857008934021, |
| "learning_rate": 4.636098195104299e-06, |
| "loss": 0.6367, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.1118166758696852, |
| "grad_norm": 0.9007687568664551, |
| "learning_rate": 4.6355266434473665e-06, |
| "loss": 0.6417, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.112644947542794, |
| "grad_norm": 0.8986724019050598, |
| "learning_rate": 4.634954678585935e-06, |
| "loss": 0.6454, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.1134732192159027, |
| "grad_norm": 0.9101186394691467, |
| "learning_rate": 4.634382300630675e-06, |
| "loss": 0.6158, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.1143014908890116, |
| "grad_norm": 1.0062702894210815, |
| "learning_rate": 4.633809509692336e-06, |
| "loss": 0.6459, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.1151297625621204, |
| "grad_norm": 1.0598455667495728, |
| "learning_rate": 4.6332363058817484e-06, |
| "loss": 0.6385, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.115958034235229, |
| "grad_norm": 0.875592827796936, |
| "learning_rate": 4.632662689309821e-06, |
| "loss": 0.6564, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.116786305908338, |
| "grad_norm": 0.9157524704933167, |
| "learning_rate": 4.632088660087545e-06, |
| "loss": 0.629, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.1176145775814468, |
| "grad_norm": 0.9279395341873169, |
| "learning_rate": 4.631514218325987e-06, |
| "loss": 0.6492, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.1184428492545555, |
| "grad_norm": 0.9317731261253357, |
| "learning_rate": 4.630939364136298e-06, |
| "loss": 0.6566, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.1192711209276642, |
| "grad_norm": 0.907844603061676, |
| "learning_rate": 4.630364097629706e-06, |
| "loss": 0.6352, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.120099392600773, |
| "grad_norm": 0.9384779334068298, |
| "learning_rate": 4.62978841891752e-06, |
| "loss": 0.6261, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.120927664273882, |
| "grad_norm": 0.9077824950218201, |
| "learning_rate": 4.629212328111129e-06, |
| "loss": 0.6783, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.1217559359469906, |
| "grad_norm": 0.8752540349960327, |
| "learning_rate": 4.628635825322e-06, |
| "loss": 0.6519, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.1225842076200994, |
| "grad_norm": 0.8709624409675598, |
| "learning_rate": 4.628058910661683e-06, |
| "loss": 0.6473, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.123412479293208, |
| "grad_norm": 0.8955983519554138, |
| "learning_rate": 4.627481584241803e-06, |
| "loss": 0.6467, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.124240750966317, |
| "grad_norm": 0.8938992023468018, |
| "learning_rate": 4.62690384617407e-06, |
| "loss": 0.6193, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.1250690226394258, |
| "grad_norm": 0.942881166934967, |
| "learning_rate": 4.6263256965702684e-06, |
| "loss": 0.6491, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.1258972943125345, |
| "grad_norm": 0.8961532115936279, |
| "learning_rate": 4.6257471355422654e-06, |
| "loss": 0.6603, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.1267255659856432, |
| "grad_norm": 0.9317561388015747, |
| "learning_rate": 4.625168163202009e-06, |
| "loss": 0.6313, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.1275538376587522, |
| "grad_norm": 0.9453751444816589, |
| "learning_rate": 4.624588779661523e-06, |
| "loss": 0.6194, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.128382109331861, |
| "grad_norm": 0.9124789237976074, |
| "learning_rate": 4.624008985032913e-06, |
| "loss": 0.648, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.1292103810049696, |
| "grad_norm": 0.9069437980651855, |
| "learning_rate": 4.623428779428365e-06, |
| "loss": 0.6449, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.1300386526780783, |
| "grad_norm": 0.910844624042511, |
| "learning_rate": 4.6228481629601415e-06, |
| "loss": 0.6641, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.1308669243511873, |
| "grad_norm": 0.941060483455658, |
| "learning_rate": 4.622267135740588e-06, |
| "loss": 0.6404, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.131695196024296, |
| "grad_norm": 0.9685953259468079, |
| "learning_rate": 4.6216856978821275e-06, |
| "loss": 0.6358, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.1325234676974048, |
| "grad_norm": 0.8873149156570435, |
| "learning_rate": 4.621103849497261e-06, |
| "loss": 0.6481, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.1333517393705135, |
| "grad_norm": 0.9186413884162903, |
| "learning_rate": 4.620521590698574e-06, |
| "loss": 0.6574, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.1341800110436222, |
| "grad_norm": 0.9178755879402161, |
| "learning_rate": 4.6199389215987266e-06, |
| "loss": 0.6508, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.1350082827167312, |
| "grad_norm": 0.9138978123664856, |
| "learning_rate": 4.619355842310459e-06, |
| "loss": 0.6538, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.1358365543898399, |
| "grad_norm": 0.8917097449302673, |
| "learning_rate": 4.618772352946593e-06, |
| "loss": 0.638, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.1366648260629486, |
| "grad_norm": 0.9094858765602112, |
| "learning_rate": 4.618188453620026e-06, |
| "loss": 0.6253, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.1374930977360573, |
| "grad_norm": 0.883600652217865, |
| "learning_rate": 4.61760414444374e-06, |
| "loss": 0.6511, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.1383213694091663, |
| "grad_norm": 0.9034351110458374, |
| "learning_rate": 4.617019425530791e-06, |
| "loss": 0.6696, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.139149641082275, |
| "grad_norm": 0.9252607822418213, |
| "learning_rate": 4.6164342969943196e-06, |
| "loss": 0.6382, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.1399779127553837, |
| "grad_norm": 0.9082643389701843, |
| "learning_rate": 4.615848758947539e-06, |
| "loss": 0.6393, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.1408061844284925, |
| "grad_norm": 0.936077892780304, |
| "learning_rate": 4.615262811503749e-06, |
| "loss": 0.6415, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.1416344561016014, |
| "grad_norm": 0.968610405921936, |
| "learning_rate": 4.614676454776321e-06, |
| "loss": 0.6566, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.1424627277747101, |
| "grad_norm": 0.9258063435554504, |
| "learning_rate": 4.614089688878713e-06, |
| "loss": 0.6548, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.1432909994478189, |
| "grad_norm": 0.8805782198905945, |
| "learning_rate": 4.613502513924459e-06, |
| "loss": 0.6543, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.1441192711209276, |
| "grad_norm": 0.8910253643989563, |
| "learning_rate": 4.612914930027168e-06, |
| "loss": 0.6226, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.1449475427940365, |
| "grad_norm": 0.9264798760414124, |
| "learning_rate": 4.612326937300535e-06, |
| "loss": 0.6516, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.1457758144671453, |
| "grad_norm": 0.9472189545631409, |
| "learning_rate": 4.6117385358583315e-06, |
| "loss": 0.6375, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.146604086140254, |
| "grad_norm": 0.9199020266532898, |
| "learning_rate": 4.611149725814406e-06, |
| "loss": 0.6501, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.1474323578133627, |
| "grad_norm": 0.8969109654426575, |
| "learning_rate": 4.610560507282688e-06, |
| "loss": 0.636, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.1482606294864715, |
| "grad_norm": 0.8784180283546448, |
| "learning_rate": 4.609970880377187e-06, |
| "loss": 0.655, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.1490889011595804, |
| "grad_norm": 0.9104288220405579, |
| "learning_rate": 4.60938084521199e-06, |
| "loss": 0.6526, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.1499171728326891, |
| "grad_norm": 0.8898188471794128, |
| "learning_rate": 4.608790401901262e-06, |
| "loss": 0.6434, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.1507454445057979, |
| "grad_norm": 0.9119797348976135, |
| "learning_rate": 4.608199550559249e-06, |
| "loss": 0.6543, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.1515737161789068, |
| "grad_norm": 0.9239185452461243, |
| "learning_rate": 4.607608291300275e-06, |
| "loss": 0.6535, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.1524019878520155, |
| "grad_norm": 0.8849360942840576, |
| "learning_rate": 4.607016624238744e-06, |
| "loss": 0.6353, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.1532302595251243, |
| "grad_norm": 0.9566167593002319, |
| "learning_rate": 4.606424549489138e-06, |
| "loss": 0.6496, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.154058531198233, |
| "grad_norm": 0.909223198890686, |
| "learning_rate": 4.605832067166017e-06, |
| "loss": 0.6301, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.1548868028713417, |
| "grad_norm": 0.8883793354034424, |
| "learning_rate": 4.605239177384021e-06, |
| "loss": 0.6511, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.1557150745444507, |
| "grad_norm": 0.89354008436203, |
| "learning_rate": 4.604645880257869e-06, |
| "loss": 0.6477, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.1565433462175594, |
| "grad_norm": 0.9844399094581604, |
| "learning_rate": 4.604052175902359e-06, |
| "loss": 0.6362, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.1573716178906681, |
| "grad_norm": 0.9096093773841858, |
| "learning_rate": 4.603458064432367e-06, |
| "loss": 0.6486, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.1581998895637768, |
| "grad_norm": 0.8702238202095032, |
| "learning_rate": 4.6028635459628476e-06, |
| "loss": 0.6186, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.1590281612368858, |
| "grad_norm": 0.8790991902351379, |
| "learning_rate": 4.602268620608834e-06, |
| "loss": 0.6651, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.1598564329099945, |
| "grad_norm": 0.942251443862915, |
| "learning_rate": 4.60167328848544e-06, |
| "loss": 0.6291, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.1606847045831032, |
| "grad_norm": 0.916533350944519, |
| "learning_rate": 4.601077549707856e-06, |
| "loss": 0.6361, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.161512976256212, |
| "grad_norm": 0.8632711172103882, |
| "learning_rate": 4.600481404391352e-06, |
| "loss": 0.6353, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.1623412479293207, |
| "grad_norm": 0.9344790577888489, |
| "learning_rate": 4.599884852651277e-06, |
| "loss": 0.6373, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.1631695196024296, |
| "grad_norm": 0.9122592806816101, |
| "learning_rate": 4.5992878946030575e-06, |
| "loss": 0.6389, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.1639977912755384, |
| "grad_norm": 0.9013955593109131, |
| "learning_rate": 4.5986905303622e-06, |
| "loss": 0.6524, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.164826062948647, |
| "grad_norm": 0.9473676681518555, |
| "learning_rate": 4.598092760044288e-06, |
| "loss": 0.6823, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.165654334621756, |
| "grad_norm": 0.8996672630310059, |
| "learning_rate": 4.597494583764987e-06, |
| "loss": 0.6465, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.1664826062948648, |
| "grad_norm": 0.8875576257705688, |
| "learning_rate": 4.596896001640034e-06, |
| "loss": 0.6374, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.1673108779679735, |
| "grad_norm": 0.8938291072845459, |
| "learning_rate": 4.596297013785253e-06, |
| "loss": 0.613, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.1681391496410822, |
| "grad_norm": 0.9555407762527466, |
| "learning_rate": 4.5956976203165414e-06, |
| "loss": 0.6307, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.168967421314191, |
| "grad_norm": 0.9139370918273926, |
| "learning_rate": 4.595097821349876e-06, |
| "loss": 0.6557, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.1697956929873, |
| "grad_norm": 0.9221656918525696, |
| "learning_rate": 4.594497617001312e-06, |
| "loss": 0.6248, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.1706239646604086, |
| "grad_norm": 0.9012216925621033, |
| "learning_rate": 4.593897007386985e-06, |
| "loss": 0.6414, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.1714522363335174, |
| "grad_norm": 0.8991276621818542, |
| "learning_rate": 4.593295992623105e-06, |
| "loss": 0.6594, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.172280508006626, |
| "grad_norm": 0.8782510757446289, |
| "learning_rate": 4.592694572825964e-06, |
| "loss": 0.6654, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.173108779679735, |
| "grad_norm": 0.873396098613739, |
| "learning_rate": 4.592092748111931e-06, |
| "loss": 0.6617, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.1739370513528438, |
| "grad_norm": 0.8768683075904846, |
| "learning_rate": 4.5914905185974535e-06, |
| "loss": 0.6437, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.1747653230259525, |
| "grad_norm": 0.9255250096321106, |
| "learning_rate": 4.590887884399058e-06, |
| "loss": 0.6635, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.1755935946990612, |
| "grad_norm": 0.8961500525474548, |
| "learning_rate": 4.590284845633347e-06, |
| "loss": 0.6491, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.1764218663721702, |
| "grad_norm": 0.8930363655090332, |
| "learning_rate": 4.589681402417005e-06, |
| "loss": 0.6394, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.177250138045279, |
| "grad_norm": 0.9169489741325378, |
| "learning_rate": 4.5890775548667895e-06, |
| "loss": 0.6103, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.1780784097183876, |
| "grad_norm": 0.8852291703224182, |
| "learning_rate": 4.588473303099542e-06, |
| "loss": 0.6426, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.1789066813914963, |
| "grad_norm": 0.9556336998939514, |
| "learning_rate": 4.587868647232179e-06, |
| "loss": 0.6402, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.1797349530646053, |
| "grad_norm": 0.8800534605979919, |
| "learning_rate": 4.587263587381696e-06, |
| "loss": 0.6518, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.180563224737714, |
| "grad_norm": 0.8836509585380554, |
| "learning_rate": 4.586658123665165e-06, |
| "loss": 0.6442, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.1813914964108228, |
| "grad_norm": 0.8761002421379089, |
| "learning_rate": 4.58605225619974e-06, |
| "loss": 0.6451, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.1822197680839315, |
| "grad_norm": 0.9124804139137268, |
| "learning_rate": 4.585445985102649e-06, |
| "loss": 0.6565, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.1830480397570402, |
| "grad_norm": 0.8805012702941895, |
| "learning_rate": 4.584839310491199e-06, |
| "loss": 0.6504, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.1838763114301492, |
| "grad_norm": 0.8901008367538452, |
| "learning_rate": 4.584232232482778e-06, |
| "loss": 0.6423, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.1847045831032579, |
| "grad_norm": 0.9029923677444458, |
| "learning_rate": 4.58362475119485e-06, |
| "loss": 0.6494, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.1855328547763666, |
| "grad_norm": 0.8865271210670471, |
| "learning_rate": 4.5830168667449544e-06, |
| "loss": 0.6411, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.1863611264494753, |
| "grad_norm": 0.9174125790596008, |
| "learning_rate": 4.5824085792507135e-06, |
| "loss": 0.6451, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.1871893981225843, |
| "grad_norm": 0.8976553082466125, |
| "learning_rate": 4.581799888829824e-06, |
| "loss": 0.6381, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.188017669795693, |
| "grad_norm": 0.9103914499282837, |
| "learning_rate": 4.581190795600062e-06, |
| "loss": 0.6554, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.1888459414688017, |
| "grad_norm": 0.8944932222366333, |
| "learning_rate": 4.580581299679282e-06, |
| "loss": 0.6362, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.1896742131419105, |
| "grad_norm": 0.9190399050712585, |
| "learning_rate": 4.579971401185416e-06, |
| "loss": 0.6308, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.1905024848150194, |
| "grad_norm": 0.8663450479507446, |
| "learning_rate": 4.579361100236473e-06, |
| "loss": 0.6487, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.1913307564881281, |
| "grad_norm": 0.8797769546508789, |
| "learning_rate": 4.5787503969505405e-06, |
| "loss": 0.6214, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.1921590281612369, |
| "grad_norm": 0.8942309021949768, |
| "learning_rate": 4.578139291445783e-06, |
| "loss": 0.6343, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.1929872998343456, |
| "grad_norm": 0.9059276580810547, |
| "learning_rate": 4.577527783840447e-06, |
| "loss": 0.6602, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.1938155715074545, |
| "grad_norm": 0.931620717048645, |
| "learning_rate": 4.5769158742528494e-06, |
| "loss": 0.6608, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.1946438431805633, |
| "grad_norm": 0.8962466716766357, |
| "learning_rate": 4.576303562801392e-06, |
| "loss": 0.6445, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.195472114853672, |
| "grad_norm": 0.881026566028595, |
| "learning_rate": 4.57569084960455e-06, |
| "loss": 0.6342, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.1963003865267807, |
| "grad_norm": 0.9046021699905396, |
| "learning_rate": 4.5750777347808775e-06, |
| "loss": 0.6022, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.1971286581998895, |
| "grad_norm": 0.9648088216781616, |
| "learning_rate": 4.574464218449006e-06, |
| "loss": 0.6204, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.1979569298729984, |
| "grad_norm": 0.897537887096405, |
| "learning_rate": 4.573850300727648e-06, |
| "loss": 0.607, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.1987852015461071, |
| "grad_norm": 0.8594499230384827, |
| "learning_rate": 4.5732359817355885e-06, |
| "loss": 0.6329, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.1996134732192159, |
| "grad_norm": 0.9005447030067444, |
| "learning_rate": 4.572621261591692e-06, |
| "loss": 0.6265, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.2004417448923248, |
| "grad_norm": 0.8954119086265564, |
| "learning_rate": 4.572006140414903e-06, |
| "loss": 0.6525, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.2012700165654335, |
| "grad_norm": 0.9050043821334839, |
| "learning_rate": 4.5713906183242404e-06, |
| "loss": 0.643, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.2020982882385423, |
| "grad_norm": 0.8929612040519714, |
| "learning_rate": 4.5707746954388034e-06, |
| "loss": 0.625, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.202926559911651, |
| "grad_norm": 0.8998628854751587, |
| "learning_rate": 4.570158371877766e-06, |
| "loss": 0.6428, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.2037548315847597, |
| "grad_norm": 0.9374850988388062, |
| "learning_rate": 4.569541647760381e-06, |
| "loss": 0.619, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.2045831032578687, |
| "grad_norm": 0.8873240351676941, |
| "learning_rate": 4.568924523205979e-06, |
| "loss": 0.6633, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.2054113749309774, |
| "grad_norm": 0.9118494987487793, |
| "learning_rate": 4.568306998333968e-06, |
| "loss": 0.6486, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.2062396466040861, |
| "grad_norm": 0.9309497475624084, |
| "learning_rate": 4.567689073263834e-06, |
| "loss": 0.6705, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.2070679182771948, |
| "grad_norm": 0.9181755781173706, |
| "learning_rate": 4.567070748115139e-06, |
| "loss": 0.645, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.2078961899503038, |
| "grad_norm": 0.8733282089233398, |
| "learning_rate": 4.566452023007523e-06, |
| "loss": 0.6452, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.2087244616234125, |
| "grad_norm": 0.9107138514518738, |
| "learning_rate": 4.565832898060703e-06, |
| "loss": 0.6452, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.2095527332965212, |
| "grad_norm": 0.9111687541007996, |
| "learning_rate": 4.565213373394476e-06, |
| "loss": 0.626, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.21038100496963, |
| "grad_norm": 0.9195486307144165, |
| "learning_rate": 4.5645934491287115e-06, |
| "loss": 0.6405, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.2112092766427387, |
| "grad_norm": 0.9375872015953064, |
| "learning_rate": 4.563973125383362e-06, |
| "loss": 0.6361, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.2120375483158476, |
| "grad_norm": 0.9200862050056458, |
| "learning_rate": 4.563352402278451e-06, |
| "loss": 0.6199, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.2128658199889564, |
| "grad_norm": 0.9497359991073608, |
| "learning_rate": 4.562731279934086e-06, |
| "loss": 0.6495, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.213694091662065, |
| "grad_norm": 0.8916203379631042, |
| "learning_rate": 4.562109758470446e-06, |
| "loss": 0.626, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.214522363335174, |
| "grad_norm": 0.9152259826660156, |
| "learning_rate": 4.561487838007792e-06, |
| "loss": 0.6258, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.2153506350082828, |
| "grad_norm": 0.8966463804244995, |
| "learning_rate": 4.560865518666458e-06, |
| "loss": 0.6425, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.2161789066813915, |
| "grad_norm": 0.8885869979858398, |
| "learning_rate": 4.560242800566855e-06, |
| "loss": 0.6583, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.2170071783545002, |
| "grad_norm": 0.9269018769264221, |
| "learning_rate": 4.559619683829477e-06, |
| "loss": 0.6401, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.217835450027609, |
| "grad_norm": 0.8695492744445801, |
| "learning_rate": 4.55899616857489e-06, |
| "loss": 0.6259, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.218663721700718, |
| "grad_norm": 0.8998788595199585, |
| "learning_rate": 4.558372254923738e-06, |
| "loss": 0.6324, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.2194919933738266, |
| "grad_norm": 0.906700074672699, |
| "learning_rate": 4.557747942996742e-06, |
| "loss": 0.6277, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.2203202650469354, |
| "grad_norm": 0.8918790817260742, |
| "learning_rate": 4.557123232914702e-06, |
| "loss": 0.6344, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.221148536720044, |
| "grad_norm": 0.8861198425292969, |
| "learning_rate": 4.556498124798492e-06, |
| "loss": 0.6132, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.221976808393153, |
| "grad_norm": 0.9222586750984192, |
| "learning_rate": 4.555872618769065e-06, |
| "loss": 0.6527, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.2228050800662618, |
| "grad_norm": 0.9404293298721313, |
| "learning_rate": 4.555246714947452e-06, |
| "loss": 0.6556, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.2236333517393705, |
| "grad_norm": 0.9302054643630981, |
| "learning_rate": 4.554620413454757e-06, |
| "loss": 0.6487, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.2244616234124792, |
| "grad_norm": 0.9029037952423096, |
| "learning_rate": 4.553993714412166e-06, |
| "loss": 0.6466, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.2252898950855882, |
| "grad_norm": 1.2422658205032349, |
| "learning_rate": 4.553366617940938e-06, |
| "loss": 0.6329, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.226118166758697, |
| "grad_norm": 0.9293041825294495, |
| "learning_rate": 4.552739124162411e-06, |
| "loss": 0.6396, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.2269464384318056, |
| "grad_norm": 0.8815674185752869, |
| "learning_rate": 4.552111233197999e-06, |
| "loss": 0.6695, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.2277747101049143, |
| "grad_norm": 0.894963800907135, |
| "learning_rate": 4.551482945169191e-06, |
| "loss": 0.6204, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.2286029817780233, |
| "grad_norm": 0.9117598533630371, |
| "learning_rate": 4.550854260197559e-06, |
| "loss": 0.6382, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.229431253451132, |
| "grad_norm": 0.8815339207649231, |
| "learning_rate": 4.550225178404744e-06, |
| "loss": 0.6396, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.2302595251242407, |
| "grad_norm": 0.8825898766517639, |
| "learning_rate": 4.54959569991247e-06, |
| "loss": 0.652, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.2310877967973495, |
| "grad_norm": 0.9561044573783875, |
| "learning_rate": 4.548965824842534e-06, |
| "loss": 0.6524, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.2319160684704582, |
| "grad_norm": 0.9169737696647644, |
| "learning_rate": 4.548335553316811e-06, |
| "loss": 0.6532, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.2327443401435672, |
| "grad_norm": 0.9096890687942505, |
| "learning_rate": 4.5477048854572524e-06, |
| "loss": 0.6302, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.2335726118166759, |
| "grad_norm": 0.9441587328910828, |
| "learning_rate": 4.547073821385888e-06, |
| "loss": 0.6401, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.2344008834897846, |
| "grad_norm": 0.9128026366233826, |
| "learning_rate": 4.54644236122482e-06, |
| "loss": 0.6452, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.2352291551628933, |
| "grad_norm": 0.9111308455467224, |
| "learning_rate": 4.545810505096233e-06, |
| "loss": 0.6346, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.2360574268360023, |
| "grad_norm": 0.9101661443710327, |
| "learning_rate": 4.545178253122384e-06, |
| "loss": 0.6446, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.236885698509111, |
| "grad_norm": 0.9132974743843079, |
| "learning_rate": 4.544545605425607e-06, |
| "loss": 0.6372, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.2377139701822197, |
| "grad_norm": 0.9918434023857117, |
| "learning_rate": 4.543912562128316e-06, |
| "loss": 0.6318, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.2385422418553285, |
| "grad_norm": 0.9110086560249329, |
| "learning_rate": 4.543279123352996e-06, |
| "loss": 0.6341, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.2393705135284374, |
| "grad_norm": 0.9584184885025024, |
| "learning_rate": 4.542645289222214e-06, |
| "loss": 0.6368, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.2401987852015461, |
| "grad_norm": 0.9323190450668335, |
| "learning_rate": 4.542011059858609e-06, |
| "loss": 0.6599, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.2410270568746549, |
| "grad_norm": 0.9016517996788025, |
| "learning_rate": 4.541376435384899e-06, |
| "loss": 0.6496, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.2418553285477636, |
| "grad_norm": 0.8975968360900879, |
| "learning_rate": 4.540741415923878e-06, |
| "loss": 0.6486, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.2426836002208725, |
| "grad_norm": 0.9171814322471619, |
| "learning_rate": 4.540106001598418e-06, |
| "loss": 0.6434, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.2435118718939813, |
| "grad_norm": 0.8989073038101196, |
| "learning_rate": 4.539470192531463e-06, |
| "loss": 0.6293, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.24434014356709, |
| "grad_norm": 0.9193532466888428, |
| "learning_rate": 4.538833988846039e-06, |
| "loss": 0.629, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.2451684152401987, |
| "grad_norm": 0.8692934513092041, |
| "learning_rate": 4.538197390665242e-06, |
| "loss": 0.6546, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.2459966869133074, |
| "grad_norm": 0.9461122751235962, |
| "learning_rate": 4.537560398112251e-06, |
| "loss": 0.6484, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.2468249585864164, |
| "grad_norm": 0.9432145953178406, |
| "learning_rate": 4.536923011310317e-06, |
| "loss": 0.6454, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.2476532302595251, |
| "grad_norm": 0.9390096068382263, |
| "learning_rate": 4.536285230382767e-06, |
| "loss": 0.6444, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.2484815019326339, |
| "grad_norm": 0.9381594061851501, |
| "learning_rate": 4.535647055453007e-06, |
| "loss": 0.63, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.2493097736057428, |
| "grad_norm": 0.9019590020179749, |
| "learning_rate": 4.5350084866445195e-06, |
| "loss": 0.6173, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.2501380452788515, |
| "grad_norm": 0.8888704776763916, |
| "learning_rate": 4.534369524080858e-06, |
| "loss": 0.628, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.2509663169519603, |
| "grad_norm": 0.8706669211387634, |
| "learning_rate": 4.5337301678856595e-06, |
| "loss": 0.6573, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.251794588625069, |
| "grad_norm": 0.9508364796638489, |
| "learning_rate": 4.533090418182631e-06, |
| "loss": 0.6347, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.2526228602981777, |
| "grad_norm": 0.8919034004211426, |
| "learning_rate": 4.532450275095558e-06, |
| "loss": 0.6485, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.2534511319712867, |
| "grad_norm": 0.8912845253944397, |
| "learning_rate": 4.531809738748304e-06, |
| "loss": 0.6509, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.2542794036443954, |
| "grad_norm": 0.9199928641319275, |
| "learning_rate": 4.531168809264805e-06, |
| "loss": 0.645, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.2551076753175041, |
| "grad_norm": 0.923501193523407, |
| "learning_rate": 4.530527486769077e-06, |
| "loss": 0.6447, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.2559359469906128, |
| "grad_norm": 0.8973106145858765, |
| "learning_rate": 4.529885771385208e-06, |
| "loss": 0.6674, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.2567642186637218, |
| "grad_norm": 0.9002721309661865, |
| "learning_rate": 4.529243663237365e-06, |
| "loss": 0.6265, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.2575924903368305, |
| "grad_norm": 0.9033929705619812, |
| "learning_rate": 4.528601162449788e-06, |
| "loss": 0.6354, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.2584207620099392, |
| "grad_norm": 0.9078013896942139, |
| "learning_rate": 4.527958269146798e-06, |
| "loss": 0.6368, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.259249033683048, |
| "grad_norm": 0.8775674700737, |
| "learning_rate": 4.527314983452787e-06, |
| "loss": 0.6443, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.2600773053561567, |
| "grad_norm": 0.9218764305114746, |
| "learning_rate": 4.526671305492225e-06, |
| "loss": 0.6545, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.2609055770292656, |
| "grad_norm": 0.9059268832206726, |
| "learning_rate": 4.526027235389658e-06, |
| "loss": 0.6506, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.2617338487023744, |
| "grad_norm": 0.9096934795379639, |
| "learning_rate": 4.525382773269706e-06, |
| "loss": 0.6542, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.262562120375483, |
| "grad_norm": 0.8997412323951721, |
| "learning_rate": 4.5247379192570695e-06, |
| "loss": 0.6308, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.263390392048592, |
| "grad_norm": 0.8729543089866638, |
| "learning_rate": 4.524092673476519e-06, |
| "loss": 0.6499, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.2642186637217008, |
| "grad_norm": 0.8625515699386597, |
| "learning_rate": 4.5234470360529045e-06, |
| "loss": 0.6421, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.2650469353948095, |
| "grad_norm": 0.9196262359619141, |
| "learning_rate": 4.522801007111152e-06, |
| "loss": 0.635, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.2658752070679182, |
| "grad_norm": 0.9273878335952759, |
| "learning_rate": 4.52215458677626e-06, |
| "loss": 0.6354, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.266703478741027, |
| "grad_norm": 0.8949822187423706, |
| "learning_rate": 4.521507775173305e-06, |
| "loss": 0.6409, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.267531750414136, |
| "grad_norm": 0.9149484038352966, |
| "learning_rate": 4.5208605724274415e-06, |
| "loss": 0.6358, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.2683600220872446, |
| "grad_norm": 0.8670255541801453, |
| "learning_rate": 4.520212978663894e-06, |
| "loss": 0.6386, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.2691882937603534, |
| "grad_norm": 0.8867493867874146, |
| "learning_rate": 4.519564994007968e-06, |
| "loss": 0.6552, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.2700165654334623, |
| "grad_norm": 0.8947916030883789, |
| "learning_rate": 4.518916618585042e-06, |
| "loss": 0.6552, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.270844837106571, |
| "grad_norm": 0.9064925909042358, |
| "learning_rate": 4.51826785252057e-06, |
| "loss": 0.6446, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.2716731087796798, |
| "grad_norm": 0.9144765138626099, |
| "learning_rate": 4.517618695940082e-06, |
| "loss": 0.6302, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.2725013804527885, |
| "grad_norm": 0.9165725111961365, |
| "learning_rate": 4.516969148969186e-06, |
| "loss": 0.6332, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.2733296521258972, |
| "grad_norm": 0.9271799325942993, |
| "learning_rate": 4.51631921173356e-06, |
| "loss": 0.6545, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.274157923799006, |
| "grad_norm": 0.9447293877601624, |
| "learning_rate": 4.515668884358963e-06, |
| "loss": 0.6523, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.274986195472115, |
| "grad_norm": 0.9644302129745483, |
| "learning_rate": 4.515018166971227e-06, |
| "loss": 0.6497, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.2758144671452236, |
| "grad_norm": 0.867371678352356, |
| "learning_rate": 4.514367059696259e-06, |
| "loss": 0.6161, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.2766427388183323, |
| "grad_norm": 0.9276342391967773, |
| "learning_rate": 4.513715562660043e-06, |
| "loss": 0.6452, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.2774710104914413, |
| "grad_norm": 0.9901152849197388, |
| "learning_rate": 4.513063675988637e-06, |
| "loss": 0.6471, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.27829928216455, |
| "grad_norm": 0.9561542868614197, |
| "learning_rate": 4.512411399808175e-06, |
| "loss": 0.6473, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.2791275538376587, |
| "grad_norm": 0.9195652604103088, |
| "learning_rate": 4.511758734244867e-06, |
| "loss": 0.6669, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.2799558255107675, |
| "grad_norm": 0.9481717944145203, |
| "learning_rate": 4.511105679424998e-06, |
| "loss": 0.6251, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.2807840971838762, |
| "grad_norm": 0.9228855967521667, |
| "learning_rate": 4.510452235474926e-06, |
| "loss": 0.6235, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.2816123688569852, |
| "grad_norm": 0.9248308539390564, |
| "learning_rate": 4.509798402521088e-06, |
| "loss": 0.6421, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.2824406405300939, |
| "grad_norm": 0.8686515092849731, |
| "learning_rate": 4.509144180689995e-06, |
| "loss": 0.6296, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.2832689122032026, |
| "grad_norm": 0.8854945302009583, |
| "learning_rate": 4.508489570108231e-06, |
| "loss": 0.6308, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.2840971838763116, |
| "grad_norm": 0.9525130987167358, |
| "learning_rate": 4.507834570902459e-06, |
| "loss": 0.6408, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.2849254555494203, |
| "grad_norm": 0.9100849032402039, |
| "learning_rate": 4.507179183199414e-06, |
| "loss": 0.6216, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.285753727222529, |
| "grad_norm": 0.9318796992301941, |
| "learning_rate": 4.506523407125907e-06, |
| "loss": 0.6224, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.2865819988956377, |
| "grad_norm": 0.953236997127533, |
| "learning_rate": 4.505867242808826e-06, |
| "loss": 0.643, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.2874102705687465, |
| "grad_norm": 0.8913330435752869, |
| "learning_rate": 4.5052106903751315e-06, |
| "loss": 0.6455, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.2882385422418554, |
| "grad_norm": 0.9269417524337769, |
| "learning_rate": 4.504553749951861e-06, |
| "loss": 0.6276, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.2890668139149641, |
| "grad_norm": 0.8864873647689819, |
| "learning_rate": 4.503896421666126e-06, |
| "loss": 0.6367, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.2898950855880729, |
| "grad_norm": 0.9062260985374451, |
| "learning_rate": 4.503238705645114e-06, |
| "loss": 0.6318, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.2907233572611816, |
| "grad_norm": 0.9680311679840088, |
| "learning_rate": 4.5025806020160865e-06, |
| "loss": 0.6526, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.2915516289342905, |
| "grad_norm": 0.9309170842170715, |
| "learning_rate": 4.5019221109063795e-06, |
| "loss": 0.6352, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.2923799006073993, |
| "grad_norm": 0.9134782552719116, |
| "learning_rate": 4.5012632324434065e-06, |
| "loss": 0.6485, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.293208172280508, |
| "grad_norm": 0.9063471555709839, |
| "learning_rate": 4.500603966754653e-06, |
| "loss": 0.6373, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.2940364439536167, |
| "grad_norm": 0.9087017178535461, |
| "learning_rate": 4.499944313967681e-06, |
| "loss": 0.6391, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.2948647156267254, |
| "grad_norm": 0.9258682727813721, |
| "learning_rate": 4.499284274210128e-06, |
| "loss": 0.6329, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.2956929872998344, |
| "grad_norm": 0.9042671918869019, |
| "learning_rate": 4.4986238476097055e-06, |
| "loss": 0.6485, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.2965212589729431, |
| "grad_norm": 0.9020970463752747, |
| "learning_rate": 4.4979630342941994e-06, |
| "loss": 0.6491, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.2973495306460519, |
| "grad_norm": 0.9361850023269653, |
| "learning_rate": 4.49730183439147e-06, |
| "loss": 0.6387, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.2981778023191608, |
| "grad_norm": 0.9135700464248657, |
| "learning_rate": 4.4966402480294545e-06, |
| "loss": 0.6231, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.2990060739922695, |
| "grad_norm": 0.9091359972953796, |
| "learning_rate": 4.495978275336164e-06, |
| "loss": 0.6369, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.2998343456653783, |
| "grad_norm": 0.8785583972930908, |
| "learning_rate": 4.4953159164396835e-06, |
| "loss": 0.6332, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.300662617338487, |
| "grad_norm": 0.8954260945320129, |
| "learning_rate": 4.494653171468173e-06, |
| "loss": 0.6364, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.3014908890115957, |
| "grad_norm": 0.917712926864624, |
| "learning_rate": 4.4939900405498675e-06, |
| "loss": 0.6318, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.3023191606847047, |
| "grad_norm": 0.9337782263755798, |
| "learning_rate": 4.4933265238130775e-06, |
| "loss": 0.6628, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.3031474323578134, |
| "grad_norm": 0.9178342223167419, |
| "learning_rate": 4.492662621386186e-06, |
| "loss": 0.6271, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.3039757040309221, |
| "grad_norm": 0.898301899433136, |
| "learning_rate": 4.4919983333976525e-06, |
| "loss": 0.6386, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.304803975704031, |
| "grad_norm": 0.8860357403755188, |
| "learning_rate": 4.491333659976011e-06, |
| "loss": 0.6553, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.3056322473771398, |
| "grad_norm": 0.9163990020751953, |
| "learning_rate": 4.4906686012498705e-06, |
| "loss": 0.6252, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.3064605190502485, |
| "grad_norm": 0.9047706127166748, |
| "learning_rate": 4.490003157347911e-06, |
| "loss": 0.6333, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.3072887907233572, |
| "grad_norm": 0.9197080135345459, |
| "learning_rate": 4.489337328398891e-06, |
| "loss": 0.6295, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.308117062396466, |
| "grad_norm": 0.9046565890312195, |
| "learning_rate": 4.488671114531643e-06, |
| "loss": 0.6356, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.3089453340695747, |
| "grad_norm": 0.8985337615013123, |
| "learning_rate": 4.488004515875072e-06, |
| "loss": 0.6349, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.3097736057426836, |
| "grad_norm": 0.9027533531188965, |
| "learning_rate": 4.4873375325581596e-06, |
| "loss": 0.6501, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.3106018774157924, |
| "grad_norm": 0.8807138204574585, |
| "learning_rate": 4.48667016470996e-06, |
| "loss": 0.6357, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.311430149088901, |
| "grad_norm": 0.8908722996711731, |
| "learning_rate": 4.486002412459603e-06, |
| "loss": 0.643, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.31225842076201, |
| "grad_norm": 0.8833957314491272, |
| "learning_rate": 4.4853342759362924e-06, |
| "loss": 0.6512, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.3130866924351188, |
| "grad_norm": 0.9173742532730103, |
| "learning_rate": 4.484665755269307e-06, |
| "loss": 0.6524, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.3139149641082275, |
| "grad_norm": 0.911236584186554, |
| "learning_rate": 4.483996850587998e-06, |
| "loss": 0.6526, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.3147432357813362, |
| "grad_norm": 0.893855094909668, |
| "learning_rate": 4.483327562021794e-06, |
| "loss": 0.5948, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.315571507454445, |
| "grad_norm": 0.8917174935340881, |
| "learning_rate": 4.482657889700194e-06, |
| "loss": 0.6299, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.316399779127554, |
| "grad_norm": 0.9155331254005432, |
| "learning_rate": 4.481987833752776e-06, |
| "loss": 0.6268, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.3172280508006626, |
| "grad_norm": 0.9582436680793762, |
| "learning_rate": 4.4813173943091865e-06, |
| "loss": 0.6231, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.3180563224737714, |
| "grad_norm": 0.9484363794326782, |
| "learning_rate": 4.480646571499151e-06, |
| "loss": 0.6289, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.3188845941468803, |
| "grad_norm": 0.9011389017105103, |
| "learning_rate": 4.479975365452468e-06, |
| "loss": 0.6333, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.319712865819989, |
| "grad_norm": 0.8992411494255066, |
| "learning_rate": 4.479303776299008e-06, |
| "loss": 0.6409, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.3205411374930978, |
| "grad_norm": 0.9245702028274536, |
| "learning_rate": 4.478631804168719e-06, |
| "loss": 0.6532, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.3213694091662065, |
| "grad_norm": 0.9116750359535217, |
| "learning_rate": 4.47795944919162e-06, |
| "loss": 0.6399, |
| "step": 1596 |
| }, |
| { |
| "epoch": 1.3221976808393152, |
| "grad_norm": 1.0552476644515991, |
| "learning_rate": 4.4772867114978056e-06, |
| "loss": 0.6648, |
| "step": 1597 |
| }, |
| { |
| "epoch": 1.323025952512424, |
| "grad_norm": 0.9082247018814087, |
| "learning_rate": 4.4766135912174455e-06, |
| "loss": 0.6459, |
| "step": 1598 |
| }, |
| { |
| "epoch": 1.323854224185533, |
| "grad_norm": 0.8959988355636597, |
| "learning_rate": 4.4759400884807805e-06, |
| "loss": 0.6457, |
| "step": 1599 |
| }, |
| { |
| "epoch": 1.3246824958586416, |
| "grad_norm": 0.9263050556182861, |
| "learning_rate": 4.475266203418127e-06, |
| "loss": 0.6255, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.3255107675317503, |
| "grad_norm": 0.895063579082489, |
| "learning_rate": 4.474591936159878e-06, |
| "loss": 0.638, |
| "step": 1601 |
| }, |
| { |
| "epoch": 1.3263390392048593, |
| "grad_norm": 0.9055312871932983, |
| "learning_rate": 4.473917286836496e-06, |
| "loss": 0.6089, |
| "step": 1602 |
| }, |
| { |
| "epoch": 1.327167310877968, |
| "grad_norm": 0.9017457962036133, |
| "learning_rate": 4.4732422555785196e-06, |
| "loss": 0.6345, |
| "step": 1603 |
| }, |
| { |
| "epoch": 1.3279955825510767, |
| "grad_norm": 0.9150136113166809, |
| "learning_rate": 4.4725668425165605e-06, |
| "loss": 0.645, |
| "step": 1604 |
| }, |
| { |
| "epoch": 1.3288238542241855, |
| "grad_norm": 0.8798539638519287, |
| "learning_rate": 4.4718910477813055e-06, |
| "loss": 0.636, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.3296521258972942, |
| "grad_norm": 0.9142611026763916, |
| "learning_rate": 4.471214871503514e-06, |
| "loss": 0.636, |
| "step": 1606 |
| }, |
| { |
| "epoch": 1.3304803975704032, |
| "grad_norm": 0.913762629032135, |
| "learning_rate": 4.470538313814021e-06, |
| "loss": 0.6358, |
| "step": 1607 |
| }, |
| { |
| "epoch": 1.3313086692435119, |
| "grad_norm": 0.9341318011283875, |
| "learning_rate": 4.469861374843734e-06, |
| "loss": 0.6668, |
| "step": 1608 |
| }, |
| { |
| "epoch": 1.3321369409166206, |
| "grad_norm": 0.8921927809715271, |
| "learning_rate": 4.469184054723632e-06, |
| "loss": 0.6542, |
| "step": 1609 |
| }, |
| { |
| "epoch": 1.3329652125897296, |
| "grad_norm": 0.8882393836975098, |
| "learning_rate": 4.468506353584773e-06, |
| "loss": 0.6438, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.3337934842628383, |
| "grad_norm": 0.9279580116271973, |
| "learning_rate": 4.467828271558286e-06, |
| "loss": 0.6199, |
| "step": 1611 |
| }, |
| { |
| "epoch": 1.334621755935947, |
| "grad_norm": 0.892322838306427, |
| "learning_rate": 4.467149808775371e-06, |
| "loss": 0.6422, |
| "step": 1612 |
| }, |
| { |
| "epoch": 1.3354500276090557, |
| "grad_norm": 0.8872779011726379, |
| "learning_rate": 4.466470965367306e-06, |
| "loss": 0.6403, |
| "step": 1613 |
| }, |
| { |
| "epoch": 1.3362782992821645, |
| "grad_norm": 0.9038597345352173, |
| "learning_rate": 4.465791741465439e-06, |
| "loss": 0.644, |
| "step": 1614 |
| }, |
| { |
| "epoch": 1.3371065709552734, |
| "grad_norm": 0.9336594343185425, |
| "learning_rate": 4.465112137201197e-06, |
| "loss": 0.6516, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.3379348426283821, |
| "grad_norm": 0.9163441061973572, |
| "learning_rate": 4.464432152706073e-06, |
| "loss": 0.626, |
| "step": 1616 |
| }, |
| { |
| "epoch": 1.3387631143014909, |
| "grad_norm": 0.943213701248169, |
| "learning_rate": 4.46375178811164e-06, |
| "loss": 0.6586, |
| "step": 1617 |
| }, |
| { |
| "epoch": 1.3395913859745996, |
| "grad_norm": 0.8931381106376648, |
| "learning_rate": 4.463071043549543e-06, |
| "loss": 0.6305, |
| "step": 1618 |
| }, |
| { |
| "epoch": 1.3404196576477085, |
| "grad_norm": 0.9050998091697693, |
| "learning_rate": 4.462389919151498e-06, |
| "loss": 0.6351, |
| "step": 1619 |
| }, |
| { |
| "epoch": 1.3412479293208173, |
| "grad_norm": 0.9165998101234436, |
| "learning_rate": 4.461708415049297e-06, |
| "loss": 0.6442, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.342076200993926, |
| "grad_norm": 0.9142123460769653, |
| "learning_rate": 4.461026531374804e-06, |
| "loss": 0.659, |
| "step": 1621 |
| }, |
| { |
| "epoch": 1.3429044726670347, |
| "grad_norm": 0.8937869668006897, |
| "learning_rate": 4.460344268259958e-06, |
| "loss": 0.6493, |
| "step": 1622 |
| }, |
| { |
| "epoch": 1.3437327443401434, |
| "grad_norm": 0.8987247347831726, |
| "learning_rate": 4.45966162583677e-06, |
| "loss": 0.6233, |
| "step": 1623 |
| }, |
| { |
| "epoch": 1.3445610160132524, |
| "grad_norm": 0.8980271220207214, |
| "learning_rate": 4.458978604237325e-06, |
| "loss": 0.6443, |
| "step": 1624 |
| }, |
| { |
| "epoch": 1.3453892876863611, |
| "grad_norm": 0.9287035465240479, |
| "learning_rate": 4.458295203593782e-06, |
| "loss": 0.6457, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.3462175593594699, |
| "grad_norm": 0.9521138072013855, |
| "learning_rate": 4.4576114240383725e-06, |
| "loss": 0.6719, |
| "step": 1626 |
| }, |
| { |
| "epoch": 1.3470458310325788, |
| "grad_norm": 0.9113882184028625, |
| "learning_rate": 4.4569272657034005e-06, |
| "loss": 0.6301, |
| "step": 1627 |
| }, |
| { |
| "epoch": 1.3478741027056875, |
| "grad_norm": 0.8929673433303833, |
| "learning_rate": 4.456242728721244e-06, |
| "loss": 0.6654, |
| "step": 1628 |
| }, |
| { |
| "epoch": 1.3487023743787963, |
| "grad_norm": 0.9024208784103394, |
| "learning_rate": 4.455557813224356e-06, |
| "loss": 0.6265, |
| "step": 1629 |
| }, |
| { |
| "epoch": 1.349530646051905, |
| "grad_norm": 0.9108287692070007, |
| "learning_rate": 4.454872519345261e-06, |
| "loss": 0.6589, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.3503589177250137, |
| "grad_norm": 0.8906548619270325, |
| "learning_rate": 4.454186847216556e-06, |
| "loss": 0.6199, |
| "step": 1631 |
| }, |
| { |
| "epoch": 1.3511871893981227, |
| "grad_norm": 0.9320098161697388, |
| "learning_rate": 4.453500796970913e-06, |
| "loss": 0.6457, |
| "step": 1632 |
| }, |
| { |
| "epoch": 1.3520154610712314, |
| "grad_norm": 0.9064130187034607, |
| "learning_rate": 4.452814368741076e-06, |
| "loss": 0.644, |
| "step": 1633 |
| }, |
| { |
| "epoch": 1.35284373274434, |
| "grad_norm": 0.8940742611885071, |
| "learning_rate": 4.452127562659864e-06, |
| "loss": 0.6419, |
| "step": 1634 |
| }, |
| { |
| "epoch": 1.353672004417449, |
| "grad_norm": 0.890978217124939, |
| "learning_rate": 4.4514403788601645e-06, |
| "loss": 0.6345, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.3545002760905578, |
| "grad_norm": 0.890136182308197, |
| "learning_rate": 4.4507528174749425e-06, |
| "loss": 0.6477, |
| "step": 1636 |
| }, |
| { |
| "epoch": 1.3553285477636665, |
| "grad_norm": 0.9398824572563171, |
| "learning_rate": 4.450064878637235e-06, |
| "loss": 0.6166, |
| "step": 1637 |
| }, |
| { |
| "epoch": 1.3561568194367752, |
| "grad_norm": 0.9006707072257996, |
| "learning_rate": 4.449376562480153e-06, |
| "loss": 0.6202, |
| "step": 1638 |
| }, |
| { |
| "epoch": 1.356985091109884, |
| "grad_norm": 0.8951538801193237, |
| "learning_rate": 4.448687869136878e-06, |
| "loss": 0.6382, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.3578133627829927, |
| "grad_norm": 0.8883405327796936, |
| "learning_rate": 4.447998798740665e-06, |
| "loss": 0.6117, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.3586416344561016, |
| "grad_norm": 0.8780978322029114, |
| "learning_rate": 4.447309351424843e-06, |
| "loss": 0.6181, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.3594699061292104, |
| "grad_norm": 0.8939958810806274, |
| "learning_rate": 4.446619527322815e-06, |
| "loss": 0.6493, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.360298177802319, |
| "grad_norm": 0.9094101786613464, |
| "learning_rate": 4.445929326568055e-06, |
| "loss": 0.6233, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.361126449475428, |
| "grad_norm": 0.9170175790786743, |
| "learning_rate": 4.445238749294109e-06, |
| "loss": 0.6453, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.3619547211485368, |
| "grad_norm": 0.9239453077316284, |
| "learning_rate": 4.444547795634598e-06, |
| "loss": 0.6279, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.3627829928216455, |
| "grad_norm": 0.8805254697799683, |
| "learning_rate": 4.443856465723216e-06, |
| "loss": 0.6387, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.3636112644947542, |
| "grad_norm": 0.933503270149231, |
| "learning_rate": 4.443164759693729e-06, |
| "loss": 0.6303, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.364439536167863, |
| "grad_norm": 0.928102970123291, |
| "learning_rate": 4.4424726776799745e-06, |
| "loss": 0.6371, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.365267807840972, |
| "grad_norm": 0.8979846835136414, |
| "learning_rate": 4.441780219815863e-06, |
| "loss": 0.6449, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.3660960795140806, |
| "grad_norm": 0.9022951126098633, |
| "learning_rate": 4.441087386235382e-06, |
| "loss": 0.6378, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.3669243511871894, |
| "grad_norm": 0.9205405116081238, |
| "learning_rate": 4.440394177072586e-06, |
| "loss": 0.6504, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.3677526228602983, |
| "grad_norm": 0.9068577885627747, |
| "learning_rate": 4.439700592461604e-06, |
| "loss": 0.649, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.368580894533407, |
| "grad_norm": 0.8923604488372803, |
| "learning_rate": 4.439006632536639e-06, |
| "loss": 0.6403, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.3694091662065158, |
| "grad_norm": 0.8762369155883789, |
| "learning_rate": 4.438312297431967e-06, |
| "loss": 0.6097, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.3702374378796245, |
| "grad_norm": 0.8747726678848267, |
| "learning_rate": 4.437617587281932e-06, |
| "loss": 0.6327, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.3710657095527332, |
| "grad_norm": 0.8822766542434692, |
| "learning_rate": 4.436922502220958e-06, |
| "loss": 0.6168, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.371893981225842, |
| "grad_norm": 0.8921328783035278, |
| "learning_rate": 4.436227042383535e-06, |
| "loss": 0.6637, |
| "step": 1657 |
| }, |
| { |
| "epoch": 1.372722252898951, |
| "grad_norm": 0.9359865188598633, |
| "learning_rate": 4.435531207904229e-06, |
| "loss": 0.6301, |
| "step": 1658 |
| }, |
| { |
| "epoch": 1.3735505245720596, |
| "grad_norm": 0.9351629614830017, |
| "learning_rate": 4.434834998917677e-06, |
| "loss": 0.6215, |
| "step": 1659 |
| }, |
| { |
| "epoch": 1.3743787962451683, |
| "grad_norm": 0.915340006351471, |
| "learning_rate": 4.434138415558588e-06, |
| "loss": 0.6303, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.3752070679182773, |
| "grad_norm": 0.9022544622421265, |
| "learning_rate": 4.433441457961746e-06, |
| "loss": 0.6409, |
| "step": 1661 |
| }, |
| { |
| "epoch": 1.376035339591386, |
| "grad_norm": 0.9443245530128479, |
| "learning_rate": 4.432744126262005e-06, |
| "loss": 0.6274, |
| "step": 1662 |
| }, |
| { |
| "epoch": 1.3768636112644947, |
| "grad_norm": 0.8980973362922668, |
| "learning_rate": 4.432046420594292e-06, |
| "loss": 0.6303, |
| "step": 1663 |
| }, |
| { |
| "epoch": 1.3776918829376035, |
| "grad_norm": 0.921539843082428, |
| "learning_rate": 4.431348341093608e-06, |
| "loss": 0.6274, |
| "step": 1664 |
| }, |
| { |
| "epoch": 1.3785201546107122, |
| "grad_norm": 0.8997762799263, |
| "learning_rate": 4.430649887895025e-06, |
| "loss": 0.6408, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.3793484262838211, |
| "grad_norm": 0.9214429259300232, |
| "learning_rate": 4.429951061133683e-06, |
| "loss": 0.6212, |
| "step": 1666 |
| }, |
| { |
| "epoch": 1.3801766979569299, |
| "grad_norm": 0.8804141879081726, |
| "learning_rate": 4.429251860944803e-06, |
| "loss": 0.6318, |
| "step": 1667 |
| }, |
| { |
| "epoch": 1.3810049696300386, |
| "grad_norm": 0.9276593923568726, |
| "learning_rate": 4.428552287463672e-06, |
| "loss": 0.6327, |
| "step": 1668 |
| }, |
| { |
| "epoch": 1.3818332413031476, |
| "grad_norm": 0.9059844017028809, |
| "learning_rate": 4.427852340825651e-06, |
| "loss": 0.6437, |
| "step": 1669 |
| }, |
| { |
| "epoch": 1.3826615129762563, |
| "grad_norm": 0.9188094139099121, |
| "learning_rate": 4.4271520211661735e-06, |
| "loss": 0.6403, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.383489784649365, |
| "grad_norm": 0.9513197541236877, |
| "learning_rate": 4.426451328620744e-06, |
| "loss": 0.6404, |
| "step": 1671 |
| }, |
| { |
| "epoch": 1.3843180563224737, |
| "grad_norm": 0.8870213031768799, |
| "learning_rate": 4.425750263324942e-06, |
| "loss": 0.6426, |
| "step": 1672 |
| }, |
| { |
| "epoch": 1.3851463279955825, |
| "grad_norm": 0.8709429502487183, |
| "learning_rate": 4.425048825414414e-06, |
| "loss": 0.6452, |
| "step": 1673 |
| }, |
| { |
| "epoch": 1.3859745996686914, |
| "grad_norm": 0.911976158618927, |
| "learning_rate": 4.424347015024884e-06, |
| "loss": 0.6344, |
| "step": 1674 |
| }, |
| { |
| "epoch": 1.3868028713418001, |
| "grad_norm": 0.8740667104721069, |
| "learning_rate": 4.4236448322921445e-06, |
| "loss": 0.6209, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.3876311430149089, |
| "grad_norm": 0.9136989116668701, |
| "learning_rate": 4.422942277352063e-06, |
| "loss": 0.6374, |
| "step": 1676 |
| }, |
| { |
| "epoch": 1.3884594146880176, |
| "grad_norm": 0.8812222480773926, |
| "learning_rate": 4.4222393503405754e-06, |
| "loss": 0.6168, |
| "step": 1677 |
| }, |
| { |
| "epoch": 1.3892876863611265, |
| "grad_norm": 0.9468962550163269, |
| "learning_rate": 4.421536051393694e-06, |
| "loss": 0.6239, |
| "step": 1678 |
| }, |
| { |
| "epoch": 1.3901159580342353, |
| "grad_norm": 0.906792163848877, |
| "learning_rate": 4.420832380647498e-06, |
| "loss": 0.6338, |
| "step": 1679 |
| }, |
| { |
| "epoch": 1.390944229707344, |
| "grad_norm": 0.9141556620597839, |
| "learning_rate": 4.420128338238141e-06, |
| "loss": 0.6277, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.3917725013804527, |
| "grad_norm": 0.8898590803146362, |
| "learning_rate": 4.419423924301851e-06, |
| "loss": 0.6147, |
| "step": 1681 |
| }, |
| { |
| "epoch": 1.3926007730535614, |
| "grad_norm": 0.9074358344078064, |
| "learning_rate": 4.418719138974924e-06, |
| "loss": 0.6409, |
| "step": 1682 |
| }, |
| { |
| "epoch": 1.3934290447266704, |
| "grad_norm": 0.9150132536888123, |
| "learning_rate": 4.41801398239373e-06, |
| "loss": 0.6117, |
| "step": 1683 |
| }, |
| { |
| "epoch": 1.3942573163997791, |
| "grad_norm": 0.9165640473365784, |
| "learning_rate": 4.417308454694709e-06, |
| "loss": 0.6241, |
| "step": 1684 |
| }, |
| { |
| "epoch": 1.3950855880728878, |
| "grad_norm": 0.9284071922302246, |
| "learning_rate": 4.4166025560143756e-06, |
| "loss": 0.6273, |
| "step": 1685 |
| }, |
| { |
| "epoch": 1.3959138597459968, |
| "grad_norm": 0.8834767937660217, |
| "learning_rate": 4.415896286489314e-06, |
| "loss": 0.6124, |
| "step": 1686 |
| }, |
| { |
| "epoch": 1.3967421314191055, |
| "grad_norm": 0.937128484249115, |
| "learning_rate": 4.4151896462561805e-06, |
| "loss": 0.645, |
| "step": 1687 |
| }, |
| { |
| "epoch": 1.3975704030922143, |
| "grad_norm": 0.926329493522644, |
| "learning_rate": 4.414482635451704e-06, |
| "loss": 0.5993, |
| "step": 1688 |
| }, |
| { |
| "epoch": 1.398398674765323, |
| "grad_norm": 0.9097526669502258, |
| "learning_rate": 4.413775254212683e-06, |
| "loss": 0.6356, |
| "step": 1689 |
| }, |
| { |
| "epoch": 1.3992269464384317, |
| "grad_norm": 1.0116560459136963, |
| "learning_rate": 4.413067502675992e-06, |
| "loss": 0.6298, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.4000552181115407, |
| "grad_norm": 0.9532334804534912, |
| "learning_rate": 4.412359380978571e-06, |
| "loss": 0.6401, |
| "step": 1691 |
| }, |
| { |
| "epoch": 1.4008834897846494, |
| "grad_norm": 0.9269129037857056, |
| "learning_rate": 4.4116508892574375e-06, |
| "loss": 0.6215, |
| "step": 1692 |
| }, |
| { |
| "epoch": 1.401711761457758, |
| "grad_norm": 0.9056585431098938, |
| "learning_rate": 4.410942027649677e-06, |
| "loss": 0.6203, |
| "step": 1693 |
| }, |
| { |
| "epoch": 1.402540033130867, |
| "grad_norm": 0.9435274004936218, |
| "learning_rate": 4.410232796292447e-06, |
| "loss": 0.6315, |
| "step": 1694 |
| }, |
| { |
| "epoch": 1.4033683048039758, |
| "grad_norm": 0.8925377726554871, |
| "learning_rate": 4.409523195322979e-06, |
| "loss": 0.6054, |
| "step": 1695 |
| }, |
| { |
| "epoch": 1.4041965764770845, |
| "grad_norm": 0.933591902256012, |
| "learning_rate": 4.408813224878573e-06, |
| "loss": 0.6405, |
| "step": 1696 |
| }, |
| { |
| "epoch": 1.4050248481501932, |
| "grad_norm": 0.8659296035766602, |
| "learning_rate": 4.408102885096601e-06, |
| "loss": 0.6557, |
| "step": 1697 |
| }, |
| { |
| "epoch": 1.405853119823302, |
| "grad_norm": 0.9109735488891602, |
| "learning_rate": 4.407392176114509e-06, |
| "loss": 0.6287, |
| "step": 1698 |
| }, |
| { |
| "epoch": 1.4066813914964107, |
| "grad_norm": 0.9408119916915894, |
| "learning_rate": 4.40668109806981e-06, |
| "loss": 0.6418, |
| "step": 1699 |
| }, |
| { |
| "epoch": 1.4075096631695196, |
| "grad_norm": 0.9397682547569275, |
| "learning_rate": 4.405969651100093e-06, |
| "loss": 0.6345, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.4083379348426284, |
| "grad_norm": 0.8860097527503967, |
| "learning_rate": 4.405257835343015e-06, |
| "loss": 0.6376, |
| "step": 1701 |
| }, |
| { |
| "epoch": 1.409166206515737, |
| "grad_norm": 0.8886877298355103, |
| "learning_rate": 4.404545650936307e-06, |
| "loss": 0.6246, |
| "step": 1702 |
| }, |
| { |
| "epoch": 1.409994478188846, |
| "grad_norm": 0.9213271737098694, |
| "learning_rate": 4.40383309801777e-06, |
| "loss": 0.6563, |
| "step": 1703 |
| }, |
| { |
| "epoch": 1.4108227498619548, |
| "grad_norm": 0.9072388410568237, |
| "learning_rate": 4.403120176725276e-06, |
| "loss": 0.642, |
| "step": 1704 |
| }, |
| { |
| "epoch": 1.4116510215350635, |
| "grad_norm": 0.9467195272445679, |
| "learning_rate": 4.402406887196767e-06, |
| "loss": 0.6443, |
| "step": 1705 |
| }, |
| { |
| "epoch": 1.4124792932081722, |
| "grad_norm": 0.944360077381134, |
| "learning_rate": 4.401693229570261e-06, |
| "loss": 0.6544, |
| "step": 1706 |
| }, |
| { |
| "epoch": 1.413307564881281, |
| "grad_norm": 0.8722824454307556, |
| "learning_rate": 4.400979203983841e-06, |
| "loss": 0.6072, |
| "step": 1707 |
| }, |
| { |
| "epoch": 1.41413583655439, |
| "grad_norm": 0.9134342670440674, |
| "learning_rate": 4.4002648105756665e-06, |
| "loss": 0.6654, |
| "step": 1708 |
| }, |
| { |
| "epoch": 1.4149641082274986, |
| "grad_norm": 0.9005776643753052, |
| "learning_rate": 4.3995500494839656e-06, |
| "loss": 0.6153, |
| "step": 1709 |
| }, |
| { |
| "epoch": 1.4157923799006074, |
| "grad_norm": 0.8971237540245056, |
| "learning_rate": 4.398834920847037e-06, |
| "loss": 0.6315, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.4166206515737163, |
| "grad_norm": 0.9089283347129822, |
| "learning_rate": 4.398119424803252e-06, |
| "loss": 0.612, |
| "step": 1711 |
| }, |
| { |
| "epoch": 1.417448923246825, |
| "grad_norm": 0.907715916633606, |
| "learning_rate": 4.397403561491052e-06, |
| "loss": 0.6375, |
| "step": 1712 |
| }, |
| { |
| "epoch": 1.4182771949199338, |
| "grad_norm": 0.8920016288757324, |
| "learning_rate": 4.3966873310489505e-06, |
| "loss": 0.6504, |
| "step": 1713 |
| }, |
| { |
| "epoch": 1.4191054665930425, |
| "grad_norm": 0.8758318424224854, |
| "learning_rate": 4.395970733615531e-06, |
| "loss": 0.63, |
| "step": 1714 |
| }, |
| { |
| "epoch": 1.4199337382661512, |
| "grad_norm": 0.8907516598701477, |
| "learning_rate": 4.395253769329449e-06, |
| "loss": 0.6104, |
| "step": 1715 |
| }, |
| { |
| "epoch": 1.42076200993926, |
| "grad_norm": 0.8966229557991028, |
| "learning_rate": 4.39453643832943e-06, |
| "loss": 0.6296, |
| "step": 1716 |
| }, |
| { |
| "epoch": 1.421590281612369, |
| "grad_norm": 0.9029335975646973, |
| "learning_rate": 4.39381874075427e-06, |
| "loss": 0.6672, |
| "step": 1717 |
| }, |
| { |
| "epoch": 1.4224185532854776, |
| "grad_norm": 0.8990710377693176, |
| "learning_rate": 4.393100676742838e-06, |
| "loss": 0.6126, |
| "step": 1718 |
| }, |
| { |
| "epoch": 1.4232468249585863, |
| "grad_norm": 0.8805904388427734, |
| "learning_rate": 4.392382246434073e-06, |
| "loss": 0.6257, |
| "step": 1719 |
| }, |
| { |
| "epoch": 1.4240750966316953, |
| "grad_norm": 0.8973903656005859, |
| "learning_rate": 4.391663449966983e-06, |
| "loss": 0.5979, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.424903368304804, |
| "grad_norm": 0.8957810997962952, |
| "learning_rate": 4.39094428748065e-06, |
| "loss": 0.6653, |
| "step": 1721 |
| }, |
| { |
| "epoch": 1.4257316399779127, |
| "grad_norm": 0.9048615097999573, |
| "learning_rate": 4.390224759114224e-06, |
| "loss": 0.6306, |
| "step": 1722 |
| }, |
| { |
| "epoch": 1.4265599116510215, |
| "grad_norm": 0.895323634147644, |
| "learning_rate": 4.389504865006927e-06, |
| "loss": 0.6653, |
| "step": 1723 |
| }, |
| { |
| "epoch": 1.4273881833241302, |
| "grad_norm": 0.9001028537750244, |
| "learning_rate": 4.388784605298053e-06, |
| "loss": 0.6234, |
| "step": 1724 |
| }, |
| { |
| "epoch": 1.4282164549972391, |
| "grad_norm": 0.867677628993988, |
| "learning_rate": 4.388063980126966e-06, |
| "loss": 0.6412, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.4290447266703479, |
| "grad_norm": 0.8741756081581116, |
| "learning_rate": 4.387342989633097e-06, |
| "loss": 0.6171, |
| "step": 1726 |
| }, |
| { |
| "epoch": 1.4298729983434566, |
| "grad_norm": 0.8784465193748474, |
| "learning_rate": 4.386621633955955e-06, |
| "loss": 0.6219, |
| "step": 1727 |
| }, |
| { |
| "epoch": 1.4307012700165656, |
| "grad_norm": 0.9013152122497559, |
| "learning_rate": 4.3858999132351124e-06, |
| "loss": 0.637, |
| "step": 1728 |
| }, |
| { |
| "epoch": 1.4315295416896743, |
| "grad_norm": 0.8971224427223206, |
| "learning_rate": 4.385177827610218e-06, |
| "loss": 0.6288, |
| "step": 1729 |
| }, |
| { |
| "epoch": 1.432357813362783, |
| "grad_norm": 0.9458419680595398, |
| "learning_rate": 4.384455377220986e-06, |
| "loss": 0.6405, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.4331860850358917, |
| "grad_norm": 0.9465500116348267, |
| "learning_rate": 4.383732562207206e-06, |
| "loss": 0.6447, |
| "step": 1731 |
| }, |
| { |
| "epoch": 1.4340143567090005, |
| "grad_norm": 0.9431823492050171, |
| "learning_rate": 4.383009382708734e-06, |
| "loss": 0.6489, |
| "step": 1732 |
| }, |
| { |
| "epoch": 1.4348426283821094, |
| "grad_norm": 0.8999877572059631, |
| "learning_rate": 4.382285838865501e-06, |
| "loss": 0.6178, |
| "step": 1733 |
| }, |
| { |
| "epoch": 1.4356709000552181, |
| "grad_norm": 0.9128695130348206, |
| "learning_rate": 4.381561930817504e-06, |
| "loss": 0.6423, |
| "step": 1734 |
| }, |
| { |
| "epoch": 1.4364991717283269, |
| "grad_norm": 0.8949567079544067, |
| "learning_rate": 4.380837658704813e-06, |
| "loss": 0.6382, |
| "step": 1735 |
| }, |
| { |
| "epoch": 1.4373274434014356, |
| "grad_norm": 0.9008680582046509, |
| "learning_rate": 4.3801130226675685e-06, |
| "loss": 0.6508, |
| "step": 1736 |
| }, |
| { |
| "epoch": 1.4381557150745445, |
| "grad_norm": 0.90611732006073, |
| "learning_rate": 4.37938802284598e-06, |
| "loss": 0.6673, |
| "step": 1737 |
| }, |
| { |
| "epoch": 1.4389839867476533, |
| "grad_norm": 0.9072968363761902, |
| "learning_rate": 4.378662659380329e-06, |
| "loss": 0.6424, |
| "step": 1738 |
| }, |
| { |
| "epoch": 1.439812258420762, |
| "grad_norm": 0.9252943396568298, |
| "learning_rate": 4.377936932410966e-06, |
| "loss": 0.6336, |
| "step": 1739 |
| }, |
| { |
| "epoch": 1.4406405300938707, |
| "grad_norm": 0.87615567445755, |
| "learning_rate": 4.377210842078311e-06, |
| "loss": 0.6345, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.4414688017669794, |
| "grad_norm": 0.9328572154045105, |
| "learning_rate": 4.376484388522858e-06, |
| "loss": 0.6126, |
| "step": 1741 |
| }, |
| { |
| "epoch": 1.4422970734400884, |
| "grad_norm": 0.8869463801383972, |
| "learning_rate": 4.375757571885169e-06, |
| "loss": 0.6307, |
| "step": 1742 |
| }, |
| { |
| "epoch": 1.4431253451131971, |
| "grad_norm": 0.8959669470787048, |
| "learning_rate": 4.375030392305875e-06, |
| "loss": 0.6309, |
| "step": 1743 |
| }, |
| { |
| "epoch": 1.4439536167863058, |
| "grad_norm": 0.8859886527061462, |
| "learning_rate": 4.374302849925679e-06, |
| "loss": 0.6182, |
| "step": 1744 |
| }, |
| { |
| "epoch": 1.4447818884594148, |
| "grad_norm": 0.8727422952651978, |
| "learning_rate": 4.373574944885354e-06, |
| "loss": 0.6218, |
| "step": 1745 |
| }, |
| { |
| "epoch": 1.4456101601325235, |
| "grad_norm": 0.9092820882797241, |
| "learning_rate": 4.3728466773257414e-06, |
| "loss": 0.6315, |
| "step": 1746 |
| }, |
| { |
| "epoch": 1.4464384318056323, |
| "grad_norm": 0.888469398021698, |
| "learning_rate": 4.3721180473877555e-06, |
| "loss": 0.6049, |
| "step": 1747 |
| }, |
| { |
| "epoch": 1.447266703478741, |
| "grad_norm": 0.8818171620368958, |
| "learning_rate": 4.371389055212379e-06, |
| "loss": 0.6244, |
| "step": 1748 |
| }, |
| { |
| "epoch": 1.4480949751518497, |
| "grad_norm": 0.8827747106552124, |
| "learning_rate": 4.370659700940665e-06, |
| "loss": 0.6255, |
| "step": 1749 |
| }, |
| { |
| "epoch": 1.4489232468249587, |
| "grad_norm": 0.8968432545661926, |
| "learning_rate": 4.369929984713738e-06, |
| "loss": 0.6348, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.4497515184980674, |
| "grad_norm": 0.8865929841995239, |
| "learning_rate": 4.369199906672789e-06, |
| "loss": 0.6463, |
| "step": 1751 |
| }, |
| { |
| "epoch": 1.450579790171176, |
| "grad_norm": 0.8960037231445312, |
| "learning_rate": 4.368469466959083e-06, |
| "loss": 0.6572, |
| "step": 1752 |
| }, |
| { |
| "epoch": 1.451408061844285, |
| "grad_norm": 0.8849257230758667, |
| "learning_rate": 4.367738665713953e-06, |
| "loss": 0.6775, |
| "step": 1753 |
| }, |
| { |
| "epoch": 1.4522363335173938, |
| "grad_norm": 0.9201130867004395, |
| "learning_rate": 4.367007503078802e-06, |
| "loss": 0.6489, |
| "step": 1754 |
| }, |
| { |
| "epoch": 1.4530646051905025, |
| "grad_norm": 0.8754178285598755, |
| "learning_rate": 4.366275979195102e-06, |
| "loss": 0.6343, |
| "step": 1755 |
| }, |
| { |
| "epoch": 1.4538928768636112, |
| "grad_norm": 0.8744989037513733, |
| "learning_rate": 4.365544094204399e-06, |
| "loss": 0.6332, |
| "step": 1756 |
| }, |
| { |
| "epoch": 1.45472114853672, |
| "grad_norm": 0.8900396227836609, |
| "learning_rate": 4.364811848248303e-06, |
| "loss": 0.6622, |
| "step": 1757 |
| }, |
| { |
| "epoch": 1.4555494202098287, |
| "grad_norm": 0.9113397598266602, |
| "learning_rate": 4.364079241468499e-06, |
| "loss": 0.6253, |
| "step": 1758 |
| }, |
| { |
| "epoch": 1.4563776918829376, |
| "grad_norm": 0.9042403101921082, |
| "learning_rate": 4.3633462740067375e-06, |
| "loss": 0.6719, |
| "step": 1759 |
| }, |
| { |
| "epoch": 1.4572059635560464, |
| "grad_norm": 0.8962395191192627, |
| "learning_rate": 4.362612946004843e-06, |
| "loss": 0.627, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.458034235229155, |
| "grad_norm": 0.8955368995666504, |
| "learning_rate": 4.3618792576047054e-06, |
| "loss": 0.6331, |
| "step": 1761 |
| }, |
| { |
| "epoch": 1.458862506902264, |
| "grad_norm": 0.8628577589988708, |
| "learning_rate": 4.361145208948287e-06, |
| "loss": 0.6266, |
| "step": 1762 |
| }, |
| { |
| "epoch": 1.4596907785753728, |
| "grad_norm": 0.9271597862243652, |
| "learning_rate": 4.360410800177622e-06, |
| "loss": 0.6384, |
| "step": 1763 |
| }, |
| { |
| "epoch": 1.4605190502484815, |
| "grad_norm": 0.9368716478347778, |
| "learning_rate": 4.359676031434808e-06, |
| "loss": 0.632, |
| "step": 1764 |
| }, |
| { |
| "epoch": 1.4613473219215902, |
| "grad_norm": 0.8827003240585327, |
| "learning_rate": 4.358940902862019e-06, |
| "loss": 0.6362, |
| "step": 1765 |
| }, |
| { |
| "epoch": 1.462175593594699, |
| "grad_norm": 0.9115442037582397, |
| "learning_rate": 4.358205414601493e-06, |
| "loss": 0.6438, |
| "step": 1766 |
| }, |
| { |
| "epoch": 1.463003865267808, |
| "grad_norm": 0.923832356929779, |
| "learning_rate": 4.357469566795542e-06, |
| "loss": 0.6231, |
| "step": 1767 |
| }, |
| { |
| "epoch": 1.4638321369409166, |
| "grad_norm": 0.9251851439476013, |
| "learning_rate": 4.356733359586545e-06, |
| "loss": 0.625, |
| "step": 1768 |
| }, |
| { |
| "epoch": 1.4646604086140254, |
| "grad_norm": 0.9081542491912842, |
| "learning_rate": 4.35599679311695e-06, |
| "loss": 0.6513, |
| "step": 1769 |
| }, |
| { |
| "epoch": 1.4654886802871343, |
| "grad_norm": 0.9315375685691833, |
| "learning_rate": 4.355259867529278e-06, |
| "loss": 0.6347, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.466316951960243, |
| "grad_norm": 0.9111939668655396, |
| "learning_rate": 4.354522582966116e-06, |
| "loss": 0.6504, |
| "step": 1771 |
| }, |
| { |
| "epoch": 1.4671452236333518, |
| "grad_norm": 0.9330397844314575, |
| "learning_rate": 4.353784939570121e-06, |
| "loss": 0.6242, |
| "step": 1772 |
| }, |
| { |
| "epoch": 1.4679734953064605, |
| "grad_norm": 0.8857918977737427, |
| "learning_rate": 4.3530469374840226e-06, |
| "loss": 0.6354, |
| "step": 1773 |
| }, |
| { |
| "epoch": 1.4688017669795692, |
| "grad_norm": 0.9483981728553772, |
| "learning_rate": 4.352308576850615e-06, |
| "loss": 0.6276, |
| "step": 1774 |
| }, |
| { |
| "epoch": 1.4696300386526782, |
| "grad_norm": 0.9508888125419617, |
| "learning_rate": 4.351569857812764e-06, |
| "loss": 0.6362, |
| "step": 1775 |
| }, |
| { |
| "epoch": 1.4704583103257869, |
| "grad_norm": 0.9107303023338318, |
| "learning_rate": 4.3508307805134055e-06, |
| "loss": 0.6506, |
| "step": 1776 |
| }, |
| { |
| "epoch": 1.4712865819988956, |
| "grad_norm": 0.9065224528312683, |
| "learning_rate": 4.350091345095545e-06, |
| "loss": 0.6401, |
| "step": 1777 |
| }, |
| { |
| "epoch": 1.4721148536720043, |
| "grad_norm": 0.9157469868659973, |
| "learning_rate": 4.349351551702256e-06, |
| "loss": 0.6567, |
| "step": 1778 |
| }, |
| { |
| "epoch": 1.4729431253451133, |
| "grad_norm": 0.8782876133918762, |
| "learning_rate": 4.34861140047668e-06, |
| "loss": 0.6394, |
| "step": 1779 |
| }, |
| { |
| "epoch": 1.473771397018222, |
| "grad_norm": 0.8990570306777954, |
| "learning_rate": 4.347870891562032e-06, |
| "loss": 0.6583, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.4745996686913307, |
| "grad_norm": 0.8911288380622864, |
| "learning_rate": 4.34713002510159e-06, |
| "loss": 0.6335, |
| "step": 1781 |
| }, |
| { |
| "epoch": 1.4754279403644395, |
| "grad_norm": 0.8673627376556396, |
| "learning_rate": 4.346388801238708e-06, |
| "loss": 0.6371, |
| "step": 1782 |
| }, |
| { |
| "epoch": 1.4762562120375482, |
| "grad_norm": 0.8823532462120056, |
| "learning_rate": 4.345647220116805e-06, |
| "loss": 0.6303, |
| "step": 1783 |
| }, |
| { |
| "epoch": 1.4770844837106571, |
| "grad_norm": 0.9219037890434265, |
| "learning_rate": 4.34490528187937e-06, |
| "loss": 0.6254, |
| "step": 1784 |
| }, |
| { |
| "epoch": 1.4779127553837659, |
| "grad_norm": 0.9348562955856323, |
| "learning_rate": 4.344162986669961e-06, |
| "loss": 0.626, |
| "step": 1785 |
| }, |
| { |
| "epoch": 1.4787410270568746, |
| "grad_norm": 0.9027411341667175, |
| "learning_rate": 4.343420334632204e-06, |
| "loss": 0.6319, |
| "step": 1786 |
| }, |
| { |
| "epoch": 1.4795692987299836, |
| "grad_norm": 0.9026404619216919, |
| "learning_rate": 4.342677325909798e-06, |
| "loss": 0.6317, |
| "step": 1787 |
| }, |
| { |
| "epoch": 1.4803975704030923, |
| "grad_norm": 0.9441322088241577, |
| "learning_rate": 4.341933960646505e-06, |
| "loss": 0.6153, |
| "step": 1788 |
| }, |
| { |
| "epoch": 1.481225842076201, |
| "grad_norm": 0.916111409664154, |
| "learning_rate": 4.341190238986162e-06, |
| "loss": 0.6253, |
| "step": 1789 |
| }, |
| { |
| "epoch": 1.4820541137493097, |
| "grad_norm": 0.905154824256897, |
| "learning_rate": 4.340446161072671e-06, |
| "loss": 0.6433, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.4828823854224185, |
| "grad_norm": 0.8921060562133789, |
| "learning_rate": 4.339701727050004e-06, |
| "loss": 0.6401, |
| "step": 1791 |
| }, |
| { |
| "epoch": 1.4837106570955274, |
| "grad_norm": 0.8754278421401978, |
| "learning_rate": 4.338956937062203e-06, |
| "loss": 0.6436, |
| "step": 1792 |
| }, |
| { |
| "epoch": 1.4845389287686361, |
| "grad_norm": 0.9079127311706543, |
| "learning_rate": 4.338211791253376e-06, |
| "loss": 0.6388, |
| "step": 1793 |
| }, |
| { |
| "epoch": 1.4853672004417449, |
| "grad_norm": 0.9441506266593933, |
| "learning_rate": 4.337466289767704e-06, |
| "loss": 0.63, |
| "step": 1794 |
| }, |
| { |
| "epoch": 1.4861954721148538, |
| "grad_norm": 0.8751823306083679, |
| "learning_rate": 4.336720432749433e-06, |
| "loss": 0.6244, |
| "step": 1795 |
| }, |
| { |
| "epoch": 1.4870237437879625, |
| "grad_norm": 0.935542106628418, |
| "learning_rate": 4.33597422034288e-06, |
| "loss": 0.6439, |
| "step": 1796 |
| }, |
| { |
| "epoch": 1.4878520154610713, |
| "grad_norm": 0.9206563830375671, |
| "learning_rate": 4.3352276526924295e-06, |
| "loss": 0.6377, |
| "step": 1797 |
| }, |
| { |
| "epoch": 1.48868028713418, |
| "grad_norm": 0.9200771450996399, |
| "learning_rate": 4.334480729942537e-06, |
| "loss": 0.656, |
| "step": 1798 |
| }, |
| { |
| "epoch": 1.4895085588072887, |
| "grad_norm": 0.8923482298851013, |
| "learning_rate": 4.3337334522377235e-06, |
| "loss": 0.6369, |
| "step": 1799 |
| }, |
| { |
| "epoch": 1.4903368304803974, |
| "grad_norm": 0.9128824472427368, |
| "learning_rate": 4.332985819722582e-06, |
| "loss": 0.6437, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.4911651021535064, |
| "grad_norm": 0.8780473470687866, |
| "learning_rate": 4.3322378325417705e-06, |
| "loss": 0.6331, |
| "step": 1801 |
| }, |
| { |
| "epoch": 1.4919933738266151, |
| "grad_norm": 0.9012449979782104, |
| "learning_rate": 4.3314894908400175e-06, |
| "loss": 0.619, |
| "step": 1802 |
| }, |
| { |
| "epoch": 1.4928216454997238, |
| "grad_norm": 0.8845897912979126, |
| "learning_rate": 4.3307407947621216e-06, |
| "loss": 0.637, |
| "step": 1803 |
| }, |
| { |
| "epoch": 1.4936499171728328, |
| "grad_norm": 0.901826798915863, |
| "learning_rate": 4.329991744452948e-06, |
| "loss": 0.6248, |
| "step": 1804 |
| }, |
| { |
| "epoch": 1.4944781888459415, |
| "grad_norm": 0.8989822268486023, |
| "learning_rate": 4.32924234005743e-06, |
| "loss": 0.6492, |
| "step": 1805 |
| }, |
| { |
| "epoch": 1.4953064605190503, |
| "grad_norm": 0.9251338243484497, |
| "learning_rate": 4.328492581720573e-06, |
| "loss": 0.6165, |
| "step": 1806 |
| }, |
| { |
| "epoch": 1.496134732192159, |
| "grad_norm": 0.9007204174995422, |
| "learning_rate": 4.327742469587445e-06, |
| "loss": 0.6217, |
| "step": 1807 |
| }, |
| { |
| "epoch": 1.4969630038652677, |
| "grad_norm": 0.9013358354568481, |
| "learning_rate": 4.326992003803187e-06, |
| "loss": 0.6204, |
| "step": 1808 |
| }, |
| { |
| "epoch": 1.4977912755383767, |
| "grad_norm": 0.889497697353363, |
| "learning_rate": 4.326241184513008e-06, |
| "loss": 0.6242, |
| "step": 1809 |
| }, |
| { |
| "epoch": 1.4986195472114854, |
| "grad_norm": 0.9591777920722961, |
| "learning_rate": 4.325490011862184e-06, |
| "loss": 0.6158, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.499447818884594, |
| "grad_norm": 0.941960871219635, |
| "learning_rate": 4.324738485996058e-06, |
| "loss": 0.6225, |
| "step": 1811 |
| }, |
| { |
| "epoch": 1.500276090557703, |
| "grad_norm": 0.901639997959137, |
| "learning_rate": 4.323986607060047e-06, |
| "loss": 0.6336, |
| "step": 1812 |
| }, |
| { |
| "epoch": 1.5011043622308118, |
| "grad_norm": 0.8986237645149231, |
| "learning_rate": 4.32323437519963e-06, |
| "loss": 0.6313, |
| "step": 1813 |
| }, |
| { |
| "epoch": 1.5019326339039205, |
| "grad_norm": 0.9180042147636414, |
| "learning_rate": 4.322481790560356e-06, |
| "loss": 0.6262, |
| "step": 1814 |
| }, |
| { |
| "epoch": 1.5027609055770292, |
| "grad_norm": 0.9090250134468079, |
| "learning_rate": 4.321728853287846e-06, |
| "loss": 0.6347, |
| "step": 1815 |
| }, |
| { |
| "epoch": 1.503589177250138, |
| "grad_norm": 0.9318317174911499, |
| "learning_rate": 4.320975563527785e-06, |
| "loss": 0.597, |
| "step": 1816 |
| }, |
| { |
| "epoch": 1.5044174489232467, |
| "grad_norm": 0.8896520137786865, |
| "learning_rate": 4.320221921425926e-06, |
| "loss": 0.6428, |
| "step": 1817 |
| }, |
| { |
| "epoch": 1.5052457205963556, |
| "grad_norm": 0.9162147641181946, |
| "learning_rate": 4.3194679271280935e-06, |
| "loss": 0.6173, |
| "step": 1818 |
| }, |
| { |
| "epoch": 1.5060739922694644, |
| "grad_norm": 1.0285958051681519, |
| "learning_rate": 4.318713580780179e-06, |
| "loss": 0.6303, |
| "step": 1819 |
| }, |
| { |
| "epoch": 1.5069022639425733, |
| "grad_norm": 0.9337003231048584, |
| "learning_rate": 4.317958882528139e-06, |
| "loss": 0.6248, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.507730535615682, |
| "grad_norm": 0.8943479657173157, |
| "learning_rate": 4.317203832518003e-06, |
| "loss": 0.632, |
| "step": 1821 |
| }, |
| { |
| "epoch": 1.5085588072887908, |
| "grad_norm": 0.9251554012298584, |
| "learning_rate": 4.316448430895866e-06, |
| "loss": 0.6373, |
| "step": 1822 |
| }, |
| { |
| "epoch": 1.5093870789618995, |
| "grad_norm": 0.8673262596130371, |
| "learning_rate": 4.315692677807889e-06, |
| "loss": 0.6275, |
| "step": 1823 |
| }, |
| { |
| "epoch": 1.5102153506350082, |
| "grad_norm": 0.8874304890632629, |
| "learning_rate": 4.314936573400305e-06, |
| "loss": 0.6446, |
| "step": 1824 |
| }, |
| { |
| "epoch": 1.511043622308117, |
| "grad_norm": 0.9122821688652039, |
| "learning_rate": 4.314180117819412e-06, |
| "loss": 0.6517, |
| "step": 1825 |
| }, |
| { |
| "epoch": 1.5118718939812257, |
| "grad_norm": 0.87696373462677, |
| "learning_rate": 4.313423311211578e-06, |
| "loss": 0.6215, |
| "step": 1826 |
| }, |
| { |
| "epoch": 1.5127001656543346, |
| "grad_norm": 0.887030303478241, |
| "learning_rate": 4.312666153723238e-06, |
| "loss": 0.6592, |
| "step": 1827 |
| }, |
| { |
| "epoch": 1.5135284373274434, |
| "grad_norm": 0.9180002808570862, |
| "learning_rate": 4.3119086455008956e-06, |
| "loss": 0.6562, |
| "step": 1828 |
| }, |
| { |
| "epoch": 1.5143567090005523, |
| "grad_norm": 0.9204010367393494, |
| "learning_rate": 4.311150786691119e-06, |
| "loss": 0.6323, |
| "step": 1829 |
| }, |
| { |
| "epoch": 1.515184980673661, |
| "grad_norm": 0.9182308316230774, |
| "learning_rate": 4.310392577440549e-06, |
| "loss": 0.638, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.5160132523467698, |
| "grad_norm": 0.8942297697067261, |
| "learning_rate": 4.309634017895893e-06, |
| "loss": 0.6485, |
| "step": 1831 |
| }, |
| { |
| "epoch": 1.5168415240198785, |
| "grad_norm": 0.9294339418411255, |
| "learning_rate": 4.308875108203922e-06, |
| "loss": 0.6351, |
| "step": 1832 |
| }, |
| { |
| "epoch": 1.5176697956929872, |
| "grad_norm": 0.8703927397727966, |
| "learning_rate": 4.30811584851148e-06, |
| "loss": 0.6293, |
| "step": 1833 |
| }, |
| { |
| "epoch": 1.518498067366096, |
| "grad_norm": 0.8861719965934753, |
| "learning_rate": 4.3073562389654764e-06, |
| "loss": 0.6121, |
| "step": 1834 |
| }, |
| { |
| "epoch": 1.5193263390392049, |
| "grad_norm": 0.9109041094779968, |
| "learning_rate": 4.306596279712889e-06, |
| "loss": 0.6431, |
| "step": 1835 |
| }, |
| { |
| "epoch": 1.5201546107123136, |
| "grad_norm": 0.8826959133148193, |
| "learning_rate": 4.305835970900762e-06, |
| "loss": 0.6422, |
| "step": 1836 |
| }, |
| { |
| "epoch": 1.5209828823854226, |
| "grad_norm": 0.9116647839546204, |
| "learning_rate": 4.305075312676209e-06, |
| "loss": 0.6416, |
| "step": 1837 |
| }, |
| { |
| "epoch": 1.5218111540585313, |
| "grad_norm": 0.8709720373153687, |
| "learning_rate": 4.304314305186409e-06, |
| "loss": 0.6175, |
| "step": 1838 |
| }, |
| { |
| "epoch": 1.52263942573164, |
| "grad_norm": 0.89812833070755, |
| "learning_rate": 4.303552948578611e-06, |
| "loss": 0.6068, |
| "step": 1839 |
| }, |
| { |
| "epoch": 1.5234676974047487, |
| "grad_norm": 0.896033763885498, |
| "learning_rate": 4.30279124300013e-06, |
| "loss": 0.6087, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.5242959690778575, |
| "grad_norm": 0.8810445666313171, |
| "learning_rate": 4.302029188598348e-06, |
| "loss": 0.6195, |
| "step": 1841 |
| }, |
| { |
| "epoch": 1.5251242407509662, |
| "grad_norm": 0.9204195737838745, |
| "learning_rate": 4.301266785520718e-06, |
| "loss": 0.6454, |
| "step": 1842 |
| }, |
| { |
| "epoch": 1.5259525124240751, |
| "grad_norm": 0.8809438943862915, |
| "learning_rate": 4.300504033914754e-06, |
| "loss": 0.6493, |
| "step": 1843 |
| }, |
| { |
| "epoch": 1.5267807840971839, |
| "grad_norm": 0.9505495429039001, |
| "learning_rate": 4.299740933928045e-06, |
| "loss": 0.6368, |
| "step": 1844 |
| }, |
| { |
| "epoch": 1.5276090557702926, |
| "grad_norm": 0.911090075969696, |
| "learning_rate": 4.298977485708243e-06, |
| "loss": 0.6375, |
| "step": 1845 |
| }, |
| { |
| "epoch": 1.5284373274434015, |
| "grad_norm": 0.9050395488739014, |
| "learning_rate": 4.2982136894030655e-06, |
| "loss": 0.654, |
| "step": 1846 |
| }, |
| { |
| "epoch": 1.5292655991165103, |
| "grad_norm": 0.9020348787307739, |
| "learning_rate": 4.297449545160303e-06, |
| "loss": 0.6317, |
| "step": 1847 |
| }, |
| { |
| "epoch": 1.530093870789619, |
| "grad_norm": 0.9083995223045349, |
| "learning_rate": 4.2966850531278086e-06, |
| "loss": 0.6366, |
| "step": 1848 |
| }, |
| { |
| "epoch": 1.5309221424627277, |
| "grad_norm": 0.8798359632492065, |
| "learning_rate": 4.295920213453504e-06, |
| "loss": 0.6262, |
| "step": 1849 |
| }, |
| { |
| "epoch": 1.5317504141358365, |
| "grad_norm": 0.9002899527549744, |
| "learning_rate": 4.295155026285382e-06, |
| "loss": 0.6275, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.5325786858089452, |
| "grad_norm": 0.903822660446167, |
| "learning_rate": 4.2943894917714946e-06, |
| "loss": 0.6143, |
| "step": 1851 |
| }, |
| { |
| "epoch": 1.5334069574820541, |
| "grad_norm": 0.9259656667709351, |
| "learning_rate": 4.293623610059968e-06, |
| "loss": 0.6469, |
| "step": 1852 |
| }, |
| { |
| "epoch": 1.5342352291551629, |
| "grad_norm": 0.9102208614349365, |
| "learning_rate": 4.292857381298993e-06, |
| "loss": 0.6321, |
| "step": 1853 |
| }, |
| { |
| "epoch": 1.5350635008282718, |
| "grad_norm": 0.9360750317573547, |
| "learning_rate": 4.292090805636827e-06, |
| "loss": 0.6324, |
| "step": 1854 |
| }, |
| { |
| "epoch": 1.5358917725013805, |
| "grad_norm": 0.907356321811676, |
| "learning_rate": 4.291323883221797e-06, |
| "loss": 0.6199, |
| "step": 1855 |
| }, |
| { |
| "epoch": 1.5367200441744893, |
| "grad_norm": 0.8895739316940308, |
| "learning_rate": 4.290556614202292e-06, |
| "loss": 0.6247, |
| "step": 1856 |
| }, |
| { |
| "epoch": 1.537548315847598, |
| "grad_norm": 0.8984767198562622, |
| "learning_rate": 4.2897889987267755e-06, |
| "loss": 0.6156, |
| "step": 1857 |
| }, |
| { |
| "epoch": 1.5383765875207067, |
| "grad_norm": 0.9505225419998169, |
| "learning_rate": 4.289021036943771e-06, |
| "loss": 0.627, |
| "step": 1858 |
| }, |
| { |
| "epoch": 1.5392048591938154, |
| "grad_norm": 0.876225471496582, |
| "learning_rate": 4.288252729001874e-06, |
| "loss": 0.624, |
| "step": 1859 |
| }, |
| { |
| "epoch": 1.5400331308669244, |
| "grad_norm": 0.8987903594970703, |
| "learning_rate": 4.287484075049743e-06, |
| "loss": 0.5969, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.5408614025400331, |
| "grad_norm": 0.8898966908454895, |
| "learning_rate": 4.286715075236106e-06, |
| "loss": 0.6165, |
| "step": 1861 |
| }, |
| { |
| "epoch": 1.541689674213142, |
| "grad_norm": 0.9726698994636536, |
| "learning_rate": 4.285945729709757e-06, |
| "loss": 0.641, |
| "step": 1862 |
| }, |
| { |
| "epoch": 1.5425179458862508, |
| "grad_norm": 0.9130645394325256, |
| "learning_rate": 4.285176038619559e-06, |
| "loss": 0.6279, |
| "step": 1863 |
| }, |
| { |
| "epoch": 1.5433462175593595, |
| "grad_norm": 0.9202905893325806, |
| "learning_rate": 4.284406002114438e-06, |
| "loss": 0.6433, |
| "step": 1864 |
| }, |
| { |
| "epoch": 1.5441744892324683, |
| "grad_norm": 0.8697655200958252, |
| "learning_rate": 4.28363562034339e-06, |
| "loss": 0.6117, |
| "step": 1865 |
| }, |
| { |
| "epoch": 1.545002760905577, |
| "grad_norm": 0.8877424597740173, |
| "learning_rate": 4.282864893455476e-06, |
| "loss": 0.6381, |
| "step": 1866 |
| }, |
| { |
| "epoch": 1.5458310325786857, |
| "grad_norm": 0.914487898349762, |
| "learning_rate": 4.282093821599824e-06, |
| "loss": 0.6462, |
| "step": 1867 |
| }, |
| { |
| "epoch": 1.5466593042517944, |
| "grad_norm": 0.8734384775161743, |
| "learning_rate": 4.281322404925631e-06, |
| "loss": 0.6522, |
| "step": 1868 |
| }, |
| { |
| "epoch": 1.5474875759249034, |
| "grad_norm": 0.8768365383148193, |
| "learning_rate": 4.280550643582158e-06, |
| "loss": 0.6432, |
| "step": 1869 |
| }, |
| { |
| "epoch": 1.548315847598012, |
| "grad_norm": 0.896403431892395, |
| "learning_rate": 4.279778537718733e-06, |
| "loss": 0.6322, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.549144119271121, |
| "grad_norm": 0.8916926383972168, |
| "learning_rate": 4.2790060874847525e-06, |
| "loss": 0.6463, |
| "step": 1871 |
| }, |
| { |
| "epoch": 1.5499723909442298, |
| "grad_norm": 0.892316997051239, |
| "learning_rate": 4.278233293029677e-06, |
| "loss": 0.643, |
| "step": 1872 |
| }, |
| { |
| "epoch": 1.5508006626173385, |
| "grad_norm": 0.8984261751174927, |
| "learning_rate": 4.277460154503036e-06, |
| "loss": 0.6457, |
| "step": 1873 |
| }, |
| { |
| "epoch": 1.5516289342904472, |
| "grad_norm": 0.9160579442977905, |
| "learning_rate": 4.276686672054424e-06, |
| "loss": 0.6255, |
| "step": 1874 |
| }, |
| { |
| "epoch": 1.552457205963556, |
| "grad_norm": 0.9075242877006531, |
| "learning_rate": 4.275912845833504e-06, |
| "loss": 0.6402, |
| "step": 1875 |
| }, |
| { |
| "epoch": 1.5532854776366647, |
| "grad_norm": 0.9001226425170898, |
| "learning_rate": 4.275138675990003e-06, |
| "loss": 0.6496, |
| "step": 1876 |
| }, |
| { |
| "epoch": 1.5541137493097736, |
| "grad_norm": 0.8965322375297546, |
| "learning_rate": 4.274364162673715e-06, |
| "loss": 0.6277, |
| "step": 1877 |
| }, |
| { |
| "epoch": 1.5549420209828824, |
| "grad_norm": 0.9139416217803955, |
| "learning_rate": 4.273589306034503e-06, |
| "loss": 0.6582, |
| "step": 1878 |
| }, |
| { |
| "epoch": 1.5557702926559913, |
| "grad_norm": 0.9557336568832397, |
| "learning_rate": 4.272814106222293e-06, |
| "loss": 0.6117, |
| "step": 1879 |
| }, |
| { |
| "epoch": 1.5565985643291, |
| "grad_norm": 0.8972452878952026, |
| "learning_rate": 4.272038563387079e-06, |
| "loss": 0.6261, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.5574268360022088, |
| "grad_norm": 0.9440129995346069, |
| "learning_rate": 4.2712626776789224e-06, |
| "loss": 0.6163, |
| "step": 1881 |
| }, |
| { |
| "epoch": 1.5582551076753175, |
| "grad_norm": 0.9144585132598877, |
| "learning_rate": 4.270486449247949e-06, |
| "loss": 0.6375, |
| "step": 1882 |
| }, |
| { |
| "epoch": 1.5590833793484262, |
| "grad_norm": 0.9198307991027832, |
| "learning_rate": 4.269709878244352e-06, |
| "loss": 0.6602, |
| "step": 1883 |
| }, |
| { |
| "epoch": 1.559911651021535, |
| "grad_norm": 0.9294776320457458, |
| "learning_rate": 4.268932964818389e-06, |
| "loss": 0.6221, |
| "step": 1884 |
| }, |
| { |
| "epoch": 1.5607399226946437, |
| "grad_norm": 0.8959762454032898, |
| "learning_rate": 4.26815570912039e-06, |
| "loss": 0.6186, |
| "step": 1885 |
| }, |
| { |
| "epoch": 1.5615681943677526, |
| "grad_norm": 0.8978918194770813, |
| "learning_rate": 4.2673781113007415e-06, |
| "loss": 0.6416, |
| "step": 1886 |
| }, |
| { |
| "epoch": 1.5623964660408614, |
| "grad_norm": 0.9102988839149475, |
| "learning_rate": 4.266600171509905e-06, |
| "loss": 0.6398, |
| "step": 1887 |
| }, |
| { |
| "epoch": 1.5632247377139703, |
| "grad_norm": 0.9007701277732849, |
| "learning_rate": 4.265821889898402e-06, |
| "loss": 0.6212, |
| "step": 1888 |
| }, |
| { |
| "epoch": 1.564053009387079, |
| "grad_norm": 0.8861967921257019, |
| "learning_rate": 4.265043266616825e-06, |
| "loss": 0.6357, |
| "step": 1889 |
| }, |
| { |
| "epoch": 1.5648812810601878, |
| "grad_norm": 0.9220154881477356, |
| "learning_rate": 4.26426430181583e-06, |
| "loss": 0.6309, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.5657095527332965, |
| "grad_norm": 0.8884119391441345, |
| "learning_rate": 4.263484995646138e-06, |
| "loss": 0.6053, |
| "step": 1891 |
| }, |
| { |
| "epoch": 1.5665378244064052, |
| "grad_norm": 0.9028825759887695, |
| "learning_rate": 4.262705348258538e-06, |
| "loss": 0.6257, |
| "step": 1892 |
| }, |
| { |
| "epoch": 1.567366096079514, |
| "grad_norm": 0.9207663536071777, |
| "learning_rate": 4.261925359803886e-06, |
| "loss": 0.6303, |
| "step": 1893 |
| }, |
| { |
| "epoch": 1.5681943677526229, |
| "grad_norm": 0.8736982941627502, |
| "learning_rate": 4.261145030433101e-06, |
| "loss": 0.603, |
| "step": 1894 |
| }, |
| { |
| "epoch": 1.5690226394257316, |
| "grad_norm": 0.906385600566864, |
| "learning_rate": 4.26036436029717e-06, |
| "loss": 0.6467, |
| "step": 1895 |
| }, |
| { |
| "epoch": 1.5698509110988406, |
| "grad_norm": 0.9183187484741211, |
| "learning_rate": 4.259583349547144e-06, |
| "loss": 0.622, |
| "step": 1896 |
| }, |
| { |
| "epoch": 1.5706791827719493, |
| "grad_norm": 0.8934562802314758, |
| "learning_rate": 4.258801998334143e-06, |
| "loss": 0.6443, |
| "step": 1897 |
| }, |
| { |
| "epoch": 1.571507454445058, |
| "grad_norm": 0.9267480969429016, |
| "learning_rate": 4.258020306809351e-06, |
| "loss": 0.637, |
| "step": 1898 |
| }, |
| { |
| "epoch": 1.5723357261181667, |
| "grad_norm": 1.389512062072754, |
| "learning_rate": 4.257238275124017e-06, |
| "loss": 0.6513, |
| "step": 1899 |
| }, |
| { |
| "epoch": 1.5731639977912755, |
| "grad_norm": 0.9137914776802063, |
| "learning_rate": 4.256455903429459e-06, |
| "loss": 0.6253, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.5739922694643842, |
| "grad_norm": 0.8945105671882629, |
| "learning_rate": 4.2556731918770565e-06, |
| "loss": 0.6379, |
| "step": 1901 |
| }, |
| { |
| "epoch": 1.5748205411374931, |
| "grad_norm": 0.8723625540733337, |
| "learning_rate": 4.254890140618257e-06, |
| "loss": 0.6032, |
| "step": 1902 |
| }, |
| { |
| "epoch": 1.5756488128106019, |
| "grad_norm": 0.9247622489929199, |
| "learning_rate": 4.254106749804575e-06, |
| "loss": 0.633, |
| "step": 1903 |
| }, |
| { |
| "epoch": 1.5764770844837108, |
| "grad_norm": 0.9282797574996948, |
| "learning_rate": 4.253323019587588e-06, |
| "loss": 0.6189, |
| "step": 1904 |
| }, |
| { |
| "epoch": 1.5773053561568195, |
| "grad_norm": 0.9438830018043518, |
| "learning_rate": 4.25253895011894e-06, |
| "loss": 0.6025, |
| "step": 1905 |
| }, |
| { |
| "epoch": 1.5781336278299283, |
| "grad_norm": 0.90346360206604, |
| "learning_rate": 4.251754541550343e-06, |
| "loss": 0.6336, |
| "step": 1906 |
| }, |
| { |
| "epoch": 1.578961899503037, |
| "grad_norm": 0.9451606869697571, |
| "learning_rate": 4.250969794033573e-06, |
| "loss": 0.6348, |
| "step": 1907 |
| }, |
| { |
| "epoch": 1.5797901711761457, |
| "grad_norm": 0.8982184529304504, |
| "learning_rate": 4.2501847077204685e-06, |
| "loss": 0.6304, |
| "step": 1908 |
| }, |
| { |
| "epoch": 1.5806184428492545, |
| "grad_norm": 0.8807822465896606, |
| "learning_rate": 4.249399282762938e-06, |
| "loss": 0.6318, |
| "step": 1909 |
| }, |
| { |
| "epoch": 1.5814467145223632, |
| "grad_norm": 0.8895682096481323, |
| "learning_rate": 4.248613519312955e-06, |
| "loss": 0.6275, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.5822749861954721, |
| "grad_norm": 0.9105965495109558, |
| "learning_rate": 4.247827417522557e-06, |
| "loss": 0.6328, |
| "step": 1911 |
| }, |
| { |
| "epoch": 1.5831032578685809, |
| "grad_norm": 0.8939966559410095, |
| "learning_rate": 4.247040977543845e-06, |
| "loss": 0.6244, |
| "step": 1912 |
| }, |
| { |
| "epoch": 1.5839315295416898, |
| "grad_norm": 0.8973875641822815, |
| "learning_rate": 4.246254199528991e-06, |
| "loss": 0.6333, |
| "step": 1913 |
| }, |
| { |
| "epoch": 1.5847598012147985, |
| "grad_norm": 0.9207412600517273, |
| "learning_rate": 4.245467083630227e-06, |
| "loss": 0.6186, |
| "step": 1914 |
| }, |
| { |
| "epoch": 1.5855880728879073, |
| "grad_norm": 0.9205051064491272, |
| "learning_rate": 4.244679629999853e-06, |
| "loss": 0.6327, |
| "step": 1915 |
| }, |
| { |
| "epoch": 1.586416344561016, |
| "grad_norm": 0.9485655426979065, |
| "learning_rate": 4.243891838790235e-06, |
| "loss": 0.6355, |
| "step": 1916 |
| }, |
| { |
| "epoch": 1.5872446162341247, |
| "grad_norm": 0.8944463133811951, |
| "learning_rate": 4.243103710153802e-06, |
| "loss": 0.635, |
| "step": 1917 |
| }, |
| { |
| "epoch": 1.5880728879072334, |
| "grad_norm": 0.9133086800575256, |
| "learning_rate": 4.24231524424305e-06, |
| "loss": 0.6247, |
| "step": 1918 |
| }, |
| { |
| "epoch": 1.5889011595803424, |
| "grad_norm": 0.9051132798194885, |
| "learning_rate": 4.241526441210541e-06, |
| "loss": 0.6172, |
| "step": 1919 |
| }, |
| { |
| "epoch": 1.5897294312534511, |
| "grad_norm": 0.8892381191253662, |
| "learning_rate": 4.240737301208899e-06, |
| "loss": 0.6404, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.59055770292656, |
| "grad_norm": 0.8600128293037415, |
| "learning_rate": 4.2399478243908164e-06, |
| "loss": 0.6223, |
| "step": 1921 |
| }, |
| { |
| "epoch": 1.5913859745996688, |
| "grad_norm": 0.8858703970909119, |
| "learning_rate": 4.239158010909049e-06, |
| "loss": 0.6078, |
| "step": 1922 |
| }, |
| { |
| "epoch": 1.5922142462727775, |
| "grad_norm": 0.8994208574295044, |
| "learning_rate": 4.23836786091642e-06, |
| "loss": 0.6444, |
| "step": 1923 |
| }, |
| { |
| "epoch": 1.5930425179458862, |
| "grad_norm": 0.9102290272712708, |
| "learning_rate": 4.237577374565813e-06, |
| "loss": 0.6418, |
| "step": 1924 |
| }, |
| { |
| "epoch": 1.593870789618995, |
| "grad_norm": 0.9099964499473572, |
| "learning_rate": 4.236786552010182e-06, |
| "loss": 0.673, |
| "step": 1925 |
| }, |
| { |
| "epoch": 1.5946990612921037, |
| "grad_norm": 0.9349698424339294, |
| "learning_rate": 4.235995393402544e-06, |
| "loss": 0.6327, |
| "step": 1926 |
| }, |
| { |
| "epoch": 1.5955273329652124, |
| "grad_norm": 0.9073255658149719, |
| "learning_rate": 4.23520389889598e-06, |
| "loss": 0.6315, |
| "step": 1927 |
| }, |
| { |
| "epoch": 1.5963556046383214, |
| "grad_norm": 0.922819197177887, |
| "learning_rate": 4.234412068643637e-06, |
| "loss": 0.6045, |
| "step": 1928 |
| }, |
| { |
| "epoch": 1.59718387631143, |
| "grad_norm": 0.9121822118759155, |
| "learning_rate": 4.233619902798727e-06, |
| "loss": 0.6185, |
| "step": 1929 |
| }, |
| { |
| "epoch": 1.598012147984539, |
| "grad_norm": 0.9063760042190552, |
| "learning_rate": 4.232827401514525e-06, |
| "loss": 0.6438, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.5988404196576478, |
| "grad_norm": 0.8978169560432434, |
| "learning_rate": 4.232034564944375e-06, |
| "loss": 0.6265, |
| "step": 1931 |
| }, |
| { |
| "epoch": 1.5996686913307565, |
| "grad_norm": 0.872261643409729, |
| "learning_rate": 4.231241393241682e-06, |
| "loss": 0.6379, |
| "step": 1932 |
| }, |
| { |
| "epoch": 1.6004969630038652, |
| "grad_norm": 0.9157246351242065, |
| "learning_rate": 4.230447886559919e-06, |
| "loss": 0.6132, |
| "step": 1933 |
| }, |
| { |
| "epoch": 1.601325234676974, |
| "grad_norm": 0.8782444000244141, |
| "learning_rate": 4.22965404505262e-06, |
| "loss": 0.654, |
| "step": 1934 |
| }, |
| { |
| "epoch": 1.6021535063500827, |
| "grad_norm": 0.85744708776474, |
| "learning_rate": 4.228859868873387e-06, |
| "loss": 0.6271, |
| "step": 1935 |
| }, |
| { |
| "epoch": 1.6029817780231916, |
| "grad_norm": 0.9052027463912964, |
| "learning_rate": 4.228065358175886e-06, |
| "loss": 0.606, |
| "step": 1936 |
| }, |
| { |
| "epoch": 1.6038100496963004, |
| "grad_norm": 0.939439594745636, |
| "learning_rate": 4.227270513113846e-06, |
| "loss": 0.6265, |
| "step": 1937 |
| }, |
| { |
| "epoch": 1.6046383213694093, |
| "grad_norm": 0.899934709072113, |
| "learning_rate": 4.226475333841063e-06, |
| "loss": 0.6138, |
| "step": 1938 |
| }, |
| { |
| "epoch": 1.605466593042518, |
| "grad_norm": 0.8990822434425354, |
| "learning_rate": 4.225679820511398e-06, |
| "loss": 0.6406, |
| "step": 1939 |
| }, |
| { |
| "epoch": 1.6062948647156268, |
| "grad_norm": 0.8888954520225525, |
| "learning_rate": 4.224883973278775e-06, |
| "loss": 0.6238, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.6071231363887355, |
| "grad_norm": 0.9115962982177734, |
| "learning_rate": 4.224087792297181e-06, |
| "loss": 0.629, |
| "step": 1941 |
| }, |
| { |
| "epoch": 1.6079514080618442, |
| "grad_norm": 0.9213771820068359, |
| "learning_rate": 4.223291277720671e-06, |
| "loss": 0.6503, |
| "step": 1942 |
| }, |
| { |
| "epoch": 1.608779679734953, |
| "grad_norm": 0.8854106664657593, |
| "learning_rate": 4.222494429703363e-06, |
| "loss": 0.6227, |
| "step": 1943 |
| }, |
| { |
| "epoch": 1.609607951408062, |
| "grad_norm": 0.8777014017105103, |
| "learning_rate": 4.22169724839944e-06, |
| "loss": 0.6356, |
| "step": 1944 |
| }, |
| { |
| "epoch": 1.6104362230811706, |
| "grad_norm": 0.9046356081962585, |
| "learning_rate": 4.22089973396315e-06, |
| "loss": 0.6601, |
| "step": 1945 |
| }, |
| { |
| "epoch": 1.6112644947542794, |
| "grad_norm": 0.9103376269340515, |
| "learning_rate": 4.220101886548802e-06, |
| "loss": 0.63, |
| "step": 1946 |
| }, |
| { |
| "epoch": 1.6120927664273883, |
| "grad_norm": 0.8996292948722839, |
| "learning_rate": 4.219303706310775e-06, |
| "loss": 0.6276, |
| "step": 1947 |
| }, |
| { |
| "epoch": 1.612921038100497, |
| "grad_norm": 0.8811854124069214, |
| "learning_rate": 4.218505193403506e-06, |
| "loss": 0.6438, |
| "step": 1948 |
| }, |
| { |
| "epoch": 1.6137493097736058, |
| "grad_norm": 0.9015316367149353, |
| "learning_rate": 4.2177063479815025e-06, |
| "loss": 0.6351, |
| "step": 1949 |
| }, |
| { |
| "epoch": 1.6145775814467145, |
| "grad_norm": 0.8783132433891296, |
| "learning_rate": 4.216907170199334e-06, |
| "loss": 0.6048, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.6154058531198232, |
| "grad_norm": 0.8911328315734863, |
| "learning_rate": 4.216107660211632e-06, |
| "loss": 0.6306, |
| "step": 1951 |
| }, |
| { |
| "epoch": 1.616234124792932, |
| "grad_norm": 0.9286449551582336, |
| "learning_rate": 4.215307818173095e-06, |
| "loss": 0.6189, |
| "step": 1952 |
| }, |
| { |
| "epoch": 1.6170623964660409, |
| "grad_norm": 0.9025077819824219, |
| "learning_rate": 4.214507644238485e-06, |
| "loss": 0.6302, |
| "step": 1953 |
| }, |
| { |
| "epoch": 1.6178906681391496, |
| "grad_norm": 0.9191645383834839, |
| "learning_rate": 4.213707138562628e-06, |
| "loss": 0.6329, |
| "step": 1954 |
| }, |
| { |
| "epoch": 1.6187189398122586, |
| "grad_norm": 0.8865083456039429, |
| "learning_rate": 4.212906301300415e-06, |
| "loss": 0.6251, |
| "step": 1955 |
| }, |
| { |
| "epoch": 1.6195472114853673, |
| "grad_norm": 0.8876498341560364, |
| "learning_rate": 4.212105132606801e-06, |
| "loss": 0.6243, |
| "step": 1956 |
| }, |
| { |
| "epoch": 1.620375483158476, |
| "grad_norm": 0.9114043116569519, |
| "learning_rate": 4.211303632636804e-06, |
| "loss": 0.6297, |
| "step": 1957 |
| }, |
| { |
| "epoch": 1.6212037548315847, |
| "grad_norm": 0.8960936665534973, |
| "learning_rate": 4.210501801545506e-06, |
| "loss": 0.6114, |
| "step": 1958 |
| }, |
| { |
| "epoch": 1.6220320265046935, |
| "grad_norm": 0.8905466794967651, |
| "learning_rate": 4.209699639488054e-06, |
| "loss": 0.6206, |
| "step": 1959 |
| }, |
| { |
| "epoch": 1.6228602981778022, |
| "grad_norm": 0.8884881734848022, |
| "learning_rate": 4.20889714661966e-06, |
| "loss": 0.63, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.6236885698509111, |
| "grad_norm": 0.9011329412460327, |
| "learning_rate": 4.208094323095597e-06, |
| "loss": 0.6228, |
| "step": 1961 |
| }, |
| { |
| "epoch": 1.6245168415240199, |
| "grad_norm": 0.8968059420585632, |
| "learning_rate": 4.207291169071207e-06, |
| "loss": 0.6073, |
| "step": 1962 |
| }, |
| { |
| "epoch": 1.6253451131971288, |
| "grad_norm": 0.9052163362503052, |
| "learning_rate": 4.20648768470189e-06, |
| "loss": 0.652, |
| "step": 1963 |
| }, |
| { |
| "epoch": 1.6261733848702375, |
| "grad_norm": 0.918620765209198, |
| "learning_rate": 4.205683870143114e-06, |
| "loss": 0.6177, |
| "step": 1964 |
| }, |
| { |
| "epoch": 1.6270016565433463, |
| "grad_norm": 0.9271581172943115, |
| "learning_rate": 4.204879725550409e-06, |
| "loss": 0.6074, |
| "step": 1965 |
| }, |
| { |
| "epoch": 1.627829928216455, |
| "grad_norm": 0.9070274233818054, |
| "learning_rate": 4.20407525107937e-06, |
| "loss": 0.6097, |
| "step": 1966 |
| }, |
| { |
| "epoch": 1.6286581998895637, |
| "grad_norm": 0.8994570970535278, |
| "learning_rate": 4.203270446885655e-06, |
| "loss": 0.622, |
| "step": 1967 |
| }, |
| { |
| "epoch": 1.6294864715626725, |
| "grad_norm": 0.8905465006828308, |
| "learning_rate": 4.202465313124986e-06, |
| "loss": 0.6341, |
| "step": 1968 |
| }, |
| { |
| "epoch": 1.6303147432357812, |
| "grad_norm": 0.8982875347137451, |
| "learning_rate": 4.2016598499531495e-06, |
| "loss": 0.6411, |
| "step": 1969 |
| }, |
| { |
| "epoch": 1.6311430149088901, |
| "grad_norm": 0.8779447078704834, |
| "learning_rate": 4.200854057525995e-06, |
| "loss": 0.6263, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.6319712865819989, |
| "grad_norm": 0.8946086764335632, |
| "learning_rate": 4.200047935999435e-06, |
| "loss": 0.6353, |
| "step": 1971 |
| }, |
| { |
| "epoch": 1.6327995582551078, |
| "grad_norm": 0.922795832157135, |
| "learning_rate": 4.199241485529446e-06, |
| "loss": 0.6203, |
| "step": 1972 |
| }, |
| { |
| "epoch": 1.6336278299282165, |
| "grad_norm": 0.8978541493415833, |
| "learning_rate": 4.198434706272072e-06, |
| "loss": 0.6105, |
| "step": 1973 |
| }, |
| { |
| "epoch": 1.6344561016013253, |
| "grad_norm": 0.9198719263076782, |
| "learning_rate": 4.1976275983834135e-06, |
| "loss": 0.6439, |
| "step": 1974 |
| }, |
| { |
| "epoch": 1.635284373274434, |
| "grad_norm": 0.924688994884491, |
| "learning_rate": 4.1968201620196404e-06, |
| "loss": 0.6339, |
| "step": 1975 |
| }, |
| { |
| "epoch": 1.6361126449475427, |
| "grad_norm": 0.8788520097732544, |
| "learning_rate": 4.196012397336983e-06, |
| "loss": 0.6292, |
| "step": 1976 |
| }, |
| { |
| "epoch": 1.6369409166206514, |
| "grad_norm": 0.9319709539413452, |
| "learning_rate": 4.195204304491737e-06, |
| "loss": 0.6232, |
| "step": 1977 |
| }, |
| { |
| "epoch": 1.6377691882937604, |
| "grad_norm": 1.0228428840637207, |
| "learning_rate": 4.19439588364026e-06, |
| "loss": 0.6212, |
| "step": 1978 |
| }, |
| { |
| "epoch": 1.6385974599668691, |
| "grad_norm": 0.9047924280166626, |
| "learning_rate": 4.193587134938975e-06, |
| "loss": 0.6374, |
| "step": 1979 |
| }, |
| { |
| "epoch": 1.639425731639978, |
| "grad_norm": 0.8888682126998901, |
| "learning_rate": 4.192778058544367e-06, |
| "loss": 0.6495, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.6402540033130868, |
| "grad_norm": 0.9080407619476318, |
| "learning_rate": 4.191968654612984e-06, |
| "loss": 0.6241, |
| "step": 1981 |
| }, |
| { |
| "epoch": 1.6410822749861955, |
| "grad_norm": 0.8986839056015015, |
| "learning_rate": 4.191158923301439e-06, |
| "loss": 0.5911, |
| "step": 1982 |
| }, |
| { |
| "epoch": 1.6419105466593042, |
| "grad_norm": 0.8815017342567444, |
| "learning_rate": 4.190348864766407e-06, |
| "loss": 0.6234, |
| "step": 1983 |
| }, |
| { |
| "epoch": 1.642738818332413, |
| "grad_norm": 0.8722735643386841, |
| "learning_rate": 4.189538479164627e-06, |
| "loss": 0.6224, |
| "step": 1984 |
| }, |
| { |
| "epoch": 1.6435670900055217, |
| "grad_norm": 0.8894723057746887, |
| "learning_rate": 4.188727766652901e-06, |
| "loss": 0.6512, |
| "step": 1985 |
| }, |
| { |
| "epoch": 1.6443953616786304, |
| "grad_norm": 0.9304888248443604, |
| "learning_rate": 4.187916727388094e-06, |
| "loss": 0.6223, |
| "step": 1986 |
| }, |
| { |
| "epoch": 1.6452236333517394, |
| "grad_norm": 0.927659809589386, |
| "learning_rate": 4.187105361527135e-06, |
| "loss": 0.6246, |
| "step": 1987 |
| }, |
| { |
| "epoch": 1.646051905024848, |
| "grad_norm": 0.9076297879219055, |
| "learning_rate": 4.186293669227016e-06, |
| "loss": 0.5989, |
| "step": 1988 |
| }, |
| { |
| "epoch": 1.646880176697957, |
| "grad_norm": 0.9241738319396973, |
| "learning_rate": 4.18548165064479e-06, |
| "loss": 0.6525, |
| "step": 1989 |
| }, |
| { |
| "epoch": 1.6477084483710658, |
| "grad_norm": 0.9331569075584412, |
| "learning_rate": 4.184669305937577e-06, |
| "loss": 0.5968, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.6485367200441745, |
| "grad_norm": 0.9216887354850769, |
| "learning_rate": 4.183856635262558e-06, |
| "loss": 0.6167, |
| "step": 1991 |
| }, |
| { |
| "epoch": 1.6493649917172832, |
| "grad_norm": 0.8903350234031677, |
| "learning_rate": 4.183043638776978e-06, |
| "loss": 0.6234, |
| "step": 1992 |
| }, |
| { |
| "epoch": 1.650193263390392, |
| "grad_norm": 0.9149366617202759, |
| "learning_rate": 4.182230316638142e-06, |
| "loss": 0.6318, |
| "step": 1993 |
| }, |
| { |
| "epoch": 1.6510215350635007, |
| "grad_norm": 0.8998929262161255, |
| "learning_rate": 4.181416669003421e-06, |
| "loss": 0.593, |
| "step": 1994 |
| }, |
| { |
| "epoch": 1.6518498067366096, |
| "grad_norm": 0.8983708024024963, |
| "learning_rate": 4.180602696030248e-06, |
| "loss": 0.6406, |
| "step": 1995 |
| }, |
| { |
| "epoch": 1.6526780784097184, |
| "grad_norm": 0.9061058759689331, |
| "learning_rate": 4.179788397876121e-06, |
| "loss": 0.6365, |
| "step": 1996 |
| }, |
| { |
| "epoch": 1.6535063500828273, |
| "grad_norm": 0.9117223620414734, |
| "learning_rate": 4.178973774698597e-06, |
| "loss": 0.6108, |
| "step": 1997 |
| }, |
| { |
| "epoch": 1.654334621755936, |
| "grad_norm": 0.9081946611404419, |
| "learning_rate": 4.178158826655299e-06, |
| "loss": 0.6135, |
| "step": 1998 |
| }, |
| { |
| "epoch": 1.6551628934290448, |
| "grad_norm": 0.9064502120018005, |
| "learning_rate": 4.177343553903912e-06, |
| "loss": 0.6419, |
| "step": 1999 |
| }, |
| { |
| "epoch": 1.6559911651021535, |
| "grad_norm": 0.9274568557739258, |
| "learning_rate": 4.176527956602182e-06, |
| "loss": 0.6361, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.6568194367752622, |
| "grad_norm": 0.9133802056312561, |
| "learning_rate": 4.175712034907922e-06, |
| "loss": 0.6308, |
| "step": 2001 |
| }, |
| { |
| "epoch": 1.657647708448371, |
| "grad_norm": 0.9161511063575745, |
| "learning_rate": 4.174895788979004e-06, |
| "loss": 0.6176, |
| "step": 2002 |
| }, |
| { |
| "epoch": 1.65847598012148, |
| "grad_norm": 0.948261022567749, |
| "learning_rate": 4.174079218973364e-06, |
| "loss": 0.6217, |
| "step": 2003 |
| }, |
| { |
| "epoch": 1.6593042517945886, |
| "grad_norm": 0.9284013509750366, |
| "learning_rate": 4.173262325049e-06, |
| "loss": 0.6409, |
| "step": 2004 |
| }, |
| { |
| "epoch": 1.6601325234676974, |
| "grad_norm": 0.8958327770233154, |
| "learning_rate": 4.172445107363974e-06, |
| "loss": 0.6551, |
| "step": 2005 |
| }, |
| { |
| "epoch": 1.6609607951408063, |
| "grad_norm": 0.877156138420105, |
| "learning_rate": 4.17162756607641e-06, |
| "loss": 0.6075, |
| "step": 2006 |
| }, |
| { |
| "epoch": 1.661789066813915, |
| "grad_norm": 0.891502857208252, |
| "learning_rate": 4.170809701344496e-06, |
| "loss": 0.6149, |
| "step": 2007 |
| }, |
| { |
| "epoch": 1.6626173384870238, |
| "grad_norm": 0.9643301367759705, |
| "learning_rate": 4.169991513326479e-06, |
| "loss": 0.634, |
| "step": 2008 |
| }, |
| { |
| "epoch": 1.6634456101601325, |
| "grad_norm": 0.9262866377830505, |
| "learning_rate": 4.169173002180672e-06, |
| "loss": 0.6236, |
| "step": 2009 |
| }, |
| { |
| "epoch": 1.6642738818332412, |
| "grad_norm": 0.9970850348472595, |
| "learning_rate": 4.168354168065448e-06, |
| "loss": 0.635, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.66510215350635, |
| "grad_norm": 0.8804837465286255, |
| "learning_rate": 4.167535011139246e-06, |
| "loss": 0.6259, |
| "step": 2011 |
| }, |
| { |
| "epoch": 1.6659304251794589, |
| "grad_norm": 0.9024919271469116, |
| "learning_rate": 4.166715531560564e-06, |
| "loss": 0.622, |
| "step": 2012 |
| }, |
| { |
| "epoch": 1.6667586968525676, |
| "grad_norm": 0.960240364074707, |
| "learning_rate": 4.165895729487962e-06, |
| "loss": 0.6286, |
| "step": 2013 |
| }, |
| { |
| "epoch": 1.6675869685256766, |
| "grad_norm": 0.9259498715400696, |
| "learning_rate": 4.165075605080066e-06, |
| "loss": 0.6242, |
| "step": 2014 |
| }, |
| { |
| "epoch": 1.6684152401987853, |
| "grad_norm": 0.9427163004875183, |
| "learning_rate": 4.164255158495563e-06, |
| "loss": 0.6564, |
| "step": 2015 |
| }, |
| { |
| "epoch": 1.669243511871894, |
| "grad_norm": 0.900632917881012, |
| "learning_rate": 4.1634343898932e-06, |
| "loss": 0.6175, |
| "step": 2016 |
| }, |
| { |
| "epoch": 1.6700717835450027, |
| "grad_norm": 0.9424959421157837, |
| "learning_rate": 4.162613299431789e-06, |
| "loss": 0.6308, |
| "step": 2017 |
| }, |
| { |
| "epoch": 1.6709000552181115, |
| "grad_norm": 0.9475871324539185, |
| "learning_rate": 4.161791887270203e-06, |
| "loss": 0.6304, |
| "step": 2018 |
| }, |
| { |
| "epoch": 1.6717283268912202, |
| "grad_norm": 0.8703360557556152, |
| "learning_rate": 4.160970153567379e-06, |
| "loss": 0.6174, |
| "step": 2019 |
| }, |
| { |
| "epoch": 1.6725565985643291, |
| "grad_norm": 0.8985229730606079, |
| "learning_rate": 4.160148098482313e-06, |
| "loss": 0.6228, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.6733848702374379, |
| "grad_norm": 0.9509932994842529, |
| "learning_rate": 4.159325722174065e-06, |
| "loss": 0.6451, |
| "step": 2021 |
| }, |
| { |
| "epoch": 1.6742131419105468, |
| "grad_norm": 0.9234849810600281, |
| "learning_rate": 4.158503024801759e-06, |
| "loss": 0.624, |
| "step": 2022 |
| }, |
| { |
| "epoch": 1.6750414135836555, |
| "grad_norm": 0.9451475143432617, |
| "learning_rate": 4.157680006524577e-06, |
| "loss": 0.6418, |
| "step": 2023 |
| }, |
| { |
| "epoch": 1.6758696852567643, |
| "grad_norm": 0.8933409452438354, |
| "learning_rate": 4.1568566675017675e-06, |
| "loss": 0.639, |
| "step": 2024 |
| }, |
| { |
| "epoch": 1.676697956929873, |
| "grad_norm": 0.8995954990386963, |
| "learning_rate": 4.156033007892637e-06, |
| "loss": 0.6223, |
| "step": 2025 |
| }, |
| { |
| "epoch": 1.6775262286029817, |
| "grad_norm": 0.8978449106216431, |
| "learning_rate": 4.155209027856557e-06, |
| "loss": 0.6264, |
| "step": 2026 |
| }, |
| { |
| "epoch": 1.6783545002760905, |
| "grad_norm": 0.9240763187408447, |
| "learning_rate": 4.15438472755296e-06, |
| "loss": 0.6339, |
| "step": 2027 |
| }, |
| { |
| "epoch": 1.6791827719491992, |
| "grad_norm": 0.8767935633659363, |
| "learning_rate": 4.15356010714134e-06, |
| "loss": 0.6407, |
| "step": 2028 |
| }, |
| { |
| "epoch": 1.6800110436223081, |
| "grad_norm": 0.8665036559104919, |
| "learning_rate": 4.152735166781253e-06, |
| "loss": 0.6299, |
| "step": 2029 |
| }, |
| { |
| "epoch": 1.6808393152954169, |
| "grad_norm": 0.9480779767036438, |
| "learning_rate": 4.151909906632319e-06, |
| "loss": 0.6166, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.6816675869685258, |
| "grad_norm": 0.9063659906387329, |
| "learning_rate": 4.151084326854216e-06, |
| "loss": 0.6449, |
| "step": 2031 |
| }, |
| { |
| "epoch": 1.6824958586416345, |
| "grad_norm": 0.9177497029304504, |
| "learning_rate": 4.150258427606687e-06, |
| "loss": 0.6064, |
| "step": 2032 |
| }, |
| { |
| "epoch": 1.6833241303147433, |
| "grad_norm": 0.9536518454551697, |
| "learning_rate": 4.149432209049536e-06, |
| "loss": 0.6149, |
| "step": 2033 |
| }, |
| { |
| "epoch": 1.684152401987852, |
| "grad_norm": 0.9266525506973267, |
| "learning_rate": 4.148605671342629e-06, |
| "loss": 0.6237, |
| "step": 2034 |
| }, |
| { |
| "epoch": 1.6849806736609607, |
| "grad_norm": 0.8746016621589661, |
| "learning_rate": 4.147778814645892e-06, |
| "loss": 0.6432, |
| "step": 2035 |
| }, |
| { |
| "epoch": 1.6858089453340694, |
| "grad_norm": 0.8574067950248718, |
| "learning_rate": 4.1469516391193155e-06, |
| "loss": 0.6302, |
| "step": 2036 |
| }, |
| { |
| "epoch": 1.6866372170071784, |
| "grad_norm": 0.8573758006095886, |
| "learning_rate": 4.146124144922949e-06, |
| "loss": 0.6157, |
| "step": 2037 |
| }, |
| { |
| "epoch": 1.6874654886802871, |
| "grad_norm": 0.9027220010757446, |
| "learning_rate": 4.1452963322169046e-06, |
| "loss": 0.6341, |
| "step": 2038 |
| }, |
| { |
| "epoch": 1.688293760353396, |
| "grad_norm": 0.9101020097732544, |
| "learning_rate": 4.144468201161358e-06, |
| "loss": 0.6381, |
| "step": 2039 |
| }, |
| { |
| "epoch": 1.6891220320265048, |
| "grad_norm": 0.890192985534668, |
| "learning_rate": 4.143639751916544e-06, |
| "loss": 0.6249, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.6899503036996135, |
| "grad_norm": 0.8839165568351746, |
| "learning_rate": 4.14281098464276e-06, |
| "loss": 0.6168, |
| "step": 2041 |
| }, |
| { |
| "epoch": 1.6907785753727222, |
| "grad_norm": 0.8947030305862427, |
| "learning_rate": 4.141981899500364e-06, |
| "loss": 0.6386, |
| "step": 2042 |
| }, |
| { |
| "epoch": 1.691606847045831, |
| "grad_norm": 0.9320660829544067, |
| "learning_rate": 4.1411524966497775e-06, |
| "loss": 0.6484, |
| "step": 2043 |
| }, |
| { |
| "epoch": 1.6924351187189397, |
| "grad_norm": 0.8932178020477295, |
| "learning_rate": 4.140322776251481e-06, |
| "loss": 0.6419, |
| "step": 2044 |
| }, |
| { |
| "epoch": 1.6932633903920484, |
| "grad_norm": 0.8857268691062927, |
| "learning_rate": 4.1394927384660185e-06, |
| "loss": 0.6415, |
| "step": 2045 |
| }, |
| { |
| "epoch": 1.6940916620651574, |
| "grad_norm": 0.9078783392906189, |
| "learning_rate": 4.138662383453994e-06, |
| "loss": 0.6069, |
| "step": 2046 |
| }, |
| { |
| "epoch": 1.694919933738266, |
| "grad_norm": 0.9120603799819946, |
| "learning_rate": 4.137831711376074e-06, |
| "loss": 0.6325, |
| "step": 2047 |
| }, |
| { |
| "epoch": 1.695748205411375, |
| "grad_norm": 0.8772771954536438, |
| "learning_rate": 4.137000722392985e-06, |
| "loss": 0.6328, |
| "step": 2048 |
| }, |
| { |
| "epoch": 1.6965764770844838, |
| "grad_norm": 0.9126537442207336, |
| "learning_rate": 4.136169416665517e-06, |
| "loss": 0.6215, |
| "step": 2049 |
| }, |
| { |
| "epoch": 1.6974047487575925, |
| "grad_norm": 0.8777123689651489, |
| "learning_rate": 4.135337794354518e-06, |
| "loss": 0.6257, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.6982330204307012, |
| "grad_norm": 0.875190258026123, |
| "learning_rate": 4.134505855620902e-06, |
| "loss": 0.5996, |
| "step": 2051 |
| }, |
| { |
| "epoch": 1.69906129210381, |
| "grad_norm": 0.9347050189971924, |
| "learning_rate": 4.133673600625637e-06, |
| "loss": 0.6107, |
| "step": 2052 |
| }, |
| { |
| "epoch": 1.6998895637769187, |
| "grad_norm": 0.9359575510025024, |
| "learning_rate": 4.132841029529762e-06, |
| "loss": 0.6197, |
| "step": 2053 |
| }, |
| { |
| "epoch": 1.7007178354500276, |
| "grad_norm": 0.9131503701210022, |
| "learning_rate": 4.132008142494367e-06, |
| "loss": 0.6318, |
| "step": 2054 |
| }, |
| { |
| "epoch": 1.7015461071231364, |
| "grad_norm": 0.9072123765945435, |
| "learning_rate": 4.13117493968061e-06, |
| "loss": 0.6079, |
| "step": 2055 |
| }, |
| { |
| "epoch": 1.7023743787962453, |
| "grad_norm": 0.8848493695259094, |
| "learning_rate": 4.130341421249708e-06, |
| "loss": 0.6551, |
| "step": 2056 |
| }, |
| { |
| "epoch": 1.703202650469354, |
| "grad_norm": 0.9258943200111389, |
| "learning_rate": 4.129507587362939e-06, |
| "loss": 0.6237, |
| "step": 2057 |
| }, |
| { |
| "epoch": 1.7040309221424628, |
| "grad_norm": 0.9188808798789978, |
| "learning_rate": 4.128673438181642e-06, |
| "loss": 0.6332, |
| "step": 2058 |
| }, |
| { |
| "epoch": 1.7048591938155715, |
| "grad_norm": 0.9025812149047852, |
| "learning_rate": 4.127838973867217e-06, |
| "loss": 0.6168, |
| "step": 2059 |
| }, |
| { |
| "epoch": 1.7056874654886802, |
| "grad_norm": 0.9013999700546265, |
| "learning_rate": 4.127004194581125e-06, |
| "loss": 0.6199, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.706515737161789, |
| "grad_norm": 0.9194703698158264, |
| "learning_rate": 4.126169100484888e-06, |
| "loss": 0.6179, |
| "step": 2061 |
| }, |
| { |
| "epoch": 1.707344008834898, |
| "grad_norm": 0.8993985056877136, |
| "learning_rate": 4.12533369174009e-06, |
| "loss": 0.6115, |
| "step": 2062 |
| }, |
| { |
| "epoch": 1.7081722805080066, |
| "grad_norm": 0.9132640957832336, |
| "learning_rate": 4.124497968508372e-06, |
| "loss": 0.6122, |
| "step": 2063 |
| }, |
| { |
| "epoch": 1.7090005521811154, |
| "grad_norm": 0.8918151259422302, |
| "learning_rate": 4.123661930951441e-06, |
| "loss": 0.6191, |
| "step": 2064 |
| }, |
| { |
| "epoch": 1.7098288238542243, |
| "grad_norm": 0.9052293300628662, |
| "learning_rate": 4.122825579231063e-06, |
| "loss": 0.6571, |
| "step": 2065 |
| }, |
| { |
| "epoch": 1.710657095527333, |
| "grad_norm": 0.9067489504814148, |
| "learning_rate": 4.121988913509062e-06, |
| "loss": 0.6351, |
| "step": 2066 |
| }, |
| { |
| "epoch": 1.7114853672004418, |
| "grad_norm": 0.9140691161155701, |
| "learning_rate": 4.121151933947327e-06, |
| "loss": 0.601, |
| "step": 2067 |
| }, |
| { |
| "epoch": 1.7123136388735505, |
| "grad_norm": 0.896967351436615, |
| "learning_rate": 4.120314640707804e-06, |
| "loss": 0.6264, |
| "step": 2068 |
| }, |
| { |
| "epoch": 1.7131419105466592, |
| "grad_norm": 0.899226188659668, |
| "learning_rate": 4.119477033952504e-06, |
| "loss": 0.6361, |
| "step": 2069 |
| }, |
| { |
| "epoch": 1.713970182219768, |
| "grad_norm": 0.8884353041648865, |
| "learning_rate": 4.118639113843493e-06, |
| "loss": 0.6403, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.7147984538928769, |
| "grad_norm": 0.9573675990104675, |
| "learning_rate": 4.117800880542904e-06, |
| "loss": 0.6408, |
| "step": 2071 |
| }, |
| { |
| "epoch": 1.7156267255659856, |
| "grad_norm": 0.9664042592048645, |
| "learning_rate": 4.116962334212924e-06, |
| "loss": 0.6149, |
| "step": 2072 |
| }, |
| { |
| "epoch": 1.7164549972390946, |
| "grad_norm": 0.8810500502586365, |
| "learning_rate": 4.116123475015806e-06, |
| "loss": 0.6336, |
| "step": 2073 |
| }, |
| { |
| "epoch": 1.7172832689122033, |
| "grad_norm": 0.860697329044342, |
| "learning_rate": 4.1152843031138604e-06, |
| "loss": 0.6416, |
| "step": 2074 |
| }, |
| { |
| "epoch": 1.718111540585312, |
| "grad_norm": 0.9242175221443176, |
| "learning_rate": 4.114444818669461e-06, |
| "loss": 0.6103, |
| "step": 2075 |
| }, |
| { |
| "epoch": 1.7189398122584207, |
| "grad_norm": 0.9451380968093872, |
| "learning_rate": 4.113605021845037e-06, |
| "loss": 0.6425, |
| "step": 2076 |
| }, |
| { |
| "epoch": 1.7197680839315295, |
| "grad_norm": 0.915439248085022, |
| "learning_rate": 4.112764912803084e-06, |
| "loss": 0.6351, |
| "step": 2077 |
| }, |
| { |
| "epoch": 1.7205963556046382, |
| "grad_norm": 0.894934356212616, |
| "learning_rate": 4.111924491706154e-06, |
| "loss": 0.6147, |
| "step": 2078 |
| }, |
| { |
| "epoch": 1.7214246272777471, |
| "grad_norm": 0.9247839450836182, |
| "learning_rate": 4.111083758716862e-06, |
| "loss": 0.6299, |
| "step": 2079 |
| }, |
| { |
| "epoch": 1.7222528989508559, |
| "grad_norm": 0.9241382479667664, |
| "learning_rate": 4.11024271399788e-06, |
| "loss": 0.6396, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.7230811706239648, |
| "grad_norm": 0.91069495677948, |
| "learning_rate": 4.109401357711944e-06, |
| "loss": 0.6348, |
| "step": 2081 |
| }, |
| { |
| "epoch": 1.7239094422970735, |
| "grad_norm": 0.9032791256904602, |
| "learning_rate": 4.1085596900218476e-06, |
| "loss": 0.6329, |
| "step": 2082 |
| }, |
| { |
| "epoch": 1.7247377139701823, |
| "grad_norm": 0.8897013068199158, |
| "learning_rate": 4.107717711090446e-06, |
| "loss": 0.637, |
| "step": 2083 |
| }, |
| { |
| "epoch": 1.725565985643291, |
| "grad_norm": 0.9571179747581482, |
| "learning_rate": 4.106875421080654e-06, |
| "loss": 0.6064, |
| "step": 2084 |
| }, |
| { |
| "epoch": 1.7263942573163997, |
| "grad_norm": 0.8908339738845825, |
| "learning_rate": 4.106032820155447e-06, |
| "loss": 0.6225, |
| "step": 2085 |
| }, |
| { |
| "epoch": 1.7272225289895085, |
| "grad_norm": 0.976216197013855, |
| "learning_rate": 4.105189908477862e-06, |
| "loss": 0.5926, |
| "step": 2086 |
| }, |
| { |
| "epoch": 1.7280508006626172, |
| "grad_norm": 0.8874435424804688, |
| "learning_rate": 4.104346686210992e-06, |
| "loss": 0.6299, |
| "step": 2087 |
| }, |
| { |
| "epoch": 1.7288790723357261, |
| "grad_norm": 0.9100506901741028, |
| "learning_rate": 4.103503153517993e-06, |
| "loss": 0.6541, |
| "step": 2088 |
| }, |
| { |
| "epoch": 1.7297073440088349, |
| "grad_norm": 0.8994580507278442, |
| "learning_rate": 4.102659310562082e-06, |
| "loss": 0.6179, |
| "step": 2089 |
| }, |
| { |
| "epoch": 1.7305356156819438, |
| "grad_norm": 0.9204248785972595, |
| "learning_rate": 4.101815157506534e-06, |
| "loss": 0.6299, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.7313638873550525, |
| "grad_norm": 0.9059334993362427, |
| "learning_rate": 4.100970694514687e-06, |
| "loss": 0.6278, |
| "step": 2091 |
| }, |
| { |
| "epoch": 1.7321921590281613, |
| "grad_norm": 1.0540361404418945, |
| "learning_rate": 4.100125921749932e-06, |
| "loss": 0.6422, |
| "step": 2092 |
| }, |
| { |
| "epoch": 1.73302043070127, |
| "grad_norm": 0.8868762254714966, |
| "learning_rate": 4.099280839375729e-06, |
| "loss": 0.6412, |
| "step": 2093 |
| }, |
| { |
| "epoch": 1.7338487023743787, |
| "grad_norm": 0.9248695373535156, |
| "learning_rate": 4.098435447555591e-06, |
| "loss": 0.6259, |
| "step": 2094 |
| }, |
| { |
| "epoch": 1.7346769740474874, |
| "grad_norm": 0.925979733467102, |
| "learning_rate": 4.097589746453094e-06, |
| "loss": 0.6196, |
| "step": 2095 |
| }, |
| { |
| "epoch": 1.7355052457205964, |
| "grad_norm": 0.9159940481185913, |
| "learning_rate": 4.096743736231875e-06, |
| "loss": 0.6408, |
| "step": 2096 |
| }, |
| { |
| "epoch": 1.7363335173937051, |
| "grad_norm": 0.8592323064804077, |
| "learning_rate": 4.095897417055627e-06, |
| "loss": 0.6389, |
| "step": 2097 |
| }, |
| { |
| "epoch": 1.737161789066814, |
| "grad_norm": 0.8902189135551453, |
| "learning_rate": 4.095050789088106e-06, |
| "loss": 0.6207, |
| "step": 2098 |
| }, |
| { |
| "epoch": 1.7379900607399228, |
| "grad_norm": 0.9215813279151917, |
| "learning_rate": 4.0942038524931245e-06, |
| "loss": 0.6388, |
| "step": 2099 |
| }, |
| { |
| "epoch": 1.7388183324130315, |
| "grad_norm": 0.9118078947067261, |
| "learning_rate": 4.09335660743456e-06, |
| "loss": 0.6382, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.7396466040861402, |
| "grad_norm": 0.956487774848938, |
| "learning_rate": 4.092509054076345e-06, |
| "loss": 0.6434, |
| "step": 2101 |
| }, |
| { |
| "epoch": 1.740474875759249, |
| "grad_norm": 0.9605410099029541, |
| "learning_rate": 4.091661192582473e-06, |
| "loss": 0.6233, |
| "step": 2102 |
| }, |
| { |
| "epoch": 1.7413031474323577, |
| "grad_norm": 0.9352620244026184, |
| "learning_rate": 4.0908130231169975e-06, |
| "loss": 0.6279, |
| "step": 2103 |
| }, |
| { |
| "epoch": 1.7421314191054664, |
| "grad_norm": 0.9066663384437561, |
| "learning_rate": 4.089964545844032e-06, |
| "loss": 0.6292, |
| "step": 2104 |
| }, |
| { |
| "epoch": 1.7429596907785754, |
| "grad_norm": 0.8917461037635803, |
| "learning_rate": 4.0891157609277475e-06, |
| "loss": 0.6609, |
| "step": 2105 |
| }, |
| { |
| "epoch": 1.743787962451684, |
| "grad_norm": 0.897274911403656, |
| "learning_rate": 4.088266668532377e-06, |
| "loss": 0.6111, |
| "step": 2106 |
| }, |
| { |
| "epoch": 1.744616234124793, |
| "grad_norm": 0.9012236595153809, |
| "learning_rate": 4.087417268822211e-06, |
| "loss": 0.6172, |
| "step": 2107 |
| }, |
| { |
| "epoch": 1.7454445057979018, |
| "grad_norm": 0.9241657853126526, |
| "learning_rate": 4.086567561961602e-06, |
| "loss": 0.6362, |
| "step": 2108 |
| }, |
| { |
| "epoch": 1.7462727774710105, |
| "grad_norm": 0.8780818581581116, |
| "learning_rate": 4.08571754811496e-06, |
| "loss": 0.616, |
| "step": 2109 |
| }, |
| { |
| "epoch": 1.7471010491441192, |
| "grad_norm": 0.8949935436248779, |
| "learning_rate": 4.084867227446753e-06, |
| "loss": 0.6157, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.747929320817228, |
| "grad_norm": 0.8971223831176758, |
| "learning_rate": 4.084016600121513e-06, |
| "loss": 0.6509, |
| "step": 2111 |
| }, |
| { |
| "epoch": 1.7487575924903367, |
| "grad_norm": 0.9038648009300232, |
| "learning_rate": 4.083165666303826e-06, |
| "loss": 0.6192, |
| "step": 2112 |
| }, |
| { |
| "epoch": 1.7495858641634456, |
| "grad_norm": 0.9209643006324768, |
| "learning_rate": 4.08231442615834e-06, |
| "loss": 0.6339, |
| "step": 2113 |
| }, |
| { |
| "epoch": 1.7504141358365544, |
| "grad_norm": 0.9203094244003296, |
| "learning_rate": 4.081462879849765e-06, |
| "loss": 0.6347, |
| "step": 2114 |
| }, |
| { |
| "epoch": 1.7512424075096633, |
| "grad_norm": 0.9235814213752747, |
| "learning_rate": 4.080611027542863e-06, |
| "loss": 0.6479, |
| "step": 2115 |
| }, |
| { |
| "epoch": 1.752070679182772, |
| "grad_norm": 0.8877378702163696, |
| "learning_rate": 4.079758869402462e-06, |
| "loss": 0.6173, |
| "step": 2116 |
| }, |
| { |
| "epoch": 1.7528989508558808, |
| "grad_norm": 0.9004576206207275, |
| "learning_rate": 4.078906405593446e-06, |
| "loss": 0.6159, |
| "step": 2117 |
| }, |
| { |
| "epoch": 1.7537272225289895, |
| "grad_norm": 0.9145035147666931, |
| "learning_rate": 4.078053636280759e-06, |
| "loss": 0.6203, |
| "step": 2118 |
| }, |
| { |
| "epoch": 1.7545554942020982, |
| "grad_norm": 0.8826667070388794, |
| "learning_rate": 4.0772005616294034e-06, |
| "loss": 0.644, |
| "step": 2119 |
| }, |
| { |
| "epoch": 1.755383765875207, |
| "grad_norm": 0.9083713293075562, |
| "learning_rate": 4.076347181804442e-06, |
| "loss": 0.631, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.756212037548316, |
| "grad_norm": 0.9335309267044067, |
| "learning_rate": 4.075493496970996e-06, |
| "loss": 0.64, |
| "step": 2121 |
| }, |
| { |
| "epoch": 1.7570403092214246, |
| "grad_norm": 0.9234142899513245, |
| "learning_rate": 4.074639507294243e-06, |
| "loss": 0.6428, |
| "step": 2122 |
| }, |
| { |
| "epoch": 1.7578685808945336, |
| "grad_norm": 0.9190629720687866, |
| "learning_rate": 4.0737852129394265e-06, |
| "loss": 0.6137, |
| "step": 2123 |
| }, |
| { |
| "epoch": 1.7586968525676423, |
| "grad_norm": 0.9001408815383911, |
| "learning_rate": 4.07293061407184e-06, |
| "loss": 0.6354, |
| "step": 2124 |
| }, |
| { |
| "epoch": 1.759525124240751, |
| "grad_norm": 0.9193227291107178, |
| "learning_rate": 4.072075710856843e-06, |
| "loss": 0.6314, |
| "step": 2125 |
| }, |
| { |
| "epoch": 1.7603533959138598, |
| "grad_norm": 0.9072568416595459, |
| "learning_rate": 4.07122050345985e-06, |
| "loss": 0.6323, |
| "step": 2126 |
| }, |
| { |
| "epoch": 1.7611816675869685, |
| "grad_norm": 0.8799459338188171, |
| "learning_rate": 4.070364992046338e-06, |
| "loss": 0.6177, |
| "step": 2127 |
| }, |
| { |
| "epoch": 1.7620099392600772, |
| "grad_norm": 0.8928991556167603, |
| "learning_rate": 4.069509176781838e-06, |
| "loss": 0.6148, |
| "step": 2128 |
| }, |
| { |
| "epoch": 1.762838210933186, |
| "grad_norm": 0.9133786559104919, |
| "learning_rate": 4.068653057831942e-06, |
| "loss": 0.6518, |
| "step": 2129 |
| }, |
| { |
| "epoch": 1.7636664826062949, |
| "grad_norm": 0.9186939597129822, |
| "learning_rate": 4.067796635362303e-06, |
| "loss": 0.6276, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.7644947542794036, |
| "grad_norm": 0.8823785185813904, |
| "learning_rate": 4.06693990953863e-06, |
| "loss": 0.6505, |
| "step": 2131 |
| }, |
| { |
| "epoch": 1.7653230259525126, |
| "grad_norm": 0.9109732508659363, |
| "learning_rate": 4.0660828805266915e-06, |
| "loss": 0.6428, |
| "step": 2132 |
| }, |
| { |
| "epoch": 1.7661512976256213, |
| "grad_norm": 0.9158151745796204, |
| "learning_rate": 4.065225548492314e-06, |
| "loss": 0.6219, |
| "step": 2133 |
| }, |
| { |
| "epoch": 1.76697956929873, |
| "grad_norm": 0.9166013598442078, |
| "learning_rate": 4.064367913601384e-06, |
| "loss": 0.6169, |
| "step": 2134 |
| }, |
| { |
| "epoch": 1.7678078409718387, |
| "grad_norm": 0.9100304245948792, |
| "learning_rate": 4.063509976019845e-06, |
| "loss": 0.6226, |
| "step": 2135 |
| }, |
| { |
| "epoch": 1.7686361126449475, |
| "grad_norm": 0.9220807552337646, |
| "learning_rate": 4.0626517359137e-06, |
| "loss": 0.6121, |
| "step": 2136 |
| }, |
| { |
| "epoch": 1.7694643843180562, |
| "grad_norm": 0.9118103981018066, |
| "learning_rate": 4.061793193449013e-06, |
| "loss": 0.6035, |
| "step": 2137 |
| }, |
| { |
| "epoch": 1.7702926559911651, |
| "grad_norm": 0.9077460169792175, |
| "learning_rate": 4.0609343487919005e-06, |
| "loss": 0.632, |
| "step": 2138 |
| }, |
| { |
| "epoch": 1.7711209276642739, |
| "grad_norm": 0.9414910674095154, |
| "learning_rate": 4.060075202108542e-06, |
| "loss": 0.6408, |
| "step": 2139 |
| }, |
| { |
| "epoch": 1.7719491993373828, |
| "grad_norm": 0.9064626693725586, |
| "learning_rate": 4.059215753565176e-06, |
| "loss": 0.6141, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.7727774710104915, |
| "grad_norm": 0.9015988111495972, |
| "learning_rate": 4.058356003328095e-06, |
| "loss": 0.6386, |
| "step": 2141 |
| }, |
| { |
| "epoch": 1.7736057426836003, |
| "grad_norm": 0.8964430689811707, |
| "learning_rate": 4.057495951563655e-06, |
| "loss": 0.6146, |
| "step": 2142 |
| }, |
| { |
| "epoch": 1.774434014356709, |
| "grad_norm": 0.9055870771408081, |
| "learning_rate": 4.0566355984382676e-06, |
| "loss": 0.6469, |
| "step": 2143 |
| }, |
| { |
| "epoch": 1.7752622860298177, |
| "grad_norm": 0.9215596318244934, |
| "learning_rate": 4.0557749441184024e-06, |
| "loss": 0.6378, |
| "step": 2144 |
| }, |
| { |
| "epoch": 1.7760905577029265, |
| "grad_norm": 0.9089261889457703, |
| "learning_rate": 4.054913988770588e-06, |
| "loss": 0.653, |
| "step": 2145 |
| }, |
| { |
| "epoch": 1.7769188293760352, |
| "grad_norm": 0.895435631275177, |
| "learning_rate": 4.054052732561411e-06, |
| "loss": 0.639, |
| "step": 2146 |
| }, |
| { |
| "epoch": 1.7777471010491441, |
| "grad_norm": 0.9045991897583008, |
| "learning_rate": 4.053191175657518e-06, |
| "loss": 0.6428, |
| "step": 2147 |
| }, |
| { |
| "epoch": 1.7785753727222529, |
| "grad_norm": 0.8864037394523621, |
| "learning_rate": 4.052329318225611e-06, |
| "loss": 0.6188, |
| "step": 2148 |
| }, |
| { |
| "epoch": 1.7794036443953618, |
| "grad_norm": 0.8713943362236023, |
| "learning_rate": 4.0514671604324505e-06, |
| "loss": 0.6246, |
| "step": 2149 |
| }, |
| { |
| "epoch": 1.7802319160684705, |
| "grad_norm": 0.9116864800453186, |
| "learning_rate": 4.0506047024448585e-06, |
| "loss": 0.6129, |
| "step": 2150 |
| }, |
| { |
| "epoch": 1.7810601877415793, |
| "grad_norm": 0.8880242705345154, |
| "learning_rate": 4.04974194442971e-06, |
| "loss": 0.6298, |
| "step": 2151 |
| }, |
| { |
| "epoch": 1.781888459414688, |
| "grad_norm": 0.8981452584266663, |
| "learning_rate": 4.048878886553942e-06, |
| "loss": 0.6299, |
| "step": 2152 |
| }, |
| { |
| "epoch": 1.7827167310877967, |
| "grad_norm": 0.8840851783752441, |
| "learning_rate": 4.048015528984548e-06, |
| "loss": 0.6246, |
| "step": 2153 |
| }, |
| { |
| "epoch": 1.7835450027609054, |
| "grad_norm": 0.9036140441894531, |
| "learning_rate": 4.04715187188858e-06, |
| "loss": 0.6284, |
| "step": 2154 |
| }, |
| { |
| "epoch": 1.7843732744340144, |
| "grad_norm": 0.8673656582832336, |
| "learning_rate": 4.046287915433147e-06, |
| "loss": 0.6003, |
| "step": 2155 |
| }, |
| { |
| "epoch": 1.7852015461071231, |
| "grad_norm": 0.8956523537635803, |
| "learning_rate": 4.045423659785417e-06, |
| "loss": 0.6142, |
| "step": 2156 |
| }, |
| { |
| "epoch": 1.786029817780232, |
| "grad_norm": 0.9444572329521179, |
| "learning_rate": 4.044559105112614e-06, |
| "loss": 0.6313, |
| "step": 2157 |
| }, |
| { |
| "epoch": 1.7868580894533408, |
| "grad_norm": 0.8938515186309814, |
| "learning_rate": 4.043694251582023e-06, |
| "loss": 0.6381, |
| "step": 2158 |
| }, |
| { |
| "epoch": 1.7876863611264495, |
| "grad_norm": 0.9006900191307068, |
| "learning_rate": 4.0428290993609845e-06, |
| "loss": 0.6316, |
| "step": 2159 |
| }, |
| { |
| "epoch": 1.7885146327995582, |
| "grad_norm": 0.9383251667022705, |
| "learning_rate": 4.041963648616897e-06, |
| "loss": 0.6261, |
| "step": 2160 |
| }, |
| { |
| "epoch": 1.789342904472667, |
| "grad_norm": 0.8938176035881042, |
| "learning_rate": 4.041097899517218e-06, |
| "loss": 0.6293, |
| "step": 2161 |
| }, |
| { |
| "epoch": 1.7901711761457757, |
| "grad_norm": 0.8930737972259521, |
| "learning_rate": 4.040231852229462e-06, |
| "loss": 0.6621, |
| "step": 2162 |
| }, |
| { |
| "epoch": 1.7909994478188846, |
| "grad_norm": 0.8892739415168762, |
| "learning_rate": 4.039365506921198e-06, |
| "loss": 0.625, |
| "step": 2163 |
| }, |
| { |
| "epoch": 1.7918277194919934, |
| "grad_norm": 0.9283694624900818, |
| "learning_rate": 4.038498863760061e-06, |
| "loss": 0.622, |
| "step": 2164 |
| }, |
| { |
| "epoch": 1.792655991165102, |
| "grad_norm": 0.8910278081893921, |
| "learning_rate": 4.037631922913733e-06, |
| "loss": 0.6322, |
| "step": 2165 |
| }, |
| { |
| "epoch": 1.793484262838211, |
| "grad_norm": 0.9199251532554626, |
| "learning_rate": 4.0367646845499635e-06, |
| "loss": 0.6184, |
| "step": 2166 |
| }, |
| { |
| "epoch": 1.7943125345113198, |
| "grad_norm": 0.8959246873855591, |
| "learning_rate": 4.035897148836553e-06, |
| "loss": 0.6254, |
| "step": 2167 |
| }, |
| { |
| "epoch": 1.7951408061844285, |
| "grad_norm": 0.9153772592544556, |
| "learning_rate": 4.03502931594136e-06, |
| "loss": 0.5978, |
| "step": 2168 |
| }, |
| { |
| "epoch": 1.7959690778575372, |
| "grad_norm": 0.9109706878662109, |
| "learning_rate": 4.034161186032306e-06, |
| "loss": 0.6139, |
| "step": 2169 |
| }, |
| { |
| "epoch": 1.796797349530646, |
| "grad_norm": 0.9562530517578125, |
| "learning_rate": 4.0332927592773615e-06, |
| "loss": 0.6094, |
| "step": 2170 |
| }, |
| { |
| "epoch": 1.7976256212037547, |
| "grad_norm": 0.8981059789657593, |
| "learning_rate": 4.032424035844564e-06, |
| "loss": 0.6297, |
| "step": 2171 |
| }, |
| { |
| "epoch": 1.7984538928768636, |
| "grad_norm": 0.9379450082778931, |
| "learning_rate": 4.031555015901998e-06, |
| "loss": 0.5991, |
| "step": 2172 |
| }, |
| { |
| "epoch": 1.7992821645499724, |
| "grad_norm": 0.8644198775291443, |
| "learning_rate": 4.030685699617815e-06, |
| "loss": 0.618, |
| "step": 2173 |
| }, |
| { |
| "epoch": 1.8001104362230813, |
| "grad_norm": 0.8995630741119385, |
| "learning_rate": 4.029816087160217e-06, |
| "loss": 0.6287, |
| "step": 2174 |
| }, |
| { |
| "epoch": 1.80093870789619, |
| "grad_norm": 0.8938413858413696, |
| "learning_rate": 4.028946178697467e-06, |
| "loss": 0.6441, |
| "step": 2175 |
| }, |
| { |
| "epoch": 1.8017669795692988, |
| "grad_norm": 0.9178694486618042, |
| "learning_rate": 4.0280759743978835e-06, |
| "loss": 0.6238, |
| "step": 2176 |
| }, |
| { |
| "epoch": 1.8025952512424075, |
| "grad_norm": 0.9186379313468933, |
| "learning_rate": 4.027205474429845e-06, |
| "loss": 0.6137, |
| "step": 2177 |
| }, |
| { |
| "epoch": 1.8034235229155162, |
| "grad_norm": 0.8768324255943298, |
| "learning_rate": 4.026334678961783e-06, |
| "loss": 0.6345, |
| "step": 2178 |
| }, |
| { |
| "epoch": 1.804251794588625, |
| "grad_norm": 0.9008378982543945, |
| "learning_rate": 4.025463588162189e-06, |
| "loss": 0.6435, |
| "step": 2179 |
| }, |
| { |
| "epoch": 1.805080066261734, |
| "grad_norm": 0.9311049580574036, |
| "learning_rate": 4.024592202199611e-06, |
| "loss": 0.6422, |
| "step": 2180 |
| }, |
| { |
| "epoch": 1.8059083379348426, |
| "grad_norm": 0.9087911248207092, |
| "learning_rate": 4.023720521242653e-06, |
| "loss": 0.626, |
| "step": 2181 |
| }, |
| { |
| "epoch": 1.8067366096079516, |
| "grad_norm": 0.9007377028465271, |
| "learning_rate": 4.022848545459979e-06, |
| "loss": 0.6084, |
| "step": 2182 |
| }, |
| { |
| "epoch": 1.8075648812810603, |
| "grad_norm": 0.9112033247947693, |
| "learning_rate": 4.021976275020307e-06, |
| "loss": 0.6089, |
| "step": 2183 |
| }, |
| { |
| "epoch": 1.808393152954169, |
| "grad_norm": 0.9283015131950378, |
| "learning_rate": 4.0211037100924135e-06, |
| "loss": 0.6258, |
| "step": 2184 |
| }, |
| { |
| "epoch": 1.8092214246272778, |
| "grad_norm": 0.8517472147941589, |
| "learning_rate": 4.020230850845132e-06, |
| "loss": 0.6241, |
| "step": 2185 |
| }, |
| { |
| "epoch": 1.8100496963003865, |
| "grad_norm": 0.8855345845222473, |
| "learning_rate": 4.019357697447352e-06, |
| "loss": 0.6217, |
| "step": 2186 |
| }, |
| { |
| "epoch": 1.8108779679734952, |
| "grad_norm": 0.9075968861579895, |
| "learning_rate": 4.018484250068021e-06, |
| "loss": 0.615, |
| "step": 2187 |
| }, |
| { |
| "epoch": 1.811706239646604, |
| "grad_norm": 0.8998132944107056, |
| "learning_rate": 4.017610508876143e-06, |
| "loss": 0.625, |
| "step": 2188 |
| }, |
| { |
| "epoch": 1.8125345113197129, |
| "grad_norm": 0.9262106418609619, |
| "learning_rate": 4.016736474040778e-06, |
| "loss": 0.6359, |
| "step": 2189 |
| }, |
| { |
| "epoch": 1.8133627829928216, |
| "grad_norm": 0.899603009223938, |
| "learning_rate": 4.015862145731044e-06, |
| "loss": 0.6078, |
| "step": 2190 |
| }, |
| { |
| "epoch": 1.8141910546659306, |
| "grad_norm": 0.890383780002594, |
| "learning_rate": 4.014987524116115e-06, |
| "loss": 0.6253, |
| "step": 2191 |
| }, |
| { |
| "epoch": 1.8150193263390393, |
| "grad_norm": 0.8725044131278992, |
| "learning_rate": 4.014112609365224e-06, |
| "loss": 0.6304, |
| "step": 2192 |
| }, |
| { |
| "epoch": 1.815847598012148, |
| "grad_norm": 0.9035672545433044, |
| "learning_rate": 4.013237401647656e-06, |
| "loss": 0.6277, |
| "step": 2193 |
| }, |
| { |
| "epoch": 1.8166758696852567, |
| "grad_norm": 0.9032866954803467, |
| "learning_rate": 4.012361901132758e-06, |
| "loss": 0.6259, |
| "step": 2194 |
| }, |
| { |
| "epoch": 1.8175041413583655, |
| "grad_norm": 0.9181798100471497, |
| "learning_rate": 4.01148610798993e-06, |
| "loss": 0.6201, |
| "step": 2195 |
| }, |
| { |
| "epoch": 1.8183324130314742, |
| "grad_norm": 0.8995611667633057, |
| "learning_rate": 4.01061002238863e-06, |
| "loss": 0.6188, |
| "step": 2196 |
| }, |
| { |
| "epoch": 1.8191606847045831, |
| "grad_norm": 0.898936927318573, |
| "learning_rate": 4.009733644498372e-06, |
| "loss": 0.646, |
| "step": 2197 |
| }, |
| { |
| "epoch": 1.8199889563776919, |
| "grad_norm": 0.8975027203559875, |
| "learning_rate": 4.008856974488728e-06, |
| "loss": 0.6268, |
| "step": 2198 |
| }, |
| { |
| "epoch": 1.8208172280508008, |
| "grad_norm": 0.8810194730758667, |
| "learning_rate": 4.007980012529324e-06, |
| "loss": 0.6232, |
| "step": 2199 |
| }, |
| { |
| "epoch": 1.8216454997239095, |
| "grad_norm": 0.8760704398155212, |
| "learning_rate": 4.007102758789846e-06, |
| "loss": 0.6353, |
| "step": 2200 |
| }, |
| { |
| "epoch": 1.8224737713970183, |
| "grad_norm": 0.8992496728897095, |
| "learning_rate": 4.006225213440032e-06, |
| "loss": 0.6049, |
| "step": 2201 |
| }, |
| { |
| "epoch": 1.823302043070127, |
| "grad_norm": 0.8596404790878296, |
| "learning_rate": 4.0053473766496805e-06, |
| "loss": 0.6399, |
| "step": 2202 |
| }, |
| { |
| "epoch": 1.8241303147432357, |
| "grad_norm": 0.9254772663116455, |
| "learning_rate": 4.004469248588644e-06, |
| "loss": 0.6435, |
| "step": 2203 |
| }, |
| { |
| "epoch": 1.8249585864163445, |
| "grad_norm": 0.904025137424469, |
| "learning_rate": 4.003590829426832e-06, |
| "loss": 0.614, |
| "step": 2204 |
| }, |
| { |
| "epoch": 1.8257868580894532, |
| "grad_norm": 0.9014979600906372, |
| "learning_rate": 4.002712119334212e-06, |
| "loss": 0.6578, |
| "step": 2205 |
| }, |
| { |
| "epoch": 1.8266151297625621, |
| "grad_norm": 0.9288680553436279, |
| "learning_rate": 4.001833118480804e-06, |
| "loss": 0.6202, |
| "step": 2206 |
| }, |
| { |
| "epoch": 1.8274434014356709, |
| "grad_norm": 0.9325555562973022, |
| "learning_rate": 4.000953827036689e-06, |
| "loss": 0.6257, |
| "step": 2207 |
| }, |
| { |
| "epoch": 1.8282716731087798, |
| "grad_norm": 0.9118613004684448, |
| "learning_rate": 4.000074245171999e-06, |
| "loss": 0.6349, |
| "step": 2208 |
| }, |
| { |
| "epoch": 1.8290999447818885, |
| "grad_norm": 0.9021779298782349, |
| "learning_rate": 3.999194373056926e-06, |
| "loss": 0.6439, |
| "step": 2209 |
| }, |
| { |
| "epoch": 1.8299282164549973, |
| "grad_norm": 0.9250462055206299, |
| "learning_rate": 3.998314210861718e-06, |
| "loss": 0.6012, |
| "step": 2210 |
| }, |
| { |
| "epoch": 1.830756488128106, |
| "grad_norm": 0.8969385623931885, |
| "learning_rate": 3.9974337587566765e-06, |
| "loss": 0.6288, |
| "step": 2211 |
| }, |
| { |
| "epoch": 1.8315847598012147, |
| "grad_norm": 0.9066111445426941, |
| "learning_rate": 3.996553016912161e-06, |
| "loss": 0.6365, |
| "step": 2212 |
| }, |
| { |
| "epoch": 1.8324130314743234, |
| "grad_norm": 0.8986483216285706, |
| "learning_rate": 3.995671985498589e-06, |
| "loss": 0.624, |
| "step": 2213 |
| }, |
| { |
| "epoch": 1.8332413031474324, |
| "grad_norm": 0.9514189958572388, |
| "learning_rate": 3.994790664686429e-06, |
| "loss": 0.6512, |
| "step": 2214 |
| }, |
| { |
| "epoch": 1.8340695748205411, |
| "grad_norm": 0.8546944260597229, |
| "learning_rate": 3.99390905464621e-06, |
| "loss": 0.6126, |
| "step": 2215 |
| }, |
| { |
| "epoch": 1.83489784649365, |
| "grad_norm": 0.9364279508590698, |
| "learning_rate": 3.993027155548515e-06, |
| "loss": 0.6198, |
| "step": 2216 |
| }, |
| { |
| "epoch": 1.8357261181667588, |
| "grad_norm": 0.9515073299407959, |
| "learning_rate": 3.992144967563984e-06, |
| "loss": 0.5975, |
| "step": 2217 |
| }, |
| { |
| "epoch": 1.8365543898398675, |
| "grad_norm": 0.8759047389030457, |
| "learning_rate": 3.99126249086331e-06, |
| "loss": 0.6098, |
| "step": 2218 |
| }, |
| { |
| "epoch": 1.8373826615129762, |
| "grad_norm": 0.8610429763793945, |
| "learning_rate": 3.9903797256172464e-06, |
| "loss": 0.6302, |
| "step": 2219 |
| }, |
| { |
| "epoch": 1.838210933186085, |
| "grad_norm": 0.9127179384231567, |
| "learning_rate": 3.989496671996599e-06, |
| "loss": 0.6409, |
| "step": 2220 |
| }, |
| { |
| "epoch": 1.8390392048591937, |
| "grad_norm": 0.9010443687438965, |
| "learning_rate": 3.98861333017223e-06, |
| "loss": 0.5914, |
| "step": 2221 |
| }, |
| { |
| "epoch": 1.8398674765323026, |
| "grad_norm": 0.8914235234260559, |
| "learning_rate": 3.987729700315059e-06, |
| "loss": 0.6129, |
| "step": 2222 |
| }, |
| { |
| "epoch": 1.8406957482054114, |
| "grad_norm": 0.8986005783081055, |
| "learning_rate": 3.986845782596058e-06, |
| "loss": 0.6232, |
| "step": 2223 |
| }, |
| { |
| "epoch": 1.84152401987852, |
| "grad_norm": 0.9035130739212036, |
| "learning_rate": 3.9859615771862595e-06, |
| "loss": 0.6227, |
| "step": 2224 |
| }, |
| { |
| "epoch": 1.842352291551629, |
| "grad_norm": 1.0105544328689575, |
| "learning_rate": 3.985077084256747e-06, |
| "loss": 0.6132, |
| "step": 2225 |
| }, |
| { |
| "epoch": 1.8431805632247378, |
| "grad_norm": 0.8978928923606873, |
| "learning_rate": 3.984192303978663e-06, |
| "loss": 0.6404, |
| "step": 2226 |
| }, |
| { |
| "epoch": 1.8440088348978465, |
| "grad_norm": 0.9029719233512878, |
| "learning_rate": 3.983307236523203e-06, |
| "loss": 0.596, |
| "step": 2227 |
| }, |
| { |
| "epoch": 1.8448371065709552, |
| "grad_norm": 0.8865892291069031, |
| "learning_rate": 3.982421882061621e-06, |
| "loss": 0.6303, |
| "step": 2228 |
| }, |
| { |
| "epoch": 1.845665378244064, |
| "grad_norm": 0.8852880001068115, |
| "learning_rate": 3.981536240765223e-06, |
| "loss": 0.6201, |
| "step": 2229 |
| }, |
| { |
| "epoch": 1.8464936499171727, |
| "grad_norm": 0.8960217237472534, |
| "learning_rate": 3.980650312805373e-06, |
| "loss": 0.6018, |
| "step": 2230 |
| }, |
| { |
| "epoch": 1.8473219215902816, |
| "grad_norm": 0.8915679454803467, |
| "learning_rate": 3.979764098353491e-06, |
| "loss": 0.637, |
| "step": 2231 |
| }, |
| { |
| "epoch": 1.8481501932633904, |
| "grad_norm": 0.8920756578445435, |
| "learning_rate": 3.978877597581049e-06, |
| "loss": 0.6375, |
| "step": 2232 |
| }, |
| { |
| "epoch": 1.8489784649364993, |
| "grad_norm": 0.9024947881698608, |
| "learning_rate": 3.977990810659579e-06, |
| "loss": 0.6414, |
| "step": 2233 |
| }, |
| { |
| "epoch": 1.849806736609608, |
| "grad_norm": 0.8905020356178284, |
| "learning_rate": 3.9771037377606634e-06, |
| "loss": 0.6023, |
| "step": 2234 |
| }, |
| { |
| "epoch": 1.8506350082827168, |
| "grad_norm": 0.8941815495491028, |
| "learning_rate": 3.976216379055944e-06, |
| "loss": 0.6226, |
| "step": 2235 |
| }, |
| { |
| "epoch": 1.8514632799558255, |
| "grad_norm": 0.8991032838821411, |
| "learning_rate": 3.975328734717117e-06, |
| "loss": 0.6286, |
| "step": 2236 |
| }, |
| { |
| "epoch": 1.8522915516289342, |
| "grad_norm": 0.9136471152305603, |
| "learning_rate": 3.974440804915932e-06, |
| "loss": 0.6247, |
| "step": 2237 |
| }, |
| { |
| "epoch": 1.853119823302043, |
| "grad_norm": 0.906333863735199, |
| "learning_rate": 3.973552589824196e-06, |
| "loss": 0.6327, |
| "step": 2238 |
| }, |
| { |
| "epoch": 1.853948094975152, |
| "grad_norm": 0.8604477643966675, |
| "learning_rate": 3.972664089613772e-06, |
| "loss": 0.5794, |
| "step": 2239 |
| }, |
| { |
| "epoch": 1.8547763666482606, |
| "grad_norm": 0.8741788268089294, |
| "learning_rate": 3.971775304456573e-06, |
| "loss": 0.6144, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.8556046383213696, |
| "grad_norm": 0.9187965989112854, |
| "learning_rate": 3.970886234524573e-06, |
| "loss": 0.6091, |
| "step": 2241 |
| }, |
| { |
| "epoch": 1.8564329099944783, |
| "grad_norm": 0.9095160365104675, |
| "learning_rate": 3.969996879989797e-06, |
| "loss": 0.61, |
| "step": 2242 |
| }, |
| { |
| "epoch": 1.857261181667587, |
| "grad_norm": 0.9259464144706726, |
| "learning_rate": 3.969107241024328e-06, |
| "loss": 0.6382, |
| "step": 2243 |
| }, |
| { |
| "epoch": 1.8580894533406958, |
| "grad_norm": 0.893516480922699, |
| "learning_rate": 3.968217317800303e-06, |
| "loss": 0.6338, |
| "step": 2244 |
| }, |
| { |
| "epoch": 1.8589177250138045, |
| "grad_norm": 0.8910942673683167, |
| "learning_rate": 3.967327110489913e-06, |
| "loss": 0.6255, |
| "step": 2245 |
| }, |
| { |
| "epoch": 1.8597459966869132, |
| "grad_norm": 0.912207305431366, |
| "learning_rate": 3.9664366192654066e-06, |
| "loss": 0.626, |
| "step": 2246 |
| }, |
| { |
| "epoch": 1.860574268360022, |
| "grad_norm": 0.9025877714157104, |
| "learning_rate": 3.965545844299084e-06, |
| "loss": 0.6099, |
| "step": 2247 |
| }, |
| { |
| "epoch": 1.8614025400331309, |
| "grad_norm": 0.8995302319526672, |
| "learning_rate": 3.964654785763301e-06, |
| "loss": 0.624, |
| "step": 2248 |
| }, |
| { |
| "epoch": 1.8622308117062396, |
| "grad_norm": 0.8687958121299744, |
| "learning_rate": 3.9637634438304715e-06, |
| "loss": 0.6153, |
| "step": 2249 |
| }, |
| { |
| "epoch": 1.8630590833793486, |
| "grad_norm": 0.8754738569259644, |
| "learning_rate": 3.962871818673061e-06, |
| "loss": 0.639, |
| "step": 2250 |
| }, |
| { |
| "epoch": 1.8638873550524573, |
| "grad_norm": 0.9835318326950073, |
| "learning_rate": 3.961979910463589e-06, |
| "loss": 0.6076, |
| "step": 2251 |
| }, |
| { |
| "epoch": 1.864715626725566, |
| "grad_norm": 0.9048300385475159, |
| "learning_rate": 3.9610877193746335e-06, |
| "loss": 0.6321, |
| "step": 2252 |
| }, |
| { |
| "epoch": 1.8655438983986747, |
| "grad_norm": 0.9182642698287964, |
| "learning_rate": 3.960195245578825e-06, |
| "loss": 0.6215, |
| "step": 2253 |
| }, |
| { |
| "epoch": 1.8663721700717835, |
| "grad_norm": 0.8928918838500977, |
| "learning_rate": 3.959302489248848e-06, |
| "loss": 0.6328, |
| "step": 2254 |
| }, |
| { |
| "epoch": 1.8672004417448922, |
| "grad_norm": 0.8927018642425537, |
| "learning_rate": 3.958409450557441e-06, |
| "loss": 0.6099, |
| "step": 2255 |
| }, |
| { |
| "epoch": 1.8680287134180011, |
| "grad_norm": 0.9087390303611755, |
| "learning_rate": 3.957516129677402e-06, |
| "loss": 0.6247, |
| "step": 2256 |
| }, |
| { |
| "epoch": 1.8688569850911099, |
| "grad_norm": 0.9374837279319763, |
| "learning_rate": 3.956622526781579e-06, |
| "loss": 0.6309, |
| "step": 2257 |
| }, |
| { |
| "epoch": 1.8696852567642188, |
| "grad_norm": 0.8770498633384705, |
| "learning_rate": 3.955728642042874e-06, |
| "loss": 0.6264, |
| "step": 2258 |
| }, |
| { |
| "epoch": 1.8705135284373275, |
| "grad_norm": 0.9087657928466797, |
| "learning_rate": 3.9548344756342486e-06, |
| "loss": 0.6141, |
| "step": 2259 |
| }, |
| { |
| "epoch": 1.8713418001104363, |
| "grad_norm": 0.8835033178329468, |
| "learning_rate": 3.9539400277287134e-06, |
| "loss": 0.6595, |
| "step": 2260 |
| }, |
| { |
| "epoch": 1.872170071783545, |
| "grad_norm": 0.8794443607330322, |
| "learning_rate": 3.953045298499335e-06, |
| "loss": 0.6502, |
| "step": 2261 |
| }, |
| { |
| "epoch": 1.8729983434566537, |
| "grad_norm": 0.8940923810005188, |
| "learning_rate": 3.952150288119239e-06, |
| "loss": 0.6213, |
| "step": 2262 |
| }, |
| { |
| "epoch": 1.8738266151297625, |
| "grad_norm": 0.9037386178970337, |
| "learning_rate": 3.9512549967615965e-06, |
| "loss": 0.6286, |
| "step": 2263 |
| }, |
| { |
| "epoch": 1.8746548868028712, |
| "grad_norm": 0.9349599480628967, |
| "learning_rate": 3.950359424599642e-06, |
| "loss": 0.6363, |
| "step": 2264 |
| }, |
| { |
| "epoch": 1.8754831584759801, |
| "grad_norm": 0.9206270575523376, |
| "learning_rate": 3.949463571806658e-06, |
| "loss": 0.6351, |
| "step": 2265 |
| }, |
| { |
| "epoch": 1.8763114301490889, |
| "grad_norm": 0.922217607498169, |
| "learning_rate": 3.948567438555986e-06, |
| "loss": 0.6068, |
| "step": 2266 |
| }, |
| { |
| "epoch": 1.8771397018221978, |
| "grad_norm": 0.9312251210212708, |
| "learning_rate": 3.947671025021017e-06, |
| "loss": 0.6197, |
| "step": 2267 |
| }, |
| { |
| "epoch": 1.8779679734953065, |
| "grad_norm": 0.8621645569801331, |
| "learning_rate": 3.946774331375199e-06, |
| "loss": 0.6186, |
| "step": 2268 |
| }, |
| { |
| "epoch": 1.8787962451684153, |
| "grad_norm": 0.8905045390129089, |
| "learning_rate": 3.9458773577920365e-06, |
| "loss": 0.6244, |
| "step": 2269 |
| }, |
| { |
| "epoch": 1.879624516841524, |
| "grad_norm": 0.8987746834754944, |
| "learning_rate": 3.944980104445082e-06, |
| "loss": 0.6211, |
| "step": 2270 |
| }, |
| { |
| "epoch": 1.8804527885146327, |
| "grad_norm": 0.8749679327011108, |
| "learning_rate": 3.9440825715079475e-06, |
| "loss": 0.6358, |
| "step": 2271 |
| }, |
| { |
| "epoch": 1.8812810601877414, |
| "grad_norm": 0.8962264657020569, |
| "learning_rate": 3.943184759154297e-06, |
| "loss": 0.6378, |
| "step": 2272 |
| }, |
| { |
| "epoch": 1.8821093318608504, |
| "grad_norm": 0.908346951007843, |
| "learning_rate": 3.942286667557849e-06, |
| "loss": 0.6268, |
| "step": 2273 |
| }, |
| { |
| "epoch": 1.8829376035339591, |
| "grad_norm": 0.9319090247154236, |
| "learning_rate": 3.9413882968923754e-06, |
| "loss": 0.6119, |
| "step": 2274 |
| }, |
| { |
| "epoch": 1.883765875207068, |
| "grad_norm": 0.8884478807449341, |
| "learning_rate": 3.940489647331703e-06, |
| "loss": 0.6188, |
| "step": 2275 |
| }, |
| { |
| "epoch": 1.8845941468801768, |
| "grad_norm": 0.8935524821281433, |
| "learning_rate": 3.939590719049712e-06, |
| "loss": 0.6033, |
| "step": 2276 |
| }, |
| { |
| "epoch": 1.8854224185532855, |
| "grad_norm": 0.8943868279457092, |
| "learning_rate": 3.938691512220336e-06, |
| "loss": 0.6136, |
| "step": 2277 |
| }, |
| { |
| "epoch": 1.8862506902263942, |
| "grad_norm": 0.9294694662094116, |
| "learning_rate": 3.937792027017564e-06, |
| "loss": 0.6163, |
| "step": 2278 |
| }, |
| { |
| "epoch": 1.887078961899503, |
| "grad_norm": 0.8787100315093994, |
| "learning_rate": 3.936892263615438e-06, |
| "loss": 0.6309, |
| "step": 2279 |
| }, |
| { |
| "epoch": 1.8879072335726117, |
| "grad_norm": 0.8840365409851074, |
| "learning_rate": 3.935992222188054e-06, |
| "loss": 0.6344, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.8887355052457206, |
| "grad_norm": 0.8865869045257568, |
| "learning_rate": 3.93509190290956e-06, |
| "loss": 0.6436, |
| "step": 2281 |
| }, |
| { |
| "epoch": 1.8895637769188294, |
| "grad_norm": 0.9087560176849365, |
| "learning_rate": 3.934191305954161e-06, |
| "loss": 0.6065, |
| "step": 2282 |
| }, |
| { |
| "epoch": 1.890392048591938, |
| "grad_norm": 0.9400967359542847, |
| "learning_rate": 3.933290431496114e-06, |
| "loss": 0.62, |
| "step": 2283 |
| }, |
| { |
| "epoch": 1.891220320265047, |
| "grad_norm": 0.9081943035125732, |
| "learning_rate": 3.932389279709729e-06, |
| "loss": 0.6255, |
| "step": 2284 |
| }, |
| { |
| "epoch": 1.8920485919381558, |
| "grad_norm": 0.8887969255447388, |
| "learning_rate": 3.931487850769371e-06, |
| "loss": 0.6328, |
| "step": 2285 |
| }, |
| { |
| "epoch": 1.8928768636112645, |
| "grad_norm": 0.9041091799736023, |
| "learning_rate": 3.930586144849459e-06, |
| "loss": 0.6189, |
| "step": 2286 |
| }, |
| { |
| "epoch": 1.8937051352843732, |
| "grad_norm": 0.8961500525474548, |
| "learning_rate": 3.929684162124462e-06, |
| "loss": 0.6157, |
| "step": 2287 |
| }, |
| { |
| "epoch": 1.894533406957482, |
| "grad_norm": 0.9259622097015381, |
| "learning_rate": 3.928781902768908e-06, |
| "loss": 0.6217, |
| "step": 2288 |
| }, |
| { |
| "epoch": 1.8953616786305907, |
| "grad_norm": 0.8945308327674866, |
| "learning_rate": 3.927879366957374e-06, |
| "loss": 0.6358, |
| "step": 2289 |
| }, |
| { |
| "epoch": 1.8961899503036996, |
| "grad_norm": 0.9441898465156555, |
| "learning_rate": 3.926976554864495e-06, |
| "loss": 0.6403, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.8970182219768084, |
| "grad_norm": 0.9030803442001343, |
| "learning_rate": 3.926073466664953e-06, |
| "loss": 0.6305, |
| "step": 2291 |
| }, |
| { |
| "epoch": 1.8978464936499173, |
| "grad_norm": 0.8858312964439392, |
| "learning_rate": 3.92517010253349e-06, |
| "loss": 0.6474, |
| "step": 2292 |
| }, |
| { |
| "epoch": 1.898674765323026, |
| "grad_norm": 0.9101796746253967, |
| "learning_rate": 3.924266462644896e-06, |
| "loss": 0.6079, |
| "step": 2293 |
| }, |
| { |
| "epoch": 1.8995030369961348, |
| "grad_norm": 0.8956684470176697, |
| "learning_rate": 3.923362547174019e-06, |
| "loss": 0.6366, |
| "step": 2294 |
| }, |
| { |
| "epoch": 1.9003313086692435, |
| "grad_norm": 0.9146638512611389, |
| "learning_rate": 3.922458356295757e-06, |
| "loss": 0.6167, |
| "step": 2295 |
| }, |
| { |
| "epoch": 1.9011595803423522, |
| "grad_norm": 0.899584949016571, |
| "learning_rate": 3.921553890185064e-06, |
| "loss": 0.601, |
| "step": 2296 |
| }, |
| { |
| "epoch": 1.901987852015461, |
| "grad_norm": 0.8994655609130859, |
| "learning_rate": 3.920649149016944e-06, |
| "loss": 0.6312, |
| "step": 2297 |
| }, |
| { |
| "epoch": 1.90281612368857, |
| "grad_norm": 0.9097009897232056, |
| "learning_rate": 3.919744132966457e-06, |
| "loss": 0.6298, |
| "step": 2298 |
| }, |
| { |
| "epoch": 1.9036443953616786, |
| "grad_norm": 0.8923477530479431, |
| "learning_rate": 3.918838842208715e-06, |
| "loss": 0.6283, |
| "step": 2299 |
| }, |
| { |
| "epoch": 1.9044726670347876, |
| "grad_norm": 0.9193882346153259, |
| "learning_rate": 3.917933276918884e-06, |
| "loss": 0.6148, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.9053009387078963, |
| "grad_norm": 0.9286842346191406, |
| "learning_rate": 3.917027437272182e-06, |
| "loss": 0.611, |
| "step": 2301 |
| }, |
| { |
| "epoch": 1.906129210381005, |
| "grad_norm": 0.8983910083770752, |
| "learning_rate": 3.916121323443879e-06, |
| "loss": 0.6329, |
| "step": 2302 |
| }, |
| { |
| "epoch": 1.9069574820541138, |
| "grad_norm": 0.8978155255317688, |
| "learning_rate": 3.9152149356093e-06, |
| "loss": 0.6238, |
| "step": 2303 |
| }, |
| { |
| "epoch": 1.9077857537272225, |
| "grad_norm": 0.9089433550834656, |
| "learning_rate": 3.914308273943826e-06, |
| "loss": 0.6117, |
| "step": 2304 |
| }, |
| { |
| "epoch": 1.9086140254003312, |
| "grad_norm": 0.8742703199386597, |
| "learning_rate": 3.913401338622883e-06, |
| "loss": 0.5938, |
| "step": 2305 |
| }, |
| { |
| "epoch": 1.90944229707344, |
| "grad_norm": 0.8856895565986633, |
| "learning_rate": 3.912494129821958e-06, |
| "loss": 0.6236, |
| "step": 2306 |
| }, |
| { |
| "epoch": 1.9102705687465489, |
| "grad_norm": 0.8872947096824646, |
| "learning_rate": 3.911586647716585e-06, |
| "loss": 0.6407, |
| "step": 2307 |
| }, |
| { |
| "epoch": 1.9110988404196576, |
| "grad_norm": 0.9206568002700806, |
| "learning_rate": 3.910678892482354e-06, |
| "loss": 0.6153, |
| "step": 2308 |
| }, |
| { |
| "epoch": 1.9119271120927666, |
| "grad_norm": 0.9155313372612, |
| "learning_rate": 3.9097708642949085e-06, |
| "loss": 0.6389, |
| "step": 2309 |
| }, |
| { |
| "epoch": 1.9127553837658753, |
| "grad_norm": 0.8918697237968445, |
| "learning_rate": 3.908862563329941e-06, |
| "loss": 0.6381, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.913583655438984, |
| "grad_norm": 0.8947675824165344, |
| "learning_rate": 3.907953989763202e-06, |
| "loss": 0.6384, |
| "step": 2311 |
| }, |
| { |
| "epoch": 1.9144119271120927, |
| "grad_norm": 0.9177634119987488, |
| "learning_rate": 3.90704514377049e-06, |
| "loss": 0.6058, |
| "step": 2312 |
| }, |
| { |
| "epoch": 1.9152401987852015, |
| "grad_norm": 0.8761132955551147, |
| "learning_rate": 3.90613602552766e-06, |
| "loss": 0.6076, |
| "step": 2313 |
| }, |
| { |
| "epoch": 1.9160684704583102, |
| "grad_norm": 0.8839958310127258, |
| "learning_rate": 3.905226635210616e-06, |
| "loss": 0.6126, |
| "step": 2314 |
| }, |
| { |
| "epoch": 1.9168967421314191, |
| "grad_norm": 0.8875505924224854, |
| "learning_rate": 3.904316972995318e-06, |
| "loss": 0.6106, |
| "step": 2315 |
| }, |
| { |
| "epoch": 1.9177250138045279, |
| "grad_norm": 0.9095442891120911, |
| "learning_rate": 3.9034070390577765e-06, |
| "loss": 0.6181, |
| "step": 2316 |
| }, |
| { |
| "epoch": 1.9185532854776368, |
| "grad_norm": 0.8854146003723145, |
| "learning_rate": 3.9024968335740556e-06, |
| "loss": 0.6212, |
| "step": 2317 |
| }, |
| { |
| "epoch": 1.9193815571507455, |
| "grad_norm": 0.8790372610092163, |
| "learning_rate": 3.90158635672027e-06, |
| "loss": 0.6157, |
| "step": 2318 |
| }, |
| { |
| "epoch": 1.9202098288238543, |
| "grad_norm": 0.8464177250862122, |
| "learning_rate": 3.900675608672592e-06, |
| "loss": 0.6135, |
| "step": 2319 |
| }, |
| { |
| "epoch": 1.921038100496963, |
| "grad_norm": 0.8695027828216553, |
| "learning_rate": 3.899764589607239e-06, |
| "loss": 0.6277, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.9218663721700717, |
| "grad_norm": 0.8922004103660583, |
| "learning_rate": 3.898853299700486e-06, |
| "loss": 0.6133, |
| "step": 2321 |
| }, |
| { |
| "epoch": 1.9226946438431805, |
| "grad_norm": 0.8697700500488281, |
| "learning_rate": 3.89794173912866e-06, |
| "loss": 0.5987, |
| "step": 2322 |
| }, |
| { |
| "epoch": 1.9235229155162892, |
| "grad_norm": 0.8974730968475342, |
| "learning_rate": 3.897029908068139e-06, |
| "loss": 0.642, |
| "step": 2323 |
| }, |
| { |
| "epoch": 1.9243511871893981, |
| "grad_norm": 0.8962305784225464, |
| "learning_rate": 3.896117806695355e-06, |
| "loss": 0.6264, |
| "step": 2324 |
| }, |
| { |
| "epoch": 1.9251794588625069, |
| "grad_norm": 0.8809826970100403, |
| "learning_rate": 3.8952054351867885e-06, |
| "loss": 0.6254, |
| "step": 2325 |
| }, |
| { |
| "epoch": 1.9260077305356158, |
| "grad_norm": 0.8647112846374512, |
| "learning_rate": 3.894292793718976e-06, |
| "loss": 0.6089, |
| "step": 2326 |
| }, |
| { |
| "epoch": 1.9268360022087245, |
| "grad_norm": 0.8670300245285034, |
| "learning_rate": 3.893379882468506e-06, |
| "loss": 0.6296, |
| "step": 2327 |
| }, |
| { |
| "epoch": 1.9276642738818333, |
| "grad_norm": 0.9044064879417419, |
| "learning_rate": 3.8924667016120175e-06, |
| "loss": 0.61, |
| "step": 2328 |
| }, |
| { |
| "epoch": 1.928492545554942, |
| "grad_norm": 0.9086777567863464, |
| "learning_rate": 3.891553251326203e-06, |
| "loss": 0.6184, |
| "step": 2329 |
| }, |
| { |
| "epoch": 1.9293208172280507, |
| "grad_norm": 0.8742218017578125, |
| "learning_rate": 3.890639531787805e-06, |
| "loss": 0.6155, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.9301490889011594, |
| "grad_norm": 0.8769245743751526, |
| "learning_rate": 3.8897255431736225e-06, |
| "loss": 0.6175, |
| "step": 2331 |
| }, |
| { |
| "epoch": 1.9309773605742684, |
| "grad_norm": 0.9024838805198669, |
| "learning_rate": 3.888811285660501e-06, |
| "loss": 0.6342, |
| "step": 2332 |
| }, |
| { |
| "epoch": 1.9318056322473771, |
| "grad_norm": 0.9286251068115234, |
| "learning_rate": 3.887896759425342e-06, |
| "loss": 0.6332, |
| "step": 2333 |
| }, |
| { |
| "epoch": 1.932633903920486, |
| "grad_norm": 0.8754233717918396, |
| "learning_rate": 3.886981964645099e-06, |
| "loss": 0.6242, |
| "step": 2334 |
| }, |
| { |
| "epoch": 1.9334621755935948, |
| "grad_norm": 0.8935874104499817, |
| "learning_rate": 3.886066901496775e-06, |
| "loss": 0.6287, |
| "step": 2335 |
| }, |
| { |
| "epoch": 1.9342904472667035, |
| "grad_norm": 0.8786823153495789, |
| "learning_rate": 3.885151570157426e-06, |
| "loss": 0.6332, |
| "step": 2336 |
| }, |
| { |
| "epoch": 1.9351187189398122, |
| "grad_norm": 0.8927508592605591, |
| "learning_rate": 3.88423597080416e-06, |
| "loss": 0.6125, |
| "step": 2337 |
| }, |
| { |
| "epoch": 1.935946990612921, |
| "grad_norm": 0.884454607963562, |
| "learning_rate": 3.883320103614137e-06, |
| "loss": 0.5915, |
| "step": 2338 |
| }, |
| { |
| "epoch": 1.9367752622860297, |
| "grad_norm": 0.90716153383255, |
| "learning_rate": 3.88240396876457e-06, |
| "loss": 0.6356, |
| "step": 2339 |
| }, |
| { |
| "epoch": 1.9376035339591386, |
| "grad_norm": 0.9265467524528503, |
| "learning_rate": 3.881487566432722e-06, |
| "loss": 0.6304, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.9384318056322474, |
| "grad_norm": 0.9072451591491699, |
| "learning_rate": 3.880570896795908e-06, |
| "loss": 0.6182, |
| "step": 2341 |
| }, |
| { |
| "epoch": 1.9392600773053563, |
| "grad_norm": 0.8928481340408325, |
| "learning_rate": 3.879653960031494e-06, |
| "loss": 0.6187, |
| "step": 2342 |
| }, |
| { |
| "epoch": 1.940088348978465, |
| "grad_norm": 0.9217382669448853, |
| "learning_rate": 3.8787367563169e-06, |
| "loss": 0.5996, |
| "step": 2343 |
| }, |
| { |
| "epoch": 1.9409166206515738, |
| "grad_norm": 0.8897376656532288, |
| "learning_rate": 3.877819285829596e-06, |
| "loss": 0.6538, |
| "step": 2344 |
| }, |
| { |
| "epoch": 1.9417448923246825, |
| "grad_norm": 0.903363049030304, |
| "learning_rate": 3.876901548747104e-06, |
| "loss": 0.6247, |
| "step": 2345 |
| }, |
| { |
| "epoch": 1.9425731639977912, |
| "grad_norm": 0.8842670917510986, |
| "learning_rate": 3.875983545246997e-06, |
| "loss": 0.612, |
| "step": 2346 |
| }, |
| { |
| "epoch": 1.9434014356709, |
| "grad_norm": 0.9046406149864197, |
| "learning_rate": 3.8750652755069025e-06, |
| "loss": 0.6166, |
| "step": 2347 |
| }, |
| { |
| "epoch": 1.9442297073440087, |
| "grad_norm": 0.8867412805557251, |
| "learning_rate": 3.874146739704494e-06, |
| "loss": 0.5892, |
| "step": 2348 |
| }, |
| { |
| "epoch": 1.9450579790171176, |
| "grad_norm": 0.8600903749465942, |
| "learning_rate": 3.873227938017502e-06, |
| "loss": 0.6073, |
| "step": 2349 |
| }, |
| { |
| "epoch": 1.9458862506902264, |
| "grad_norm": 0.8921798467636108, |
| "learning_rate": 3.872308870623704e-06, |
| "loss": 0.6476, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.9467145223633353, |
| "grad_norm": 0.8750210404396057, |
| "learning_rate": 3.871389537700933e-06, |
| "loss": 0.6249, |
| "step": 2351 |
| }, |
| { |
| "epoch": 1.947542794036444, |
| "grad_norm": 0.9159662127494812, |
| "learning_rate": 3.87046993942707e-06, |
| "loss": 0.6326, |
| "step": 2352 |
| }, |
| { |
| "epoch": 1.9483710657095528, |
| "grad_norm": 0.8933431506156921, |
| "learning_rate": 3.869550075980049e-06, |
| "loss": 0.6162, |
| "step": 2353 |
| }, |
| { |
| "epoch": 1.9491993373826615, |
| "grad_norm": 0.8558392524719238, |
| "learning_rate": 3.868629947537855e-06, |
| "loss": 0.6239, |
| "step": 2354 |
| }, |
| { |
| "epoch": 1.9500276090557702, |
| "grad_norm": 0.881146252155304, |
| "learning_rate": 3.867709554278524e-06, |
| "loss": 0.6282, |
| "step": 2355 |
| }, |
| { |
| "epoch": 1.950855880728879, |
| "grad_norm": 0.8813173770904541, |
| "learning_rate": 3.8667888963801445e-06, |
| "loss": 0.611, |
| "step": 2356 |
| }, |
| { |
| "epoch": 1.951684152401988, |
| "grad_norm": 0.8674578070640564, |
| "learning_rate": 3.865867974020854e-06, |
| "loss": 0.6333, |
| "step": 2357 |
| }, |
| { |
| "epoch": 1.9525124240750966, |
| "grad_norm": 0.9121171832084656, |
| "learning_rate": 3.8649467873788436e-06, |
| "loss": 0.5979, |
| "step": 2358 |
| }, |
| { |
| "epoch": 1.9533406957482056, |
| "grad_norm": 0.8936792612075806, |
| "learning_rate": 3.8640253366323535e-06, |
| "loss": 0.5979, |
| "step": 2359 |
| }, |
| { |
| "epoch": 1.9541689674213143, |
| "grad_norm": 0.90774005651474, |
| "learning_rate": 3.863103621959676e-06, |
| "loss": 0.6258, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.954997239094423, |
| "grad_norm": 0.8977681994438171, |
| "learning_rate": 3.862181643539155e-06, |
| "loss": 0.6429, |
| "step": 2361 |
| }, |
| { |
| "epoch": 1.9558255107675317, |
| "grad_norm": 0.9216882586479187, |
| "learning_rate": 3.861259401549183e-06, |
| "loss": 0.6468, |
| "step": 2362 |
| }, |
| { |
| "epoch": 1.9566537824406405, |
| "grad_norm": 0.9281133413314819, |
| "learning_rate": 3.860336896168207e-06, |
| "loss": 0.6444, |
| "step": 2363 |
| }, |
| { |
| "epoch": 1.9574820541137492, |
| "grad_norm": 0.9120447039604187, |
| "learning_rate": 3.859414127574722e-06, |
| "loss": 0.6154, |
| "step": 2364 |
| }, |
| { |
| "epoch": 1.958310325786858, |
| "grad_norm": 0.8925580978393555, |
| "learning_rate": 3.858491095947275e-06, |
| "loss": 0.6199, |
| "step": 2365 |
| }, |
| { |
| "epoch": 1.9591385974599669, |
| "grad_norm": 0.9041692018508911, |
| "learning_rate": 3.857567801464466e-06, |
| "loss": 0.6176, |
| "step": 2366 |
| }, |
| { |
| "epoch": 1.9599668691330756, |
| "grad_norm": 0.9044784307479858, |
| "learning_rate": 3.8566442443049425e-06, |
| "loss": 0.6265, |
| "step": 2367 |
| }, |
| { |
| "epoch": 1.9607951408061846, |
| "grad_norm": 0.8784162402153015, |
| "learning_rate": 3.855720424647403e-06, |
| "loss": 0.5991, |
| "step": 2368 |
| }, |
| { |
| "epoch": 1.9616234124792933, |
| "grad_norm": 0.9075899124145508, |
| "learning_rate": 3.854796342670601e-06, |
| "loss": 0.6469, |
| "step": 2369 |
| }, |
| { |
| "epoch": 1.962451684152402, |
| "grad_norm": 0.8535478115081787, |
| "learning_rate": 3.853871998553334e-06, |
| "loss": 0.6449, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.9632799558255107, |
| "grad_norm": 0.9097660779953003, |
| "learning_rate": 3.852947392474457e-06, |
| "loss": 0.6561, |
| "step": 2371 |
| }, |
| { |
| "epoch": 1.9641082274986195, |
| "grad_norm": 0.8776025176048279, |
| "learning_rate": 3.852022524612872e-06, |
| "loss": 0.6473, |
| "step": 2372 |
| }, |
| { |
| "epoch": 1.9649364991717282, |
| "grad_norm": 0.8871668577194214, |
| "learning_rate": 3.8510973951475305e-06, |
| "loss": 0.6263, |
| "step": 2373 |
| }, |
| { |
| "epoch": 1.9657647708448371, |
| "grad_norm": 0.8681115508079529, |
| "learning_rate": 3.850172004257438e-06, |
| "loss": 0.6431, |
| "step": 2374 |
| }, |
| { |
| "epoch": 1.9665930425179459, |
| "grad_norm": 0.8922575116157532, |
| "learning_rate": 3.84924635212165e-06, |
| "loss": 0.634, |
| "step": 2375 |
| }, |
| { |
| "epoch": 1.9674213141910548, |
| "grad_norm": 0.877199113368988, |
| "learning_rate": 3.8483204389192695e-06, |
| "loss": 0.6444, |
| "step": 2376 |
| }, |
| { |
| "epoch": 1.9682495858641635, |
| "grad_norm": 0.9119161367416382, |
| "learning_rate": 3.847394264829453e-06, |
| "loss": 0.6106, |
| "step": 2377 |
| }, |
| { |
| "epoch": 1.9690778575372723, |
| "grad_norm": 0.8658766150474548, |
| "learning_rate": 3.846467830031407e-06, |
| "loss": 0.5946, |
| "step": 2378 |
| }, |
| { |
| "epoch": 1.969906129210381, |
| "grad_norm": 0.8818032145500183, |
| "learning_rate": 3.845541134704387e-06, |
| "loss": 0.6001, |
| "step": 2379 |
| }, |
| { |
| "epoch": 1.9707344008834897, |
| "grad_norm": 0.8885626792907715, |
| "learning_rate": 3.844614179027701e-06, |
| "loss": 0.6073, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.9715626725565985, |
| "grad_norm": 0.9010289311408997, |
| "learning_rate": 3.843686963180705e-06, |
| "loss": 0.633, |
| "step": 2381 |
| }, |
| { |
| "epoch": 1.9723909442297074, |
| "grad_norm": 0.8869554400444031, |
| "learning_rate": 3.842759487342808e-06, |
| "loss": 0.6189, |
| "step": 2382 |
| }, |
| { |
| "epoch": 1.9732192159028161, |
| "grad_norm": 0.9025073647499084, |
| "learning_rate": 3.841831751693466e-06, |
| "loss": 0.6178, |
| "step": 2383 |
| }, |
| { |
| "epoch": 1.9740474875759249, |
| "grad_norm": 0.8754774928092957, |
| "learning_rate": 3.8409037564121895e-06, |
| "loss": 0.6389, |
| "step": 2384 |
| }, |
| { |
| "epoch": 1.9748757592490338, |
| "grad_norm": 0.8868889212608337, |
| "learning_rate": 3.839975501678536e-06, |
| "loss": 0.6508, |
| "step": 2385 |
| }, |
| { |
| "epoch": 1.9757040309221425, |
| "grad_norm": 0.9014704823493958, |
| "learning_rate": 3.839046987672113e-06, |
| "loss": 0.6173, |
| "step": 2386 |
| }, |
| { |
| "epoch": 1.9765323025952513, |
| "grad_norm": 0.8838919401168823, |
| "learning_rate": 3.8381182145725815e-06, |
| "loss": 0.6105, |
| "step": 2387 |
| }, |
| { |
| "epoch": 1.97736057426836, |
| "grad_norm": 0.9038326144218445, |
| "learning_rate": 3.837189182559649e-06, |
| "loss": 0.6079, |
| "step": 2388 |
| }, |
| { |
| "epoch": 1.9781888459414687, |
| "grad_norm": 0.8764126896858215, |
| "learning_rate": 3.836259891813075e-06, |
| "loss": 0.5991, |
| "step": 2389 |
| }, |
| { |
| "epoch": 1.9790171176145774, |
| "grad_norm": 0.8777499198913574, |
| "learning_rate": 3.835330342512667e-06, |
| "loss": 0.6111, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.9798453892876864, |
| "grad_norm": 0.9013993740081787, |
| "learning_rate": 3.834400534838287e-06, |
| "loss": 0.6413, |
| "step": 2391 |
| }, |
| { |
| "epoch": 1.9806736609607951, |
| "grad_norm": 0.9299284219741821, |
| "learning_rate": 3.833470468969842e-06, |
| "loss": 0.6251, |
| "step": 2392 |
| }, |
| { |
| "epoch": 1.981501932633904, |
| "grad_norm": 0.9132182002067566, |
| "learning_rate": 3.832540145087291e-06, |
| "loss": 0.6048, |
| "step": 2393 |
| }, |
| { |
| "epoch": 1.9823302043070128, |
| "grad_norm": 0.8729045987129211, |
| "learning_rate": 3.8316095633706455e-06, |
| "loss": 0.5999, |
| "step": 2394 |
| }, |
| { |
| "epoch": 1.9831584759801215, |
| "grad_norm": 0.9028182029724121, |
| "learning_rate": 3.830678723999961e-06, |
| "loss": 0.639, |
| "step": 2395 |
| }, |
| { |
| "epoch": 1.9839867476532302, |
| "grad_norm": 0.9099806547164917, |
| "learning_rate": 3.829747627155348e-06, |
| "loss": 0.619, |
| "step": 2396 |
| }, |
| { |
| "epoch": 1.984815019326339, |
| "grad_norm": 0.9119778871536255, |
| "learning_rate": 3.828816273016966e-06, |
| "loss": 0.625, |
| "step": 2397 |
| }, |
| { |
| "epoch": 1.9856432909994477, |
| "grad_norm": 0.9013711810112, |
| "learning_rate": 3.827884661765022e-06, |
| "loss": 0.6175, |
| "step": 2398 |
| }, |
| { |
| "epoch": 1.9864715626725566, |
| "grad_norm": 0.8806619644165039, |
| "learning_rate": 3.826952793579774e-06, |
| "loss": 0.6127, |
| "step": 2399 |
| }, |
| { |
| "epoch": 1.9872998343456654, |
| "grad_norm": 0.8944020867347717, |
| "learning_rate": 3.8260206686415295e-06, |
| "loss": 0.6286, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.9881281060187743, |
| "grad_norm": 0.8929147720336914, |
| "learning_rate": 3.825088287130647e-06, |
| "loss": 0.6356, |
| "step": 2401 |
| }, |
| { |
| "epoch": 1.988956377691883, |
| "grad_norm": 0.9230188727378845, |
| "learning_rate": 3.8241556492275335e-06, |
| "loss": 0.6424, |
| "step": 2402 |
| }, |
| { |
| "epoch": 1.9897846493649918, |
| "grad_norm": 0.9055824279785156, |
| "learning_rate": 3.823222755112646e-06, |
| "loss": 0.6224, |
| "step": 2403 |
| }, |
| { |
| "epoch": 1.9906129210381005, |
| "grad_norm": 0.9289576411247253, |
| "learning_rate": 3.822289604966489e-06, |
| "loss": 0.6194, |
| "step": 2404 |
| }, |
| { |
| "epoch": 1.9914411927112092, |
| "grad_norm": 0.8802800178527832, |
| "learning_rate": 3.82135619896962e-06, |
| "loss": 0.6035, |
| "step": 2405 |
| }, |
| { |
| "epoch": 1.992269464384318, |
| "grad_norm": 0.9002155065536499, |
| "learning_rate": 3.820422537302645e-06, |
| "loss": 0.6468, |
| "step": 2406 |
| }, |
| { |
| "epoch": 1.9930977360574267, |
| "grad_norm": 0.8828989863395691, |
| "learning_rate": 3.819488620146218e-06, |
| "loss": 0.6327, |
| "step": 2407 |
| }, |
| { |
| "epoch": 1.9939260077305356, |
| "grad_norm": 0.9240519404411316, |
| "learning_rate": 3.818554447681041e-06, |
| "loss": 0.6257, |
| "step": 2408 |
| }, |
| { |
| "epoch": 1.9947542794036444, |
| "grad_norm": 0.9319384694099426, |
| "learning_rate": 3.817620020087872e-06, |
| "loss": 0.6394, |
| "step": 2409 |
| }, |
| { |
| "epoch": 1.9955825510767533, |
| "grad_norm": 0.8855710029602051, |
| "learning_rate": 3.816685337547511e-06, |
| "loss": 0.6145, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.996410822749862, |
| "grad_norm": 0.8893790245056152, |
| "learning_rate": 3.815750400240811e-06, |
| "loss": 0.5968, |
| "step": 2411 |
| }, |
| { |
| "epoch": 1.9972390944229708, |
| "grad_norm": 0.8707254528999329, |
| "learning_rate": 3.814815208348673e-06, |
| "loss": 0.6256, |
| "step": 2412 |
| }, |
| { |
| "epoch": 1.9980673660960795, |
| "grad_norm": 0.9413359761238098, |
| "learning_rate": 3.8138797620520507e-06, |
| "loss": 0.6306, |
| "step": 2413 |
| }, |
| { |
| "epoch": 1.9988956377691882, |
| "grad_norm": 0.9274361729621887, |
| "learning_rate": 3.8129440615319414e-06, |
| "loss": 0.6132, |
| "step": 2414 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 7242, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 1207, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.096168561514906e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |