| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 500, |
| "global_step": 2848, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.00035112359550561797, |
| "grad_norm": 2.8951518535614014, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 1.0953, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0007022471910112359, |
| "grad_norm": 2.683199167251587, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 1.0614, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.001053370786516854, |
| "grad_norm": 2.72992205619812, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 1.0674, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0014044943820224719, |
| "grad_norm": 2.6386663913726807, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 1.058, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0017556179775280898, |
| "grad_norm": 2.672447443008423, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 1.0523, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.002106741573033708, |
| "grad_norm": 2.5482404232025146, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 1.0454, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0024578651685393258, |
| "grad_norm": 2.651028633117676, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 1.0646, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0028089887640449437, |
| "grad_norm": 2.76011323928833, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 1.061, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0031601123595505617, |
| "grad_norm": 2.4421677589416504, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 1.0827, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.0035112359550561797, |
| "grad_norm": 2.460603713989258, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 1.0925, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0038623595505617976, |
| "grad_norm": 2.3482112884521484, |
| "learning_rate": 5.5e-07, |
| "loss": 1.0679, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.004213483146067416, |
| "grad_norm": 2.4199249744415283, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 1.0685, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.004564606741573034, |
| "grad_norm": 2.171464443206787, |
| "learning_rate": 6.5e-07, |
| "loss": 1.0455, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.0049157303370786515, |
| "grad_norm": 2.212657928466797, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 1.0718, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.0052668539325842695, |
| "grad_norm": 2.0458426475524902, |
| "learning_rate": 7.5e-07, |
| "loss": 1.0213, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.0056179775280898875, |
| "grad_norm": 1.8622595071792603, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 1.0266, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.0059691011235955055, |
| "grad_norm": 1.7484514713287354, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 1.0439, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.006320224719101123, |
| "grad_norm": 1.626994252204895, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 1.0274, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.006671348314606741, |
| "grad_norm": 1.462096095085144, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 1.036, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.007022471910112359, |
| "grad_norm": 1.4871413707733154, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.033, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.007373595505617977, |
| "grad_norm": 1.4984582662582397, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 1.0279, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.007724719101123595, |
| "grad_norm": 1.5213954448699951, |
| "learning_rate": 1.1e-06, |
| "loss": 1.0468, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.008075842696629214, |
| "grad_norm": 1.3945717811584473, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 1.0252, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.008426966292134831, |
| "grad_norm": 1.2632803916931152, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 0.9637, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.00877808988764045, |
| "grad_norm": 1.2537329196929932, |
| "learning_rate": 1.25e-06, |
| "loss": 1.0194, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.009129213483146067, |
| "grad_norm": 1.287577748298645, |
| "learning_rate": 1.3e-06, |
| "loss": 0.9491, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.009480337078651686, |
| "grad_norm": 1.1792410612106323, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 1.0, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.009831460674157303, |
| "grad_norm": 1.1308109760284424, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 1.002, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.010182584269662922, |
| "grad_norm": 1.1292983293533325, |
| "learning_rate": 1.45e-06, |
| "loss": 1.0114, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.010533707865168539, |
| "grad_norm": 1.0696202516555786, |
| "learning_rate": 1.5e-06, |
| "loss": 0.9371, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.010884831460674158, |
| "grad_norm": 1.0532927513122559, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 0.9805, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.011235955056179775, |
| "grad_norm": 1.039662480354309, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 0.9591, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.011587078651685394, |
| "grad_norm": 0.9709979891777039, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.9599, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.011938202247191011, |
| "grad_norm": 0.9826909303665161, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.9492, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.01228932584269663, |
| "grad_norm": 1.01341712474823, |
| "learning_rate": 1.75e-06, |
| "loss": 1.0066, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.012640449438202247, |
| "grad_norm": 1.0003409385681152, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.9641, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.012991573033707866, |
| "grad_norm": 0.9853388667106628, |
| "learning_rate": 1.85e-06, |
| "loss": 0.9681, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.013342696629213483, |
| "grad_norm": 0.9508251547813416, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.9242, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.013693820224719102, |
| "grad_norm": 0.9218304753303528, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.8917, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.014044943820224719, |
| "grad_norm": 0.9143016934394836, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.8715, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.014396067415730338, |
| "grad_norm": 0.9491222500801086, |
| "learning_rate": 2.05e-06, |
| "loss": 0.9157, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.014747191011235955, |
| "grad_norm": 0.9453691244125366, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.9537, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.015098314606741573, |
| "grad_norm": 0.9372655153274536, |
| "learning_rate": 2.15e-06, |
| "loss": 0.9288, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.01544943820224719, |
| "grad_norm": 0.9183003306388855, |
| "learning_rate": 2.2e-06, |
| "loss": 0.8841, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.01580056179775281, |
| "grad_norm": 0.924336314201355, |
| "learning_rate": 2.25e-06, |
| "loss": 0.9305, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.016151685393258428, |
| "grad_norm": 0.9038196206092834, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.9376, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.016502808988764044, |
| "grad_norm": 0.9091538786888123, |
| "learning_rate": 2.35e-06, |
| "loss": 0.9243, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.016853932584269662, |
| "grad_norm": 0.9212149381637573, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.9515, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.01720505617977528, |
| "grad_norm": 0.8974660038948059, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.893, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.0175561797752809, |
| "grad_norm": 0.8716650009155273, |
| "learning_rate": 2.5e-06, |
| "loss": 0.9485, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.017907303370786515, |
| "grad_norm": 0.8729946613311768, |
| "learning_rate": 2.55e-06, |
| "loss": 0.912, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.018258426966292134, |
| "grad_norm": 0.8486078977584839, |
| "learning_rate": 2.6e-06, |
| "loss": 0.9479, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.018609550561797753, |
| "grad_norm": 0.8713150024414062, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.9049, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.018960674157303372, |
| "grad_norm": 0.855265200138092, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.921, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.019311797752808987, |
| "grad_norm": 0.8279822468757629, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.8507, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.019662921348314606, |
| "grad_norm": 0.8624402284622192, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.9292, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.020014044943820225, |
| "grad_norm": 0.8482896089553833, |
| "learning_rate": 2.85e-06, |
| "loss": 0.881, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.020365168539325844, |
| "grad_norm": 0.8510951399803162, |
| "learning_rate": 2.9e-06, |
| "loss": 0.8702, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.02071629213483146, |
| "grad_norm": 0.8771702647209167, |
| "learning_rate": 2.95e-06, |
| "loss": 0.9329, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.021067415730337078, |
| "grad_norm": 0.8551446199417114, |
| "learning_rate": 3e-06, |
| "loss": 0.8908, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.021418539325842697, |
| "grad_norm": 0.8884496688842773, |
| "learning_rate": 3.05e-06, |
| "loss": 0.8791, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.021769662921348316, |
| "grad_norm": 0.8115338087081909, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.8544, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.02212078651685393, |
| "grad_norm": 0.8549612760543823, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.8963, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.02247191011235955, |
| "grad_norm": 0.8259643316268921, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.8299, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.02282303370786517, |
| "grad_norm": 0.8829634785652161, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.8873, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.023174157303370788, |
| "grad_norm": 0.8536106944084167, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.9009, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.023525280898876403, |
| "grad_norm": 0.8468846082687378, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.8921, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.023876404494382022, |
| "grad_norm": 0.8253934383392334, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.848, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.02422752808988764, |
| "grad_norm": 0.8778877854347229, |
| "learning_rate": 3.45e-06, |
| "loss": 0.8887, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.02457865168539326, |
| "grad_norm": 0.8790880441665649, |
| "learning_rate": 3.5e-06, |
| "loss": 0.8849, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.024929775280898875, |
| "grad_norm": 0.843599259853363, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.8972, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.025280898876404494, |
| "grad_norm": 0.8428463339805603, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.9027, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.025632022471910113, |
| "grad_norm": 0.8088714480400085, |
| "learning_rate": 3.65e-06, |
| "loss": 0.8562, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.02598314606741573, |
| "grad_norm": 0.857280969619751, |
| "learning_rate": 3.7e-06, |
| "loss": 0.8725, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.026334269662921347, |
| "grad_norm": 0.8321940898895264, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.8656, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.026685393258426966, |
| "grad_norm": 0.8289700150489807, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.8211, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.027036516853932584, |
| "grad_norm": 0.8454659581184387, |
| "learning_rate": 3.85e-06, |
| "loss": 0.9003, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.027387640449438203, |
| "grad_norm": 0.8450126647949219, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.8595, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.02773876404494382, |
| "grad_norm": 0.8216248750686646, |
| "learning_rate": 3.95e-06, |
| "loss": 0.8503, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.028089887640449437, |
| "grad_norm": 0.8196912407875061, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.8965, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.028441011235955056, |
| "grad_norm": 0.8264586329460144, |
| "learning_rate": 4.05e-06, |
| "loss": 0.879, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.028792134831460675, |
| "grad_norm": 0.8712279200553894, |
| "learning_rate": 4.1e-06, |
| "loss": 0.8825, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.02914325842696629, |
| "grad_norm": 0.8609493970870972, |
| "learning_rate": 4.15e-06, |
| "loss": 0.8901, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.02949438202247191, |
| "grad_norm": 0.8273038864135742, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.8589, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.029845505617977528, |
| "grad_norm": 0.8164820075035095, |
| "learning_rate": 4.25e-06, |
| "loss": 0.8099, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.030196629213483147, |
| "grad_norm": 0.8666368722915649, |
| "learning_rate": 4.3e-06, |
| "loss": 0.8535, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.030547752808988762, |
| "grad_norm": 0.8173773288726807, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.806, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.03089887640449438, |
| "grad_norm": 0.8484762907028198, |
| "learning_rate": 4.4e-06, |
| "loss": 0.8678, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.03125, |
| "grad_norm": 0.8572366833686829, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.8939, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.03160112359550562, |
| "grad_norm": 0.8265636563301086, |
| "learning_rate": 4.5e-06, |
| "loss": 0.8373, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.03195224719101124, |
| "grad_norm": 0.8756334185600281, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.8322, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.032303370786516857, |
| "grad_norm": 0.8236197829246521, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.8425, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.032654494382022475, |
| "grad_norm": 0.855594277381897, |
| "learning_rate": 4.65e-06, |
| "loss": 0.8779, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.03300561797752809, |
| "grad_norm": 0.8004137873649597, |
| "learning_rate": 4.7e-06, |
| "loss": 0.8078, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.033356741573033706, |
| "grad_norm": 0.7953411340713501, |
| "learning_rate": 4.75e-06, |
| "loss": 0.8363, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.033707865168539325, |
| "grad_norm": 0.8278580904006958, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.8301, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.034058988764044944, |
| "grad_norm": 0.8473982810974121, |
| "learning_rate": 4.85e-06, |
| "loss": 0.8408, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.03441011235955056, |
| "grad_norm": 0.8142527341842651, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.8295, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.03476123595505618, |
| "grad_norm": 0.8610883355140686, |
| "learning_rate": 4.95e-06, |
| "loss": 0.8405, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.0351123595505618, |
| "grad_norm": 0.8903759717941284, |
| "learning_rate": 5e-06, |
| "loss": 0.8793, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.03546348314606742, |
| "grad_norm": 0.8775374293327332, |
| "learning_rate": 4.999999957251071e-06, |
| "loss": 0.8516, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.03581460674157303, |
| "grad_norm": 0.8515768647193909, |
| "learning_rate": 4.999999829004281e-06, |
| "loss": 0.8045, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.03616573033707865, |
| "grad_norm": 1.0148895978927612, |
| "learning_rate": 4.9999996152596355e-06, |
| "loss": 0.8247, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.03651685393258427, |
| "grad_norm": 0.829903244972229, |
| "learning_rate": 4.999999316017144e-06, |
| "loss": 0.8282, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.03686797752808989, |
| "grad_norm": 0.8467276096343994, |
| "learning_rate": 4.999998931276815e-06, |
| "loss": 0.8325, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.037219101123595506, |
| "grad_norm": 0.8355855941772461, |
| "learning_rate": 4.999998461038661e-06, |
| "loss": 0.813, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.037570224719101125, |
| "grad_norm": 0.8558890223503113, |
| "learning_rate": 4.999997905302699e-06, |
| "loss": 0.8592, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.037921348314606744, |
| "grad_norm": 0.9446994662284851, |
| "learning_rate": 4.999997264068948e-06, |
| "loss": 0.8323, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.03827247191011236, |
| "grad_norm": 0.8493000268936157, |
| "learning_rate": 4.99999653733743e-06, |
| "loss": 0.8178, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.038623595505617975, |
| "grad_norm": 0.8132368922233582, |
| "learning_rate": 4.99999572510817e-06, |
| "loss": 0.7977, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.038974719101123594, |
| "grad_norm": 0.8313632607460022, |
| "learning_rate": 4.999994827381196e-06, |
| "loss": 0.8407, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.03932584269662921, |
| "grad_norm": 0.8673725724220276, |
| "learning_rate": 4.999993844156537e-06, |
| "loss": 0.862, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.03967696629213483, |
| "grad_norm": 0.8794373273849487, |
| "learning_rate": 4.999992775434229e-06, |
| "loss": 0.8599, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.04002808988764045, |
| "grad_norm": 0.8612070083618164, |
| "learning_rate": 4.9999916212143065e-06, |
| "loss": 0.8013, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.04037921348314607, |
| "grad_norm": 0.8925568461418152, |
| "learning_rate": 4.9999903814968095e-06, |
| "loss": 0.7529, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.04073033707865169, |
| "grad_norm": 0.8791881799697876, |
| "learning_rate": 4.999989056281781e-06, |
| "loss": 0.8756, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.04108146067415731, |
| "grad_norm": 0.8983411192893982, |
| "learning_rate": 4.999987645569267e-06, |
| "loss": 0.8329, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.04143258426966292, |
| "grad_norm": 0.8358251452445984, |
| "learning_rate": 4.999986149359314e-06, |
| "loss": 0.7792, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.04178370786516854, |
| "grad_norm": 0.8848925828933716, |
| "learning_rate": 4.999984567651974e-06, |
| "loss": 0.8242, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.042134831460674156, |
| "grad_norm": 0.9365871548652649, |
| "learning_rate": 4.999982900447301e-06, |
| "loss": 0.7875, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.042485955056179775, |
| "grad_norm": 0.8330186605453491, |
| "learning_rate": 4.999981147745352e-06, |
| "loss": 0.8387, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.042837078651685394, |
| "grad_norm": 0.8438044190406799, |
| "learning_rate": 4.999979309546187e-06, |
| "loss": 0.8547, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.04318820224719101, |
| "grad_norm": 0.8812524676322937, |
| "learning_rate": 4.999977385849869e-06, |
| "loss": 0.7891, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.04353932584269663, |
| "grad_norm": 0.8498764634132385, |
| "learning_rate": 4.999975376656464e-06, |
| "loss": 0.8259, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.04389044943820225, |
| "grad_norm": 0.8646953105926514, |
| "learning_rate": 4.99997328196604e-06, |
| "loss": 0.8269, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.04424157303370786, |
| "grad_norm": 0.8175535798072815, |
| "learning_rate": 4.9999711017786686e-06, |
| "loss": 0.7943, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.04459269662921348, |
| "grad_norm": 0.8761390447616577, |
| "learning_rate": 4.999968836094425e-06, |
| "loss": 0.8668, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.0449438202247191, |
| "grad_norm": 0.8925103545188904, |
| "learning_rate": 4.999966484913387e-06, |
| "loss": 0.8562, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.04529494382022472, |
| "grad_norm": 0.8750078678131104, |
| "learning_rate": 4.999964048235634e-06, |
| "loss": 0.8058, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.04564606741573034, |
| "grad_norm": 0.8446925282478333, |
| "learning_rate": 4.999961526061251e-06, |
| "loss": 0.8197, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.045997191011235956, |
| "grad_norm": 0.8947305679321289, |
| "learning_rate": 4.999958918390321e-06, |
| "loss": 0.8347, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.046348314606741575, |
| "grad_norm": 0.8591403961181641, |
| "learning_rate": 4.999956225222937e-06, |
| "loss": 0.8117, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.046699438202247194, |
| "grad_norm": 0.8698204755783081, |
| "learning_rate": 4.999953446559188e-06, |
| "loss": 0.8378, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.047050561797752806, |
| "grad_norm": 0.8451970219612122, |
| "learning_rate": 4.999950582399171e-06, |
| "loss": 0.8324, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.047401685393258425, |
| "grad_norm": 0.8550758957862854, |
| "learning_rate": 4.999947632742984e-06, |
| "loss": 0.8291, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.047752808988764044, |
| "grad_norm": 0.8345015645027161, |
| "learning_rate": 4.999944597590726e-06, |
| "loss": 0.8315, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.04810393258426966, |
| "grad_norm": 0.9103791117668152, |
| "learning_rate": 4.999941476942502e-06, |
| "loss": 0.8009, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.04845505617977528, |
| "grad_norm": 0.9898645877838135, |
| "learning_rate": 4.99993827079842e-06, |
| "loss": 0.8574, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.0488061797752809, |
| "grad_norm": 0.8579599261283875, |
| "learning_rate": 4.9999349791585876e-06, |
| "loss": 0.7747, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.04915730337078652, |
| "grad_norm": 0.8869126439094543, |
| "learning_rate": 4.999931602023118e-06, |
| "loss": 0.8472, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.04950842696629214, |
| "grad_norm": 0.9130380749702454, |
| "learning_rate": 4.999928139392127e-06, |
| "loss": 0.8247, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.04985955056179775, |
| "grad_norm": 0.8806836605072021, |
| "learning_rate": 4.999924591265732e-06, |
| "loss": 0.8658, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.05021067415730337, |
| "grad_norm": 0.9065203666687012, |
| "learning_rate": 4.999920957644057e-06, |
| "loss": 0.8097, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.05056179775280899, |
| "grad_norm": 0.8982106447219849, |
| "learning_rate": 4.999917238527223e-06, |
| "loss": 0.8207, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.050912921348314606, |
| "grad_norm": 0.9152344465255737, |
| "learning_rate": 4.999913433915359e-06, |
| "loss": 0.845, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.051264044943820225, |
| "grad_norm": 0.8733872771263123, |
| "learning_rate": 4.999909543808595e-06, |
| "loss": 0.8337, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.051615168539325844, |
| "grad_norm": 0.8482681512832642, |
| "learning_rate": 4.999905568207064e-06, |
| "loss": 0.8226, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.05196629213483146, |
| "grad_norm": 0.8690842986106873, |
| "learning_rate": 4.999901507110901e-06, |
| "loss": 0.8677, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.05231741573033708, |
| "grad_norm": 0.8329693675041199, |
| "learning_rate": 4.999897360520246e-06, |
| "loss": 0.7628, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.05266853932584269, |
| "grad_norm": 0.913270890712738, |
| "learning_rate": 4.999893128435241e-06, |
| "loss": 0.8374, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.05301966292134831, |
| "grad_norm": 0.843010425567627, |
| "learning_rate": 4.999888810856029e-06, |
| "loss": 0.8343, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.05337078651685393, |
| "grad_norm": 0.8601850867271423, |
| "learning_rate": 4.99988440778276e-06, |
| "loss": 0.8254, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.05372191011235955, |
| "grad_norm": 0.8458811640739441, |
| "learning_rate": 4.999879919215583e-06, |
| "loss": 0.8238, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.05407303370786517, |
| "grad_norm": 0.8649472594261169, |
| "learning_rate": 4.999875345154652e-06, |
| "loss": 0.8545, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.05442415730337079, |
| "grad_norm": 0.8692646622657776, |
| "learning_rate": 4.999870685600123e-06, |
| "loss": 0.769, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.054775280898876406, |
| "grad_norm": 0.8382206559181213, |
| "learning_rate": 4.999865940552157e-06, |
| "loss": 0.8076, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.055126404494382025, |
| "grad_norm": 0.8909122943878174, |
| "learning_rate": 4.999861110010914e-06, |
| "loss": 0.8118, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.05547752808988764, |
| "grad_norm": 0.8760329484939575, |
| "learning_rate": 4.99985619397656e-06, |
| "loss": 0.8625, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.055828651685393256, |
| "grad_norm": 1.0767908096313477, |
| "learning_rate": 4.999851192449263e-06, |
| "loss": 0.8038, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.056179775280898875, |
| "grad_norm": 0.8571433424949646, |
| "learning_rate": 4.999846105429196e-06, |
| "loss": 0.8123, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.056530898876404494, |
| "grad_norm": 0.8204314112663269, |
| "learning_rate": 4.99984093291653e-06, |
| "loss": 0.7713, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.05688202247191011, |
| "grad_norm": 0.8622918128967285, |
| "learning_rate": 4.9998356749114434e-06, |
| "loss": 0.7816, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.05723314606741573, |
| "grad_norm": 0.991470456123352, |
| "learning_rate": 4.999830331414116e-06, |
| "loss": 0.8171, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.05758426966292135, |
| "grad_norm": 0.9167494177818298, |
| "learning_rate": 4.99982490242473e-06, |
| "loss": 0.8096, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.05793539325842697, |
| "grad_norm": 0.8794699311256409, |
| "learning_rate": 4.999819387943472e-06, |
| "loss": 0.8284, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.05828651685393258, |
| "grad_norm": 0.9374570250511169, |
| "learning_rate": 4.99981378797053e-06, |
| "loss": 0.7887, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.0586376404494382, |
| "grad_norm": 0.8803961277008057, |
| "learning_rate": 4.999808102506095e-06, |
| "loss": 0.8331, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.05898876404494382, |
| "grad_norm": 0.903806209564209, |
| "learning_rate": 4.999802331550363e-06, |
| "loss": 0.8002, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.05933988764044944, |
| "grad_norm": 0.8599410057067871, |
| "learning_rate": 4.99979647510353e-06, |
| "loss": 0.8342, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.059691011235955056, |
| "grad_norm": 0.9048354625701904, |
| "learning_rate": 4.999790533165797e-06, |
| "loss": 0.8197, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.060042134831460675, |
| "grad_norm": 0.8594435453414917, |
| "learning_rate": 4.999784505737366e-06, |
| "loss": 0.7994, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.060393258426966294, |
| "grad_norm": 0.8999492526054382, |
| "learning_rate": 4.999778392818444e-06, |
| "loss": 0.8062, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.06074438202247191, |
| "grad_norm": 0.8730378150939941, |
| "learning_rate": 4.999772194409241e-06, |
| "loss": 0.8348, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.061095505617977525, |
| "grad_norm": 0.8853862881660461, |
| "learning_rate": 4.999765910509967e-06, |
| "loss": 0.8235, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.061446629213483143, |
| "grad_norm": 0.8512648344039917, |
| "learning_rate": 4.999759541120839e-06, |
| "loss": 0.8254, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.06179775280898876, |
| "grad_norm": 0.925890326499939, |
| "learning_rate": 4.999753086242073e-06, |
| "loss": 0.8498, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.06214887640449438, |
| "grad_norm": 0.9359338283538818, |
| "learning_rate": 4.999746545873891e-06, |
| "loss": 0.8006, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.0625, |
| "grad_norm": 0.8511932492256165, |
| "learning_rate": 4.999739920016516e-06, |
| "loss": 0.7843, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.06285112359550561, |
| "grad_norm": 0.920088529586792, |
| "learning_rate": 4.999733208670174e-06, |
| "loss": 0.8173, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.06320224719101124, |
| "grad_norm": 0.8991772532463074, |
| "learning_rate": 4.9997264118350965e-06, |
| "loss": 0.8227, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.06355337078651685, |
| "grad_norm": 0.9123396873474121, |
| "learning_rate": 4.9997195295115145e-06, |
| "loss": 0.7767, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.06390449438202248, |
| "grad_norm": 0.888533353805542, |
| "learning_rate": 4.9997125616996635e-06, |
| "loss": 0.8093, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.06425561797752809, |
| "grad_norm": 0.9099261164665222, |
| "learning_rate": 4.999705508399782e-06, |
| "loss": 0.8142, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.06460674157303371, |
| "grad_norm": 0.9250622987747192, |
| "learning_rate": 4.999698369612111e-06, |
| "loss": 0.7913, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.06495786516853932, |
| "grad_norm": 0.8754624128341675, |
| "learning_rate": 4.999691145336895e-06, |
| "loss": 0.7755, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.06530898876404495, |
| "grad_norm": 0.8488731384277344, |
| "learning_rate": 4.99968383557438e-06, |
| "loss": 0.8209, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.06566011235955056, |
| "grad_norm": 0.8827488422393799, |
| "learning_rate": 4.9996764403248175e-06, |
| "loss": 0.8121, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.06601123595505617, |
| "grad_norm": 0.8604486584663391, |
| "learning_rate": 4.99966895958846e-06, |
| "loss": 0.8046, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.0663623595505618, |
| "grad_norm": 0.855309247970581, |
| "learning_rate": 4.999661393365562e-06, |
| "loss": 0.7345, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.06671348314606741, |
| "grad_norm": 0.8943816423416138, |
| "learning_rate": 4.999653741656385e-06, |
| "loss": 0.7937, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.06706460674157304, |
| "grad_norm": 0.9106631278991699, |
| "learning_rate": 4.999646004461188e-06, |
| "loss": 0.8157, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.06741573033707865, |
| "grad_norm": 0.9037092924118042, |
| "learning_rate": 4.999638181780237e-06, |
| "loss": 0.8091, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.06776685393258428, |
| "grad_norm": 0.854783833026886, |
| "learning_rate": 4.9996302736137994e-06, |
| "loss": 0.7784, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.06811797752808989, |
| "grad_norm": 0.8792631030082703, |
| "learning_rate": 4.999622279962144e-06, |
| "loss": 0.7737, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.0684691011235955, |
| "grad_norm": 0.8999345898628235, |
| "learning_rate": 4.9996142008255465e-06, |
| "loss": 0.7858, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.06882022471910113, |
| "grad_norm": 0.8954832553863525, |
| "learning_rate": 4.999606036204282e-06, |
| "loss": 0.8045, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.06917134831460674, |
| "grad_norm": 0.8854857087135315, |
| "learning_rate": 4.999597786098631e-06, |
| "loss": 0.7842, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.06952247191011236, |
| "grad_norm": 0.8617668747901917, |
| "learning_rate": 4.999589450508874e-06, |
| "loss": 0.807, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.06987359550561797, |
| "grad_norm": 0.8737844824790955, |
| "learning_rate": 4.999581029435296e-06, |
| "loss": 0.8268, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.0702247191011236, |
| "grad_norm": 0.8421310782432556, |
| "learning_rate": 4.999572522878186e-06, |
| "loss": 0.7701, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.07057584269662921, |
| "grad_norm": 0.8911473155021667, |
| "learning_rate": 4.9995639308378365e-06, |
| "loss": 0.756, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.07092696629213484, |
| "grad_norm": 0.879984974861145, |
| "learning_rate": 4.999555253314538e-06, |
| "loss": 0.8023, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.07127808988764045, |
| "grad_norm": 0.9726935625076294, |
| "learning_rate": 4.9995464903085885e-06, |
| "loss": 0.8017, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.07162921348314606, |
| "grad_norm": 0.9257283210754395, |
| "learning_rate": 4.999537641820288e-06, |
| "loss": 0.8336, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.07198033707865169, |
| "grad_norm": 0.9612401723861694, |
| "learning_rate": 4.9995287078499385e-06, |
| "loss": 0.8014, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.0723314606741573, |
| "grad_norm": 0.8790804743766785, |
| "learning_rate": 4.999519688397847e-06, |
| "loss": 0.7592, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.07268258426966293, |
| "grad_norm": 0.9160227179527283, |
| "learning_rate": 4.999510583464321e-06, |
| "loss": 0.7905, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.07303370786516854, |
| "grad_norm": 0.9160066246986389, |
| "learning_rate": 4.999501393049671e-06, |
| "loss": 0.8382, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.07338483146067416, |
| "grad_norm": 0.8971685767173767, |
| "learning_rate": 4.999492117154214e-06, |
| "loss": 0.7881, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.07373595505617977, |
| "grad_norm": 0.9171693921089172, |
| "learning_rate": 4.9994827557782644e-06, |
| "loss": 0.8101, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.07408707865168539, |
| "grad_norm": 0.8446071743965149, |
| "learning_rate": 4.999473308922143e-06, |
| "loss": 0.7749, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.07443820224719101, |
| "grad_norm": 0.8867083787918091, |
| "learning_rate": 4.999463776586174e-06, |
| "loss": 0.7656, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.07478932584269662, |
| "grad_norm": 0.8746408820152283, |
| "learning_rate": 4.9994541587706815e-06, |
| "loss": 0.7778, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.07514044943820225, |
| "grad_norm": 0.884986162185669, |
| "learning_rate": 4.999444455475997e-06, |
| "loss": 0.7975, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.07549157303370786, |
| "grad_norm": 0.8919047713279724, |
| "learning_rate": 4.9994346667024505e-06, |
| "loss": 0.7552, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.07584269662921349, |
| "grad_norm": 0.8840435147285461, |
| "learning_rate": 4.999424792450377e-06, |
| "loss": 0.7876, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.0761938202247191, |
| "grad_norm": 0.9024021625518799, |
| "learning_rate": 4.999414832720114e-06, |
| "loss": 0.7921, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.07654494382022473, |
| "grad_norm": 0.8887689113616943, |
| "learning_rate": 4.999404787512003e-06, |
| "loss": 0.7843, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.07689606741573034, |
| "grad_norm": 0.903285026550293, |
| "learning_rate": 4.9993946568263866e-06, |
| "loss": 0.8164, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.07724719101123595, |
| "grad_norm": 0.8626529574394226, |
| "learning_rate": 4.9993844406636124e-06, |
| "loss": 0.7941, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.07759831460674158, |
| "grad_norm": 0.8919528126716614, |
| "learning_rate": 4.999374139024028e-06, |
| "loss": 0.8054, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.07794943820224719, |
| "grad_norm": 0.9159940481185913, |
| "learning_rate": 4.999363751907988e-06, |
| "loss": 0.8071, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.07830056179775281, |
| "grad_norm": 0.8792368173599243, |
| "learning_rate": 4.999353279315846e-06, |
| "loss": 0.7949, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.07865168539325842, |
| "grad_norm": 0.9184228181838989, |
| "learning_rate": 4.9993427212479604e-06, |
| "loss": 0.8013, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.07900280898876405, |
| "grad_norm": 0.8925946354866028, |
| "learning_rate": 4.999332077704692e-06, |
| "loss": 0.813, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.07935393258426966, |
| "grad_norm": 0.9273242354393005, |
| "learning_rate": 4.999321348686406e-06, |
| "loss": 0.8107, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.07970505617977527, |
| "grad_norm": 0.8914328813552856, |
| "learning_rate": 4.999310534193468e-06, |
| "loss": 0.8229, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.0800561797752809, |
| "grad_norm": 0.9217993021011353, |
| "learning_rate": 4.999299634226249e-06, |
| "loss": 0.7722, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.08040730337078651, |
| "grad_norm": 0.9615241289138794, |
| "learning_rate": 4.999288648785121e-06, |
| "loss": 0.8105, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.08075842696629214, |
| "grad_norm": 0.9098490476608276, |
| "learning_rate": 4.99927757787046e-06, |
| "loss": 0.814, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.08110955056179775, |
| "grad_norm": 0.9211421012878418, |
| "learning_rate": 4.999266421482645e-06, |
| "loss": 0.7542, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.08146067415730338, |
| "grad_norm": 0.841416597366333, |
| "learning_rate": 4.999255179622056e-06, |
| "loss": 0.7961, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.08181179775280899, |
| "grad_norm": 0.8942486643791199, |
| "learning_rate": 4.9992438522890785e-06, |
| "loss": 0.8095, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.08216292134831461, |
| "grad_norm": 0.8535089492797852, |
| "learning_rate": 4.9992324394841005e-06, |
| "loss": 0.801, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.08251404494382023, |
| "grad_norm": 1.02827787399292, |
| "learning_rate": 4.999220941207511e-06, |
| "loss": 0.8057, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.08286516853932584, |
| "grad_norm": 0.9189361929893494, |
| "learning_rate": 4.9992093574597046e-06, |
| "loss": 0.7682, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.08321629213483146, |
| "grad_norm": 0.9160380959510803, |
| "learning_rate": 4.999197688241076e-06, |
| "loss": 0.8218, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.08356741573033707, |
| "grad_norm": 0.8860989809036255, |
| "learning_rate": 4.999185933552027e-06, |
| "loss": 0.7907, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.0839185393258427, |
| "grad_norm": 0.8413179516792297, |
| "learning_rate": 4.999174093392956e-06, |
| "loss": 0.7711, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.08426966292134831, |
| "grad_norm": 0.8843936324119568, |
| "learning_rate": 4.99916216776427e-06, |
| "loss": 0.7356, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.08462078651685394, |
| "grad_norm": 0.9339604377746582, |
| "learning_rate": 4.999150156666376e-06, |
| "loss": 0.8017, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.08497191011235955, |
| "grad_norm": 0.8932393789291382, |
| "learning_rate": 4.999138060099685e-06, |
| "loss": 0.7773, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.08532303370786516, |
| "grad_norm": 0.9117473363876343, |
| "learning_rate": 4.999125878064611e-06, |
| "loss": 0.7828, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.08567415730337079, |
| "grad_norm": 0.8696773052215576, |
| "learning_rate": 4.999113610561571e-06, |
| "loss": 0.7681, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.0860252808988764, |
| "grad_norm": 0.9062797427177429, |
| "learning_rate": 4.999101257590985e-06, |
| "loss": 0.7766, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.08637640449438203, |
| "grad_norm": 0.893018364906311, |
| "learning_rate": 4.999088819153273e-06, |
| "loss": 0.735, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.08672752808988764, |
| "grad_norm": 0.9077386856079102, |
| "learning_rate": 4.999076295248863e-06, |
| "loss": 0.8092, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.08707865168539326, |
| "grad_norm": 0.9474624395370483, |
| "learning_rate": 4.9990636858781805e-06, |
| "loss": 0.7841, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.08742977528089887, |
| "grad_norm": 0.8823204040527344, |
| "learning_rate": 4.9990509910416595e-06, |
| "loss": 0.7827, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.0877808988764045, |
| "grad_norm": 0.9361265897750854, |
| "learning_rate": 4.999038210739733e-06, |
| "loss": 0.8274, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.08813202247191011, |
| "grad_norm": 0.9128723740577698, |
| "learning_rate": 4.999025344972838e-06, |
| "loss": 0.7681, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.08848314606741572, |
| "grad_norm": 0.8730833530426025, |
| "learning_rate": 4.999012393741415e-06, |
| "loss": 0.8086, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.08883426966292135, |
| "grad_norm": 0.8662939071655273, |
| "learning_rate": 4.998999357045906e-06, |
| "loss": 0.7587, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.08918539325842696, |
| "grad_norm": 0.8748407959938049, |
| "learning_rate": 4.998986234886758e-06, |
| "loss": 0.7937, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.08953651685393259, |
| "grad_norm": 0.9770292639732361, |
| "learning_rate": 4.998973027264419e-06, |
| "loss": 0.7815, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.0898876404494382, |
| "grad_norm": 0.8764484524726868, |
| "learning_rate": 4.99895973417934e-06, |
| "loss": 0.766, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.09023876404494383, |
| "grad_norm": 0.8940339684486389, |
| "learning_rate": 4.998946355631978e-06, |
| "loss": 0.8166, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.09058988764044944, |
| "grad_norm": 0.903728723526001, |
| "learning_rate": 4.998932891622788e-06, |
| "loss": 0.7801, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.09094101123595505, |
| "grad_norm": 0.9147090315818787, |
| "learning_rate": 4.998919342152232e-06, |
| "loss": 0.814, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.09129213483146068, |
| "grad_norm": 0.9121170043945312, |
| "learning_rate": 4.9989057072207725e-06, |
| "loss": 0.7718, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.09164325842696629, |
| "grad_norm": 0.8610017895698547, |
| "learning_rate": 4.998891986828877e-06, |
| "loss": 0.7539, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.09199438202247191, |
| "grad_norm": 0.9325037598609924, |
| "learning_rate": 4.9988781809770124e-06, |
| "loss": 0.7699, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.09234550561797752, |
| "grad_norm": 0.92354416847229, |
| "learning_rate": 4.998864289665654e-06, |
| "loss": 0.8224, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.09269662921348315, |
| "grad_norm": 0.9121785759925842, |
| "learning_rate": 4.998850312895274e-06, |
| "loss": 0.7881, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.09304775280898876, |
| "grad_norm": 0.8718385696411133, |
| "learning_rate": 4.998836250666352e-06, |
| "loss": 0.7865, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.09339887640449439, |
| "grad_norm": 0.8944951295852661, |
| "learning_rate": 4.9988221029793685e-06, |
| "loss": 0.8315, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.09375, |
| "grad_norm": 0.9362013339996338, |
| "learning_rate": 4.998807869834807e-06, |
| "loss": 0.7572, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.09410112359550561, |
| "grad_norm": 0.8949217796325684, |
| "learning_rate": 4.998793551233155e-06, |
| "loss": 0.7614, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.09445224719101124, |
| "grad_norm": 0.9898912310600281, |
| "learning_rate": 4.9987791471749015e-06, |
| "loss": 0.8111, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.09480337078651685, |
| "grad_norm": 0.8814307451248169, |
| "learning_rate": 4.9987646576605395e-06, |
| "loss": 0.7774, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.09515449438202248, |
| "grad_norm": 0.9214469790458679, |
| "learning_rate": 4.998750082690564e-06, |
| "loss": 0.7944, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.09550561797752809, |
| "grad_norm": 0.8990268707275391, |
| "learning_rate": 4.998735422265475e-06, |
| "loss": 0.7661, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.09585674157303371, |
| "grad_norm": 0.8504504561424255, |
| "learning_rate": 4.998720676385772e-06, |
| "loss": 0.7643, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.09620786516853932, |
| "grad_norm": 0.9197403788566589, |
| "learning_rate": 4.99870584505196e-06, |
| "loss": 0.7763, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.09655898876404495, |
| "grad_norm": 0.9017580151557922, |
| "learning_rate": 4.998690928264547e-06, |
| "loss": 0.8137, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.09691011235955056, |
| "grad_norm": 0.880264937877655, |
| "learning_rate": 4.9986759260240416e-06, |
| "loss": 0.8088, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.09726123595505617, |
| "grad_norm": 0.8751420378684998, |
| "learning_rate": 4.998660838330958e-06, |
| "loss": 0.7802, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.0976123595505618, |
| "grad_norm": 0.8619885444641113, |
| "learning_rate": 4.998645665185812e-06, |
| "loss": 0.7584, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.09796348314606741, |
| "grad_norm": 0.8772746324539185, |
| "learning_rate": 4.998630406589122e-06, |
| "loss": 0.7739, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.09831460674157304, |
| "grad_norm": 0.9019191265106201, |
| "learning_rate": 4.998615062541411e-06, |
| "loss": 0.806, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.09866573033707865, |
| "grad_norm": 0.9600672721862793, |
| "learning_rate": 4.998599633043202e-06, |
| "loss": 0.7902, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.09901685393258428, |
| "grad_norm": 0.9067400097846985, |
| "learning_rate": 4.9985841180950245e-06, |
| "loss": 0.7779, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.09936797752808989, |
| "grad_norm": 0.8911014795303345, |
| "learning_rate": 4.998568517697408e-06, |
| "loss": 0.7605, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.0997191011235955, |
| "grad_norm": 0.8919551968574524, |
| "learning_rate": 4.998552831850886e-06, |
| "loss": 0.7416, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.10007022471910113, |
| "grad_norm": 0.8880231380462646, |
| "learning_rate": 4.998537060555995e-06, |
| "loss": 0.7757, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.10042134831460674, |
| "grad_norm": 0.9274663329124451, |
| "learning_rate": 4.998521203813275e-06, |
| "loss": 0.7925, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.10077247191011236, |
| "grad_norm": 0.9023240804672241, |
| "learning_rate": 4.998505261623266e-06, |
| "loss": 0.8081, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.10112359550561797, |
| "grad_norm": 0.8825272917747498, |
| "learning_rate": 4.998489233986518e-06, |
| "loss": 0.7864, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.1014747191011236, |
| "grad_norm": 0.885826826095581, |
| "learning_rate": 4.9984731209035745e-06, |
| "loss": 0.7759, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.10182584269662921, |
| "grad_norm": 0.8802645802497864, |
| "learning_rate": 4.998456922374988e-06, |
| "loss": 0.769, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.10217696629213484, |
| "grad_norm": 0.9090613126754761, |
| "learning_rate": 4.998440638401314e-06, |
| "loss": 0.7931, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.10252808988764045, |
| "grad_norm": 0.9373996257781982, |
| "learning_rate": 4.998424268983107e-06, |
| "loss": 0.7443, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.10287921348314606, |
| "grad_norm": 0.9003362655639648, |
| "learning_rate": 4.998407814120928e-06, |
| "loss": 0.7983, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.10323033707865169, |
| "grad_norm": 0.9281851053237915, |
| "learning_rate": 4.998391273815341e-06, |
| "loss": 0.8026, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.1035814606741573, |
| "grad_norm": 0.8824150562286377, |
| "learning_rate": 4.998374648066909e-06, |
| "loss": 0.7435, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.10393258426966293, |
| "grad_norm": 0.9006941318511963, |
| "learning_rate": 4.998357936876202e-06, |
| "loss": 0.7567, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.10428370786516854, |
| "grad_norm": 0.8622567057609558, |
| "learning_rate": 4.998341140243792e-06, |
| "loss": 0.7532, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.10463483146067416, |
| "grad_norm": 0.8983448147773743, |
| "learning_rate": 4.9983242581702525e-06, |
| "loss": 0.7686, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.10498595505617977, |
| "grad_norm": 0.8893060684204102, |
| "learning_rate": 4.998307290656162e-06, |
| "loss": 0.7697, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.10533707865168539, |
| "grad_norm": 0.8906247615814209, |
| "learning_rate": 4.998290237702098e-06, |
| "loss": 0.7628, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.10568820224719101, |
| "grad_norm": 0.8763146996498108, |
| "learning_rate": 4.998273099308647e-06, |
| "loss": 0.7742, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.10603932584269662, |
| "grad_norm": 0.8954668045043945, |
| "learning_rate": 4.998255875476394e-06, |
| "loss": 0.7666, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.10639044943820225, |
| "grad_norm": 0.9178535342216492, |
| "learning_rate": 4.9982385662059275e-06, |
| "loss": 0.7679, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.10674157303370786, |
| "grad_norm": 0.9487611651420593, |
| "learning_rate": 4.99822117149784e-06, |
| "loss": 0.7744, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.10709269662921349, |
| "grad_norm": 0.9466339945793152, |
| "learning_rate": 4.998203691352726e-06, |
| "loss": 0.7608, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.1074438202247191, |
| "grad_norm": 0.9768984913825989, |
| "learning_rate": 4.998186125771184e-06, |
| "loss": 0.8028, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.10779494382022473, |
| "grad_norm": 0.9045122861862183, |
| "learning_rate": 4.998168474753814e-06, |
| "loss": 0.7365, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.10814606741573034, |
| "grad_norm": 0.9124319553375244, |
| "learning_rate": 4.99815073830122e-06, |
| "loss": 0.7686, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.10849719101123595, |
| "grad_norm": 0.9172065854072571, |
| "learning_rate": 4.998132916414008e-06, |
| "loss": 0.778, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.10884831460674158, |
| "grad_norm": 0.9291918873786926, |
| "learning_rate": 4.9981150090927876e-06, |
| "loss": 0.7828, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.10919943820224719, |
| "grad_norm": 0.9258780479431152, |
| "learning_rate": 4.998097016338172e-06, |
| "loss": 0.7822, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.10955056179775281, |
| "grad_norm": 0.9169499278068542, |
| "learning_rate": 4.998078938150776e-06, |
| "loss": 0.7735, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.10990168539325842, |
| "grad_norm": 0.8983737826347351, |
| "learning_rate": 4.998060774531218e-06, |
| "loss": 0.7729, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.11025280898876405, |
| "grad_norm": 0.949192225933075, |
| "learning_rate": 4.99804252548012e-06, |
| "loss": 0.8153, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.11060393258426966, |
| "grad_norm": 0.9175098538398743, |
| "learning_rate": 4.998024190998104e-06, |
| "loss": 0.7784, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.11095505617977527, |
| "grad_norm": 0.9033719897270203, |
| "learning_rate": 4.998005771085799e-06, |
| "loss": 0.748, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.1113061797752809, |
| "grad_norm": 0.9692884683609009, |
| "learning_rate": 4.997987265743834e-06, |
| "loss": 0.7795, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.11165730337078651, |
| "grad_norm": 0.8999868035316467, |
| "learning_rate": 4.997968674972842e-06, |
| "loss": 0.7564, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.11200842696629214, |
| "grad_norm": 0.9252747893333435, |
| "learning_rate": 4.997949998773458e-06, |
| "loss": 0.8054, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.11235955056179775, |
| "grad_norm": 0.8910780549049377, |
| "learning_rate": 4.997931237146323e-06, |
| "loss": 0.7671, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.11271067415730338, |
| "grad_norm": 0.9301937818527222, |
| "learning_rate": 4.997912390092077e-06, |
| "loss": 0.7906, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.11306179775280899, |
| "grad_norm": 0.9235373735427856, |
| "learning_rate": 4.997893457611364e-06, |
| "loss": 0.7626, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.11341292134831461, |
| "grad_norm": 0.9015495777130127, |
| "learning_rate": 4.997874439704833e-06, |
| "loss": 0.7634, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.11376404494382023, |
| "grad_norm": 0.8859757781028748, |
| "learning_rate": 4.997855336373133e-06, |
| "loss": 0.7435, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.11411516853932584, |
| "grad_norm": 0.9129098057746887, |
| "learning_rate": 4.9978361476169176e-06, |
| "loss": 0.7505, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.11446629213483146, |
| "grad_norm": 0.9024680256843567, |
| "learning_rate": 4.997816873436845e-06, |
| "loss": 0.7402, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.11481741573033707, |
| "grad_norm": 0.9123002290725708, |
| "learning_rate": 4.997797513833571e-06, |
| "loss": 0.7751, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.1151685393258427, |
| "grad_norm": 0.8788150548934937, |
| "learning_rate": 4.9977780688077606e-06, |
| "loss": 0.7882, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.11551966292134831, |
| "grad_norm": 0.9502024054527283, |
| "learning_rate": 4.997758538360077e-06, |
| "loss": 0.7777, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.11587078651685394, |
| "grad_norm": 0.9458324909210205, |
| "learning_rate": 4.9977389224911884e-06, |
| "loss": 0.7666, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.11622191011235955, |
| "grad_norm": 0.9191945195198059, |
| "learning_rate": 4.997719221201766e-06, |
| "loss": 0.7678, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.11657303370786516, |
| "grad_norm": 0.9368025660514832, |
| "learning_rate": 4.997699434492485e-06, |
| "loss": 0.7689, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.11692415730337079, |
| "grad_norm": 0.9120011925697327, |
| "learning_rate": 4.99767956236402e-06, |
| "loss": 0.7489, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.1172752808988764, |
| "grad_norm": 0.9374188780784607, |
| "learning_rate": 4.997659604817051e-06, |
| "loss": 0.7358, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.11762640449438203, |
| "grad_norm": 0.9129819273948669, |
| "learning_rate": 4.997639561852261e-06, |
| "loss": 0.7591, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.11797752808988764, |
| "grad_norm": 0.9249037504196167, |
| "learning_rate": 4.997619433470334e-06, |
| "loss": 0.782, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.11832865168539326, |
| "grad_norm": 0.8872626423835754, |
| "learning_rate": 4.9975992196719614e-06, |
| "loss": 0.7477, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.11867977528089887, |
| "grad_norm": 0.9343289732933044, |
| "learning_rate": 4.9975789204578315e-06, |
| "loss": 0.7702, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.1190308988764045, |
| "grad_norm": 0.9379141926765442, |
| "learning_rate": 4.997558535828641e-06, |
| "loss": 0.7847, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.11938202247191011, |
| "grad_norm": 0.9160316586494446, |
| "learning_rate": 4.997538065785085e-06, |
| "loss": 0.7658, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.11973314606741572, |
| "grad_norm": 0.9021292328834534, |
| "learning_rate": 4.997517510327864e-06, |
| "loss": 0.8114, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.12008426966292135, |
| "grad_norm": 0.9155046343803406, |
| "learning_rate": 4.997496869457681e-06, |
| "loss": 0.7579, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.12043539325842696, |
| "grad_norm": 0.8777235746383667, |
| "learning_rate": 4.997476143175243e-06, |
| "loss": 0.7832, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.12078651685393259, |
| "grad_norm": 0.8728359341621399, |
| "learning_rate": 4.997455331481258e-06, |
| "loss": 0.7647, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.1211376404494382, |
| "grad_norm": 0.9153422713279724, |
| "learning_rate": 4.997434434376437e-06, |
| "loss": 0.7396, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.12148876404494383, |
| "grad_norm": 0.900204598903656, |
| "learning_rate": 4.9974134518614964e-06, |
| "loss": 0.7619, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.12183988764044944, |
| "grad_norm": 0.9725254774093628, |
| "learning_rate": 4.997392383937153e-06, |
| "loss": 0.7588, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.12219101123595505, |
| "grad_norm": 0.9041215777397156, |
| "learning_rate": 4.997371230604126e-06, |
| "loss": 0.7712, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.12254213483146068, |
| "grad_norm": 0.9517892599105835, |
| "learning_rate": 4.997349991863141e-06, |
| "loss": 0.7758, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.12289325842696629, |
| "grad_norm": 0.9502705335617065, |
| "learning_rate": 4.997328667714922e-06, |
| "loss": 0.8003, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.12324438202247191, |
| "grad_norm": 0.9965865612030029, |
| "learning_rate": 4.9973072581602005e-06, |
| "loss": 0.751, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.12359550561797752, |
| "grad_norm": 0.9221112132072449, |
| "learning_rate": 4.997285763199707e-06, |
| "loss": 0.7679, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.12394662921348315, |
| "grad_norm": 0.940747857093811, |
| "learning_rate": 4.997264182834179e-06, |
| "loss": 0.7863, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.12429775280898876, |
| "grad_norm": 0.925940215587616, |
| "learning_rate": 4.997242517064351e-06, |
| "loss": 0.7711, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.12464887640449439, |
| "grad_norm": 0.8775484561920166, |
| "learning_rate": 4.997220765890967e-06, |
| "loss": 0.7504, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.125, |
| "grad_norm": 0.9700306057929993, |
| "learning_rate": 4.997198929314769e-06, |
| "loss": 0.7385, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.12535112359550563, |
| "grad_norm": 0.9070924520492554, |
| "learning_rate": 4.997177007336505e-06, |
| "loss": 0.7538, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.12570224719101122, |
| "grad_norm": 0.8817124962806702, |
| "learning_rate": 4.997154999956924e-06, |
| "loss": 0.7549, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.12605337078651685, |
| "grad_norm": 0.9093615412712097, |
| "learning_rate": 4.997132907176779e-06, |
| "loss": 0.7621, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.12640449438202248, |
| "grad_norm": 0.8783479332923889, |
| "learning_rate": 4.9971107289968256e-06, |
| "loss": 0.7369, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.1267556179775281, |
| "grad_norm": 0.9039416313171387, |
| "learning_rate": 4.997088465417822e-06, |
| "loss": 0.7664, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.1271067415730337, |
| "grad_norm": 0.9135423302650452, |
| "learning_rate": 4.997066116440529e-06, |
| "loss": 0.7887, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.12745786516853932, |
| "grad_norm": 0.8956524133682251, |
| "learning_rate": 4.997043682065712e-06, |
| "loss": 0.7303, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.12780898876404495, |
| "grad_norm": 0.9221348166465759, |
| "learning_rate": 4.997021162294138e-06, |
| "loss": 0.7707, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.12816011235955055, |
| "grad_norm": 0.920876681804657, |
| "learning_rate": 4.996998557126577e-06, |
| "loss": 0.7825, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.12851123595505617, |
| "grad_norm": 0.9243044853210449, |
| "learning_rate": 4.996975866563802e-06, |
| "loss": 0.7362, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.1288623595505618, |
| "grad_norm": 0.89571613073349, |
| "learning_rate": 4.996953090606589e-06, |
| "loss": 0.7645, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.12921348314606743, |
| "grad_norm": 0.9237456917762756, |
| "learning_rate": 4.9969302292557165e-06, |
| "loss": 0.766, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.12956460674157302, |
| "grad_norm": 0.9664248824119568, |
| "learning_rate": 4.996907282511968e-06, |
| "loss": 0.7801, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.12991573033707865, |
| "grad_norm": 0.9590004682540894, |
| "learning_rate": 4.9968842503761265e-06, |
| "loss": 0.7544, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.13026685393258428, |
| "grad_norm": 0.9026381969451904, |
| "learning_rate": 4.99686113284898e-06, |
| "loss": 0.7756, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.1306179775280899, |
| "grad_norm": 0.9528935551643372, |
| "learning_rate": 4.9968379299313195e-06, |
| "loss": 0.7673, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.1309691011235955, |
| "grad_norm": 0.9700410962104797, |
| "learning_rate": 4.996814641623937e-06, |
| "loss": 0.7746, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.13132022471910113, |
| "grad_norm": 0.931694746017456, |
| "learning_rate": 4.996791267927632e-06, |
| "loss": 0.8078, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.13167134831460675, |
| "grad_norm": 0.9812837839126587, |
| "learning_rate": 4.996767808843203e-06, |
| "loss": 0.7662, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.13202247191011235, |
| "grad_norm": 0.9266939163208008, |
| "learning_rate": 4.996744264371449e-06, |
| "loss": 0.7468, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.13237359550561797, |
| "grad_norm": 0.8732329607009888, |
| "learning_rate": 4.996720634513179e-06, |
| "loss": 0.7774, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.1327247191011236, |
| "grad_norm": 0.9104029536247253, |
| "learning_rate": 4.9966969192692e-06, |
| "loss": 0.7423, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.13307584269662923, |
| "grad_norm": 0.8799782991409302, |
| "learning_rate": 4.996673118640323e-06, |
| "loss": 0.7086, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.13342696629213482, |
| "grad_norm": 0.9318313598632812, |
| "learning_rate": 4.9966492326273605e-06, |
| "loss": 0.7492, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.13377808988764045, |
| "grad_norm": 0.9577075839042664, |
| "learning_rate": 4.996625261231131e-06, |
| "loss": 0.7935, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.13412921348314608, |
| "grad_norm": 0.9912057518959045, |
| "learning_rate": 4.996601204452455e-06, |
| "loss": 0.7515, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.13448033707865167, |
| "grad_norm": 0.9166970252990723, |
| "learning_rate": 4.996577062292154e-06, |
| "loss": 0.7364, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.1348314606741573, |
| "grad_norm": 0.9245953559875488, |
| "learning_rate": 4.996552834751053e-06, |
| "loss": 0.7895, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.13518258426966293, |
| "grad_norm": 0.9169378876686096, |
| "learning_rate": 4.996528521829982e-06, |
| "loss": 0.7632, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.13553370786516855, |
| "grad_norm": 0.8847085237503052, |
| "learning_rate": 4.996504123529772e-06, |
| "loss": 0.7607, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.13588483146067415, |
| "grad_norm": 0.8744500875473022, |
| "learning_rate": 4.996479639851256e-06, |
| "loss": 0.7146, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.13623595505617977, |
| "grad_norm": 0.9195184707641602, |
| "learning_rate": 4.996455070795274e-06, |
| "loss": 0.771, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.1365870786516854, |
| "grad_norm": 0.8896746039390564, |
| "learning_rate": 4.996430416362664e-06, |
| "loss": 0.7239, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.136938202247191, |
| "grad_norm": 0.9265782237052917, |
| "learning_rate": 4.99640567655427e-06, |
| "loss": 0.7607, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.13728932584269662, |
| "grad_norm": 0.9531838893890381, |
| "learning_rate": 4.996380851370939e-06, |
| "loss": 0.7487, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.13764044943820225, |
| "grad_norm": 0.9503312706947327, |
| "learning_rate": 4.996355940813518e-06, |
| "loss": 0.7889, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.13799157303370788, |
| "grad_norm": 0.8666608929634094, |
| "learning_rate": 4.99633094488286e-06, |
| "loss": 0.7539, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.13834269662921347, |
| "grad_norm": 0.9313355684280396, |
| "learning_rate": 4.996305863579821e-06, |
| "loss": 0.7674, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.1386938202247191, |
| "grad_norm": 0.9345743656158447, |
| "learning_rate": 4.9962806969052565e-06, |
| "loss": 0.8057, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.13904494382022473, |
| "grad_norm": 0.922758936882019, |
| "learning_rate": 4.996255444860029e-06, |
| "loss": 0.7404, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.13939606741573032, |
| "grad_norm": 0.8888351321220398, |
| "learning_rate": 4.996230107445001e-06, |
| "loss": 0.753, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.13974719101123595, |
| "grad_norm": 0.9118607640266418, |
| "learning_rate": 4.996204684661039e-06, |
| "loss": 0.7562, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.14009831460674158, |
| "grad_norm": 0.9427488446235657, |
| "learning_rate": 4.996179176509013e-06, |
| "loss": 0.7085, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.1404494382022472, |
| "grad_norm": 0.9284588098526001, |
| "learning_rate": 4.996153582989795e-06, |
| "loss": 0.7674, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.1408005617977528, |
| "grad_norm": 0.8856985569000244, |
| "learning_rate": 4.996127904104261e-06, |
| "loss": 0.7357, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.14115168539325842, |
| "grad_norm": 0.9342722296714783, |
| "learning_rate": 4.996102139853289e-06, |
| "loss": 0.7387, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.14150280898876405, |
| "grad_norm": 0.9672777056694031, |
| "learning_rate": 4.996076290237759e-06, |
| "loss": 0.7681, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.14185393258426968, |
| "grad_norm": 0.929104745388031, |
| "learning_rate": 4.996050355258556e-06, |
| "loss": 0.7266, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.14220505617977527, |
| "grad_norm": 0.9371368288993835, |
| "learning_rate": 4.996024334916567e-06, |
| "loss": 0.7841, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.1425561797752809, |
| "grad_norm": 0.8848541975021362, |
| "learning_rate": 4.995998229212681e-06, |
| "loss": 0.7659, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.14290730337078653, |
| "grad_norm": 0.9632852673530579, |
| "learning_rate": 4.995972038147792e-06, |
| "loss": 0.7871, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.14325842696629212, |
| "grad_norm": 0.9157865047454834, |
| "learning_rate": 4.9959457617227946e-06, |
| "loss": 0.7529, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.14360955056179775, |
| "grad_norm": 0.9337014555931091, |
| "learning_rate": 4.995919399938588e-06, |
| "loss": 0.7427, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.14396067415730338, |
| "grad_norm": 0.9515252113342285, |
| "learning_rate": 4.995892952796074e-06, |
| "loss": 0.7577, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.144311797752809, |
| "grad_norm": 0.9301478266716003, |
| "learning_rate": 4.995866420296157e-06, |
| "loss": 0.7825, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.1446629213483146, |
| "grad_norm": 1.4020230770111084, |
| "learning_rate": 4.995839802439745e-06, |
| "loss": 0.7434, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.14501404494382023, |
| "grad_norm": 0.9150766730308533, |
| "learning_rate": 4.995813099227745e-06, |
| "loss": 0.7038, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.14536516853932585, |
| "grad_norm": 0.9467854499816895, |
| "learning_rate": 4.995786310661075e-06, |
| "loss": 0.7721, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.14571629213483145, |
| "grad_norm": 0.8949883580207825, |
| "learning_rate": 4.995759436740648e-06, |
| "loss": 0.7174, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.14606741573033707, |
| "grad_norm": 0.9632487893104553, |
| "learning_rate": 4.995732477467383e-06, |
| "loss": 0.8002, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.1464185393258427, |
| "grad_norm": 0.8924590945243835, |
| "learning_rate": 4.995705432842204e-06, |
| "loss": 0.7575, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.14676966292134833, |
| "grad_norm": 0.8902977705001831, |
| "learning_rate": 4.995678302866035e-06, |
| "loss": 0.73, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.14712078651685392, |
| "grad_norm": 0.9031338095664978, |
| "learning_rate": 4.9956510875398035e-06, |
| "loss": 0.7697, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.14747191011235955, |
| "grad_norm": 0.9241436719894409, |
| "learning_rate": 4.99562378686444e-06, |
| "loss": 0.7594, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.14782303370786518, |
| "grad_norm": 1.0044499635696411, |
| "learning_rate": 4.995596400840879e-06, |
| "loss": 0.7938, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.14817415730337077, |
| "grad_norm": 0.9393342733383179, |
| "learning_rate": 4.995568929470055e-06, |
| "loss": 0.8142, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.1485252808988764, |
| "grad_norm": 0.9527561664581299, |
| "learning_rate": 4.99554137275291e-06, |
| "loss": 0.7825, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.14887640449438203, |
| "grad_norm": 0.9195716381072998, |
| "learning_rate": 4.995513730690386e-06, |
| "loss": 0.7577, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.14922752808988765, |
| "grad_norm": 0.9349954128265381, |
| "learning_rate": 4.995486003283428e-06, |
| "loss": 0.7159, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.14957865168539325, |
| "grad_norm": 0.922205924987793, |
| "learning_rate": 4.9954581905329845e-06, |
| "loss": 0.8017, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.14992977528089887, |
| "grad_norm": 0.955886721611023, |
| "learning_rate": 4.995430292440005e-06, |
| "loss": 0.7813, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.1502808988764045, |
| "grad_norm": 0.9729349613189697, |
| "learning_rate": 4.995402309005445e-06, |
| "loss": 0.7744, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.1506320224719101, |
| "grad_norm": 0.8943726420402527, |
| "learning_rate": 4.995374240230262e-06, |
| "loss": 0.775, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.15098314606741572, |
| "grad_norm": 0.9235265254974365, |
| "learning_rate": 4.995346086115416e-06, |
| "loss": 0.759, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.15133426966292135, |
| "grad_norm": 0.9518915414810181, |
| "learning_rate": 4.995317846661868e-06, |
| "loss": 0.7563, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.15168539325842698, |
| "grad_norm": 0.9186070561408997, |
| "learning_rate": 4.9952895218705845e-06, |
| "loss": 0.7551, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.15203651685393257, |
| "grad_norm": 0.9712167978286743, |
| "learning_rate": 4.995261111742536e-06, |
| "loss": 0.7765, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.1523876404494382, |
| "grad_norm": 0.9043035507202148, |
| "learning_rate": 4.995232616278691e-06, |
| "loss": 0.7224, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.15273876404494383, |
| "grad_norm": 1.357176661491394, |
| "learning_rate": 4.995204035480027e-06, |
| "loss": 0.7538, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.15308988764044945, |
| "grad_norm": 0.9199062585830688, |
| "learning_rate": 4.995175369347521e-06, |
| "loss": 0.7396, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.15344101123595505, |
| "grad_norm": 0.9276919960975647, |
| "learning_rate": 4.995146617882151e-06, |
| "loss": 0.7626, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.15379213483146068, |
| "grad_norm": 0.8831456303596497, |
| "learning_rate": 4.995117781084901e-06, |
| "loss": 0.7692, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.1541432584269663, |
| "grad_norm": 0.9020641446113586, |
| "learning_rate": 4.99508885895676e-06, |
| "loss": 0.7578, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.1544943820224719, |
| "grad_norm": 0.9140202403068542, |
| "learning_rate": 4.995059851498714e-06, |
| "loss": 0.7728, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.15484550561797752, |
| "grad_norm": 0.9247411489486694, |
| "learning_rate": 4.995030758711756e-06, |
| "loss": 0.7397, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.15519662921348315, |
| "grad_norm": 0.8848091959953308, |
| "learning_rate": 4.995001580596882e-06, |
| "loss": 0.6819, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.15554775280898878, |
| "grad_norm": 1.0068150758743286, |
| "learning_rate": 4.994972317155088e-06, |
| "loss": 0.7437, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.15589887640449437, |
| "grad_norm": 0.9932318329811096, |
| "learning_rate": 4.994942968387376e-06, |
| "loss": 0.7693, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.15625, |
| "grad_norm": 0.920421838760376, |
| "learning_rate": 4.99491353429475e-06, |
| "loss": 0.7649, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.15660112359550563, |
| "grad_norm": 0.9176994562149048, |
| "learning_rate": 4.9948840148782165e-06, |
| "loss": 0.7391, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.15695224719101122, |
| "grad_norm": 0.9588678479194641, |
| "learning_rate": 4.994854410138783e-06, |
| "loss": 0.7615, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.15730337078651685, |
| "grad_norm": 0.9149226546287537, |
| "learning_rate": 4.9948247200774645e-06, |
| "loss": 0.75, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.15765449438202248, |
| "grad_norm": 0.8968827724456787, |
| "learning_rate": 4.994794944695275e-06, |
| "loss": 0.7582, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.1580056179775281, |
| "grad_norm": 0.8770960569381714, |
| "learning_rate": 4.994765083993234e-06, |
| "loss": 0.7121, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.1583567415730337, |
| "grad_norm": 0.9019699692726135, |
| "learning_rate": 4.9947351379723605e-06, |
| "loss": 0.7131, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.15870786516853932, |
| "grad_norm": 0.8884302973747253, |
| "learning_rate": 4.994705106633682e-06, |
| "loss": 0.7292, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.15905898876404495, |
| "grad_norm": 0.9590429663658142, |
| "learning_rate": 4.994674989978222e-06, |
| "loss": 0.7454, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.15941011235955055, |
| "grad_norm": 0.8948072195053101, |
| "learning_rate": 4.9946447880070124e-06, |
| "loss": 0.7667, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.15976123595505617, |
| "grad_norm": 0.9360845685005188, |
| "learning_rate": 4.994614500721086e-06, |
| "loss": 0.7633, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.1601123595505618, |
| "grad_norm": 0.9061880111694336, |
| "learning_rate": 4.9945841281214785e-06, |
| "loss": 0.7026, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.16046348314606743, |
| "grad_norm": 0.918328583240509, |
| "learning_rate": 4.994553670209228e-06, |
| "loss": 0.7367, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.16081460674157302, |
| "grad_norm": 0.9201732277870178, |
| "learning_rate": 4.994523126985377e-06, |
| "loss": 0.7326, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.16116573033707865, |
| "grad_norm": 0.9703920483589172, |
| "learning_rate": 4.9944924984509695e-06, |
| "loss": 0.7638, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.16151685393258428, |
| "grad_norm": 0.8904596567153931, |
| "learning_rate": 4.994461784607053e-06, |
| "loss": 0.7717, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.1618679775280899, |
| "grad_norm": 0.9822039604187012, |
| "learning_rate": 4.994430985454678e-06, |
| "loss": 0.7674, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.1622191011235955, |
| "grad_norm": 0.9268808364868164, |
| "learning_rate": 4.9944001009948986e-06, |
| "loss": 0.7725, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.16257022471910113, |
| "grad_norm": 0.8941001296043396, |
| "learning_rate": 4.994369131228769e-06, |
| "loss": 0.7361, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.16292134831460675, |
| "grad_norm": 0.8854597210884094, |
| "learning_rate": 4.99433807615735e-06, |
| "loss": 0.7426, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.16327247191011235, |
| "grad_norm": 1.0173115730285645, |
| "learning_rate": 4.994306935781704e-06, |
| "loss": 0.7724, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.16362359550561797, |
| "grad_norm": 0.9098212122917175, |
| "learning_rate": 4.994275710102894e-06, |
| "loss": 0.6989, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.1639747191011236, |
| "grad_norm": 0.9020840525627136, |
| "learning_rate": 4.99424439912199e-06, |
| "loss": 0.7312, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.16432584269662923, |
| "grad_norm": 0.9145157337188721, |
| "learning_rate": 4.994213002840062e-06, |
| "loss": 0.7334, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.16467696629213482, |
| "grad_norm": 0.9347027540206909, |
| "learning_rate": 4.994181521258183e-06, |
| "loss": 0.7508, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.16502808988764045, |
| "grad_norm": 0.9202942252159119, |
| "learning_rate": 4.994149954377431e-06, |
| "loss": 0.7351, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.16537921348314608, |
| "grad_norm": 0.9013196229934692, |
| "learning_rate": 4.994118302198884e-06, |
| "loss": 0.7618, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.16573033707865167, |
| "grad_norm": 0.9281770586967468, |
| "learning_rate": 4.9940865647236255e-06, |
| "loss": 0.7661, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.1660814606741573, |
| "grad_norm": 0.9201061129570007, |
| "learning_rate": 4.99405474195274e-06, |
| "loss": 0.7203, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.16643258426966293, |
| "grad_norm": 0.9211746454238892, |
| "learning_rate": 4.994022833887318e-06, |
| "loss": 0.724, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.16678370786516855, |
| "grad_norm": 0.9289520978927612, |
| "learning_rate": 4.993990840528448e-06, |
| "loss": 0.7907, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.16713483146067415, |
| "grad_norm": 0.9343749284744263, |
| "learning_rate": 4.993958761877227e-06, |
| "loss": 0.7707, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.16748595505617977, |
| "grad_norm": 0.9140953421592712, |
| "learning_rate": 4.9939265979347495e-06, |
| "loss": 0.7558, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.1678370786516854, |
| "grad_norm": 0.8889614939689636, |
| "learning_rate": 4.993894348702117e-06, |
| "loss": 0.7615, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.168188202247191, |
| "grad_norm": 0.8894888162612915, |
| "learning_rate": 4.993862014180431e-06, |
| "loss": 0.7536, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.16853932584269662, |
| "grad_norm": 0.8904688954353333, |
| "learning_rate": 4.993829594370798e-06, |
| "loss": 0.7471, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.16889044943820225, |
| "grad_norm": 0.9064297080039978, |
| "learning_rate": 4.993797089274327e-06, |
| "loss": 0.7524, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.16924157303370788, |
| "grad_norm": 0.8890877962112427, |
| "learning_rate": 4.99376449889213e-06, |
| "loss": 0.7727, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.16959269662921347, |
| "grad_norm": 0.9089619517326355, |
| "learning_rate": 4.993731823225321e-06, |
| "loss": 0.7661, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.1699438202247191, |
| "grad_norm": 0.9284274578094482, |
| "learning_rate": 4.993699062275017e-06, |
| "loss": 0.7337, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.17029494382022473, |
| "grad_norm": 0.9275715947151184, |
| "learning_rate": 4.99366621604234e-06, |
| "loss": 0.7417, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.17064606741573032, |
| "grad_norm": 0.9078076481819153, |
| "learning_rate": 4.9936332845284105e-06, |
| "loss": 0.7366, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.17099719101123595, |
| "grad_norm": 0.9106430411338806, |
| "learning_rate": 4.993600267734359e-06, |
| "loss": 0.7738, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.17134831460674158, |
| "grad_norm": 1.0632219314575195, |
| "learning_rate": 4.99356716566131e-06, |
| "loss": 0.724, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.1716994382022472, |
| "grad_norm": 0.9347037076950073, |
| "learning_rate": 4.9935339783103985e-06, |
| "loss": 0.7273, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.1720505617977528, |
| "grad_norm": 0.9145859479904175, |
| "learning_rate": 4.9935007056827586e-06, |
| "loss": 0.7603, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.17240168539325842, |
| "grad_norm": 0.924089252948761, |
| "learning_rate": 4.993467347779529e-06, |
| "loss": 0.7289, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.17275280898876405, |
| "grad_norm": 0.9254995584487915, |
| "learning_rate": 4.993433904601849e-06, |
| "loss": 0.7614, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.17310393258426968, |
| "grad_norm": 0.9493343234062195, |
| "learning_rate": 4.993400376150863e-06, |
| "loss": 0.7889, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.17345505617977527, |
| "grad_norm": 0.8984596729278564, |
| "learning_rate": 4.9933667624277184e-06, |
| "loss": 0.7304, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.1738061797752809, |
| "grad_norm": 0.9199086427688599, |
| "learning_rate": 4.9933330634335634e-06, |
| "loss": 0.7386, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.17415730337078653, |
| "grad_norm": 0.9047686457633972, |
| "learning_rate": 4.993299279169552e-06, |
| "loss": 0.7469, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.17450842696629212, |
| "grad_norm": 0.9360693097114563, |
| "learning_rate": 4.993265409636839e-06, |
| "loss": 0.7595, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.17485955056179775, |
| "grad_norm": 0.9289848804473877, |
| "learning_rate": 4.993231454836581e-06, |
| "loss": 0.7701, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.17521067415730338, |
| "grad_norm": 0.8970044255256653, |
| "learning_rate": 4.993197414769942e-06, |
| "loss": 0.7191, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.175561797752809, |
| "grad_norm": 0.92835932970047, |
| "learning_rate": 4.993163289438085e-06, |
| "loss": 0.7395, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.1759129213483146, |
| "grad_norm": 0.9345782995223999, |
| "learning_rate": 4.993129078842176e-06, |
| "loss": 0.8146, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.17626404494382023, |
| "grad_norm": 0.9650789499282837, |
| "learning_rate": 4.993094782983386e-06, |
| "loss": 0.7634, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.17661516853932585, |
| "grad_norm": 0.9201986193656921, |
| "learning_rate": 4.993060401862888e-06, |
| "loss": 0.7476, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.17696629213483145, |
| "grad_norm": 0.964146077632904, |
| "learning_rate": 4.993025935481858e-06, |
| "loss": 0.7822, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.17731741573033707, |
| "grad_norm": 0.8705427646636963, |
| "learning_rate": 4.992991383841475e-06, |
| "loss": 0.6773, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.1776685393258427, |
| "grad_norm": 0.9599820971488953, |
| "learning_rate": 4.992956746942919e-06, |
| "loss": 0.7516, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.17801966292134833, |
| "grad_norm": 0.9984298944473267, |
| "learning_rate": 4.992922024787374e-06, |
| "loss": 0.7714, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.17837078651685392, |
| "grad_norm": 0.9363481402397156, |
| "learning_rate": 4.992887217376032e-06, |
| "loss": 0.784, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.17872191011235955, |
| "grad_norm": 0.8900370597839355, |
| "learning_rate": 4.992852324710078e-06, |
| "loss": 0.7749, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.17907303370786518, |
| "grad_norm": 0.9664030075073242, |
| "learning_rate": 4.9928173467907085e-06, |
| "loss": 0.7635, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.17942415730337077, |
| "grad_norm": 0.9362422227859497, |
| "learning_rate": 4.9927822836191185e-06, |
| "loss": 0.7497, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.1797752808988764, |
| "grad_norm": 0.8915205001831055, |
| "learning_rate": 4.992747135196508e-06, |
| "loss": 0.7257, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.18012640449438203, |
| "grad_norm": 0.9136397838592529, |
| "learning_rate": 4.992711901524079e-06, |
| "loss": 0.733, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.18047752808988765, |
| "grad_norm": 0.9412914514541626, |
| "learning_rate": 4.992676582603035e-06, |
| "loss": 0.7757, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.18082865168539325, |
| "grad_norm": 0.8885975480079651, |
| "learning_rate": 4.992641178434586e-06, |
| "loss": 0.7219, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.18117977528089887, |
| "grad_norm": 0.950423538684845, |
| "learning_rate": 4.992605689019941e-06, |
| "loss": 0.7596, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.1815308988764045, |
| "grad_norm": 0.9234071969985962, |
| "learning_rate": 4.992570114360314e-06, |
| "loss": 0.7624, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.1818820224719101, |
| "grad_norm": 0.9259780049324036, |
| "learning_rate": 4.9925344544569225e-06, |
| "loss": 0.7755, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.18223314606741572, |
| "grad_norm": 0.9503250122070312, |
| "learning_rate": 4.992498709310986e-06, |
| "loss": 0.7051, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.18258426966292135, |
| "grad_norm": 0.9408488273620605, |
| "learning_rate": 4.992462878923725e-06, |
| "loss": 0.7281, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.18293539325842698, |
| "grad_norm": 0.9426605105400085, |
| "learning_rate": 4.992426963296369e-06, |
| "loss": 0.7666, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.18328651685393257, |
| "grad_norm": 0.9386591911315918, |
| "learning_rate": 4.992390962430142e-06, |
| "loss": 0.7352, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.1836376404494382, |
| "grad_norm": 0.9154810309410095, |
| "learning_rate": 4.992354876326276e-06, |
| "loss": 0.7597, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.18398876404494383, |
| "grad_norm": 0.936019241809845, |
| "learning_rate": 4.992318704986007e-06, |
| "loss": 0.7497, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.18433988764044945, |
| "grad_norm": 0.9355201721191406, |
| "learning_rate": 4.99228244841057e-06, |
| "loss": 0.7868, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.18469101123595505, |
| "grad_norm": 0.9210653305053711, |
| "learning_rate": 4.9922461066012075e-06, |
| "loss": 0.6998, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.18504213483146068, |
| "grad_norm": 0.9298537969589233, |
| "learning_rate": 4.99220967955916e-06, |
| "loss": 0.7253, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.1853932584269663, |
| "grad_norm": 0.9049412608146667, |
| "learning_rate": 4.9921731672856746e-06, |
| "loss": 0.7621, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.1857443820224719, |
| "grad_norm": 0.9317775964736938, |
| "learning_rate": 4.992136569781999e-06, |
| "loss": 0.7393, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.18609550561797752, |
| "grad_norm": 0.923850417137146, |
| "learning_rate": 4.9920998870493856e-06, |
| "loss": 0.7384, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.18644662921348315, |
| "grad_norm": 0.9435550570487976, |
| "learning_rate": 4.992063119089088e-06, |
| "loss": 0.7811, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.18679775280898878, |
| "grad_norm": 0.9249152541160583, |
| "learning_rate": 4.992026265902364e-06, |
| "loss": 0.7451, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.18714887640449437, |
| "grad_norm": 0.9371122717857361, |
| "learning_rate": 4.991989327490475e-06, |
| "loss": 0.7507, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.1875, |
| "grad_norm": 0.8902071714401245, |
| "learning_rate": 4.991952303854683e-06, |
| "loss": 0.7445, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.18785112359550563, |
| "grad_norm": 0.8958204984664917, |
| "learning_rate": 4.991915194996254e-06, |
| "loss": 0.7029, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.18820224719101122, |
| "grad_norm": 0.9265782237052917, |
| "learning_rate": 4.991878000916459e-06, |
| "loss": 0.7421, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.18855337078651685, |
| "grad_norm": 0.8842232823371887, |
| "learning_rate": 4.991840721616568e-06, |
| "loss": 0.7678, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.18890449438202248, |
| "grad_norm": 0.9105944037437439, |
| "learning_rate": 4.9918033570978565e-06, |
| "loss": 0.7257, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.1892556179775281, |
| "grad_norm": 0.875582754611969, |
| "learning_rate": 4.991765907361603e-06, |
| "loss": 0.739, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.1896067415730337, |
| "grad_norm": 0.90901780128479, |
| "learning_rate": 4.991728372409087e-06, |
| "loss": 0.7467, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.18995786516853932, |
| "grad_norm": 0.9725927710533142, |
| "learning_rate": 4.991690752241594e-06, |
| "loss": 0.7779, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.19030898876404495, |
| "grad_norm": 0.9324540495872498, |
| "learning_rate": 4.991653046860408e-06, |
| "loss": 0.7337, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.19066011235955055, |
| "grad_norm": 0.929425060749054, |
| "learning_rate": 4.9916152562668205e-06, |
| "loss": 0.7432, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.19101123595505617, |
| "grad_norm": 0.9117162227630615, |
| "learning_rate": 4.991577380462124e-06, |
| "loss": 0.7556, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.1913623595505618, |
| "grad_norm": 0.9356319904327393, |
| "learning_rate": 4.9915394194476115e-06, |
| "loss": 0.783, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.19171348314606743, |
| "grad_norm": 0.9007960557937622, |
| "learning_rate": 4.991501373224584e-06, |
| "loss": 0.7485, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.19206460674157302, |
| "grad_norm": 0.9083287715911865, |
| "learning_rate": 4.991463241794342e-06, |
| "loss": 0.7497, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.19241573033707865, |
| "grad_norm": 0.9226256608963013, |
| "learning_rate": 4.991425025158188e-06, |
| "loss": 0.7687, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.19276685393258428, |
| "grad_norm": 0.8791014552116394, |
| "learning_rate": 4.991386723317432e-06, |
| "loss": 0.7542, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.1931179775280899, |
| "grad_norm": 0.8970210552215576, |
| "learning_rate": 4.991348336273381e-06, |
| "loss": 0.7169, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.1934691011235955, |
| "grad_norm": 0.9191828966140747, |
| "learning_rate": 4.9913098640273475e-06, |
| "loss": 0.7261, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.19382022471910113, |
| "grad_norm": 0.9275765419006348, |
| "learning_rate": 4.991271306580649e-06, |
| "loss": 0.7469, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.19417134831460675, |
| "grad_norm": 0.9179903864860535, |
| "learning_rate": 4.9912326639346045e-06, |
| "loss": 0.7385, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.19452247191011235, |
| "grad_norm": 0.9225823283195496, |
| "learning_rate": 4.991193936090535e-06, |
| "loss": 0.6986, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.19487359550561797, |
| "grad_norm": 0.9241051077842712, |
| "learning_rate": 4.991155123049764e-06, |
| "loss": 0.783, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.1952247191011236, |
| "grad_norm": 0.9368206858634949, |
| "learning_rate": 4.991116224813619e-06, |
| "loss": 0.7712, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.19557584269662923, |
| "grad_norm": 0.9148329496383667, |
| "learning_rate": 4.991077241383431e-06, |
| "loss": 0.8179, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.19592696629213482, |
| "grad_norm": 0.8920343518257141, |
| "learning_rate": 4.991038172760533e-06, |
| "loss": 0.7271, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.19627808988764045, |
| "grad_norm": 0.9275470972061157, |
| "learning_rate": 4.990999018946262e-06, |
| "loss": 0.7704, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.19662921348314608, |
| "grad_norm": 0.9231950640678406, |
| "learning_rate": 4.990959779941955e-06, |
| "loss": 0.6871, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.19698033707865167, |
| "grad_norm": 0.9179824590682983, |
| "learning_rate": 4.990920455748955e-06, |
| "loss": 0.7573, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.1973314606741573, |
| "grad_norm": 0.9076899290084839, |
| "learning_rate": 4.9908810463686076e-06, |
| "loss": 0.7463, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.19768258426966293, |
| "grad_norm": 0.9685149788856506, |
| "learning_rate": 4.99084155180226e-06, |
| "loss": 0.7207, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.19803370786516855, |
| "grad_norm": 1.3696738481521606, |
| "learning_rate": 4.990801972051262e-06, |
| "loss": 0.768, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.19838483146067415, |
| "grad_norm": 0.9174782633781433, |
| "learning_rate": 4.990762307116969e-06, |
| "loss": 0.7711, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.19873595505617977, |
| "grad_norm": 0.9449765086174011, |
| "learning_rate": 4.990722557000736e-06, |
| "loss": 0.7267, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.1990870786516854, |
| "grad_norm": 0.9291553497314453, |
| "learning_rate": 4.990682721703922e-06, |
| "loss": 0.7017, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.199438202247191, |
| "grad_norm": 0.920346736907959, |
| "learning_rate": 4.9906428012278915e-06, |
| "loss": 0.7486, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.19978932584269662, |
| "grad_norm": 0.9206670522689819, |
| "learning_rate": 4.990602795574007e-06, |
| "loss": 0.7833, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.20014044943820225, |
| "grad_norm": 0.9318204522132874, |
| "learning_rate": 4.99056270474364e-06, |
| "loss": 0.7467, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.20049157303370788, |
| "grad_norm": 0.9259383082389832, |
| "learning_rate": 4.990522528738159e-06, |
| "loss": 0.7196, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.20084269662921347, |
| "grad_norm": 0.9336618185043335, |
| "learning_rate": 4.9904822675589385e-06, |
| "loss": 0.7498, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.2011938202247191, |
| "grad_norm": 0.9120561480522156, |
| "learning_rate": 4.990441921207356e-06, |
| "loss": 0.7621, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.20154494382022473, |
| "grad_norm": 0.9047495126724243, |
| "learning_rate": 4.99040148968479e-06, |
| "loss": 0.7103, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.20189606741573032, |
| "grad_norm": 0.9809283018112183, |
| "learning_rate": 4.990360972992625e-06, |
| "loss": 0.7341, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.20224719101123595, |
| "grad_norm": 1.0163681507110596, |
| "learning_rate": 4.990320371132245e-06, |
| "loss": 0.7219, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.20259831460674158, |
| "grad_norm": 0.8804516792297363, |
| "learning_rate": 4.99027968410504e-06, |
| "loss": 0.718, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.2029494382022472, |
| "grad_norm": 0.9172428846359253, |
| "learning_rate": 4.9902389119124005e-06, |
| "loss": 0.7361, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.2033005617977528, |
| "grad_norm": 0.9390194416046143, |
| "learning_rate": 4.990198054555721e-06, |
| "loss": 0.7195, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.20365168539325842, |
| "grad_norm": 0.9496874213218689, |
| "learning_rate": 4.990157112036399e-06, |
| "loss": 0.7305, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.20400280898876405, |
| "grad_norm": 0.9417531490325928, |
| "learning_rate": 4.990116084355835e-06, |
| "loss": 0.7647, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.20435393258426968, |
| "grad_norm": 0.8786060810089111, |
| "learning_rate": 4.99007497151543e-06, |
| "loss": 0.7654, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.20470505617977527, |
| "grad_norm": 0.8952715992927551, |
| "learning_rate": 4.990033773516594e-06, |
| "loss": 0.7149, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.2050561797752809, |
| "grad_norm": 0.9071550369262695, |
| "learning_rate": 4.989992490360734e-06, |
| "loss": 0.7368, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.20540730337078653, |
| "grad_norm": 0.8937875628471375, |
| "learning_rate": 4.989951122049259e-06, |
| "loss": 0.7469, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.20575842696629212, |
| "grad_norm": 0.9116646647453308, |
| "learning_rate": 4.989909668583588e-06, |
| "loss": 0.7348, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.20610955056179775, |
| "grad_norm": 0.9020063877105713, |
| "learning_rate": 4.989868129965137e-06, |
| "loss": 0.6871, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.20646067415730338, |
| "grad_norm": 0.8991970419883728, |
| "learning_rate": 4.989826506195326e-06, |
| "loss": 0.7136, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.206811797752809, |
| "grad_norm": 0.9156064987182617, |
| "learning_rate": 4.9897847972755795e-06, |
| "loss": 0.7082, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.2071629213483146, |
| "grad_norm": 0.9545648097991943, |
| "learning_rate": 4.989743003207323e-06, |
| "loss": 0.7628, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.20751404494382023, |
| "grad_norm": 0.9077872633934021, |
| "learning_rate": 4.989701123991987e-06, |
| "loss": 0.7595, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.20786516853932585, |
| "grad_norm": 0.9414958953857422, |
| "learning_rate": 4.989659159631003e-06, |
| "loss": 0.7604, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.20821629213483145, |
| "grad_norm": 0.9313225150108337, |
| "learning_rate": 4.989617110125805e-06, |
| "loss": 0.7175, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.20856741573033707, |
| "grad_norm": 0.9247497320175171, |
| "learning_rate": 4.989574975477833e-06, |
| "loss": 0.7497, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.2089185393258427, |
| "grad_norm": 0.8851250410079956, |
| "learning_rate": 4.989532755688527e-06, |
| "loss": 0.665, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.20926966292134833, |
| "grad_norm": 0.9276209473609924, |
| "learning_rate": 4.989490450759331e-06, |
| "loss": 0.6919, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.20962078651685392, |
| "grad_norm": 0.9292905330657959, |
| "learning_rate": 4.9894480606916925e-06, |
| "loss": 0.7545, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.20997191011235955, |
| "grad_norm": 0.9364456534385681, |
| "learning_rate": 4.98940558548706e-06, |
| "loss": 0.8026, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.21032303370786518, |
| "grad_norm": 0.9156911969184875, |
| "learning_rate": 4.9893630251468874e-06, |
| "loss": 0.7279, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.21067415730337077, |
| "grad_norm": 0.9316357374191284, |
| "learning_rate": 4.989320379672629e-06, |
| "loss": 0.7664, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.2110252808988764, |
| "grad_norm": 0.945326566696167, |
| "learning_rate": 4.989277649065744e-06, |
| "loss": 0.7456, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.21137640449438203, |
| "grad_norm": 0.9577007293701172, |
| "learning_rate": 4.989234833327693e-06, |
| "loss": 0.7097, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.21172752808988765, |
| "grad_norm": 0.898679792881012, |
| "learning_rate": 4.989191932459941e-06, |
| "loss": 0.7085, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.21207865168539325, |
| "grad_norm": 0.9334394931793213, |
| "learning_rate": 4.989148946463955e-06, |
| "loss": 0.7441, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.21242977528089887, |
| "grad_norm": 0.9490911364555359, |
| "learning_rate": 4.989105875341206e-06, |
| "loss": 0.7571, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.2127808988764045, |
| "grad_norm": 0.9073877930641174, |
| "learning_rate": 4.989062719093164e-06, |
| "loss": 0.748, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.2131320224719101, |
| "grad_norm": 0.9007843136787415, |
| "learning_rate": 4.989019477721309e-06, |
| "loss": 0.7099, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.21348314606741572, |
| "grad_norm": 0.9324343204498291, |
| "learning_rate": 4.988976151227116e-06, |
| "loss": 0.7688, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.21383426966292135, |
| "grad_norm": 0.9213019013404846, |
| "learning_rate": 4.988932739612069e-06, |
| "loss": 0.7302, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.21418539325842698, |
| "grad_norm": 0.9618836641311646, |
| "learning_rate": 4.988889242877653e-06, |
| "loss": 0.7484, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.21453651685393257, |
| "grad_norm": 0.9354860186576843, |
| "learning_rate": 4.988845661025353e-06, |
| "loss": 0.7192, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.2148876404494382, |
| "grad_norm": 0.9579604268074036, |
| "learning_rate": 4.988801994056663e-06, |
| "loss": 0.7727, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.21523876404494383, |
| "grad_norm": 0.8972249627113342, |
| "learning_rate": 4.988758241973073e-06, |
| "loss": 0.7816, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.21558988764044945, |
| "grad_norm": 0.9025394916534424, |
| "learning_rate": 4.988714404776082e-06, |
| "loss": 0.6994, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.21594101123595505, |
| "grad_norm": 0.9159374833106995, |
| "learning_rate": 4.9886704824671874e-06, |
| "loss": 0.7319, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.21629213483146068, |
| "grad_norm": 0.9703385829925537, |
| "learning_rate": 4.988626475047892e-06, |
| "loss": 0.7739, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.2166432584269663, |
| "grad_norm": 0.9332022070884705, |
| "learning_rate": 4.9885823825197e-06, |
| "loss": 0.7209, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.2169943820224719, |
| "grad_norm": 0.9919439554214478, |
| "learning_rate": 4.988538204884121e-06, |
| "loss": 0.7631, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.21734550561797752, |
| "grad_norm": 0.9095845818519592, |
| "learning_rate": 4.988493942142664e-06, |
| "loss": 0.7311, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.21769662921348315, |
| "grad_norm": 0.9250304698944092, |
| "learning_rate": 4.988449594296845e-06, |
| "loss": 0.7616, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.21804775280898878, |
| "grad_norm": 0.9156744480133057, |
| "learning_rate": 4.988405161348178e-06, |
| "loss": 0.7446, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.21839887640449437, |
| "grad_norm": 0.9540274143218994, |
| "learning_rate": 4.988360643298184e-06, |
| "loss": 0.745, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.21875, |
| "grad_norm": 0.9059514999389648, |
| "learning_rate": 4.988316040148386e-06, |
| "loss": 0.7169, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.21910112359550563, |
| "grad_norm": 0.8970401287078857, |
| "learning_rate": 4.988271351900308e-06, |
| "loss": 0.7551, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.21945224719101122, |
| "grad_norm": 0.8862085938453674, |
| "learning_rate": 4.988226578555479e-06, |
| "loss": 0.7141, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.21980337078651685, |
| "grad_norm": 0.9373227953910828, |
| "learning_rate": 4.98818172011543e-06, |
| "loss": 0.7637, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.22015449438202248, |
| "grad_norm": 0.9211307168006897, |
| "learning_rate": 4.988136776581697e-06, |
| "loss": 0.7428, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.2205056179775281, |
| "grad_norm": 0.9092309474945068, |
| "learning_rate": 4.9880917479558135e-06, |
| "loss": 0.7341, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.2208567415730337, |
| "grad_norm": 0.9342793822288513, |
| "learning_rate": 4.988046634239322e-06, |
| "loss": 0.7397, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.22120786516853932, |
| "grad_norm": 0.9277600646018982, |
| "learning_rate": 4.9880014354337645e-06, |
| "loss": 0.718, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.22155898876404495, |
| "grad_norm": 1.1010304689407349, |
| "learning_rate": 4.987956151540687e-06, |
| "loss": 0.7199, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.22191011235955055, |
| "grad_norm": 0.9733719229698181, |
| "learning_rate": 4.9879107825616375e-06, |
| "loss": 0.7518, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.22226123595505617, |
| "grad_norm": 0.9306866526603699, |
| "learning_rate": 4.9878653284981694e-06, |
| "loss": 0.7388, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.2226123595505618, |
| "grad_norm": 0.9351629614830017, |
| "learning_rate": 4.987819789351835e-06, |
| "loss": 0.7326, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.22296348314606743, |
| "grad_norm": 0.9454215168952942, |
| "learning_rate": 4.987774165124194e-06, |
| "loss": 0.7929, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.22331460674157302, |
| "grad_norm": 0.8752790093421936, |
| "learning_rate": 4.987728455816804e-06, |
| "loss": 0.7282, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.22366573033707865, |
| "grad_norm": 0.9457923173904419, |
| "learning_rate": 4.98768266143123e-06, |
| "loss": 0.7297, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.22401685393258428, |
| "grad_norm": 0.9447070360183716, |
| "learning_rate": 4.987636781969038e-06, |
| "loss": 0.7502, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.2243679775280899, |
| "grad_norm": 1.4757858514785767, |
| "learning_rate": 4.987590817431797e-06, |
| "loss": 0.754, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.2247191011235955, |
| "grad_norm": 0.9355869889259338, |
| "learning_rate": 4.987544767821078e-06, |
| "loss": 0.7468, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.22507022471910113, |
| "grad_norm": 0.9021878242492676, |
| "learning_rate": 4.9874986331384566e-06, |
| "loss": 0.6914, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.22542134831460675, |
| "grad_norm": 0.9627546072006226, |
| "learning_rate": 4.98745241338551e-06, |
| "loss": 0.6998, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.22577247191011235, |
| "grad_norm": 0.9815264940261841, |
| "learning_rate": 4.98740610856382e-06, |
| "loss": 0.7392, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.22612359550561797, |
| "grad_norm": 0.9428890943527222, |
| "learning_rate": 4.98735971867497e-06, |
| "loss": 0.737, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.2264747191011236, |
| "grad_norm": 0.9130154252052307, |
| "learning_rate": 4.987313243720546e-06, |
| "loss": 0.7363, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.22682584269662923, |
| "grad_norm": 0.9495055079460144, |
| "learning_rate": 4.987266683702137e-06, |
| "loss": 0.7104, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.22717696629213482, |
| "grad_norm": 0.9111932516098022, |
| "learning_rate": 4.987220038621336e-06, |
| "loss": 0.7107, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.22752808988764045, |
| "grad_norm": 0.9328299760818481, |
| "learning_rate": 4.987173308479738e-06, |
| "loss": 0.7294, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.22787921348314608, |
| "grad_norm": 0.9401968717575073, |
| "learning_rate": 4.987126493278942e-06, |
| "loss": 0.7089, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.22823033707865167, |
| "grad_norm": 0.9041705131530762, |
| "learning_rate": 4.987079593020547e-06, |
| "loss": 0.7249, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.2285814606741573, |
| "grad_norm": 0.8919897675514221, |
| "learning_rate": 4.987032607706158e-06, |
| "loss": 0.6662, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.22893258426966293, |
| "grad_norm": 0.9634642004966736, |
| "learning_rate": 4.986985537337384e-06, |
| "loss": 0.7547, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.22928370786516855, |
| "grad_norm": 0.9561633467674255, |
| "learning_rate": 4.9869383819158305e-06, |
| "loss": 0.7448, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.22963483146067415, |
| "grad_norm": 0.9113203287124634, |
| "learning_rate": 4.986891141443113e-06, |
| "loss": 0.7064, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.22998595505617977, |
| "grad_norm": 0.8830083012580872, |
| "learning_rate": 4.986843815920847e-06, |
| "loss": 0.6909, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.2303370786516854, |
| "grad_norm": 0.9515541791915894, |
| "learning_rate": 4.98679640535065e-06, |
| "loss": 0.7262, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.230688202247191, |
| "grad_norm": 0.9469254612922668, |
| "learning_rate": 4.986748909734145e-06, |
| "loss": 0.7177, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.23103932584269662, |
| "grad_norm": 0.9044115543365479, |
| "learning_rate": 4.986701329072954e-06, |
| "loss": 0.7371, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.23139044943820225, |
| "grad_norm": 0.9210987687110901, |
| "learning_rate": 4.986653663368706e-06, |
| "loss": 0.7327, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.23174157303370788, |
| "grad_norm": 0.8826014995574951, |
| "learning_rate": 4.98660591262303e-06, |
| "loss": 0.7455, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.23209269662921347, |
| "grad_norm": 0.8898245096206665, |
| "learning_rate": 4.986558076837561e-06, |
| "loss": 0.7242, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.2324438202247191, |
| "grad_norm": 0.9125939607620239, |
| "learning_rate": 4.9865101560139324e-06, |
| "loss": 0.7587, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.23279494382022473, |
| "grad_norm": 0.9783604741096497, |
| "learning_rate": 4.9864621501537845e-06, |
| "loss": 0.7356, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.23314606741573032, |
| "grad_norm": 0.9296882152557373, |
| "learning_rate": 4.986414059258758e-06, |
| "loss": 0.7059, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.23349719101123595, |
| "grad_norm": 0.9341956973075867, |
| "learning_rate": 4.9863658833305e-06, |
| "loss": 0.7342, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.23384831460674158, |
| "grad_norm": 0.9274594783782959, |
| "learning_rate": 4.986317622370655e-06, |
| "loss": 0.7254, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.2341994382022472, |
| "grad_norm": 0.8959493637084961, |
| "learning_rate": 4.986269276380875e-06, |
| "loss": 0.7138, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.2345505617977528, |
| "grad_norm": 0.9621686935424805, |
| "learning_rate": 4.986220845362814e-06, |
| "loss": 0.7301, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.23490168539325842, |
| "grad_norm": 0.9666121602058411, |
| "learning_rate": 4.9861723293181275e-06, |
| "loss": 0.754, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.23525280898876405, |
| "grad_norm": 0.9423872828483582, |
| "learning_rate": 4.986123728248474e-06, |
| "loss": 0.7464, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.23560393258426968, |
| "grad_norm": 0.9208926558494568, |
| "learning_rate": 4.986075042155517e-06, |
| "loss": 0.7092, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.23595505617977527, |
| "grad_norm": 0.880066454410553, |
| "learning_rate": 4.986026271040921e-06, |
| "loss": 0.6935, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.2363061797752809, |
| "grad_norm": 0.9050423502922058, |
| "learning_rate": 4.985977414906353e-06, |
| "loss": 0.742, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.23665730337078653, |
| "grad_norm": 0.9089376330375671, |
| "learning_rate": 4.985928473753486e-06, |
| "loss": 0.7366, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.23700842696629212, |
| "grad_norm": 0.9702712893486023, |
| "learning_rate": 4.985879447583992e-06, |
| "loss": 0.7314, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.23735955056179775, |
| "grad_norm": 0.9272444248199463, |
| "learning_rate": 4.985830336399548e-06, |
| "loss": 0.7156, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.23771067415730338, |
| "grad_norm": 0.9623541831970215, |
| "learning_rate": 4.985781140201833e-06, |
| "loss": 0.705, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.238061797752809, |
| "grad_norm": 0.9228547215461731, |
| "learning_rate": 4.985731858992531e-06, |
| "loss": 0.7254, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.2384129213483146, |
| "grad_norm": 0.8912785649299622, |
| "learning_rate": 4.985682492773326e-06, |
| "loss": 0.7254, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.23876404494382023, |
| "grad_norm": 0.9913116693496704, |
| "learning_rate": 4.985633041545906e-06, |
| "loss": 0.7172, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.23911516853932585, |
| "grad_norm": 0.9851749539375305, |
| "learning_rate": 4.985583505311965e-06, |
| "loss": 0.7711, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.23946629213483145, |
| "grad_norm": 0.9252983331680298, |
| "learning_rate": 4.9855338840731934e-06, |
| "loss": 0.7356, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.23981741573033707, |
| "grad_norm": 0.9541698694229126, |
| "learning_rate": 4.9854841778312905e-06, |
| "loss": 0.7111, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.2401685393258427, |
| "grad_norm": 0.9459705948829651, |
| "learning_rate": 4.985434386587956e-06, |
| "loss": 0.7254, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.24051966292134833, |
| "grad_norm": 1.0094040632247925, |
| "learning_rate": 4.985384510344892e-06, |
| "loss": 0.7638, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.24087078651685392, |
| "grad_norm": 0.9937487244606018, |
| "learning_rate": 4.985334549103804e-06, |
| "loss": 0.7373, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.24122191011235955, |
| "grad_norm": 0.9358577132225037, |
| "learning_rate": 4.985284502866401e-06, |
| "loss": 0.697, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.24157303370786518, |
| "grad_norm": 0.9751777052879333, |
| "learning_rate": 4.985234371634395e-06, |
| "loss": 0.7626, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.24192415730337077, |
| "grad_norm": 0.9775291085243225, |
| "learning_rate": 4.9851841554095e-06, |
| "loss": 0.7031, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.2422752808988764, |
| "grad_norm": 0.9366273880004883, |
| "learning_rate": 4.9851338541934345e-06, |
| "loss": 0.7268, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.24262640449438203, |
| "grad_norm": 0.8989854454994202, |
| "learning_rate": 4.985083467987917e-06, |
| "loss": 0.7102, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.24297752808988765, |
| "grad_norm": 0.9326997399330139, |
| "learning_rate": 4.9850329967946715e-06, |
| "loss": 0.6963, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.24332865168539325, |
| "grad_norm": 0.9004776477813721, |
| "learning_rate": 4.984982440615423e-06, |
| "loss": 0.7133, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.24367977528089887, |
| "grad_norm": 0.915814220905304, |
| "learning_rate": 4.984931799451903e-06, |
| "loss": 0.7197, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.2440308988764045, |
| "grad_norm": 0.9562146067619324, |
| "learning_rate": 4.984881073305841e-06, |
| "loss": 0.7175, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.2443820224719101, |
| "grad_norm": 0.9829107522964478, |
| "learning_rate": 4.984830262178974e-06, |
| "loss": 0.7343, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.24473314606741572, |
| "grad_norm": 0.9095830917358398, |
| "learning_rate": 4.984779366073036e-06, |
| "loss": 0.7474, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.24508426966292135, |
| "grad_norm": 0.9279952049255371, |
| "learning_rate": 4.984728384989772e-06, |
| "loss": 0.7574, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.24543539325842698, |
| "grad_norm": 1.07504141330719, |
| "learning_rate": 4.984677318930922e-06, |
| "loss": 0.7309, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.24578651685393257, |
| "grad_norm": 0.9406894445419312, |
| "learning_rate": 4.984626167898235e-06, |
| "loss": 0.7187, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.2461376404494382, |
| "grad_norm": 0.9229064583778381, |
| "learning_rate": 4.9845749318934575e-06, |
| "loss": 0.7379, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.24648876404494383, |
| "grad_norm": 0.9469361901283264, |
| "learning_rate": 4.984523610918345e-06, |
| "loss": 0.7585, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.24683988764044945, |
| "grad_norm": 0.9020796418190002, |
| "learning_rate": 4.984472204974651e-06, |
| "loss": 0.714, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.24719101123595505, |
| "grad_norm": 0.9876411557197571, |
| "learning_rate": 4.9844207140641336e-06, |
| "loss": 0.7013, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.24754213483146068, |
| "grad_norm": 0.9139506816864014, |
| "learning_rate": 4.984369138188553e-06, |
| "loss": 0.7252, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.2478932584269663, |
| "grad_norm": 0.9223290681838989, |
| "learning_rate": 4.9843174773496735e-06, |
| "loss": 0.7603, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.2482443820224719, |
| "grad_norm": 0.9541411995887756, |
| "learning_rate": 4.984265731549263e-06, |
| "loss": 0.6997, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.24859550561797752, |
| "grad_norm": 0.9302672147750854, |
| "learning_rate": 4.98421390078909e-06, |
| "loss": 0.7252, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.24894662921348315, |
| "grad_norm": 0.9554900527000427, |
| "learning_rate": 4.984161985070927e-06, |
| "loss": 0.7305, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.24929775280898878, |
| "grad_norm": 0.9729623794555664, |
| "learning_rate": 4.98410998439655e-06, |
| "loss": 0.7318, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.24964887640449437, |
| "grad_norm": 0.9298332929611206, |
| "learning_rate": 4.984057898767738e-06, |
| "loss": 0.7348, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 0.9653823375701904, |
| "learning_rate": 4.984005728186271e-06, |
| "loss": 0.7566, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.2503511235955056, |
| "grad_norm": 0.9283461570739746, |
| "learning_rate": 4.983953472653933e-06, |
| "loss": 0.6999, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.25070224719101125, |
| "grad_norm": 0.9600170254707336, |
| "learning_rate": 4.983901132172513e-06, |
| "loss": 0.7378, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.25105337078651685, |
| "grad_norm": 0.9261863827705383, |
| "learning_rate": 4.983848706743799e-06, |
| "loss": 0.7245, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.25140449438202245, |
| "grad_norm": 0.9267969727516174, |
| "learning_rate": 4.983796196369585e-06, |
| "loss": 0.6921, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.2517556179775281, |
| "grad_norm": 0.9277015328407288, |
| "learning_rate": 4.983743601051666e-06, |
| "loss": 0.715, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.2521067415730337, |
| "grad_norm": 0.9029396176338196, |
| "learning_rate": 4.9836909207918415e-06, |
| "loss": 0.7739, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.25245786516853935, |
| "grad_norm": 0.9498785138130188, |
| "learning_rate": 4.983638155591913e-06, |
| "loss": 0.7071, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.25280898876404495, |
| "grad_norm": 0.934562087059021, |
| "learning_rate": 4.983585305453685e-06, |
| "loss": 0.7183, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.25316011235955055, |
| "grad_norm": 0.9572330713272095, |
| "learning_rate": 4.983532370378964e-06, |
| "loss": 0.7535, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.2535112359550562, |
| "grad_norm": 0.9443022608757019, |
| "learning_rate": 4.983479350369562e-06, |
| "loss": 0.7127, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.2538623595505618, |
| "grad_norm": 0.9258742928504944, |
| "learning_rate": 4.983426245427291e-06, |
| "loss": 0.7002, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.2542134831460674, |
| "grad_norm": 0.939900279045105, |
| "learning_rate": 4.983373055553968e-06, |
| "loss": 0.7234, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.25456460674157305, |
| "grad_norm": 0.9580402374267578, |
| "learning_rate": 4.983319780751411e-06, |
| "loss": 0.7519, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.25491573033707865, |
| "grad_norm": 0.9556792974472046, |
| "learning_rate": 4.983266421021442e-06, |
| "loss": 0.7701, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.25526685393258425, |
| "grad_norm": 0.9831913709640503, |
| "learning_rate": 4.983212976365887e-06, |
| "loss": 0.7189, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.2556179775280899, |
| "grad_norm": 0.9363849759101868, |
| "learning_rate": 4.983159446786573e-06, |
| "loss": 0.7061, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.2559691011235955, |
| "grad_norm": 0.9291892647743225, |
| "learning_rate": 4.983105832285331e-06, |
| "loss": 0.6942, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.2563202247191011, |
| "grad_norm": 0.9837853908538818, |
| "learning_rate": 4.983052132863995e-06, |
| "loss": 0.7251, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.25667134831460675, |
| "grad_norm": 0.9630160331726074, |
| "learning_rate": 4.9829983485244e-06, |
| "loss": 0.694, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.25702247191011235, |
| "grad_norm": 0.972981333732605, |
| "learning_rate": 4.982944479268386e-06, |
| "loss": 0.7232, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.257373595505618, |
| "grad_norm": 0.9112554788589478, |
| "learning_rate": 4.9828905250977955e-06, |
| "loss": 0.7553, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.2577247191011236, |
| "grad_norm": 0.9407148361206055, |
| "learning_rate": 4.982836486014474e-06, |
| "loss": 0.7931, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.2580758426966292, |
| "grad_norm": 0.9683741927146912, |
| "learning_rate": 4.98278236202027e-06, |
| "loss": 0.76, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.25842696629213485, |
| "grad_norm": 0.9538892507553101, |
| "learning_rate": 4.9827281531170325e-06, |
| "loss": 0.7491, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.25877808988764045, |
| "grad_norm": 0.9792529344558716, |
| "learning_rate": 4.982673859306617e-06, |
| "loss": 0.752, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.25912921348314605, |
| "grad_norm": 0.9384421110153198, |
| "learning_rate": 4.98261948059088e-06, |
| "loss": 0.7587, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.2594803370786517, |
| "grad_norm": 0.9048983454704285, |
| "learning_rate": 4.982565016971682e-06, |
| "loss": 0.7429, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.2598314606741573, |
| "grad_norm": 0.893500804901123, |
| "learning_rate": 4.982510468450884e-06, |
| "loss": 0.6985, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.2601825842696629, |
| "grad_norm": 0.9900831580162048, |
| "learning_rate": 4.982455835030352e-06, |
| "loss": 0.7336, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.26053370786516855, |
| "grad_norm": 0.9558880925178528, |
| "learning_rate": 4.982401116711955e-06, |
| "loss": 0.7561, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.26088483146067415, |
| "grad_norm": 0.8868340849876404, |
| "learning_rate": 4.982346313497564e-06, |
| "loss": 0.6967, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.2612359550561798, |
| "grad_norm": 0.9625622034072876, |
| "learning_rate": 4.9822914253890535e-06, |
| "loss": 0.7382, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.2615870786516854, |
| "grad_norm": 0.9891008138656616, |
| "learning_rate": 4.9822364523883e-06, |
| "loss": 0.7316, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.261938202247191, |
| "grad_norm": 0.9161337018013, |
| "learning_rate": 4.982181394497184e-06, |
| "loss": 0.7225, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.26228932584269665, |
| "grad_norm": 0.9532581567764282, |
| "learning_rate": 4.982126251717589e-06, |
| "loss": 0.7412, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.26264044943820225, |
| "grad_norm": 0.90705406665802, |
| "learning_rate": 4.982071024051399e-06, |
| "loss": 0.6938, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.26299157303370785, |
| "grad_norm": 0.9263889789581299, |
| "learning_rate": 4.982015711500505e-06, |
| "loss": 0.7575, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.2633426966292135, |
| "grad_norm": 0.9247738718986511, |
| "learning_rate": 4.981960314066797e-06, |
| "loss": 0.7184, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.2636938202247191, |
| "grad_norm": 0.9485296607017517, |
| "learning_rate": 4.98190483175217e-06, |
| "loss": 0.7297, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.2640449438202247, |
| "grad_norm": 0.9109437465667725, |
| "learning_rate": 4.981849264558523e-06, |
| "loss": 0.7422, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.26439606741573035, |
| "grad_norm": 0.9430893063545227, |
| "learning_rate": 4.981793612487753e-06, |
| "loss": 0.7634, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.26474719101123595, |
| "grad_norm": 0.9232211709022522, |
| "learning_rate": 4.981737875541765e-06, |
| "loss": 0.7587, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.26509831460674155, |
| "grad_norm": 0.8920214176177979, |
| "learning_rate": 4.9816820537224675e-06, |
| "loss": 0.7222, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.2654494382022472, |
| "grad_norm": 0.9339509010314941, |
| "learning_rate": 4.981626147031765e-06, |
| "loss": 0.7253, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.2658005617977528, |
| "grad_norm": 0.890581488609314, |
| "learning_rate": 4.981570155471573e-06, |
| "loss": 0.7166, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.26615168539325845, |
| "grad_norm": 0.9297827482223511, |
| "learning_rate": 4.981514079043806e-06, |
| "loss": 0.713, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.26650280898876405, |
| "grad_norm": 0.9213199019432068, |
| "learning_rate": 4.98145791775038e-06, |
| "loss": 0.7312, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.26685393258426965, |
| "grad_norm": 0.9185107946395874, |
| "learning_rate": 4.981401671593216e-06, |
| "loss": 0.7215, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.2672050561797753, |
| "grad_norm": 0.8874849081039429, |
| "learning_rate": 4.981345340574239e-06, |
| "loss": 0.7082, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.2675561797752809, |
| "grad_norm": 0.9179455637931824, |
| "learning_rate": 4.981288924695374e-06, |
| "loss": 0.7243, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.2679073033707865, |
| "grad_norm": 0.8956063985824585, |
| "learning_rate": 4.981232423958552e-06, |
| "loss": 0.704, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.26825842696629215, |
| "grad_norm": 0.9451838731765747, |
| "learning_rate": 4.981175838365704e-06, |
| "loss": 0.7518, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.26860955056179775, |
| "grad_norm": 0.9265857338905334, |
| "learning_rate": 4.981119167918765e-06, |
| "loss": 0.7143, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.26896067415730335, |
| "grad_norm": 0.9493303298950195, |
| "learning_rate": 4.981062412619674e-06, |
| "loss": 0.7172, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.269311797752809, |
| "grad_norm": 0.9223463535308838, |
| "learning_rate": 4.981005572470372e-06, |
| "loss": 0.7383, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.2696629213483146, |
| "grad_norm": 0.9502999782562256, |
| "learning_rate": 4.980948647472802e-06, |
| "loss": 0.7137, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.2700140449438202, |
| "grad_norm": 0.9198105335235596, |
| "learning_rate": 4.980891637628911e-06, |
| "loss": 0.706, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.27036516853932585, |
| "grad_norm": 0.9435210227966309, |
| "learning_rate": 4.980834542940649e-06, |
| "loss": 0.6921, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.27071629213483145, |
| "grad_norm": 0.9600005745887756, |
| "learning_rate": 4.980777363409969e-06, |
| "loss": 0.72, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.2710674157303371, |
| "grad_norm": 0.9591335654258728, |
| "learning_rate": 4.980720099038825e-06, |
| "loss": 0.7386, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.2714185393258427, |
| "grad_norm": 0.9055672883987427, |
| "learning_rate": 4.980662749829177e-06, |
| "loss": 0.7568, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.2717696629213483, |
| "grad_norm": 0.9077644348144531, |
| "learning_rate": 4.9806053157829855e-06, |
| "loss": 0.7122, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.27212078651685395, |
| "grad_norm": 0.9248702526092529, |
| "learning_rate": 4.980547796902216e-06, |
| "loss": 0.6866, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.27247191011235955, |
| "grad_norm": 0.9374347925186157, |
| "learning_rate": 4.980490193188834e-06, |
| "loss": 0.7283, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.27282303370786515, |
| "grad_norm": 0.9280509352684021, |
| "learning_rate": 4.980432504644809e-06, |
| "loss": 0.6896, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.2731741573033708, |
| "grad_norm": 0.9158201813697815, |
| "learning_rate": 4.980374731272115e-06, |
| "loss": 0.7222, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.2735252808988764, |
| "grad_norm": 0.918114960193634, |
| "learning_rate": 4.980316873072729e-06, |
| "loss": 0.7528, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.273876404494382, |
| "grad_norm": 0.891121506690979, |
| "learning_rate": 4.980258930048627e-06, |
| "loss": 0.7265, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.27422752808988765, |
| "grad_norm": 0.9570785760879517, |
| "learning_rate": 4.980200902201792e-06, |
| "loss": 0.6957, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.27457865168539325, |
| "grad_norm": 0.8888586759567261, |
| "learning_rate": 4.980142789534209e-06, |
| "loss": 0.7258, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.2749297752808989, |
| "grad_norm": 0.9602726101875305, |
| "learning_rate": 4.980084592047866e-06, |
| "loss": 0.741, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.2752808988764045, |
| "grad_norm": 0.9402101635932922, |
| "learning_rate": 4.9800263097447505e-06, |
| "loss": 0.7789, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.2756320224719101, |
| "grad_norm": 0.916023850440979, |
| "learning_rate": 4.9799679426268575e-06, |
| "loss": 0.6746, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.27598314606741575, |
| "grad_norm": 1.0306038856506348, |
| "learning_rate": 4.979909490696184e-06, |
| "loss": 0.7396, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.27633426966292135, |
| "grad_norm": 0.9112997651100159, |
| "learning_rate": 4.979850953954727e-06, |
| "loss": 0.7225, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.27668539325842695, |
| "grad_norm": 0.9124306440353394, |
| "learning_rate": 4.97979233240449e-06, |
| "loss": 0.7244, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.2770365168539326, |
| "grad_norm": 0.8682541847229004, |
| "learning_rate": 4.979733626047477e-06, |
| "loss": 0.6983, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.2773876404494382, |
| "grad_norm": 0.9099353551864624, |
| "learning_rate": 4.979674834885696e-06, |
| "loss": 0.723, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.2777387640449438, |
| "grad_norm": 0.9057777523994446, |
| "learning_rate": 4.979615958921158e-06, |
| "loss": 0.7063, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.27808988764044945, |
| "grad_norm": 0.9777807593345642, |
| "learning_rate": 4.979556998155876e-06, |
| "loss": 0.7047, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.27844101123595505, |
| "grad_norm": 0.952314019203186, |
| "learning_rate": 4.979497952591865e-06, |
| "loss": 0.7589, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.27879213483146065, |
| "grad_norm": 0.8948432207107544, |
| "learning_rate": 4.979438822231147e-06, |
| "loss": 0.6893, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.2791432584269663, |
| "grad_norm": 0.9998204708099365, |
| "learning_rate": 4.979379607075743e-06, |
| "loss": 0.7176, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.2794943820224719, |
| "grad_norm": 0.9392699003219604, |
| "learning_rate": 4.979320307127678e-06, |
| "loss": 0.7607, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.27984550561797755, |
| "grad_norm": 0.916340708732605, |
| "learning_rate": 4.97926092238898e-06, |
| "loss": 0.6797, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.28019662921348315, |
| "grad_norm": 0.9007791876792908, |
| "learning_rate": 4.979201452861679e-06, |
| "loss": 0.7015, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.28054775280898875, |
| "grad_norm": 0.9042453765869141, |
| "learning_rate": 4.979141898547811e-06, |
| "loss": 0.7113, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.2808988764044944, |
| "grad_norm": 0.9455578327178955, |
| "learning_rate": 4.9790822594494116e-06, |
| "loss": 0.7031, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.28125, |
| "grad_norm": 0.9787691235542297, |
| "learning_rate": 4.979022535568519e-06, |
| "loss": 0.7328, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.2816011235955056, |
| "grad_norm": 0.9504345059394836, |
| "learning_rate": 4.978962726907179e-06, |
| "loss": 0.7301, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.28195224719101125, |
| "grad_norm": 0.9304360747337341, |
| "learning_rate": 4.978902833467434e-06, |
| "loss": 0.7195, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.28230337078651685, |
| "grad_norm": 0.9509701132774353, |
| "learning_rate": 4.978842855251333e-06, |
| "loss": 0.7532, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.28265449438202245, |
| "grad_norm": 0.9462277889251709, |
| "learning_rate": 4.9787827922609276e-06, |
| "loss": 0.708, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.2830056179775281, |
| "grad_norm": 0.9095754027366638, |
| "learning_rate": 4.978722644498272e-06, |
| "loss": 0.7484, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.2833567415730337, |
| "grad_norm": 0.8992451429367065, |
| "learning_rate": 4.978662411965424e-06, |
| "loss": 0.7033, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.28370786516853935, |
| "grad_norm": 0.913910984992981, |
| "learning_rate": 4.978602094664441e-06, |
| "loss": 0.7333, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.28405898876404495, |
| "grad_norm": 0.9041484594345093, |
| "learning_rate": 4.978541692597388e-06, |
| "loss": 0.7196, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.28441011235955055, |
| "grad_norm": 0.9570571184158325, |
| "learning_rate": 4.97848120576633e-06, |
| "loss": 0.7199, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.2847612359550562, |
| "grad_norm": 0.9184895753860474, |
| "learning_rate": 4.978420634173336e-06, |
| "loss": 0.6982, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.2851123595505618, |
| "grad_norm": 0.9278954267501831, |
| "learning_rate": 4.978359977820476e-06, |
| "loss": 0.7475, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.2854634831460674, |
| "grad_norm": 0.9236921668052673, |
| "learning_rate": 4.978299236709826e-06, |
| "loss": 0.7343, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.28581460674157305, |
| "grad_norm": 0.9183304309844971, |
| "learning_rate": 4.978238410843464e-06, |
| "loss": 0.7111, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.28616573033707865, |
| "grad_norm": 0.895050585269928, |
| "learning_rate": 4.978177500223468e-06, |
| "loss": 0.6746, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.28651685393258425, |
| "grad_norm": 0.9193557500839233, |
| "learning_rate": 4.9781165048519216e-06, |
| "loss": 0.7025, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.2868679775280899, |
| "grad_norm": 0.9046199917793274, |
| "learning_rate": 4.978055424730912e-06, |
| "loss": 0.701, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.2872191011235955, |
| "grad_norm": 0.9200829267501831, |
| "learning_rate": 4.977994259862527e-06, |
| "loss": 0.755, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.2875702247191011, |
| "grad_norm": 0.929964542388916, |
| "learning_rate": 4.977933010248859e-06, |
| "loss": 0.7274, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.28792134831460675, |
| "grad_norm": 0.9332923293113708, |
| "learning_rate": 4.977871675892003e-06, |
| "loss": 0.7503, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.28827247191011235, |
| "grad_norm": 0.8993156552314758, |
| "learning_rate": 4.977810256794055e-06, |
| "loss": 0.6954, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.288623595505618, |
| "grad_norm": 0.8985475301742554, |
| "learning_rate": 4.977748752957116e-06, |
| "loss": 0.7287, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.2889747191011236, |
| "grad_norm": 0.9083667397499084, |
| "learning_rate": 4.977687164383291e-06, |
| "loss": 0.7352, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.2893258426966292, |
| "grad_norm": 0.9311912059783936, |
| "learning_rate": 4.977625491074686e-06, |
| "loss": 0.7199, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.28967696629213485, |
| "grad_norm": 0.8959439396858215, |
| "learning_rate": 4.977563733033408e-06, |
| "loss": 0.6992, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.29002808988764045, |
| "grad_norm": 1.018567681312561, |
| "learning_rate": 4.9775018902615715e-06, |
| "loss": 0.7278, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.29037921348314605, |
| "grad_norm": 0.9925201535224915, |
| "learning_rate": 4.977439962761289e-06, |
| "loss": 0.7423, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.2907303370786517, |
| "grad_norm": 1.0001310110092163, |
| "learning_rate": 4.977377950534681e-06, |
| "loss": 0.7395, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.2910814606741573, |
| "grad_norm": 0.9196740984916687, |
| "learning_rate": 4.977315853583866e-06, |
| "loss": 0.7588, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.2914325842696629, |
| "grad_norm": 0.9672794342041016, |
| "learning_rate": 4.977253671910969e-06, |
| "loss": 0.7378, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.29178370786516855, |
| "grad_norm": 1.0301952362060547, |
| "learning_rate": 4.977191405518116e-06, |
| "loss": 0.7757, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.29213483146067415, |
| "grad_norm": 0.9453630447387695, |
| "learning_rate": 4.977129054407437e-06, |
| "loss": 0.7335, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.2924859550561798, |
| "grad_norm": 0.9876764416694641, |
| "learning_rate": 4.977066618581065e-06, |
| "loss": 0.7437, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.2928370786516854, |
| "grad_norm": 0.9211217164993286, |
| "learning_rate": 4.977004098041133e-06, |
| "loss": 0.7333, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.293188202247191, |
| "grad_norm": 0.9148357510566711, |
| "learning_rate": 4.976941492789781e-06, |
| "loss": 0.677, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.29353932584269665, |
| "grad_norm": 0.9743471741676331, |
| "learning_rate": 4.976878802829149e-06, |
| "loss": 0.7586, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.29389044943820225, |
| "grad_norm": 0.9516198039054871, |
| "learning_rate": 4.976816028161382e-06, |
| "loss": 0.7364, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.29424157303370785, |
| "grad_norm": 0.8972299098968506, |
| "learning_rate": 4.9767531687886265e-06, |
| "loss": 0.6912, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.2945926966292135, |
| "grad_norm": 0.8950793147087097, |
| "learning_rate": 4.976690224713031e-06, |
| "loss": 0.6969, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.2949438202247191, |
| "grad_norm": 0.9539402723312378, |
| "learning_rate": 4.976627195936749e-06, |
| "loss": 0.7348, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.2952949438202247, |
| "grad_norm": 1.32026207447052, |
| "learning_rate": 4.976564082461938e-06, |
| "loss": 0.7192, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.29564606741573035, |
| "grad_norm": 0.9319935441017151, |
| "learning_rate": 4.976500884290753e-06, |
| "loss": 0.6957, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.29599719101123595, |
| "grad_norm": 0.9559399485588074, |
| "learning_rate": 4.976437601425358e-06, |
| "loss": 0.7394, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.29634831460674155, |
| "grad_norm": 0.9115216135978699, |
| "learning_rate": 4.976374233867915e-06, |
| "loss": 0.6955, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.2966994382022472, |
| "grad_norm": 0.8987361788749695, |
| "learning_rate": 4.976310781620592e-06, |
| "loss": 0.7057, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.2970505617977528, |
| "grad_norm": 0.9885380268096924, |
| "learning_rate": 4.976247244685561e-06, |
| "loss": 0.7542, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.29740168539325845, |
| "grad_norm": 0.9176011681556702, |
| "learning_rate": 4.9761836230649914e-06, |
| "loss": 0.6836, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.29775280898876405, |
| "grad_norm": 0.9235329031944275, |
| "learning_rate": 4.976119916761063e-06, |
| "loss": 0.7541, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.29810393258426965, |
| "grad_norm": 0.9174650311470032, |
| "learning_rate": 4.9760561257759504e-06, |
| "loss": 0.7149, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.2984550561797753, |
| "grad_norm": 0.9441576600074768, |
| "learning_rate": 4.975992250111837e-06, |
| "loss": 0.7222, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.2988061797752809, |
| "grad_norm": 0.9702540636062622, |
| "learning_rate": 4.975928289770909e-06, |
| "loss": 0.7193, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.2991573033707865, |
| "grad_norm": 0.9554623961448669, |
| "learning_rate": 4.9758642447553505e-06, |
| "loss": 0.7537, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.29950842696629215, |
| "grad_norm": 0.9210932850837708, |
| "learning_rate": 4.975800115067355e-06, |
| "loss": 0.7159, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.29985955056179775, |
| "grad_norm": 0.902009904384613, |
| "learning_rate": 4.975735900709113e-06, |
| "loss": 0.6952, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.30021067415730335, |
| "grad_norm": 0.9207450151443481, |
| "learning_rate": 4.975671601682822e-06, |
| "loss": 0.6705, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.300561797752809, |
| "grad_norm": 0.9092718362808228, |
| "learning_rate": 4.975607217990681e-06, |
| "loss": 0.723, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.3009129213483146, |
| "grad_norm": 0.955410897731781, |
| "learning_rate": 4.975542749634891e-06, |
| "loss": 0.728, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.3012640449438202, |
| "grad_norm": 0.9404045343399048, |
| "learning_rate": 4.975478196617658e-06, |
| "loss": 0.7381, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.30161516853932585, |
| "grad_norm": 0.9133435487747192, |
| "learning_rate": 4.9754135589411875e-06, |
| "loss": 0.7306, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.30196629213483145, |
| "grad_norm": 0.9013397693634033, |
| "learning_rate": 4.975348836607693e-06, |
| "loss": 0.7189, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.3023174157303371, |
| "grad_norm": 0.9037486910820007, |
| "learning_rate": 4.975284029619386e-06, |
| "loss": 0.7152, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.3026685393258427, |
| "grad_norm": 0.9141895771026611, |
| "learning_rate": 4.975219137978484e-06, |
| "loss": 0.71, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.3030196629213483, |
| "grad_norm": 0.942010223865509, |
| "learning_rate": 4.975154161687205e-06, |
| "loss": 0.7255, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.30337078651685395, |
| "grad_norm": 0.9464311599731445, |
| "learning_rate": 4.9750891007477716e-06, |
| "loss": 0.7012, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.30372191011235955, |
| "grad_norm": 0.9062106609344482, |
| "learning_rate": 4.975023955162409e-06, |
| "loss": 0.6909, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.30407303370786515, |
| "grad_norm": 0.921981155872345, |
| "learning_rate": 4.974958724933344e-06, |
| "loss": 0.7312, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.3044241573033708, |
| "grad_norm": 0.902531087398529, |
| "learning_rate": 4.97489341006281e-06, |
| "loss": 0.674, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.3047752808988764, |
| "grad_norm": 0.9487978219985962, |
| "learning_rate": 4.974828010553039e-06, |
| "loss": 0.7054, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.305126404494382, |
| "grad_norm": 0.9971505999565125, |
| "learning_rate": 4.9747625264062675e-06, |
| "loss": 0.7676, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.30547752808988765, |
| "grad_norm": 0.9108356237411499, |
| "learning_rate": 4.974696957624735e-06, |
| "loss": 0.7199, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.30582865168539325, |
| "grad_norm": 0.9270719885826111, |
| "learning_rate": 4.974631304210684e-06, |
| "loss": 0.748, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.3061797752808989, |
| "grad_norm": 0.9216623306274414, |
| "learning_rate": 4.97456556616636e-06, |
| "loss": 0.7333, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.3065308988764045, |
| "grad_norm": 0.980196475982666, |
| "learning_rate": 4.9744997434940125e-06, |
| "loss": 0.7175, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.3068820224719101, |
| "grad_norm": 0.8955034613609314, |
| "learning_rate": 4.97443383619589e-06, |
| "loss": 0.7133, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.30723314606741575, |
| "grad_norm": 0.9280632734298706, |
| "learning_rate": 4.974367844274248e-06, |
| "loss": 0.6953, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.30758426966292135, |
| "grad_norm": 0.9301517605781555, |
| "learning_rate": 4.974301767731343e-06, |
| "loss": 0.7057, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.30793539325842695, |
| "grad_norm": 0.9015063047409058, |
| "learning_rate": 4.974235606569434e-06, |
| "loss": 0.699, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.3082865168539326, |
| "grad_norm": 0.9064421057701111, |
| "learning_rate": 4.974169360790786e-06, |
| "loss": 0.6937, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.3086376404494382, |
| "grad_norm": 0.9030662775039673, |
| "learning_rate": 4.974103030397662e-06, |
| "loss": 0.7455, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.3089887640449438, |
| "grad_norm": 0.9049381613731384, |
| "learning_rate": 4.974036615392332e-06, |
| "loss": 0.7203, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.30933988764044945, |
| "grad_norm": 0.9038983583450317, |
| "learning_rate": 4.973970115777067e-06, |
| "loss": 0.7122, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.30969101123595505, |
| "grad_norm": 0.96694415807724, |
| "learning_rate": 4.973903531554141e-06, |
| "loss": 0.7292, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.31004213483146065, |
| "grad_norm": 0.9090932011604309, |
| "learning_rate": 4.973836862725831e-06, |
| "loss": 0.7463, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.3103932584269663, |
| "grad_norm": 0.9024604558944702, |
| "learning_rate": 4.973770109294416e-06, |
| "loss": 0.7488, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.3107443820224719, |
| "grad_norm": 0.9796269536018372, |
| "learning_rate": 4.973703271262181e-06, |
| "loss": 0.7398, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.31109550561797755, |
| "grad_norm": 0.9187832474708557, |
| "learning_rate": 4.973636348631412e-06, |
| "loss": 0.7162, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.31144662921348315, |
| "grad_norm": 0.8963474631309509, |
| "learning_rate": 4.973569341404395e-06, |
| "loss": 0.6812, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.31179775280898875, |
| "grad_norm": 0.934047281742096, |
| "learning_rate": 4.973502249583424e-06, |
| "loss": 0.7176, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.3121488764044944, |
| "grad_norm": 0.9109435677528381, |
| "learning_rate": 4.9734350731707925e-06, |
| "loss": 0.684, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.3125, |
| "grad_norm": 0.9629603028297424, |
| "learning_rate": 4.973367812168798e-06, |
| "loss": 0.756, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.3128511235955056, |
| "grad_norm": 0.8999819159507751, |
| "learning_rate": 4.973300466579741e-06, |
| "loss": 0.7211, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.31320224719101125, |
| "grad_norm": 0.9093006253242493, |
| "learning_rate": 4.9732330364059245e-06, |
| "loss": 0.6893, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.31355337078651685, |
| "grad_norm": 0.925269603729248, |
| "learning_rate": 4.973165521649655e-06, |
| "loss": 0.6631, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.31390449438202245, |
| "grad_norm": 0.9515416622161865, |
| "learning_rate": 4.973097922313239e-06, |
| "loss": 0.7578, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.3142556179775281, |
| "grad_norm": 0.9192783236503601, |
| "learning_rate": 4.973030238398992e-06, |
| "loss": 0.6884, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.3146067415730337, |
| "grad_norm": 0.905805230140686, |
| "learning_rate": 4.972962469909227e-06, |
| "loss": 0.7037, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.31495786516853935, |
| "grad_norm": 0.9037303328514099, |
| "learning_rate": 4.972894616846261e-06, |
| "loss": 0.6989, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.31530898876404495, |
| "grad_norm": 0.8707857728004456, |
| "learning_rate": 4.9728266792124155e-06, |
| "loss": 0.6865, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.31566011235955055, |
| "grad_norm": 0.9623267650604248, |
| "learning_rate": 4.972758657010014e-06, |
| "loss": 0.7028, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.3160112359550562, |
| "grad_norm": 0.9062806367874146, |
| "learning_rate": 4.972690550241383e-06, |
| "loss": 0.6803, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.3163623595505618, |
| "grad_norm": 0.9363368153572083, |
| "learning_rate": 4.97262235890885e-06, |
| "loss": 0.7359, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.3167134831460674, |
| "grad_norm": 0.9362224340438843, |
| "learning_rate": 4.972554083014748e-06, |
| "loss": 0.7432, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.31706460674157305, |
| "grad_norm": 0.9579148292541504, |
| "learning_rate": 4.972485722561412e-06, |
| "loss": 0.7348, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.31741573033707865, |
| "grad_norm": 0.9559216499328613, |
| "learning_rate": 4.97241727755118e-06, |
| "loss": 0.7095, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.31776685393258425, |
| "grad_norm": 0.9043166637420654, |
| "learning_rate": 4.972348747986394e-06, |
| "loss": 0.7289, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.3181179775280899, |
| "grad_norm": 0.925491988658905, |
| "learning_rate": 4.972280133869396e-06, |
| "loss": 0.6921, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.3184691011235955, |
| "grad_norm": 0.9442777037620544, |
| "learning_rate": 4.972211435202533e-06, |
| "loss": 0.7104, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.3188202247191011, |
| "grad_norm": 0.943748950958252, |
| "learning_rate": 4.972142651988153e-06, |
| "loss": 0.7688, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.31917134831460675, |
| "grad_norm": 0.8944805860519409, |
| "learning_rate": 4.972073784228612e-06, |
| "loss": 0.7321, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.31952247191011235, |
| "grad_norm": 0.9088547825813293, |
| "learning_rate": 4.972004831926261e-06, |
| "loss": 0.6973, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.319873595505618, |
| "grad_norm": 0.9270226359367371, |
| "learning_rate": 4.971935795083461e-06, |
| "loss": 0.6956, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.3202247191011236, |
| "grad_norm": 0.910492479801178, |
| "learning_rate": 4.971866673702573e-06, |
| "loss": 0.681, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.3205758426966292, |
| "grad_norm": 0.9591485857963562, |
| "learning_rate": 4.971797467785958e-06, |
| "loss": 0.7763, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.32092696629213485, |
| "grad_norm": 0.9132441878318787, |
| "learning_rate": 4.971728177335986e-06, |
| "loss": 0.7294, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.32127808988764045, |
| "grad_norm": 0.9387941956520081, |
| "learning_rate": 4.971658802355025e-06, |
| "loss": 0.755, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.32162921348314605, |
| "grad_norm": 0.930505096912384, |
| "learning_rate": 4.971589342845449e-06, |
| "loss": 0.7337, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.3219803370786517, |
| "grad_norm": 0.8818848133087158, |
| "learning_rate": 4.971519798809631e-06, |
| "loss": 0.7114, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.3223314606741573, |
| "grad_norm": 0.9763118028640747, |
| "learning_rate": 4.971450170249952e-06, |
| "loss": 0.73, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.3226825842696629, |
| "grad_norm": 0.865503191947937, |
| "learning_rate": 4.971380457168791e-06, |
| "loss": 0.6733, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.32303370786516855, |
| "grad_norm": 0.9227094650268555, |
| "learning_rate": 4.9713106595685336e-06, |
| "loss": 0.6975, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.32338483146067415, |
| "grad_norm": 0.9164808988571167, |
| "learning_rate": 4.971240777451566e-06, |
| "loss": 0.7266, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.3237359550561798, |
| "grad_norm": 0.9115430116653442, |
| "learning_rate": 4.971170810820279e-06, |
| "loss": 0.6775, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.3240870786516854, |
| "grad_norm": 0.9133177399635315, |
| "learning_rate": 4.971100759677064e-06, |
| "loss": 0.7163, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.324438202247191, |
| "grad_norm": 0.9289098381996155, |
| "learning_rate": 4.971030624024319e-06, |
| "loss": 0.7448, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.32478932584269665, |
| "grad_norm": 0.872006356716156, |
| "learning_rate": 4.97096040386444e-06, |
| "loss": 0.6492, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.32514044943820225, |
| "grad_norm": 0.9258365035057068, |
| "learning_rate": 4.97089009919983e-06, |
| "loss": 0.6931, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.32549157303370785, |
| "grad_norm": 0.9044914841651917, |
| "learning_rate": 4.970819710032893e-06, |
| "loss": 0.7002, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.3258426966292135, |
| "grad_norm": 0.9293612837791443, |
| "learning_rate": 4.970749236366037e-06, |
| "loss": 0.7471, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.3261938202247191, |
| "grad_norm": 0.8999515771865845, |
| "learning_rate": 4.9706786782016706e-06, |
| "loss": 0.6992, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.3265449438202247, |
| "grad_norm": 0.9285963177680969, |
| "learning_rate": 4.970608035542207e-06, |
| "loss": 0.7229, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.32689606741573035, |
| "grad_norm": 0.9883245229721069, |
| "learning_rate": 4.970537308390063e-06, |
| "loss": 0.7343, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.32724719101123595, |
| "grad_norm": 0.9650030732154846, |
| "learning_rate": 4.970466496747658e-06, |
| "loss": 0.7206, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.32759831460674155, |
| "grad_norm": 0.9341951012611389, |
| "learning_rate": 4.9703956006174125e-06, |
| "loss": 0.7029, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.3279494382022472, |
| "grad_norm": 0.9105697274208069, |
| "learning_rate": 4.9703246200017504e-06, |
| "loss": 0.7127, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.3283005617977528, |
| "grad_norm": 0.8891416192054749, |
| "learning_rate": 4.970253554903102e-06, |
| "loss": 0.6952, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.32865168539325845, |
| "grad_norm": 0.9152025580406189, |
| "learning_rate": 4.970182405323894e-06, |
| "loss": 0.7126, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.32900280898876405, |
| "grad_norm": 0.9279428720474243, |
| "learning_rate": 4.970111171266563e-06, |
| "loss": 0.7066, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.32935393258426965, |
| "grad_norm": 0.9324163198471069, |
| "learning_rate": 4.970039852733542e-06, |
| "loss": 0.6796, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.3297050561797753, |
| "grad_norm": 0.9290011525154114, |
| "learning_rate": 4.9699684497272736e-06, |
| "loss": 0.7192, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.3300561797752809, |
| "grad_norm": 0.8959920406341553, |
| "learning_rate": 4.969896962250196e-06, |
| "loss": 0.7132, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.3304073033707865, |
| "grad_norm": 0.8849387168884277, |
| "learning_rate": 4.969825390304757e-06, |
| "loss": 0.6618, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.33075842696629215, |
| "grad_norm": 0.8804863691329956, |
| "learning_rate": 4.969753733893403e-06, |
| "loss": 0.7021, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.33110955056179775, |
| "grad_norm": 0.9022026062011719, |
| "learning_rate": 4.969681993018584e-06, |
| "loss": 0.6842, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.33146067415730335, |
| "grad_norm": 0.9250811338424683, |
| "learning_rate": 4.969610167682754e-06, |
| "loss": 0.6977, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.331811797752809, |
| "grad_norm": 0.9264232516288757, |
| "learning_rate": 4.969538257888371e-06, |
| "loss": 0.7135, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.3321629213483146, |
| "grad_norm": 0.9282289147377014, |
| "learning_rate": 4.969466263637892e-06, |
| "loss": 0.7304, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.3325140449438202, |
| "grad_norm": 0.8798340559005737, |
| "learning_rate": 4.9693941849337804e-06, |
| "loss": 0.6996, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.33286516853932585, |
| "grad_norm": 0.9778987169265747, |
| "learning_rate": 4.9693220217785e-06, |
| "loss": 0.7132, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.33321629213483145, |
| "grad_norm": 0.9029282331466675, |
| "learning_rate": 4.969249774174519e-06, |
| "loss": 0.6951, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.3335674157303371, |
| "grad_norm": 0.9430015087127686, |
| "learning_rate": 4.96917744212431e-06, |
| "loss": 0.6943, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.3339185393258427, |
| "grad_norm": 0.9794940948486328, |
| "learning_rate": 4.9691050256303445e-06, |
| "loss": 0.7054, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.3342696629213483, |
| "grad_norm": 0.9288370013237, |
| "learning_rate": 4.969032524695101e-06, |
| "loss": 0.7122, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.33462078651685395, |
| "grad_norm": 0.9557552933692932, |
| "learning_rate": 4.968959939321057e-06, |
| "loss": 0.7598, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.33497191011235955, |
| "grad_norm": 0.9375066161155701, |
| "learning_rate": 4.968887269510696e-06, |
| "loss": 0.6924, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.33532303370786515, |
| "grad_norm": 0.9338163733482361, |
| "learning_rate": 4.968814515266503e-06, |
| "loss": 0.7267, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.3356741573033708, |
| "grad_norm": 0.9154375791549683, |
| "learning_rate": 4.968741676590966e-06, |
| "loss": 0.7321, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.3360252808988764, |
| "grad_norm": 0.9233065843582153, |
| "learning_rate": 4.968668753486576e-06, |
| "loss": 0.7245, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.336376404494382, |
| "grad_norm": 0.9419113993644714, |
| "learning_rate": 4.968595745955828e-06, |
| "loss": 0.7007, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.33672752808988765, |
| "grad_norm": 0.961121678352356, |
| "learning_rate": 4.968522654001217e-06, |
| "loss": 0.7308, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.33707865168539325, |
| "grad_norm": 0.934877336025238, |
| "learning_rate": 4.9684494776252445e-06, |
| "loss": 0.7388, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.3374297752808989, |
| "grad_norm": 0.912398636341095, |
| "learning_rate": 4.9683762168304115e-06, |
| "loss": 0.7088, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.3377808988764045, |
| "grad_norm": 0.948833167552948, |
| "learning_rate": 4.9683028716192246e-06, |
| "loss": 0.7278, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.3381320224719101, |
| "grad_norm": 0.9257057905197144, |
| "learning_rate": 4.968229441994191e-06, |
| "loss": 0.7338, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.33848314606741575, |
| "grad_norm": 0.9129422903060913, |
| "learning_rate": 4.968155927957823e-06, |
| "loss": 0.7058, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.33883426966292135, |
| "grad_norm": 0.9381750822067261, |
| "learning_rate": 4.968082329512634e-06, |
| "loss": 0.7326, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.33918539325842695, |
| "grad_norm": 0.9108783602714539, |
| "learning_rate": 4.968008646661141e-06, |
| "loss": 0.66, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.3395365168539326, |
| "grad_norm": 0.9329748153686523, |
| "learning_rate": 4.9679348794058655e-06, |
| "loss": 0.7117, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.3398876404494382, |
| "grad_norm": 0.9643568992614746, |
| "learning_rate": 4.967861027749327e-06, |
| "loss": 0.7555, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.3402387640449438, |
| "grad_norm": 0.9551818370819092, |
| "learning_rate": 4.967787091694055e-06, |
| "loss": 0.7474, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.34058988764044945, |
| "grad_norm": 0.9381354451179504, |
| "learning_rate": 4.967713071242576e-06, |
| "loss": 0.7155, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.34094101123595505, |
| "grad_norm": 0.9513474702835083, |
| "learning_rate": 4.967638966397421e-06, |
| "loss": 0.765, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.34129213483146065, |
| "grad_norm": 0.9263571500778198, |
| "learning_rate": 4.967564777161124e-06, |
| "loss": 0.7112, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.3416432584269663, |
| "grad_norm": 0.9278532862663269, |
| "learning_rate": 4.967490503536225e-06, |
| "loss": 0.7183, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.3419943820224719, |
| "grad_norm": 0.9487691521644592, |
| "learning_rate": 4.967416145525261e-06, |
| "loss": 0.7312, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.34234550561797755, |
| "grad_norm": 0.8866797089576721, |
| "learning_rate": 4.967341703130777e-06, |
| "loss": 0.704, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.34269662921348315, |
| "grad_norm": 0.9202519655227661, |
| "learning_rate": 4.967267176355318e-06, |
| "loss": 0.7434, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.34304775280898875, |
| "grad_norm": 0.9155771732330322, |
| "learning_rate": 4.967192565201432e-06, |
| "loss": 0.7166, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.3433988764044944, |
| "grad_norm": 0.9582788944244385, |
| "learning_rate": 4.967117869671672e-06, |
| "loss": 0.7536, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.34375, |
| "grad_norm": 0.9524344801902771, |
| "learning_rate": 4.967043089768592e-06, |
| "loss": 0.7739, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.3441011235955056, |
| "grad_norm": 0.9230315685272217, |
| "learning_rate": 4.96696822549475e-06, |
| "loss": 0.7181, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.34445224719101125, |
| "grad_norm": 0.9080458283424377, |
| "learning_rate": 4.966893276852704e-06, |
| "loss": 0.7107, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.34480337078651685, |
| "grad_norm": 0.9102587699890137, |
| "learning_rate": 4.966818243845021e-06, |
| "loss": 0.7051, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.34515449438202245, |
| "grad_norm": 0.8925254940986633, |
| "learning_rate": 4.966743126474264e-06, |
| "loss": 0.7106, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.3455056179775281, |
| "grad_norm": 0.9102403521537781, |
| "learning_rate": 4.966667924743002e-06, |
| "loss": 0.6849, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.3458567415730337, |
| "grad_norm": 0.9164739847183228, |
| "learning_rate": 4.966592638653808e-06, |
| "loss": 0.7027, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.34620786516853935, |
| "grad_norm": 0.89588463306427, |
| "learning_rate": 4.966517268209258e-06, |
| "loss": 0.7086, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.34655898876404495, |
| "grad_norm": 0.8812805414199829, |
| "learning_rate": 4.966441813411928e-06, |
| "loss": 0.7073, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.34691011235955055, |
| "grad_norm": 0.8935489654541016, |
| "learning_rate": 4.966366274264397e-06, |
| "loss": 0.6749, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.3472612359550562, |
| "grad_norm": 0.935444712638855, |
| "learning_rate": 4.966290650769251e-06, |
| "loss": 0.7283, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.3476123595505618, |
| "grad_norm": 0.9046710729598999, |
| "learning_rate": 4.966214942929074e-06, |
| "loss": 0.7138, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.3479634831460674, |
| "grad_norm": 0.9328753352165222, |
| "learning_rate": 4.966139150746458e-06, |
| "loss": 0.7022, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.34831460674157305, |
| "grad_norm": 0.9144490361213684, |
| "learning_rate": 4.966063274223992e-06, |
| "loss": 0.738, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.34866573033707865, |
| "grad_norm": 0.9606062173843384, |
| "learning_rate": 4.965987313364272e-06, |
| "loss": 0.6937, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.34901685393258425, |
| "grad_norm": 0.94340980052948, |
| "learning_rate": 4.965911268169897e-06, |
| "loss": 0.6811, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.3493679775280899, |
| "grad_norm": 0.8755514025688171, |
| "learning_rate": 4.9658351386434665e-06, |
| "loss": 0.6707, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.3497191011235955, |
| "grad_norm": 0.8640532493591309, |
| "learning_rate": 4.965758924787584e-06, |
| "loss": 0.6378, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.3500702247191011, |
| "grad_norm": 0.9261877536773682, |
| "learning_rate": 4.965682626604856e-06, |
| "loss": 0.6953, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.35042134831460675, |
| "grad_norm": 0.8998399376869202, |
| "learning_rate": 4.965606244097894e-06, |
| "loss": 0.7154, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.35077247191011235, |
| "grad_norm": 0.9440179467201233, |
| "learning_rate": 4.9655297772693066e-06, |
| "loss": 0.6992, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.351123595505618, |
| "grad_norm": 0.9105573892593384, |
| "learning_rate": 4.965453226121711e-06, |
| "loss": 0.7033, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.3514747191011236, |
| "grad_norm": 0.9109785556793213, |
| "learning_rate": 4.965376590657724e-06, |
| "loss": 0.7057, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.3518258426966292, |
| "grad_norm": 0.9462383985519409, |
| "learning_rate": 4.9652998708799684e-06, |
| "loss": 0.6979, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.35217696629213485, |
| "grad_norm": 0.9546105861663818, |
| "learning_rate": 4.965223066791066e-06, |
| "loss": 0.7227, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.35252808988764045, |
| "grad_norm": 1.0831300020217896, |
| "learning_rate": 4.965146178393645e-06, |
| "loss": 0.6849, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.35287921348314605, |
| "grad_norm": 0.9323068261146545, |
| "learning_rate": 4.965069205690334e-06, |
| "loss": 0.739, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.3532303370786517, |
| "grad_norm": 0.9407396912574768, |
| "learning_rate": 4.964992148683765e-06, |
| "loss": 0.7386, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.3535814606741573, |
| "grad_norm": 0.9513671398162842, |
| "learning_rate": 4.964915007376574e-06, |
| "loss": 0.666, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.3539325842696629, |
| "grad_norm": 0.9194923043251038, |
| "learning_rate": 4.964837781771399e-06, |
| "loss": 0.7601, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.35428370786516855, |
| "grad_norm": 0.9639756083488464, |
| "learning_rate": 4.964760471870882e-06, |
| "loss": 0.7124, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.35463483146067415, |
| "grad_norm": 0.9628319144248962, |
| "learning_rate": 4.964683077677664e-06, |
| "loss": 0.7003, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.3549859550561798, |
| "grad_norm": 0.9003061652183533, |
| "learning_rate": 4.964605599194395e-06, |
| "loss": 0.7199, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.3553370786516854, |
| "grad_norm": 0.9365134835243225, |
| "learning_rate": 4.964528036423723e-06, |
| "loss": 0.6651, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.355688202247191, |
| "grad_norm": 0.9309297204017639, |
| "learning_rate": 4.964450389368303e-06, |
| "loss": 0.7354, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.35603932584269665, |
| "grad_norm": 0.9624804258346558, |
| "learning_rate": 4.964372658030786e-06, |
| "loss": 0.6955, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.35639044943820225, |
| "grad_norm": 0.9497263431549072, |
| "learning_rate": 4.964294842413835e-06, |
| "loss": 0.7374, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.35674157303370785, |
| "grad_norm": 0.9235740900039673, |
| "learning_rate": 4.964216942520108e-06, |
| "loss": 0.6906, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.3570926966292135, |
| "grad_norm": 0.9985111951828003, |
| "learning_rate": 4.9641389583522705e-06, |
| "loss": 0.7131, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.3574438202247191, |
| "grad_norm": 0.9399976134300232, |
| "learning_rate": 4.964060889912988e-06, |
| "loss": 0.6935, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.3577949438202247, |
| "grad_norm": 0.9676353931427002, |
| "learning_rate": 4.9639827372049335e-06, |
| "loss": 0.7513, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.35814606741573035, |
| "grad_norm": 0.9015031456947327, |
| "learning_rate": 4.9639045002307765e-06, |
| "loss": 0.687, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.35849719101123595, |
| "grad_norm": 0.9401846528053284, |
| "learning_rate": 4.963826178993195e-06, |
| "loss": 0.6963, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.35884831460674155, |
| "grad_norm": 1.00020170211792, |
| "learning_rate": 4.9637477734948654e-06, |
| "loss": 0.6747, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.3591994382022472, |
| "grad_norm": 0.8997200727462769, |
| "learning_rate": 4.963669283738471e-06, |
| "loss": 0.7253, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.3595505617977528, |
| "grad_norm": 0.8761312365531921, |
| "learning_rate": 4.963590709726695e-06, |
| "loss": 0.6832, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.35990168539325845, |
| "grad_norm": 0.9415677189826965, |
| "learning_rate": 4.963512051462226e-06, |
| "loss": 0.6998, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.36025280898876405, |
| "grad_norm": 0.9139187932014465, |
| "learning_rate": 4.963433308947751e-06, |
| "loss": 0.7292, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.36060393258426965, |
| "grad_norm": 0.9207375049591064, |
| "learning_rate": 4.963354482185966e-06, |
| "loss": 0.6779, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.3609550561797753, |
| "grad_norm": 0.9517902135848999, |
| "learning_rate": 4.963275571179565e-06, |
| "loss": 0.7525, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.3613061797752809, |
| "grad_norm": 0.9088241457939148, |
| "learning_rate": 4.963196575931248e-06, |
| "loss": 0.7196, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.3616573033707865, |
| "grad_norm": 0.8898507356643677, |
| "learning_rate": 4.963117496443715e-06, |
| "loss": 0.7205, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.36200842696629215, |
| "grad_norm": 0.9045488834381104, |
| "learning_rate": 4.963038332719672e-06, |
| "loss": 0.6937, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.36235955056179775, |
| "grad_norm": 0.9505866169929504, |
| "learning_rate": 4.962959084761826e-06, |
| "loss": 0.7006, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.36271067415730335, |
| "grad_norm": 0.8908640742301941, |
| "learning_rate": 4.962879752572885e-06, |
| "loss": 0.7191, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.363061797752809, |
| "grad_norm": 0.925959050655365, |
| "learning_rate": 4.962800336155567e-06, |
| "loss": 0.6963, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.3634129213483146, |
| "grad_norm": 0.9082480072975159, |
| "learning_rate": 4.962720835512583e-06, |
| "loss": 0.7084, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.3637640449438202, |
| "grad_norm": 0.9313839077949524, |
| "learning_rate": 4.962641250646654e-06, |
| "loss": 0.7425, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.36411516853932585, |
| "grad_norm": 0.9484413266181946, |
| "learning_rate": 4.962561581560501e-06, |
| "loss": 0.6814, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.36446629213483145, |
| "grad_norm": 0.9550908207893372, |
| "learning_rate": 4.96248182825685e-06, |
| "loss": 0.6747, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.3648174157303371, |
| "grad_norm": 0.9393624663352966, |
| "learning_rate": 4.962401990738428e-06, |
| "loss": 0.7172, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.3651685393258427, |
| "grad_norm": 0.961174726486206, |
| "learning_rate": 4.962322069007964e-06, |
| "loss": 0.6967, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.3655196629213483, |
| "grad_norm": 0.9074354767799377, |
| "learning_rate": 4.962242063068193e-06, |
| "loss": 0.7019, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.36587078651685395, |
| "grad_norm": 0.9572650194168091, |
| "learning_rate": 4.962161972921851e-06, |
| "loss": 0.717, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.36622191011235955, |
| "grad_norm": 0.9262009263038635, |
| "learning_rate": 4.962081798571675e-06, |
| "loss": 0.7332, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.36657303370786515, |
| "grad_norm": 0.8846391439437866, |
| "learning_rate": 4.9620015400204094e-06, |
| "loss": 0.7359, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.3669241573033708, |
| "grad_norm": 0.9351332187652588, |
| "learning_rate": 4.961921197270797e-06, |
| "loss": 0.7809, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.3672752808988764, |
| "grad_norm": 0.9081857204437256, |
| "learning_rate": 4.961840770325587e-06, |
| "loss": 0.7169, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.367626404494382, |
| "grad_norm": 0.9414200782775879, |
| "learning_rate": 4.961760259187529e-06, |
| "loss": 0.7154, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.36797752808988765, |
| "grad_norm": 0.9009916186332703, |
| "learning_rate": 4.961679663859377e-06, |
| "loss": 0.7013, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.36832865168539325, |
| "grad_norm": 0.9196215271949768, |
| "learning_rate": 4.961598984343886e-06, |
| "loss": 0.706, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.3686797752808989, |
| "grad_norm": 0.947223424911499, |
| "learning_rate": 4.961518220643817e-06, |
| "loss": 0.712, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.3690308988764045, |
| "grad_norm": 0.8798839449882507, |
| "learning_rate": 4.961437372761932e-06, |
| "loss": 0.6821, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.3693820224719101, |
| "grad_norm": 0.9458513259887695, |
| "learning_rate": 4.961356440700994e-06, |
| "loss": 0.7503, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.36973314606741575, |
| "grad_norm": 0.9466217160224915, |
| "learning_rate": 4.961275424463772e-06, |
| "loss": 0.7086, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.37008426966292135, |
| "grad_norm": 0.9832736849784851, |
| "learning_rate": 4.961194324053037e-06, |
| "loss": 0.7332, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.37043539325842695, |
| "grad_norm": 0.9869272708892822, |
| "learning_rate": 4.961113139471562e-06, |
| "loss": 0.7595, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.3707865168539326, |
| "grad_norm": 0.9590395092964172, |
| "learning_rate": 4.961031870722123e-06, |
| "loss": 0.7471, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.3711376404494382, |
| "grad_norm": 0.9194419980049133, |
| "learning_rate": 4.9609505178075015e-06, |
| "loss": 0.7147, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.3714887640449438, |
| "grad_norm": 0.8812661170959473, |
| "learning_rate": 4.960869080730477e-06, |
| "loss": 0.6723, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.37183988764044945, |
| "grad_norm": 0.9303286671638489, |
| "learning_rate": 4.960787559493836e-06, |
| "loss": 0.7253, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.37219101123595505, |
| "grad_norm": 0.9536207318305969, |
| "learning_rate": 4.960705954100366e-06, |
| "loss": 0.7325, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.37254213483146065, |
| "grad_norm": 0.9165772795677185, |
| "learning_rate": 4.960624264552858e-06, |
| "loss": 0.6949, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.3728932584269663, |
| "grad_norm": 0.9144523739814758, |
| "learning_rate": 4.9605424908541065e-06, |
| "loss": 0.7355, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.3732443820224719, |
| "grad_norm": 0.8873782753944397, |
| "learning_rate": 4.960460633006906e-06, |
| "loss": 0.6819, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.37359550561797755, |
| "grad_norm": 0.9444409012794495, |
| "learning_rate": 4.960378691014058e-06, |
| "loss": 0.6851, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.37394662921348315, |
| "grad_norm": 0.959851861000061, |
| "learning_rate": 4.960296664878364e-06, |
| "loss": 0.7127, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.37429775280898875, |
| "grad_norm": 0.9353641867637634, |
| "learning_rate": 4.9602145546026305e-06, |
| "loss": 0.7208, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.3746488764044944, |
| "grad_norm": 0.9105174541473389, |
| "learning_rate": 4.960132360189663e-06, |
| "loss": 0.6717, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 0.9150325655937195, |
| "learning_rate": 4.960050081642274e-06, |
| "loss": 0.7233, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.3753511235955056, |
| "grad_norm": 0.9387450814247131, |
| "learning_rate": 4.959967718963279e-06, |
| "loss": 0.7104, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.37570224719101125, |
| "grad_norm": 0.9268593788146973, |
| "learning_rate": 4.959885272155491e-06, |
| "loss": 0.7209, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.37605337078651685, |
| "grad_norm": 0.9124265909194946, |
| "learning_rate": 4.959802741221733e-06, |
| "loss": 0.6982, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.37640449438202245, |
| "grad_norm": 0.9302802681922913, |
| "learning_rate": 4.9597201261648255e-06, |
| "loss": 0.7105, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.3767556179775281, |
| "grad_norm": 0.9560679793357849, |
| "learning_rate": 4.959637426987595e-06, |
| "loss": 0.7115, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.3771067415730337, |
| "grad_norm": 0.9246500730514526, |
| "learning_rate": 4.9595546436928685e-06, |
| "loss": 0.7319, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.37745786516853935, |
| "grad_norm": 0.9188322424888611, |
| "learning_rate": 4.959471776283479e-06, |
| "loss": 0.6829, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.37780898876404495, |
| "grad_norm": 0.9240451455116272, |
| "learning_rate": 4.959388824762258e-06, |
| "loss": 0.7183, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.37816011235955055, |
| "grad_norm": 0.8934212923049927, |
| "learning_rate": 4.959305789132044e-06, |
| "loss": 0.6982, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.3785112359550562, |
| "grad_norm": 0.9386299848556519, |
| "learning_rate": 4.959222669395677e-06, |
| "loss": 0.6942, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.3788623595505618, |
| "grad_norm": 0.9435991644859314, |
| "learning_rate": 4.959139465555999e-06, |
| "loss": 0.7382, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.3792134831460674, |
| "grad_norm": 0.9004935026168823, |
| "learning_rate": 4.959056177615855e-06, |
| "loss": 0.6944, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.37956460674157305, |
| "grad_norm": 0.9269630312919617, |
| "learning_rate": 4.958972805578095e-06, |
| "loss": 0.7037, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.37991573033707865, |
| "grad_norm": 0.9682616591453552, |
| "learning_rate": 4.9588893494455684e-06, |
| "loss": 0.6874, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.38026685393258425, |
| "grad_norm": 0.8845170140266418, |
| "learning_rate": 4.958805809221131e-06, |
| "loss": 0.6945, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.3806179775280899, |
| "grad_norm": 0.940234363079071, |
| "learning_rate": 4.958722184907638e-06, |
| "loss": 0.6813, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.3809691011235955, |
| "grad_norm": 0.9013071656227112, |
| "learning_rate": 4.95863847650795e-06, |
| "loss": 0.694, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.3813202247191011, |
| "grad_norm": 0.8996568322181702, |
| "learning_rate": 4.958554684024931e-06, |
| "loss": 0.6558, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.38167134831460675, |
| "grad_norm": 0.9193875193595886, |
| "learning_rate": 4.9584708074614455e-06, |
| "loss": 0.6996, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.38202247191011235, |
| "grad_norm": 0.9180407524108887, |
| "learning_rate": 4.958386846820361e-06, |
| "loss": 0.7147, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.382373595505618, |
| "grad_norm": 1.0345251560211182, |
| "learning_rate": 4.958302802104551e-06, |
| "loss": 0.7393, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.3827247191011236, |
| "grad_norm": 0.9022161364555359, |
| "learning_rate": 4.958218673316889e-06, |
| "loss": 0.6949, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.3830758426966292, |
| "grad_norm": 0.9014359712600708, |
| "learning_rate": 4.958134460460251e-06, |
| "loss": 0.6947, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.38342696629213485, |
| "grad_norm": 0.9596666693687439, |
| "learning_rate": 4.958050163537519e-06, |
| "loss": 0.7763, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.38377808988764045, |
| "grad_norm": 0.8888493180274963, |
| "learning_rate": 4.957965782551574e-06, |
| "loss": 0.7093, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.38412921348314605, |
| "grad_norm": 0.9068475365638733, |
| "learning_rate": 4.957881317505303e-06, |
| "loss": 0.6893, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.3844803370786517, |
| "grad_norm": 0.8918416500091553, |
| "learning_rate": 4.957796768401594e-06, |
| "loss": 0.6926, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.3848314606741573, |
| "grad_norm": 0.8772871494293213, |
| "learning_rate": 4.957712135243339e-06, |
| "loss": 0.6714, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.3851825842696629, |
| "grad_norm": 0.9466787576675415, |
| "learning_rate": 4.9576274180334315e-06, |
| "loss": 0.693, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.38553370786516855, |
| "grad_norm": 0.8940508961677551, |
| "learning_rate": 4.957542616774768e-06, |
| "loss": 0.6727, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.38588483146067415, |
| "grad_norm": 0.92746901512146, |
| "learning_rate": 4.957457731470253e-06, |
| "loss": 0.6832, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.3862359550561798, |
| "grad_norm": 0.9406936168670654, |
| "learning_rate": 4.9573727621227845e-06, |
| "loss": 0.7121, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.3865870786516854, |
| "grad_norm": 0.9280776381492615, |
| "learning_rate": 4.957287708735271e-06, |
| "loss": 0.7149, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.386938202247191, |
| "grad_norm": 0.9443319439888, |
| "learning_rate": 4.9572025713106195e-06, |
| "loss": 0.7205, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.38728932584269665, |
| "grad_norm": 0.9101063013076782, |
| "learning_rate": 4.957117349851745e-06, |
| "loss": 0.703, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.38764044943820225, |
| "grad_norm": 0.8856722116470337, |
| "learning_rate": 4.957032044361557e-06, |
| "loss": 0.7079, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.38799157303370785, |
| "grad_norm": 0.928821325302124, |
| "learning_rate": 4.956946654842977e-06, |
| "loss": 0.6935, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.3883426966292135, |
| "grad_norm": 0.9314661026000977, |
| "learning_rate": 4.956861181298924e-06, |
| "loss": 0.7062, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.3886938202247191, |
| "grad_norm": 0.9111589193344116, |
| "learning_rate": 4.956775623732321e-06, |
| "loss": 0.7343, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.3890449438202247, |
| "grad_norm": 0.8977031707763672, |
| "learning_rate": 4.9566899821460935e-06, |
| "loss": 0.6702, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.38939606741573035, |
| "grad_norm": 0.9246605634689331, |
| "learning_rate": 4.956604256543171e-06, |
| "loss": 0.7062, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.38974719101123595, |
| "grad_norm": 0.9252159595489502, |
| "learning_rate": 4.956518446926485e-06, |
| "loss": 0.6981, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.39009831460674155, |
| "grad_norm": 0.9128794074058533, |
| "learning_rate": 4.956432553298971e-06, |
| "loss": 0.6802, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.3904494382022472, |
| "grad_norm": 0.9178376197814941, |
| "learning_rate": 4.956346575663564e-06, |
| "loss": 0.6753, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.3908005617977528, |
| "grad_norm": 0.8804501891136169, |
| "learning_rate": 4.956260514023207e-06, |
| "loss": 0.7378, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.39115168539325845, |
| "grad_norm": 0.9001629948616028, |
| "learning_rate": 4.956174368380842e-06, |
| "loss": 0.7061, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.39150280898876405, |
| "grad_norm": 0.8731476664543152, |
| "learning_rate": 4.956088138739416e-06, |
| "loss": 0.6749, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.39185393258426965, |
| "grad_norm": 0.9055764079093933, |
| "learning_rate": 4.956001825101877e-06, |
| "loss": 0.7172, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.3922050561797753, |
| "grad_norm": 0.9398018717765808, |
| "learning_rate": 4.955915427471178e-06, |
| "loss": 0.711, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.3925561797752809, |
| "grad_norm": 0.9070323705673218, |
| "learning_rate": 4.9558289458502715e-06, |
| "loss": 0.7467, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.3929073033707865, |
| "grad_norm": 0.916070818901062, |
| "learning_rate": 4.9557423802421166e-06, |
| "loss": 0.6929, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.39325842696629215, |
| "grad_norm": 0.9212610721588135, |
| "learning_rate": 4.955655730649674e-06, |
| "loss": 0.7098, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.39360955056179775, |
| "grad_norm": 0.8813148736953735, |
| "learning_rate": 4.955568997075908e-06, |
| "loss": 0.7241, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.39396067415730335, |
| "grad_norm": 0.960132360458374, |
| "learning_rate": 4.955482179523782e-06, |
| "loss": 0.7531, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.394311797752809, |
| "grad_norm": 0.8893491625785828, |
| "learning_rate": 4.955395277996268e-06, |
| "loss": 0.708, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.3946629213483146, |
| "grad_norm": 0.9316296577453613, |
| "learning_rate": 4.955308292496336e-06, |
| "loss": 0.7129, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.3950140449438202, |
| "grad_norm": 0.8866797685623169, |
| "learning_rate": 4.955221223026961e-06, |
| "loss": 0.6986, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.39536516853932585, |
| "grad_norm": 0.9073642492294312, |
| "learning_rate": 4.955134069591121e-06, |
| "loss": 0.7192, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.39571629213483145, |
| "grad_norm": 0.8996924757957458, |
| "learning_rate": 4.9550468321917975e-06, |
| "loss": 0.6902, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.3960674157303371, |
| "grad_norm": 0.9362848401069641, |
| "learning_rate": 4.9549595108319726e-06, |
| "loss": 0.7331, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.3964185393258427, |
| "grad_norm": 0.9147123098373413, |
| "learning_rate": 4.954872105514633e-06, |
| "loss": 0.731, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.3967696629213483, |
| "grad_norm": 0.9537502527236938, |
| "learning_rate": 4.954784616242769e-06, |
| "loss": 0.7347, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.39712078651685395, |
| "grad_norm": 0.9200554490089417, |
| "learning_rate": 4.9546970430193705e-06, |
| "loss": 0.7205, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.39747191011235955, |
| "grad_norm": 0.929223358631134, |
| "learning_rate": 4.954609385847434e-06, |
| "loss": 0.7108, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.39782303370786515, |
| "grad_norm": 0.9166401624679565, |
| "learning_rate": 4.954521644729958e-06, |
| "loss": 0.732, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.3981741573033708, |
| "grad_norm": 0.9084526300430298, |
| "learning_rate": 4.954433819669941e-06, |
| "loss": 0.7078, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.3985252808988764, |
| "grad_norm": 0.9210569858551025, |
| "learning_rate": 4.954345910670388e-06, |
| "loss": 0.6905, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.398876404494382, |
| "grad_norm": 0.890738308429718, |
| "learning_rate": 4.9542579177343054e-06, |
| "loss": 0.6975, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.39922752808988765, |
| "grad_norm": 0.8724343776702881, |
| "learning_rate": 4.954169840864701e-06, |
| "loss": 0.6323, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.39957865168539325, |
| "grad_norm": 0.9185892343521118, |
| "learning_rate": 4.95408168006459e-06, |
| "loss": 0.7106, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.3999297752808989, |
| "grad_norm": 0.9605926871299744, |
| "learning_rate": 4.953993435336983e-06, |
| "loss": 0.7208, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.4002808988764045, |
| "grad_norm": 0.9934019446372986, |
| "learning_rate": 4.953905106684902e-06, |
| "loss": 0.7394, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.4006320224719101, |
| "grad_norm": 0.8674790263175964, |
| "learning_rate": 4.953816694111367e-06, |
| "loss": 0.6588, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.40098314606741575, |
| "grad_norm": 0.9420093894004822, |
| "learning_rate": 4.9537281976194e-06, |
| "loss": 0.6934, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.40133426966292135, |
| "grad_norm": 0.8882573246955872, |
| "learning_rate": 4.953639617212028e-06, |
| "loss": 0.7244, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.40168539325842695, |
| "grad_norm": 1.0398857593536377, |
| "learning_rate": 4.95355095289228e-06, |
| "loss": 0.7402, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.4020365168539326, |
| "grad_norm": 0.9207865595817566, |
| "learning_rate": 4.95346220466319e-06, |
| "loss": 0.6942, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.4023876404494382, |
| "grad_norm": 0.9174979329109192, |
| "learning_rate": 4.9533733725277924e-06, |
| "loss": 0.7241, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.4027387640449438, |
| "grad_norm": 0.9054567217826843, |
| "learning_rate": 4.953284456489124e-06, |
| "loss": 0.6666, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.40308988764044945, |
| "grad_norm": 0.9376466870307922, |
| "learning_rate": 4.953195456550227e-06, |
| "loss": 0.718, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.40344101123595505, |
| "grad_norm": 0.9617171883583069, |
| "learning_rate": 4.953106372714144e-06, |
| "loss": 0.7226, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.40379213483146065, |
| "grad_norm": 0.8789179921150208, |
| "learning_rate": 4.953017204983923e-06, |
| "loss": 0.6981, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.4041432584269663, |
| "grad_norm": 0.8860397934913635, |
| "learning_rate": 4.952927953362612e-06, |
| "loss": 0.6894, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.4044943820224719, |
| "grad_norm": 0.9390692114830017, |
| "learning_rate": 4.952838617853265e-06, |
| "loss": 0.6788, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.40484550561797755, |
| "grad_norm": 0.9032955765724182, |
| "learning_rate": 4.952749198458935e-06, |
| "loss": 0.7417, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.40519662921348315, |
| "grad_norm": 0.9163577556610107, |
| "learning_rate": 4.952659695182682e-06, |
| "loss": 0.7195, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.40554775280898875, |
| "grad_norm": 0.8781930804252625, |
| "learning_rate": 4.952570108027566e-06, |
| "loss": 0.6691, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.4058988764044944, |
| "grad_norm": 0.887157678604126, |
| "learning_rate": 4.9524804369966505e-06, |
| "loss": 0.6993, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.40625, |
| "grad_norm": 0.9891164302825928, |
| "learning_rate": 4.9523906820930035e-06, |
| "loss": 0.7357, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.4066011235955056, |
| "grad_norm": 0.9419492483139038, |
| "learning_rate": 4.952300843319693e-06, |
| "loss": 0.674, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.40695224719101125, |
| "grad_norm": 0.9248577356338501, |
| "learning_rate": 4.952210920679793e-06, |
| "loss": 0.724, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.40730337078651685, |
| "grad_norm": 0.9277257919311523, |
| "learning_rate": 4.9521209141763775e-06, |
| "loss": 0.7179, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.40765449438202245, |
| "grad_norm": 0.9654591083526611, |
| "learning_rate": 4.952030823812525e-06, |
| "loss": 0.6796, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.4080056179775281, |
| "grad_norm": 0.9730082154273987, |
| "learning_rate": 4.951940649591316e-06, |
| "loss": 0.715, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.4083567415730337, |
| "grad_norm": 0.9230988621711731, |
| "learning_rate": 4.9518503915158356e-06, |
| "loss": 0.7218, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.40870786516853935, |
| "grad_norm": 0.9274692535400391, |
| "learning_rate": 4.951760049589169e-06, |
| "loss": 0.6773, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.40905898876404495, |
| "grad_norm": 0.894001841545105, |
| "learning_rate": 4.951669623814408e-06, |
| "loss": 0.7118, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.40941011235955055, |
| "grad_norm": 0.924102783203125, |
| "learning_rate": 4.951579114194643e-06, |
| "loss": 0.6648, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.4097612359550562, |
| "grad_norm": 0.9237919449806213, |
| "learning_rate": 4.95148852073297e-06, |
| "loss": 0.697, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.4101123595505618, |
| "grad_norm": 0.9063787460327148, |
| "learning_rate": 4.951397843432489e-06, |
| "loss": 0.699, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.4104634831460674, |
| "grad_norm": 0.8865030407905579, |
| "learning_rate": 4.951307082296297e-06, |
| "loss": 0.6768, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.41081460674157305, |
| "grad_norm": 0.9309532046318054, |
| "learning_rate": 4.9512162373275015e-06, |
| "loss": 0.7168, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.41116573033707865, |
| "grad_norm": 0.9091811776161194, |
| "learning_rate": 4.951125308529208e-06, |
| "loss": 0.6962, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.41151685393258425, |
| "grad_norm": 0.8983654975891113, |
| "learning_rate": 4.951034295904527e-06, |
| "loss": 0.7193, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.4118679775280899, |
| "grad_norm": 0.8983482718467712, |
| "learning_rate": 4.95094319945657e-06, |
| "loss": 0.7234, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.4122191011235955, |
| "grad_norm": 0.9178565740585327, |
| "learning_rate": 4.950852019188454e-06, |
| "loss": 0.7133, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.4125702247191011, |
| "grad_norm": 0.9206750988960266, |
| "learning_rate": 4.950760755103294e-06, |
| "loss": 0.7417, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.41292134831460675, |
| "grad_norm": 0.9141495227813721, |
| "learning_rate": 4.950669407204215e-06, |
| "loss": 0.7238, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.41327247191011235, |
| "grad_norm": 0.9251378178596497, |
| "learning_rate": 4.9505779754943385e-06, |
| "loss": 0.6944, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.413623595505618, |
| "grad_norm": 0.9240232706069946, |
| "learning_rate": 4.950486459976793e-06, |
| "loss": 0.7241, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.4139747191011236, |
| "grad_norm": 0.9050787687301636, |
| "learning_rate": 4.950394860654707e-06, |
| "loss": 0.7044, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.4143258426966292, |
| "grad_norm": 0.9517340064048767, |
| "learning_rate": 4.950303177531213e-06, |
| "loss": 0.7363, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.41467696629213485, |
| "grad_norm": 0.9461115598678589, |
| "learning_rate": 4.950211410609448e-06, |
| "loss": 0.6786, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.41502808988764045, |
| "grad_norm": 0.9148724675178528, |
| "learning_rate": 4.950119559892549e-06, |
| "loss": 0.6873, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.41537921348314605, |
| "grad_norm": 0.9085513353347778, |
| "learning_rate": 4.950027625383657e-06, |
| "loss": 0.7209, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.4157303370786517, |
| "grad_norm": 0.9327560067176819, |
| "learning_rate": 4.949935607085919e-06, |
| "loss": 0.7086, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.4160814606741573, |
| "grad_norm": 0.936128556728363, |
| "learning_rate": 4.949843505002478e-06, |
| "loss": 0.6894, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.4164325842696629, |
| "grad_norm": 0.931978702545166, |
| "learning_rate": 4.9497513191364855e-06, |
| "loss": 0.7209, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.41678370786516855, |
| "grad_norm": 0.8966789245605469, |
| "learning_rate": 4.949659049491095e-06, |
| "loss": 0.7211, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.41713483146067415, |
| "grad_norm": 0.9003615975379944, |
| "learning_rate": 4.9495666960694596e-06, |
| "loss": 0.6835, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.4174859550561798, |
| "grad_norm": 0.933268666267395, |
| "learning_rate": 4.9494742588747405e-06, |
| "loss": 0.7335, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.4178370786516854, |
| "grad_norm": 0.8876200318336487, |
| "learning_rate": 4.949381737910098e-06, |
| "loss": 0.7132, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.418188202247191, |
| "grad_norm": 0.9531140923500061, |
| "learning_rate": 4.949289133178695e-06, |
| "loss": 0.746, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.41853932584269665, |
| "grad_norm": 0.9262682795524597, |
| "learning_rate": 4.949196444683701e-06, |
| "loss": 0.7155, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.41889044943820225, |
| "grad_norm": 0.9318521022796631, |
| "learning_rate": 4.949103672428282e-06, |
| "loss": 0.7234, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.41924157303370785, |
| "grad_norm": 0.922127366065979, |
| "learning_rate": 4.949010816415616e-06, |
| "loss": 0.7116, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.4195926966292135, |
| "grad_norm": 0.9445079565048218, |
| "learning_rate": 4.948917876648874e-06, |
| "loss": 0.6993, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.4199438202247191, |
| "grad_norm": 0.9283545613288879, |
| "learning_rate": 4.948824853131237e-06, |
| "loss": 0.7078, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.4202949438202247, |
| "grad_norm": 0.8923372626304626, |
| "learning_rate": 4.9487317458658845e-06, |
| "loss": 0.6818, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.42064606741573035, |
| "grad_norm": 0.907924234867096, |
| "learning_rate": 4.948638554856002e-06, |
| "loss": 0.6799, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.42099719101123595, |
| "grad_norm": 0.8753466606140137, |
| "learning_rate": 4.948545280104777e-06, |
| "loss": 0.6789, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.42134831460674155, |
| "grad_norm": 0.965340256690979, |
| "learning_rate": 4.948451921615398e-06, |
| "loss": 0.6911, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.4216994382022472, |
| "grad_norm": 0.9519873261451721, |
| "learning_rate": 4.948358479391059e-06, |
| "loss": 0.6899, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.4220505617977528, |
| "grad_norm": 0.907811164855957, |
| "learning_rate": 4.948264953434955e-06, |
| "loss": 0.7401, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.42240168539325845, |
| "grad_norm": 0.9398930072784424, |
| "learning_rate": 4.948171343750284e-06, |
| "loss": 0.7036, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.42275280898876405, |
| "grad_norm": 0.9245709776878357, |
| "learning_rate": 4.948077650340248e-06, |
| "loss": 0.7042, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.42310393258426965, |
| "grad_norm": 0.8742227554321289, |
| "learning_rate": 4.947983873208052e-06, |
| "loss": 0.6941, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.4234550561797753, |
| "grad_norm": 0.9183447957038879, |
| "learning_rate": 4.947890012356902e-06, |
| "loss": 0.7432, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.4238061797752809, |
| "grad_norm": 0.9594230055809021, |
| "learning_rate": 4.947796067790009e-06, |
| "loss": 0.7486, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.4241573033707865, |
| "grad_norm": 0.9002628326416016, |
| "learning_rate": 4.947702039510583e-06, |
| "loss": 0.6652, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.42450842696629215, |
| "grad_norm": 0.9353854060173035, |
| "learning_rate": 4.947607927521844e-06, |
| "loss": 0.7501, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.42485955056179775, |
| "grad_norm": 0.8836169242858887, |
| "learning_rate": 4.947513731827007e-06, |
| "loss": 0.6904, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.42521067415730335, |
| "grad_norm": 0.9366084337234497, |
| "learning_rate": 4.947419452429295e-06, |
| "loss": 0.7013, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.425561797752809, |
| "grad_norm": 0.9252647161483765, |
| "learning_rate": 4.947325089331932e-06, |
| "loss": 0.7343, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.4259129213483146, |
| "grad_norm": 0.9203290343284607, |
| "learning_rate": 4.947230642538145e-06, |
| "loss": 0.7129, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.4262640449438202, |
| "grad_norm": 0.8908094763755798, |
| "learning_rate": 4.947136112051164e-06, |
| "loss": 0.6785, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.42661516853932585, |
| "grad_norm": 0.9249038696289062, |
| "learning_rate": 4.947041497874223e-06, |
| "loss": 0.6664, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.42696629213483145, |
| "grad_norm": 0.9546428918838501, |
| "learning_rate": 4.946946800010556e-06, |
| "loss": 0.7276, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.4273174157303371, |
| "grad_norm": 0.9032455682754517, |
| "learning_rate": 4.946852018463403e-06, |
| "loss": 0.6757, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.4276685393258427, |
| "grad_norm": 0.9056017994880676, |
| "learning_rate": 4.946757153236003e-06, |
| "loss": 0.6878, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.4280196629213483, |
| "grad_norm": 0.9265672564506531, |
| "learning_rate": 4.946662204331603e-06, |
| "loss": 0.7043, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.42837078651685395, |
| "grad_norm": 0.8721897006034851, |
| "learning_rate": 4.94656717175345e-06, |
| "loss": 0.627, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.42872191011235955, |
| "grad_norm": 0.9220138192176819, |
| "learning_rate": 4.9464720555047935e-06, |
| "loss": 0.7233, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.42907303370786515, |
| "grad_norm": 0.9333030581474304, |
| "learning_rate": 4.9463768555888855e-06, |
| "loss": 0.7065, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.4294241573033708, |
| "grad_norm": 0.9611704349517822, |
| "learning_rate": 4.946281572008982e-06, |
| "loss": 0.7044, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.4297752808988764, |
| "grad_norm": 0.9443504810333252, |
| "learning_rate": 4.946186204768343e-06, |
| "loss": 0.7322, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.430126404494382, |
| "grad_norm": 0.9180960059165955, |
| "learning_rate": 4.946090753870229e-06, |
| "loss": 0.7435, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.43047752808988765, |
| "grad_norm": 0.9297470450401306, |
| "learning_rate": 4.945995219317903e-06, |
| "loss": 0.7211, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.43082865168539325, |
| "grad_norm": 0.9243416786193848, |
| "learning_rate": 4.945899601114634e-06, |
| "loss": 0.7255, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.4311797752808989, |
| "grad_norm": 0.9293515086174011, |
| "learning_rate": 4.945803899263692e-06, |
| "loss": 0.7466, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.4315308988764045, |
| "grad_norm": 0.8804196715354919, |
| "learning_rate": 4.94570811376835e-06, |
| "loss": 0.6495, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.4318820224719101, |
| "grad_norm": 0.8709548115730286, |
| "learning_rate": 4.945612244631884e-06, |
| "loss": 0.6573, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.43223314606741575, |
| "grad_norm": 0.9267548322677612, |
| "learning_rate": 4.94551629185757e-06, |
| "loss": 0.6949, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.43258426966292135, |
| "grad_norm": 0.9049427509307861, |
| "learning_rate": 4.945420255448693e-06, |
| "loss": 0.6724, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.43293539325842695, |
| "grad_norm": 0.9540070295333862, |
| "learning_rate": 4.945324135408535e-06, |
| "loss": 0.7162, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.4332865168539326, |
| "grad_norm": 0.9069761633872986, |
| "learning_rate": 4.945227931740384e-06, |
| "loss": 0.6851, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.4336376404494382, |
| "grad_norm": 0.9255860447883606, |
| "learning_rate": 4.94513164444753e-06, |
| "loss": 0.7108, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.4339887640449438, |
| "grad_norm": 0.9611523151397705, |
| "learning_rate": 4.945035273533267e-06, |
| "loss": 0.7156, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.43433988764044945, |
| "grad_norm": 0.9171714782714844, |
| "learning_rate": 4.9449388190008896e-06, |
| "loss": 0.6744, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.43469101123595505, |
| "grad_norm": 0.8625194430351257, |
| "learning_rate": 4.944842280853696e-06, |
| "loss": 0.6792, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.43504213483146065, |
| "grad_norm": 0.945565938949585, |
| "learning_rate": 4.94474565909499e-06, |
| "loss": 0.7015, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.4353932584269663, |
| "grad_norm": 0.8865808248519897, |
| "learning_rate": 4.944648953728073e-06, |
| "loss": 0.6664, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.4357443820224719, |
| "grad_norm": 0.8900501728057861, |
| "learning_rate": 4.9445521647562545e-06, |
| "loss": 0.6831, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.43609550561797755, |
| "grad_norm": 0.9169394373893738, |
| "learning_rate": 4.944455292182844e-06, |
| "loss": 0.6377, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.43644662921348315, |
| "grad_norm": 0.9147849082946777, |
| "learning_rate": 4.944358336011154e-06, |
| "loss": 0.735, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.43679775280898875, |
| "grad_norm": 0.9505800604820251, |
| "learning_rate": 4.9442612962445e-06, |
| "loss": 0.7309, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.4371488764044944, |
| "grad_norm": 0.8858315348625183, |
| "learning_rate": 4.9441641728862015e-06, |
| "loss": 0.6904, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.4375, |
| "grad_norm": 0.9335656762123108, |
| "learning_rate": 4.94406696593958e-06, |
| "loss": 0.7219, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.4378511235955056, |
| "grad_norm": 0.8928138613700867, |
| "learning_rate": 4.94396967540796e-06, |
| "loss": 0.6521, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.43820224719101125, |
| "grad_norm": 0.9080971479415894, |
| "learning_rate": 4.943872301294668e-06, |
| "loss": 0.6925, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.43855337078651685, |
| "grad_norm": 0.9383649826049805, |
| "learning_rate": 4.943774843603034e-06, |
| "loss": 0.708, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.43890449438202245, |
| "grad_norm": 0.9101510047912598, |
| "learning_rate": 4.943677302336393e-06, |
| "loss": 0.6901, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.4392556179775281, |
| "grad_norm": 0.9402358531951904, |
| "learning_rate": 4.943579677498078e-06, |
| "loss": 0.7169, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.4396067415730337, |
| "grad_norm": 0.8998861908912659, |
| "learning_rate": 4.943481969091429e-06, |
| "loss": 0.6962, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.43995786516853935, |
| "grad_norm": 0.9605402946472168, |
| "learning_rate": 4.943384177119788e-06, |
| "loss": 0.6783, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.44030898876404495, |
| "grad_norm": 0.9242511987686157, |
| "learning_rate": 4.9432863015865e-06, |
| "loss": 0.71, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.44066011235955055, |
| "grad_norm": 0.9048755168914795, |
| "learning_rate": 4.943188342494909e-06, |
| "loss": 0.7143, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.4410112359550562, |
| "grad_norm": 0.883234977722168, |
| "learning_rate": 4.9430902998483685e-06, |
| "loss": 0.6922, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.4413623595505618, |
| "grad_norm": 0.9546087980270386, |
| "learning_rate": 4.942992173650231e-06, |
| "loss": 0.725, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.4417134831460674, |
| "grad_norm": 0.9286230206489563, |
| "learning_rate": 4.942893963903851e-06, |
| "loss": 0.7263, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.44206460674157305, |
| "grad_norm": 0.901672899723053, |
| "learning_rate": 4.942795670612588e-06, |
| "loss": 0.6826, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.44241573033707865, |
| "grad_norm": 0.9127205610275269, |
| "learning_rate": 4.9426972937798035e-06, |
| "loss": 0.6727, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.44276685393258425, |
| "grad_norm": 0.9450622797012329, |
| "learning_rate": 4.942598833408861e-06, |
| "loss": 0.726, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.4431179775280899, |
| "grad_norm": 0.9145720601081848, |
| "learning_rate": 4.94250028950313e-06, |
| "loss": 0.702, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.4434691011235955, |
| "grad_norm": 0.9019625782966614, |
| "learning_rate": 4.9424016620659786e-06, |
| "loss": 0.6682, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.4438202247191011, |
| "grad_norm": 0.9257623553276062, |
| "learning_rate": 4.9423029511007795e-06, |
| "loss": 0.6981, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.44417134831460675, |
| "grad_norm": 0.9080988764762878, |
| "learning_rate": 4.94220415661091e-06, |
| "loss": 0.708, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.44452247191011235, |
| "grad_norm": 0.9560781121253967, |
| "learning_rate": 4.942105278599748e-06, |
| "loss": 0.7021, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.444873595505618, |
| "grad_norm": 0.9422893524169922, |
| "learning_rate": 4.942006317070676e-06, |
| "loss": 0.6861, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.4452247191011236, |
| "grad_norm": 0.8909929990768433, |
| "learning_rate": 4.9419072720270764e-06, |
| "loss": 0.7143, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.4455758426966292, |
| "grad_norm": 0.9080603718757629, |
| "learning_rate": 4.941808143472339e-06, |
| "loss": 0.7073, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.44592696629213485, |
| "grad_norm": 0.8691872358322144, |
| "learning_rate": 4.941708931409851e-06, |
| "loss": 0.6425, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.44627808988764045, |
| "grad_norm": 0.9223096966743469, |
| "learning_rate": 4.9416096358430084e-06, |
| "loss": 0.6876, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.44662921348314605, |
| "grad_norm": 0.9030603766441345, |
| "learning_rate": 4.9415102567752045e-06, |
| "loss": 0.6893, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.4469803370786517, |
| "grad_norm": 0.9421838521957397, |
| "learning_rate": 4.94141079420984e-06, |
| "loss": 0.7267, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.4473314606741573, |
| "grad_norm": 0.8724151253700256, |
| "learning_rate": 4.941311248150314e-06, |
| "loss": 0.6587, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.4476825842696629, |
| "grad_norm": 0.8822656869888306, |
| "learning_rate": 4.9412116186000346e-06, |
| "loss": 0.6969, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.44803370786516855, |
| "grad_norm": 0.8846755623817444, |
| "learning_rate": 4.941111905562405e-06, |
| "loss": 0.6848, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.44838483146067415, |
| "grad_norm": 0.8999640941619873, |
| "learning_rate": 4.941012109040838e-06, |
| "loss": 0.6465, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.4487359550561798, |
| "grad_norm": 0.9194796681404114, |
| "learning_rate": 4.940912229038746e-06, |
| "loss": 0.6995, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.4490870786516854, |
| "grad_norm": 0.9296586513519287, |
| "learning_rate": 4.940812265559543e-06, |
| "loss": 0.6399, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.449438202247191, |
| "grad_norm": 0.9012690782546997, |
| "learning_rate": 4.94071221860665e-06, |
| "loss": 0.7109, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.44978932584269665, |
| "grad_norm": 0.9507624506950378, |
| "learning_rate": 4.940612088183488e-06, |
| "loss": 0.7498, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.45014044943820225, |
| "grad_norm": 0.8964364528656006, |
| "learning_rate": 4.940511874293481e-06, |
| "loss": 0.674, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.45049157303370785, |
| "grad_norm": 0.9396047592163086, |
| "learning_rate": 4.940411576940056e-06, |
| "loss": 0.699, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.4508426966292135, |
| "grad_norm": 0.9038118720054626, |
| "learning_rate": 4.940311196126643e-06, |
| "loss": 0.6598, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.4511938202247191, |
| "grad_norm": 0.9001778960227966, |
| "learning_rate": 4.940210731856675e-06, |
| "loss": 0.7129, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.4515449438202247, |
| "grad_norm": 0.9313659071922302, |
| "learning_rate": 4.94011018413359e-06, |
| "loss": 0.7138, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.45189606741573035, |
| "grad_norm": 0.9225174784660339, |
| "learning_rate": 4.9400095529608226e-06, |
| "loss": 0.7192, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.45224719101123595, |
| "grad_norm": 0.9100403189659119, |
| "learning_rate": 4.939908838341818e-06, |
| "loss": 0.6593, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.45259831460674155, |
| "grad_norm": 0.8900116086006165, |
| "learning_rate": 4.939808040280017e-06, |
| "loss": 0.6799, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.4529494382022472, |
| "grad_norm": 0.885908305644989, |
| "learning_rate": 4.939707158778872e-06, |
| "loss": 0.6889, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.4533005617977528, |
| "grad_norm": 0.9219464659690857, |
| "learning_rate": 4.939606193841827e-06, |
| "loss": 0.7198, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.45365168539325845, |
| "grad_norm": 0.9270339012145996, |
| "learning_rate": 4.939505145472339e-06, |
| "loss": 0.7121, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.45400280898876405, |
| "grad_norm": 0.9297556281089783, |
| "learning_rate": 4.939404013673862e-06, |
| "loss": 0.7119, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.45435393258426965, |
| "grad_norm": 0.9266655445098877, |
| "learning_rate": 4.9393027984498555e-06, |
| "loss": 0.6994, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.4547050561797753, |
| "grad_norm": 0.9152787923812866, |
| "learning_rate": 4.939201499803781e-06, |
| "loss": 0.6618, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.4550561797752809, |
| "grad_norm": 0.9240756034851074, |
| "learning_rate": 4.939100117739102e-06, |
| "loss": 0.7359, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.4554073033707865, |
| "grad_norm": 0.9413486123085022, |
| "learning_rate": 4.9389986522592855e-06, |
| "loss": 0.7266, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.45575842696629215, |
| "grad_norm": 0.8976860046386719, |
| "learning_rate": 4.938897103367803e-06, |
| "loss": 0.7063, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.45610955056179775, |
| "grad_norm": 0.9674167633056641, |
| "learning_rate": 4.938795471068126e-06, |
| "loss": 0.7191, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.45646067415730335, |
| "grad_norm": 0.9469913840293884, |
| "learning_rate": 4.938693755363731e-06, |
| "loss": 0.7263, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.456811797752809, |
| "grad_norm": 0.9106466174125671, |
| "learning_rate": 4.938591956258097e-06, |
| "loss": 0.7324, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.4571629213483146, |
| "grad_norm": 0.9906820058822632, |
| "learning_rate": 4.938490073754705e-06, |
| "loss": 0.7273, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.4575140449438202, |
| "grad_norm": 0.9152560234069824, |
| "learning_rate": 4.938388107857038e-06, |
| "loss": 0.6648, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.45786516853932585, |
| "grad_norm": 0.9459682703018188, |
| "learning_rate": 4.938286058568585e-06, |
| "loss": 0.6869, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.45821629213483145, |
| "grad_norm": 0.9387467503547668, |
| "learning_rate": 4.938183925892835e-06, |
| "loss": 0.7028, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.4585674157303371, |
| "grad_norm": 0.9118907451629639, |
| "learning_rate": 4.938081709833281e-06, |
| "loss": 0.6875, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.4589185393258427, |
| "grad_norm": 0.9032610654830933, |
| "learning_rate": 4.937979410393419e-06, |
| "loss": 0.7157, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.4592696629213483, |
| "grad_norm": 0.9495104551315308, |
| "learning_rate": 4.937877027576748e-06, |
| "loss": 0.6914, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.45962078651685395, |
| "grad_norm": 0.8904275894165039, |
| "learning_rate": 4.937774561386768e-06, |
| "loss": 0.7351, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.45997191011235955, |
| "grad_norm": 0.8993387222290039, |
| "learning_rate": 4.937672011826984e-06, |
| "loss": 0.7004, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.46032303370786515, |
| "grad_norm": 0.9243264198303223, |
| "learning_rate": 4.937569378900903e-06, |
| "loss": 0.7059, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.4606741573033708, |
| "grad_norm": 0.8777183294296265, |
| "learning_rate": 4.937466662612036e-06, |
| "loss": 0.6795, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.4610252808988764, |
| "grad_norm": 0.9348961114883423, |
| "learning_rate": 4.937363862963895e-06, |
| "loss": 0.6726, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.461376404494382, |
| "grad_norm": 0.9088345766067505, |
| "learning_rate": 4.937260979959994e-06, |
| "loss": 0.7114, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.46172752808988765, |
| "grad_norm": 0.9143536686897278, |
| "learning_rate": 4.937158013603854e-06, |
| "loss": 0.6819, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.46207865168539325, |
| "grad_norm": 0.9480457901954651, |
| "learning_rate": 4.937054963898996e-06, |
| "loss": 0.7335, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.4624297752808989, |
| "grad_norm": 0.9207438230514526, |
| "learning_rate": 4.9369518308489425e-06, |
| "loss": 0.7091, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.4627808988764045, |
| "grad_norm": 0.9547027349472046, |
| "learning_rate": 4.936848614457223e-06, |
| "loss": 0.7388, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.4631320224719101, |
| "grad_norm": 0.9442094564437866, |
| "learning_rate": 4.9367453147273645e-06, |
| "loss": 0.709, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.46348314606741575, |
| "grad_norm": 0.9056798815727234, |
| "learning_rate": 4.9366419316629024e-06, |
| "loss": 0.6919, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.46383426966292135, |
| "grad_norm": 0.8800448179244995, |
| "learning_rate": 4.936538465267371e-06, |
| "loss": 0.7141, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.46418539325842695, |
| "grad_norm": 0.9331648349761963, |
| "learning_rate": 4.93643491554431e-06, |
| "loss": 0.6871, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.4645365168539326, |
| "grad_norm": 0.9699183702468872, |
| "learning_rate": 4.9363312824972586e-06, |
| "loss": 0.729, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.4648876404494382, |
| "grad_norm": 0.9243112206459045, |
| "learning_rate": 4.9362275661297634e-06, |
| "loss": 0.6751, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.4652387640449438, |
| "grad_norm": 0.9310767650604248, |
| "learning_rate": 4.936123766445368e-06, |
| "loss": 0.7263, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.46558988764044945, |
| "grad_norm": 0.9163933992385864, |
| "learning_rate": 4.936019883447627e-06, |
| "loss": 0.715, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.46594101123595505, |
| "grad_norm": 0.9764875173568726, |
| "learning_rate": 4.935915917140089e-06, |
| "loss": 0.7139, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.46629213483146065, |
| "grad_norm": 0.9456989169120789, |
| "learning_rate": 4.935811867526311e-06, |
| "loss": 0.6667, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.4666432584269663, |
| "grad_norm": 0.9617751836776733, |
| "learning_rate": 4.935707734609852e-06, |
| "loss": 0.691, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.4669943820224719, |
| "grad_norm": 0.8945748209953308, |
| "learning_rate": 4.935603518394273e-06, |
| "loss": 0.711, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.46734550561797755, |
| "grad_norm": 0.9066562652587891, |
| "learning_rate": 4.935499218883137e-06, |
| "loss": 0.685, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.46769662921348315, |
| "grad_norm": 0.8701803684234619, |
| "learning_rate": 4.935394836080012e-06, |
| "loss": 0.6735, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.46804775280898875, |
| "grad_norm": 0.9355669021606445, |
| "learning_rate": 4.935290369988468e-06, |
| "loss": 0.7335, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.4683988764044944, |
| "grad_norm": 0.882905900478363, |
| "learning_rate": 4.9351858206120774e-06, |
| "loss": 0.7196, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.46875, |
| "grad_norm": 0.9127156734466553, |
| "learning_rate": 4.935081187954415e-06, |
| "loss": 0.714, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.4691011235955056, |
| "grad_norm": 1.284045934677124, |
| "learning_rate": 4.9349764720190595e-06, |
| "loss": 0.7167, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.46945224719101125, |
| "grad_norm": 0.919476330280304, |
| "learning_rate": 4.934871672809594e-06, |
| "loss": 0.6804, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.46980337078651685, |
| "grad_norm": 0.9066704511642456, |
| "learning_rate": 4.934766790329599e-06, |
| "loss": 0.706, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.47015449438202245, |
| "grad_norm": 0.9020872116088867, |
| "learning_rate": 4.934661824582664e-06, |
| "loss": 0.7277, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.4705056179775281, |
| "grad_norm": 0.9096556901931763, |
| "learning_rate": 4.934556775572377e-06, |
| "loss": 0.7184, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.4708567415730337, |
| "grad_norm": 0.8852599859237671, |
| "learning_rate": 4.934451643302332e-06, |
| "loss": 0.6952, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.47120786516853935, |
| "grad_norm": 0.9317033290863037, |
| "learning_rate": 4.934346427776125e-06, |
| "loss": 0.7087, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.47155898876404495, |
| "grad_norm": 0.9349064230918884, |
| "learning_rate": 4.934241128997353e-06, |
| "loss": 0.7295, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.47191011235955055, |
| "grad_norm": 0.8978156447410583, |
| "learning_rate": 4.934135746969617e-06, |
| "loss": 0.7129, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.4722612359550562, |
| "grad_norm": 0.8900869488716125, |
| "learning_rate": 4.934030281696521e-06, |
| "loss": 0.6737, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.4726123595505618, |
| "grad_norm": 0.9193541407585144, |
| "learning_rate": 4.933924733181673e-06, |
| "loss": 0.7189, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.4729634831460674, |
| "grad_norm": 0.8839302659034729, |
| "learning_rate": 4.933819101428681e-06, |
| "loss": 0.6883, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.47331460674157305, |
| "grad_norm": 0.8768748641014099, |
| "learning_rate": 4.933713386441159e-06, |
| "loss": 0.6825, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.47366573033707865, |
| "grad_norm": 0.9012611508369446, |
| "learning_rate": 4.933607588222721e-06, |
| "loss": 0.7477, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.47401685393258425, |
| "grad_norm": 0.9348760843276978, |
| "learning_rate": 4.933501706776986e-06, |
| "loss": 0.7071, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.4743679775280899, |
| "grad_norm": 0.9126866459846497, |
| "learning_rate": 4.933395742107576e-06, |
| "loss": 0.7259, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.4747191011235955, |
| "grad_norm": 1.1260316371917725, |
| "learning_rate": 4.9332896942181135e-06, |
| "loss": 0.6647, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.4750702247191011, |
| "grad_norm": 0.9544892907142639, |
| "learning_rate": 4.933183563112226e-06, |
| "loss": 0.7228, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.47542134831460675, |
| "grad_norm": 0.9179894924163818, |
| "learning_rate": 4.933077348793542e-06, |
| "loss": 0.7115, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.47577247191011235, |
| "grad_norm": 0.8785374164581299, |
| "learning_rate": 4.932971051265695e-06, |
| "loss": 0.6651, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.476123595505618, |
| "grad_norm": 0.9006591439247131, |
| "learning_rate": 4.93286467053232e-06, |
| "loss": 0.7166, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.4764747191011236, |
| "grad_norm": 0.9105557799339294, |
| "learning_rate": 4.932758206597055e-06, |
| "loss": 0.6876, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.4768258426966292, |
| "grad_norm": 0.8968350887298584, |
| "learning_rate": 4.932651659463541e-06, |
| "loss": 0.6873, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.47717696629213485, |
| "grad_norm": 0.9052225947380066, |
| "learning_rate": 4.932545029135422e-06, |
| "loss": 0.7134, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.47752808988764045, |
| "grad_norm": 0.946305513381958, |
| "learning_rate": 4.932438315616345e-06, |
| "loss": 0.7061, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.47787921348314605, |
| "grad_norm": 0.9128836393356323, |
| "learning_rate": 4.9323315189099586e-06, |
| "loss": 0.7101, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.4782303370786517, |
| "grad_norm": 0.9199392199516296, |
| "learning_rate": 4.9322246390199155e-06, |
| "loss": 0.7073, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.4785814606741573, |
| "grad_norm": 1.077110767364502, |
| "learning_rate": 4.932117675949871e-06, |
| "loss": 0.6828, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.4789325842696629, |
| "grad_norm": 0.9046890735626221, |
| "learning_rate": 4.932010629703484e-06, |
| "loss": 0.6991, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.47928370786516855, |
| "grad_norm": 0.8711754083633423, |
| "learning_rate": 4.931903500284414e-06, |
| "loss": 0.6788, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.47963483146067415, |
| "grad_norm": 0.8938144445419312, |
| "learning_rate": 4.931796287696325e-06, |
| "loss": 0.7219, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.4799859550561798, |
| "grad_norm": 0.9003678560256958, |
| "learning_rate": 4.931688991942884e-06, |
| "loss": 0.6908, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.4803370786516854, |
| "grad_norm": 0.9107855558395386, |
| "learning_rate": 4.93158161302776e-06, |
| "loss": 0.6821, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.480688202247191, |
| "grad_norm": 0.9014522433280945, |
| "learning_rate": 4.931474150954626e-06, |
| "loss": 0.7071, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.48103932584269665, |
| "grad_norm": 0.9273253083229065, |
| "learning_rate": 4.931366605727157e-06, |
| "loss": 0.7027, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.48139044943820225, |
| "grad_norm": 0.9555942416191101, |
| "learning_rate": 4.93125897734903e-06, |
| "loss": 0.6952, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.48174157303370785, |
| "grad_norm": 0.9457711577415466, |
| "learning_rate": 4.931151265823927e-06, |
| "loss": 0.6893, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.4820926966292135, |
| "grad_norm": 0.9268205761909485, |
| "learning_rate": 4.931043471155531e-06, |
| "loss": 0.7169, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.4824438202247191, |
| "grad_norm": 0.9136787056922913, |
| "learning_rate": 4.930935593347529e-06, |
| "loss": 0.7326, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.4827949438202247, |
| "grad_norm": 0.925116777420044, |
| "learning_rate": 4.930827632403608e-06, |
| "loss": 0.7037, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.48314606741573035, |
| "grad_norm": 0.9161933064460754, |
| "learning_rate": 4.930719588327464e-06, |
| "loss": 0.7222, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.48349719101123595, |
| "grad_norm": 0.9238081574440002, |
| "learning_rate": 4.93061146112279e-06, |
| "loss": 0.7186, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.48384831460674155, |
| "grad_norm": 0.9430643320083618, |
| "learning_rate": 4.930503250793283e-06, |
| "loss": 0.7622, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.4841994382022472, |
| "grad_norm": 0.9295469522476196, |
| "learning_rate": 4.930394957342645e-06, |
| "loss": 0.7354, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.4845505617977528, |
| "grad_norm": 0.9064199924468994, |
| "learning_rate": 4.9302865807745795e-06, |
| "loss": 0.6487, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.48490168539325845, |
| "grad_norm": 0.9455371499061584, |
| "learning_rate": 4.930178121092792e-06, |
| "loss": 0.7468, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.48525280898876405, |
| "grad_norm": 0.9263501763343811, |
| "learning_rate": 4.930069578300992e-06, |
| "loss": 0.7099, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.48560393258426965, |
| "grad_norm": 0.8904842734336853, |
| "learning_rate": 4.929960952402892e-06, |
| "loss": 0.6519, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.4859550561797753, |
| "grad_norm": 0.8946258425712585, |
| "learning_rate": 4.929852243402208e-06, |
| "loss": 0.7011, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.4863061797752809, |
| "grad_norm": 0.9584969282150269, |
| "learning_rate": 4.929743451302656e-06, |
| "loss": 0.724, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.4866573033707865, |
| "grad_norm": 0.9212526082992554, |
| "learning_rate": 4.929634576107956e-06, |
| "loss": 0.6821, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.48700842696629215, |
| "grad_norm": 0.9416231513023376, |
| "learning_rate": 4.929525617821834e-06, |
| "loss": 0.7457, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.48735955056179775, |
| "grad_norm": 0.9377028346061707, |
| "learning_rate": 4.929416576448014e-06, |
| "loss": 0.6871, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.48771067415730335, |
| "grad_norm": 0.9065041542053223, |
| "learning_rate": 4.929307451990226e-06, |
| "loss": 0.7261, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.488061797752809, |
| "grad_norm": 0.844350278377533, |
| "learning_rate": 4.929198244452202e-06, |
| "loss": 0.7099, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.4884129213483146, |
| "grad_norm": 0.9054086804389954, |
| "learning_rate": 4.9290889538376775e-06, |
| "loss": 0.669, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.4887640449438202, |
| "grad_norm": 0.8729206919670105, |
| "learning_rate": 4.928979580150388e-06, |
| "loss": 0.6816, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.48911516853932585, |
| "grad_norm": 0.9076043367385864, |
| "learning_rate": 4.928870123394077e-06, |
| "loss": 0.7044, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.48946629213483145, |
| "grad_norm": 0.9312844276428223, |
| "learning_rate": 4.928760583572485e-06, |
| "loss": 0.7107, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.4898174157303371, |
| "grad_norm": 0.9900015592575073, |
| "learning_rate": 4.928650960689359e-06, |
| "loss": 0.703, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.4901685393258427, |
| "grad_norm": 0.9289678931236267, |
| "learning_rate": 4.92854125474845e-06, |
| "loss": 0.714, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.4905196629213483, |
| "grad_norm": 0.9454885125160217, |
| "learning_rate": 4.928431465753507e-06, |
| "loss": 0.7312, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.49087078651685395, |
| "grad_norm": 0.9117372035980225, |
| "learning_rate": 4.928321593708286e-06, |
| "loss": 0.6984, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.49122191011235955, |
| "grad_norm": 0.9010490775108337, |
| "learning_rate": 4.928211638616545e-06, |
| "loss": 0.672, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.49157303370786515, |
| "grad_norm": 0.9090813994407654, |
| "learning_rate": 4.928101600482043e-06, |
| "loss": 0.7132, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.4919241573033708, |
| "grad_norm": 0.905497133731842, |
| "learning_rate": 4.927991479308545e-06, |
| "loss": 0.677, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.4922752808988764, |
| "grad_norm": 0.902889609336853, |
| "learning_rate": 4.927881275099815e-06, |
| "loss": 0.6601, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.492626404494382, |
| "grad_norm": 0.9104518294334412, |
| "learning_rate": 4.927770987859624e-06, |
| "loss": 0.7192, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.49297752808988765, |
| "grad_norm": 0.8784206509590149, |
| "learning_rate": 4.9276606175917415e-06, |
| "loss": 0.6556, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.49332865168539325, |
| "grad_norm": 0.9406437277793884, |
| "learning_rate": 4.927550164299944e-06, |
| "loss": 0.679, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.4936797752808989, |
| "grad_norm": 0.8956720232963562, |
| "learning_rate": 4.9274396279880075e-06, |
| "loss": 0.662, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.4940308988764045, |
| "grad_norm": 0.9246423840522766, |
| "learning_rate": 4.927329008659714e-06, |
| "loss": 0.7155, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.4943820224719101, |
| "grad_norm": 0.9392451047897339, |
| "learning_rate": 4.927218306318845e-06, |
| "loss": 0.6819, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.49473314606741575, |
| "grad_norm": 0.9309847950935364, |
| "learning_rate": 4.927107520969187e-06, |
| "loss": 0.7195, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.49508426966292135, |
| "grad_norm": 0.890052080154419, |
| "learning_rate": 4.926996652614529e-06, |
| "loss": 0.6703, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.49543539325842695, |
| "grad_norm": 0.9011236429214478, |
| "learning_rate": 4.926885701258661e-06, |
| "loss": 0.6822, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.4957865168539326, |
| "grad_norm": 0.915877103805542, |
| "learning_rate": 4.92677466690538e-06, |
| "loss": 0.7156, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.4961376404494382, |
| "grad_norm": 0.8800505995750427, |
| "learning_rate": 4.9266635495584814e-06, |
| "loss": 0.6817, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.4964887640449438, |
| "grad_norm": 0.9167606234550476, |
| "learning_rate": 4.926552349221766e-06, |
| "loss": 0.6982, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.49683988764044945, |
| "grad_norm": 0.9250441193580627, |
| "learning_rate": 4.926441065899037e-06, |
| "loss": 0.6559, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.49719101123595505, |
| "grad_norm": 0.910899817943573, |
| "learning_rate": 4.9263296995941004e-06, |
| "loss": 0.6654, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.49754213483146065, |
| "grad_norm": 0.8955602049827576, |
| "learning_rate": 4.926218250310764e-06, |
| "loss": 0.7022, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.4978932584269663, |
| "grad_norm": 0.9195224642753601, |
| "learning_rate": 4.926106718052839e-06, |
| "loss": 0.7058, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.4982443820224719, |
| "grad_norm": 0.9715495705604553, |
| "learning_rate": 4.92599510282414e-06, |
| "loss": 0.6917, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.49859550561797755, |
| "grad_norm": 0.8591254353523254, |
| "learning_rate": 4.925883404628485e-06, |
| "loss": 0.6415, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.49894662921348315, |
| "grad_norm": 0.903109610080719, |
| "learning_rate": 4.925771623469694e-06, |
| "loss": 0.702, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.49929775280898875, |
| "grad_norm": 0.9081088900566101, |
| "learning_rate": 4.9256597593515885e-06, |
| "loss": 0.7098, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.4996488764044944, |
| "grad_norm": 0.9099250435829163, |
| "learning_rate": 4.925547812277994e-06, |
| "loss": 0.6752, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.9163993000984192, |
| "learning_rate": 4.925435782252742e-06, |
| "loss": 0.7098, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.5003511235955056, |
| "grad_norm": 0.9282369017601013, |
| "learning_rate": 4.92532366927966e-06, |
| "loss": 0.7003, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.5007022471910112, |
| "grad_norm": 0.9375564455986023, |
| "learning_rate": 4.925211473362585e-06, |
| "loss": 0.6981, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.5010533707865169, |
| "grad_norm": 0.9100632071495056, |
| "learning_rate": 4.925099194505352e-06, |
| "loss": 0.6794, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.5014044943820225, |
| "grad_norm": 0.8592130541801453, |
| "learning_rate": 4.924986832711802e-06, |
| "loss": 0.6547, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.5017556179775281, |
| "grad_norm": 0.9379070401191711, |
| "learning_rate": 4.924874387985777e-06, |
| "loss": 0.6829, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.5021067415730337, |
| "grad_norm": 0.9097589254379272, |
| "learning_rate": 4.924761860331125e-06, |
| "loss": 0.682, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.5024578651685393, |
| "grad_norm": 0.925594687461853, |
| "learning_rate": 4.92464924975169e-06, |
| "loss": 0.7312, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.5028089887640449, |
| "grad_norm": 0.8913887739181519, |
| "learning_rate": 4.924536556251326e-06, |
| "loss": 0.6613, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.5031601123595506, |
| "grad_norm": 0.8830445408821106, |
| "learning_rate": 4.924423779833887e-06, |
| "loss": 0.689, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.5035112359550562, |
| "grad_norm": 0.9521420001983643, |
| "learning_rate": 4.924310920503229e-06, |
| "loss": 0.678, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.5038623595505618, |
| "grad_norm": 0.918493390083313, |
| "learning_rate": 4.924197978263212e-06, |
| "loss": 0.7188, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.5042134831460674, |
| "grad_norm": 0.9049700498580933, |
| "learning_rate": 4.9240849531176986e-06, |
| "loss": 0.6846, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.504564606741573, |
| "grad_norm": 0.8993126153945923, |
| "learning_rate": 4.9239718450705545e-06, |
| "loss": 0.7298, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.5049157303370787, |
| "grad_norm": 0.9134989380836487, |
| "learning_rate": 4.923858654125647e-06, |
| "loss": 0.6945, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.5052668539325843, |
| "grad_norm": 0.9160113334655762, |
| "learning_rate": 4.9237453802868475e-06, |
| "loss": 0.7074, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.5056179775280899, |
| "grad_norm": 0.9044978022575378, |
| "learning_rate": 4.923632023558031e-06, |
| "loss": 0.6814, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.5059691011235955, |
| "grad_norm": 0.9281947612762451, |
| "learning_rate": 4.923518583943072e-06, |
| "loss": 0.6806, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.5063202247191011, |
| "grad_norm": 0.8679924607276917, |
| "learning_rate": 4.923405061445852e-06, |
| "loss": 0.6654, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.5066713483146067, |
| "grad_norm": 0.905013918876648, |
| "learning_rate": 4.923291456070252e-06, |
| "loss": 0.6938, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.5070224719101124, |
| "grad_norm": 0.8952536582946777, |
| "learning_rate": 4.923177767820158e-06, |
| "loss": 0.6802, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.507373595505618, |
| "grad_norm": 0.9175031185150146, |
| "learning_rate": 4.923063996699458e-06, |
| "loss": 0.6984, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.5077247191011236, |
| "grad_norm": 0.8895412087440491, |
| "learning_rate": 4.922950142712044e-06, |
| "loss": 0.7255, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.5080758426966292, |
| "grad_norm": 0.874991238117218, |
| "learning_rate": 4.922836205861807e-06, |
| "loss": 0.6709, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.5084269662921348, |
| "grad_norm": 0.8750325441360474, |
| "learning_rate": 4.922722186152646e-06, |
| "loss": 0.6414, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.5087780898876404, |
| "grad_norm": 0.8989003300666809, |
| "learning_rate": 4.922608083588459e-06, |
| "loss": 0.6928, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.5091292134831461, |
| "grad_norm": 0.9451534152030945, |
| "learning_rate": 4.922493898173148e-06, |
| "loss": 0.7143, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.5094803370786517, |
| "grad_norm": 0.9093509912490845, |
| "learning_rate": 4.922379629910619e-06, |
| "loss": 0.7027, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.5098314606741573, |
| "grad_norm": 0.932784914970398, |
| "learning_rate": 4.9222652788047795e-06, |
| "loss": 0.7024, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.5101825842696629, |
| "grad_norm": 0.9492952227592468, |
| "learning_rate": 4.92215084485954e-06, |
| "loss": 0.6856, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.5105337078651685, |
| "grad_norm": 0.8805875182151794, |
| "learning_rate": 4.922036328078815e-06, |
| "loss": 0.7114, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.5108848314606742, |
| "grad_norm": 0.8962169289588928, |
| "learning_rate": 4.92192172846652e-06, |
| "loss": 0.6962, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.5112359550561798, |
| "grad_norm": 0.923744797706604, |
| "learning_rate": 4.921807046026574e-06, |
| "loss": 0.7325, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.5115870786516854, |
| "grad_norm": 0.9285497069358826, |
| "learning_rate": 4.9216922807628995e-06, |
| "loss": 0.7208, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.511938202247191, |
| "grad_norm": 0.879386842250824, |
| "learning_rate": 4.921577432679421e-06, |
| "loss": 0.6404, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.5122893258426966, |
| "grad_norm": 0.9378132820129395, |
| "learning_rate": 4.921462501780067e-06, |
| "loss": 0.7286, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.5126404494382022, |
| "grad_norm": 0.9210785031318665, |
| "learning_rate": 4.921347488068767e-06, |
| "loss": 0.7344, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.5129915730337079, |
| "grad_norm": 0.9228889346122742, |
| "learning_rate": 4.921232391549456e-06, |
| "loss": 0.7108, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.5133426966292135, |
| "grad_norm": 0.8554604649543762, |
| "learning_rate": 4.9211172122260675e-06, |
| "loss": 0.6272, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.5136938202247191, |
| "grad_norm": 0.8758804202079773, |
| "learning_rate": 4.921001950102543e-06, |
| "loss": 0.6463, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.5140449438202247, |
| "grad_norm": 0.9541190266609192, |
| "learning_rate": 4.920886605182823e-06, |
| "loss": 0.7274, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.5143960674157303, |
| "grad_norm": 0.9359861016273499, |
| "learning_rate": 4.920771177470853e-06, |
| "loss": 0.6961, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.514747191011236, |
| "grad_norm": 0.8696016073226929, |
| "learning_rate": 4.92065566697058e-06, |
| "loss": 0.7263, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.5150983146067416, |
| "grad_norm": 0.8890820741653442, |
| "learning_rate": 4.920540073685956e-06, |
| "loss": 0.7063, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.5154494382022472, |
| "grad_norm": 0.90772545337677, |
| "learning_rate": 4.920424397620931e-06, |
| "loss": 0.6858, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.5158005617977528, |
| "grad_norm": 0.9410818219184875, |
| "learning_rate": 4.920308638779463e-06, |
| "loss": 0.6655, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.5161516853932584, |
| "grad_norm": 0.8993090987205505, |
| "learning_rate": 4.920192797165511e-06, |
| "loss": 0.7215, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.516502808988764, |
| "grad_norm": 0.8889127969741821, |
| "learning_rate": 4.920076872783036e-06, |
| "loss": 0.7214, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.5168539325842697, |
| "grad_norm": 0.9573481678962708, |
| "learning_rate": 4.919960865636004e-06, |
| "loss": 0.6754, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.5172050561797753, |
| "grad_norm": 0.9702395796775818, |
| "learning_rate": 4.91984477572838e-06, |
| "loss": 0.6986, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.5175561797752809, |
| "grad_norm": 0.9364273548126221, |
| "learning_rate": 4.919728603064136e-06, |
| "loss": 0.7098, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.5179073033707865, |
| "grad_norm": 1.0006465911865234, |
| "learning_rate": 4.919612347647244e-06, |
| "loss": 0.7143, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.5182584269662921, |
| "grad_norm": 0.9335517287254333, |
| "learning_rate": 4.91949600948168e-06, |
| "loss": 0.6837, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.5186095505617978, |
| "grad_norm": 0.9131624698638916, |
| "learning_rate": 4.919379588571424e-06, |
| "loss": 0.7254, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.5189606741573034, |
| "grad_norm": 0.9354121685028076, |
| "learning_rate": 4.919263084920455e-06, |
| "loss": 0.707, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.519311797752809, |
| "grad_norm": 0.8909806609153748, |
| "learning_rate": 4.919146498532758e-06, |
| "loss": 0.6655, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.5196629213483146, |
| "grad_norm": 0.9220510721206665, |
| "learning_rate": 4.919029829412323e-06, |
| "loss": 0.6859, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.5200140449438202, |
| "grad_norm": 0.9528585076332092, |
| "learning_rate": 4.918913077563137e-06, |
| "loss": 0.694, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.5203651685393258, |
| "grad_norm": 0.9287347793579102, |
| "learning_rate": 4.918796242989193e-06, |
| "loss": 0.6775, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.5207162921348315, |
| "grad_norm": 0.9118848443031311, |
| "learning_rate": 4.9186793256944885e-06, |
| "loss": 0.6869, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.5210674157303371, |
| "grad_norm": 0.8987846374511719, |
| "learning_rate": 4.91856232568302e-06, |
| "loss": 0.6883, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.5214185393258427, |
| "grad_norm": 0.8864469528198242, |
| "learning_rate": 4.918445242958789e-06, |
| "loss": 0.7144, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.5217696629213483, |
| "grad_norm": 0.8598546385765076, |
| "learning_rate": 4.918328077525802e-06, |
| "loss": 0.6752, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.5221207865168539, |
| "grad_norm": 0.8941078782081604, |
| "learning_rate": 4.918210829388061e-06, |
| "loss": 0.6733, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.5224719101123596, |
| "grad_norm": 0.914069414138794, |
| "learning_rate": 4.918093498549582e-06, |
| "loss": 0.7032, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.5228230337078652, |
| "grad_norm": 0.9472395181655884, |
| "learning_rate": 4.917976085014373e-06, |
| "loss": 0.6666, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.5231741573033708, |
| "grad_norm": 0.9576021432876587, |
| "learning_rate": 4.9178585887864515e-06, |
| "loss": 0.7056, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.5235252808988764, |
| "grad_norm": 0.9019284248352051, |
| "learning_rate": 4.9177410098698354e-06, |
| "loss": 0.6841, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.523876404494382, |
| "grad_norm": 0.8970656394958496, |
| "learning_rate": 4.917623348268546e-06, |
| "loss": 0.7169, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.5242275280898876, |
| "grad_norm": 0.8948590159416199, |
| "learning_rate": 4.917505603986605e-06, |
| "loss": 0.6666, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.5245786516853933, |
| "grad_norm": 0.9220554828643799, |
| "learning_rate": 4.9173877770280425e-06, |
| "loss": 0.7249, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.5249297752808989, |
| "grad_norm": 0.9420086741447449, |
| "learning_rate": 4.917269867396887e-06, |
| "loss": 0.7205, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.5252808988764045, |
| "grad_norm": 0.9278038144111633, |
| "learning_rate": 4.917151875097169e-06, |
| "loss": 0.7032, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.5256320224719101, |
| "grad_norm": 0.8682548999786377, |
| "learning_rate": 4.917033800132927e-06, |
| "loss": 0.7, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.5259831460674157, |
| "grad_norm": 0.8908578753471375, |
| "learning_rate": 4.916915642508196e-06, |
| "loss": 0.6891, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.5263342696629213, |
| "grad_norm": 0.8928547501564026, |
| "learning_rate": 4.916797402227019e-06, |
| "loss": 0.7247, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.526685393258427, |
| "grad_norm": 0.9188054203987122, |
| "learning_rate": 4.916679079293438e-06, |
| "loss": 0.7001, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.5270365168539326, |
| "grad_norm": 0.8950523138046265, |
| "learning_rate": 4.916560673711502e-06, |
| "loss": 0.6473, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.5273876404494382, |
| "grad_norm": 0.9325989484786987, |
| "learning_rate": 4.916442185485258e-06, |
| "loss": 0.7096, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.5277387640449438, |
| "grad_norm": 0.9268350005149841, |
| "learning_rate": 4.9163236146187586e-06, |
| "loss": 0.7089, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.5280898876404494, |
| "grad_norm": 1.009692907333374, |
| "learning_rate": 4.91620496111606e-06, |
| "loss": 0.6721, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.5284410112359551, |
| "grad_norm": 0.8952979445457458, |
| "learning_rate": 4.916086224981219e-06, |
| "loss": 0.7074, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.5287921348314607, |
| "grad_norm": 0.896232008934021, |
| "learning_rate": 4.915967406218296e-06, |
| "loss": 0.6765, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.5291432584269663, |
| "grad_norm": 0.9074482917785645, |
| "learning_rate": 4.915848504831356e-06, |
| "loss": 0.7296, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.5294943820224719, |
| "grad_norm": 0.8722943067550659, |
| "learning_rate": 4.9157295208244635e-06, |
| "loss": 0.636, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.5298455056179775, |
| "grad_norm": 0.917375922203064, |
| "learning_rate": 4.915610454201689e-06, |
| "loss": 0.6844, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.5301966292134831, |
| "grad_norm": 0.8942883610725403, |
| "learning_rate": 4.9154913049671045e-06, |
| "loss": 0.6589, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.5305477528089888, |
| "grad_norm": 0.894601583480835, |
| "learning_rate": 4.915372073124784e-06, |
| "loss": 0.6919, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.5308988764044944, |
| "grad_norm": 1.0230928659439087, |
| "learning_rate": 4.9152527586788056e-06, |
| "loss": 0.6949, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.53125, |
| "grad_norm": 1.0016642808914185, |
| "learning_rate": 4.915133361633249e-06, |
| "loss": 0.6792, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.5316011235955056, |
| "grad_norm": 0.8965452909469604, |
| "learning_rate": 4.9150138819921985e-06, |
| "loss": 0.6993, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.5319522471910112, |
| "grad_norm": 0.9045464396476746, |
| "learning_rate": 4.91489431975974e-06, |
| "loss": 0.6905, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.5323033707865169, |
| "grad_norm": 0.9282147288322449, |
| "learning_rate": 4.914774674939962e-06, |
| "loss": 0.7121, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.5326544943820225, |
| "grad_norm": 0.9581758379936218, |
| "learning_rate": 4.914654947536956e-06, |
| "loss": 0.7156, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.5330056179775281, |
| "grad_norm": 0.8986783623695374, |
| "learning_rate": 4.9145351375548184e-06, |
| "loss": 0.6827, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.5333567415730337, |
| "grad_norm": 1.0093053579330444, |
| "learning_rate": 4.9144152449976444e-06, |
| "loss": 0.6642, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.5337078651685393, |
| "grad_norm": 1.0274914503097534, |
| "learning_rate": 4.914295269869535e-06, |
| "loss": 0.6667, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.5340589887640449, |
| "grad_norm": 0.9218011498451233, |
| "learning_rate": 4.914175212174594e-06, |
| "loss": 0.6902, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.5344101123595506, |
| "grad_norm": 0.902390718460083, |
| "learning_rate": 4.914055071916925e-06, |
| "loss": 0.7009, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.5347612359550562, |
| "grad_norm": 0.9239642024040222, |
| "learning_rate": 4.9139348491006396e-06, |
| "loss": 0.7038, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.5351123595505618, |
| "grad_norm": 0.8971140384674072, |
| "learning_rate": 4.913814543729847e-06, |
| "loss": 0.7187, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.5354634831460674, |
| "grad_norm": 0.9525965452194214, |
| "learning_rate": 4.913694155808663e-06, |
| "loss": 0.6694, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.535814606741573, |
| "grad_norm": 0.915665864944458, |
| "learning_rate": 4.913573685341205e-06, |
| "loss": 0.7181, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.5361657303370787, |
| "grad_norm": 0.913693368434906, |
| "learning_rate": 4.913453132331592e-06, |
| "loss": 0.7122, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.5365168539325843, |
| "grad_norm": 0.91572505235672, |
| "learning_rate": 4.913332496783947e-06, |
| "loss": 0.7143, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.5368679775280899, |
| "grad_norm": 0.947823166847229, |
| "learning_rate": 4.913211778702395e-06, |
| "loss": 0.7239, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.5372191011235955, |
| "grad_norm": 0.9384491443634033, |
| "learning_rate": 4.913090978091066e-06, |
| "loss": 0.7147, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.5375702247191011, |
| "grad_norm": 0.882896363735199, |
| "learning_rate": 4.912970094954091e-06, |
| "loss": 0.6548, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.5379213483146067, |
| "grad_norm": 0.8977818489074707, |
| "learning_rate": 4.912849129295601e-06, |
| "loss": 0.6898, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.5382724719101124, |
| "grad_norm": 0.8661186099052429, |
| "learning_rate": 4.912728081119738e-06, |
| "loss": 0.6605, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.538623595505618, |
| "grad_norm": 0.8856679797172546, |
| "learning_rate": 4.9126069504306385e-06, |
| "loss": 0.6891, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.5389747191011236, |
| "grad_norm": 0.9184430241584778, |
| "learning_rate": 4.9124857372324455e-06, |
| "loss": 0.7221, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.5393258426966292, |
| "grad_norm": 0.9198493957519531, |
| "learning_rate": 4.912364441529306e-06, |
| "loss": 0.7011, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.5396769662921348, |
| "grad_norm": 0.9443540573120117, |
| "learning_rate": 4.912243063325365e-06, |
| "loss": 0.6523, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.5400280898876404, |
| "grad_norm": 0.9004515409469604, |
| "learning_rate": 4.912121602624777e-06, |
| "loss": 0.6674, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.5403792134831461, |
| "grad_norm": 0.9147352576255798, |
| "learning_rate": 4.912000059431693e-06, |
| "loss": 0.684, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.5407303370786517, |
| "grad_norm": 0.9233749508857727, |
| "learning_rate": 4.911878433750271e-06, |
| "loss": 0.702, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.5410814606741573, |
| "grad_norm": 0.8945138454437256, |
| "learning_rate": 4.911756725584672e-06, |
| "loss": 0.6694, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.5414325842696629, |
| "grad_norm": 0.9332498908042908, |
| "learning_rate": 4.911634934939055e-06, |
| "loss": 0.703, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.5417837078651685, |
| "grad_norm": 0.8984339237213135, |
| "learning_rate": 4.911513061817588e-06, |
| "loss": 0.6607, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.5421348314606742, |
| "grad_norm": 0.9096390604972839, |
| "learning_rate": 4.9113911062244364e-06, |
| "loss": 0.6786, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.5424859550561798, |
| "grad_norm": 0.8916593790054321, |
| "learning_rate": 4.911269068163775e-06, |
| "loss": 0.6644, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.5428370786516854, |
| "grad_norm": 0.9222617149353027, |
| "learning_rate": 4.911146947639772e-06, |
| "loss": 0.6983, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.543188202247191, |
| "grad_norm": 0.8953000903129578, |
| "learning_rate": 4.911024744656608e-06, |
| "loss": 0.7026, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.5435393258426966, |
| "grad_norm": 0.9042890071868896, |
| "learning_rate": 4.910902459218461e-06, |
| "loss": 0.7196, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.5438904494382022, |
| "grad_norm": 0.9504362940788269, |
| "learning_rate": 4.910780091329513e-06, |
| "loss": 0.6707, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.5442415730337079, |
| "grad_norm": 0.9307934641838074, |
| "learning_rate": 4.910657640993948e-06, |
| "loss": 0.683, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.5445926966292135, |
| "grad_norm": 0.9294170141220093, |
| "learning_rate": 4.910535108215955e-06, |
| "loss": 0.6969, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.5449438202247191, |
| "grad_norm": 0.9138461351394653, |
| "learning_rate": 4.910412492999725e-06, |
| "loss": 0.7347, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.5452949438202247, |
| "grad_norm": 0.8926948308944702, |
| "learning_rate": 4.910289795349449e-06, |
| "loss": 0.7217, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.5456460674157303, |
| "grad_norm": 0.8452489376068115, |
| "learning_rate": 4.910167015269325e-06, |
| "loss": 0.6478, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.545997191011236, |
| "grad_norm": 0.8928716778755188, |
| "learning_rate": 4.9100441527635516e-06, |
| "loss": 0.6246, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.5463483146067416, |
| "grad_norm": 0.8877882361412048, |
| "learning_rate": 4.90992120783633e-06, |
| "loss": 0.6771, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.5466994382022472, |
| "grad_norm": 0.8971658945083618, |
| "learning_rate": 4.909798180491865e-06, |
| "loss": 0.6664, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.5470505617977528, |
| "grad_norm": 0.9224132895469666, |
| "learning_rate": 4.909675070734365e-06, |
| "loss": 0.6958, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.5474016853932584, |
| "grad_norm": 0.9234408140182495, |
| "learning_rate": 4.9095518785680395e-06, |
| "loss": 0.6944, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.547752808988764, |
| "grad_norm": 0.9604536294937134, |
| "learning_rate": 4.909428603997101e-06, |
| "loss": 0.6597, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.5481039325842697, |
| "grad_norm": 0.9268457889556885, |
| "learning_rate": 4.909305247025767e-06, |
| "loss": 0.6942, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.5484550561797753, |
| "grad_norm": 0.9222556352615356, |
| "learning_rate": 4.909181807658256e-06, |
| "loss": 0.6747, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.5488061797752809, |
| "grad_norm": 0.8885440826416016, |
| "learning_rate": 4.909058285898787e-06, |
| "loss": 0.6555, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.5491573033707865, |
| "grad_norm": 0.9378191232681274, |
| "learning_rate": 4.908934681751586e-06, |
| "loss": 0.7272, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.5495084269662921, |
| "grad_norm": 0.9337999820709229, |
| "learning_rate": 4.908810995220882e-06, |
| "loss": 0.6721, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.5498595505617978, |
| "grad_norm": 0.8756289482116699, |
| "learning_rate": 4.908687226310902e-06, |
| "loss": 0.6733, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.5502106741573034, |
| "grad_norm": 0.8794893026351929, |
| "learning_rate": 4.90856337502588e-06, |
| "loss": 0.6225, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.550561797752809, |
| "grad_norm": 0.8720929026603699, |
| "learning_rate": 4.9084394413700515e-06, |
| "loss": 0.7074, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.5509129213483146, |
| "grad_norm": 0.8621073365211487, |
| "learning_rate": 4.908315425347655e-06, |
| "loss": 0.6439, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.5512640449438202, |
| "grad_norm": 0.937415361404419, |
| "learning_rate": 4.908191326962932e-06, |
| "loss": 0.7121, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.5516151685393258, |
| "grad_norm": 0.908561646938324, |
| "learning_rate": 4.908067146220127e-06, |
| "loss": 0.699, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.5519662921348315, |
| "grad_norm": 0.8949597477912903, |
| "learning_rate": 4.907942883123485e-06, |
| "loss": 0.7148, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.5523174157303371, |
| "grad_norm": 0.9320929646492004, |
| "learning_rate": 4.907818537677258e-06, |
| "loss": 0.6894, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.5526685393258427, |
| "grad_norm": 0.8810732960700989, |
| "learning_rate": 4.907694109885697e-06, |
| "loss": 0.6814, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.5530196629213483, |
| "grad_norm": 0.9485206007957458, |
| "learning_rate": 4.907569599753058e-06, |
| "loss": 0.7125, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.5533707865168539, |
| "grad_norm": 0.8823009729385376, |
| "learning_rate": 4.907445007283598e-06, |
| "loss": 0.7239, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.5537219101123596, |
| "grad_norm": 0.9091922640800476, |
| "learning_rate": 4.907320332481579e-06, |
| "loss": 0.6949, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.5540730337078652, |
| "grad_norm": 0.9078152775764465, |
| "learning_rate": 4.907195575351265e-06, |
| "loss": 0.701, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.5544241573033708, |
| "grad_norm": 0.9011525511741638, |
| "learning_rate": 4.907070735896923e-06, |
| "loss": 0.7039, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.5547752808988764, |
| "grad_norm": 0.8794327974319458, |
| "learning_rate": 4.90694581412282e-06, |
| "loss": 0.6543, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.555126404494382, |
| "grad_norm": 0.921090841293335, |
| "learning_rate": 4.90682081003323e-06, |
| "loss": 0.7031, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.5554775280898876, |
| "grad_norm": 0.8931931853294373, |
| "learning_rate": 4.906695723632429e-06, |
| "loss": 0.6938, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.5558286516853933, |
| "grad_norm": 0.9168527722358704, |
| "learning_rate": 4.9065705549246935e-06, |
| "loss": 0.7472, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.5561797752808989, |
| "grad_norm": 0.9369787573814392, |
| "learning_rate": 4.906445303914303e-06, |
| "loss": 0.6945, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.5565308988764045, |
| "grad_norm": 0.9574870467185974, |
| "learning_rate": 4.906319970605544e-06, |
| "loss": 0.7117, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.5568820224719101, |
| "grad_norm": 0.9056572318077087, |
| "learning_rate": 4.9061945550027e-06, |
| "loss": 0.7117, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.5572331460674157, |
| "grad_norm": 0.9152799844741821, |
| "learning_rate": 4.906069057110062e-06, |
| "loss": 0.7262, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.5575842696629213, |
| "grad_norm": 0.9061770439147949, |
| "learning_rate": 4.9059434769319205e-06, |
| "loss": 0.6746, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.557935393258427, |
| "grad_norm": 0.8756861090660095, |
| "learning_rate": 4.905817814472572e-06, |
| "loss": 0.6547, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.5582865168539326, |
| "grad_norm": 0.8801287412643433, |
| "learning_rate": 4.905692069736312e-06, |
| "loss": 0.7026, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.5586376404494382, |
| "grad_norm": 0.871894359588623, |
| "learning_rate": 4.9055662427274415e-06, |
| "loss": 0.6967, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.5589887640449438, |
| "grad_norm": 0.8951716423034668, |
| "learning_rate": 4.905440333450264e-06, |
| "loss": 0.6769, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.5593398876404494, |
| "grad_norm": 0.8958552479743958, |
| "learning_rate": 4.905314341909086e-06, |
| "loss": 0.6462, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.5596910112359551, |
| "grad_norm": 0.8950044512748718, |
| "learning_rate": 4.9051882681082155e-06, |
| "loss": 0.6943, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.5600421348314607, |
| "grad_norm": 0.8931154608726501, |
| "learning_rate": 4.905062112051965e-06, |
| "loss": 0.6712, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.5603932584269663, |
| "grad_norm": 0.9284601211547852, |
| "learning_rate": 4.904935873744647e-06, |
| "loss": 0.7057, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.5607443820224719, |
| "grad_norm": 0.8948965072631836, |
| "learning_rate": 4.904809553190582e-06, |
| "loss": 0.6923, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.5610955056179775, |
| "grad_norm": 0.9073783755302429, |
| "learning_rate": 4.904683150394086e-06, |
| "loss": 0.7183, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.5614466292134831, |
| "grad_norm": 0.9169145226478577, |
| "learning_rate": 4.904556665359485e-06, |
| "loss": 0.71, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.5617977528089888, |
| "grad_norm": 1.0053675174713135, |
| "learning_rate": 4.904430098091104e-06, |
| "loss": 0.7078, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.5621488764044944, |
| "grad_norm": 0.893157422542572, |
| "learning_rate": 4.904303448593271e-06, |
| "loss": 0.6814, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.5625, |
| "grad_norm": 0.8943068981170654, |
| "learning_rate": 4.904176716870317e-06, |
| "loss": 0.6915, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.5628511235955056, |
| "grad_norm": 0.8920650482177734, |
| "learning_rate": 4.904049902926576e-06, |
| "loss": 0.6953, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.5632022471910112, |
| "grad_norm": 0.8903055787086487, |
| "learning_rate": 4.9039230067663865e-06, |
| "loss": 0.6708, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.5635533707865169, |
| "grad_norm": 0.9348129034042358, |
| "learning_rate": 4.903796028394086e-06, |
| "loss": 0.6983, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.5639044943820225, |
| "grad_norm": 0.946652889251709, |
| "learning_rate": 4.903668967814021e-06, |
| "loss": 0.6997, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.5642556179775281, |
| "grad_norm": 0.9114198684692383, |
| "learning_rate": 4.9035418250305314e-06, |
| "loss": 0.6741, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.5646067415730337, |
| "grad_norm": 0.8928046226501465, |
| "learning_rate": 4.903414600047969e-06, |
| "loss": 0.6866, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.5649578651685393, |
| "grad_norm": 0.9179702997207642, |
| "learning_rate": 4.903287292870684e-06, |
| "loss": 0.696, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.5653089887640449, |
| "grad_norm": 0.9323627948760986, |
| "learning_rate": 4.903159903503031e-06, |
| "loss": 0.6939, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.5656601123595506, |
| "grad_norm": 0.8928480744361877, |
| "learning_rate": 4.903032431949365e-06, |
| "loss": 0.6819, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.5660112359550562, |
| "grad_norm": 0.8628221154212952, |
| "learning_rate": 4.9029048782140454e-06, |
| "loss": 0.687, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.5663623595505618, |
| "grad_norm": 0.8917481303215027, |
| "learning_rate": 4.9027772423014354e-06, |
| "loss": 0.7087, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.5667134831460674, |
| "grad_norm": 0.8859778642654419, |
| "learning_rate": 4.9026495242159e-06, |
| "loss": 0.6619, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.567064606741573, |
| "grad_norm": 0.9722530841827393, |
| "learning_rate": 4.902521723961807e-06, |
| "loss": 0.7133, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.5674157303370787, |
| "grad_norm": 0.8985425233840942, |
| "learning_rate": 4.9023938415435265e-06, |
| "loss": 0.6599, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.5677668539325843, |
| "grad_norm": 0.9105679392814636, |
| "learning_rate": 4.902265876965434e-06, |
| "loss": 0.6985, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.5681179775280899, |
| "grad_norm": 0.8673763871192932, |
| "learning_rate": 4.902137830231903e-06, |
| "loss": 0.692, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.5684691011235955, |
| "grad_norm": 0.8669525384902954, |
| "learning_rate": 4.902009701347313e-06, |
| "loss": 0.6515, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.5688202247191011, |
| "grad_norm": 0.9088237881660461, |
| "learning_rate": 4.9018814903160475e-06, |
| "loss": 0.6847, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.5691713483146067, |
| "grad_norm": 0.9107149243354797, |
| "learning_rate": 4.90175319714249e-06, |
| "loss": 0.6769, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.5695224719101124, |
| "grad_norm": 0.8820178508758545, |
| "learning_rate": 4.901624821831028e-06, |
| "loss": 0.7169, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.569873595505618, |
| "grad_norm": 0.8633939623832703, |
| "learning_rate": 4.901496364386053e-06, |
| "loss": 0.6395, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.5702247191011236, |
| "grad_norm": 0.9044683575630188, |
| "learning_rate": 4.901367824811958e-06, |
| "loss": 0.7011, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.5705758426966292, |
| "grad_norm": 0.9092311263084412, |
| "learning_rate": 4.901239203113137e-06, |
| "loss": 0.6643, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.5709269662921348, |
| "grad_norm": 0.9932267665863037, |
| "learning_rate": 4.90111049929399e-06, |
| "loss": 0.7203, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.5712780898876404, |
| "grad_norm": 0.8947046995162964, |
| "learning_rate": 4.9009817133589186e-06, |
| "loss": 0.683, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.5716292134831461, |
| "grad_norm": 0.9035671353340149, |
| "learning_rate": 4.900852845312328e-06, |
| "loss": 0.6761, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.5719803370786517, |
| "grad_norm": 0.905048668384552, |
| "learning_rate": 4.900723895158624e-06, |
| "loss": 0.7018, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.5723314606741573, |
| "grad_norm": 0.9121686220169067, |
| "learning_rate": 4.9005948629022175e-06, |
| "loss": 0.7506, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.5726825842696629, |
| "grad_norm": 0.9041679501533508, |
| "learning_rate": 4.90046574854752e-06, |
| "loss": 0.7058, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.5730337078651685, |
| "grad_norm": 0.8760393857955933, |
| "learning_rate": 4.900336552098949e-06, |
| "loss": 0.7067, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.5733848314606742, |
| "grad_norm": 0.9228975772857666, |
| "learning_rate": 4.900207273560921e-06, |
| "loss": 0.6959, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.5737359550561798, |
| "grad_norm": 0.8803319334983826, |
| "learning_rate": 4.900077912937858e-06, |
| "loss": 0.634, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.5740870786516854, |
| "grad_norm": 0.8787641525268555, |
| "learning_rate": 4.899948470234185e-06, |
| "loss": 0.7098, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.574438202247191, |
| "grad_norm": 0.8598030209541321, |
| "learning_rate": 4.899818945454328e-06, |
| "loss": 0.7062, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.5747893258426966, |
| "grad_norm": 0.920392632484436, |
| "learning_rate": 4.899689338602716e-06, |
| "loss": 0.6514, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.5751404494382022, |
| "grad_norm": 0.9178962111473083, |
| "learning_rate": 4.899559649683783e-06, |
| "loss": 0.6626, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.5754915730337079, |
| "grad_norm": 0.8671183586120605, |
| "learning_rate": 4.899429878701963e-06, |
| "loss": 0.704, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.5758426966292135, |
| "grad_norm": 0.8965294361114502, |
| "learning_rate": 4.899300025661694e-06, |
| "loss": 0.7058, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.5761938202247191, |
| "grad_norm": 0.8801050186157227, |
| "learning_rate": 4.8991700905674165e-06, |
| "loss": 0.695, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.5765449438202247, |
| "grad_norm": 0.9138525724411011, |
| "learning_rate": 4.899040073423577e-06, |
| "loss": 0.7061, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.5768960674157303, |
| "grad_norm": 0.8814200162887573, |
| "learning_rate": 4.898909974234618e-06, |
| "loss": 0.6659, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.577247191011236, |
| "grad_norm": 0.927841305732727, |
| "learning_rate": 4.898779793004991e-06, |
| "loss": 0.6864, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.5775983146067416, |
| "grad_norm": 0.9304884076118469, |
| "learning_rate": 4.898649529739148e-06, |
| "loss": 0.66, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.5779494382022472, |
| "grad_norm": 0.8991161584854126, |
| "learning_rate": 4.898519184441544e-06, |
| "loss": 0.6901, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.5783005617977528, |
| "grad_norm": 0.8874373435974121, |
| "learning_rate": 4.898388757116636e-06, |
| "loss": 0.6866, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.5786516853932584, |
| "grad_norm": 0.8752896785736084, |
| "learning_rate": 4.898258247768885e-06, |
| "loss": 0.6966, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.579002808988764, |
| "grad_norm": 0.9005473852157593, |
| "learning_rate": 4.898127656402753e-06, |
| "loss": 0.7192, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.5793539325842697, |
| "grad_norm": 0.8968993425369263, |
| "learning_rate": 4.897996983022709e-06, |
| "loss": 0.7156, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.5797050561797753, |
| "grad_norm": 0.881846010684967, |
| "learning_rate": 4.897866227633219e-06, |
| "loss": 0.6848, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.5800561797752809, |
| "grad_norm": 0.9290207028388977, |
| "learning_rate": 4.897735390238756e-06, |
| "loss": 0.7263, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.5804073033707865, |
| "grad_norm": 0.8702242374420166, |
| "learning_rate": 4.897604470843795e-06, |
| "loss": 0.6796, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.5807584269662921, |
| "grad_norm": 0.9286425709724426, |
| "learning_rate": 4.897473469452812e-06, |
| "loss": 0.7203, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.5811095505617978, |
| "grad_norm": 0.9703604578971863, |
| "learning_rate": 4.897342386070289e-06, |
| "loss": 0.6574, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.5814606741573034, |
| "grad_norm": 0.9450727701187134, |
| "learning_rate": 4.897211220700707e-06, |
| "loss": 0.7004, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.581811797752809, |
| "grad_norm": 0.8609772324562073, |
| "learning_rate": 4.897079973348553e-06, |
| "loss": 0.658, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.5821629213483146, |
| "grad_norm": 0.8902559876441956, |
| "learning_rate": 4.896948644018315e-06, |
| "loss": 0.6995, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.5825140449438202, |
| "grad_norm": 0.8805756568908691, |
| "learning_rate": 4.896817232714484e-06, |
| "loss": 0.6919, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.5828651685393258, |
| "grad_norm": 0.8647357225418091, |
| "learning_rate": 4.896685739441555e-06, |
| "loss": 0.7053, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.5832162921348315, |
| "grad_norm": 0.8912749290466309, |
| "learning_rate": 4.896554164204025e-06, |
| "loss": 0.6848, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.5835674157303371, |
| "grad_norm": 0.8912277221679688, |
| "learning_rate": 4.896422507006392e-06, |
| "loss": 0.687, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.5839185393258427, |
| "grad_norm": 0.8887133598327637, |
| "learning_rate": 4.896290767853161e-06, |
| "loss": 0.6703, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.5842696629213483, |
| "grad_norm": 0.9464722871780396, |
| "learning_rate": 4.896158946748837e-06, |
| "loss": 0.6834, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.5846207865168539, |
| "grad_norm": 0.8864126801490784, |
| "learning_rate": 4.8960270436979265e-06, |
| "loss": 0.6983, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.5849719101123596, |
| "grad_norm": 0.9276545643806458, |
| "learning_rate": 4.895895058704941e-06, |
| "loss": 0.6989, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.5853230337078652, |
| "grad_norm": 0.9041825532913208, |
| "learning_rate": 4.8957629917743955e-06, |
| "loss": 0.7188, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.5856741573033708, |
| "grad_norm": 0.9835352301597595, |
| "learning_rate": 4.895630842910805e-06, |
| "loss": 0.6981, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.5860252808988764, |
| "grad_norm": 0.8841162323951721, |
| "learning_rate": 4.89549861211869e-06, |
| "loss": 0.6686, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.586376404494382, |
| "grad_norm": 0.9026679992675781, |
| "learning_rate": 4.895366299402572e-06, |
| "loss": 0.6782, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.5867275280898876, |
| "grad_norm": 0.8805919885635376, |
| "learning_rate": 4.895233904766978e-06, |
| "loss": 0.7267, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.5870786516853933, |
| "grad_norm": 0.8851740956306458, |
| "learning_rate": 4.895101428216432e-06, |
| "loss": 0.6841, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.5874297752808989, |
| "grad_norm": 0.911161482334137, |
| "learning_rate": 4.894968869755467e-06, |
| "loss": 0.6938, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.5877808988764045, |
| "grad_norm": 0.900994598865509, |
| "learning_rate": 4.894836229388617e-06, |
| "loss": 0.6809, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.5881320224719101, |
| "grad_norm": 0.9121220111846924, |
| "learning_rate": 4.894703507120417e-06, |
| "loss": 0.6941, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.5884831460674157, |
| "grad_norm": 0.8991403579711914, |
| "learning_rate": 4.894570702955406e-06, |
| "loss": 0.6925, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.5888342696629213, |
| "grad_norm": 0.8850685358047485, |
| "learning_rate": 4.894437816898125e-06, |
| "loss": 0.6904, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.589185393258427, |
| "grad_norm": 0.8999640345573425, |
| "learning_rate": 4.894304848953122e-06, |
| "loss": 0.6959, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.5895365168539326, |
| "grad_norm": 0.9228804707527161, |
| "learning_rate": 4.89417179912494e-06, |
| "loss": 0.6663, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.5898876404494382, |
| "grad_norm": 0.8978350162506104, |
| "learning_rate": 4.894038667418132e-06, |
| "loss": 0.6948, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.5902387640449438, |
| "grad_norm": 0.9258869290351868, |
| "learning_rate": 4.89390545383725e-06, |
| "loss": 0.7268, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.5905898876404494, |
| "grad_norm": 0.9010106325149536, |
| "learning_rate": 4.893772158386851e-06, |
| "loss": 0.6819, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.5909410112359551, |
| "grad_norm": 0.9146981835365295, |
| "learning_rate": 4.8936387810714904e-06, |
| "loss": 0.6942, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.5912921348314607, |
| "grad_norm": 0.959784746170044, |
| "learning_rate": 4.893505321895733e-06, |
| "loss": 0.7312, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.5916432584269663, |
| "grad_norm": 0.9104639887809753, |
| "learning_rate": 4.8933717808641415e-06, |
| "loss": 0.6842, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.5919943820224719, |
| "grad_norm": 0.8836262822151184, |
| "learning_rate": 4.893238157981283e-06, |
| "loss": 0.6613, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.5923455056179775, |
| "grad_norm": 0.903002142906189, |
| "learning_rate": 4.893104453251728e-06, |
| "loss": 0.6558, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.5926966292134831, |
| "grad_norm": 0.9008224010467529, |
| "learning_rate": 4.892970666680048e-06, |
| "loss": 0.7283, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.5930477528089888, |
| "grad_norm": 0.9016857743263245, |
| "learning_rate": 4.892836798270819e-06, |
| "loss": 0.704, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.5933988764044944, |
| "grad_norm": 0.9839923977851868, |
| "learning_rate": 4.8927028480286195e-06, |
| "loss": 0.6804, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.59375, |
| "grad_norm": 0.9112361669540405, |
| "learning_rate": 4.8925688159580294e-06, |
| "loss": 0.6885, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.5941011235955056, |
| "grad_norm": 0.9427493214607239, |
| "learning_rate": 4.892434702063634e-06, |
| "loss": 0.6816, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.5944522471910112, |
| "grad_norm": 0.9189691543579102, |
| "learning_rate": 4.892300506350018e-06, |
| "loss": 0.7258, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.5948033707865169, |
| "grad_norm": 0.9497784972190857, |
| "learning_rate": 4.892166228821772e-06, |
| "loss": 0.6805, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.5951544943820225, |
| "grad_norm": 0.867682933807373, |
| "learning_rate": 4.892031869483489e-06, |
| "loss": 0.6952, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.5955056179775281, |
| "grad_norm": 0.9247297644615173, |
| "learning_rate": 4.891897428339763e-06, |
| "loss": 0.6892, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.5958567415730337, |
| "grad_norm": 0.9473358392715454, |
| "learning_rate": 4.89176290539519e-06, |
| "loss": 0.704, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.5962078651685393, |
| "grad_norm": 0.9231151938438416, |
| "learning_rate": 4.891628300654373e-06, |
| "loss": 0.725, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.5965589887640449, |
| "grad_norm": 0.9092977046966553, |
| "learning_rate": 4.891493614121915e-06, |
| "loss": 0.7209, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.5969101123595506, |
| "grad_norm": 0.9357640147209167, |
| "learning_rate": 4.891358845802422e-06, |
| "loss": 0.6577, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.5972612359550562, |
| "grad_norm": 0.9201985001564026, |
| "learning_rate": 4.891223995700503e-06, |
| "loss": 0.7037, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.5976123595505618, |
| "grad_norm": 0.8873964548110962, |
| "learning_rate": 4.8910890638207685e-06, |
| "loss": 0.6655, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.5979634831460674, |
| "grad_norm": 0.8884997367858887, |
| "learning_rate": 4.890954050167835e-06, |
| "loss": 0.6927, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.598314606741573, |
| "grad_norm": 0.9006896018981934, |
| "learning_rate": 4.890818954746318e-06, |
| "loss": 0.6879, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.5986657303370787, |
| "grad_norm": 0.8921645283699036, |
| "learning_rate": 4.8906837775608395e-06, |
| "loss": 0.7091, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.5990168539325843, |
| "grad_norm": 0.8784016370773315, |
| "learning_rate": 4.890548518616022e-06, |
| "loss": 0.6611, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.5993679775280899, |
| "grad_norm": 0.8995918035507202, |
| "learning_rate": 4.89041317791649e-06, |
| "loss": 0.699, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.5997191011235955, |
| "grad_norm": 0.9034433364868164, |
| "learning_rate": 4.890277755466872e-06, |
| "loss": 0.6861, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.6000702247191011, |
| "grad_norm": 0.896384596824646, |
| "learning_rate": 4.890142251271801e-06, |
| "loss": 0.6658, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.6004213483146067, |
| "grad_norm": 0.8753630518913269, |
| "learning_rate": 4.890006665335911e-06, |
| "loss": 0.6886, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.6007724719101124, |
| "grad_norm": 0.9117717146873474, |
| "learning_rate": 4.889870997663837e-06, |
| "loss": 0.6885, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.601123595505618, |
| "grad_norm": 0.9002839922904968, |
| "learning_rate": 4.889735248260221e-06, |
| "loss": 0.6707, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.6014747191011236, |
| "grad_norm": 0.849831223487854, |
| "learning_rate": 4.889599417129703e-06, |
| "loss": 0.6458, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.6018258426966292, |
| "grad_norm": 0.9035419225692749, |
| "learning_rate": 4.889463504276931e-06, |
| "loss": 0.6465, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.6021769662921348, |
| "grad_norm": 0.8928185701370239, |
| "learning_rate": 4.889327509706552e-06, |
| "loss": 0.6596, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.6025280898876404, |
| "grad_norm": 0.9307925701141357, |
| "learning_rate": 4.889191433423216e-06, |
| "loss": 0.6855, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.6028792134831461, |
| "grad_norm": 0.8939083814620972, |
| "learning_rate": 4.889055275431578e-06, |
| "loss": 0.7251, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.6032303370786517, |
| "grad_norm": 0.8944793343544006, |
| "learning_rate": 4.888919035736293e-06, |
| "loss": 0.6952, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.6035814606741573, |
| "grad_norm": 0.92487633228302, |
| "learning_rate": 4.888782714342022e-06, |
| "loss": 0.685, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.6039325842696629, |
| "grad_norm": 0.9212489724159241, |
| "learning_rate": 4.888646311253426e-06, |
| "loss": 0.6487, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.6042837078651685, |
| "grad_norm": 0.9022998213768005, |
| "learning_rate": 4.888509826475171e-06, |
| "loss": 0.6699, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.6046348314606742, |
| "grad_norm": 0.868482232093811, |
| "learning_rate": 4.8883732600119225e-06, |
| "loss": 0.6771, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.6049859550561798, |
| "grad_norm": 0.8934330344200134, |
| "learning_rate": 4.888236611868353e-06, |
| "loss": 0.7027, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.6053370786516854, |
| "grad_norm": 0.9520792961120605, |
| "learning_rate": 4.888099882049135e-06, |
| "loss": 0.701, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.605688202247191, |
| "grad_norm": 0.921665370464325, |
| "learning_rate": 4.887963070558944e-06, |
| "loss": 0.7058, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.6060393258426966, |
| "grad_norm": 0.8855586647987366, |
| "learning_rate": 4.88782617740246e-06, |
| "loss": 0.6789, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.6063904494382022, |
| "grad_norm": 0.9030165672302246, |
| "learning_rate": 4.887689202584364e-06, |
| "loss": 0.6883, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.6067415730337079, |
| "grad_norm": 0.8876542448997498, |
| "learning_rate": 4.88755214610934e-06, |
| "loss": 0.713, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.6070926966292135, |
| "grad_norm": 0.9762006998062134, |
| "learning_rate": 4.887415007982076e-06, |
| "loss": 0.7003, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.6074438202247191, |
| "grad_norm": 0.9035171270370483, |
| "learning_rate": 4.887277788207261e-06, |
| "loss": 0.7089, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.6077949438202247, |
| "grad_norm": 0.8891435265541077, |
| "learning_rate": 4.887140486789589e-06, |
| "loss": 0.6846, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.6081460674157303, |
| "grad_norm": 0.899974524974823, |
| "learning_rate": 4.887003103733755e-06, |
| "loss": 0.6662, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.608497191011236, |
| "grad_norm": 0.958362340927124, |
| "learning_rate": 4.886865639044458e-06, |
| "loss": 0.6732, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.6088483146067416, |
| "grad_norm": 0.9114941954612732, |
| "learning_rate": 4.8867280927263985e-06, |
| "loss": 0.6872, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.6091994382022472, |
| "grad_norm": 0.9042050242424011, |
| "learning_rate": 4.88659046478428e-06, |
| "loss": 0.6654, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.6095505617977528, |
| "grad_norm": 0.8774390816688538, |
| "learning_rate": 4.88645275522281e-06, |
| "loss": 0.666, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.6099016853932584, |
| "grad_norm": 0.8930482268333435, |
| "learning_rate": 4.8863149640466975e-06, |
| "loss": 0.6796, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.610252808988764, |
| "grad_norm": 0.9068736433982849, |
| "learning_rate": 4.886177091260656e-06, |
| "loss": 0.6599, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.6106039325842697, |
| "grad_norm": 0.8848494291305542, |
| "learning_rate": 4.8860391368694e-06, |
| "loss": 0.7131, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.6109550561797753, |
| "grad_norm": 0.9204060435295105, |
| "learning_rate": 4.885901100877647e-06, |
| "loss": 0.7236, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.6113061797752809, |
| "grad_norm": 0.912101149559021, |
| "learning_rate": 4.885762983290117e-06, |
| "loss": 0.7401, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.6116573033707865, |
| "grad_norm": 0.8925957083702087, |
| "learning_rate": 4.885624784111536e-06, |
| "loss": 0.6837, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.6120084269662921, |
| "grad_norm": 0.9438378214836121, |
| "learning_rate": 4.885486503346628e-06, |
| "loss": 0.6398, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.6123595505617978, |
| "grad_norm": 0.885036826133728, |
| "learning_rate": 4.8853481410001225e-06, |
| "loss": 0.6341, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.6127106741573034, |
| "grad_norm": 0.8819115161895752, |
| "learning_rate": 4.885209697076752e-06, |
| "loss": 0.673, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.613061797752809, |
| "grad_norm": 0.9269865155220032, |
| "learning_rate": 4.8850711715812515e-06, |
| "loss": 0.7188, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.6134129213483146, |
| "grad_norm": 0.9204302430152893, |
| "learning_rate": 4.8849325645183575e-06, |
| "loss": 0.7151, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.6137640449438202, |
| "grad_norm": 0.9573183655738831, |
| "learning_rate": 4.88479387589281e-06, |
| "loss": 0.7048, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.6141151685393258, |
| "grad_norm": 1.0055536031723022, |
| "learning_rate": 4.884655105709354e-06, |
| "loss": 0.6865, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.6144662921348315, |
| "grad_norm": 0.9006555080413818, |
| "learning_rate": 4.8845162539727335e-06, |
| "loss": 0.6931, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.6148174157303371, |
| "grad_norm": 0.8895266056060791, |
| "learning_rate": 4.884377320687697e-06, |
| "loss": 0.6788, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.6151685393258427, |
| "grad_norm": 0.8907827138900757, |
| "learning_rate": 4.884238305858997e-06, |
| "loss": 0.6559, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.6155196629213483, |
| "grad_norm": 0.8671404719352722, |
| "learning_rate": 4.884099209491386e-06, |
| "loss": 0.692, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.6158707865168539, |
| "grad_norm": 0.8965876698493958, |
| "learning_rate": 4.883960031589624e-06, |
| "loss": 0.6662, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.6162219101123596, |
| "grad_norm": 0.8783154487609863, |
| "learning_rate": 4.883820772158467e-06, |
| "loss": 0.6802, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.6165730337078652, |
| "grad_norm": 0.9085580706596375, |
| "learning_rate": 4.883681431202682e-06, |
| "loss": 0.7184, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.6169241573033708, |
| "grad_norm": 0.9406449794769287, |
| "learning_rate": 4.883542008727029e-06, |
| "loss": 0.6998, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.6172752808988764, |
| "grad_norm": 0.9234215021133423, |
| "learning_rate": 4.88340250473628e-06, |
| "loss": 0.7069, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.617626404494382, |
| "grad_norm": 0.8770806789398193, |
| "learning_rate": 4.883262919235205e-06, |
| "loss": 0.738, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.6179775280898876, |
| "grad_norm": 0.9186333417892456, |
| "learning_rate": 4.8831232522285786e-06, |
| "loss": 0.7019, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.6183286516853933, |
| "grad_norm": 0.9067515730857849, |
| "learning_rate": 4.8829835037211745e-06, |
| "loss": 0.6724, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.6186797752808989, |
| "grad_norm": 0.962090790271759, |
| "learning_rate": 4.8828436737177744e-06, |
| "loss": 0.6521, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.6190308988764045, |
| "grad_norm": 0.8877134323120117, |
| "learning_rate": 4.88270376222316e-06, |
| "loss": 0.7029, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.6193820224719101, |
| "grad_norm": 0.9146241545677185, |
| "learning_rate": 4.882563769242116e-06, |
| "loss": 0.6999, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.6197331460674157, |
| "grad_norm": 0.8948286175727844, |
| "learning_rate": 4.882423694779429e-06, |
| "loss": 0.6677, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.6200842696629213, |
| "grad_norm": 0.958892285823822, |
| "learning_rate": 4.88228353883989e-06, |
| "loss": 0.7261, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.620435393258427, |
| "grad_norm": 0.906204104423523, |
| "learning_rate": 4.882143301428293e-06, |
| "loss": 0.671, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.6207865168539326, |
| "grad_norm": 0.9508899450302124, |
| "learning_rate": 4.882002982549435e-06, |
| "loss": 0.6718, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.6211376404494382, |
| "grad_norm": 0.8806086778640747, |
| "learning_rate": 4.8818625822081116e-06, |
| "loss": 0.6835, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.6214887640449438, |
| "grad_norm": 0.9053204655647278, |
| "learning_rate": 4.8817221004091266e-06, |
| "loss": 0.7018, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.6218398876404494, |
| "grad_norm": 0.8768627047538757, |
| "learning_rate": 4.881581537157284e-06, |
| "loss": 0.6819, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.6221910112359551, |
| "grad_norm": 0.8701360821723938, |
| "learning_rate": 4.881440892457391e-06, |
| "loss": 0.6766, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.6225421348314607, |
| "grad_norm": 0.9027068614959717, |
| "learning_rate": 4.881300166314257e-06, |
| "loss": 0.6819, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.6228932584269663, |
| "grad_norm": 0.9427441954612732, |
| "learning_rate": 4.881159358732695e-06, |
| "loss": 0.6904, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.6232443820224719, |
| "grad_norm": 0.9292178750038147, |
| "learning_rate": 4.88101846971752e-06, |
| "loss": 0.7045, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.6235955056179775, |
| "grad_norm": 0.864090085029602, |
| "learning_rate": 4.880877499273551e-06, |
| "loss": 0.6496, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.6239466292134831, |
| "grad_norm": 0.9168904423713684, |
| "learning_rate": 4.880736447405609e-06, |
| "loss": 0.6805, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.6242977528089888, |
| "grad_norm": 0.9072659015655518, |
| "learning_rate": 4.880595314118518e-06, |
| "loss": 0.6699, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.6246488764044944, |
| "grad_norm": 0.9015219807624817, |
| "learning_rate": 4.880454099417104e-06, |
| "loss": 0.6781, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.625, |
| "grad_norm": 0.9149530529975891, |
| "learning_rate": 4.880312803306197e-06, |
| "loss": 0.7069, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.6253511235955056, |
| "grad_norm": 0.9412938952445984, |
| "learning_rate": 4.880171425790629e-06, |
| "loss": 0.6851, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.6257022471910112, |
| "grad_norm": 0.9359396696090698, |
| "learning_rate": 4.880029966875235e-06, |
| "loss": 0.6738, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.6260533707865169, |
| "grad_norm": 0.9232779741287231, |
| "learning_rate": 4.879888426564853e-06, |
| "loss": 0.6688, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.6264044943820225, |
| "grad_norm": 0.9085204005241394, |
| "learning_rate": 4.879746804864323e-06, |
| "loss": 0.6738, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.6267556179775281, |
| "grad_norm": 0.916195809841156, |
| "learning_rate": 4.879605101778488e-06, |
| "loss": 0.6691, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.6271067415730337, |
| "grad_norm": 0.891173243522644, |
| "learning_rate": 4.8794633173121954e-06, |
| "loss": 0.6941, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.6274578651685393, |
| "grad_norm": 0.8935718536376953, |
| "learning_rate": 4.879321451470294e-06, |
| "loss": 0.65, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.6278089887640449, |
| "grad_norm": 0.9326649308204651, |
| "learning_rate": 4.879179504257634e-06, |
| "loss": 0.7116, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.6281601123595506, |
| "grad_norm": 0.9544984102249146, |
| "learning_rate": 4.879037475679071e-06, |
| "loss": 0.6531, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.6285112359550562, |
| "grad_norm": 0.8751811981201172, |
| "learning_rate": 4.878895365739463e-06, |
| "loss": 0.7009, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.6288623595505618, |
| "grad_norm": 0.860333263874054, |
| "learning_rate": 4.878753174443669e-06, |
| "loss": 0.7003, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.6292134831460674, |
| "grad_norm": 0.9002732038497925, |
| "learning_rate": 4.878610901796551e-06, |
| "loss": 0.6768, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.629564606741573, |
| "grad_norm": 0.8901023864746094, |
| "learning_rate": 4.8784685478029765e-06, |
| "loss": 0.6572, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.6299157303370787, |
| "grad_norm": 0.8898550271987915, |
| "learning_rate": 4.878326112467813e-06, |
| "loss": 0.7141, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.6302668539325843, |
| "grad_norm": 0.9085826277732849, |
| "learning_rate": 4.878183595795931e-06, |
| "loss": 0.7033, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.6306179775280899, |
| "grad_norm": 0.9423730969429016, |
| "learning_rate": 4.878040997792205e-06, |
| "loss": 0.6763, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.6309691011235955, |
| "grad_norm": 0.9046456217765808, |
| "learning_rate": 4.877898318461512e-06, |
| "loss": 0.6438, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.6313202247191011, |
| "grad_norm": 0.9388726353645325, |
| "learning_rate": 4.877755557808731e-06, |
| "loss": 0.6921, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.6316713483146067, |
| "grad_norm": 0.8939900398254395, |
| "learning_rate": 4.877612715838746e-06, |
| "loss": 0.6729, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.6320224719101124, |
| "grad_norm": 0.919826328754425, |
| "learning_rate": 4.877469792556439e-06, |
| "loss": 0.7064, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.632373595505618, |
| "grad_norm": 0.8846567273139954, |
| "learning_rate": 4.8773267879667e-06, |
| "loss": 0.6463, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.6327247191011236, |
| "grad_norm": 0.9120215177536011, |
| "learning_rate": 4.8771837020744195e-06, |
| "loss": 0.7055, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.6330758426966292, |
| "grad_norm": 0.9156368970870972, |
| "learning_rate": 4.87704053488449e-06, |
| "loss": 0.7379, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.6334269662921348, |
| "grad_norm": 0.9091489315032959, |
| "learning_rate": 4.876897286401808e-06, |
| "loss": 0.6618, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.6337780898876404, |
| "grad_norm": 0.8682206273078918, |
| "learning_rate": 4.876753956631274e-06, |
| "loss": 0.6773, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.6341292134831461, |
| "grad_norm": 0.9142380952835083, |
| "learning_rate": 4.876610545577788e-06, |
| "loss": 0.7102, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.6344803370786517, |
| "grad_norm": 0.938529908657074, |
| "learning_rate": 4.876467053246255e-06, |
| "loss": 0.6967, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.6348314606741573, |
| "grad_norm": 0.9012477397918701, |
| "learning_rate": 4.876323479641582e-06, |
| "loss": 0.6548, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.6351825842696629, |
| "grad_norm": 0.8724120855331421, |
| "learning_rate": 4.8761798247686795e-06, |
| "loss": 0.6205, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.6355337078651685, |
| "grad_norm": 0.9118915796279907, |
| "learning_rate": 4.876036088632461e-06, |
| "loss": 0.6931, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.6358848314606742, |
| "grad_norm": 0.8679929375648499, |
| "learning_rate": 4.875892271237841e-06, |
| "loss": 0.6808, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.6362359550561798, |
| "grad_norm": 0.8951993584632874, |
| "learning_rate": 4.875748372589738e-06, |
| "loss": 0.6665, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.6365870786516854, |
| "grad_norm": 0.9559075832366943, |
| "learning_rate": 4.875604392693074e-06, |
| "loss": 0.6634, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.636938202247191, |
| "grad_norm": 0.8831387758255005, |
| "learning_rate": 4.875460331552772e-06, |
| "loss": 0.7054, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.6372893258426966, |
| "grad_norm": 0.8540201783180237, |
| "learning_rate": 4.875316189173761e-06, |
| "loss": 0.6676, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.6376404494382022, |
| "grad_norm": 0.8935166001319885, |
| "learning_rate": 4.875171965560967e-06, |
| "loss": 0.6789, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.6379915730337079, |
| "grad_norm": 0.9121273159980774, |
| "learning_rate": 4.875027660719326e-06, |
| "loss": 0.6645, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.6383426966292135, |
| "grad_norm": 0.9133923053741455, |
| "learning_rate": 4.87488327465377e-06, |
| "loss": 0.7053, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.6386938202247191, |
| "grad_norm": 0.9285811185836792, |
| "learning_rate": 4.874738807369239e-06, |
| "loss": 0.7172, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.6390449438202247, |
| "grad_norm": 0.9026995301246643, |
| "learning_rate": 4.874594258870673e-06, |
| "loss": 0.6848, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.6393960674157303, |
| "grad_norm": 0.9294858574867249, |
| "learning_rate": 4.874449629163015e-06, |
| "loss": 0.7073, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.639747191011236, |
| "grad_norm": 0.9029496908187866, |
| "learning_rate": 4.874304918251212e-06, |
| "loss": 0.7049, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.6400983146067416, |
| "grad_norm": 0.8920079469680786, |
| "learning_rate": 4.874160126140211e-06, |
| "loss": 0.6649, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.6404494382022472, |
| "grad_norm": 0.8667442798614502, |
| "learning_rate": 4.874015252834966e-06, |
| "loss": 0.6327, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.6408005617977528, |
| "grad_norm": 0.8910001516342163, |
| "learning_rate": 4.8738702983404315e-06, |
| "loss": 0.6964, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.6411516853932584, |
| "grad_norm": 0.8773640394210815, |
| "learning_rate": 4.873725262661564e-06, |
| "loss": 0.6763, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.641502808988764, |
| "grad_norm": 0.9394330978393555, |
| "learning_rate": 4.873580145803323e-06, |
| "loss": 0.7796, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.6418539325842697, |
| "grad_norm": 0.8596976399421692, |
| "learning_rate": 4.8734349477706725e-06, |
| "loss": 0.6229, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.6422050561797753, |
| "grad_norm": 0.9220896363258362, |
| "learning_rate": 4.873289668568578e-06, |
| "loss": 0.6801, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.6425561797752809, |
| "grad_norm": 0.8582680821418762, |
| "learning_rate": 4.873144308202008e-06, |
| "loss": 0.6475, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.6429073033707865, |
| "grad_norm": 0.916015088558197, |
| "learning_rate": 4.872998866675933e-06, |
| "loss": 0.6296, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.6432584269662921, |
| "grad_norm": 0.8732319474220276, |
| "learning_rate": 4.872853343995327e-06, |
| "loss": 0.6294, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.6436095505617978, |
| "grad_norm": 0.8985139727592468, |
| "learning_rate": 4.872707740165168e-06, |
| "loss": 0.6647, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.6439606741573034, |
| "grad_norm": 0.9100089073181152, |
| "learning_rate": 4.872562055190433e-06, |
| "loss": 0.66, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.644311797752809, |
| "grad_norm": 0.8807663917541504, |
| "learning_rate": 4.872416289076107e-06, |
| "loss": 0.714, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.6446629213483146, |
| "grad_norm": 0.924670934677124, |
| "learning_rate": 4.872270441827174e-06, |
| "loss": 0.6851, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.6450140449438202, |
| "grad_norm": 0.9182237982749939, |
| "learning_rate": 4.872124513448622e-06, |
| "loss": 0.6728, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.6453651685393258, |
| "grad_norm": 0.8587349057197571, |
| "learning_rate": 4.871978503945442e-06, |
| "loss": 0.6632, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.6457162921348315, |
| "grad_norm": 0.8839679956436157, |
| "learning_rate": 4.871832413322626e-06, |
| "loss": 0.6531, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.6460674157303371, |
| "grad_norm": 0.9729541540145874, |
| "learning_rate": 4.871686241585171e-06, |
| "loss": 0.6855, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.6464185393258427, |
| "grad_norm": 0.895285964012146, |
| "learning_rate": 4.871539988738076e-06, |
| "loss": 0.6738, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.6467696629213483, |
| "grad_norm": 0.8678845167160034, |
| "learning_rate": 4.871393654786344e-06, |
| "loss": 0.6494, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.6471207865168539, |
| "grad_norm": 0.8896480798721313, |
| "learning_rate": 4.871247239734977e-06, |
| "loss": 0.6823, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.6474719101123596, |
| "grad_norm": 0.8339593410491943, |
| "learning_rate": 4.871100743588983e-06, |
| "loss": 0.6193, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.6478230337078652, |
| "grad_norm": 0.8970949053764343, |
| "learning_rate": 4.870954166353374e-06, |
| "loss": 0.6944, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.6481741573033708, |
| "grad_norm": 0.8810001015663147, |
| "learning_rate": 4.870807508033161e-06, |
| "loss": 0.7222, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.6485252808988764, |
| "grad_norm": 0.8821990489959717, |
| "learning_rate": 4.870660768633359e-06, |
| "loss": 0.6881, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.648876404494382, |
| "grad_norm": 0.9234027862548828, |
| "learning_rate": 4.8705139481589885e-06, |
| "loss": 0.7074, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.6492275280898876, |
| "grad_norm": 0.9003174901008606, |
| "learning_rate": 4.870367046615069e-06, |
| "loss": 0.6754, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.6495786516853933, |
| "grad_norm": 0.9838910698890686, |
| "learning_rate": 4.870220064006625e-06, |
| "loss": 0.716, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.6499297752808989, |
| "grad_norm": 0.9417749047279358, |
| "learning_rate": 4.870073000338683e-06, |
| "loss": 0.6826, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.6502808988764045, |
| "grad_norm": 0.9156447052955627, |
| "learning_rate": 4.869925855616272e-06, |
| "loss": 0.6472, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.6506320224719101, |
| "grad_norm": 0.8980277180671692, |
| "learning_rate": 4.869778629844425e-06, |
| "loss": 0.6536, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.6509831460674157, |
| "grad_norm": 0.9398206472396851, |
| "learning_rate": 4.8696313230281776e-06, |
| "loss": 0.6618, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.6513342696629213, |
| "grad_norm": 0.9095361232757568, |
| "learning_rate": 4.869483935172566e-06, |
| "loss": 0.6758, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.651685393258427, |
| "grad_norm": 0.9065535664558411, |
| "learning_rate": 4.86933646628263e-06, |
| "loss": 0.6785, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.6520365168539326, |
| "grad_norm": 0.8876499533653259, |
| "learning_rate": 4.869188916363417e-06, |
| "loss": 0.642, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.6523876404494382, |
| "grad_norm": 0.8867465853691101, |
| "learning_rate": 4.869041285419968e-06, |
| "loss": 0.6926, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.6527387640449438, |
| "grad_norm": 0.9016279578208923, |
| "learning_rate": 4.868893573457335e-06, |
| "loss": 0.7007, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.6530898876404494, |
| "grad_norm": 0.9355380535125732, |
| "learning_rate": 4.868745780480569e-06, |
| "loss": 0.7025, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.6534410112359551, |
| "grad_norm": 0.8738915324211121, |
| "learning_rate": 4.868597906494725e-06, |
| "loss": 0.6633, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.6537921348314607, |
| "grad_norm": 0.9167248010635376, |
| "learning_rate": 4.868449951504859e-06, |
| "loss": 0.6669, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.6541432584269663, |
| "grad_norm": 0.9220420122146606, |
| "learning_rate": 4.868301915516031e-06, |
| "loss": 0.6419, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.6544943820224719, |
| "grad_norm": 0.9482148885726929, |
| "learning_rate": 4.868153798533304e-06, |
| "loss": 0.6934, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.6548455056179775, |
| "grad_norm": 0.928871214389801, |
| "learning_rate": 4.8680056005617436e-06, |
| "loss": 0.6506, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.6551966292134831, |
| "grad_norm": 0.9240955114364624, |
| "learning_rate": 4.867857321606418e-06, |
| "loss": 0.6745, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.6555477528089888, |
| "grad_norm": 0.8898888230323792, |
| "learning_rate": 4.867708961672399e-06, |
| "loss": 0.6925, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.6558988764044944, |
| "grad_norm": 0.9390647411346436, |
| "learning_rate": 4.8675605207647595e-06, |
| "loss": 0.7212, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.65625, |
| "grad_norm": 0.9547630548477173, |
| "learning_rate": 4.8674119988885756e-06, |
| "loss": 0.7168, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.6566011235955056, |
| "grad_norm": 0.8770663738250732, |
| "learning_rate": 4.867263396048927e-06, |
| "loss": 0.6373, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.6569522471910112, |
| "grad_norm": 0.899808406829834, |
| "learning_rate": 4.867114712250897e-06, |
| "loss": 0.708, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.6573033707865169, |
| "grad_norm": 0.9099377989768982, |
| "learning_rate": 4.866965947499568e-06, |
| "loss": 0.6888, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.6576544943820225, |
| "grad_norm": 0.896460235118866, |
| "learning_rate": 4.866817101800029e-06, |
| "loss": 0.6995, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.6580056179775281, |
| "grad_norm": 0.8833721280097961, |
| "learning_rate": 4.866668175157372e-06, |
| "loss": 0.659, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.6583567415730337, |
| "grad_norm": 0.830697774887085, |
| "learning_rate": 4.866519167576687e-06, |
| "loss": 0.6501, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.6587078651685393, |
| "grad_norm": 0.8897443413734436, |
| "learning_rate": 4.866370079063073e-06, |
| "loss": 0.6769, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.6590589887640449, |
| "grad_norm": 0.8772610425949097, |
| "learning_rate": 4.866220909621625e-06, |
| "loss": 0.6474, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.6594101123595506, |
| "grad_norm": 0.9042930006980896, |
| "learning_rate": 4.8660716592574486e-06, |
| "loss": 0.6827, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.6597612359550562, |
| "grad_norm": 0.9102811217308044, |
| "learning_rate": 4.865922327975646e-06, |
| "loss": 0.7126, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.6601123595505618, |
| "grad_norm": 0.9031997323036194, |
| "learning_rate": 4.865772915781324e-06, |
| "loss": 0.6963, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.6604634831460674, |
| "grad_norm": 0.89446622133255, |
| "learning_rate": 4.865623422679593e-06, |
| "loss": 0.6718, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.660814606741573, |
| "grad_norm": 0.879215657711029, |
| "learning_rate": 4.865473848675565e-06, |
| "loss": 0.6711, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.6611657303370787, |
| "grad_norm": 0.916566789150238, |
| "learning_rate": 4.865324193774355e-06, |
| "loss": 0.66, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.6615168539325843, |
| "grad_norm": 0.8872631192207336, |
| "learning_rate": 4.865174457981082e-06, |
| "loss": 0.6994, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.6618679775280899, |
| "grad_norm": 0.9287483096122742, |
| "learning_rate": 4.865024641300866e-06, |
| "loss": 0.7071, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.6622191011235955, |
| "grad_norm": 0.8984839916229248, |
| "learning_rate": 4.864874743738832e-06, |
| "loss": 0.6512, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.6625702247191011, |
| "grad_norm": 0.8624014258384705, |
| "learning_rate": 4.864724765300105e-06, |
| "loss": 0.6649, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.6629213483146067, |
| "grad_norm": 0.8853806257247925, |
| "learning_rate": 4.864574705989814e-06, |
| "loss": 0.6562, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.6632724719101124, |
| "grad_norm": 0.9202688336372375, |
| "learning_rate": 4.864424565813092e-06, |
| "loss": 0.7005, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.663623595505618, |
| "grad_norm": 0.9426562786102295, |
| "learning_rate": 4.8642743447750735e-06, |
| "loss": 0.6802, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.6639747191011236, |
| "grad_norm": 0.9027799367904663, |
| "learning_rate": 4.864124042880895e-06, |
| "loss": 0.6718, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.6643258426966292, |
| "grad_norm": 0.9138725399971008, |
| "learning_rate": 4.863973660135697e-06, |
| "loss": 0.7225, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.6646769662921348, |
| "grad_norm": 0.9003073573112488, |
| "learning_rate": 4.863823196544623e-06, |
| "loss": 0.6603, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.6650280898876404, |
| "grad_norm": 0.9345358610153198, |
| "learning_rate": 4.863672652112819e-06, |
| "loss": 0.6648, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.6653792134831461, |
| "grad_norm": 0.9087908864021301, |
| "learning_rate": 4.863522026845432e-06, |
| "loss": 0.701, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.6657303370786517, |
| "grad_norm": 0.889224112033844, |
| "learning_rate": 4.863371320747615e-06, |
| "loss": 0.7157, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.6660814606741573, |
| "grad_norm": 0.8854907751083374, |
| "learning_rate": 4.863220533824521e-06, |
| "loss": 0.6965, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.6664325842696629, |
| "grad_norm": 0.9223704934120178, |
| "learning_rate": 4.863069666081307e-06, |
| "loss": 0.6962, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.6667837078651685, |
| "grad_norm": 0.9045940637588501, |
| "learning_rate": 4.8629187175231314e-06, |
| "loss": 0.6394, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.6671348314606742, |
| "grad_norm": 0.8898254036903381, |
| "learning_rate": 4.862767688155159e-06, |
| "loss": 0.6781, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.6674859550561798, |
| "grad_norm": 0.882418692111969, |
| "learning_rate": 4.862616577982553e-06, |
| "loss": 0.6654, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.6678370786516854, |
| "grad_norm": 0.8898456692695618, |
| "learning_rate": 4.862465387010482e-06, |
| "loss": 0.6733, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.668188202247191, |
| "grad_norm": 0.9240460395812988, |
| "learning_rate": 4.862314115244116e-06, |
| "loss": 0.6661, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.6685393258426966, |
| "grad_norm": 0.8646409511566162, |
| "learning_rate": 4.862162762688629e-06, |
| "loss": 0.6751, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.6688904494382022, |
| "grad_norm": 0.9161674976348877, |
| "learning_rate": 4.862011329349196e-06, |
| "loss": 0.6762, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.6692415730337079, |
| "grad_norm": 0.8989123702049255, |
| "learning_rate": 4.861859815230998e-06, |
| "loss": 0.6948, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.6695926966292135, |
| "grad_norm": 0.9007300138473511, |
| "learning_rate": 4.861708220339215e-06, |
| "loss": 0.6954, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.6699438202247191, |
| "grad_norm": 0.9009419679641724, |
| "learning_rate": 4.861556544679032e-06, |
| "loss": 0.6968, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.6702949438202247, |
| "grad_norm": 0.8684524297714233, |
| "learning_rate": 4.861404788255636e-06, |
| "loss": 0.6548, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.6706460674157303, |
| "grad_norm": 0.8866965174674988, |
| "learning_rate": 4.861252951074216e-06, |
| "loss": 0.6867, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.670997191011236, |
| "grad_norm": 0.8514602184295654, |
| "learning_rate": 4.861101033139967e-06, |
| "loss": 0.6677, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.6713483146067416, |
| "grad_norm": 0.9115993976593018, |
| "learning_rate": 4.8609490344580815e-06, |
| "loss": 0.7163, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.6716994382022472, |
| "grad_norm": 0.9031485319137573, |
| "learning_rate": 4.86079695503376e-06, |
| "loss": 0.6557, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.6720505617977528, |
| "grad_norm": 0.9129601716995239, |
| "learning_rate": 4.8606447948722036e-06, |
| "loss": 0.6857, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.6724016853932584, |
| "grad_norm": 0.8930435180664062, |
| "learning_rate": 4.860492553978614e-06, |
| "loss": 0.6888, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.672752808988764, |
| "grad_norm": 0.8881438374519348, |
| "learning_rate": 4.860340232358199e-06, |
| "loss": 0.6979, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.6731039325842697, |
| "grad_norm": 0.8837933540344238, |
| "learning_rate": 4.860187830016169e-06, |
| "loss": 0.6729, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.6734550561797753, |
| "grad_norm": 0.8822264671325684, |
| "learning_rate": 4.860035346957734e-06, |
| "loss": 0.6883, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.6738061797752809, |
| "grad_norm": 0.8592543005943298, |
| "learning_rate": 4.8598827831881105e-06, |
| "loss": 0.6579, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.6741573033707865, |
| "grad_norm": 0.8520956039428711, |
| "learning_rate": 4.859730138712514e-06, |
| "loss": 0.6547, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.6745084269662921, |
| "grad_norm": 0.8868786096572876, |
| "learning_rate": 4.859577413536167e-06, |
| "loss": 0.6834, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.6748595505617978, |
| "grad_norm": 0.894353449344635, |
| "learning_rate": 4.859424607664291e-06, |
| "loss": 0.6622, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.6752106741573034, |
| "grad_norm": 0.8527143597602844, |
| "learning_rate": 4.859271721102112e-06, |
| "loss": 0.6557, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.675561797752809, |
| "grad_norm": 0.840531587600708, |
| "learning_rate": 4.85911875385486e-06, |
| "loss": 0.6392, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.6759129213483146, |
| "grad_norm": 0.8790932297706604, |
| "learning_rate": 4.858965705927765e-06, |
| "loss": 0.7031, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.6762640449438202, |
| "grad_norm": 0.8507084250450134, |
| "learning_rate": 4.85881257732606e-06, |
| "loss": 0.6459, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.6766151685393258, |
| "grad_norm": 0.894951581954956, |
| "learning_rate": 4.858659368054986e-06, |
| "loss": 0.6885, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.6769662921348315, |
| "grad_norm": 0.9011364579200745, |
| "learning_rate": 4.858506078119778e-06, |
| "loss": 0.7135, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.6773174157303371, |
| "grad_norm": 0.8751960396766663, |
| "learning_rate": 4.858352707525681e-06, |
| "loss": 0.6964, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.6776685393258427, |
| "grad_norm": 0.8583717942237854, |
| "learning_rate": 4.85819925627794e-06, |
| "loss": 0.6495, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.6780196629213483, |
| "grad_norm": 0.9073567390441895, |
| "learning_rate": 4.858045724381801e-06, |
| "loss": 0.6693, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.6783707865168539, |
| "grad_norm": 0.875085711479187, |
| "learning_rate": 4.857892111842517e-06, |
| "loss": 0.6552, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.6787219101123596, |
| "grad_norm": 0.890121340751648, |
| "learning_rate": 4.8577384186653405e-06, |
| "loss": 0.6816, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.6790730337078652, |
| "grad_norm": 0.9079762697219849, |
| "learning_rate": 4.8575846448555275e-06, |
| "loss": 0.7012, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.6794241573033708, |
| "grad_norm": 0.9067320823669434, |
| "learning_rate": 4.857430790418337e-06, |
| "loss": 0.6707, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.6797752808988764, |
| "grad_norm": 0.863487184047699, |
| "learning_rate": 4.857276855359031e-06, |
| "loss": 0.6532, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.680126404494382, |
| "grad_norm": 0.887187659740448, |
| "learning_rate": 4.857122839682873e-06, |
| "loss": 0.68, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.6804775280898876, |
| "grad_norm": 0.9105770587921143, |
| "learning_rate": 4.856968743395132e-06, |
| "loss": 0.7263, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.6808286516853933, |
| "grad_norm": 0.859266459941864, |
| "learning_rate": 4.856814566501075e-06, |
| "loss": 0.6612, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.6811797752808989, |
| "grad_norm": 0.8715479969978333, |
| "learning_rate": 4.856660309005979e-06, |
| "loss": 0.7078, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.6815308988764045, |
| "grad_norm": 0.8754439949989319, |
| "learning_rate": 4.856505970915115e-06, |
| "loss": 0.7088, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.6818820224719101, |
| "grad_norm": 0.8877387642860413, |
| "learning_rate": 4.856351552233764e-06, |
| "loss": 0.6944, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.6822331460674157, |
| "grad_norm": 0.9069793224334717, |
| "learning_rate": 4.856197052967207e-06, |
| "loss": 0.6567, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.6825842696629213, |
| "grad_norm": 0.9112107157707214, |
| "learning_rate": 4.856042473120726e-06, |
| "loss": 0.6617, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.682935393258427, |
| "grad_norm": 0.9331689476966858, |
| "learning_rate": 4.855887812699609e-06, |
| "loss": 0.7051, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.6832865168539326, |
| "grad_norm": 0.909070611000061, |
| "learning_rate": 4.855733071709145e-06, |
| "loss": 0.6827, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.6836376404494382, |
| "grad_norm": 0.9068970084190369, |
| "learning_rate": 4.855578250154624e-06, |
| "loss": 0.658, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.6839887640449438, |
| "grad_norm": 0.9052385687828064, |
| "learning_rate": 4.855423348041345e-06, |
| "loss": 0.6991, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.6843398876404494, |
| "grad_norm": 0.8857588768005371, |
| "learning_rate": 4.855268365374601e-06, |
| "loss": 0.6604, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.6846910112359551, |
| "grad_norm": 0.8484088778495789, |
| "learning_rate": 4.855113302159697e-06, |
| "loss": 0.6591, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.6850421348314607, |
| "grad_norm": 0.9859545826911926, |
| "learning_rate": 4.854958158401931e-06, |
| "loss": 0.6652, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.6853932584269663, |
| "grad_norm": 0.8704702854156494, |
| "learning_rate": 4.854802934106611e-06, |
| "loss": 0.6792, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.6857443820224719, |
| "grad_norm": 0.8603319525718689, |
| "learning_rate": 4.854647629279047e-06, |
| "loss": 0.6768, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.6860955056179775, |
| "grad_norm": 0.9137224555015564, |
| "learning_rate": 4.854492243924549e-06, |
| "loss": 0.6865, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.6864466292134831, |
| "grad_norm": 0.8924287557601929, |
| "learning_rate": 4.8543367780484304e-06, |
| "loss": 0.685, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.6867977528089888, |
| "grad_norm": 0.8736245632171631, |
| "learning_rate": 4.854181231656009e-06, |
| "loss": 0.6747, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.6871488764044944, |
| "grad_norm": 0.8849700093269348, |
| "learning_rate": 4.854025604752604e-06, |
| "loss": 0.679, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.6875, |
| "grad_norm": 0.9219119548797607, |
| "learning_rate": 4.853869897343537e-06, |
| "loss": 0.7069, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.6878511235955056, |
| "grad_norm": 0.8864405751228333, |
| "learning_rate": 4.853714109434134e-06, |
| "loss": 0.648, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.6882022471910112, |
| "grad_norm": 0.9157871603965759, |
| "learning_rate": 4.853558241029723e-06, |
| "loss": 0.6838, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.6885533707865169, |
| "grad_norm": 0.8845330476760864, |
| "learning_rate": 4.853402292135634e-06, |
| "loss": 0.67, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.6889044943820225, |
| "grad_norm": 0.8602401614189148, |
| "learning_rate": 4.8532462627572e-06, |
| "loss": 0.6661, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.6892556179775281, |
| "grad_norm": 0.9007517695426941, |
| "learning_rate": 4.853090152899758e-06, |
| "loss": 0.6717, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.6896067415730337, |
| "grad_norm": 0.8941755890846252, |
| "learning_rate": 4.852933962568647e-06, |
| "loss": 0.6824, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.6899578651685393, |
| "grad_norm": 0.9077978134155273, |
| "learning_rate": 4.852777691769207e-06, |
| "loss": 0.695, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.6903089887640449, |
| "grad_norm": 0.9053329229354858, |
| "learning_rate": 4.852621340506782e-06, |
| "loss": 0.6781, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.6906601123595506, |
| "grad_norm": 0.885201096534729, |
| "learning_rate": 4.852464908786722e-06, |
| "loss": 0.6307, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.6910112359550562, |
| "grad_norm": 0.9176447987556458, |
| "learning_rate": 4.8523083966143755e-06, |
| "loss": 0.6797, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.6913623595505618, |
| "grad_norm": 0.8907782435417175, |
| "learning_rate": 4.852151803995094e-06, |
| "loss": 0.661, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.6917134831460674, |
| "grad_norm": 0.9267505407333374, |
| "learning_rate": 4.8519951309342335e-06, |
| "loss": 0.6711, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.692064606741573, |
| "grad_norm": 0.9260626435279846, |
| "learning_rate": 4.8518383774371525e-06, |
| "loss": 0.6667, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.6924157303370787, |
| "grad_norm": 0.947289228439331, |
| "learning_rate": 4.851681543509211e-06, |
| "loss": 0.6802, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.6927668539325843, |
| "grad_norm": 0.9016532897949219, |
| "learning_rate": 4.851524629155773e-06, |
| "loss": 0.6788, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.6931179775280899, |
| "grad_norm": 0.926960825920105, |
| "learning_rate": 4.851367634382206e-06, |
| "loss": 0.6868, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.6934691011235955, |
| "grad_norm": 0.886313259601593, |
| "learning_rate": 4.851210559193877e-06, |
| "loss": 0.664, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.6938202247191011, |
| "grad_norm": 0.8867488503456116, |
| "learning_rate": 4.851053403596158e-06, |
| "loss": 0.6903, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.6941713483146067, |
| "grad_norm": 0.8759608864784241, |
| "learning_rate": 4.850896167594426e-06, |
| "loss": 0.6542, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.6945224719101124, |
| "grad_norm": 0.8738287091255188, |
| "learning_rate": 4.8507388511940565e-06, |
| "loss": 0.6844, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.694873595505618, |
| "grad_norm": 0.875167191028595, |
| "learning_rate": 4.85058145440043e-06, |
| "loss": 0.6515, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.6952247191011236, |
| "grad_norm": 0.8686906099319458, |
| "learning_rate": 4.850423977218929e-06, |
| "loss": 0.6558, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.6955758426966292, |
| "grad_norm": 0.8697443604469299, |
| "learning_rate": 4.850266419654939e-06, |
| "loss": 0.6431, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.6959269662921348, |
| "grad_norm": 0.9007163047790527, |
| "learning_rate": 4.850108781713848e-06, |
| "loss": 0.6996, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.6962780898876404, |
| "grad_norm": 0.9064307808876038, |
| "learning_rate": 4.849951063401049e-06, |
| "loss": 0.6625, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.6966292134831461, |
| "grad_norm": 0.9589636921882629, |
| "learning_rate": 4.849793264721934e-06, |
| "loss": 0.7276, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.6969803370786517, |
| "grad_norm": 0.9150680899620056, |
| "learning_rate": 4.8496353856819e-06, |
| "loss": 0.6808, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.6973314606741573, |
| "grad_norm": 0.9379653334617615, |
| "learning_rate": 4.849477426286346e-06, |
| "loss": 0.7148, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.6976825842696629, |
| "grad_norm": 0.8566982746124268, |
| "learning_rate": 4.849319386540675e-06, |
| "loss": 0.676, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.6980337078651685, |
| "grad_norm": 0.9016757607460022, |
| "learning_rate": 4.84916126645029e-06, |
| "loss": 0.6672, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.6983848314606742, |
| "grad_norm": 0.9286409020423889, |
| "learning_rate": 4.849003066020602e-06, |
| "loss": 0.7028, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.6987359550561798, |
| "grad_norm": 0.8700134754180908, |
| "learning_rate": 4.848844785257019e-06, |
| "loss": 0.6919, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.6990870786516854, |
| "grad_norm": 0.8518657088279724, |
| "learning_rate": 4.848686424164953e-06, |
| "loss": 0.663, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.699438202247191, |
| "grad_norm": 0.8883944749832153, |
| "learning_rate": 4.848527982749822e-06, |
| "loss": 0.6249, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.6997893258426966, |
| "grad_norm": 0.9609950184822083, |
| "learning_rate": 4.848369461017043e-06, |
| "loss": 0.7114, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.7001404494382022, |
| "grad_norm": 0.8920437097549438, |
| "learning_rate": 4.848210858972039e-06, |
| "loss": 0.7093, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.7004915730337079, |
| "grad_norm": 0.9256541132926941, |
| "learning_rate": 4.848052176620232e-06, |
| "loss": 0.6713, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.7008426966292135, |
| "grad_norm": 0.8984493613243103, |
| "learning_rate": 4.847893413967049e-06, |
| "loss": 0.6921, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.7011938202247191, |
| "grad_norm": 0.889815628528595, |
| "learning_rate": 4.847734571017923e-06, |
| "loss": 0.6539, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.7015449438202247, |
| "grad_norm": 0.856476366519928, |
| "learning_rate": 4.847575647778281e-06, |
| "loss": 0.6538, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.7018960674157303, |
| "grad_norm": 0.9042733311653137, |
| "learning_rate": 4.847416644253562e-06, |
| "loss": 0.672, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.702247191011236, |
| "grad_norm": 0.847193717956543, |
| "learning_rate": 4.847257560449202e-06, |
| "loss": 0.6296, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.7025983146067416, |
| "grad_norm": 0.922281801700592, |
| "learning_rate": 4.847098396370643e-06, |
| "loss": 0.6306, |
| "step": 2001 |
| }, |
| { |
| "epoch": 0.7029494382022472, |
| "grad_norm": 0.9229936599731445, |
| "learning_rate": 4.846939152023326e-06, |
| "loss": 0.7127, |
| "step": 2002 |
| }, |
| { |
| "epoch": 0.7033005617977528, |
| "grad_norm": 0.8789034485816956, |
| "learning_rate": 4.8467798274126995e-06, |
| "loss": 0.6477, |
| "step": 2003 |
| }, |
| { |
| "epoch": 0.7036516853932584, |
| "grad_norm": 0.87799471616745, |
| "learning_rate": 4.84662042254421e-06, |
| "loss": 0.646, |
| "step": 2004 |
| }, |
| { |
| "epoch": 0.704002808988764, |
| "grad_norm": 0.8978516459465027, |
| "learning_rate": 4.846460937423311e-06, |
| "loss": 0.6327, |
| "step": 2005 |
| }, |
| { |
| "epoch": 0.7043539325842697, |
| "grad_norm": 0.9151999354362488, |
| "learning_rate": 4.846301372055455e-06, |
| "loss": 0.6726, |
| "step": 2006 |
| }, |
| { |
| "epoch": 0.7047050561797753, |
| "grad_norm": 0.9256817698478699, |
| "learning_rate": 4.846141726446101e-06, |
| "loss": 0.6842, |
| "step": 2007 |
| }, |
| { |
| "epoch": 0.7050561797752809, |
| "grad_norm": 0.89638352394104, |
| "learning_rate": 4.845982000600706e-06, |
| "loss": 0.6792, |
| "step": 2008 |
| }, |
| { |
| "epoch": 0.7054073033707865, |
| "grad_norm": 0.8957139849662781, |
| "learning_rate": 4.845822194524735e-06, |
| "loss": 0.7032, |
| "step": 2009 |
| }, |
| { |
| "epoch": 0.7057584269662921, |
| "grad_norm": 0.9059911370277405, |
| "learning_rate": 4.845662308223652e-06, |
| "loss": 0.7272, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.7061095505617978, |
| "grad_norm": 0.8813321590423584, |
| "learning_rate": 4.845502341702926e-06, |
| "loss": 0.6918, |
| "step": 2011 |
| }, |
| { |
| "epoch": 0.7064606741573034, |
| "grad_norm": 0.9412849545478821, |
| "learning_rate": 4.845342294968026e-06, |
| "loss": 0.7301, |
| "step": 2012 |
| }, |
| { |
| "epoch": 0.706811797752809, |
| "grad_norm": 0.9104598164558411, |
| "learning_rate": 4.845182168024427e-06, |
| "loss": 0.6655, |
| "step": 2013 |
| }, |
| { |
| "epoch": 0.7071629213483146, |
| "grad_norm": 0.8984184861183167, |
| "learning_rate": 4.845021960877605e-06, |
| "loss": 0.6816, |
| "step": 2014 |
| }, |
| { |
| "epoch": 0.7075140449438202, |
| "grad_norm": 0.9031736850738525, |
| "learning_rate": 4.844861673533038e-06, |
| "loss": 0.6941, |
| "step": 2015 |
| }, |
| { |
| "epoch": 0.7078651685393258, |
| "grad_norm": 0.9205414652824402, |
| "learning_rate": 4.8447013059962085e-06, |
| "loss": 0.6976, |
| "step": 2016 |
| }, |
| { |
| "epoch": 0.7082162921348315, |
| "grad_norm": 0.9202782511711121, |
| "learning_rate": 4.8445408582726e-06, |
| "loss": 0.6859, |
| "step": 2017 |
| }, |
| { |
| "epoch": 0.7085674157303371, |
| "grad_norm": 0.9281151294708252, |
| "learning_rate": 4.844380330367701e-06, |
| "loss": 0.6827, |
| "step": 2018 |
| }, |
| { |
| "epoch": 0.7089185393258427, |
| "grad_norm": 0.8505613207817078, |
| "learning_rate": 4.844219722287001e-06, |
| "loss": 0.6198, |
| "step": 2019 |
| }, |
| { |
| "epoch": 0.7092696629213483, |
| "grad_norm": 0.8852872252464294, |
| "learning_rate": 4.844059034035993e-06, |
| "loss": 0.7026, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.7096207865168539, |
| "grad_norm": 0.8684709072113037, |
| "learning_rate": 4.843898265620171e-06, |
| "loss": 0.6602, |
| "step": 2021 |
| }, |
| { |
| "epoch": 0.7099719101123596, |
| "grad_norm": 0.9983372688293457, |
| "learning_rate": 4.843737417045035e-06, |
| "loss": 0.6646, |
| "step": 2022 |
| }, |
| { |
| "epoch": 0.7103230337078652, |
| "grad_norm": 0.8986789584159851, |
| "learning_rate": 4.843576488316084e-06, |
| "loss": 0.7246, |
| "step": 2023 |
| }, |
| { |
| "epoch": 0.7106741573033708, |
| "grad_norm": 0.898418664932251, |
| "learning_rate": 4.843415479438822e-06, |
| "loss": 0.7011, |
| "step": 2024 |
| }, |
| { |
| "epoch": 0.7110252808988764, |
| "grad_norm": 0.9410746097564697, |
| "learning_rate": 4.843254390418757e-06, |
| "loss": 0.6753, |
| "step": 2025 |
| }, |
| { |
| "epoch": 0.711376404494382, |
| "grad_norm": 0.9196873307228088, |
| "learning_rate": 4.843093221261396e-06, |
| "loss": 0.7028, |
| "step": 2026 |
| }, |
| { |
| "epoch": 0.7117275280898876, |
| "grad_norm": 0.8739240169525146, |
| "learning_rate": 4.842931971972252e-06, |
| "loss": 0.6477, |
| "step": 2027 |
| }, |
| { |
| "epoch": 0.7120786516853933, |
| "grad_norm": 0.863125205039978, |
| "learning_rate": 4.842770642556841e-06, |
| "loss": 0.6855, |
| "step": 2028 |
| }, |
| { |
| "epoch": 0.7124297752808989, |
| "grad_norm": 0.8746199607849121, |
| "learning_rate": 4.842609233020677e-06, |
| "loss": 0.6368, |
| "step": 2029 |
| }, |
| { |
| "epoch": 0.7127808988764045, |
| "grad_norm": 0.9002125859260559, |
| "learning_rate": 4.842447743369283e-06, |
| "loss": 0.666, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.7131320224719101, |
| "grad_norm": 0.8976905345916748, |
| "learning_rate": 4.84228617360818e-06, |
| "loss": 0.6807, |
| "step": 2031 |
| }, |
| { |
| "epoch": 0.7134831460674157, |
| "grad_norm": 0.9237125515937805, |
| "learning_rate": 4.8421245237428935e-06, |
| "loss": 0.6749, |
| "step": 2032 |
| }, |
| { |
| "epoch": 0.7138342696629213, |
| "grad_norm": 0.8990357518196106, |
| "learning_rate": 4.841962793778954e-06, |
| "loss": 0.7136, |
| "step": 2033 |
| }, |
| { |
| "epoch": 0.714185393258427, |
| "grad_norm": 0.9369171857833862, |
| "learning_rate": 4.84180098372189e-06, |
| "loss": 0.7157, |
| "step": 2034 |
| }, |
| { |
| "epoch": 0.7145365168539326, |
| "grad_norm": 0.9113399386405945, |
| "learning_rate": 4.841639093577236e-06, |
| "loss": 0.6733, |
| "step": 2035 |
| }, |
| { |
| "epoch": 0.7148876404494382, |
| "grad_norm": 0.9038696885108948, |
| "learning_rate": 4.84147712335053e-06, |
| "loss": 0.7065, |
| "step": 2036 |
| }, |
| { |
| "epoch": 0.7152387640449438, |
| "grad_norm": 0.8818309903144836, |
| "learning_rate": 4.841315073047309e-06, |
| "loss": 0.6633, |
| "step": 2037 |
| }, |
| { |
| "epoch": 0.7155898876404494, |
| "grad_norm": 0.8951486349105835, |
| "learning_rate": 4.841152942673116e-06, |
| "loss": 0.6965, |
| "step": 2038 |
| }, |
| { |
| "epoch": 0.7159410112359551, |
| "grad_norm": 0.8958115577697754, |
| "learning_rate": 4.840990732233496e-06, |
| "loss": 0.6536, |
| "step": 2039 |
| }, |
| { |
| "epoch": 0.7162921348314607, |
| "grad_norm": 0.8991239070892334, |
| "learning_rate": 4.840828441733996e-06, |
| "loss": 0.7002, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.7166432584269663, |
| "grad_norm": 0.8847957849502563, |
| "learning_rate": 4.840666071180167e-06, |
| "loss": 0.7011, |
| "step": 2041 |
| }, |
| { |
| "epoch": 0.7169943820224719, |
| "grad_norm": 0.8697375655174255, |
| "learning_rate": 4.840503620577561e-06, |
| "loss": 0.6869, |
| "step": 2042 |
| }, |
| { |
| "epoch": 0.7173455056179775, |
| "grad_norm": 0.8789458274841309, |
| "learning_rate": 4.840341089931734e-06, |
| "loss": 0.6624, |
| "step": 2043 |
| }, |
| { |
| "epoch": 0.7176966292134831, |
| "grad_norm": 0.868335485458374, |
| "learning_rate": 4.8401784792482436e-06, |
| "loss": 0.6624, |
| "step": 2044 |
| }, |
| { |
| "epoch": 0.7180477528089888, |
| "grad_norm": 0.8949484825134277, |
| "learning_rate": 4.840015788532653e-06, |
| "loss": 0.6621, |
| "step": 2045 |
| }, |
| { |
| "epoch": 0.7183988764044944, |
| "grad_norm": 0.8744524717330933, |
| "learning_rate": 4.839853017790523e-06, |
| "loss": 0.6055, |
| "step": 2046 |
| }, |
| { |
| "epoch": 0.71875, |
| "grad_norm": 0.8852532505989075, |
| "learning_rate": 4.8396901670274236e-06, |
| "loss": 0.7299, |
| "step": 2047 |
| }, |
| { |
| "epoch": 0.7191011235955056, |
| "grad_norm": 0.8787648677825928, |
| "learning_rate": 4.839527236248922e-06, |
| "loss": 0.6687, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.7194522471910112, |
| "grad_norm": 0.8882533311843872, |
| "learning_rate": 4.839364225460591e-06, |
| "loss": 0.6488, |
| "step": 2049 |
| }, |
| { |
| "epoch": 0.7198033707865169, |
| "grad_norm": 0.8676881194114685, |
| "learning_rate": 4.839201134668006e-06, |
| "loss": 0.7084, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.7201544943820225, |
| "grad_norm": 0.9025247097015381, |
| "learning_rate": 4.839037963876743e-06, |
| "loss": 0.6805, |
| "step": 2051 |
| }, |
| { |
| "epoch": 0.7205056179775281, |
| "grad_norm": 0.8956056833267212, |
| "learning_rate": 4.838874713092383e-06, |
| "loss": 0.6905, |
| "step": 2052 |
| }, |
| { |
| "epoch": 0.7208567415730337, |
| "grad_norm": 0.8418490290641785, |
| "learning_rate": 4.8387113823205104e-06, |
| "loss": 0.6224, |
| "step": 2053 |
| }, |
| { |
| "epoch": 0.7212078651685393, |
| "grad_norm": 0.897898256778717, |
| "learning_rate": 4.838547971566708e-06, |
| "loss": 0.6505, |
| "step": 2054 |
| }, |
| { |
| "epoch": 0.7215589887640449, |
| "grad_norm": 0.9430592060089111, |
| "learning_rate": 4.838384480836568e-06, |
| "loss": 0.7031, |
| "step": 2055 |
| }, |
| { |
| "epoch": 0.7219101123595506, |
| "grad_norm": 0.9099498987197876, |
| "learning_rate": 4.838220910135678e-06, |
| "loss": 0.7372, |
| "step": 2056 |
| }, |
| { |
| "epoch": 0.7222612359550562, |
| "grad_norm": 0.8936906456947327, |
| "learning_rate": 4.838057259469635e-06, |
| "loss": 0.6782, |
| "step": 2057 |
| }, |
| { |
| "epoch": 0.7226123595505618, |
| "grad_norm": 0.8931390643119812, |
| "learning_rate": 4.8378935288440346e-06, |
| "loss": 0.6552, |
| "step": 2058 |
| }, |
| { |
| "epoch": 0.7229634831460674, |
| "grad_norm": 0.893640398979187, |
| "learning_rate": 4.837729718264475e-06, |
| "loss": 0.6971, |
| "step": 2059 |
| }, |
| { |
| "epoch": 0.723314606741573, |
| "grad_norm": 0.9098536968231201, |
| "learning_rate": 4.83756582773656e-06, |
| "loss": 0.6816, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.7236657303370787, |
| "grad_norm": 0.8403578400611877, |
| "learning_rate": 4.837401857265894e-06, |
| "loss": 0.6233, |
| "step": 2061 |
| }, |
| { |
| "epoch": 0.7240168539325843, |
| "grad_norm": 0.894605815410614, |
| "learning_rate": 4.8372378068580835e-06, |
| "loss": 0.6747, |
| "step": 2062 |
| }, |
| { |
| "epoch": 0.7243679775280899, |
| "grad_norm": 0.8893696665763855, |
| "learning_rate": 4.837073676518742e-06, |
| "loss": 0.6769, |
| "step": 2063 |
| }, |
| { |
| "epoch": 0.7247191011235955, |
| "grad_norm": 0.9210422039031982, |
| "learning_rate": 4.836909466253479e-06, |
| "loss": 0.7177, |
| "step": 2064 |
| }, |
| { |
| "epoch": 0.7250702247191011, |
| "grad_norm": 0.9131370782852173, |
| "learning_rate": 4.836745176067913e-06, |
| "loss": 0.6899, |
| "step": 2065 |
| }, |
| { |
| "epoch": 0.7254213483146067, |
| "grad_norm": 0.8986336588859558, |
| "learning_rate": 4.836580805967661e-06, |
| "loss": 0.68, |
| "step": 2066 |
| }, |
| { |
| "epoch": 0.7257724719101124, |
| "grad_norm": 0.8927890658378601, |
| "learning_rate": 4.836416355958345e-06, |
| "loss": 0.6971, |
| "step": 2067 |
| }, |
| { |
| "epoch": 0.726123595505618, |
| "grad_norm": 0.9166001677513123, |
| "learning_rate": 4.836251826045589e-06, |
| "loss": 0.7008, |
| "step": 2068 |
| }, |
| { |
| "epoch": 0.7264747191011236, |
| "grad_norm": 0.8931081891059875, |
| "learning_rate": 4.836087216235021e-06, |
| "loss": 0.6816, |
| "step": 2069 |
| }, |
| { |
| "epoch": 0.7268258426966292, |
| "grad_norm": 0.8896619081497192, |
| "learning_rate": 4.835922526532267e-06, |
| "loss": 0.7011, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.7271769662921348, |
| "grad_norm": 0.8900657296180725, |
| "learning_rate": 4.835757756942963e-06, |
| "loss": 0.6754, |
| "step": 2071 |
| }, |
| { |
| "epoch": 0.7275280898876404, |
| "grad_norm": 0.860923707485199, |
| "learning_rate": 4.8355929074727415e-06, |
| "loss": 0.6407, |
| "step": 2072 |
| }, |
| { |
| "epoch": 0.7278792134831461, |
| "grad_norm": 0.8783923983573914, |
| "learning_rate": 4.835427978127241e-06, |
| "loss": 0.7052, |
| "step": 2073 |
| }, |
| { |
| "epoch": 0.7282303370786517, |
| "grad_norm": 0.9070163369178772, |
| "learning_rate": 4.835262968912103e-06, |
| "loss": 0.7003, |
| "step": 2074 |
| }, |
| { |
| "epoch": 0.7285814606741573, |
| "grad_norm": 1.025547742843628, |
| "learning_rate": 4.83509787983297e-06, |
| "loss": 0.6829, |
| "step": 2075 |
| }, |
| { |
| "epoch": 0.7289325842696629, |
| "grad_norm": 0.9013466835021973, |
| "learning_rate": 4.834932710895486e-06, |
| "loss": 0.6475, |
| "step": 2076 |
| }, |
| { |
| "epoch": 0.7292837078651685, |
| "grad_norm": 0.894381582736969, |
| "learning_rate": 4.834767462105303e-06, |
| "loss": 0.6414, |
| "step": 2077 |
| }, |
| { |
| "epoch": 0.7296348314606742, |
| "grad_norm": 0.8694182634353638, |
| "learning_rate": 4.83460213346807e-06, |
| "loss": 0.6442, |
| "step": 2078 |
| }, |
| { |
| "epoch": 0.7299859550561798, |
| "grad_norm": 0.8485740423202515, |
| "learning_rate": 4.834436724989442e-06, |
| "loss": 0.6105, |
| "step": 2079 |
| }, |
| { |
| "epoch": 0.7303370786516854, |
| "grad_norm": 0.8988609910011292, |
| "learning_rate": 4.834271236675076e-06, |
| "loss": 0.6615, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.730688202247191, |
| "grad_norm": 0.9217643141746521, |
| "learning_rate": 4.8341056685306295e-06, |
| "loss": 0.7194, |
| "step": 2081 |
| }, |
| { |
| "epoch": 0.7310393258426966, |
| "grad_norm": 0.8651581406593323, |
| "learning_rate": 4.833940020561768e-06, |
| "loss": 0.6831, |
| "step": 2082 |
| }, |
| { |
| "epoch": 0.7313904494382022, |
| "grad_norm": 0.9073237776756287, |
| "learning_rate": 4.833774292774154e-06, |
| "loss": 0.7034, |
| "step": 2083 |
| }, |
| { |
| "epoch": 0.7317415730337079, |
| "grad_norm": 0.9331315159797668, |
| "learning_rate": 4.833608485173458e-06, |
| "loss": 0.7328, |
| "step": 2084 |
| }, |
| { |
| "epoch": 0.7320926966292135, |
| "grad_norm": 0.9014981985092163, |
| "learning_rate": 4.833442597765346e-06, |
| "loss": 0.6776, |
| "step": 2085 |
| }, |
| { |
| "epoch": 0.7324438202247191, |
| "grad_norm": 0.8719879388809204, |
| "learning_rate": 4.833276630555496e-06, |
| "loss": 0.6672, |
| "step": 2086 |
| }, |
| { |
| "epoch": 0.7327949438202247, |
| "grad_norm": 0.8750829100608826, |
| "learning_rate": 4.8331105835495816e-06, |
| "loss": 0.6344, |
| "step": 2087 |
| }, |
| { |
| "epoch": 0.7331460674157303, |
| "grad_norm": 0.8622833490371704, |
| "learning_rate": 4.832944456753281e-06, |
| "loss": 0.6453, |
| "step": 2088 |
| }, |
| { |
| "epoch": 0.733497191011236, |
| "grad_norm": 0.8901452422142029, |
| "learning_rate": 4.8327782501722766e-06, |
| "loss": 0.6809, |
| "step": 2089 |
| }, |
| { |
| "epoch": 0.7338483146067416, |
| "grad_norm": 0.9039089679718018, |
| "learning_rate": 4.8326119638122525e-06, |
| "loss": 0.7185, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.7341994382022472, |
| "grad_norm": 0.9443939328193665, |
| "learning_rate": 4.832445597678895e-06, |
| "loss": 0.6647, |
| "step": 2091 |
| }, |
| { |
| "epoch": 0.7345505617977528, |
| "grad_norm": 0.9074206352233887, |
| "learning_rate": 4.8322791517778945e-06, |
| "loss": 0.6696, |
| "step": 2092 |
| }, |
| { |
| "epoch": 0.7349016853932584, |
| "grad_norm": 0.9349908232688904, |
| "learning_rate": 4.832112626114942e-06, |
| "loss": 0.6735, |
| "step": 2093 |
| }, |
| { |
| "epoch": 0.735252808988764, |
| "grad_norm": 0.8699564933776855, |
| "learning_rate": 4.831946020695733e-06, |
| "loss": 0.7143, |
| "step": 2094 |
| }, |
| { |
| "epoch": 0.7356039325842697, |
| "grad_norm": 0.8945705890655518, |
| "learning_rate": 4.831779335525967e-06, |
| "loss": 0.6748, |
| "step": 2095 |
| }, |
| { |
| "epoch": 0.7359550561797753, |
| "grad_norm": 0.8274276256561279, |
| "learning_rate": 4.831612570611343e-06, |
| "loss": 0.647, |
| "step": 2096 |
| }, |
| { |
| "epoch": 0.7363061797752809, |
| "grad_norm": 0.8812181949615479, |
| "learning_rate": 4.831445725957563e-06, |
| "loss": 0.6644, |
| "step": 2097 |
| }, |
| { |
| "epoch": 0.7366573033707865, |
| "grad_norm": 0.8417778015136719, |
| "learning_rate": 4.831278801570335e-06, |
| "loss": 0.613, |
| "step": 2098 |
| }, |
| { |
| "epoch": 0.7370084269662921, |
| "grad_norm": 0.862417459487915, |
| "learning_rate": 4.831111797455366e-06, |
| "loss": 0.6592, |
| "step": 2099 |
| }, |
| { |
| "epoch": 0.7373595505617978, |
| "grad_norm": 0.8994525074958801, |
| "learning_rate": 4.830944713618369e-06, |
| "loss": 0.7038, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.7377106741573034, |
| "grad_norm": 0.8670651316642761, |
| "learning_rate": 4.830777550065058e-06, |
| "loss": 0.6344, |
| "step": 2101 |
| }, |
| { |
| "epoch": 0.738061797752809, |
| "grad_norm": 0.9172879457473755, |
| "learning_rate": 4.830610306801148e-06, |
| "loss": 0.657, |
| "step": 2102 |
| }, |
| { |
| "epoch": 0.7384129213483146, |
| "grad_norm": 0.8903508186340332, |
| "learning_rate": 4.830442983832361e-06, |
| "loss": 0.6419, |
| "step": 2103 |
| }, |
| { |
| "epoch": 0.7387640449438202, |
| "grad_norm": 0.913711428642273, |
| "learning_rate": 4.830275581164417e-06, |
| "loss": 0.6551, |
| "step": 2104 |
| }, |
| { |
| "epoch": 0.7391151685393258, |
| "grad_norm": 0.8964325189590454, |
| "learning_rate": 4.830108098803043e-06, |
| "loss": 0.6839, |
| "step": 2105 |
| }, |
| { |
| "epoch": 0.7394662921348315, |
| "grad_norm": 0.8451694846153259, |
| "learning_rate": 4.829940536753965e-06, |
| "loss": 0.6846, |
| "step": 2106 |
| }, |
| { |
| "epoch": 0.7398174157303371, |
| "grad_norm": 0.8537585735321045, |
| "learning_rate": 4.829772895022916e-06, |
| "loss": 0.6632, |
| "step": 2107 |
| }, |
| { |
| "epoch": 0.7401685393258427, |
| "grad_norm": 0.882422924041748, |
| "learning_rate": 4.829605173615627e-06, |
| "loss": 0.6827, |
| "step": 2108 |
| }, |
| { |
| "epoch": 0.7405196629213483, |
| "grad_norm": 0.9608381986618042, |
| "learning_rate": 4.829437372537834e-06, |
| "loss": 0.6698, |
| "step": 2109 |
| }, |
| { |
| "epoch": 0.7408707865168539, |
| "grad_norm": 0.8602509498596191, |
| "learning_rate": 4.829269491795277e-06, |
| "loss": 0.6842, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.7412219101123596, |
| "grad_norm": 0.9101216197013855, |
| "learning_rate": 4.829101531393695e-06, |
| "loss": 0.6909, |
| "step": 2111 |
| }, |
| { |
| "epoch": 0.7415730337078652, |
| "grad_norm": 0.9232078194618225, |
| "learning_rate": 4.8289334913388344e-06, |
| "loss": 0.6544, |
| "step": 2112 |
| }, |
| { |
| "epoch": 0.7419241573033708, |
| "grad_norm": 0.9027913212776184, |
| "learning_rate": 4.828765371636442e-06, |
| "loss": 0.7055, |
| "step": 2113 |
| }, |
| { |
| "epoch": 0.7422752808988764, |
| "grad_norm": 0.8847595453262329, |
| "learning_rate": 4.828597172292266e-06, |
| "loss": 0.6585, |
| "step": 2114 |
| }, |
| { |
| "epoch": 0.742626404494382, |
| "grad_norm": 0.9057716131210327, |
| "learning_rate": 4.8284288933120595e-06, |
| "loss": 0.6815, |
| "step": 2115 |
| }, |
| { |
| "epoch": 0.7429775280898876, |
| "grad_norm": 0.833390474319458, |
| "learning_rate": 4.828260534701577e-06, |
| "loss": 0.6113, |
| "step": 2116 |
| }, |
| { |
| "epoch": 0.7433286516853933, |
| "grad_norm": 0.9086408615112305, |
| "learning_rate": 4.828092096466578e-06, |
| "loss": 0.6537, |
| "step": 2117 |
| }, |
| { |
| "epoch": 0.7436797752808989, |
| "grad_norm": 0.9064264297485352, |
| "learning_rate": 4.82792357861282e-06, |
| "loss": 0.71, |
| "step": 2118 |
| }, |
| { |
| "epoch": 0.7440308988764045, |
| "grad_norm": 0.9102289080619812, |
| "learning_rate": 4.827754981146068e-06, |
| "loss": 0.6711, |
| "step": 2119 |
| }, |
| { |
| "epoch": 0.7443820224719101, |
| "grad_norm": 0.9276719093322754, |
| "learning_rate": 4.827586304072087e-06, |
| "loss": 0.6667, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.7447331460674157, |
| "grad_norm": 0.9276329278945923, |
| "learning_rate": 4.827417547396647e-06, |
| "loss": 0.7072, |
| "step": 2121 |
| }, |
| { |
| "epoch": 0.7450842696629213, |
| "grad_norm": 0.8715528249740601, |
| "learning_rate": 4.827248711125518e-06, |
| "loss": 0.6921, |
| "step": 2122 |
| }, |
| { |
| "epoch": 0.745435393258427, |
| "grad_norm": 0.8835285902023315, |
| "learning_rate": 4.827079795264475e-06, |
| "loss": 0.6557, |
| "step": 2123 |
| }, |
| { |
| "epoch": 0.7457865168539326, |
| "grad_norm": 0.9126173853874207, |
| "learning_rate": 4.826910799819293e-06, |
| "loss": 0.6965, |
| "step": 2124 |
| }, |
| { |
| "epoch": 0.7461376404494382, |
| "grad_norm": 0.8943221569061279, |
| "learning_rate": 4.8267417247957545e-06, |
| "loss": 0.6532, |
| "step": 2125 |
| }, |
| { |
| "epoch": 0.7464887640449438, |
| "grad_norm": 0.8731197714805603, |
| "learning_rate": 4.826572570199639e-06, |
| "loss": 0.6462, |
| "step": 2126 |
| }, |
| { |
| "epoch": 0.7468398876404494, |
| "grad_norm": 0.8800104856491089, |
| "learning_rate": 4.826403336036733e-06, |
| "loss": 0.6862, |
| "step": 2127 |
| }, |
| { |
| "epoch": 0.7471910112359551, |
| "grad_norm": 0.8647268414497375, |
| "learning_rate": 4.826234022312823e-06, |
| "loss": 0.6901, |
| "step": 2128 |
| }, |
| { |
| "epoch": 0.7475421348314607, |
| "grad_norm": 0.8699238896369934, |
| "learning_rate": 4.826064629033701e-06, |
| "loss": 0.6848, |
| "step": 2129 |
| }, |
| { |
| "epoch": 0.7478932584269663, |
| "grad_norm": 0.881797730922699, |
| "learning_rate": 4.825895156205158e-06, |
| "loss": 0.7022, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.7482443820224719, |
| "grad_norm": 0.8925601840019226, |
| "learning_rate": 4.825725603832992e-06, |
| "loss": 0.6668, |
| "step": 2131 |
| }, |
| { |
| "epoch": 0.7485955056179775, |
| "grad_norm": 0.874530553817749, |
| "learning_rate": 4.825555971923e-06, |
| "loss": 0.6914, |
| "step": 2132 |
| }, |
| { |
| "epoch": 0.7489466292134831, |
| "grad_norm": 0.8640453815460205, |
| "learning_rate": 4.8253862604809836e-06, |
| "loss": 0.6301, |
| "step": 2133 |
| }, |
| { |
| "epoch": 0.7492977528089888, |
| "grad_norm": 0.9012243151664734, |
| "learning_rate": 4.8252164695127475e-06, |
| "loss": 0.6876, |
| "step": 2134 |
| }, |
| { |
| "epoch": 0.7496488764044944, |
| "grad_norm": 0.8837961554527283, |
| "learning_rate": 4.825046599024098e-06, |
| "loss": 0.6898, |
| "step": 2135 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.8629834651947021, |
| "learning_rate": 4.8248766490208434e-06, |
| "loss": 0.6709, |
| "step": 2136 |
| }, |
| { |
| "epoch": 0.7503511235955056, |
| "grad_norm": 0.8934418559074402, |
| "learning_rate": 4.824706619508797e-06, |
| "loss": 0.6406, |
| "step": 2137 |
| }, |
| { |
| "epoch": 0.7507022471910112, |
| "grad_norm": 0.8818619251251221, |
| "learning_rate": 4.8245365104937734e-06, |
| "loss": 0.6751, |
| "step": 2138 |
| }, |
| { |
| "epoch": 0.7510533707865169, |
| "grad_norm": 0.871660590171814, |
| "learning_rate": 4.824366321981591e-06, |
| "loss": 0.6628, |
| "step": 2139 |
| }, |
| { |
| "epoch": 0.7514044943820225, |
| "grad_norm": 0.8862018585205078, |
| "learning_rate": 4.824196053978069e-06, |
| "loss": 0.655, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.7517556179775281, |
| "grad_norm": 0.8964344263076782, |
| "learning_rate": 4.824025706489031e-06, |
| "loss": 0.6784, |
| "step": 2141 |
| }, |
| { |
| "epoch": 0.7521067415730337, |
| "grad_norm": 0.8510351181030273, |
| "learning_rate": 4.8238552795203025e-06, |
| "loss": 0.6521, |
| "step": 2142 |
| }, |
| { |
| "epoch": 0.7524578651685393, |
| "grad_norm": 0.8770909905433655, |
| "learning_rate": 4.823684773077712e-06, |
| "loss": 0.6828, |
| "step": 2143 |
| }, |
| { |
| "epoch": 0.7528089887640449, |
| "grad_norm": 0.9084598422050476, |
| "learning_rate": 4.823514187167091e-06, |
| "loss": 0.7081, |
| "step": 2144 |
| }, |
| { |
| "epoch": 0.7531601123595506, |
| "grad_norm": 0.8897272944450378, |
| "learning_rate": 4.823343521794273e-06, |
| "loss": 0.6812, |
| "step": 2145 |
| }, |
| { |
| "epoch": 0.7535112359550562, |
| "grad_norm": 0.8769894242286682, |
| "learning_rate": 4.8231727769650934e-06, |
| "loss": 0.69, |
| "step": 2146 |
| }, |
| { |
| "epoch": 0.7538623595505618, |
| "grad_norm": 0.8911151885986328, |
| "learning_rate": 4.823001952685394e-06, |
| "loss": 0.6573, |
| "step": 2147 |
| }, |
| { |
| "epoch": 0.7542134831460674, |
| "grad_norm": 0.8679448366165161, |
| "learning_rate": 4.822831048961015e-06, |
| "loss": 0.7047, |
| "step": 2148 |
| }, |
| { |
| "epoch": 0.754564606741573, |
| "grad_norm": 0.8955689072608948, |
| "learning_rate": 4.8226600657978024e-06, |
| "loss": 0.6775, |
| "step": 2149 |
| }, |
| { |
| "epoch": 0.7549157303370787, |
| "grad_norm": 0.8726634383201599, |
| "learning_rate": 4.822489003201602e-06, |
| "loss": 0.6744, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.7552668539325843, |
| "grad_norm": 0.8858023881912231, |
| "learning_rate": 4.822317861178266e-06, |
| "loss": 0.6975, |
| "step": 2151 |
| }, |
| { |
| "epoch": 0.7556179775280899, |
| "grad_norm": 0.8668063879013062, |
| "learning_rate": 4.822146639733647e-06, |
| "loss": 0.6866, |
| "step": 2152 |
| }, |
| { |
| "epoch": 0.7559691011235955, |
| "grad_norm": 0.8951655626296997, |
| "learning_rate": 4.821975338873599e-06, |
| "loss": 0.7165, |
| "step": 2153 |
| }, |
| { |
| "epoch": 0.7563202247191011, |
| "grad_norm": 0.9189617037773132, |
| "learning_rate": 4.821803958603982e-06, |
| "loss": 0.6899, |
| "step": 2154 |
| }, |
| { |
| "epoch": 0.7566713483146067, |
| "grad_norm": 0.8782833814620972, |
| "learning_rate": 4.821632498930656e-06, |
| "loss": 0.6613, |
| "step": 2155 |
| }, |
| { |
| "epoch": 0.7570224719101124, |
| "grad_norm": 0.882354736328125, |
| "learning_rate": 4.821460959859485e-06, |
| "loss": 0.7047, |
| "step": 2156 |
| }, |
| { |
| "epoch": 0.757373595505618, |
| "grad_norm": 0.8961043357849121, |
| "learning_rate": 4.821289341396336e-06, |
| "loss": 0.6699, |
| "step": 2157 |
| }, |
| { |
| "epoch": 0.7577247191011236, |
| "grad_norm": 0.9026042819023132, |
| "learning_rate": 4.8211176435470774e-06, |
| "loss": 0.6874, |
| "step": 2158 |
| }, |
| { |
| "epoch": 0.7580758426966292, |
| "grad_norm": 0.89781653881073, |
| "learning_rate": 4.820945866317582e-06, |
| "loss": 0.6764, |
| "step": 2159 |
| }, |
| { |
| "epoch": 0.7584269662921348, |
| "grad_norm": 0.9029053449630737, |
| "learning_rate": 4.820774009713725e-06, |
| "loss": 0.671, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.7587780898876404, |
| "grad_norm": 0.8790138959884644, |
| "learning_rate": 4.820602073741381e-06, |
| "loss": 0.6855, |
| "step": 2161 |
| }, |
| { |
| "epoch": 0.7591292134831461, |
| "grad_norm": 0.8737415671348572, |
| "learning_rate": 4.820430058406433e-06, |
| "loss": 0.6746, |
| "step": 2162 |
| }, |
| { |
| "epoch": 0.7594803370786517, |
| "grad_norm": 0.8937375545501709, |
| "learning_rate": 4.8202579637147615e-06, |
| "loss": 0.6742, |
| "step": 2163 |
| }, |
| { |
| "epoch": 0.7598314606741573, |
| "grad_norm": 0.8693381547927856, |
| "learning_rate": 4.820085789672254e-06, |
| "loss": 0.643, |
| "step": 2164 |
| }, |
| { |
| "epoch": 0.7601825842696629, |
| "grad_norm": 0.8404312133789062, |
| "learning_rate": 4.819913536284798e-06, |
| "loss": 0.6247, |
| "step": 2165 |
| }, |
| { |
| "epoch": 0.7605337078651685, |
| "grad_norm": 0.9133322238922119, |
| "learning_rate": 4.819741203558284e-06, |
| "loss": 0.6999, |
| "step": 2166 |
| }, |
| { |
| "epoch": 0.7608848314606742, |
| "grad_norm": 0.878828763961792, |
| "learning_rate": 4.8195687914986055e-06, |
| "loss": 0.642, |
| "step": 2167 |
| }, |
| { |
| "epoch": 0.7612359550561798, |
| "grad_norm": 0.8653201460838318, |
| "learning_rate": 4.81939630011166e-06, |
| "loss": 0.6668, |
| "step": 2168 |
| }, |
| { |
| "epoch": 0.7615870786516854, |
| "grad_norm": 0.8635656833648682, |
| "learning_rate": 4.819223729403345e-06, |
| "loss": 0.6749, |
| "step": 2169 |
| }, |
| { |
| "epoch": 0.761938202247191, |
| "grad_norm": 0.9288333654403687, |
| "learning_rate": 4.819051079379563e-06, |
| "loss": 0.6687, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.7622893258426966, |
| "grad_norm": 0.8941252827644348, |
| "learning_rate": 4.818878350046219e-06, |
| "loss": 0.6795, |
| "step": 2171 |
| }, |
| { |
| "epoch": 0.7626404494382022, |
| "grad_norm": 0.8719800114631653, |
| "learning_rate": 4.81870554140922e-06, |
| "loss": 0.6273, |
| "step": 2172 |
| }, |
| { |
| "epoch": 0.7629915730337079, |
| "grad_norm": 0.9125568866729736, |
| "learning_rate": 4.818532653474475e-06, |
| "loss": 0.6992, |
| "step": 2173 |
| }, |
| { |
| "epoch": 0.7633426966292135, |
| "grad_norm": 0.9224469661712646, |
| "learning_rate": 4.818359686247898e-06, |
| "loss": 0.6984, |
| "step": 2174 |
| }, |
| { |
| "epoch": 0.7636938202247191, |
| "grad_norm": 0.9007505178451538, |
| "learning_rate": 4.818186639735403e-06, |
| "loss": 0.6994, |
| "step": 2175 |
| }, |
| { |
| "epoch": 0.7640449438202247, |
| "grad_norm": 0.9869738817214966, |
| "learning_rate": 4.818013513942908e-06, |
| "loss": 0.7248, |
| "step": 2176 |
| }, |
| { |
| "epoch": 0.7643960674157303, |
| "grad_norm": 0.9010109305381775, |
| "learning_rate": 4.817840308876335e-06, |
| "loss": 0.6987, |
| "step": 2177 |
| }, |
| { |
| "epoch": 0.764747191011236, |
| "grad_norm": 0.8850472569465637, |
| "learning_rate": 4.8176670245416076e-06, |
| "loss": 0.6561, |
| "step": 2178 |
| }, |
| { |
| "epoch": 0.7650983146067416, |
| "grad_norm": 0.9040166139602661, |
| "learning_rate": 4.81749366094465e-06, |
| "loss": 0.6789, |
| "step": 2179 |
| }, |
| { |
| "epoch": 0.7654494382022472, |
| "grad_norm": 0.879569411277771, |
| "learning_rate": 4.817320218091393e-06, |
| "loss": 0.6728, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.7658005617977528, |
| "grad_norm": 0.8991327285766602, |
| "learning_rate": 4.817146695987768e-06, |
| "loss": 0.6919, |
| "step": 2181 |
| }, |
| { |
| "epoch": 0.7661516853932584, |
| "grad_norm": 0.8922879695892334, |
| "learning_rate": 4.816973094639708e-06, |
| "loss": 0.6696, |
| "step": 2182 |
| }, |
| { |
| "epoch": 0.766502808988764, |
| "grad_norm": 0.9196280837059021, |
| "learning_rate": 4.8167994140531504e-06, |
| "loss": 0.6928, |
| "step": 2183 |
| }, |
| { |
| "epoch": 0.7668539325842697, |
| "grad_norm": 0.8855655193328857, |
| "learning_rate": 4.816625654234036e-06, |
| "loss": 0.67, |
| "step": 2184 |
| }, |
| { |
| "epoch": 0.7672050561797753, |
| "grad_norm": 0.9374614953994751, |
| "learning_rate": 4.816451815188306e-06, |
| "loss": 0.6514, |
| "step": 2185 |
| }, |
| { |
| "epoch": 0.7675561797752809, |
| "grad_norm": 0.8807061910629272, |
| "learning_rate": 4.816277896921907e-06, |
| "loss": 0.666, |
| "step": 2186 |
| }, |
| { |
| "epoch": 0.7679073033707865, |
| "grad_norm": 0.8804175853729248, |
| "learning_rate": 4.816103899440785e-06, |
| "loss": 0.6687, |
| "step": 2187 |
| }, |
| { |
| "epoch": 0.7682584269662921, |
| "grad_norm": 0.8722162842750549, |
| "learning_rate": 4.815929822750893e-06, |
| "loss": 0.6924, |
| "step": 2188 |
| }, |
| { |
| "epoch": 0.7686095505617978, |
| "grad_norm": 0.8696702718734741, |
| "learning_rate": 4.815755666858182e-06, |
| "loss": 0.6883, |
| "step": 2189 |
| }, |
| { |
| "epoch": 0.7689606741573034, |
| "grad_norm": 0.8709911704063416, |
| "learning_rate": 4.815581431768609e-06, |
| "loss": 0.6533, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.769311797752809, |
| "grad_norm": 0.9215819835662842, |
| "learning_rate": 4.815407117488132e-06, |
| "loss": 0.6519, |
| "step": 2191 |
| }, |
| { |
| "epoch": 0.7696629213483146, |
| "grad_norm": 0.9619512557983398, |
| "learning_rate": 4.815232724022713e-06, |
| "loss": 0.6523, |
| "step": 2192 |
| }, |
| { |
| "epoch": 0.7700140449438202, |
| "grad_norm": 0.8896926045417786, |
| "learning_rate": 4.815058251378316e-06, |
| "loss": 0.6772, |
| "step": 2193 |
| }, |
| { |
| "epoch": 0.7703651685393258, |
| "grad_norm": 0.9150664806365967, |
| "learning_rate": 4.814883699560908e-06, |
| "loss": 0.7005, |
| "step": 2194 |
| }, |
| { |
| "epoch": 0.7707162921348315, |
| "grad_norm": 0.8808259963989258, |
| "learning_rate": 4.8147090685764585e-06, |
| "loss": 0.683, |
| "step": 2195 |
| }, |
| { |
| "epoch": 0.7710674157303371, |
| "grad_norm": 0.8727423548698425, |
| "learning_rate": 4.8145343584309404e-06, |
| "loss": 0.6499, |
| "step": 2196 |
| }, |
| { |
| "epoch": 0.7714185393258427, |
| "grad_norm": 0.8704618811607361, |
| "learning_rate": 4.814359569130326e-06, |
| "loss": 0.6678, |
| "step": 2197 |
| }, |
| { |
| "epoch": 0.7717696629213483, |
| "grad_norm": 0.8853801488876343, |
| "learning_rate": 4.814184700680596e-06, |
| "loss": 0.6788, |
| "step": 2198 |
| }, |
| { |
| "epoch": 0.7721207865168539, |
| "grad_norm": 0.8947190046310425, |
| "learning_rate": 4.81400975308773e-06, |
| "loss": 0.6919, |
| "step": 2199 |
| }, |
| { |
| "epoch": 0.7724719101123596, |
| "grad_norm": 0.8671098351478577, |
| "learning_rate": 4.813834726357709e-06, |
| "loss": 0.6344, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.7728230337078652, |
| "grad_norm": 0.8669817447662354, |
| "learning_rate": 4.813659620496521e-06, |
| "loss": 0.6655, |
| "step": 2201 |
| }, |
| { |
| "epoch": 0.7731741573033708, |
| "grad_norm": 0.8825884461402893, |
| "learning_rate": 4.813484435510154e-06, |
| "loss": 0.687, |
| "step": 2202 |
| }, |
| { |
| "epoch": 0.7735252808988764, |
| "grad_norm": 0.9186983704566956, |
| "learning_rate": 4.8133091714045985e-06, |
| "loss": 0.6807, |
| "step": 2203 |
| }, |
| { |
| "epoch": 0.773876404494382, |
| "grad_norm": 0.8848013877868652, |
| "learning_rate": 4.81313382818585e-06, |
| "loss": 0.6974, |
| "step": 2204 |
| }, |
| { |
| "epoch": 0.7742275280898876, |
| "grad_norm": 0.8753053545951843, |
| "learning_rate": 4.812958405859903e-06, |
| "loss": 0.7025, |
| "step": 2205 |
| }, |
| { |
| "epoch": 0.7745786516853933, |
| "grad_norm": 0.8476365208625793, |
| "learning_rate": 4.812782904432758e-06, |
| "loss": 0.6466, |
| "step": 2206 |
| }, |
| { |
| "epoch": 0.7749297752808989, |
| "grad_norm": 0.8795397281646729, |
| "learning_rate": 4.812607323910416e-06, |
| "loss": 0.6669, |
| "step": 2207 |
| }, |
| { |
| "epoch": 0.7752808988764045, |
| "grad_norm": 0.8774227499961853, |
| "learning_rate": 4.812431664298884e-06, |
| "loss": 0.6488, |
| "step": 2208 |
| }, |
| { |
| "epoch": 0.7756320224719101, |
| "grad_norm": 0.888466477394104, |
| "learning_rate": 4.812255925604167e-06, |
| "loss": 0.6367, |
| "step": 2209 |
| }, |
| { |
| "epoch": 0.7759831460674157, |
| "grad_norm": 0.8345751166343689, |
| "learning_rate": 4.812080107832275e-06, |
| "loss": 0.6204, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.7763342696629213, |
| "grad_norm": 0.8765496611595154, |
| "learning_rate": 4.811904210989223e-06, |
| "loss": 0.697, |
| "step": 2211 |
| }, |
| { |
| "epoch": 0.776685393258427, |
| "grad_norm": 0.893101692199707, |
| "learning_rate": 4.8117282350810255e-06, |
| "loss": 0.6673, |
| "step": 2212 |
| }, |
| { |
| "epoch": 0.7770365168539326, |
| "grad_norm": 0.909160852432251, |
| "learning_rate": 4.8115521801137e-06, |
| "loss": 0.6973, |
| "step": 2213 |
| }, |
| { |
| "epoch": 0.7773876404494382, |
| "grad_norm": 0.8834896087646484, |
| "learning_rate": 4.811376046093268e-06, |
| "loss": 0.6878, |
| "step": 2214 |
| }, |
| { |
| "epoch": 0.7777387640449438, |
| "grad_norm": 0.8608538508415222, |
| "learning_rate": 4.8111998330257535e-06, |
| "loss": 0.6688, |
| "step": 2215 |
| }, |
| { |
| "epoch": 0.7780898876404494, |
| "grad_norm": 0.9701086282730103, |
| "learning_rate": 4.811023540917181e-06, |
| "loss": 0.6721, |
| "step": 2216 |
| }, |
| { |
| "epoch": 0.7784410112359551, |
| "grad_norm": 0.8717615008354187, |
| "learning_rate": 4.8108471697735826e-06, |
| "loss": 0.6834, |
| "step": 2217 |
| }, |
| { |
| "epoch": 0.7787921348314607, |
| "grad_norm": 0.9188772439956665, |
| "learning_rate": 4.810670719600987e-06, |
| "loss": 0.7135, |
| "step": 2218 |
| }, |
| { |
| "epoch": 0.7791432584269663, |
| "grad_norm": 0.8877284526824951, |
| "learning_rate": 4.810494190405432e-06, |
| "loss": 0.7184, |
| "step": 2219 |
| }, |
| { |
| "epoch": 0.7794943820224719, |
| "grad_norm": 0.8861910104751587, |
| "learning_rate": 4.8103175821929515e-06, |
| "loss": 0.6545, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.7798455056179775, |
| "grad_norm": 0.9048117995262146, |
| "learning_rate": 4.810140894969587e-06, |
| "loss": 0.6995, |
| "step": 2221 |
| }, |
| { |
| "epoch": 0.7801966292134831, |
| "grad_norm": 0.8994266390800476, |
| "learning_rate": 4.809964128741381e-06, |
| "loss": 0.6837, |
| "step": 2222 |
| }, |
| { |
| "epoch": 0.7805477528089888, |
| "grad_norm": 0.9110033512115479, |
| "learning_rate": 4.8097872835143775e-06, |
| "loss": 0.6575, |
| "step": 2223 |
| }, |
| { |
| "epoch": 0.7808988764044944, |
| "grad_norm": 0.8434987664222717, |
| "learning_rate": 4.8096103592946265e-06, |
| "loss": 0.6535, |
| "step": 2224 |
| }, |
| { |
| "epoch": 0.78125, |
| "grad_norm": 0.91326504945755, |
| "learning_rate": 4.8094333560881765e-06, |
| "loss": 0.6714, |
| "step": 2225 |
| }, |
| { |
| "epoch": 0.7816011235955056, |
| "grad_norm": 0.8931390643119812, |
| "learning_rate": 4.809256273901083e-06, |
| "loss": 0.667, |
| "step": 2226 |
| }, |
| { |
| "epoch": 0.7819522471910112, |
| "grad_norm": 0.8923385143280029, |
| "learning_rate": 4.8090791127393995e-06, |
| "loss": 0.6792, |
| "step": 2227 |
| }, |
| { |
| "epoch": 0.7823033707865169, |
| "grad_norm": 0.8582352995872498, |
| "learning_rate": 4.808901872609188e-06, |
| "loss": 0.674, |
| "step": 2228 |
| }, |
| { |
| "epoch": 0.7826544943820225, |
| "grad_norm": 1.004380702972412, |
| "learning_rate": 4.808724553516507e-06, |
| "loss": 0.6736, |
| "step": 2229 |
| }, |
| { |
| "epoch": 0.7830056179775281, |
| "grad_norm": 0.9153397083282471, |
| "learning_rate": 4.808547155467422e-06, |
| "loss": 0.6842, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.7833567415730337, |
| "grad_norm": 0.8752025961875916, |
| "learning_rate": 4.808369678468001e-06, |
| "loss": 0.702, |
| "step": 2231 |
| }, |
| { |
| "epoch": 0.7837078651685393, |
| "grad_norm": 0.9121509790420532, |
| "learning_rate": 4.808192122524311e-06, |
| "loss": 0.6794, |
| "step": 2232 |
| }, |
| { |
| "epoch": 0.7840589887640449, |
| "grad_norm": 0.893732488155365, |
| "learning_rate": 4.808014487642426e-06, |
| "loss": 0.6783, |
| "step": 2233 |
| }, |
| { |
| "epoch": 0.7844101123595506, |
| "grad_norm": 0.8836367726325989, |
| "learning_rate": 4.8078367738284205e-06, |
| "loss": 0.7095, |
| "step": 2234 |
| }, |
| { |
| "epoch": 0.7847612359550562, |
| "grad_norm": 0.8630282878875732, |
| "learning_rate": 4.8076589810883725e-06, |
| "loss": 0.6262, |
| "step": 2235 |
| }, |
| { |
| "epoch": 0.7851123595505618, |
| "grad_norm": 0.8813517093658447, |
| "learning_rate": 4.807481109428362e-06, |
| "loss": 0.6728, |
| "step": 2236 |
| }, |
| { |
| "epoch": 0.7854634831460674, |
| "grad_norm": 0.8723793625831604, |
| "learning_rate": 4.80730315885447e-06, |
| "loss": 0.6554, |
| "step": 2237 |
| }, |
| { |
| "epoch": 0.785814606741573, |
| "grad_norm": 0.8989778757095337, |
| "learning_rate": 4.807125129372787e-06, |
| "loss": 0.6587, |
| "step": 2238 |
| }, |
| { |
| "epoch": 0.7861657303370787, |
| "grad_norm": 0.8957408666610718, |
| "learning_rate": 4.8069470209893976e-06, |
| "loss": 0.6604, |
| "step": 2239 |
| }, |
| { |
| "epoch": 0.7865168539325843, |
| "grad_norm": 0.9145181775093079, |
| "learning_rate": 4.806768833710395e-06, |
| "loss": 0.6794, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.7868679775280899, |
| "grad_norm": 0.8940600752830505, |
| "learning_rate": 4.806590567541871e-06, |
| "loss": 0.686, |
| "step": 2241 |
| }, |
| { |
| "epoch": 0.7872191011235955, |
| "grad_norm": 0.8759947419166565, |
| "learning_rate": 4.806412222489925e-06, |
| "loss": 0.7002, |
| "step": 2242 |
| }, |
| { |
| "epoch": 0.7875702247191011, |
| "grad_norm": 0.8928653001785278, |
| "learning_rate": 4.8062337985606535e-06, |
| "loss": 0.6343, |
| "step": 2243 |
| }, |
| { |
| "epoch": 0.7879213483146067, |
| "grad_norm": 0.9009339809417725, |
| "learning_rate": 4.806055295760159e-06, |
| "loss": 0.7096, |
| "step": 2244 |
| }, |
| { |
| "epoch": 0.7882724719101124, |
| "grad_norm": 0.8576456308364868, |
| "learning_rate": 4.8058767140945475e-06, |
| "loss": 0.6449, |
| "step": 2245 |
| }, |
| { |
| "epoch": 0.788623595505618, |
| "grad_norm": 0.8837461471557617, |
| "learning_rate": 4.805698053569926e-06, |
| "loss": 0.6565, |
| "step": 2246 |
| }, |
| { |
| "epoch": 0.7889747191011236, |
| "grad_norm": 1.0404770374298096, |
| "learning_rate": 4.805519314192403e-06, |
| "loss": 0.6726, |
| "step": 2247 |
| }, |
| { |
| "epoch": 0.7893258426966292, |
| "grad_norm": 0.9084009528160095, |
| "learning_rate": 4.805340495968093e-06, |
| "loss": 0.6518, |
| "step": 2248 |
| }, |
| { |
| "epoch": 0.7896769662921348, |
| "grad_norm": 0.9170441031455994, |
| "learning_rate": 4.805161598903111e-06, |
| "loss": 0.6733, |
| "step": 2249 |
| }, |
| { |
| "epoch": 0.7900280898876404, |
| "grad_norm": 0.9394940733909607, |
| "learning_rate": 4.804982623003574e-06, |
| "loss": 0.6898, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.7903792134831461, |
| "grad_norm": 0.9983392953872681, |
| "learning_rate": 4.8048035682756046e-06, |
| "loss": 0.6739, |
| "step": 2251 |
| }, |
| { |
| "epoch": 0.7907303370786517, |
| "grad_norm": 0.9523327946662903, |
| "learning_rate": 4.8046244347253245e-06, |
| "loss": 0.6999, |
| "step": 2252 |
| }, |
| { |
| "epoch": 0.7910814606741573, |
| "grad_norm": 0.8619253635406494, |
| "learning_rate": 4.804445222358862e-06, |
| "loss": 0.6699, |
| "step": 2253 |
| }, |
| { |
| "epoch": 0.7914325842696629, |
| "grad_norm": 0.9346986413002014, |
| "learning_rate": 4.804265931182343e-06, |
| "loss": 0.6971, |
| "step": 2254 |
| }, |
| { |
| "epoch": 0.7917837078651685, |
| "grad_norm": 0.912071943283081, |
| "learning_rate": 4.804086561201903e-06, |
| "loss": 0.6913, |
| "step": 2255 |
| }, |
| { |
| "epoch": 0.7921348314606742, |
| "grad_norm": 0.9198614358901978, |
| "learning_rate": 4.8039071124236734e-06, |
| "loss": 0.6787, |
| "step": 2256 |
| }, |
| { |
| "epoch": 0.7924859550561798, |
| "grad_norm": 0.8504485487937927, |
| "learning_rate": 4.803727584853792e-06, |
| "loss": 0.6574, |
| "step": 2257 |
| }, |
| { |
| "epoch": 0.7928370786516854, |
| "grad_norm": 0.860133707523346, |
| "learning_rate": 4.8035479784983984e-06, |
| "loss": 0.7023, |
| "step": 2258 |
| }, |
| { |
| "epoch": 0.793188202247191, |
| "grad_norm": 0.8657998442649841, |
| "learning_rate": 4.803368293363635e-06, |
| "loss": 0.667, |
| "step": 2259 |
| }, |
| { |
| "epoch": 0.7935393258426966, |
| "grad_norm": 0.8903993368148804, |
| "learning_rate": 4.803188529455647e-06, |
| "loss": 0.6504, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.7938904494382022, |
| "grad_norm": 0.9583306908607483, |
| "learning_rate": 4.803008686780582e-06, |
| "loss": 0.6624, |
| "step": 2261 |
| }, |
| { |
| "epoch": 0.7942415730337079, |
| "grad_norm": 0.9079137444496155, |
| "learning_rate": 4.802828765344592e-06, |
| "loss": 0.6845, |
| "step": 2262 |
| }, |
| { |
| "epoch": 0.7945926966292135, |
| "grad_norm": 0.8480777740478516, |
| "learning_rate": 4.802648765153828e-06, |
| "loss": 0.6416, |
| "step": 2263 |
| }, |
| { |
| "epoch": 0.7949438202247191, |
| "grad_norm": 0.9052068591117859, |
| "learning_rate": 4.802468686214446e-06, |
| "loss": 0.676, |
| "step": 2264 |
| }, |
| { |
| "epoch": 0.7952949438202247, |
| "grad_norm": 0.8713710904121399, |
| "learning_rate": 4.802288528532606e-06, |
| "loss": 0.6823, |
| "step": 2265 |
| }, |
| { |
| "epoch": 0.7956460674157303, |
| "grad_norm": 0.8888510465621948, |
| "learning_rate": 4.802108292114468e-06, |
| "loss": 0.6932, |
| "step": 2266 |
| }, |
| { |
| "epoch": 0.795997191011236, |
| "grad_norm": 0.9380448460578918, |
| "learning_rate": 4.8019279769661965e-06, |
| "loss": 0.7137, |
| "step": 2267 |
| }, |
| { |
| "epoch": 0.7963483146067416, |
| "grad_norm": 0.8584917783737183, |
| "learning_rate": 4.801747583093958e-06, |
| "loss": 0.66, |
| "step": 2268 |
| }, |
| { |
| "epoch": 0.7966994382022472, |
| "grad_norm": 0.8634847402572632, |
| "learning_rate": 4.8015671105039205e-06, |
| "loss": 0.6553, |
| "step": 2269 |
| }, |
| { |
| "epoch": 0.7970505617977528, |
| "grad_norm": 1.1672545671463013, |
| "learning_rate": 4.801386559202259e-06, |
| "loss": 0.7002, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.7974016853932584, |
| "grad_norm": 0.8866782188415527, |
| "learning_rate": 4.801205929195146e-06, |
| "loss": 0.6791, |
| "step": 2271 |
| }, |
| { |
| "epoch": 0.797752808988764, |
| "grad_norm": 0.92203289270401, |
| "learning_rate": 4.801025220488759e-06, |
| "loss": 0.6586, |
| "step": 2272 |
| }, |
| { |
| "epoch": 0.7981039325842697, |
| "grad_norm": 0.9017732739448547, |
| "learning_rate": 4.8008444330892785e-06, |
| "loss": 0.6559, |
| "step": 2273 |
| }, |
| { |
| "epoch": 0.7984550561797753, |
| "grad_norm": 0.9230211973190308, |
| "learning_rate": 4.800663567002888e-06, |
| "loss": 0.6968, |
| "step": 2274 |
| }, |
| { |
| "epoch": 0.7988061797752809, |
| "grad_norm": 0.9016463160514832, |
| "learning_rate": 4.8004826222357715e-06, |
| "loss": 0.6624, |
| "step": 2275 |
| }, |
| { |
| "epoch": 0.7991573033707865, |
| "grad_norm": 0.851717472076416, |
| "learning_rate": 4.8003015987941185e-06, |
| "loss": 0.6454, |
| "step": 2276 |
| }, |
| { |
| "epoch": 0.7995084269662921, |
| "grad_norm": 0.8657079935073853, |
| "learning_rate": 4.800120496684119e-06, |
| "loss": 0.6537, |
| "step": 2277 |
| }, |
| { |
| "epoch": 0.7998595505617978, |
| "grad_norm": 0.8756804466247559, |
| "learning_rate": 4.7999393159119665e-06, |
| "loss": 0.6777, |
| "step": 2278 |
| }, |
| { |
| "epoch": 0.8002106741573034, |
| "grad_norm": 0.8858351111412048, |
| "learning_rate": 4.799758056483858e-06, |
| "loss": 0.673, |
| "step": 2279 |
| }, |
| { |
| "epoch": 0.800561797752809, |
| "grad_norm": 1.0514605045318604, |
| "learning_rate": 4.799576718405991e-06, |
| "loss": 0.6417, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.8009129213483146, |
| "grad_norm": 0.8649700284004211, |
| "learning_rate": 4.799395301684569e-06, |
| "loss": 0.6753, |
| "step": 2281 |
| }, |
| { |
| "epoch": 0.8012640449438202, |
| "grad_norm": 0.893130362033844, |
| "learning_rate": 4.799213806325794e-06, |
| "loss": 0.7144, |
| "step": 2282 |
| }, |
| { |
| "epoch": 0.8016151685393258, |
| "grad_norm": 0.8683095574378967, |
| "learning_rate": 4.799032232335875e-06, |
| "loss": 0.6565, |
| "step": 2283 |
| }, |
| { |
| "epoch": 0.8019662921348315, |
| "grad_norm": 0.8900540471076965, |
| "learning_rate": 4.798850579721022e-06, |
| "loss": 0.7065, |
| "step": 2284 |
| }, |
| { |
| "epoch": 0.8023174157303371, |
| "grad_norm": 0.9185386300086975, |
| "learning_rate": 4.7986688484874446e-06, |
| "loss": 0.6371, |
| "step": 2285 |
| }, |
| { |
| "epoch": 0.8026685393258427, |
| "grad_norm": 0.8507232666015625, |
| "learning_rate": 4.79848703864136e-06, |
| "loss": 0.66, |
| "step": 2286 |
| }, |
| { |
| "epoch": 0.8030196629213483, |
| "grad_norm": 0.8661529421806335, |
| "learning_rate": 4.798305150188986e-06, |
| "loss": 0.6643, |
| "step": 2287 |
| }, |
| { |
| "epoch": 0.8033707865168539, |
| "grad_norm": 0.8859586119651794, |
| "learning_rate": 4.798123183136542e-06, |
| "loss": 0.652, |
| "step": 2288 |
| }, |
| { |
| "epoch": 0.8037219101123596, |
| "grad_norm": 0.9179227948188782, |
| "learning_rate": 4.797941137490253e-06, |
| "loss": 0.7033, |
| "step": 2289 |
| }, |
| { |
| "epoch": 0.8040730337078652, |
| "grad_norm": 0.8715042471885681, |
| "learning_rate": 4.797759013256343e-06, |
| "loss": 0.6482, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.8044241573033708, |
| "grad_norm": 0.8953105211257935, |
| "learning_rate": 4.797576810441041e-06, |
| "loss": 0.6986, |
| "step": 2291 |
| }, |
| { |
| "epoch": 0.8047752808988764, |
| "grad_norm": 0.9580156803131104, |
| "learning_rate": 4.797394529050577e-06, |
| "loss": 0.6512, |
| "step": 2292 |
| }, |
| { |
| "epoch": 0.805126404494382, |
| "grad_norm": 0.9238266348838806, |
| "learning_rate": 4.7972121690911865e-06, |
| "loss": 0.6796, |
| "step": 2293 |
| }, |
| { |
| "epoch": 0.8054775280898876, |
| "grad_norm": 0.8915815949440002, |
| "learning_rate": 4.797029730569106e-06, |
| "loss": 0.707, |
| "step": 2294 |
| }, |
| { |
| "epoch": 0.8058286516853933, |
| "grad_norm": 0.8978826999664307, |
| "learning_rate": 4.796847213490574e-06, |
| "loss": 0.7162, |
| "step": 2295 |
| }, |
| { |
| "epoch": 0.8061797752808989, |
| "grad_norm": 0.8888537287712097, |
| "learning_rate": 4.796664617861832e-06, |
| "loss": 0.6832, |
| "step": 2296 |
| }, |
| { |
| "epoch": 0.8065308988764045, |
| "grad_norm": 0.8834527730941772, |
| "learning_rate": 4.796481943689127e-06, |
| "loss": 0.6863, |
| "step": 2297 |
| }, |
| { |
| "epoch": 0.8068820224719101, |
| "grad_norm": 0.8643321394920349, |
| "learning_rate": 4.796299190978704e-06, |
| "loss": 0.6739, |
| "step": 2298 |
| }, |
| { |
| "epoch": 0.8072331460674157, |
| "grad_norm": 0.8717115521430969, |
| "learning_rate": 4.796116359736813e-06, |
| "loss": 0.6502, |
| "step": 2299 |
| }, |
| { |
| "epoch": 0.8075842696629213, |
| "grad_norm": 0.8730166554450989, |
| "learning_rate": 4.7959334499697085e-06, |
| "loss": 0.6604, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.807935393258427, |
| "grad_norm": 0.8617185950279236, |
| "learning_rate": 4.795750461683643e-06, |
| "loss": 0.667, |
| "step": 2301 |
| }, |
| { |
| "epoch": 0.8082865168539326, |
| "grad_norm": 0.826866626739502, |
| "learning_rate": 4.795567394884878e-06, |
| "loss": 0.6273, |
| "step": 2302 |
| }, |
| { |
| "epoch": 0.8086376404494382, |
| "grad_norm": 0.9160782098770142, |
| "learning_rate": 4.795384249579672e-06, |
| "loss": 0.6841, |
| "step": 2303 |
| }, |
| { |
| "epoch": 0.8089887640449438, |
| "grad_norm": 0.8521285653114319, |
| "learning_rate": 4.795201025774287e-06, |
| "loss": 0.6311, |
| "step": 2304 |
| }, |
| { |
| "epoch": 0.8093398876404494, |
| "grad_norm": 0.9138549566268921, |
| "learning_rate": 4.795017723474993e-06, |
| "loss": 0.6848, |
| "step": 2305 |
| }, |
| { |
| "epoch": 0.8096910112359551, |
| "grad_norm": 0.8857465386390686, |
| "learning_rate": 4.794834342688056e-06, |
| "loss": 0.6457, |
| "step": 2306 |
| }, |
| { |
| "epoch": 0.8100421348314607, |
| "grad_norm": 0.934290885925293, |
| "learning_rate": 4.7946508834197495e-06, |
| "loss": 0.6853, |
| "step": 2307 |
| }, |
| { |
| "epoch": 0.8103932584269663, |
| "grad_norm": 0.8928509950637817, |
| "learning_rate": 4.794467345676345e-06, |
| "loss": 0.6884, |
| "step": 2308 |
| }, |
| { |
| "epoch": 0.8107443820224719, |
| "grad_norm": 0.9139081835746765, |
| "learning_rate": 4.79428372946412e-06, |
| "loss": 0.7047, |
| "step": 2309 |
| }, |
| { |
| "epoch": 0.8110955056179775, |
| "grad_norm": 0.8674109578132629, |
| "learning_rate": 4.794100034789356e-06, |
| "loss": 0.6397, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.8114466292134831, |
| "grad_norm": 0.9009836912155151, |
| "learning_rate": 4.793916261658333e-06, |
| "loss": 0.6954, |
| "step": 2311 |
| }, |
| { |
| "epoch": 0.8117977528089888, |
| "grad_norm": 0.8968256115913391, |
| "learning_rate": 4.7937324100773375e-06, |
| "loss": 0.6983, |
| "step": 2312 |
| }, |
| { |
| "epoch": 0.8121488764044944, |
| "grad_norm": 0.9040331244468689, |
| "learning_rate": 4.793548480052656e-06, |
| "loss": 0.6932, |
| "step": 2313 |
| }, |
| { |
| "epoch": 0.8125, |
| "grad_norm": 0.9160118699073792, |
| "learning_rate": 4.7933644715905794e-06, |
| "loss": 0.702, |
| "step": 2314 |
| }, |
| { |
| "epoch": 0.8128511235955056, |
| "grad_norm": 0.8957852721214294, |
| "learning_rate": 4.7931803846974005e-06, |
| "loss": 0.6737, |
| "step": 2315 |
| }, |
| { |
| "epoch": 0.8132022471910112, |
| "grad_norm": 0.9111645817756653, |
| "learning_rate": 4.7929962193794145e-06, |
| "loss": 0.6716, |
| "step": 2316 |
| }, |
| { |
| "epoch": 0.8135533707865169, |
| "grad_norm": 0.9393349885940552, |
| "learning_rate": 4.792811975642919e-06, |
| "loss": 0.7166, |
| "step": 2317 |
| }, |
| { |
| "epoch": 0.8139044943820225, |
| "grad_norm": 0.8632532358169556, |
| "learning_rate": 4.792627653494217e-06, |
| "loss": 0.6545, |
| "step": 2318 |
| }, |
| { |
| "epoch": 0.8142556179775281, |
| "grad_norm": 0.8554111123085022, |
| "learning_rate": 4.792443252939612e-06, |
| "loss": 0.6421, |
| "step": 2319 |
| }, |
| { |
| "epoch": 0.8146067415730337, |
| "grad_norm": 0.8907461166381836, |
| "learning_rate": 4.792258773985408e-06, |
| "loss": 0.6874, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.8149578651685393, |
| "grad_norm": 0.8739638924598694, |
| "learning_rate": 4.792074216637916e-06, |
| "loss": 0.6662, |
| "step": 2321 |
| }, |
| { |
| "epoch": 0.8153089887640449, |
| "grad_norm": 0.8694685101509094, |
| "learning_rate": 4.791889580903446e-06, |
| "loss": 0.6536, |
| "step": 2322 |
| }, |
| { |
| "epoch": 0.8156601123595506, |
| "grad_norm": 0.9057086706161499, |
| "learning_rate": 4.791704866788314e-06, |
| "loss": 0.6655, |
| "step": 2323 |
| }, |
| { |
| "epoch": 0.8160112359550562, |
| "grad_norm": 0.9522676467895508, |
| "learning_rate": 4.791520074298837e-06, |
| "loss": 0.7235, |
| "step": 2324 |
| }, |
| { |
| "epoch": 0.8163623595505618, |
| "grad_norm": 0.8711917996406555, |
| "learning_rate": 4.791335203441335e-06, |
| "loss": 0.7015, |
| "step": 2325 |
| }, |
| { |
| "epoch": 0.8167134831460674, |
| "grad_norm": 0.8643499612808228, |
| "learning_rate": 4.791150254222129e-06, |
| "loss": 0.6229, |
| "step": 2326 |
| }, |
| { |
| "epoch": 0.817064606741573, |
| "grad_norm": 0.8863927721977234, |
| "learning_rate": 4.790965226647545e-06, |
| "loss": 0.7017, |
| "step": 2327 |
| }, |
| { |
| "epoch": 0.8174157303370787, |
| "grad_norm": 0.8969694375991821, |
| "learning_rate": 4.790780120723911e-06, |
| "loss": 0.6926, |
| "step": 2328 |
| }, |
| { |
| "epoch": 0.8177668539325843, |
| "grad_norm": 0.923423171043396, |
| "learning_rate": 4.790594936457556e-06, |
| "loss": 0.707, |
| "step": 2329 |
| }, |
| { |
| "epoch": 0.8181179775280899, |
| "grad_norm": 0.8579330444335938, |
| "learning_rate": 4.790409673854815e-06, |
| "loss": 0.6224, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.8184691011235955, |
| "grad_norm": 0.8940487504005432, |
| "learning_rate": 4.790224332922023e-06, |
| "loss": 0.6859, |
| "step": 2331 |
| }, |
| { |
| "epoch": 0.8188202247191011, |
| "grad_norm": 0.8670837879180908, |
| "learning_rate": 4.790038913665519e-06, |
| "loss": 0.6669, |
| "step": 2332 |
| }, |
| { |
| "epoch": 0.8191713483146067, |
| "grad_norm": 0.8588036298751831, |
| "learning_rate": 4.789853416091643e-06, |
| "loss": 0.6951, |
| "step": 2333 |
| }, |
| { |
| "epoch": 0.8195224719101124, |
| "grad_norm": 0.8730223774909973, |
| "learning_rate": 4.78966784020674e-06, |
| "loss": 0.6751, |
| "step": 2334 |
| }, |
| { |
| "epoch": 0.819873595505618, |
| "grad_norm": 0.8985008001327515, |
| "learning_rate": 4.789482186017157e-06, |
| "loss": 0.6927, |
| "step": 2335 |
| }, |
| { |
| "epoch": 0.8202247191011236, |
| "grad_norm": 0.8898709416389465, |
| "learning_rate": 4.78929645352924e-06, |
| "loss": 0.6307, |
| "step": 2336 |
| }, |
| { |
| "epoch": 0.8205758426966292, |
| "grad_norm": 0.8730369806289673, |
| "learning_rate": 4.789110642749346e-06, |
| "loss": 0.6548, |
| "step": 2337 |
| }, |
| { |
| "epoch": 0.8209269662921348, |
| "grad_norm": 0.8780831694602966, |
| "learning_rate": 4.788924753683826e-06, |
| "loss": 0.6451, |
| "step": 2338 |
| }, |
| { |
| "epoch": 0.8212780898876404, |
| "grad_norm": 0.8665900230407715, |
| "learning_rate": 4.788738786339037e-06, |
| "loss": 0.6656, |
| "step": 2339 |
| }, |
| { |
| "epoch": 0.8216292134831461, |
| "grad_norm": 0.9199172854423523, |
| "learning_rate": 4.788552740721341e-06, |
| "loss": 0.6629, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.8219803370786517, |
| "grad_norm": 0.8883637189865112, |
| "learning_rate": 4.7883666168370996e-06, |
| "loss": 0.6789, |
| "step": 2341 |
| }, |
| { |
| "epoch": 0.8223314606741573, |
| "grad_norm": 0.8635849356651306, |
| "learning_rate": 4.788180414692678e-06, |
| "loss": 0.6823, |
| "step": 2342 |
| }, |
| { |
| "epoch": 0.8226825842696629, |
| "grad_norm": 0.9055063128471375, |
| "learning_rate": 4.787994134294444e-06, |
| "loss": 0.6667, |
| "step": 2343 |
| }, |
| { |
| "epoch": 0.8230337078651685, |
| "grad_norm": 0.8952557444572449, |
| "learning_rate": 4.787807775648768e-06, |
| "loss": 0.6694, |
| "step": 2344 |
| }, |
| { |
| "epoch": 0.8233848314606742, |
| "grad_norm": 0.9171862006187439, |
| "learning_rate": 4.787621338762025e-06, |
| "loss": 0.6938, |
| "step": 2345 |
| }, |
| { |
| "epoch": 0.8237359550561798, |
| "grad_norm": 0.9036487936973572, |
| "learning_rate": 4.787434823640589e-06, |
| "loss": 0.6565, |
| "step": 2346 |
| }, |
| { |
| "epoch": 0.8240870786516854, |
| "grad_norm": 0.8997727036476135, |
| "learning_rate": 4.787248230290839e-06, |
| "loss": 0.6586, |
| "step": 2347 |
| }, |
| { |
| "epoch": 0.824438202247191, |
| "grad_norm": 0.9203174114227295, |
| "learning_rate": 4.787061558719158e-06, |
| "loss": 0.6684, |
| "step": 2348 |
| }, |
| { |
| "epoch": 0.8247893258426966, |
| "grad_norm": 0.8566135168075562, |
| "learning_rate": 4.786874808931928e-06, |
| "loss": 0.6366, |
| "step": 2349 |
| }, |
| { |
| "epoch": 0.8251404494382022, |
| "grad_norm": 0.8947523832321167, |
| "learning_rate": 4.786687980935536e-06, |
| "loss": 0.6954, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.8254915730337079, |
| "grad_norm": 0.8714900016784668, |
| "learning_rate": 4.7865010747363725e-06, |
| "loss": 0.6678, |
| "step": 2351 |
| }, |
| { |
| "epoch": 0.8258426966292135, |
| "grad_norm": 0.8742727041244507, |
| "learning_rate": 4.786314090340829e-06, |
| "loss": 0.6356, |
| "step": 2352 |
| }, |
| { |
| "epoch": 0.8261938202247191, |
| "grad_norm": 0.8638887405395508, |
| "learning_rate": 4.7861270277553e-06, |
| "loss": 0.6643, |
| "step": 2353 |
| }, |
| { |
| "epoch": 0.8265449438202247, |
| "grad_norm": 0.8582507967948914, |
| "learning_rate": 4.785939886986182e-06, |
| "loss": 0.6943, |
| "step": 2354 |
| }, |
| { |
| "epoch": 0.8268960674157303, |
| "grad_norm": 0.8985755443572998, |
| "learning_rate": 4.785752668039877e-06, |
| "loss": 0.6899, |
| "step": 2355 |
| }, |
| { |
| "epoch": 0.827247191011236, |
| "grad_norm": 0.889373242855072, |
| "learning_rate": 4.7855653709227856e-06, |
| "loss": 0.6687, |
| "step": 2356 |
| }, |
| { |
| "epoch": 0.8275983146067416, |
| "grad_norm": 0.8748716711997986, |
| "learning_rate": 4.785377995641315e-06, |
| "loss": 0.6813, |
| "step": 2357 |
| }, |
| { |
| "epoch": 0.8279494382022472, |
| "grad_norm": 0.912598729133606, |
| "learning_rate": 4.785190542201873e-06, |
| "loss": 0.7037, |
| "step": 2358 |
| }, |
| { |
| "epoch": 0.8283005617977528, |
| "grad_norm": 0.8579078912734985, |
| "learning_rate": 4.785003010610869e-06, |
| "loss": 0.651, |
| "step": 2359 |
| }, |
| { |
| "epoch": 0.8286516853932584, |
| "grad_norm": 0.8841166496276855, |
| "learning_rate": 4.7848154008747185e-06, |
| "loss": 0.6527, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.829002808988764, |
| "grad_norm": 0.9118697047233582, |
| "learning_rate": 4.784627712999836e-06, |
| "loss": 0.7196, |
| "step": 2361 |
| }, |
| { |
| "epoch": 0.8293539325842697, |
| "grad_norm": 0.8655892610549927, |
| "learning_rate": 4.78443994699264e-06, |
| "loss": 0.6813, |
| "step": 2362 |
| }, |
| { |
| "epoch": 0.8297050561797753, |
| "grad_norm": 0.8933113217353821, |
| "learning_rate": 4.784252102859553e-06, |
| "loss": 0.6276, |
| "step": 2363 |
| }, |
| { |
| "epoch": 0.8300561797752809, |
| "grad_norm": 0.8977401852607727, |
| "learning_rate": 4.784064180606999e-06, |
| "loss": 0.7107, |
| "step": 2364 |
| }, |
| { |
| "epoch": 0.8304073033707865, |
| "grad_norm": 0.8428375720977783, |
| "learning_rate": 4.783876180241404e-06, |
| "loss": 0.6187, |
| "step": 2365 |
| }, |
| { |
| "epoch": 0.8307584269662921, |
| "grad_norm": 0.8943214416503906, |
| "learning_rate": 4.783688101769199e-06, |
| "loss": 0.71, |
| "step": 2366 |
| }, |
| { |
| "epoch": 0.8311095505617978, |
| "grad_norm": 0.8977285027503967, |
| "learning_rate": 4.783499945196814e-06, |
| "loss": 0.6895, |
| "step": 2367 |
| }, |
| { |
| "epoch": 0.8314606741573034, |
| "grad_norm": 0.8746622800827026, |
| "learning_rate": 4.783311710530685e-06, |
| "loss": 0.656, |
| "step": 2368 |
| }, |
| { |
| "epoch": 0.831811797752809, |
| "grad_norm": 0.9107025265693665, |
| "learning_rate": 4.783123397777249e-06, |
| "loss": 0.6869, |
| "step": 2369 |
| }, |
| { |
| "epoch": 0.8321629213483146, |
| "grad_norm": 0.8555697202682495, |
| "learning_rate": 4.782935006942948e-06, |
| "loss": 0.6607, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.8325140449438202, |
| "grad_norm": 0.9654397368431091, |
| "learning_rate": 4.782746538034222e-06, |
| "loss": 0.7033, |
| "step": 2371 |
| }, |
| { |
| "epoch": 0.8328651685393258, |
| "grad_norm": 0.8786224722862244, |
| "learning_rate": 4.782557991057519e-06, |
| "loss": 0.6855, |
| "step": 2372 |
| }, |
| { |
| "epoch": 0.8332162921348315, |
| "grad_norm": 0.8756742477416992, |
| "learning_rate": 4.782369366019285e-06, |
| "loss": 0.6665, |
| "step": 2373 |
| }, |
| { |
| "epoch": 0.8335674157303371, |
| "grad_norm": 0.8499640226364136, |
| "learning_rate": 4.782180662925971e-06, |
| "loss": 0.6311, |
| "step": 2374 |
| }, |
| { |
| "epoch": 0.8339185393258427, |
| "grad_norm": 0.8776542544364929, |
| "learning_rate": 4.781991881784033e-06, |
| "loss": 0.6894, |
| "step": 2375 |
| }, |
| { |
| "epoch": 0.8342696629213483, |
| "grad_norm": 0.8762704730033875, |
| "learning_rate": 4.7818030225999245e-06, |
| "loss": 0.6637, |
| "step": 2376 |
| }, |
| { |
| "epoch": 0.8346207865168539, |
| "grad_norm": 0.8762661218643188, |
| "learning_rate": 4.7816140853801054e-06, |
| "loss": 0.6672, |
| "step": 2377 |
| }, |
| { |
| "epoch": 0.8349719101123596, |
| "grad_norm": 0.8843420743942261, |
| "learning_rate": 4.781425070131036e-06, |
| "loss": 0.6614, |
| "step": 2378 |
| }, |
| { |
| "epoch": 0.8353230337078652, |
| "grad_norm": 0.8788619041442871, |
| "learning_rate": 4.781235976859183e-06, |
| "loss": 0.6853, |
| "step": 2379 |
| }, |
| { |
| "epoch": 0.8356741573033708, |
| "grad_norm": 0.9134128093719482, |
| "learning_rate": 4.78104680557101e-06, |
| "loss": 0.6904, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.8360252808988764, |
| "grad_norm": 0.8329752683639526, |
| "learning_rate": 4.7808575562729894e-06, |
| "loss": 0.6318, |
| "step": 2381 |
| }, |
| { |
| "epoch": 0.836376404494382, |
| "grad_norm": 0.8506373763084412, |
| "learning_rate": 4.780668228971592e-06, |
| "loss": 0.6757, |
| "step": 2382 |
| }, |
| { |
| "epoch": 0.8367275280898876, |
| "grad_norm": 0.8934277892112732, |
| "learning_rate": 4.780478823673292e-06, |
| "loss": 0.6961, |
| "step": 2383 |
| }, |
| { |
| "epoch": 0.8370786516853933, |
| "grad_norm": 0.90306556224823, |
| "learning_rate": 4.780289340384569e-06, |
| "loss": 0.729, |
| "step": 2384 |
| }, |
| { |
| "epoch": 0.8374297752808989, |
| "grad_norm": 0.8692042231559753, |
| "learning_rate": 4.780099779111901e-06, |
| "loss": 0.6695, |
| "step": 2385 |
| }, |
| { |
| "epoch": 0.8377808988764045, |
| "grad_norm": 0.8926328420639038, |
| "learning_rate": 4.779910139861771e-06, |
| "loss": 0.6895, |
| "step": 2386 |
| }, |
| { |
| "epoch": 0.8381320224719101, |
| "grad_norm": 0.8823839426040649, |
| "learning_rate": 4.779720422640666e-06, |
| "loss": 0.6587, |
| "step": 2387 |
| }, |
| { |
| "epoch": 0.8384831460674157, |
| "grad_norm": 0.8437162637710571, |
| "learning_rate": 4.7795306274550735e-06, |
| "loss": 0.6577, |
| "step": 2388 |
| }, |
| { |
| "epoch": 0.8388342696629213, |
| "grad_norm": 0.8754181265830994, |
| "learning_rate": 4.779340754311483e-06, |
| "loss": 0.6751, |
| "step": 2389 |
| }, |
| { |
| "epoch": 0.839185393258427, |
| "grad_norm": 0.8797729015350342, |
| "learning_rate": 4.77915080321639e-06, |
| "loss": 0.675, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.8395365168539326, |
| "grad_norm": 0.8657658100128174, |
| "learning_rate": 4.778960774176291e-06, |
| "loss": 0.6504, |
| "step": 2391 |
| }, |
| { |
| "epoch": 0.8398876404494382, |
| "grad_norm": 0.8967252969741821, |
| "learning_rate": 4.778770667197682e-06, |
| "loss": 0.6623, |
| "step": 2392 |
| }, |
| { |
| "epoch": 0.8402387640449438, |
| "grad_norm": 0.881457507610321, |
| "learning_rate": 4.778580482287067e-06, |
| "loss": 0.6514, |
| "step": 2393 |
| }, |
| { |
| "epoch": 0.8405898876404494, |
| "grad_norm": 0.8704044222831726, |
| "learning_rate": 4.778390219450949e-06, |
| "loss": 0.6695, |
| "step": 2394 |
| }, |
| { |
| "epoch": 0.8409410112359551, |
| "grad_norm": 0.9062277674674988, |
| "learning_rate": 4.778199878695835e-06, |
| "loss": 0.6394, |
| "step": 2395 |
| }, |
| { |
| "epoch": 0.8412921348314607, |
| "grad_norm": 0.873767614364624, |
| "learning_rate": 4.778009460028235e-06, |
| "loss": 0.6059, |
| "step": 2396 |
| }, |
| { |
| "epoch": 0.8416432584269663, |
| "grad_norm": 0.9467976689338684, |
| "learning_rate": 4.77781896345466e-06, |
| "loss": 0.6834, |
| "step": 2397 |
| }, |
| { |
| "epoch": 0.8419943820224719, |
| "grad_norm": 0.8977733254432678, |
| "learning_rate": 4.777628388981626e-06, |
| "loss": 0.703, |
| "step": 2398 |
| }, |
| { |
| "epoch": 0.8423455056179775, |
| "grad_norm": 0.8968080282211304, |
| "learning_rate": 4.7774377366156505e-06, |
| "loss": 0.678, |
| "step": 2399 |
| }, |
| { |
| "epoch": 0.8426966292134831, |
| "grad_norm": 0.8892068862915039, |
| "learning_rate": 4.777247006363253e-06, |
| "loss": 0.7203, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.8430477528089888, |
| "grad_norm": 0.8844308853149414, |
| "learning_rate": 4.777056198230956e-06, |
| "loss": 0.6694, |
| "step": 2401 |
| }, |
| { |
| "epoch": 0.8433988764044944, |
| "grad_norm": 0.8951604962348938, |
| "learning_rate": 4.7768653122252865e-06, |
| "loss": 0.6701, |
| "step": 2402 |
| }, |
| { |
| "epoch": 0.84375, |
| "grad_norm": 0.8704971075057983, |
| "learning_rate": 4.7766743483527715e-06, |
| "loss": 0.6616, |
| "step": 2403 |
| }, |
| { |
| "epoch": 0.8441011235955056, |
| "grad_norm": 0.908189058303833, |
| "learning_rate": 4.776483306619941e-06, |
| "loss": 0.6668, |
| "step": 2404 |
| }, |
| { |
| "epoch": 0.8444522471910112, |
| "grad_norm": 0.8829559087753296, |
| "learning_rate": 4.77629218703333e-06, |
| "loss": 0.6891, |
| "step": 2405 |
| }, |
| { |
| "epoch": 0.8448033707865169, |
| "grad_norm": 0.912239134311676, |
| "learning_rate": 4.776100989599475e-06, |
| "loss": 0.7016, |
| "step": 2406 |
| }, |
| { |
| "epoch": 0.8451544943820225, |
| "grad_norm": 0.8914303779602051, |
| "learning_rate": 4.775909714324912e-06, |
| "loss": 0.6593, |
| "step": 2407 |
| }, |
| { |
| "epoch": 0.8455056179775281, |
| "grad_norm": 0.9197940826416016, |
| "learning_rate": 4.775718361216186e-06, |
| "loss": 0.677, |
| "step": 2408 |
| }, |
| { |
| "epoch": 0.8458567415730337, |
| "grad_norm": 0.9217466115951538, |
| "learning_rate": 4.775526930279839e-06, |
| "loss": 0.7026, |
| "step": 2409 |
| }, |
| { |
| "epoch": 0.8462078651685393, |
| "grad_norm": 0.8749791383743286, |
| "learning_rate": 4.775335421522418e-06, |
| "loss": 0.6502, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.8465589887640449, |
| "grad_norm": 0.8961182236671448, |
| "learning_rate": 4.7751438349504716e-06, |
| "loss": 0.651, |
| "step": 2411 |
| }, |
| { |
| "epoch": 0.8469101123595506, |
| "grad_norm": 0.8679896593093872, |
| "learning_rate": 4.7749521705705546e-06, |
| "loss": 0.6445, |
| "step": 2412 |
| }, |
| { |
| "epoch": 0.8472612359550562, |
| "grad_norm": 0.8874735236167908, |
| "learning_rate": 4.774760428389219e-06, |
| "loss": 0.685, |
| "step": 2413 |
| }, |
| { |
| "epoch": 0.8476123595505618, |
| "grad_norm": 0.8967884182929993, |
| "learning_rate": 4.774568608413024e-06, |
| "loss": 0.6812, |
| "step": 2414 |
| }, |
| { |
| "epoch": 0.8479634831460674, |
| "grad_norm": 0.8984326720237732, |
| "learning_rate": 4.774376710648528e-06, |
| "loss": 0.6847, |
| "step": 2415 |
| }, |
| { |
| "epoch": 0.848314606741573, |
| "grad_norm": 0.861842155456543, |
| "learning_rate": 4.7741847351022955e-06, |
| "loss": 0.6643, |
| "step": 2416 |
| }, |
| { |
| "epoch": 0.8486657303370787, |
| "grad_norm": 0.8478919267654419, |
| "learning_rate": 4.773992681780891e-06, |
| "loss": 0.6309, |
| "step": 2417 |
| }, |
| { |
| "epoch": 0.8490168539325843, |
| "grad_norm": 0.8753276467323303, |
| "learning_rate": 4.773800550690882e-06, |
| "loss": 0.6906, |
| "step": 2418 |
| }, |
| { |
| "epoch": 0.8493679775280899, |
| "grad_norm": 0.9123186469078064, |
| "learning_rate": 4.7736083418388404e-06, |
| "loss": 0.6698, |
| "step": 2419 |
| }, |
| { |
| "epoch": 0.8497191011235955, |
| "grad_norm": 0.8448005318641663, |
| "learning_rate": 4.773416055231339e-06, |
| "loss": 0.6671, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.8500702247191011, |
| "grad_norm": 0.886045515537262, |
| "learning_rate": 4.773223690874954e-06, |
| "loss": 0.6925, |
| "step": 2421 |
| }, |
| { |
| "epoch": 0.8504213483146067, |
| "grad_norm": 0.8761569857597351, |
| "learning_rate": 4.773031248776264e-06, |
| "loss": 0.6988, |
| "step": 2422 |
| }, |
| { |
| "epoch": 0.8507724719101124, |
| "grad_norm": 0.8618139028549194, |
| "learning_rate": 4.772838728941851e-06, |
| "loss": 0.6345, |
| "step": 2423 |
| }, |
| { |
| "epoch": 0.851123595505618, |
| "grad_norm": 0.8982943296432495, |
| "learning_rate": 4.772646131378297e-06, |
| "loss": 0.6775, |
| "step": 2424 |
| }, |
| { |
| "epoch": 0.8514747191011236, |
| "grad_norm": 0.9190080761909485, |
| "learning_rate": 4.772453456092191e-06, |
| "loss": 0.6683, |
| "step": 2425 |
| }, |
| { |
| "epoch": 0.8518258426966292, |
| "grad_norm": 0.8613485097885132, |
| "learning_rate": 4.772260703090121e-06, |
| "loss": 0.6378, |
| "step": 2426 |
| }, |
| { |
| "epoch": 0.8521769662921348, |
| "grad_norm": 0.8885965943336487, |
| "learning_rate": 4.7720678723786796e-06, |
| "loss": 0.6971, |
| "step": 2427 |
| }, |
| { |
| "epoch": 0.8525280898876404, |
| "grad_norm": 0.8576337099075317, |
| "learning_rate": 4.771874963964461e-06, |
| "loss": 0.6417, |
| "step": 2428 |
| }, |
| { |
| "epoch": 0.8528792134831461, |
| "grad_norm": 0.8758781552314758, |
| "learning_rate": 4.771681977854062e-06, |
| "loss": 0.668, |
| "step": 2429 |
| }, |
| { |
| "epoch": 0.8532303370786517, |
| "grad_norm": 0.881989061832428, |
| "learning_rate": 4.771488914054085e-06, |
| "loss": 0.655, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.8535814606741573, |
| "grad_norm": 0.8963753581047058, |
| "learning_rate": 4.77129577257113e-06, |
| "loss": 0.647, |
| "step": 2431 |
| }, |
| { |
| "epoch": 0.8539325842696629, |
| "grad_norm": 0.8885678648948669, |
| "learning_rate": 4.771102553411803e-06, |
| "loss": 0.6832, |
| "step": 2432 |
| }, |
| { |
| "epoch": 0.8542837078651685, |
| "grad_norm": 0.8854667544364929, |
| "learning_rate": 4.770909256582712e-06, |
| "loss": 0.6387, |
| "step": 2433 |
| }, |
| { |
| "epoch": 0.8546348314606742, |
| "grad_norm": 0.9203517436981201, |
| "learning_rate": 4.770715882090468e-06, |
| "loss": 0.6606, |
| "step": 2434 |
| }, |
| { |
| "epoch": 0.8549859550561798, |
| "grad_norm": 0.9271900653839111, |
| "learning_rate": 4.770522429941683e-06, |
| "loss": 0.6787, |
| "step": 2435 |
| }, |
| { |
| "epoch": 0.8553370786516854, |
| "grad_norm": 0.8828155994415283, |
| "learning_rate": 4.770328900142975e-06, |
| "loss": 0.6742, |
| "step": 2436 |
| }, |
| { |
| "epoch": 0.855688202247191, |
| "grad_norm": 0.8759010434150696, |
| "learning_rate": 4.770135292700961e-06, |
| "loss": 0.6742, |
| "step": 2437 |
| }, |
| { |
| "epoch": 0.8560393258426966, |
| "grad_norm": 0.9363099932670593, |
| "learning_rate": 4.769941607622262e-06, |
| "loss": 0.6997, |
| "step": 2438 |
| }, |
| { |
| "epoch": 0.8563904494382022, |
| "grad_norm": 0.89815753698349, |
| "learning_rate": 4.769747844913503e-06, |
| "loss": 0.6643, |
| "step": 2439 |
| }, |
| { |
| "epoch": 0.8567415730337079, |
| "grad_norm": 0.8725033402442932, |
| "learning_rate": 4.76955400458131e-06, |
| "loss": 0.6433, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.8570926966292135, |
| "grad_norm": 0.8719140291213989, |
| "learning_rate": 4.769360086632312e-06, |
| "loss": 0.6215, |
| "step": 2441 |
| }, |
| { |
| "epoch": 0.8574438202247191, |
| "grad_norm": 0.8736863732337952, |
| "learning_rate": 4.76916609107314e-06, |
| "loss": 0.6854, |
| "step": 2442 |
| }, |
| { |
| "epoch": 0.8577949438202247, |
| "grad_norm": 0.8882306814193726, |
| "learning_rate": 4.768972017910432e-06, |
| "loss": 0.678, |
| "step": 2443 |
| }, |
| { |
| "epoch": 0.8581460674157303, |
| "grad_norm": 0.8788784146308899, |
| "learning_rate": 4.76877786715082e-06, |
| "loss": 0.6413, |
| "step": 2444 |
| }, |
| { |
| "epoch": 0.858497191011236, |
| "grad_norm": 0.8777339458465576, |
| "learning_rate": 4.768583638800947e-06, |
| "loss": 0.703, |
| "step": 2445 |
| }, |
| { |
| "epoch": 0.8588483146067416, |
| "grad_norm": 0.8905528783798218, |
| "learning_rate": 4.7683893328674556e-06, |
| "loss": 0.6653, |
| "step": 2446 |
| }, |
| { |
| "epoch": 0.8591994382022472, |
| "grad_norm": 0.8809261322021484, |
| "learning_rate": 4.76819494935699e-06, |
| "loss": 0.6357, |
| "step": 2447 |
| }, |
| { |
| "epoch": 0.8595505617977528, |
| "grad_norm": 0.9078806638717651, |
| "learning_rate": 4.768000488276198e-06, |
| "loss": 0.6924, |
| "step": 2448 |
| }, |
| { |
| "epoch": 0.8599016853932584, |
| "grad_norm": 0.8684207201004028, |
| "learning_rate": 4.767805949631729e-06, |
| "loss": 0.6753, |
| "step": 2449 |
| }, |
| { |
| "epoch": 0.860252808988764, |
| "grad_norm": 0.9299638867378235, |
| "learning_rate": 4.767611333430238e-06, |
| "loss": 0.6986, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.8606039325842697, |
| "grad_norm": 0.9025764465332031, |
| "learning_rate": 4.7674166396783796e-06, |
| "loss": 0.6832, |
| "step": 2451 |
| }, |
| { |
| "epoch": 0.8609550561797753, |
| "grad_norm": 0.8947761058807373, |
| "learning_rate": 4.767221868382812e-06, |
| "loss": 0.6927, |
| "step": 2452 |
| }, |
| { |
| "epoch": 0.8613061797752809, |
| "grad_norm": 0.9044678807258606, |
| "learning_rate": 4.767027019550198e-06, |
| "loss": 0.6803, |
| "step": 2453 |
| }, |
| { |
| "epoch": 0.8616573033707865, |
| "grad_norm": 0.8838576078414917, |
| "learning_rate": 4.766832093187199e-06, |
| "loss": 0.7069, |
| "step": 2454 |
| }, |
| { |
| "epoch": 0.8620084269662921, |
| "grad_norm": 0.8987936973571777, |
| "learning_rate": 4.766637089300481e-06, |
| "loss": 0.6986, |
| "step": 2455 |
| }, |
| { |
| "epoch": 0.8623595505617978, |
| "grad_norm": 0.9236810207366943, |
| "learning_rate": 4.766442007896716e-06, |
| "loss": 0.6912, |
| "step": 2456 |
| }, |
| { |
| "epoch": 0.8627106741573034, |
| "grad_norm": 0.9139038920402527, |
| "learning_rate": 4.766246848982573e-06, |
| "loss": 0.6647, |
| "step": 2457 |
| }, |
| { |
| "epoch": 0.863061797752809, |
| "grad_norm": 0.8569420576095581, |
| "learning_rate": 4.766051612564726e-06, |
| "loss": 0.6092, |
| "step": 2458 |
| }, |
| { |
| "epoch": 0.8634129213483146, |
| "grad_norm": 0.895006537437439, |
| "learning_rate": 4.765856298649855e-06, |
| "loss": 0.7071, |
| "step": 2459 |
| }, |
| { |
| "epoch": 0.8637640449438202, |
| "grad_norm": 0.8703930377960205, |
| "learning_rate": 4.7656609072446355e-06, |
| "loss": 0.6839, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.8641151685393258, |
| "grad_norm": 0.8899871706962585, |
| "learning_rate": 4.765465438355753e-06, |
| "loss": 0.6543, |
| "step": 2461 |
| }, |
| { |
| "epoch": 0.8644662921348315, |
| "grad_norm": 0.929097592830658, |
| "learning_rate": 4.765269891989891e-06, |
| "loss": 0.6848, |
| "step": 2462 |
| }, |
| { |
| "epoch": 0.8648174157303371, |
| "grad_norm": 0.8808541297912598, |
| "learning_rate": 4.765074268153736e-06, |
| "loss": 0.6625, |
| "step": 2463 |
| }, |
| { |
| "epoch": 0.8651685393258427, |
| "grad_norm": 0.9321450591087341, |
| "learning_rate": 4.764878566853981e-06, |
| "loss": 0.6483, |
| "step": 2464 |
| }, |
| { |
| "epoch": 0.8655196629213483, |
| "grad_norm": 0.8869593143463135, |
| "learning_rate": 4.764682788097317e-06, |
| "loss": 0.6546, |
| "step": 2465 |
| }, |
| { |
| "epoch": 0.8658707865168539, |
| "grad_norm": 0.9124717116355896, |
| "learning_rate": 4.764486931890438e-06, |
| "loss": 0.6637, |
| "step": 2466 |
| }, |
| { |
| "epoch": 0.8662219101123596, |
| "grad_norm": 0.9037688374519348, |
| "learning_rate": 4.764290998240044e-06, |
| "loss": 0.6656, |
| "step": 2467 |
| }, |
| { |
| "epoch": 0.8665730337078652, |
| "grad_norm": 0.9029697179794312, |
| "learning_rate": 4.7640949871528365e-06, |
| "loss": 0.6837, |
| "step": 2468 |
| }, |
| { |
| "epoch": 0.8669241573033708, |
| "grad_norm": 0.9010921716690063, |
| "learning_rate": 4.763898898635517e-06, |
| "loss": 0.6527, |
| "step": 2469 |
| }, |
| { |
| "epoch": 0.8672752808988764, |
| "grad_norm": 0.8654772043228149, |
| "learning_rate": 4.763702732694792e-06, |
| "loss": 0.6983, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.867626404494382, |
| "grad_norm": 0.8883764147758484, |
| "learning_rate": 4.763506489337372e-06, |
| "loss": 0.7035, |
| "step": 2471 |
| }, |
| { |
| "epoch": 0.8679775280898876, |
| "grad_norm": 0.902722179889679, |
| "learning_rate": 4.763310168569966e-06, |
| "loss": 0.6928, |
| "step": 2472 |
| }, |
| { |
| "epoch": 0.8683286516853933, |
| "grad_norm": 0.900360643863678, |
| "learning_rate": 4.76311377039929e-06, |
| "loss": 0.653, |
| "step": 2473 |
| }, |
| { |
| "epoch": 0.8686797752808989, |
| "grad_norm": 0.8905735015869141, |
| "learning_rate": 4.762917294832058e-06, |
| "loss": 0.6644, |
| "step": 2474 |
| }, |
| { |
| "epoch": 0.8690308988764045, |
| "grad_norm": 0.8971735835075378, |
| "learning_rate": 4.762720741874992e-06, |
| "loss": 0.6751, |
| "step": 2475 |
| }, |
| { |
| "epoch": 0.8693820224719101, |
| "grad_norm": 0.9062357544898987, |
| "learning_rate": 4.762524111534813e-06, |
| "loss": 0.6207, |
| "step": 2476 |
| }, |
| { |
| "epoch": 0.8697331460674157, |
| "grad_norm": 0.8765476942062378, |
| "learning_rate": 4.762327403818245e-06, |
| "loss": 0.6972, |
| "step": 2477 |
| }, |
| { |
| "epoch": 0.8700842696629213, |
| "grad_norm": 0.8708880543708801, |
| "learning_rate": 4.762130618732016e-06, |
| "loss": 0.6704, |
| "step": 2478 |
| }, |
| { |
| "epoch": 0.870435393258427, |
| "grad_norm": 0.8837103247642517, |
| "learning_rate": 4.761933756282856e-06, |
| "loss": 0.6314, |
| "step": 2479 |
| }, |
| { |
| "epoch": 0.8707865168539326, |
| "grad_norm": 0.8385997414588928, |
| "learning_rate": 4.7617368164774956e-06, |
| "loss": 0.6511, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.8711376404494382, |
| "grad_norm": 0.8760436773300171, |
| "learning_rate": 4.761539799322672e-06, |
| "loss": 0.6927, |
| "step": 2481 |
| }, |
| { |
| "epoch": 0.8714887640449438, |
| "grad_norm": 0.8821952939033508, |
| "learning_rate": 4.761342704825124e-06, |
| "loss": 0.6682, |
| "step": 2482 |
| }, |
| { |
| "epoch": 0.8718398876404494, |
| "grad_norm": 0.8891538381576538, |
| "learning_rate": 4.761145532991589e-06, |
| "loss": 0.6796, |
| "step": 2483 |
| }, |
| { |
| "epoch": 0.8721910112359551, |
| "grad_norm": 0.9022207260131836, |
| "learning_rate": 4.760948283828812e-06, |
| "loss": 0.7124, |
| "step": 2484 |
| }, |
| { |
| "epoch": 0.8725421348314607, |
| "grad_norm": 0.8623314499855042, |
| "learning_rate": 4.760750957343538e-06, |
| "loss": 0.6677, |
| "step": 2485 |
| }, |
| { |
| "epoch": 0.8728932584269663, |
| "grad_norm": 0.8784610033035278, |
| "learning_rate": 4.760553553542517e-06, |
| "loss": 0.6731, |
| "step": 2486 |
| }, |
| { |
| "epoch": 0.8732443820224719, |
| "grad_norm": 0.8761183619499207, |
| "learning_rate": 4.7603560724324985e-06, |
| "loss": 0.6246, |
| "step": 2487 |
| }, |
| { |
| "epoch": 0.8735955056179775, |
| "grad_norm": 0.8424711227416992, |
| "learning_rate": 4.7601585140202354e-06, |
| "loss": 0.6453, |
| "step": 2488 |
| }, |
| { |
| "epoch": 0.8739466292134831, |
| "grad_norm": 0.8858649730682373, |
| "learning_rate": 4.7599608783124865e-06, |
| "loss": 0.6869, |
| "step": 2489 |
| }, |
| { |
| "epoch": 0.8742977528089888, |
| "grad_norm": 0.8897294402122498, |
| "learning_rate": 4.759763165316008e-06, |
| "loss": 0.673, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.8746488764044944, |
| "grad_norm": 0.8692971467971802, |
| "learning_rate": 4.759565375037564e-06, |
| "loss": 0.6691, |
| "step": 2491 |
| }, |
| { |
| "epoch": 0.875, |
| "grad_norm": 0.9153478741645813, |
| "learning_rate": 4.759367507483918e-06, |
| "loss": 0.7012, |
| "step": 2492 |
| }, |
| { |
| "epoch": 0.8753511235955056, |
| "grad_norm": 0.9016615748405457, |
| "learning_rate": 4.759169562661836e-06, |
| "loss": 0.6758, |
| "step": 2493 |
| }, |
| { |
| "epoch": 0.8757022471910112, |
| "grad_norm": 0.8769146203994751, |
| "learning_rate": 4.758971540578088e-06, |
| "loss": 0.6989, |
| "step": 2494 |
| }, |
| { |
| "epoch": 0.8760533707865169, |
| "grad_norm": 0.8640443086624146, |
| "learning_rate": 4.758773441239447e-06, |
| "loss": 0.6355, |
| "step": 2495 |
| }, |
| { |
| "epoch": 0.8764044943820225, |
| "grad_norm": 0.9241236448287964, |
| "learning_rate": 4.758575264652687e-06, |
| "loss": 0.642, |
| "step": 2496 |
| }, |
| { |
| "epoch": 0.8767556179775281, |
| "grad_norm": 0.8472598791122437, |
| "learning_rate": 4.758377010824585e-06, |
| "loss": 0.6229, |
| "step": 2497 |
| }, |
| { |
| "epoch": 0.8771067415730337, |
| "grad_norm": 0.8446848392486572, |
| "learning_rate": 4.758178679761922e-06, |
| "loss": 0.6234, |
| "step": 2498 |
| }, |
| { |
| "epoch": 0.8774578651685393, |
| "grad_norm": 0.8826449513435364, |
| "learning_rate": 4.7579802714714815e-06, |
| "loss": 0.6799, |
| "step": 2499 |
| }, |
| { |
| "epoch": 0.8778089887640449, |
| "grad_norm": 0.8934155106544495, |
| "learning_rate": 4.757781785960047e-06, |
| "loss": 0.655, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.8781601123595506, |
| "grad_norm": 0.8733129501342773, |
| "learning_rate": 4.757583223234408e-06, |
| "loss": 0.6754, |
| "step": 2501 |
| }, |
| { |
| "epoch": 0.8785112359550562, |
| "grad_norm": 0.8651498556137085, |
| "learning_rate": 4.757384583301353e-06, |
| "loss": 0.6839, |
| "step": 2502 |
| }, |
| { |
| "epoch": 0.8788623595505618, |
| "grad_norm": 0.9135331511497498, |
| "learning_rate": 4.757185866167679e-06, |
| "loss": 0.6593, |
| "step": 2503 |
| }, |
| { |
| "epoch": 0.8792134831460674, |
| "grad_norm": 0.86785888671875, |
| "learning_rate": 4.756987071840179e-06, |
| "loss": 0.6568, |
| "step": 2504 |
| }, |
| { |
| "epoch": 0.879564606741573, |
| "grad_norm": 0.9271560311317444, |
| "learning_rate": 4.756788200325652e-06, |
| "loss": 0.6364, |
| "step": 2505 |
| }, |
| { |
| "epoch": 0.8799157303370787, |
| "grad_norm": 0.911849319934845, |
| "learning_rate": 4.7565892516309e-06, |
| "loss": 0.675, |
| "step": 2506 |
| }, |
| { |
| "epoch": 0.8802668539325843, |
| "grad_norm": 0.8646685481071472, |
| "learning_rate": 4.756390225762727e-06, |
| "loss": 0.6056, |
| "step": 2507 |
| }, |
| { |
| "epoch": 0.8806179775280899, |
| "grad_norm": 0.8820457458496094, |
| "learning_rate": 4.756191122727939e-06, |
| "loss": 0.6632, |
| "step": 2508 |
| }, |
| { |
| "epoch": 0.8809691011235955, |
| "grad_norm": 0.8895183801651001, |
| "learning_rate": 4.755991942533344e-06, |
| "loss": 0.6819, |
| "step": 2509 |
| }, |
| { |
| "epoch": 0.8813202247191011, |
| "grad_norm": 0.8657563924789429, |
| "learning_rate": 4.755792685185758e-06, |
| "loss": 0.649, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.8816713483146067, |
| "grad_norm": 0.9095283150672913, |
| "learning_rate": 4.755593350691989e-06, |
| "loss": 0.718, |
| "step": 2511 |
| }, |
| { |
| "epoch": 0.8820224719101124, |
| "grad_norm": 0.8777075409889221, |
| "learning_rate": 4.75539393905886e-06, |
| "loss": 0.6915, |
| "step": 2512 |
| }, |
| { |
| "epoch": 0.882373595505618, |
| "grad_norm": 0.8759570121765137, |
| "learning_rate": 4.755194450293187e-06, |
| "loss": 0.6749, |
| "step": 2513 |
| }, |
| { |
| "epoch": 0.8827247191011236, |
| "grad_norm": 0.8764015436172485, |
| "learning_rate": 4.754994884401794e-06, |
| "loss": 0.6652, |
| "step": 2514 |
| }, |
| { |
| "epoch": 0.8830758426966292, |
| "grad_norm": 0.9175446033477783, |
| "learning_rate": 4.754795241391505e-06, |
| "loss": 0.6626, |
| "step": 2515 |
| }, |
| { |
| "epoch": 0.8834269662921348, |
| "grad_norm": 0.871394693851471, |
| "learning_rate": 4.754595521269149e-06, |
| "loss": 0.694, |
| "step": 2516 |
| }, |
| { |
| "epoch": 0.8837780898876404, |
| "grad_norm": 0.9084363579750061, |
| "learning_rate": 4.754395724041556e-06, |
| "loss": 0.6725, |
| "step": 2517 |
| }, |
| { |
| "epoch": 0.8841292134831461, |
| "grad_norm": 0.8901112079620361, |
| "learning_rate": 4.754195849715557e-06, |
| "loss": 0.6614, |
| "step": 2518 |
| }, |
| { |
| "epoch": 0.8844803370786517, |
| "grad_norm": 0.8882237672805786, |
| "learning_rate": 4.7539958982979894e-06, |
| "loss": 0.6605, |
| "step": 2519 |
| }, |
| { |
| "epoch": 0.8848314606741573, |
| "grad_norm": 0.899763286113739, |
| "learning_rate": 4.753795869795691e-06, |
| "loss": 0.6423, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.8851825842696629, |
| "grad_norm": 0.9296014904975891, |
| "learning_rate": 4.753595764215503e-06, |
| "loss": 0.7166, |
| "step": 2521 |
| }, |
| { |
| "epoch": 0.8855337078651685, |
| "grad_norm": 0.883310854434967, |
| "learning_rate": 4.753395581564267e-06, |
| "loss": 0.6404, |
| "step": 2522 |
| }, |
| { |
| "epoch": 0.8858848314606742, |
| "grad_norm": 0.8889510035514832, |
| "learning_rate": 4.753195321848831e-06, |
| "loss": 0.649, |
| "step": 2523 |
| }, |
| { |
| "epoch": 0.8862359550561798, |
| "grad_norm": 0.9097694754600525, |
| "learning_rate": 4.752994985076044e-06, |
| "loss": 0.6604, |
| "step": 2524 |
| }, |
| { |
| "epoch": 0.8865870786516854, |
| "grad_norm": 0.8716392517089844, |
| "learning_rate": 4.752794571252755e-06, |
| "loss": 0.6989, |
| "step": 2525 |
| }, |
| { |
| "epoch": 0.886938202247191, |
| "grad_norm": 0.8778386116027832, |
| "learning_rate": 4.75259408038582e-06, |
| "loss": 0.638, |
| "step": 2526 |
| }, |
| { |
| "epoch": 0.8872893258426966, |
| "grad_norm": 0.8789075016975403, |
| "learning_rate": 4.752393512482095e-06, |
| "loss": 0.6713, |
| "step": 2527 |
| }, |
| { |
| "epoch": 0.8876404494382022, |
| "grad_norm": 0.8854068517684937, |
| "learning_rate": 4.752192867548439e-06, |
| "loss": 0.6534, |
| "step": 2528 |
| }, |
| { |
| "epoch": 0.8879915730337079, |
| "grad_norm": 0.8878395557403564, |
| "learning_rate": 4.7519921455917145e-06, |
| "loss": 0.684, |
| "step": 2529 |
| }, |
| { |
| "epoch": 0.8883426966292135, |
| "grad_norm": 0.8584818840026855, |
| "learning_rate": 4.751791346618786e-06, |
| "loss": 0.6259, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.8886938202247191, |
| "grad_norm": 0.8890157341957092, |
| "learning_rate": 4.75159047063652e-06, |
| "loss": 0.6607, |
| "step": 2531 |
| }, |
| { |
| "epoch": 0.8890449438202247, |
| "grad_norm": 0.851600706577301, |
| "learning_rate": 4.751389517651787e-06, |
| "loss": 0.66, |
| "step": 2532 |
| }, |
| { |
| "epoch": 0.8893960674157303, |
| "grad_norm": 0.8763266801834106, |
| "learning_rate": 4.751188487671457e-06, |
| "loss": 0.6427, |
| "step": 2533 |
| }, |
| { |
| "epoch": 0.889747191011236, |
| "grad_norm": 0.9055187702178955, |
| "learning_rate": 4.75098738070241e-06, |
| "loss": 0.6804, |
| "step": 2534 |
| }, |
| { |
| "epoch": 0.8900983146067416, |
| "grad_norm": 0.8860245943069458, |
| "learning_rate": 4.750786196751519e-06, |
| "loss": 0.6534, |
| "step": 2535 |
| }, |
| { |
| "epoch": 0.8904494382022472, |
| "grad_norm": 0.8951229453086853, |
| "learning_rate": 4.7505849358256665e-06, |
| "loss": 0.6637, |
| "step": 2536 |
| }, |
| { |
| "epoch": 0.8908005617977528, |
| "grad_norm": 0.9056205153465271, |
| "learning_rate": 4.7503835979317345e-06, |
| "loss": 0.6742, |
| "step": 2537 |
| }, |
| { |
| "epoch": 0.8911516853932584, |
| "grad_norm": 0.8903935551643372, |
| "learning_rate": 4.75018218307661e-06, |
| "loss": 0.6889, |
| "step": 2538 |
| }, |
| { |
| "epoch": 0.891502808988764, |
| "grad_norm": 0.9006560444831848, |
| "learning_rate": 4.74998069126718e-06, |
| "loss": 0.7042, |
| "step": 2539 |
| }, |
| { |
| "epoch": 0.8918539325842697, |
| "grad_norm": 0.8635199069976807, |
| "learning_rate": 4.749779122510337e-06, |
| "loss": 0.6692, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.8922050561797753, |
| "grad_norm": 0.8788262009620667, |
| "learning_rate": 4.749577476812971e-06, |
| "loss": 0.6652, |
| "step": 2541 |
| }, |
| { |
| "epoch": 0.8925561797752809, |
| "grad_norm": 0.8723106384277344, |
| "learning_rate": 4.749375754181981e-06, |
| "loss": 0.6373, |
| "step": 2542 |
| }, |
| { |
| "epoch": 0.8929073033707865, |
| "grad_norm": 0.8863840103149414, |
| "learning_rate": 4.749173954624265e-06, |
| "loss": 0.6701, |
| "step": 2543 |
| }, |
| { |
| "epoch": 0.8932584269662921, |
| "grad_norm": 0.9265256524085999, |
| "learning_rate": 4.748972078146725e-06, |
| "loss": 0.6591, |
| "step": 2544 |
| }, |
| { |
| "epoch": 0.8936095505617978, |
| "grad_norm": 0.9035506844520569, |
| "learning_rate": 4.748770124756265e-06, |
| "loss": 0.6585, |
| "step": 2545 |
| }, |
| { |
| "epoch": 0.8939606741573034, |
| "grad_norm": 0.8865588307380676, |
| "learning_rate": 4.748568094459789e-06, |
| "loss": 0.6903, |
| "step": 2546 |
| }, |
| { |
| "epoch": 0.894311797752809, |
| "grad_norm": 0.8840475678443909, |
| "learning_rate": 4.74836598726421e-06, |
| "loss": 0.6704, |
| "step": 2547 |
| }, |
| { |
| "epoch": 0.8946629213483146, |
| "grad_norm": 0.8937922120094299, |
| "learning_rate": 4.748163803176437e-06, |
| "loss": 0.6649, |
| "step": 2548 |
| }, |
| { |
| "epoch": 0.8950140449438202, |
| "grad_norm": 0.897407054901123, |
| "learning_rate": 4.747961542203387e-06, |
| "loss": 0.6705, |
| "step": 2549 |
| }, |
| { |
| "epoch": 0.8953651685393258, |
| "grad_norm": 0.8580518364906311, |
| "learning_rate": 4.747759204351975e-06, |
| "loss": 0.6217, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.8957162921348315, |
| "grad_norm": 0.8982558250427246, |
| "learning_rate": 4.747556789629121e-06, |
| "loss": 0.6873, |
| "step": 2551 |
| }, |
| { |
| "epoch": 0.8960674157303371, |
| "grad_norm": 0.922828197479248, |
| "learning_rate": 4.747354298041749e-06, |
| "loss": 0.6833, |
| "step": 2552 |
| }, |
| { |
| "epoch": 0.8964185393258427, |
| "grad_norm": 0.9047195315361023, |
| "learning_rate": 4.747151729596783e-06, |
| "loss": 0.6731, |
| "step": 2553 |
| }, |
| { |
| "epoch": 0.8967696629213483, |
| "grad_norm": 0.9132267236709595, |
| "learning_rate": 4.7469490843011515e-06, |
| "loss": 0.6752, |
| "step": 2554 |
| }, |
| { |
| "epoch": 0.8971207865168539, |
| "grad_norm": 0.8471392393112183, |
| "learning_rate": 4.7467463621617825e-06, |
| "loss": 0.6552, |
| "step": 2555 |
| }, |
| { |
| "epoch": 0.8974719101123596, |
| "grad_norm": 0.8814915418624878, |
| "learning_rate": 4.746543563185612e-06, |
| "loss": 0.676, |
| "step": 2556 |
| }, |
| { |
| "epoch": 0.8978230337078652, |
| "grad_norm": 0.8573102355003357, |
| "learning_rate": 4.746340687379573e-06, |
| "loss": 0.6397, |
| "step": 2557 |
| }, |
| { |
| "epoch": 0.8981741573033708, |
| "grad_norm": 0.8620328903198242, |
| "learning_rate": 4.746137734750604e-06, |
| "loss": 0.625, |
| "step": 2558 |
| }, |
| { |
| "epoch": 0.8985252808988764, |
| "grad_norm": 0.8743171691894531, |
| "learning_rate": 4.745934705305648e-06, |
| "loss": 0.6729, |
| "step": 2559 |
| }, |
| { |
| "epoch": 0.898876404494382, |
| "grad_norm": 0.838996410369873, |
| "learning_rate": 4.745731599051646e-06, |
| "loss": 0.6507, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.8992275280898876, |
| "grad_norm": 0.9150429964065552, |
| "learning_rate": 4.745528415995546e-06, |
| "loss": 0.6869, |
| "step": 2561 |
| }, |
| { |
| "epoch": 0.8995786516853933, |
| "grad_norm": 0.8738797903060913, |
| "learning_rate": 4.745325156144295e-06, |
| "loss": 0.6843, |
| "step": 2562 |
| }, |
| { |
| "epoch": 0.8999297752808989, |
| "grad_norm": 0.871146559715271, |
| "learning_rate": 4.745121819504845e-06, |
| "loss": 0.6773, |
| "step": 2563 |
| }, |
| { |
| "epoch": 0.9002808988764045, |
| "grad_norm": 0.8615120649337769, |
| "learning_rate": 4.74491840608415e-06, |
| "loss": 0.6383, |
| "step": 2564 |
| }, |
| { |
| "epoch": 0.9006320224719101, |
| "grad_norm": 0.8572788834571838, |
| "learning_rate": 4.744714915889167e-06, |
| "loss": 0.6488, |
| "step": 2565 |
| }, |
| { |
| "epoch": 0.9009831460674157, |
| "grad_norm": 0.8589732050895691, |
| "learning_rate": 4.744511348926855e-06, |
| "loss": 0.6464, |
| "step": 2566 |
| }, |
| { |
| "epoch": 0.9013342696629213, |
| "grad_norm": 0.8711311221122742, |
| "learning_rate": 4.744307705204174e-06, |
| "loss": 0.6809, |
| "step": 2567 |
| }, |
| { |
| "epoch": 0.901685393258427, |
| "grad_norm": 0.9025472402572632, |
| "learning_rate": 4.744103984728091e-06, |
| "loss": 0.6973, |
| "step": 2568 |
| }, |
| { |
| "epoch": 0.9020365168539326, |
| "grad_norm": 0.8648887872695923, |
| "learning_rate": 4.743900187505572e-06, |
| "loss": 0.6666, |
| "step": 2569 |
| }, |
| { |
| "epoch": 0.9023876404494382, |
| "grad_norm": 0.8922040462493896, |
| "learning_rate": 4.7436963135435864e-06, |
| "loss": 0.6875, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.9027387640449438, |
| "grad_norm": 0.8693889379501343, |
| "learning_rate": 4.743492362849107e-06, |
| "loss": 0.6926, |
| "step": 2571 |
| }, |
| { |
| "epoch": 0.9030898876404494, |
| "grad_norm": 0.8809205889701843, |
| "learning_rate": 4.743288335429109e-06, |
| "loss": 0.6841, |
| "step": 2572 |
| }, |
| { |
| "epoch": 0.9034410112359551, |
| "grad_norm": 0.8538480401039124, |
| "learning_rate": 4.743084231290569e-06, |
| "loss": 0.6416, |
| "step": 2573 |
| }, |
| { |
| "epoch": 0.9037921348314607, |
| "grad_norm": 0.8628817200660706, |
| "learning_rate": 4.742880050440468e-06, |
| "loss": 0.649, |
| "step": 2574 |
| }, |
| { |
| "epoch": 0.9041432584269663, |
| "grad_norm": 0.9055478572845459, |
| "learning_rate": 4.742675792885788e-06, |
| "loss": 0.6819, |
| "step": 2575 |
| }, |
| { |
| "epoch": 0.9044943820224719, |
| "grad_norm": 0.8601617217063904, |
| "learning_rate": 4.742471458633516e-06, |
| "loss": 0.6675, |
| "step": 2576 |
| }, |
| { |
| "epoch": 0.9048455056179775, |
| "grad_norm": 0.8635382056236267, |
| "learning_rate": 4.742267047690638e-06, |
| "loss": 0.6537, |
| "step": 2577 |
| }, |
| { |
| "epoch": 0.9051966292134831, |
| "grad_norm": 0.892162561416626, |
| "learning_rate": 4.742062560064146e-06, |
| "loss": 0.6627, |
| "step": 2578 |
| }, |
| { |
| "epoch": 0.9055477528089888, |
| "grad_norm": 0.9016808867454529, |
| "learning_rate": 4.741857995761033e-06, |
| "loss": 0.666, |
| "step": 2579 |
| }, |
| { |
| "epoch": 0.9058988764044944, |
| "grad_norm": 0.8793184161186218, |
| "learning_rate": 4.741653354788295e-06, |
| "loss": 0.638, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.90625, |
| "grad_norm": 0.8721537590026855, |
| "learning_rate": 4.741448637152931e-06, |
| "loss": 0.6816, |
| "step": 2581 |
| }, |
| { |
| "epoch": 0.9066011235955056, |
| "grad_norm": 0.9141138792037964, |
| "learning_rate": 4.741243842861941e-06, |
| "loss": 0.6959, |
| "step": 2582 |
| }, |
| { |
| "epoch": 0.9069522471910112, |
| "grad_norm": 0.8552782535552979, |
| "learning_rate": 4.741038971922329e-06, |
| "loss": 0.6868, |
| "step": 2583 |
| }, |
| { |
| "epoch": 0.9073033707865169, |
| "grad_norm": 0.8622416257858276, |
| "learning_rate": 4.740834024341103e-06, |
| "loss": 0.6861, |
| "step": 2584 |
| }, |
| { |
| "epoch": 0.9076544943820225, |
| "grad_norm": 0.8589463829994202, |
| "learning_rate": 4.74062900012527e-06, |
| "loss": 0.6525, |
| "step": 2585 |
| }, |
| { |
| "epoch": 0.9080056179775281, |
| "grad_norm": 0.9055203795433044, |
| "learning_rate": 4.740423899281843e-06, |
| "loss": 0.6724, |
| "step": 2586 |
| }, |
| { |
| "epoch": 0.9083567415730337, |
| "grad_norm": 0.9066650867462158, |
| "learning_rate": 4.740218721817836e-06, |
| "loss": 0.6929, |
| "step": 2587 |
| }, |
| { |
| "epoch": 0.9087078651685393, |
| "grad_norm": 0.8654912114143372, |
| "learning_rate": 4.7400134677402655e-06, |
| "loss": 0.6704, |
| "step": 2588 |
| }, |
| { |
| "epoch": 0.9090589887640449, |
| "grad_norm": 0.9301373958587646, |
| "learning_rate": 4.739808137056151e-06, |
| "loss": 0.6797, |
| "step": 2589 |
| }, |
| { |
| "epoch": 0.9094101123595506, |
| "grad_norm": 0.8802602291107178, |
| "learning_rate": 4.739602729772515e-06, |
| "loss": 0.6657, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.9097612359550562, |
| "grad_norm": 0.873859167098999, |
| "learning_rate": 4.739397245896382e-06, |
| "loss": 0.6364, |
| "step": 2591 |
| }, |
| { |
| "epoch": 0.9101123595505618, |
| "grad_norm": 0.8515645861625671, |
| "learning_rate": 4.739191685434779e-06, |
| "loss": 0.6472, |
| "step": 2592 |
| }, |
| { |
| "epoch": 0.9104634831460674, |
| "grad_norm": 0.8645114898681641, |
| "learning_rate": 4.738986048394737e-06, |
| "loss": 0.6792, |
| "step": 2593 |
| }, |
| { |
| "epoch": 0.910814606741573, |
| "grad_norm": 0.862397313117981, |
| "learning_rate": 4.738780334783288e-06, |
| "loss": 0.6522, |
| "step": 2594 |
| }, |
| { |
| "epoch": 0.9111657303370787, |
| "grad_norm": 0.8489444255828857, |
| "learning_rate": 4.738574544607467e-06, |
| "loss": 0.6757, |
| "step": 2595 |
| }, |
| { |
| "epoch": 0.9115168539325843, |
| "grad_norm": 0.8827484250068665, |
| "learning_rate": 4.738368677874313e-06, |
| "loss": 0.6842, |
| "step": 2596 |
| }, |
| { |
| "epoch": 0.9118679775280899, |
| "grad_norm": 0.8966019749641418, |
| "learning_rate": 4.738162734590865e-06, |
| "loss": 0.6658, |
| "step": 2597 |
| }, |
| { |
| "epoch": 0.9122191011235955, |
| "grad_norm": 0.8434582948684692, |
| "learning_rate": 4.7379567147641665e-06, |
| "loss": 0.6195, |
| "step": 2598 |
| }, |
| { |
| "epoch": 0.9125702247191011, |
| "grad_norm": 0.9089177250862122, |
| "learning_rate": 4.737750618401264e-06, |
| "loss": 0.7168, |
| "step": 2599 |
| }, |
| { |
| "epoch": 0.9129213483146067, |
| "grad_norm": 0.9108870625495911, |
| "learning_rate": 4.737544445509204e-06, |
| "loss": 0.6995, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.9132724719101124, |
| "grad_norm": 0.8936570882797241, |
| "learning_rate": 4.73733819609504e-06, |
| "loss": 0.6726, |
| "step": 2601 |
| }, |
| { |
| "epoch": 0.913623595505618, |
| "grad_norm": 0.8761941194534302, |
| "learning_rate": 4.737131870165824e-06, |
| "loss": 0.6642, |
| "step": 2602 |
| }, |
| { |
| "epoch": 0.9139747191011236, |
| "grad_norm": 0.870694100856781, |
| "learning_rate": 4.736925467728612e-06, |
| "loss": 0.6945, |
| "step": 2603 |
| }, |
| { |
| "epoch": 0.9143258426966292, |
| "grad_norm": 0.8931704759597778, |
| "learning_rate": 4.736718988790464e-06, |
| "loss": 0.6777, |
| "step": 2604 |
| }, |
| { |
| "epoch": 0.9146769662921348, |
| "grad_norm": 0.8875551223754883, |
| "learning_rate": 4.73651243335844e-06, |
| "loss": 0.6664, |
| "step": 2605 |
| }, |
| { |
| "epoch": 0.9150280898876404, |
| "grad_norm": 0.8898186087608337, |
| "learning_rate": 4.736305801439603e-06, |
| "loss": 0.6652, |
| "step": 2606 |
| }, |
| { |
| "epoch": 0.9153792134831461, |
| "grad_norm": 0.8893354535102844, |
| "learning_rate": 4.736099093041023e-06, |
| "loss": 0.6587, |
| "step": 2607 |
| }, |
| { |
| "epoch": 0.9157303370786517, |
| "grad_norm": 0.9278960824012756, |
| "learning_rate": 4.735892308169768e-06, |
| "loss": 0.6674, |
| "step": 2608 |
| }, |
| { |
| "epoch": 0.9160814606741573, |
| "grad_norm": 0.9112112522125244, |
| "learning_rate": 4.735685446832908e-06, |
| "loss": 0.6435, |
| "step": 2609 |
| }, |
| { |
| "epoch": 0.9164325842696629, |
| "grad_norm": 0.902930736541748, |
| "learning_rate": 4.735478509037519e-06, |
| "loss": 0.6402, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.9167837078651685, |
| "grad_norm": 0.8808385729789734, |
| "learning_rate": 4.7352714947906785e-06, |
| "loss": 0.707, |
| "step": 2611 |
| }, |
| { |
| "epoch": 0.9171348314606742, |
| "grad_norm": 0.8619163036346436, |
| "learning_rate": 4.735064404099465e-06, |
| "loss": 0.6728, |
| "step": 2612 |
| }, |
| { |
| "epoch": 0.9174859550561798, |
| "grad_norm": 0.8795300126075745, |
| "learning_rate": 4.734857236970962e-06, |
| "loss": 0.6543, |
| "step": 2613 |
| }, |
| { |
| "epoch": 0.9178370786516854, |
| "grad_norm": 0.9281381964683533, |
| "learning_rate": 4.734649993412254e-06, |
| "loss": 0.6594, |
| "step": 2614 |
| }, |
| { |
| "epoch": 0.918188202247191, |
| "grad_norm": 0.8637987971305847, |
| "learning_rate": 4.734442673430428e-06, |
| "loss": 0.6664, |
| "step": 2615 |
| }, |
| { |
| "epoch": 0.9185393258426966, |
| "grad_norm": 0.8663271069526672, |
| "learning_rate": 4.734235277032575e-06, |
| "loss": 0.6658, |
| "step": 2616 |
| }, |
| { |
| "epoch": 0.9188904494382022, |
| "grad_norm": 0.9028787016868591, |
| "learning_rate": 4.734027804225787e-06, |
| "loss": 0.6473, |
| "step": 2617 |
| }, |
| { |
| "epoch": 0.9192415730337079, |
| "grad_norm": 0.8826063275337219, |
| "learning_rate": 4.733820255017161e-06, |
| "loss": 0.626, |
| "step": 2618 |
| }, |
| { |
| "epoch": 0.9195926966292135, |
| "grad_norm": 0.8579033613204956, |
| "learning_rate": 4.7336126294137915e-06, |
| "loss": 0.6527, |
| "step": 2619 |
| }, |
| { |
| "epoch": 0.9199438202247191, |
| "grad_norm": 0.8992764353752136, |
| "learning_rate": 4.733404927422783e-06, |
| "loss": 0.6841, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.9202949438202247, |
| "grad_norm": 0.9202234148979187, |
| "learning_rate": 4.733197149051237e-06, |
| "loss": 0.6871, |
| "step": 2621 |
| }, |
| { |
| "epoch": 0.9206460674157303, |
| "grad_norm": 0.8881893754005432, |
| "learning_rate": 4.73298929430626e-06, |
| "loss": 0.6851, |
| "step": 2622 |
| }, |
| { |
| "epoch": 0.920997191011236, |
| "grad_norm": 0.9517401456832886, |
| "learning_rate": 4.732781363194958e-06, |
| "loss": 0.6748, |
| "step": 2623 |
| }, |
| { |
| "epoch": 0.9213483146067416, |
| "grad_norm": 0.9222083687782288, |
| "learning_rate": 4.7325733557244455e-06, |
| "loss": 0.6939, |
| "step": 2624 |
| }, |
| { |
| "epoch": 0.9216994382022472, |
| "grad_norm": 0.8639568090438843, |
| "learning_rate": 4.732365271901834e-06, |
| "loss": 0.664, |
| "step": 2625 |
| }, |
| { |
| "epoch": 0.9220505617977528, |
| "grad_norm": 0.9032568335533142, |
| "learning_rate": 4.732157111734241e-06, |
| "loss": 0.6779, |
| "step": 2626 |
| }, |
| { |
| "epoch": 0.9224016853932584, |
| "grad_norm": 0.9278046488761902, |
| "learning_rate": 4.731948875228784e-06, |
| "loss": 0.682, |
| "step": 2627 |
| }, |
| { |
| "epoch": 0.922752808988764, |
| "grad_norm": 0.9298899173736572, |
| "learning_rate": 4.731740562392587e-06, |
| "loss": 0.687, |
| "step": 2628 |
| }, |
| { |
| "epoch": 0.9231039325842697, |
| "grad_norm": 0.8798907995223999, |
| "learning_rate": 4.7315321732327715e-06, |
| "loss": 0.6722, |
| "step": 2629 |
| }, |
| { |
| "epoch": 0.9234550561797753, |
| "grad_norm": 0.8544273376464844, |
| "learning_rate": 4.731323707756465e-06, |
| "loss": 0.6801, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.9238061797752809, |
| "grad_norm": 0.8940860033035278, |
| "learning_rate": 4.731115165970797e-06, |
| "loss": 0.6548, |
| "step": 2631 |
| }, |
| { |
| "epoch": 0.9241573033707865, |
| "grad_norm": 0.9600421190261841, |
| "learning_rate": 4.730906547882901e-06, |
| "loss": 0.6935, |
| "step": 2632 |
| }, |
| { |
| "epoch": 0.9245084269662921, |
| "grad_norm": 0.9062276482582092, |
| "learning_rate": 4.730697853499908e-06, |
| "loss": 0.7043, |
| "step": 2633 |
| }, |
| { |
| "epoch": 0.9248595505617978, |
| "grad_norm": 0.8218638300895691, |
| "learning_rate": 4.730489082828959e-06, |
| "loss": 0.5993, |
| "step": 2634 |
| }, |
| { |
| "epoch": 0.9252106741573034, |
| "grad_norm": 0.8894577622413635, |
| "learning_rate": 4.7302802358771905e-06, |
| "loss": 0.6411, |
| "step": 2635 |
| }, |
| { |
| "epoch": 0.925561797752809, |
| "grad_norm": 0.881628155708313, |
| "learning_rate": 4.730071312651747e-06, |
| "loss": 0.6686, |
| "step": 2636 |
| }, |
| { |
| "epoch": 0.9259129213483146, |
| "grad_norm": 0.925878643989563, |
| "learning_rate": 4.7298623131597735e-06, |
| "loss": 0.6674, |
| "step": 2637 |
| }, |
| { |
| "epoch": 0.9262640449438202, |
| "grad_norm": 0.8464183211326599, |
| "learning_rate": 4.729653237408416e-06, |
| "loss": 0.6902, |
| "step": 2638 |
| }, |
| { |
| "epoch": 0.9266151685393258, |
| "grad_norm": 0.873918890953064, |
| "learning_rate": 4.729444085404826e-06, |
| "loss": 0.6737, |
| "step": 2639 |
| }, |
| { |
| "epoch": 0.9269662921348315, |
| "grad_norm": 0.8666852116584778, |
| "learning_rate": 4.7292348571561566e-06, |
| "loss": 0.6601, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.9273174157303371, |
| "grad_norm": 1.0832674503326416, |
| "learning_rate": 4.729025552669563e-06, |
| "loss": 0.654, |
| "step": 2641 |
| }, |
| { |
| "epoch": 0.9276685393258427, |
| "grad_norm": 0.8796826004981995, |
| "learning_rate": 4.728816171952202e-06, |
| "loss": 0.6365, |
| "step": 2642 |
| }, |
| { |
| "epoch": 0.9280196629213483, |
| "grad_norm": 0.8894056081771851, |
| "learning_rate": 4.728606715011235e-06, |
| "loss": 0.6955, |
| "step": 2643 |
| }, |
| { |
| "epoch": 0.9283707865168539, |
| "grad_norm": 0.9130917191505432, |
| "learning_rate": 4.728397181853826e-06, |
| "loss": 0.6807, |
| "step": 2644 |
| }, |
| { |
| "epoch": 0.9287219101123596, |
| "grad_norm": 0.8895679116249084, |
| "learning_rate": 4.72818757248714e-06, |
| "loss": 0.71, |
| "step": 2645 |
| }, |
| { |
| "epoch": 0.9290730337078652, |
| "grad_norm": 0.8796029090881348, |
| "learning_rate": 4.727977886918345e-06, |
| "loss": 0.666, |
| "step": 2646 |
| }, |
| { |
| "epoch": 0.9294241573033708, |
| "grad_norm": 0.8870053887367249, |
| "learning_rate": 4.727768125154614e-06, |
| "loss": 0.6724, |
| "step": 2647 |
| }, |
| { |
| "epoch": 0.9297752808988764, |
| "grad_norm": 0.8785473108291626, |
| "learning_rate": 4.727558287203119e-06, |
| "loss": 0.6735, |
| "step": 2648 |
| }, |
| { |
| "epoch": 0.930126404494382, |
| "grad_norm": 0.8722987174987793, |
| "learning_rate": 4.727348373071037e-06, |
| "loss": 0.6816, |
| "step": 2649 |
| }, |
| { |
| "epoch": 0.9304775280898876, |
| "grad_norm": 0.8995219469070435, |
| "learning_rate": 4.7271383827655455e-06, |
| "loss": 0.6657, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.9308286516853933, |
| "grad_norm": 0.8835882544517517, |
| "learning_rate": 4.726928316293828e-06, |
| "loss": 0.6973, |
| "step": 2651 |
| }, |
| { |
| "epoch": 0.9311797752808989, |
| "grad_norm": 0.8654088973999023, |
| "learning_rate": 4.726718173663068e-06, |
| "loss": 0.6083, |
| "step": 2652 |
| }, |
| { |
| "epoch": 0.9315308988764045, |
| "grad_norm": 0.832149088382721, |
| "learning_rate": 4.726507954880451e-06, |
| "loss": 0.5915, |
| "step": 2653 |
| }, |
| { |
| "epoch": 0.9318820224719101, |
| "grad_norm": 0.8820677995681763, |
| "learning_rate": 4.726297659953169e-06, |
| "loss": 0.641, |
| "step": 2654 |
| }, |
| { |
| "epoch": 0.9322331460674157, |
| "grad_norm": 0.9029566049575806, |
| "learning_rate": 4.7260872888884105e-06, |
| "loss": 0.6845, |
| "step": 2655 |
| }, |
| { |
| "epoch": 0.9325842696629213, |
| "grad_norm": 0.8395662307739258, |
| "learning_rate": 4.725876841693372e-06, |
| "loss": 0.6566, |
| "step": 2656 |
| }, |
| { |
| "epoch": 0.932935393258427, |
| "grad_norm": 0.894307017326355, |
| "learning_rate": 4.7256663183752495e-06, |
| "loss": 0.6985, |
| "step": 2657 |
| }, |
| { |
| "epoch": 0.9332865168539326, |
| "grad_norm": 0.9100427031517029, |
| "learning_rate": 4.725455718941245e-06, |
| "loss": 0.6687, |
| "step": 2658 |
| }, |
| { |
| "epoch": 0.9336376404494382, |
| "grad_norm": 0.874563455581665, |
| "learning_rate": 4.725245043398558e-06, |
| "loss": 0.6692, |
| "step": 2659 |
| }, |
| { |
| "epoch": 0.9339887640449438, |
| "grad_norm": 0.8920844793319702, |
| "learning_rate": 4.725034291754394e-06, |
| "loss": 0.6334, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.9343398876404494, |
| "grad_norm": 0.8333306908607483, |
| "learning_rate": 4.724823464015963e-06, |
| "loss": 0.6322, |
| "step": 2661 |
| }, |
| { |
| "epoch": 0.9346910112359551, |
| "grad_norm": 0.8971813917160034, |
| "learning_rate": 4.724612560190473e-06, |
| "loss": 0.6823, |
| "step": 2662 |
| }, |
| { |
| "epoch": 0.9350421348314607, |
| "grad_norm": 0.8798514008522034, |
| "learning_rate": 4.724401580285136e-06, |
| "loss": 0.6742, |
| "step": 2663 |
| }, |
| { |
| "epoch": 0.9353932584269663, |
| "grad_norm": 0.827732503414154, |
| "learning_rate": 4.724190524307169e-06, |
| "loss": 0.6574, |
| "step": 2664 |
| }, |
| { |
| "epoch": 0.9357443820224719, |
| "grad_norm": 0.8697452545166016, |
| "learning_rate": 4.7239793922637896e-06, |
| "loss": 0.637, |
| "step": 2665 |
| }, |
| { |
| "epoch": 0.9360955056179775, |
| "grad_norm": 0.887448251247406, |
| "learning_rate": 4.723768184162217e-06, |
| "loss": 0.6762, |
| "step": 2666 |
| }, |
| { |
| "epoch": 0.9364466292134831, |
| "grad_norm": 0.9078186750411987, |
| "learning_rate": 4.723556900009677e-06, |
| "loss": 0.6832, |
| "step": 2667 |
| }, |
| { |
| "epoch": 0.9367977528089888, |
| "grad_norm": 0.8617350459098816, |
| "learning_rate": 4.723345539813393e-06, |
| "loss": 0.6591, |
| "step": 2668 |
| }, |
| { |
| "epoch": 0.9371488764044944, |
| "grad_norm": 0.8676525354385376, |
| "learning_rate": 4.723134103580594e-06, |
| "loss": 0.6941, |
| "step": 2669 |
| }, |
| { |
| "epoch": 0.9375, |
| "grad_norm": 0.8551449179649353, |
| "learning_rate": 4.722922591318511e-06, |
| "loss": 0.6142, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.9378511235955056, |
| "grad_norm": 0.9027611017227173, |
| "learning_rate": 4.722711003034378e-06, |
| "loss": 0.6893, |
| "step": 2671 |
| }, |
| { |
| "epoch": 0.9382022471910112, |
| "grad_norm": 0.8602604269981384, |
| "learning_rate": 4.7224993387354305e-06, |
| "loss": 0.644, |
| "step": 2672 |
| }, |
| { |
| "epoch": 0.9385533707865169, |
| "grad_norm": 0.9275681376457214, |
| "learning_rate": 4.722287598428907e-06, |
| "loss": 0.676, |
| "step": 2673 |
| }, |
| { |
| "epoch": 0.9389044943820225, |
| "grad_norm": 0.880574107170105, |
| "learning_rate": 4.7220757821220495e-06, |
| "loss": 0.6068, |
| "step": 2674 |
| }, |
| { |
| "epoch": 0.9392556179775281, |
| "grad_norm": 0.8658181428909302, |
| "learning_rate": 4.721863889822102e-06, |
| "loss": 0.6714, |
| "step": 2675 |
| }, |
| { |
| "epoch": 0.9396067415730337, |
| "grad_norm": 0.890250563621521, |
| "learning_rate": 4.7216519215363095e-06, |
| "loss": 0.6624, |
| "step": 2676 |
| }, |
| { |
| "epoch": 0.9399578651685393, |
| "grad_norm": 0.870218813419342, |
| "learning_rate": 4.721439877271924e-06, |
| "loss": 0.6692, |
| "step": 2677 |
| }, |
| { |
| "epoch": 0.9403089887640449, |
| "grad_norm": 0.8716661334037781, |
| "learning_rate": 4.721227757036195e-06, |
| "loss": 0.6513, |
| "step": 2678 |
| }, |
| { |
| "epoch": 0.9406601123595506, |
| "grad_norm": 0.8979821801185608, |
| "learning_rate": 4.721015560836377e-06, |
| "loss": 0.6658, |
| "step": 2679 |
| }, |
| { |
| "epoch": 0.9410112359550562, |
| "grad_norm": 1.3192576169967651, |
| "learning_rate": 4.7208032886797285e-06, |
| "loss": 0.6726, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.9413623595505618, |
| "grad_norm": 0.9058758020401001, |
| "learning_rate": 4.720590940573507e-06, |
| "loss": 0.6764, |
| "step": 2681 |
| }, |
| { |
| "epoch": 0.9417134831460674, |
| "grad_norm": 0.8641902208328247, |
| "learning_rate": 4.720378516524976e-06, |
| "loss": 0.7075, |
| "step": 2682 |
| }, |
| { |
| "epoch": 0.942064606741573, |
| "grad_norm": 0.8593077659606934, |
| "learning_rate": 4.7201660165413994e-06, |
| "loss": 0.6575, |
| "step": 2683 |
| }, |
| { |
| "epoch": 0.9424157303370787, |
| "grad_norm": 0.8111205697059631, |
| "learning_rate": 4.719953440630045e-06, |
| "loss": 0.5865, |
| "step": 2684 |
| }, |
| { |
| "epoch": 0.9427668539325843, |
| "grad_norm": 0.8845921754837036, |
| "learning_rate": 4.719740788798182e-06, |
| "loss": 0.6787, |
| "step": 2685 |
| }, |
| { |
| "epoch": 0.9431179775280899, |
| "grad_norm": 0.893483579158783, |
| "learning_rate": 4.719528061053084e-06, |
| "loss": 0.657, |
| "step": 2686 |
| }, |
| { |
| "epoch": 0.9434691011235955, |
| "grad_norm": 0.882802426815033, |
| "learning_rate": 4.719315257402026e-06, |
| "loss": 0.6678, |
| "step": 2687 |
| }, |
| { |
| "epoch": 0.9438202247191011, |
| "grad_norm": 0.8567876815795898, |
| "learning_rate": 4.7191023778522844e-06, |
| "loss": 0.6167, |
| "step": 2688 |
| }, |
| { |
| "epoch": 0.9441713483146067, |
| "grad_norm": 0.8992242217063904, |
| "learning_rate": 4.718889422411141e-06, |
| "loss": 0.6691, |
| "step": 2689 |
| }, |
| { |
| "epoch": 0.9445224719101124, |
| "grad_norm": 0.8735215663909912, |
| "learning_rate": 4.718676391085878e-06, |
| "loss": 0.63, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.944873595505618, |
| "grad_norm": 0.8774729371070862, |
| "learning_rate": 4.71846328388378e-06, |
| "loss": 0.6702, |
| "step": 2691 |
| }, |
| { |
| "epoch": 0.9452247191011236, |
| "grad_norm": 0.9470273852348328, |
| "learning_rate": 4.718250100812138e-06, |
| "loss": 0.6997, |
| "step": 2692 |
| }, |
| { |
| "epoch": 0.9455758426966292, |
| "grad_norm": 0.9050356149673462, |
| "learning_rate": 4.71803684187824e-06, |
| "loss": 0.6451, |
| "step": 2693 |
| }, |
| { |
| "epoch": 0.9459269662921348, |
| "grad_norm": 0.9217947125434875, |
| "learning_rate": 4.7178235070893796e-06, |
| "loss": 0.6556, |
| "step": 2694 |
| }, |
| { |
| "epoch": 0.9462780898876404, |
| "grad_norm": 0.909965991973877, |
| "learning_rate": 4.717610096452852e-06, |
| "loss": 0.6772, |
| "step": 2695 |
| }, |
| { |
| "epoch": 0.9466292134831461, |
| "grad_norm": 0.8807794451713562, |
| "learning_rate": 4.717396609975959e-06, |
| "loss": 0.6805, |
| "step": 2696 |
| }, |
| { |
| "epoch": 0.9469803370786517, |
| "grad_norm": 0.9008699059486389, |
| "learning_rate": 4.717183047665998e-06, |
| "loss": 0.6718, |
| "step": 2697 |
| }, |
| { |
| "epoch": 0.9473314606741573, |
| "grad_norm": 0.9289125204086304, |
| "learning_rate": 4.716969409530274e-06, |
| "loss": 0.6705, |
| "step": 2698 |
| }, |
| { |
| "epoch": 0.9476825842696629, |
| "grad_norm": 0.9074335098266602, |
| "learning_rate": 4.716755695576094e-06, |
| "loss": 0.6897, |
| "step": 2699 |
| }, |
| { |
| "epoch": 0.9480337078651685, |
| "grad_norm": 0.9094159603118896, |
| "learning_rate": 4.716541905810766e-06, |
| "loss": 0.6625, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.9483848314606742, |
| "grad_norm": 0.8951701521873474, |
| "learning_rate": 4.716328040241601e-06, |
| "loss": 0.6234, |
| "step": 2701 |
| }, |
| { |
| "epoch": 0.9487359550561798, |
| "grad_norm": 0.8777403831481934, |
| "learning_rate": 4.716114098875914e-06, |
| "loss": 0.6418, |
| "step": 2702 |
| }, |
| { |
| "epoch": 0.9490870786516854, |
| "grad_norm": 0.9449620842933655, |
| "learning_rate": 4.715900081721021e-06, |
| "loss": 0.6814, |
| "step": 2703 |
| }, |
| { |
| "epoch": 0.949438202247191, |
| "grad_norm": 0.8588343262672424, |
| "learning_rate": 4.7156859887842416e-06, |
| "loss": 0.6042, |
| "step": 2704 |
| }, |
| { |
| "epoch": 0.9497893258426966, |
| "grad_norm": 0.8675066232681274, |
| "learning_rate": 4.715471820072896e-06, |
| "loss": 0.6591, |
| "step": 2705 |
| }, |
| { |
| "epoch": 0.9501404494382022, |
| "grad_norm": 0.8843618631362915, |
| "learning_rate": 4.715257575594312e-06, |
| "loss": 0.6594, |
| "step": 2706 |
| }, |
| { |
| "epoch": 0.9504915730337079, |
| "grad_norm": 0.8740993142127991, |
| "learning_rate": 4.715043255355814e-06, |
| "loss": 0.6578, |
| "step": 2707 |
| }, |
| { |
| "epoch": 0.9508426966292135, |
| "grad_norm": 0.8732353448867798, |
| "learning_rate": 4.7148288593647315e-06, |
| "loss": 0.6399, |
| "step": 2708 |
| }, |
| { |
| "epoch": 0.9511938202247191, |
| "grad_norm": 0.8841140866279602, |
| "learning_rate": 4.714614387628398e-06, |
| "loss": 0.6879, |
| "step": 2709 |
| }, |
| { |
| "epoch": 0.9515449438202247, |
| "grad_norm": 0.9016351103782654, |
| "learning_rate": 4.714399840154147e-06, |
| "loss": 0.6618, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.9518960674157303, |
| "grad_norm": 0.8737030625343323, |
| "learning_rate": 4.714185216949317e-06, |
| "loss": 0.6616, |
| "step": 2711 |
| }, |
| { |
| "epoch": 0.952247191011236, |
| "grad_norm": 0.8543287515640259, |
| "learning_rate": 4.713970518021246e-06, |
| "loss": 0.627, |
| "step": 2712 |
| }, |
| { |
| "epoch": 0.9525983146067416, |
| "grad_norm": 0.8959774374961853, |
| "learning_rate": 4.713755743377278e-06, |
| "loss": 0.6936, |
| "step": 2713 |
| }, |
| { |
| "epoch": 0.9529494382022472, |
| "grad_norm": 0.9039850831031799, |
| "learning_rate": 4.713540893024759e-06, |
| "loss": 0.6465, |
| "step": 2714 |
| }, |
| { |
| "epoch": 0.9533005617977528, |
| "grad_norm": 0.8787552118301392, |
| "learning_rate": 4.713325966971035e-06, |
| "loss": 0.6127, |
| "step": 2715 |
| }, |
| { |
| "epoch": 0.9536516853932584, |
| "grad_norm": 0.8473005294799805, |
| "learning_rate": 4.713110965223457e-06, |
| "loss": 0.6648, |
| "step": 2716 |
| }, |
| { |
| "epoch": 0.954002808988764, |
| "grad_norm": 0.937426745891571, |
| "learning_rate": 4.712895887789377e-06, |
| "loss": 0.6594, |
| "step": 2717 |
| }, |
| { |
| "epoch": 0.9543539325842697, |
| "grad_norm": 0.8719070553779602, |
| "learning_rate": 4.712680734676153e-06, |
| "loss": 0.6652, |
| "step": 2718 |
| }, |
| { |
| "epoch": 0.9547050561797753, |
| "grad_norm": 0.8872880935668945, |
| "learning_rate": 4.7124655058911395e-06, |
| "loss": 0.6987, |
| "step": 2719 |
| }, |
| { |
| "epoch": 0.9550561797752809, |
| "grad_norm": 0.8662359714508057, |
| "learning_rate": 4.7122502014417e-06, |
| "loss": 0.6723, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.9554073033707865, |
| "grad_norm": 0.8790738582611084, |
| "learning_rate": 4.712034821335196e-06, |
| "loss": 0.6421, |
| "step": 2721 |
| }, |
| { |
| "epoch": 0.9557584269662921, |
| "grad_norm": 0.8946492671966553, |
| "learning_rate": 4.711819365578995e-06, |
| "loss": 0.6439, |
| "step": 2722 |
| }, |
| { |
| "epoch": 0.9561095505617978, |
| "grad_norm": 0.9047174453735352, |
| "learning_rate": 4.711603834180464e-06, |
| "loss": 0.6859, |
| "step": 2723 |
| }, |
| { |
| "epoch": 0.9564606741573034, |
| "grad_norm": 0.9128047823905945, |
| "learning_rate": 4.711388227146974e-06, |
| "loss": 0.671, |
| "step": 2724 |
| }, |
| { |
| "epoch": 0.956811797752809, |
| "grad_norm": 0.8675251007080078, |
| "learning_rate": 4.711172544485898e-06, |
| "loss": 0.672, |
| "step": 2725 |
| }, |
| { |
| "epoch": 0.9571629213483146, |
| "grad_norm": 0.8741472363471985, |
| "learning_rate": 4.710956786204614e-06, |
| "loss": 0.6614, |
| "step": 2726 |
| }, |
| { |
| "epoch": 0.9575140449438202, |
| "grad_norm": 0.851613461971283, |
| "learning_rate": 4.710740952310501e-06, |
| "loss": 0.645, |
| "step": 2727 |
| }, |
| { |
| "epoch": 0.9578651685393258, |
| "grad_norm": 0.9370160698890686, |
| "learning_rate": 4.7105250428109375e-06, |
| "loss": 0.6743, |
| "step": 2728 |
| }, |
| { |
| "epoch": 0.9582162921348315, |
| "grad_norm": 0.9051042199134827, |
| "learning_rate": 4.71030905771331e-06, |
| "loss": 0.6166, |
| "step": 2729 |
| }, |
| { |
| "epoch": 0.9585674157303371, |
| "grad_norm": 0.8878385424613953, |
| "learning_rate": 4.7100929970250035e-06, |
| "loss": 0.6447, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.9589185393258427, |
| "grad_norm": 0.857430100440979, |
| "learning_rate": 4.709876860753409e-06, |
| "loss": 0.6557, |
| "step": 2731 |
| }, |
| { |
| "epoch": 0.9592696629213483, |
| "grad_norm": 0.8934675455093384, |
| "learning_rate": 4.709660648905916e-06, |
| "loss": 0.675, |
| "step": 2732 |
| }, |
| { |
| "epoch": 0.9596207865168539, |
| "grad_norm": 0.8855241537094116, |
| "learning_rate": 4.709444361489918e-06, |
| "loss": 0.6705, |
| "step": 2733 |
| }, |
| { |
| "epoch": 0.9599719101123596, |
| "grad_norm": 0.8473418354988098, |
| "learning_rate": 4.7092279985128165e-06, |
| "loss": 0.6398, |
| "step": 2734 |
| }, |
| { |
| "epoch": 0.9603230337078652, |
| "grad_norm": 0.9071454405784607, |
| "learning_rate": 4.709011559982006e-06, |
| "loss": 0.6824, |
| "step": 2735 |
| }, |
| { |
| "epoch": 0.9606741573033708, |
| "grad_norm": 0.8947198390960693, |
| "learning_rate": 4.708795045904891e-06, |
| "loss": 0.6794, |
| "step": 2736 |
| }, |
| { |
| "epoch": 0.9610252808988764, |
| "grad_norm": 0.8824878931045532, |
| "learning_rate": 4.708578456288875e-06, |
| "loss": 0.6453, |
| "step": 2737 |
| }, |
| { |
| "epoch": 0.961376404494382, |
| "grad_norm": 0.8897850513458252, |
| "learning_rate": 4.708361791141367e-06, |
| "loss": 0.6708, |
| "step": 2738 |
| }, |
| { |
| "epoch": 0.9617275280898876, |
| "grad_norm": 0.8753004670143127, |
| "learning_rate": 4.708145050469774e-06, |
| "loss": 0.6574, |
| "step": 2739 |
| }, |
| { |
| "epoch": 0.9620786516853933, |
| "grad_norm": 0.8702830672264099, |
| "learning_rate": 4.7079282342815105e-06, |
| "loss": 0.6363, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.9624297752808989, |
| "grad_norm": 0.8557823896408081, |
| "learning_rate": 4.707711342583991e-06, |
| "loss": 0.6607, |
| "step": 2741 |
| }, |
| { |
| "epoch": 0.9627808988764045, |
| "grad_norm": 0.9189360737800598, |
| "learning_rate": 4.7074943753846324e-06, |
| "loss": 0.6732, |
| "step": 2742 |
| }, |
| { |
| "epoch": 0.9631320224719101, |
| "grad_norm": 0.8712623715400696, |
| "learning_rate": 4.707277332690855e-06, |
| "loss": 0.6962, |
| "step": 2743 |
| }, |
| { |
| "epoch": 0.9634831460674157, |
| "grad_norm": 0.8758993744850159, |
| "learning_rate": 4.707060214510082e-06, |
| "loss": 0.688, |
| "step": 2744 |
| }, |
| { |
| "epoch": 0.9638342696629213, |
| "grad_norm": 0.9074702262878418, |
| "learning_rate": 4.706843020849738e-06, |
| "loss": 0.6668, |
| "step": 2745 |
| }, |
| { |
| "epoch": 0.964185393258427, |
| "grad_norm": 0.8798408508300781, |
| "learning_rate": 4.706625751717251e-06, |
| "loss": 0.6728, |
| "step": 2746 |
| }, |
| { |
| "epoch": 0.9645365168539326, |
| "grad_norm": 0.8691076636314392, |
| "learning_rate": 4.706408407120053e-06, |
| "loss": 0.6217, |
| "step": 2747 |
| }, |
| { |
| "epoch": 0.9648876404494382, |
| "grad_norm": 0.8737530708312988, |
| "learning_rate": 4.7061909870655734e-06, |
| "loss": 0.6618, |
| "step": 2748 |
| }, |
| { |
| "epoch": 0.9652387640449438, |
| "grad_norm": 0.893994152545929, |
| "learning_rate": 4.70597349156125e-06, |
| "loss": 0.6899, |
| "step": 2749 |
| }, |
| { |
| "epoch": 0.9655898876404494, |
| "grad_norm": 0.8533192276954651, |
| "learning_rate": 4.705755920614522e-06, |
| "loss": 0.601, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.9659410112359551, |
| "grad_norm": 0.8612004518508911, |
| "learning_rate": 4.705538274232829e-06, |
| "loss": 0.6407, |
| "step": 2751 |
| }, |
| { |
| "epoch": 0.9662921348314607, |
| "grad_norm": 0.8780084848403931, |
| "learning_rate": 4.7053205524236136e-06, |
| "loss": 0.6689, |
| "step": 2752 |
| }, |
| { |
| "epoch": 0.9666432584269663, |
| "grad_norm": 0.8959993124008179, |
| "learning_rate": 4.705102755194323e-06, |
| "loss": 0.6588, |
| "step": 2753 |
| }, |
| { |
| "epoch": 0.9669943820224719, |
| "grad_norm": 0.9221241474151611, |
| "learning_rate": 4.704884882552405e-06, |
| "loss": 0.6454, |
| "step": 2754 |
| }, |
| { |
| "epoch": 0.9673455056179775, |
| "grad_norm": 0.8571012020111084, |
| "learning_rate": 4.70466693450531e-06, |
| "loss": 0.6101, |
| "step": 2755 |
| }, |
| { |
| "epoch": 0.9676966292134831, |
| "grad_norm": 0.9270880222320557, |
| "learning_rate": 4.704448911060493e-06, |
| "loss": 0.6561, |
| "step": 2756 |
| }, |
| { |
| "epoch": 0.9680477528089888, |
| "grad_norm": 0.8872186541557312, |
| "learning_rate": 4.70423081222541e-06, |
| "loss": 0.6704, |
| "step": 2757 |
| }, |
| { |
| "epoch": 0.9683988764044944, |
| "grad_norm": 0.8734565377235413, |
| "learning_rate": 4.704012638007519e-06, |
| "loss": 0.6626, |
| "step": 2758 |
| }, |
| { |
| "epoch": 0.96875, |
| "grad_norm": 0.9009286761283875, |
| "learning_rate": 4.703794388414281e-06, |
| "loss": 0.6727, |
| "step": 2759 |
| }, |
| { |
| "epoch": 0.9691011235955056, |
| "grad_norm": 0.8499953746795654, |
| "learning_rate": 4.703576063453162e-06, |
| "loss": 0.6574, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.9694522471910112, |
| "grad_norm": 0.8824490308761597, |
| "learning_rate": 4.703357663131627e-06, |
| "loss": 0.6618, |
| "step": 2761 |
| }, |
| { |
| "epoch": 0.9698033707865169, |
| "grad_norm": 0.8579809665679932, |
| "learning_rate": 4.703139187457145e-06, |
| "loss": 0.6677, |
| "step": 2762 |
| }, |
| { |
| "epoch": 0.9701544943820225, |
| "grad_norm": 0.8739168643951416, |
| "learning_rate": 4.702920636437188e-06, |
| "loss": 0.6648, |
| "step": 2763 |
| }, |
| { |
| "epoch": 0.9705056179775281, |
| "grad_norm": 0.8557081818580627, |
| "learning_rate": 4.702702010079231e-06, |
| "loss": 0.6588, |
| "step": 2764 |
| }, |
| { |
| "epoch": 0.9708567415730337, |
| "grad_norm": 0.8667842149734497, |
| "learning_rate": 4.702483308390749e-06, |
| "loss": 0.6432, |
| "step": 2765 |
| }, |
| { |
| "epoch": 0.9712078651685393, |
| "grad_norm": 0.8367511034011841, |
| "learning_rate": 4.7022645313792235e-06, |
| "loss": 0.6428, |
| "step": 2766 |
| }, |
| { |
| "epoch": 0.9715589887640449, |
| "grad_norm": 0.8607786297798157, |
| "learning_rate": 4.702045679052136e-06, |
| "loss": 0.628, |
| "step": 2767 |
| }, |
| { |
| "epoch": 0.9719101123595506, |
| "grad_norm": 0.8698323369026184, |
| "learning_rate": 4.701826751416969e-06, |
| "loss": 0.6241, |
| "step": 2768 |
| }, |
| { |
| "epoch": 0.9722612359550562, |
| "grad_norm": 0.8620527386665344, |
| "learning_rate": 4.701607748481213e-06, |
| "loss": 0.6698, |
| "step": 2769 |
| }, |
| { |
| "epoch": 0.9726123595505618, |
| "grad_norm": 0.8832960724830627, |
| "learning_rate": 4.701388670252355e-06, |
| "loss": 0.6332, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.9729634831460674, |
| "grad_norm": 0.8846637606620789, |
| "learning_rate": 4.701169516737888e-06, |
| "loss": 0.6968, |
| "step": 2771 |
| }, |
| { |
| "epoch": 0.973314606741573, |
| "grad_norm": 0.8862726092338562, |
| "learning_rate": 4.7009502879453076e-06, |
| "loss": 0.7282, |
| "step": 2772 |
| }, |
| { |
| "epoch": 0.9736657303370787, |
| "grad_norm": 0.8758227229118347, |
| "learning_rate": 4.70073098388211e-06, |
| "loss": 0.658, |
| "step": 2773 |
| }, |
| { |
| "epoch": 0.9740168539325843, |
| "grad_norm": 0.8702247738838196, |
| "learning_rate": 4.700511604555796e-06, |
| "loss": 0.65, |
| "step": 2774 |
| }, |
| { |
| "epoch": 0.9743679775280899, |
| "grad_norm": 0.8507943153381348, |
| "learning_rate": 4.700292149973869e-06, |
| "loss": 0.6625, |
| "step": 2775 |
| }, |
| { |
| "epoch": 0.9747191011235955, |
| "grad_norm": 0.8701155185699463, |
| "learning_rate": 4.700072620143833e-06, |
| "loss": 0.6742, |
| "step": 2776 |
| }, |
| { |
| "epoch": 0.9750702247191011, |
| "grad_norm": 0.8407930135726929, |
| "learning_rate": 4.6998530150731955e-06, |
| "loss": 0.642, |
| "step": 2777 |
| }, |
| { |
| "epoch": 0.9754213483146067, |
| "grad_norm": 0.858070969581604, |
| "learning_rate": 4.6996333347694675e-06, |
| "loss": 0.6752, |
| "step": 2778 |
| }, |
| { |
| "epoch": 0.9757724719101124, |
| "grad_norm": 0.8673107624053955, |
| "learning_rate": 4.6994135792401615e-06, |
| "loss": 0.6826, |
| "step": 2779 |
| }, |
| { |
| "epoch": 0.976123595505618, |
| "grad_norm": 0.8591476082801819, |
| "learning_rate": 4.699193748492794e-06, |
| "loss": 0.6972, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.9764747191011236, |
| "grad_norm": 0.8738571405410767, |
| "learning_rate": 4.698973842534882e-06, |
| "loss": 0.6559, |
| "step": 2781 |
| }, |
| { |
| "epoch": 0.9768258426966292, |
| "grad_norm": 0.8602177500724792, |
| "learning_rate": 4.698753861373946e-06, |
| "loss": 0.6433, |
| "step": 2782 |
| }, |
| { |
| "epoch": 0.9771769662921348, |
| "grad_norm": 0.8604179620742798, |
| "learning_rate": 4.6985338050175096e-06, |
| "loss": 0.6499, |
| "step": 2783 |
| }, |
| { |
| "epoch": 0.9775280898876404, |
| "grad_norm": 0.8430880904197693, |
| "learning_rate": 4.698313673473098e-06, |
| "loss": 0.6008, |
| "step": 2784 |
| }, |
| { |
| "epoch": 0.9778792134831461, |
| "grad_norm": 0.8904216885566711, |
| "learning_rate": 4.69809346674824e-06, |
| "loss": 0.6869, |
| "step": 2785 |
| }, |
| { |
| "epoch": 0.9782303370786517, |
| "grad_norm": 0.8424625396728516, |
| "learning_rate": 4.6978731848504665e-06, |
| "loss": 0.6519, |
| "step": 2786 |
| }, |
| { |
| "epoch": 0.9785814606741573, |
| "grad_norm": 0.8511930704116821, |
| "learning_rate": 4.697652827787311e-06, |
| "loss": 0.6511, |
| "step": 2787 |
| }, |
| { |
| "epoch": 0.9789325842696629, |
| "grad_norm": 0.8731825351715088, |
| "learning_rate": 4.697432395566309e-06, |
| "loss": 0.6756, |
| "step": 2788 |
| }, |
| { |
| "epoch": 0.9792837078651685, |
| "grad_norm": 0.8926572203636169, |
| "learning_rate": 4.697211888194999e-06, |
| "loss": 0.6457, |
| "step": 2789 |
| }, |
| { |
| "epoch": 0.9796348314606742, |
| "grad_norm": 0.8796569108963013, |
| "learning_rate": 4.696991305680923e-06, |
| "loss": 0.6435, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.9799859550561798, |
| "grad_norm": 0.89739990234375, |
| "learning_rate": 4.6967706480316256e-06, |
| "loss": 0.666, |
| "step": 2791 |
| }, |
| { |
| "epoch": 0.9803370786516854, |
| "grad_norm": 0.8666273951530457, |
| "learning_rate": 4.69654991525465e-06, |
| "loss": 0.6658, |
| "step": 2792 |
| }, |
| { |
| "epoch": 0.980688202247191, |
| "grad_norm": 0.8648870587348938, |
| "learning_rate": 4.696329107357548e-06, |
| "loss": 0.6541, |
| "step": 2793 |
| }, |
| { |
| "epoch": 0.9810393258426966, |
| "grad_norm": 0.8609452247619629, |
| "learning_rate": 4.69610822434787e-06, |
| "loss": 0.6646, |
| "step": 2794 |
| }, |
| { |
| "epoch": 0.9813904494382022, |
| "grad_norm": 0.8994244933128357, |
| "learning_rate": 4.695887266233169e-06, |
| "loss": 0.6778, |
| "step": 2795 |
| }, |
| { |
| "epoch": 0.9817415730337079, |
| "grad_norm": 0.8689225316047668, |
| "learning_rate": 4.6956662330210035e-06, |
| "loss": 0.6581, |
| "step": 2796 |
| }, |
| { |
| "epoch": 0.9820926966292135, |
| "grad_norm": 0.8597514629364014, |
| "learning_rate": 4.695445124718932e-06, |
| "loss": 0.6322, |
| "step": 2797 |
| }, |
| { |
| "epoch": 0.9824438202247191, |
| "grad_norm": 0.8898642063140869, |
| "learning_rate": 4.695223941334514e-06, |
| "loss": 0.6866, |
| "step": 2798 |
| }, |
| { |
| "epoch": 0.9827949438202247, |
| "grad_norm": 0.9043296575546265, |
| "learning_rate": 4.695002682875317e-06, |
| "loss": 0.686, |
| "step": 2799 |
| }, |
| { |
| "epoch": 0.9831460674157303, |
| "grad_norm": 0.8787916898727417, |
| "learning_rate": 4.694781349348907e-06, |
| "loss": 0.6819, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.983497191011236, |
| "grad_norm": 0.86040198802948, |
| "learning_rate": 4.694559940762853e-06, |
| "loss": 0.6591, |
| "step": 2801 |
| }, |
| { |
| "epoch": 0.9838483146067416, |
| "grad_norm": 0.8670945167541504, |
| "learning_rate": 4.694338457124726e-06, |
| "loss": 0.6828, |
| "step": 2802 |
| }, |
| { |
| "epoch": 0.9841994382022472, |
| "grad_norm": 0.8818464875221252, |
| "learning_rate": 4.694116898442102e-06, |
| "loss": 0.6556, |
| "step": 2803 |
| }, |
| { |
| "epoch": 0.9845505617977528, |
| "grad_norm": 0.850309431552887, |
| "learning_rate": 4.693895264722557e-06, |
| "loss": 0.6803, |
| "step": 2804 |
| }, |
| { |
| "epoch": 0.9849016853932584, |
| "grad_norm": 0.8555431365966797, |
| "learning_rate": 4.6936735559736715e-06, |
| "loss": 0.6484, |
| "step": 2805 |
| }, |
| { |
| "epoch": 0.985252808988764, |
| "grad_norm": 0.8949291110038757, |
| "learning_rate": 4.693451772203028e-06, |
| "loss": 0.6889, |
| "step": 2806 |
| }, |
| { |
| "epoch": 0.9856039325842697, |
| "grad_norm": 0.8468831181526184, |
| "learning_rate": 4.6932299134182104e-06, |
| "loss": 0.6398, |
| "step": 2807 |
| }, |
| { |
| "epoch": 0.9859550561797753, |
| "grad_norm": 0.8642711043357849, |
| "learning_rate": 4.693007979626807e-06, |
| "loss": 0.6789, |
| "step": 2808 |
| }, |
| { |
| "epoch": 0.9863061797752809, |
| "grad_norm": 0.8652757406234741, |
| "learning_rate": 4.692785970836407e-06, |
| "loss": 0.6862, |
| "step": 2809 |
| }, |
| { |
| "epoch": 0.9866573033707865, |
| "grad_norm": 0.8652501106262207, |
| "learning_rate": 4.692563887054603e-06, |
| "loss": 0.647, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.9870084269662921, |
| "grad_norm": 0.8379038572311401, |
| "learning_rate": 4.69234172828899e-06, |
| "loss": 0.616, |
| "step": 2811 |
| }, |
| { |
| "epoch": 0.9873595505617978, |
| "grad_norm": 0.8986461758613586, |
| "learning_rate": 4.692119494547167e-06, |
| "loss": 0.6746, |
| "step": 2812 |
| }, |
| { |
| "epoch": 0.9877106741573034, |
| "grad_norm": 0.8548451662063599, |
| "learning_rate": 4.691897185836733e-06, |
| "loss": 0.6607, |
| "step": 2813 |
| }, |
| { |
| "epoch": 0.988061797752809, |
| "grad_norm": 0.8519273400306702, |
| "learning_rate": 4.69167480216529e-06, |
| "loss": 0.6341, |
| "step": 2814 |
| }, |
| { |
| "epoch": 0.9884129213483146, |
| "grad_norm": 0.8934301137924194, |
| "learning_rate": 4.691452343540445e-06, |
| "loss": 0.6601, |
| "step": 2815 |
| }, |
| { |
| "epoch": 0.9887640449438202, |
| "grad_norm": 0.8649933338165283, |
| "learning_rate": 4.6912298099698046e-06, |
| "loss": 0.6406, |
| "step": 2816 |
| }, |
| { |
| "epoch": 0.9891151685393258, |
| "grad_norm": 0.8976009488105774, |
| "learning_rate": 4.69100720146098e-06, |
| "loss": 0.6391, |
| "step": 2817 |
| }, |
| { |
| "epoch": 0.9894662921348315, |
| "grad_norm": 0.8619791865348816, |
| "learning_rate": 4.690784518021584e-06, |
| "loss": 0.6932, |
| "step": 2818 |
| }, |
| { |
| "epoch": 0.9898174157303371, |
| "grad_norm": 0.8594460487365723, |
| "learning_rate": 4.690561759659232e-06, |
| "loss": 0.6666, |
| "step": 2819 |
| }, |
| { |
| "epoch": 0.9901685393258427, |
| "grad_norm": 0.8755744099617004, |
| "learning_rate": 4.690338926381542e-06, |
| "loss": 0.6458, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.9905196629213483, |
| "grad_norm": 0.8844261765480042, |
| "learning_rate": 4.690116018196135e-06, |
| "loss": 0.6344, |
| "step": 2821 |
| }, |
| { |
| "epoch": 0.9908707865168539, |
| "grad_norm": 0.871732234954834, |
| "learning_rate": 4.689893035110635e-06, |
| "loss": 0.6316, |
| "step": 2822 |
| }, |
| { |
| "epoch": 0.9912219101123596, |
| "grad_norm": 0.8930003046989441, |
| "learning_rate": 4.689669977132667e-06, |
| "loss": 0.6887, |
| "step": 2823 |
| }, |
| { |
| "epoch": 0.9915730337078652, |
| "grad_norm": 0.9592486619949341, |
| "learning_rate": 4.689446844269859e-06, |
| "loss": 0.653, |
| "step": 2824 |
| }, |
| { |
| "epoch": 0.9919241573033708, |
| "grad_norm": 0.8865694999694824, |
| "learning_rate": 4.6892236365298425e-06, |
| "loss": 0.648, |
| "step": 2825 |
| }, |
| { |
| "epoch": 0.9922752808988764, |
| "grad_norm": 0.8786531686782837, |
| "learning_rate": 4.689000353920251e-06, |
| "loss": 0.647, |
| "step": 2826 |
| }, |
| { |
| "epoch": 0.992626404494382, |
| "grad_norm": 0.8980904817581177, |
| "learning_rate": 4.6887769964487215e-06, |
| "loss": 0.6556, |
| "step": 2827 |
| }, |
| { |
| "epoch": 0.9929775280898876, |
| "grad_norm": 0.8920478224754333, |
| "learning_rate": 4.688553564122891e-06, |
| "loss": 0.6802, |
| "step": 2828 |
| }, |
| { |
| "epoch": 0.9933286516853933, |
| "grad_norm": 0.8804338574409485, |
| "learning_rate": 4.688330056950401e-06, |
| "loss": 0.6695, |
| "step": 2829 |
| }, |
| { |
| "epoch": 0.9936797752808989, |
| "grad_norm": 0.8846019506454468, |
| "learning_rate": 4.688106474938896e-06, |
| "loss": 0.6484, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.9940308988764045, |
| "grad_norm": 0.8795493841171265, |
| "learning_rate": 4.687882818096022e-06, |
| "loss": 0.6465, |
| "step": 2831 |
| }, |
| { |
| "epoch": 0.9943820224719101, |
| "grad_norm": 0.858067512512207, |
| "learning_rate": 4.687659086429427e-06, |
| "loss": 0.6471, |
| "step": 2832 |
| }, |
| { |
| "epoch": 0.9947331460674157, |
| "grad_norm": 0.8714587092399597, |
| "learning_rate": 4.687435279946765e-06, |
| "loss": 0.6446, |
| "step": 2833 |
| }, |
| { |
| "epoch": 0.9950842696629213, |
| "grad_norm": 0.8674041628837585, |
| "learning_rate": 4.687211398655686e-06, |
| "loss": 0.6419, |
| "step": 2834 |
| }, |
| { |
| "epoch": 0.995435393258427, |
| "grad_norm": 0.8507161736488342, |
| "learning_rate": 4.6869874425638505e-06, |
| "loss": 0.6202, |
| "step": 2835 |
| }, |
| { |
| "epoch": 0.9957865168539326, |
| "grad_norm": 0.8718125224113464, |
| "learning_rate": 4.686763411678915e-06, |
| "loss": 0.6807, |
| "step": 2836 |
| }, |
| { |
| "epoch": 0.9961376404494382, |
| "grad_norm": 0.8641071319580078, |
| "learning_rate": 4.686539306008543e-06, |
| "loss": 0.6492, |
| "step": 2837 |
| }, |
| { |
| "epoch": 0.9964887640449438, |
| "grad_norm": 0.8599060773849487, |
| "learning_rate": 4.686315125560397e-06, |
| "loss": 0.6779, |
| "step": 2838 |
| }, |
| { |
| "epoch": 0.9968398876404494, |
| "grad_norm": 0.8444222211837769, |
| "learning_rate": 4.686090870342145e-06, |
| "loss": 0.6841, |
| "step": 2839 |
| }, |
| { |
| "epoch": 0.9971910112359551, |
| "grad_norm": 0.86264568567276, |
| "learning_rate": 4.685866540361456e-06, |
| "loss": 0.6627, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.9975421348314607, |
| "grad_norm": 0.9181270599365234, |
| "learning_rate": 4.685642135626001e-06, |
| "loss": 0.6537, |
| "step": 2841 |
| }, |
| { |
| "epoch": 0.9978932584269663, |
| "grad_norm": 0.8701741695404053, |
| "learning_rate": 4.685417656143456e-06, |
| "loss": 0.6545, |
| "step": 2842 |
| }, |
| { |
| "epoch": 0.9982443820224719, |
| "grad_norm": 0.8788411021232605, |
| "learning_rate": 4.685193101921497e-06, |
| "loss": 0.6431, |
| "step": 2843 |
| }, |
| { |
| "epoch": 0.9985955056179775, |
| "grad_norm": 0.8748607635498047, |
| "learning_rate": 4.684968472967804e-06, |
| "loss": 0.6698, |
| "step": 2844 |
| }, |
| { |
| "epoch": 0.9989466292134831, |
| "grad_norm": 0.9054082632064819, |
| "learning_rate": 4.684743769290059e-06, |
| "loss": 0.6801, |
| "step": 2845 |
| }, |
| { |
| "epoch": 0.9992977528089888, |
| "grad_norm": 0.8799982666969299, |
| "learning_rate": 4.684518990895945e-06, |
| "loss": 0.6764, |
| "step": 2846 |
| }, |
| { |
| "epoch": 0.9996488764044944, |
| "grad_norm": 0.9407274127006531, |
| "learning_rate": 4.684294137793153e-06, |
| "loss": 0.7098, |
| "step": 2847 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.91474848985672, |
| "learning_rate": 4.684069209989369e-06, |
| "loss": 0.6617, |
| "step": 2848 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 17088, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 2848, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 7.194152512373391e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
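
The dump above appears to be a Hugging Face `transformers` Trainer state file (`log_history` entries plus trainer metadata such as `max_steps`, `total_flos`, and `TrainerControl`). As a quick way to inspect it, here is a minimal sketch, assuming the JSON is saved locally as `trainer_state.json` (the file path and the 5%-of-an-epoch tail window are assumptions, not part of the original), that loads the `log_history` array and prints a few end-of-epoch summary figures:

```python
# Minimal sketch: summarize the log_history entries from a Trainer state dump.
# Assumes the JSON above is saved as "trainer_state.json" (path is an assumption).
import json
from statistics import mean

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # one dict per logged training step

# Tail window: entries from roughly the last 5% of the final logged epoch (assumed cutoff).
tail = [h for h in history if h["epoch"] > state["epoch"] - 0.05]

print(f"steps logged:     {len(history)}")
print(f"final step:       {history[-1]['step']} / {state['max_steps']}")
print(f"final loss:       {history[-1]['loss']:.4f}")
print(f"mean loss (tail): {mean(h['loss'] for h in tail):.4f}")
print(f"final lr:         {history[-1]['learning_rate']:.3e}")
```

For the log shown here this would report the state at step 2848 (end of epoch 1 of 6), with the loss having fallen from roughly 1.07 at the first steps to the mid-0.6 range by the end of the epoch.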