{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9904153354632586,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009584664536741214,
      "grad_norm": 5.816951093345819,
      "learning_rate": 6.25e-07,
      "loss": 0.8809,
      "step": 1
    },
    {
      "epoch": 0.019169329073482427,
      "grad_norm": 5.964814555570492,
      "learning_rate": 1.25e-06,
      "loss": 0.8476,
      "step": 2
    },
    {
      "epoch": 0.02875399361022364,
      "grad_norm": 5.91437668793362,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.8771,
      "step": 3
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 5.7274742814752155,
      "learning_rate": 2.5e-06,
      "loss": 0.8549,
      "step": 4
    },
    {
      "epoch": 0.04792332268370607,
      "grad_norm": 5.324202754304387,
      "learning_rate": 3.125e-06,
      "loss": 0.8394,
      "step": 5
    },
    {
      "epoch": 0.05750798722044728,
      "grad_norm": 4.070433381694975,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.8099,
      "step": 6
    },
    {
      "epoch": 0.0670926517571885,
      "grad_norm": 2.4484114306405496,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.7524,
      "step": 7
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 2.2317547400553917,
      "learning_rate": 5e-06,
      "loss": 0.7533,
      "step": 8
    },
    {
      "epoch": 0.08626198083067092,
      "grad_norm": 4.021700030471694,
      "learning_rate": 5.625e-06,
      "loss": 0.7731,
      "step": 9
    },
    {
      "epoch": 0.09584664536741214,
      "grad_norm": 4.242475862537413,
      "learning_rate": 6.25e-06,
      "loss": 0.7965,
      "step": 10
    },
    {
      "epoch": 0.10543130990415335,
      "grad_norm": 4.187322997258876,
      "learning_rate": 6.875e-06,
      "loss": 0.7696,
      "step": 11
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 3.5880075046075364,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7105,
      "step": 12
    },
    {
      "epoch": 0.12460063897763578,
      "grad_norm": 3.5147425543441346,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.7182,
      "step": 13
    },
    {
      "epoch": 0.134185303514377,
      "grad_norm": 2.698461280108951,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.6811,
      "step": 14
    },
    {
      "epoch": 0.14376996805111822,
      "grad_norm": 1.890037499452564,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.6807,
      "step": 15
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 1.831321882805547,
      "learning_rate": 1e-05,
      "loss": 0.696,
      "step": 16
    },
    {
      "epoch": 0.16293929712460065,
      "grad_norm": 2.061073210204071,
      "learning_rate": 1.0625e-05,
      "loss": 0.6692,
      "step": 17
    },
    {
      "epoch": 0.17252396166134185,
      "grad_norm": 1.8734300870572917,
      "learning_rate": 1.125e-05,
      "loss": 0.6655,
      "step": 18
    },
    {
      "epoch": 0.18210862619808307,
      "grad_norm": 1.3757902351252964,
      "learning_rate": 1.1875e-05,
      "loss": 0.641,
      "step": 19
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 1.3308745862219742,
      "learning_rate": 1.25e-05,
      "loss": 0.6173,
      "step": 20
    },
    {
      "epoch": 0.2012779552715655,
      "grad_norm": 1.3502732169912994,
      "learning_rate": 1.3125e-05,
      "loss": 0.6143,
      "step": 21
    },
    {
      "epoch": 0.2108626198083067,
      "grad_norm": 1.179130020860865,
      "learning_rate": 1.375e-05,
      "loss": 0.6165,
      "step": 22
    },
    {
      "epoch": 0.22044728434504793,
      "grad_norm": 1.1153340921244757,
      "learning_rate": 1.4375e-05,
      "loss": 0.595,
      "step": 23
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 0.93782430241953,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.6081,
      "step": 24
    },
    {
      "epoch": 0.23961661341853036,
      "grad_norm": 1.0101672937842767,
      "learning_rate": 1.5625e-05,
      "loss": 0.5846,
      "step": 25
    },
    {
      "epoch": 0.24920127795527156,
      "grad_norm": 0.8994475569314913,
      "learning_rate": 1.6250000000000002e-05,
      "loss": 0.5652,
      "step": 26
    },
    {
      "epoch": 0.25878594249201275,
      "grad_norm": 0.7372445592319721,
      "learning_rate": 1.6875e-05,
      "loss": 0.569,
      "step": 27
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 0.7790129120230198,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.5454,
      "step": 28
    },
    {
      "epoch": 0.2779552715654952,
      "grad_norm": 0.79836828642288,
      "learning_rate": 1.8125e-05,
      "loss": 0.5854,
      "step": 29
    },
    {
      "epoch": 0.28753993610223644,
      "grad_norm": 0.797607856474264,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 0.5628,
      "step": 30
    },
    {
      "epoch": 0.2971246006389776,
      "grad_norm": 0.8495331973366639,
      "learning_rate": 1.9375e-05,
      "loss": 0.5589,
      "step": 31
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 0.8939715492547688,
      "learning_rate": 2e-05,
      "loss": 0.588,
      "step": 32
    },
    {
      "epoch": 0.31629392971246006,
      "grad_norm": 0.7945325544236974,
      "learning_rate": 1.9999370567547008e-05,
      "loss": 0.5531,
      "step": 33
    },
    {
      "epoch": 0.3258785942492013,
      "grad_norm": 0.75081365184287,
      "learning_rate": 1.999748234942507e-05,
      "loss": 0.5781,
      "step": 34
    },
    {
      "epoch": 0.3354632587859425,
      "grad_norm": 0.7667797396040631,
      "learning_rate": 1.9994335583335336e-05,
      "loss": 0.5532,
      "step": 35
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.6810165168871454,
      "learning_rate": 1.9989930665413148e-05,
      "loss": 0.5592,
      "step": 36
    },
    {
      "epoch": 0.3546325878594249,
      "grad_norm": 0.8020192657808985,
      "learning_rate": 1.998426815017817e-05,
      "loss": 0.5703,
      "step": 37
    },
    {
      "epoch": 0.36421725239616615,
      "grad_norm": 0.765862886699684,
      "learning_rate": 1.997734875046456e-05,
      "loss": 0.5859,
      "step": 38
    },
    {
      "epoch": 0.3738019169329074,
      "grad_norm": 0.6206240716622619,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 0.5084,
      "step": 39
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.6800428463570417,
      "learning_rate": 1.9959742939952393e-05,
      "loss": 0.5402,
      "step": 40
    },
    {
      "epoch": 0.3929712460063898,
      "grad_norm": 0.6898204949273792,
      "learning_rate": 1.9949058745487524e-05,
      "loss": 0.5576,
      "step": 41
    },
    {
      "epoch": 0.402555910543131,
      "grad_norm": 0.7038921388379388,
      "learning_rate": 1.9937122098932428e-05,
      "loss": 0.5814,
      "step": 42
    },
    {
      "epoch": 0.41214057507987223,
      "grad_norm": 0.6386669258124261,
      "learning_rate": 1.9923934502949645e-05,
      "loss": 0.5433,
      "step": 43
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.8544716563932903,
      "learning_rate": 1.990949761767935e-05,
      "loss": 0.5562,
      "step": 44
    },
    {
      "epoch": 0.43130990415335463,
      "grad_norm": 0.6590987393801254,
      "learning_rate": 1.9893813260530368e-05,
      "loss": 0.5507,
      "step": 45
    },
    {
      "epoch": 0.44089456869009586,
      "grad_norm": 0.7325945250857884,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.5472,
      "step": 46
    },
    {
      "epoch": 0.4504792332268371,
      "grad_norm": 0.8060439236010339,
      "learning_rate": 1.985871018518236e-05,
      "loss": 0.5651,
      "step": 47
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.6140083254189492,
      "learning_rate": 1.98392958859863e-05,
      "loss": 0.5595,
      "step": 48
    },
    {
      "epoch": 0.4696485623003195,
      "grad_norm": 0.7132583174991013,
      "learning_rate": 1.9818642952361188e-05,
      "loss": 0.5526,
      "step": 49
    },
    {
      "epoch": 0.4792332268370607,
      "grad_norm": 0.59664072981028,
      "learning_rate": 1.9796753984232357e-05,
      "loss": 0.547,
      "step": 50
    },
    {
      "epoch": 0.48881789137380194,
      "grad_norm": 0.6993452708095715,
      "learning_rate": 1.9773631737125192e-05,
      "loss": 0.5361,
      "step": 51
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.6473604609615206,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 0.5535,
      "step": 52
    },
    {
      "epoch": 0.5079872204472844,
      "grad_norm": 0.7074480280853533,
      "learning_rate": 1.9723699203976768e-05,
      "loss": 0.4981,
      "step": 53
    },
    {
      "epoch": 0.5175718849840255,
      "grad_norm": 0.7557340666761172,
      "learning_rate": 1.969689520376687e-05,
      "loss": 0.5379,
      "step": 54
    },
    {
      "epoch": 0.5271565495207667,
      "grad_norm": 0.636305070807052,
      "learning_rate": 1.9668870495450064e-05,
      "loss": 0.5481,
      "step": 55
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.7057386270005085,
      "learning_rate": 1.9639628606958535e-05,
      "loss": 0.5118,
      "step": 56
    },
    {
      "epoch": 0.5463258785942492,
      "grad_norm": 0.5999038558802152,
      "learning_rate": 1.9609173219450998e-05,
      "loss": 0.5532,
      "step": 57
    },
    {
      "epoch": 0.5559105431309904,
      "grad_norm": 0.6446336459611116,
      "learning_rate": 1.9577508166849308e-05,
      "loss": 0.5443,
      "step": 58
    },
    {
      "epoch": 0.5654952076677316,
      "grad_norm": 0.7779704431661995,
      "learning_rate": 1.954463743535581e-05,
      "loss": 0.5282,
      "step": 59
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.6196806885678676,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.528,
      "step": 60
    },
    {
      "epoch": 0.5846645367412141,
      "grad_norm": 0.7481597405788767,
      "learning_rate": 1.947529563887529e-05,
      "loss": 0.5258,
      "step": 61
    },
    {
      "epoch": 0.5942492012779552,
      "grad_norm": 0.5954399244612109,
      "learning_rate": 1.9438833303083677e-05,
      "loss": 0.5045,
      "step": 62
    },
    {
      "epoch": 0.6038338658146964,
      "grad_norm": 0.773965376234353,
      "learning_rate": 1.940118274569219e-05,
      "loss": 0.5025,
      "step": 63
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.5603381146719852,
      "learning_rate": 1.9362348706397374e-05,
      "loss": 0.5024,
      "step": 64
    },
    {
      "epoch": 0.6230031948881789,
      "grad_norm": 0.7685723417664032,
      "learning_rate": 1.9322336073880143e-05,
      "loss": 0.5357,
      "step": 65
    },
    {
      "epoch": 0.6325878594249201,
      "grad_norm": 0.6579285301369759,
      "learning_rate": 1.928114988519039e-05,
      "loss": 0.5126,
      "step": 66
    },
    {
      "epoch": 0.6421725239616614,
      "grad_norm": 0.7627659065606073,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 0.5234,
      "step": 67
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.6307353614086453,
      "learning_rate": 1.919527772551451e-05,
      "loss": 0.5188,
      "step": 68
    },
    {
      "epoch": 0.6613418530351438,
      "grad_norm": 0.6854214793729667,
      "learning_rate": 1.91506025646732e-05,
      "loss": 0.509,
      "step": 69
    },
    {
      "epoch": 0.670926517571885,
      "grad_norm": 0.7177977977714395,
      "learning_rate": 1.9104775466588162e-05,
      "loss": 0.5363,
      "step": 70
    },
    {
      "epoch": 0.6805111821086262,
      "grad_norm": 0.5740574361421374,
      "learning_rate": 1.9057802200271943e-05,
      "loss": 0.5269,
      "step": 71
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.7278614746672593,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.5214,
      "step": 72
    },
    {
      "epoch": 0.6996805111821086,
      "grad_norm": 0.6367723242487334,
      "learning_rate": 1.8960440959687254e-05,
      "loss": 0.5559,
      "step": 73
    },
    {
      "epoch": 0.7092651757188498,
      "grad_norm": 0.6483330441964968,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.5321,
      "step": 74
    },
    {
      "epoch": 0.7188498402555911,
      "grad_norm": 0.6290627842102959,
      "learning_rate": 1.88585678672358e-05,
      "loss": 0.4939,
      "step": 75
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.6410854610772762,
      "learning_rate": 1.880595531856738e-05,
      "loss": 0.5319,
      "step": 76
    },
    {
      "epoch": 0.7380191693290735,
      "grad_norm": 0.6731639380913599,
      "learning_rate": 1.8752234219087538e-05,
      "loss": 0.5273,
      "step": 77
    },
    {
      "epoch": 0.7476038338658147,
      "grad_norm": 0.5316769493193355,
      "learning_rate": 1.8697411331556958e-05,
      "loss": 0.518,
      "step": 78
    },
    {
      "epoch": 0.7571884984025559,
      "grad_norm": 0.6062668049547616,
      "learning_rate": 1.864149355743655e-05,
      "loss": 0.5075,
      "step": 79
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.6276805556072127,
      "learning_rate": 1.8584487936018663e-05,
      "loss": 0.5058,
      "step": 80
    },
    {
      "epoch": 0.7763578274760383,
      "grad_norm": 0.5471527462104901,
      "learning_rate": 1.8526401643540924e-05,
      "loss": 0.5031,
      "step": 81
    },
    {
      "epoch": 0.7859424920127795,
      "grad_norm": 0.6123087953792351,
      "learning_rate": 1.8467241992282842e-05,
      "loss": 0.5374,
      "step": 82
    },
    {
      "epoch": 0.7955271565495208,
      "grad_norm": 0.6180809301088696,
      "learning_rate": 1.8407016429645305e-05,
      "loss": 0.5352,
      "step": 83
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.5770189713402363,
      "learning_rate": 1.834573253721303e-05,
      "loss": 0.5001,
      "step": 84
    },
    {
      "epoch": 0.8146964856230032,
      "grad_norm": 0.5611559252392707,
      "learning_rate": 1.8283398029800167e-05,
      "loss": 0.4936,
      "step": 85
    },
    {
      "epoch": 0.8242811501597445,
      "grad_norm": 0.5376330084381403,
      "learning_rate": 1.8220020754479104e-05,
      "loss": 0.5128,
      "step": 86
    },
    {
      "epoch": 0.8338658146964856,
      "grad_norm": 0.5930317746662469,
      "learning_rate": 1.8155608689592604e-05,
      "loss": 0.5195,
      "step": 87
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.5582349649774669,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.5412,
      "step": 88
    },
    {
      "epoch": 0.853035143769968,
      "grad_norm": 0.5840770892840872,
      "learning_rate": 1.8023712754803783e-05,
      "loss": 0.4931,
      "step": 89
    },
    {
      "epoch": 0.8626198083067093,
      "grad_norm": 0.5770400921032719,
      "learning_rate": 1.795624548881781e-05,
      "loss": 0.5077,
      "step": 90
    },
    {
      "epoch": 0.8722044728434505,
      "grad_norm": 0.47321773400110334,
      "learning_rate": 1.7887776639008912e-05,
      "loss": 0.5271,
      "step": 91
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.6014894804474964,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.5068,
      "step": 92
    },
    {
      "epoch": 0.8913738019169329,
      "grad_norm": 0.5268826943097094,
      "learning_rate": 1.7747868790136012e-05,
      "loss": 0.5062,
      "step": 93
    },
    {
      "epoch": 0.9009584664536742,
      "grad_norm": 0.538258599224071,
      "learning_rate": 1.7676447403580114e-05,
      "loss": 0.5192,
      "step": 94
    },
    {
      "epoch": 0.9105431309904153,
      "grad_norm": 0.5235902995089281,
      "learning_rate": 1.7604059656000313e-05,
      "loss": 0.512,
      "step": 95
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.5922248527214893,
      "learning_rate": 1.7530714660036112e-05,
      "loss": 0.5055,
      "step": 96
    },
    {
      "epoch": 0.9297124600638977,
      "grad_norm": 0.5855685386711252,
      "learning_rate": 1.7456421648831658e-05,
      "loss": 0.4984,
      "step": 97
    },
    {
      "epoch": 0.939297124600639,
      "grad_norm": 0.5458918187075826,
      "learning_rate": 1.738118997487341e-05,
      "loss": 0.5076,
      "step": 98
    },
    {
      "epoch": 0.9488817891373802,
      "grad_norm": 0.6515459002851856,
      "learning_rate": 1.7305029108812777e-05,
      "loss": 0.5229,
      "step": 99
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.5315082766100536,
      "learning_rate": 1.7227948638273918e-05,
      "loss": 0.5093,
      "step": 100
    },
    {
      "epoch": 0.9680511182108626,
      "grad_norm": 0.7875828584853896,
      "learning_rate": 1.7149958266646756e-05,
      "loss": 0.494,
      "step": 101
    },
    {
      "epoch": 0.9776357827476039,
      "grad_norm": 0.5868831184446074,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.4865,
      "step": 102
    },
    {
      "epoch": 0.987220447284345,
      "grad_norm": 0.5730857899752412,
      "learning_rate": 1.6991287205172575e-05,
      "loss": 0.513,
      "step": 103
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.7358514287452019,
      "learning_rate": 1.691062648986865e-05,
      "loss": 0.5143,
      "step": 104
    },
    {
      "epoch": 1.0063897763578276,
      "grad_norm": 1.1699812558564358,
      "learning_rate": 1.682909582004807e-05,
      "loss": 0.8838,
      "step": 105
    },
    {
      "epoch": 1.0159744408945688,
      "grad_norm": 0.6902201583829356,
      "learning_rate": 1.6746705459320746e-05,
      "loss": 0.4084,
      "step": 106
    },
    {
      "epoch": 1.0255591054313098,
      "grad_norm": 0.6792794510906274,
      "learning_rate": 1.6663465779520042e-05,
      "loss": 0.436,
      "step": 107
    },
    {
      "epoch": 1.035143769968051,
      "grad_norm": 0.7421226478591257,
      "learning_rate": 1.657938725939713e-05,
      "loss": 0.4414,
      "step": 108
    },
    {
      "epoch": 1.0447284345047922,
      "grad_norm": 0.6881793520210084,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 0.4859,
      "step": 109
    },
    {
      "epoch": 1.0543130990415335,
      "grad_norm": 0.821089177012622,
      "learning_rate": 1.6408756139850243e-05,
      "loss": 0.4273,
      "step": 110
    },
    {
      "epoch": 1.0638977635782747,
      "grad_norm": 0.6928979324587471,
      "learning_rate": 1.63222250205791e-05,
      "loss": 0.4819,
      "step": 111
    },
    {
      "epoch": 1.073482428115016,
      "grad_norm": 0.7022553739631447,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.3695,
      "step": 112
    },
    {
      "epoch": 1.0830670926517572,
      "grad_norm": 0.725323982592671,
      "learning_rate": 1.6146786127164773e-05,
      "loss": 0.461,
      "step": 113
    },
    {
      "epoch": 1.0926517571884984,
      "grad_norm": 0.7048555706325064,
      "learning_rate": 1.60579004384082e-05,
      "loss": 0.4494,
      "step": 114
    },
    {
      "epoch": 1.1022364217252396,
      "grad_norm": 0.6640188248580515,
      "learning_rate": 1.5968252141825038e-05,
      "loss": 0.4685,
      "step": 115
    },
    {
      "epoch": 1.1118210862619808,
      "grad_norm": 0.7030304823361989,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.4277,
      "step": 116
    },
    {
      "epoch": 1.121405750798722,
      "grad_norm": 0.7123317956342814,
      "learning_rate": 1.578671296179806e-05,
      "loss": 0.4592,
      "step": 117
    },
    {
      "epoch": 1.1309904153354633,
      "grad_norm": 0.6086700980301015,
      "learning_rate": 1.569484493168452e-05,
      "loss": 0.412,
      "step": 118
    },
    {
      "epoch": 1.1405750798722045,
      "grad_norm": 0.6390103576527882,
      "learning_rate": 1.5602259997528028e-05,
      "loss": 0.4797,
      "step": 119
    },
    {
      "epoch": 1.1501597444089458,
      "grad_norm": 0.5469865905883947,
      "learning_rate": 1.5508969814521026e-05,
      "loss": 0.3935,
      "step": 120
    },
    {
      "epoch": 1.159744408945687,
      "grad_norm": 0.5758273790847533,
      "learning_rate": 1.541498612663726e-05,
      "loss": 0.4702,
      "step": 121
    },
    {
      "epoch": 1.1693290734824282,
      "grad_norm": 0.4971600554124392,
      "learning_rate": 1.5320320765153367e-05,
      "loss": 0.416,
      "step": 122
    },
    {
      "epoch": 1.1789137380191694,
      "grad_norm": 0.6214619772407622,
      "learning_rate": 1.5224985647159489e-05,
      "loss": 0.4474,
      "step": 123
    },
    {
      "epoch": 1.1884984025559104,
      "grad_norm": 0.6547899224858453,
      "learning_rate": 1.5128992774059063e-05,
      "loss": 0.4558,
      "step": 124
    },
    {
      "epoch": 1.1980830670926517,
      "grad_norm": 0.5890574227637935,
      "learning_rate": 1.5032354230058004e-05,
      "loss": 0.47,
      "step": 125
    },
    {
      "epoch": 1.207667731629393,
      "grad_norm": 0.7361985894763436,
      "learning_rate": 1.493508218064347e-05,
      "loss": 0.4309,
      "step": 126
    },
    {
      "epoch": 1.2172523961661341,
      "grad_norm": 0.5173156701874413,
      "learning_rate": 1.4837188871052399e-05,
      "loss": 0.4814,
      "step": 127
    },
    {
      "epoch": 1.2268370607028753,
      "grad_norm": 0.6237781339666125,
      "learning_rate": 1.4738686624729987e-05,
      "loss": 0.4185,
      "step": 128
    },
    {
      "epoch": 1.2364217252396166,
      "grad_norm": 0.6273705352740975,
      "learning_rate": 1.4639587841778342e-05,
      "loss": 0.4637,
      "step": 129
    },
    {
      "epoch": 1.2460063897763578,
      "grad_norm": 0.5605628666508281,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.4803,
      "step": 130
    },
    {
      "epoch": 1.255591054313099,
      "grad_norm": 0.597522913497888,
      "learning_rate": 1.4439650640304822e-05,
      "loss": 0.454,
      "step": 131
    },
    {
      "epoch": 1.2651757188498403,
      "grad_norm": 0.5715006951258484,
      "learning_rate": 1.4338837391175582e-05,
      "loss": 0.4441,
      "step": 132
    },
    {
      "epoch": 1.2747603833865815,
      "grad_norm": 0.5270300631154146,
      "learning_rate": 1.4237477941033888e-05,
      "loss": 0.4424,
      "step": 133
    },
    {
      "epoch": 1.2843450479233227,
      "grad_norm": 0.6111693532495731,
      "learning_rate": 1.4135585049665207e-05,
      "loss": 0.4688,
      "step": 134
    },
    {
      "epoch": 1.293929712460064,
      "grad_norm": 0.5720421473056696,
      "learning_rate": 1.4033171544008053e-05,
      "loss": 0.47,
      "step": 135
    },
    {
      "epoch": 1.3035143769968052,
      "grad_norm": 0.6142039193499456,
      "learning_rate": 1.3930250316539237e-05,
      "loss": 0.444,
      "step": 136
    },
    {
      "epoch": 1.3130990415335464,
      "grad_norm": 0.5632554602858334,
      "learning_rate": 1.3826834323650899e-05,
      "loss": 0.4447,
      "step": 137
    },
    {
      "epoch": 1.3226837060702876,
      "grad_norm": 0.5306960211119153,
      "learning_rate": 1.3722936584019453e-05,
      "loss": 0.4464,
      "step": 138
    },
    {
      "epoch": 1.3322683706070286,
      "grad_norm": 0.5026216587572911,
      "learning_rate": 1.3618570176966723e-05,
      "loss": 0.4635,
      "step": 139
    },
    {
      "epoch": 1.34185303514377,
      "grad_norm": 0.5048409273626058,
      "learning_rate": 1.3513748240813429e-05,
      "loss": 0.4719,
      "step": 140
    },
    {
      "epoch": 1.351437699680511,
      "grad_norm": 0.5143189141152029,
      "learning_rate": 1.340848397122525e-05,
      "loss": 0.423,
      "step": 141
    },
    {
      "epoch": 1.3610223642172525,
      "grad_norm": 0.4906781147146303,
      "learning_rate": 1.3302790619551673e-05,
      "loss": 0.4658,
      "step": 142
    },
    {
      "epoch": 1.3706070287539935,
      "grad_norm": 0.45394412594825295,
      "learning_rate": 1.3196681491157816e-05,
      "loss": 0.4383,
      "step": 143
    },
    {
      "epoch": 1.3801916932907348,
      "grad_norm": 0.4565249809642115,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.4071,
      "step": 144
    },
    {
      "epoch": 1.389776357827476,
      "grad_norm": 0.5436018324080563,
      "learning_rate": 1.2983269385691562e-05,
      "loss": 0.4942,
      "step": 145
    },
    {
      "epoch": 1.3993610223642172,
      "grad_norm": 0.40894998786022985,
      "learning_rate": 1.2875993274320173e-05,
      "loss": 0.3947,
      "step": 146
    },
    {
      "epoch": 1.4089456869009584,
      "grad_norm": 0.5288187343789331,
      "learning_rate": 1.2768355114248493e-05,
      "loss": 0.4712,
      "step": 147
    },
    {
      "epoch": 1.4185303514376997,
      "grad_norm": 0.4768621544922896,
      "learning_rate": 1.2660368455666752e-05,
      "loss": 0.4382,
      "step": 148
    },
    {
      "epoch": 1.428115015974441,
      "grad_norm": 0.431738859820575,
      "learning_rate": 1.2552046892636427e-05,
      "loss": 0.4071,
      "step": 149
    },
    {
      "epoch": 1.4376996805111821,
      "grad_norm": 0.498599086228044,
      "learning_rate": 1.2443404061378941e-05,
      "loss": 0.4755,
      "step": 150
    },
    {
      "epoch": 1.4472843450479234,
      "grad_norm": 0.4988206100753027,
      "learning_rate": 1.2334453638559057e-05,
      "loss": 0.4083,
      "step": 151
    },
    {
      "epoch": 1.4568690095846646,
      "grad_norm": 0.45822495711811395,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.485,
      "step": 152
    },
    {
      "epoch": 1.4664536741214058,
      "grad_norm": 0.5023477915233621,
      "learning_rate": 1.211568491677263e-05,
      "loss": 0.4076,
      "step": 153
    },
    {
      "epoch": 1.476038338658147,
      "grad_norm": 0.45113635635588584,
      "learning_rate": 1.200589415783273e-05,
      "loss": 0.4015,
      "step": 154
    },
    {
      "epoch": 1.4856230031948883,
      "grad_norm": 0.5334811995129992,
      "learning_rate": 1.1895850883916786e-05,
      "loss": 0.4759,
      "step": 155
    },
    {
      "epoch": 1.4952076677316293,
      "grad_norm": 0.40436266318440944,
      "learning_rate": 1.1785568947986368e-05,
      "loss": 0.419,
      "step": 156
    },
    {
      "epoch": 1.5047923322683707,
      "grad_norm": 0.4755461357936275,
      "learning_rate": 1.1675062233047365e-05,
      "loss": 0.4221,
      "step": 157
    },
    {
      "epoch": 1.5143769968051117,
      "grad_norm": 0.5300609622813354,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.4645,
      "step": 158
    },
    {
      "epoch": 1.5239616613418532,
      "grad_norm": 0.40635331262756796,
      "learning_rate": 1.1453430137899129e-05,
      "loss": 0.3713,
      "step": 159
    },
    {
      "epoch": 1.5335463258785942,
      "grad_norm": 0.4735547448374356,
      "learning_rate": 1.1342332658176556e-05,
      "loss": 0.4324,
      "step": 160
    },
    {
      "epoch": 1.5431309904153354,
      "grad_norm": 0.4495839729557387,
      "learning_rate": 1.123106619690643e-05,
      "loss": 0.4791,
      "step": 161
    },
    {
      "epoch": 1.5527156549520766,
      "grad_norm": 0.4173934080478346,
      "learning_rate": 1.1119644761033079e-05,
      "loss": 0.3956,
      "step": 162
    },
    {
      "epoch": 1.5623003194888179,
      "grad_norm": 0.5099574697612899,
      "learning_rate": 1.1008082377010045e-05,
      "loss": 0.502,
      "step": 163
    },
    {
      "epoch": 1.571884984025559,
      "grad_norm": 0.4022180449613417,
      "learning_rate": 1.0896393089034336e-05,
      "loss": 0.3678,
      "step": 164
    },
    {
      "epoch": 1.5814696485623003,
      "grad_norm": 0.46434170894732985,
      "learning_rate": 1.0784590957278452e-05,
      "loss": 0.4521,
      "step": 165
    },
    {
      "epoch": 1.5910543130990416,
      "grad_norm": 0.5222429942261165,
      "learning_rate": 1.0672690056120398e-05,
      "loss": 0.4578,
      "step": 166
    },
    {
      "epoch": 1.6006389776357828,
      "grad_norm": 0.4714562379867376,
      "learning_rate": 1.0560704472371919e-05,
      "loss": 0.4804,
      "step": 167
    },
    {
      "epoch": 1.610223642172524,
      "grad_norm": 0.4257910311693338,
      "learning_rate": 1.044864830350515e-05,
      "loss": 0.3879,
      "step": 168
    },
    {
      "epoch": 1.619808306709265,
      "grad_norm": 0.4931956302496282,
      "learning_rate": 1.0336535655877942e-05,
      "loss": 0.4756,
      "step": 169
    },
    {
      "epoch": 1.6293929712460065,
      "grad_norm": 0.4307049907622935,
      "learning_rate": 1.0224380642958052e-05,
      "loss": 0.3645,
      "step": 170
    },
    {
      "epoch": 1.6389776357827475,
      "grad_norm": 0.5837734683656293,
      "learning_rate": 1.011219738354646e-05,
      "loss": 0.4896,
      "step": 171
    },
    {
      "epoch": 1.648562300319489,
      "grad_norm": 0.4274446462676884,
      "learning_rate": 1e-05,
      "loss": 0.4297,
      "step": 172
    },
    {
      "epoch": 1.65814696485623,
      "grad_norm": 0.47050426719612987,
      "learning_rate": 9.887802616453543e-06,
      "loss": 0.4016,
      "step": 173
    },
    {
      "epoch": 1.6677316293929714,
      "grad_norm": 0.46798902119678215,
      "learning_rate": 9.775619357041952e-06,
      "loss": 0.4468,
      "step": 174
    },
    {
      "epoch": 1.6773162939297124,
      "grad_norm": 0.4231517848150463,
      "learning_rate": 9.663464344122064e-06,
      "loss": 0.4054,
      "step": 175
    },
    {
      "epoch": 1.6869009584664538,
      "grad_norm": 0.5047554859497767,
      "learning_rate": 9.551351696494854e-06,
      "loss": 0.4564,
      "step": 176
    },
    {
      "epoch": 1.6964856230031948,
      "grad_norm": 0.37022389130163336,
      "learning_rate": 9.439295527628083e-06,
      "loss": 0.4373,
      "step": 177
    },
    {
      "epoch": 1.706070287539936,
      "grad_norm": 0.5285912537102634,
      "learning_rate": 9.327309943879604e-06,
      "loss": 0.4722,
      "step": 178
    },
    {
      "epoch": 1.7156549520766773,
      "grad_norm": 0.4785755524774783,
      "learning_rate": 9.215409042721553e-06,
      "loss": 0.426,
      "step": 179
    },
    {
      "epoch": 1.7252396166134185,
      "grad_norm": 0.4506434737215472,
      "learning_rate": 9.103606910965666e-06,
      "loss": 0.4384,
      "step": 180
    },
    {
      "epoch": 1.7348242811501597,
      "grad_norm": 0.45206163197522226,
      "learning_rate": 8.991917622989957e-06,
      "loss": 0.465,
      "step": 181
    },
    {
      "epoch": 1.744408945686901,
      "grad_norm": 0.4264275243649636,
      "learning_rate": 8.880355238966923e-06,
      "loss": 0.4294,
      "step": 182
    },
    {
      "epoch": 1.7539936102236422,
      "grad_norm": 0.4305322367018383,
      "learning_rate": 8.768933803093573e-06,
      "loss": 0.439,
      "step": 183
    },
    {
      "epoch": 1.7635782747603834,
      "grad_norm": 0.444413629694292,
      "learning_rate": 8.657667341823449e-06,
      "loss": 0.4471,
      "step": 184
    },
    {
      "epoch": 1.7731629392971247,
      "grad_norm": 0.44726871944510105,
      "learning_rate": 8.546569862100876e-06,
      "loss": 0.4291,
      "step": 185
    },
    {
      "epoch": 1.7827476038338657,
      "grad_norm": 0.5141706810151837,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.4411,
      "step": 186
    },
    {
      "epoch": 1.792332268370607,
      "grad_norm": 0.4135951238897178,
      "learning_rate": 8.324937766952638e-06,
      "loss": 0.4325,
      "step": 187
    },
    {
      "epoch": 1.8019169329073481,
      "grad_norm": 0.45368873581779895,
      "learning_rate": 8.214431052013636e-06,
      "loss": 0.4228,
      "step": 188
    },
    {
      "epoch": 1.8115015974440896,
      "grad_norm": 0.491585413229281,
      "learning_rate": 8.104149116083216e-06,
      "loss": 0.4516,
      "step": 189
    },
    {
      "epoch": 1.8210862619808306,
      "grad_norm": 0.40058123705120063,
      "learning_rate": 7.994105842167274e-06,
      "loss": 0.4331,
      "step": 190
    },
    {
      "epoch": 1.830670926517572,
      "grad_norm": 0.4537379960494452,
      "learning_rate": 7.884315083227373e-06,
      "loss": 0.4685,
      "step": 191
    },
    {
      "epoch": 1.840255591054313,
      "grad_norm": 0.4212878067511151,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.4405,
      "step": 192
    },
    {
      "epoch": 1.8498402555910545,
      "grad_norm": 0.4051053145614115,
      "learning_rate": 7.66554636144095e-06,
      "loss": 0.438,
      "step": 193
    },
    {
      "epoch": 1.8594249201277955,
      "grad_norm": 0.40403658818972926,
      "learning_rate": 7.556595938621058e-06,
      "loss": 0.4153,
      "step": 194
    },
    {
      "epoch": 1.8690095846645367,
      "grad_norm": 0.39919476552418626,
      "learning_rate": 7.447953107363574e-06,
      "loss": 0.4042,
      "step": 195
    },
    {
      "epoch": 1.878594249201278,
      "grad_norm": 0.40390408443700876,
      "learning_rate": 7.33963154433325e-06,
      "loss": 0.4027,
      "step": 196
    },
    {
      "epoch": 1.8881789137380192,
      "grad_norm": 0.39441465040012735,
      "learning_rate": 7.2316448857515076e-06,
      "loss": 0.4435,
      "step": 197
    },
    {
      "epoch": 1.8977635782747604,
      "grad_norm": 0.43435324742018255,
      "learning_rate": 7.124006725679828e-06,
      "loss": 0.4618,
      "step": 198
    },
    {
      "epoch": 1.9073482428115016,
      "grad_norm": 0.4117241007254349,
      "learning_rate": 7.01673061430844e-06,
      "loss": 0.4302,
      "step": 199
    },
    {
      "epoch": 1.9169329073482428,
      "grad_norm": 0.42716318836747486,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.4766,
      "step": 200
    },
    {
      "epoch": 1.926517571884984,
      "grad_norm": 0.4284737496521204,
      "learning_rate": 6.8033185088421874e-06,
      "loss": 0.455,
      "step": 201
    },
    {
      "epoch": 1.9361022364217253,
      "grad_norm": 0.4709549975113482,
      "learning_rate": 6.697209380448333e-06,
      "loss": 0.4249,
      "step": 202
    },
    {
      "epoch": 1.9456869009584663,
      "grad_norm": 0.38457354298640695,
      "learning_rate": 6.59151602877475e-06,
      "loss": 0.4298,
      "step": 203
    },
    {
      "epoch": 1.9552715654952078,
      "grad_norm": 0.40219036129356933,
      "learning_rate": 6.486251759186573e-06,
      "loss": 0.46,
      "step": 204
    },
    {
      "epoch": 1.9648562300319488,
      "grad_norm": 0.4394898406507064,
      "learning_rate": 6.381429823033281e-06,
      "loss": 0.4507,
      "step": 205
    },
    {
      "epoch": 1.9744408945686902,
      "grad_norm": 0.428010655667797,
      "learning_rate": 6.277063415980549e-06,
      "loss": 0.4289,
      "step": 206
    },
    {
      "epoch": 1.9840255591054312,
      "grad_norm": 0.41752005507808904,
      "learning_rate": 6.173165676349103e-06,
      "loss": 0.4253,
      "step": 207
    },
    {
      "epoch": 1.9936102236421727,
      "grad_norm": 0.4446826344625602,
      "learning_rate": 6.069749683460765e-06,
      "loss": 0.4653,
      "step": 208
    },
    {
      "epoch": 2.0031948881789137,
      "grad_norm": 0.7825903799604366,
      "learning_rate": 5.966828455991951e-06,
      "loss": 0.6218,
      "step": 209
    },
    {
      "epoch": 2.012779552715655,
      "grad_norm": 0.4401481205860557,
      "learning_rate": 5.864414950334796e-06,
      "loss": 0.3884,
      "step": 210
    },
    {
      "epoch": 2.022364217252396,
      "grad_norm": 0.45142190780046537,
      "learning_rate": 5.7625220589661136e-06,
      "loss": 0.392,
      "step": 211
    },
    {
      "epoch": 2.0319488817891376,
      "grad_norm": 0.48929701776811646,
      "learning_rate": 5.66116260882442e-06,
      "loss": 0.3631,
      "step": 212
    },
    {
      "epoch": 2.0415335463258786,
      "grad_norm": 0.6210973844231107,
      "learning_rate": 5.560349359695181e-06,
      "loss": 0.4092,
      "step": 213
    },
    {
      "epoch": 2.0511182108626196,
      "grad_norm": 0.44860747421683334,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.3753,
      "step": 214
    },
    {
      "epoch": 2.060702875399361,
      "grad_norm": 0.43389253461155286,
      "learning_rate": 5.360412158221661e-06,
      "loss": 0.356,
      "step": 215
    },
    {
      "epoch": 2.070287539936102,
      "grad_norm": 0.5565953093774033,
      "learning_rate": 5.2613133752700145e-06,
      "loss": 0.4177,
      "step": 216
    },
    {
      "epoch": 2.0798722044728435,
      "grad_norm": 0.5439364657270599,
      "learning_rate": 5.1628111289476025e-06,
      "loss": 0.3691,
      "step": 217
    },
    {
      "epoch": 2.0894568690095845,
      "grad_norm": 0.4250244085573423,
      "learning_rate": 5.064917819356532e-06,
      "loss": 0.3823,
      "step": 218
    },
    {
      "epoch": 2.099041533546326,
      "grad_norm": 0.4633828235328361,
      "learning_rate": 4.967645769942e-06,
      "loss": 0.3456,
      "step": 219
    },
    {
      "epoch": 2.108626198083067,
      "grad_norm": 0.4872033142578477,
      "learning_rate": 4.87100722594094e-06,
      "loss": 0.3792,
      "step": 220
    },
    {
      "epoch": 2.1182108626198084,
      "grad_norm": 0.434441186947985,
      "learning_rate": 4.775014352840512e-06,
      "loss": 0.3891,
      "step": 221
    },
    {
      "epoch": 2.1277955271565494,
      "grad_norm": 0.4065479253193355,
      "learning_rate": 4.679679234846636e-06,
      "loss": 0.35,
      "step": 222
    },
    {
      "epoch": 2.137380191693291,
      "grad_norm": 0.4617361261302904,
      "learning_rate": 4.5850138733627435e-06,
      "loss": 0.3809,
      "step": 223
    },
    {
      "epoch": 2.146964856230032,
      "grad_norm": 0.4239909798768424,
      "learning_rate": 4.491030185478976e-06,
      "loss": 0.3707,
      "step": 224
    },
    {
      "epoch": 2.1565495207667733,
      "grad_norm": 0.4556545694731096,
      "learning_rate": 4.397740002471973e-06,
      "loss": 0.4152,
      "step": 225
    },
    {
      "epoch": 2.1661341853035143,
      "grad_norm": 0.39176510868793746,
      "learning_rate": 4.305155068315481e-06,
      "loss": 0.3372,
      "step": 226
    },
    {
      "epoch": 2.1757188498402558,
      "grad_norm": 0.4030244459032741,
      "learning_rate": 4.213287038201943e-06,
      "loss": 0.3655,
      "step": 227
    },
    {
      "epoch": 2.1853035143769968,
      "grad_norm": 0.44687274849513065,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.406,
      "step": 228
    },
    {
      "epoch": 2.194888178913738,
      "grad_norm": 0.3944935531176264,
      "learning_rate": 4.0317478581749644e-06,
      "loss": 0.372,
      "step": 229
    },
    {
      "epoch": 2.2044728434504792,
      "grad_norm": 0.40549466106843335,
      "learning_rate": 3.942099561591802e-06,
      "loss": 0.4101,
      "step": 230
    },
    {
      "epoch": 2.2140575079872207,
      "grad_norm": 0.33462197396443605,
      "learning_rate": 3.853213872835229e-06,
      "loss": 0.3337,
      "step": 231
    },
    {
      "epoch": 2.2236421725239617,
      "grad_norm": 0.4099961550027212,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.4058,
      "step": 232
    },
    {
      "epoch": 2.2332268370607027,
      "grad_norm": 0.4303748197397045,
      "learning_rate": 3.677774979420904e-06,
      "loss": 0.3918,
      "step": 233
    },
    {
      "epoch": 2.242811501597444,
      "grad_norm": 0.35212450130725803,
      "learning_rate": 3.591243860149759e-06,
      "loss": 0.3621,
      "step": 234
    },
    {
      "epoch": 2.252396166134185,
      "grad_norm": 0.35000139197525687,
      "learning_rate": 3.505519516698165e-06,
      "loss": 0.3925,
      "step": 235
    },
    {
      "epoch": 2.2619808306709266,
      "grad_norm": 0.34903321724384345,
      "learning_rate": 3.4206127406028744e-06,
      "loss": 0.3365,
      "step": 236
    },
    {
      "epoch": 2.2715654952076676,
      "grad_norm": 0.3495761955307319,
      "learning_rate": 3.3365342204799613e-06,
      "loss": 0.3639,
      "step": 237
    },
    {
      "epoch": 2.281150159744409,
      "grad_norm": 0.3559973478644341,
      "learning_rate": 3.2532945406792573e-06,
      "loss": 0.3617,
      "step": 238
    },
    {
      "epoch": 2.29073482428115,
      "grad_norm": 0.3830718690572721,
      "learning_rate": 3.1709041799519312e-06,
      "loss": 0.3911,
      "step": 239
    },
    {
      "epoch": 2.3003194888178915,
      "grad_norm": 0.3328630776804002,
      "learning_rate": 3.089373510131354e-06,
      "loss": 0.3543,
      "step": 240
    },
    {
      "epoch": 2.3099041533546325,
      "grad_norm": 0.31154242898607376,
      "learning_rate": 3.0087127948274264e-06,
      "loss": 0.3149,
      "step": 241
    },
    {
      "epoch": 2.319488817891374,
      "grad_norm": 0.3589233869362801,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.3974,
      "step": 242
    },
    {
      "epoch": 2.329073482428115,
      "grad_norm": 0.32834847417622853,
      "learning_rate": 2.850041733353247e-06,
      "loss": 0.3367,
      "step": 243
    },
    {
      "epoch": 2.3386581469648564,
      "grad_norm": 0.35226107708331433,
      "learning_rate": 2.7720513617260857e-06,
      "loss": 0.3803,
      "step": 244
    },
    {
      "epoch": 2.3482428115015974,
      "grad_norm": 0.32975966920662375,
      "learning_rate": 2.694970891187225e-06,
      "loss": 0.3815,
      "step": 245
    },
    {
      "epoch": 2.357827476038339,
      "grad_norm": 0.35188866709000055,
      "learning_rate": 2.6188100251265947e-06,
      "loss": 0.3935,
      "step": 246
    },
    {
      "epoch": 2.36741214057508,
      "grad_norm": 0.3180683406057308,
      "learning_rate": 2.5435783511683444e-06,
      "loss": 0.3229,
      "step": 247
    },
    {
      "epoch": 2.376996805111821,
      "grad_norm": 0.3659434112836393,
      "learning_rate": 2.469285339963892e-06,
      "loss": 0.383,
      "step": 248
    },
    {
      "epoch": 2.3865814696485623,
      "grad_norm": 0.3548003669740148,
      "learning_rate": 2.395940343999691e-06,
      "loss": 0.3828,
      "step": 249
    },
    {
      "epoch": 2.3961661341853033,
      "grad_norm": 0.3271621525630648,
      "learning_rate": 2.323552596419889e-06,
      "loss": 0.3815,
      "step": 250
    },
    {
      "epoch": 2.405750798722045,
      "grad_norm": 0.33849807601480136,
      "learning_rate": 2.2521312098639914e-06,
      "loss": 0.3819,
      "step": 251
    },
    {
      "epoch": 2.415335463258786,
      "grad_norm": 0.3558165011110794,
      "learning_rate": 2.1816851753197023e-06,
      "loss": 0.4087,
      "step": 252
    },
    {
      "epoch": 2.4249201277955272,
      "grad_norm": 0.3353146638437276,
      "learning_rate": 2.1122233609910903e-06,
      "loss": 0.3657,
      "step": 253
    },
    {
      "epoch": 2.4345047923322682,
      "grad_norm": 0.33861735298320605,
      "learning_rate": 2.043754511182191e-06,
      "loss": 0.3457,
      "step": 254
    },
    {
      "epoch": 2.4440894568690097,
      "grad_norm": 0.35349259037933695,
      "learning_rate": 1.9762872451962214e-06,
      "loss": 0.3919,
      "step": 255
    },
    {
      "epoch": 2.4536741214057507,
      "grad_norm": 0.3027836405978437,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.3691,
      "step": 256
    },
    {
      "epoch": 2.463258785942492,
      "grad_norm": 0.3156869857843323,
      "learning_rate": 1.8443913104073984e-06,
      "loss": 0.364,
      "step": 257
    },
    {
      "epoch": 2.472843450479233,
      "grad_norm": 0.3386183902480921,
      "learning_rate": 1.7799792455209019e-06,
      "loss": 0.3308,
      "step": 258
    },
    {
      "epoch": 2.4824281150159746,
      "grad_norm": 0.3665955325247484,
      "learning_rate": 1.716601970199836e-06,
      "loss": 0.3769,
      "step": 259
    },
    {
      "epoch": 2.4920127795527156,
      "grad_norm": 0.36101096856608017,
      "learning_rate": 1.6542674627869738e-06,
      "loss": 0.4193,
      "step": 260
    },
    {
      "epoch": 2.501597444089457,
      "grad_norm": 0.3385060552291423,
      "learning_rate": 1.5929835703546992e-06,
      "loss": 0.3798,
      "step": 261
    },
    {
      "epoch": 2.511182108626198,
      "grad_norm": 0.3429361880047483,
      "learning_rate": 1.5327580077171589e-06,
      "loss": 0.3632,
      "step": 262
    },
    {
      "epoch": 2.520766773162939,
      "grad_norm": 0.33218156060244974,
      "learning_rate": 1.4735983564590784e-06,
      "loss": 0.3762,
      "step": 263
    },
    {
      "epoch": 2.5303514376996805,
      "grad_norm": 0.33211325486143817,
      "learning_rate": 1.4155120639813392e-06,
      "loss": 0.3675,
      "step": 264
    },
    {
      "epoch": 2.539936102236422,
      "grad_norm": 0.33667239529882725,
      "learning_rate": 1.3585064425634542e-06,
      "loss": 0.3627,
      "step": 265
    },
    {
      "epoch": 2.549520766773163,
      "grad_norm": 0.3520337488712381,
      "learning_rate": 1.3025886684430467e-06,
      "loss": 0.4046,
      "step": 266
    },
    {
      "epoch": 2.559105431309904,
      "grad_norm": 0.3238937013404258,
      "learning_rate": 1.2477657809124632e-06,
      "loss": 0.3789,
      "step": 267
    },
    {
      "epoch": 2.5686900958466454,
      "grad_norm": 0.3268026171586817,
      "learning_rate": 1.19404468143262e-06,
      "loss": 0.3703,
      "step": 268
    },
    {
      "epoch": 2.5782747603833864,
      "grad_norm": 0.3255582279602733,
      "learning_rate": 1.1414321327642019e-06,
      "loss": 0.3394,
      "step": 269
    },
    {
      "epoch": 2.587859424920128,
      "grad_norm": 0.31640120333334965,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.3589,
      "step": 270
    },
    {
      "epoch": 2.597444089456869,
      "grad_norm": 0.3426513122706542,
      "learning_rate": 1.0395590403127487e-06,
      "loss": 0.3832,
      "step": 271
    },
    {
      "epoch": 2.6070287539936103,
      "grad_norm": 0.2998344813799427,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.3646,
      "step": 272
    },
    {
      "epoch": 2.6166134185303513,
      "grad_norm": 0.3271156026842227,
      "learning_rate": 9.421977997280596e-07,
      "loss": 0.3973,
      "step": 273
    },
    {
      "epoch": 2.626198083067093,
      "grad_norm": 0.3276947619903817,
      "learning_rate": 8.952245334118415e-07,
      "loss": 0.3828,
      "step": 274
    },
    {
      "epoch": 2.635782747603834,
      "grad_norm": 0.3208361600065217,
      "learning_rate": 8.493974353268019e-07,
      "loss": 0.3961,
      "step": 275
    },
    {
      "epoch": 2.6453674121405752,
      "grad_norm": 0.3187235157801297,
      "learning_rate": 8.047222744854943e-07,
      "loss": 0.3796,
      "step": 276
    },
    {
      "epoch": 2.6549520766773163,
      "grad_norm": 0.31827291746375636,
      "learning_rate": 7.612046748871327e-07,
      "loss": 0.3777,
      "step": 277
    },
    {
      "epoch": 2.6645367412140573,
      "grad_norm": 0.314592617245762,
      "learning_rate": 7.188501148096117e-07,
      "loss": 0.354,
      "step": 278
    },
    {
      "epoch": 2.6741214057507987,
      "grad_norm": 0.3411145251233779,
      "learning_rate": 6.776639261198581e-07,
      "loss": 0.4123,
      "step": 279
    },
    {
      "epoch": 2.68370607028754,
      "grad_norm": 0.30394694643551917,
      "learning_rate": 6.37651293602628e-07,
      "loss": 0.3542,
      "step": 280
    },
    {
      "epoch": 2.693290734824281,
      "grad_norm": 0.3294079402554833,
      "learning_rate": 5.988172543078097e-07,
      "loss": 0.3847,
      "step": 281
    },
    {
      "epoch": 2.702875399361022,
      "grad_norm": 0.3286750975459561,
      "learning_rate": 5.611666969163243e-07,
      "loss": 0.374,
      "step": 282
    },
    {
      "epoch": 2.7124600638977636,
      "grad_norm": 0.31556283150275327,
      "learning_rate": 5.247043611247127e-07,
      "loss": 0.3634,
      "step": 283
    },
    {
      "epoch": 2.722044728434505,
      "grad_norm": 0.28616841430584555,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.3363,
      "step": 284
    },
    {
      "epoch": 2.731629392971246,
      "grad_norm": 0.3347741210841466,
      "learning_rate": 4.553625646441928e-07,
      "loss": 0.3938,
      "step": 285
    },
    {
      "epoch": 2.741214057507987,
      "grad_norm": 0.303400322030674,
      "learning_rate": 4.224918331506955e-07,
      "loss": 0.3544,
      "step": 286
    },
    {
      "epoch": 2.7507987220447285,
      "grad_norm": 0.32472397378256507,
      "learning_rate": 3.908267805490051e-07,
      "loss": 0.3973,
      "step": 287
    },
    {
      "epoch": 2.7603833865814695,
      "grad_norm": 0.31173649557105365,
      "learning_rate": 3.603713930414676e-07,
      "loss": 0.3991,
      "step": 288
    },
    {
      "epoch": 2.769968051118211,
      "grad_norm": 0.3137833568204568,
      "learning_rate": 3.3112950454993633e-07,
      "loss": 0.3604,
      "step": 289
    },
    {
      "epoch": 2.779552715654952,
      "grad_norm": 0.30759530236879995,
      "learning_rate": 3.0310479623313125e-07,
      "loss": 0.3654,
      "step": 290
    },
    {
      "epoch": 2.7891373801916934,
      "grad_norm": 0.3181021720539179,
      "learning_rate": 2.7630079602323447e-07,
      "loss": 0.427,
      "step": 291
    },
    {
      "epoch": 2.7987220447284344,
      "grad_norm": 0.29030962682069206,
      "learning_rate": 2.507208781817638e-07,
      "loss": 0.3213,
      "step": 292
    },
    {
      "epoch": 2.8083067092651754,
      "grad_norm": 0.32615431635966197,
      "learning_rate": 2.2636826287480872e-07,
      "loss": 0.372,
      "step": 293
    },
    {
      "epoch": 2.817891373801917,
      "grad_norm": 0.29846601491141916,
      "learning_rate": 2.0324601576764525e-07,
      "loss": 0.3641,
      "step": 294
    },
    {
      "epoch": 2.8274760383386583,
      "grad_norm": 0.34873022597553716,
      "learning_rate": 1.81357047638816e-07,
      "loss": 0.4412,
      "step": 295
    },
    {
      "epoch": 2.8370607028753994,
      "grad_norm": 0.3147497934101791,
      "learning_rate": 1.6070411401370335e-07,
      "loss": 0.3375,
      "step": 296
    },
    {
      "epoch": 2.8466453674121404,
      "grad_norm": 0.3104782796474926,
      "learning_rate": 1.4128981481764115e-07,
      "loss": 0.398,
      "step": 297
    },
    {
      "epoch": 2.856230031948882,
      "grad_norm": 0.3140238921021234,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.3686,
      "step": 298
    },
    {
      "epoch": 2.8658146964856233,
      "grad_norm": 0.4288142439350452,
      "learning_rate": 1.0618673946963365e-07,
      "loss": 0.4175,
      "step": 299
    },
    {
      "epoch": 2.8753993610223643,
      "grad_norm": 0.2842824480889967,
      "learning_rate": 9.0502382320653e-08,
      "loss": 0.3317,
      "step": 300
    },
    {
      "epoch": 2.8849840255591053,
      "grad_norm": 0.3129493055661085,
      "learning_rate": 7.606549705035937e-08,
      "loss": 0.3893,
      "step": 301
    },
    {
      "epoch": 2.8945686900958467,
      "grad_norm": 0.31085989958792076,
      "learning_rate": 6.287790106757396e-08,
      "loss": 0.411,
      "step": 302
    },
    {
      "epoch": 2.9041533546325877,
      "grad_norm": 0.29788052650610436,
      "learning_rate": 5.094125451247656e-08,
      "loss": 0.3882,
      "step": 303
    },
    {
      "epoch": 2.913738019169329,
      "grad_norm": 0.330785113441627,
      "learning_rate": 4.025706004760932e-08,
      "loss": 0.4154,
      "step": 304
    },
    {
      "epoch": 2.92332268370607,
      "grad_norm": 0.29464975259883275,
      "learning_rate": 3.082666266872036e-08,
      "loss": 0.3293,
      "step": 305
    },
    {
      "epoch": 2.9329073482428116,
      "grad_norm": 0.29689954664312995,
      "learning_rate": 2.265124953543918e-08,
      "loss": 0.3585,
      "step": 306
    },
    {
      "epoch": 2.9424920127795526,
      "grad_norm": 0.3533149363735046,
      "learning_rate": 1.5731849821833955e-08,
      "loss": 0.4593,
      "step": 307
    },
    {
      "epoch": 2.952076677316294,
      "grad_norm": 0.28841598481778036,
      "learning_rate": 1.0069334586854106e-08,
      "loss": 0.3501,
      "step": 308
    },
    {
      "epoch": 2.961661341853035,
      "grad_norm": 0.3018611204905917,
      "learning_rate": 5.664416664666883e-09,
      "loss": 0.3676,
      "step": 309
    },
    {
      "epoch": 2.9712460063897765,
      "grad_norm": 0.3072862218552703,
      "learning_rate": 2.5176505749346937e-09,
      "loss": 0.3948,
      "step": 310
    },
    {
      "epoch": 2.9808306709265175,
      "grad_norm": 0.29271951419638015,
      "learning_rate": 6.294324529942942e-10,
      "loss": 0.3643,
      "step": 311
    },
    {
      "epoch": 2.9904153354632586,
      "grad_norm": 0.30292122661734633,
      "learning_rate": 0.0,
      "loss": 0.3668,
      "step": 312
    },
    {
      "epoch": 2.9904153354632586,
      "step": 312,
      "total_flos": 4.4115474684667494e+17,
      "train_loss": 0.46701200955953354,
      "train_runtime": 9975.6836,
      "train_samples_per_second": 3.006,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 312,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.4115474684667494e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}