{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 13.0,
  "eval_steps": 500,
  "global_step": 130,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1,
      "grad_norm": 6.406434822955886,
      "learning_rate": 7.692307692307694e-07,
      "loss": 0.8184,
      "step": 1
    },
    {
      "epoch": 0.2,
      "grad_norm": 6.050120875628806,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 0.7751,
      "step": 2
    },
    {
      "epoch": 0.3,
      "grad_norm": 5.973162061519674,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.7837,
      "step": 3
    },
    {
      "epoch": 0.4,
      "grad_norm": 6.240081653995824,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.8498,
      "step": 4
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.397548775254785,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.7572,
      "step": 5
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.172348291544264,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.7761,
      "step": 6
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.042997335597531,
      "learning_rate": 5.384615384615385e-06,
      "loss": 0.6876,
      "step": 7
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.9985692110248405,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.6836,
      "step": 8
    },
    {
      "epoch": 0.9,
      "grad_norm": 4.0017636216604195,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.743,
      "step": 9
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.001397606486537,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.6965,
      "step": 10
    },
    {
      "epoch": 1.1,
      "grad_norm": 3.3907240731148844,
      "learning_rate": 8.461538461538462e-06,
      "loss": 0.6517,
      "step": 11
    },
    {
      "epoch": 1.2,
      "grad_norm": 3.4724283078163096,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.6744,
      "step": 12
    },
    {
      "epoch": 1.3,
      "grad_norm": 2.2062783344708725,
      "learning_rate": 1e-05,
      "loss": 0.6217,
      "step": 13
    },
    {
      "epoch": 1.4,
      "grad_norm": 1.5139535578992613,
      "learning_rate": 9.998197638354428e-06,
      "loss": 0.647,
      "step": 14
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.4998811352419374,
      "learning_rate": 9.992791852820709e-06,
      "loss": 0.6382,
      "step": 15
    },
    {
      "epoch": 1.6,
      "grad_norm": 1.5152193679244708,
      "learning_rate": 9.983786540671052e-06,
      "loss": 0.6005,
      "step": 16
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.3145190337086123,
      "learning_rate": 9.971188194237141e-06,
      "loss": 0.5995,
      "step": 17
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.8867964338383407,
      "learning_rate": 9.955005896229543e-06,
      "loss": 0.5899,
      "step": 18
    },
    {
      "epoch": 1.9,
      "grad_norm": 1.05839871676367,
      "learning_rate": 9.935251313189564e-06,
      "loss": 0.5485,
      "step": 19
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.9906608834588045,
      "learning_rate": 9.911938687078324e-06,
      "loss": 0.5949,
      "step": 20
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.8007350656249063,
      "learning_rate": 9.885084825009085e-06,
      "loss": 0.5138,
      "step": 21
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.8824785901318277,
      "learning_rate": 9.854709087130261e-06,
      "loss": 0.5351,
      "step": 22
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.711400863283526,
      "learning_rate": 9.820833372667813e-06,
      "loss": 0.5099,
      "step": 23
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.7213992666671218,
      "learning_rate": 9.783482104137127e-06,
      "loss": 0.5059,
      "step": 24
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.7855387101768623,
      "learning_rate": 9.742682209735727e-06,
      "loss": 0.5104,
      "step": 25
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.8752906594677986,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.4932,
      "step": 26
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.8037774778927409,
      "learning_rate": 9.650856666246693e-06,
      "loss": 0.5227,
      "step": 27
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.6447586134635721,
      "learning_rate": 9.599897218294122e-06,
      "loss": 0.5382,
      "step": 28
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.7688158191556873,
      "learning_rate": 9.54562149901362e-06,
      "loss": 0.5191,
      "step": 29
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.7293374831788674,
      "learning_rate": 9.488068638195072e-06,
      "loss": 0.4871,
      "step": 30
    },
    {
      "epoch": 3.1,
      "grad_norm": 0.6491622694568068,
      "learning_rate": 9.427280128266049e-06,
      "loss": 0.4639,
      "step": 31
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.6114910471730025,
      "learning_rate": 9.363299794378072e-06,
      "loss": 0.4625,
      "step": 32
    },
    {
      "epoch": 3.3,
      "grad_norm": 0.5788052371967694,
      "learning_rate": 9.296173762811084e-06,
      "loss": 0.4376,
      "step": 33
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.4997790383243411,
      "learning_rate": 9.225950427718974e-06,
      "loss": 0.4252,
      "step": 34
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.6138943105359977,
      "learning_rate": 9.152680416240059e-06,
      "loss": 0.44,
      "step": 35
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.7466556876227075,
      "learning_rate": 9.076416551997721e-06,
      "loss": 0.4495,
      "step": 36
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.5417590899215192,
      "learning_rate": 8.997213817017508e-06,
      "loss": 0.4255,
      "step": 37
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.5620818018440447,
      "learning_rate": 8.915129312088112e-06,
      "loss": 0.4265,
      "step": 38
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.5768283035576824,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.4582,
      "step": 39
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.5446235172234148,
      "learning_rate": 8.742553740855507e-06,
      "loss": 0.4376,
      "step": 40
    },
    {
      "epoch": 4.1,
      "grad_norm": 0.4858658023497463,
      "learning_rate": 8.652187091988516e-06,
      "loss": 0.3799,
      "step": 41
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.5615679556175788,
      "learning_rate": 8.559187418346703e-06,
      "loss": 0.3973,
      "step": 42
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.5174139912714532,
      "learning_rate": 8.463621767547998e-06,
      "loss": 0.3669,
      "step": 43
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.523592420752059,
      "learning_rate": 8.36555903713785e-06,
      "loss": 0.3805,
      "step": 44
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.5916421788221691,
      "learning_rate": 8.265069924917925e-06,
      "loss": 0.3969,
      "step": 45
    },
    {
      "epoch": 4.6,
      "grad_norm": 0.5718656306016315,
      "learning_rate": 8.162226877976886e-06,
      "loss": 0.3943,
      "step": 46
    },
    {
      "epoch": 4.7,
      "grad_norm": 0.5774244564572114,
      "learning_rate": 8.057104040460062e-06,
      "loss": 0.3525,
      "step": 47
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.5391795609620162,
      "learning_rate": 7.949777200115617e-06,
      "loss": 0.377,
      "step": 48
    },
    {
      "epoch": 4.9,
      "grad_norm": 0.5605098032466217,
      "learning_rate": 7.84032373365578e-06,
      "loss": 0.3747,
      "step": 49
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.5698276703284092,
      "learning_rate": 7.728822550972523e-06,
      "loss": 0.3541,
      "step": 50
    },
    {
      "epoch": 5.1,
      "grad_norm": 0.5305710024735654,
      "learning_rate": 7.615354038247889e-06,
      "loss": 0.3409,
      "step": 51
    },
    {
      "epoch": 5.2,
      "grad_norm": 0.5177761634861122,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3125,
      "step": 52
    },
    {
      "epoch": 5.3,
      "grad_norm": 0.5695074379743326,
      "learning_rate": 7.382843600106539e-06,
      "loss": 0.331,
      "step": 53
    },
    {
      "epoch": 5.4,
      "grad_norm": 0.5433519622291749,
      "learning_rate": 7.263969301848188e-06,
      "loss": 0.3242,
      "step": 54
    },
    {
      "epoch": 5.5,
      "grad_norm": 0.5333226029726502,
      "learning_rate": 7.143462807015271e-06,
      "loss": 0.3129,
      "step": 55
    },
    {
      "epoch": 5.6,
      "grad_norm": 0.5136297799005793,
      "learning_rate": 7.021410994121525e-06,
      "loss": 0.3165,
      "step": 56
    },
    {
      "epoch": 5.7,
      "grad_norm": 0.4891911271140611,
      "learning_rate": 6.897901855769483e-06,
      "loss": 0.3156,
      "step": 57
    },
    {
      "epoch": 5.8,
      "grad_norm": 0.4708007358074562,
      "learning_rate": 6.773024435212678e-06,
      "loss": 0.3061,
      "step": 58
    },
    {
      "epoch": 5.9,
      "grad_norm": 0.5425580852805459,
      "learning_rate": 6.646868762160399e-06,
      "loss": 0.3084,
      "step": 59
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.5493570530206037,
      "learning_rate": 6.519525787871235e-06,
      "loss": 0.2861,
      "step": 60
    },
    {
      "epoch": 6.1,
      "grad_norm": 0.4833067925873124,
      "learning_rate": 6.391087319582264e-06,
      "loss": 0.2815,
      "step": 61
    },
    {
      "epoch": 6.2,
      "grad_norm": 0.49602275476882435,
      "learning_rate": 6.261645954321109e-06,
      "loss": 0.2612,
      "step": 62
    },
    {
      "epoch": 6.3,
      "grad_norm": 0.6567436236873981,
      "learning_rate": 6.131295012148613e-06,
      "loss": 0.2762,
      "step": 63
    },
    {
      "epoch": 6.4,
      "grad_norm": 0.47076138820947433,
      "learning_rate": 6.000128468880223e-06,
      "loss": 0.2506,
      "step": 64
    },
    {
      "epoch": 6.5,
      "grad_norm": 0.5185544219548429,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.2522,
      "step": 65
    },
    {
      "epoch": 6.6,
      "grad_norm": 0.5141879144829652,
      "learning_rate": 5.735727354158581e-06,
      "loss": 0.2555,
      "step": 66
    },
    {
      "epoch": 6.7,
      "grad_norm": 0.45111183558895634,
      "learning_rate": 5.6026834012766155e-06,
      "loss": 0.2451,
      "step": 67
    },
    {
      "epoch": 6.8,
      "grad_norm": 0.5508482395178778,
      "learning_rate": 5.469204947015897e-06,
      "loss": 0.2528,
      "step": 68
    },
    {
      "epoch": 6.9,
      "grad_norm": 0.5939116369983654,
      "learning_rate": 5.335388221955012e-06,
      "loss": 0.2463,
      "step": 69
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.48701851450165085,
      "learning_rate": 5.201329700547077e-06,
      "loss": 0.2484,
      "step": 70
    },
    {
      "epoch": 7.1,
      "grad_norm": 0.5924911882015839,
      "learning_rate": 5.067126031566988e-06,
      "loss": 0.2245,
      "step": 71
    },
    {
      "epoch": 7.2,
      "grad_norm": 0.5106877318469961,
      "learning_rate": 4.932873968433014e-06,
      "loss": 0.2132,
      "step": 72
    },
    {
      "epoch": 7.3,
      "grad_norm": 0.6550115699580499,
      "learning_rate": 4.798670299452926e-06,
      "loss": 0.1946,
      "step": 73
    },
    {
      "epoch": 7.4,
      "grad_norm": 0.6378120713474744,
      "learning_rate": 4.664611778044988e-06,
      "loss": 0.1983,
      "step": 74
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.5523836747155239,
      "learning_rate": 4.530795052984104e-06,
      "loss": 0.2091,
      "step": 75
    },
    {
      "epoch": 7.6,
      "grad_norm": 0.6017585190535615,
      "learning_rate": 4.397316598723385e-06,
      "loss": 0.2015,
      "step": 76
    },
    {
      "epoch": 7.7,
      "grad_norm": 0.4623423067246445,
      "learning_rate": 4.264272645841419e-06,
      "loss": 0.2026,
      "step": 77
    },
    {
      "epoch": 7.8,
      "grad_norm": 0.5311265961326088,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.1998,
      "step": 78
    },
    {
      "epoch": 7.9,
      "grad_norm": 0.5633293866199777,
      "learning_rate": 3.999871531119779e-06,
      "loss": 0.2134,
      "step": 79
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.48099000790792046,
      "learning_rate": 3.86870498785139e-06,
      "loss": 0.1993,
      "step": 80
    },
    {
      "epoch": 8.1,
      "grad_norm": 0.628125234755407,
      "learning_rate": 3.7383540456788915e-06,
      "loss": 0.1758,
      "step": 81
    },
    {
      "epoch": 8.2,
      "grad_norm": 0.5274655507343834,
      "learning_rate": 3.6089126804177373e-06,
      "loss": 0.177,
      "step": 82
    },
    {
      "epoch": 8.3,
      "grad_norm": 0.560295745218829,
      "learning_rate": 3.480474212128766e-06,
      "loss": 0.1768,
      "step": 83
    },
    {
      "epoch": 8.4,
      "grad_norm": 0.6373574973339968,
      "learning_rate": 3.3531312378396026e-06,
      "loss": 0.1571,
      "step": 84
    },
    {
      "epoch": 8.5,
      "grad_norm": 0.532651139921807,
      "learning_rate": 3.226975564787322e-06,
      "loss": 0.1655,
      "step": 85
    },
    {
      "epoch": 8.6,
      "grad_norm": 0.4596914335546075,
      "learning_rate": 3.1020981442305187e-06,
      "loss": 0.1469,
      "step": 86
    },
    {
      "epoch": 8.7,
      "grad_norm": 0.46693477478096307,
      "learning_rate": 2.978589005878476e-06,
      "loss": 0.1633,
      "step": 87
    },
    {
      "epoch": 8.8,
      "grad_norm": 0.47802268923339625,
      "learning_rate": 2.8565371929847286e-06,
      "loss": 0.1652,
      "step": 88
    },
    {
      "epoch": 8.9,
      "grad_norm": 0.4748270656820746,
      "learning_rate": 2.736030698151815e-06,
      "loss": 0.156,
      "step": 89
    },
    {
      "epoch": 9.0,
      "grad_norm": 0.4509056379100403,
      "learning_rate": 2.6171563998934605e-06,
      "loss": 0.1531,
      "step": 90
    },
    {
      "epoch": 9.1,
      "grad_norm": 0.5231058102855545,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.1387,
      "step": 91
    },
    {
      "epoch": 9.2,
      "grad_norm": 0.46519672106847,
      "learning_rate": 2.384645961752113e-06,
      "loss": 0.1403,
      "step": 92
    },
    {
      "epoch": 9.3,
      "grad_norm": 0.4610724522598861,
      "learning_rate": 2.2711774490274767e-06,
      "loss": 0.1225,
      "step": 93
    },
    {
      "epoch": 9.4,
      "grad_norm": 0.5462753922132856,
      "learning_rate": 2.159676266344222e-06,
      "loss": 0.149,
      "step": 94
    },
    {
      "epoch": 9.5,
      "grad_norm": 0.4854015842562361,
      "learning_rate": 2.050222799884387e-06,
      "loss": 0.1286,
      "step": 95
    },
    {
      "epoch": 9.6,
      "grad_norm": 0.44699667872921667,
      "learning_rate": 1.942895959539939e-06,
      "loss": 0.138,
      "step": 96
    },
    {
      "epoch": 9.7,
      "grad_norm": 0.46575730526640735,
      "learning_rate": 1.8377731220231144e-06,
      "loss": 0.13,
      "step": 97
    },
    {
      "epoch": 9.8,
      "grad_norm": 0.5160350222503428,
      "learning_rate": 1.7349300750820758e-06,
      "loss": 0.1304,
      "step": 98
    },
    {
      "epoch": 9.9,
      "grad_norm": 0.44446591013040804,
      "learning_rate": 1.6344409628621482e-06,
      "loss": 0.1353,
      "step": 99
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.41182911189466076,
      "learning_rate": 1.5363782324520033e-06,
      "loss": 0.1207,
      "step": 100
    },
    {
      "epoch": 10.1,
      "grad_norm": 0.39551485823622784,
      "learning_rate": 1.4408125816532981e-06,
      "loss": 0.1259,
      "step": 101
    },
    {
      "epoch": 10.2,
      "grad_norm": 0.3947288811794202,
      "learning_rate": 1.347812908011485e-06,
      "loss": 0.1109,
      "step": 102
    },
    {
      "epoch": 10.3,
      "grad_norm": 0.45940398345062416,
      "learning_rate": 1.257446259144494e-06,
      "loss": 0.1091,
      "step": 103
    },
    {
      "epoch": 10.4,
      "grad_norm": 0.4342771733323927,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.1162,
      "step": 104
    },
    {
      "epoch": 10.5,
      "grad_norm": 0.48255390506499657,
      "learning_rate": 1.0848706879118893e-06,
      "loss": 0.1253,
      "step": 105
    },
    {
      "epoch": 10.6,
      "grad_norm": 0.3868242861484001,
      "learning_rate": 1.0027861829824953e-06,
      "loss": 0.1033,
      "step": 106
    },
    {
      "epoch": 10.7,
      "grad_norm": 0.41846883483365804,
      "learning_rate": 9.235834480022788e-07,
      "loss": 0.1215,
      "step": 107
    },
    {
      "epoch": 10.8,
      "grad_norm": 0.39470733507984346,
      "learning_rate": 8.473195837599419e-07,
      "loss": 0.1178,
      "step": 108
    },
    {
      "epoch": 10.9,
      "grad_norm": 0.4414264815415124,
      "learning_rate": 7.740495722810271e-07,
      "loss": 0.1065,
      "step": 109
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.3996918434589147,
      "learning_rate": 7.03826237188916e-07,
      "loss": 0.1096,
      "step": 110
    },
    {
      "epoch": 11.1,
      "grad_norm": 0.473066401851595,
      "learning_rate": 6.367002056219285e-07,
      "loss": 0.109,
      "step": 111
    },
    {
      "epoch": 11.2,
      "grad_norm": 0.3850055105838304,
      "learning_rate": 5.727198717339511e-07,
      "loss": 0.1152,
      "step": 112
    },
    {
      "epoch": 11.3,
      "grad_norm": 0.35651815751033067,
      "learning_rate": 5.119313618049309e-07,
      "loss": 0.1058,
      "step": 113
    },
    {
      "epoch": 11.4,
      "grad_norm": 0.36639706051855464,
      "learning_rate": 4.54378500986381e-07,
      "loss": 0.1086,
      "step": 114
    },
    {
      "epoch": 11.5,
      "grad_norm": 0.3637609711601751,
      "learning_rate": 4.001027817058789e-07,
      "loss": 0.101,
      "step": 115
    },
    {
      "epoch": 11.6,
      "grad_norm": 0.3917886287928482,
      "learning_rate": 3.49143333753309e-07,
      "loss": 0.1026,
      "step": 116
    },
    {
      "epoch": 11.7,
      "grad_norm": 0.415556307319554,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.1044,
      "step": 117
    },
    {
      "epoch": 11.8,
      "grad_norm": 0.41917660446727584,
      "learning_rate": 2.573177902642726e-07,
      "loss": 0.0994,
      "step": 118
    },
    {
      "epoch": 11.9,
      "grad_norm": 0.39031115952953677,
      "learning_rate": 2.1651789586287442e-07,
      "loss": 0.1048,
      "step": 119
    },
    {
      "epoch": 12.0,
      "grad_norm": 0.3936212931230405,
      "learning_rate": 1.7916662733218848e-07,
      "loss": 0.0973,
      "step": 120
    },
    {
      "epoch": 12.1,
      "grad_norm": 0.3674851826076043,
      "learning_rate": 1.4529091286973994e-07,
      "loss": 0.103,
      "step": 121
    },
    {
      "epoch": 12.2,
      "grad_norm": 0.32808820001282346,
      "learning_rate": 1.1491517499091498e-07,
      "loss": 0.1028,
      "step": 122
    },
    {
      "epoch": 12.3,
      "grad_norm": 0.3383623944755406,
      "learning_rate": 8.80613129216762e-08,
      "loss": 0.0959,
      "step": 123
    },
    {
      "epoch": 12.4,
      "grad_norm": 0.3446835188012497,
      "learning_rate": 6.474868681043578e-08,
      "loss": 0.0942,
      "step": 124
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.3400962969587619,
      "learning_rate": 4.499410377045765e-08,
      "loss": 0.0933,
      "step": 125
    },
    {
      "epoch": 12.6,
      "grad_norm": 0.3368079573448201,
      "learning_rate": 2.8811805762860578e-08,
      "loss": 0.1046,
      "step": 126
    },
    {
      "epoch": 12.7,
      "grad_norm": 0.3425746558057366,
      "learning_rate": 1.6213459328950355e-08,
      "loss": 0.1004,
      "step": 127
    },
    {
      "epoch": 12.8,
      "grad_norm": 0.3249227034331293,
      "learning_rate": 7.2081471792911914e-09,
      "loss": 0.1005,
      "step": 128
    },
    {
      "epoch": 12.9,
      "grad_norm": 0.33980073926096327,
      "learning_rate": 1.8023616455731253e-09,
      "loss": 0.1064,
      "step": 129
    },
    {
      "epoch": 13.0,
      "grad_norm": 0.34472991481965193,
      "learning_rate": 0.0,
      "loss": 0.1025,
      "step": 130
    },
    {
      "epoch": 13.0,
      "step": 130,
      "total_flos": 9.157709874069504e+16,
      "train_loss": 0.3155503452970431,
      "train_runtime": 2341.7222,
      "train_samples_per_second": 1.754,
      "train_steps_per_second": 0.056
    }
  ],
  "logging_steps": 1,
  "max_steps": 130,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 13,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.157709874069504e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}