{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 13.0,
  "eval_steps": 500,
  "global_step": 130,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1,
      "grad_norm": 6.386161677293477,
      "learning_rate": 7.692307692307694e-07,
      "loss": 0.8232,
      "step": 1
    },
    {
      "epoch": 0.2,
      "grad_norm": 5.788862999926078,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 0.7577,
      "step": 2
    },
    {
      "epoch": 0.3,
      "grad_norm": 6.089771869043863,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.8074,
      "step": 3
    },
    {
      "epoch": 0.4,
      "grad_norm": 5.530564848940156,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.7735,
      "step": 4
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.749387381550836,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.8007,
      "step": 5
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.98694558209613,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.7435,
      "step": 6
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.1084014505461224,
      "learning_rate": 5.384615384615385e-06,
      "loss": 0.7065,
      "step": 7
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.116946691613287,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.7246,
      "step": 8
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.9489227037720247,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.7353,
      "step": 9
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.677012003463203,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.7193,
      "step": 10
    },
    {
      "epoch": 1.1,
      "grad_norm": 3.4372002699485944,
      "learning_rate": 8.461538461538462e-06,
      "loss": 0.6508,
      "step": 11
    },
    {
      "epoch": 1.2,
      "grad_norm": 3.1144442043719662,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.6456,
      "step": 12
    },
    {
      "epoch": 1.3,
      "grad_norm": 2.307984522281305,
      "learning_rate": 1e-05,
      "loss": 0.6362,
      "step": 13
    },
    {
      "epoch": 1.4,
      "grad_norm": 1.5290107382746236,
      "learning_rate": 9.998197638354428e-06,
      "loss": 0.6371,
      "step": 14
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.697688730440158,
      "learning_rate": 9.992791852820709e-06,
      "loss": 0.6048,
      "step": 15
    },
    {
      "epoch": 1.6,
      "grad_norm": 1.5565398227671812,
      "learning_rate": 9.983786540671052e-06,
      "loss": 0.613,
      "step": 16
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.3075427642560082,
      "learning_rate": 9.971188194237141e-06,
      "loss": 0.6121,
      "step": 17
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.0269564943162095,
      "learning_rate": 9.955005896229543e-06,
      "loss": 0.6018,
      "step": 18
    },
    {
      "epoch": 1.9,
      "grad_norm": 1.0439040773392119,
      "learning_rate": 9.935251313189564e-06,
      "loss": 0.5703,
      "step": 19
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.0170369861500186,
      "learning_rate": 9.911938687078324e-06,
      "loss": 0.5473,
      "step": 20
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.7455651816973794,
      "learning_rate": 9.885084825009085e-06,
      "loss": 0.5125,
      "step": 21
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.8944746570671356,
      "learning_rate": 9.854709087130261e-06,
      "loss": 0.5163,
      "step": 22
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.803630123143971,
      "learning_rate": 9.820833372667813e-06,
      "loss": 0.5412,
      "step": 23
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.632793706217569,
      "learning_rate": 9.783482104137127e-06,
      "loss": 0.5358,
      "step": 24
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.7160848344412326,
      "learning_rate": 9.742682209735727e-06,
      "loss": 0.5214,
      "step": 25
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.7951044982500916,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.5003,
      "step": 26
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.7007069598148137,
      "learning_rate": 9.650856666246693e-06,
      "loss": 0.5117,
      "step": 27
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.6140259827250877,
      "learning_rate": 9.599897218294122e-06,
      "loss": 0.486,
      "step": 28
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.6656114536025003,
      "learning_rate": 9.54562149901362e-06,
      "loss": 0.4916,
      "step": 29
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.6892716419036327,
      "learning_rate": 9.488068638195072e-06,
      "loss": 0.506,
      "step": 30
    },
    {
      "epoch": 3.1,
      "grad_norm": 0.7244884673532641,
      "learning_rate": 9.427280128266049e-06,
      "loss": 0.4912,
      "step": 31
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.5426847683274364,
      "learning_rate": 9.363299794378072e-06,
      "loss": 0.4332,
      "step": 32
    },
    {
      "epoch": 3.3,
      "grad_norm": 0.5785685300456992,
      "learning_rate": 9.296173762811084e-06,
      "loss": 0.4303,
      "step": 33
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.6153621619103451,
      "learning_rate": 9.225950427718974e-06,
      "loss": 0.4517,
      "step": 34
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.6367398000548985,
      "learning_rate": 9.152680416240059e-06,
      "loss": 0.4478,
      "step": 35
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.5427160630798059,
      "learning_rate": 9.076416551997721e-06,
      "loss": 0.4179,
      "step": 36
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.5791551626727143,
      "learning_rate": 8.997213817017508e-06,
      "loss": 0.4134,
      "step": 37
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.6274653571131423,
      "learning_rate": 8.915129312088112e-06,
      "loss": 0.441,
      "step": 38
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.5514390937253335,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.4107,
      "step": 39
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.6316582659631306,
      "learning_rate": 8.742553740855507e-06,
      "loss": 0.4468,
      "step": 40
    },
    {
      "epoch": 4.1,
      "grad_norm": 0.5479914139148436,
      "learning_rate": 8.652187091988516e-06,
      "loss": 0.392,
      "step": 41
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.5841482883441135,
      "learning_rate": 8.559187418346703e-06,
      "loss": 0.3847,
      "step": 42
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.48049170424730986,
      "learning_rate": 8.463621767547998e-06,
      "loss": 0.3692,
      "step": 43
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.6382821765846853,
      "learning_rate": 8.36555903713785e-06,
      "loss": 0.3709,
      "step": 44
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.5432285085480744,
      "learning_rate": 8.265069924917925e-06,
      "loss": 0.3901,
      "step": 45
    },
    {
      "epoch": 4.6,
      "grad_norm": 0.556019826799584,
      "learning_rate": 8.162226877976886e-06,
      "loss": 0.3559,
      "step": 46
    },
    {
      "epoch": 4.7,
      "grad_norm": 0.6307920959245049,
      "learning_rate": 8.057104040460062e-06,
      "loss": 0.3741,
      "step": 47
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.5340433239758777,
      "learning_rate": 7.949777200115617e-06,
      "loss": 0.3604,
      "step": 48
    },
    {
      "epoch": 4.9,
      "grad_norm": 0.5436995835285903,
      "learning_rate": 7.84032373365578e-06,
      "loss": 0.3603,
      "step": 49
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.5881925777832674,
      "learning_rate": 7.728822550972523e-06,
      "loss": 0.3532,
      "step": 50
    },
    {
      "epoch": 5.1,
      "grad_norm": 0.49189206693924803,
      "learning_rate": 7.615354038247889e-06,
      "loss": 0.3302,
      "step": 51
    },
    {
      "epoch": 5.2,
      "grad_norm": 0.46315657559951595,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3092,
      "step": 52
    },
    {
      "epoch": 5.3,
      "grad_norm": 0.7851916742807717,
      "learning_rate": 7.382843600106539e-06,
      "loss": 0.3225,
      "step": 53
    },
    {
      "epoch": 5.4,
      "grad_norm": 0.49506852008578145,
      "learning_rate": 7.263969301848188e-06,
      "loss": 0.3023,
      "step": 54
    },
    {
      "epoch": 5.5,
      "grad_norm": 0.5345899008912007,
      "learning_rate": 7.143462807015271e-06,
      "loss": 0.3088,
      "step": 55
    },
    {
      "epoch": 5.6,
      "grad_norm": 0.4990114393805594,
      "learning_rate": 7.021410994121525e-06,
      "loss": 0.2849,
      "step": 56
    },
    {
      "epoch": 5.7,
      "grad_norm": 0.5476860773850676,
      "learning_rate": 6.897901855769483e-06,
      "loss": 0.3232,
      "step": 57
    },
    {
      "epoch": 5.8,
      "grad_norm": 0.5265711060859419,
      "learning_rate": 6.773024435212678e-06,
      "loss": 0.3193,
      "step": 58
    },
    {
      "epoch": 5.9,
      "grad_norm": 0.5105710927433839,
      "learning_rate": 6.646868762160399e-06,
      "loss": 0.2988,
      "step": 59
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.5616629150774188,
      "learning_rate": 6.519525787871235e-06,
      "loss": 0.2897,
      "step": 60
    },
    {
      "epoch": 6.1,
      "grad_norm": 0.6372519651405034,
      "learning_rate": 6.391087319582264e-06,
      "loss": 0.2849,
      "step": 61
    },
    {
      "epoch": 6.2,
      "grad_norm": 0.5196816963211703,
      "learning_rate": 6.261645954321109e-06,
      "loss": 0.2475,
      "step": 62
    },
    {
      "epoch": 6.3,
      "grad_norm": 0.834610615369563,
      "learning_rate": 6.131295012148613e-06,
      "loss": 0.2566,
      "step": 63
    },
    {
      "epoch": 6.4,
      "grad_norm": 0.5721374328330264,
      "learning_rate": 6.000128468880223e-06,
      "loss": 0.2403,
      "step": 64
    },
    {
      "epoch": 6.5,
      "grad_norm": 0.5376236567732122,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.2451,
      "step": 65
    },
    {
      "epoch": 6.6,
      "grad_norm": 0.5809480508904948,
      "learning_rate": 5.735727354158581e-06,
      "loss": 0.264,
      "step": 66
    },
    {
      "epoch": 6.7,
      "grad_norm": 0.5274059473129785,
      "learning_rate": 5.6026834012766155e-06,
      "loss": 0.2441,
      "step": 67
    },
    {
      "epoch": 6.8,
      "grad_norm": 0.5345433168955913,
      "learning_rate": 5.469204947015897e-06,
      "loss": 0.2461,
      "step": 68
    },
    {
      "epoch": 6.9,
      "grad_norm": 0.5222685205615564,
      "learning_rate": 5.335388221955012e-06,
      "loss": 0.2394,
      "step": 69
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.5190654609353134,
      "learning_rate": 5.201329700547077e-06,
      "loss": 0.2288,
      "step": 70
    },
    {
      "epoch": 7.1,
      "grad_norm": 0.5838330491733378,
      "learning_rate": 5.067126031566988e-06,
      "loss": 0.2077,
      "step": 71
    },
    {
      "epoch": 7.2,
      "grad_norm": 0.5089602893209374,
      "learning_rate": 4.932873968433014e-06,
      "loss": 0.2105,
      "step": 72
    },
    {
      "epoch": 7.3,
      "grad_norm": 0.5070846588969402,
      "learning_rate": 4.798670299452926e-06,
      "loss": 0.2103,
      "step": 73
    },
    {
      "epoch": 7.4,
      "grad_norm": 0.6883031359270138,
      "learning_rate": 4.664611778044988e-06,
      "loss": 0.2018,
      "step": 74
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.5239025704353519,
      "learning_rate": 4.530795052984104e-06,
      "loss": 0.2032,
      "step": 75
    },
    {
      "epoch": 7.6,
      "grad_norm": 0.5239130307793262,
      "learning_rate": 4.397316598723385e-06,
      "loss": 0.1925,
      "step": 76
    },
    {
      "epoch": 7.7,
      "grad_norm": 0.42493855494696026,
      "learning_rate": 4.264272645841419e-06,
      "loss": 0.1873,
      "step": 77
    },
    {
      "epoch": 7.8,
      "grad_norm": 0.4775651169983152,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.205,
      "step": 78
    },
    {
      "epoch": 7.9,
      "grad_norm": 0.5531392554272524,
      "learning_rate": 3.999871531119779e-06,
      "loss": 0.1824,
      "step": 79
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.4437431227666663,
      "learning_rate": 3.86870498785139e-06,
      "loss": 0.1854,
      "step": 80
    },
    {
      "epoch": 8.1,
      "grad_norm": 0.48770248259764865,
      "learning_rate": 3.7383540456788915e-06,
      "loss": 0.1711,
      "step": 81
    },
    {
      "epoch": 8.2,
      "grad_norm": 0.46242554796567836,
      "learning_rate": 3.6089126804177373e-06,
      "loss": 0.1664,
      "step": 82
    },
    {
      "epoch": 8.3,
      "grad_norm": 0.5233044042522361,
      "learning_rate": 3.480474212128766e-06,
      "loss": 0.167,
      "step": 83
    },
    {
      "epoch": 8.4,
      "grad_norm": 0.443376133846697,
      "learning_rate": 3.3531312378396026e-06,
      "loss": 0.1495,
      "step": 84
    },
    {
      "epoch": 8.5,
      "grad_norm": 0.49486073217191295,
      "learning_rate": 3.226975564787322e-06,
      "loss": 0.1678,
      "step": 85
    },
    {
      "epoch": 8.6,
      "grad_norm": 0.43216776634060416,
      "learning_rate": 3.1020981442305187e-06,
      "loss": 0.1433,
      "step": 86
    },
    {
      "epoch": 8.7,
      "grad_norm": 0.46149119325242743,
      "learning_rate": 2.978589005878476e-06,
      "loss": 0.1463,
      "step": 87
    },
    {
      "epoch": 8.8,
      "grad_norm": 0.43503072543238425,
      "learning_rate": 2.8565371929847286e-06,
      "loss": 0.1433,
      "step": 88
    },
    {
      "epoch": 8.9,
      "grad_norm": 0.4733010195230542,
      "learning_rate": 2.736030698151815e-06,
      "loss": 0.1561,
      "step": 89
    },
    {
      "epoch": 9.0,
      "grad_norm": 0.4657865067556952,
      "learning_rate": 2.6171563998934605e-06,
      "loss": 0.1584,
      "step": 90
    },
    {
      "epoch": 9.1,
      "grad_norm": 0.6007397623854547,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.1324,
      "step": 91
    },
    {
      "epoch": 9.2,
      "grad_norm": 0.4193969928262585,
      "learning_rate": 2.384645961752113e-06,
      "loss": 0.1301,
      "step": 92
    },
    {
      "epoch": 9.3,
      "grad_norm": 0.4458332069117744,
      "learning_rate": 2.2711774490274767e-06,
      "loss": 0.1196,
      "step": 93
    },
    {
      "epoch": 9.4,
      "grad_norm": 0.6050128721325025,
      "learning_rate": 2.159676266344222e-06,
      "loss": 0.1505,
      "step": 94
    },
    {
      "epoch": 9.5,
      "grad_norm": 0.5615260624763575,
      "learning_rate": 2.050222799884387e-06,
      "loss": 0.1294,
      "step": 95
    },
    {
      "epoch": 9.6,
      "grad_norm": 0.42708844812970337,
      "learning_rate": 1.942895959539939e-06,
      "loss": 0.119,
      "step": 96
    },
    {
      "epoch": 9.7,
      "grad_norm": 0.5230551382534893,
      "learning_rate": 1.8377731220231144e-06,
      "loss": 0.1311,
      "step": 97
    },
    {
      "epoch": 9.8,
      "grad_norm": 0.5165273700880167,
      "learning_rate": 1.7349300750820758e-06,
      "loss": 0.1258,
      "step": 98
    },
    {
      "epoch": 9.9,
      "grad_norm": 0.45249293196876417,
      "learning_rate": 1.6344409628621482e-06,
      "loss": 0.1213,
      "step": 99
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.43209481821479434,
      "learning_rate": 1.5363782324520033e-06,
      "loss": 0.1164,
      "step": 100
    },
    {
      "epoch": 10.1,
      "grad_norm": 0.4279310862097852,
      "learning_rate": 1.4408125816532981e-06,
      "loss": 0.1156,
      "step": 101
    },
    {
      "epoch": 10.2,
      "grad_norm": 0.5022381366166877,
      "learning_rate": 1.347812908011485e-06,
      "loss": 0.1194,
      "step": 102
    },
    {
      "epoch": 10.3,
      "grad_norm": 0.4297307588471197,
      "learning_rate": 1.257446259144494e-06,
      "loss": 0.1075,
      "step": 103
    },
    {
      "epoch": 10.4,
      "grad_norm": 0.4627925195345167,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.1174,
      "step": 104
    },
    {
      "epoch": 10.5,
      "grad_norm": 0.5354554790064636,
      "learning_rate": 1.0848706879118893e-06,
      "loss": 0.099,
      "step": 105
    },
    {
      "epoch": 10.6,
      "grad_norm": 0.424493294145761,
      "learning_rate": 1.0027861829824953e-06,
      "loss": 0.1006,
      "step": 106
    },
    {
      "epoch": 10.7,
      "grad_norm": 0.3826938814101227,
      "learning_rate": 9.235834480022788e-07,
      "loss": 0.1143,
      "step": 107
    },
    {
      "epoch": 10.8,
      "grad_norm": 0.439350861945064,
      "learning_rate": 8.473195837599419e-07,
      "loss": 0.1074,
      "step": 108
    },
    {
      "epoch": 10.9,
      "grad_norm": 0.4070012509892313,
      "learning_rate": 7.740495722810271e-07,
      "loss": 0.1044,
      "step": 109
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.4061365640158113,
      "learning_rate": 7.03826237188916e-07,
      "loss": 0.1086,
      "step": 110
    },
    {
      "epoch": 11.1,
      "grad_norm": 0.41713574157716543,
      "learning_rate": 6.367002056219285e-07,
      "loss": 0.1062,
      "step": 111
    },
    {
      "epoch": 11.2,
      "grad_norm": 0.40116707546042335,
      "learning_rate": 5.727198717339511e-07,
      "loss": 0.1089,
      "step": 112
    },
    {
      "epoch": 11.3,
      "grad_norm": 0.37558887796710816,
      "learning_rate": 5.119313618049309e-07,
      "loss": 0.1039,
      "step": 113
    },
    {
      "epoch": 11.4,
      "grad_norm": 0.3773946018145813,
      "learning_rate": 4.54378500986381e-07,
      "loss": 0.1041,
      "step": 114
    },
    {
      "epoch": 11.5,
      "grad_norm": 0.3818729923747989,
      "learning_rate": 4.001027817058789e-07,
      "loss": 0.0968,
      "step": 115
    },
    {
      "epoch": 11.6,
      "grad_norm": 0.41531243956811437,
      "learning_rate": 3.49143333753309e-07,
      "loss": 0.0968,
      "step": 116
    },
    {
      "epoch": 11.7,
      "grad_norm": 0.41969496107916626,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.0912,
      "step": 117
    },
    {
      "epoch": 11.8,
      "grad_norm": 0.522155151859783,
      "learning_rate": 2.573177902642726e-07,
      "loss": 0.1023,
      "step": 118
    },
    {
      "epoch": 11.9,
      "grad_norm": 0.3825514360143708,
      "learning_rate": 2.1651789586287442e-07,
      "loss": 0.0958,
      "step": 119
    },
    {
      "epoch": 12.0,
      "grad_norm": 0.4004379053777073,
      "learning_rate": 1.7916662733218848e-07,
      "loss": 0.0884,
      "step": 120
    },
    {
      "epoch": 12.1,
      "grad_norm": 0.3479547459076913,
      "learning_rate": 1.4529091286973994e-07,
      "loss": 0.0954,
      "step": 121
    },
    {
      "epoch": 12.2,
      "grad_norm": 0.3553194553043943,
      "learning_rate": 1.1491517499091498e-07,
      "loss": 0.0958,
      "step": 122
    },
    {
      "epoch": 12.3,
      "grad_norm": 0.3524081827483391,
      "learning_rate": 8.80613129216762e-08,
      "loss": 0.0964,
      "step": 123
    },
    {
      "epoch": 12.4,
      "grad_norm": 0.3666981249678168,
      "learning_rate": 6.474868681043578e-08,
      "loss": 0.095,
      "step": 124
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.3488860601634519,
      "learning_rate": 4.499410377045765e-08,
      "loss": 0.0929,
      "step": 125
    },
    {
      "epoch": 12.6,
      "grad_norm": 0.3330379072924316,
      "learning_rate": 2.8811805762860578e-08,
      "loss": 0.0966,
      "step": 126
    },
    {
      "epoch": 12.7,
      "grad_norm": 0.36599631260024057,
      "learning_rate": 1.6213459328950355e-08,
      "loss": 0.0936,
      "step": 127
    },
    {
      "epoch": 12.8,
      "grad_norm": 0.36249992824323646,
      "learning_rate": 7.2081471792911914e-09,
      "loss": 0.1011,
      "step": 128
    },
    {
      "epoch": 12.9,
      "grad_norm": 0.34083319122623174,
      "learning_rate": 1.8023616455731253e-09,
      "loss": 0.0922,
      "step": 129
    },
    {
      "epoch": 13.0,
      "grad_norm": 0.33065097672172516,
      "learning_rate": 0.0,
      "loss": 0.0941,
      "step": 130
    },
    {
      "epoch": 13.0,
      "step": 130,
      "total_flos": 8.494680528807526e+16,
      "train_loss": 0.3106696891096922,
      "train_runtime": 1872.4404,
      "train_samples_per_second": 2.194,
      "train_steps_per_second": 0.069
    }
  ],
  "logging_steps": 1,
  "max_steps": 130,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 13,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.494680528807526e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}