| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 10.0, |
| "eval_steps": 500, |
| "global_step": 113710, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.04397150646381145, |
| "grad_norm": 7.127589225769043, |
| "learning_rate": 9.956028493536189e-06, |
| "loss": 0.6405, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.0879430129276229, |
| "grad_norm": 10.408858299255371, |
| "learning_rate": 9.912056987072378e-06, |
| "loss": 0.4966, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.13191451939143434, |
| "grad_norm": 8.54762077331543, |
| "learning_rate": 9.868085480608566e-06, |
| "loss": 0.4625, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.1758860258552458, |
| "grad_norm": 8.460881233215332, |
| "learning_rate": 9.824113974144756e-06, |
| "loss": 0.4435, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.21985753231905725, |
| "grad_norm": 6.150853633880615, |
| "learning_rate": 9.780142467680944e-06, |
| "loss": 0.4288, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.2638290387828687, |
| "grad_norm": 7.34383487701416, |
| "learning_rate": 9.736170961217132e-06, |
| "loss": 0.4045, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.30780054524668016, |
| "grad_norm": 10.842817306518555, |
| "learning_rate": 9.69219945475332e-06, |
| "loss": 0.4014, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.3517720517104916, |
| "grad_norm": 11.085785865783691, |
| "learning_rate": 9.64822794828951e-06, |
| "loss": 0.4087, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.39574355817430307, |
| "grad_norm": 10.106760025024414, |
| "learning_rate": 9.604256441825697e-06, |
| "loss": 0.391, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.4397150646381145, |
| "grad_norm": 12.384110450744629, |
| "learning_rate": 9.560284935361887e-06, |
| "loss": 0.3846, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.483686571101926, |
| "grad_norm": 6.796555519104004, |
| "learning_rate": 9.516313428898075e-06, |
| "loss": 0.3803, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.5276580775657373, |
| "grad_norm": 10.099310874938965, |
| "learning_rate": 9.472341922434265e-06, |
| "loss": 0.3739, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.5716295840295489, |
| "grad_norm": 8.258896827697754, |
| "learning_rate": 9.428370415970451e-06, |
| "loss": 0.3694, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.6156010904933603, |
| "grad_norm": 6.4571356773376465, |
| "learning_rate": 9.38439890950664e-06, |
| "loss": 0.3582, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.6595725969571717, |
| "grad_norm": 10.41157341003418, |
| "learning_rate": 9.340427403042828e-06, |
| "loss": 0.3611, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.7035441034209832, |
| "grad_norm": 8.555697441101074, |
| "learning_rate": 9.296455896579018e-06, |
| "loss": 0.3491, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.7475156098847947, |
| "grad_norm": 10.460009574890137, |
| "learning_rate": 9.252484390115206e-06, |
| "loss": 0.3507, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.7914871163486061, |
| "grad_norm": 9.326970100402832, |
| "learning_rate": 9.208512883651394e-06, |
| "loss": 0.3465, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.8354586228124176, |
| "grad_norm": 12.12388801574707, |
| "learning_rate": 9.164541377187584e-06, |
| "loss": 0.3419, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.879430129276229, |
| "grad_norm": 9.742505073547363, |
| "learning_rate": 9.120569870723772e-06, |
| "loss": 0.3492, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.9234016357400404, |
| "grad_norm": 8.42115306854248, |
| "learning_rate": 9.07659836425996e-06, |
| "loss": 0.3405, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.967373142203852, |
| "grad_norm": 11.557007789611816, |
| "learning_rate": 9.03262685779615e-06, |
| "loss": 0.3353, |
| "step": 11000 |
| }, |
| { |
| "epoch": 1.0113446486676634, |
| "grad_norm": 6.1636786460876465, |
| "learning_rate": 8.988655351332337e-06, |
| "loss": 0.3218, |
| "step": 11500 |
| }, |
| { |
| "epoch": 1.0553161551314747, |
| "grad_norm": 8.901851654052734, |
| "learning_rate": 8.944683844868525e-06, |
| "loss": 0.2812, |
| "step": 12000 |
| }, |
| { |
| "epoch": 1.0992876615952862, |
| "grad_norm": 8.753206253051758, |
| "learning_rate": 8.900712338404715e-06, |
| "loss": 0.2744, |
| "step": 12500 |
| }, |
| { |
| "epoch": 1.1432591680590978, |
| "grad_norm": 19.687353134155273, |
| "learning_rate": 8.856740831940903e-06, |
| "loss": 0.2716, |
| "step": 13000 |
| }, |
| { |
| "epoch": 1.187230674522909, |
| "grad_norm": 8.538763999938965, |
| "learning_rate": 8.812769325477092e-06, |
| "loss": 0.2731, |
| "step": 13500 |
| }, |
| { |
| "epoch": 1.2312021809867206, |
| "grad_norm": 14.86081314086914, |
| "learning_rate": 8.76879781901328e-06, |
| "loss": 0.2686, |
| "step": 14000 |
| }, |
| { |
| "epoch": 1.2751736874505322, |
| "grad_norm": 8.427505493164062, |
| "learning_rate": 8.72482631254947e-06, |
| "loss": 0.2694, |
| "step": 14500 |
| }, |
| { |
| "epoch": 1.3191451939143435, |
| "grad_norm": 11.6419095993042, |
| "learning_rate": 8.680854806085656e-06, |
| "loss": 0.2666, |
| "step": 15000 |
| }, |
| { |
| "epoch": 1.363116700378155, |
| "grad_norm": 11.019262313842773, |
| "learning_rate": 8.636883299621846e-06, |
| "loss": 0.2738, |
| "step": 15500 |
| }, |
| { |
| "epoch": 1.4070882068419663, |
| "grad_norm": 12.447158813476562, |
| "learning_rate": 8.592911793158034e-06, |
| "loss": 0.2712, |
| "step": 16000 |
| }, |
| { |
| "epoch": 1.4510597133057779, |
| "grad_norm": 13.097723007202148, |
| "learning_rate": 8.548940286694223e-06, |
| "loss": 0.2635, |
| "step": 16500 |
| }, |
| { |
| "epoch": 1.4950312197695892, |
| "grad_norm": 15.15329360961914, |
| "learning_rate": 8.504968780230411e-06, |
| "loss": 0.2662, |
| "step": 17000 |
| }, |
| { |
| "epoch": 1.5390027262334007, |
| "grad_norm": 6.069318771362305, |
| "learning_rate": 8.460997273766601e-06, |
| "loss": 0.2604, |
| "step": 17500 |
| }, |
| { |
| "epoch": 1.5829742326972123, |
| "grad_norm": 13.966374397277832, |
| "learning_rate": 8.417025767302789e-06, |
| "loss": 0.2657, |
| "step": 18000 |
| }, |
| { |
| "epoch": 1.6269457391610236, |
| "grad_norm": 21.772634506225586, |
| "learning_rate": 8.373054260838977e-06, |
| "loss": 0.262, |
| "step": 18500 |
| }, |
| { |
| "epoch": 1.6709172456248351, |
| "grad_norm": 11.764993667602539, |
| "learning_rate": 8.329082754375165e-06, |
| "loss": 0.2721, |
| "step": 19000 |
| }, |
| { |
| "epoch": 1.7148887520886467, |
| "grad_norm": 11.220842361450195, |
| "learning_rate": 8.285111247911354e-06, |
| "loss": 0.2611, |
| "step": 19500 |
| }, |
| { |
| "epoch": 1.758860258552458, |
| "grad_norm": 12.129228591918945, |
| "learning_rate": 8.241139741447542e-06, |
| "loss": 0.2684, |
| "step": 20000 |
| }, |
| { |
| "epoch": 1.8028317650162693, |
| "grad_norm": 13.62337875366211, |
| "learning_rate": 8.19716823498373e-06, |
| "loss": 0.2593, |
| "step": 20500 |
| }, |
| { |
| "epoch": 1.846803271480081, |
| "grad_norm": 9.742755889892578, |
| "learning_rate": 8.15319672851992e-06, |
| "loss": 0.2633, |
| "step": 21000 |
| }, |
| { |
| "epoch": 1.8907747779438924, |
| "grad_norm": 11.295736312866211, |
| "learning_rate": 8.109225222056108e-06, |
| "loss": 0.2652, |
| "step": 21500 |
| }, |
| { |
| "epoch": 1.9347462844077037, |
| "grad_norm": 8.879314422607422, |
| "learning_rate": 8.065253715592298e-06, |
| "loss": 0.2651, |
| "step": 22000 |
| }, |
| { |
| "epoch": 1.9787177908715152, |
| "grad_norm": 9.949023246765137, |
| "learning_rate": 8.021282209128485e-06, |
| "loss": 0.27, |
| "step": 22500 |
| }, |
| { |
| "epoch": 2.0226892973353268, |
| "grad_norm": 4.5543622970581055, |
| "learning_rate": 7.977310702664673e-06, |
| "loss": 0.2283, |
| "step": 23000 |
| }, |
| { |
| "epoch": 2.066660803799138, |
| "grad_norm": 13.172735214233398, |
| "learning_rate": 7.933339196200861e-06, |
| "loss": 0.2061, |
| "step": 23500 |
| }, |
| { |
| "epoch": 2.1106323102629494, |
| "grad_norm": 12.874068260192871, |
| "learning_rate": 7.889367689737051e-06, |
| "loss": 0.2019, |
| "step": 24000 |
| }, |
| { |
| "epoch": 2.154603816726761, |
| "grad_norm": 17.02819061279297, |
| "learning_rate": 7.845396183273239e-06, |
| "loss": 0.2046, |
| "step": 24500 |
| }, |
| { |
| "epoch": 2.1985753231905725, |
| "grad_norm": 15.347699165344238, |
| "learning_rate": 7.801424676809429e-06, |
| "loss": 0.2035, |
| "step": 25000 |
| }, |
| { |
| "epoch": 2.242546829654384, |
| "grad_norm": 15.82715892791748, |
| "learning_rate": 7.757453170345617e-06, |
| "loss": 0.2088, |
| "step": 25500 |
| }, |
| { |
| "epoch": 2.2865183361181955, |
| "grad_norm": 7.533407211303711, |
| "learning_rate": 7.713481663881806e-06, |
| "loss": 0.2063, |
| "step": 26000 |
| }, |
| { |
| "epoch": 2.330489842582007, |
| "grad_norm": 21.629718780517578, |
| "learning_rate": 7.669510157417994e-06, |
| "loss": 0.2115, |
| "step": 26500 |
| }, |
| { |
| "epoch": 2.374461349045818, |
| "grad_norm": 15.344660758972168, |
| "learning_rate": 7.625538650954182e-06, |
| "loss": 0.2008, |
| "step": 27000 |
| }, |
| { |
| "epoch": 2.41843285550963, |
| "grad_norm": 12.559733390808105, |
| "learning_rate": 7.58156714449037e-06, |
| "loss": 0.1939, |
| "step": 27500 |
| }, |
| { |
| "epoch": 2.4624043619734413, |
| "grad_norm": 13.165969848632812, |
| "learning_rate": 7.537595638026559e-06, |
| "loss": 0.2105, |
| "step": 28000 |
| }, |
| { |
| "epoch": 2.5063758684372526, |
| "grad_norm": 12.558066368103027, |
| "learning_rate": 7.493624131562748e-06, |
| "loss": 0.1951, |
| "step": 28500 |
| }, |
| { |
| "epoch": 2.5503473749010643, |
| "grad_norm": 13.841544151306152, |
| "learning_rate": 7.449652625098936e-06, |
| "loss": 0.1997, |
| "step": 29000 |
| }, |
| { |
| "epoch": 2.5943188813648757, |
| "grad_norm": 6.036389350891113, |
| "learning_rate": 7.405681118635125e-06, |
| "loss": 0.2056, |
| "step": 29500 |
| }, |
| { |
| "epoch": 2.638290387828687, |
| "grad_norm": 24.980592727661133, |
| "learning_rate": 7.361709612171314e-06, |
| "loss": 0.2021, |
| "step": 30000 |
| }, |
| { |
| "epoch": 2.6822618942924983, |
| "grad_norm": 3.522369861602783, |
| "learning_rate": 7.317738105707503e-06, |
| "loss": 0.2109, |
| "step": 30500 |
| }, |
| { |
| "epoch": 2.72623340075631, |
| "grad_norm": 1.9453462362289429, |
| "learning_rate": 7.273766599243692e-06, |
| "loss": 0.1934, |
| "step": 31000 |
| }, |
| { |
| "epoch": 2.7702049072201214, |
| "grad_norm": 14.508398056030273, |
| "learning_rate": 7.229795092779879e-06, |
| "loss": 0.2064, |
| "step": 31500 |
| }, |
| { |
| "epoch": 2.8141764136839327, |
| "grad_norm": 11.040177345275879, |
| "learning_rate": 7.1858235863160675e-06, |
| "loss": 0.2016, |
| "step": 32000 |
| }, |
| { |
| "epoch": 2.8581479201477444, |
| "grad_norm": 7.400022506713867, |
| "learning_rate": 7.141852079852256e-06, |
| "loss": 0.2043, |
| "step": 32500 |
| }, |
| { |
| "epoch": 2.9021194266115558, |
| "grad_norm": 5.055144309997559, |
| "learning_rate": 7.097880573388445e-06, |
| "loss": 0.1952, |
| "step": 33000 |
| }, |
| { |
| "epoch": 2.946090933075367, |
| "grad_norm": 10.668570518493652, |
| "learning_rate": 7.053909066924634e-06, |
| "loss": 0.1949, |
| "step": 33500 |
| }, |
| { |
| "epoch": 2.9900624395391784, |
| "grad_norm": 2.9759585857391357, |
| "learning_rate": 7.009937560460822e-06, |
| "loss": 0.1987, |
| "step": 34000 |
| }, |
| { |
| "epoch": 3.03403394600299, |
| "grad_norm": 17.75304412841797, |
| "learning_rate": 6.965966053997011e-06, |
| "loss": 0.1568, |
| "step": 34500 |
| }, |
| { |
| "epoch": 3.0780054524668015, |
| "grad_norm": 30.556795120239258, |
| "learning_rate": 6.921994547533199e-06, |
| "loss": 0.1476, |
| "step": 35000 |
| }, |
| { |
| "epoch": 3.1219769589306128, |
| "grad_norm": 5.082170486450195, |
| "learning_rate": 6.878023041069387e-06, |
| "loss": 0.1513, |
| "step": 35500 |
| }, |
| { |
| "epoch": 3.1659484653944245, |
| "grad_norm": 1.4760559797286987, |
| "learning_rate": 6.834051534605576e-06, |
| "loss": 0.1551, |
| "step": 36000 |
| }, |
| { |
| "epoch": 3.209919971858236, |
| "grad_norm": 2.336458683013916, |
| "learning_rate": 6.790080028141764e-06, |
| "loss": 0.1554, |
| "step": 36500 |
| }, |
| { |
| "epoch": 3.253891478322047, |
| "grad_norm": 23.16608428955078, |
| "learning_rate": 6.746108521677953e-06, |
| "loss": 0.1529, |
| "step": 37000 |
| }, |
| { |
| "epoch": 3.297862984785859, |
| "grad_norm": 10.948084831237793, |
| "learning_rate": 6.702137015214142e-06, |
| "loss": 0.1583, |
| "step": 37500 |
| }, |
| { |
| "epoch": 3.3418344912496702, |
| "grad_norm": 15.917346954345703, |
| "learning_rate": 6.6581655087503305e-06, |
| "loss": 0.167, |
| "step": 38000 |
| }, |
| { |
| "epoch": 3.3858059977134816, |
| "grad_norm": 1.641739845275879, |
| "learning_rate": 6.614194002286519e-06, |
| "loss": 0.1539, |
| "step": 38500 |
| }, |
| { |
| "epoch": 3.4297775041772933, |
| "grad_norm": 0.5845322608947754, |
| "learning_rate": 6.570222495822708e-06, |
| "loss": 0.1576, |
| "step": 39000 |
| }, |
| { |
| "epoch": 3.4737490106411046, |
| "grad_norm": 16.46019744873047, |
| "learning_rate": 6.526250989358895e-06, |
| "loss": 0.1561, |
| "step": 39500 |
| }, |
| { |
| "epoch": 3.517720517104916, |
| "grad_norm": 31.895326614379883, |
| "learning_rate": 6.482279482895084e-06, |
| "loss": 0.1459, |
| "step": 40000 |
| }, |
| { |
| "epoch": 3.5616920235687273, |
| "grad_norm": 3.9819185733795166, |
| "learning_rate": 6.438307976431273e-06, |
| "loss": 0.1623, |
| "step": 40500 |
| }, |
| { |
| "epoch": 3.605663530032539, |
| "grad_norm": 13.1094970703125, |
| "learning_rate": 6.3943364699674615e-06, |
| "loss": 0.1566, |
| "step": 41000 |
| }, |
| { |
| "epoch": 3.6496350364963503, |
| "grad_norm": 6.929986953735352, |
| "learning_rate": 6.35036496350365e-06, |
| "loss": 0.1656, |
| "step": 41500 |
| }, |
| { |
| "epoch": 3.6936065429601617, |
| "grad_norm": 7.520695686340332, |
| "learning_rate": 6.306393457039839e-06, |
| "loss": 0.1599, |
| "step": 42000 |
| }, |
| { |
| "epoch": 3.7375780494239734, |
| "grad_norm": 11.514050483703613, |
| "learning_rate": 6.262421950576028e-06, |
| "loss": 0.1664, |
| "step": 42500 |
| }, |
| { |
| "epoch": 3.7815495558877847, |
| "grad_norm": 1.5625334978103638, |
| "learning_rate": 6.218450444112216e-06, |
| "loss": 0.1603, |
| "step": 43000 |
| }, |
| { |
| "epoch": 3.825521062351596, |
| "grad_norm": 22.2111873626709, |
| "learning_rate": 6.174478937648405e-06, |
| "loss": 0.1645, |
| "step": 43500 |
| }, |
| { |
| "epoch": 3.8694925688154074, |
| "grad_norm": 9.370536804199219, |
| "learning_rate": 6.130507431184593e-06, |
| "loss": 0.153, |
| "step": 44000 |
| }, |
| { |
| "epoch": 3.913464075279219, |
| "grad_norm": 1.3509089946746826, |
| "learning_rate": 6.086535924720781e-06, |
| "loss": 0.1605, |
| "step": 44500 |
| }, |
| { |
| "epoch": 3.9574355817430305, |
| "grad_norm": 1.670966386795044, |
| "learning_rate": 6.04256441825697e-06, |
| "loss": 0.1602, |
| "step": 45000 |
| }, |
| { |
| "epoch": 4.001407088206842, |
| "grad_norm": 22.55781364440918, |
| "learning_rate": 5.998592911793158e-06, |
| "loss": 0.1582, |
| "step": 45500 |
| }, |
| { |
| "epoch": 4.0453785946706535, |
| "grad_norm": 18.12227439880371, |
| "learning_rate": 5.954621405329347e-06, |
| "loss": 0.1256, |
| "step": 46000 |
| }, |
| { |
| "epoch": 4.089350101134465, |
| "grad_norm": 11.872714042663574, |
| "learning_rate": 5.910649898865536e-06, |
| "loss": 0.1241, |
| "step": 46500 |
| }, |
| { |
| "epoch": 4.133321607598276, |
| "grad_norm": 26.38855743408203, |
| "learning_rate": 5.8666783924017245e-06, |
| "loss": 0.1301, |
| "step": 47000 |
| }, |
| { |
| "epoch": 4.1772931140620875, |
| "grad_norm": 25.3267879486084, |
| "learning_rate": 5.822706885937913e-06, |
| "loss": 0.1236, |
| "step": 47500 |
| }, |
| { |
| "epoch": 4.221264620525899, |
| "grad_norm": 27.222675323486328, |
| "learning_rate": 5.7787353794741e-06, |
| "loss": 0.1217, |
| "step": 48000 |
| }, |
| { |
| "epoch": 4.265236126989711, |
| "grad_norm": 53.458518981933594, |
| "learning_rate": 5.734763873010289e-06, |
| "loss": 0.1315, |
| "step": 48500 |
| }, |
| { |
| "epoch": 4.309207633453522, |
| "grad_norm": 27.357641220092773, |
| "learning_rate": 5.690792366546478e-06, |
| "loss": 0.1253, |
| "step": 49000 |
| }, |
| { |
| "epoch": 4.353179139917334, |
| "grad_norm": 18.440649032592773, |
| "learning_rate": 5.646820860082667e-06, |
| "loss": 0.132, |
| "step": 49500 |
| }, |
| { |
| "epoch": 4.397150646381145, |
| "grad_norm": 8.659114837646484, |
| "learning_rate": 5.6028493536188556e-06, |
| "loss": 0.1262, |
| "step": 50000 |
| }, |
| { |
| "epoch": 4.441122152844956, |
| "grad_norm": 22.664106369018555, |
| "learning_rate": 5.558877847155044e-06, |
| "loss": 0.1185, |
| "step": 50500 |
| }, |
| { |
| "epoch": 4.485093659308768, |
| "grad_norm": 2.488041400909424, |
| "learning_rate": 5.514906340691233e-06, |
| "loss": 0.1264, |
| "step": 51000 |
| }, |
| { |
| "epoch": 4.529065165772579, |
| "grad_norm": 35.29222869873047, |
| "learning_rate": 5.470934834227422e-06, |
| "loss": 0.134, |
| "step": 51500 |
| }, |
| { |
| "epoch": 4.573036672236391, |
| "grad_norm": 25.144588470458984, |
| "learning_rate": 5.426963327763609e-06, |
| "loss": 0.1265, |
| "step": 52000 |
| }, |
| { |
| "epoch": 4.617008178700202, |
| "grad_norm": 1.2160580158233643, |
| "learning_rate": 5.382991821299798e-06, |
| "loss": 0.1306, |
| "step": 52500 |
| }, |
| { |
| "epoch": 4.660979685164014, |
| "grad_norm": 22.59756088256836, |
| "learning_rate": 5.339020314835987e-06, |
| "loss": 0.127, |
| "step": 53000 |
| }, |
| { |
| "epoch": 4.704951191627825, |
| "grad_norm": 17.720712661743164, |
| "learning_rate": 5.295048808372175e-06, |
| "loss": 0.1233, |
| "step": 53500 |
| }, |
| { |
| "epoch": 4.748922698091636, |
| "grad_norm": 24.757396697998047, |
| "learning_rate": 5.251077301908364e-06, |
| "loss": 0.135, |
| "step": 54000 |
| }, |
| { |
| "epoch": 4.792894204555449, |
| "grad_norm": 13.89427375793457, |
| "learning_rate": 5.207105795444552e-06, |
| "loss": 0.1232, |
| "step": 54500 |
| }, |
| { |
| "epoch": 4.83686571101926, |
| "grad_norm": 6.014657974243164, |
| "learning_rate": 5.163134288980741e-06, |
| "loss": 0.1322, |
| "step": 55000 |
| }, |
| { |
| "epoch": 4.880837217483071, |
| "grad_norm": 16.53226089477539, |
| "learning_rate": 5.11916278251693e-06, |
| "loss": 0.1254, |
| "step": 55500 |
| }, |
| { |
| "epoch": 4.9248087239468825, |
| "grad_norm": 4.393960952758789, |
| "learning_rate": 5.075191276053118e-06, |
| "loss": 0.1343, |
| "step": 56000 |
| }, |
| { |
| "epoch": 4.968780230410694, |
| "grad_norm": 6.577863693237305, |
| "learning_rate": 5.0312197695893065e-06, |
| "loss": 0.1302, |
| "step": 56500 |
| }, |
| { |
| "epoch": 5.012751736874505, |
| "grad_norm": 0.385308176279068, |
| "learning_rate": 4.987248263125494e-06, |
| "loss": 0.1134, |
| "step": 57000 |
| }, |
| { |
| "epoch": 5.0567232433383165, |
| "grad_norm": 17.352643966674805, |
| "learning_rate": 4.943276756661683e-06, |
| "loss": 0.1024, |
| "step": 57500 |
| }, |
| { |
| "epoch": 5.100694749802129, |
| "grad_norm": 44.88164520263672, |
| "learning_rate": 4.899305250197872e-06, |
| "loss": 0.1095, |
| "step": 58000 |
| }, |
| { |
| "epoch": 5.14466625626594, |
| "grad_norm": 2.88139009475708, |
| "learning_rate": 4.855333743734061e-06, |
| "loss": 0.1022, |
| "step": 58500 |
| }, |
| { |
| "epoch": 5.188637762729751, |
| "grad_norm": 30.369657516479492, |
| "learning_rate": 4.81136223727025e-06, |
| "loss": 0.1029, |
| "step": 59000 |
| }, |
| { |
| "epoch": 5.232609269193563, |
| "grad_norm": 21.04435920715332, |
| "learning_rate": 4.7673907308064375e-06, |
| "loss": 0.0973, |
| "step": 59500 |
| }, |
| { |
| "epoch": 5.276580775657374, |
| "grad_norm": 0.3326770067214966, |
| "learning_rate": 4.723419224342626e-06, |
| "loss": 0.0969, |
| "step": 60000 |
| }, |
| { |
| "epoch": 5.320552282121185, |
| "grad_norm": 26.21080207824707, |
| "learning_rate": 4.679447717878815e-06, |
| "loss": 0.1072, |
| "step": 60500 |
| }, |
| { |
| "epoch": 5.364523788584997, |
| "grad_norm": 66.27179718017578, |
| "learning_rate": 4.635476211415004e-06, |
| "loss": 0.11, |
| "step": 61000 |
| }, |
| { |
| "epoch": 5.408495295048809, |
| "grad_norm": 0.6907067894935608, |
| "learning_rate": 4.591504704951192e-06, |
| "loss": 0.1077, |
| "step": 61500 |
| }, |
| { |
| "epoch": 5.45246680151262, |
| "grad_norm": 18.443483352661133, |
| "learning_rate": 4.547533198487381e-06, |
| "loss": 0.0996, |
| "step": 62000 |
| }, |
| { |
| "epoch": 5.496438307976431, |
| "grad_norm": 30.685518264770508, |
| "learning_rate": 4.5035616920235695e-06, |
| "loss": 0.0937, |
| "step": 62500 |
| }, |
| { |
| "epoch": 5.540409814440243, |
| "grad_norm": 8.975428581237793, |
| "learning_rate": 4.459590185559758e-06, |
| "loss": 0.1074, |
| "step": 63000 |
| }, |
| { |
| "epoch": 5.584381320904054, |
| "grad_norm": 21.28555679321289, |
| "learning_rate": 4.415618679095946e-06, |
| "loss": 0.0958, |
| "step": 63500 |
| }, |
| { |
| "epoch": 5.628352827367865, |
| "grad_norm": 35.58885192871094, |
| "learning_rate": 4.371647172632135e-06, |
| "loss": 0.1072, |
| "step": 64000 |
| }, |
| { |
| "epoch": 5.672324333831677, |
| "grad_norm": 31.6656436920166, |
| "learning_rate": 4.327675666168324e-06, |
| "loss": 0.1075, |
| "step": 64500 |
| }, |
| { |
| "epoch": 5.716295840295489, |
| "grad_norm": 1.064644455909729, |
| "learning_rate": 4.283704159704512e-06, |
| "loss": 0.1069, |
| "step": 65000 |
| }, |
| { |
| "epoch": 5.7602673467593, |
| "grad_norm": 8.807537078857422, |
| "learning_rate": 4.2397326532407005e-06, |
| "loss": 0.1059, |
| "step": 65500 |
| }, |
| { |
| "epoch": 5.8042388532231115, |
| "grad_norm": 6.445720672607422, |
| "learning_rate": 4.1957611467768885e-06, |
| "loss": 0.1046, |
| "step": 66000 |
| }, |
| { |
| "epoch": 5.848210359686923, |
| "grad_norm": 10.035616874694824, |
| "learning_rate": 4.151789640313077e-06, |
| "loss": 0.1061, |
| "step": 66500 |
| }, |
| { |
| "epoch": 5.892181866150734, |
| "grad_norm": 0.3110928535461426, |
| "learning_rate": 4.107818133849266e-06, |
| "loss": 0.1087, |
| "step": 67000 |
| }, |
| { |
| "epoch": 5.9361533726145455, |
| "grad_norm": 1.1699435710906982, |
| "learning_rate": 4.063846627385454e-06, |
| "loss": 0.101, |
| "step": 67500 |
| }, |
| { |
| "epoch": 5.980124879078357, |
| "grad_norm": 4.037410736083984, |
| "learning_rate": 4.019875120921643e-06, |
| "loss": 0.1123, |
| "step": 68000 |
| }, |
| { |
| "epoch": 6.024096385542169, |
| "grad_norm": 17.392499923706055, |
| "learning_rate": 3.975903614457832e-06, |
| "loss": 0.0904, |
| "step": 68500 |
| }, |
| { |
| "epoch": 6.06806789200598, |
| "grad_norm": 2.474785089492798, |
| "learning_rate": 3.93193210799402e-06, |
| "loss": 0.078, |
| "step": 69000 |
| }, |
| { |
| "epoch": 6.112039398469792, |
| "grad_norm": 6.585563659667969, |
| "learning_rate": 3.887960601530208e-06, |
| "loss": 0.0805, |
| "step": 69500 |
| }, |
| { |
| "epoch": 6.156010904933603, |
| "grad_norm": 22.513330459594727, |
| "learning_rate": 3.843989095066397e-06, |
| "loss": 0.0839, |
| "step": 70000 |
| }, |
| { |
| "epoch": 6.199982411397414, |
| "grad_norm": 13.566338539123535, |
| "learning_rate": 3.800017588602586e-06, |
| "loss": 0.0812, |
| "step": 70500 |
| }, |
| { |
| "epoch": 6.2439539178612256, |
| "grad_norm": 0.06740322709083557, |
| "learning_rate": 3.7560460821387747e-06, |
| "loss": 0.085, |
| "step": 71000 |
| }, |
| { |
| "epoch": 6.287925424325038, |
| "grad_norm": 7.70227575302124, |
| "learning_rate": 3.7120745756749626e-06, |
| "loss": 0.0797, |
| "step": 71500 |
| }, |
| { |
| "epoch": 6.331896930788849, |
| "grad_norm": 0.8043323159217834, |
| "learning_rate": 3.6681030692111514e-06, |
| "loss": 0.083, |
| "step": 72000 |
| }, |
| { |
| "epoch": 6.37586843725266, |
| "grad_norm": 0.23601041734218597, |
| "learning_rate": 3.62413156274734e-06, |
| "loss": 0.0881, |
| "step": 72500 |
| }, |
| { |
| "epoch": 6.419839943716472, |
| "grad_norm": 24.715349197387695, |
| "learning_rate": 3.5801600562835286e-06, |
| "loss": 0.0845, |
| "step": 73000 |
| }, |
| { |
| "epoch": 6.463811450180283, |
| "grad_norm": 32.790096282958984, |
| "learning_rate": 3.5361885498197174e-06, |
| "loss": 0.0873, |
| "step": 73500 |
| }, |
| { |
| "epoch": 6.507782956644094, |
| "grad_norm": 0.44961002469062805, |
| "learning_rate": 3.4922170433559053e-06, |
| "loss": 0.0926, |
| "step": 74000 |
| }, |
| { |
| "epoch": 6.5517544631079065, |
| "grad_norm": 30.2086181640625, |
| "learning_rate": 3.448245536892094e-06, |
| "loss": 0.0869, |
| "step": 74500 |
| }, |
| { |
| "epoch": 6.595725969571718, |
| "grad_norm": 21.381420135498047, |
| "learning_rate": 3.404274030428283e-06, |
| "loss": 0.0897, |
| "step": 75000 |
| }, |
| { |
| "epoch": 6.639697476035529, |
| "grad_norm": 9.947957038879395, |
| "learning_rate": 3.3603025239644717e-06, |
| "loss": 0.0862, |
| "step": 75500 |
| }, |
| { |
| "epoch": 6.6836689824993405, |
| "grad_norm": 0.4237179160118103, |
| "learning_rate": 3.3163310175006597e-06, |
| "loss": 0.0839, |
| "step": 76000 |
| }, |
| { |
| "epoch": 6.727640488963152, |
| "grad_norm": 8.52582836151123, |
| "learning_rate": 3.2723595110368485e-06, |
| "loss": 0.0838, |
| "step": 76500 |
| }, |
| { |
| "epoch": 6.771611995426963, |
| "grad_norm": 21.18242073059082, |
| "learning_rate": 3.228388004573037e-06, |
| "loss": 0.0858, |
| "step": 77000 |
| }, |
| { |
| "epoch": 6.815583501890774, |
| "grad_norm": 42.046844482421875, |
| "learning_rate": 3.1844164981092256e-06, |
| "loss": 0.0825, |
| "step": 77500 |
| }, |
| { |
| "epoch": 6.859555008354587, |
| "grad_norm": 3.5966274738311768, |
| "learning_rate": 3.140444991645414e-06, |
| "loss": 0.0873, |
| "step": 78000 |
| }, |
| { |
| "epoch": 6.903526514818398, |
| "grad_norm": 25.500396728515625, |
| "learning_rate": 3.0964734851816024e-06, |
| "loss": 0.0885, |
| "step": 78500 |
| }, |
| { |
| "epoch": 6.947498021282209, |
| "grad_norm": 59.15459442138672, |
| "learning_rate": 3.052501978717791e-06, |
| "loss": 0.0855, |
| "step": 79000 |
| }, |
| { |
| "epoch": 6.991469527746021, |
| "grad_norm": 0.11808668822050095, |
| "learning_rate": 3.00853047225398e-06, |
| "loss": 0.0837, |
| "step": 79500 |
| }, |
| { |
| "epoch": 7.035441034209832, |
| "grad_norm": 0.7973224520683289, |
| "learning_rate": 2.964558965790168e-06, |
| "loss": 0.0648, |
| "step": 80000 |
| }, |
| { |
| "epoch": 7.079412540673643, |
| "grad_norm": 46.84673309326172, |
| "learning_rate": 2.9205874593263567e-06, |
| "loss": 0.0709, |
| "step": 80500 |
| }, |
| { |
| "epoch": 7.1233840471374545, |
| "grad_norm": 0.13476693630218506, |
| "learning_rate": 2.8766159528625455e-06, |
| "loss": 0.0751, |
| "step": 81000 |
| }, |
| { |
| "epoch": 7.167355553601267, |
| "grad_norm": 30.29783821105957, |
| "learning_rate": 2.832644446398734e-06, |
| "loss": 0.0677, |
| "step": 81500 |
| }, |
| { |
| "epoch": 7.211327060065078, |
| "grad_norm": 12.256621360778809, |
| "learning_rate": 2.7886729399349222e-06, |
| "loss": 0.0717, |
| "step": 82000 |
| }, |
| { |
| "epoch": 7.255298566528889, |
| "grad_norm": 18.782052993774414, |
| "learning_rate": 2.744701433471111e-06, |
| "loss": 0.0652, |
| "step": 82500 |
| }, |
| { |
| "epoch": 7.299270072992701, |
| "grad_norm": 4.884202480316162, |
| "learning_rate": 2.7007299270072994e-06, |
| "loss": 0.0702, |
| "step": 83000 |
| }, |
| { |
| "epoch": 7.343241579456512, |
| "grad_norm": 0.8015443682670593, |
| "learning_rate": 2.656758420543488e-06, |
| "loss": 0.0742, |
| "step": 83500 |
| }, |
| { |
| "epoch": 7.387213085920323, |
| "grad_norm": 0.11882288008928299, |
| "learning_rate": 2.6127869140796765e-06, |
| "loss": 0.0677, |
| "step": 84000 |
| }, |
| { |
| "epoch": 7.431184592384135, |
| "grad_norm": 24.17609214782715, |
| "learning_rate": 2.568815407615865e-06, |
| "loss": 0.0693, |
| "step": 84500 |
| }, |
| { |
| "epoch": 7.475156098847947, |
| "grad_norm": 8.958883285522461, |
| "learning_rate": 2.5248439011520537e-06, |
| "loss": 0.0754, |
| "step": 85000 |
| }, |
| { |
| "epoch": 7.519127605311758, |
| "grad_norm": 0.03229560703039169, |
| "learning_rate": 2.480872394688242e-06, |
| "loss": 0.0696, |
| "step": 85500 |
| }, |
| { |
| "epoch": 7.5630991117755695, |
| "grad_norm": 26.437501907348633, |
| "learning_rate": 2.436900888224431e-06, |
| "loss": 0.0704, |
| "step": 86000 |
| }, |
| { |
| "epoch": 7.607070618239381, |
| "grad_norm": 33.26008605957031, |
| "learning_rate": 2.3929293817606197e-06, |
| "loss": 0.0679, |
| "step": 86500 |
| }, |
| { |
| "epoch": 7.651042124703192, |
| "grad_norm": 4.0587334632873535, |
| "learning_rate": 2.348957875296808e-06, |
| "loss": 0.0727, |
| "step": 87000 |
| }, |
| { |
| "epoch": 7.695013631167003, |
| "grad_norm": 1.8055332899093628, |
| "learning_rate": 2.3049863688329964e-06, |
| "loss": 0.0714, |
| "step": 87500 |
| }, |
| { |
| "epoch": 7.738985137630815, |
| "grad_norm": 0.08867678046226501, |
| "learning_rate": 2.2610148623691848e-06, |
| "loss": 0.0714, |
| "step": 88000 |
| }, |
| { |
| "epoch": 7.782956644094627, |
| "grad_norm": 51.05652618408203, |
| "learning_rate": 2.2170433559053736e-06, |
| "loss": 0.0662, |
| "step": 88500 |
| }, |
| { |
| "epoch": 7.826928150558438, |
| "grad_norm": 0.3058519959449768, |
| "learning_rate": 2.173071849441562e-06, |
| "loss": 0.0637, |
| "step": 89000 |
| }, |
| { |
| "epoch": 7.87089965702225, |
| "grad_norm": 0.05084119364619255, |
| "learning_rate": 2.1291003429777507e-06, |
| "loss": 0.0676, |
| "step": 89500 |
| }, |
| { |
| "epoch": 7.914871163486061, |
| "grad_norm": 0.23956036567687988, |
| "learning_rate": 2.085128836513939e-06, |
| "loss": 0.0818, |
| "step": 90000 |
| }, |
| { |
| "epoch": 7.958842669949872, |
| "grad_norm": 15.964754104614258, |
| "learning_rate": 2.041157330050128e-06, |
| "loss": 0.076, |
| "step": 90500 |
| }, |
| { |
| "epoch": 8.002814176413684, |
| "grad_norm": 1.4704182147979736, |
| "learning_rate": 1.9971858235863163e-06, |
| "loss": 0.0664, |
| "step": 91000 |
| }, |
| { |
| "epoch": 8.046785682877495, |
| "grad_norm": 63.657440185546875, |
| "learning_rate": 1.953214317122505e-06, |
| "loss": 0.0583, |
| "step": 91500 |
| }, |
| { |
| "epoch": 8.090757189341307, |
| "grad_norm": 8.421917915344238, |
| "learning_rate": 1.9092428106586934e-06, |
| "loss": 0.0613, |
| "step": 92000 |
| }, |
| { |
| "epoch": 8.134728695805117, |
| "grad_norm": 0.17341230809688568, |
| "learning_rate": 1.865271304194882e-06, |
| "loss": 0.0554, |
| "step": 92500 |
| }, |
| { |
| "epoch": 8.17870020226893, |
| "grad_norm": 0.09282279014587402, |
| "learning_rate": 1.8212997977310704e-06, |
| "loss": 0.0607, |
| "step": 93000 |
| }, |
| { |
| "epoch": 8.222671708732742, |
| "grad_norm": 0.0653434619307518, |
| "learning_rate": 1.777328291267259e-06, |
| "loss": 0.058, |
| "step": 93500 |
| }, |
| { |
| "epoch": 8.266643215196552, |
| "grad_norm": 20.4820556640625, |
| "learning_rate": 1.7333567848034475e-06, |
| "loss": 0.0564, |
| "step": 94000 |
| }, |
| { |
| "epoch": 8.310614721660365, |
| "grad_norm": 2.014326333999634, |
| "learning_rate": 1.6893852783396361e-06, |
| "loss": 0.0589, |
| "step": 94500 |
| }, |
| { |
| "epoch": 8.354586228124175, |
| "grad_norm": 0.2292569875717163, |
| "learning_rate": 1.6454137718758245e-06, |
| "loss": 0.0572, |
| "step": 95000 |
| }, |
| { |
| "epoch": 8.398557734587987, |
| "grad_norm": 0.014584074728190899, |
| "learning_rate": 1.6014422654120133e-06, |
| "loss": 0.0497, |
| "step": 95500 |
| }, |
| { |
| "epoch": 8.442529241051798, |
| "grad_norm": 30.090499877929688, |
| "learning_rate": 1.5574707589482016e-06, |
| "loss": 0.0609, |
| "step": 96000 |
| }, |
| { |
| "epoch": 8.48650074751561, |
| "grad_norm": 0.038359276950359344, |
| "learning_rate": 1.5134992524843902e-06, |
| "loss": 0.0544, |
| "step": 96500 |
| }, |
| { |
| "epoch": 8.530472253979422, |
| "grad_norm": 0.1624363213777542, |
| "learning_rate": 1.4695277460205786e-06, |
| "loss": 0.0578, |
| "step": 97000 |
| }, |
| { |
| "epoch": 8.574443760443232, |
| "grad_norm": 0.23937764763832092, |
| "learning_rate": 1.4255562395567674e-06, |
| "loss": 0.058, |
| "step": 97500 |
| }, |
| { |
| "epoch": 8.618415266907045, |
| "grad_norm": 0.08367595821619034, |
| "learning_rate": 1.3815847330929558e-06, |
| "loss": 0.0567, |
| "step": 98000 |
| }, |
| { |
| "epoch": 8.662386773370855, |
| "grad_norm": 0.07520689815282822, |
| "learning_rate": 1.3376132266291445e-06, |
| "loss": 0.0507, |
| "step": 98500 |
| }, |
| { |
| "epoch": 8.706358279834667, |
| "grad_norm": 0.20980244874954224, |
| "learning_rate": 1.293641720165333e-06, |
| "loss": 0.0621, |
| "step": 99000 |
| }, |
| { |
| "epoch": 8.75032978629848, |
| "grad_norm": 2.3352205753326416, |
| "learning_rate": 1.2496702137015215e-06, |
| "loss": 0.053, |
| "step": 99500 |
| }, |
| { |
| "epoch": 8.79430129276229, |
| "grad_norm": 0.1555749624967575, |
| "learning_rate": 1.20569870723771e-06, |
| "loss": 0.0572, |
| "step": 100000 |
| }, |
| { |
| "epoch": 8.838272799226102, |
| "grad_norm": 17.412338256835938, |
| "learning_rate": 1.1617272007738987e-06, |
| "loss": 0.0562, |
| "step": 100500 |
| }, |
| { |
| "epoch": 8.882244305689913, |
| "grad_norm": 0.07252756506204605, |
| "learning_rate": 1.1177556943100872e-06, |
| "loss": 0.0627, |
| "step": 101000 |
| }, |
| { |
| "epoch": 8.926215812153725, |
| "grad_norm": 2.5696682929992676, |
| "learning_rate": 1.0737841878462756e-06, |
| "loss": 0.0593, |
| "step": 101500 |
| }, |
| { |
| "epoch": 8.970187318617535, |
| "grad_norm": 38.372249603271484, |
| "learning_rate": 1.0298126813824642e-06, |
| "loss": 0.0533, |
| "step": 102000 |
| }, |
| { |
| "epoch": 9.014158825081347, |
| "grad_norm": 0.0533902570605278, |
| "learning_rate": 9.858411749186528e-07, |
| "loss": 0.0542, |
| "step": 102500 |
| }, |
| { |
| "epoch": 9.05813033154516, |
| "grad_norm": 1.9466789960861206, |
| "learning_rate": 9.418696684548414e-07, |
| "loss": 0.0524, |
| "step": 103000 |
| }, |
| { |
| "epoch": 9.10210183800897, |
| "grad_norm": 38.15435791015625, |
| "learning_rate": 8.978981619910298e-07, |
| "loss": 0.0459, |
| "step": 103500 |
| }, |
| { |
| "epoch": 9.146073344472782, |
| "grad_norm": 0.12077120691537857, |
| "learning_rate": 8.539266555272184e-07, |
| "loss": 0.0486, |
| "step": 104000 |
| }, |
| { |
| "epoch": 9.190044850936593, |
| "grad_norm": 5.501252174377441, |
| "learning_rate": 8.09955149063407e-07, |
| "loss": 0.0491, |
| "step": 104500 |
| }, |
| { |
| "epoch": 9.234016357400405, |
| "grad_norm": 0.05747182294726372, |
| "learning_rate": 7.659836425995955e-07, |
| "loss": 0.053, |
| "step": 105000 |
| }, |
| { |
| "epoch": 9.277987863864215, |
| "grad_norm": 0.712147057056427, |
| "learning_rate": 7.22012136135784e-07, |
| "loss": 0.0612, |
| "step": 105500 |
| }, |
| { |
| "epoch": 9.321959370328027, |
| "grad_norm": 79.47268676757812, |
| "learning_rate": 6.780406296719725e-07, |
| "loss": 0.0445, |
| "step": 106000 |
| }, |
| { |
| "epoch": 9.36593087679184, |
| "grad_norm": 0.05525458604097366, |
| "learning_rate": 6.340691232081611e-07, |
| "loss": 0.0484, |
| "step": 106500 |
| }, |
| { |
| "epoch": 9.40990238325565, |
| "grad_norm": 80.07630157470703, |
| "learning_rate": 5.900976167443497e-07, |
| "loss": 0.0486, |
| "step": 107000 |
| }, |
| { |
| "epoch": 9.453873889719462, |
| "grad_norm": 31.79228973388672, |
| "learning_rate": 5.461261102805383e-07, |
| "loss": 0.0488, |
| "step": 107500 |
| }, |
| { |
| "epoch": 9.497845396183273, |
| "grad_norm": 27.02912139892578, |
| "learning_rate": 5.021546038167269e-07, |
| "loss": 0.0533, |
| "step": 108000 |
| }, |
| { |
| "epoch": 9.541816902647085, |
| "grad_norm": 0.05077936500310898, |
| "learning_rate": 4.581830973529153e-07, |
| "loss": 0.0485, |
| "step": 108500 |
| }, |
| { |
| "epoch": 9.585788409110895, |
| "grad_norm": 0.13254009187221527, |
| "learning_rate": 4.142115908891039e-07, |
| "loss": 0.0475, |
| "step": 109000 |
| }, |
| { |
| "epoch": 9.629759915574708, |
| "grad_norm": 4.606433391571045, |
| "learning_rate": 3.7024008442529244e-07, |
| "loss": 0.0465, |
| "step": 109500 |
| }, |
| { |
| "epoch": 9.67373142203852, |
| "grad_norm": 12.37204647064209, |
| "learning_rate": 3.2626857796148096e-07, |
| "loss": 0.0431, |
| "step": 110000 |
| }, |
| { |
| "epoch": 9.71770292850233, |
| "grad_norm": 68.52936553955078, |
| "learning_rate": 2.8229707149766955e-07, |
| "loss": 0.0498, |
| "step": 110500 |
| }, |
| { |
| "epoch": 9.761674434966142, |
| "grad_norm": 11.432622909545898, |
| "learning_rate": 2.3832556503385807e-07, |
| "loss": 0.0512, |
| "step": 111000 |
| }, |
| { |
| "epoch": 9.805645941429953, |
| "grad_norm": 0.3197621703147888, |
| "learning_rate": 1.9435405857004663e-07, |
| "loss": 0.0494, |
| "step": 111500 |
| }, |
| { |
| "epoch": 9.849617447893765, |
| "grad_norm": 0.03288858011364937, |
| "learning_rate": 1.5038255210623516e-07, |
| "loss": 0.0504, |
| "step": 112000 |
| }, |
| { |
| "epoch": 9.893588954357575, |
| "grad_norm": 0.5140511989593506, |
| "learning_rate": 1.0641104564242372e-07, |
| "loss": 0.0476, |
| "step": 112500 |
| }, |
| { |
| "epoch": 9.937560460821388, |
| "grad_norm": 0.2467552274465561, |
| "learning_rate": 6.243953917861227e-08, |
| "loss": 0.0484, |
| "step": 113000 |
| }, |
| { |
| "epoch": 9.9815319672852, |
| "grad_norm": 9.55329418182373, |
| "learning_rate": 1.846803271480081e-08, |
| "loss": 0.0474, |
| "step": 113500 |
| }, |
| { |
| "epoch": 10.0, |
| "step": 113710, |
| "total_flos": 2.4428070234989568e+17, |
| "train_loss": 0.1514742186763295, |
| "train_runtime": 20029.6511, |
| "train_samples_per_second": 181.654, |
| "train_steps_per_second": 5.677 |
| } |
| ], |
| "logging_steps": 500, |
| "max_steps": 113710, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 10, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.4428070234989568e+17, |
| "train_batch_size": 32, |
| "trial_name": null, |
| "trial_params": null |
| } |