{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008695652173913044,
      "grad_norm": 17.666513442993164,
      "learning_rate": 0.0,
      "loss": 4.5266,
      "step": 1
    },
    {
      "epoch": 0.017391304347826087,
      "grad_norm": 21.562305450439453,
      "learning_rate": 1e-05,
      "loss": 4.6683,
      "step": 2
    },
    {
      "epoch": 0.02608695652173913,
      "grad_norm": 17.93792152404785,
      "learning_rate": 2e-05,
      "loss": 4.4263,
      "step": 3
    },
    {
      "epoch": 0.034782608695652174,
      "grad_norm": 31.087865829467773,
      "learning_rate": 3e-05,
      "loss": 5.3455,
      "step": 4
    },
    {
      "epoch": 0.043478260869565216,
      "grad_norm": 16.84352684020996,
      "learning_rate": 4e-05,
      "loss": 4.7065,
      "step": 5
    },
    {
      "epoch": 0.05217391304347826,
      "grad_norm": 15.276086807250977,
      "learning_rate": 5e-05,
      "loss": 3.5936,
      "step": 6
    },
    {
      "epoch": 0.06086956521739131,
      "grad_norm": 15.930822372436523,
      "learning_rate": 6e-05,
      "loss": 3.7262,
      "step": 7
    },
    {
      "epoch": 0.06956521739130435,
      "grad_norm": 16.219757080078125,
      "learning_rate": 7e-05,
      "loss": 4.0023,
      "step": 8
    },
    {
      "epoch": 0.0782608695652174,
      "grad_norm": 13.747424125671387,
      "learning_rate": 8e-05,
      "loss": 3.8195,
      "step": 9
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 28.324934005737305,
      "learning_rate": 9e-05,
      "loss": 4.1877,
      "step": 10
    },
    {
      "epoch": 0.09565217391304348,
      "grad_norm": 11.464014053344727,
      "learning_rate": 0.0001,
      "loss": 2.5265,
      "step": 11
    },
    {
      "epoch": 0.10434782608695652,
      "grad_norm": 11.093965530395508,
      "learning_rate": 9.999490215047167e-05,
      "loss": 2.0965,
      "step": 12
    },
    {
      "epoch": 0.11304347826086956,
      "grad_norm": 13.068178176879883,
      "learning_rate": 9.997960964140947e-05,
      "loss": 2.4444,
      "step": 13
    },
    {
      "epoch": 0.12173913043478261,
      "grad_norm": 12.251531600952148,
      "learning_rate": 9.995412559116979e-05,
      "loss": 2.6647,
      "step": 14
    },
    {
      "epoch": 0.13043478260869565,
      "grad_norm": 15.61241340637207,
      "learning_rate": 9.991845519630678e-05,
      "loss": 2.9974,
      "step": 15
    },
    {
      "epoch": 0.1391304347826087,
      "grad_norm": 11.974143981933594,
      "learning_rate": 9.987260573051269e-05,
      "loss": 1.9998,
      "step": 16
    },
    {
      "epoch": 0.14782608695652175,
      "grad_norm": 11.015225410461426,
      "learning_rate": 9.981658654313457e-05,
      "loss": 2.1787,
      "step": 17
    },
    {
      "epoch": 0.1565217391304348,
      "grad_norm": 10.231270790100098,
      "learning_rate": 9.975040905726798e-05,
      "loss": 1.3827,
      "step": 18
    },
    {
      "epoch": 0.16521739130434782,
      "grad_norm": 10.304975509643555,
      "learning_rate": 9.967408676742751e-05,
      "loss": 1.8596,
      "step": 19
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 11.838497161865234,
      "learning_rate": 9.958763523679514e-05,
      "loss": 1.8492,
      "step": 20
    },
    {
      "epoch": 0.1826086956521739,
      "grad_norm": 14.471789360046387,
      "learning_rate": 9.949107209404665e-05,
      "loss": 1.4539,
      "step": 21
    },
    {
      "epoch": 0.19130434782608696,
      "grad_norm": 13.361359596252441,
      "learning_rate": 9.938441702975689e-05,
      "loss": 2.3912,
      "step": 22
    },
    {
      "epoch": 0.2,
      "grad_norm": 9.12480640411377,
      "learning_rate": 9.926769179238466e-05,
      "loss": 1.188,
      "step": 23
    },
    {
      "epoch": 0.20869565217391303,
      "grad_norm": 11.180279731750488,
      "learning_rate": 9.914092018383778e-05,
      "loss": 1.6116,
      "step": 24
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 11.030242919921875,
      "learning_rate": 9.900412805461967e-05,
      "loss": 1.9637,
      "step": 25
    },
    {
      "epoch": 0.22608695652173913,
      "grad_norm": 9.22864818572998,
      "learning_rate": 9.885734329855798e-05,
      "loss": 1.8468,
      "step": 26
    },
    {
      "epoch": 0.23478260869565218,
      "grad_norm": 14.804508209228516,
      "learning_rate": 9.870059584711668e-05,
      "loss": 0.9744,
      "step": 27
    },
    {
      "epoch": 0.24347826086956523,
      "grad_norm": 11.706965446472168,
      "learning_rate": 9.853391766329263e-05,
      "loss": 1.9317,
      "step": 28
    },
    {
      "epoch": 0.25217391304347825,
      "grad_norm": 11.159095764160156,
      "learning_rate": 9.835734273509786e-05,
      "loss": 1.7246,
      "step": 29
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 8.815181732177734,
      "learning_rate": 9.817090706862895e-05,
      "loss": 1.2355,
      "step": 30
    },
    {
      "epoch": 0.26956521739130435,
      "grad_norm": 11.841676712036133,
      "learning_rate": 9.797464868072488e-05,
      "loss": 3.3564,
      "step": 31
    },
    {
      "epoch": 0.2782608695652174,
      "grad_norm": 15.580975532531738,
      "learning_rate": 9.776860759121484e-05,
      "loss": 2.0827,
      "step": 32
    },
    {
      "epoch": 0.28695652173913044,
      "grad_norm": 8.459304809570312,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.2298,
      "step": 33
    },
    {
      "epoch": 0.2956521739130435,
      "grad_norm": 9.495912551879883,
      "learning_rate": 9.73273473522745e-05,
      "loss": 1.3418,
      "step": 34
    },
    {
      "epoch": 0.30434782608695654,
      "grad_norm": 13.812179565429688,
      "learning_rate": 9.709221818197624e-05,
      "loss": 1.6258,
      "step": 35
    },
    {
      "epoch": 0.3130434782608696,
      "grad_norm": 9.413185119628906,
      "learning_rate": 9.68474862499881e-05,
      "loss": 1.168,
      "step": 36
    },
    {
      "epoch": 0.3217391304347826,
      "grad_norm": 13.937978744506836,
      "learning_rate": 9.659320146057262e-05,
      "loss": 1.4273,
      "step": 37
    },
    {
      "epoch": 0.33043478260869563,
      "grad_norm": 9.947927474975586,
      "learning_rate": 9.632941566595357e-05,
      "loss": 1.3857,
      "step": 38
    },
    {
      "epoch": 0.3391304347826087,
      "grad_norm": 10.392467498779297,
      "learning_rate": 9.60561826557425e-05,
      "loss": 0.7599,
      "step": 39
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 12.23806381225586,
      "learning_rate": 9.577355814597031e-05,
      "loss": 2.0868,
      "step": 40
    },
    {
      "epoch": 0.3565217391304348,
      "grad_norm": 6.938766002655029,
      "learning_rate": 9.548159976772592e-05,
      "loss": 0.4847,
      "step": 41
    },
    {
      "epoch": 0.3652173913043478,
      "grad_norm": 9.958048820495605,
      "learning_rate": 9.518036705540458e-05,
      "loss": 1.609,
      "step": 42
    },
    {
      "epoch": 0.3739130434782609,
      "grad_norm": 11.910273551940918,
      "learning_rate": 9.486992143456792e-05,
      "loss": 0.7905,
      "step": 43
    },
    {
      "epoch": 0.3826086956521739,
      "grad_norm": 8.610260963439941,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.4258,
      "step": 44
    },
    {
      "epoch": 0.391304347826087,
      "grad_norm": 7.331972122192383,
      "learning_rate": 9.422164654989072e-05,
      "loss": 0.9974,
      "step": 45
    },
    {
      "epoch": 0.4,
      "grad_norm": 8.997504234313965,
      "learning_rate": 9.388394947836279e-05,
      "loss": 1.1141,
      "step": 46
    },
    {
      "epoch": 0.40869565217391307,
      "grad_norm": 8.395123481750488,
      "learning_rate": 9.353730385598887e-05,
      "loss": 0.9686,
      "step": 47
    },
    {
      "epoch": 0.41739130434782606,
      "grad_norm": 8.04129695892334,
      "learning_rate": 9.318178036865785e-05,
      "loss": 0.3619,
      "step": 48
    },
    {
      "epoch": 0.4260869565217391,
      "grad_norm": 5.985868453979492,
      "learning_rate": 9.281745151257946e-05,
      "loss": 0.6158,
      "step": 49
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 12.913836479187012,
      "learning_rate": 9.244439157950114e-05,
      "loss": 1.6131,
      "step": 50
    },
    {
      "epoch": 0.4434782608695652,
      "grad_norm": 7.429251194000244,
      "learning_rate": 9.206267664155907e-05,
      "loss": 0.8433,
      "step": 51
    },
    {
      "epoch": 0.45217391304347826,
      "grad_norm": 8.468223571777344,
      "learning_rate": 9.167238453576589e-05,
      "loss": 0.8761,
      "step": 52
    },
    {
      "epoch": 0.4608695652173913,
      "grad_norm": 10.999699592590332,
      "learning_rate": 9.12735948481387e-05,
      "loss": 1.7577,
      "step": 53
    },
    {
      "epoch": 0.46956521739130436,
      "grad_norm": 8.069478988647461,
      "learning_rate": 9.086638889747035e-05,
      "loss": 1.1008,
      "step": 54
    },
    {
      "epoch": 0.4782608695652174,
      "grad_norm": 9.439266204833984,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.7977,
      "step": 55
    },
    {
      "epoch": 0.48695652173913045,
      "grad_norm": 8.834954261779785,
      "learning_rate": 9.002706204621803e-05,
      "loss": 1.2913,
      "step": 56
    },
    {
      "epoch": 0.4956521739130435,
      "grad_norm": 14.728219032287598,
      "learning_rate": 8.959511229611376e-05,
      "loss": 1.566,
      "step": 57
    },
    {
      "epoch": 0.5043478260869565,
      "grad_norm": 14.64725112915039,
      "learning_rate": 8.915508854902778e-05,
      "loss": 0.7095,
      "step": 58
    },
    {
      "epoch": 0.5130434782608696,
      "grad_norm": 17.302316665649414,
      "learning_rate": 8.870708053195413e-05,
      "loss": 1.1349,
      "step": 59
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 13.19262981414795,
      "learning_rate": 8.825117959999116e-05,
      "loss": 1.0779,
      "step": 60
    },
    {
      "epoch": 0.5304347826086957,
      "grad_norm": 10.02542495727539,
      "learning_rate": 8.778747871771292e-05,
      "loss": 0.7204,
      "step": 61
    },
    {
      "epoch": 0.5391304347826087,
      "grad_norm": 2.038048505783081,
      "learning_rate": 8.731607244021236e-05,
      "loss": 0.0617,
      "step": 62
    },
    {
      "epoch": 0.5478260869565217,
      "grad_norm": 8.415672302246094,
      "learning_rate": 8.683705689382024e-05,
      "loss": 1.199,
      "step": 63
    },
    {
      "epoch": 0.5565217391304348,
      "grad_norm": 10.012858390808105,
      "learning_rate": 8.635052975650369e-05,
      "loss": 1.442,
      "step": 64
    },
    {
      "epoch": 0.5652173913043478,
      "grad_norm": 10.651287078857422,
      "learning_rate": 8.585659023794818e-05,
      "loss": 1.6201,
      "step": 65
    },
    {
      "epoch": 0.5739130434782609,
      "grad_norm": 8.589141845703125,
      "learning_rate": 8.535533905932738e-05,
      "loss": 1.1879,
      "step": 66
    },
    {
      "epoch": 0.5826086956521739,
      "grad_norm": 9.711124420166016,
      "learning_rate": 8.484687843276469e-05,
      "loss": 1.2133,
      "step": 67
    },
    {
      "epoch": 0.591304347826087,
      "grad_norm": 9.756671905517578,
      "learning_rate": 8.433131204049067e-05,
      "loss": 1.1111,
      "step": 68
    },
    {
      "epoch": 0.6,
      "grad_norm": 10.514204025268555,
      "learning_rate": 8.380874501370097e-05,
      "loss": 0.4235,
      "step": 69
    },
    {
      "epoch": 0.6086956521739131,
      "grad_norm": 6.404147148132324,
      "learning_rate": 8.327928391111841e-05,
      "loss": 0.1837,
      "step": 70
    },
    {
      "epoch": 0.6173913043478261,
      "grad_norm": 12.888923645019531,
      "learning_rate": 8.274303669726426e-05,
      "loss": 1.2996,
      "step": 71
    },
    {
      "epoch": 0.6260869565217392,
      "grad_norm": 12.083588600158691,
      "learning_rate": 8.220011272044277e-05,
      "loss": 0.3065,
      "step": 72
    },
    {
      "epoch": 0.6347826086956522,
      "grad_norm": 7.7776665687561035,
      "learning_rate": 8.165062269044353e-05,
      "loss": 0.7543,
      "step": 73
    },
    {
      "epoch": 0.6434782608695652,
      "grad_norm": 8.809155464172363,
      "learning_rate": 8.109467865596612e-05,
      "loss": 1.0967,
      "step": 74
    },
    {
      "epoch": 0.6521739130434783,
      "grad_norm": 18.32766342163086,
      "learning_rate": 8.053239398177191e-05,
      "loss": 2.4623,
      "step": 75
    },
    {
      "epoch": 0.6608695652173913,
      "grad_norm": 12.604337692260742,
      "learning_rate": 7.996388332556735e-05,
      "loss": 1.7208,
      "step": 76
    },
    {
      "epoch": 0.6695652173913044,
      "grad_norm": 12.69120979309082,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.883,
      "step": 77
    },
    {
      "epoch": 0.6782608695652174,
      "grad_norm": 12.506765365600586,
      "learning_rate": 7.880864902213765e-05,
      "loss": 1.83,
      "step": 78
    },
    {
      "epoch": 0.6869565217391305,
      "grad_norm": 8.688498497009277,
      "learning_rate": 7.822216094333847e-05,
      "loss": 0.9735,
      "step": 79
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 14.368943214416504,
      "learning_rate": 7.762991797134514e-05,
      "loss": 2.193,
      "step": 80
    },
    {
      "epoch": 0.7043478260869566,
      "grad_norm": 6.735222339630127,
      "learning_rate": 7.703204087277988e-05,
      "loss": 0.6682,
      "step": 81
    },
    {
      "epoch": 0.7130434782608696,
      "grad_norm": 15.296740531921387,
      "learning_rate": 7.64286515631421e-05,
      "loss": 0.9763,
      "step": 82
    },
    {
      "epoch": 0.7217391304347827,
      "grad_norm": 16.399547576904297,
      "learning_rate": 7.58198730819481e-05,
      "loss": 2.4849,
      "step": 83
    },
    {
      "epoch": 0.7304347826086957,
      "grad_norm": 11.530505180358887,
      "learning_rate": 7.52058295676416e-05,
      "loss": 1.8405,
      "step": 84
    },
    {
      "epoch": 0.7391304347826086,
      "grad_norm": 7.331994533538818,
      "learning_rate": 7.45866462322802e-05,
      "loss": 0.9336,
      "step": 85
    },
    {
      "epoch": 0.7478260869565218,
      "grad_norm": 20.28060531616211,
      "learning_rate": 7.396244933600285e-05,
      "loss": 1.2999,
      "step": 86
    },
    {
      "epoch": 0.7565217391304347,
      "grad_norm": 8.961277961730957,
      "learning_rate": 7.333336616128369e-05,
      "loss": 1.0094,
      "step": 87
    },
    {
      "epoch": 0.7652173913043478,
      "grad_norm": 7.146012306213379,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.8734,
      "step": 88
    },
    {
      "epoch": 0.7739130434782608,
      "grad_norm": 12.372821807861328,
      "learning_rate": 7.206105506216106e-05,
      "loss": 1.5859,
      "step": 89
    },
    {
      "epoch": 0.782608695652174,
      "grad_norm": 8.59324836730957,
      "learning_rate": 7.141808657977907e-05,
      "loss": 1.0735,
      "step": 90
    },
    {
      "epoch": 0.7913043478260869,
      "grad_norm": 6.564212799072266,
      "learning_rate": 7.077075065009433e-05,
      "loss": 0.8069,
      "step": 91
    },
    {
      "epoch": 0.8,
      "grad_norm": 10.416309356689453,
      "learning_rate": 7.01191792739534e-05,
      "loss": 1.0411,
      "step": 92
    },
    {
      "epoch": 0.808695652173913,
      "grad_norm": 11.143993377685547,
      "learning_rate": 6.946350531586959e-05,
      "loss": 0.8595,
      "step": 93
    },
    {
      "epoch": 0.8173913043478261,
      "grad_norm": 9.861275672912598,
      "learning_rate": 6.880386247692999e-05,
      "loss": 0.9368,
      "step": 94
    },
    {
      "epoch": 0.8260869565217391,
      "grad_norm": 10.142037391662598,
      "learning_rate": 6.814038526753205e-05,
      "loss": 1.5361,
      "step": 95
    },
    {
      "epoch": 0.8347826086956521,
      "grad_norm": 6.418692588806152,
      "learning_rate": 6.747320897995493e-05,
      "loss": 0.7167,
      "step": 96
    },
    {
      "epoch": 0.8434782608695652,
      "grad_norm": 4.113799571990967,
      "learning_rate": 6.680246966077151e-05,
      "loss": 0.3922,
      "step": 97
    },
    {
      "epoch": 0.8521739130434782,
      "grad_norm": 10.264501571655273,
      "learning_rate": 6.61283040831067e-05,
      "loss": 2.2257,
      "step": 98
    },
    {
      "epoch": 0.8608695652173913,
      "grad_norm": 15.571903228759766,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.4521,
      "step": 99
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 7.295588970184326,
      "learning_rate": 6.477024471011001e-05,
      "loss": 0.7105,
      "step": 100
    },
    {
      "epoch": 0.8782608695652174,
      "grad_norm": 11.271722793579102,
      "learning_rate": 6.408662784207149e-05,
      "loss": 2.0426,
      "step": 101
    },
    {
      "epoch": 0.8869565217391304,
      "grad_norm": 8.137569427490234,
      "learning_rate": 6.340013851366896e-05,
      "loss": 1.3701,
      "step": 102
    },
    {
      "epoch": 0.8956521739130435,
      "grad_norm": 10.493950843811035,
      "learning_rate": 6.271091670967436e-05,
      "loss": 1.1864,
      "step": 103
    },
    {
      "epoch": 0.9043478260869565,
      "grad_norm": 9.262134552001953,
      "learning_rate": 6.201910297204962e-05,
      "loss": 1.0413,
      "step": 104
    },
    {
      "epoch": 0.9130434782608695,
      "grad_norm": 10.564716339111328,
      "learning_rate": 6.132483837128823e-05,
      "loss": 2.303,
      "step": 105
    },
    {
      "epoch": 0.9217391304347826,
      "grad_norm": 4.90865421295166,
      "learning_rate": 6.062826447764883e-05,
      "loss": 0.3802,
      "step": 106
    },
    {
      "epoch": 0.9304347826086956,
      "grad_norm": 9.297486305236816,
      "learning_rate": 5.992952333228728e-05,
      "loss": 1.2512,
      "step": 107
    },
    {
      "epoch": 0.9391304347826087,
      "grad_norm": 8.179728507995605,
      "learning_rate": 5.9228757418292266e-05,
      "loss": 0.4771,
      "step": 108
    },
    {
      "epoch": 0.9478260869565217,
      "grad_norm": 18.19074249267578,
      "learning_rate": 5.85261096316312e-05,
      "loss": 2.0378,
      "step": 109
    },
    {
      "epoch": 0.9565217391304348,
      "grad_norm": 4.808218955993652,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.14,
      "step": 110
    },
    {
      "epoch": 0.9652173913043478,
      "grad_norm": 15.154193878173828,
      "learning_rate": 5.7115741913664264e-05,
      "loss": 1.3785,
      "step": 111
    },
    {
      "epoch": 0.9739130434782609,
      "grad_norm": 7.520651340484619,
      "learning_rate": 5.640830957605465e-05,
      "loss": 0.7942,
      "step": 112
    },
    {
      "epoch": 0.9826086956521739,
      "grad_norm": 11.108746528625488,
      "learning_rate": 5.569957049452703e-05,
      "loss": 2.0485,
      "step": 113
    },
    {
      "epoch": 0.991304347826087,
      "grad_norm": 8.641176223754883,
      "learning_rate": 5.4989669190889136e-05,
      "loss": 1.1212,
      "step": 114
    },
    {
      "epoch": 1.0,
      "grad_norm": 6.526795387268066,
      "learning_rate": 5.427875042394199e-05,
      "loss": 0.7003,
      "step": 115
    },
    {
      "epoch": 1.008695652173913,
      "grad_norm": 7.168546676635742,
      "learning_rate": 5.3566959159961615e-05,
      "loss": 1.0922,
      "step": 116
    },
    {
      "epoch": 1.017391304347826,
      "grad_norm": 8.669252395629883,
      "learning_rate": 5.2854440543138406e-05,
      "loss": 0.8577,
      "step": 117
    },
    {
      "epoch": 1.0260869565217392,
      "grad_norm": 7.540567398071289,
      "learning_rate": 5.2141339865980134e-05,
      "loss": 0.7922,
      "step": 118
    },
    {
      "epoch": 1.0347826086956522,
      "grad_norm": 7.116650581359863,
      "learning_rate": 5.142780253968481e-05,
      "loss": 0.7596,
      "step": 119
    },
    {
      "epoch": 1.0434782608695652,
      "grad_norm": 8.778299331665039,
      "learning_rate": 5.0713974064489367e-05,
      "loss": 2.5862,
      "step": 120
    },
    {
      "epoch": 1.0521739130434782,
      "grad_norm": 8.101160049438477,
      "learning_rate": 5e-05,
      "loss": 0.6208,
      "step": 121
    },
    {
      "epoch": 1.0608695652173914,
      "grad_norm": 5.799407958984375,
      "learning_rate": 4.928602593551065e-05,
      "loss": 0.5619,
      "step": 122
    },
    {
      "epoch": 1.0695652173913044,
      "grad_norm": 5.000042915344238,
      "learning_rate": 4.85721974603152e-05,
      "loss": 0.2951,
      "step": 123
    },
    {
      "epoch": 1.0782608695652174,
      "grad_norm": 8.474133491516113,
      "learning_rate": 4.7858660134019884e-05,
      "loss": 1.2103,
      "step": 124
    },
    {
      "epoch": 1.0869565217391304,
      "grad_norm": 10.887007713317871,
      "learning_rate": 4.71455594568616e-05,
      "loss": 1.7797,
      "step": 125
    },
    {
      "epoch": 1.0956521739130434,
      "grad_norm": 7.625034809112549,
      "learning_rate": 4.643304084003839e-05,
      "loss": 0.7967,
      "step": 126
    },
    {
      "epoch": 1.1043478260869566,
      "grad_norm": 8.578575134277344,
      "learning_rate": 4.5721249576058027e-05,
      "loss": 0.6249,
      "step": 127
    },
    {
      "epoch": 1.1130434782608696,
      "grad_norm": 6.662336349487305,
      "learning_rate": 4.501033080911086e-05,
      "loss": 0.637,
      "step": 128
    },
    {
      "epoch": 1.1217391304347826,
      "grad_norm": 8.214466094970703,
      "learning_rate": 4.4300429505472976e-05,
      "loss": 1.2037,
      "step": 129
    },
    {
      "epoch": 1.1304347826086956,
      "grad_norm": 9.812641143798828,
      "learning_rate": 4.359169042394536e-05,
      "loss": 1.0598,
      "step": 130
    },
    {
      "epoch": 1.1391304347826088,
      "grad_norm": 5.729506969451904,
      "learning_rate": 4.288425808633575e-05,
      "loss": 0.4854,
      "step": 131
    },
    {
      "epoch": 1.1478260869565218,
      "grad_norm": 7.675601959228516,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 1.6833,
      "step": 132
    },
    {
      "epoch": 1.1565217391304348,
      "grad_norm": 5.756649494171143,
      "learning_rate": 4.147389036836881e-05,
      "loss": 0.586,
      "step": 133
    },
    {
      "epoch": 1.1652173913043478,
      "grad_norm": 4.710939407348633,
      "learning_rate": 4.077124258170774e-05,
      "loss": 0.1098,
      "step": 134
    },
    {
      "epoch": 1.1739130434782608,
      "grad_norm": 8.076876640319824,
      "learning_rate": 4.007047666771274e-05,
      "loss": 0.8247,
      "step": 135
    },
    {
      "epoch": 1.182608695652174,
      "grad_norm": 6.092112064361572,
      "learning_rate": 3.937173552235117e-05,
      "loss": 0.4976,
      "step": 136
    },
    {
      "epoch": 1.191304347826087,
      "grad_norm": 12.222753524780273,
      "learning_rate": 3.8675161628711776e-05,
      "loss": 0.7614,
      "step": 137
    },
    {
      "epoch": 1.2,
      "grad_norm": 9.492755889892578,
      "learning_rate": 3.798089702795038e-05,
      "loss": 0.7278,
      "step": 138
    },
    {
      "epoch": 1.208695652173913,
      "grad_norm": 8.09307861328125,
      "learning_rate": 3.728908329032567e-05,
      "loss": 1.0424,
      "step": 139
    },
    {
      "epoch": 1.2173913043478262,
      "grad_norm": 13.750467300415039,
      "learning_rate": 3.659986148633107e-05,
      "loss": 1.3154,
      "step": 140
    },
    {
      "epoch": 1.2260869565217392,
      "grad_norm": 5.85883903503418,
      "learning_rate": 3.591337215792852e-05,
      "loss": 0.151,
      "step": 141
    },
    {
      "epoch": 1.2347826086956522,
      "grad_norm": 7.29556131362915,
      "learning_rate": 3.522975528989e-05,
      "loss": 0.4567,
      "step": 142
    },
    {
      "epoch": 1.2434782608695651,
      "grad_norm": 10.308717727661133,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.3977,
      "step": 143
    },
    {
      "epoch": 1.2521739130434781,
      "grad_norm": 9.417724609375,
      "learning_rate": 3.3871695916893314e-05,
      "loss": 0.948,
      "step": 144
    },
    {
      "epoch": 1.2608695652173914,
      "grad_norm": 8.931994438171387,
      "learning_rate": 3.3197530339228487e-05,
      "loss": 0.9489,
      "step": 145
    },
    {
      "epoch": 1.2695652173913043,
      "grad_norm": 14.605033874511719,
      "learning_rate": 3.2526791020045086e-05,
      "loss": 1.851,
      "step": 146
    },
    {
      "epoch": 1.2782608695652173,
      "grad_norm": 7.451152324676514,
      "learning_rate": 3.1859614732467954e-05,
      "loss": 0.4653,
      "step": 147
    },
    {
      "epoch": 1.2869565217391306,
      "grad_norm": 7.847959041595459,
      "learning_rate": 3.119613752307002e-05,
      "loss": 0.8261,
      "step": 148
    },
    {
      "epoch": 1.2956521739130435,
      "grad_norm": 6.851174831390381,
      "learning_rate": 3.053649468413043e-05,
      "loss": 0.5726,
      "step": 149
    },
    {
      "epoch": 1.3043478260869565,
      "grad_norm": 7.77811336517334,
      "learning_rate": 2.988082072604661e-05,
      "loss": 0.8105,
      "step": 150
    },
    {
      "epoch": 1.3130434782608695,
      "grad_norm": 9.027288436889648,
      "learning_rate": 2.9229249349905684e-05,
      "loss": 1.3896,
      "step": 151
    },
    {
      "epoch": 1.3217391304347825,
      "grad_norm": 6.307928085327148,
      "learning_rate": 2.858191342022095e-05,
      "loss": 0.4174,
      "step": 152
    },
    {
      "epoch": 1.3304347826086955,
      "grad_norm": 6.78994083404541,
      "learning_rate": 2.7938944937838923e-05,
      "loss": 0.5739,
      "step": 153
    },
    {
      "epoch": 1.3391304347826087,
      "grad_norm": 7.77897834777832,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.6822,
      "step": 154
    },
    {
      "epoch": 1.3478260869565217,
      "grad_norm": 10.742020606994629,
      "learning_rate": 2.6666633838716314e-05,
      "loss": 1.1842,
      "step": 155
    },
    {
      "epoch": 1.3565217391304347,
      "grad_norm": 6.034131050109863,
      "learning_rate": 2.603755066399718e-05,
      "loss": 0.1571,
      "step": 156
    },
    {
      "epoch": 1.365217391304348,
      "grad_norm": 8.707799911499023,
      "learning_rate": 2.5413353767719805e-05,
      "loss": 0.9573,
      "step": 157
    },
    {
      "epoch": 1.373913043478261,
      "grad_norm": 5.427333354949951,
      "learning_rate": 2.4794170432358415e-05,
      "loss": 0.4321,
      "step": 158
    },
    {
      "epoch": 1.382608695652174,
      "grad_norm": 18.620357513427734,
      "learning_rate": 2.418012691805191e-05,
      "loss": 2.3272,
      "step": 159
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 10.270234107971191,
      "learning_rate": 2.3571348436857904e-05,
      "loss": 0.7804,
      "step": 160
    },
    {
      "epoch": 1.4,
      "grad_norm": 4.281362056732178,
      "learning_rate": 2.296795912722014e-05,
      "loss": 0.1796,
      "step": 161
    },
    {
      "epoch": 1.4086956521739131,
      "grad_norm": 6.379110336303711,
      "learning_rate": 2.2370082028654866e-05,
      "loss": 0.5788,
      "step": 162
    },
    {
      "epoch": 1.4173913043478261,
      "grad_norm": 1.9616364240646362,
      "learning_rate": 2.1777839056661554e-05,
      "loss": 0.0406,
      "step": 163
    },
    {
      "epoch": 1.4260869565217391,
      "grad_norm": 8.34288215637207,
      "learning_rate": 2.119135097786236e-05,
      "loss": 0.914,
      "step": 164
    },
    {
      "epoch": 1.434782608695652,
      "grad_norm": 2.7565298080444336,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.0697,
      "step": 165
    },
    {
      "epoch": 1.4434782608695653,
      "grad_norm": 12.554067611694336,
      "learning_rate": 2.0036116674432654e-05,
      "loss": 1.2686,
      "step": 166
    },
    {
      "epoch": 1.4521739130434783,
      "grad_norm": 7.41255521774292,
      "learning_rate": 1.946760601822809e-05,
      "loss": 0.6045,
      "step": 167
    },
    {
      "epoch": 1.4608695652173913,
      "grad_norm": 8.661652565002441,
      "learning_rate": 1.8905321344033898e-05,
      "loss": 0.2404,
      "step": 168
    },
    {
      "epoch": 1.4695652173913043,
      "grad_norm": 8.020094871520996,
      "learning_rate": 1.8349377309556486e-05,
      "loss": 0.627,
      "step": 169
    },
    {
      "epoch": 1.4782608695652173,
      "grad_norm": 6.1133222579956055,
      "learning_rate": 1.7799887279557237e-05,
      "loss": 0.6232,
      "step": 170
    },
    {
      "epoch": 1.4869565217391305,
      "grad_norm": 11.473217010498047,
      "learning_rate": 1.725696330273575e-05,
      "loss": 0.8715,
      "step": 171
    },
    {
      "epoch": 1.4956521739130435,
      "grad_norm": 9.100458145141602,
      "learning_rate": 1.6720716088881594e-05,
      "loss": 1.0355,
      "step": 172
    },
    {
      "epoch": 1.5043478260869565,
      "grad_norm": 10.638351440429688,
      "learning_rate": 1.619125498629904e-05,
      "loss": 1.3327,
      "step": 173
    },
    {
      "epoch": 1.5130434782608697,
      "grad_norm": 9.494583129882812,
      "learning_rate": 1.566868795950932e-05,
      "loss": 0.3127,
      "step": 174
    },
    {
      "epoch": 1.5217391304347827,
      "grad_norm": 9.716391563415527,
      "learning_rate": 1.5153121567235335e-05,
      "loss": 0.7739,
      "step": 175
    },
    {
      "epoch": 1.5304347826086957,
      "grad_norm": 8.240947723388672,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.7155,
      "step": 176
    },
    {
      "epoch": 1.5391304347826087,
      "grad_norm": 8.80859661102295,
      "learning_rate": 1.414340976205183e-05,
      "loss": 0.8378,
      "step": 177
    },
    {
      "epoch": 1.5478260869565217,
      "grad_norm": 7.551889896392822,
      "learning_rate": 1.3649470243496326e-05,
      "loss": 0.5837,
      "step": 178
    },
    {
      "epoch": 1.5565217391304347,
      "grad_norm": 7.766860008239746,
      "learning_rate": 1.3162943106179749e-05,
      "loss": 0.6585,
      "step": 179
    },
    {
      "epoch": 1.5652173913043477,
      "grad_norm": 7.3335347175598145,
      "learning_rate": 1.2683927559787655e-05,
      "loss": 0.5556,
      "step": 180
    },
    {
      "epoch": 1.5739130434782609,
      "grad_norm": 3.552537441253662,
      "learning_rate": 1.2212521282287092e-05,
      "loss": 0.0628,
      "step": 181
    },
    {
      "epoch": 1.5826086956521739,
      "grad_norm": 7.300377368927002,
      "learning_rate": 1.1748820400008843e-05,
      "loss": 0.6523,
      "step": 182
    },
    {
      "epoch": 1.591304347826087,
      "grad_norm": 10.045459747314453,
      "learning_rate": 1.1292919468045877e-05,
      "loss": 0.7011,
      "step": 183
    },
    {
      "epoch": 1.6,
      "grad_norm": 10.843255043029785,
      "learning_rate": 1.0844911450972229e-05,
      "loss": 0.3824,
      "step": 184
    },
    {
      "epoch": 1.608695652173913,
      "grad_norm": 7.702304840087891,
      "learning_rate": 1.0404887703886251e-05,
      "loss": 0.782,
      "step": 185
    },
    {
      "epoch": 1.617391304347826,
      "grad_norm": 6.250712871551514,
      "learning_rate": 9.972937953781986e-06,
      "loss": 0.3592,
      "step": 186
    },
    {
      "epoch": 1.626086956521739,
      "grad_norm": 8.084029197692871,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.5398,
      "step": 187
    },
    {
      "epoch": 1.634782608695652,
      "grad_norm": 3.528081178665161,
      "learning_rate": 9.133611102529654e-06,
      "loss": 0.0699,
      "step": 188
    },
    {
      "epoch": 1.643478260869565,
      "grad_norm": 3.4632437229156494,
      "learning_rate": 8.7264051518613e-06,
      "loss": 0.0686,
      "step": 189
    },
    {
      "epoch": 1.6521739130434783,
      "grad_norm": 7.657890319824219,
      "learning_rate": 8.327615464234129e-06,
      "loss": 0.6129,
      "step": 190
    },
    {
      "epoch": 1.6608695652173913,
      "grad_norm": 9.106538772583008,
      "learning_rate": 7.937323358440935e-06,
      "loss": 0.8445,
      "step": 191
    },
    {
      "epoch": 1.6695652173913045,
      "grad_norm": 6.055113315582275,
      "learning_rate": 7.555608420498872e-06,
      "loss": 0.6026,
      "step": 192
    },
    {
      "epoch": 1.6782608695652175,
      "grad_norm": 17.939279556274414,
      "learning_rate": 7.182548487420554e-06,
      "loss": 1.5311,
      "step": 193
    },
    {
      "epoch": 1.6869565217391305,
      "grad_norm": 5.84937858581543,
      "learning_rate": 6.818219631342149e-06,
      "loss": 0.1457,
      "step": 194
    },
    {
      "epoch": 1.6956521739130435,
      "grad_norm": 10.647073745727539,
      "learning_rate": 6.462696144011149e-06,
      "loss": 1.0994,
      "step": 195
    },
    {
      "epoch": 1.7043478260869565,
      "grad_norm": 6.252867221832275,
      "learning_rate": 6.116050521637218e-06,
      "loss": 0.3648,
      "step": 196
    },
    {
      "epoch": 1.7130434782608694,
      "grad_norm": 6.860467910766602,
      "learning_rate": 5.778353450109286e-06,
      "loss": 0.6171,
      "step": 197
    },
    {
      "epoch": 1.7217391304347827,
      "grad_norm": 6.052587032318115,
      "learning_rate": 5.449673790581611e-06,
      "loss": 0.3541,
      "step": 198
    },
    {
      "epoch": 1.7304347826086957,
      "grad_norm": 4.472194194793701,
      "learning_rate": 5.13007856543209e-06,
      "loss": 0.0866,
      "step": 199
    },
    {
      "epoch": 1.7391304347826086,
      "grad_norm": 9.050908088684082,
      "learning_rate": 4.819632944595415e-06,
      "loss": 0.7291,
      "step": 200
    },
    {
      "epoch": 1.7478260869565219,
      "grad_norm": 15.068604469299316,
      "learning_rate": 4.5184002322740785e-06,
      "loss": 0.7179,
      "step": 201
    },
    {
      "epoch": 1.7565217391304349,
      "grad_norm": 9.076845169067383,
      "learning_rate": 4.2264418540297e-06,
      "loss": 1.4375,
      "step": 202
    },
    {
      "epoch": 1.7652173913043478,
      "grad_norm": 7.426385879516602,
      "learning_rate": 3.9438173442575e-06,
      "loss": 0.7565,
      "step": 203
    },
    {
      "epoch": 1.7739130434782608,
      "grad_norm": 7.295407772064209,
      "learning_rate": 3.6705843340464286e-06,
      "loss": 0.4621,
      "step": 204
    },
    {
      "epoch": 1.7826086956521738,
      "grad_norm": 5.500656604766846,
      "learning_rate": 3.406798539427386e-06,
      "loss": 0.0947,
      "step": 205
    },
    {
      "epoch": 1.7913043478260868,
      "grad_norm": 7.827192783355713,
      "learning_rate": 3.1525137500119207e-06,
      "loss": 0.5042,
      "step": 206
    },
    {
      "epoch": 1.8,
      "grad_norm": 10.173295021057129,
      "learning_rate": 2.9077818180237693e-06,
      "loss": 0.7344,
      "step": 207
    },
    {
      "epoch": 1.808695652173913,
      "grad_norm": 8.067317962646484,
      "learning_rate": 2.6726526477254987e-06,
      "loss": 0.8057,
      "step": 208
    },
    {
      "epoch": 1.8173913043478263,
      "grad_norm": 6.457295894622803,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.5597,
      "step": 209
    },
    {
      "epoch": 1.8260869565217392,
      "grad_norm": 6.850070953369141,
      "learning_rate": 2.2313924087851656e-06,
      "loss": 0.556,
      "step": 210
    },
    {
      "epoch": 1.8347826086956522,
      "grad_norm": 6.4678144454956055,
      "learning_rate": 2.0253513192751373e-06,
      "loss": 0.351,
      "step": 211
    },
    {
      "epoch": 1.8434782608695652,
      "grad_norm": 6.126864433288574,
      "learning_rate": 1.8290929313710513e-06,
      "loss": 0.3537,
      "step": 212
    },
    {
      "epoch": 1.8521739130434782,
      "grad_norm": 6.90460729598999,
      "learning_rate": 1.6426572649021476e-06,
      "loss": 0.1795,
      "step": 213
    },
    {
      "epoch": 1.8608695652173912,
      "grad_norm": 8.008868217468262,
      "learning_rate": 1.4660823367073751e-06,
      "loss": 0.5865,
      "step": 214
    },
    {
      "epoch": 1.8695652173913042,
      "grad_norm": 9.392267227172852,
      "learning_rate": 1.2994041528833266e-06,
      "loss": 0.8658,
      "step": 215
    },
    {
      "epoch": 1.8782608695652174,
      "grad_norm": 8.422052383422852,
      "learning_rate": 1.1426567014420297e-06,
      "loss": 0.636,
      "step": 216
    },
    {
      "epoch": 1.8869565217391304,
      "grad_norm": 12.24099063873291,
      "learning_rate": 9.958719453803278e-07,
      "loss": 1.9606,
      "step": 217
    },
    {
      "epoch": 1.8956521739130436,
      "grad_norm": 6.257460117340088,
      "learning_rate": 8.590798161622227e-07,
      "loss": 0.4456,
      "step": 218
    },
    {
      "epoch": 1.9043478260869566,
      "grad_norm": 12.042879104614258,
      "learning_rate": 7.323082076153509e-07,
      "loss": 1.299,
      "step": 219
    },
    {
      "epoch": 1.9130434782608696,
      "grad_norm": 16.869014739990234,
      "learning_rate": 6.15582970243117e-07,
      "loss": 1.6858,
      "step": 220
    },
    {
      "epoch": 1.9217391304347826,
      "grad_norm": 10.271821975708008,
      "learning_rate": 5.089279059533658e-07,
      "loss": 0.8549,
      "step": 221
    },
    {
      "epoch": 1.9304347826086956,
      "grad_norm": 6.406334400177002,
      "learning_rate": 4.123647632048644e-07,
      "loss": 0.5526,
      "step": 222
    },
    {
      "epoch": 1.9391304347826086,
      "grad_norm": 7.156313896179199,
      "learning_rate": 3.2591323257248893e-07,
      "loss": 0.2008,
      "step": 223
    },
    {
      "epoch": 1.9478260869565216,
      "grad_norm": 8.381826400756836,
      "learning_rate": 2.4959094273201977e-07,
      "loss": 0.4191,
      "step": 224
    },
    {
      "epoch": 1.9565217391304348,
      "grad_norm": 7.376916408538818,
      "learning_rate": 1.8341345686543332e-07,
      "loss": 0.4707,
      "step": 225
    },
    {
      "epoch": 1.9652173913043478,
      "grad_norm": 9.728565216064453,
      "learning_rate": 1.2739426948732424e-07,
      "loss": 1.1065,
      "step": 226
    },
    {
      "epoch": 1.973913043478261,
      "grad_norm": 7.97186279296875,
      "learning_rate": 8.15448036932176e-08,
      "loss": 0.879,
      "step": 227
    },
    {
      "epoch": 1.982608695652174,
      "grad_norm": 4.87440824508667,
      "learning_rate": 4.5874408830215434e-08,
      "loss": 0.1072,
      "step": 228
    },
    {
      "epoch": 1.991304347826087,
      "grad_norm": 9.028919219970703,
      "learning_rate": 2.0390358590538504e-08,
      "loss": 0.6967,
      "step": 229
    },
    {
      "epoch": 2.0,
      "grad_norm": 7.545851230621338,
      "learning_rate": 5.097849528334919e-09,
      "loss": 0.6529,
      "step": 230
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 230,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 33878645932032.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}