{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 100,
  "global_step": 1231,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008123476848090982,
      "grad_norm": 3.203125,
      "learning_rate": 1.6129032258064516e-06,
      "loss": 1.5961,
      "step": 10
    },
    {
      "epoch": 0.016246953696181964,
      "grad_norm": 3.453125,
      "learning_rate": 3.225806451612903e-06,
      "loss": 1.5961,
      "step": 20
    },
    {
      "epoch": 0.024370430544272948,
      "grad_norm": 2.796875,
      "learning_rate": 4.838709677419355e-06,
      "loss": 1.5969,
      "step": 30
    },
    {
      "epoch": 0.03249390739236393,
      "grad_norm": 2.609375,
      "learning_rate": 6.451612903225806e-06,
      "loss": 1.5125,
      "step": 40
    },
    {
      "epoch": 0.04061738424045491,
      "grad_norm": 2.015625,
      "learning_rate": 8.064516129032258e-06,
      "loss": 1.4758,
      "step": 50
    },
    {
      "epoch": 0.048740861088545896,
      "grad_norm": 2.078125,
      "learning_rate": 9.67741935483871e-06,
      "loss": 1.3445,
      "step": 60
    },
    {
      "epoch": 0.05686433793663688,
      "grad_norm": 2.21875,
      "learning_rate": 1.1290322580645164e-05,
      "loss": 1.4297,
      "step": 70
    },
    {
      "epoch": 0.06498781478472786,
      "grad_norm": 1.84375,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 1.3977,
      "step": 80
    },
    {
      "epoch": 0.07311129163281885,
      "grad_norm": 1.9765625,
      "learning_rate": 1.4516129032258066e-05,
      "loss": 1.3516,
      "step": 90
    },
    {
      "epoch": 0.08123476848090982,
      "grad_norm": 1.8828125,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 1.3461,
      "step": 100
    },
    {
      "epoch": 0.08935824532900082,
      "grad_norm": 2.015625,
      "learning_rate": 1.774193548387097e-05,
      "loss": 1.3195,
      "step": 110
    },
    {
      "epoch": 0.09748172217709179,
      "grad_norm": 2.078125,
      "learning_rate": 1.935483870967742e-05,
      "loss": 1.3828,
      "step": 120
    },
    {
      "epoch": 0.10560519902518278,
      "grad_norm": 1.84375,
      "learning_rate": 1.989159891598916e-05,
      "loss": 1.3586,
      "step": 130
    },
    {
      "epoch": 0.11372867587327376,
      "grad_norm": 2.0625,
      "learning_rate": 1.971093044263776e-05,
      "loss": 1.3945,
      "step": 140
    },
    {
      "epoch": 0.12185215272136475,
      "grad_norm": 1.984375,
      "learning_rate": 1.9530261969286363e-05,
      "loss": 1.343,
      "step": 150
    },
    {
      "epoch": 0.12997562956945571,
      "grad_norm": 2.0625,
      "learning_rate": 1.934959349593496e-05,
      "loss": 1.3172,
      "step": 160
    },
    {
      "epoch": 0.13809910641754672,
      "grad_norm": 1.8671875,
      "learning_rate": 1.916892502258356e-05,
      "loss": 1.3141,
      "step": 170
    },
    {
      "epoch": 0.1462225832656377,
      "grad_norm": 2.171875,
      "learning_rate": 1.898825654923216e-05,
      "loss": 1.3242,
      "step": 180
    },
    {
      "epoch": 0.15434606011372867,
      "grad_norm": 1.921875,
      "learning_rate": 1.8807588075880762e-05,
      "loss": 1.3633,
      "step": 190
    },
    {
      "epoch": 0.16246953696181965,
      "grad_norm": 1.890625,
      "learning_rate": 1.862691960252936e-05,
      "loss": 1.3359,
      "step": 200
    },
    {
      "epoch": 0.17059301380991065,
      "grad_norm": 1.7421875,
      "learning_rate": 1.8446251129177958e-05,
      "loss": 1.3266,
      "step": 210
    },
    {
      "epoch": 0.17871649065800163,
      "grad_norm": 1.984375,
      "learning_rate": 1.826558265582656e-05,
      "loss": 1.3367,
      "step": 220
    },
    {
      "epoch": 0.1868399675060926,
      "grad_norm": 2.09375,
      "learning_rate": 1.808491418247516e-05,
      "loss": 1.3023,
      "step": 230
    },
    {
      "epoch": 0.19496344435418358,
      "grad_norm": 1.8515625,
      "learning_rate": 1.790424570912376e-05,
      "loss": 1.2961,
      "step": 240
    },
    {
      "epoch": 0.20308692120227456,
      "grad_norm": 1.859375,
      "learning_rate": 1.772357723577236e-05,
      "loss": 1.3711,
      "step": 250
    },
    {
      "epoch": 0.21121039805036557,
      "grad_norm": 1.9375,
      "learning_rate": 1.754290876242096e-05,
      "loss": 1.3062,
      "step": 260
    },
    {
      "epoch": 0.21933387489845654,
      "grad_norm": 2.140625,
      "learning_rate": 1.7362240289069557e-05,
      "loss": 1.3281,
      "step": 270
    },
    {
      "epoch": 0.22745735174654752,
      "grad_norm": 1.7734375,
      "learning_rate": 1.718157181571816e-05,
      "loss": 1.2883,
      "step": 280
    },
    {
      "epoch": 0.2355808285946385,
      "grad_norm": 1.9921875,
      "learning_rate": 1.700090334236676e-05,
      "loss": 1.2695,
      "step": 290
    },
    {
      "epoch": 0.2437043054427295,
      "grad_norm": 1.765625,
      "learning_rate": 1.6820234869015358e-05,
      "loss": 1.3047,
      "step": 300
    },
    {
      "epoch": 0.2518277822908205,
      "grad_norm": 1.8828125,
      "learning_rate": 1.6639566395663956e-05,
      "loss": 1.2727,
      "step": 310
    },
    {
      "epoch": 0.25995125913891143,
      "grad_norm": 2.140625,
      "learning_rate": 1.6458897922312558e-05,
      "loss": 1.318,
      "step": 320
    },
    {
      "epoch": 0.26807473598700243,
      "grad_norm": 1.953125,
      "learning_rate": 1.627822944896116e-05,
      "loss": 1.3477,
      "step": 330
    },
    {
      "epoch": 0.27619821283509344,
      "grad_norm": 1.921875,
      "learning_rate": 1.6097560975609757e-05,
      "loss": 1.3273,
      "step": 340
    },
    {
      "epoch": 0.2843216896831844,
      "grad_norm": 2.09375,
      "learning_rate": 1.5916892502258355e-05,
      "loss": 1.3086,
      "step": 350
    },
    {
      "epoch": 0.2924451665312754,
      "grad_norm": 1.78125,
      "learning_rate": 1.5736224028906957e-05,
      "loss": 1.2727,
      "step": 360
    },
    {
      "epoch": 0.3005686433793664,
      "grad_norm": 1.8359375,
      "learning_rate": 1.555555555555556e-05,
      "loss": 1.3195,
      "step": 370
    },
    {
      "epoch": 0.30869212022745735,
      "grad_norm": 1.8203125,
      "learning_rate": 1.5374887082204156e-05,
      "loss": 1.2828,
      "step": 380
    },
    {
      "epoch": 0.31681559707554835,
      "grad_norm": 1.7265625,
      "learning_rate": 1.5194218608852756e-05,
      "loss": 1.2719,
      "step": 390
    },
    {
      "epoch": 0.3249390739236393,
      "grad_norm": 2.03125,
      "learning_rate": 1.5013550135501356e-05,
      "loss": 1.2914,
      "step": 400
    },
    {
      "epoch": 0.3330625507717303,
      "grad_norm": 1.9140625,
      "learning_rate": 1.4832881662149956e-05,
      "loss": 1.25,
      "step": 410
    },
    {
      "epoch": 0.3411860276198213,
      "grad_norm": 1.6328125,
      "learning_rate": 1.4652213188798556e-05,
      "loss": 1.282,
      "step": 420
    },
    {
      "epoch": 0.34930950446791226,
      "grad_norm": 2.015625,
      "learning_rate": 1.4471544715447157e-05,
      "loss": 1.368,
      "step": 430
    },
    {
      "epoch": 0.35743298131600326,
      "grad_norm": 1.953125,
      "learning_rate": 1.4290876242095755e-05,
      "loss": 1.3383,
      "step": 440
    },
    {
      "epoch": 0.3655564581640942,
      "grad_norm": 1.8203125,
      "learning_rate": 1.4110207768744355e-05,
      "loss": 1.2992,
      "step": 450
    },
    {
      "epoch": 0.3736799350121852,
      "grad_norm": 1.9375,
      "learning_rate": 1.3929539295392955e-05,
      "loss": 1.3234,
      "step": 460
    },
    {
      "epoch": 0.3818034118602762,
      "grad_norm": 1.875,
      "learning_rate": 1.3748870822041556e-05,
      "loss": 1.2969,
      "step": 470
    },
    {
      "epoch": 0.38992688870836717,
      "grad_norm": 1.9140625,
      "learning_rate": 1.3568202348690154e-05,
      "loss": 1.2648,
      "step": 480
    },
    {
      "epoch": 0.3980503655564582,
      "grad_norm": 2.046875,
      "learning_rate": 1.3387533875338754e-05,
      "loss": 1.3109,
      "step": 490
    },
    {
      "epoch": 0.4061738424045491,
      "grad_norm": 1.953125,
      "learning_rate": 1.3206865401987354e-05,
      "loss": 1.3086,
      "step": 500
    },
    {
      "epoch": 0.41429731925264013,
      "grad_norm": 1.8046875,
      "learning_rate": 1.3026196928635954e-05,
      "loss": 1.2188,
      "step": 510
    },
    {
      "epoch": 0.42242079610073113,
      "grad_norm": 1.8984375,
      "learning_rate": 1.2845528455284555e-05,
      "loss": 1.2383,
      "step": 520
    },
    {
      "epoch": 0.4305442729488221,
      "grad_norm": 1.7421875,
      "learning_rate": 1.2664859981933153e-05,
      "loss": 1.3164,
      "step": 530
    },
    {
      "epoch": 0.4386677497969131,
      "grad_norm": 2.0,
      "learning_rate": 1.2484191508581753e-05,
      "loss": 1.3047,
      "step": 540
    },
    {
      "epoch": 0.44679122664500404,
      "grad_norm": 1.8359375,
      "learning_rate": 1.2303523035230353e-05,
      "loss": 1.3023,
      "step": 550
    },
    {
      "epoch": 0.45491470349309504,
      "grad_norm": 1.734375,
      "learning_rate": 1.2122854561878954e-05,
      "loss": 1.3242,
      "step": 560
    },
    {
      "epoch": 0.46303818034118605,
      "grad_norm": 2.171875,
      "learning_rate": 1.1942186088527553e-05,
      "loss": 1.2961,
      "step": 570
    },
    {
      "epoch": 0.471161657189277,
      "grad_norm": 2.0625,
      "learning_rate": 1.1761517615176152e-05,
      "loss": 1.325,
      "step": 580
    },
    {
      "epoch": 0.479285134037368,
      "grad_norm": 2.0625,
      "learning_rate": 1.1580849141824752e-05,
      "loss": 1.232,
      "step": 590
    },
    {
      "epoch": 0.487408610885459,
      "grad_norm": 2.15625,
      "learning_rate": 1.1400180668473354e-05,
      "loss": 1.2805,
      "step": 600
    },
    {
      "epoch": 0.49553208773354995,
      "grad_norm": 1.8046875,
      "learning_rate": 1.1219512195121953e-05,
      "loss": 1.2867,
      "step": 610
    },
    {
      "epoch": 0.503655564581641,
      "grad_norm": 1.765625,
      "learning_rate": 1.1038843721770552e-05,
      "loss": 1.3094,
      "step": 620
    },
    {
      "epoch": 0.511779041429732,
      "grad_norm": 1.8515625,
      "learning_rate": 1.0858175248419151e-05,
      "loss": 1.3133,
      "step": 630
    },
    {
      "epoch": 0.5199025182778229,
      "grad_norm": 1.8203125,
      "learning_rate": 1.0677506775067751e-05,
      "loss": 1.3578,
      "step": 640
    },
    {
      "epoch": 0.5280259951259139,
      "grad_norm": 1.8828125,
      "learning_rate": 1.0496838301716353e-05,
      "loss": 1.2969,
      "step": 650
    },
    {
      "epoch": 0.5361494719740049,
      "grad_norm": 1.8359375,
      "learning_rate": 1.031616982836495e-05,
      "loss": 1.2867,
      "step": 660
    },
    {
      "epoch": 0.5442729488220959,
      "grad_norm": 1.8984375,
      "learning_rate": 1.013550135501355e-05,
      "loss": 1.2977,
      "step": 670
    },
    {
      "epoch": 0.5523964256701869,
      "grad_norm": 2.0625,
      "learning_rate": 9.95483288166215e-06,
      "loss": 1.2828,
      "step": 680
    },
    {
      "epoch": 0.5605199025182778,
      "grad_norm": 1.796875,
      "learning_rate": 9.77416440831075e-06,
      "loss": 1.2984,
      "step": 690
    },
    {
      "epoch": 0.5686433793663688,
      "grad_norm": 1.796875,
      "learning_rate": 9.59349593495935e-06,
      "loss": 1.2656,
      "step": 700
    },
    {
      "epoch": 0.5767668562144598,
      "grad_norm": 1.703125,
      "learning_rate": 9.412827461607951e-06,
      "loss": 1.2453,
      "step": 710
    },
    {
      "epoch": 0.5848903330625508,
      "grad_norm": 1.859375,
      "learning_rate": 9.23215898825655e-06,
      "loss": 1.3477,
      "step": 720
    },
    {
      "epoch": 0.5930138099106418,
      "grad_norm": 2.171875,
      "learning_rate": 9.051490514905151e-06,
      "loss": 1.3141,
      "step": 730
    },
    {
      "epoch": 0.6011372867587328,
      "grad_norm": 1.84375,
      "learning_rate": 8.870822041553749e-06,
      "loss": 1.2219,
      "step": 740
    },
    {
      "epoch": 0.6092607636068237,
      "grad_norm": 1.796875,
      "learning_rate": 8.690153568202349e-06,
      "loss": 1.2766,
      "step": 750
    },
    {
      "epoch": 0.6173842404549147,
      "grad_norm": 2.0625,
      "learning_rate": 8.509485094850949e-06,
      "loss": 1.2789,
      "step": 760
    },
    {
      "epoch": 0.6255077173030057,
      "grad_norm": 2.109375,
      "learning_rate": 8.328816621499549e-06,
      "loss": 1.2828,
      "step": 770
    },
    {
      "epoch": 0.6336311941510967,
      "grad_norm": 1.9140625,
      "learning_rate": 8.148148148148148e-06,
      "loss": 1.2492,
      "step": 780
    },
    {
      "epoch": 0.6417546709991877,
      "grad_norm": 1.84375,
      "learning_rate": 7.967479674796748e-06,
      "loss": 1.2836,
      "step": 790
    },
    {
      "epoch": 0.6498781478472786,
      "grad_norm": 2.15625,
      "learning_rate": 7.78681120144535e-06,
      "loss": 1.282,
      "step": 800
    },
    {
      "epoch": 0.6580016246953696,
      "grad_norm": 1.984375,
      "learning_rate": 7.6061427280939486e-06,
      "loss": 1.268,
      "step": 810
    },
    {
      "epoch": 0.6661251015434606,
      "grad_norm": 1.8046875,
      "learning_rate": 7.425474254742548e-06,
      "loss": 1.3234,
      "step": 820
    },
    {
      "epoch": 0.6742485783915516,
      "grad_norm": 2.3125,
      "learning_rate": 7.244805781391147e-06,
      "loss": 1.3195,
      "step": 830
    },
    {
      "epoch": 0.6823720552396426,
      "grad_norm": 1.9453125,
      "learning_rate": 7.064137308039748e-06,
      "loss": 1.2617,
      "step": 840
    },
    {
      "epoch": 0.6904955320877335,
      "grad_norm": 1.84375,
      "learning_rate": 6.883468834688347e-06,
      "loss": 1.3188,
      "step": 850
    },
    {
      "epoch": 0.6986190089358245,
      "grad_norm": 1.96875,
      "learning_rate": 6.7028003613369475e-06,
      "loss": 1.2969,
      "step": 860
    },
    {
      "epoch": 0.7067424857839155,
      "grad_norm": 2.28125,
      "learning_rate": 6.5221318879855465e-06,
      "loss": 1.3078,
      "step": 870
    },
    {
      "epoch": 0.7148659626320065,
      "grad_norm": 2.0,
      "learning_rate": 6.341463414634147e-06,
      "loss": 1.2789,
      "step": 880
    },
    {
      "epoch": 0.7229894394800975,
      "grad_norm": 2.046875,
      "learning_rate": 6.160794941282746e-06,
      "loss": 1.2898,
      "step": 890
    },
    {
      "epoch": 0.7311129163281884,
      "grad_norm": 1.875,
      "learning_rate": 5.980126467931347e-06,
      "loss": 1.2625,
      "step": 900
    },
    {
      "epoch": 0.7392363931762794,
      "grad_norm": 2.0625,
      "learning_rate": 5.7994579945799465e-06,
      "loss": 1.3141,
      "step": 910
    },
    {
      "epoch": 0.7473598700243704,
      "grad_norm": 1.640625,
      "learning_rate": 5.618789521228546e-06,
      "loss": 1.2594,
      "step": 920
    },
    {
      "epoch": 0.7554833468724614,
      "grad_norm": 1.859375,
      "learning_rate": 5.438121047877146e-06,
      "loss": 1.3594,
      "step": 930
    },
    {
      "epoch": 0.7636068237205524,
      "grad_norm": 2.015625,
      "learning_rate": 5.257452574525745e-06,
      "loss": 1.2742,
      "step": 940
    },
    {
      "epoch": 0.7717303005686433,
      "grad_norm": 2.125,
      "learning_rate": 5.076784101174346e-06,
      "loss": 1.2391,
      "step": 950
    },
    {
      "epoch": 0.7798537774167343,
      "grad_norm": 1.90625,
      "learning_rate": 4.8961156278229455e-06,
      "loss": 1.3109,
      "step": 960
    },
    {
      "epoch": 0.7879772542648253,
      "grad_norm": 1.734375,
      "learning_rate": 4.715447154471545e-06,
      "loss": 1.3289,
      "step": 970
    },
    {
      "epoch": 0.7961007311129163,
      "grad_norm": 2.125,
      "learning_rate": 4.534778681120145e-06,
      "loss": 1.2641,
      "step": 980
    },
    {
      "epoch": 0.8042242079610074,
      "grad_norm": 1.8828125,
      "learning_rate": 4.354110207768745e-06,
      "loss": 1.2391,
      "step": 990
    },
    {
      "epoch": 0.8123476848090982,
      "grad_norm": 1.6875,
      "learning_rate": 4.173441734417345e-06,
      "loss": 1.2977,
      "step": 1000
    },
    {
      "epoch": 0.8204711616571893,
      "grad_norm": 2.078125,
      "learning_rate": 3.9927732610659445e-06,
      "loss": 1.2625,
      "step": 1010
    },
    {
      "epoch": 0.8285946385052803,
      "grad_norm": 1.6484375,
      "learning_rate": 3.812104787714544e-06,
      "loss": 1.2937,
      "step": 1020
    },
    {
      "epoch": 0.8367181153533713,
      "grad_norm": 1.96875,
      "learning_rate": 3.6314363143631437e-06,
      "loss": 1.3109,
      "step": 1030
    },
    {
      "epoch": 0.8448415922014623,
      "grad_norm": 1.84375,
      "learning_rate": 3.450767841011744e-06,
      "loss": 1.2125,
      "step": 1040
    },
    {
      "epoch": 0.8529650690495532,
      "grad_norm": 1.8203125,
      "learning_rate": 3.2700993676603437e-06,
      "loss": 1.2547,
      "step": 1050
    },
    {
      "epoch": 0.8610885458976442,
      "grad_norm": 1.8515625,
      "learning_rate": 3.0894308943089435e-06,
      "loss": 1.2547,
      "step": 1060
    },
    {
      "epoch": 0.8692120227457352,
      "grad_norm": 1.8984375,
      "learning_rate": 2.9087624209575433e-06,
      "loss": 1.2523,
      "step": 1070
    },
    {
      "epoch": 0.8773354995938262,
      "grad_norm": 1.8359375,
      "learning_rate": 2.728093947606143e-06,
      "loss": 1.3062,
      "step": 1080
    },
    {
      "epoch": 0.8854589764419172,
      "grad_norm": 1.7890625,
      "learning_rate": 2.547425474254743e-06,
      "loss": 1.2961,
      "step": 1090
    },
    {
      "epoch": 0.8935824532900081,
      "grad_norm": 1.7890625,
      "learning_rate": 2.3667570009033427e-06,
      "loss": 1.2406,
      "step": 1100
    },
    {
      "epoch": 0.9017059301380991,
      "grad_norm": 1.765625,
      "learning_rate": 2.186088527551942e-06,
      "loss": 1.3203,
      "step": 1110
    },
    {
      "epoch": 0.9098294069861901,
      "grad_norm": 2.03125,
      "learning_rate": 2.0054200542005423e-06,
      "loss": 1.3039,
      "step": 1120
    },
    {
      "epoch": 0.9179528838342811,
      "grad_norm": 2.09375,
      "learning_rate": 1.824751580849142e-06,
      "loss": 1.2844,
      "step": 1130
    },
    {
      "epoch": 0.9260763606823721,
      "grad_norm": 1.8359375,
      "learning_rate": 1.6440831074977418e-06,
      "loss": 1.3031,
      "step": 1140
    },
    {
      "epoch": 0.934199837530463,
      "grad_norm": 1.9609375,
      "learning_rate": 1.4634146341463414e-06,
      "loss": 1.2391,
      "step": 1150
    },
    {
      "epoch": 0.942323314378554,
      "grad_norm": 1.875,
      "learning_rate": 1.2827461607949414e-06,
      "loss": 1.2508,
      "step": 1160
    },
    {
      "epoch": 0.950446791226645,
      "grad_norm": 1.859375,
      "learning_rate": 1.102077687443541e-06,
      "loss": 1.3148,
      "step": 1170
    },
    {
      "epoch": 0.958570268074736,
      "grad_norm": 1.875,
      "learning_rate": 9.21409214092141e-07,
      "loss": 1.2883,
      "step": 1180
    },
    {
      "epoch": 0.966693744922827,
      "grad_norm": 2.03125,
      "learning_rate": 7.407407407407407e-07,
      "loss": 1.282,
      "step": 1190
    },
    {
      "epoch": 0.974817221770918,
      "grad_norm": 1.75,
      "learning_rate": 5.600722673893405e-07,
      "loss": 1.318,
      "step": 1200
    },
    {
      "epoch": 0.9829406986190089,
      "grad_norm": 2.0,
      "learning_rate": 3.794037940379404e-07,
      "loss": 1.2641,
      "step": 1210
    },
    {
      "epoch": 0.9910641754670999,
      "grad_norm": 2.1875,
      "learning_rate": 1.987353206865402e-07,
      "loss": 1.3148,
      "step": 1220
    },
    {
      "epoch": 0.9991876523151909,
      "grad_norm": 1.953125,
      "learning_rate": 1.806684733514002e-08,
      "loss": 1.2688,
      "step": 1230
    },
    {
      "epoch": 1.0,
      "step": 1231,
      "total_flos": 5.907000409747046e+17,
      "train_loss": 1.310145461007311,
      "train_runtime": 4855.0115,
      "train_samples_per_second": 8.111,
      "train_steps_per_second": 0.254
    }
  ],
  "logging_steps": 10,
  "max_steps": 1231,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.907000409747046e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}