{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 1000,
  "global_step": 4425,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011299435028248588,
      "grad_norm": 0.7552605867385864,
      "learning_rate": 4.5146726862302485e-06,
      "loss": 1.0465,
      "step": 10
    },
    {
      "epoch": 0.022598870056497175,
      "grad_norm": 0.8854597210884094,
      "learning_rate": 9.029345372460497e-06,
      "loss": 1.0939,
      "step": 20
    },
    {
      "epoch": 0.03389830508474576,
      "grad_norm": 0.7813611030578613,
      "learning_rate": 1.3544018058690747e-05,
      "loss": 1.0292,
      "step": 30
    },
    {
      "epoch": 0.04519774011299435,
      "grad_norm": 0.6397517919540405,
      "learning_rate": 1.8058690744920994e-05,
      "loss": 0.995,
      "step": 40
    },
    {
      "epoch": 0.05649717514124294,
      "grad_norm": 0.4727611839771271,
      "learning_rate": 2.2573363431151244e-05,
      "loss": 0.896,
      "step": 50
    },
    {
      "epoch": 0.06779661016949153,
      "grad_norm": 0.37453365325927734,
      "learning_rate": 2.7088036117381494e-05,
      "loss": 0.8037,
      "step": 60
    },
    {
      "epoch": 0.07909604519774012,
      "grad_norm": 0.5303831696510315,
      "learning_rate": 3.1602708803611745e-05,
      "loss": 0.7872,
      "step": 70
    },
    {
      "epoch": 0.0903954802259887,
      "grad_norm": 0.3839394152164459,
      "learning_rate": 3.611738148984199e-05,
      "loss": 0.7697,
      "step": 80
    },
    {
      "epoch": 0.1016949152542373,
      "grad_norm": 0.39426788687705994,
      "learning_rate": 4.063205417607224e-05,
      "loss": 0.7481,
      "step": 90
    },
    {
      "epoch": 0.11299435028248588,
      "grad_norm": 0.29685771465301514,
      "learning_rate": 4.514672686230249e-05,
      "loss": 0.7166,
      "step": 100
    },
    {
      "epoch": 0.12429378531073447,
      "grad_norm": 0.3550776243209839,
      "learning_rate": 4.966139954853273e-05,
      "loss": 0.7455,
      "step": 110
    },
    {
      "epoch": 0.13559322033898305,
      "grad_norm": 0.49571502208709717,
      "learning_rate": 5.417607223476299e-05,
      "loss": 0.7451,
      "step": 120
    },
    {
      "epoch": 0.14689265536723164,
      "grad_norm": 0.3369849622249603,
      "learning_rate": 5.869074492099323e-05,
      "loss": 0.7235,
      "step": 130
    },
    {
      "epoch": 0.15819209039548024,
      "grad_norm": 0.37830305099487305,
      "learning_rate": 6.320541760722349e-05,
      "loss": 0.7201,
      "step": 140
    },
    {
      "epoch": 0.1694915254237288,
      "grad_norm": 0.42834165692329407,
      "learning_rate": 6.772009029345373e-05,
      "loss": 0.7199,
      "step": 150
    },
    {
      "epoch": 0.1807909604519774,
      "grad_norm": 0.3480307459831238,
      "learning_rate": 7.223476297968398e-05,
      "loss": 0.7134,
      "step": 160
    },
    {
      "epoch": 0.192090395480226,
      "grad_norm": 0.47217118740081787,
      "learning_rate": 7.674943566591423e-05,
      "loss": 0.7193,
      "step": 170
    },
    {
      "epoch": 0.2033898305084746,
      "grad_norm": 0.43827682733535767,
      "learning_rate": 8.126410835214448e-05,
      "loss": 0.7049,
      "step": 180
    },
    {
      "epoch": 0.21468926553672316,
      "grad_norm": 0.38405266404151917,
      "learning_rate": 8.577878103837473e-05,
      "loss": 0.719,
      "step": 190
    },
    {
      "epoch": 0.22598870056497175,
      "grad_norm": 0.48938581347465515,
      "learning_rate": 9.029345372460498e-05,
      "loss": 0.7077,
      "step": 200
    },
    {
      "epoch": 0.23728813559322035,
      "grad_norm": 0.4330425560474396,
      "learning_rate": 9.480812641083521e-05,
      "loss": 0.7301,
      "step": 210
    },
    {
      "epoch": 0.24858757062146894,
      "grad_norm": 0.3442066013813019,
      "learning_rate": 9.932279909706546e-05,
      "loss": 0.7174,
      "step": 220
    },
    {
      "epoch": 0.2598870056497175,
      "grad_norm": 0.41479647159576416,
      "learning_rate": 0.00010383747178329573,
      "loss": 0.7163,
      "step": 230
    },
    {
      "epoch": 0.2711864406779661,
      "grad_norm": 0.4269746243953705,
      "learning_rate": 0.00010835214446952598,
      "loss": 0.7206,
      "step": 240
    },
    {
      "epoch": 0.2824858757062147,
      "grad_norm": 0.4595146179199219,
      "learning_rate": 0.00011286681715575623,
      "loss": 0.7198,
      "step": 250
    },
    {
      "epoch": 0.2937853107344633,
      "grad_norm": 0.459258496761322,
      "learning_rate": 0.00011738148984198646,
      "loss": 0.6791,
      "step": 260
    },
    {
      "epoch": 0.3050847457627119,
      "grad_norm": 0.40073147416114807,
      "learning_rate": 0.00012189616252821671,
      "loss": 0.7379,
      "step": 270
    },
    {
      "epoch": 0.3163841807909605,
      "grad_norm": 0.4646053910255432,
      "learning_rate": 0.00012641083521444698,
      "loss": 0.7146,
      "step": 280
    },
    {
      "epoch": 0.327683615819209,
      "grad_norm": 0.5096935629844666,
      "learning_rate": 0.00013092550790067722,
      "loss": 0.6855,
      "step": 290
    },
    {
      "epoch": 0.3389830508474576,
      "grad_norm": 0.37696900963783264,
      "learning_rate": 0.00013544018058690745,
      "loss": 0.7169,
      "step": 300
    },
    {
      "epoch": 0.3502824858757062,
      "grad_norm": 0.344178169965744,
      "learning_rate": 0.00013995485327313772,
      "loss": 0.7079,
      "step": 310
    },
    {
      "epoch": 0.3615819209039548,
      "grad_norm": 0.33671554923057556,
      "learning_rate": 0.00014446952595936795,
      "loss": 0.68,
      "step": 320
    },
    {
      "epoch": 0.3728813559322034,
      "grad_norm": 0.365024596452713,
      "learning_rate": 0.00014898419864559822,
      "loss": 0.7247,
      "step": 330
    },
    {
      "epoch": 0.384180790960452,
      "grad_norm": 0.36559364199638367,
      "learning_rate": 0.00015349887133182845,
      "loss": 0.6753,
      "step": 340
    },
    {
      "epoch": 0.3954802259887006,
      "grad_norm": 0.3757419288158417,
      "learning_rate": 0.0001580135440180587,
      "loss": 0.6616,
      "step": 350
    },
    {
      "epoch": 0.4067796610169492,
      "grad_norm": 0.5549542307853699,
      "learning_rate": 0.00016252821670428895,
      "loss": 0.686,
      "step": 360
    },
    {
      "epoch": 0.4180790960451977,
      "grad_norm": 0.3681774139404297,
      "learning_rate": 0.0001670428893905192,
      "loss": 0.7086,
      "step": 370
    },
    {
      "epoch": 0.4293785310734463,
      "grad_norm": 0.3036465346813202,
      "learning_rate": 0.00017155756207674945,
      "loss": 0.6966,
      "step": 380
    },
    {
      "epoch": 0.4406779661016949,
      "grad_norm": 0.32314741611480713,
      "learning_rate": 0.0001760722347629797,
      "loss": 0.7406,
      "step": 390
    },
    {
      "epoch": 0.4519774011299435,
      "grad_norm": 0.41190987825393677,
      "learning_rate": 0.00018058690744920995,
      "loss": 0.6772,
      "step": 400
    },
    {
      "epoch": 0.4632768361581921,
      "grad_norm": 0.28181225061416626,
      "learning_rate": 0.0001851015801354402,
      "loss": 0.6843,
      "step": 410
    },
    {
      "epoch": 0.4745762711864407,
      "grad_norm": 0.2925048768520355,
      "learning_rate": 0.00018961625282167043,
      "loss": 0.7206,
      "step": 420
    },
    {
      "epoch": 0.4858757062146893,
      "grad_norm": 0.33646613359451294,
      "learning_rate": 0.0001941309255079007,
      "loss": 0.6879,
      "step": 430
    },
    {
      "epoch": 0.4971751412429379,
      "grad_norm": 0.40007829666137695,
      "learning_rate": 0.00019864559819413093,
      "loss": 0.7032,
      "step": 440
    },
    {
      "epoch": 0.5084745762711864,
      "grad_norm": 0.29928356409072876,
      "learning_rate": 0.00019999847502678901,
      "loss": 0.7086,
      "step": 450
    },
    {
      "epoch": 0.519774011299435,
      "grad_norm": 0.285306453704834,
      "learning_rate": 0.00019999100588221217,
      "loss": 0.6709,
      "step": 460
    },
    {
      "epoch": 0.5310734463276836,
      "grad_norm": 0.5055399537086487,
      "learning_rate": 0.00019997731293347473,
      "loss": 0.6901,
      "step": 470
    },
    {
      "epoch": 0.5423728813559322,
      "grad_norm": 0.3232295513153076,
      "learning_rate": 0.00019995739703287558,
      "loss": 0.6748,
      "step": 480
    },
    {
      "epoch": 0.5536723163841808,
      "grad_norm": 0.40595105290412903,
      "learning_rate": 0.00019993125942005297,
      "loss": 0.6908,
      "step": 490
    },
    {
      "epoch": 0.5649717514124294,
      "grad_norm": 0.29886409640312195,
      "learning_rate": 0.00019989890172190697,
      "loss": 0.6961,
      "step": 500
    },
    {
      "epoch": 0.576271186440678,
      "grad_norm": 0.3396940231323242,
      "learning_rate": 0.00019986032595249855,
      "loss": 0.6741,
      "step": 510
    },
    {
      "epoch": 0.5875706214689266,
      "grad_norm": 0.3772926926612854,
      "learning_rate": 0.00019981553451292396,
      "loss": 0.6681,
      "step": 520
    },
    {
      "epoch": 0.5988700564971752,
      "grad_norm": 0.3138965964317322,
      "learning_rate": 0.00019976453019116547,
      "loss": 0.7027,
      "step": 530
    },
    {
      "epoch": 0.6101694915254238,
      "grad_norm": 0.31275707483291626,
      "learning_rate": 0.00019970731616191772,
      "loss": 0.6955,
      "step": 540
    },
    {
      "epoch": 0.6214689265536724,
      "grad_norm": 0.28057295083999634,
      "learning_rate": 0.00019964389598639012,
      "loss": 0.6617,
      "step": 550
    },
    {
      "epoch": 0.632768361581921,
      "grad_norm": 0.5602757930755615,
      "learning_rate": 0.00019957427361208522,
      "loss": 0.6896,
      "step": 560
    },
    {
      "epoch": 0.6440677966101694,
      "grad_norm": 0.43884241580963135,
      "learning_rate": 0.00019949845337255306,
      "loss": 0.6786,
      "step": 570
    },
    {
      "epoch": 0.655367231638418,
      "grad_norm": 0.3063630759716034,
      "learning_rate": 0.00019941643998712128,
      "loss": 0.685,
      "step": 580
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.2912139594554901,
      "learning_rate": 0.00019932823856060153,
      "loss": 0.691,
      "step": 590
    },
    {
      "epoch": 0.6779661016949152,
      "grad_norm": 0.5503478646278381,
      "learning_rate": 0.00019923385458297168,
      "loss": 0.7076,
      "step": 600
    },
    {
      "epoch": 0.6892655367231638,
      "grad_norm": 0.30678126215934753,
      "learning_rate": 0.00019913329392903396,
      "loss": 0.6628,
      "step": 610
    },
    {
      "epoch": 0.7005649717514124,
      "grad_norm": 0.341691792011261,
      "learning_rate": 0.00019902656285804954,
      "loss": 0.6973,
      "step": 620
    },
    {
      "epoch": 0.711864406779661,
      "grad_norm": 0.2795998454093933,
      "learning_rate": 0.0001989136680133488,
      "loss": 0.668,
      "step": 630
    },
    {
      "epoch": 0.7231638418079096,
      "grad_norm": 0.3217742443084717,
      "learning_rate": 0.00019879461642191774,
      "loss": 0.6839,
      "step": 640
    },
    {
      "epoch": 0.7344632768361582,
      "grad_norm": 0.2668191194534302,
      "learning_rate": 0.00019866941549396075,
      "loss": 0.6929,
      "step": 650
    },
    {
      "epoch": 0.7457627118644068,
      "grad_norm": 0.8005201816558838,
      "learning_rate": 0.0001985380730224393,
      "loss": 0.701,
      "step": 660
    },
    {
      "epoch": 0.7570621468926554,
      "grad_norm": 0.29280129075050354,
      "learning_rate": 0.00019840059718258693,
      "loss": 0.7222,
      "step": 670
    },
    {
      "epoch": 0.768361581920904,
      "grad_norm": 0.3449852764606476,
      "learning_rate": 0.00019825699653140032,
      "loss": 0.6919,
      "step": 680
    },
    {
      "epoch": 0.7796610169491526,
      "grad_norm": 0.2684054970741272,
      "learning_rate": 0.0001981072800071066,
      "loss": 0.6859,
      "step": 690
    },
    {
      "epoch": 0.7909604519774012,
      "grad_norm": 0.3716861307621002,
      "learning_rate": 0.00019795145692860726,
      "loss": 0.666,
      "step": 700
    },
    {
      "epoch": 0.8022598870056498,
      "grad_norm": 0.3001028597354889,
      "learning_rate": 0.00019778953699489785,
      "loss": 0.6587,
      "step": 710
    },
    {
      "epoch": 0.8135593220338984,
      "grad_norm": 0.476034551858902,
      "learning_rate": 0.0001976215302844644,
      "loss": 0.6766,
      "step": 720
    },
    {
      "epoch": 0.8248587570621468,
      "grad_norm": 0.3516661524772644,
      "learning_rate": 0.00019744744725465617,
      "loss": 0.6836,
      "step": 730
    },
    {
      "epoch": 0.8361581920903954,
      "grad_norm": 0.275155246257782,
      "learning_rate": 0.00019726729874103448,
      "loss": 0.6729,
      "step": 740
    },
    {
      "epoch": 0.847457627118644,
      "grad_norm": 0.31217560172080994,
      "learning_rate": 0.00019708109595669858,
      "loss": 0.6811,
      "step": 750
    },
    {
      "epoch": 0.8587570621468926,
      "grad_norm": 0.3042563199996948,
      "learning_rate": 0.00019688885049158752,
      "loss": 0.7353,
      "step": 760
    },
    {
      "epoch": 0.8700564971751412,
      "grad_norm": 0.3564906120300293,
      "learning_rate": 0.0001966905743117588,
      "loss": 0.6938,
      "step": 770
    },
    {
      "epoch": 0.8813559322033898,
      "grad_norm": 0.3310820162296295,
      "learning_rate": 0.00019648627975864355,
      "loss": 0.6724,
      "step": 780
    },
    {
      "epoch": 0.8926553672316384,
      "grad_norm": 0.4258658289909363,
      "learning_rate": 0.00019627597954827833,
      "loss": 0.6901,
      "step": 790
    },
    {
      "epoch": 0.903954802259887,
      "grad_norm": 0.3146076202392578,
      "learning_rate": 0.00019605968677051378,
      "loss": 0.7008,
      "step": 800
    },
    {
      "epoch": 0.9152542372881356,
      "grad_norm": 0.3338530957698822,
      "learning_rate": 0.0001958374148881996,
      "loss": 0.6567,
      "step": 810
    },
    {
      "epoch": 0.9265536723163842,
      "grad_norm": 0.3822636008262634,
      "learning_rate": 0.00019560917773634676,
      "loss": 0.6756,
      "step": 820
    },
    {
      "epoch": 0.9378531073446328,
      "grad_norm": 0.3074444830417633,
      "learning_rate": 0.00019537498952126642,
      "loss": 0.6958,
      "step": 830
    },
    {
      "epoch": 0.9491525423728814,
      "grad_norm": 0.31832125782966614,
      "learning_rate": 0.00019513486481968545,
      "loss": 0.6871,
      "step": 840
    },
    {
      "epoch": 0.96045197740113,
      "grad_norm": 0.3670668303966522,
      "learning_rate": 0.00019488881857783935,
      "loss": 0.6956,
      "step": 850
    },
    {
      "epoch": 0.9717514124293786,
      "grad_norm": 0.2784234881401062,
      "learning_rate": 0.00019463686611054172,
      "loss": 0.6865,
      "step": 860
    },
    {
      "epoch": 0.9830508474576272,
      "grad_norm": 0.3210557997226715,
      "learning_rate": 0.00019437902310023126,
      "loss": 0.6845,
      "step": 870
    },
    {
      "epoch": 0.9943502824858758,
      "grad_norm": 0.3313763737678528,
      "learning_rate": 0.0001941153055959954,
      "loss": 0.6589,
      "step": 880
    },
    {
      "epoch": 1.0056497175141244,
      "grad_norm": 0.35079240798950195,
      "learning_rate": 0.00019384573001257155,
      "loss": 0.671,
      "step": 890
    },
    {
      "epoch": 1.0169491525423728,
      "grad_norm": 0.3715152442455292,
      "learning_rate": 0.0001935703131293252,
      "loss": 0.608,
      "step": 900
    },
    {
      "epoch": 1.0282485875706215,
      "grad_norm": 0.3646804988384247,
      "learning_rate": 0.00019328907208920567,
      "loss": 0.6457,
      "step": 910
    },
    {
      "epoch": 1.03954802259887,
      "grad_norm": 0.3106479048728943,
      "learning_rate": 0.00019300202439767898,
      "loss": 0.6105,
      "step": 920
    },
    {
      "epoch": 1.0508474576271187,
      "grad_norm": 0.29659685492515564,
      "learning_rate": 0.00019270918792163827,
      "loss": 0.6221,
      "step": 930
    },
    {
      "epoch": 1.0621468926553672,
      "grad_norm": 0.4973548948764801,
      "learning_rate": 0.00019241058088829166,
      "loss": 0.6465,
      "step": 940
    },
    {
      "epoch": 1.073446327683616,
      "grad_norm": 0.4067288637161255,
      "learning_rate": 0.00019210622188402788,
      "loss": 0.6374,
      "step": 950
    },
    {
      "epoch": 1.0847457627118644,
      "grad_norm": 0.4106499254703522,
      "learning_rate": 0.00019179612985325908,
      "loss": 0.6226,
      "step": 960
    },
    {
      "epoch": 1.0960451977401129,
      "grad_norm": 0.31531280279159546,
      "learning_rate": 0.000191480324097242,
      "loss": 0.6604,
      "step": 970
    },
    {
      "epoch": 1.1073446327683616,
      "grad_norm": 0.3855787515640259,
      "learning_rate": 0.00019115882427287638,
      "loss": 0.6466,
      "step": 980
    },
    {
      "epoch": 1.11864406779661,
      "grad_norm": 0.36246171593666077,
      "learning_rate": 0.00019083165039148153,
      "loss": 0.6315,
      "step": 990
    },
    {
      "epoch": 1.1299435028248588,
      "grad_norm": 0.4511447846889496,
      "learning_rate": 0.00019049882281755066,
      "loss": 0.6341,
      "step": 1000
    },
    {
      "epoch": 1.1299435028248588,
      "eval_loss": 0.7100452184677124,
      "eval_runtime": 16.7354,
      "eval_samples_per_second": 89.093,
      "eval_steps_per_second": 11.174,
      "step": 1000
    },
    {
      "epoch": 1.1412429378531073,
      "grad_norm": 0.39097025990486145,
      "learning_rate": 0.00019016036226748346,
      "loss": 0.6144,
      "step": 1010
    },
    {
      "epoch": 1.152542372881356,
      "grad_norm": 0.40734922885894775,
      "learning_rate": 0.00018981628980829652,
      "loss": 0.6458,
      "step": 1020
    },
    {
      "epoch": 1.1638418079096045,
      "grad_norm": 0.3473665118217468,
      "learning_rate": 0.00018946662685631203,
      "loss": 0.6417,
      "step": 1030
    },
    {
      "epoch": 1.1751412429378532,
      "grad_norm": 0.41758203506469727,
      "learning_rate": 0.0001891113951758249,
      "loss": 0.6355,
      "step": 1040
    },
    {
      "epoch": 1.1864406779661016,
      "grad_norm": 0.6268119215965271,
      "learning_rate": 0.0001887506168777479,
      "loss": 0.6332,
      "step": 1050
    },
    {
      "epoch": 1.1977401129943503,
      "grad_norm": 0.3336067795753479,
      "learning_rate": 0.00018838431441823547,
      "loss": 0.6445,
      "step": 1060
    },
    {
      "epoch": 1.2090395480225988,
      "grad_norm": 0.3316787779331207,
      "learning_rate": 0.00018801251059728604,
      "loss": 0.6314,
      "step": 1070
    },
    {
      "epoch": 1.2203389830508475,
      "grad_norm": 0.304683119058609,
      "learning_rate": 0.00018763522855732266,
      "loss": 0.6232,
      "step": 1080
    },
    {
      "epoch": 1.231638418079096,
      "grad_norm": 0.34174299240112305,
      "learning_rate": 0.00018725249178175286,
      "loss": 0.6561,
      "step": 1090
    },
    {
      "epoch": 1.2429378531073447,
      "grad_norm": 0.3202422559261322,
      "learning_rate": 0.0001868643240935066,
      "loss": 0.6066,
      "step": 1100
    },
    {
      "epoch": 1.2542372881355932,
      "grad_norm": 0.3420744836330414,
      "learning_rate": 0.0001864707496535537,
      "loss": 0.6367,
      "step": 1110
    },
    {
      "epoch": 1.2655367231638417,
      "grad_norm": 0.38407158851623535,
      "learning_rate": 0.00018607179295939983,
      "loss": 0.6278,
      "step": 1120
    },
    {
      "epoch": 1.2768361581920904,
      "grad_norm": 0.37982526421546936,
      "learning_rate": 0.00018566747884356182,
      "loss": 0.6209,
      "step": 1130
    },
    {
      "epoch": 1.288135593220339,
      "grad_norm": 0.33808642625808716,
      "learning_rate": 0.00018525783247202188,
      "loss": 0.6525,
      "step": 1140
    },
    {
      "epoch": 1.2994350282485876,
      "grad_norm": 0.45425844192504883,
      "learning_rate": 0.00018484287934266122,
      "loss": 0.6367,
      "step": 1150
    },
    {
      "epoch": 1.310734463276836,
      "grad_norm": 0.422880083322525,
      "learning_rate": 0.00018442264528367304,
      "loss": 0.6499,
      "step": 1160
    },
    {
      "epoch": 1.3220338983050848,
      "grad_norm": 0.3406778573989868,
      "learning_rate": 0.0001839971564519547,
      "loss": 0.6478,
      "step": 1170
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.37413087487220764,
      "learning_rate": 0.00018356643933147986,
      "loss": 0.6382,
      "step": 1180
    },
    {
      "epoch": 1.344632768361582,
      "grad_norm": 0.3171542286872864,
      "learning_rate": 0.00018313052073164982,
      "loss": 0.6373,
      "step": 1190
    },
    {
      "epoch": 1.3559322033898304,
      "grad_norm": 0.3208696246147156,
      "learning_rate": 0.00018268942778562494,
      "loss": 0.6231,
      "step": 1200
    },
    {
      "epoch": 1.3672316384180792,
      "grad_norm": 0.37951213121414185,
      "learning_rate": 0.0001822431879486357,
      "loss": 0.6659,
      "step": 1210
    },
    {
      "epoch": 1.3785310734463276,
      "grad_norm": 0.3654930591583252,
      "learning_rate": 0.00018179182899627386,
      "loss": 0.6467,
      "step": 1220
    },
    {
      "epoch": 1.3898305084745763,
      "grad_norm": 0.5204548239707947,
      "learning_rate": 0.00018133537902276342,
      "loss": 0.6479,
      "step": 1230
    },
    {
      "epoch": 1.4011299435028248,
      "grad_norm": 0.33258357644081116,
      "learning_rate": 0.0001808738664392122,
      "loss": 0.613,
      "step": 1240
    },
    {
      "epoch": 1.4124293785310735,
      "grad_norm": 0.3374476134777069,
      "learning_rate": 0.0001804073199718432,
      "loss": 0.6308,
      "step": 1250
    },
    {
      "epoch": 1.423728813559322,
      "grad_norm": 0.3677334189414978,
      "learning_rate": 0.0001799357686602068,
      "loss": 0.6412,
      "step": 1260
    },
    {
      "epoch": 1.4350282485875705,
      "grad_norm": 0.503189742565155,
      "learning_rate": 0.00017945924185537295,
      "loss": 0.6242,
      "step": 1270
    },
    {
      "epoch": 1.4463276836158192,
      "grad_norm": 0.44822338223457336,
      "learning_rate": 0.00017897776921810453,
      "loss": 0.6269,
      "step": 1280
    },
    {
      "epoch": 1.457627118644068,
      "grad_norm": 0.38298189640045166,
      "learning_rate": 0.00017849138071701092,
      "loss": 0.6206,
      "step": 1290
    },
    {
      "epoch": 1.4689265536723164,
      "grad_norm": 0.40119263529777527,
      "learning_rate": 0.0001780001066266829,
      "loss": 0.6398,
      "step": 1300
    },
    {
      "epoch": 1.4802259887005649,
      "grad_norm": 0.5280100703239441,
      "learning_rate": 0.00017750397752580796,
      "loss": 0.6299,
      "step": 1310
    },
    {
      "epoch": 1.4915254237288136,
      "grad_norm": 0.35528528690338135,
      "learning_rate": 0.00017700302429526723,
      "loss": 0.6511,
      "step": 1320
    },
    {
      "epoch": 1.5028248587570623,
      "grad_norm": 0.3741821348667145,
      "learning_rate": 0.00017649727811621315,
      "loss": 0.6564,
      "step": 1330
    },
    {
      "epoch": 1.5141242937853108,
      "grad_norm": 0.5371260046958923,
      "learning_rate": 0.0001759867704681288,
      "loss": 0.6186,
      "step": 1340
    },
    {
      "epoch": 1.5254237288135593,
      "grad_norm": 0.3590337336063385,
      "learning_rate": 0.00017547153312686832,
      "loss": 0.6372,
      "step": 1350
    },
    {
      "epoch": 1.536723163841808,
      "grad_norm": 0.34499940276145935,
      "learning_rate": 0.00017495159816267929,
      "loss": 0.6442,
      "step": 1360
    },
    {
      "epoch": 1.5480225988700564,
      "grad_norm": 0.45834365487098694,
      "learning_rate": 0.00017442699793820631,
      "loss": 0.6363,
      "step": 1370
    },
    {
      "epoch": 1.559322033898305,
      "grad_norm": 0.3389630615711212,
      "learning_rate": 0.00017389776510647688,
      "loss": 0.6399,
      "step": 1380
    },
    {
      "epoch": 1.5706214689265536,
      "grad_norm": 1.0764904022216797,
      "learning_rate": 0.0001733639326088688,
      "loss": 0.6073,
      "step": 1390
    },
    {
      "epoch": 1.5819209039548023,
      "grad_norm": 0.32130682468414307,
      "learning_rate": 0.00017282553367305975,
      "loss": 0.65,
      "step": 1400
    },
    {
      "epoch": 1.5932203389830508,
      "grad_norm": 0.5010311603546143,
      "learning_rate": 0.00017228260181095925,
      "loss": 0.626,
      "step": 1410
    },
    {
      "epoch": 1.6045197740112993,
      "grad_norm": 0.3811778128147125,
      "learning_rate": 0.00017173517081662255,
      "loss": 0.6565,
      "step": 1420
    },
    {
      "epoch": 1.615819209039548,
      "grad_norm": 0.793699324131012,
      "learning_rate": 0.00017118327476414735,
      "loss": 0.6555,
      "step": 1430
    },
    {
      "epoch": 1.6271186440677967,
      "grad_norm": 0.33635199069976807,
      "learning_rate": 0.0001706269480055528,
      "loss": 0.6431,
      "step": 1440
    },
    {
      "epoch": 1.6384180790960452,
      "grad_norm": 0.46125972270965576,
      "learning_rate": 0.00017006622516864127,
      "loss": 0.6383,
      "step": 1450
    },
    {
      "epoch": 1.6497175141242937,
      "grad_norm": 0.37351667881011963,
      "learning_rate": 0.00016950114115484315,
      "loss": 0.6304,
      "step": 1460
    },
    {
      "epoch": 1.6610169491525424,
      "grad_norm": 0.35749056935310364,
      "learning_rate": 0.0001689317311370442,
      "loss": 0.6647,
      "step": 1470
    },
    {
      "epoch": 1.672316384180791,
      "grad_norm": 0.3775118887424469,
      "learning_rate": 0.0001683580305573966,
      "loss": 0.6288,
      "step": 1480
    },
    {
      "epoch": 1.6836158192090396,
      "grad_norm": 0.3228208124637604,
      "learning_rate": 0.0001677800751251126,
      "loss": 0.6067,
      "step": 1490
    },
    {
      "epoch": 1.694915254237288,
      "grad_norm": 0.38675859570503235,
      "learning_rate": 0.00016719790081424192,
      "loss": 0.6438,
      "step": 1500
    },
    {
      "epoch": 1.7062146892655368,
      "grad_norm": 0.3832201063632965,
      "learning_rate": 0.0001666115438614328,
      "loss": 0.6269,
      "step": 1510
    },
    {
      "epoch": 1.7175141242937855,
      "grad_norm": 0.5429854989051819,
      "learning_rate": 0.00016602104076367624,
      "loss": 0.6397,
      "step": 1520
    },
    {
      "epoch": 1.7288135593220337,
      "grad_norm": 0.4048645496368408,
      "learning_rate": 0.0001654264282760343,
      "loss": 0.6402,
      "step": 1530
    },
    {
      "epoch": 1.7401129943502824,
      "grad_norm": 0.3748385012149811,
      "learning_rate": 0.00016482774340935267,
      "loss": 0.626,
      "step": 1540
    },
    {
      "epoch": 1.7514124293785311,
      "grad_norm": 0.44016194343566895,
      "learning_rate": 0.00016422502342795648,
      "loss": 0.6542,
      "step": 1550
    },
    {
      "epoch": 1.7627118644067796,
      "grad_norm": 0.34061527252197266,
      "learning_rate": 0.00016361830584733124,
      "loss": 0.6472,
      "step": 1560
    },
    {
      "epoch": 1.774011299435028,
      "grad_norm": 0.3848430812358856,
      "learning_rate": 0.00016300762843178754,
      "loss": 0.6304,
      "step": 1570
    },
    {
      "epoch": 1.7853107344632768,
      "grad_norm": 0.3756044805049896,
      "learning_rate": 0.00016239302919211052,
      "loss": 0.6161,
      "step": 1580
    },
    {
      "epoch": 1.7966101694915255,
      "grad_norm": 0.40179935097694397,
      "learning_rate": 0.0001617745463831939,
      "loss": 0.6428,
      "step": 1590
    },
    {
      "epoch": 1.807909604519774,
      "grad_norm": 0.4611279368400574,
      "learning_rate": 0.00016115221850165892,
      "loss": 0.6661,
      "step": 1600
    },
    {
      "epoch": 1.8192090395480225,
      "grad_norm": 0.5279085636138916,
      "learning_rate": 0.0001605260842834581,
      "loss": 0.6293,
      "step": 1610
    },
    {
      "epoch": 1.8305084745762712,
      "grad_norm": 0.5711485743522644,
      "learning_rate": 0.00015989618270146423,
      "loss": 0.6368,
      "step": 1620
    },
    {
      "epoch": 1.84180790960452,
      "grad_norm": 0.37090200185775757,
      "learning_rate": 0.00015926255296304454,
      "loss": 0.6297,
      "step": 1630
    },
    {
      "epoch": 1.8531073446327684,
      "grad_norm": 0.3836909830570221,
      "learning_rate": 0.00015862523450762032,
      "loss": 0.6817,
      "step": 1640
    },
    {
      "epoch": 1.8644067796610169,
      "grad_norm": 0.4297361969947815,
      "learning_rate": 0.00015798426700421196,
      "loss": 0.6586,
      "step": 1650
    },
    {
      "epoch": 1.8757062146892656,
      "grad_norm": 0.3696504235267639,
      "learning_rate": 0.0001573396903489699,
      "loss": 0.6466,
      "step": 1660
    },
    {
      "epoch": 1.8870056497175143,
      "grad_norm": 0.39131200313568115,
      "learning_rate": 0.00015669154466269137,
      "loss": 0.6448,
      "step": 1670
    },
    {
      "epoch": 1.8983050847457628,
      "grad_norm": 0.3583720624446869,
      "learning_rate": 0.00015603987028832303,
      "loss": 0.6385,
      "step": 1680
    },
    {
      "epoch": 1.9096045197740112,
      "grad_norm": 0.40514451265335083,
      "learning_rate": 0.00015538470778844995,
      "loss": 0.6368,
      "step": 1690
    },
    {
      "epoch": 1.92090395480226,
      "grad_norm": 0.4644288122653961,
      "learning_rate": 0.00015472609794277083,
      "loss": 0.6341,
      "step": 1700
    },
    {
      "epoch": 1.9322033898305084,
      "grad_norm": 0.6364967823028564,
      "learning_rate": 0.00015406408174555976,
      "loss": 0.619,
      "step": 1710
    },
    {
      "epoch": 1.943502824858757,
      "grad_norm": 0.3623166084289551,
      "learning_rate": 0.00015339870040311448,
      "loss": 0.6591,
      "step": 1720
    },
    {
      "epoch": 1.9548022598870056,
      "grad_norm": 0.39671990275382996,
      "learning_rate": 0.00015272999533119162,
      "loss": 0.645,
      "step": 1730
    },
    {
      "epoch": 1.9661016949152543,
      "grad_norm": 0.45107388496398926,
      "learning_rate": 0.0001520580081524289,
      "loss": 0.625,
      "step": 1740
    },
    {
      "epoch": 1.9774011299435028,
      "grad_norm": 0.4473719596862793,
      "learning_rate": 0.00015138278069375423,
      "loss": 0.6481,
      "step": 1750
    },
    {
      "epoch": 1.9887005649717513,
      "grad_norm": 0.390774130821228,
      "learning_rate": 0.00015070435498378243,
      "loss": 0.6482,
      "step": 1760
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.41401803493499756,
      "learning_rate": 0.00015002277325019904,
      "loss": 0.623,
      "step": 1770
    },
    {
      "epoch": 2.0112994350282487,
      "grad_norm": 0.7757445573806763,
      "learning_rate": 0.00014933807791713203,
      "loss": 0.573,
      "step": 1780
    },
    {
      "epoch": 2.022598870056497,
      "grad_norm": 0.44817861914634705,
      "learning_rate": 0.00014865031160251116,
      "loss": 0.6054,
      "step": 1790
    },
    {
      "epoch": 2.0338983050847457,
      "grad_norm": 0.4326503574848175,
      "learning_rate": 0.0001479595171154152,
      "loss": 0.5724,
      "step": 1800
    },
    {
      "epoch": 2.0451977401129944,
      "grad_norm": 0.4020352065563202,
      "learning_rate": 0.00014726573745340747,
      "loss": 0.5536,
      "step": 1810
    },
    {
      "epoch": 2.056497175141243,
      "grad_norm": 0.3883920907974243,
      "learning_rate": 0.00014656901579985934,
      "loss": 0.5591,
      "step": 1820
    },
    {
      "epoch": 2.0677966101694913,
      "grad_norm": 0.4672842025756836,
      "learning_rate": 0.00014586939552126255,
      "loss": 0.5467,
      "step": 1830
    },
    {
      "epoch": 2.07909604519774,
      "grad_norm": 0.5037193894386292,
      "learning_rate": 0.0001451669201645298,
      "loss": 0.5548,
      "step": 1840
    },
    {
      "epoch": 2.0903954802259888,
      "grad_norm": 0.46837809681892395,
      "learning_rate": 0.00014446163345428411,
      "loss": 0.5464,
      "step": 1850
    },
    {
      "epoch": 2.1016949152542375,
      "grad_norm": 0.4075257480144501,
      "learning_rate": 0.00014375357929013763,
      "loss": 0.5658,
      "step": 1860
    },
    {
      "epoch": 2.1129943502824857,
      "grad_norm": 0.5770477652549744,
      "learning_rate": 0.00014304280174395862,
      "loss": 0.5657,
      "step": 1870
    },
    {
      "epoch": 2.1242937853107344,
      "grad_norm": 0.38950198888778687,
      "learning_rate": 0.00014232934505712877,
      "loss": 0.5488,
      "step": 1880
    },
    {
      "epoch": 2.135593220338983,
      "grad_norm": 0.45963385701179504,
      "learning_rate": 0.00014161325363778914,
      "loss": 0.5536,
      "step": 1890
    },
    {
      "epoch": 2.146892655367232,
      "grad_norm": 0.5212920904159546,
      "learning_rate": 0.00014089457205807614,
      "loss": 0.5518,
      "step": 1900
    },
    {
      "epoch": 2.15819209039548,
      "grad_norm": 0.7819460034370422,
      "learning_rate": 0.00014017334505134722,
      "loss": 0.5493,
      "step": 1910
    },
    {
      "epoch": 2.169491525423729,
      "grad_norm": 0.41791173815727234,
      "learning_rate": 0.00013944961750939644,
      "loss": 0.5685,
      "step": 1920
    },
    {
      "epoch": 2.1807909604519775,
      "grad_norm": 0.40155965089797974,
      "learning_rate": 0.00013872343447966033,
      "loss": 0.5385,
      "step": 1930
    },
    {
      "epoch": 2.1920903954802258,
      "grad_norm": 0.3540368974208832,
      "learning_rate": 0.00013799484116241382,
      "loss": 0.5601,
      "step": 1940
    },
    {
      "epoch": 2.2033898305084745,
      "grad_norm": 0.45144760608673096,
      "learning_rate": 0.00013726388290795697,
      "loss": 0.5547,
      "step": 1950
    },
    {
      "epoch": 2.214689265536723,
      "grad_norm": 0.4121219515800476,
      "learning_rate": 0.00013653060521379216,
      "loss": 0.5627,
      "step": 1960
    },
    {
      "epoch": 2.225988700564972,
      "grad_norm": 0.5211389660835266,
      "learning_rate": 0.0001357950537217921,
      "loss": 0.5797,
      "step": 1970
    },
    {
      "epoch": 2.23728813559322,
      "grad_norm": 0.5209491848945618,
      "learning_rate": 0.00013505727421535892,
      "loss": 0.5644,
      "step": 1980
    },
    {
      "epoch": 2.248587570621469,
      "grad_norm": 0.40179020166397095,
      "learning_rate": 0.00013431731261657462,
      "loss": 0.5446,
      "step": 1990
    },
    {
      "epoch": 2.2598870056497176,
      "grad_norm": 0.45757922530174255,
      "learning_rate": 0.0001335752149833424,
      "loss": 0.5528,
      "step": 2000
    },
    {
      "epoch": 2.2598870056497176,
      "eval_loss": 0.731483519077301,
      "eval_runtime": 16.8378,
      "eval_samples_per_second": 88.551,
      "eval_steps_per_second": 11.106,
      "step": 2000
    },
    {
      "epoch": 2.2711864406779663,
      "grad_norm": 0.5437088012695312,
      "learning_rate": 0.00013283102750652007,
      "loss": 0.5476,
      "step": 2010
    },
    {
      "epoch": 2.2824858757062145,
      "grad_norm": 0.5207261443138123,
      "learning_rate": 0.00013208479650704503,
      "loss": 0.5839,
      "step": 2020
    },
    {
      "epoch": 2.2937853107344632,
      "grad_norm": 0.47627148032188416,
      "learning_rate": 0.00013133656843305078,
      "loss": 0.5791,
      "step": 2030
    },
    {
      "epoch": 2.305084745762712,
      "grad_norm": 0.4785435199737549,
      "learning_rate": 0.00013058638985697618,
      "loss": 0.5389,
      "step": 2040
    },
    {
      "epoch": 2.3163841807909606,
      "grad_norm": 0.40203753113746643,
      "learning_rate": 0.0001298343074726663,
      "loss": 0.5417,
      "step": 2050
    },
    {
      "epoch": 2.327683615819209,
      "grad_norm": 0.6017877459526062,
      "learning_rate": 0.00012908036809246623,
      "loss": 0.544,
      "step": 2060
    },
    {
      "epoch": 2.3389830508474576,
      "grad_norm": 0.5637748837471008,
      "learning_rate": 0.0001283246186443073,
      "loss": 0.5429,
      "step": 2070
    },
    {
      "epoch": 2.3502824858757063,
      "grad_norm": 0.4815099835395813,
      "learning_rate": 0.00012756710616878593,
      "loss": 0.5466,
      "step": 2080
    },
    {
      "epoch": 2.361581920903955,
      "grad_norm": 0.4496362805366516,
      "learning_rate": 0.00012680787781623582,
      "loss": 0.5894,
      "step": 2090
    },
    {
      "epoch": 2.3728813559322033,
      "grad_norm": 0.4290124475955963,
      "learning_rate": 0.00012604698084379322,
      "loss": 0.5504,
      "step": 2100
    },
    {
      "epoch": 2.384180790960452,
      "grad_norm": 0.5070333480834961,
      "learning_rate": 0.00012528446261245515,
      "loss": 0.5522,
      "step": 2110
    },
    {
      "epoch": 2.3954802259887007,
      "grad_norm": 0.5735118985176086,
      "learning_rate": 0.00012452037058413194,
      "loss": 0.5674,
      "step": 2120
    },
    {
      "epoch": 2.406779661016949,
      "grad_norm": 0.40572336316108704,
      "learning_rate": 0.00012375475231869249,
      "loss": 0.5814,
      "step": 2130
    },
    {
      "epoch": 2.4180790960451977,
      "grad_norm": 0.5074964165687561,
      "learning_rate": 0.00012298765547100445,
      "loss": 0.5663,
      "step": 2140
    },
    {
      "epoch": 2.4293785310734464,
      "grad_norm": 0.445563942193985,
      "learning_rate": 0.0001222191277879678,
      "loss": 0.5476,
      "step": 2150
    },
    {
      "epoch": 2.440677966101695,
      "grad_norm": 0.5029497742652893,
      "learning_rate": 0.00012144921710554278,
      "loss": 0.5818,
      "step": 2160
    },
    {
      "epoch": 2.4519774011299433,
      "grad_norm": 0.4790378212928772,
      "learning_rate": 0.00012067797134577275,
      "loss": 0.5878,
      "step": 2170
    },
    {
      "epoch": 2.463276836158192,
      "grad_norm": 0.43805941939353943,
      "learning_rate": 0.00011990543851380094,
      "loss": 0.5602,
      "step": 2180
    },
    {
      "epoch": 2.4745762711864407,
      "grad_norm": 0.49226316809654236,
      "learning_rate": 0.00011913166669488278,
      "loss": 0.5692,
      "step": 2190
    },
    {
      "epoch": 2.4858757062146895,
      "grad_norm": 0.44978803396224976,
      "learning_rate": 0.00011835670405139272,
      "loss": 0.5698,
      "step": 2200
    },
    {
      "epoch": 2.4971751412429377,
      "grad_norm": 0.4665353000164032,
      "learning_rate": 0.00011758059881982639,
      "loss": 0.567,
      "step": 2210
    },
    {
      "epoch": 2.5084745762711864,
      "grad_norm": 0.3619357943534851,
      "learning_rate": 0.00011680339930779843,
      "loss": 0.5528,
      "step": 2220
    },
    {
      "epoch": 2.519774011299435,
      "grad_norm": 0.5655903220176697,
      "learning_rate": 0.00011602515389103529,
      "loss": 0.57,
      "step": 2230
    },
    {
      "epoch": 2.5310734463276834,
      "grad_norm": 0.5191211700439453,
      "learning_rate": 0.00011524591101036446,
      "loss": 0.5731,
      "step": 2240
    },
    {
      "epoch": 2.542372881355932,
      "grad_norm": 0.4011457562446594,
      "learning_rate": 0.00011446571916869917,
      "loss": 0.5603,
      "step": 2250
    },
    {
      "epoch": 2.553672316384181,
      "grad_norm": 0.4324322044849396,
      "learning_rate": 0.00011368462692801944,
      "loss": 0.5508,
      "step": 2260
    },
    {
      "epoch": 2.5649717514124295,
      "grad_norm": 0.44678226113319397,
      "learning_rate": 0.00011290268290634943,
      "loss": 0.5695,
      "step": 2270
    },
    {
      "epoch": 2.576271186440678,
      "grad_norm": 0.7076367139816284,
      "learning_rate": 0.00011211993577473121,
      "loss": 0.5812,
      "step": 2280
    },
    {
      "epoch": 2.5875706214689265,
      "grad_norm": 0.36905479431152344,
      "learning_rate": 0.00011133643425419531,
      "loss": 0.5775,
      "step": 2290
    },
    {
      "epoch": 2.598870056497175,
      "grad_norm": 0.4640151560306549,
      "learning_rate": 0.0001105522271127283,
      "loss": 0.5932,
      "step": 2300
    },
    {
      "epoch": 2.610169491525424,
      "grad_norm": 0.541059672832489,
      "learning_rate": 0.00010976736316223698,
      "loss": 0.5803,
      "step": 2310
    },
    {
      "epoch": 2.621468926553672,
      "grad_norm": 0.46233227849006653,
      "learning_rate": 0.00010898189125551052,
      "loss": 0.5939,
      "step": 2320
    },
    {
      "epoch": 2.632768361581921,
      "grad_norm": 0.49569135904312134,
      "learning_rate": 0.0001081958602831794,
      "loss": 0.5762,
      "step": 2330
    },
    {
      "epoch": 2.6440677966101696,
      "grad_norm": 0.46241089701652527,
      "learning_rate": 0.00010740931917067231,
      "loss": 0.5455,
      "step": 2340
    },
    {
      "epoch": 2.655367231638418,
      "grad_norm": 0.4825233221054077,
      "learning_rate": 0.00010662231687517112,
      "loss": 0.5697,
      "step": 2350
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.3972626328468323,
      "learning_rate": 0.00010583490238256322,
      "loss": 0.5544,
      "step": 2360
    },
    {
      "epoch": 2.6779661016949152,
      "grad_norm": 0.458835244178772,
      "learning_rate": 0.00010504712470439266,
      "loss": 0.5603,
      "step": 2370
    },
    {
      "epoch": 2.689265536723164,
      "grad_norm": 0.5480552911758423,
      "learning_rate": 0.00010425903287480969,
      "loss": 0.5555,
      "step": 2380
    },
    {
      "epoch": 2.7005649717514126,
      "grad_norm": 0.5089293122291565,
      "learning_rate": 0.0001034706759475182,
      "loss": 0.5374,
      "step": 2390
    },
    {
      "epoch": 2.711864406779661,
      "grad_norm": 0.39211511611938477,
      "learning_rate": 0.00010268210299272294,
      "loss": 0.5499,
      "step": 2400
    },
    {
      "epoch": 2.7231638418079096,
      "grad_norm": 0.4466584324836731,
      "learning_rate": 0.00010189336309407492,
      "loss": 0.5692,
      "step": 2410
    },
    {
      "epoch": 2.7344632768361583,
      "grad_norm": 0.4684606194496155,
      "learning_rate": 0.00010110450534561629,
      "loss": 0.5858,
      "step": 2420
    },
    {
      "epoch": 2.7457627118644066,
      "grad_norm": 0.4925701320171356,
      "learning_rate": 0.00010031557884872471,
      "loss": 0.5577,
      "step": 2430
    },
    {
      "epoch": 2.7570621468926553,
      "grad_norm": 0.5621470212936401,
      "learning_rate": 9.952663270905691e-05,
      "loss": 0.543,
      "step": 2440
    },
    {
      "epoch": 2.768361581920904,
      "grad_norm": 0.5334658622741699,
      "learning_rate": 9.873771603349229e-05,
      "loss": 0.5673,
      "step": 2450
    },
    {
      "epoch": 2.7796610169491527,
      "grad_norm": 0.4116109311580658,
      "learning_rate": 9.79488779270763e-05,
      "loss": 0.5759,
      "step": 2460
    },
    {
      "epoch": 2.7909604519774014,
      "grad_norm": 0.579428493976593,
      "learning_rate": 9.716016748996397e-05,
      "loss": 0.5804,
      "step": 2470
    },
    {
      "epoch": 2.8022598870056497,
      "grad_norm": 0.6294684410095215,
      "learning_rate": 9.637163381436368e-05,
      "loss": 0.573,
      "step": 2480
    },
    {
      "epoch": 2.8135593220338984,
      "grad_norm": 0.4215356409549713,
      "learning_rate": 9.558332598148156e-05,
      "loss": 0.5702,
      "step": 2490
    },
    {
      "epoch": 2.824858757062147,
      "grad_norm": 0.6607437133789062,
      "learning_rate": 9.479529305846652e-05,
      "loss": 0.5556,
      "step": 2500
    },
    {
      "epoch": 2.8361581920903953,
      "grad_norm": 0.4144829213619232,
      "learning_rate": 9.400758409535597e-05,
      "loss": 0.5417,
      "step": 2510
    },
    {
      "epoch": 2.847457627118644,
      "grad_norm": 0.47953999042510986,
      "learning_rate": 9.322024812202295e-05,
      "loss": 0.5531,
      "step": 2520
    },
    {
      "epoch": 2.8587570621468927,
      "grad_norm": 0.4800129532814026,
      "learning_rate": 9.243333414512428e-05,
      "loss": 0.5692,
      "step": 2530
    },
    {
      "epoch": 2.870056497175141,
      "grad_norm": 0.6145460605621338,
      "learning_rate": 9.164689114505007e-05,
      "loss": 0.556,
      "step": 2540
    },
    {
      "epoch": 2.8813559322033897,
      "grad_norm": 0.45880794525146484,
      "learning_rate": 9.086096807287525e-05,
      "loss": 0.547,
      "step": 2550
    },
    {
      "epoch": 2.8926553672316384,
      "grad_norm": 0.4852420389652252,
      "learning_rate": 9.007561384731249e-05,
      "loss": 0.5843,
      "step": 2560
    },
    {
      "epoch": 2.903954802259887,
      "grad_norm": 0.4079766571521759,
      "learning_rate": 8.929087735166738e-05,
      "loss": 0.5642,
      "step": 2570
    },
    {
      "epoch": 2.915254237288136,
      "grad_norm": 0.41701099276542664,
      "learning_rate": 8.850680743079576e-05,
      "loss": 0.5191,
      "step": 2580
    },
    {
      "epoch": 2.926553672316384,
      "grad_norm": 0.51291823387146,
      "learning_rate": 8.772345288806338e-05,
      "loss": 0.5823,
      "step": 2590
    },
    {
      "epoch": 2.937853107344633,
      "grad_norm": 0.3969896137714386,
      "learning_rate": 8.694086248230836e-05,
      "loss": 0.5673,
      "step": 2600
    },
    {
      "epoch": 2.9491525423728815,
      "grad_norm": 0.5091503858566284,
      "learning_rate": 8.615908492480598e-05,
      "loss": 0.5555,
      "step": 2610
    },
    {
      "epoch": 2.9604519774011298,
      "grad_norm": 0.5286157131195068,
      "learning_rate": 8.537816887623706e-05,
      "loss": 0.5883,
      "step": 2620
    },
    {
      "epoch": 2.9717514124293785,
      "grad_norm": 0.42479345202445984,
      "learning_rate": 8.459816294365892e-05,
      "loss": 0.6002,
      "step": 2630
    },
    {
      "epoch": 2.983050847457627,
      "grad_norm": 0.6383501291275024,
      "learning_rate": 8.381911567747986e-05,
      "loss": 0.5751,
      "step": 2640
    },
    {
      "epoch": 2.994350282485876,
      "grad_norm": 0.4221038520336151,
      "learning_rate": 8.304107556843739e-05,
      "loss": 0.5583,
      "step": 2650
    },
    {
      "epoch": 3.005649717514124,
      "grad_norm": 0.46783480048179626,
      "learning_rate": 8.22640910445799e-05,
      "loss": 0.5075,
      "step": 2660
    },
    {
      "epoch": 3.016949152542373,
      "grad_norm": 0.48267218470573425,
      "learning_rate": 8.14882104682522e-05,
      "loss": 0.4768,
      "step": 2670
    },
    {
      "epoch": 3.0282485875706215,
      "grad_norm": 0.3863345682621002,
      "learning_rate": 8.07134821330855e-05,
      "loss": 0.4959,
      "step": 2680
    },
    {
      "epoch": 3.0395480225988702,
      "grad_norm": 0.5208786129951477,
      "learning_rate": 7.993995426099132e-05,
      "loss": 0.4765,
      "step": 2690
    },
    {
      "epoch": 3.0508474576271185,
      "grad_norm": 0.4619675874710083,
      "learning_rate": 7.916767499915998e-05,
      "loss": 0.4615,
      "step": 2700
    },
    {
      "epoch": 3.062146892655367,
      "grad_norm": 0.4720297157764435,
      "learning_rate": 7.839669241706368e-05,
      "loss": 0.495,
      "step": 2710
    },
    {
      "epoch": 3.073446327683616,
      "grad_norm": 0.5332727432250977,
      "learning_rate": 7.762705450346462e-05,
      "loss": 0.486,
      "step": 2720
    },
    {
      "epoch": 3.084745762711864,
      "grad_norm": 0.5170637965202332,
      "learning_rate": 7.685880916342795e-05,
      "loss": 0.4816,
      "step": 2730
    },
    {
      "epoch": 3.096045197740113,
      "grad_norm": 0.47557196021080017,
      "learning_rate": 7.609200421533991e-05,
      "loss": 0.4706,
      "step": 2740
    },
    {
      "epoch": 3.1073446327683616,
      "grad_norm": 0.7031615376472473,
      "learning_rate": 7.532668738793159e-05,
      "loss": 0.479,
      "step": 2750
    },
    {
      "epoch": 3.1186440677966103,
      "grad_norm": 0.4733956456184387,
      "learning_rate": 7.456290631730795e-05,
      "loss": 0.5023,
      "step": 2760
    },
    {
      "epoch": 3.1299435028248586,
      "grad_norm": 0.5425024032592773,
      "learning_rate": 7.380070854398292e-05,
      "loss": 0.477,
      "step": 2770
    },
    {
      "epoch": 3.1412429378531073,
      "grad_norm": 0.4106122851371765,
      "learning_rate": 7.304014150992016e-05,
      "loss": 0.511,
      "step": 2780
    },
    {
      "epoch": 3.152542372881356,
      "grad_norm": 0.580517053604126,
      "learning_rate": 7.228125255558026e-05,
      "loss": 0.4797,
      "step": 2790
    },
    {
      "epoch": 3.1638418079096047,
      "grad_norm": 0.6407458186149597,
      "learning_rate": 7.152408891697406e-05,
      "loss": 0.4908,
      "step": 2800
    },
    {
      "epoch": 3.175141242937853,
      "grad_norm": 0.6218848824501038,
      "learning_rate": 7.076869772272231e-05,
      "loss": 0.4944,
      "step": 2810
    },
    {
      "epoch": 3.1864406779661016,
      "grad_norm": 0.49968206882476807,
      "learning_rate": 7.001512599112254e-05,
      "loss": 0.4764,
      "step": 2820
    },
    {
      "epoch": 3.1977401129943503,
      "grad_norm": 0.465017706155777,
      "learning_rate": 6.926342062722223e-05,
      "loss": 0.4917,
      "step": 2830
    },
    {
      "epoch": 3.209039548022599,
      "grad_norm": 0.49751996994018555,
      "learning_rate": 6.851362841989926e-05,
      "loss": 0.5105,
      "step": 2840
    },
    {
      "epoch": 3.2203389830508473,
      "grad_norm": 0.5394355654716492,
      "learning_rate": 6.776579603894983e-05,
      "loss": 0.4888,
      "step": 2850
    },
    {
      "epoch": 3.231638418079096,
      "grad_norm": 0.628495454788208,
      "learning_rate": 6.701997003218334e-05,
      "loss": 0.4597,
      "step": 2860
    },
    {
      "epoch": 3.2429378531073447,
      "grad_norm": 0.544737696647644,
      "learning_rate": 6.627619682252514e-05,
      "loss": 0.5198,
      "step": 2870
    },
    {
      "epoch": 3.2542372881355934,
      "grad_norm": 0.3959389626979828,
      "learning_rate": 6.553452270512708e-05,
      "loss": 0.4881,
      "step": 2880
    },
    {
      "epoch": 3.2655367231638417,
      "grad_norm": 0.7966346740722656,
      "learning_rate": 6.47949938444858e-05,
      "loss": 0.4839,
      "step": 2890
    },
    {
      "epoch": 3.2768361581920904,
      "grad_norm": 0.5471003651618958,
      "learning_rate": 6.405765627156934e-05,
      "loss": 0.4953,
      "step": 2900
    },
    {
      "epoch": 3.288135593220339,
      "grad_norm": 0.6175327301025391,
      "learning_rate": 6.332255588095211e-05,
      "loss": 0.4936,
      "step": 2910
    },
    {
      "epoch": 3.2994350282485874,
      "grad_norm": 0.5062096118927002,
      "learning_rate": 6.258973842795803e-05,
      "loss": 0.4927,
      "step": 2920
    },
    {
      "epoch": 3.310734463276836,
      "grad_norm": 0.5980800986289978,
      "learning_rate": 6.185924952581286e-05,
      "loss": 0.4908,
      "step": 2930
    },
    {
      "epoch": 3.3220338983050848,
      "grad_norm": 0.47181573510169983,
      "learning_rate": 6.11311346428046e-05,
      "loss": 0.4938,
      "step": 2940
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.4729149341583252,
      "learning_rate": 6.040543909945393e-05,
      "loss": 0.4795,
      "step": 2950
    },
    {
      "epoch": 3.3446327683615817,
      "grad_norm": 0.4721032977104187,
      "learning_rate": 5.968220806569289e-05,
      "loss": 0.4821,
      "step": 2960
    },
    {
      "epoch": 3.3559322033898304,
      "grad_norm": 0.49077048897743225,
      "learning_rate": 5.89614865580534e-05,
      "loss": 0.4765,
      "step": 2970
    },
    {
      "epoch": 3.367231638418079,
      "grad_norm": 0.4721141755580902,
      "learning_rate": 5.8243319436865554e-05,
      "loss": 0.4622,
      "step": 2980
    },
    {
      "epoch": 3.378531073446328,
      "grad_norm": 0.5188248157501221,
      "learning_rate": 5.7527751403464935e-05,
      "loss": 0.4966,
      "step": 2990
    },
    {
      "epoch": 3.389830508474576,
      "grad_norm": 0.5185617208480835,
      "learning_rate": 5.681482699741052e-05,
      "loss": 0.4847,
      "step": 3000
    },
    {
      "epoch": 3.389830508474576,
      "eval_loss": 0.7709711194038391,
      "eval_runtime": 16.7631,
      "eval_samples_per_second": 88.945,
      "eval_steps_per_second": 11.155,
      "step": 3000
    },
    {
      "epoch": 3.401129943502825,
      "grad_norm": 0.44520846009254456,
      "learning_rate": 5.610459059371229e-05,
      "loss": 0.4904,
      "step": 3010
    },
    {
      "epoch": 3.4124293785310735,
      "grad_norm": 0.4945251941680908,
      "learning_rate": 5.53970864000693e-05,
      "loss": 0.486,
      "step": 3020
    },
    {
      "epoch": 3.423728813559322,
      "grad_norm": 0.45569783449172974,
      "learning_rate": 5.4692358454117596e-05,
      "loss": 0.485,
      "step": 3030
    },
    {
      "epoch": 3.4350282485875705,
      "grad_norm": 0.6237002015113831,
      "learning_rate": 5.399045062068977e-05,
      "loss": 0.5055,
      "step": 3040
    },
    {
      "epoch": 3.446327683615819,
      "grad_norm": 0.4695291519165039,
      "learning_rate": 5.329140658908423e-05,
      "loss": 0.5047,
      "step": 3050
    },
    {
      "epoch": 3.457627118644068,
      "grad_norm": 0.5210557579994202,
      "learning_rate": 5.259526987034593e-05,
      "loss": 0.5181,
      "step": 3060
    },
    {
      "epoch": 3.4689265536723166,
      "grad_norm": 0.5110927224159241,
      "learning_rate": 5.190208379455802e-05,
      "loss": 0.4901,
      "step": 3070
    },
    {
      "epoch": 3.480225988700565,
      "grad_norm": 0.5741479396820068,
      "learning_rate": 5.121189150814507e-05,
      "loss": 0.4919,
      "step": 3080
    },
    {
      "epoch": 3.4915254237288136,
      "grad_norm": 0.49151936173439026,
      "learning_rate": 5.052473597118715e-05,
      "loss": 0.4954,
      "step": 3090
    },
    {
      "epoch": 3.5028248587570623,
      "grad_norm": 0.5301800966262817,
      "learning_rate": 4.984065995474601e-05,
      "loss": 0.4966,
      "step": 3100
    },
    {
      "epoch": 3.5141242937853105,
      "grad_norm": 0.5711139440536499,
      "learning_rate": 4.915970603820293e-05,
      "loss": 0.4907,
      "step": 3110
    },
    {
      "epoch": 3.5254237288135593,
      "grad_norm": 0.6174890995025635,
      "learning_rate": 4.84819166066082e-05,
      "loss": 0.4957,
      "step": 3120
    },
    {
      "epoch": 3.536723163841808,
      "grad_norm": 0.5370630621910095,
      "learning_rate": 4.780733384804312e-05,
      "loss": 0.5131,
      "step": 3130
    },
    {
      "epoch": 3.548022598870056,
      "grad_norm": 0.4994024634361267,
      "learning_rate": 4.713599975099399e-05,
      "loss": 0.5214,
      "step": 3140
    },
    {
      "epoch": 3.559322033898305,
      "grad_norm": 0.8335681557655334,
      "learning_rate": 4.646795610173864e-05,
      "loss": 0.498,
      "step": 3150
    },
    {
      "epoch": 3.5706214689265536,
      "grad_norm": 0.5772273540496826,
      "learning_rate": 4.5803244481745275e-05,
      "loss": 0.4889,
      "step": 3160
    },
    {
      "epoch": 3.5819209039548023,
      "grad_norm": 0.725974977016449,
      "learning_rate": 4.514190626508465e-05,
      "loss": 0.4889,
      "step": 3170
    },
    {
      "epoch": 3.593220338983051,
      "grad_norm": 0.4802244305610657,
      "learning_rate": 4.448398261585459e-05,
      "loss": 0.4688,
      "step": 3180
    },
    {
      "epoch": 3.6045197740112993,
      "grad_norm": 1.078092098236084,
      "learning_rate": 4.3829514485617754e-05,
      "loss": 0.4719,
      "step": 3190
    },
    {
      "epoch": 3.615819209039548,
      "grad_norm": 0.5553079843521118,
      "learning_rate": 4.31785426108527e-05,
      "loss": 0.4816,
      "step": 3200
    },
    {
      "epoch": 3.6271186440677967,
      "grad_norm": 0.48087623715400696,
      "learning_rate": 4.253110751041846e-05,
      "loss": 0.4894,
      "step": 3210
    },
    {
      "epoch": 3.638418079096045,
      "grad_norm": 0.6121565103530884,
      "learning_rate": 4.1887249483032195e-05,
      "loss": 0.4978,
      "step": 3220
    },
    {
      "epoch": 3.6497175141242937,
      "grad_norm": 0.616296648979187,
      "learning_rate": 4.1247008604761096e-05,
      "loss": 0.4834,
      "step": 3230
    },
    {
      "epoch": 3.6610169491525424,
      "grad_norm": 0.5769548416137695,
      "learning_rate": 4.061042472652786e-05,
      "loss": 0.5177,
      "step": 3240
    },
    {
      "epoch": 3.672316384180791,
      "grad_norm": 0.5508149862289429,
      "learning_rate": 3.997753747163014e-05,
      "loss": 0.5206,
      "step": 3250
    },
    {
      "epoch": 3.68361581920904,
      "grad_norm": 0.4783056080341339,
      "learning_rate": 3.934838623327427e-05,
      "loss": 0.4996,
      "step": 3260
    },
    {
      "epoch": 3.694915254237288,
      "grad_norm": 0.9651548266410828,
      "learning_rate": 3.872301017212337e-05,
      "loss": 0.4741,
      "step": 3270
    },
    {
      "epoch": 3.7062146892655368,
      "grad_norm": 0.5215980410575867,
      "learning_rate": 3.8101448213859846e-05,
      "loss": 0.4757,
      "step": 3280
    },
    {
      "epoch": 3.7175141242937855,
      "grad_norm": 0.4626060724258423,
      "learning_rate": 3.7483739046762326e-05,
      "loss": 0.4887,
      "step": 3290
    },
    {
      "epoch": 3.7288135593220337,
      "grad_norm": 0.5218005180358887,
      "learning_rate": 3.6869921119297714e-05,
      "loss": 0.4603,
      "step": 3300
    },
    {
      "epoch": 3.7401129943502824,
      "grad_norm": 0.557963490486145,
      "learning_rate": 3.626003263772813e-05,
      "loss": 0.5009,
      "step": 3310
    },
    {
      "epoch": 3.751412429378531,
      "grad_norm": 0.7049193382263184,
      "learning_rate": 3.565411156373252e-05,
      "loss": 0.4982,
      "step": 3320
    },
    {
      "epoch": 3.7627118644067794,
      "grad_norm": 0.5267760753631592,
      "learning_rate": 3.505219561204398e-05,
      "loss": 0.4716,
      "step": 3330
    },
    {
      "epoch": 3.774011299435028,
      "grad_norm": 0.5169788002967834,
      "learning_rate": 3.445432224810232e-05,
      "loss": 0.4992,
      "step": 3340
    },
    {
      "epoch": 3.785310734463277,
      "grad_norm": 0.5268625020980835,
      "learning_rate": 3.386052868572186e-05,
| "loss": 0.5063, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 3.7966101694915255, | |
| "grad_norm": 0.5163342356681824, | |
| "learning_rate": 3.3270851884775203e-05, | |
| "loss": 0.5045, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 3.8079096045197742, | |
| "grad_norm": 0.6581404805183411, | |
| "learning_rate": 3.268532854889287e-05, | |
| "loss": 0.4818, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 3.8192090395480225, | |
| "grad_norm": 0.5500733256340027, | |
| "learning_rate": 3.210399512317849e-05, | |
| "loss": 0.5064, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 3.830508474576271, | |
| "grad_norm": 0.530192494392395, | |
| "learning_rate": 3.1526887791940395e-05, | |
| "loss": 0.4761, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 3.84180790960452, | |
| "grad_norm": 0.5225520133972168, | |
| "learning_rate": 3.095404247643955e-05, | |
| "loss": 0.4874, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 3.853107344632768, | |
| "grad_norm": 0.52415531873703, | |
| "learning_rate": 3.038549483265348e-05, | |
| "loss": 0.4748, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 3.864406779661017, | |
| "grad_norm": 0.6530227661132812, | |
| "learning_rate": 2.982128024905697e-05, | |
| "loss": 0.4841, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 3.8757062146892656, | |
| "grad_norm": 0.5648587346076965, | |
| "learning_rate": 2.9261433844419306e-05, | |
| "loss": 0.4687, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.8870056497175143, | |
| "grad_norm": 0.957992672920227, | |
| "learning_rate": 2.870599046561858e-05, | |
| "loss": 0.489, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.898305084745763, | |
| "grad_norm": 0.5714943408966064, | |
| "learning_rate": 2.8154984685472373e-05, | |
| "loss": 0.4886, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.9096045197740112, | |
| "grad_norm": 0.48912012577056885, | |
| "learning_rate": 2.7608450800586016e-05, | |
| "loss": 0.5028, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.92090395480226, | |
| "grad_norm": 0.5882663726806641, | |
| "learning_rate": 2.7066422829217875e-05, | |
| "loss": 0.5216, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.9322033898305087, | |
| "grad_norm": 0.5730474591255188, | |
| "learning_rate": 2.6528934509161763e-05, | |
| "loss": 0.4596, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.943502824858757, | |
| "grad_norm": 0.6839239001274109, | |
| "learning_rate": 2.599601929564709e-05, | |
| "loss": 0.4944, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 3.9548022598870056, | |
| "grad_norm": 0.5317196249961853, | |
| "learning_rate": 2.5467710359256557e-05, | |
| "loss": 0.4779, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 3.9661016949152543, | |
| "grad_norm": 0.5707146525382996, | |
| "learning_rate": 2.4944040583861284e-05, | |
| "loss": 0.4878, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 3.9774011299435026, | |
| "grad_norm": 0.781765341758728, | |
| "learning_rate": 2.4425042564574184e-05, | |
| "loss": 0.4719, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 3.9887005649717513, | |
| "grad_norm": 0.6404000520706177, | |
| "learning_rate": 2.3910748605721066e-05, | |
| "loss": 0.4983, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.6482042074203491, | |
| "learning_rate": 2.3401190718829945e-05, | |
| "loss": 0.4916, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 4.011299435028248, | |
| "grad_norm": 0.4203549027442932, | |
| "learning_rate": 2.289640062063839e-05, | |
| "loss": 0.4653, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 4.022598870056497, | |
| "grad_norm": 0.48745134472846985, | |
| "learning_rate": 2.239640973111945e-05, | |
| "loss": 0.4595, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 4.033898305084746, | |
| "grad_norm": 0.5251093506813049, | |
| "learning_rate": 2.1901249171526063e-05, | |
| "loss": 0.4471, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 4.045197740112994, | |
| "grad_norm": 0.6273601651191711, | |
| "learning_rate": 2.1410949762453735e-05, | |
| "loss": 0.437, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 4.056497175141243, | |
| "grad_norm": 0.5651510953903198, | |
| "learning_rate": 2.0925542021922283e-05, | |
| "loss": 0.4194, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 4.067796610169491, | |
| "grad_norm": 0.7425692081451416, | |
| "learning_rate": 2.0445056163476374e-05, | |
| "loss": 0.4568, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 4.0790960451977405, | |
| "grad_norm": 0.49514123797416687, | |
| "learning_rate": 1.9969522094304704e-05, | |
| "loss": 0.4673, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 4.090395480225989, | |
| "grad_norm": 0.7164167761802673, | |
| "learning_rate": 1.949896941337861e-05, | |
| "loss": 0.4507, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 4.101694915254237, | |
| "grad_norm": 0.5990516543388367, | |
| "learning_rate": 1.9033427409609738e-05, | |
| "loss": 0.4271, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 4.112994350282486, | |
| "grad_norm": 0.4122953414916992, | |
| "learning_rate": 1.8572925060026868e-05, | |
| "loss": 0.4331, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 4.124293785310734, | |
| "grad_norm": 0.5930763483047485, | |
| "learning_rate": 1.8117491027972476e-05, | |
| "loss": 0.4388, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 4.135593220338983, | |
| "grad_norm": 0.5768269300460815, | |
| "learning_rate": 1.766715366131837e-05, | |
| "loss": 0.4432, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 4.146892655367232, | |
| "grad_norm": 1.2128398418426514, | |
| "learning_rate": 1.722194099070148e-05, | |
| "loss": 0.4445, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 4.15819209039548, | |
| "grad_norm": 0.5718667507171631, | |
| "learning_rate": 1.678188072777891e-05, | |
| "loss": 0.4348, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 4.169491525423728, | |
| "grad_norm": 0.6063762903213501, | |
| "learning_rate": 1.6347000263503176e-05, | |
| "loss": 0.4293, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 4.1807909604519775, | |
| "grad_norm": 0.5244751572608948, | |
| "learning_rate": 1.591732666641732e-05, | |
| "loss": 0.4469, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 4.192090395480226, | |
| "grad_norm": 0.6261086463928223, | |
| "learning_rate": 1.5492886680969963e-05, | |
| "loss": 0.4415, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 4.203389830508475, | |
| "grad_norm": 1.0888699293136597, | |
| "learning_rate": 1.5073706725850679e-05, | |
| "loss": 0.4365, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 4.214689265536723, | |
| "grad_norm": 0.5183214545249939, | |
| "learning_rate": 1.4659812892345692e-05, | |
| "loss": 0.4717, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 4.2259887005649714, | |
| "grad_norm": 0.5120430588722229, | |
| "learning_rate": 1.425123094271369e-05, | |
| "loss": 0.4484, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 4.237288135593221, | |
| "grad_norm": 0.4748518764972687, | |
| "learning_rate": 1.3847986308582384e-05, | |
| "loss": 0.4566, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 4.248587570621469, | |
| "grad_norm": 0.5438849329948425, | |
| "learning_rate": 1.3450104089365611e-05, | |
| "loss": 0.4768, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 4.259887005649717, | |
| "grad_norm": 0.4719776213169098, | |
| "learning_rate": 1.3057609050700881e-05, | |
| "loss": 0.4657, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 4.271186440677966, | |
| "grad_norm": 0.5300427675247192, | |
| "learning_rate": 1.267052562290808e-05, | |
| "loss": 0.4412, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 4.2824858757062145, | |
| "grad_norm": 0.6340104937553406, | |
| "learning_rate": 1.2288877899468653e-05, | |
| "loss": 0.4528, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 4.293785310734464, | |
| "grad_norm": 0.5274319648742676, | |
| "learning_rate": 1.1912689635526086e-05, | |
| "loss": 0.4466, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 4.305084745762712, | |
| "grad_norm": 0.5659282803535461, | |
| "learning_rate": 1.1541984246407189e-05, | |
| "loss": 0.4332, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 4.31638418079096, | |
| "grad_norm": 0.5424573421478271, | |
| "learning_rate": 1.1176784806164676e-05, | |
| "loss": 0.4223, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 4.327683615819209, | |
| "grad_norm": 0.5446699261665344, | |
| "learning_rate": 1.081711404614104e-05, | |
| "loss": 0.4335, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 4.338983050847458, | |
| "grad_norm": 0.4954063594341278, | |
| "learning_rate": 1.0462994353553512e-05, | |
| "loss": 0.4221, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 4.350282485875706, | |
| "grad_norm": 0.6320489048957825, | |
| "learning_rate": 1.0114447770100688e-05, | |
| "loss": 0.43, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 4.361581920903955, | |
| "grad_norm": 0.5482968688011169, | |
| "learning_rate": 9.77149599059063e-06, | |
| "loss": 0.4407, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 4.372881355932203, | |
| "grad_norm": 0.6075878739356995, | |
| "learning_rate": 9.434160361590372e-06, | |
| "loss": 0.4434, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 4.3841807909604515, | |
| "grad_norm": 0.6049032211303711, | |
| "learning_rate": 9.102461880097302e-06, | |
| "loss": 0.4411, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 4.395480225988701, | |
| "grad_norm": 0.5798004865646362, | |
| "learning_rate": 8.776421192232298e-06, | |
| "loss": 0.431, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 4.406779661016949, | |
| "grad_norm": 0.5278260707855225, | |
| "learning_rate": 8.456058591954463e-06, | |
| "loss": 0.4422, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 4.418079096045198, | |
| "grad_norm": 0.568495512008667, | |
| "learning_rate": 8.141394019798177e-06, | |
| "loss": 0.4413, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 4.429378531073446, | |
| "grad_norm": 0.5533962845802307, | |
| "learning_rate": 7.832447061631688e-06, | |
| "loss": 0.4191, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 4.440677966101695, | |
| "grad_norm": 0.5724579691886902, | |
| "learning_rate": 7.529236947438256e-06, | |
| "loss": 0.4428, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 4.451977401129944, | |
| "grad_norm": 0.5295354723930359, | |
| "learning_rate": 7.231782550119015e-06, | |
| "loss": 0.4363, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 4.463276836158192, | |
| "grad_norm": 0.6352728605270386, | |
| "learning_rate": 6.940102384318314e-06, | |
| "loss": 0.4462, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 4.47457627118644, | |
| "grad_norm": 0.4653913080692291, | |
| "learning_rate": 6.654214605271414e-06, | |
| "loss": 0.4463, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.4858757062146895, | |
| "grad_norm": 0.5769869685173035, | |
| "learning_rate": 6.37413700767423e-06, | |
| "loss": 0.4388, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 4.497175141242938, | |
| "grad_norm": 0.4576704204082489, | |
| "learning_rate": 6.099887024575901e-06, | |
| "loss": 0.4466, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 4.508474576271187, | |
| "grad_norm": 0.5665444135665894, | |
| "learning_rate": 5.831481726293609e-06, | |
| "loss": 0.4628, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 4.519774011299435, | |
| "grad_norm": 0.527612566947937, | |
| "learning_rate": 5.568937819350084e-06, | |
| "loss": 0.4261, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.519774011299435, | |
| "eval_loss": 0.8213748335838318, | |
| "eval_runtime": 16.8138, | |
| "eval_samples_per_second": 88.677, | |
| "eval_steps_per_second": 11.122, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.531073446327683, | |
| "grad_norm": 0.668229877948761, | |
| "learning_rate": 5.312271645433697e-06, | |
| "loss": 0.4298, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 4.5423728813559325, | |
| "grad_norm": 0.5199172496795654, | |
| "learning_rate": 5.061499180381391e-06, | |
| "loss": 0.4547, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 4.553672316384181, | |
| "grad_norm": 0.5515559911727905, | |
| "learning_rate": 4.816636033184119e-06, | |
| "loss": 0.4172, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 4.564971751412429, | |
| "grad_norm": 0.648692786693573, | |
| "learning_rate": 4.577697445015472e-06, | |
| "loss": 0.4664, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 4.576271186440678, | |
| "grad_norm": 0.5000047087669373, | |
| "learning_rate": 4.3446982882828555e-06, | |
| "loss": 0.4401, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 4.5875706214689265, | |
| "grad_norm": 0.51460862159729, | |
| "learning_rate": 4.117653065701899e-06, | |
| "loss": 0.4297, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 4.598870056497175, | |
| "grad_norm": 0.5801824331283569, | |
| "learning_rate": 3.896575909393663e-06, | |
| "loss": 0.453, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 4.610169491525424, | |
| "grad_norm": 0.5613316297531128, | |
| "learning_rate": 3.6814805800050255e-06, | |
| "loss": 0.445, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 4.621468926553672, | |
| "grad_norm": 0.556430459022522, | |
| "learning_rate": 3.4723804658522318e-06, | |
| "loss": 0.4332, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 4.632768361581921, | |
| "grad_norm": 0.7290319204330444, | |
| "learning_rate": 3.269288582087493e-06, | |
| "loss": 0.4667, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 4.6440677966101696, | |
| "grad_norm": 0.5109266638755798, | |
| "learning_rate": 3.072217569888847e-06, | |
| "loss": 0.4484, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 4.655367231638418, | |
| "grad_norm": 0.5184862017631531, | |
| "learning_rate": 2.8811796956734418e-06, | |
| "loss": 0.4476, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 4.666666666666667, | |
| "grad_norm": 0.45906636118888855, | |
| "learning_rate": 2.696186850333893e-06, | |
| "loss": 0.4556, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 4.677966101694915, | |
| "grad_norm": 0.6825990080833435, | |
| "learning_rate": 2.5172505484982733e-06, | |
| "loss": 0.4361, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 4.6892655367231635, | |
| "grad_norm": 0.6751702427864075, | |
| "learning_rate": 2.3443819278132996e-06, | |
| "loss": 0.454, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 4.700564971751413, | |
| "grad_norm": 0.6289049983024597, | |
| "learning_rate": 2.17759174825114e-06, | |
| "loss": 0.4438, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 4.711864406779661, | |
| "grad_norm": 0.6290619969367981, | |
| "learning_rate": 2.016890391439674e-06, | |
| "loss": 0.4665, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 4.72316384180791, | |
| "grad_norm": 0.5864591002464294, | |
| "learning_rate": 1.8622878600162763e-06, | |
| "loss": 0.4477, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 4.734463276836158, | |
| "grad_norm": 0.558210015296936, | |
| "learning_rate": 1.7137937770052349e-06, | |
| "loss": 0.4605, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 4.745762711864407, | |
| "grad_norm": 0.6048043966293335, | |
| "learning_rate": 1.5714173852187875e-06, | |
| "loss": 0.4458, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 4.757062146892656, | |
| "grad_norm": 0.6476958990097046, | |
| "learning_rate": 1.4351675466817705e-06, | |
| "loss": 0.4311, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 4.768361581920904, | |
| "grad_norm": 0.6173336505889893, | |
| "learning_rate": 1.3050527420800928e-06, | |
| "loss": 0.4526, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 4.779661016949152, | |
| "grad_norm": 0.5486857295036316, | |
| "learning_rate": 1.1810810702327701e-06, | |
| "loss": 0.4407, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 4.790960451977401, | |
| "grad_norm": 0.4905402362346649, | |
| "learning_rate": 1.063260247587905e-06, | |
| "loss": 0.4367, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 4.80225988700565, | |
| "grad_norm": 0.6765434741973877, | |
| "learning_rate": 9.515976077423716e-07, | |
| "loss": 0.4438, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 4.813559322033898, | |
| "grad_norm": 0.5942672491073608, | |
| "learning_rate": 8.461001009852809e-07, | |
| "loss": 0.4326, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 4.824858757062147, | |
| "grad_norm": 0.5058199167251587, | |
| "learning_rate": 7.46774293865471e-07, | |
| "loss": 0.4258, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 4.836158192090395, | |
| "grad_norm": 0.5257803797721863, | |
| "learning_rate": 6.536263687827005e-07, | |
| "loss": 0.4377, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 4.847457627118644, | |
| "grad_norm": 0.5546150207519531, | |
| "learning_rate": 5.66662123602879e-07, | |
| "loss": 0.447, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 4.858757062146893, | |
| "grad_norm": 0.5133059024810791, | |
| "learning_rate": 4.858869712971669e-07, | |
| "loss": 0.4536, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 4.870056497175141, | |
| "grad_norm": 0.6438310742378235, | |
| "learning_rate": 4.113059396050445e-07, | |
| "loss": 0.4328, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 4.88135593220339, | |
| "grad_norm": 0.5640572309494019, | |
| "learning_rate": 3.4292367072140677e-07, | |
| "loss": 0.4629, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 4.892655367231638, | |
| "grad_norm": 0.6676624417304993, | |
| "learning_rate": 2.807444210075616e-07, | |
| "loss": 0.4347, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 4.903954802259887, | |
| "grad_norm": 0.45671963691711426, | |
| "learning_rate": 2.2477206072633038e-07, | |
| "loss": 0.4536, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 4.915254237288136, | |
| "grad_norm": 0.6115604043006897, | |
| "learning_rate": 1.7501007380115174e-07, | |
| "loss": 0.4453, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 4.926553672316384, | |
| "grad_norm": 0.7250502109527588, | |
| "learning_rate": 1.3146155759923285e-07, | |
| "loss": 0.4454, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 4.937853107344633, | |
| "grad_norm": 0.5707434415817261, | |
| "learning_rate": 9.412922273871471e-08, | |
| "loss": 0.4107, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 4.9491525423728815, | |
| "grad_norm": 0.6125264763832092, | |
| "learning_rate": 6.301539292001834e-08, | |
| "loss": 0.4355, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 4.96045197740113, | |
| "grad_norm": 0.6177554130554199, | |
| "learning_rate": 3.8122004781149246e-08, | |
| "loss": 0.4502, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 4.971751412429379, | |
| "grad_norm": 0.7295418381690979, | |
| "learning_rate": 1.9450607777204978e-08, | |
| "loss": 0.4426, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 4.983050847457627, | |
| "grad_norm": 0.5891022682189941, | |
| "learning_rate": 7.00236408388566e-09, | |
| "loss": 0.449, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 4.994350282485875, | |
| "grad_norm": 0.5898358225822449, | |
| "learning_rate": 7.780485252073533e-10, | |
| "loss": 0.4552, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "step": 4425, | |
| "total_flos": 1.2071933436588196e+18, | |
| "train_loss": 0.5702266787943867, | |
| "train_runtime": 4287.601, | |
| "train_samples_per_second": 33.024, | |
| "train_steps_per_second": 1.032 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4425, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.2071933436588196e+18, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
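
For reference, the log above is the `trainer_state.json` that `transformers.Trainer` writes alongside each checkpoint: `log_history` interleaves training records (keyed by `loss`) with evaluation records (keyed by `eval_loss`), and the final entry is a run summary with `train_runtime`, `total_flos`, and throughput figures. One pattern visible in the data itself: `eval_loss` rises from 0.771 at step 3000 to 0.821 at step 4000 while the training loss keeps falling into the 0.42–0.47 range, a typical overfitting signature. Below is a minimal sketch of how one might load the file and plot the two curves to see this; the file path and the use of matplotlib are assumptions for illustration, not part of the log:

```python
import json
import matplotlib.pyplot as plt

# Path is an assumption: Trainer saves this file as trainer_state.json
# inside the output / checkpoint directory.
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Training records carry "loss"; evaluation records carry "eval_loss".
# The final run-summary entry has neither key, so both filters skip it.
train = [(rec["step"], rec["loss"]) for rec in history if "loss" in rec]
evals = [(rec["step"], rec["eval_loss"]) for rec in history if "eval_loss" in rec]

plt.plot(*zip(*train), label="train loss")
plt.plot(*zip(*evals), "o-", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.tight_layout()
plt.show()
```

With the evaluation loss already climbing between the two evaluation points shown here (steps 3000 and 4000) while the training loss keeps improving, such a plot would argue for preferring the checkpoint saved at step 3000 over the final one.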