{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1044,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0009578544061302681, "grad_norm": 0.6597593171619324, "learning_rate": 9.523809523809525e-08, "loss": 1.1525, "step": 1},
    {"epoch": 0.004789272030651341, "grad_norm": 0.6607550915738474, "learning_rate": 4.7619047619047623e-07, "loss": 1.1632, "step": 5},
    {"epoch": 0.009578544061302681, "grad_norm": 0.6985874969645404, "learning_rate": 9.523809523809525e-07, "loss": 1.1753, "step": 10},
    {"epoch": 0.014367816091954023, "grad_norm": 0.6553768135986224, "learning_rate": 1.4285714285714286e-06, "loss": 1.1488, "step": 15},
    {"epoch": 0.019157088122605363, "grad_norm": 0.6087816631352209, "learning_rate": 1.904761904761905e-06, "loss": 1.1384, "step": 20},
    {"epoch": 0.023946360153256706, "grad_norm": 0.5225466109798246, "learning_rate": 2.380952380952381e-06, "loss": 1.122, "step": 25},
    {"epoch": 0.028735632183908046, "grad_norm": 0.5080052683286845, "learning_rate": 2.8571428571428573e-06, "loss": 1.1505, "step": 30},
    {"epoch": 0.033524904214559385, "grad_norm": 0.4341207873557241, "learning_rate": 3.3333333333333333e-06, "loss": 1.1339, "step": 35},
    {"epoch": 0.038314176245210725, "grad_norm": 0.3985137882747194, "learning_rate": 3.80952380952381e-06, "loss": 1.1282, "step": 40},
    {"epoch": 0.04310344827586207, "grad_norm": 0.36938748261120696, "learning_rate": 4.2857142857142855e-06, "loss": 1.1141, "step": 45},
    {"epoch": 0.04789272030651341, "grad_norm": 0.31186688358132403, "learning_rate": 4.761904761904762e-06, "loss": 1.0914, "step": 50},
    {"epoch": 0.05268199233716475, "grad_norm": 0.3279958963323018, "learning_rate": 5.2380952380952384e-06, "loss": 1.1115, "step": 55},
    {"epoch": 0.05747126436781609, "grad_norm": 0.3107640896000187, "learning_rate": 5.7142857142857145e-06, "loss": 1.0897, "step": 60},
    {"epoch": 0.06226053639846743, "grad_norm": 0.3071723661825106, "learning_rate": 6.1904761904761914e-06, "loss": 1.1045, "step": 65},
    {"epoch": 0.06704980842911877, "grad_norm": 0.2936092764163838, "learning_rate": 6.666666666666667e-06, "loss": 1.0661, "step": 70},
    {"epoch": 0.07183908045977011, "grad_norm": 0.28053978486957076, "learning_rate": 7.1428571428571436e-06, "loss": 1.0887, "step": 75},
    {"epoch": 0.07662835249042145, "grad_norm": 0.2891456438800518, "learning_rate": 7.61904761904762e-06, "loss": 1.0756, "step": 80},
    {"epoch": 0.08141762452107279, "grad_norm": 0.24950135731624706, "learning_rate": 8.095238095238097e-06, "loss": 1.0478, "step": 85},
    {"epoch": 0.08620689655172414, "grad_norm": 0.23670461068088686, "learning_rate": 8.571428571428571e-06, "loss": 1.0389, "step": 90},
    {"epoch": 0.09099616858237548, "grad_norm": 0.2507461508986584, "learning_rate": 9.047619047619049e-06, "loss": 1.0509, "step": 95},
    {"epoch": 0.09578544061302682, "grad_norm": 0.22930855800514297, "learning_rate": 9.523809523809525e-06, "loss": 1.0377, "step": 100},
    {"epoch": 0.10057471264367816, "grad_norm": 0.24445927836597864, "learning_rate": 1e-05, "loss": 1.0506, "step": 105},
    {"epoch": 0.1053639846743295, "grad_norm": 0.22388714842997454, "learning_rate": 9.999300418283908e-06, "loss": 1.0377, "step": 110},
    {"epoch": 0.11015325670498084, "grad_norm": 0.2406057364217601, "learning_rate": 9.997201868901463e-06, "loss": 1.0374, "step": 115},
    {"epoch": 0.11494252873563218, "grad_norm": 0.2275701466092952, "learning_rate": 9.993704939095376e-06, "loss": 1.0663, "step": 120},
    {"epoch": 0.11973180076628352, "grad_norm": 0.22200651495287974, "learning_rate": 9.988810607420912e-06, "loss": 1.0448, "step": 125},
    {"epoch": 0.12452107279693486, "grad_norm": 0.22051254365376602, "learning_rate": 9.982520243472044e-06, "loss": 1.0099, "step": 130},
    {"epoch": 0.12931034482758622, "grad_norm": 0.23279527845715264, "learning_rate": 9.974835607498224e-06, "loss": 1.0212, "step": 135},
    {"epoch": 0.13409961685823754, "grad_norm": 0.22813702069156522, "learning_rate": 9.965758849911774e-06, "loss": 1.023, "step": 140},
    {"epoch": 0.1388888888888889, "grad_norm": 0.23555996785615885, "learning_rate": 9.955292510686156e-06, "loss": 0.9997, "step": 145},
    {"epoch": 0.14367816091954022, "grad_norm": 0.2560040654609538, "learning_rate": 9.943439518645193e-06, "loss": 1.0172, "step": 150},
    {"epoch": 0.14846743295019157, "grad_norm": 0.24096780952464245, "learning_rate": 9.930203190643491e-06, "loss": 0.9876, "step": 155},
    {"epoch": 0.1532567049808429, "grad_norm": 0.24193989881098193, "learning_rate": 9.915587230638269e-06, "loss": 1.0417, "step": 160},
    {"epoch": 0.15804597701149425, "grad_norm": 0.24788380685714667, "learning_rate": 9.899595728652883e-06, "loss": 1.0332, "step": 165},
    {"epoch": 0.16283524904214558, "grad_norm": 0.2570877531631883, "learning_rate": 9.882233159632297e-06, "loss": 1.0129, "step": 170},
    {"epoch": 0.16762452107279693, "grad_norm": 0.26735645578353723, "learning_rate": 9.863504382190838e-06, "loss": 1.0255, "step": 175},
    {"epoch": 0.1724137931034483, "grad_norm": 0.24596191852493296, "learning_rate": 9.843414637252615e-06, "loss": 1.0125, "step": 180},
    {"epoch": 0.1772030651340996, "grad_norm": 0.2510254446995529, "learning_rate": 9.821969546584922e-06, "loss": 1.022, "step": 185},
    {"epoch": 0.18199233716475097, "grad_norm": 0.2626288820781652, "learning_rate": 9.79917511122509e-06, "loss": 1.0016, "step": 190},
    {"epoch": 0.1867816091954023, "grad_norm": 0.2588872211996587, "learning_rate": 9.775037709801206e-06, "loss": 1.0292, "step": 195},
    {"epoch": 0.19157088122605365, "grad_norm": 0.28272889728543066, "learning_rate": 9.749564096747148e-06, "loss": 1.0255, "step": 200},
    {"epoch": 0.19636015325670497, "grad_norm": 0.25691560926098406, "learning_rate": 9.722761400412496e-06, "loss": 1.0205, "step": 205},
    {"epoch": 0.20114942528735633, "grad_norm": 0.31206698507075104, "learning_rate": 9.694637121067764e-06, "loss": 1.0018, "step": 210},
    {"epoch": 0.20593869731800765, "grad_norm": 0.24725309050346184, "learning_rate": 9.6651991288056e-06, "loss": 1.013, "step": 215},
    {"epoch": 0.210727969348659, "grad_norm": 0.2530356632425841, "learning_rate": 9.63445566133846e-06, "loss": 0.9921, "step": 220},
    {"epoch": 0.21551724137931033, "grad_norm": 0.2784122469884546, "learning_rate": 9.602415321693434e-06, "loss": 1.0066, "step": 225},
    {"epoch": 0.22030651340996169, "grad_norm": 0.3098632552557301, "learning_rate": 9.569087075804842e-06, "loss": 1.0062, "step": 230},
    {"epoch": 0.22509578544061304, "grad_norm": 0.26451308039501314, "learning_rate": 9.534480250005263e-06, "loss": 0.9951, "step": 235},
    {"epoch": 0.22988505747126436, "grad_norm": 0.27301629134605937, "learning_rate": 9.498604528415731e-06, "loss": 1.0347, "step": 240},
    {"epoch": 0.23467432950191572, "grad_norm": 0.2891326880180606, "learning_rate": 9.461469950235795e-06, "loss": 1.0114, "step": 245},
    {"epoch": 0.23946360153256704, "grad_norm": 0.26574747916476, "learning_rate": 9.423086906934228e-06, "loss": 1.0272, "step": 250},
    {"epoch": 0.2442528735632184, "grad_norm": 0.26114773270931335, "learning_rate": 9.38346613934115e-06, "loss": 1.0039, "step": 255},
    {"epoch": 0.24904214559386972, "grad_norm": 0.28383740993290196, "learning_rate": 9.342618734642395e-06, "loss": 1.0077, "step": 260},
    {"epoch": 0.25383141762452105, "grad_norm": 0.28127484887890697, "learning_rate": 9.300556123276955e-06, "loss": 1.0306, "step": 265},
    {"epoch": 0.25862068965517243, "grad_norm": 0.2702807344360773, "learning_rate": 9.257290075738365e-06, "loss": 0.9924, "step": 270},
    {"epoch": 0.26340996168582376, "grad_norm": 0.2912366855364206, "learning_rate": 9.212832699280942e-06, "loss": 1.026, "step": 275},
    {"epoch": 0.2681992337164751, "grad_norm": 0.30617702200806085, "learning_rate": 9.16719643453177e-06, "loss": 1.0247, "step": 280},
    {"epoch": 0.27298850574712646, "grad_norm": 0.2590934020046758, "learning_rate": 9.120394052009412e-06, "loss": 1.0211, "step": 285},
    {"epoch": 0.2777777777777778, "grad_norm": 0.27434400868197933, "learning_rate": 9.072438648550304e-06, "loss": 1.0243, "step": 290},
    {"epoch": 0.2825670498084291, "grad_norm": 0.2813695079919259, "learning_rate": 9.023343643643821e-06, "loss": 1.0008, "step": 295},
    {"epoch": 0.28735632183908044, "grad_norm": 0.2764043745947579, "learning_rate": 8.973122775677078e-06, "loss": 1.0066, "step": 300},
    {"epoch": 0.2921455938697318, "grad_norm": 0.2993769190948342, "learning_rate": 8.921790098090477e-06, "loss": 1.015, "step": 305},
    {"epoch": 0.29693486590038315, "grad_norm": 0.28718834695385625, "learning_rate": 8.869359975445085e-06, "loss": 1.0212, "step": 310},
    {"epoch": 0.3017241379310345, "grad_norm": 0.3787907086651271, "learning_rate": 8.815847079402972e-06, "loss": 1.0079, "step": 315},
    {"epoch": 0.3065134099616858, "grad_norm": 0.3012426621088511, "learning_rate": 8.761266384621599e-06, "loss": 1.0245, "step": 320},
    {"epoch": 0.3113026819923372, "grad_norm": 0.545229044151294, "learning_rate": 8.705633164563413e-06, "loss": 1.0014, "step": 325},
    {"epoch": 0.3160919540229885, "grad_norm": 0.3018913245413703, "learning_rate": 8.648962987221837e-06, "loss": 1.0035, "step": 330},
    {"epoch": 0.32088122605363983, "grad_norm": 0.2937330943277994, "learning_rate": 8.591271710764839e-06, "loss": 0.9932, "step": 335},
    {"epoch": 0.32567049808429116, "grad_norm": 0.30415781861083496, "learning_rate": 8.532575479097294e-06, "loss": 0.982, "step": 340},
    {"epoch": 0.33045977011494254, "grad_norm": 0.2726446228184506, "learning_rate": 8.472890717343391e-06, "loss": 0.9992, "step": 345},
    {"epoch": 0.33524904214559387, "grad_norm": 0.2841460417699516, "learning_rate": 8.412234127250353e-06, "loss": 1.0007, "step": 350},
    {"epoch": 0.3400383141762452, "grad_norm": 0.313253280455998, "learning_rate": 8.350622682514735e-06, "loss": 0.9951, "step": 355},
    {"epoch": 0.3448275862068966, "grad_norm": 0.33772538052784323, "learning_rate": 8.288073624032634e-06, "loss": 1.0169, "step": 360},
    {"epoch": 0.3496168582375479, "grad_norm": 0.2810004404857808, "learning_rate": 8.224604455075115e-06, "loss": 1.0086, "step": 365},
    {"epoch": 0.3544061302681992, "grad_norm": 0.2817546952474833, "learning_rate": 8.160232936390239e-06, "loss": 0.9888, "step": 370},
    {"epoch": 0.35919540229885055, "grad_norm": 0.32316453097441, "learning_rate": 8.094977081233006e-06, "loss": 0.997, "step": 375},
    {"epoch": 0.36398467432950193, "grad_norm": 0.2920029553424864, "learning_rate": 8.02885515032467e-06, "loss": 1.0172, "step": 380},
    {"epoch": 0.36877394636015326, "grad_norm": 0.28736749227532504, "learning_rate": 7.961885646742793e-06, "loss": 1.0092, "step": 385},
    {"epoch": 0.3735632183908046, "grad_norm": 0.29552008976856375, "learning_rate": 7.894087310743468e-06, "loss": 0.9952, "step": 390},
    {"epoch": 0.3783524904214559, "grad_norm": 0.28037261402408636, "learning_rate": 7.825479114517197e-06, "loss": 1.0148, "step": 395},
    {"epoch": 0.3831417624521073, "grad_norm": 0.3259544387907705, "learning_rate": 7.756080256879837e-06, "loss": 1.0172, "step": 400},
    {"epoch": 0.3879310344827586, "grad_norm": 0.2866306787997861, "learning_rate": 7.685910157900158e-06, "loss": 0.9969, "step": 405},
    {"epoch": 0.39272030651340994, "grad_norm": 0.27836252924957877, "learning_rate": 7.614988453465469e-06, "loss": 0.9981, "step": 410},
    {"epoch": 0.3975095785440613, "grad_norm": 0.2881265537652179, "learning_rate": 7.5433349897868445e-06, "loss": 1.0075, "step": 415},
    {"epoch": 0.40229885057471265, "grad_norm": 0.286210264814618, "learning_rate": 7.470969817845518e-06, "loss": 1.0025, "step": 420},
    {"epoch": 0.407088122605364, "grad_norm": 0.28784515054514276, "learning_rate": 7.397913187781962e-06, "loss": 0.9918, "step": 425},
    {"epoch": 0.4118773946360153, "grad_norm": 0.28225636678596183, "learning_rate": 7.324185543229226e-06, "loss": 1.0164, "step": 430},
    {"epoch": 0.4166666666666667, "grad_norm": 0.30273570716493303, "learning_rate": 7.249807515592149e-06, "loss": 0.991, "step": 435},
    {"epoch": 0.421455938697318, "grad_norm": 0.29638416230646264, "learning_rate": 7.174799918274018e-06, "loss": 1.0103, "step": 440},
    {"epoch": 0.42624521072796934, "grad_norm": 0.27796285045314706, "learning_rate": 7.099183740852296e-06, "loss": 0.9929, "step": 445},
    {"epoch": 0.43103448275862066, "grad_norm": 0.3024800017009751, "learning_rate": 7.022980143205046e-06, "loss": 0.9945, "step": 450},
    {"epoch": 0.43582375478927204, "grad_norm": 0.308729839004599, "learning_rate": 6.946210449589714e-06, "loss": 1.0131, "step": 455},
    {"epoch": 0.44061302681992337, "grad_norm": 0.29787252415350113, "learning_rate": 6.868896142675903e-06, "loss": 1.0053, "step": 460},
    {"epoch": 0.4454022988505747, "grad_norm": 0.28555796101935293, "learning_rate": 6.791058857533814e-06, "loss": 1.0106, "step": 465},
    {"epoch": 0.4501915708812261, "grad_norm": 0.27754653040108523, "learning_rate": 6.712720375580057e-06, "loss": 1.0127, "step": 470},
    {"epoch": 0.4549808429118774, "grad_norm": 0.3064883256256046, "learning_rate": 6.633902618482484e-06, "loss": 1.0137, "step": 475},
    {"epoch": 0.45977011494252873, "grad_norm": 0.2871757827962217, "learning_rate": 6.554627642025807e-06, "loss": 0.9808, "step": 480},
    {"epoch": 0.46455938697318006, "grad_norm": 0.32016740057261645, "learning_rate": 6.474917629939652e-06, "loss": 1.0154, "step": 485},
    {"epoch": 0.46934865900383144, "grad_norm": 0.28945157272232624, "learning_rate": 6.394794887690838e-06, "loss": 0.987, "step": 490},
    {"epoch": 0.47413793103448276, "grad_norm": 0.45234637623400176, "learning_rate": 6.314281836241573e-06, "loss": 1.0072, "step": 495},
    {"epoch": 0.4789272030651341, "grad_norm": 0.3043561526142259, "learning_rate": 6.233401005775339e-06, "loss": 0.9947, "step": 500},
    {"epoch": 0.4837164750957854, "grad_norm": 0.3021339898048032, "learning_rate": 6.1521750293922035e-06, "loss": 1.0168, "step": 505},
    {"epoch": 0.4885057471264368, "grad_norm": 0.262840671697266, "learning_rate": 6.070626636775349e-06, "loss": 0.9854, "step": 510},
    {"epoch": 0.4932950191570881, "grad_norm": 0.2717384304425024, "learning_rate": 5.988778647830554e-06, "loss": 0.9847, "step": 515},
    {"epoch": 0.49808429118773945, "grad_norm": 0.3005959006799932, "learning_rate": 5.906653966300444e-06, "loss": 1.0007, "step": 520},
    {"epoch": 0.5028735632183908, "grad_norm": 0.2810709946318669, "learning_rate": 5.824275573355278e-06, "loss": 0.9891, "step": 525},
    {"epoch": 0.5076628352490421, "grad_norm": 0.31324328313914135, "learning_rate": 5.741666521162055e-06, "loss": 1.0049, "step": 530},
    {"epoch": 0.5124521072796935, "grad_norm": 0.29241234641844166, "learning_rate": 5.658849926433774e-06, "loss": 1.0019, "step": 535},
    {"epoch": 0.5172413793103449, "grad_norm": 0.29226941844525284, "learning_rate": 5.575848963960621e-06, "loss": 0.9964, "step": 540},
    {"epoch": 0.5220306513409961, "grad_norm": 0.2869562590824223, "learning_rate": 5.4926868601249e-06, "loss": 1.003, "step": 545},
    {"epoch": 0.5268199233716475, "grad_norm": 0.2887086296052449, "learning_rate": 5.4093868864015405e-06, "loss": 0.9911, "step": 550},
    {"epoch": 0.5316091954022989, "grad_norm": 0.2990975733324071, "learning_rate": 5.325972352845965e-06, "loss": 0.9961, "step": 555},
    {"epoch": 0.5363984674329502, "grad_norm": 0.2964344269886325, "learning_rate": 5.24246660157119e-06, "loss": 1.0045, "step": 560},
    {"epoch": 0.5411877394636015, "grad_norm": 0.3080697315686108, "learning_rate": 5.1588930002159255e-06, "loss": 0.9897, "step": 565},
    {"epoch": 0.5459770114942529, "grad_norm": 0.3217970889035437, "learning_rate": 5.075274935405554e-06, "loss": 1.0022, "step": 570},
    {"epoch": 0.5507662835249042, "grad_norm": 0.32311237689799727, "learning_rate": 4.991635806207788e-06, "loss": 0.9918, "step": 575},
    {"epoch": 0.5555555555555556, "grad_norm": 0.3076150767243147, "learning_rate": 4.90799901758484e-06, "loss": 1.0156, "step": 580},
    {"epoch": 0.5603448275862069, "grad_norm": 0.29579117268352634, "learning_rate": 4.824387973843957e-06, "loss": 0.9859, "step": 585},
    {"epoch": 0.5651340996168582, "grad_norm": 0.27808702972070926, "learning_rate": 4.74082607208812e-06, "loss": 0.988, "step": 590},
    {"epoch": 0.5699233716475096, "grad_norm": 0.2772865889207572, "learning_rate": 4.6573366956687885e-06, "loss": 1.0042, "step": 595},
    {"epoch": 0.5747126436781609, "grad_norm": 0.2741988412251713, "learning_rate": 4.573943207642452e-06, "loss": 1.018, "step": 600},
    {"epoch": 0.5795019157088123, "grad_norm": 0.3041843000888798, "learning_rate": 4.4906689442328935e-06, "loss": 1.0095, "step": 605},
    {"epoch": 0.5842911877394636, "grad_norm": 0.3234627396285633, "learning_rate": 4.407537208300957e-06, "loss": 0.9981, "step": 610},
    {"epoch": 0.5890804597701149, "grad_norm": 0.30361837699199173, "learning_rate": 4.3245712628236356e-06, "loss": 0.9945, "step": 615},
    {"epoch": 0.5938697318007663, "grad_norm": 0.3068819956047309, "learning_rate": 4.241794324384334e-06, "loss": 0.9829, "step": 620},
    {"epoch": 0.5986590038314177, "grad_norm": 0.3010857723276443, "learning_rate": 4.159229556676111e-06, "loss": 0.9778, "step": 625},
    {"epoch": 0.603448275862069, "grad_norm": 0.3212249651416007, "learning_rate": 4.076900064019721e-06, "loss": 1.007, "step": 630},
    {"epoch": 0.6082375478927203, "grad_norm": 0.2842660737481042, "learning_rate": 3.994828884898267e-06, "loss": 1.0056, "step": 635},
    {"epoch": 0.6130268199233716, "grad_norm": 0.2887728882458368, "learning_rate": 3.91303898551028e-06, "loss": 1.0131, "step": 640},
    {"epoch": 0.617816091954023, "grad_norm": 0.31374850764975326, "learning_rate": 3.8315532533430285e-06, "loss": 0.9979, "step": 645},
    {"epoch": 0.6226053639846744, "grad_norm": 0.2990812859204113, "learning_rate": 3.7503944907678543e-06, "loss": 0.9979, "step": 650},
    {"epoch": 0.6273946360153256, "grad_norm": 0.31174935012060845, "learning_rate": 3.6695854086593126e-06, "loss": 0.9907, "step": 655},
    {"epoch": 0.632183908045977, "grad_norm": 0.30150077718156976, "learning_rate": 3.5891486200399413e-06, "loss": 0.9937, "step": 660},
    {"epoch": 0.6369731800766284, "grad_norm": 0.304753149818871, "learning_rate": 3.509106633752387e-06, "loss": 1.0164, "step": 665},
    {"epoch": 0.6417624521072797, "grad_norm": 0.2993950641947178, "learning_rate": 3.429481848160702e-06, "loss": 1.0093, "step": 670},
    {"epoch": 0.646551724137931, "grad_norm": 0.3102107207132909, "learning_rate": 3.350296544882543e-06, "loss": 0.969, "step": 675},
    {"epoch": 0.6513409961685823, "grad_norm": 0.27647028696280174, "learning_rate": 3.2715728825540525e-06, "loss": 1.0102, "step": 680},
    {"epoch": 0.6561302681992337, "grad_norm": 0.30329159137172645, "learning_rate": 3.19333289062915e-06, "loss": 0.9992, "step": 685},
    {"epoch": 0.6609195402298851, "grad_norm": 0.3233989260192753, "learning_rate": 3.1155984632149565e-06, "loss": 0.9984, "step": 690},
    {"epoch": 0.6657088122605364, "grad_norm": 0.284592333087901, "learning_rate": 3.0383913529451286e-06, "loss": 1.0097, "step": 695},
    {"epoch": 0.6704980842911877, "grad_norm": 0.284014773245373, "learning_rate": 2.961733164892744e-06, "loss": 1.0048, "step": 700},
    {"epoch": 0.6752873563218391, "grad_norm": 0.29864923395210635, "learning_rate": 2.8856453505245018e-06, "loss": 1.008, "step": 705},
    {"epoch": 0.6800766283524904, "grad_norm": 0.28455877244795186, "learning_rate": 2.8101492016979027e-06, "loss": 1.0082, "step": 710},
    {"epoch": 0.6848659003831418, "grad_norm": 0.29297293441472344, "learning_rate": 2.7352658447030882e-06, "loss": 1.0137, "step": 715},
    {"epoch": 0.6896551724137931, "grad_norm": 0.29774107321754356, "learning_rate": 2.6610162343510183e-06, "loss": 0.9878, "step": 720},
    {"epoch": 0.6944444444444444, "grad_norm": 0.2886426546973218, "learning_rate": 2.587421148109619e-06, "loss": 0.9855, "step": 725},
    {"epoch": 0.6992337164750958, "grad_norm": 0.30340545161406907, "learning_rate": 2.5145011802895835e-06, "loss": 1.004, "step": 730},
    {"epoch": 0.7040229885057471, "grad_norm": 0.28629744556011416, "learning_rate": 2.4422767362814045e-06, "loss": 0.9935, "step": 735},
    {"epoch": 0.7088122605363985, "grad_norm": 0.29480150488982737, "learning_rate": 2.370768026845276e-06, "loss": 1.0013, "step": 740},
    {"epoch": 0.7136015325670498, "grad_norm": 0.30424158130358797, "learning_rate": 2.299995062455459e-06, "loss": 0.9932, "step": 745},
    {"epoch": 0.7183908045977011, "grad_norm": 0.3179663498265742, "learning_rate": 2.2299776477007073e-06, "loss": 1.007, "step": 750},
    {"epoch": 0.7231800766283525, "grad_norm": 0.303515618658323, "learning_rate": 2.16073537574229e-06, "loss": 0.9963, "step": 755},
    {"epoch": 0.7279693486590039, "grad_norm": 0.30307569076630475, "learning_rate": 2.0922876228311833e-06, "loss": 0.9772, "step": 760},
    {"epoch": 0.7327586206896551, "grad_norm": 0.3143565291039063, "learning_rate": 2.0246535428859652e-06, "loss": 0.9899, "step": 765},
    {"epoch": 0.7375478927203065, "grad_norm": 0.28738666111079514, "learning_rate": 1.957852062132924e-06, "loss": 0.9848, "step": 770},
    {"epoch": 0.7423371647509579, "grad_norm": 0.2850271965375348, "learning_rate": 1.8919018738098704e-06, "loss": 1.0076, "step": 775},
    {"epoch": 0.7471264367816092, "grad_norm": 0.29940367717031596, "learning_rate": 1.8268214329351797e-06, "loss": 0.9864, "step": 780},
    {"epoch": 0.7519157088122606, "grad_norm": 0.2957695153675884, "learning_rate": 1.762628951143454e-06, "loss": 0.9972, "step": 785},
    {"epoch": 0.7567049808429118, "grad_norm": 0.2884019240313734, "learning_rate": 1.6993423915893241e-06, "loss": 0.9969, "step": 790},
    {"epoch": 0.7614942528735632, "grad_norm": 0.30882836660635715, "learning_rate": 1.6369794639207626e-06, "loss": 1.0005, "step": 795},
    {"epoch": 0.7662835249042146, "grad_norm": 0.2880996272538283, "learning_rate": 1.575557619323353e-06, "loss": 0.9853, "step": 800},
    {"epoch": 0.7710727969348659, "grad_norm": 0.28083999464104503, "learning_rate": 1.5150940456368784e-06, "loss": 0.9579, "step": 805},
    {"epoch": 0.7758620689655172, "grad_norm": 0.2977795145319976, "learning_rate": 1.4556056625455922e-06, "loss": 0.9944, "step": 810},
    {"epoch": 0.7806513409961686, "grad_norm": 0.3044689990672244, "learning_rate": 1.3971091168435463e-06, "loss": 0.997, "step": 815},
    {"epoch": 0.7854406130268199, "grad_norm": 0.2951506289424605, "learning_rate": 1.3396207777762732e-06, "loss": 1.0116, "step": 820},
    {"epoch": 0.7902298850574713, "grad_norm": 0.284297818009739, "learning_rate": 1.2831567324601325e-06, "loss": 0.9792, "step": 825},
    {"epoch": 0.7950191570881227, "grad_norm": 0.31450502170794314, "learning_rate": 1.2277327813806123e-06, "loss": 0.9927, "step": 830},
    {"epoch": 0.7998084291187739, "grad_norm": 0.2896076523698672, "learning_rate": 1.173364433970835e-06, "loss": 0.9795, "step": 835},
    {"epoch": 0.8045977011494253, "grad_norm": 0.2834770010415917, "learning_rate": 1.1200669042715163e-06, "loss": 0.9966, "step": 840},
    {"epoch": 0.8093869731800766, "grad_norm": 0.34009482243032485, "learning_rate": 1.0678551066735671e-06, "loss": 0.9767, "step": 845},
    {"epoch": 0.814176245210728, "grad_norm": 0.29936688994813515, "learning_rate": 1.0167436517445777e-06, "loss": 1.003, "step": 850},
    {"epoch": 0.8189655172413793, "grad_norm": 0.29389391810900556, "learning_rate": 9.66746842140287e-07, "loss": 0.9888, "step": 855},
    {"epoch": 0.8237547892720306, "grad_norm": 0.29840441886793445, "learning_rate": 9.178786686022417e-07, "loss": 1.0011, "step": 860},
    {"epoch": 0.828544061302682, "grad_norm": 0.3050673773124508, "learning_rate": 8.701528060427194e-07, "loss": 0.9867, "step": 865},
    {"epoch": 0.8333333333333334, "grad_norm": 0.2809885669064277, "learning_rate": 8.235826097180566e-07, "loss": 0.9802, "step": 870},
    {"epoch": 0.8381226053639846, "grad_norm": 0.29288746432276136, "learning_rate": 7.781811114913995e-07, "loss": 0.9965, "step": 875},
    {"epoch": 0.842911877394636, "grad_norm": 0.29110203494522685, "learning_rate": 7.339610161859618e-07, "loss": 0.9809, "step": 880},
    {"epoch": 0.8477011494252874, "grad_norm": 0.30532295542101273, "learning_rate": 6.909346980298093e-07, "loss": 1.0039, "step": 885},
    {"epoch": 0.8524904214559387, "grad_norm": 0.3178843529727934, "learning_rate": 6.49114197193137e-07, "loss": 0.9992, "step": 890},
    {"epoch": 0.85727969348659, "grad_norm": 0.30030807864509074, "learning_rate": 6.085112164190466e-07, "loss": 0.9967, "step": 895},
    {"epoch": 0.8620689655172413, "grad_norm": 0.283276426877559, "learning_rate": 5.691371177487215e-07, "loss": 0.9951, "step": 900},
    {"epoch": 0.8668582375478927, "grad_norm": 0.2771047728402987, "learning_rate": 5.310029193419697e-07, "loss": 0.9823, "step": 905},
    {"epoch": 0.8716475095785441, "grad_norm": 0.30200927801260424, "learning_rate": 4.941192923939769e-07, "loss": 0.9944, "step": 910},
    {"epoch": 0.8764367816091954, "grad_norm": 0.29243200143497705, "learning_rate": 4.5849655814915683e-07, "loss": 0.9923, "step": 915},
    {"epoch": 0.8812260536398467, "grad_norm": 0.38073762499381975, "learning_rate": 4.2414468501293217e-07, "loss": 0.9931, "step": 920},
    {"epoch": 0.8860153256704981, "grad_norm": 0.28457589633356245, "learning_rate": 3.9107328576224736e-07, "loss": 0.9879, "step": 925},
    {"epoch": 0.8908045977011494, "grad_norm": 0.29913915061920765, "learning_rate": 3.5929161485559694e-07, "loss": 1.0269, "step": 930},
    {"epoch": 0.8955938697318008, "grad_norm": 0.28795408054019833, "learning_rate": 3.2880856584333043e-07, "loss": 0.984, "step": 935},
    {"epoch": 0.9003831417624522, "grad_norm": 0.28456219117461123, "learning_rate": 2.9963266887894526e-07, "loss": 1.0007, "step": 940},
    {"epoch": 0.9051724137931034, "grad_norm": 0.30655155945282825, "learning_rate": 2.717720883320685e-07, "loss": 1.0093, "step": 945},
    {"epoch": 0.9099616858237548, "grad_norm": 0.28653578966574017, "learning_rate": 2.4523462050379864e-07, "loss": 0.9861, "step": 950},
    {"epoch": 0.9147509578544061, "grad_norm": 0.30075605446410747, "learning_rate": 2.2002769144504943e-07, "loss": 0.997, "step": 955},
    {"epoch": 0.9195402298850575, "grad_norm": 0.28984184857418177, "learning_rate": 1.9615835487849677e-07, "loss": 0.9772, "step": 960},
    {"epoch": 0.9243295019157088, "grad_norm": 0.2971882152661699, "learning_rate": 1.7363329022471564e-07, "loss": 1.0125, "step": 965},
    {"epoch": 0.9291187739463601, "grad_norm": 0.30026576782404996, "learning_rate": 1.5245880073305963e-07, "loss": 1.0128, "step": 970},
    {"epoch": 0.9339080459770115, "grad_norm": 0.2971224564720333, "learning_rate": 1.3264081171780797e-07, "loss": 1.0114, "step": 975},
    {"epoch": 0.9386973180076629, "grad_norm": 0.278422035094591, "learning_rate": 1.1418486890006574e-07, "loss": 0.982, "step": 980},
    {"epoch": 0.9434865900383141, "grad_norm": 0.29059990973676236, "learning_rate": 9.709613685589314e-08, "loss": 0.998, "step": 985},
    {"epoch": 0.9482758620689655, "grad_norm": 0.29092073403074437, "learning_rate": 8.137939757108526e-08, "loss": 1.011, "step": 990},
    {"epoch": 0.9530651340996169, "grad_norm": 0.2923297366913393, "learning_rate": 6.703904910301929e-08, "loss": 0.9656, "step": 995},
    {"epoch": 0.9578544061302682, "grad_norm": 0.2931478943843858, "learning_rate": 5.4079104349929465e-08, "loss": 1.0036, "step": 1000},
    {"epoch": 0.9626436781609196, "grad_norm": 0.2923376776086664, "learning_rate": 4.250318992797375e-08, "loss": 1.0083, "step": 1005},
    {"epoch": 0.9674329501915708, "grad_norm": 0.2863912260830555, "learning_rate": 3.231454515638221e-08, "loss": 0.9955, "step": 1010},
    {"epoch": 0.9722222222222222, "grad_norm": 0.2974177186982834, "learning_rate": 2.351602115099272e-08, "loss": 0.9865, "step": 1015},
    {"epoch": 0.9770114942528736, "grad_norm": 0.29561321400349233, "learning_rate": 1.6110080026414123e-08, "loss": 1.0083, "step": 1020},
    {"epoch": 0.9818007662835249, "grad_norm": 0.29932685669861786, "learning_rate": 1.0098794207047402e-08, "loss": 1.0118, "step": 1025},
    {"epoch": 0.9865900383141762, "grad_norm": 0.29155813115628504, "learning_rate": 5.483845847151226e-09, "loss": 0.9846, "step": 1030},
    {"epoch": 0.9913793103448276, "grad_norm": 0.2910040393119768, "learning_rate": 2.2665263601240328e-09, "loss": 0.9812, "step": 1035},
    {"epoch": 0.9961685823754789, "grad_norm": 0.28919827046140006, "learning_rate": 4.4773605712089554e-10, "loss": 1.0115, "step": 1040},
    {"epoch": 1.0, "eval_runtime": 6595.9355, "eval_samples_per_second": 3.504, "eval_steps_per_second": 0.876, "step": 1044},
    {"epoch": 1.0, "step": 1044, "total_flos": 1940427569627136.0, "train_loss": 1.0122476654034465, "train_runtime": 20247.7008, "train_samples_per_second": 3.298, "train_steps_per_second": 0.052}
  ],
  "logging_steps": 5,
  "max_steps": 1044,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1940427569627136.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}