{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1925,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005194805194805195,
      "grad_norm": 1.3003292727167528,
      "learning_rate": 2.8735632183908046e-06,
      "loss": 0.5747,
      "step": 10
    },
    {
      "epoch": 0.01038961038961039,
      "grad_norm": 0.8164205391130951,
      "learning_rate": 5.747126436781609e-06,
      "loss": 0.5181,
      "step": 20
    },
    {
      "epoch": 0.015584415584415584,
      "grad_norm": 0.488946468114722,
      "learning_rate": 8.620689655172414e-06,
      "loss": 0.4525,
      "step": 30
    },
    {
      "epoch": 0.02077922077922078,
      "grad_norm": 0.5240257767228009,
      "learning_rate": 1.1494252873563218e-05,
      "loss": 0.4153,
      "step": 40
    },
    {
      "epoch": 0.025974025974025976,
      "grad_norm": 0.3878833187137561,
      "learning_rate": 1.4367816091954022e-05,
      "loss": 0.3917,
      "step": 50
    },
    {
      "epoch": 0.03116883116883117,
      "grad_norm": 0.3932438541306065,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 0.3707,
      "step": 60
    },
    {
      "epoch": 0.03636363636363636,
      "grad_norm": 0.4466114932768719,
      "learning_rate": 2.0114942528735632e-05,
      "loss": 0.3605,
      "step": 70
    },
    {
      "epoch": 0.04155844155844156,
      "grad_norm": 0.8032738854038463,
      "learning_rate": 2.2988505747126437e-05,
      "loss": 0.3591,
      "step": 80
    },
    {
      "epoch": 0.046753246753246755,
      "grad_norm": 0.543812557370986,
      "learning_rate": 2.5862068965517244e-05,
      "loss": 0.3472,
      "step": 90
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 0.5733743111451876,
      "learning_rate": 2.8735632183908045e-05,
      "loss": 0.3341,
      "step": 100
    },
    {
      "epoch": 0.05714285714285714,
      "grad_norm": 0.9136032757030011,
      "learning_rate": 3.160919540229885e-05,
      "loss": 0.34,
      "step": 110
    },
    {
      "epoch": 0.06233766233766234,
      "grad_norm": 0.8015940010159968,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 0.3346,
      "step": 120
    },
    {
      "epoch": 0.06753246753246753,
      "grad_norm": 0.5317651402718742,
      "learning_rate": 3.735632183908046e-05,
      "loss": 0.3205,
      "step": 130
    },
    {
      "epoch": 0.07272727272727272,
      "grad_norm": 0.9794116224334949,
      "learning_rate": 4.0229885057471265e-05,
      "loss": 0.3247,
      "step": 140
    },
    {
      "epoch": 0.07792207792207792,
      "grad_norm": 0.5926607382301553,
      "learning_rate": 4.3103448275862066e-05,
      "loss": 0.3137,
      "step": 150
    },
    {
      "epoch": 0.08311688311688312,
      "grad_norm": 0.501504811182817,
      "learning_rate": 4.597701149425287e-05,
      "loss": 0.3196,
      "step": 160
    },
    {
      "epoch": 0.08831168831168831,
      "grad_norm": 0.4844466938932139,
      "learning_rate": 4.885057471264368e-05,
      "loss": 0.3185,
      "step": 170
    },
    {
      "epoch": 0.09350649350649351,
      "grad_norm": 0.8792222600261355,
      "learning_rate": 4.999985842691236e-05,
      "loss": 0.317,
      "step": 180
    },
    {
      "epoch": 0.0987012987012987,
      "grad_norm": 0.538260729358137,
      "learning_rate": 4.999899326385009e-05,
      "loss": 0.3122,
      "step": 190
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 0.5532332343125024,
      "learning_rate": 4.99973416166265e-05,
      "loss": 0.308,
      "step": 200
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 0.5065931571721304,
      "learning_rate": 4.999490353720347e-05,
      "loss": 0.305,
      "step": 210
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 0.4661004020519144,
      "learning_rate": 4.9991679102284494e-05,
      "loss": 0.3031,
      "step": 220
    },
    {
      "epoch": 0.11948051948051948,
      "grad_norm": 0.4350447913864305,
      "learning_rate": 4.998766841331236e-05,
      "loss": 0.2979,
      "step": 230
    },
    {
      "epoch": 0.12467532467532468,
      "grad_norm": 0.3782265622233307,
      "learning_rate": 4.998287159646586e-05,
      "loss": 0.3035,
      "step": 240
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 0.3863507226733708,
      "learning_rate": 4.997728880265592e-05,
      "loss": 0.3024,
      "step": 250
    },
    {
      "epoch": 0.13506493506493505,
      "grad_norm": 0.45465564793802865,
      "learning_rate": 4.9970920207520756e-05,
      "loss": 0.2984,
      "step": 260
    },
    {
      "epoch": 0.14025974025974025,
      "grad_norm": 0.331617769411873,
      "learning_rate": 4.9963766011420394e-05,
      "loss": 0.2947,
      "step": 270
    },
    {
      "epoch": 0.14545454545454545,
      "grad_norm": 0.4323281120205803,
      "learning_rate": 4.9955826439430384e-05,
      "loss": 0.2885,
      "step": 280
    },
    {
      "epoch": 0.15064935064935064,
      "grad_norm": 0.3828139513983012,
      "learning_rate": 4.994710174133469e-05,
      "loss": 0.2957,
      "step": 290
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 0.45442672024966796,
      "learning_rate": 4.9937592191617846e-05,
      "loss": 0.2929,
      "step": 300
    },
    {
      "epoch": 0.16103896103896104,
      "grad_norm": 0.4324048943234123,
      "learning_rate": 4.992729808945629e-05,
      "loss": 0.287,
      "step": 310
    },
    {
      "epoch": 0.16623376623376623,
      "grad_norm": 0.3358736775493111,
      "learning_rate": 4.991621975870901e-05,
      "loss": 0.2831,
      "step": 320
    },
    {
      "epoch": 0.17142857142857143,
      "grad_norm": 0.33113790181444713,
      "learning_rate": 4.990435754790731e-05,
      "loss": 0.2868,
      "step": 330
    },
    {
      "epoch": 0.17662337662337663,
      "grad_norm": 0.3207965425558816,
      "learning_rate": 4.9891711830243845e-05,
      "loss": 0.2911,
      "step": 340
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.361708114933415,
      "learning_rate": 4.987828300356091e-05,
      "loss": 0.2857,
      "step": 350
    },
    {
      "epoch": 0.18701298701298702,
      "grad_norm": 0.34168626805695096,
      "learning_rate": 4.9864071490337896e-05,
      "loss": 0.2849,
      "step": 360
    },
    {
      "epoch": 0.19220779220779222,
      "grad_norm": 0.2908926746764898,
      "learning_rate": 4.9849077737678e-05,
      "loss": 0.2794,
      "step": 370
    },
    {
      "epoch": 0.1974025974025974,
      "grad_norm": 0.5087944109212423,
      "learning_rate": 4.983330221729419e-05,
      "loss": 0.2787,
      "step": 380
    },
    {
      "epoch": 0.2025974025974026,
      "grad_norm": 0.27804001121817934,
      "learning_rate": 4.9816745425494326e-05,
      "loss": 0.2759,
      "step": 390
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 0.4013635528070106,
      "learning_rate": 4.979940788316556e-05,
      "loss": 0.2817,
      "step": 400
    },
    {
      "epoch": 0.21298701298701297,
      "grad_norm": 0.25511672135848257,
      "learning_rate": 4.978129013575796e-05,
      "loss": 0.2785,
      "step": 410
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 0.298813003536876,
      "learning_rate": 4.976239275326733e-05,
      "loss": 0.2803,
      "step": 420
    },
    {
      "epoch": 0.22337662337662337,
      "grad_norm": 0.25617082862388485,
      "learning_rate": 4.974271633021729e-05,
      "loss": 0.2736,
      "step": 430
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 0.29247425322362797,
      "learning_rate": 4.9722261485640584e-05,
      "loss": 0.2767,
      "step": 440
    },
    {
      "epoch": 0.23376623376623376,
      "grad_norm": 0.33775737625365165,
      "learning_rate": 4.9701028863059563e-05,
      "loss": 0.2753,
      "step": 450
    },
    {
      "epoch": 0.23896103896103896,
      "grad_norm": 0.38447368527387143,
      "learning_rate": 4.967901913046598e-05,
      "loss": 0.2805,
      "step": 460
    },
    {
      "epoch": 0.24415584415584415,
      "grad_norm": 0.36418471782114614,
      "learning_rate": 4.9656232980299976e-05,
      "loss": 0.2707,
      "step": 470
    },
    {
      "epoch": 0.24935064935064935,
      "grad_norm": 0.2826049309528878,
      "learning_rate": 4.963267112942826e-05,
      "loss": 0.2775,
      "step": 480
    },
    {
      "epoch": 0.2545454545454545,
      "grad_norm": 0.3501209948268494,
      "learning_rate": 4.9608334319121584e-05,
      "loss": 0.2731,
      "step": 490
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.3608819312798831,
      "learning_rate": 4.958322331503141e-05,
      "loss": 0.2707,
      "step": 500
    },
    {
      "epoch": 0.2649350649350649,
      "grad_norm": 0.299020142675044,
      "learning_rate": 4.9557338907165833e-05,
      "loss": 0.2732,
      "step": 510
    },
    {
      "epoch": 0.2701298701298701,
      "grad_norm": 0.2685575579055106,
      "learning_rate": 4.9530681909864724e-05,
      "loss": 0.2689,
      "step": 520
    },
    {
      "epoch": 0.2753246753246753,
      "grad_norm": 0.23478660501034596,
      "learning_rate": 4.950325316177409e-05,
      "loss": 0.2726,
      "step": 530
    },
    {
      "epoch": 0.2805194805194805,
      "grad_norm": 0.26267550311986976,
      "learning_rate": 4.947505352581974e-05,
      "loss": 0.2688,
      "step": 540
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.2804831055299143,
      "learning_rate": 4.944608388918005e-05,
      "loss": 0.2724,
      "step": 550
    },
    {
      "epoch": 0.2909090909090909,
      "grad_norm": 0.36597728037439853,
      "learning_rate": 4.941634516325816e-05,
      "loss": 0.2674,
      "step": 560
    },
    {
      "epoch": 0.2961038961038961,
      "grad_norm": 0.27054673370836463,
      "learning_rate": 4.9385838283653216e-05,
      "loss": 0.2649,
      "step": 570
    },
    {
      "epoch": 0.3012987012987013,
      "grad_norm": 0.3066834081735395,
      "learning_rate": 4.9354564210130976e-05,
      "loss": 0.2677,
      "step": 580
    },
    {
      "epoch": 0.3064935064935065,
      "grad_norm": 0.2950401672059928,
      "learning_rate": 4.93225239265936e-05,
      "loss": 0.2622,
      "step": 590
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 0.31772179112234966,
      "learning_rate": 4.928971844104868e-05,
      "loss": 0.2641,
      "step": 600
    },
    {
      "epoch": 0.3168831168831169,
      "grad_norm": 0.27924556453889027,
      "learning_rate": 4.9256148785577606e-05,
      "loss": 0.2647,
      "step": 610
    },
    {
      "epoch": 0.3220779220779221,
      "grad_norm": 0.28644663252200886,
      "learning_rate": 4.9221816016302966e-05,
      "loss": 0.2625,
      "step": 620
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 0.2606246292323375,
      "learning_rate": 4.9186721213355455e-05,
      "loss": 0.2636,
      "step": 630
    },
    {
      "epoch": 0.33246753246753247,
      "grad_norm": 0.3172496714001626,
      "learning_rate": 4.915086548083978e-05,
      "loss": 0.2683,
      "step": 640
    },
    {
      "epoch": 0.33766233766233766,
      "grad_norm": 0.24927905040341644,
      "learning_rate": 4.9114249946800003e-05,
      "loss": 0.2654,
      "step": 650
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 0.32250447729794757,
      "learning_rate": 4.907687576318401e-05,
      "loss": 0.2636,
      "step": 660
    },
    {
      "epoch": 0.34805194805194806,
      "grad_norm": 0.32565974721616914,
      "learning_rate": 4.903874410580731e-05,
      "loss": 0.2601,
      "step": 670
    },
    {
      "epoch": 0.35324675324675325,
      "grad_norm": 0.23517482221948124,
      "learning_rate": 4.899985617431597e-05,
      "loss": 0.2611,
      "step": 680
    },
    {
      "epoch": 0.35844155844155845,
      "grad_norm": 0.2438523561922534,
      "learning_rate": 4.896021319214895e-05,
      "loss": 0.2601,
      "step": 690
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.2668670929832916,
      "learning_rate": 4.8919816406499584e-05,
      "loss": 0.2696,
      "step": 700
    },
    {
      "epoch": 0.36883116883116884,
      "grad_norm": 0.24937135360115686,
      "learning_rate": 4.887866708827633e-05,
      "loss": 0.2602,
      "step": 710
    },
    {
      "epoch": 0.37402597402597404,
      "grad_norm": 0.23999629542679116,
      "learning_rate": 4.8836766532062804e-05,
      "loss": 0.2588,
      "step": 720
    },
    {
      "epoch": 0.37922077922077924,
      "grad_norm": 0.23334349462758497,
      "learning_rate": 4.879411605607704e-05,
      "loss": 0.2606,
      "step": 730
    },
    {
      "epoch": 0.38441558441558443,
      "grad_norm": 0.2215145938323352,
      "learning_rate": 4.8750717002130024e-05,
      "loss": 0.2567,
      "step": 740
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 0.3218548674660387,
      "learning_rate": 4.870657073558349e-05,
      "loss": 0.2627,
      "step": 750
    },
    {
      "epoch": 0.3948051948051948,
      "grad_norm": 0.29152150655446074,
      "learning_rate": 4.866167864530693e-05,
      "loss": 0.2561,
      "step": 760
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.29823420885887736,
      "learning_rate": 4.8616042143633937e-05,
      "loss": 0.2594,
      "step": 770
    },
    {
      "epoch": 0.4051948051948052,
      "grad_norm": 0.256792339442467,
      "learning_rate": 4.856966266631777e-05,
      "loss": 0.2565,
      "step": 780
    },
    {
      "epoch": 0.4103896103896104,
      "grad_norm": 0.25804885977573755,
      "learning_rate": 4.8522541672486156e-05,
      "loss": 0.2577,
      "step": 790
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 0.2903609751193798,
      "learning_rate": 4.84746806445954e-05,
      "loss": 0.2534,
      "step": 800
    },
    {
      "epoch": 0.42077922077922075,
      "grad_norm": 0.26400859462593973,
      "learning_rate": 4.8426081088383756e-05,
      "loss": 0.2586,
      "step": 810
    },
    {
      "epoch": 0.42597402597402595,
      "grad_norm": 0.2960587838599708,
      "learning_rate": 4.837674453282404e-05,
      "loss": 0.261,
      "step": 820
    },
    {
      "epoch": 0.43116883116883115,
      "grad_norm": 0.23331561495605277,
      "learning_rate": 4.832667253007554e-05,
      "loss": 0.2536,
      "step": 830
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 0.24020811230350025,
      "learning_rate": 4.8275866655435175e-05,
      "loss": 0.2564,
      "step": 840
    },
    {
      "epoch": 0.44155844155844154,
      "grad_norm": 0.22023402453548904,
      "learning_rate": 4.8224328507287946e-05,
      "loss": 0.2562,
      "step": 850
    },
    {
      "epoch": 0.44675324675324674,
      "grad_norm": 0.293317498213313,
      "learning_rate": 4.8172059707056626e-05,
      "loss": 0.2565,
      "step": 860
    },
    {
      "epoch": 0.45194805194805193,
      "grad_norm": 0.2692215437341758,
      "learning_rate": 4.811906189915078e-05,
      "loss": 0.2506,
      "step": 870
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 0.23694698773474526,
      "learning_rate": 4.806533675091501e-05,
      "loss": 0.2518,
      "step": 880
    },
    {
      "epoch": 0.4623376623376623,
      "grad_norm": 0.22885916720084376,
      "learning_rate": 4.80108859525765e-05,
      "loss": 0.252,
      "step": 890
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 0.24916135308130166,
      "learning_rate": 4.795571121719187e-05,
      "loss": 0.253,
      "step": 900
    },
    {
      "epoch": 0.4727272727272727,
      "grad_norm": 0.24894984701102493,
      "learning_rate": 4.7899814280593226e-05,
      "loss": 0.2529,
      "step": 910
    },
    {
      "epoch": 0.4779220779220779,
      "grad_norm": 0.2723728137565129,
      "learning_rate": 4.78431969013336e-05,
      "loss": 0.2555,
      "step": 920
    },
    {
      "epoch": 0.4831168831168831,
      "grad_norm": 0.3183305552275493,
      "learning_rate": 4.778586086063159e-05,
      "loss": 0.2514,
      "step": 930
    },
    {
      "epoch": 0.4883116883116883,
      "grad_norm": 0.2414413013327865,
      "learning_rate": 4.772780796231537e-05,
      "loss": 0.2484,
      "step": 940
    },
    {
      "epoch": 0.4935064935064935,
      "grad_norm": 0.22563943539011178,
      "learning_rate": 4.766904003276589e-05,
      "loss": 0.2539,
      "step": 950
    },
    {
      "epoch": 0.4987012987012987,
      "grad_norm": 0.30205872999506944,
      "learning_rate": 4.760955892085942e-05,
      "loss": 0.2527,
      "step": 960
    },
    {
      "epoch": 0.5038961038961038,
      "grad_norm": 0.2860786126383834,
      "learning_rate": 4.754936649790942e-05,
      "loss": 0.2516,
      "step": 970
    },
    {
      "epoch": 0.509090909090909,
      "grad_norm": 0.27572406640999436,
      "learning_rate": 4.7488464657607635e-05,
      "loss": 0.2498,
      "step": 980
    },
    {
      "epoch": 0.5142857142857142,
      "grad_norm": 0.24673507061013106,
      "learning_rate": 4.7426855315964535e-05,
      "loss": 0.2531,
      "step": 990
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.21033978842271397,
      "learning_rate": 4.736454041124904e-05,
      "loss": 0.2504,
      "step": 1000
    },
    {
      "epoch": 0.5246753246753246,
      "grad_norm": 0.24381830272002009,
      "learning_rate": 4.7301521903927505e-05,
      "loss": 0.2428,
      "step": 1010
    },
    {
      "epoch": 0.5298701298701298,
      "grad_norm": 0.2334282560856222,
      "learning_rate": 4.723780177660209e-05,
      "loss": 0.2501,
      "step": 1020
    },
    {
      "epoch": 0.535064935064935,
      "grad_norm": 0.2751808654527514,
      "learning_rate": 4.717338203394836e-05,
      "loss": 0.2507,
      "step": 1030
    },
    {
      "epoch": 0.5402597402597402,
      "grad_norm": 0.2697637268340861,
      "learning_rate": 4.71082647026522e-05,
      "loss": 0.2503,
      "step": 1040
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.22403492548323756,
      "learning_rate": 4.7042451831346136e-05,
      "loss": 0.2495,
      "step": 1050
    },
    {
      "epoch": 0.5506493506493506,
      "grad_norm": 0.22425594786040917,
      "learning_rate": 4.697594549054474e-05,
      "loss": 0.2475,
      "step": 1060
    },
    {
      "epoch": 0.5558441558441558,
      "grad_norm": 0.24265650046282458,
      "learning_rate": 4.690874777257964e-05,
      "loss": 0.2491,
      "step": 1070
    },
    {
      "epoch": 0.561038961038961,
      "grad_norm": 0.22077426247794457,
      "learning_rate": 4.684086079153359e-05,
      "loss": 0.2449,
      "step": 1080
    },
    {
      "epoch": 0.5662337662337662,
      "grad_norm": 0.30484192484636535,
      "learning_rate": 4.6772286683174025e-05,
      "loss": 0.245,
      "step": 1090
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.22182045074526108,
      "learning_rate": 4.670302760488582e-05,
      "loss": 0.2477,
      "step": 1100
    },
    {
      "epoch": 0.5766233766233766,
      "grad_norm": 0.2200234503444333,
      "learning_rate": 4.663308573560343e-05,
      "loss": 0.2506,
      "step": 1110
    },
    {
      "epoch": 0.5818181818181818,
      "grad_norm": 0.21994287683589026,
      "learning_rate": 4.656246327574238e-05,
      "loss": 0.2421,
      "step": 1120
    },
    {
      "epoch": 0.587012987012987,
      "grad_norm": 0.272334351757034,
      "learning_rate": 4.649116244712998e-05,
      "loss": 0.2476,
      "step": 1130
    },
    {
      "epoch": 0.5922077922077922,
      "grad_norm": 0.2236806239622702,
      "learning_rate": 4.641918549293545e-05,
      "loss": 0.2454,
      "step": 1140
    },
    {
      "epoch": 0.5974025974025974,
      "grad_norm": 0.23546182291174625,
      "learning_rate": 4.634653467759936e-05,
      "loss": 0.2477,
      "step": 1150
    },
    {
      "epoch": 0.6025974025974026,
      "grad_norm": 0.2505748637769869,
      "learning_rate": 4.6273212286762376e-05,
      "loss": 0.2449,
      "step": 1160
    },
    {
      "epoch": 0.6077922077922078,
      "grad_norm": 0.18468403137918,
      "learning_rate": 4.619922062719335e-05,
      "loss": 0.2432,
      "step": 1170
    },
    {
      "epoch": 0.612987012987013,
      "grad_norm": 0.23235694779454488,
      "learning_rate": 4.6124562026716766e-05,
      "loss": 0.2457,
      "step": 1180
    },
    {
      "epoch": 0.6181818181818182,
      "grad_norm": 0.2587545121058708,
      "learning_rate": 4.604923883413946e-05,
      "loss": 0.2467,
      "step": 1190
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 0.23524218849591322,
      "learning_rate": 4.59732534191768e-05,
      "loss": 0.2425,
      "step": 1200
    },
    {
      "epoch": 0.6285714285714286,
      "grad_norm": 0.20482111091171828,
      "learning_rate": 4.589660817237805e-05,
      "loss": 0.2446,
      "step": 1210
    },
    {
      "epoch": 0.6337662337662338,
      "grad_norm": 0.23935891812153473,
      "learning_rate": 4.581930550505122e-05,
      "loss": 0.2359,
      "step": 1220
    },
    {
      "epoch": 0.638961038961039,
      "grad_norm": 0.2088944848253035,
      "learning_rate": 4.5741347849187186e-05,
      "loss": 0.2435,
      "step": 1230
    },
    {
      "epoch": 0.6441558441558441,
      "grad_norm": 0.19769218239953817,
      "learning_rate": 4.566273765738318e-05,
      "loss": 0.2429,
      "step": 1240
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 0.21104513975336958,
      "learning_rate": 4.558347740276562e-05,
      "loss": 0.2414,
      "step": 1250
    },
    {
      "epoch": 0.6545454545454545,
      "grad_norm": 0.22299730594468617,
      "learning_rate": 4.550356957891232e-05,
      "loss": 0.2405,
      "step": 1260
    },
    {
      "epoch": 0.6597402597402597,
      "grad_norm": 0.2553717349606562,
      "learning_rate": 4.5423016699774025e-05,
      "loss": 0.242,
      "step": 1270
    },
    {
      "epoch": 0.6649350649350649,
      "grad_norm": 0.184621184174687,
      "learning_rate": 4.5341821299595334e-05,
      "loss": 0.2377,
      "step": 1280
    },
    {
      "epoch": 0.6701298701298701,
      "grad_norm": 0.1931019421949112,
      "learning_rate": 4.525998593283496e-05,
      "loss": 0.2401,
      "step": 1290
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 0.2327800605150306,
      "learning_rate": 4.517751317408537e-05,
      "loss": 0.2405,
      "step": 1300
    },
    {
      "epoch": 0.6805194805194805,
      "grad_norm": 0.21060753158636902,
      "learning_rate": 4.5094405617991796e-05,
      "loss": 0.2363,
      "step": 1310
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 0.18780386461582757,
      "learning_rate": 4.501066587917058e-05,
      "loss": 0.2437,
      "step": 1320
    },
    {
      "epoch": 0.6909090909090909,
      "grad_norm": 0.2671736773226871,
      "learning_rate": 4.4926296592126946e-05,
      "loss": 0.2431,
      "step": 1330
    },
    {
      "epoch": 0.6961038961038961,
      "grad_norm": 0.23810395896846592,
      "learning_rate": 4.484130041117211e-05,
      "loss": 0.243,
      "step": 1340
    },
    {
      "epoch": 0.7012987012987013,
      "grad_norm": 0.18025186403432847,
      "learning_rate": 4.475568001033974e-05,
      "loss": 0.2457,
      "step": 1350
    },
    {
      "epoch": 0.7064935064935065,
      "grad_norm": 0.21115305940327297,
      "learning_rate": 4.466943808330189e-05,
      "loss": 0.2415,
      "step": 1360
    },
    {
      "epoch": 0.7116883116883117,
      "grad_norm": 0.25256979094205834,
      "learning_rate": 4.45825773432842e-05,
      "loss": 0.2407,
      "step": 1370
    },
    {
      "epoch": 0.7168831168831169,
      "grad_norm": 0.22014008453128092,
      "learning_rate": 4.449510052298056e-05,
      "loss": 0.2357,
      "step": 1380
    },
    {
      "epoch": 0.7220779220779221,
      "grad_norm": 0.20062628753000003,
      "learning_rate": 4.440701037446714e-05,
      "loss": 0.2396,
      "step": 1390
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.2083485579826855,
      "learning_rate": 4.431830966911582e-05,
      "loss": 0.2391,
      "step": 1400
    },
    {
      "epoch": 0.7324675324675325,
      "grad_norm": 0.18184305857175756,
      "learning_rate": 4.422900119750695e-05,
      "loss": 0.2355,
      "step": 1410
    },
    {
      "epoch": 0.7376623376623377,
      "grad_norm": 0.2048232593631597,
      "learning_rate": 4.4139087769341625e-05,
      "loss": 0.2332,
      "step": 1420
    },
    {
      "epoch": 0.7428571428571429,
      "grad_norm": 0.21132211989073768,
      "learning_rate": 4.4048572213353234e-05,
      "loss": 0.2422,
      "step": 1430
    },
    {
      "epoch": 0.7480519480519481,
      "grad_norm": 0.19792895604344352,
      "learning_rate": 4.39574573772185e-05,
      "loss": 0.2334,
      "step": 1440
    },
    {
      "epoch": 0.7532467532467533,
      "grad_norm": 0.22178836320367148,
      "learning_rate": 4.3865746127467876e-05,
      "loss": 0.2423,
      "step": 1450
    },
    {
      "epoch": 0.7584415584415585,
      "grad_norm": 0.21967650568135474,
      "learning_rate": 4.3773441349395374e-05,
      "loss": 0.2357,
      "step": 1460
    },
    {
      "epoch": 0.7636363636363637,
      "grad_norm": 0.1917556477695145,
      "learning_rate": 4.368054594696775e-05,
      "loss": 0.2443,
      "step": 1470
    },
    {
      "epoch": 0.7688311688311689,
      "grad_norm": 0.20969861600848638,
      "learning_rate": 4.3587062842733216e-05,
      "loss": 0.2341,
      "step": 1480
    },
    {
      "epoch": 0.7740259740259741,
      "grad_norm": 0.19871375239851857,
      "learning_rate": 4.349299497772945e-05,
      "loss": 0.2361,
      "step": 1490
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.19082750332598916,
      "learning_rate": 4.339834531139104e-05,
      "loss": 0.2316,
      "step": 1500
    },
    {
      "epoch": 0.7844155844155845,
      "grad_norm": 0.2177029161255871,
      "learning_rate": 4.330311682145645e-05,
      "loss": 0.2343,
      "step": 1510
    },
    {
      "epoch": 0.7896103896103897,
      "grad_norm": 0.20562958726540304,
      "learning_rate": 4.320731250387429e-05,
      "loss": 0.2401,
      "step": 1520
    },
    {
      "epoch": 0.7948051948051948,
      "grad_norm": 0.2070237852219627,
      "learning_rate": 4.311093537270905e-05,
      "loss": 0.2374,
      "step": 1530
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.18967395002327114,
      "learning_rate": 4.301398846004634e-05,
      "loss": 0.2363,
      "step": 1540
    },
    {
      "epoch": 0.8051948051948052,
      "grad_norm": 0.1970271386066234,
      "learning_rate": 4.291647481589742e-05,
      "loss": 0.2302,
      "step": 1550
    },
    {
      "epoch": 0.8103896103896104,
      "grad_norm": 0.18673676151020974,
      "learning_rate": 4.28183975081033e-05,
      "loss": 0.2416,
      "step": 1560
    },
    {
      "epoch": 0.8155844155844156,
      "grad_norm": 0.2183111540642943,
      "learning_rate": 4.271975962223821e-05,
      "loss": 0.2342,
      "step": 1570
    },
    {
      "epoch": 0.8207792207792208,
      "grad_norm": 0.1792298886397136,
      "learning_rate": 4.2620564261512496e-05,
      "loss": 0.2388,
      "step": 1580
    },
    {
      "epoch": 0.825974025974026,
      "grad_norm": 0.21429193275126804,
      "learning_rate": 4.2520814546675037e-05,
      "loss": 0.2323,
      "step": 1590
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 0.1923357673969473,
      "learning_rate": 4.242051361591505e-05,
      "loss": 0.2398,
      "step": 1600
    },
    {
      "epoch": 0.8363636363636363,
      "grad_norm": 0.1825902322292911,
      "learning_rate": 4.2319664624763325e-05,
      "loss": 0.2355,
      "step": 1610
    },
    {
      "epoch": 0.8415584415584415,
      "grad_norm": 0.1708452665847616,
      "learning_rate": 4.2218270745993016e-05,
      "loss": 0.2361,
      "step": 1620
    },
    {
      "epoch": 0.8467532467532467,
      "grad_norm": 0.2003983431936864,
      "learning_rate": 4.211633516951975e-05,
      "loss": 0.237,
      "step": 1630
    },
    {
      "epoch": 0.8519480519480519,
      "grad_norm": 0.1809948763155965,
      "learning_rate": 4.201386110230134e-05,
      "loss": 0.2291,
      "step": 1640
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.19621979591943875,
      "learning_rate": 4.1910851768236825e-05,
      "loss": 0.2284,
      "step": 1650
    },
    {
      "epoch": 0.8623376623376623,
      "grad_norm": 0.20784597945629102,
      "learning_rate": 4.180731040806511e-05,
      "loss": 0.2359,
      "step": 1660
    },
    {
      "epoch": 0.8675324675324675,
      "grad_norm": 0.22581034014160772,
      "learning_rate": 4.170324027926297e-05,
      "loss": 0.2329,
      "step": 1670
    },
    {
      "epoch": 0.8727272727272727,
      "grad_norm": 0.1789163119753752,
      "learning_rate": 4.159864465594255e-05,
      "loss": 0.2338,
      "step": 1680
    },
    {
      "epoch": 0.8779220779220779,
      "grad_norm": 0.1949206924337472,
      "learning_rate": 4.1493526828748416e-05,
      "loss": 0.2392,
      "step": 1690
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 0.20147429000086556,
      "learning_rate": 4.1387890104754004e-05,
      "loss": 0.233,
      "step": 1700
    },
    {
      "epoch": 0.8883116883116883,
      "grad_norm": 0.1537005161376695,
      "learning_rate": 4.128173780735753e-05,
      "loss": 0.2291,
      "step": 1710
    },
    {
      "epoch": 0.8935064935064935,
      "grad_norm": 0.17777763693741433,
      "learning_rate": 4.117507327617751e-05,
      "loss": 0.2291,
      "step": 1720
    },
    {
      "epoch": 0.8987012987012987,
      "grad_norm": 0.174198062693491,
      "learning_rate": 4.1067899866947665e-05,
      "loss": 0.2294,
      "step": 1730
    },
    {
      "epoch": 0.9038961038961039,
      "grad_norm": 0.1884364748511166,
      "learning_rate": 4.096022095141132e-05,
      "loss": 0.235,
      "step": 1740
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.1912652069094164,
      "learning_rate": 4.085203991721535e-05,
      "loss": 0.2318,
      "step": 1750
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 0.206558739242339,
      "learning_rate": 4.0743360167803614e-05,
      "loss": 0.2317,
      "step": 1760
    },
    {
      "epoch": 0.9194805194805195,
      "grad_norm": 0.18252166114267931,
      "learning_rate": 4.063418512230987e-05,
      "loss": 0.2346,
      "step": 1770
    },
    {
      "epoch": 0.9246753246753247,
      "grad_norm": 0.18463778266166328,
      "learning_rate": 4.0524518215450166e-05,
      "loss": 0.2306,
      "step": 1780
    },
    {
      "epoch": 0.9298701298701298,
      "grad_norm": 0.207923278938462,
      "learning_rate": 4.041436289741489e-05,
      "loss": 0.2301,
      "step": 1790
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 0.25335695776490813,
      "learning_rate": 4.0303722633760085e-05,
      "loss": 0.2258,
      "step": 1800
    },
    {
      "epoch": 0.9402597402597402,
      "grad_norm": 0.16805426564943104,
      "learning_rate": 4.019260090529854e-05,
      "loss": 0.2284,
      "step": 1810
    },
    {
      "epoch": 0.9454545454545454,
      "grad_norm": 0.1884837989936669,
      "learning_rate": 4.008100120799019e-05,
      "loss": 0.2285,
      "step": 1820
    },
    {
      "epoch": 0.9506493506493506,
      "grad_norm": 0.19643081968195814,
      "learning_rate": 3.996892705283222e-05,
      "loss": 0.2354,
      "step": 1830
    },
    {
      "epoch": 0.9558441558441558,
      "grad_norm": 0.18949399067442121,
      "learning_rate": 3.9856381965748506e-05,
      "loss": 0.234,
      "step": 1840
    },
    {
      "epoch": 0.961038961038961,
      "grad_norm": 0.18422150470243814,
      "learning_rate": 3.974336948747879e-05,
      "loss": 0.228,
      "step": 1850
    },
    {
      "epoch": 0.9662337662337662,
      "grad_norm": 0.179837212988977,
      "learning_rate": 3.962989317346722e-05,
      "loss": 0.2283,
      "step": 1860
    },
    {
      "epoch": 0.9714285714285714,
      "grad_norm": 0.18190735457463206,
      "learning_rate": 3.951595659375048e-05,
      "loss": 0.2337,
      "step": 1870
    },
    {
      "epoch": 0.9766233766233766,
      "grad_norm": 0.17903757429753223,
      "learning_rate": 3.9401563332845545e-05,
      "loss": 0.2225,
      "step": 1880
    },
    {
      "epoch": 0.9818181818181818,
      "grad_norm": 0.18775008592274955,
      "learning_rate": 3.928671698963686e-05,
      "loss": 0.226,
      "step": 1890
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 0.16101556678112905,
      "learning_rate": 3.917142117726312e-05,
      "loss": 0.2312,
      "step": 1900
    },
    {
      "epoch": 0.9922077922077922,
      "grad_norm": 0.18133929144854413,
      "learning_rate": 3.90556795230036e-05,
      "loss": 0.2264,
      "step": 1910
    },
    {
      "epoch": 0.9974025974025974,
      "grad_norm": 0.21238218367672568,
      "learning_rate": 3.893949566816404e-05,
      "loss": 0.2273,
      "step": 1920
    }
  ],
  "logging_steps": 10,
  "max_steps": 5775,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6056617202155520.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}