{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9723320158102768,
  "eval_steps": 500,
  "global_step": 250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007905138339920948,
      "grad_norm": 29.443620681762695,
      "learning_rate": 0.0,
      "loss": 6.8345,
      "step": 1
    },
    {
      "epoch": 0.015810276679841896,
      "grad_norm": 14.875253677368164,
      "learning_rate": 2.5e-05,
      "loss": 6.6279,
      "step": 2
    },
    {
      "epoch": 0.023715415019762844,
      "grad_norm": 60.56179428100586,
      "learning_rate": 5e-05,
      "loss": 6.7804,
      "step": 3
    },
    {
      "epoch": 0.03162055335968379,
      "grad_norm": 8.8980712890625,
      "learning_rate": 7.500000000000001e-05,
      "loss": 5.7093,
      "step": 4
    },
    {
      "epoch": 0.039525691699604744,
      "grad_norm": 11.861969947814941,
      "learning_rate": 0.0001,
      "loss": 4.9035,
      "step": 5
    },
    {
      "epoch": 0.04743083003952569,
      "grad_norm": 5.465580940246582,
      "learning_rate": 0.000125,
      "loss": 4.0769,
      "step": 6
    },
    {
      "epoch": 0.05533596837944664,
      "grad_norm": 3.910144329071045,
      "learning_rate": 0.00015000000000000001,
      "loss": 4.1908,
      "step": 7
    },
    {
      "epoch": 0.06324110671936758,
      "grad_norm": 2.9628067016601562,
      "learning_rate": 0.000175,
      "loss": 3.0805,
      "step": 8
    },
    {
      "epoch": 0.07114624505928854,
      "grad_norm": 3.2732276916503906,
      "learning_rate": 0.0002,
      "loss": 3.063,
      "step": 9
    },
    {
      "epoch": 0.07905138339920949,
      "grad_norm": 2.3448641300201416,
      "learning_rate": 0.0001999915737775817,
      "loss": 2.4361,
      "step": 10
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 3.226670265197754,
      "learning_rate": 0.00019996629653035126,
      "loss": 2.142,
      "step": 11
    },
    {
      "epoch": 0.09486166007905138,
      "grad_norm": 2.9033336639404297,
      "learning_rate": 0.00019992417251814282,
      "loss": 1.928,
      "step": 12
    },
    {
      "epoch": 0.10276679841897234,
      "grad_norm": 2.5437309741973877,
      "learning_rate": 0.00019986520883988232,
      "loss": 1.4877,
      "step": 13
    },
    {
      "epoch": 0.11067193675889328,
      "grad_norm": 2.364232301712036,
      "learning_rate": 0.0001997894154323911,
      "loss": 1.3451,
      "step": 14
    },
    {
      "epoch": 0.11857707509881422,
      "grad_norm": 2.6934092044830322,
      "learning_rate": 0.00019969680506871137,
      "loss": 1.1716,
      "step": 15
    },
    {
      "epoch": 0.12648221343873517,
      "grad_norm": 2.518855333328247,
      "learning_rate": 0.0001995873933559535,
      "loss": 0.9187,
      "step": 16
    },
    {
      "epoch": 0.13438735177865613,
      "grad_norm": 2.960550546646118,
      "learning_rate": 0.00019946119873266613,
      "loss": 0.7536,
      "step": 17
    },
    {
      "epoch": 0.1422924901185771,
      "grad_norm": 2.3193135261535645,
      "learning_rate": 0.0001993182424657285,
      "loss": 0.7687,
      "step": 18
    },
    {
      "epoch": 0.15019762845849802,
      "grad_norm": 1.7276065349578857,
      "learning_rate": 0.00019915854864676664,
      "loss": 0.7496,
      "step": 19
    },
    {
      "epoch": 0.15810276679841898,
      "grad_norm": 1.7833150625228882,
      "learning_rate": 0.0001989821441880933,
      "loss": 0.6497,
      "step": 20
    },
    {
      "epoch": 0.16600790513833993,
      "grad_norm": 1.5743191242218018,
      "learning_rate": 0.00019878905881817252,
      "loss": 0.5612,
      "step": 21
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 1.4150418043136597,
      "learning_rate": 0.0001985793250766098,
      "loss": 0.6368,
      "step": 22
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 2.5285849571228027,
      "learning_rate": 0.00019835297830866826,
      "loss": 0.51,
      "step": 23
    },
    {
      "epoch": 0.18972332015810275,
      "grad_norm": 1.3967912197113037,
      "learning_rate": 0.00019811005665931205,
      "loss": 0.4095,
      "step": 24
    },
    {
      "epoch": 0.1976284584980237,
      "grad_norm": 1.952337384223938,
      "learning_rate": 0.00019785060106677818,
      "loss": 0.8365,
      "step": 25
    },
    {
      "epoch": 0.20553359683794467,
      "grad_norm": 1.570603609085083,
      "learning_rate": 0.0001975746552556772,
      "loss": 0.3903,
      "step": 26
    },
    {
      "epoch": 0.2134387351778656,
      "grad_norm": 1.3032807111740112,
      "learning_rate": 0.00019728226572962473,
      "loss": 0.4507,
      "step": 27
    },
    {
      "epoch": 0.22134387351778656,
      "grad_norm": 1.129608392715454,
      "learning_rate": 0.0001969734817634044,
      "loss": 0.6392,
      "step": 28
    },
    {
      "epoch": 0.22924901185770752,
      "grad_norm": 2.1530044078826904,
      "learning_rate": 0.0001966483553946637,
      "loss": 0.5927,
      "step": 29
    },
    {
      "epoch": 0.23715415019762845,
      "grad_norm": 1.7631202936172485,
      "learning_rate": 0.00019630694141514464,
      "loss": 0.317,
      "step": 30
    },
    {
      "epoch": 0.2450592885375494,
      "grad_norm": 3.5597596168518066,
      "learning_rate": 0.00019594929736144976,
      "loss": 0.3338,
      "step": 31
    },
    {
      "epoch": 0.25296442687747034,
      "grad_norm": 1.4273171424865723,
      "learning_rate": 0.0001955754835053459,
      "loss": 0.7187,
      "step": 32
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 2.1714446544647217,
      "learning_rate": 0.00019518556284360696,
      "loss": 0.4304,
      "step": 33
    },
    {
      "epoch": 0.26877470355731226,
      "grad_norm": 1.3923094272613525,
      "learning_rate": 0.0001947796010873974,
      "loss": 0.3955,
      "step": 34
    },
    {
      "epoch": 0.2766798418972332,
      "grad_norm": 1.089962124824524,
      "learning_rate": 0.0001943576666511982,
      "loss": 0.6046,
      "step": 35
    },
    {
      "epoch": 0.2845849802371542,
      "grad_norm": 0.9553408622741699,
      "learning_rate": 0.0001939198306412775,
      "loss": 0.6164,
      "step": 36
    },
    {
      "epoch": 0.2924901185770751,
      "grad_norm": 0.9228238463401794,
      "learning_rate": 0.0001934661668437073,
      "loss": 0.4104,
      "step": 37
    },
    {
      "epoch": 0.30039525691699603,
      "grad_norm": 0.8698107004165649,
      "learning_rate": 0.0001929967517119289,
      "loss": 0.6589,
      "step": 38
    },
    {
      "epoch": 0.308300395256917,
      "grad_norm": 0.7418029308319092,
      "learning_rate": 0.0001925116643538684,
      "loss": 0.2634,
      "step": 39
    },
    {
      "epoch": 0.31620553359683795,
      "grad_norm": 1.038150668144226,
      "learning_rate": 0.0001920109865186052,
      "loss": 0.6815,
      "step": 40
    },
    {
      "epoch": 0.3241106719367589,
      "grad_norm": 0.7771694660186768,
      "learning_rate": 0.00019149480258259533,
      "loss": 0.4593,
      "step": 41
    },
    {
      "epoch": 0.33201581027667987,
      "grad_norm": 1.0811573266983032,
      "learning_rate": 0.00019096319953545185,
      "loss": 0.3129,
      "step": 42
    },
    {
      "epoch": 0.33992094861660077,
      "grad_norm": 1.2685577869415283,
      "learning_rate": 0.00019041626696528503,
      "loss": 0.2788,
      "step": 43
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 1.1799741983413696,
      "learning_rate": 0.00018985409704360456,
      "loss": 0.6224,
      "step": 44
    },
    {
      "epoch": 0.3557312252964427,
      "grad_norm": 1.3152222633361816,
      "learning_rate": 0.0001892767845097864,
      "loss": 0.4609,
      "step": 45
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.8674153089523315,
      "learning_rate": 0.00018868442665510678,
      "loss": 0.3661,
      "step": 46
    },
    {
      "epoch": 0.3715415019762846,
      "grad_norm": 1.9650135040283203,
      "learning_rate": 0.00018807712330634642,
      "loss": 0.4214,
      "step": 47
    },
    {
      "epoch": 0.3794466403162055,
      "grad_norm": 0.8908179402351379,
      "learning_rate": 0.00018745497680896722,
      "loss": 0.3789,
      "step": 48
    },
    {
      "epoch": 0.38735177865612647,
      "grad_norm": 0.8767942786216736,
      "learning_rate": 0.0001868180920098644,
      "loss": 0.432,
      "step": 49
    },
    {
      "epoch": 0.3952569169960474,
      "grad_norm": 1.3174779415130615,
      "learning_rate": 0.0001861665762396974,
      "loss": 0.8093,
      "step": 50
    },
    {
      "epoch": 0.4031620553359684,
      "grad_norm": 1.12623929977417,
      "learning_rate": 0.00018550053929480202,
      "loss": 0.3248,
      "step": 51
    },
    {
      "epoch": 0.41106719367588934,
      "grad_norm": 2.023007392883301,
      "learning_rate": 0.00018482009341868697,
      "loss": 0.2736,
      "step": 52
    },
    {
      "epoch": 0.4189723320158103,
      "grad_norm": 0.981887936592102,
      "learning_rate": 0.00018412535328311814,
      "loss": 0.3832,
      "step": 53
    },
    {
      "epoch": 0.4268774703557312,
      "grad_norm": 2.4076719284057617,
      "learning_rate": 0.00018341643596879367,
      "loss": 0.857,
      "step": 54
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 1.1959154605865479,
      "learning_rate": 0.0001826934609456129,
      "loss": 0.4592,
      "step": 55
    },
    {
      "epoch": 0.4426877470355731,
      "grad_norm": 0.5865926742553711,
      "learning_rate": 0.00018195655005254273,
      "loss": 0.1814,
      "step": 56
    },
    {
      "epoch": 0.4505928853754941,
      "grad_norm": 0.8474725484848022,
      "learning_rate": 0.00018120582747708502,
      "loss": 0.2844,
      "step": 57
    },
    {
      "epoch": 0.45849802371541504,
      "grad_norm": 1.0415821075439453,
      "learning_rate": 0.00018044141973434758,
      "loss": 0.3637,
      "step": 58
    },
    {
      "epoch": 0.466403162055336,
      "grad_norm": 1.8770055770874023,
      "learning_rate": 0.0001796634556457236,
      "loss": 0.2462,
      "step": 59
    },
    {
      "epoch": 0.4743083003952569,
      "grad_norm": 0.9714164733886719,
      "learning_rate": 0.00017887206631718203,
      "loss": 0.3059,
      "step": 60
    },
    {
      "epoch": 0.48221343873517786,
      "grad_norm": 0.9178167581558228,
      "learning_rate": 0.0001780673851171728,
      "loss": 0.4233,
      "step": 61
    },
    {
      "epoch": 0.4901185770750988,
      "grad_norm": 0.7472209930419922,
      "learning_rate": 0.00017724954765415137,
      "loss": 0.1977,
      "step": 62
    },
    {
      "epoch": 0.4980237154150198,
      "grad_norm": 0.7857463955879211,
      "learning_rate": 0.00017641869175372493,
      "loss": 0.2279,
      "step": 63
    },
    {
      "epoch": 0.5059288537549407,
      "grad_norm": 2.13358211517334,
      "learning_rate": 0.00017557495743542585,
      "loss": 0.4481,
      "step": 64
    },
    {
      "epoch": 0.5138339920948617,
      "grad_norm": 0.9242135882377625,
      "learning_rate": 0.00017471848688911464,
      "loss": 0.3843,
      "step": 65
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 0.7868145704269409,
      "learning_rate": 0.00017384942445101772,
      "loss": 0.1393,
      "step": 66
    },
    {
      "epoch": 0.5296442687747036,
      "grad_norm": 0.6896469593048096,
      "learning_rate": 0.000172967916579403,
      "loss": 0.3294,
      "step": 67
    },
    {
      "epoch": 0.5375494071146245,
      "grad_norm": 0.691632866859436,
      "learning_rate": 0.00017207411182989832,
      "loss": 0.2405,
      "step": 68
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.6870527267456055,
      "learning_rate": 0.00017116816083045602,
      "loss": 0.2379,
      "step": 69
    },
    {
      "epoch": 0.5533596837944664,
      "grad_norm": 0.6751531958580017,
      "learning_rate": 0.00017025021625596853,
      "loss": 0.2365,
      "step": 70
    },
    {
      "epoch": 0.5612648221343873,
      "grad_norm": 1.061668872833252,
      "learning_rate": 0.0001693204328025389,
      "loss": 0.5389,
      "step": 71
    },
    {
      "epoch": 0.5691699604743083,
      "grad_norm": 0.7164187431335449,
      "learning_rate": 0.0001683789671614107,
      "loss": 0.3441,
      "step": 72
    },
    {
      "epoch": 0.5770750988142292,
      "grad_norm": 1.1519713401794434,
      "learning_rate": 0.00016742597799256182,
      "loss": 0.2715,
      "step": 73
    },
    {
      "epoch": 0.5849802371541502,
      "grad_norm": 1.568219542503357,
      "learning_rate": 0.00016646162589796615,
      "loss": 0.3667,
      "step": 74
    },
    {
      "epoch": 0.5928853754940712,
      "grad_norm": 0.7082597017288208,
      "learning_rate": 0.00016548607339452853,
      "loss": 0.3271,
      "step": 75
    },
    {
      "epoch": 0.6007905138339921,
      "grad_norm": 0.7822222709655762,
      "learning_rate": 0.00016449948488669639,
      "loss": 0.395,
      "step": 76
    },
    {
      "epoch": 0.6086956521739131,
      "grad_norm": 0.70323646068573,
      "learning_rate": 0.00016350202663875386,
      "loss": 0.3593,
      "step": 77
    },
    {
      "epoch": 0.616600790513834,
      "grad_norm": 0.8593727350234985,
      "learning_rate": 0.00016249386674680184,
      "loss": 0.4751,
      "step": 78
    },
    {
      "epoch": 0.6245059288537549,
      "grad_norm": 0.8436718583106995,
      "learning_rate": 0.0001614751751104301,
      "loss": 0.2788,
      "step": 79
    },
    {
      "epoch": 0.6324110671936759,
      "grad_norm": 1.0489970445632935,
      "learning_rate": 0.00016044612340408466,
      "loss": 0.2804,
      "step": 80
    },
    {
      "epoch": 0.6403162055335968,
      "grad_norm": 0.4728718101978302,
      "learning_rate": 0.00015940688504813662,
      "loss": 0.1463,
      "step": 81
    },
    {
      "epoch": 0.6482213438735178,
      "grad_norm": 0.8874382972717285,
      "learning_rate": 0.00015835763517965673,
      "loss": 0.547,
      "step": 82
    },
    {
      "epoch": 0.6561264822134387,
      "grad_norm": 0.9559019804000854,
      "learning_rate": 0.00015729855062290022,
      "loss": 0.2586,
      "step": 83
    },
    {
      "epoch": 0.6640316205533597,
      "grad_norm": 1.4548382759094238,
      "learning_rate": 0.0001562298098595078,
      "loss": 0.4085,
      "step": 84
    },
    {
      "epoch": 0.6719367588932806,
      "grad_norm": 1.6104789972305298,
      "learning_rate": 0.00015515159299842707,
      "loss": 0.3712,
      "step": 85
    },
    {
      "epoch": 0.6798418972332015,
      "grad_norm": 0.7389092445373535,
      "learning_rate": 0.00015406408174555976,
      "loss": 0.2789,
      "step": 86
    },
    {
      "epoch": 0.6877470355731226,
      "grad_norm": 0.6817464232444763,
      "learning_rate": 0.00015296745937313987,
      "loss": 0.2501,
      "step": 87
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 0.9103575348854065,
      "learning_rate": 0.00015186191068884775,
      "loss": 0.5931,
      "step": 88
    },
    {
      "epoch": 0.7035573122529645,
      "grad_norm": 0.9242210388183594,
      "learning_rate": 0.00015074762200466556,
      "loss": 0.6431,
      "step": 89
    },
    {
      "epoch": 0.7114624505928854,
      "grad_norm": 0.9366337060928345,
      "learning_rate": 0.00014962478110547918,
      "loss": 0.4411,
      "step": 90
    },
    {
      "epoch": 0.7193675889328063,
      "grad_norm": 0.531251072883606,
      "learning_rate": 0.00014849357721743168,
      "loss": 0.1745,
      "step": 91
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 1.2808730602264404,
      "learning_rate": 0.0001473542009760343,
      "loss": 0.4046,
      "step": 92
    },
    {
      "epoch": 0.7351778656126482,
      "grad_norm": 1.19551682472229,
      "learning_rate": 0.00014620684439403962,
      "loss": 0.4524,
      "step": 93
    },
    {
      "epoch": 0.7430830039525692,
      "grad_norm": 0.8160001039505005,
      "learning_rate": 0.0001450517008290827,
      "loss": 0.2237,
      "step": 94
    },
    {
      "epoch": 0.7509881422924901,
      "grad_norm": 0.5796943306922913,
      "learning_rate": 0.0001438889649510956,
      "loss": 0.2322,
      "step": 95
    },
    {
      "epoch": 0.758893280632411,
      "grad_norm": 0.6521222591400146,
      "learning_rate": 0.00014271883270950073,
      "loss": 0.2682,
      "step": 96
    },
    {
      "epoch": 0.766798418972332,
      "grad_norm": 1.001668095588684,
      "learning_rate": 0.00014154150130018866,
      "loss": 0.3352,
      "step": 97
    },
    {
      "epoch": 0.7747035573122529,
      "grad_norm": 0.8112585544586182,
      "learning_rate": 0.00014035716913228568,
      "loss": 0.173,
      "step": 98
    },
    {
      "epoch": 0.782608695652174,
      "grad_norm": 1.2103630304336548,
      "learning_rate": 0.00013916603579471705,
      "loss": 1.0565,
      "step": 99
    },
    {
      "epoch": 0.7905138339920948,
      "grad_norm": 1.0214811563491821,
      "learning_rate": 0.0001379683020225714,
      "loss": 0.3749,
      "step": 100
    },
    {
      "epoch": 0.7984189723320159,
      "grad_norm": 0.6681635975837708,
      "learning_rate": 0.000136764169663272,
      "loss": 0.1641,
      "step": 101
    },
    {
      "epoch": 0.8063241106719368,
      "grad_norm": 0.9718304872512817,
      "learning_rate": 0.00013555384164256048,
      "loss": 0.1989,
      "step": 102
    },
    {
      "epoch": 0.8142292490118577,
      "grad_norm": 0.5994592905044556,
      "learning_rate": 0.00013433752193029886,
      "loss": 0.2865,
      "step": 103
    },
    {
      "epoch": 0.8221343873517787,
      "grad_norm": 0.7605760097503662,
      "learning_rate": 0.00013311541550609565,
      "loss": 0.4609,
      "step": 104
    },
    {
      "epoch": 0.8300395256916996,
      "grad_norm": 0.4863194227218628,
      "learning_rate": 0.00013188772832476188,
      "loss": 0.1356,
      "step": 105
    },
    {
      "epoch": 0.8379446640316206,
      "grad_norm": 0.4657331705093384,
      "learning_rate": 0.00013065466728160252,
      "loss": 0.1501,
      "step": 106
    },
    {
      "epoch": 0.8458498023715415,
      "grad_norm": 0.6451147794723511,
      "learning_rate": 0.00012941644017754964,
      "loss": 0.3072,
      "step": 107
    },
    {
      "epoch": 0.8537549407114624,
      "grad_norm": 0.7699165344238281,
      "learning_rate": 0.00012817325568414297,
      "loss": 0.4145,
      "step": 108
    },
    {
      "epoch": 0.8616600790513834,
      "grad_norm": 1.160872459411621,
      "learning_rate": 0.00012692532330836346,
      "loss": 0.2784,
      "step": 109
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 0.9029427766799927,
      "learning_rate": 0.00012567285335732633,
      "loss": 0.3273,
      "step": 110
    },
    {
      "epoch": 0.8774703557312253,
      "grad_norm": 0.7525882720947266,
      "learning_rate": 0.00012441605690283915,
      "loss": 0.2811,
      "step": 111
    },
    {
      "epoch": 0.8853754940711462,
      "grad_norm": 0.7249767184257507,
      "learning_rate": 0.00012315514574583113,
      "loss": 0.2133,
      "step": 112
    },
    {
      "epoch": 0.8932806324110671,
      "grad_norm": 2.000819683074951,
      "learning_rate": 0.0001218903323806595,
      "loss": 0.4112,
      "step": 113
    },
    {
      "epoch": 0.9011857707509882,
      "grad_norm": 0.751440703868866,
      "learning_rate": 0.00012062182995929882,
      "loss": 0.3087,
      "step": 114
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.0243513584136963,
      "learning_rate": 0.00011934985225541998,
      "loss": 0.4851,
      "step": 115
    },
    {
      "epoch": 0.9169960474308301,
      "grad_norm": 0.7862813472747803,
      "learning_rate": 0.0001180746136283638,
      "loss": 0.3612,
      "step": 116
    },
    {
      "epoch": 0.924901185770751,
      "grad_norm": 1.0955418348312378,
      "learning_rate": 0.00011679632898701649,
      "loss": 0.3377,
      "step": 117
    },
    {
      "epoch": 0.932806324110672,
      "grad_norm": 0.8309126496315002,
      "learning_rate": 0.00011551521375359206,
      "loss": 0.282,
      "step": 118
    },
    {
      "epoch": 0.9407114624505929,
      "grad_norm": 1.5201857089996338,
      "learning_rate": 0.00011423148382732853,
      "loss": 0.2581,
      "step": 119
    },
    {
      "epoch": 0.9486166007905138,
      "grad_norm": 0.6001629829406738,
      "learning_rate": 0.00011294535554810354,
      "loss": 0.2002,
      "step": 120
    },
    {
      "epoch": 0.9565217391304348,
      "grad_norm": 0.5865480899810791,
      "learning_rate": 0.00011165704565997593,
      "loss": 0.137,
      "step": 121
    },
    {
      "epoch": 0.9644268774703557,
      "grad_norm": 0.8828125596046448,
      "learning_rate": 0.00011036677127465889,
      "loss": 0.3052,
      "step": 122
    },
    {
      "epoch": 0.9723320158102767,
      "grad_norm": 0.6774571537971497,
      "learning_rate": 0.00010907474983493144,
      "loss": 0.4175,
      "step": 123
    },
    {
      "epoch": 0.9802371541501976,
      "grad_norm": 0.9369196891784668,
      "learning_rate": 0.00010778119907799398,
      "loss": 0.4036,
      "step": 124
    },
    {
      "epoch": 0.9881422924901185,
      "grad_norm": 0.9793416261672974,
      "learning_rate": 0.0001064863369987743,
      "loss": 0.2515,
      "step": 125
    },
    {
      "epoch": 0.9960474308300395,
      "grad_norm": 1.6688363552093506,
      "learning_rate": 0.00010519038181318999,
      "loss": 0.2686,
      "step": 126
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.589106559753418,
      "learning_rate": 0.00010389355192137377,
      "loss": 0.3921,
      "step": 127
    },
    {
      "epoch": 1.007905138339921,
      "grad_norm": 0.6045113205909729,
      "learning_rate": 0.00010259606587086783,
      "loss": 0.2638,
      "step": 128
    },
    {
      "epoch": 1.0158102766798418,
      "grad_norm": 0.6287718415260315,
      "learning_rate": 0.0001012981423197931,
      "loss": 0.3818,
      "step": 129
    },
    {
      "epoch": 1.023715415019763,
      "grad_norm": 0.6705343723297119,
      "learning_rate": 0.0001,
      "loss": 0.1436,
      "step": 130
    },
    {
      "epoch": 1.0316205533596838,
      "grad_norm": 0.7970353960990906,
      "learning_rate": 9.870185768020693e-05,
      "loss": 0.1986,
      "step": 131
    },
    {
      "epoch": 1.0395256916996047,
      "grad_norm": 1.4546546936035156,
      "learning_rate": 9.740393412913219e-05,
      "loss": 0.3551,
      "step": 132
    },
    {
      "epoch": 1.0474308300395256,
      "grad_norm": 0.5461708307266235,
      "learning_rate": 9.610644807862625e-05,
      "loss": 0.2015,
      "step": 133
    },
    {
      "epoch": 1.0553359683794465,
      "grad_norm": 1.8389110565185547,
      "learning_rate": 9.480961818681004e-05,
      "loss": 0.2769,
      "step": 134
    },
    {
      "epoch": 1.0632411067193677,
      "grad_norm": 0.6100041270256042,
      "learning_rate": 9.35136630012257e-05,
      "loss": 0.1516,
      "step": 135
    },
    {
      "epoch": 1.0711462450592886,
      "grad_norm": 0.5065872669219971,
      "learning_rate": 9.221880092200601e-05,
      "loss": 0.1945,
      "step": 136
    },
    {
      "epoch": 1.0790513833992095,
      "grad_norm": 1.9398545026779175,
      "learning_rate": 9.092525016506858e-05,
      "loss": 0.2246,
      "step": 137
    },
    {
      "epoch": 1.0869565217391304,
      "grad_norm": 0.6197378635406494,
      "learning_rate": 8.963322872534114e-05,
      "loss": 0.1768,
      "step": 138
    },
    {
      "epoch": 1.0948616600790513,
      "grad_norm": 1.256683349609375,
      "learning_rate": 8.83429543400241e-05,
      "loss": 0.3742,
      "step": 139
    },
    {
      "epoch": 1.1027667984189724,
      "grad_norm": 0.8401638865470886,
      "learning_rate": 8.705464445189647e-05,
      "loss": 0.1974,
      "step": 140
    },
    {
      "epoch": 1.1106719367588933,
      "grad_norm": 0.7496947646141052,
      "learning_rate": 8.57685161726715e-05,
      "loss": 0.3825,
      "step": 141
    },
    {
      "epoch": 1.1185770750988142,
      "grad_norm": 1.0039342641830444,
      "learning_rate": 8.448478624640797e-05,
      "loss": 0.1535,
      "step": 142
    },
    {
      "epoch": 1.1264822134387351,
      "grad_norm": 0.5387095808982849,
      "learning_rate": 8.320367101298351e-05,
      "loss": 0.2106,
      "step": 143
    },
    {
      "epoch": 1.1343873517786562,
      "grad_norm": 0.5525783896446228,
      "learning_rate": 8.192538637163621e-05,
      "loss": 0.169,
      "step": 144
    },
    {
      "epoch": 1.1422924901185771,
      "grad_norm": 0.6670098304748535,
      "learning_rate": 8.065014774458003e-05,
      "loss": 0.1897,
      "step": 145
    },
    {
      "epoch": 1.150197628458498,
      "grad_norm": 0.701342761516571,
      "learning_rate": 7.93781700407012e-05,
      "loss": 0.1784,
      "step": 146
    },
    {
      "epoch": 1.158102766798419,
      "grad_norm": 0.52852463722229,
      "learning_rate": 7.810966761934053e-05,
      "loss": 0.1712,
      "step": 147
    },
    {
      "epoch": 1.1660079051383399,
      "grad_norm": 0.5911440849304199,
      "learning_rate": 7.684485425416888e-05,
      "loss": 0.1378,
      "step": 148
    },
    {
      "epoch": 1.1739130434782608,
      "grad_norm": 0.7355263829231262,
      "learning_rate": 7.558394309716088e-05,
      "loss": 0.1088,
      "step": 149
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.5206398367881775,
      "learning_rate": 7.432714664267373e-05,
      "loss": 0.1724,
      "step": 150
    },
    {
      "epoch": 1.1897233201581028,
      "grad_norm": 0.5356054902076721,
      "learning_rate": 7.307467669163655e-05,
      "loss": 0.1792,
      "step": 151
    },
    {
      "epoch": 1.1976284584980237,
      "grad_norm": 0.5152002573013306,
      "learning_rate": 7.182674431585704e-05,
      "loss": 0.2179,
      "step": 152
    },
    {
      "epoch": 1.2055335968379446,
      "grad_norm": 0.6617997288703918,
      "learning_rate": 7.058355982245037e-05,
      "loss": 0.1654,
      "step": 153
    },
    {
      "epoch": 1.2134387351778657,
      "grad_norm": 1.776577115058899,
      "learning_rate": 6.934533271839752e-05,
      "loss": 0.3008,
      "step": 154
    },
    {
      "epoch": 1.2213438735177866,
      "grad_norm": 0.5596933364868164,
      "learning_rate": 6.811227167523815e-05,
      "loss": 0.3617,
      "step": 155
    },
    {
      "epoch": 1.2292490118577075,
      "grad_norm": 0.5993364453315735,
      "learning_rate": 6.688458449390437e-05,
      "loss": 0.244,
      "step": 156
    },
    {
      "epoch": 1.2371541501976284,
      "grad_norm": 0.9282941818237305,
      "learning_rate": 6.566247806970119e-05,
      "loss": 0.2579,
      "step": 157
    },
    {
      "epoch": 1.2450592885375493,
      "grad_norm": 0.677804172039032,
      "learning_rate": 6.444615835743955e-05,
      "loss": 0.2758,
      "step": 158
    },
    {
      "epoch": 1.2529644268774702,
      "grad_norm": 3.5637006759643555,
      "learning_rate": 6.323583033672799e-05,
      "loss": 0.2121,
      "step": 159
    },
    {
      "epoch": 1.2608695652173914,
      "grad_norm": 0.7661057710647583,
      "learning_rate": 6.203169797742861e-05,
      "loss": 0.3532,
      "step": 160
    },
    {
      "epoch": 1.2687747035573123,
      "grad_norm": 1.0855592489242554,
      "learning_rate": 6.083396420528298e-05,
      "loss": 0.1676,
      "step": 161
    },
    {
      "epoch": 1.2766798418972332,
      "grad_norm": 1.2014670372009277,
      "learning_rate": 5.964283086771435e-05,
      "loss": 0.3823,
      "step": 162
    },
    {
      "epoch": 1.2845849802371543,
      "grad_norm": 1.5054898262023926,
      "learning_rate": 5.845849869981137e-05,
      "loss": 0.1683,
      "step": 163
    },
    {
      "epoch": 1.2924901185770752,
      "grad_norm": 1.071014404296875,
      "learning_rate": 5.728116729049928e-05,
      "loss": 0.6714,
      "step": 164
    },
    {
      "epoch": 1.300395256916996,
      "grad_norm": 0.34447920322418213,
      "learning_rate": 5.611103504890444e-05,
      "loss": 0.0817,
      "step": 165
    },
    {
      "epoch": 1.308300395256917,
      "grad_norm": 0.7190909385681152,
      "learning_rate": 5.4948299170917325e-05,
      "loss": 0.2317,
      "step": 166
    },
    {
      "epoch": 1.316205533596838,
      "grad_norm": 1.5183690786361694,
      "learning_rate": 5.379315560596038e-05,
      "loss": 0.2293,
      "step": 167
    },
    {
      "epoch": 1.3241106719367588,
      "grad_norm": 1.6111899614334106,
      "learning_rate": 5.26457990239657e-05,
      "loss": 0.1938,
      "step": 168
    },
    {
      "epoch": 1.33201581027668,
      "grad_norm": 1.1006243228912354,
      "learning_rate": 5.1506422782568345e-05,
      "loss": 0.2764,
      "step": 169
    },
    {
      "epoch": 1.3399209486166008,
      "grad_norm": 0.48925891518592834,
      "learning_rate": 5.0375218894520834e-05,
      "loss": 0.0881,
      "step": 170
    },
    {
      "epoch": 1.3478260869565217,
      "grad_norm": 1.2127503156661987,
      "learning_rate": 4.9252377995334444e-05,
      "loss": 0.1581,
      "step": 171
    },
    {
      "epoch": 1.3557312252964426,
      "grad_norm": 0.70034259557724,
      "learning_rate": 4.813808931115228e-05,
      "loss": 0.1765,
      "step": 172
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.9129422307014465,
      "learning_rate": 4.703254062686017e-05,
      "loss": 0.4467,
      "step": 173
    },
    {
      "epoch": 1.3715415019762847,
      "grad_norm": 1.0346431732177734,
      "learning_rate": 4.593591825444028e-05,
      "loss": 0.324,
      "step": 174
    },
    {
      "epoch": 1.3794466403162056,
      "grad_norm": 1.1604621410369873,
      "learning_rate": 4.484840700157295e-05,
      "loss": 0.2579,
      "step": 175
    },
    {
      "epoch": 1.3873517786561265,
      "grad_norm": 0.5738621354103088,
      "learning_rate": 4.377019014049223e-05,
      "loss": 0.1351,
      "step": 176
    },
    {
      "epoch": 1.3952569169960474,
      "grad_norm": 0.8380118608474731,
      "learning_rate": 4.270144937709981e-05,
      "loss": 0.1808,
      "step": 177
    },
    {
      "epoch": 1.4031620553359683,
      "grad_norm": 0.9386733770370483,
      "learning_rate": 4.164236482034327e-05,
      "loss": 0.2689,
      "step": 178
    },
    {
      "epoch": 1.4110671936758894,
      "grad_norm": 0.451817125082016,
      "learning_rate": 4.059311495186338e-05,
      "loss": 0.1063,
      "step": 179
    },
    {
      "epoch": 1.4189723320158103,
      "grad_norm": 0.9198683500289917,
      "learning_rate": 3.9553876595915375e-05,
      "loss": 0.0927,
      "step": 180
    },
    {
      "epoch": 1.4268774703557312,
      "grad_norm": 0.5517873167991638,
      "learning_rate": 3.852482488956992e-05,
      "loss": 0.1326,
      "step": 181
    },
    {
      "epoch": 1.434782608695652,
      "grad_norm": 2.099898338317871,
      "learning_rate": 3.750613325319817e-05,
      "loss": 0.2189,
      "step": 182
    },
    {
      "epoch": 1.4426877470355732,
      "grad_norm": 0.5490242838859558,
      "learning_rate": 3.649797336124615e-05,
      "loss": 0.1408,
      "step": 183
    },
    {
      "epoch": 1.4505928853754941,
      "grad_norm": 0.6548390984535217,
      "learning_rate": 3.550051511330361e-05,
      "loss": 0.2264,
      "step": 184
    },
    {
      "epoch": 1.458498023715415,
      "grad_norm": 1.2147459983825684,
      "learning_rate": 3.45139266054715e-05,
      "loss": 0.2715,
      "step": 185
    },
    {
      "epoch": 1.466403162055336,
      "grad_norm": 0.9193414449691772,
      "learning_rate": 3.3538374102033866e-05,
      "loss": 0.1903,
      "step": 186
    },
    {
      "epoch": 1.4743083003952568,
      "grad_norm": 0.8377665281295776,
      "learning_rate": 3.257402200743821e-05,
      "loss": 0.1726,
      "step": 187
    },
    {
      "epoch": 1.4822134387351777,
      "grad_norm": 1.05824613571167,
      "learning_rate": 3.1621032838589305e-05,
      "loss": 0.1822,
      "step": 188
    },
    {
      "epoch": 1.4901185770750989,
      "grad_norm": 0.8356701731681824,
      "learning_rate": 3.0679567197461134e-05,
      "loss": 0.289,
      "step": 189
    },
    {
      "epoch": 1.4980237154150198,
      "grad_norm": 1.1289795637130737,
      "learning_rate": 2.974978374403147e-05,
      "loss": 0.25,
      "step": 190
    },
    {
      "epoch": 1.5059288537549407,
      "grad_norm": 0.7377334237098694,
      "learning_rate": 2.8831839169543996e-05,
      "loss": 0.2193,
      "step": 191
    },
    {
      "epoch": 1.5138339920948618,
      "grad_norm": 0.6371282935142517,
      "learning_rate": 2.7925888170101665e-05,
      "loss": 0.0678,
      "step": 192
    },
    {
      "epoch": 1.5217391304347827,
      "grad_norm": 0.5883315801620483,
      "learning_rate": 2.7032083420597e-05,
      "loss": 0.0967,
      "step": 193
    },
    {
      "epoch": 1.5296442687747036,
      "grad_norm": 0.9382833242416382,
      "learning_rate": 2.6150575548982292e-05,
      "loss": 0.297,
      "step": 194
    },
    {
      "epoch": 1.5375494071146245,
      "grad_norm": 0.8458752036094666,
      "learning_rate": 2.528151311088537e-05,
      "loss": 0.1853,
      "step": 195
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.5695004463195801,
      "learning_rate": 2.4425042564574184e-05,
      "loss": 0.1407,
      "step": 196
    },
    {
      "epoch": 1.5533596837944663,
      "grad_norm": 0.46086326241493225,
      "learning_rate": 2.3581308246275103e-05,
      "loss": 0.1084,
      "step": 197
    },
    {
      "epoch": 1.5612648221343872,
      "grad_norm": 0.9662849307060242,
      "learning_rate": 2.2750452345848682e-05,
      "loss": 0.2847,
      "step": 198
    },
    {
      "epoch": 1.5691699604743083,
      "grad_norm": 0.799304723739624,
      "learning_rate": 2.1932614882827197e-05,
      "loss": 0.189,
      "step": 199
    },
    {
      "epoch": 1.5770750988142292,
      "grad_norm": 0.6747804880142212,
      "learning_rate": 2.112793368281799e-05,
      "loss": 0.1797,
      "step": 200
    },
    {
      "epoch": 1.5849802371541502,
      "grad_norm": 0.5679678320884705,
      "learning_rate": 2.03365443542764e-05,
      "loss": 0.1284,
      "step": 201
    },
    {
      "epoch": 1.5928853754940713,
      "grad_norm": 0.669952929019928,
      "learning_rate": 1.9558580265652448e-05,
      "loss": 0.2349,
      "step": 202
    },
    {
      "epoch": 1.6007905138339922,
      "grad_norm": 0.5827800035476685,
      "learning_rate": 1.879417252291502e-05,
      "loss": 0.1668,
      "step": 203
    },
    {
      "epoch": 1.608695652173913,
      "grad_norm": 0.8047837018966675,
      "learning_rate": 1.804344994745727e-05,
      "loss": 0.1244,
      "step": 204
    },
    {
      "epoch": 1.616600790513834,
      "grad_norm": 0.7034929990768433,
      "learning_rate": 1.730653905438714e-05,
      "loss": 0.2241,
      "step": 205
    },
    {
      "epoch": 1.6245059288537549,
      "grad_norm": 0.9926624894142151,
      "learning_rate": 1.6583564031206357e-05,
      "loss": 0.1807,
      "step": 206
    },
    {
      "epoch": 1.6324110671936758,
      "grad_norm": 0.7195786833763123,
      "learning_rate": 1.587464671688187e-05,
      "loss": 0.2133,
      "step": 207
    },
    {
      "epoch": 1.6403162055335967,
      "grad_norm": 0.6933628916740417,
      "learning_rate": 1.5179906581313064e-05,
      "loss": 0.1928,
      "step": 208
    },
    {
      "epoch": 1.6482213438735178,
      "grad_norm": 1.1264605522155762,
      "learning_rate": 1.4499460705197998e-05,
      "loss": 0.3693,
      "step": 209
    },
    {
      "epoch": 1.6561264822134387,
      "grad_norm": 0.7283610701560974,
      "learning_rate": 1.3833423760302611e-05,
      "loss": 0.1677,
      "step": 210
    },
    {
      "epoch": 1.6640316205533598,
      "grad_norm": 1.039823055267334,
      "learning_rate": 1.3181907990135622e-05,
      "loss": 0.1639,
      "step": 211
    },
    {
      "epoch": 1.6719367588932808,
      "grad_norm": 0.933429479598999,
      "learning_rate": 1.2545023191032801e-05,
      "loss": 0.1798,
      "step": 212
    },
    {
      "epoch": 1.6798418972332017,
      "grad_norm": 1.0857698917388916,
      "learning_rate": 1.1922876693653585e-05,
      "loss": 0.2107,
      "step": 213
    },
    {
      "epoch": 1.6877470355731226,
      "grad_norm": 1.2482712268829346,
      "learning_rate": 1.131557334489326e-05,
      "loss": 0.2459,
      "step": 214
    },
    {
      "epoch": 1.6956521739130435,
      "grad_norm": 0.8419239521026611,
      "learning_rate": 1.0723215490213634e-05,
      "loss": 0.2451,
      "step": 215
    },
    {
      "epoch": 1.7035573122529644,
      "grad_norm": 0.8410497903823853,
      "learning_rate": 1.0145902956395447e-05,
      "loss": 0.2175,
      "step": 216
    },
    {
      "epoch": 1.7114624505928853,
      "grad_norm": 1.390134334564209,
      "learning_rate": 9.583733034714981e-06,
      "loss": 0.4648,
      "step": 217
    },
    {
      "epoch": 1.7193675889328062,
      "grad_norm": 0.5143908262252808,
      "learning_rate": 9.036800464548157e-06,
      "loss": 0.112,
      "step": 218
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.6791729927062988,
      "learning_rate": 8.505197417404687e-06,
      "loss": 0.1596,
      "step": 219
    },
    {
      "epoch": 1.7351778656126482,
      "grad_norm": 0.794528603553772,
      "learning_rate": 7.989013481394814e-06,
      "loss": 0.3435,
      "step": 220
    },
    {
      "epoch": 1.7430830039525693,
      "grad_norm": 0.7834582328796387,
      "learning_rate": 7.488335646131628e-06,
      "loss": 0.2931,
      "step": 221
    },
    {
      "epoch": 1.7509881422924902,
      "grad_norm": 2.8932089805603027,
      "learning_rate": 7.003248288071118e-06,
      "loss": 0.3275,
      "step": 222
    },
    {
      "epoch": 1.7588932806324111,
      "grad_norm": 0.5584789514541626,
      "learning_rate": 6.533833156292679e-06,
      "loss": 0.1267,
      "step": 223
    },
    {
      "epoch": 1.766798418972332,
      "grad_norm": 0.9573125839233398,
      "learning_rate": 6.08016935872251e-06,
      "loss": 0.215,
      "step": 224
    },
    {
      "epoch": 1.774703557312253,
      "grad_norm": 0.8806841969490051,
      "learning_rate": 5.6423333488018095e-06,
      "loss": 0.3208,
      "step": 225
    },
    {
      "epoch": 1.7826086956521738,
      "grad_norm": 0.49562156200408936,
      "learning_rate": 5.22039891260262e-06,
      "loss": 0.093,
      "step": 226
    },
    {
      "epoch": 1.7905138339920947,
      "grad_norm": 0.5696113705635071,
      "learning_rate": 4.8144371563930476e-06,
      "loss": 0.1829,
      "step": 227
    },
    {
      "epoch": 1.7984189723320159,
      "grad_norm": 0.7890591621398926,
      "learning_rate": 4.424516494654118e-06,
      "loss": 0.1888,
      "step": 228
    },
    {
      "epoch": 1.8063241106719368,
      "grad_norm": 1.111238718032837,
      "learning_rate": 4.050702638550275e-06,
      "loss": 0.2312,
      "step": 229
    },
    {
      "epoch": 1.8142292490118577,
      "grad_norm": 1.0172678232192993,
      "learning_rate": 3.693058584855369e-06,
      "loss": 0.4459,
      "step": 230
    },
    {
      "epoch": 1.8221343873517788,
      "grad_norm": 0.6409620642662048,
      "learning_rate": 3.3516446053363015e-06,
      "loss": 0.1838,
      "step": 231
    },
    {
      "epoch": 1.8300395256916997,
      "grad_norm": 1.010557770729065,
      "learning_rate": 3.026518236595621e-06,
      "loss": 0.1648,
      "step": 232
    },
    {
      "epoch": 1.8379446640316206,
      "grad_norm": 0.7010674476623535,
      "learning_rate": 2.717734270375272e-06,
      "loss": 0.1565,
      "step": 233
    },
    {
      "epoch": 1.8458498023715415,
      "grad_norm": 0.8774541616439819,
      "learning_rate": 2.4253447443228106e-06,
      "loss": 0.2482,
      "step": 234
    },
    {
      "epoch": 1.8537549407114624,
      "grad_norm": 1.4902609586715698,
      "learning_rate": 2.1493989332218468e-06,
      "loss": 0.6009,
      "step": 235
    },
    {
      "epoch": 1.8616600790513833,
      "grad_norm": 0.8265174627304077,
      "learning_rate": 1.8899433406879608e-06,
      "loss": 0.1213,
      "step": 236
    },
    {
      "epoch": 1.8695652173913042,
      "grad_norm": 2.709406852722168,
      "learning_rate": 1.6470216913317626e-06,
      "loss": 0.1897,
      "step": 237
    },
    {
      "epoch": 1.8774703557312253,
      "grad_norm": 0.6855682730674744,
      "learning_rate": 1.4206749233902084e-06,
      "loss": 0.1564,
      "step": 238
    },
    {
      "epoch": 1.8853754940711462,
      "grad_norm": 0.5468905568122864,
      "learning_rate": 1.2109411818274852e-06,
      "loss": 0.1241,
      "step": 239
    },
    {
      "epoch": 1.8932806324110671,
      "grad_norm": 0.5691235661506653,
      "learning_rate": 1.0178558119067315e-06,
      "loss": 0.1394,
      "step": 240
    },
    {
      "epoch": 1.9011857707509883,
      "grad_norm": 0.8554819822311401,
      "learning_rate": 8.41451353233369e-07,
      "loss": 0.2113,
      "step": 241
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 1.5547891855239868,
      "learning_rate": 6.817575342714988e-07,
      "loss": 0.4036,
      "step": 242
    },
    {
      "epoch": 1.91699604743083,
      "grad_norm": 1.505486249923706,
      "learning_rate": 5.388012673338661e-07,
      "loss": 0.2284,
      "step": 243
    },
    {
      "epoch": 1.924901185770751,
      "grad_norm": 0.9467947483062744,
      "learning_rate": 4.126066440464982e-07,
      "loss": 0.3383,
      "step": 244
    },
    {
      "epoch": 1.9328063241106719,
      "grad_norm": 1.066933274269104,
      "learning_rate": 3.0319493128866396e-07,
      "loss": 0.3404,
      "step": 245
    },
    {
      "epoch": 1.9407114624505928,
      "grad_norm": 1.2586652040481567,
      "learning_rate": 2.1058456760891798e-07,
      "loss": 0.2501,
      "step": 246
    },
    {
      "epoch": 1.9486166007905137,
      "grad_norm": 1.629897117614746,
      "learning_rate": 1.3479116011769767e-07,
      "loss": 0.3085,
      "step": 247
    },
    {
      "epoch": 1.9565217391304348,
      "grad_norm": 0.8343656063079834,
      "learning_rate": 7.582748185719358e-08,
      "loss": 0.3449,
      "step": 248
    },
    {
      "epoch": 1.9644268774703557,
      "grad_norm": 1.270969271659851,
      "learning_rate": 3.370346964876036e-08,
      "loss": 0.4198,
      "step": 249
    },
    {
      "epoch": 1.9723320158102768,
      "grad_norm": 1.3988014459609985,
      "learning_rate": 8.426222418311814e-09,
      "loss": 0.619,
      "step": 250
    }
  ],
  "logging_steps": 1,
  "max_steps": 250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 250,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7931403009625536.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}