{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9943851768669285,
  "eval_steps": 500,
  "global_step": 666,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004491858506457047,
      "grad_norm": 5.744756976351124,
      "learning_rate": 8.443065671641791e-07,
      "loss": 0.8286,
      "step": 1
    },
    {
      "epoch": 0.008983717012914094,
      "grad_norm": 5.938422657412818,
      "learning_rate": 1.6886131343283583e-06,
      "loss": 0.8731,
      "step": 2
    },
    {
      "epoch": 0.01347557551937114,
      "grad_norm": 5.751270467988343,
      "learning_rate": 2.532919701492537e-06,
      "loss": 0.8539,
      "step": 3
    },
    {
      "epoch": 0.017967434025828188,
      "grad_norm": 5.383642484284944,
      "learning_rate": 3.3772262686567165e-06,
      "loss": 0.8525,
      "step": 4
    },
    {
      "epoch": 0.022459292532285232,
      "grad_norm": 4.292764513211965,
      "learning_rate": 4.221532835820895e-06,
      "loss": 0.8241,
      "step": 5
    },
    {
      "epoch": 0.02695115103874228,
      "grad_norm": 2.3074551710755915,
      "learning_rate": 5.065839402985074e-06,
      "loss": 0.7623,
      "step": 6
    },
    {
      "epoch": 0.031443009545199324,
      "grad_norm": 1.9368080633198215,
      "learning_rate": 5.910145970149254e-06,
      "loss": 0.7558,
      "step": 7
    },
    {
      "epoch": 0.035934868051656375,
      "grad_norm": 3.8274856425800583,
      "learning_rate": 6.754452537313433e-06,
      "loss": 0.7472,
      "step": 8
    },
    {
      "epoch": 0.04042672655811342,
      "grad_norm": 4.048665921995214,
      "learning_rate": 7.598759104477612e-06,
      "loss": 0.7507,
      "step": 9
    },
    {
      "epoch": 0.044918585064570464,
      "grad_norm": 4.093114995573489,
      "learning_rate": 8.44306567164179e-06,
      "loss": 0.7386,
      "step": 10
    },
    {
      "epoch": 0.049410443571027515,
      "grad_norm": 3.759064332390729,
      "learning_rate": 9.287372238805971e-06,
      "loss": 0.7097,
      "step": 11
    },
    {
      "epoch": 0.05390230207748456,
      "grad_norm": 3.1922838401313083,
      "learning_rate": 1.0131678805970148e-05,
      "loss": 0.6956,
      "step": 12
    },
    {
      "epoch": 0.058394160583941604,
      "grad_norm": 1.9827014067944742,
      "learning_rate": 1.0975985373134329e-05,
      "loss": 0.6712,
      "step": 13
    },
    {
      "epoch": 0.06288601909039865,
      "grad_norm": 1.719440262304918,
      "learning_rate": 1.1820291940298508e-05,
      "loss": 0.6589,
      "step": 14
    },
    {
      "epoch": 0.06737787759685569,
      "grad_norm": 2.2315024537300276,
      "learning_rate": 1.2664598507462687e-05,
      "loss": 0.6446,
      "step": 15
    },
    {
      "epoch": 0.07186973610331275,
      "grad_norm": 1.7844279864274228,
      "learning_rate": 1.3508905074626866e-05,
      "loss": 0.6383,
      "step": 16
    },
    {
      "epoch": 0.0763615946097698,
      "grad_norm": 1.1269775645272213,
      "learning_rate": 1.4353211641791045e-05,
      "loss": 0.6404,
      "step": 17
    },
    {
      "epoch": 0.08085345311622684,
      "grad_norm": 1.4281568237358104,
      "learning_rate": 1.5197518208955224e-05,
      "loss": 0.6213,
      "step": 18
    },
    {
      "epoch": 0.08534531162268388,
      "grad_norm": 1.3117040397695268,
      "learning_rate": 1.60418247761194e-05,
      "loss": 0.6102,
      "step": 19
    },
    {
      "epoch": 0.08983717012914093,
      "grad_norm": 0.9395353461086697,
      "learning_rate": 1.688613134328358e-05,
      "loss": 0.6132,
      "step": 20
    },
    {
      "epoch": 0.09432902863559797,
      "grad_norm": 0.9267885545655482,
      "learning_rate": 1.7730437910447763e-05,
      "loss": 0.5945,
      "step": 21
    },
    {
      "epoch": 0.09882088714205503,
      "grad_norm": 0.8837742532373003,
      "learning_rate": 1.8574744477611942e-05,
      "loss": 0.5901,
      "step": 22
    },
    {
      "epoch": 0.10331274564851207,
      "grad_norm": 0.7972159083144742,
      "learning_rate": 1.941905104477612e-05,
      "loss": 0.5878,
      "step": 23
    },
    {
      "epoch": 0.10780460415496912,
      "grad_norm": 0.7812772147104998,
      "learning_rate": 2.0263357611940297e-05,
      "loss": 0.5775,
      "step": 24
    },
    {
      "epoch": 0.11229646266142616,
      "grad_norm": 0.6789600343421683,
      "learning_rate": 2.110766417910448e-05,
      "loss": 0.578,
      "step": 25
    },
    {
      "epoch": 0.11678832116788321,
      "grad_norm": 0.6909378202503645,
      "learning_rate": 2.1951970746268658e-05,
      "loss": 0.557,
      "step": 26
    },
    {
      "epoch": 0.12128017967434025,
      "grad_norm": 0.6357434761696933,
      "learning_rate": 2.2796277313432837e-05,
      "loss": 0.5658,
      "step": 27
    },
    {
      "epoch": 0.1257720381807973,
      "grad_norm": 0.8250619519181291,
      "learning_rate": 2.3640583880597016e-05,
      "loss": 0.5643,
      "step": 28
    },
    {
      "epoch": 0.13026389668725435,
      "grad_norm": 0.6726831095671596,
      "learning_rate": 2.4484890447761192e-05,
      "loss": 0.5617,
      "step": 29
    },
    {
      "epoch": 0.13475575519371139,
      "grad_norm": 0.6222076805018789,
      "learning_rate": 2.5329197014925374e-05,
      "loss": 0.561,
      "step": 30
    },
    {
      "epoch": 0.13924761370016844,
      "grad_norm": 0.5358329353005232,
      "learning_rate": 2.6173503582089553e-05,
      "loss": 0.5606,
      "step": 31
    },
    {
      "epoch": 0.1437394722066255,
      "grad_norm": 0.7020789289583306,
      "learning_rate": 2.7017810149253732e-05,
      "loss": 0.5518,
      "step": 32
    },
    {
      "epoch": 0.14823133071308253,
      "grad_norm": 0.6564127887009266,
      "learning_rate": 2.786211671641791e-05,
      "loss": 0.5387,
      "step": 33
    },
    {
      "epoch": 0.1527231892195396,
      "grad_norm": 0.8587738091344578,
      "learning_rate": 2.870642328358209e-05,
      "loss": 0.5465,
      "step": 34
    },
    {
      "epoch": 0.15721504772599662,
      "grad_norm": 0.808097414835022,
      "learning_rate": 2.9550729850746266e-05,
      "loss": 0.5418,
      "step": 35
    },
    {
      "epoch": 0.16170690623245368,
      "grad_norm": 0.7887692538287057,
      "learning_rate": 3.039503641791045e-05,
      "loss": 0.5326,
      "step": 36
    },
    {
      "epoch": 0.16619876473891074,
      "grad_norm": 0.6984222412165758,
      "learning_rate": 3.123934298507463e-05,
      "loss": 0.5317,
      "step": 37
    },
    {
      "epoch": 0.17069062324536777,
      "grad_norm": 0.7791858530904157,
      "learning_rate": 3.20836495522388e-05,
      "loss": 0.5391,
      "step": 38
    },
    {
      "epoch": 0.17518248175182483,
      "grad_norm": 0.9388728492156897,
      "learning_rate": 3.292795611940299e-05,
      "loss": 0.5369,
      "step": 39
    },
    {
      "epoch": 0.17967434025828186,
      "grad_norm": 1.298309162011316,
      "learning_rate": 3.377226268656716e-05,
      "loss": 0.5412,
      "step": 40
    },
    {
      "epoch": 0.18416619876473891,
      "grad_norm": 0.6457533016645476,
      "learning_rate": 3.461656925373135e-05,
      "loss": 0.5357,
      "step": 41
    },
    {
      "epoch": 0.18865805727119594,
      "grad_norm": 1.0265936577759005,
      "learning_rate": 3.5460875820895526e-05,
      "loss": 0.525,
      "step": 42
    },
    {
      "epoch": 0.193149915777653,
      "grad_norm": 1.1401778941066776,
      "learning_rate": 3.63051823880597e-05,
      "loss": 0.5248,
      "step": 43
    },
    {
      "epoch": 0.19764177428411006,
      "grad_norm": 0.7356923553649289,
      "learning_rate": 3.7149488955223884e-05,
      "loss": 0.5286,
      "step": 44
    },
    {
      "epoch": 0.2021336327905671,
      "grad_norm": 1.2303805549447868,
      "learning_rate": 3.799379552238806e-05,
      "loss": 0.5178,
      "step": 45
    },
    {
      "epoch": 0.20662549129702415,
      "grad_norm": 0.8286175834774036,
      "learning_rate": 3.883810208955224e-05,
      "loss": 0.5231,
      "step": 46
    },
    {
      "epoch": 0.21111734980348118,
      "grad_norm": 0.8904086433361935,
      "learning_rate": 3.968240865671642e-05,
      "loss": 0.5157,
      "step": 47
    },
    {
      "epoch": 0.21560920830993824,
      "grad_norm": 0.8742599338679005,
      "learning_rate": 4.052671522388059e-05,
      "loss": 0.5213,
      "step": 48
    },
    {
      "epoch": 0.2201010668163953,
      "grad_norm": 1.647792575282255,
      "learning_rate": 4.137102179104478e-05,
      "loss": 0.5092,
      "step": 49
    },
    {
      "epoch": 0.22459292532285233,
      "grad_norm": 0.8390436294173879,
      "learning_rate": 4.221532835820896e-05,
      "loss": 0.5294,
      "step": 50
    },
    {
      "epoch": 0.22908478382930939,
      "grad_norm": 1.570360619511684,
      "learning_rate": 4.305963492537314e-05,
      "loss": 0.5179,
      "step": 51
    },
    {
      "epoch": 0.23357664233576642,
      "grad_norm": 1.1999574349887228,
      "learning_rate": 4.3903941492537316e-05,
      "loss": 0.5201,
      "step": 52
    },
    {
      "epoch": 0.23806850084222347,
      "grad_norm": 1.511687740175756,
      "learning_rate": 4.474824805970149e-05,
      "loss": 0.5157,
      "step": 53
    },
    {
      "epoch": 0.2425603593486805,
      "grad_norm": 0.7415592129608428,
      "learning_rate": 4.5592554626865674e-05,
      "loss": 0.5171,
      "step": 54
    },
    {
      "epoch": 0.24705221785513756,
      "grad_norm": 1.8621266272207242,
      "learning_rate": 4.643686119402985e-05,
      "loss": 0.504,
      "step": 55
    },
    {
      "epoch": 0.2515440763615946,
      "grad_norm": 1.0172724657721814,
      "learning_rate": 4.728116776119403e-05,
      "loss": 0.513,
      "step": 56
    },
    {
      "epoch": 0.25603593486805165,
      "grad_norm": 1.980807424343129,
      "learning_rate": 4.812547432835821e-05,
      "loss": 0.5156,
      "step": 57
    },
    {
      "epoch": 0.2605277933745087,
      "grad_norm": 1.7921768094916208,
      "learning_rate": 4.8969780895522384e-05,
      "loss": 0.5113,
      "step": 58
    },
    {
      "epoch": 0.26501965188096577,
      "grad_norm": 1.500522946905668,
      "learning_rate": 4.981408746268657e-05,
      "loss": 0.5175,
      "step": 59
    },
    {
      "epoch": 0.26951151038742277,
      "grad_norm": 1.816474871386826,
      "learning_rate": 5.065839402985075e-05,
      "loss": 0.5126,
      "step": 60
    },
    {
      "epoch": 0.27400336889387983,
      "grad_norm": 1.297607899891605,
      "learning_rate": 5.150270059701493e-05,
      "loss": 0.5056,
      "step": 61
    },
    {
      "epoch": 0.2784952274003369,
      "grad_norm": 1.7895031529486063,
      "learning_rate": 5.2347007164179107e-05,
      "loss": 0.509,
      "step": 62
    },
    {
      "epoch": 0.28298708590679394,
      "grad_norm": 1.566861092994165,
      "learning_rate": 5.3191313731343286e-05,
      "loss": 0.5229,
      "step": 63
    },
    {
      "epoch": 0.287478944413251,
      "grad_norm": 1.4587917529272656,
      "learning_rate": 5.4035620298507465e-05,
      "loss": 0.5035,
      "step": 64
    },
    {
      "epoch": 0.291970802919708,
      "grad_norm": 1.2731174026092225,
      "learning_rate": 5.4879926865671644e-05,
      "loss": 0.5073,
      "step": 65
    },
    {
      "epoch": 0.29646266142616506,
      "grad_norm": 0.8712574896615427,
      "learning_rate": 5.572423343283582e-05,
      "loss": 0.4948,
      "step": 66
    },
    {
      "epoch": 0.3009545199326221,
      "grad_norm": 1.2544721050880958,
      "learning_rate": 5.656854e-05,
      "loss": 0.5059,
      "step": 67
    },
    {
      "epoch": 0.3054463784390792,
      "grad_norm": 1.2345846400102423,
      "learning_rate": 5.6568150990610705e-05,
      "loss": 0.4964,
      "step": 68
    },
    {
      "epoch": 0.30993823694553624,
      "grad_norm": 1.2773814296920463,
      "learning_rate": 5.656698397314335e-05,
      "loss": 0.5007,
      "step": 69
    },
    {
      "epoch": 0.31443009545199324,
      "grad_norm": 1.4793948509426509,
      "learning_rate": 5.656503897969921e-05,
      "loss": 0.4989,
      "step": 70
    },
    {
      "epoch": 0.3189219539584503,
      "grad_norm": 1.407221000623572,
      "learning_rate": 5.656231606377945e-05,
      "loss": 0.4983,
      "step": 71
    },
    {
      "epoch": 0.32341381246490736,
      "grad_norm": 1.5141105900628837,
      "learning_rate": 5.655881530028367e-05,
      "loss": 0.5145,
      "step": 72
    },
    {
      "epoch": 0.3279056709713644,
      "grad_norm": 1.3787345596720724,
      "learning_rate": 5.655453678550775e-05,
      "loss": 0.4988,
      "step": 73
    },
    {
      "epoch": 0.3323975294778215,
      "grad_norm": 1.1035198653506362,
      "learning_rate": 5.654948063714134e-05,
      "loss": 0.4985,
      "step": 74
    },
    {
      "epoch": 0.3368893879842785,
      "grad_norm": 1.7837912293825526,
      "learning_rate": 5.654364699426449e-05,
      "loss": 0.5072,
      "step": 75
    },
    {
      "epoch": 0.34138124649073553,
      "grad_norm": 0.9063757370915241,
      "learning_rate": 5.653703601734391e-05,
      "loss": 0.4932,
      "step": 76
    },
    {
      "epoch": 0.3458731049971926,
      "grad_norm": 3.713648524059514,
      "learning_rate": 5.652964788822855e-05,
      "loss": 0.5239,
      "step": 77
    },
    {
      "epoch": 0.35036496350364965,
      "grad_norm": 1.5022688986940036,
      "learning_rate": 5.652148281014452e-05,
      "loss": 0.4971,
      "step": 78
    },
    {
      "epoch": 0.35485682201010665,
      "grad_norm": 1.6917942739555678,
      "learning_rate": 5.651254100768964e-05,
      "loss": 0.5127,
      "step": 79
    },
    {
      "epoch": 0.3593486805165637,
      "grad_norm": 1.4236137502770643,
      "learning_rate": 5.6502822726827084e-05,
      "loss": 0.5057,
      "step": 80
    },
    {
      "epoch": 0.36384053902302077,
      "grad_norm": 1.0898220225500272,
      "learning_rate": 5.6492328234878805e-05,
      "loss": 0.5066,
      "step": 81
    },
    {
      "epoch": 0.36833239752947783,
      "grad_norm": 1.7474782681741197,
      "learning_rate": 5.6481057820518e-05,
      "loss": 0.5107,
      "step": 82
    },
    {
      "epoch": 0.3728242560359349,
      "grad_norm": 1.3320037933512727,
      "learning_rate": 5.646901179376134e-05,
      "loss": 0.502,
      "step": 83
    },
    {
      "epoch": 0.3773161145423919,
      "grad_norm": 1.4648960818002965,
      "learning_rate": 5.6456190485960274e-05,
      "loss": 0.5024,
      "step": 84
    },
    {
      "epoch": 0.38180797304884895,
      "grad_norm": 32.93425624504127,
      "learning_rate": 5.644259424979206e-05,
      "loss": 0.5137,
      "step": 85
    },
    {
      "epoch": 0.386299831555306,
      "grad_norm": 4.13281383954987,
      "learning_rate": 5.642822345924999e-05,
      "loss": 0.5151,
      "step": 86
    },
    {
      "epoch": 0.39079169006176306,
      "grad_norm": 2.1060132603992083,
      "learning_rate": 5.641307850963309e-05,
      "loss": 0.5017,
      "step": 87
    },
    {
      "epoch": 0.3952835485682201,
      "grad_norm": 3.3127948157740152,
      "learning_rate": 5.6397159817535313e-05,
      "loss": 0.5043,
      "step": 88
    },
    {
      "epoch": 0.3997754070746771,
      "grad_norm": 2.5680067997894245,
      "learning_rate": 5.638046782083399e-05,
      "loss": 0.5084,
      "step": 89
    },
    {
      "epoch": 0.4042672655811342,
      "grad_norm": 1.29659678996993,
      "learning_rate": 5.636300297867789e-05,
      "loss": 0.5031,
      "step": 90
    },
    {
      "epoch": 0.40875912408759124,
      "grad_norm": 1.6219950978824662,
      "learning_rate": 5.634476577147447e-05,
      "loss": 0.501,
      "step": 91
    },
    {
      "epoch": 0.4132509825940483,
      "grad_norm": 1.153754315447824,
      "learning_rate": 5.6325756700876784e-05,
      "loss": 0.4931,
      "step": 92
    },
    {
      "epoch": 0.41774284110050536,
      "grad_norm": 1.342332275158144,
      "learning_rate": 5.6305976289769585e-05,
      "loss": 0.4968,
      "step": 93
    },
    {
      "epoch": 0.42223469960696236,
      "grad_norm": 1.2584372361254834,
      "learning_rate": 5.628542508225499e-05,
      "loss": 0.5003,
      "step": 94
    },
    {
      "epoch": 0.4267265581134194,
      "grad_norm": 1.0589303057035246,
      "learning_rate": 5.626410364363752e-05,
      "loss": 0.4983,
      "step": 95
    },
    {
      "epoch": 0.4312184166198765,
      "grad_norm": 1.6739076197073897,
      "learning_rate": 5.624201256040852e-05,
      "loss": 0.48,
      "step": 96
    },
    {
      "epoch": 0.43571027512633353,
      "grad_norm": 1.29208863478561,
      "learning_rate": 5.6219152440230034e-05,
      "loss": 0.5001,
      "step": 97
    },
    {
      "epoch": 0.4402021336327906,
      "grad_norm": 12.810067171152744,
      "learning_rate": 5.619552391191811e-05,
      "loss": 0.5661,
      "step": 98
    },
    {
      "epoch": 0.4446939921392476,
      "grad_norm": 210.06133300008736,
      "learning_rate": 5.6171127625425495e-05,
      "loss": 0.6828,
      "step": 99
    },
    {
      "epoch": 0.44918585064570465,
      "grad_norm": 15.083547548247672,
      "learning_rate": 5.6145964251823724e-05,
      "loss": 0.5599,
      "step": 100
    },
    {
      "epoch": 0.4536777091521617,
      "grad_norm": 4.244127439833851,
      "learning_rate": 5.6120034483284736e-05,
      "loss": 0.5688,
      "step": 101
    },
    {
      "epoch": 0.45816956765861877,
      "grad_norm": 2.436112421448658,
      "learning_rate": 5.6093339033061736e-05,
      "loss": 0.5377,
      "step": 102
    },
    {
      "epoch": 0.4626614261650758,
      "grad_norm": 1.7592855269206351,
      "learning_rate": 5.606587863546965e-05,
      "loss": 0.5176,
      "step": 103
    },
    {
      "epoch": 0.46715328467153283,
      "grad_norm": 1.9332567283558368,
      "learning_rate": 5.603765404586491e-05,
      "loss": 0.5065,
      "step": 104
    },
    {
      "epoch": 0.4716451431779899,
      "grad_norm": 1.6964494870673548,
      "learning_rate": 5.600866604062465e-05,
      "loss": 0.499,
      "step": 105
    },
    {
      "epoch": 0.47613700168444695,
      "grad_norm": 1.1490174141275533,
      "learning_rate": 5.597891541712538e-05,
      "loss": 0.497,
      "step": 106
    },
    {
      "epoch": 0.480628860190904,
      "grad_norm": 1.1602345032993668,
      "learning_rate": 5.5948402993721036e-05,
      "loss": 0.4945,
      "step": 107
    },
    {
      "epoch": 0.485120718697361,
      "grad_norm": 1.4835240675179469,
      "learning_rate": 5.5917129609720485e-05,
      "loss": 0.5055,
      "step": 108
    },
    {
      "epoch": 0.48961257720381807,
      "grad_norm": 1.1724100232513444,
      "learning_rate": 5.5885096125364414e-05,
      "loss": 0.5047,
      "step": 109
    },
    {
      "epoch": 0.4941044357102751,
      "grad_norm": 0.9918742769956598,
      "learning_rate": 5.585230342180169e-05,
      "loss": 0.4951,
      "step": 110
    },
    {
      "epoch": 0.4985962942167322,
      "grad_norm": 0.7250590047258865,
      "learning_rate": 5.581875240106511e-05,
      "loss": 0.496,
      "step": 111
    },
    {
      "epoch": 0.5030881527231892,
      "grad_norm": 0.7760587562459388,
      "learning_rate": 5.5784443986046583e-05,
      "loss": 0.4851,
      "step": 112
    },
    {
      "epoch": 0.5075800112296462,
      "grad_norm": 1.3383368165656475,
      "learning_rate": 5.574937912047177e-05,
      "loss": 0.5025,
      "step": 113
    },
    {
      "epoch": 0.5120718697361033,
      "grad_norm": 1.111943463251585,
      "learning_rate": 5.57135587688741e-05,
      "loss": 0.4956,
      "step": 114
    },
    {
      "epoch": 0.5165637282425604,
      "grad_norm": 0.9698252396840494,
      "learning_rate": 5.5676983916568236e-05,
      "loss": 0.4825,
      "step": 115
    },
    {
      "epoch": 0.5210555867490174,
      "grad_norm": 1.1109968132860708,
      "learning_rate": 5.5639655569623e-05,
      "loss": 0.5059,
      "step": 116
    },
    {
      "epoch": 0.5255474452554745,
      "grad_norm": 0.8773591156743417,
      "learning_rate": 5.560157475483366e-05,
      "loss": 0.4882,
      "step": 117
    },
    {
      "epoch": 0.5300393037619315,
      "grad_norm": 1.8174443064528314,
      "learning_rate": 5.5562742519693715e-05,
      "loss": 0.4983,
      "step": 118
    },
    {
      "epoch": 0.5345311622683886,
      "grad_norm": 0.9395508875840728,
      "learning_rate": 5.552315993236609e-05,
      "loss": 0.4897,
      "step": 119
    },
    {
      "epoch": 0.5390230207748455,
      "grad_norm": 2.203464426406935,
      "learning_rate": 5.548282808165372e-05,
      "loss": 0.496,
      "step": 120
    },
    {
      "epoch": 0.5435148792813026,
      "grad_norm": 1.7888451181818592,
      "learning_rate": 5.5441748076969605e-05,
      "loss": 0.5076,
      "step": 121
    },
    {
      "epoch": 0.5480067377877597,
      "grad_norm": 1.7814512273443386,
      "learning_rate": 5.5399921048306325e-05,
      "loss": 0.4916,
      "step": 122
    },
    {
      "epoch": 0.5524985962942167,
      "grad_norm": 1.496681127283197,
      "learning_rate": 5.535734814620494e-05,
      "loss": 0.4779,
      "step": 123
    },
    {
      "epoch": 0.5569904548006738,
      "grad_norm": 1.4422331472579413,
      "learning_rate": 5.531403054172332e-05,
      "loss": 0.5016,
      "step": 124
    },
    {
      "epoch": 0.5614823133071308,
      "grad_norm": 1.4520480765697215,
      "learning_rate": 5.526996942640397e-05,
      "loss": 0.5029,
      "step": 125
    },
    {
      "epoch": 0.5659741718135879,
      "grad_norm": 1.0776276265598617,
      "learning_rate": 5.5225166012241233e-05,
      "loss": 0.4789,
      "step": 126
    },
    {
      "epoch": 0.570466030320045,
      "grad_norm": 1.1070093656315356,
      "learning_rate": 5.517962153164794e-05,
      "loss": 0.4789,
      "step": 127
    },
    {
      "epoch": 0.574957888826502,
      "grad_norm": 1.3226479303452643,
      "learning_rate": 5.5133337237421544e-05,
      "loss": 0.4842,
      "step": 128
    },
    {
      "epoch": 0.5794497473329591,
      "grad_norm": 0.8753047794754047,
      "learning_rate": 5.508631440270962e-05,
      "loss": 0.4882,
      "step": 129
    },
    {
      "epoch": 0.583941605839416,
      "grad_norm": 0.9642066006974384,
      "learning_rate": 5.503855432097488e-05,
      "loss": 0.4724,
      "step": 130
    },
    {
      "epoch": 0.5884334643458731,
      "grad_norm": 1.1459724043099024,
      "learning_rate": 5.499005830595958e-05,
      "loss": 0.4866,
      "step": 131
    },
    {
      "epoch": 0.5929253228523301,
      "grad_norm": 1.0002263936181477,
      "learning_rate": 5.494082769164936e-05,
      "loss": 0.4705,
      "step": 132
    },
    {
      "epoch": 0.5974171813587872,
      "grad_norm": 0.9749412329132308,
      "learning_rate": 5.4890863832236595e-05,
      "loss": 0.4749,
      "step": 133
    },
    {
      "epoch": 0.6019090398652442,
      "grad_norm": 0.8223414019863776,
      "learning_rate": 5.48401681020831e-05,
      "loss": 0.4807,
      "step": 134
    },
    {
      "epoch": 0.6064008983717013,
      "grad_norm": 0.7388864172422459,
      "learning_rate": 5.478874189568235e-05,
      "loss": 0.4807,
      "step": 135
    },
    {
      "epoch": 0.6108927568781584,
      "grad_norm": 0.9931817849346927,
      "learning_rate": 5.473658662762113e-05,
      "loss": 0.4835,
      "step": 136
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 1.1904925514845863,
      "learning_rate": 5.4683703732540596e-05,
      "loss": 0.478,
      "step": 137
    },
    {
      "epoch": 0.6198764738910725,
      "grad_norm": 0.6252088060994688,
      "learning_rate": 5.463009466509683e-05,
      "loss": 0.4728,
      "step": 138
    },
    {
      "epoch": 0.6243683323975294,
      "grad_norm": 0.9909414402099046,
      "learning_rate": 5.457576089992082e-05,
      "loss": 0.4791,
      "step": 139
    },
    {
      "epoch": 0.6288601909039865,
      "grad_norm": 1.2757654699328387,
      "learning_rate": 5.4520703931577924e-05,
      "loss": 0.4722,
      "step": 140
    },
    {
      "epoch": 0.6333520494104435,
      "grad_norm": 0.6975507723576246,
      "learning_rate": 5.446492527452671e-05,
      "loss": 0.482,
      "step": 141
    },
    {
      "epoch": 0.6378439079169006,
      "grad_norm": 0.7661946065945632,
      "learning_rate": 5.440842646307732e-05,
      "loss": 0.4717,
      "step": 142
    },
    {
      "epoch": 0.6423357664233577,
      "grad_norm": 0.9435784124826875,
      "learning_rate": 5.435120905134929e-05,
      "loss": 0.4657,
      "step": 143
    },
    {
      "epoch": 0.6468276249298147,
      "grad_norm": 0.806002131127944,
      "learning_rate": 5.429327461322877e-05,
      "loss": 0.467,
      "step": 144
    },
    {
      "epoch": 0.6513194834362718,
      "grad_norm": 0.714987214951012,
      "learning_rate": 5.423462474232524e-05,
      "loss": 0.4824,
      "step": 145
    },
    {
      "epoch": 0.6558113419427288,
      "grad_norm": 0.8621322205512378,
      "learning_rate": 5.417526105192766e-05,
      "loss": 0.4753,
      "step": 146
    },
    {
      "epoch": 0.6603032004491859,
      "grad_norm": 0.6806420119133497,
      "learning_rate": 5.4115185174960136e-05,
      "loss": 0.4734,
      "step": 147
    },
    {
      "epoch": 0.664795058955643,
      "grad_norm": 0.5561158366561496,
      "learning_rate": 5.405439876393696e-05,
      "loss": 0.4725,
      "step": 148
    },
    {
      "epoch": 0.6692869174620999,
      "grad_norm": 0.528697225543463,
      "learning_rate": 5.3992903490917166e-05,
      "loss": 0.4756,
      "step": 149
    },
    {
      "epoch": 0.673778775968557,
      "grad_norm": 13.695368353736573,
      "learning_rate": 5.393070104745854e-05,
      "loss": 0.4691,
      "step": 150
    },
    {
      "epoch": 0.678270634475014,
      "grad_norm": 1.1347546914088045,
      "learning_rate": 5.386779314457109e-05,
      "loss": 0.492,
      "step": 151
    },
    {
      "epoch": 0.6827624929814711,
      "grad_norm": 1.4197727467223638,
      "learning_rate": 5.380418151266999e-05,
      "loss": 0.5055,
      "step": 152
    },
    {
      "epoch": 0.6872543514879281,
      "grad_norm": 0.9081718965287682,
      "learning_rate": 5.373986790152796e-05,
      "loss": 0.483,
      "step": 153
    },
    {
      "epoch": 0.6917462099943852,
      "grad_norm": 1.6692580109261934,
      "learning_rate": 5.367485408022714e-05,
      "loss": 0.4858,
      "step": 154
    },
    {
      "epoch": 0.6962380685008422,
      "grad_norm": 1.116716795715726,
      "learning_rate": 5.3609141837110465e-05,
      "loss": 0.4849,
      "step": 155
    },
    {
      "epoch": 0.7007299270072993,
      "grad_norm": 1.7801924566364116,
      "learning_rate": 5.35427329797324e-05,
      "loss": 0.487,
      "step": 156
    },
    {
      "epoch": 0.7052217855137564,
      "grad_norm": 1.484285725456502,
      "learning_rate": 5.34756293348093e-05,
      "loss": 0.4819,
      "step": 157
    },
    {
      "epoch": 0.7097136440202133,
      "grad_norm": 1.3787842681358016,
      "learning_rate": 5.340783274816909e-05,
      "loss": 0.4803,
      "step": 158
    },
    {
      "epoch": 0.7142055025266704,
      "grad_norm": 1.3697258174053226,
      "learning_rate": 5.333934508470055e-05,
      "loss": 0.4966,
      "step": 159
    },
    {
      "epoch": 0.7186973610331274,
      "grad_norm": 0.9985880006887219,
      "learning_rate": 5.327016822830199e-05,
      "loss": 0.4859,
      "step": 160
    },
    {
      "epoch": 0.7231892195395845,
      "grad_norm": 1.1302300728203512,
      "learning_rate": 5.3200304081829424e-05,
      "loss": 0.4755,
      "step": 161
    },
    {
      "epoch": 0.7276810780460415,
      "grad_norm": 1.2089473349803344,
      "learning_rate": 5.312975456704423e-05,
      "loss": 0.4778,
      "step": 162
    },
    {
      "epoch": 0.7321729365524986,
      "grad_norm": 0.7582069164306429,
      "learning_rate": 5.3058521624560304e-05,
      "loss": 0.4581,
      "step": 163
    },
    {
      "epoch": 0.7366647950589557,
      "grad_norm": 1.0009191122111472,
      "learning_rate": 5.2986607213790666e-05,
      "loss": 0.4641,
      "step": 164
    },
    {
      "epoch": 0.7411566535654127,
      "grad_norm": 1.3708383666132893,
      "learning_rate": 5.2914013312893555e-05,
      "loss": 0.4817,
      "step": 165
    },
    {
      "epoch": 0.7456485120718698,
      "grad_norm": 0.5548744053123198,
      "learning_rate": 5.284074191871806e-05,
      "loss": 0.4791,
      "step": 166
    },
    {
      "epoch": 0.7501403705783268,
      "grad_norm": 1.3905532535609613,
      "learning_rate": 5.276679504674912e-05,
      "loss": 0.4855,
      "step": 167
    },
    {
      "epoch": 0.7546322290847838,
      "grad_norm": 0.8294906257179122,
      "learning_rate": 5.269217473105214e-05,
      "loss": 0.4687,
      "step": 168
    },
    {
      "epoch": 0.7591240875912408,
      "grad_norm": 0.5936258415870873,
      "learning_rate": 5.261688302421702e-05,
      "loss": 0.4705,
      "step": 169
    },
    {
      "epoch": 0.7636159460976979,
      "grad_norm": 0.8205916059345216,
      "learning_rate": 5.2540921997301675e-05,
      "loss": 0.4716,
      "step": 170
    },
    {
      "epoch": 0.768107804604155,
      "grad_norm": 0.9151372632980053,
      "learning_rate": 5.246429373977513e-05,
      "loss": 0.4783,
      "step": 171
    },
    {
      "epoch": 0.772599663110612,
      "grad_norm": 0.8574137536732892,
      "learning_rate": 5.2387000359459964e-05,
      "loss": 0.4682,
      "step": 172
    },
    {
      "epoch": 0.7770915216170691,
      "grad_norm": 0.5949161182694169,
      "learning_rate": 5.230904398247439e-05,
      "loss": 0.4582,
      "step": 173
    },
    {
      "epoch": 0.7815833801235261,
      "grad_norm": 0.60863710880346,
      "learning_rate": 5.2230426753173746e-05,
      "loss": 0.4727,
      "step": 174
    },
    {
      "epoch": 0.7860752386299832,
      "grad_norm": 0.7885522071991459,
      "learning_rate": 5.215115083409149e-05,
      "loss": 0.4764,
      "step": 175
    },
    {
      "epoch": 0.7905670971364402,
      "grad_norm": 0.7148921741815615,
      "learning_rate": 5.207121840587976e-05,
      "loss": 0.4747,
      "step": 176
    },
    {
      "epoch": 0.7950589556428973,
      "grad_norm": 0.4191028302559816,
      "learning_rate": 5.1990631667249386e-05,
      "loss": 0.4547,
      "step": 177
    },
    {
      "epoch": 0.7995508141493542,
      "grad_norm": 0.7247963012298666,
      "learning_rate": 5.190939283490934e-05,
      "loss": 0.4626,
      "step": 178
    },
    {
      "epoch": 0.8040426726558113,
      "grad_norm": 0.7029559056093213,
      "learning_rate": 5.182750414350587e-05,
      "loss": 0.4662,
      "step": 179
    },
    {
      "epoch": 0.8085345311622684,
      "grad_norm": 0.4462526562125211,
      "learning_rate": 5.1744967845560936e-05,
      "loss": 0.4712,
      "step": 180
    },
    {
      "epoch": 0.8130263896687254,
      "grad_norm": 0.4949235485049591,
      "learning_rate": 5.16617862114103e-05,
      "loss": 0.4794,
      "step": 181
    },
    {
      "epoch": 0.8175182481751825,
      "grad_norm": 0.5536962185283819,
      "learning_rate": 5.157796152914106e-05,
      "loss": 0.4624,
      "step": 182
    },
    {
      "epoch": 0.8220101066816395,
      "grad_norm": 0.6390684694683445,
      "learning_rate": 5.1493496104528745e-05,
      "loss": 0.4673,
      "step": 183
    },
    {
      "epoch": 0.8265019651880966,
      "grad_norm": 0.5366579544857547,
      "learning_rate": 5.140839226097382e-05,
      "loss": 0.4597,
      "step": 184
    },
    {
      "epoch": 0.8309938236945537,
      "grad_norm": 0.4290137833746869,
      "learning_rate": 5.132265233943782e-05,
      "loss": 0.4588,
      "step": 185
    },
    {
      "epoch": 0.8354856822010107,
      "grad_norm": 0.5064492251279115,
      "learning_rate": 5.123627869837897e-05,
      "loss": 0.464,
      "step": 186
    },
    {
      "epoch": 0.8399775407074677,
      "grad_norm": 0.6672917894103767,
      "learning_rate": 5.1149273713687296e-05,
      "loss": 0.466,
      "step": 187
    },
    {
      "epoch": 0.8444693992139247,
      "grad_norm": 0.705424722250373,
      "learning_rate": 5.106163977861925e-05,
      "loss": 0.4572,
      "step": 188
    },
    {
      "epoch": 0.8489612577203818,
      "grad_norm": 0.6210260028415538,
      "learning_rate": 5.097337930373192e-05,
      "loss": 0.4579,
      "step": 189
    },
    {
      "epoch": 0.8534531162268388,
      "grad_norm": 0.4999598663929801,
      "learning_rate": 5.0884494716816655e-05,
      "loss": 0.4724,
      "step": 190
    },
    {
      "epoch": 0.8579449747332959,
      "grad_norm": 0.5494656315747662,
      "learning_rate": 5.079498846283238e-05,
      "loss": 0.4737,
      "step": 191
    },
    {
      "epoch": 0.862436833239753,
      "grad_norm": 0.45007940306044986,
      "learning_rate": 5.070486300383827e-05,
      "loss": 0.4694,
      "step": 192
    },
    {
      "epoch": 0.86692869174621,
      "grad_norm": 0.3764442979713074,
      "learning_rate": 5.061412081892603e-05,
      "loss": 0.4651,
      "step": 193
    },
    {
      "epoch": 0.8714205502526671,
      "grad_norm": 0.36594098735871516,
      "learning_rate": 5.052276440415174e-05,
      "loss": 0.4762,
      "step": 194
    },
    {
      "epoch": 0.8759124087591241,
      "grad_norm": 0.4272726723732627,
      "learning_rate": 5.043079627246717e-05,
      "loss": 0.4513,
      "step": 195
    },
    {
      "epoch": 0.8804042672655812,
      "grad_norm": 0.5698837640855857,
      "learning_rate": 5.033821895365065e-05,
      "loss": 0.4551,
      "step": 196
    },
    {
      "epoch": 0.8848961257720381,
      "grad_norm": 0.5555640472146062,
      "learning_rate": 5.02450349942375e-05,
      "loss": 0.4633,
      "step": 197
    },
    {
      "epoch": 0.8893879842784952,
      "grad_norm": 0.5915810416664253,
      "learning_rate": 5.015124695744997e-05,
      "loss": 0.4721,
      "step": 198
    },
    {
      "epoch": 0.8938798427849522,
      "grad_norm": 0.6869500074116868,
      "learning_rate": 5.005685742312674e-05,
      "loss": 0.462,
      "step": 199
    },
    {
      "epoch": 0.8983717012914093,
      "grad_norm": 0.6975298538745554,
      "learning_rate": 4.996186898765196e-05,
      "loss": 0.4496,
      "step": 200
    },
    {
      "epoch": 0.9028635597978664,
      "grad_norm": 0.6057519990867798,
      "learning_rate": 4.9866284263883835e-05,
      "loss": 0.4615,
      "step": 201
    },
    {
      "epoch": 0.9073554183043234,
      "grad_norm": 0.5612658182788611,
      "learning_rate": 4.97701058810827e-05,
      "loss": 0.477,
      "step": 202
    },
    {
      "epoch": 0.9118472768107805,
      "grad_norm": 0.5049666592555946,
      "learning_rate": 4.9673336484838795e-05,
      "loss": 0.4554,
      "step": 203
    },
    {
      "epoch": 0.9163391353172375,
      "grad_norm": 0.42081037701799623,
      "learning_rate": 4.95759787369994e-05,
      "loss": 0.4554,
      "step": 204
    },
    {
      "epoch": 0.9208309938236946,
      "grad_norm": 0.44770764051942097,
      "learning_rate": 4.947803531559565e-05,
      "loss": 0.4653,
      "step": 205
    },
    {
      "epoch": 0.9253228523301515,
      "grad_norm": 0.49417870665469427,
      "learning_rate": 4.937950891476891e-05,
      "loss": 0.4704,
      "step": 206
    },
    {
      "epoch": 0.9298147108366086,
      "grad_norm": 0.5340284014533414,
      "learning_rate": 4.9280402244696586e-05,
      "loss": 0.459,
      "step": 207
    },
    {
      "epoch": 0.9343065693430657,
      "grad_norm": 0.41751560443080044,
      "learning_rate": 4.9180718031517646e-05,
      "loss": 0.4564,
      "step": 208
    },
    {
      "epoch": 0.9387984278495227,
      "grad_norm": 0.4672458968519276,
      "learning_rate": 4.90804590172576e-05,
      "loss": 0.4662,
      "step": 209
    },
    {
      "epoch": 0.9432902863559798,
      "grad_norm": 0.6262419429350549,
      "learning_rate": 4.8979627959753065e-05,
      "loss": 0.4697,
      "step": 210
    },
    {
      "epoch": 0.9477821448624368,
      "grad_norm": 0.5603039964456088,
      "learning_rate": 4.887822763257594e-05,
      "loss": 0.4597,
      "step": 211
    },
    {
      "epoch": 0.9522740033688939,
      "grad_norm": 0.35357175512056443,
      "learning_rate": 4.877626082495709e-05,
      "loss": 0.4531,
      "step": 212
    },
    {
      "epoch": 0.956765861875351,
      "grad_norm": 0.6270265145350702,
      "learning_rate": 4.867373034170961e-05,
      "loss": 0.4525,
      "step": 213
    },
    {
      "epoch": 0.961257720381808,
      "grad_norm": 0.5486691558909343,
      "learning_rate": 4.857063900315169e-05,
      "loss": 0.4634,
      "step": 214
    },
    {
      "epoch": 0.9657495788882651,
      "grad_norm": 0.6546873588268987,
      "learning_rate": 4.8466989645029046e-05,
      "loss": 0.4418,
      "step": 215
    },
    {
      "epoch": 0.970241437394722,
      "grad_norm": 0.6741337086934428,
      "learning_rate": 4.8362785118436906e-05,
      "loss": 0.4578,
      "step": 216
    },
    {
      "epoch": 0.9747332959011791,
      "grad_norm": 0.4953644104180241,
      "learning_rate": 4.8258028289741566e-05,
      "loss": 0.4535,
      "step": 217
    },
    {
      "epoch": 0.9792251544076361,
      "grad_norm": 0.5093531046864966,
      "learning_rate": 4.8152722040501576e-05,
      "loss": 0.4602,
      "step": 218
    },
    {
      "epoch": 0.9837170129140932,
      "grad_norm": 0.5828595197888917,
      "learning_rate": 4.804686926738845e-05,
      "loss": 0.4593,
      "step": 219
    },
    {
      "epoch": 0.9882088714205502,
      "grad_norm": 0.7121485339842426,
      "learning_rate": 4.794047288210701e-05,
      "loss": 0.4732,
      "step": 220
    },
    {
      "epoch": 0.9927007299270073,
      "grad_norm": 0.6942755927799307,
      "learning_rate": 4.783353581131529e-05,
      "loss": 0.4607,
      "step": 221
    },
    {
      "epoch": 0.9971925884334644,
      "grad_norm": 0.7037814589149809,
      "learning_rate": 4.772606099654399e-05,
      "loss": 0.4586,
      "step": 222
    },
    {
      "epoch": 1.0030881527231892,
      "grad_norm": 1.3103380799684685,
      "learning_rate": 4.7618051394115645e-05,
      "loss": 0.7366,
      "step": 223
    },
    {
      "epoch": 1.0075800112296462,
      "grad_norm": 1.4330434691359393,
      "learning_rate": 4.750950997506323e-05,
      "loss": 0.439,
      "step": 224
    },
    {
      "epoch": 1.0120718697361033,
      "grad_norm": 0.5720145911090031,
      "learning_rate": 4.7400439725048454e-05,
      "loss": 0.4076,
      "step": 225
    },
    {
      "epoch": 1.0165637282425604,
      "grad_norm": 0.7816685489054993,
      "learning_rate": 4.729084364427966e-05,
      "loss": 0.4223,
      "step": 226
    },
    {
      "epoch": 1.0210555867490174,
      "grad_norm": 1.3595612583992696,
      "learning_rate": 4.718072474742929e-05,
      "loss": 0.4213,
      "step": 227
    },
    {
      "epoch": 1.0255474452554745,
      "grad_norm": 0.6148943684688656,
      "learning_rate": 4.7070086063550914e-05,
      "loss": 0.4402,
      "step": 228
    },
    {
      "epoch": 1.0300393037619315,
      "grad_norm": 0.5569229211126668,
      "learning_rate": 4.695893063599599e-05,
      "loss": 0.4204,
      "step": 229
    },
    {
      "epoch": 1.0345311622683886,
      "grad_norm": 1.0596922370747948,
      "learning_rate": 4.684726152233006e-05,
      "loss": 0.4369,
      "step": 230
    },
    {
      "epoch": 1.0390230207748457,
      "grad_norm": 1.0366531812250148,
      "learning_rate": 4.673508179424872e-05,
      "loss": 0.4341,
      "step": 231
    },
    {
      "epoch": 1.0435148792813027,
      "grad_norm": 0.8275345128738875,
      "learning_rate": 4.66223945374931e-05,
      "loss": 0.4425,
      "step": 232
    },
    {
      "epoch": 1.0480067377877598,
      "grad_norm": 0.6592358636504515,
      "learning_rate": 4.650920285176497e-05,
      "loss": 0.4287,
      "step": 233
    },
    {
      "epoch": 1.0524985962942168,
      "grad_norm": 0.6838584669549151,
      "learning_rate": 4.639550985064149e-05,
      "loss": 0.424,
      "step": 234
    },
    {
      "epoch": 1.0569904548006739,
      "grad_norm": 0.9269204224618847,
      "learning_rate": 4.628131866148956e-05,
      "loss": 0.4338,
      "step": 235
    },
    {
      "epoch": 1.0614823133071307,
      "grad_norm": 0.705801235155315,
      "learning_rate": 4.61666324253798e-05,
      "loss": 0.4251,
      "step": 236
    },
    {
      "epoch": 1.0659741718135878,
      "grad_norm": 0.5284215885470633,
      "learning_rate": 4.6051454297000155e-05,
      "loss": 0.4113,
      "step": 237
    },
    {
      "epoch": 1.0704660303200448,
      "grad_norm": 0.7944656179350124,
      "learning_rate": 4.593578744456908e-05,
      "loss": 0.4305,
      "step": 238
    },
    {
      "epoch": 1.074957888826502,
      "grad_norm": 0.5540163717397164,
      "learning_rate": 4.581963504974846e-05,
      "loss": 0.4217,
      "step": 239
    },
    {
      "epoch": 1.079449747332959,
      "grad_norm": 0.47355128431960797,
      "learning_rate": 4.570300030755602e-05,
      "loss": 0.4169,
      "step": 240
    },
    {
      "epoch": 1.083941605839416,
      "grad_norm": 0.5104148641173784,
      "learning_rate": 4.55858864262775e-05,
      "loss": 0.4197,
      "step": 241
    },
    {
      "epoch": 1.088433464345873,
      "grad_norm": 0.4239177437769046,
      "learning_rate": 4.5468296627378346e-05,
      "loss": 0.4316,
      "step": 242
    },
    {
      "epoch": 1.0929253228523301,
      "grad_norm": 0.4896289293240813,
      "learning_rate": 4.5350234145415145e-05,
      "loss": 0.4241,
      "step": 243
    },
    {
      "epoch": 1.0974171813587872,
      "grad_norm": 0.6906271277735481,
      "learning_rate": 4.5231702227946624e-05,
      "loss": 0.4133,
      "step": 244
    },
    {
      "epoch": 1.1019090398652442,
      "grad_norm": 0.6695765631069457,
      "learning_rate": 4.511270413544434e-05,
      "loss": 0.4217,
      "step": 245
    },
    {
      "epoch": 1.1064008983717013,
      "grad_norm": 0.4865180812928011,
      "learning_rate": 4.499324314120297e-05,
      "loss": 0.445,
      "step": 246
    },
    {
      "epoch": 1.1108927568781584,
      "grad_norm": 0.580809606929406,
      "learning_rate": 4.48733225312503e-05,
      "loss": 0.4069,
      "step": 247
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.7506068048047247,
      "learning_rate": 4.4752945604256804e-05,
      "loss": 0.4253,
      "step": 248
    },
    {
      "epoch": 1.1198764738910725,
      "grad_norm": 0.5466702938785094,
      "learning_rate": 4.463211567144492e-05,
      "loss": 0.4273,
      "step": 249
    },
    {
      "epoch": 1.1243683323975295,
      "grad_norm": 0.4368107081797831,
      "learning_rate": 4.4510836056497985e-05,
      "loss": 0.444,
      "step": 250
    },
    {
      "epoch": 1.1288601909039866,
      "grad_norm": 0.5540192724816043,
      "learning_rate": 4.43891100954688e-05,
      "loss": 0.4235,
      "step": 251
    },
    {
      "epoch": 1.1333520494104437,
      "grad_norm": 0.574902490317899,
      "learning_rate": 4.426694113668784e-05,
      "loss": 0.4005,
      "step": 252
    },
    {
      "epoch": 1.1378439079169007,
      "grad_norm": 0.4277415531920282,
      "learning_rate": 4.4144332540671184e-05,
      "loss": 0.4442,
      "step": 253
    },
    {
      "epoch": 1.1423357664233578,
      "grad_norm": 0.47658738091533026,
      "learning_rate": 4.4021287680028065e-05,
      "loss": 0.4076,
      "step": 254
    },
    {
      "epoch": 1.1468276249298146,
      "grad_norm": 0.461475688900843,
      "learning_rate": 4.389780993936809e-05,
      "loss": 0.4374,
      "step": 255
    },
    {
      "epoch": 1.1513194834362717,
      "grad_norm": 0.36129487594166015,
      "learning_rate": 4.377390271520818e-05,
      "loss": 0.4058,
      "step": 256
    },
    {
      "epoch": 1.1558113419427287,
      "grad_norm": 0.38757121097462355,
      "learning_rate": 4.364956941587907e-05,
      "loss": 0.439,
      "step": 257
    },
    {
      "epoch": 1.1603032004491858,
      "grad_norm": 0.4643133402951908,
      "learning_rate": 4.35248134614316e-05,
      "loss": 0.4197,
      "step": 258
    },
    {
      "epoch": 1.1647950589556428,
      "grad_norm": 0.44721127431557994,
      "learning_rate": 4.339963828354267e-05,
      "loss": 0.4373,
      "step": 259
    },
    {
      "epoch": 1.1692869174621,
      "grad_norm": 0.5913764476973007,
      "learning_rate": 4.3274047325420756e-05,
      "loss": 0.4086,
      "step": 260
    },
    {
      "epoch": 1.173778775968557,
      "grad_norm": 0.4545855224943832,
      "learning_rate": 4.3148044041711305e-05,
      "loss": 0.4,
      "step": 261
    },
    {
      "epoch": 1.178270634475014,
      "grad_norm": 0.3877259301565107,
      "learning_rate": 4.302163189840161e-05,
      "loss": 0.4458,
      "step": 262
    },
    {
      "epoch": 1.182762492981471,
      "grad_norm": 0.5899001677321934,
      "learning_rate": 4.2894814372725545e-05,
      "loss": 0.4189,
      "step": 263
    },
    {
      "epoch": 1.1872543514879281,
      "grad_norm": 0.5093992389656228,
      "learning_rate": 4.276759495306786e-05,
      "loss": 0.4282,
      "step": 264
    },
    {
      "epoch": 1.1917462099943852,
      "grad_norm": 0.5402728882571417,
      "learning_rate": 4.263997713886826e-05,
      "loss": 0.4136,
      "step": 265
    },
    {
      "epoch": 1.1962380685008422,
      "grad_norm": 0.430096705490882,
      "learning_rate": 4.251196444052514e-05,
      "loss": 0.4288,
      "step": 266
    },
    {
      "epoch": 1.2007299270072993,
      "grad_norm": 0.590664850960543,
      "learning_rate": 4.2383560379299025e-05,
      "loss": 0.4234,
      "step": 267
    },
    {
      "epoch": 1.2052217855137564,
      "grad_norm": 0.5846766108697488,
      "learning_rate": 4.225476848721569e-05,
      "loss": 0.4224,
      "step": 268
    },
    {
      "epoch": 1.2097136440202134,
      "grad_norm": 0.4635905268061619,
      "learning_rate": 4.212559230696902e-05,
      "loss": 0.431,
      "step": 269
    },
    {
      "epoch": 1.2142055025266705,
      "grad_norm": 0.3279398533977027,
      "learning_rate": 4.199603539182358e-05,
      "loss": 0.4071,
      "step": 270
    },
    {
      "epoch": 1.2186973610331275,
      "grad_norm": 0.43596934756872635,
      "learning_rate": 4.1866101305516834e-05,
      "loss": 0.4408,
      "step": 271
    },
    {
      "epoch": 1.2231892195395846,
      "grad_norm": 0.5988053050168707,
      "learning_rate": 4.173579362216115e-05,
      "loss": 0.4024,
      "step": 272
    },
    {
      "epoch": 1.2276810780460417,
      "grad_norm": 0.5317002353473365,
      "learning_rate": 4.160511592614547e-05,
      "loss": 0.4185,
      "step": 273
    },
    {
      "epoch": 1.2321729365524985,
      "grad_norm": 0.4129920656791822,
      "learning_rate": 4.147407181203672e-05,
      "loss": 0.4313,
      "step": 274
    },
    {
      "epoch": 1.2366647950589555,
      "grad_norm": 0.4032314936302549,
      "learning_rate": 4.134266488448094e-05,
      "loss": 0.4379,
      "step": 275
    },
    {
      "epoch": 1.2411566535654126,
      "grad_norm": 0.48015467116294086,
      "learning_rate": 4.121089875810409e-05,
      "loss": 0.3997,
      "step": 276
    },
    {
      "epoch": 1.2456485120718697,
      "grad_norm": 0.5841828612731099,
      "learning_rate": 4.107877705741271e-05,
      "loss": 0.4323,
      "step": 277
    },
    {
      "epoch": 1.2501403705783267,
      "grad_norm": 0.5031689599839138,
      "learning_rate": 4.0946303416694104e-05,
      "loss": 0.4089,
      "step": 278
    },
    {
      "epoch": 1.2546322290847838,
      "grad_norm": 0.30548444630400023,
      "learning_rate": 4.081348147991648e-05,
      "loss": 0.4237,
      "step": 279
    },
    {
      "epoch": 1.2591240875912408,
      "grad_norm": 0.5367218666364604,
      "learning_rate": 4.0680314900628634e-05,
      "loss": 0.4189,
      "step": 280
    },
    {
      "epoch": 1.263615946097698,
      "grad_norm": 0.4781859938737436,
      "learning_rate": 4.054680734185951e-05,
      "loss": 0.4236,
      "step": 281
    },
    {
      "epoch": 1.268107804604155,
      "grad_norm": 0.3297402176434817,
      "learning_rate": 4.041296247601742e-05,
      "loss": 0.4206,
      "step": 282
    },
    {
      "epoch": 1.272599663110612,
      "grad_norm": 0.6130041872704294,
      "learning_rate": 4.0278783984788976e-05,
      "loss": 0.4204,
      "step": 283
    },
    {
      "epoch": 1.277091521617069,
      "grad_norm": 0.5443923534079088,
      "learning_rate": 4.014427555903791e-05,
      "loss": 0.4224,
      "step": 284
    },
    {
      "epoch": 1.2815833801235261,
      "grad_norm": 0.4485200475188682,
      "learning_rate": 4.000944089870348e-05,
      "loss": 0.4233,
      "step": 285
    },
    {
      "epoch": 1.2860752386299832,
      "grad_norm": 0.4960146532122582,
      "learning_rate": 3.9874283712698725e-05,
      "loss": 0.4223,
      "step": 286
    },
    {
      "epoch": 1.2905670971364402,
      "grad_norm": 0.4033390182857802,
      "learning_rate": 3.9738807718808435e-05,
      "loss": 0.4356,
      "step": 287
    },
    {
      "epoch": 1.2950589556428973,
      "grad_norm": 0.4490948378863902,
      "learning_rate": 3.9603016643586886e-05,
      "loss": 0.4201,
      "step": 288
    },
    {
      "epoch": 1.2995508141493544,
      "grad_norm": 0.3592794934403415,
      "learning_rate": 3.946691422225532e-05,
      "loss": 0.4016,
      "step": 289
    },
    {
      "epoch": 1.3040426726558114,
      "grad_norm": 0.3274077973022082,
      "learning_rate": 3.933050419859925e-05,
      "loss": 0.4231,
      "step": 290
    },
    {
      "epoch": 1.3085345311622683,
      "grad_norm": 0.34250353805523737,
      "learning_rate": 3.91937903248654e-05,
      "loss": 0.4235,
      "step": 291
    },
    {
      "epoch": 1.3130263896687255,
      "grad_norm": 0.34707139849811824,
      "learning_rate": 3.905677636165857e-05,
      "loss": 0.4169,
      "step": 292
    },
    {
      "epoch": 1.3175182481751824,
      "grad_norm": 0.3669346600570388,
      "learning_rate": 3.8919466077838126e-05,
      "loss": 0.4264,
      "step": 293
    },
    {
      "epoch": 1.3220101066816397,
      "grad_norm": 0.3669304230047965,
      "learning_rate": 3.878186325041438e-05,
      "loss": 0.4211,
      "step": 294
    },
    {
      "epoch": 1.3265019651880965,
      "grad_norm": 0.2964581222649961,
      "learning_rate": 3.8643971664444674e-05,
      "loss": 0.4302,
      "step": 295
    },
    {
      "epoch": 1.3309938236945535,
      "grad_norm": 0.44588513236491323,
      "learning_rate": 3.8505795112929244e-05,
      "loss": 0.4219,
      "step": 296
    },
    {
      "epoch": 1.3354856822010106,
      "grad_norm": 0.5042888095272267,
      "learning_rate": 3.836733739670695e-05,
      "loss": 0.4139,
      "step": 297
    },
    {
      "epoch": 1.3399775407074677,
      "grad_norm": 0.30365204006233315,
      "learning_rate": 3.822860232435065e-05,
      "loss": 0.4155,
      "step": 298
    },
    {
      "epoch": 1.3444693992139247,
      "grad_norm": 0.49322986536572716,
      "learning_rate": 3.808959371206247e-05,
      "loss": 0.4155,
      "step": 299
    },
    {
      "epoch": 1.3489612577203818,
      "grad_norm": 0.5204964744436007,
      "learning_rate": 3.7950315383568844e-05,
      "loss": 0.4323,
      "step": 300
    },
    {
      "epoch": 1.3534531162268388,
      "grad_norm": 0.5239549845180205,
      "learning_rate": 3.781077117001532e-05,
      "loss": 0.4236,
      "step": 301
    },
    {
      "epoch": 1.357944974733296,
      "grad_norm": 0.42886342963253304,
      "learning_rate": 3.767096490986119e-05,
      "loss": 0.4155,
      "step": 302
    },
    {
      "epoch": 1.362436833239753,
      "grad_norm": 0.3518792408128484,
      "learning_rate": 3.753090044877385e-05,
      "loss": 0.4134,
      "step": 303
    },
    {
      "epoch": 1.36692869174621,
      "grad_norm": 0.3692953802641738,
      "learning_rate": 3.7390581639523105e-05,
      "loss": 0.412,
      "step": 304
    },
    {
      "epoch": 1.371420550252667,
      "grad_norm": 0.3072648415590272,
      "learning_rate": 3.725001234187511e-05,
      "loss": 0.4118,
      "step": 305
    },
    {
      "epoch": 1.3759124087591241,
      "grad_norm": 0.43125767774610463,
      "learning_rate": 3.710919642248628e-05,
      "loss": 0.4175,
      "step": 306
    },
    {
      "epoch": 1.3804042672655812,
      "grad_norm": 0.3669097572936447,
      "learning_rate": 3.6968137754796843e-05,
      "loss": 0.4016,
      "step": 307
    },
    {
      "epoch": 1.3848961257720382,
      "grad_norm": 0.3109642926232247,
      "learning_rate": 3.6826840218924346e-05,
      "loss": 0.4317,
      "step": 308
    },
    {
      "epoch": 1.3893879842784953,
      "grad_norm": 0.4160110328302152,
      "learning_rate": 3.6685307701556914e-05,
      "loss": 0.4125,
      "step": 309
    },
    {
      "epoch": 1.3938798427849521,
      "grad_norm": 0.3782962315135247,
      "learning_rate": 3.654354409584633e-05,
      "loss": 0.4132,
      "step": 310
    },
    {
      "epoch": 1.3983717012914094,
      "grad_norm": 0.3434504622603799,
      "learning_rate": 3.640155330130096e-05,
      "loss": 0.3993,
      "step": 311
    },
    {
      "epoch": 1.4028635597978663,
      "grad_norm": 0.2809990812585536,
      "learning_rate": 3.625933922367848e-05,
      "loss": 0.4187,
      "step": 312
    },
    {
      "epoch": 1.4073554183043235,
      "grad_norm": 0.3800500532172459,
      "learning_rate": 3.611690577487842e-05,
      "loss": 0.4113,
      "step": 313
    },
    {
      "epoch": 1.4118472768107804,
      "grad_norm": 0.3866270074899333,
      "learning_rate": 3.5974256872834606e-05,
      "loss": 0.4443,
      "step": 314
    },
    {
      "epoch": 1.4163391353172374,
      "grad_norm": 0.3183345152750328,
      "learning_rate": 3.583139644140736e-05,
      "loss": 0.4115,
      "step": 315
    },
    {
      "epoch": 1.4208309938236945,
      "grad_norm": 0.33334591314115697,
      "learning_rate": 3.5688328410275545e-05,
      "loss": 0.4106,
      "step": 316
    },
    {
      "epoch": 1.4253228523301515,
      "grad_norm": 0.33478660260203313,
      "learning_rate": 3.5545056714828505e-05,
      "loss": 0.438,
      "step": 317
    },
    {
      "epoch": 1.4298147108366086,
      "grad_norm": 0.32710314888490855,
      "learning_rate": 3.540158529605782e-05,
      "loss": 0.4153,
      "step": 318
    },
    {
      "epoch": 1.4343065693430657,
      "grad_norm": 0.2779481235111702,
      "learning_rate": 3.525791810044886e-05,
      "loss": 0.4267,
      "step": 319
    },
    {
      "epoch": 1.4387984278495227,
      "grad_norm": 0.31526546588032756,
      "learning_rate": 3.5114059079872264e-05,
      "loss": 0.4182,
      "step": 320
    },
    {
      "epoch": 1.4432902863559798,
      "grad_norm": 0.3425728526869508,
      "learning_rate": 3.4970012191475205e-05,
      "loss": 0.4311,
      "step": 321
    },
    {
      "epoch": 1.4477821448624368,
      "grad_norm": 0.3090871364549649,
      "learning_rate": 3.4825781397572596e-05,
      "loss": 0.4104,
      "step": 322
    },
    {
      "epoch": 1.452274003368894,
      "grad_norm": 0.3655821696356299,
      "learning_rate": 3.468137066553802e-05,
      "loss": 0.43,
      "step": 323
    },
    {
      "epoch": 1.456765861875351,
      "grad_norm": 0.26480702241756343,
      "learning_rate": 3.453678396769466e-05,
      "loss": 0.4226,
      "step": 324
    },
    {
      "epoch": 1.461257720381808,
      "grad_norm": 0.3357045273673627,
      "learning_rate": 3.4392025281206015e-05,
      "loss": 0.4241,
      "step": 325
    },
    {
      "epoch": 1.465749578888265,
      "grad_norm": 0.2936284497729234,
      "learning_rate": 3.424709858796649e-05,
      "loss": 0.4232,
      "step": 326
    },
    {
      "epoch": 1.4702414373947221,
      "grad_norm": 0.26481671273072727,
      "learning_rate": 3.4102007874491864e-05,
      "loss": 0.3988,
      "step": 327
    },
    {
      "epoch": 1.4747332959011792,
      "grad_norm": 0.317241703119109,
      "learning_rate": 3.3956757131809645e-05,
      "loss": 0.4392,
      "step": 328
    },
    {
      "epoch": 1.479225154407636,
      "grad_norm": 0.3078399987622083,
      "learning_rate": 3.3811350355349305e-05,
      "loss": 0.405,
      "step": 329
    },
    {
      "epoch": 1.4837170129140933,
      "grad_norm": 0.31444107370208924,
      "learning_rate": 3.36657915448323e-05,
      "loss": 0.432,
      "step": 330
    },
    {
      "epoch": 1.4882088714205501,
      "grad_norm": 0.323332727646115,
      "learning_rate": 3.352008470416218e-05,
      "loss": 0.4225,
      "step": 331
    },
    {
      "epoch": 1.4927007299270074,
      "grad_norm": 0.2667619713588966,
      "learning_rate": 3.337423384131432e-05,
      "loss": 0.4113,
      "step": 332
    },
    {
      "epoch": 1.4971925884334643,
      "grad_norm": 0.3330676591878106,
      "learning_rate": 3.322824296822576e-05,
      "loss": 0.4188,
      "step": 333
    },
    {
      "epoch": 1.5016844469399215,
      "grad_norm": 0.28967127654136837,
      "learning_rate": 3.3082116100684804e-05,
      "loss": 0.4237,
      "step": 334
    },
    {
      "epoch": 1.5061763054463784,
      "grad_norm": 0.39600383028909486,
      "learning_rate": 3.293585725822056e-05,
      "loss": 0.4216,
      "step": 335
    },
    {
      "epoch": 1.5106681639528357,
      "grad_norm": 0.27798024681673933,
      "learning_rate": 3.2789470463992424e-05,
      "loss": 0.4279,
      "step": 336
    },
    {
      "epoch": 1.5151600224592925,
      "grad_norm": 0.39649677808128553,
      "learning_rate": 3.264295974467934e-05,
      "loss": 0.414,
      "step": 337
    },
    {
      "epoch": 1.5196518809657495,
      "grad_norm": 0.476076414839538,
      "learning_rate": 3.24963291303691e-05,
      "loss": 0.4076,
      "step": 338
    },
    {
      "epoch": 1.5241437394722066,
      "grad_norm": 0.33178989228985734,
      "learning_rate": 3.234958265444744e-05,
      "loss": 0.4202,
      "step": 339
    },
    {
      "epoch": 1.5286355979786637,
      "grad_norm": 0.33368347444203755,
      "learning_rate": 3.2202724353487125e-05,
      "loss": 0.4166,
      "step": 340
    },
    {
      "epoch": 1.5331274564851207,
| "grad_norm": 0.4460597894671801, | |
| "learning_rate": 3.205575826713691e-05, | |
| "loss": 0.4226, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.5376193149915778, | |
| "grad_norm": 0.25511431033825155, | |
| "learning_rate": 3.19086884380104e-05, | |
| "loss": 0.4142, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.5421111734980348, | |
| "grad_norm": 0.38824117126685315, | |
| "learning_rate": 3.176151891157489e-05, | |
| "loss": 0.4291, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.546603032004492, | |
| "grad_norm": 0.31121791669268195, | |
| "learning_rate": 3.161425373604003e-05, | |
| "loss": 0.4214, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.551094890510949, | |
| "grad_norm": 0.30148485606990627, | |
| "learning_rate": 3.146689696224653e-05, | |
| "loss": 0.4091, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.5555867490174058, | |
| "grad_norm": 0.3046866547182492, | |
| "learning_rate": 3.131945264355468e-05, | |
| "loss": 0.4236, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.560078607523863, | |
| "grad_norm": 0.2504090472009857, | |
| "learning_rate": 3.11719248357329e-05, | |
| "loss": 0.4145, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.56457046603032, | |
| "grad_norm": 0.258289122621969, | |
| "learning_rate": 3.102431759684614e-05, | |
| "loss": 0.4033, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.5690623245367772, | |
| "grad_norm": 0.2881179834457112, | |
| "learning_rate": 3.087663498714428e-05, | |
| "loss": 0.4161, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.573554183043234, | |
| "grad_norm": 0.34290225796172236, | |
| "learning_rate": 3.072888106895041e-05, | |
| "loss": 0.4327, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.5780460415496913, | |
| "grad_norm": 0.2891433180130989, | |
| "learning_rate": 3.058105990654915e-05, | |
| "loss": 0.4215, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5825379000561481, | |
| "grad_norm": 0.2607369714901761, | |
| "learning_rate": 3.043317556607478e-05, | |
| "loss": 0.4201, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5870297585626054, | |
| "grad_norm": 0.36665871070764555, | |
| "learning_rate": 3.028523211539945e-05, | |
| "loss": 0.3943, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.5915216170690623, | |
| "grad_norm": 0.26437463424741936, | |
| "learning_rate": 3.013723362402124e-05, | |
| "loss": 0.4331, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.5960134755755195, | |
| "grad_norm": 0.40410569517512446, | |
| "learning_rate": 2.9989184162952263e-05, | |
| "loss": 0.4253, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.6005053340819764, | |
| "grad_norm": 0.30419273048420425, | |
| "learning_rate": 2.984108780460665e-05, | |
| "loss": 0.397, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6049971925884334, | |
| "grad_norm": 0.2818188621577564, | |
| "learning_rate": 2.9692948622688553e-05, | |
| "loss": 0.4102, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6094890510948905, | |
| "grad_norm": 0.2972538218122035, | |
| "learning_rate": 2.9544770692080047e-05, | |
| "loss": 0.4264, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6139809096013475, | |
| "grad_norm": 0.2471489528022849, | |
| "learning_rate": 2.9396558088729097e-05, | |
| "loss": 0.4271, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.6184727681078046, | |
| "grad_norm": 0.3564224759338565, | |
| "learning_rate": 2.9248314889537414e-05, | |
| "loss": 0.4137, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.6229646266142617, | |
| "grad_norm": 0.32905291509123547, | |
| "learning_rate": 2.9100045172248296e-05, | |
| "loss": 0.4172, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.6274564851207187, | |
| "grad_norm": 0.3190225807797956, | |
| "learning_rate": 2.895175301533451e-05, | |
| "loss": 0.4198, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.6319483436271758, | |
| "grad_norm": 0.3382075017363175, | |
| "learning_rate": 2.8803442497886032e-05, | |
| "loss": 0.3953, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.6364402021336328, | |
| "grad_norm": 0.33610188019859094, | |
| "learning_rate": 2.865511769949792e-05, | |
| "loss": 0.4231, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.6409320606400897, | |
| "grad_norm": 0.2260278632887831, | |
| "learning_rate": 2.850678270015803e-05, | |
| "loss": 0.4029, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.645423919146547, | |
| "grad_norm": 0.2867061812970814, | |
| "learning_rate": 2.835844158013484e-05, | |
| "loss": 0.4147, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.6499157776530038, | |
| "grad_norm": 0.26179589202825637, | |
| "learning_rate": 2.821009841986517e-05, | |
| "loss": 0.4171, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.654407636159461, | |
| "grad_norm": 0.29649254174705647, | |
| "learning_rate": 2.8061757299841978e-05, | |
| "loss": 0.3961, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.658899494665918, | |
| "grad_norm": 0.26650290381668934, | |
| "learning_rate": 2.791342230050209e-05, | |
| "loss": 0.4269, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.6633913531723752, | |
| "grad_norm": 0.28900135674448674, | |
| "learning_rate": 2.7765097502113973e-05, | |
| "loss": 0.4096, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.667883211678832, | |
| "grad_norm": 0.26493172267064097, | |
| "learning_rate": 2.76167869846655e-05, | |
| "loss": 0.4096, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.6723750701852893, | |
| "grad_norm": 0.3315175277608438, | |
| "learning_rate": 2.746849482775171e-05, | |
| "loss": 0.4265, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.6768669286917461, | |
| "grad_norm": 0.25114986864895356, | |
| "learning_rate": 2.73202251104626e-05, | |
| "loss": 0.4296, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6813587871982034, | |
| "grad_norm": 0.26764239370534887, | |
| "learning_rate": 2.7171981911270918e-05, | |
| "loss": 0.3993, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6858506457046603, | |
| "grad_norm": 0.2500639155076402, | |
| "learning_rate": 2.7023769307919962e-05, | |
| "loss": 0.4087, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.6903425042111173, | |
| "grad_norm": 0.2214935528332111, | |
| "learning_rate": 2.6875591377311453e-05, | |
| "loss": 0.4197, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.6948343627175744, | |
| "grad_norm": 0.24330294846668346, | |
| "learning_rate": 2.6727452195393347e-05, | |
| "loss": 0.4187, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.6993262212240314, | |
| "grad_norm": 0.2762314654048503, | |
| "learning_rate": 2.6579355837047735e-05, | |
| "loss": 0.4164, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7038180797304885, | |
| "grad_norm": 0.26707238218702334, | |
| "learning_rate": 2.643130637597876e-05, | |
| "loss": 0.4089, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7083099382369455, | |
| "grad_norm": 0.2197237899001418, | |
| "learning_rate": 2.6283307884600558e-05, | |
| "loss": 0.412, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7128017967434026, | |
| "grad_norm": 0.2710843795550557, | |
| "learning_rate": 2.6135364433925227e-05, | |
| "loss": 0.427, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.7172936552498597, | |
| "grad_norm": 0.2186717839830273, | |
| "learning_rate": 2.5987480093450858e-05, | |
| "loss": 0.4098, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.7217855137563167, | |
| "grad_norm": 0.29371991192822805, | |
| "learning_rate": 2.5839658931049594e-05, | |
| "loss": 0.4351, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.7262773722627736, | |
| "grad_norm": 0.1986384947170359, | |
| "learning_rate": 2.569190501285573e-05, | |
| "loss": 0.4065, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.2554589741070793, | |
| "learning_rate": 2.5544222403153865e-05, | |
| "loss": 0.414, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.7352610892756877, | |
| "grad_norm": 0.23634494461674738, | |
| "learning_rate": 2.5396615164267103e-05, | |
| "loss": 0.4287, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.739752947782145, | |
| "grad_norm": 0.23428407666660517, | |
| "learning_rate": 2.5249087356445325e-05, | |
| "loss": 0.413, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.7442448062886018, | |
| "grad_norm": 0.25332948509650427, | |
| "learning_rate": 2.510164303775348e-05, | |
| "loss": 0.406, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.748736664795059, | |
| "grad_norm": 0.22514142243409666, | |
| "learning_rate": 2.4954286263959977e-05, | |
| "loss": 0.4264, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.753228523301516, | |
| "grad_norm": 0.23312117766346288, | |
| "learning_rate": 2.4807021088425126e-05, | |
| "loss": 0.4066, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.7577203818079732, | |
| "grad_norm": 0.22555256398015153, | |
| "learning_rate": 2.465985156198961e-05, | |
| "loss": 0.4399, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.76221224031443, | |
| "grad_norm": 0.239078624926934, | |
| "learning_rate": 2.45127817328631e-05, | |
| "loss": 0.4046, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.7667040988208873, | |
| "grad_norm": 0.25315482877506523, | |
| "learning_rate": 2.436581564651288e-05, | |
| "loss": 0.4195, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.7711959573273441, | |
| "grad_norm": 0.26317268705489383, | |
| "learning_rate": 2.4218957345552568e-05, | |
| "loss": 0.4053, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.7756878158338012, | |
| "grad_norm": 0.3077517352317534, | |
| "learning_rate": 2.4072210869630906e-05, | |
| "loss": 0.4064, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.7801796743402583, | |
| "grad_norm": 0.25075113167358737, | |
| "learning_rate": 2.392558025532066e-05, | |
| "loss": 0.4034, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.7846715328467153, | |
| "grad_norm": 0.3140726572680898, | |
| "learning_rate": 2.3779069536007578e-05, | |
| "loss": 0.4015, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.7891633913531724, | |
| "grad_norm": 0.23661923739159335, | |
| "learning_rate": 2.3632682741779443e-05, | |
| "loss": 0.4155, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.7936552498596294, | |
| "grad_norm": 0.2203166261147199, | |
| "learning_rate": 2.3486423899315204e-05, | |
| "loss": 0.4048, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.7981471083660865, | |
| "grad_norm": 0.22898517065684473, | |
| "learning_rate": 2.3340297031774245e-05, | |
| "loss": 0.42, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8026389668725435, | |
| "grad_norm": 0.22735491481207948, | |
| "learning_rate": 2.3194306158685682e-05, | |
| "loss": 0.4205, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8071308253790006, | |
| "grad_norm": 0.2500598506840461, | |
| "learning_rate": 2.3048455295837822e-05, | |
| "loss": 0.4036, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8116226838854577, | |
| "grad_norm": 0.23863219705238561, | |
| "learning_rate": 2.2902748455167704e-05, | |
| "loss": 0.4266, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.8161145423919147, | |
| "grad_norm": 0.2352354859109987, | |
| "learning_rate": 2.2757189644650714e-05, | |
| "loss": 0.4247, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.8206064008983716, | |
| "grad_norm": 0.22490733683067027, | |
| "learning_rate": 2.2611782868190363e-05, | |
| "loss": 0.4076, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.8250982594048288, | |
| "grad_norm": 0.23252879423903727, | |
| "learning_rate": 2.2466532125508148e-05, | |
| "loss": 0.4136, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.8295901179112857, | |
| "grad_norm": 0.2553006127874278, | |
| "learning_rate": 2.232144141203352e-05, | |
| "loss": 0.4138, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.834081976417743, | |
| "grad_norm": 0.2108018293185955, | |
| "learning_rate": 2.2176514718793994e-05, | |
| "loss": 0.3962, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.8385738349241998, | |
| "grad_norm": 0.2622683081425597, | |
| "learning_rate": 2.2031756032305333e-05, | |
| "loss": 0.4082, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.843065693430657, | |
| "grad_norm": 0.22031220462622406, | |
| "learning_rate": 2.188716933446198e-05, | |
| "loss": 0.4268, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.847557551937114, | |
| "grad_norm": 0.3036567077269344, | |
| "learning_rate": 2.174275860242741e-05, | |
| "loss": 0.4028, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.8520494104435712, | |
| "grad_norm": 0.20702299784174724, | |
| "learning_rate": 2.1598527808524794e-05, | |
| "loss": 0.4088, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.856541268950028, | |
| "grad_norm": 0.32142540413940923, | |
| "learning_rate": 2.1454480920127745e-05, | |
| "loss": 0.4297, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.861033127456485, | |
| "grad_norm": 0.2655970915230429, | |
| "learning_rate": 2.1310621899551145e-05, | |
| "loss": 0.3897, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.8655249859629421, | |
| "grad_norm": 0.2761872286918195, | |
| "learning_rate": 2.1166954703942184e-05, | |
| "loss": 0.4095, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.8700168444693992, | |
| "grad_norm": 0.2522433353381978, | |
| "learning_rate": 2.1023483285171493e-05, | |
| "loss": 0.4411, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.8745087029758563, | |
| "grad_norm": 0.25953116653940195, | |
| "learning_rate": 2.088021158972446e-05, | |
| "loss": 0.4061, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.8790005614823133, | |
| "grad_norm": 0.3062755974675169, | |
| "learning_rate": 2.0737143558592648e-05, | |
| "loss": 0.412, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.8834924199887704, | |
| "grad_norm": 0.22419646309205818, | |
| "learning_rate": 2.0594283127165396e-05, | |
| "loss": 0.4227, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.8879842784952274, | |
| "grad_norm": 0.24586931200702924, | |
| "learning_rate": 2.045163422512159e-05, | |
| "loss": 0.3993, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.8924761370016845, | |
| "grad_norm": 0.23697716693509094, | |
| "learning_rate": 2.0309200776321534e-05, | |
| "loss": 0.4113, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.8969679955081415, | |
| "grad_norm": 0.2667432302647834, | |
| "learning_rate": 2.0166986698699047e-05, | |
| "loss": 0.4076, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9014598540145986, | |
| "grad_norm": 0.4683746807670452, | |
| "learning_rate": 2.0024995904153676e-05, | |
| "loss": 0.4115, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9059517125210554, | |
| "grad_norm": 0.30212390389711563, | |
| "learning_rate": 1.9883232298443098e-05, | |
| "loss": 0.4259, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.9104435710275127, | |
| "grad_norm": 0.24473748282978983, | |
| "learning_rate": 1.9741699781075652e-05, | |
| "loss": 0.3982, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.9149354295339696, | |
| "grad_norm": 0.24679947847474218, | |
| "learning_rate": 1.9600402245203158e-05, | |
| "loss": 0.4027, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.9194272880404268, | |
| "grad_norm": 0.2729103797945829, | |
| "learning_rate": 1.945934357751372e-05, | |
| "loss": 0.4336, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.9239191465468837, | |
| "grad_norm": 0.23563365063772723, | |
| "learning_rate": 1.9318527658124886e-05, | |
| "loss": 0.4082, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.928411005053341, | |
| "grad_norm": 0.22757080636730853, | |
| "learning_rate": 1.9177958360476907e-05, | |
| "loss": 0.4126, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9329028635597978, | |
| "grad_norm": 0.22553070619645924, | |
| "learning_rate": 1.9037639551226154e-05, | |
| "loss": 0.4014, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.937394722066255, | |
| "grad_norm": 0.23284762006714582, | |
| "learning_rate": 1.889757509013882e-05, | |
| "loss": 0.4117, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.941886580572712, | |
| "grad_norm": 0.22551527039847094, | |
| "learning_rate": 1.875776882998468e-05, | |
| "loss": 0.4192, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.946378439079169, | |
| "grad_norm": 0.19924364923088228, | |
| "learning_rate": 1.8618224616431158e-05, | |
| "loss": 0.4064, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.950870297585626, | |
| "grad_norm": 0.2575421406115824, | |
| "learning_rate": 1.8478946287937537e-05, | |
| "loss": 0.4015, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.955362156092083, | |
| "grad_norm": 0.19096180555675882, | |
| "learning_rate": 1.8339937675649357e-05, | |
| "loss": 0.4191, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.9598540145985401, | |
| "grad_norm": 0.23876330511629193, | |
| "learning_rate": 1.8201202603293054e-05, | |
| "loss": 0.4191, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.9643458731049972, | |
| "grad_norm": 0.24773709987022863, | |
| "learning_rate": 1.8062744887070764e-05, | |
| "loss": 0.4121, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.9688377316114543, | |
| "grad_norm": 0.1869548437975849, | |
| "learning_rate": 1.792456833555534e-05, | |
| "loss": 0.4083, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.9733295901179113, | |
| "grad_norm": 0.23165046473155232, | |
| "learning_rate": 1.7786676749585633e-05, | |
| "loss": 0.4033, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.9778214486243684, | |
| "grad_norm": 0.22374679674635753, | |
| "learning_rate": 1.7649073922161886e-05, | |
| "loss": 0.4169, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.9823133071308254, | |
| "grad_norm": 0.223089272758445, | |
| "learning_rate": 1.751176363834144e-05, | |
| "loss": 0.4045, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.9868051656372825, | |
| "grad_norm": 0.2163649922360653, | |
| "learning_rate": 1.7374749675134596e-05, | |
| "loss": 0.3977, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.9912970241437393, | |
| "grad_norm": 0.2515642994966962, | |
| "learning_rate": 1.7238035801400747e-05, | |
| "loss": 0.4225, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.9957888826501966, | |
| "grad_norm": 0.21151586839740125, | |
| "learning_rate": 1.7101625777744674e-05, | |
| "loss": 0.4077, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.0016844469399215, | |
| "grad_norm": 0.3834487521662885, | |
| "learning_rate": 1.696552335641312e-05, | |
| "loss": 0.6607, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.0061763054463784, | |
| "grad_norm": 0.29623665643917146, | |
| "learning_rate": 1.6829732281191567e-05, | |
| "loss": 0.3783, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.0106681639528357, | |
| "grad_norm": 0.28052511802712365, | |
| "learning_rate": 1.6694256287301277e-05, | |
| "loss": 0.3731, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.0151600224592925, | |
| "grad_norm": 0.24119072651096665, | |
| "learning_rate": 1.6559099101296524e-05, | |
| "loss": 0.3623, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.0196518809657498, | |
| "grad_norm": 0.3165464855791403, | |
| "learning_rate": 1.6424264440962092e-05, | |
| "loss": 0.3705, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.0241437394722066, | |
| "grad_norm": 0.26903749186156733, | |
| "learning_rate": 1.628975601521103e-05, | |
| "loss": 0.383, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.028635597978664, | |
| "grad_norm": 0.27624363600058127, | |
| "learning_rate": 1.6155577523982583e-05, | |
| "loss": 0.3842, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.0331274564851207, | |
| "grad_norm": 0.27930458117860996, | |
| "learning_rate": 1.602173265814048e-05, | |
| "loss": 0.3689, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.0376193149915776, | |
| "grad_norm": 0.23078193886609763, | |
| "learning_rate": 1.5888225099371367e-05, | |
| "loss": 0.3658, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.042111173498035, | |
| "grad_norm": 0.29699182301774285, | |
| "learning_rate": 1.5755058520083528e-05, | |
| "loss": 0.3781, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.0466030320044917, | |
| "grad_norm": 0.23597336208620376, | |
| "learning_rate": 1.5622236583305905e-05, | |
| "loss": 0.3815, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.051094890510949, | |
| "grad_norm": 0.2791538405336093, | |
| "learning_rate": 1.5489762942587298e-05, | |
| "loss": 0.3623, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.055586749017406, | |
| "grad_norm": 0.29184419118498217, | |
| "learning_rate": 1.535764124189591e-05, | |
| "loss": 0.3721, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.060078607523863, | |
| "grad_norm": 0.24877522985887954, | |
| "learning_rate": 1.5225875115519068e-05, | |
| "loss": 0.3845, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.06457046603032, | |
| "grad_norm": 0.32015942610097947, | |
| "learning_rate": 1.509446818796328e-05, | |
| "loss": 0.3879, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.069062324536777, | |
| "grad_norm": 0.2027881322318081, | |
| "learning_rate": 1.4963424073854534e-05, | |
| "loss": 0.3557, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.073554183043234, | |
| "grad_norm": 0.2751448437549408, | |
| "learning_rate": 1.4832746377838853e-05, | |
| "loss": 0.3607, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.0780460415496913, | |
| "grad_norm": 0.2610186069488747, | |
| "learning_rate": 1.4702438694483175e-05, | |
| "loss": 0.3725, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.082537900056148, | |
| "grad_norm": 0.21593288666162966, | |
| "learning_rate": 1.457250460817643e-05, | |
| "loss": 0.3717, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.0870297585626054, | |
| "grad_norm": 0.2759153648038306, | |
| "learning_rate": 1.4442947693030987e-05, | |
| "loss": 0.3685, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.0915216170690623, | |
| "grad_norm": 0.2042594357879666, | |
| "learning_rate": 1.4313771512784321e-05, | |
| "loss": 0.3638, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.0960134755755195, | |
| "grad_norm": 0.22880552208198743, | |
| "learning_rate": 1.4184979620700981e-05, | |
| "loss": 0.36, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.1005053340819764, | |
| "grad_norm": 0.23391287577994221, | |
| "learning_rate": 1.4056575559474865e-05, | |
| "loss": 0.3745, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.1049971925884337, | |
| "grad_norm": 0.1915981711790181, | |
| "learning_rate": 1.3928562861131748e-05, | |
| "loss": 0.3714, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.1094890510948905, | |
| "grad_norm": 0.21402406677341676, | |
| "learning_rate": 1.3800945046932151e-05, | |
| "loss": 0.3612, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.1139809096013478, | |
| "grad_norm": 0.2153003635732833, | |
| "learning_rate": 1.3673725627274463e-05, | |
| "loss": 0.3874, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.1184727681078046, | |
| "grad_norm": 0.21118460656384122, | |
| "learning_rate": 1.3546908101598396e-05, | |
| "loss": 0.3636, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.1229646266142614, | |
| "grad_norm": 0.18745245877234668, | |
| "learning_rate": 1.3420495958288707e-05, | |
| "loss": 0.3732, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.1274564851207187, | |
| "grad_norm": 0.21764996050070418, | |
| "learning_rate": 1.3294492674579251e-05, | |
| "loss": 0.3768, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.1319483436271756, | |
| "grad_norm": 0.19970593378662518, | |
| "learning_rate": 1.3168901716457348e-05, | |
| "loss": 0.3836, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.136440202133633, | |
| "grad_norm": 0.18392811237350778, | |
| "learning_rate": 1.30437265385684e-05, | |
| "loss": 0.3666, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.1409320606400897, | |
| "grad_norm": 0.23575916440328573, | |
| "learning_rate": 1.2918970584120936e-05, | |
| "loss": 0.3685, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.145423919146547, | |
| "grad_norm": 0.1818275031005896, | |
| "learning_rate": 1.2794637284791822e-05, | |
| "loss": 0.3684, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.149915777653004, | |
| "grad_norm": 0.2200606467491186, | |
| "learning_rate": 1.2670730060631907e-05, | |
| "loss": 0.3646, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.154407636159461, | |
| "grad_norm": 0.1850771733603428, | |
| "learning_rate": 1.2547252319971942e-05, | |
| "loss": 0.3702, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.158899494665918, | |
| "grad_norm": 0.19285919274844085, | |
| "learning_rate": 1.2424207459328823e-05, | |
| "loss": 0.3701, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.163391353172375, | |
| "grad_norm": 0.21433749969130853, | |
| "learning_rate": 1.2301598863312167e-05, | |
| "loss": 0.3862, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.167883211678832, | |
| "grad_norm": 0.1702931219728668, | |
| "learning_rate": 1.2179429904531206e-05, | |
| "loss": 0.3392, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.1723750701852893, | |
| "grad_norm": 0.24590754127635706, | |
| "learning_rate": 1.2057703943502014e-05, | |
| "loss": 0.3967, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.176866928691746, | |
| "grad_norm": 0.1855077803454749, | |
| "learning_rate": 1.1936424328555082e-05, | |
| "loss": 0.3636, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.1813587871982034, | |
| "grad_norm": 0.1891504659230766, | |
| "learning_rate": 1.18155943957432e-05, | |
| "loss": 0.3865, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.1858506457046603, | |
| "grad_norm": 0.21746705058645874, | |
| "learning_rate": 1.1695217468749701e-05, | |
| "loss": 0.3858, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.1903425042111175, | |
| "grad_norm": 0.20986402043253627, | |
| "learning_rate": 1.157529685879703e-05, | |
| "loss": 0.3588, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.1948343627175744, | |
| "grad_norm": 0.18644075409495384, | |
| "learning_rate": 1.1455835864555662e-05, | |
| "loss": 0.3798, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.199326221224031, | |
| "grad_norm": 0.1963526042828075, | |
| "learning_rate": 1.1336837772053378e-05, | |
| "loss": 0.3673, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.2038180797304885, | |
| "grad_norm": 0.187617209299586, | |
| "learning_rate": 1.1218305854584864e-05, | |
| "loss": 0.3624, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.2083099382369458, | |
| "grad_norm": 0.19335403819782826, | |
| "learning_rate": 1.110024337262166e-05, | |
| "loss": 0.3661, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.2128017967434026, | |
| "grad_norm": 0.18272068041309544, | |
| "learning_rate": 1.0982653573722495e-05, | |
| "loss": 0.3627, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.2172936552498594, | |
| "grad_norm": 0.22884315852522305, | |
| "learning_rate": 1.0865539692443972e-05, | |
| "loss": 0.3836, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.2217855137563167, | |
| "grad_norm": 0.16645083472040745, | |
| "learning_rate": 1.0748904950251538e-05, | |
| "loss": 0.3563, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.2262773722627736, | |
| "grad_norm": 0.17958098207746864, | |
| "learning_rate": 1.0632752555430916e-05, | |
| "loss": 0.3588, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.20602040592151422, | |
| "learning_rate": 1.0517085702999859e-05, | |
| "loss": 0.3642, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.2352610892756877, | |
| "grad_norm": 0.17394451500625532, | |
| "learning_rate": 1.0401907574620208e-05, | |
| "loss": 0.3701, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.239752947782145, | |
| "grad_norm": 0.5939144175219137, | |
| "learning_rate": 1.0287221338510449e-05, | |
| "loss": 0.363, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.244244806288602, | |
| "grad_norm": 0.18816804013353444, | |
| "learning_rate": 1.017303014935852e-05, | |
| "loss": 0.3568, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.248736664795059, | |
| "grad_norm": 0.17212128509452182, | |
| "learning_rate": 1.005933714823504e-05, | |
| "loss": 0.3816, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.253228523301516, | |
| "grad_norm": 0.2065743282673208, | |
| "learning_rate": 9.946145462506909e-06, | |
| "loss": 0.3677, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.257720381807973, | |
| "grad_norm": 0.1614021735327873, | |
| "learning_rate": 9.83345820575129e-06, | |
| "loss": 0.3856, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.26221224031443, | |
| "grad_norm": 0.19026464913161903, | |
| "learning_rate": 9.721278477669954e-06, | |
| "loss": 0.359, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.2667040988208873, | |
| "grad_norm": 0.19398591888369016, | |
| "learning_rate": 9.609609364004025e-06, | |
| "loss": 0.3934, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.271195957327344, | |
| "grad_norm": 0.15402446568209527, | |
| "learning_rate": 9.498453936449093e-06, | |
| "loss": 0.3657, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.2756878158338014, | |
| "grad_norm": 0.16537057769960917, | |
| "learning_rate": 9.387815252570721e-06, | |
| "loss": 0.3608, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.2801796743402583, | |
| "grad_norm": 0.16414929373075068, | |
| "learning_rate": 9.277696355720346e-06, | |
| "loss": 0.3683, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.2846715328467155, | |
| "grad_norm": 0.2761231773106987, | |
| "learning_rate": 9.168100274951548e-06, | |
| "loss": 0.3966, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.2891633913531724, | |
| "grad_norm": 0.16330716919362928, | |
| "learning_rate": 9.059030024936773e-06, | |
| "loss": 0.3661, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.293655249859629, | |
| "grad_norm": 0.16923130288471336, | |
| "learning_rate": 8.950488605884352e-06, | |
| "loss": 0.3763, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.2981471083660865, | |
| "grad_norm": 0.18086653441700623, | |
| "learning_rate": 8.842479003456008e-06, | |
| "loss": 0.3779, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.3026389668725433, | |
| "grad_norm": 0.15498945140293155, | |
| "learning_rate": 8.735004188684717e-06, | |
| "loss": 0.3807, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.3071308253790006, | |
| "grad_norm": 0.17180149953095158, | |
| "learning_rate": 8.628067117892991e-06, | |
| "loss": 0.3847, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.3116226838854574, | |
| "grad_norm": 0.1737575049123219, | |
| "learning_rate": 8.521670732611555e-06, | |
| "loss": 0.3635, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.3161145423919147, | |
| "grad_norm": 0.15105838774105132, | |
| "learning_rate": 8.415817959498431e-06, | |
| "loss": 0.3702, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.3206064008983716, | |
| "grad_norm": 0.16584058762448697, | |
| "learning_rate": 8.310511710258439e-06, | |
| "loss": 0.3603, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.325098259404829, | |
| "grad_norm": 0.15982377944091147, | |
| "learning_rate": 8.205754881563097e-06, | |
| "loss": 0.3751, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.3295901179112857, | |
| "grad_norm": 0.15342330778046015, | |
| "learning_rate": 8.101550354970953e-06, | |
| "loss": 0.378, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.334081976417743, | |
| "grad_norm": 0.19940434372819502, | |
| "learning_rate": 7.997900996848314e-06, | |
| "loss": 0.3672, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.3385738349242, | |
| "grad_norm": 0.15820473715429248, | |
| "learning_rate": 7.894809658290399e-06, | |
| "loss": 0.3598, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.343065693430657, | |
| "grad_norm": 0.1641502878010429, | |
| "learning_rate": 7.792279175042914e-06, | |
| "loss": 0.4044, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.347557551937114, | |
| "grad_norm": 0.15337436979783212, | |
| "learning_rate": 7.69031236742406e-06, | |
| "loss": 0.3543, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.352049410443571, | |
| "grad_norm": 0.1852758781212842, | |
| "learning_rate": 7.588912040246938e-06, | |
| "loss": 0.3784, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.356541268950028, | |
| "grad_norm": 0.16238259293772708, | |
| "learning_rate": 7.488080982742408e-06, | |
| "loss": 0.3636, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.3610331274564853, | |
| "grad_norm": 0.15603906946771715, | |
| "learning_rate": 7.387821968482352e-06, | |
| "loss": 0.3811, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.365524985962942, | |
| "grad_norm": 0.1659566098554324, | |
| "learning_rate": 7.288137755303415e-06, | |
| "loss": 0.3633, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.370016844469399, | |
| "grad_norm": 0.19035043267004925, | |
| "learning_rate": 7.189031085231095e-06, | |
| "loss": 0.364, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.3745087029758563, | |
| "grad_norm": 0.15186801442609232, | |
| "learning_rate": 7.090504684404349e-06, | |
| "loss": 0.3875, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.3790005614823135, | |
| "grad_norm": 0.16344114761342604, | |
| "learning_rate": 6.992561263000608e-06, | |
| "loss": 0.3727, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.3834924199887704, | |
| "grad_norm": 0.16574192593884393, | |
| "learning_rate": 6.895203515161208e-06, | |
| "loss": 0.3918, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.387984278495227, | |
| "grad_norm": 0.14577845796477323, | |
| "learning_rate": 6.798434118917298e-06, | |
| "loss": 0.3557, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.3924761370016845, | |
| "grad_norm": 0.15968032977161276, | |
| "learning_rate": 6.702255736116169e-06, | |
| "loss": 0.3861, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.3969679955081413, | |
| "grad_norm": 0.16093750275057525, | |
| "learning_rate": 6.606671012348036e-06, | |
| "loss": 0.3639, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.4014598540145986, | |
| "grad_norm": 0.15800318552911596, | |
| "learning_rate": 6.511682576873261e-06, | |
| "loss": 0.3713, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.4059517125210554, | |
| "grad_norm": 0.16234671099987708, | |
| "learning_rate": 6.417293042550035e-06, | |
| "loss": 0.3854, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.4104435710275127, | |
| "grad_norm": 0.14909419081492642, | |
| "learning_rate": 6.323505005762507e-06, | |
| "loss": 0.3684, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.4149354295339696, | |
| "grad_norm": 0.1549173321840257, | |
| "learning_rate": 6.230321046349362e-06, | |
| "loss": 0.3746, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.419427288040427, | |
| "grad_norm": 0.1465237468242648, | |
| "learning_rate": 6.137743727532841e-06, | |
| "loss": 0.3826, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.4239191465468837, | |
| "grad_norm": 0.14541245229861247, | |
| "learning_rate": 6.045775595848269e-06, | |
| "loss": 0.3437, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.428411005053341, | |
| "grad_norm": 0.1498064908902273, | |
| "learning_rate": 5.9544191810739825e-06, | |
| "loss": 0.3825, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.432902863559798, | |
| "grad_norm": 0.14788005846909275, | |
| "learning_rate": 5.8636769961617455e-06, | |
| "loss": 0.3741, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.437394722066255, | |
| "grad_norm": 0.14453970820447454, | |
| "learning_rate": 5.7735515371676235e-06, | |
| "loss": 0.3609, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.441886580572712, | |
| "grad_norm": 0.1410161797968163, | |
| "learning_rate": 5.6840452831833515e-06, | |
| "loss": 0.3671, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.446378439079169, | |
| "grad_norm": 0.1584762247047278, | |
| "learning_rate": 5.595160696268093e-06, | |
| "loss": 0.3696, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.450870297585626, | |
| "grad_norm": 0.14610298365223495, | |
| "learning_rate": 5.50690022138075e-06, | |
| "loss": 0.3731, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.4553621560920833, | |
| "grad_norm": 0.13173933243455369, | |
| "learning_rate": 5.419266286312706e-06, | |
| "loss": 0.3488, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.45985401459854, | |
| "grad_norm": 0.16132740159557446, | |
| "learning_rate": 5.33226130162103e-06, | |
| "loss": 0.3721, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.464345873104997, | |
| "grad_norm": 0.16481023892412486, | |
| "learning_rate": 5.2458876605621865e-06, | |
| "loss": 0.3918, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.4688377316114543, | |
| "grad_norm": 0.12994695509763637, | |
| "learning_rate": 5.160147739026188e-06, | |
| "loss": 0.3582, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.473329590117911, | |
| "grad_norm": 0.1673891560932271, | |
| "learning_rate": 5.075043895471259e-06, | |
| "loss": 0.385, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.4778214486243684, | |
| "grad_norm": 0.1507734201121824, | |
| "learning_rate": 4.9905784708589365e-06, | |
| "loss": 0.3812, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.482313307130825, | |
| "grad_norm": 0.13615826740860898, | |
| "learning_rate": 4.906753788589707e-06, | |
| "loss": 0.3665, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.4868051656372825, | |
| "grad_norm": 0.15904524799386438, | |
| "learning_rate": 4.823572154439075e-06, | |
| "loss": 0.3721, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.4912970241437393, | |
| "grad_norm": 0.1517144896395181, | |
| "learning_rate": 4.7410358564941375e-06, | |
| "loss": 0.3619, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.4957888826501966, | |
| "grad_norm": 0.13891837541929897, | |
| "learning_rate": 4.659147165090664e-06, | |
| "loss": 0.374, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.5002807411566534, | |
| "grad_norm": 0.14237622397187708, | |
| "learning_rate": 4.577908332750624e-06, | |
| "loss": 0.3506, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.5047725996631107, | |
| "grad_norm": 0.16253277633391608, | |
| "learning_rate": 4.497321594120239e-06, | |
| "loss": 0.393, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.5092644581695676, | |
| "grad_norm": 0.14261230736800298, | |
| "learning_rate": 4.4173891659085096e-06, | |
| "loss": 0.344, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.513756316676025, | |
| "grad_norm": 0.1520280935198868, | |
| "learning_rate": 4.3381132468262535e-06, | |
| "loss": 0.3631, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.5182481751824817, | |
| "grad_norm": 0.15183891238142852, | |
| "learning_rate": 4.259496017525603e-06, | |
| "loss": 0.3858, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.522740033688939, | |
| "grad_norm": 0.14691223221755534, | |
| "learning_rate": 4.181539640540032e-06, | |
| "loss": 0.3621, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.527231892195396, | |
| "grad_norm": 0.14153958775601796, | |
| "learning_rate": 4.104246260224871e-06, | |
| "loss": 0.3683, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.531723750701853, | |
| "grad_norm": 0.16281015860283424, | |
| "learning_rate": 4.027618002698326e-06, | |
| "loss": 0.3577, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.53621560920831, | |
| "grad_norm": 0.1424800783066957, | |
| "learning_rate": 3.951656975782989e-06, | |
| "loss": 0.3784, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.5407074677147667, | |
| "grad_norm": 0.16328825448792272, | |
| "learning_rate": 3.876365268947863e-06, | |
| "loss": 0.3867, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.545199326221224, | |
| "grad_norm": 0.14773088993133252, | |
| "learning_rate": 3.8017449532508796e-06, | |
| "loss": 0.3785, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.5496911847276813, | |
| "grad_norm": 0.13128958256806036, | |
| "learning_rate": 3.7277980812819394e-06, | |
| "loss": 0.3576, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.554183043234138, | |
| "grad_norm": 0.13197390278998722, | |
| "learning_rate": 3.654526687106441e-06, | |
| "loss": 0.3545, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.558674901740595, | |
| "grad_norm": 0.15894574461129618, | |
| "learning_rate": 3.5819327862093417e-06, | |
| "loss": 0.4112, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.5631667602470523, | |
| "grad_norm": 0.14459375743644173, | |
| "learning_rate": 3.510018375439703e-06, | |
| "loss": 0.3622, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.5676586187535095, | |
| "grad_norm": 0.1439218454839097, | |
| "learning_rate": 3.438785432955774e-06, | |
| "loss": 0.3656, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.5721504772599664, | |
| "grad_norm": 0.15233426089100796, | |
| "learning_rate": 3.3682359181705775e-06, | |
| "loss": 0.3717, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.576642335766423, | |
| "grad_norm": 0.1322559371437999, | |
| "learning_rate": 3.2983717716980054e-06, | |
| "loss": 0.3603, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.5811341942728805, | |
| "grad_norm": 0.14694128208617876, | |
| "learning_rate": 3.229194915299446e-06, | |
| "loss": 0.4011, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.5856260527793373, | |
| "grad_norm": 0.1602803721484142, | |
| "learning_rate": 3.1607072518309103e-06, | |
| "loss": 0.3547, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.5901179112857946, | |
| "grad_norm": 0.1379362270299032, | |
| "learning_rate": 3.092910665190704e-06, | |
| "loss": 0.3697, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.5946097697922514, | |
| "grad_norm": 0.14133169540775473, | |
| "learning_rate": 3.0258070202676026e-06, | |
| "loss": 0.376, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.5991016282987087, | |
| "grad_norm": 0.14251407455481452, | |
| "learning_rate": 2.959398162889542e-06, | |
| "loss": 0.3579, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.6035934868051656, | |
| "grad_norm": 0.15315100809415336, | |
| "learning_rate": 2.89368591977286e-06, | |
| "loss": 0.3753, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.608085345311623, | |
| "grad_norm": 0.14101081938302126, | |
| "learning_rate": 2.8286720984720476e-06, | |
| "loss": 0.3723, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.6125772038180797, | |
| "grad_norm": 0.14319961194489517, | |
| "learning_rate": 2.7643584873300134e-06, | |
| "loss": 0.3729, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.6170690623245365, | |
| "grad_norm": 0.15095333395906557, | |
| "learning_rate": 2.700746855428914e-06, | |
| "loss": 0.3825, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.621560920830994, | |
| "grad_norm": 0.12661971070029193, | |
| "learning_rate": 2.637838952541465e-06, | |
| "loss": 0.3414, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.626052779337451, | |
| "grad_norm": 0.13278234273379166, | |
| "learning_rate": 2.5756365090828383e-06, | |
| "loss": 0.3807, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.630544637843908, | |
| "grad_norm": 0.12665698014448426, | |
| "learning_rate": 2.514141236063045e-06, | |
| "loss": 0.3609, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.6350364963503647, | |
| "grad_norm": 0.13779508930363313, | |
| "learning_rate": 2.4533548250398673e-06, | |
| "loss": 0.3869, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.639528354856822, | |
| "grad_norm": 0.13344207276431108, | |
| "learning_rate": 2.3932789480723437e-06, | |
| "loss": 0.3706, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.6440202133632793, | |
| "grad_norm": 0.13699604871101181, | |
| "learning_rate": 2.3339152576747693e-06, | |
| "loss": 0.3747, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.648512071869736, | |
| "grad_norm": 0.13677973246622468, | |
| "learning_rate": 2.2752653867712363e-06, | |
| "loss": 0.3582, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.653003930376193, | |
| "grad_norm": 0.14511975296201757, | |
| "learning_rate": 2.2173309486507165e-06, | |
| "loss": 0.3792, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.6574957888826503, | |
| "grad_norm": 0.12579219398981872, | |
| "learning_rate": 2.160113536922689e-06, | |
| "loss": 0.3754, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.661987647389107, | |
| "grad_norm": 0.13180638105096332, | |
| "learning_rate": 2.103614725473297e-06, | |
| "loss": 0.3686, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.6664795058955644, | |
| "grad_norm": 0.13168798354347597, | |
| "learning_rate": 2.0478360684220758e-06, | |
| "loss": 0.3629, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.670971364402021, | |
| "grad_norm": 0.13198065586545454, | |
| "learning_rate": 1.9927791000791787e-06, | |
| "loss": 0.3735, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.6754632229084785, | |
| "grad_norm": 0.1384604473879385, | |
| "learning_rate": 1.9384453349031737e-06, | |
| "loss": 0.3765, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.6799550814149353, | |
| "grad_norm": 0.13249884505301276, | |
| "learning_rate": 1.884836267459407e-06, | |
| "loss": 0.3694, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.6844469399213926, | |
| "grad_norm": 0.13879379982138118, | |
| "learning_rate": 1.8319533723788713e-06, | |
| "loss": 0.3725, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.6889387984278494, | |
| "grad_norm": 0.12469439824867523, | |
| "learning_rate": 1.779798104317648e-06, | |
| "loss": 0.3626, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.6934306569343067, | |
| "grad_norm": 0.12858080077622486, | |
| "learning_rate": 1.7283718979168997e-06, | |
| "loss": 0.3746, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.6979225154407636, | |
| "grad_norm": 0.13410929332687224, | |
| "learning_rate": 1.6776761677634044e-06, | |
| "loss": 0.3551, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.702414373947221, | |
| "grad_norm": 0.13175959221297903, | |
| "learning_rate": 1.6277123083506355e-06, | |
| "loss": 0.3775, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.7069062324536777, | |
| "grad_norm": 0.1322133282140637, | |
| "learning_rate": 1.5784816940404199e-06, | |
| "loss": 0.3642, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.7113980909601345, | |
| "grad_norm": 0.13612778232078965, | |
| "learning_rate": 1.5299856790251183e-06, | |
| "loss": 0.372, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.715889949466592, | |
| "grad_norm": 0.13427114816494626, | |
| "learning_rate": 1.4822255972903823e-06, | |
| "loss": 0.3709, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.720381807973049, | |
| "grad_norm": 0.1305771624806451, | |
| "learning_rate": 1.4352027625784614e-06, | |
| "loss": 0.3852, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.724873666479506, | |
| "grad_norm": 0.12519942206940418, | |
| "learning_rate": 1.3889184683520623e-06, | |
| "loss": 0.3507, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.7293655249859627, | |
| "grad_norm": 0.12740885813007477, | |
| "learning_rate": 1.3433739877587714e-06, | |
| "loss": 0.3646, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.73385738349242, | |
| "grad_norm": 0.12770657105316688, | |
| "learning_rate": 1.298570573596031e-06, | |
| "loss": 0.3653, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.7383492419988773, | |
| "grad_norm": 0.13021494670890732, | |
| "learning_rate": 1.2545094582766806e-06, | |
| "loss": 0.3749, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.742841100505334, | |
| "grad_norm": 0.12720931568585903, | |
| "learning_rate": 1.2111918537950663e-06, | |
| "loss": 0.3698, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.747332959011791, | |
| "grad_norm": 0.13031360088492538, | |
| "learning_rate": 1.1686189516936778e-06, | |
| "loss": 0.3544, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.7518248175182483, | |
| "grad_norm": 0.14232681783239062, | |
| "learning_rate": 1.1267919230304015e-06, | |
| "loss": 0.3709, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.756316676024705, | |
| "grad_norm": 0.1217025559747512, | |
| "learning_rate": 1.0857119183462873e-06, | |
| "loss": 0.3666, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.7608085345311624, | |
| "grad_norm": 0.1285083336790778, | |
| "learning_rate": 1.0453800676339103e-06, | |
| "loss": 0.3636, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.765300393037619, | |
| "grad_norm": 0.13178419359693688, | |
| "learning_rate": 1.0057974803062875e-06, | |
| "loss": 0.3717, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.7697922515440765, | |
| "grad_norm": 0.13148084796827988, | |
| "learning_rate": 9.669652451663472e-07, | |
| "loss": 0.3542, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.7742841100505333, | |
| "grad_norm": 0.13198401855249756, | |
| "learning_rate": 9.288844303770076e-07, | |
| "loss": 0.3849, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.7787759685569906, | |
| "grad_norm": 0.11912824431114015, | |
| "learning_rate": 8.915560834317684e-07, | |
| "loss": 0.3729, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.7832678270634474, | |
| "grad_norm": 0.127530408796306, | |
| "learning_rate": 8.549812311259026e-07, | |
| "loss": 0.3667, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.7877596855699043, | |
| "grad_norm": 0.13338301791208296, | |
| "learning_rate": 8.191608795282298e-07, | |
| "loss": 0.3523, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.7922515440763616, | |
| "grad_norm": 0.12889898554276089, | |
| "learning_rate": 7.840960139534193e-07, | |
| "loss": 0.3812, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.796743402582819, | |
| "grad_norm": 0.12497562196397191, | |
| "learning_rate": 7.497875989348939e-07, | |
| "loss": 0.3726, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.8012352610892757, | |
| "grad_norm": 0.1508506937827218, | |
| "learning_rate": 7.162365781983107e-07, | |
| "loss": 0.3803, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.8057271195957325, | |
| "grad_norm": 0.12671737389656942, | |
| "learning_rate": 6.834438746355856e-07, | |
| "loss": 0.3643, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.81021897810219, | |
| "grad_norm": 0.12837171738280037, | |
| "learning_rate": 6.514103902795144e-07, | |
| "loss": 0.3672, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.814710836608647, | |
| "grad_norm": 0.12614274808344075, | |
| "learning_rate": 6.201370062789621e-07, | |
| "loss": 0.3698, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.819202695115104, | |
| "grad_norm": 0.12996400253746293, | |
| "learning_rate": 5.896245828746239e-07, | |
| "loss": 0.3564, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.8236945536215607, | |
| "grad_norm": 0.12539502302364786, | |
| "learning_rate": 5.598739593753545e-07, | |
| "loss": 0.3813, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.828186412128018, | |
| "grad_norm": 0.13017394488619363, | |
| "learning_rate": 5.308859541350937e-07, | |
| "loss": 0.3737, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.832678270634475, | |
| "grad_norm": 0.1345338832497479, | |
| "learning_rate": 5.026613645303517e-07, | |
| "loss": 0.3654, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.837170129140932, | |
| "grad_norm": 0.12203800267847548, | |
| "learning_rate": 4.7520096693826864e-07, | |
| "loss": 0.3792, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.841661987647389, | |
| "grad_norm": 0.12207005945450579, | |
| "learning_rate": 4.4850551671526726e-07, | |
| "loss": 0.3638, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.13326259703768226, | |
| "learning_rate": 4.2257574817627444e-07, | |
| "loss": 0.3733, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.850645704660303, | |
| "grad_norm": 0.12025965191472038, | |
| "learning_rate": 3.9741237457451115e-07, | |
| "loss": 0.3779, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.8551375631667604, | |
| "grad_norm": 0.1247223170065772, | |
| "learning_rate": 3.7301608808189436e-07, | |
| "loss": 0.3816, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.859629421673217, | |
| "grad_norm": 0.12773720236825364, | |
| "learning_rate": 3.4938755976997314e-07, | |
| "loss": 0.3613, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.8641212801796745, | |
| "grad_norm": 0.13248322982196473, | |
| "learning_rate": 3.265274395914892e-07, | |
| "loss": 0.3617, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.8686131386861313, | |
| "grad_norm": 0.1301416370938772, | |
| "learning_rate": 3.044363563624814e-07, | |
| "loss": 0.3747, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.8731049971925886, | |
| "grad_norm": 0.1267078653678669, | |
| "learning_rate": 2.831149177450112e-07, | |
| "loss": 0.3824, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.8775968556990454, | |
| "grad_norm": 0.11849109480921721, | |
| "learning_rate": 2.625637102304195e-07, | |
| "loss": 0.362, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.8820887142055023, | |
| "grad_norm": 0.11702902399405708, | |
| "learning_rate": 2.4278329912321715e-07, | |
| "loss": 0.3523, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.8865805727119596, | |
| "grad_norm": 0.1251109734289477, | |
| "learning_rate": 2.2377422852552588e-07, | |
| "loss": 0.3895, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.891072431218417, | |
| "grad_norm": 0.12139160361785585, | |
| "learning_rate": 2.0553702132211463e-07, | |
| "loss": 0.3666, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.8955642897248737, | |
| "grad_norm": 0.12211958626741572, | |
| "learning_rate": 1.8807217916600552e-07, | |
| "loss": 0.3657, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.9000561482313305, | |
| "grad_norm": 0.13082103610113954, | |
| "learning_rate": 1.7138018246469124e-07, | |
| "loss": 0.3724, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.904548006737788, | |
| "grad_norm": 0.12205820722138389, | |
| "learning_rate": 1.5546149036690875e-07, | |
| "loss": 0.3706, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.909039865244245, | |
| "grad_norm": 0.12019618783729218, | |
| "learning_rate": 1.403165407500157e-07, | |
| "loss": 0.3612, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.913531723750702, | |
| "grad_norm": 0.12562468502311944, | |
| "learning_rate": 1.259457502079415e-07, | |
| "loss": 0.3712, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.9180235822571587, | |
| "grad_norm": 0.1256013243853914, | |
| "learning_rate": 1.12349514039732e-07, | |
| "loss": 0.3727, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.922515440763616, | |
| "grad_norm": 0.1225201744128567, | |
| "learning_rate": 9.952820623866864e-08, | |
| "loss": 0.369, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.927007299270073, | |
| "grad_norm": 0.1414387641177634, | |
| "learning_rate": 8.748217948199705e-08, | |
| "loss": 0.3692, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.93149915777653, | |
| "grad_norm": 0.11580353964232593, | |
| "learning_rate": 7.621176512120177e-08, | |
| "loss": 0.3627, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.935991016282987, | |
| "grad_norm": 0.11868823233572184, | |
| "learning_rate": 6.571727317291237e-08, | |
| "loss": 0.3673, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.9404828747894443, | |
| "grad_norm": 0.12076089832698199, | |
| "learning_rate": 5.599899231036833e-08, | |
| "loss": 0.3571, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.944974733295901, | |
| "grad_norm": 0.11847206724092857, | |
| "learning_rate": 4.7057189855474484e-08, | |
| "loss": 0.3542, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.9494665918023584, | |
| "grad_norm": 0.12282099821573915, | |
| "learning_rate": 3.8892111771456016e-08, | |
| "loss": 0.3869, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.953958450308815, | |
| "grad_norm": 0.12087182615624807, | |
| "learning_rate": 3.1503982656088325e-08, | |
| "loss": 0.3627, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.958450308815272, | |
| "grad_norm": 0.12129149634112388, | |
| "learning_rate": 2.4893005735513918e-08, | |
| "loss": 0.3689, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.9629421673217293, | |
| "grad_norm": 0.12562547907924942, | |
| "learning_rate": 1.905936285866549e-08, | |
| "loss": 0.3798, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.9674340258281866, | |
| "grad_norm": 0.12565126515805553, | |
| "learning_rate": 1.4003214492247897e-08, | |
| "loss": 0.3644, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.9719258843346434, | |
| "grad_norm": 0.11891765791030787, | |
| "learning_rate": 9.724699716335607e-09, | |
| "loss": 0.3712, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.9764177428411003, | |
| "grad_norm": 0.12252378871109207, | |
| "learning_rate": 6.22393622054797e-09, | |
| "loss": 0.3703, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.9809096013475576, | |
| "grad_norm": 0.1262502829214966, | |
| "learning_rate": 3.5010203007959745e-09, | |
| "loss": 0.3711, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.985401459854015, | |
| "grad_norm": 0.11986947859116591, | |
| "learning_rate": 1.5560268566570605e-09, | |
| "loss": 0.3618, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.9898933183604717, | |
| "grad_norm": 0.12017151649381105, | |
| "learning_rate": 3.890093892963138e-10, | |
| "loss": 0.3655, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "grad_norm": 0.1266392478333783, | |
| "learning_rate": 0.0, | |
| "loss": 0.3727, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "step": 666, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_loss": 0.4359416009427549, | |
| "train_runtime": 112394.1811, | |
| "train_samples_per_second": 3.042, | |
| "train_steps_per_second": 0.006 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 666, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |