{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9997171679079853,
  "eval_steps": 500,
  "global_step": 11931,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002514063040130731,
      "grad_norm": 441.4593505859375,
      "learning_rate": 1.0050251256281409e-07,
      "loss": 27.7291,
      "step": 10
    },
    {
      "epoch": 0.005028126080261462,
      "grad_norm": Infinity,
      "learning_rate": 2.512562814070352e-07,
      "loss": 24.6957,
      "step": 20
    },
    {
      "epoch": 0.007542189120392194,
      "grad_norm": 498.08056640625,
      "learning_rate": 4.1876046901172535e-07,
      "loss": 27.4128,
      "step": 30
    },
    {
      "epoch": 0.010056252160522925,
      "grad_norm": 523.5923461914062,
      "learning_rate": 5.862646566164154e-07,
      "loss": 23.0706,
      "step": 40
    },
    {
      "epoch": 0.012570315200653657,
      "grad_norm": 249.7553253173828,
      "learning_rate": 7.537688442211055e-07,
      "loss": 20.4485,
      "step": 50
    },
    {
      "epoch": 0.015084378240784388,
      "grad_norm": 181.62521362304688,
      "learning_rate": 9.212730318257958e-07,
      "loss": 14.4136,
      "step": 60
    },
    {
      "epoch": 0.01759844128091512,
      "grad_norm": 104.66766357421875,
      "learning_rate": 1.088777219430486e-06,
      "loss": 13.0828,
      "step": 70
    },
    {
      "epoch": 0.02011250432104585,
      "grad_norm": 38.68351364135742,
      "learning_rate": 1.256281407035176e-06,
      "loss": 8.1433,
      "step": 80
    },
    {
      "epoch": 0.022626567361176582,
      "grad_norm": 77.3786392211914,
      "learning_rate": 1.423785594639866e-06,
      "loss": 7.7501,
      "step": 90
    },
    {
      "epoch": 0.025140630401307314,
      "grad_norm": 109.94917297363281,
      "learning_rate": 1.5912897822445563e-06,
      "loss": 6.526,
      "step": 100
    },
    {
      "epoch": 0.027654693441438043,
      "grad_norm": 19.608741760253906,
      "learning_rate": 1.7587939698492465e-06,
      "loss": 6.3196,
      "step": 110
    },
    {
      "epoch": 0.030168756481568776,
      "grad_norm": 19.54482650756836,
      "learning_rate": 1.9262981574539365e-06,
      "loss": 5.8398,
      "step": 120
    },
    {
      "epoch": 0.032682819521699505,
      "grad_norm": 14.328841209411621,
      "learning_rate": 2.0938023450586266e-06,
      "loss": 5.1687,
      "step": 130
    },
    {
      "epoch": 0.03519688256183024,
      "grad_norm": 44.560970306396484,
      "learning_rate": 2.261306532663317e-06,
      "loss": 4.9725,
      "step": 140
    },
    {
      "epoch": 0.03771094560196097,
      "grad_norm": 7.995884418487549,
      "learning_rate": 2.428810720268007e-06,
      "loss": 4.6175,
      "step": 150
    },
    {
      "epoch": 0.0402250086420917,
      "grad_norm": 15.97421932220459,
      "learning_rate": 2.5963149078726968e-06,
      "loss": 5.0148,
      "step": 160
    },
    {
      "epoch": 0.042739071682222435,
      "grad_norm": 9.63986587524414,
      "learning_rate": 2.763819095477387e-06,
      "loss": 4.4263,
      "step": 170
    },
    {
      "epoch": 0.045253134722353164,
      "grad_norm": 9.64190673828125,
      "learning_rate": 2.931323283082077e-06,
      "loss": 4.2462,
      "step": 180
    },
    {
      "epoch": 0.04776719776248389,
      "grad_norm": 3.4816224575042725,
      "learning_rate": 3.0988274706867673e-06,
      "loss": 4.0121,
      "step": 190
    },
    {
      "epoch": 0.05028126080261463,
      "grad_norm": 4.0279693603515625,
      "learning_rate": 3.266331658291458e-06,
      "loss": 3.9778,
      "step": 200
    },
    {
      "epoch": 0.05279532384274536,
      "grad_norm": 5.427811145782471,
      "learning_rate": 3.433835845896148e-06,
      "loss": 3.8643,
      "step": 210
    },
    {
      "epoch": 0.05530938688287609,
      "grad_norm": 3.2525746822357178,
      "learning_rate": 3.601340033500838e-06,
      "loss": 3.5628,
      "step": 220
    },
    {
      "epoch": 0.05782344992300682,
      "grad_norm": 6.63083553314209,
      "learning_rate": 3.768844221105528e-06,
      "loss": 3.5146,
      "step": 230
    },
    {
      "epoch": 0.06033751296313755,
      "grad_norm": 1.7304433584213257,
      "learning_rate": 3.936348408710219e-06,
      "loss": 3.4582,
      "step": 240
    },
    {
      "epoch": 0.06285157600326828,
      "grad_norm": 1.7491689920425415,
      "learning_rate": 4.103852596314908e-06,
      "loss": 3.4846,
      "step": 250
    },
    {
      "epoch": 0.06536563904339901,
      "grad_norm": 1.7652583122253418,
      "learning_rate": 4.271356783919598e-06,
      "loss": 3.3866,
      "step": 260
    },
    {
      "epoch": 0.06787970208352974,
      "grad_norm": 2.21983003616333,
      "learning_rate": 4.438860971524289e-06,
      "loss": 3.1431,
      "step": 270
    },
    {
      "epoch": 0.07039376512366048,
      "grad_norm": 1.6234071254730225,
      "learning_rate": 4.6063651591289785e-06,
      "loss": 3.2463,
      "step": 280
    },
    {
      "epoch": 0.07290782816379121,
      "grad_norm": 1.3999803066253662,
      "learning_rate": 4.773869346733669e-06,
      "loss": 3.0731,
      "step": 290
    },
    {
      "epoch": 0.07542189120392194,
      "grad_norm": 1.1736998558044434,
      "learning_rate": 4.941373534338359e-06,
      "loss": 2.8621,
      "step": 300
    },
    {
      "epoch": 0.07793595424405267,
      "grad_norm": 1.1260089874267578,
      "learning_rate": 5.108877721943049e-06,
      "loss": 2.8655,
      "step": 310
    },
    {
      "epoch": 0.0804500172841834,
      "grad_norm": 1.0804224014282227,
      "learning_rate": 5.2763819095477384e-06,
      "loss": 2.5391,
      "step": 320
    },
    {
      "epoch": 0.08296408032431413,
      "grad_norm": 1.3266807794570923,
      "learning_rate": 5.443886097152429e-06,
      "loss": 2.9263,
      "step": 330
    },
    {
      "epoch": 0.08547814336444487,
      "grad_norm": 1.1520569324493408,
      "learning_rate": 5.611390284757119e-06,
      "loss": 2.6345,
      "step": 340
    },
    {
      "epoch": 0.0879922064045756,
      "grad_norm": 1.8159115314483643,
      "learning_rate": 5.778894472361809e-06,
      "loss": 2.9688,
      "step": 350
    },
    {
      "epoch": 0.09050626944470633,
      "grad_norm": 1.2056509256362915,
      "learning_rate": 5.946398659966499e-06,
      "loss": 2.9003,
      "step": 360
    },
    {
      "epoch": 0.09302033248483706,
      "grad_norm": 1.1775842905044556,
      "learning_rate": 6.113902847571189e-06,
      "loss": 2.7883,
      "step": 370
    },
    {
      "epoch": 0.09553439552496779,
      "grad_norm": 0.9780596494674683,
      "learning_rate": 6.28140703517588e-06,
      "loss": 2.7969,
      "step": 380
    },
    {
      "epoch": 0.09804845856509851,
      "grad_norm": 1.1352972984313965,
      "learning_rate": 6.448911222780571e-06,
      "loss": 2.5682,
      "step": 390
    },
    {
      "epoch": 0.10056252160522926,
      "grad_norm": 1.2285816669464111,
      "learning_rate": 6.616415410385261e-06,
      "loss": 2.6983,
      "step": 400
    },
    {
      "epoch": 0.10307658464535999,
      "grad_norm": 1.0274040699005127,
      "learning_rate": 6.7839195979899505e-06,
      "loss": 2.5081,
      "step": 410
    },
    {
      "epoch": 0.10559064768549072,
      "grad_norm": 0.9488459229469299,
      "learning_rate": 6.951423785594641e-06,
      "loss": 2.654,
      "step": 420
    },
    {
      "epoch": 0.10810471072562144,
      "grad_norm": 1.193317174911499,
      "learning_rate": 7.118927973199331e-06,
      "loss": 2.6181,
      "step": 430
    },
    {
      "epoch": 0.11061877376575217,
      "grad_norm": 1.1801531314849854,
      "learning_rate": 7.2864321608040215e-06,
      "loss": 2.4548,
      "step": 440
    },
    {
      "epoch": 0.1131328368058829,
      "grad_norm": 1.0129225254058838,
      "learning_rate": 7.453936348408711e-06,
      "loss": 2.51,
      "step": 450
    },
    {
      "epoch": 0.11564689984601365,
      "grad_norm": 1.0601414442062378,
      "learning_rate": 7.621440536013401e-06,
      "loss": 2.5285,
      "step": 460
    },
    {
      "epoch": 0.11816096288614437,
      "grad_norm": 1.047395944595337,
      "learning_rate": 7.788944723618092e-06,
      "loss": 2.474,
      "step": 470
    },
    {
      "epoch": 0.1206750259262751,
      "grad_norm": 0.7101143598556519,
      "learning_rate": 7.956448911222781e-06,
      "loss": 2.4253,
      "step": 480
    },
    {
      "epoch": 0.12318908896640583,
      "grad_norm": 0.937229335308075,
      "learning_rate": 8.123953098827471e-06,
      "loss": 2.4842,
      "step": 490
    },
    {
      "epoch": 0.12570315200653656,
      "grad_norm": 0.8702961206436157,
      "learning_rate": 8.291457286432163e-06,
      "loss": 2.4401,
      "step": 500
    },
    {
      "epoch": 0.1282172150466673,
      "grad_norm": 0.9074134230613708,
      "learning_rate": 8.458961474036852e-06,
      "loss": 2.5236,
      "step": 510
    },
    {
      "epoch": 0.13073127808679802,
      "grad_norm": 0.9305140376091003,
      "learning_rate": 8.626465661641542e-06,
      "loss": 2.5018,
      "step": 520
    },
    {
      "epoch": 0.13324534112692876,
      "grad_norm": 0.8472815752029419,
      "learning_rate": 8.793969849246232e-06,
      "loss": 2.4944,
      "step": 530
    },
    {
      "epoch": 0.13575940416705948,
      "grad_norm": 1.0092636346817017,
      "learning_rate": 8.961474036850922e-06,
      "loss": 2.2441,
      "step": 540
    },
    {
      "epoch": 0.13827346720719022,
      "grad_norm": 0.8548736572265625,
      "learning_rate": 9.128978224455613e-06,
      "loss": 2.5998,
      "step": 550
    },
    {
      "epoch": 0.14078753024732096,
      "grad_norm": 0.8646708726882935,
      "learning_rate": 9.296482412060303e-06,
      "loss": 2.4962,
      "step": 560
    },
    {
      "epoch": 0.14330159328745168,
      "grad_norm": 1.0983237028121948,
      "learning_rate": 9.463986599664993e-06,
      "loss": 2.3725,
      "step": 570
    },
    {
      "epoch": 0.14581565632758242,
      "grad_norm": 0.896719217300415,
      "learning_rate": 9.631490787269682e-06,
      "loss": 2.3442,
      "step": 580
    },
    {
      "epoch": 0.14832971936771314,
      "grad_norm": 0.9426018595695496,
      "learning_rate": 9.798994974874372e-06,
      "loss": 2.5025,
      "step": 590
    },
    {
      "epoch": 0.15084378240784388,
      "grad_norm": 0.977573573589325,
      "learning_rate": 9.966499162479064e-06,
      "loss": 2.4096,
      "step": 600
    },
    {
      "epoch": 0.1533578454479746,
      "grad_norm": 0.9190056920051575,
      "learning_rate": 1.0134003350083752e-05,
      "loss": 2.4158,
      "step": 610
    },
    {
      "epoch": 0.15587190848810534,
      "grad_norm": 0.8852869868278503,
      "learning_rate": 1.0301507537688441e-05,
      "loss": 2.4308,
      "step": 620
    },
    {
      "epoch": 0.15838597152823608,
      "grad_norm": 0.900477945804596,
      "learning_rate": 1.0469011725293133e-05,
      "loss": 2.4504,
      "step": 630
    },
    {
      "epoch": 0.1609000345683668,
      "grad_norm": 0.8579984307289124,
      "learning_rate": 1.0636515912897823e-05,
      "loss": 2.4033,
      "step": 640
    },
    {
      "epoch": 0.16341409760849754,
      "grad_norm": 0.9647632241249084,
      "learning_rate": 1.0804020100502512e-05,
      "loss": 2.3929,
      "step": 650
    },
    {
      "epoch": 0.16592816064862825,
      "grad_norm": 0.8983623385429382,
      "learning_rate": 1.0971524288107202e-05,
      "loss": 2.2222,
      "step": 660
    },
    {
      "epoch": 0.168442223688759,
      "grad_norm": 0.8529979586601257,
      "learning_rate": 1.1139028475711894e-05,
      "loss": 2.2658,
      "step": 670
    },
    {
      "epoch": 0.17095628672888974,
      "grad_norm": 0.962179958820343,
      "learning_rate": 1.1306532663316583e-05,
      "loss": 2.3601,
      "step": 680
    },
    {
      "epoch": 0.17347034976902045,
      "grad_norm": 0.9139899015426636,
      "learning_rate": 1.1474036850921273e-05,
      "loss": 2.3464,
      "step": 690
    },
    {
      "epoch": 0.1759844128091512,
      "grad_norm": 0.8962693810462952,
      "learning_rate": 1.1641541038525963e-05,
      "loss": 2.3203,
      "step": 700
    },
    {
      "epoch": 0.1784984758492819,
      "grad_norm": 0.8451493978500366,
      "learning_rate": 1.1809045226130653e-05,
      "loss": 2.2961,
      "step": 710
    },
    {
      "epoch": 0.18101253888941266,
      "grad_norm": 0.9265815019607544,
      "learning_rate": 1.1976549413735344e-05,
      "loss": 2.451,
      "step": 720
    },
    {
      "epoch": 0.18352660192954337,
      "grad_norm": 0.9358211159706116,
      "learning_rate": 1.2144053601340034e-05,
      "loss": 2.1746,
      "step": 730
    },
    {
      "epoch": 0.1860406649696741,
      "grad_norm": 0.8496787548065186,
      "learning_rate": 1.2311557788944724e-05,
      "loss": 2.2138,
      "step": 740
    },
    {
      "epoch": 0.18855472800980486,
      "grad_norm": 0.7747920751571655,
      "learning_rate": 1.2479061976549413e-05,
      "loss": 2.0835,
      "step": 750
    },
    {
      "epoch": 0.19106879104993557,
      "grad_norm": 0.8322747349739075,
      "learning_rate": 1.2646566164154106e-05,
      "loss": 2.1772,
      "step": 760
    },
    {
      "epoch": 0.19358285409006631,
      "grad_norm": 1.0996856689453125,
      "learning_rate": 1.2814070351758796e-05,
      "loss": 2.3451,
      "step": 770
    },
    {
      "epoch": 0.19609691713019703,
      "grad_norm": 0.7528049349784851,
      "learning_rate": 1.2981574539363486e-05,
      "loss": 2.2558,
      "step": 780
    },
    {
      "epoch": 0.19861098017032777,
      "grad_norm": 0.8091762661933899,
      "learning_rate": 1.3149078726968176e-05,
      "loss": 2.4082,
      "step": 790
    },
    {
      "epoch": 0.20112504321045852,
      "grad_norm": 0.7670057415962219,
      "learning_rate": 1.3316582914572867e-05,
      "loss": 2.2246,
      "step": 800
    },
    {
      "epoch": 0.20363910625058923,
      "grad_norm": 0.9939231276512146,
      "learning_rate": 1.3484087102177557e-05,
      "loss": 2.2309,
      "step": 810
    },
    {
      "epoch": 0.20615316929071997,
      "grad_norm": 0.8385757207870483,
      "learning_rate": 1.3651591289782247e-05,
      "loss": 2.2663,
      "step": 820
    },
    {
      "epoch": 0.2086672323308507,
      "grad_norm": 0.9463258385658264,
      "learning_rate": 1.3819095477386936e-05,
      "loss": 2.1642,
      "step": 830
    },
    {
      "epoch": 0.21118129537098143,
      "grad_norm": 0.7986982464790344,
      "learning_rate": 1.3986599664991626e-05,
      "loss": 2.1903,
      "step": 840
    },
    {
      "epoch": 0.21369535841111215,
      "grad_norm": 0.8338661789894104,
      "learning_rate": 1.4154103852596318e-05,
      "loss": 2.1585,
      "step": 850
    },
    {
      "epoch": 0.2162094214512429,
      "grad_norm": 0.8824120759963989,
      "learning_rate": 1.4321608040201007e-05,
      "loss": 2.2699,
      "step": 860
    },
    {
      "epoch": 0.21872348449137363,
      "grad_norm": 0.7698261737823486,
      "learning_rate": 1.4489112227805697e-05,
      "loss": 2.1627,
      "step": 870
    },
    {
      "epoch": 0.22123754753150435,
      "grad_norm": 0.7611339092254639,
      "learning_rate": 1.4656616415410387e-05,
      "loss": 2.2287,
      "step": 880
    },
    {
      "epoch": 0.2237516105716351,
      "grad_norm": 0.9400216937065125,
      "learning_rate": 1.4824120603015077e-05,
      "loss": 2.1952,
      "step": 890
    },
    {
      "epoch": 0.2262656736117658,
      "grad_norm": 0.8769610524177551,
      "learning_rate": 1.4991624790619768e-05,
      "loss": 2.1618,
      "step": 900
    },
    {
      "epoch": 0.22877973665189655,
      "grad_norm": 0.768592357635498,
      "learning_rate": 1.5159128978224458e-05,
      "loss": 2.2728,
      "step": 910
    },
    {
      "epoch": 0.2312937996920273,
      "grad_norm": 0.8878312706947327,
      "learning_rate": 1.532663316582915e-05,
      "loss": 2.2305,
      "step": 920
    },
    {
      "epoch": 0.233807862732158,
      "grad_norm": 1.0638601779937744,
      "learning_rate": 1.5494137353433837e-05,
      "loss": 2.232,
      "step": 930
    },
    {
      "epoch": 0.23632192577228875,
      "grad_norm": 0.7619097232818604,
      "learning_rate": 1.566164154103853e-05,
      "loss": 2.0886,
      "step": 940
    },
    {
      "epoch": 0.23883598881241946,
      "grad_norm": 0.8709782361984253,
      "learning_rate": 1.5829145728643217e-05,
      "loss": 2.1845,
      "step": 950
    },
    {
      "epoch": 0.2413500518525502,
      "grad_norm": 0.83415287733078,
      "learning_rate": 1.599664991624791e-05,
      "loss": 2.0673,
      "step": 960
    },
    {
      "epoch": 0.24386411489268092,
      "grad_norm": 0.833507239818573,
      "learning_rate": 1.61641541038526e-05,
      "loss": 2.1072,
      "step": 970
    },
    {
      "epoch": 0.24637817793281167,
      "grad_norm": 0.7625109553337097,
      "learning_rate": 1.6331658291457288e-05,
      "loss": 2.2129,
      "step": 980
    },
    {
      "epoch": 0.2488922409729424,
      "grad_norm": 0.9412773847579956,
      "learning_rate": 1.649916247906198e-05,
      "loss": 2.2712,
      "step": 990
    },
    {
      "epoch": 0.2514063040130731,
      "grad_norm": 0.8031060099601746,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.0546,
      "step": 1000
    },
    {
      "epoch": 0.25392036705320387,
      "grad_norm": 0.6824911832809448,
      "learning_rate": 1.683417085427136e-05,
      "loss": 2.1147,
      "step": 1010
    },
    {
      "epoch": 0.2564344300933346,
      "grad_norm": 0.8071341514587402,
      "learning_rate": 1.700167504187605e-05,
      "loss": 2.2404,
      "step": 1020
    },
    {
      "epoch": 0.2589484931334653,
      "grad_norm": 0.7607057094573975,
      "learning_rate": 1.716917922948074e-05,
      "loss": 2.0147,
      "step": 1030
    },
    {
      "epoch": 0.26146255617359604,
      "grad_norm": 0.8166337609291077,
      "learning_rate": 1.733668341708543e-05,
      "loss": 2.0361,
      "step": 1040
    },
    {
      "epoch": 0.2639766192137268,
      "grad_norm": 0.7393823266029358,
      "learning_rate": 1.7504187604690118e-05,
      "loss": 2.1895,
      "step": 1050
    },
    {
      "epoch": 0.2664906822538575,
      "grad_norm": 0.8773198127746582,
      "learning_rate": 1.767169179229481e-05,
      "loss": 2.2537,
      "step": 1060
    },
    {
      "epoch": 0.26900474529398827,
      "grad_norm": 0.7908310890197754,
      "learning_rate": 1.78391959798995e-05,
      "loss": 2.1466,
      "step": 1070
    },
    {
      "epoch": 0.27151880833411896,
      "grad_norm": 5.72189474105835,
      "learning_rate": 1.800670016750419e-05,
      "loss": 2.2273,
      "step": 1080
    },
    {
      "epoch": 0.2740328713742497,
      "grad_norm": 0.8592923879623413,
      "learning_rate": 1.817420435510888e-05,
      "loss": 2.1991,
      "step": 1090
    },
    {
      "epoch": 0.27654693441438044,
      "grad_norm": 0.7954506874084473,
      "learning_rate": 1.834170854271357e-05,
      "loss": 2.2278,
      "step": 1100
    },
    {
      "epoch": 0.2790609974545112,
      "grad_norm": 1.4735509157180786,
      "learning_rate": 1.850921273031826e-05,
      "loss": 2.1293,
      "step": 1110
    },
    {
      "epoch": 0.2815750604946419,
      "grad_norm": 1.1755093336105347,
      "learning_rate": 1.867671691792295e-05,
      "loss": 2.0573,
      "step": 1120
    },
    {
      "epoch": 0.2840891235347726,
      "grad_norm": 0.7768784761428833,
      "learning_rate": 1.884422110552764e-05,
      "loss": 2.1545,
      "step": 1130
    },
    {
      "epoch": 0.28660318657490336,
      "grad_norm": 0.7681466937065125,
      "learning_rate": 1.901172529313233e-05,
      "loss": 2.0596,
      "step": 1140
    },
    {
      "epoch": 0.2891172496150341,
      "grad_norm": 0.6799530982971191,
      "learning_rate": 1.917922948073702e-05,
      "loss": 2.0604,
      "step": 1150
    },
    {
      "epoch": 0.29163131265516484,
      "grad_norm": 0.7920475602149963,
      "learning_rate": 1.934673366834171e-05,
      "loss": 2.1564,
      "step": 1160
    },
    {
      "epoch": 0.2941453756952956,
      "grad_norm": 0.8078681230545044,
      "learning_rate": 1.9514237855946402e-05,
      "loss": 2.0666,
      "step": 1170
    },
    {
      "epoch": 0.2966594387354263,
      "grad_norm": 0.9443550109863281,
      "learning_rate": 1.968174204355109e-05,
      "loss": 2.2787,
      "step": 1180
    },
    {
      "epoch": 0.299173501775557,
      "grad_norm": 0.8694850206375122,
      "learning_rate": 1.984924623115578e-05,
      "loss": 2.1347,
      "step": 1190
    },
    {
      "epoch": 0.30168756481568776,
      "grad_norm": 0.7615613341331482,
      "learning_rate": 1.999813728229487e-05,
      "loss": 2.0468,
      "step": 1200
    },
    {
      "epoch": 0.3042016278558185,
      "grad_norm": 0.7481529712677002,
      "learning_rate": 1.997951010524355e-05,
      "loss": 1.932,
      "step": 1210
    },
    {
      "epoch": 0.3067156908959492,
      "grad_norm": 0.7879073023796082,
      "learning_rate": 1.9960882928192234e-05,
      "loss": 2.1433,
      "step": 1220
    },
    {
      "epoch": 0.30922975393607993,
      "grad_norm": 0.7781507968902588,
      "learning_rate": 1.9942255751140914e-05,
      "loss": 2.1253,
      "step": 1230
    },
    {
      "epoch": 0.3117438169762107,
      "grad_norm": 0.8100705742835999,
      "learning_rate": 1.9923628574089597e-05,
      "loss": 2.1977,
      "step": 1240
    },
    {
      "epoch": 0.3142578800163414,
      "grad_norm": 0.8454229831695557,
      "learning_rate": 1.990500139703828e-05,
      "loss": 2.0877,
      "step": 1250
    },
    {
      "epoch": 0.31677194305647216,
      "grad_norm": 0.7245929837226868,
      "learning_rate": 1.9886374219986964e-05,
      "loss": 2.1028,
      "step": 1260
    },
    {
      "epoch": 0.31928600609660285,
      "grad_norm": 0.8685073256492615,
      "learning_rate": 1.9867747042935644e-05,
      "loss": 2.0697,
      "step": 1270
    },
    {
      "epoch": 0.3218000691367336,
      "grad_norm": 0.8295558094978333,
      "learning_rate": 1.9849119865884328e-05,
      "loss": 2.0856,
      "step": 1280
    },
    {
      "epoch": 0.32431413217686433,
      "grad_norm": 0.686596691608429,
      "learning_rate": 1.983049268883301e-05,
      "loss": 2.1964,
      "step": 1290
    },
    {
      "epoch": 0.3268281952169951,
      "grad_norm": 0.9377066493034363,
      "learning_rate": 1.981186551178169e-05,
      "loss": 2.183,
      "step": 1300
    },
    {
      "epoch": 0.3293422582571258,
      "grad_norm": 0.7653865218162537,
      "learning_rate": 1.9793238334730374e-05,
      "loss": 2.0711,
      "step": 1310
    },
    {
      "epoch": 0.3318563212972565,
      "grad_norm": 0.8454409241676331,
      "learning_rate": 1.9774611157679055e-05,
      "loss": 2.1466,
      "step": 1320
    },
    {
      "epoch": 0.33437038433738725,
      "grad_norm": 0.7365928888320923,
      "learning_rate": 1.9755983980627738e-05,
      "loss": 2.1047,
      "step": 1330
    },
    {
      "epoch": 0.336884447377518,
      "grad_norm": 0.9556942582130432,
      "learning_rate": 1.9737356803576418e-05,
      "loss": 2.1695,
      "step": 1340
    },
    {
      "epoch": 0.33939851041764874,
      "grad_norm": 0.838843047618866,
      "learning_rate": 1.97187296265251e-05,
      "loss": 2.0261,
      "step": 1350
    },
    {
      "epoch": 0.3419125734577795,
      "grad_norm": 0.7815608978271484,
      "learning_rate": 1.9700102449473785e-05,
      "loss": 2.0431,
      "step": 1360
    },
    {
      "epoch": 0.34442663649791017,
      "grad_norm": 0.7637693285942078,
      "learning_rate": 1.9681475272422465e-05,
      "loss": 1.9319,
      "step": 1370
    },
    {
      "epoch": 0.3469406995380409,
      "grad_norm": 0.8159041404724121,
      "learning_rate": 1.966284809537115e-05,
      "loss": 1.941,
      "step": 1380
    },
    {
      "epoch": 0.34945476257817165,
      "grad_norm": 0.8053255677223206,
      "learning_rate": 1.964422091831983e-05,
      "loss": 1.8813,
      "step": 1390
    },
    {
      "epoch": 0.3519688256183024,
      "grad_norm": 0.7855125665664673,
      "learning_rate": 1.9625593741268512e-05,
      "loss": 2.0533,
      "step": 1400
    },
    {
      "epoch": 0.35448288865843314,
      "grad_norm": 0.8052666187286377,
      "learning_rate": 1.9606966564217195e-05,
      "loss": 1.9411,
      "step": 1410
    },
    {
      "epoch": 0.3569969516985638,
      "grad_norm": 0.7716293931007385,
      "learning_rate": 1.9588339387165875e-05,
      "loss": 2.2178,
      "step": 1420
    },
    {
      "epoch": 0.35951101473869457,
      "grad_norm": 0.7413807511329651,
      "learning_rate": 1.956971221011456e-05,
      "loss": 2.0794,
      "step": 1430
    },
    {
      "epoch": 0.3620250777788253,
      "grad_norm": 0.7578155398368835,
      "learning_rate": 1.9551085033063242e-05,
      "loss": 2.1088,
      "step": 1440
    },
    {
      "epoch": 0.36453914081895605,
      "grad_norm": 0.8205705881118774,
      "learning_rate": 1.9532457856011922e-05,
      "loss": 2.0763,
      "step": 1450
    },
    {
      "epoch": 0.36705320385908674,
      "grad_norm": 0.9258806109428406,
      "learning_rate": 1.9513830678960606e-05,
      "loss": 2.0782,
      "step": 1460
    },
    {
      "epoch": 0.3695672668992175,
      "grad_norm": 0.7821536660194397,
      "learning_rate": 1.949520350190929e-05,
      "loss": 2.1264,
      "step": 1470
    },
    {
      "epoch": 0.3720813299393482,
      "grad_norm": 0.7996969819068909,
      "learning_rate": 1.947657632485797e-05,
      "loss": 1.9979,
      "step": 1480
    },
    {
      "epoch": 0.37459539297947897,
      "grad_norm": 0.9707168340682983,
      "learning_rate": 1.9457949147806653e-05,
      "loss": 2.1808,
      "step": 1490
    },
    {
      "epoch": 0.3771094560196097,
      "grad_norm": 0.7080492973327637,
      "learning_rate": 1.9439321970755333e-05,
      "loss": 2.1,
      "step": 1500
    },
    {
      "epoch": 0.3796235190597404,
      "grad_norm": 0.8084648847579956,
      "learning_rate": 1.9420694793704016e-05,
      "loss": 1.9342,
      "step": 1510
    },
    {
      "epoch": 0.38213758209987114,
      "grad_norm": 0.7488344311714172,
      "learning_rate": 1.94020676166527e-05,
      "loss": 1.9951,
      "step": 1520
    },
    {
      "epoch": 0.3846516451400019,
      "grad_norm": 0.8307909369468689,
      "learning_rate": 1.938344043960138e-05,
      "loss": 2.0323,
      "step": 1530
    },
    {
      "epoch": 0.38716570818013263,
      "grad_norm": 0.7473161220550537,
      "learning_rate": 1.9364813262550063e-05,
      "loss": 1.9646,
      "step": 1540
    },
    {
      "epoch": 0.38967977122026337,
      "grad_norm": 0.7655180096626282,
      "learning_rate": 1.9346186085498743e-05,
      "loss": 1.9911,
      "step": 1550
    },
    {
      "epoch": 0.39219383426039406,
      "grad_norm": 0.8123348355293274,
      "learning_rate": 1.9327558908447426e-05,
      "loss": 1.9692,
      "step": 1560
    },
    {
      "epoch": 0.3947078973005248,
      "grad_norm": 0.7347009778022766,
      "learning_rate": 1.9308931731396106e-05,
      "loss": 2.112,
      "step": 1570
    },
    {
      "epoch": 0.39722196034065554,
      "grad_norm": 0.6923652291297913,
      "learning_rate": 1.929030455434479e-05,
      "loss": 1.8934,
      "step": 1580
    },
    {
      "epoch": 0.3997360233807863,
      "grad_norm": 0.7850770950317383,
      "learning_rate": 1.9271677377293473e-05,
      "loss": 2.0652,
      "step": 1590
    },
    {
      "epoch": 0.40225008642091703,
      "grad_norm": 0.7438580393791199,
      "learning_rate": 1.9253050200242157e-05,
      "loss": 2.1225,
      "step": 1600
    },
    {
      "epoch": 0.4047641494610477,
      "grad_norm": 0.7739879488945007,
      "learning_rate": 1.9234423023190837e-05,
      "loss": 2.0021,
      "step": 1610
    },
    {
      "epoch": 0.40727821250117846,
      "grad_norm": 0.7497188448905945,
      "learning_rate": 1.921579584613952e-05,
      "loss": 2.2605,
      "step": 1620
    },
    {
      "epoch": 0.4097922755413092,
      "grad_norm": 0.8320116400718689,
      "learning_rate": 1.9197168669088204e-05,
      "loss": 1.9589,
      "step": 1630
    },
    {
      "epoch": 0.41230633858143995,
      "grad_norm": 0.7329941391944885,
      "learning_rate": 1.9178541492036884e-05,
      "loss": 2.0156,
      "step": 1640
    },
    {
      "epoch": 0.41482040162157063,
      "grad_norm": 0.8237548470497131,
      "learning_rate": 1.9159914314985567e-05,
      "loss": 2.0248,
      "step": 1650
    },
    {
      "epoch": 0.4173344646617014,
      "grad_norm": 0.7972195744514465,
      "learning_rate": 1.9141287137934247e-05,
      "loss": 2.0282,
      "step": 1660
    },
    {
      "epoch": 0.4198485277018321,
      "grad_norm": 0.8421241641044617,
      "learning_rate": 1.912265996088293e-05,
      "loss": 2.0764,
      "step": 1670
    },
    {
      "epoch": 0.42236259074196286,
      "grad_norm": 0.8096216917037964,
      "learning_rate": 1.910403278383161e-05,
      "loss": 1.9925,
      "step": 1680
    },
    {
      "epoch": 0.4248766537820936,
      "grad_norm": 0.8105015158653259,
      "learning_rate": 1.9085405606780294e-05,
      "loss": 2.0158,
      "step": 1690
    },
    {
      "epoch": 0.4273907168222243,
      "grad_norm": 0.766874372959137,
      "learning_rate": 1.9066778429728977e-05,
      "loss": 2.0938,
      "step": 1700
    },
    {
      "epoch": 0.42990477986235504,
      "grad_norm": 0.7797210812568665,
      "learning_rate": 1.9048151252677657e-05,
      "loss": 1.9059,
      "step": 1710
    },
    {
      "epoch": 0.4324188429024858,
      "grad_norm": 0.7164623737335205,
      "learning_rate": 1.902952407562634e-05,
      "loss": 1.9903,
      "step": 1720
    },
    {
      "epoch": 0.4349329059426165,
      "grad_norm": 0.8231551647186279,
      "learning_rate": 1.901089689857502e-05,
      "loss": 1.9873,
      "step": 1730
    },
    {
      "epoch": 0.43744696898274726,
      "grad_norm": 0.7738022208213806,
      "learning_rate": 1.8992269721523704e-05,
      "loss": 1.9039,
      "step": 1740
    },
    {
      "epoch": 0.43996103202287795,
      "grad_norm": 0.976980447769165,
      "learning_rate": 1.8973642544472384e-05,
      "loss": 1.8539,
      "step": 1750
    },
    {
      "epoch": 0.4424750950630087,
      "grad_norm": 0.798416018486023,
      "learning_rate": 1.8955015367421068e-05,
      "loss": 1.9029,
      "step": 1760
    },
    {
      "epoch": 0.44498915810313944,
      "grad_norm": 0.9188537001609802,
      "learning_rate": 1.893638819036975e-05,
      "loss": 1.9794,
      "step": 1770
    },
    {
      "epoch": 0.4475032211432702,
      "grad_norm": 0.8166659474372864,
      "learning_rate": 1.8917761013318435e-05,
      "loss": 2.141,
      "step": 1780
    },
    {
      "epoch": 0.4500172841834009,
      "grad_norm": 0.7264742851257324,
      "learning_rate": 1.8899133836267115e-05,
      "loss": 1.9527,
      "step": 1790
    },
    {
      "epoch": 0.4525313472235316,
      "grad_norm": 0.768557608127594,
      "learning_rate": 1.8880506659215798e-05,
      "loss": 2.0309,
      "step": 1800
    },
    {
      "epoch": 0.45504541026366235,
      "grad_norm": 0.708335280418396,
      "learning_rate": 1.886187948216448e-05,
      "loss": 1.982,
      "step": 1810
    },
    {
      "epoch": 0.4575594733037931,
      "grad_norm": 0.7529143691062927,
      "learning_rate": 1.884325230511316e-05,
      "loss": 1.982,
      "step": 1820
    },
    {
      "epoch": 0.46007353634392384,
      "grad_norm": 0.7362962365150452,
      "learning_rate": 1.8824625128061845e-05,
      "loss": 1.9908,
      "step": 1830
    },
    {
      "epoch": 0.4625875993840546,
      "grad_norm": 0.7776265740394592,
      "learning_rate": 1.8805997951010525e-05,
      "loss": 2.0905,
      "step": 1840
    },
    {
      "epoch": 0.46510166242418527,
      "grad_norm": 0.8231685757637024,
      "learning_rate": 1.878737077395921e-05,
      "loss": 1.9505,
      "step": 1850
    },
    {
      "epoch": 0.467615725464316,
      "grad_norm": 0.8566842079162598,
      "learning_rate": 1.876874359690789e-05,
      "loss": 1.8867,
      "step": 1860
    },
    {
      "epoch": 0.47012978850444676,
      "grad_norm": 0.8052365183830261,
      "learning_rate": 1.8750116419856572e-05,
      "loss": 1.9028,
      "step": 1870
    },
    {
      "epoch": 0.4726438515445775,
      "grad_norm": 0.7512916922569275,
      "learning_rate": 1.8731489242805255e-05,
      "loss": 1.9618,
      "step": 1880
    },
    {
      "epoch": 0.4751579145847082,
      "grad_norm": 0.7339067459106445,
      "learning_rate": 1.8712862065753936e-05,
      "loss": 2.0292,
      "step": 1890
    },
    {
      "epoch": 0.47767197762483893,
      "grad_norm": 0.7452632784843445,
      "learning_rate": 1.869423488870262e-05,
      "loss": 1.9188,
      "step": 1900
    },
    {
      "epoch": 0.48018604066496967,
      "grad_norm": 0.7812403440475464,
      "learning_rate": 1.86756077116513e-05,
      "loss": 1.8975,
      "step": 1910
    },
    {
      "epoch": 0.4827001037051004,
      "grad_norm": 0.7752578854560852,
      "learning_rate": 1.8656980534599982e-05,
      "loss": 1.9429,
      "step": 1920
    },
    {
      "epoch": 0.48521416674523116,
      "grad_norm": 0.6833181977272034,
      "learning_rate": 1.8638353357548666e-05,
      "loss": 1.954,
      "step": 1930
    },
    {
      "epoch": 0.48772822978536184,
      "grad_norm": 0.8290771842002869,
      "learning_rate": 1.8619726180497346e-05,
      "loss": 1.8661,
      "step": 1940
    },
    {
      "epoch": 0.4902422928254926,
      "grad_norm": 0.7770770192146301,
      "learning_rate": 1.860109900344603e-05,
      "loss": 1.9486,
      "step": 1950
    },
    {
      "epoch": 0.49275635586562333,
      "grad_norm": 0.933445155620575,
      "learning_rate": 1.8582471826394713e-05,
      "loss": 1.8549,
      "step": 1960
    },
    {
      "epoch": 0.4952704189057541,
      "grad_norm": 0.7093394994735718,
      "learning_rate": 1.8563844649343393e-05,
      "loss": 1.8469,
      "step": 1970
    },
    {
      "epoch": 0.4977844819458848,
      "grad_norm": 0.7346369624137878,
      "learning_rate": 1.8545217472292076e-05,
      "loss": 1.9775,
      "step": 1980
    },
    {
      "epoch": 0.5002985449860156,
      "grad_norm": 0.7191982865333557,
      "learning_rate": 1.852659029524076e-05,
      "loss": 1.9248,
      "step": 1990
    },
    {
      "epoch": 0.5028126080261462,
      "grad_norm": 0.7234885692596436,
      "learning_rate": 1.850796311818944e-05,
      "loss": 1.9498,
      "step": 2000
    },
    {
      "epoch": 0.5053266710662769,
      "grad_norm": 0.752897322177887,
      "learning_rate": 1.8489335941138123e-05,
      "loss": 1.9045,
      "step": 2010
    },
    {
      "epoch": 0.5078407341064077,
      "grad_norm": 0.8015156984329224,
      "learning_rate": 1.8470708764086803e-05,
      "loss": 2.0621,
      "step": 2020
    },
    {
      "epoch": 0.5103547971465384,
      "grad_norm": 0.8776529431343079,
      "learning_rate": 1.8452081587035487e-05,
      "loss": 1.8736,
      "step": 2030
    },
    {
      "epoch": 0.5128688601866692,
      "grad_norm": 0.7982786893844604,
      "learning_rate": 1.843345440998417e-05,
      "loss": 1.9293,
      "step": 2040
    },
    {
      "epoch": 0.5153829232267999,
      "grad_norm": 0.9065136909484863,
      "learning_rate": 1.841482723293285e-05,
      "loss": 1.9328,
      "step": 2050
    },
    {
      "epoch": 0.5178969862669306,
      "grad_norm": 0.6973931789398193,
      "learning_rate": 1.8396200055881533e-05,
      "loss": 2.0046,
      "step": 2060
    },
    {
      "epoch": 0.5204110493070614,
      "grad_norm": 0.7191042304039001,
      "learning_rate": 1.8377572878830214e-05,
      "loss": 2.0585,
      "step": 2070
    },
    {
      "epoch": 0.5229251123471921,
      "grad_norm": 0.8368072509765625,
      "learning_rate": 1.8358945701778897e-05,
      "loss": 1.9149,
      "step": 2080
    },
    {
      "epoch": 0.5254391753873229,
      "grad_norm": 0.814167857170105,
      "learning_rate": 1.8340318524727577e-05,
      "loss": 1.981,
      "step": 2090
    },
    {
      "epoch": 0.5279532384274536,
      "grad_norm": 0.7943083047866821,
      "learning_rate": 1.832169134767626e-05,
      "loss": 1.8277,
      "step": 2100
    },
    {
      "epoch": 0.5304673014675843,
      "grad_norm": 0.8023422956466675,
      "learning_rate": 1.8303064170624944e-05,
      "loss": 1.9861,
      "step": 2110
    },
    {
      "epoch": 0.532981364507715,
      "grad_norm": 0.633378803730011,
      "learning_rate": 1.8284436993573624e-05,
      "loss": 1.8497,
      "step": 2120
    },
    {
      "epoch": 0.5354954275478457,
      "grad_norm": 0.7400213479995728,
      "learning_rate": 1.8265809816522307e-05,
      "loss": 1.8595,
      "step": 2130
    },
    {
      "epoch": 0.5380094905879765,
      "grad_norm": 0.836212158203125,
      "learning_rate": 1.824718263947099e-05,
      "loss": 2.0528,
      "step": 2140
    },
    {
      "epoch": 0.5405235536281072,
      "grad_norm": 0.8250802159309387,
      "learning_rate": 1.8228555462419674e-05,
      "loss": 1.846,
      "step": 2150
    },
    {
      "epoch": 0.5430376166682379,
      "grad_norm": 0.6821319460868835,
      "learning_rate": 1.8209928285368354e-05,
      "loss": 1.8895,
      "step": 2160
    },
    {
      "epoch": 0.5455516797083687,
      "grad_norm": 0.7601454854011536,
      "learning_rate": 1.8191301108317038e-05,
      "loss": 1.9106,
      "step": 2170
    },
    {
      "epoch": 0.5480657427484994,
      "grad_norm": 0.7625369429588318,
      "learning_rate": 1.8172673931265718e-05,
      "loss": 1.9236,
      "step": 2180
    },
    {
      "epoch": 0.5505798057886302,
      "grad_norm": 0.7543193697929382,
      "learning_rate": 1.81540467542144e-05,
      "loss": 2.0481,
      "step": 2190
    },
    {
      "epoch": 0.5530938688287609,
      "grad_norm": 0.8049002289772034,
      "learning_rate": 1.813541957716308e-05,
      "loss": 1.9396,
      "step": 2200
    },
    {
      "epoch": 0.5556079318688916,
      "grad_norm": 0.8110231161117554,
      "learning_rate": 1.8116792400111765e-05,
      "loss": 2.0862,
      "step": 2210
    },
    {
      "epoch": 0.5581219949090224,
      "grad_norm": 0.8501596450805664,
      "learning_rate": 1.8098165223060448e-05,
      "loss": 1.9038,
      "step": 2220
    },
    {
      "epoch": 0.560636057949153,
      "grad_norm": 0.7421522736549377,
      "learning_rate": 1.8079538046009128e-05,
      "loss": 1.9252,
      "step": 2230
    },
    {
      "epoch": 0.5631501209892839,
      "grad_norm": 0.7264032363891602,
      "learning_rate": 1.806091086895781e-05,
      "loss": 1.8869,
      "step": 2240
    },
    {
      "epoch": 0.5656641840294145,
      "grad_norm": 0.6781209707260132,
      "learning_rate": 1.804228369190649e-05,
      "loss": 1.8183,
      "step": 2250
    },
    {
      "epoch": 0.5681782470695452,
      "grad_norm": 0.743237316608429,
      "learning_rate": 1.8023656514855175e-05,
      "loss": 1.9669,
      "step": 2260
    },
    {
      "epoch": 0.570692310109676,
      "grad_norm": 0.685775876045227,
      "learning_rate": 1.8005029337803855e-05,
      "loss": 1.7174,
      "step": 2270
    },
    {
      "epoch": 0.5732063731498067,
      "grad_norm": 0.8393185138702393,
      "learning_rate": 1.798640216075254e-05,
      "loss": 2.0113,
      "step": 2280
    },
    {
      "epoch": 0.5757204361899375,
      "grad_norm": 0.7456147074699402,
      "learning_rate": 1.7967774983701222e-05,
      "loss": 1.9533,
      "step": 2290
    },
    {
      "epoch": 0.5782344992300682,
      "grad_norm": 0.746907114982605,
      "learning_rate": 1.7949147806649905e-05,
      "loss": 1.8457,
      "step": 2300
    },
    {
      "epoch": 0.5807485622701989,
      "grad_norm": 0.6800515651702881,
      "learning_rate": 1.7930520629598585e-05,
      "loss": 1.9492,
      "step": 2310
    },
    {
      "epoch": 0.5832626253103297,
      "grad_norm": 0.8982629179954529,
      "learning_rate": 1.791189345254727e-05,
      "loss": 1.7909,
      "step": 2320
    },
    {
      "epoch": 0.5857766883504604,
      "grad_norm": 0.7886053323745728,
      "learning_rate": 1.7893266275495952e-05,
      "loss": 1.9096,
      "step": 2330
    },
    {
      "epoch": 0.5882907513905912,
      "grad_norm": 0.6976516246795654,
      "learning_rate": 1.7874639098444632e-05,
      "loss": 1.8641,
      "step": 2340
    },
    {
      "epoch": 0.5908048144307219,
      "grad_norm": 0.792374849319458,
      "learning_rate": 1.7856011921393316e-05,
      "loss": 1.9948,
      "step": 2350
    },
    {
      "epoch": 0.5933188774708525,
      "grad_norm": 0.8440678119659424,
      "learning_rate": 1.7837384744341996e-05,
      "loss": 1.9047,
      "step": 2360
    },
    {
      "epoch": 0.5958329405109833,
      "grad_norm": 0.776010274887085,
      "learning_rate": 1.781875756729068e-05,
      "loss": 1.9526,
      "step": 2370
    },
    {
      "epoch": 0.598347003551114,
      "grad_norm": 0.6786248087882996,
      "learning_rate": 1.780013039023936e-05,
      "loss": 2.0537,
      "step": 2380
    },
    {
      "epoch": 0.6008610665912448,
      "grad_norm": 0.7782222628593445,
      "learning_rate": 1.7781503213188043e-05,
      "loss": 1.8738,
      "step": 2390
    },
    {
      "epoch": 0.6033751296313755,
      "grad_norm": 0.7707937955856323,
      "learning_rate": 1.7762876036136726e-05,
      "loss": 1.879,
      "step": 2400
    },
    {
      "epoch": 0.6058891926715062,
      "grad_norm": 0.7248098850250244,
      "learning_rate": 1.7744248859085406e-05,
      "loss": 1.8654,
      "step": 2410
    },
    {
      "epoch": 0.608403255711637,
      "grad_norm": 0.8210826516151428,
      "learning_rate": 1.772562168203409e-05,
      "loss": 1.9683,
      "step": 2420
    },
    {
      "epoch": 0.6109173187517677,
      "grad_norm": 0.7746020555496216,
      "learning_rate": 1.770699450498277e-05,
      "loss": 1.8953,
      "step": 2430
    },
    {
      "epoch": 0.6134313817918984,
      "grad_norm": 0.7477965354919434,
      "learning_rate": 1.7688367327931453e-05,
      "loss": 1.8308,
      "step": 2440
    },
    {
      "epoch": 0.6159454448320292,
      "grad_norm": 0.6938306093215942,
      "learning_rate": 1.7669740150880133e-05,
      "loss": 1.8937,
      "step": 2450
    },
    {
      "epoch": 0.6184595078721599,
      "grad_norm": 0.6874721050262451,
      "learning_rate": 1.7651112973828816e-05,
      "loss": 1.9818,
      "step": 2460
    },
    {
      "epoch": 0.6209735709122907,
      "grad_norm": 0.7377421259880066,
      "learning_rate": 1.76324857967775e-05,
      "loss": 1.9849,
      "step": 2470
    },
    {
      "epoch": 0.6234876339524213,
      "grad_norm": 0.7371711134910583,
      "learning_rate": 1.7613858619726183e-05,
      "loss": 1.9696,
      "step": 2480
    },
    {
      "epoch": 0.626001696992552,
      "grad_norm": 0.8088200092315674,
      "learning_rate": 1.7595231442674863e-05,
      "loss": 2.0681,
      "step": 2490
    },
    {
      "epoch": 0.6285157600326828,
      "grad_norm": 0.7583195567131042,
      "learning_rate": 1.7576604265623547e-05,
      "loss": 1.8371,
      "step": 2500
    },
    {
      "epoch": 0.6310298230728135,
      "grad_norm": 0.8802783489227295,
      "learning_rate": 1.755797708857223e-05,
      "loss": 1.8215,
      "step": 2510
    },
    {
      "epoch": 0.6335438861129443,
      "grad_norm": 0.7504824995994568,
      "learning_rate": 1.753934991152091e-05,
      "loss": 1.8909,
      "step": 2520
    },
    {
      "epoch": 0.636057949153075,
      "grad_norm": 0.8500109314918518,
      "learning_rate": 1.7520722734469594e-05,
      "loss": 1.9659,
      "step": 2530
    },
    {
      "epoch": 0.6385720121932057,
      "grad_norm": 0.7339226007461548,
      "learning_rate": 1.7502095557418274e-05,
      "loss": 1.8015,
      "step": 2540
    },
    {
      "epoch": 0.6410860752333365,
      "grad_norm": 0.7968369126319885,
      "learning_rate": 1.7483468380366957e-05,
      "loss": 1.959,
      "step": 2550
    },
    {
      "epoch": 0.6436001382734672,
      "grad_norm": 0.7196438908576965,
      "learning_rate": 1.746484120331564e-05,
      "loss": 1.9609,
      "step": 2560
    },
    {
      "epoch": 0.646114201313598,
      "grad_norm": 0.9080058932304382,
      "learning_rate": 1.744621402626432e-05,
      "loss": 1.8483,
      "step": 2570
    },
    {
      "epoch": 0.6486282643537287,
      "grad_norm": 0.89829021692276,
      "learning_rate": 1.7427586849213004e-05,
      "loss": 1.9141,
      "step": 2580
    },
    {
      "epoch": 0.6511423273938594,
      "grad_norm": 0.7244899868965149,
      "learning_rate": 1.7408959672161684e-05,
      "loss": 1.92,
      "step": 2590
    },
    {
      "epoch": 0.6536563904339902,
      "grad_norm": 0.8121938705444336,
      "learning_rate": 1.7390332495110368e-05,
      "loss": 1.8602,
      "step": 2600
    },
    {
      "epoch": 0.6561704534741208,
      "grad_norm": 0.7036830186843872,
      "learning_rate": 1.7371705318059048e-05,
      "loss": 1.879,
      "step": 2610
    },
    {
      "epoch": 0.6586845165142516,
      "grad_norm": 0.7597665190696716,
      "learning_rate": 1.735307814100773e-05,
      "loss": 1.8427,
      "step": 2620
    },
    {
      "epoch": 0.6611985795543823,
      "grad_norm": 0.773806095123291,
      "learning_rate": 1.7334450963956414e-05,
      "loss": 1.7957,
      "step": 2630
    },
    {
      "epoch": 0.663712642594513,
      "grad_norm": 0.7580260038375854,
      "learning_rate": 1.7315823786905094e-05,
      "loss": 1.8656,
      "step": 2640
    },
    {
      "epoch": 0.6662267056346438,
      "grad_norm": 0.7303863167762756,
      "learning_rate": 1.7297196609853778e-05,
      "loss": 1.8221,
      "step": 2650
    },
    {
      "epoch": 0.6687407686747745,
      "grad_norm": 0.7445915937423706,
      "learning_rate": 1.727856943280246e-05,
      "loss": 1.8604,
      "step": 2660
    },
    {
      "epoch": 0.6712548317149053,
      "grad_norm": 0.7845932245254517,
      "learning_rate": 1.7259942255751145e-05,
      "loss": 1.9011,
      "step": 2670
    },
    {
      "epoch": 0.673768894755036,
      "grad_norm": 0.7529757618904114,
      "learning_rate": 1.7241315078699825e-05,
      "loss": 1.7884,
      "step": 2680
    },
    {
      "epoch": 0.6762829577951667,
      "grad_norm": 0.7757226228713989,
      "learning_rate": 1.7222687901648508e-05,
      "loss": 1.9701,
      "step": 2690
    },
    {
      "epoch": 0.6787970208352975,
      "grad_norm": 0.779881477355957,
      "learning_rate": 1.7204060724597188e-05,
      "loss": 1.9259,
      "step": 2700
    },
    {
      "epoch": 0.6813110838754282,
      "grad_norm": 0.7081320881843567,
      "learning_rate": 1.7185433547545872e-05,
      "loss": 1.9069,
      "step": 2710
    },
    {
      "epoch": 0.683825146915559,
      "grad_norm": 0.9055602550506592,
      "learning_rate": 1.7166806370494552e-05,
      "loss": 1.9548,
      "step": 2720
    },
    {
      "epoch": 0.6863392099556896,
      "grad_norm": 0.722490131855011,
      "learning_rate": 1.7148179193443235e-05,
      "loss": 1.9155,
      "step": 2730
    },
    {
      "epoch": 0.6888532729958203,
      "grad_norm": 0.7735713720321655,
      "learning_rate": 1.712955201639192e-05,
      "loss": 1.8864,
      "step": 2740
    },
    {
      "epoch": 0.6913673360359511,
      "grad_norm": 0.7630046606063843,
      "learning_rate": 1.71109248393406e-05,
      "loss": 1.8977,
      "step": 2750
    },
    {
      "epoch": 0.6938813990760818,
      "grad_norm": 0.6814414262771606,
      "learning_rate": 1.7092297662289282e-05,
      "loss": 1.9323,
      "step": 2760
    },
    {
      "epoch": 0.6963954621162126,
      "grad_norm": 0.668607234954834,
      "learning_rate": 1.7073670485237962e-05,
      "loss": 1.9308,
      "step": 2770
    },
    {
      "epoch": 0.6989095251563433,
      "grad_norm": 0.7586726546287537,
      "learning_rate": 1.7055043308186646e-05,
      "loss": 1.9257,
      "step": 2780
    },
    {
      "epoch": 0.701423588196474,
      "grad_norm": 0.7936710715293884,
      "learning_rate": 1.7036416131135326e-05,
      "loss": 1.9256,
      "step": 2790
    },
    {
      "epoch": 0.7039376512366048,
      "grad_norm": 0.7399563789367676,
      "learning_rate": 1.701778895408401e-05,
      "loss": 1.932,
      "step": 2800
    },
    {
      "epoch": 0.7064517142767355,
      "grad_norm": 0.7762052416801453,
      "learning_rate": 1.6999161777032692e-05,
      "loss": 1.9289,
      "step": 2810
    },
    {
      "epoch": 0.7089657773168663,
      "grad_norm": 0.714678168296814,
      "learning_rate": 1.6980534599981373e-05,
      "loss": 1.8343,
      "step": 2820
    },
    {
      "epoch": 0.711479840356997,
      "grad_norm": 0.7210417985916138,
      "learning_rate": 1.6961907422930056e-05,
      "loss": 1.8077,
      "step": 2830
    },
    {
      "epoch": 0.7139939033971276,
      "grad_norm": 0.7361767888069153,
      "learning_rate": 1.694328024587874e-05,
      "loss": 1.8718,
      "step": 2840
    },
    {
      "epoch": 0.7165079664372584,
      "grad_norm": 0.7471848130226135,
      "learning_rate": 1.6924653068827423e-05,
      "loss": 1.9232,
      "step": 2850
    },
    {
      "epoch": 0.7190220294773891,
      "grad_norm": 0.7137832045555115,
      "learning_rate": 1.6906025891776103e-05,
      "loss": 1.8216,
      "step": 2860
    },
    {
      "epoch": 0.7215360925175198,
      "grad_norm": 0.7085067629814148,
      "learning_rate": 1.6887398714724786e-05,
      "loss": 1.964,
      "step": 2870
    },
    {
      "epoch": 0.7240501555576506,
      "grad_norm": 0.7419396042823792,
      "learning_rate": 1.6868771537673466e-05,
      "loss": 1.9132,
      "step": 2880
    },
    {
      "epoch": 0.7265642185977813,
      "grad_norm": 0.6376938819885254,
      "learning_rate": 1.685014436062215e-05,
      "loss": 1.7227,
      "step": 2890
    },
    {
      "epoch": 0.7290782816379121,
      "grad_norm": 0.8269235491752625,
      "learning_rate": 1.683151718357083e-05,
      "loss": 1.8836,
      "step": 2900
    },
    {
      "epoch": 0.7315923446780428,
      "grad_norm": 0.8165931105613708,
      "learning_rate": 1.6812890006519513e-05,
      "loss": 1.8852,
      "step": 2910
    },
    {
      "epoch": 0.7341064077181735,
      "grad_norm": 0.8207159042358398,
      "learning_rate": 1.6794262829468197e-05,
      "loss": 1.9848,
      "step": 2920
    },
    {
      "epoch": 0.7366204707583043,
      "grad_norm": 0.6867953538894653,
      "learning_rate": 1.6775635652416877e-05,
      "loss": 1.9084,
      "step": 2930
    },
    {
      "epoch": 0.739134533798435,
      "grad_norm": 0.7220890522003174,
      "learning_rate": 1.675700847536556e-05,
      "loss": 1.9337,
      "step": 2940
    },
    {
      "epoch": 0.7416485968385658,
      "grad_norm": 0.7804360389709473,
      "learning_rate": 1.673838129831424e-05,
      "loss": 1.8649,
      "step": 2950
    },
    {
      "epoch": 0.7441626598786965,
      "grad_norm": 0.7463236451148987,
      "learning_rate": 1.6719754121262924e-05,
      "loss": 1.9094,
      "step": 2960
    },
    {
      "epoch": 0.7466767229188271,
      "grad_norm": 0.6537930965423584,
      "learning_rate": 1.6701126944211604e-05,
      "loss": 1.8943,
      "step": 2970
    },
    {
      "epoch": 0.7491907859589579,
      "grad_norm": 0.8032995462417603,
      "learning_rate": 1.6682499767160287e-05,
      "loss": 2.0534,
      "step": 2980
    },
    {
      "epoch": 0.7517048489990886,
      "grad_norm": 0.703694224357605,
      "learning_rate": 1.666387259010897e-05,
      "loss": 1.9058,
      "step": 2990
    },
    {
      "epoch": 0.7542189120392194,
      "grad_norm": 0.761832058429718,
      "learning_rate": 1.6645245413057654e-05,
      "loss": 1.832,
      "step": 3000
    },
    {
      "epoch": 0.7567329750793501,
      "grad_norm": 0.7855667471885681,
      "learning_rate": 1.6626618236006334e-05,
      "loss": 1.8525,
      "step": 3010
    },
    {
      "epoch": 0.7592470381194808,
      "grad_norm": 0.7307915687561035,
      "learning_rate": 1.6607991058955017e-05,
      "loss": 1.9088,
      "step": 3020
    },
    {
      "epoch": 0.7617611011596116,
      "grad_norm": 0.7398809790611267,
      "learning_rate": 1.65893638819037e-05,
      "loss": 1.8553,
      "step": 3030
    },
    {
      "epoch": 0.7642751641997423,
      "grad_norm": 0.781365156173706,
      "learning_rate": 1.657073670485238e-05,
      "loss": 2.1244,
      "step": 3040
    },
    {
      "epoch": 0.7667892272398731,
      "grad_norm": 0.769498348236084,
      "learning_rate": 1.6552109527801064e-05,
      "loss": 1.9386,
      "step": 3050
    },
    {
      "epoch": 0.7693032902800038,
      "grad_norm": 0.8069713115692139,
      "learning_rate": 1.6533482350749744e-05,
      "loss": 1.9883,
      "step": 3060
    },
    {
      "epoch": 0.7718173533201345,
      "grad_norm": 0.8078048825263977,
      "learning_rate": 1.6514855173698428e-05,
      "loss": 2.0233,
      "step": 3070
    },
    {
      "epoch": 0.7743314163602653,
      "grad_norm": 0.8151450157165527,
      "learning_rate": 1.649622799664711e-05,
      "loss": 1.9036,
      "step": 3080
    },
    {
      "epoch": 0.7768454794003959,
      "grad_norm": 0.7425419688224792,
      "learning_rate": 1.647760081959579e-05,
      "loss": 1.7662,
      "step": 3090
    },
    {
      "epoch": 0.7793595424405267,
      "grad_norm": 0.7732254862785339,
      "learning_rate": 1.6458973642544475e-05,
      "loss": 1.929,
      "step": 3100
    },
    {
      "epoch": 0.7818736054806574,
      "grad_norm": 0.6458016633987427,
      "learning_rate": 1.6440346465493155e-05,
      "loss": 1.7479,
      "step": 3110
    },
    {
      "epoch": 0.7843876685207881,
      "grad_norm": 0.6959109902381897,
      "learning_rate": 1.6421719288441838e-05,
      "loss": 1.8997,
      "step": 3120
    },
    {
      "epoch": 0.7869017315609189,
      "grad_norm": 0.799720823764801,
      "learning_rate": 1.6403092111390518e-05,
      "loss": 1.9229,
      "step": 3130
    },
    {
      "epoch": 0.7894157946010496,
      "grad_norm": 0.7334579825401306,
      "learning_rate": 1.63844649343392e-05,
      "loss": 1.8687,
      "step": 3140
    },
    {
      "epoch": 0.7919298576411804,
      "grad_norm": 0.7263447642326355,
      "learning_rate": 1.6365837757287885e-05,
      "loss": 1.8465,
      "step": 3150
    },
    {
      "epoch": 0.7944439206813111,
      "grad_norm": 0.8034894466400146,
      "learning_rate": 1.6347210580236565e-05,
      "loss": 1.8938,
      "step": 3160
    },
    {
      "epoch": 0.7969579837214418,
      "grad_norm": 0.8071020245552063,
      "learning_rate": 1.632858340318525e-05,
      "loss": 1.9786,
      "step": 3170
    },
    {
      "epoch": 0.7994720467615726,
      "grad_norm": 0.9923710823059082,
      "learning_rate": 1.6309956226133932e-05,
      "loss": 2.0204,
      "step": 3180
    },
    {
      "epoch": 0.8019861098017033,
      "grad_norm": 0.6720794439315796,
      "learning_rate": 1.6291329049082615e-05,
      "loss": 1.9797,
      "step": 3190
    },
    {
      "epoch": 0.8045001728418341,
      "grad_norm": 0.6624062061309814,
      "learning_rate": 1.6272701872031295e-05,
      "loss": 1.9076,
      "step": 3200
    },
    {
      "epoch": 0.8070142358819647,
      "grad_norm": 0.6905372738838196,
      "learning_rate": 1.625407469497998e-05,
      "loss": 1.8835,
      "step": 3210
    },
    {
      "epoch": 0.8095282989220954,
      "grad_norm": 0.7899017333984375,
      "learning_rate": 1.623544751792866e-05,
      "loss": 1.8541,
      "step": 3220
    },
    {
      "epoch": 0.8120423619622262,
      "grad_norm": 0.747377872467041,
      "learning_rate": 1.6216820340877342e-05,
      "loss": 1.7969,
      "step": 3230
    },
    {
      "epoch": 0.8145564250023569,
      "grad_norm": 0.7094478011131287,
      "learning_rate": 1.6198193163826022e-05,
      "loss": 1.9332,
      "step": 3240
    },
    {
      "epoch": 0.8170704880424877,
      "grad_norm": 0.7842429876327515,
      "learning_rate": 1.6179565986774706e-05,
      "loss": 1.7202,
      "step": 3250
    },
    {
      "epoch": 0.8195845510826184,
      "grad_norm": 0.780980110168457,
      "learning_rate": 1.616093880972339e-05,
      "loss": 1.7652,
      "step": 3260
    },
    {
      "epoch": 0.8220986141227491,
      "grad_norm": 0.7316629886627197,
      "learning_rate": 1.614231163267207e-05,
      "loss": 1.9244,
      "step": 3270
    },
    {
      "epoch": 0.8246126771628799,
      "grad_norm": 0.7995865345001221,
      "learning_rate": 1.6123684455620753e-05,
      "loss": 1.8612,
      "step": 3280
    },
    {
      "epoch": 0.8271267402030106,
      "grad_norm": 0.689227283000946,
      "learning_rate": 1.6105057278569433e-05,
      "loss": 1.6482,
      "step": 3290
    },
    {
      "epoch": 0.8296408032431413,
      "grad_norm": 0.7569786906242371,
      "learning_rate": 1.6086430101518116e-05,
      "loss": 1.8532,
      "step": 3300
    },
    {
      "epoch": 0.8321548662832721,
      "grad_norm": 0.7134847640991211,
      "learning_rate": 1.6067802924466796e-05,
      "loss": 1.8474,
      "step": 3310
    },
    {
      "epoch": 0.8346689293234028,
      "grad_norm": 0.6890599727630615,
      "learning_rate": 1.604917574741548e-05,
      "loss": 1.7861,
      "step": 3320
    },
    {
      "epoch": 0.8371829923635336,
      "grad_norm": 0.7250029444694519,
      "learning_rate": 1.6030548570364163e-05,
      "loss": 1.9143,
      "step": 3330
    },
    {
      "epoch": 0.8396970554036642,
      "grad_norm": 0.7443738579750061,
      "learning_rate": 1.6011921393312843e-05,
      "loss": 1.9659,
      "step": 3340
    },
    {
      "epoch": 0.8422111184437949,
      "grad_norm": 0.6950932741165161,
      "learning_rate": 1.5993294216261527e-05,
      "loss": 1.7906,
      "step": 3350
    },
    {
      "epoch": 0.8447251814839257,
      "grad_norm": 0.7165166139602661,
      "learning_rate": 1.597466703921021e-05,
      "loss": 1.8269,
      "step": 3360
    },
    {
      "epoch": 0.8472392445240564,
      "grad_norm": 0.7223883867263794,
      "learning_rate": 1.5956039862158893e-05,
      "loss": 1.7825,
      "step": 3370
    },
    {
      "epoch": 0.8497533075641872,
      "grad_norm": 0.6890152096748352,
| "learning_rate": 1.5937412685107573e-05, | |
| "loss": 1.6946, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.8522673706043179, | |
| "grad_norm": 0.7027249932289124, | |
| "learning_rate": 1.5918785508056257e-05, | |
| "loss": 1.7711, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.8547814336444486, | |
| "grad_norm": 0.751466691493988, | |
| "learning_rate": 1.5900158331004937e-05, | |
| "loss": 1.9983, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8572954966845794, | |
| "grad_norm": 0.8512903451919556, | |
| "learning_rate": 1.588153115395362e-05, | |
| "loss": 1.8736, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8598095597247101, | |
| "grad_norm": 0.8137218952178955, | |
| "learning_rate": 1.58629039769023e-05, | |
| "loss": 1.8311, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.8623236227648409, | |
| "grad_norm": 0.728767991065979, | |
| "learning_rate": 1.5844276799850984e-05, | |
| "loss": 1.8467, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8648376858049716, | |
| "grad_norm": 0.6929347515106201, | |
| "learning_rate": 1.5825649622799667e-05, | |
| "loss": 1.709, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8673517488451022, | |
| "grad_norm": 0.631259024143219, | |
| "learning_rate": 1.5807022445748347e-05, | |
| "loss": 1.7322, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.869865811885233, | |
| "grad_norm": 0.7401567697525024, | |
| "learning_rate": 1.578839526869703e-05, | |
| "loss": 1.8758, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8723798749253637, | |
| "grad_norm": 0.7359153032302856, | |
| "learning_rate": 1.576976809164571e-05, | |
| "loss": 1.8162, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.8748939379654945, | |
| "grad_norm": 0.7095641493797302, | |
| "learning_rate": 1.5751140914594394e-05, | |
| "loss": 1.8579, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.8774080010056252, | |
| "grad_norm": 0.7089935541152954, | |
| "learning_rate": 1.5732513737543074e-05, | |
| "loss": 1.8657, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.8799220640457559, | |
| "grad_norm": 0.7223098874092102, | |
| "learning_rate": 1.5713886560491758e-05, | |
| "loss": 1.8096, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8824361270858867, | |
| "grad_norm": 0.7333154082298279, | |
| "learning_rate": 1.569525938344044e-05, | |
| "loss": 1.8371, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.8849501901260174, | |
| "grad_norm": 0.7953635454177856, | |
| "learning_rate": 1.5676632206389125e-05, | |
| "loss": 1.9633, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.8874642531661482, | |
| "grad_norm": 0.6517605781555176, | |
| "learning_rate": 1.5658005029337805e-05, | |
| "loss": 1.7697, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.8899783162062789, | |
| "grad_norm": 0.7414403557777405, | |
| "learning_rate": 1.5639377852286488e-05, | |
| "loss": 1.8662, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.8924923792464096, | |
| "grad_norm": 0.7915478944778442, | |
| "learning_rate": 1.562075067523517e-05, | |
| "loss": 1.8233, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.8950064422865404, | |
| "grad_norm": 0.6664229035377502, | |
| "learning_rate": 1.560212349818385e-05, | |
| "loss": 1.8945, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.897520505326671, | |
| "grad_norm": 0.7000258564949036, | |
| "learning_rate": 1.5583496321132535e-05, | |
| "loss": 1.7048, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.9000345683668018, | |
| "grad_norm": 0.6713783144950867, | |
| "learning_rate": 1.5564869144081215e-05, | |
| "loss": 1.7228, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.9025486314069325, | |
| "grad_norm": 0.7198245525360107, | |
| "learning_rate": 1.55462419670299e-05, | |
| "loss": 1.7585, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.9050626944470632, | |
| "grad_norm": 0.7971406579017639, | |
| "learning_rate": 1.552761478997858e-05, | |
| "loss": 1.791, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.907576757487194, | |
| "grad_norm": 0.7461988925933838, | |
| "learning_rate": 1.5508987612927262e-05, | |
| "loss": 1.7626, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.9100908205273247, | |
| "grad_norm": 0.7271673679351807, | |
| "learning_rate": 1.5490360435875945e-05, | |
| "loss": 1.8523, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.9126048835674555, | |
| "grad_norm": 0.7493097186088562, | |
| "learning_rate": 1.5471733258824625e-05, | |
| "loss": 1.906, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.9151189466075862, | |
| "grad_norm": 0.7091007828712463, | |
| "learning_rate": 1.545310608177331e-05, | |
| "loss": 1.8112, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.9176330096477169, | |
| "grad_norm": 0.7169962525367737, | |
| "learning_rate": 1.543447890472199e-05, | |
| "loss": 1.8062, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.9201470726878477, | |
| "grad_norm": 0.8083438873291016, | |
| "learning_rate": 1.5415851727670672e-05, | |
| "loss": 1.9375, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.9226611357279784, | |
| "grad_norm": 0.739206075668335, | |
| "learning_rate": 1.5397224550619356e-05, | |
| "loss": 1.8335, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.9251751987681092, | |
| "grad_norm": 0.6627645492553711, | |
| "learning_rate": 1.5378597373568036e-05, | |
| "loss": 1.8625, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.9276892618082399, | |
| "grad_norm": 0.7657231688499451, | |
| "learning_rate": 1.535997019651672e-05, | |
| "loss": 1.8569, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.9302033248483705, | |
| "grad_norm": 0.6739639639854431, | |
| "learning_rate": 1.5341343019465403e-05, | |
| "loss": 1.7908, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.9327173878885013, | |
| "grad_norm": 0.7855839133262634, | |
| "learning_rate": 1.5322715842414086e-05, | |
| "loss": 1.9815, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.935231450928632, | |
| "grad_norm": 0.7746731638908386, | |
| "learning_rate": 1.5304088665362766e-05, | |
| "loss": 1.8812, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.9377455139687627, | |
| "grad_norm": 0.7513526082038879, | |
| "learning_rate": 1.528546148831145e-05, | |
| "loss": 1.8104, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.9402595770088935, | |
| "grad_norm": 0.718559741973877, | |
| "learning_rate": 1.526683431126013e-05, | |
| "loss": 1.7953, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.9427736400490242, | |
| "grad_norm": 0.7029974460601807, | |
| "learning_rate": 1.5248207134208813e-05, | |
| "loss": 1.7163, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.945287703089155, | |
| "grad_norm": 0.6682865619659424, | |
| "learning_rate": 1.5229579957157495e-05, | |
| "loss": 1.7981, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.9478017661292857, | |
| "grad_norm": 0.7411314249038696, | |
| "learning_rate": 1.5210952780106176e-05, | |
| "loss": 1.8131, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.9503158291694164, | |
| "grad_norm": 0.7350772023200989, | |
| "learning_rate": 1.5192325603054858e-05, | |
| "loss": 1.8127, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.9528298922095472, | |
| "grad_norm": 0.8263423442840576, | |
| "learning_rate": 1.517369842600354e-05, | |
| "loss": 1.9312, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.9553439552496779, | |
| "grad_norm": 0.726344645023346, | |
| "learning_rate": 1.5155071248952222e-05, | |
| "loss": 1.8891, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.9578580182898087, | |
| "grad_norm": 0.744555652141571, | |
| "learning_rate": 1.5136444071900905e-05, | |
| "loss": 1.8161, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.9603720813299393, | |
| "grad_norm": 0.7903228998184204, | |
| "learning_rate": 1.5117816894849587e-05, | |
| "loss": 1.7761, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.96288614437007, | |
| "grad_norm": 0.813612699508667, | |
| "learning_rate": 1.5099189717798268e-05, | |
| "loss": 1.896, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.9654002074102008, | |
| "grad_norm": 0.7627764344215393, | |
| "learning_rate": 1.508056254074695e-05, | |
| "loss": 1.9692, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.9679142704503315, | |
| "grad_norm": 0.7458754777908325, | |
| "learning_rate": 1.5061935363695632e-05, | |
| "loss": 1.7749, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.9704283334904623, | |
| "grad_norm": 0.7195068001747131, | |
| "learning_rate": 1.5043308186644314e-05, | |
| "loss": 1.7761, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.972942396530593, | |
| "grad_norm": 0.7695887684822083, | |
| "learning_rate": 1.5024681009592999e-05, | |
| "loss": 1.9518, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.9754564595707237, | |
| "grad_norm": 0.7560247182846069, | |
| "learning_rate": 1.500605383254168e-05, | |
| "loss": 1.7545, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.9779705226108545, | |
| "grad_norm": 0.7362846732139587, | |
| "learning_rate": 1.4987426655490362e-05, | |
| "loss": 1.802, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.9804845856509852, | |
| "grad_norm": 0.6675201058387756, | |
| "learning_rate": 1.4968799478439044e-05, | |
| "loss": 1.7671, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.982998648691116, | |
| "grad_norm": 0.7939141988754272, | |
| "learning_rate": 1.4950172301387726e-05, | |
| "loss": 1.7956, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.9855127117312467, | |
| "grad_norm": 0.7457901239395142, | |
| "learning_rate": 1.493154512433641e-05, | |
| "loss": 1.8183, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.9880267747713773, | |
| "grad_norm": 0.7803356647491455, | |
| "learning_rate": 1.4912917947285091e-05, | |
| "loss": 1.8218, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.9905408378115081, | |
| "grad_norm": 0.7377974987030029, | |
| "learning_rate": 1.4894290770233773e-05, | |
| "loss": 1.7376, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.9930549008516388, | |
| "grad_norm": 0.6956228613853455, | |
| "learning_rate": 1.4875663593182454e-05, | |
| "loss": 1.8643, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.9955689638917696, | |
| "grad_norm": 0.8089497685432434, | |
| "learning_rate": 1.4857036416131136e-05, | |
| "loss": 1.894, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.9980830269319003, | |
| "grad_norm": 0.7910427451133728, | |
| "learning_rate": 1.4838409239079818e-05, | |
| "loss": 1.7475, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.9998428710599918, | |
| "eval_loss": 0.20647510886192322, | |
| "eval_runtime": 319.6977, | |
| "eval_samples_per_second": 34.11, | |
| "eval_steps_per_second": 4.267, | |
| "step": 3977 | |
| }, | |
| { | |
| "epoch": 1.000691367336036, | |
| "grad_norm": 0.836216390132904, | |
| "learning_rate": 1.48197820620285e-05, | |
| "loss": 1.9141, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.0032054303761666, | |
| "grad_norm": 0.7491644620895386, | |
| "learning_rate": 1.4801154884977183e-05, | |
| "loss": 1.786, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.0057194934162974, | |
| "grad_norm": 0.7566587328910828, | |
| "learning_rate": 1.4782527707925865e-05, | |
| "loss": 1.7916, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.0082335564564282, | |
| "grad_norm": 0.7840508222579956, | |
| "learning_rate": 1.4763900530874547e-05, | |
| "loss": 1.6907, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.0107476194965588, | |
| "grad_norm": 0.7381307482719421, | |
| "learning_rate": 1.4745273353823228e-05, | |
| "loss": 1.7532, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.0132616825366896, | |
| "grad_norm": 0.7914415001869202, | |
| "learning_rate": 1.472664617677191e-05, | |
| "loss": 1.6444, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.0157757455768204, | |
| "grad_norm": 0.8062846064567566, | |
| "learning_rate": 1.4708018999720592e-05, | |
| "loss": 1.8284, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.0182898086169512, | |
| "grad_norm": 0.7166664004325867, | |
| "learning_rate": 1.4689391822669277e-05, | |
| "loss": 1.6267, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.0208038716570818, | |
| "grad_norm": 0.7985884547233582, | |
| "learning_rate": 1.4670764645617959e-05, | |
| "loss": 1.6546, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.0233179346972126, | |
| "grad_norm": 0.9472522735595703, | |
| "learning_rate": 1.465213746856664e-05, | |
| "loss": 1.8375, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.0258319977373433, | |
| "grad_norm": 0.8365509510040283, | |
| "learning_rate": 1.4633510291515322e-05, | |
| "loss": 1.775, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.028346060777474, | |
| "grad_norm": 0.7500318884849548, | |
| "learning_rate": 1.4614883114464005e-05, | |
| "loss": 1.7906, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.0308601238176047, | |
| "grad_norm": 0.7501471042633057, | |
| "learning_rate": 1.4596255937412687e-05, | |
| "loss": 1.692, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.0333741868577355, | |
| "grad_norm": 0.656947135925293, | |
| "learning_rate": 1.4577628760361369e-05, | |
| "loss": 1.6487, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.035888249897866, | |
| "grad_norm": 0.6909087300300598, | |
| "learning_rate": 1.455900158331005e-05, | |
| "loss": 1.7352, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.038402312937997, | |
| "grad_norm": 0.7591822147369385, | |
| "learning_rate": 1.4540374406258732e-05, | |
| "loss": 1.8137, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.0409163759781277, | |
| "grad_norm": 0.7808558344841003, | |
| "learning_rate": 1.4521747229207414e-05, | |
| "loss": 1.8585, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.0434304390182585, | |
| "grad_norm": 0.7539440989494324, | |
| "learning_rate": 1.4503120052156096e-05, | |
| "loss": 1.7327, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.045944502058389, | |
| "grad_norm": 0.7050777673721313, | |
| "learning_rate": 1.448449287510478e-05, | |
| "loss": 1.7074, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.0484585650985199, | |
| "grad_norm": 0.7861224412918091, | |
| "learning_rate": 1.4465865698053461e-05, | |
| "loss": 1.7442, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.0509726281386507, | |
| "grad_norm": 0.7243571877479553, | |
| "learning_rate": 1.4447238521002143e-05, | |
| "loss": 1.6399, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.0534866911787812, | |
| "grad_norm": 0.7341039180755615, | |
| "learning_rate": 1.4428611343950825e-05, | |
| "loss": 1.8455, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.056000754218912, | |
| "grad_norm": 0.7154441475868225, | |
| "learning_rate": 1.4409984166899506e-05, | |
| "loss": 1.7194, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.0585148172590428, | |
| "grad_norm": 0.7193806171417236, | |
| "learning_rate": 1.4391356989848188e-05, | |
| "loss": 1.6864, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.0610288802991734, | |
| "grad_norm": 0.7973105311393738, | |
| "learning_rate": 1.4372729812796873e-05, | |
| "loss": 1.8272, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.0635429433393042, | |
| "grad_norm": 0.8277497887611389, | |
| "learning_rate": 1.4354102635745555e-05, | |
| "loss": 1.7089, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.066057006379435, | |
| "grad_norm": 0.7339916229248047, | |
| "learning_rate": 1.4335475458694237e-05, | |
| "loss": 1.7836, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.0685710694195656, | |
| "grad_norm": 0.6995800733566284, | |
| "learning_rate": 1.4316848281642918e-05, | |
| "loss": 1.8296, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.0710851324596964, | |
| "grad_norm": 0.755001962184906, | |
| "learning_rate": 1.42982211045916e-05, | |
| "loss": 1.7786, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.0735991954998272, | |
| "grad_norm": 0.7961637377738953, | |
| "learning_rate": 1.4279593927540283e-05, | |
| "loss": 1.816, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.076113258539958, | |
| "grad_norm": 0.7799317240715027, | |
| "learning_rate": 1.4260966750488965e-05, | |
| "loss": 1.7769, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.0786273215800886, | |
| "grad_norm": 0.8100453019142151, | |
| "learning_rate": 1.4242339573437647e-05, | |
| "loss": 1.7255, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.0811413846202194, | |
| "grad_norm": 0.7499783039093018, | |
| "learning_rate": 1.4223712396386329e-05, | |
| "loss": 1.7504, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.0836554476603502, | |
| "grad_norm": 0.8133047819137573, | |
| "learning_rate": 1.420508521933501e-05, | |
| "loss": 1.7091, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.0861695107004807, | |
| "grad_norm": 0.775681734085083, | |
| "learning_rate": 1.4186458042283692e-05, | |
| "loss": 1.7047, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.0886835737406115, | |
| "grad_norm": 0.715179443359375, | |
| "learning_rate": 1.4167830865232376e-05, | |
| "loss": 1.7005, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.0911976367807423, | |
| "grad_norm": 0.8723117113113403, | |
| "learning_rate": 1.4149203688181057e-05, | |
| "loss": 1.7185, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.093711699820873, | |
| "grad_norm": 0.7544639110565186, | |
| "learning_rate": 1.4130576511129739e-05, | |
| "loss": 1.6572, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.0962257628610037, | |
| "grad_norm": 0.7774538993835449, | |
| "learning_rate": 1.411194933407842e-05, | |
| "loss": 1.7295, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.0987398259011345, | |
| "grad_norm": 1.0741883516311646, | |
| "learning_rate": 1.4093322157027103e-05, | |
| "loss": 1.6388, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.1012538889412653, | |
| "grad_norm": 0.7513747215270996, | |
| "learning_rate": 1.4074694979975784e-05, | |
| "loss": 1.6413, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.1037679519813959, | |
| "grad_norm": 0.707658588886261, | |
| "learning_rate": 1.405606780292447e-05, | |
| "loss": 1.6393, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.1062820150215267, | |
| "grad_norm": 0.6913684606552124, | |
| "learning_rate": 1.4037440625873151e-05, | |
| "loss": 1.6868, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.1087960780616575, | |
| "grad_norm": 0.7147552967071533, | |
| "learning_rate": 1.4018813448821833e-05, | |
| "loss": 1.6873, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.111310141101788, | |
| "grad_norm": 0.7387866377830505, | |
| "learning_rate": 1.4000186271770515e-05, | |
| "loss": 1.5841, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.1138242041419188, | |
| "grad_norm": 0.8364249467849731, | |
| "learning_rate": 1.3981559094719196e-05, | |
| "loss": 1.6565, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.1163382671820496, | |
| "grad_norm": 0.7048381567001343, | |
| "learning_rate": 1.396293191766788e-05, | |
| "loss": 1.7189, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.1188523302221802, | |
| "grad_norm": 0.66068035364151, | |
| "learning_rate": 1.3944304740616562e-05, | |
| "loss": 1.7827, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.121366393262311, | |
| "grad_norm": 0.8100654482841492, | |
| "learning_rate": 1.3925677563565243e-05, | |
| "loss": 1.6887, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.1238804563024418, | |
| "grad_norm": 0.6675124168395996, | |
| "learning_rate": 1.3907050386513925e-05, | |
| "loss": 1.6691, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.1263945193425724, | |
| "grad_norm": 0.7793298959732056, | |
| "learning_rate": 1.3888423209462607e-05, | |
| "loss": 1.7299, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.1289085823827032, | |
| "grad_norm": 0.7944535613059998, | |
| "learning_rate": 1.3869796032411288e-05, | |
| "loss": 1.7269, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.131422645422834, | |
| "grad_norm": 0.8129689693450928, | |
| "learning_rate": 1.385116885535997e-05, | |
| "loss": 1.7728, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.1339367084629648, | |
| "grad_norm": 0.7754086256027222, | |
| "learning_rate": 1.3832541678308654e-05, | |
| "loss": 1.7285, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.1364507715030954, | |
| "grad_norm": 0.7438690662384033, | |
| "learning_rate": 1.3813914501257335e-05, | |
| "loss": 1.6881, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.1389648345432262, | |
| "grad_norm": 0.7423312664031982, | |
| "learning_rate": 1.3795287324206017e-05, | |
| "loss": 1.7336, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.141478897583357, | |
| "grad_norm": 0.7804150581359863, | |
| "learning_rate": 1.3776660147154699e-05, | |
| "loss": 1.7972, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.1439929606234875, | |
| "grad_norm": 0.752947211265564, | |
| "learning_rate": 1.375803297010338e-05, | |
| "loss": 1.7278, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.1465070236636183, | |
| "grad_norm": 0.7920584678649902, | |
| "learning_rate": 1.3739405793052062e-05, | |
| "loss": 1.6582, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.1490210867037491, | |
| "grad_norm": 0.7913728356361389, | |
| "learning_rate": 1.3720778616000747e-05, | |
| "loss": 1.7418, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.15153514974388, | |
| "grad_norm": 0.7286337018013, | |
| "learning_rate": 1.370215143894943e-05, | |
| "loss": 1.7133, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.1540492127840105, | |
| "grad_norm": 0.7719852924346924, | |
| "learning_rate": 1.3683524261898111e-05, | |
| "loss": 1.7355, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.1565632758241413, | |
| "grad_norm": 0.7951362729072571, | |
| "learning_rate": 1.3664897084846793e-05, | |
| "loss": 1.7826, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.159077338864272, | |
| "grad_norm": 0.7630165219306946, | |
| "learning_rate": 1.3646269907795476e-05, | |
| "loss": 1.7459, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.1615914019044027, | |
| "grad_norm": 0.7860592603683472, | |
| "learning_rate": 1.3627642730744158e-05, | |
| "loss": 1.6884, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.1641054649445335, | |
| "grad_norm": 0.7715541124343872, | |
| "learning_rate": 1.360901555369284e-05, | |
| "loss": 1.6919, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.1666195279846643, | |
| "grad_norm": 0.7906036376953125, | |
| "learning_rate": 1.3590388376641521e-05, | |
| "loss": 1.8039, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.1691335910247949, | |
| "grad_norm": 0.7115294933319092, | |
| "learning_rate": 1.3571761199590203e-05, | |
| "loss": 1.7214, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.1716476540649257, | |
| "grad_norm": 0.6619435548782349, | |
| "learning_rate": 1.3553134022538885e-05, | |
| "loss": 1.5413, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.1741617171050565, | |
| "grad_norm": 0.806623637676239, | |
| "learning_rate": 1.3534506845487566e-05, | |
| "loss": 1.8032, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.176675780145187, | |
| "grad_norm": 0.7953882813453674, | |
| "learning_rate": 1.351587966843625e-05, | |
| "loss": 1.8166, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.1791898431853178, | |
| "grad_norm": 0.7350426316261292, | |
| "learning_rate": 1.3497252491384932e-05, | |
| "loss": 1.5894, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.1817039062254486, | |
| "grad_norm": 0.7127729654312134, | |
| "learning_rate": 1.3478625314333613e-05, | |
| "loss": 1.6692, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.1842179692655794, | |
| "grad_norm": 0.8366225957870483, | |
| "learning_rate": 1.3459998137282295e-05, | |
| "loss": 1.8343, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.18673203230571, | |
| "grad_norm": 0.7709360718727112, | |
| "learning_rate": 1.3441370960230977e-05, | |
| "loss": 1.6433, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.1892460953458408, | |
| "grad_norm": 0.8758155107498169, | |
| "learning_rate": 1.3422743783179659e-05, | |
| "loss": 1.749, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.1917601583859716, | |
| "grad_norm": 0.7965550422668457, | |
| "learning_rate": 1.3404116606128344e-05, | |
| "loss": 1.6676, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.1942742214261022, | |
| "grad_norm": 0.7590332627296448, | |
| "learning_rate": 1.3385489429077025e-05, | |
| "loss": 1.6199, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.196788284466233, | |
| "grad_norm": 0.766509473323822, | |
| "learning_rate": 1.3366862252025707e-05, | |
| "loss": 1.8059, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.1993023475063638, | |
| "grad_norm": 0.751599907875061, | |
| "learning_rate": 1.3348235074974389e-05, | |
| "loss": 1.529, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.2018164105464946, | |
| "grad_norm": 0.7836679220199585, | |
| "learning_rate": 1.332960789792307e-05, | |
| "loss": 1.6657, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.2043304735866251, | |
| "grad_norm": 0.7974615097045898, | |
| "learning_rate": 1.3310980720871754e-05, | |
| "loss": 1.7335, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.206844536626756, | |
| "grad_norm": 0.7551774978637695, | |
| "learning_rate": 1.3292353543820436e-05, | |
| "loss": 1.7348, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.2093585996668867, | |
| "grad_norm": 0.7246921062469482, | |
| "learning_rate": 1.3273726366769118e-05, | |
| "loss": 1.594, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.2118726627070173, | |
| "grad_norm": 0.771374523639679, | |
| "learning_rate": 1.32550991897178e-05, | |
| "loss": 1.7725, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.2143867257471481, | |
| "grad_norm": 0.7123560905456543, | |
| "learning_rate": 1.3236472012666481e-05, | |
| "loss": 1.6686, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.216900788787279, | |
| "grad_norm": 0.7663251161575317, | |
| "learning_rate": 1.3217844835615163e-05, | |
| "loss": 1.6721, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.2194148518274095, | |
| "grad_norm": 0.7750524878501892, | |
| "learning_rate": 1.3199217658563846e-05, | |
| "loss": 1.6639, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.2219289148675403, | |
| "grad_norm": 0.7681446671485901, | |
| "learning_rate": 1.3180590481512528e-05, | |
| "loss": 1.8309, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.224442977907671, | |
| "grad_norm": 0.8097115159034729, | |
| "learning_rate": 1.316196330446121e-05, | |
| "loss": 1.6893, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.2269570409478017, | |
| "grad_norm": 0.7146026492118835, | |
| "learning_rate": 1.3143336127409891e-05, | |
| "loss": 1.5877, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.2294711039879325, | |
| "grad_norm": 0.857120931148529, | |
| "learning_rate": 1.3124708950358573e-05, | |
| "loss": 1.6898, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.2319851670280633, | |
| "grad_norm": 0.7126986980438232, | |
| "learning_rate": 1.3106081773307255e-05, | |
| "loss": 1.5924, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.2344992300681938, | |
| "grad_norm": 0.7263715267181396, | |
| "learning_rate": 1.3087454596255937e-05, | |
| "loss": 1.6104, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.2370132931083246, | |
| "grad_norm": 0.694532573223114, | |
| "learning_rate": 1.3068827419204622e-05, | |
| "loss": 1.7136, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.2395273561484554, | |
| "grad_norm": 0.7895197868347168, | |
| "learning_rate": 1.3050200242153303e-05, | |
| "loss": 1.7131, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.2420414191885862, | |
| "grad_norm": 0.8078169226646423, | |
| "learning_rate": 1.3031573065101985e-05, | |
| "loss": 1.6442, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.2445554822287168, | |
| "grad_norm": 0.7764593958854675, | |
| "learning_rate": 1.3012945888050667e-05, | |
| "loss": 1.6146, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.2470695452688476, | |
| "grad_norm": 0.738167941570282, | |
| "learning_rate": 1.299431871099935e-05, | |
| "loss": 1.8041, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.2495836083089784, | |
| "grad_norm": 0.8046619892120361, | |
| "learning_rate": 1.2975691533948032e-05, | |
| "loss": 1.7067, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.2520976713491092, | |
| "grad_norm": 0.7335379719734192, | |
| "learning_rate": 1.2957064356896714e-05, | |
| "loss": 1.7095, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.2546117343892398, | |
| "grad_norm": 0.8028671145439148, | |
| "learning_rate": 1.2938437179845396e-05, | |
| "loss": 1.8345, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.2571257974293706, | |
| "grad_norm": 0.7364903092384338, | |
| "learning_rate": 1.2919810002794077e-05, | |
| "loss": 1.6629, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.2596398604695014, | |
| "grad_norm": 0.7556438446044922, | |
| "learning_rate": 1.2901182825742759e-05, | |
| "loss": 1.6969, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.262153923509632, | |
| "grad_norm": 0.7872533798217773, | |
| "learning_rate": 1.288255564869144e-05, | |
| "loss": 1.6672, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.2646679865497628, | |
| "grad_norm": 0.7323378324508667, | |
| "learning_rate": 1.2863928471640124e-05, | |
| "loss": 1.6631, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.2671820495898936, | |
| "grad_norm": 0.689405620098114, | |
| "learning_rate": 1.2845301294588806e-05, | |
| "loss": 1.6149, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.2696961126300241, | |
| "grad_norm": 0.7461852431297302, | |
| "learning_rate": 1.2826674117537488e-05, | |
| "loss": 1.649, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.272210175670155, | |
| "grad_norm": 0.7727710008621216, | |
| "learning_rate": 1.280804694048617e-05, | |
| "loss": 1.7141, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.2747242387102857, | |
| "grad_norm": 0.8351858258247375, | |
| "learning_rate": 1.2789419763434851e-05, | |
| "loss": 1.6873, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.2772383017504163, | |
| "grad_norm": 0.7802210450172424, | |
| "learning_rate": 1.2770792586383533e-05, | |
| "loss": 1.7353, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.279752364790547, | |
| "grad_norm": 0.7288984060287476, | |
| "learning_rate": 1.2752165409332218e-05, | |
| "loss": 1.6919, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.282266427830678, | |
| "grad_norm": 0.701543390750885, | |
| "learning_rate": 1.27335382322809e-05, | |
| "loss": 1.7365, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.2847804908708085, | |
| "grad_norm": 0.9477130770683289, | |
| "learning_rate": 1.2714911055229581e-05, | |
| "loss": 1.9412, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.2872945539109393, | |
| "grad_norm": 0.7678667902946472, | |
| "learning_rate": 1.2696283878178263e-05, | |
| "loss": 1.6647, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.28980861695107, | |
| "grad_norm": 0.6887679696083069, | |
| "learning_rate": 1.2677656701126945e-05, | |
| "loss": 1.7137, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.2923226799912007, | |
| "grad_norm": 0.818414568901062, | |
| "learning_rate": 1.2659029524075628e-05, | |
| "loss": 1.7238, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.2948367430313314, | |
| "grad_norm": 0.7899143695831299, | |
| "learning_rate": 1.264040234702431e-05, | |
| "loss": 1.7148, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.2973508060714622, | |
| "grad_norm": 0.7786597013473511, | |
| "learning_rate": 1.2621775169972992e-05, | |
| "loss": 1.6647, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.299864869111593, | |
| "grad_norm": 0.7544032335281372, | |
| "learning_rate": 1.2603147992921674e-05, | |
| "loss": 1.6889, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.3023789321517236, | |
| "grad_norm": 0.8204768300056458, | |
| "learning_rate": 1.2584520815870355e-05, | |
| "loss": 1.827, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.3048929951918544, | |
| "grad_norm": 0.7489396929740906, | |
| "learning_rate": 1.2565893638819037e-05, | |
| "loss": 1.6979, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.3074070582319852, | |
| "grad_norm": 0.7310487627983093, | |
| "learning_rate": 1.254726646176772e-05, | |
| "loss": 1.6671, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.309921121272116, | |
| "grad_norm": 0.8248193860054016, | |
| "learning_rate": 1.2528639284716402e-05, | |
| "loss": 1.82, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.3124351843122466, | |
| "grad_norm": 0.6937602162361145, | |
| "learning_rate": 1.2510012107665084e-05, | |
| "loss": 1.7324, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.3149492473523774, | |
| "grad_norm": 0.7127777338027954, | |
| "learning_rate": 1.2491384930613766e-05, | |
| "loss": 1.6226, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.3174633103925082, | |
| "grad_norm": 0.712577760219574, | |
| "learning_rate": 1.2472757753562447e-05, | |
| "loss": 1.6926, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.3199773734326388, | |
| "grad_norm": 0.7192373871803284, | |
| "learning_rate": 1.245413057651113e-05, | |
| "loss": 1.7171, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.3224914364727696, | |
| "grad_norm": 0.7888555526733398, | |
| "learning_rate": 1.2435503399459813e-05, | |
| "loss": 1.721, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.3250054995129004, | |
| "grad_norm": 0.8596723079681396, | |
| "learning_rate": 1.2416876222408496e-05, | |
| "loss": 1.7396, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.327519562553031, | |
| "grad_norm": 0.7616602182388306, | |
| "learning_rate": 1.2398249045357178e-05, | |
| "loss": 1.7454, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.3300336255931617, | |
| "grad_norm": 0.8837588429450989, | |
| "learning_rate": 1.237962186830586e-05, | |
| "loss": 1.6761, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.3325476886332925, | |
| "grad_norm": 0.7860695719718933, | |
| "learning_rate": 1.2360994691254541e-05, | |
| "loss": 1.814, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.3350617516734231, | |
| "grad_norm": 0.7959959506988525, | |
| "learning_rate": 1.2342367514203225e-05, | |
| "loss": 1.503, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.337575814713554, | |
| "grad_norm": 0.7713824510574341, | |
| "learning_rate": 1.2323740337151906e-05, | |
| "loss": 1.6338, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.3400898777536847, | |
| "grad_norm": 0.8449348211288452, | |
| "learning_rate": 1.2305113160100588e-05, | |
| "loss": 1.632, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.3426039407938153, | |
| "grad_norm": 0.7532740235328674, | |
| "learning_rate": 1.228648598304927e-05, | |
| "loss": 1.7656, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.345118003833946, | |
| "grad_norm": 0.7944811582565308, | |
| "learning_rate": 1.2267858805997952e-05, | |
| "loss": 1.6338, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.3476320668740769, | |
| "grad_norm": 0.7803758382797241, | |
| "learning_rate": 1.2249231628946633e-05, | |
| "loss": 1.7284, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.3501461299142075, | |
| "grad_norm": 0.7606383562088013, | |
| "learning_rate": 1.2230604451895317e-05, | |
| "loss": 1.6658, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.3526601929543383, | |
| "grad_norm": 0.8360714316368103, | |
| "learning_rate": 1.2211977274843999e-05, | |
| "loss": 1.7277, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.355174255994469, | |
| "grad_norm": 0.812910258769989, | |
| "learning_rate": 1.219335009779268e-05, | |
| "loss": 1.7963, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.3576883190345999, | |
| "grad_norm": 0.6753668785095215, | |
| "learning_rate": 1.2174722920741362e-05, | |
| "loss": 1.6476, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.3602023820747307, | |
| "grad_norm": 0.784579336643219, | |
| "learning_rate": 1.2156095743690044e-05, | |
| "loss": 1.7068, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.3627164451148612, | |
| "grad_norm": 0.7511261701583862, | |
| "learning_rate": 1.2137468566638725e-05, | |
| "loss": 1.7134, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.365230508154992, | |
| "grad_norm": 0.7471922636032104, | |
| "learning_rate": 1.2118841389587407e-05, | |
| "loss": 1.5618, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.3677445711951228, | |
| "grad_norm": 0.7421702146530151, | |
| "learning_rate": 1.2100214212536092e-05, | |
| "loss": 1.6077, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.3702586342352534, | |
| "grad_norm": 0.7888907790184021, | |
| "learning_rate": 1.2081587035484774e-05, | |
| "loss": 1.5841, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.3727726972753842, | |
| "grad_norm": 0.8044992089271545, | |
| "learning_rate": 1.2062959858433456e-05, | |
| "loss": 1.7244, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.375286760315515, | |
| "grad_norm": 0.7535794973373413, | |
| "learning_rate": 1.2044332681382138e-05, | |
| "loss": 1.7395, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.3778008233556456, | |
| "grad_norm": 0.7281785607337952, | |
| "learning_rate": 1.2025705504330821e-05, | |
| "loss": 1.7157, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.3803148863957764, | |
| "grad_norm": 0.7137337327003479, | |
| "learning_rate": 1.2007078327279503e-05, | |
| "loss": 1.6789, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.3828289494359072, | |
| "grad_norm": 0.7234044671058655, | |
| "learning_rate": 1.1988451150228184e-05, | |
| "loss": 1.5587, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.3853430124760377, | |
| "grad_norm": 0.6954776048660278, | |
| "learning_rate": 1.1969823973176866e-05, | |
| "loss": 1.5418, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.3878570755161685, | |
| "grad_norm": 0.7066569924354553, | |
| "learning_rate": 1.1951196796125548e-05, | |
| "loss": 1.6282, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.3903711385562993, | |
| "grad_norm": 0.7675435543060303, | |
| "learning_rate": 1.193256961907423e-05, | |
| "loss": 1.635, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.39288520159643, | |
| "grad_norm": 0.7584708333015442, | |
| "learning_rate": 1.1913942442022911e-05, | |
| "loss": 1.6519, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.3953992646365607, | |
| "grad_norm": 0.7794647216796875, | |
| "learning_rate": 1.1895315264971595e-05, | |
| "loss": 1.7332, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.3979133276766915, | |
| "grad_norm": 0.7727742791175842, | |
| "learning_rate": 1.1876688087920277e-05, | |
| "loss": 1.6964, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.400427390716822, | |
| "grad_norm": 0.7282200455665588, | |
| "learning_rate": 1.1858060910868958e-05, | |
| "loss": 1.7257, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.402941453756953, | |
| "grad_norm": 0.7605634927749634, | |
| "learning_rate": 1.183943373381764e-05, | |
| "loss": 1.6748, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.4054555167970837, | |
| "grad_norm": 0.7347403168678284, | |
| "learning_rate": 1.1820806556766322e-05, | |
| "loss": 1.7666, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.4079695798372145, | |
| "grad_norm": 0.7224205732345581, | |
| "learning_rate": 1.1802179379715003e-05, | |
| "loss": 1.7845, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.410483642877345, | |
| "grad_norm": 0.6920194625854492, | |
| "learning_rate": 1.1783552202663689e-05, | |
| "loss": 1.664, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.4129977059174759, | |
| "grad_norm": 0.7645785808563232, | |
| "learning_rate": 1.176492502561237e-05, | |
| "loss": 1.6428, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.4155117689576067, | |
| "grad_norm": 0.8184845447540283, | |
| "learning_rate": 1.1746297848561052e-05, | |
| "loss": 1.7002, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.4180258319977375, | |
| "grad_norm": 0.7098300457000732, | |
| "learning_rate": 1.1727670671509734e-05, | |
| "loss": 1.7319, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.420539895037868, | |
| "grad_norm": 0.7213707566261292, | |
| "learning_rate": 1.1709043494458416e-05, | |
| "loss": 1.5214, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.4230539580779988, | |
| "grad_norm": 0.789682149887085, | |
| "learning_rate": 1.1690416317407099e-05, | |
| "loss": 1.8146, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.4255680211181296, | |
| "grad_norm": 0.7511170506477356, | |
| "learning_rate": 1.167178914035578e-05, | |
| "loss": 1.726, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.4280820841582602, | |
| "grad_norm": 0.7393010258674622, | |
| "learning_rate": 1.1653161963304462e-05, | |
| "loss": 1.5988, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.430596147198391, | |
| "grad_norm": 0.7070342898368835, | |
| "learning_rate": 1.1634534786253144e-05, | |
| "loss": 1.5678, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.4331102102385218, | |
| "grad_norm": 0.7652496695518494, | |
| "learning_rate": 1.1615907609201826e-05, | |
| "loss": 1.7204, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.4356242732786524, | |
| "grad_norm": 0.7325350046157837, | |
| "learning_rate": 1.1597280432150508e-05, | |
| "loss": 1.5736, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.4381383363187832, | |
| "grad_norm": 0.7561137080192566, | |
| "learning_rate": 1.1578653255099191e-05, | |
| "loss": 1.8278, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.440652399358914, | |
| "grad_norm": 0.7634536027908325, | |
| "learning_rate": 1.1560026078047873e-05, | |
| "loss": 1.8237, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.4431664623990446, | |
| "grad_norm": 0.8437842726707458, | |
| "learning_rate": 1.1541398900996555e-05, | |
| "loss": 1.7693, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.4456805254391754, | |
| "grad_norm": 0.7438640594482422, | |
| "learning_rate": 1.1522771723945236e-05, | |
| "loss": 1.6098, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.4481945884793062, | |
| "grad_norm": 0.6699410676956177, | |
| "learning_rate": 1.1504144546893918e-05, | |
| "loss": 1.7607, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.4507086515194367, | |
| "grad_norm": 0.8087129592895508, | |
| "learning_rate": 1.14855173698426e-05, | |
| "loss": 1.6347, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.4532227145595675, | |
| "grad_norm": 0.803558886051178, | |
| "learning_rate": 1.1466890192791282e-05, | |
| "loss": 1.7051, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.4557367775996983, | |
| "grad_norm": 0.788940966129303, | |
| "learning_rate": 1.1448263015739967e-05, | |
| "loss": 1.6083, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.4582508406398291, | |
| "grad_norm": 0.7224819660186768, | |
| "learning_rate": 1.1429635838688648e-05, | |
| "loss": 1.7132, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.4607649036799597, | |
| "grad_norm": 0.7632660269737244, | |
| "learning_rate": 1.141100866163733e-05, | |
| "loss": 1.7089, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.4632789667200905, | |
| "grad_norm": 0.8406090140342712, | |
| "learning_rate": 1.1392381484586012e-05, | |
| "loss": 1.636, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.4657930297602213, | |
| "grad_norm": 0.9032421708106995, | |
| "learning_rate": 1.1373754307534695e-05, | |
| "loss": 1.6217, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.468307092800352, | |
| "grad_norm": 0.6949290633201599, | |
| "learning_rate": 1.1355127130483377e-05, | |
| "loss": 1.6892, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.4708211558404827, | |
| "grad_norm": 0.8053610324859619, | |
| "learning_rate": 1.1336499953432059e-05, | |
| "loss": 1.6568, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.4733352188806135, | |
| "grad_norm": 0.7153986096382141, | |
| "learning_rate": 1.131787277638074e-05, | |
| "loss": 1.6421, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.4758492819207443, | |
| "grad_norm": 0.7638366222381592, | |
| "learning_rate": 1.1299245599329422e-05, | |
| "loss": 1.6018, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.4783633449608748, | |
| "grad_norm": 0.7297844886779785, | |
| "learning_rate": 1.1280618422278104e-05, | |
| "loss": 1.6306, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.4808774080010056, | |
| "grad_norm": 0.7795360088348389, | |
| "learning_rate": 1.1261991245226787e-05, | |
| "loss": 1.7534, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.4833914710411364, | |
| "grad_norm": 0.7738423943519592, | |
| "learning_rate": 1.1243364068175469e-05, | |
| "loss": 1.739, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.485905534081267, | |
| "grad_norm": 2.02028751373291, | |
| "learning_rate": 1.1224736891124151e-05, | |
| "loss": 1.7602, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.4884195971213978, | |
| "grad_norm": 0.7444608807563782, | |
| "learning_rate": 1.1206109714072833e-05, | |
| "loss": 1.5875, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.4909336601615286, | |
| "grad_norm": 0.6828548908233643, | |
| "learning_rate": 1.1187482537021514e-05, | |
| "loss": 1.5873, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.4934477232016592, | |
| "grad_norm": 0.8036696910858154, | |
| "learning_rate": 1.1168855359970196e-05, | |
| "loss": 1.6683, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.49596178624179, | |
| "grad_norm": 0.7577768564224243, | |
| "learning_rate": 1.1150228182918878e-05, | |
| "loss": 1.6942, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.4984758492819208, | |
| "grad_norm": 0.7688443064689636, | |
| "learning_rate": 1.1131601005867563e-05, | |
| "loss": 1.6124, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.5009899123220514, | |
| "grad_norm": 0.79356449842453, | |
| "learning_rate": 1.1112973828816245e-05, | |
| "loss": 1.756, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.5035039753621822, | |
| "grad_norm": 0.7810083627700806, | |
| "learning_rate": 1.1094346651764926e-05, | |
| "loss": 1.5721, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.506018038402313, | |
| "grad_norm": 0.7923795580863953, | |
| "learning_rate": 1.1075719474713608e-05, | |
| "loss": 1.782, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.5085321014424435, | |
| "grad_norm": 0.763888418674469, | |
| "learning_rate": 1.1057092297662292e-05, | |
| "loss": 1.6584, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.5110461644825746, | |
| "grad_norm": 0.8156939744949341, | |
| "learning_rate": 1.1038465120610973e-05, | |
| "loss": 1.5891, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.5135602275227051, | |
| "grad_norm": 0.7554816603660583, | |
| "learning_rate": 1.1019837943559655e-05, | |
| "loss": 1.6391, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.5160742905628357, | |
| "grad_norm": 0.7248420715332031, | |
| "learning_rate": 1.1001210766508337e-05, | |
| "loss": 1.6267, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.5185883536029667, | |
| "grad_norm": 0.8019365668296814, | |
| "learning_rate": 1.0982583589457019e-05, | |
| "loss": 1.5982, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.5211024166430973, | |
| "grad_norm": 0.7249132394790649, | |
| "learning_rate": 1.09639564124057e-05, | |
| "loss": 1.7421, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.523616479683228, | |
| "grad_norm": 0.7583857774734497, | |
| "learning_rate": 1.0945329235354382e-05, | |
| "loss": 1.5576, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.526130542723359, | |
| "grad_norm": 0.7805519104003906, | |
| "learning_rate": 1.0926702058303065e-05, | |
| "loss": 1.6994, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.5286446057634895, | |
| "grad_norm": 0.751857340335846, | |
| "learning_rate": 1.0908074881251747e-05, | |
| "loss": 1.6281, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.5311586688036203, | |
| "grad_norm": 0.748385488986969, | |
| "learning_rate": 1.0889447704200429e-05, | |
| "loss": 1.6513, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.533672731843751, | |
| "grad_norm": 0.7546285390853882, | |
| "learning_rate": 1.087082052714911e-05, | |
| "loss": 1.6432, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.5361867948838817, | |
| "grad_norm": 0.7382540702819824, | |
| "learning_rate": 1.0852193350097792e-05, | |
| "loss": 1.6372, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.5387008579240125, | |
| "grad_norm": 0.6952759027481079, | |
| "learning_rate": 1.0833566173046474e-05, | |
| "loss": 1.5959, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.5412149209641433, | |
| "grad_norm": 0.7072349190711975, | |
| "learning_rate": 1.0814938995995158e-05, | |
| "loss": 1.6, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.5437289840042738, | |
| "grad_norm": 0.772688090801239, | |
| "learning_rate": 1.0796311818943841e-05, | |
| "loss": 1.6602, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.5462430470444046, | |
| "grad_norm": 0.7094897627830505, | |
| "learning_rate": 1.0777684641892523e-05, | |
| "loss": 1.6078, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.5487571100845354, | |
| "grad_norm": 0.7787843942642212, | |
| "learning_rate": 1.0759057464841204e-05, | |
| "loss": 1.7128, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.551271173124666, | |
| "grad_norm": 0.8106518387794495, | |
| "learning_rate": 1.0740430287789886e-05, | |
| "loss": 1.5986, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.5537852361647968, | |
| "grad_norm": 0.7972885370254517, | |
| "learning_rate": 1.072180311073857e-05, | |
| "loss": 1.6492, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.5562992992049276, | |
| "grad_norm": 0.7695562839508057, | |
| "learning_rate": 1.0703175933687251e-05, | |
| "loss": 1.7523, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.5588133622450582, | |
| "grad_norm": 0.794442355632782, | |
| "learning_rate": 1.0684548756635933e-05, | |
| "loss": 1.6242, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.561327425285189, | |
| "grad_norm": 0.7430515885353088, | |
| "learning_rate": 1.0665921579584615e-05, | |
| "loss": 1.6452, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.5638414883253198, | |
| "grad_norm": 0.7732383608818054, | |
| "learning_rate": 1.0647294402533297e-05, | |
| "loss": 1.798, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.5663555513654503, | |
| "grad_norm": 0.7974816560745239, | |
| "learning_rate": 1.0628667225481978e-05, | |
| "loss": 1.6696, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.5688696144055814, | |
| "grad_norm": 0.6930527091026306, | |
| "learning_rate": 1.0610040048430662e-05, | |
| "loss": 1.5694, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.571383677445712, | |
| "grad_norm": 0.6391228437423706, | |
| "learning_rate": 1.0591412871379343e-05, | |
| "loss": 1.576, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.5738977404858425, | |
| "grad_norm": 0.7676073908805847, | |
| "learning_rate": 1.0572785694328025e-05, | |
| "loss": 1.7056, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.5764118035259735, | |
| "grad_norm": 0.794621467590332, | |
| "learning_rate": 1.0554158517276707e-05, | |
| "loss": 1.6507, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.5789258665661041, | |
| "grad_norm": 0.7590543627738953, | |
| "learning_rate": 1.0535531340225389e-05, | |
| "loss": 1.6453, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.581439929606235, | |
| "grad_norm": 0.7579789161682129, | |
| "learning_rate": 1.051690416317407e-05, | |
| "loss": 1.785, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.5839539926463657, | |
| "grad_norm": 0.7438461780548096, | |
| "learning_rate": 1.0498276986122752e-05, | |
| "loss": 1.627, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.5864680556864963, | |
| "grad_norm": 0.7626237869262695, | |
| "learning_rate": 1.0479649809071437e-05, | |
| "loss": 1.7147, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.588982118726627, | |
| "grad_norm": 0.8654986023902893, | |
| "learning_rate": 1.0461022632020119e-05, | |
| "loss": 1.7302, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.591496181766758, | |
| "grad_norm": 0.7465078830718994, | |
| "learning_rate": 1.04423954549688e-05, | |
| "loss": 1.7338, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.5940102448068885, | |
| "grad_norm": 0.7845212817192078, | |
| "learning_rate": 1.0423768277917482e-05, | |
| "loss": 1.6969, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.5965243078470193, | |
| "grad_norm": 0.8335673809051514, | |
| "learning_rate": 1.0405141100866166e-05, | |
| "loss": 1.7558, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.59903837088715, | |
| "grad_norm": 0.722292423248291, | |
| "learning_rate": 1.0386513923814848e-05, | |
| "loss": 1.684, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.6015524339272806, | |
| "grad_norm": 0.8307163715362549, | |
| "learning_rate": 1.036788674676353e-05, | |
| "loss": 1.67, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.6040664969674114, | |
| "grad_norm": 0.8292793035507202, | |
| "learning_rate": 1.0349259569712211e-05, | |
| "loss": 1.7231, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.6065805600075422, | |
| "grad_norm": 0.7402275204658508, | |
| "learning_rate": 1.0330632392660893e-05, | |
| "loss": 1.592, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.6090946230476728, | |
| "grad_norm": 0.7749201059341431, | |
| "learning_rate": 1.0312005215609575e-05, | |
| "loss": 1.8077, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.6116086860878036, | |
| "grad_norm": 0.7114654183387756, | |
| "learning_rate": 1.0293378038558256e-05, | |
| "loss": 1.7325, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.6141227491279344, | |
| "grad_norm": 0.7772413492202759, | |
| "learning_rate": 1.027475086150694e-05, | |
| "loss": 1.6857, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.616636812168065, | |
| "grad_norm": 0.7150856852531433, | |
| "learning_rate": 1.0256123684455621e-05, | |
| "loss": 1.5941, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.619150875208196, | |
| "grad_norm": 0.7365646362304688, | |
| "learning_rate": 1.0237496507404303e-05, | |
| "loss": 1.6481, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.6216649382483266, | |
| "grad_norm": 0.7896749377250671, | |
| "learning_rate": 1.0218869330352985e-05, | |
| "loss": 1.6096, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.6241790012884572, | |
| "grad_norm": 0.7370378375053406, | |
| "learning_rate": 1.0200242153301667e-05, | |
| "loss": 1.6503, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.6266930643285882, | |
| "grad_norm": 0.8268712759017944, | |
| "learning_rate": 1.0181614976250348e-05, | |
| "loss": 1.6937, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.6292071273687188, | |
| "grad_norm": 0.7136850357055664, | |
| "learning_rate": 1.0162987799199032e-05, | |
| "loss": 1.6138, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.6317211904088496, | |
| "grad_norm": 0.861764669418335, | |
| "learning_rate": 1.0144360622147715e-05, | |
| "loss": 1.6435, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.6342352534489804, | |
| "grad_norm": 0.8284439444541931, | |
| "learning_rate": 1.0125733445096397e-05, | |
| "loss": 1.7067, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.636749316489111, | |
| "grad_norm": 0.770339846611023, | |
| "learning_rate": 1.0107106268045079e-05, | |
| "loss": 1.5898, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.6392633795292417, | |
| "grad_norm": 0.7983894944190979, | |
| "learning_rate": 1.0088479090993762e-05, | |
| "loss": 1.6818, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.6417774425693725, | |
| "grad_norm": 0.7926666140556335, | |
| "learning_rate": 1.0069851913942444e-05, | |
| "loss": 1.65, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.644291505609503, | |
| "grad_norm": 0.7339533567428589, | |
| "learning_rate": 1.0051224736891126e-05, | |
| "loss": 1.7213, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.646805568649634, | |
| "grad_norm": 0.7764829993247986, | |
| "learning_rate": 1.0032597559839807e-05, | |
| "loss": 1.6197, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.6493196316897647, | |
| "grad_norm": 0.7076376080513, | |
| "learning_rate": 1.0013970382788489e-05, | |
| "loss": 1.7003, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.6518336947298953, | |
| "grad_norm": 0.7635972499847412, | |
| "learning_rate": 9.99534320573717e-06, | |
| "loss": 1.5423, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.654347757770026, | |
| "grad_norm": 0.7883146405220032, | |
| "learning_rate": 9.976716028685853e-06, | |
| "loss": 1.694, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.6568618208101569, | |
| "grad_norm": 0.7166745662689209, | |
| "learning_rate": 9.958088851634536e-06, | |
| "loss": 1.6261, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.6593758838502874, | |
| "grad_norm": 0.7623030543327332, | |
| "learning_rate": 9.939461674583218e-06, | |
| "loss": 1.6052, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.6618899468904182, | |
| "grad_norm": 0.7648949027061462, | |
| "learning_rate": 9.9208344975319e-06, | |
| "loss": 1.6872, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.664404009930549, | |
| "grad_norm": 0.8249295949935913, | |
| "learning_rate": 9.902207320480583e-06, | |
| "loss": 1.544, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.6669180729706796, | |
| "grad_norm": 0.7494423389434814, | |
| "learning_rate": 9.883580143429265e-06, | |
| "loss": 1.7424, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.6694321360108104, | |
| "grad_norm": 0.8313102126121521, | |
| "learning_rate": 9.864952966377946e-06, | |
| "loss": 1.5955, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.6719461990509412, | |
| "grad_norm": 0.908762514591217, | |
| "learning_rate": 9.846325789326628e-06, | |
| "loss": 1.6904, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.6744602620910718, | |
| "grad_norm": 0.7726667523384094, | |
| "learning_rate": 9.82769861227531e-06, | |
| "loss": 1.6662, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.6769743251312028, | |
| "grad_norm": 0.7486472129821777, | |
| "learning_rate": 9.809071435223992e-06, | |
| "loss": 1.6115, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.6794883881713334, | |
| "grad_norm": 0.8054708242416382, | |
| "learning_rate": 9.790444258172675e-06, | |
| "loss": 1.5725, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.682002451211464, | |
| "grad_norm": 0.7202349901199341, | |
| "learning_rate": 9.771817081121357e-06, | |
| "loss": 1.6867, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.684516514251595, | |
| "grad_norm": 0.6928876042366028, | |
| "learning_rate": 9.75318990407004e-06, | |
| "loss": 1.6886, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.6870305772917256, | |
| "grad_norm": 0.6366267800331116, | |
| "learning_rate": 9.734562727018722e-06, | |
| "loss": 1.5882, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.6895446403318564, | |
| "grad_norm": 0.7608894109725952, | |
| "learning_rate": 9.715935549967404e-06, | |
| "loss": 1.6984, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.6920587033719872, | |
| "grad_norm": 0.7281875610351562, | |
| "learning_rate": 9.697308372916085e-06, | |
| "loss": 1.728, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.6945727664121177, | |
| "grad_norm": 0.8543561100959778, | |
| "learning_rate": 9.678681195864767e-06, | |
| "loss": 1.6419, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.6970868294522485, | |
| "grad_norm": 0.6918557286262512, | |
| "learning_rate": 9.660054018813449e-06, | |
| "loss": 1.7722, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.6996008924923793, | |
| "grad_norm": 0.8361300826072693, | |
| "learning_rate": 9.641426841762132e-06, | |
| "loss": 1.7395, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.70211495553251, | |
| "grad_norm": 0.840027391910553, | |
| "learning_rate": 9.622799664710814e-06, | |
| "loss": 1.6777, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.7046290185726407, | |
| "grad_norm": 0.8377405405044556, | |
| "learning_rate": 9.604172487659496e-06, | |
| "loss": 1.6916, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.7071430816127715, | |
| "grad_norm": 0.7759352922439575, | |
| "learning_rate": 9.58554531060818e-06, | |
| "loss": 1.7192, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.709657144652902, | |
| "grad_norm": 0.6801751255989075, | |
| "learning_rate": 9.566918133556861e-06, | |
| "loss": 1.5633, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.7121712076930329, | |
| "grad_norm": 0.7420363426208496, | |
| "learning_rate": 9.548290956505543e-06, | |
| "loss": 1.7016, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.7146852707331637, | |
| "grad_norm": 0.6882562041282654, | |
| "learning_rate": 9.529663779454224e-06, | |
| "loss": 1.5807, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.7171993337732943, | |
| "grad_norm": 0.7310329079627991, | |
| "learning_rate": 9.511036602402906e-06, | |
| "loss": 1.6013, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.719713396813425, | |
| "grad_norm": 0.7634031772613525, | |
| "learning_rate": 9.492409425351588e-06, | |
| "loss": 1.6403, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.7222274598535559, | |
| "grad_norm": 0.7520259618759155, | |
| "learning_rate": 9.473782248300271e-06, | |
| "loss": 1.7502, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.7247415228936864, | |
| "grad_norm": 0.8202212452888489, | |
| "learning_rate": 9.455155071248953e-06, | |
| "loss": 1.6169, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.7272555859338174, | |
| "grad_norm": 0.7667455673217773, | |
| "learning_rate": 9.436527894197635e-06, | |
| "loss": 1.6618, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.729769648973948, | |
| "grad_norm": 0.7354897260665894, | |
| "learning_rate": 9.417900717146318e-06, | |
| "loss": 1.563, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.7322837120140786, | |
| "grad_norm": 0.74565589427948, | |
| "learning_rate": 9.399273540095e-06, | |
| "loss": 1.6561, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.7347977750542096, | |
| "grad_norm": 0.7192093729972839, | |
| "learning_rate": 9.380646363043682e-06, | |
| "loss": 1.6406, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.7373118380943402, | |
| "grad_norm": 0.7570973634719849, | |
| "learning_rate": 9.362019185992363e-06, | |
| "loss": 1.6838, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.739825901134471, | |
| "grad_norm": 0.7852148413658142, | |
| "learning_rate": 9.343392008941045e-06, | |
| "loss": 1.7537, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.7423399641746018, | |
| "grad_norm": 0.7134391069412231, | |
| "learning_rate": 9.324764831889727e-06, | |
| "loss": 1.5591, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.7448540272147324, | |
| "grad_norm": 0.7943626642227173, | |
| "learning_rate": 9.30613765483841e-06, | |
| "loss": 1.6757, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.7473680902548632, | |
| "grad_norm": 0.7721225619316101, | |
| "learning_rate": 9.287510477787092e-06, | |
| "loss": 1.5781, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.749882153294994, | |
| "grad_norm": 0.7856735587120056, | |
| "learning_rate": 9.268883300735775e-06, | |
| "loss": 1.6894, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.7523962163351245, | |
| "grad_norm": 0.8144394159317017, | |
| "learning_rate": 9.250256123684457e-06, | |
| "loss": 1.6422, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.7549102793752553, | |
| "grad_norm": 0.6769249439239502, | |
| "learning_rate": 9.231628946633139e-06, | |
| "loss": 1.5789, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.7574243424153861, | |
| "grad_norm": 0.7238858342170715, | |
| "learning_rate": 9.21300176958182e-06, | |
| "loss": 1.6185, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.7599384054555167, | |
| "grad_norm": 0.7050652503967285, | |
| "learning_rate": 9.194374592530502e-06, | |
| "loss": 1.6028, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.7624524684956475, | |
| "grad_norm": 0.7770819664001465, | |
| "learning_rate": 9.175747415479184e-06, | |
| "loss": 1.6631, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.7649665315357783, | |
| "grad_norm": 0.7743918299674988, | |
| "learning_rate": 9.157120238427868e-06, | |
| "loss": 1.6185, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.767480594575909, | |
| "grad_norm": 0.7575473785400391, | |
| "learning_rate": 9.13849306137655e-06, | |
| "loss": 1.6454, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.7699946576160397, | |
| "grad_norm": 0.7896557450294495, | |
| "learning_rate": 9.119865884325231e-06, | |
| "loss": 1.726, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.7725087206561705, | |
| "grad_norm": 0.722209095954895, | |
| "learning_rate": 9.101238707273914e-06, | |
| "loss": 1.6204, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.775022783696301, | |
| "grad_norm": 0.6729947924613953, | |
| "learning_rate": 9.082611530222596e-06, | |
| "loss": 1.6202, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.7775368467364319, | |
| "grad_norm": 0.7849709987640381, | |
| "learning_rate": 9.063984353171278e-06, | |
| "loss": 1.7038, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.7800509097765627, | |
| "grad_norm": 0.7620992660522461, | |
| "learning_rate": 9.04535717611996e-06, | |
| "loss": 1.5513, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.7825649728166932, | |
| "grad_norm": 0.7471102476119995, | |
| "learning_rate": 9.026729999068641e-06, | |
| "loss": 1.5868, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.7850790358568243, | |
| "grad_norm": 0.7188548445701599, | |
| "learning_rate": 9.008102822017323e-06, | |
| "loss": 1.7947, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.7875930988969548, | |
| "grad_norm": 0.7611443996429443, | |
| "learning_rate": 8.989475644966007e-06, | |
| "loss": 1.5545, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.7901071619370854, | |
| "grad_norm": 0.8097432255744934, | |
| "learning_rate": 8.970848467914688e-06, | |
| "loss": 1.6629, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.7926212249772164, | |
| "grad_norm": 0.7615976333618164, | |
| "learning_rate": 8.95222129086337e-06, | |
| "loss": 1.564, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.795135288017347, | |
| "grad_norm": 0.7366344928741455, | |
| "learning_rate": 8.933594113812053e-06, | |
| "loss": 1.6355, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.7976493510574778, | |
| "grad_norm": 0.7137435078620911, | |
| "learning_rate": 8.914966936760735e-06, | |
| "loss": 1.5679, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.8001634140976086, | |
| "grad_norm": 0.8146246671676636, | |
| "learning_rate": 8.896339759709417e-06, | |
| "loss": 1.718, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.8026774771377392, | |
| "grad_norm": 0.6740638017654419, | |
| "learning_rate": 8.877712582658099e-06, | |
| "loss": 1.54, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.80519154017787, | |
| "grad_norm": 0.7482881546020508, | |
| "learning_rate": 8.85908540560678e-06, | |
| "loss": 1.6875, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.8077056032180008, | |
| "grad_norm": 0.7979026436805725, | |
| "learning_rate": 8.840458228555462e-06, | |
| "loss": 1.6177, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.8102196662581314, | |
| "grad_norm": 0.98420250415802, | |
| "learning_rate": 8.821831051504146e-06, | |
| "loss": 1.6514, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.8127337292982622, | |
| "grad_norm": 0.7860233783721924, | |
| "learning_rate": 8.803203874452827e-06, | |
| "loss": 1.6421, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.815247792338393, | |
| "grad_norm": 0.8287745118141174, | |
| "learning_rate": 8.784576697401509e-06, | |
| "loss": 1.6224, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.8177618553785235, | |
| "grad_norm": 0.7836412787437439, | |
| "learning_rate": 8.765949520350192e-06, | |
| "loss": 1.6884, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.8202759184186543, | |
| "grad_norm": 0.8503448367118835, | |
| "learning_rate": 8.747322343298874e-06, | |
| "loss": 1.6815, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.8227899814587851, | |
| "grad_norm": 0.749182939529419, | |
| "learning_rate": 8.728695166247556e-06, | |
| "loss": 1.6704, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.8253040444989157, | |
| "grad_norm": 0.7341747283935547, | |
| "learning_rate": 8.710067989196238e-06, | |
| "loss": 1.6136, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.8278181075390465, | |
| "grad_norm": 0.7204189896583557, | |
| "learning_rate": 8.69144081214492e-06, | |
| "loss": 1.6567, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.8303321705791773, | |
| "grad_norm": 0.7759700417518616, | |
| "learning_rate": 8.672813635093603e-06, | |
| "loss": 1.6266, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.8328462336193079, | |
| "grad_norm": 0.7368295788764954, | |
| "learning_rate": 8.654186458042285e-06, | |
| "loss": 1.6376, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.835360296659439, | |
| "grad_norm": 0.7343635559082031, | |
| "learning_rate": 8.635559280990966e-06, | |
| "loss": 1.5306, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.8378743596995695, | |
| "grad_norm": 0.8353263139724731, | |
| "learning_rate": 8.61693210393965e-06, | |
| "loss": 1.9009, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.8403884227397, | |
| "grad_norm": 0.7668108344078064, | |
| "learning_rate": 8.598304926888332e-06, | |
| "loss": 1.5537, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.842902485779831, | |
| "grad_norm": 0.7725934386253357, | |
| "learning_rate": 8.579677749837013e-06, | |
| "loss": 1.5463, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.8454165488199616, | |
| "grad_norm": 0.7877798080444336, | |
| "learning_rate": 8.561050572785695e-06, | |
| "loss": 1.4941, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.8479306118600924, | |
| "grad_norm": 0.708139955997467, | |
| "learning_rate": 8.542423395734377e-06, | |
| "loss": 1.6216, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.8504446749002232, | |
| "grad_norm": 0.7146539688110352, | |
| "learning_rate": 8.523796218683058e-06, | |
| "loss": 1.7148, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.8529587379403538, | |
| "grad_norm": 0.8375624418258667, | |
| "learning_rate": 8.505169041631742e-06, | |
| "loss": 1.6496, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.8554728009804846, | |
| "grad_norm": 0.8858321905136108, | |
| "learning_rate": 8.486541864580424e-06, | |
| "loss": 1.6669, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.8579868640206154, | |
| "grad_norm": 0.8960750102996826, | |
| "learning_rate": 8.467914687529105e-06, | |
| "loss": 1.5943, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.860500927060746, | |
| "grad_norm": 0.7312014698982239, | |
| "learning_rate": 8.449287510477789e-06, | |
| "loss": 1.6784, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.8630149901008768, | |
| "grad_norm": 0.7150135636329651, | |
| "learning_rate": 8.43066033342647e-06, | |
| "loss": 1.5995, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.8655290531410076, | |
| "grad_norm": 0.8080976009368896, | |
| "learning_rate": 8.412033156375152e-06, | |
| "loss": 1.5626, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.8680431161811382, | |
| "grad_norm": 0.7181045413017273, | |
| "learning_rate": 8.393405979323834e-06, | |
| "loss": 1.5949, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.870557179221269, | |
| "grad_norm": 0.7655408382415771, | |
| "learning_rate": 8.374778802272516e-06, | |
| "loss": 1.7955, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.8730712422613998, | |
| "grad_norm": 0.8304676413536072, | |
| "learning_rate": 8.356151625221197e-06, | |
| "loss": 1.6396, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.8755853053015303, | |
| "grad_norm": 0.7564183473587036, | |
| "learning_rate": 8.337524448169881e-06, | |
| "loss": 1.6542, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.8780993683416611, | |
| "grad_norm": 0.7989917993545532, | |
| "learning_rate": 8.318897271118563e-06, | |
| "loss": 1.6334, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.880613431381792, | |
| "grad_norm": 0.7170543074607849, | |
| "learning_rate": 8.300270094067244e-06, | |
| "loss": 1.5565, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.8831274944219225, | |
| "grad_norm": 0.7773701548576355, | |
| "learning_rate": 8.281642917015928e-06, | |
| "loss": 1.7288, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.8856415574620533, | |
| "grad_norm": 0.7338926196098328, | |
| "learning_rate": 8.26301573996461e-06, | |
| "loss": 1.6081, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.888155620502184, | |
| "grad_norm": 0.7557771801948547, | |
| "learning_rate": 8.244388562913291e-06, | |
| "loss": 1.5685, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.8906696835423147, | |
| "grad_norm": 0.7206683158874512, | |
| "learning_rate": 8.225761385861973e-06, | |
| "loss": 1.6407, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.8931837465824457, | |
| "grad_norm": 0.7940247654914856, | |
| "learning_rate": 8.207134208810655e-06, | |
| "loss": 1.5569, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.8956978096225763, | |
| "grad_norm": 0.8077976107597351, | |
| "learning_rate": 8.188507031759338e-06, | |
| "loss": 1.651, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.8982118726627069, | |
| "grad_norm": 0.744542121887207, | |
| "learning_rate": 8.16987985470802e-06, | |
| "loss": 1.6588, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.9007259357028379, | |
| "grad_norm": 0.7954002022743225, | |
| "learning_rate": 8.151252677656702e-06, | |
| "loss": 1.6797, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.9032399987429685, | |
| "grad_norm": 0.7432531118392944, | |
| "learning_rate": 8.132625500605385e-06, | |
| "loss": 1.5912, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.9057540617830993, | |
| "grad_norm": 0.7329660058021545, | |
| "learning_rate": 8.113998323554067e-06, | |
| "loss": 1.6265, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.90826812482323, | |
| "grad_norm": 0.782841145992279, | |
| "learning_rate": 8.095371146502749e-06, | |
| "loss": 1.5097, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.9107821878633606, | |
| "grad_norm": 0.7583521604537964, | |
| "learning_rate": 8.07674396945143e-06, | |
| "loss": 1.6502, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.9132962509034914, | |
| "grad_norm": 0.7275763154029846, | |
| "learning_rate": 8.058116792400112e-06, | |
| "loss": 1.6559, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.9158103139436222, | |
| "grad_norm": 0.7714709043502808, | |
| "learning_rate": 8.039489615348794e-06, | |
| "loss": 1.656, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.9183243769837528, | |
| "grad_norm": 0.8266316652297974, | |
| "learning_rate": 8.020862438297477e-06, | |
| "loss": 1.6704, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.9208384400238836, | |
| "grad_norm": 0.701888918876648, | |
| "learning_rate": 8.002235261246159e-06, | |
| "loss": 1.5437, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.9233525030640144, | |
| "grad_norm": 0.7374228835105896, | |
| "learning_rate": 7.98360808419484e-06, | |
| "loss": 1.6042, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.925866566104145, | |
| "grad_norm": 0.7271724343299866, | |
| "learning_rate": 7.964980907143524e-06, | |
| "loss": 1.5868, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.9283806291442758, | |
| "grad_norm": 0.8421601057052612, | |
| "learning_rate": 7.946353730092206e-06, | |
| "loss": 1.7222, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.9308946921844066, | |
| "grad_norm": 0.7545250058174133, | |
| "learning_rate": 7.927726553040888e-06, | |
| "loss": 1.6347, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.9334087552245371, | |
| "grad_norm": 0.7299911379814148, | |
| "learning_rate": 7.90909937598957e-06, | |
| "loss": 1.5253, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.935922818264668, | |
| "grad_norm": 0.780196487903595, | |
| "learning_rate": 7.890472198938251e-06, | |
| "loss": 1.634, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.9384368813047987, | |
| "grad_norm": 0.76365065574646, | |
| "learning_rate": 7.871845021886933e-06, | |
| "loss": 1.7213, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.9409509443449293, | |
| "grad_norm": 0.7239665985107422, | |
| "learning_rate": 7.853217844835616e-06, | |
| "loss": 1.62, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.9434650073850603, | |
| "grad_norm": 0.8916459679603577, | |
| "learning_rate": 7.834590667784298e-06, | |
| "loss": 1.6566, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.945979070425191, | |
| "grad_norm": 0.7825499176979065, | |
| "learning_rate": 7.81596349073298e-06, | |
| "loss": 1.6032, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.9484931334653215, | |
| "grad_norm": 0.8076089024543762, | |
| "learning_rate": 7.797336313681663e-06, | |
| "loss": 1.7049, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.9510071965054525, | |
| "grad_norm": 0.7639163136482239, | |
| "learning_rate": 7.778709136630345e-06, | |
| "loss": 1.6503, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.953521259545583, | |
| "grad_norm": 0.7289741635322571, | |
| "learning_rate": 7.760081959579027e-06, | |
| "loss": 1.6348, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.9560353225857139, | |
| "grad_norm": 0.7479617595672607, | |
| "learning_rate": 7.741454782527708e-06, | |
| "loss": 1.6504, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.9585493856258447, | |
| "grad_norm": 0.7938514351844788, | |
| "learning_rate": 7.72282760547639e-06, | |
| "loss": 1.8777, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.9610634486659753, | |
| "grad_norm": 0.8100737929344177, | |
| "learning_rate": 7.704200428425073e-06, | |
| "loss": 1.6686, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.963577511706106, | |
| "grad_norm": 0.6920654773712158, | |
| "learning_rate": 7.685573251373755e-06, | |
| "loss": 1.5346, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.9660915747462369, | |
| "grad_norm": 0.8042290806770325, | |
| "learning_rate": 7.666946074322437e-06, | |
| "loss": 1.7133, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.9686056377863674, | |
| "grad_norm": 0.7247301340103149, | |
| "learning_rate": 7.648318897271119e-06, | |
| "loss": 1.5797, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.9711197008264982, | |
| "grad_norm": 0.7994764447212219, | |
| "learning_rate": 7.629691720219802e-06, | |
| "loss": 1.6288, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.973633763866629, | |
| "grad_norm": 0.702718198299408, | |
| "learning_rate": 7.611064543168484e-06, | |
| "loss": 1.5176, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.9761478269067596, | |
| "grad_norm": 0.7043364644050598, | |
| "learning_rate": 7.5924373661171656e-06, | |
| "loss": 1.5256, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.9786618899468904, | |
| "grad_norm": 0.7867445349693298, | |
| "learning_rate": 7.573810189065848e-06, | |
| "loss": 1.667, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.9811759529870212, | |
| "grad_norm": 0.7523759007453918, | |
| "learning_rate": 7.55518301201453e-06, | |
| "loss": 1.7404, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.9836900160271518, | |
| "grad_norm": 0.7788256406784058, | |
| "learning_rate": 7.536555834963212e-06, | |
| "loss": 1.678, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.9862040790672826, | |
| "grad_norm": 0.7953637838363647, | |
| "learning_rate": 7.517928657911893e-06, | |
| "loss": 1.7682, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.9887181421074134, | |
| "grad_norm": 0.7464196085929871, | |
| "learning_rate": 7.499301480860576e-06, | |
| "loss": 1.6396, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.991232205147544, | |
| "grad_norm": 0.7972080707550049, | |
| "learning_rate": 7.4806743038092585e-06, | |
| "loss": 1.6496, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.9937462681876748, | |
| "grad_norm": 0.7638291120529175, | |
| "learning_rate": 7.462047126757941e-06, | |
| "loss": 1.5729, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.9962603312278056, | |
| "grad_norm": 0.7536227703094482, | |
| "learning_rate": 7.443419949706623e-06, | |
| "loss": 1.6114, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.9987743942679361, | |
| "grad_norm": 0.7228095531463623, | |
| "learning_rate": 7.424792772655305e-06, | |
| "loss": 1.5656, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.9997800194839885, | |
| "eval_loss": 0.19501353800296783, | |
| "eval_runtime": 319.9353, | |
| "eval_samples_per_second": 34.085, | |
| "eval_steps_per_second": 4.263, | |
| "step": 7954 | |
| }, | |
| { | |
| "epoch": 2.001382734672072, | |
| "grad_norm": 0.805646538734436, | |
| "learning_rate": 7.406165595603987e-06, | |
| "loss": 1.5951, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 2.0038967977122026, | |
| "grad_norm": 0.7883779406547546, | |
| "learning_rate": 7.387538418552669e-06, | |
| "loss": 1.5973, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 2.006410860752333, | |
| "grad_norm": 0.7133209705352783, | |
| "learning_rate": 7.368911241501351e-06, | |
| "loss": 1.5338, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 2.0089249237924642, | |
| "grad_norm": 0.8252723217010498, | |
| "learning_rate": 7.350284064450033e-06, | |
| "loss": 1.6314, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 2.011438986832595, | |
| "grad_norm": 0.785007655620575, | |
| "learning_rate": 7.331656887398715e-06, | |
| "loss": 1.4627, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 2.0139530498727254, | |
| "grad_norm": 0.739536702632904, | |
| "learning_rate": 7.3130297103473975e-06, | |
| "loss": 1.5381, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 2.0164671129128564, | |
| "grad_norm": 0.8759775161743164, | |
| "learning_rate": 7.29440253329608e-06, | |
| "loss": 1.6206, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 2.018981175952987, | |
| "grad_norm": 0.8768475651741028, | |
| "learning_rate": 7.275775356244762e-06, | |
| "loss": 1.6203, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 2.0214952389931176, | |
| "grad_norm": 0.8431323766708374, | |
| "learning_rate": 7.257148179193444e-06, | |
| "loss": 1.6796, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 2.0240093020332486, | |
| "grad_norm": 0.8149091601371765, | |
| "learning_rate": 7.238521002142126e-06, | |
| "loss": 1.531, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 2.026523365073379, | |
| "grad_norm": 0.7990990877151489, | |
| "learning_rate": 7.219893825090808e-06, | |
| "loss": 1.4601, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 2.0290374281135097, | |
| "grad_norm": 0.83040851354599, | |
| "learning_rate": 7.20126664803949e-06, | |
| "loss": 1.5981, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 2.0315514911536408, | |
| "grad_norm": 0.7212802767753601, | |
| "learning_rate": 7.182639470988172e-06, | |
| "loss": 1.5876, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 2.0340655541937713, | |
| "grad_norm": 0.7899538278579712, | |
| "learning_rate": 7.164012293936854e-06, | |
| "loss": 1.4678, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 2.0365796172339024, | |
| "grad_norm": 0.7689089179039001, | |
| "learning_rate": 7.145385116885537e-06, | |
| "loss": 1.4632, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 2.039093680274033, | |
| "grad_norm": 0.7625756859779358, | |
| "learning_rate": 7.126757939834219e-06, | |
| "loss": 1.5567, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 2.0416077433141635, | |
| "grad_norm": 0.738569974899292, | |
| "learning_rate": 7.108130762782901e-06, | |
| "loss": 1.5618, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 2.0441218063542945, | |
| "grad_norm": 0.7236073017120361, | |
| "learning_rate": 7.0895035857315835e-06, | |
| "loss": 1.6021, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 2.046635869394425, | |
| "grad_norm": 0.8031951785087585, | |
| "learning_rate": 7.070876408680265e-06, | |
| "loss": 1.4925, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 2.0491499324345557, | |
| "grad_norm": 0.7895677089691162, | |
| "learning_rate": 7.052249231628947e-06, | |
| "loss": 1.5958, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 2.0516639954746867, | |
| "grad_norm": 0.7700218558311462, | |
| "learning_rate": 7.033622054577629e-06, | |
| "loss": 1.4987, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 2.0541780585148173, | |
| "grad_norm": 0.875299870967865, | |
| "learning_rate": 7.014994877526311e-06, | |
| "loss": 1.6025, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 2.056692121554948, | |
| "grad_norm": 0.7564578652381897, | |
| "learning_rate": 6.996367700474994e-06, | |
| "loss": 1.6196, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 2.059206184595079, | |
| "grad_norm": 0.7971473932266235, | |
| "learning_rate": 6.977740523423676e-06, | |
| "loss": 1.4542, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 2.0617202476352094, | |
| "grad_norm": 0.7703878879547119, | |
| "learning_rate": 6.959113346372358e-06, | |
| "loss": 1.5098, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 2.06423431067534, | |
| "grad_norm": 0.7669187784194946, | |
| "learning_rate": 6.94048616932104e-06, | |
| "loss": 1.5155, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 2.066748373715471, | |
| "grad_norm": 0.7265498638153076, | |
| "learning_rate": 6.9218589922697225e-06, | |
| "loss": 1.5258, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 2.0692624367556016, | |
| "grad_norm": 0.7942532300949097, | |
| "learning_rate": 6.903231815218404e-06, | |
| "loss": 1.4998, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 2.071776499795732, | |
| "grad_norm": 0.833183765411377, | |
| "learning_rate": 6.884604638167086e-06, | |
| "loss": 1.6009, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 2.074290562835863, | |
| "grad_norm": 0.7577141523361206, | |
| "learning_rate": 6.8659774611157685e-06, | |
| "loss": 1.6126, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.076804625875994, | |
| "grad_norm": 0.7549824118614197, | |
| "learning_rate": 6.84735028406445e-06, | |
| "loss": 1.5143, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 2.0793186889161244, | |
| "grad_norm": 0.794303834438324, | |
| "learning_rate": 6.828723107013133e-06, | |
| "loss": 1.5107, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 2.0818327519562554, | |
| "grad_norm": 0.7736561894416809, | |
| "learning_rate": 6.810095929961815e-06, | |
| "loss": 1.5271, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 2.084346814996386, | |
| "grad_norm": 0.8507485389709473, | |
| "learning_rate": 6.791468752910497e-06, | |
| "loss": 1.5842, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 2.086860878036517, | |
| "grad_norm": 0.7293105125427246, | |
| "learning_rate": 6.772841575859179e-06, | |
| "loss": 1.5556, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.0893749410766476, | |
| "grad_norm": 0.7482052445411682, | |
| "learning_rate": 6.7542143988078615e-06, | |
| "loss": 1.4987, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 2.091889004116778, | |
| "grad_norm": 0.882805347442627, | |
| "learning_rate": 6.735587221756543e-06, | |
| "loss": 1.6788, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 2.094403067156909, | |
| "grad_norm": 0.7152084708213806, | |
| "learning_rate": 6.716960044705225e-06, | |
| "loss": 1.5523, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 2.0969171301970397, | |
| "grad_norm": 0.7520148754119873, | |
| "learning_rate": 6.6983328676539075e-06, | |
| "loss": 1.6198, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 2.0994311932371703, | |
| "grad_norm": 0.7512083649635315, | |
| "learning_rate": 6.679705690602589e-06, | |
| "loss": 1.5683, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.1019452562773013, | |
| "grad_norm": 0.8368180990219116, | |
| "learning_rate": 6.661078513551273e-06, | |
| "loss": 1.5858, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 2.104459319317432, | |
| "grad_norm": 0.7326187491416931, | |
| "learning_rate": 6.6424513364999544e-06, | |
| "loss": 1.5679, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 2.1069733823575625, | |
| "grad_norm": 0.9386875629425049, | |
| "learning_rate": 6.623824159448636e-06, | |
| "loss": 1.6304, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 2.1094874453976935, | |
| "grad_norm": 0.7256460785865784, | |
| "learning_rate": 6.605196982397319e-06, | |
| "loss": 1.5173, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 2.112001508437824, | |
| "grad_norm": 0.8998124003410339, | |
| "learning_rate": 6.5865698053460005e-06, | |
| "loss": 1.5998, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.1145155714779547, | |
| "grad_norm": 0.7362743616104126, | |
| "learning_rate": 6.567942628294682e-06, | |
| "loss": 1.4875, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 2.1170296345180857, | |
| "grad_norm": 0.7807098627090454, | |
| "learning_rate": 6.549315451243364e-06, | |
| "loss": 1.6803, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 2.1195436975582163, | |
| "grad_norm": 0.7399052381515503, | |
| "learning_rate": 6.5306882741920465e-06, | |
| "loss": 1.5636, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 2.122057760598347, | |
| "grad_norm": 0.7732380032539368, | |
| "learning_rate": 6.512061097140728e-06, | |
| "loss": 1.5704, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 2.124571823638478, | |
| "grad_norm": 0.7884063720703125, | |
| "learning_rate": 6.493433920089412e-06, | |
| "loss": 1.6203, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.1270858866786084, | |
| "grad_norm": 0.7410045862197876, | |
| "learning_rate": 6.4748067430380934e-06, | |
| "loss": 1.4905, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 2.129599949718739, | |
| "grad_norm": 0.878178060054779, | |
| "learning_rate": 6.456179565986775e-06, | |
| "loss": 1.6736, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 2.13211401275887, | |
| "grad_norm": 0.7782987356185913, | |
| "learning_rate": 6.437552388935458e-06, | |
| "loss": 1.5561, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 2.1346280757990006, | |
| "grad_norm": 0.8027341365814209, | |
| "learning_rate": 6.4189252118841395e-06, | |
| "loss": 1.5602, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 2.137142138839131, | |
| "grad_norm": 0.8234791159629822, | |
| "learning_rate": 6.400298034832821e-06, | |
| "loss": 1.4999, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.139656201879262, | |
| "grad_norm": 0.8043009042739868, | |
| "learning_rate": 6.381670857781504e-06, | |
| "loss": 1.5256, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 2.1421702649193928, | |
| "grad_norm": 0.8206879496574402, | |
| "learning_rate": 6.3630436807301855e-06, | |
| "loss": 1.6631, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 2.1446843279595234, | |
| "grad_norm": 0.8060013651847839, | |
| "learning_rate": 6.344416503678868e-06, | |
| "loss": 1.6125, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 2.1471983909996544, | |
| "grad_norm": 0.739823043346405, | |
| "learning_rate": 6.325789326627551e-06, | |
| "loss": 1.5988, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 2.149712454039785, | |
| "grad_norm": 0.7395405769348145, | |
| "learning_rate": 6.3071621495762324e-06, | |
| "loss": 1.5949, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.152226517079916, | |
| "grad_norm": 0.7276797890663147, | |
| "learning_rate": 6.288534972524914e-06, | |
| "loss": 1.5325, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 2.1547405801200465, | |
| "grad_norm": 0.7939257025718689, | |
| "learning_rate": 6.269907795473597e-06, | |
| "loss": 1.6583, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 2.157254643160177, | |
| "grad_norm": 0.8217563033103943, | |
| "learning_rate": 6.2512806184222785e-06, | |
| "loss": 1.6188, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 2.159768706200308, | |
| "grad_norm": 0.791942298412323, | |
| "learning_rate": 6.23265344137096e-06, | |
| "loss": 1.5226, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 2.1622827692404387, | |
| "grad_norm": 0.7456660866737366, | |
| "learning_rate": 6.214026264319643e-06, | |
| "loss": 1.5503, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.1647968322805693, | |
| "grad_norm": 0.7705161571502686, | |
| "learning_rate": 6.1953990872683246e-06, | |
| "loss": 1.457, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 2.1673108953207003, | |
| "grad_norm": 0.7760891914367676, | |
| "learning_rate": 6.176771910217008e-06, | |
| "loss": 1.611, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 2.169824958360831, | |
| "grad_norm": 0.7206183075904846, | |
| "learning_rate": 6.15814473316569e-06, | |
| "loss": 1.5449, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 2.1723390214009615, | |
| "grad_norm": 0.8670841455459595, | |
| "learning_rate": 6.1395175561143715e-06, | |
| "loss": 1.5511, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 2.1748530844410925, | |
| "grad_norm": 0.7500833868980408, | |
| "learning_rate": 6.120890379063053e-06, | |
| "loss": 1.5733, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.177367147481223, | |
| "grad_norm": 0.8225270509719849, | |
| "learning_rate": 6.102263202011736e-06, | |
| "loss": 1.4605, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.1798812105213536, | |
| "grad_norm": 0.8520214557647705, | |
| "learning_rate": 6.0836360249604175e-06, | |
| "loss": 1.4978, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.1823952735614847, | |
| "grad_norm": 0.7467817664146423, | |
| "learning_rate": 6.065008847909099e-06, | |
| "loss": 1.6104, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.1849093366016152, | |
| "grad_norm": 0.895089864730835, | |
| "learning_rate": 6.046381670857782e-06, | |
| "loss": 1.5806, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.187423399641746, | |
| "grad_norm": 0.836562991142273, | |
| "learning_rate": 6.0277544938064636e-06, | |
| "loss": 1.5143, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.189937462681877, | |
| "grad_norm": 0.7424719929695129, | |
| "learning_rate": 6.009127316755147e-06, | |
| "loss": 1.5996, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.1924515257220074, | |
| "grad_norm": 0.7251309752464294, | |
| "learning_rate": 5.990500139703829e-06, | |
| "loss": 1.4941, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.1949655887621384, | |
| "grad_norm": 0.7819547653198242, | |
| "learning_rate": 5.9718729626525105e-06, | |
| "loss": 1.5456, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.197479651802269, | |
| "grad_norm": 0.7376725077629089, | |
| "learning_rate": 5.953245785601193e-06, | |
| "loss": 1.5191, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.1999937148423996, | |
| "grad_norm": 0.7918819785118103, | |
| "learning_rate": 5.934618608549875e-06, | |
| "loss": 1.5255, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.2025077778825306, | |
| "grad_norm": 0.8430167436599731, | |
| "learning_rate": 5.9159914314985565e-06, | |
| "loss": 1.5819, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.205021840922661, | |
| "grad_norm": 0.786658525466919, | |
| "learning_rate": 5.897364254447239e-06, | |
| "loss": 1.626, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.2075359039627918, | |
| "grad_norm": 0.8276156187057495, | |
| "learning_rate": 5.878737077395921e-06, | |
| "loss": 1.6424, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.2100499670029228, | |
| "grad_norm": 0.8273292779922485, | |
| "learning_rate": 5.8601099003446034e-06, | |
| "loss": 1.5752, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.2125640300430534, | |
| "grad_norm": 0.7365144491195679, | |
| "learning_rate": 5.841482723293286e-06, | |
| "loss": 1.5147, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.215078093083184, | |
| "grad_norm": 0.7272551655769348, | |
| "learning_rate": 5.822855546241968e-06, | |
| "loss": 1.5206, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.217592156123315, | |
| "grad_norm": 0.8018143177032471, | |
| "learning_rate": 5.8042283691906495e-06, | |
| "loss": 1.5799, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.2201062191634455, | |
| "grad_norm": 0.7773793339729309, | |
| "learning_rate": 5.785601192139332e-06, | |
| "loss": 1.5942, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.222620282203576, | |
| "grad_norm": 0.8667277097702026, | |
| "learning_rate": 5.766974015088014e-06, | |
| "loss": 1.618, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.225134345243707, | |
| "grad_norm": 0.709841787815094, | |
| "learning_rate": 5.7483468380366955e-06, | |
| "loss": 1.6103, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.2276484082838377, | |
| "grad_norm": 0.7400714755058289, | |
| "learning_rate": 5.729719660985378e-06, | |
| "loss": 1.4501, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.2301624713239683, | |
| "grad_norm": 0.8771334886550903, | |
| "learning_rate": 5.71109248393406e-06, | |
| "loss": 1.6628, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.2326765343640993, | |
| "grad_norm": 0.7763418555259705, | |
| "learning_rate": 5.692465306882743e-06, | |
| "loss": 1.4822, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.23519059740423, | |
| "grad_norm": 0.7555778622627258, | |
| "learning_rate": 5.673838129831425e-06, | |
| "loss": 1.5474, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.2377046604443604, | |
| "grad_norm": 0.8261988759040833, | |
| "learning_rate": 5.655210952780107e-06, | |
| "loss": 1.5311, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.2402187234844915, | |
| "grad_norm": 0.8526386022567749, | |
| "learning_rate": 5.6365837757287885e-06, | |
| "loss": 1.6839, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.242732786524622, | |
| "grad_norm": 0.7454199194908142, | |
| "learning_rate": 5.617956598677471e-06, | |
| "loss": 1.5861, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.2452468495647526, | |
| "grad_norm": 0.9659509658813477, | |
| "learning_rate": 5.599329421626153e-06, | |
| "loss": 1.7207, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.2477609126048836, | |
| "grad_norm": 0.7625419497489929, | |
| "learning_rate": 5.5807022445748345e-06, | |
| "loss": 1.5303, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.250274975645014, | |
| "grad_norm": 0.7785441875457764, | |
| "learning_rate": 5.562075067523517e-06, | |
| "loss": 1.6222, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.252789038685145, | |
| "grad_norm": 0.7556378841400146, | |
| "learning_rate": 5.543447890472199e-06, | |
| "loss": 1.6159, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.255303101725276, | |
| "grad_norm": 0.8266562819480896, | |
| "learning_rate": 5.524820713420882e-06, | |
| "loss": 1.5089, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.2578171647654064, | |
| "grad_norm": 0.7982493042945862, | |
| "learning_rate": 5.506193536369564e-06, | |
| "loss": 1.4979, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.2603312278055374, | |
| "grad_norm": 0.7474580407142639, | |
| "learning_rate": 5.487566359318246e-06, | |
| "loss": 1.5326, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.262845290845668, | |
| "grad_norm": 0.7379794716835022, | |
| "learning_rate": 5.468939182266928e-06, | |
| "loss": 1.5441, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.2653593538857986, | |
| "grad_norm": 0.8040394186973572, | |
| "learning_rate": 5.45031200521561e-06, | |
| "loss": 1.5857, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.2678734169259296, | |
| "grad_norm": 0.76542729139328, | |
| "learning_rate": 5.431684828164292e-06, | |
| "loss": 1.4677, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.27038747996606, | |
| "grad_norm": 0.7528825402259827, | |
| "learning_rate": 5.413057651112974e-06, | |
| "loss": 1.4887, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.2729015430061907, | |
| "grad_norm": 0.7317490577697754, | |
| "learning_rate": 5.394430474061656e-06, | |
| "loss": 1.5119, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.2754156060463218, | |
| "grad_norm": 0.8085633516311646, | |
| "learning_rate": 5.375803297010338e-06, | |
| "loss": 1.5988, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.2779296690864523, | |
| "grad_norm": 0.7670899033546448, | |
| "learning_rate": 5.357176119959021e-06, | |
| "loss": 1.5601, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.280443732126583, | |
| "grad_norm": 0.8052598834037781, | |
| "learning_rate": 5.338548942907703e-06, | |
| "loss": 1.6508, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.282957795166714, | |
| "grad_norm": 0.8178596496582031, | |
| "learning_rate": 5.319921765856385e-06, | |
| "loss": 1.4853, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.2854718582068445, | |
| "grad_norm": 0.8174967169761658, | |
| "learning_rate": 5.301294588805067e-06, | |
| "loss": 1.5248, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.287985921246975, | |
| "grad_norm": 0.7032793760299683, | |
| "learning_rate": 5.282667411753749e-06, | |
| "loss": 1.5802, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.290499984287106, | |
| "grad_norm": 0.907918393611908, | |
| "learning_rate": 5.264040234702431e-06, | |
| "loss": 1.6656, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.2930140473272367, | |
| "grad_norm": 0.8014883995056152, | |
| "learning_rate": 5.245413057651113e-06, | |
| "loss": 1.5341, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.2955281103673677, | |
| "grad_norm": 0.9627625346183777, | |
| "learning_rate": 5.226785880599795e-06, | |
| "loss": 1.4921, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.2980421734074983, | |
| "grad_norm": 0.7052360773086548, | |
| "learning_rate": 5.208158703548479e-06, | |
| "loss": 1.4502, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.300556236447629, | |
| "grad_norm": 0.8308632969856262, | |
| "learning_rate": 5.18953152649716e-06, | |
| "loss": 1.4937, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.30307029948776, | |
| "grad_norm": 0.8909317255020142, | |
| "learning_rate": 5.170904349445842e-06, | |
| "loss": 1.5045, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.3055843625278905, | |
| "grad_norm": 0.7417542934417725, | |
| "learning_rate": 5.152277172394524e-06, | |
| "loss": 1.5149, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.308098425568021, | |
| "grad_norm": 0.7549000978469849, | |
| "learning_rate": 5.133649995343206e-06, | |
| "loss": 1.5364, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.310612488608152, | |
| "grad_norm": 0.7741384506225586, | |
| "learning_rate": 5.115022818291888e-06, | |
| "loss": 1.6547, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.3131265516482826, | |
| "grad_norm": 0.8141944408416748, | |
| "learning_rate": 5.09639564124057e-06, | |
| "loss": 1.6784, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.315640614688413, | |
| "grad_norm": 0.8661395311355591, | |
| "learning_rate": 5.0777684641892524e-06, | |
| "loss": 1.5909, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.318154677728544, | |
| "grad_norm": 0.7897353172302246, | |
| "learning_rate": 5.059141287137934e-06, | |
| "loss": 1.6293, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.320668740768675, | |
| "grad_norm": 0.799447774887085, | |
| "learning_rate": 5.040514110086618e-06, | |
| "loss": 1.4984, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.3231828038088054, | |
| "grad_norm": 0.8956209421157837, | |
| "learning_rate": 5.021886933035299e-06, | |
| "loss": 1.7824, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.3256968668489364, | |
| "grad_norm": 0.842034637928009, | |
| "learning_rate": 5.003259755983981e-06, | |
| "loss": 1.5631, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.328210929889067, | |
| "grad_norm": 0.7195725440979004, | |
| "learning_rate": 4.984632578932664e-06, | |
| "loss": 1.5013, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.3307249929291975, | |
| "grad_norm": 0.8055155873298645, | |
| "learning_rate": 4.966005401881345e-06, | |
| "loss": 1.4574, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.3332390559693286, | |
| "grad_norm": 0.8267238736152649, | |
| "learning_rate": 4.947378224830027e-06, | |
| "loss": 1.4338, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.335753119009459, | |
| "grad_norm": 0.7792879939079285, | |
| "learning_rate": 4.928751047778709e-06, | |
| "loss": 1.5562, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.3382671820495897, | |
| "grad_norm": 0.7325311899185181, | |
| "learning_rate": 4.9101238707273914e-06, | |
| "loss": 1.4769, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.3407812450897207, | |
| "grad_norm": 0.6964813470840454, | |
| "learning_rate": 4.891496693676074e-06, | |
| "loss": 1.5951, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.3432953081298513, | |
| "grad_norm": 0.7390202283859253, | |
| "learning_rate": 4.872869516624756e-06, | |
| "loss": 1.4909, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.345809371169982, | |
| "grad_norm": 0.7670665979385376, | |
| "learning_rate": 4.8542423395734375e-06, | |
| "loss": 1.5965, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.348323434210113, | |
| "grad_norm": 0.7817525267601013, | |
| "learning_rate": 4.83561516252212e-06, | |
| "loss": 1.6091, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.3508374972502435, | |
| "grad_norm": 0.8648204207420349, | |
| "learning_rate": 4.816987985470803e-06, | |
| "loss": 1.6909, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.353351560290374, | |
| "grad_norm": 0.8654007315635681, | |
| "learning_rate": 4.798360808419484e-06, | |
| "loss": 1.6286, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.355865623330505, | |
| "grad_norm": 0.7655432224273682, | |
| "learning_rate": 4.779733631368166e-06, | |
| "loss": 1.5377, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 2.3583796863706357, | |
| "grad_norm": 0.755993127822876, | |
| "learning_rate": 4.761106454316849e-06, | |
| "loss": 1.5498, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 2.3608937494107662, | |
| "grad_norm": 0.7272914052009583, | |
| "learning_rate": 4.742479277265531e-06, | |
| "loss": 1.5418, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 2.3634078124508973, | |
| "grad_norm": 0.8197072148323059, | |
| "learning_rate": 4.723852100214213e-06, | |
| "loss": 1.5826, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.365921875491028, | |
| "grad_norm": 0.6918550729751587, | |
| "learning_rate": 4.705224923162895e-06, | |
| "loss": 1.6385, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 2.368435938531159, | |
| "grad_norm": 0.7512193322181702, | |
| "learning_rate": 4.6865977461115765e-06, | |
| "loss": 1.4884, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 2.3709500015712894, | |
| "grad_norm": 0.7223237752914429, | |
| "learning_rate": 4.667970569060259e-06, | |
| "loss": 1.5168, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 2.37346406461142, | |
| "grad_norm": 0.8869194984436035, | |
| "learning_rate": 4.649343392008942e-06, | |
| "loss": 1.5953, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 2.375978127651551, | |
| "grad_norm": 0.8479157090187073, | |
| "learning_rate": 4.630716214957623e-06, | |
| "loss": 1.6369, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.3784921906916816, | |
| "grad_norm": 0.745171844959259, | |
| "learning_rate": 4.612089037906305e-06, | |
| "loss": 1.5874, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 2.381006253731812, | |
| "grad_norm": 0.8108686804771423, | |
| "learning_rate": 4.593461860854988e-06, | |
| "loss": 1.5795, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 2.383520316771943, | |
| "grad_norm": 0.8226962089538574, | |
| "learning_rate": 4.57483468380367e-06, | |
| "loss": 1.5899, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 2.386034379812074, | |
| "grad_norm": 0.7909881472587585, | |
| "learning_rate": 4.556207506752352e-06, | |
| "loss": 1.5263, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 2.3885484428522044, | |
| "grad_norm": 0.7707247138023376, | |
| "learning_rate": 4.537580329701034e-06, | |
| "loss": 1.5485, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.3910625058923354, | |
| "grad_norm": 0.7700558304786682, | |
| "learning_rate": 4.518953152649716e-06, | |
| "loss": 1.5181, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 2.393576568932466, | |
| "grad_norm": 0.752479612827301, | |
| "learning_rate": 4.500325975598399e-06, | |
| "loss": 1.5693, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 2.3960906319725965, | |
| "grad_norm": 0.8538258671760559, | |
| "learning_rate": 4.481698798547081e-06, | |
| "loss": 1.5343, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 2.3986046950127276, | |
| "grad_norm": 0.8086642026901245, | |
| "learning_rate": 4.463071621495762e-06, | |
| "loss": 1.4721, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 2.401118758052858, | |
| "grad_norm": 0.7504218220710754, | |
| "learning_rate": 4.444444444444444e-06, | |
| "loss": 1.519, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.403632821092989, | |
| "grad_norm": 0.8101189732551575, | |
| "learning_rate": 4.425817267393127e-06, | |
| "loss": 1.5185, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 2.4061468841331197, | |
| "grad_norm": 0.7884894609451294, | |
| "learning_rate": 4.407190090341809e-06, | |
| "loss": 1.5088, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 2.4086609471732503, | |
| "grad_norm": 0.779296338558197, | |
| "learning_rate": 4.388562913290491e-06, | |
| "loss": 1.5759, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 2.4111750102133813, | |
| "grad_norm": 0.8575226068496704, | |
| "learning_rate": 4.369935736239173e-06, | |
| "loss": 1.5517, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 2.413689073253512, | |
| "grad_norm": 0.7951074838638306, | |
| "learning_rate": 4.351308559187855e-06, | |
| "loss": 1.404, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.4162031362936425, | |
| "grad_norm": 0.8110064268112183, | |
| "learning_rate": 4.332681382136538e-06, | |
| "loss": 1.5678, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 2.4187171993337735, | |
| "grad_norm": 0.7472586035728455, | |
| "learning_rate": 4.31405420508522e-06, | |
| "loss": 1.5548, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 2.421231262373904, | |
| "grad_norm": 0.8410158157348633, | |
| "learning_rate": 4.2954270280339014e-06, | |
| "loss": 1.5364, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 2.4237453254140346, | |
| "grad_norm": 0.7901251316070557, | |
| "learning_rate": 4.276799850982584e-06, | |
| "loss": 1.5059, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 2.4262593884541657, | |
| "grad_norm": 0.7677310705184937, | |
| "learning_rate": 4.258172673931267e-06, | |
| "loss": 1.5676, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.4287734514942962, | |
| "grad_norm": 0.7564699649810791, | |
| "learning_rate": 4.239545496879948e-06, | |
| "loss": 1.5657, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 2.431287514534427, | |
| "grad_norm": 0.8937082290649414, | |
| "learning_rate": 4.22091831982863e-06, | |
| "loss": 1.5665, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 2.433801577574558, | |
| "grad_norm": 0.7941327691078186, | |
| "learning_rate": 4.202291142777312e-06, | |
| "loss": 1.4843, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 2.4363156406146884, | |
| "grad_norm": 0.770139753818512, | |
| "learning_rate": 4.183663965725994e-06, | |
| "loss": 1.6048, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 2.438829703654819, | |
| "grad_norm": 0.8108421564102173, | |
| "learning_rate": 4.165036788674677e-06, | |
| "loss": 1.6141, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.44134376669495, | |
| "grad_norm": 0.8141824007034302, | |
| "learning_rate": 4.146409611623359e-06, | |
| "loss": 1.5963, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 2.4438578297350806, | |
| "grad_norm": 0.7764785289764404, | |
| "learning_rate": 4.1277824345720404e-06, | |
| "loss": 1.5183, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 2.446371892775211, | |
| "grad_norm": 0.797619104385376, | |
| "learning_rate": 4.109155257520723e-06, | |
| "loss": 1.5164, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 2.448885955815342, | |
| "grad_norm": 0.8023549318313599, | |
| "learning_rate": 4.090528080469406e-06, | |
| "loss": 1.6178, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 2.4514000188554728, | |
| "grad_norm": 0.820419430732727, | |
| "learning_rate": 4.071900903418087e-06, | |
| "loss": 1.5423, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.4539140818956033, | |
| "grad_norm": 0.7355338335037231, | |
| "learning_rate": 4.053273726366769e-06, | |
| "loss": 1.3907, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 2.4564281449357344, | |
| "grad_norm": 0.8334673643112183, | |
| "learning_rate": 4.034646549315452e-06, | |
| "loss": 1.5937, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 2.458942207975865, | |
| "grad_norm": 0.8310852646827698, | |
| "learning_rate": 4.016019372264134e-06, | |
| "loss": 1.6924, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 2.4614562710159955, | |
| "grad_norm": 0.7548458576202393, | |
| "learning_rate": 3.997392195212816e-06, | |
| "loss": 1.5909, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 2.4639703340561265, | |
| "grad_norm": 0.8151854872703552, | |
| "learning_rate": 3.978765018161498e-06, | |
| "loss": 1.5397, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.466484397096257, | |
| "grad_norm": 0.8655654788017273, | |
| "learning_rate": 3.9601378411101794e-06, | |
| "loss": 1.6054, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 2.4689984601363877, | |
| "grad_norm": 0.7910979986190796, | |
| "learning_rate": 3.941510664058862e-06, | |
| "loss": 1.6082, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 2.4715125231765187, | |
| "grad_norm": 0.863750159740448, | |
| "learning_rate": 3.922883487007545e-06, | |
| "loss": 1.5006, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 2.4740265862166493, | |
| "grad_norm": 0.8041654229164124, | |
| "learning_rate": 3.904256309956226e-06, | |
| "loss": 1.5901, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 2.4765406492567803, | |
| "grad_norm": 0.8277151584625244, | |
| "learning_rate": 3.885629132904908e-06, | |
| "loss": 1.5503, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.479054712296911, | |
| "grad_norm": 0.8127644658088684, | |
| "learning_rate": 3.867001955853591e-06, | |
| "loss": 1.5212, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 2.4815687753370415, | |
| "grad_norm": 0.7676658630371094, | |
| "learning_rate": 3.848374778802273e-06, | |
| "loss": 1.5616, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 2.4840828383771725, | |
| "grad_norm": 0.7652860879898071, | |
| "learning_rate": 3.829747601750955e-06, | |
| "loss": 1.4539, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 2.486596901417303, | |
| "grad_norm": 0.9264296293258667, | |
| "learning_rate": 3.811120424699637e-06, | |
| "loss": 1.6665, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 2.4891109644574336, | |
| "grad_norm": 0.7928087711334229, | |
| "learning_rate": 3.792493247648319e-06, | |
| "loss": 1.5913, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.4916250274975646, | |
| "grad_norm": 0.7933433651924133, | |
| "learning_rate": 3.7738660705970015e-06, | |
| "loss": 1.548, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 2.4941390905376952, | |
| "grad_norm": 0.8464928865432739, | |
| "learning_rate": 3.7552388935456836e-06, | |
| "loss": 1.5751, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 2.496653153577826, | |
| "grad_norm": 0.7906856536865234, | |
| "learning_rate": 3.7366117164943654e-06, | |
| "loss": 1.6283, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 2.499167216617957, | |
| "grad_norm": 0.8038718104362488, | |
| "learning_rate": 3.7179845394430475e-06, | |
| "loss": 1.6769, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 2.5016812796580874, | |
| "grad_norm": 0.8039671778678894, | |
| "learning_rate": 3.69935736239173e-06, | |
| "loss": 1.4178, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.5041953426982184, | |
| "grad_norm": 0.8547869324684143, | |
| "learning_rate": 3.680730185340412e-06, | |
| "loss": 1.5456, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 2.506709405738349, | |
| "grad_norm": 0.7627451419830322, | |
| "learning_rate": 3.662103008289094e-06, | |
| "loss": 1.5021, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 2.5092234687784796, | |
| "grad_norm": 0.8250429630279541, | |
| "learning_rate": 3.643475831237776e-06, | |
| "loss": 1.5275, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 2.5117375318186106, | |
| "grad_norm": 0.8037662506103516, | |
| "learning_rate": 3.6248486541864587e-06, | |
| "loss": 1.5395, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 2.514251594858741, | |
| "grad_norm": 0.7840210199356079, | |
| "learning_rate": 3.6062214771351405e-06, | |
| "loss": 1.5188, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.5167656578988717, | |
| "grad_norm": 0.7909175753593445, | |
| "learning_rate": 3.5875943000838226e-06, | |
| "loss": 1.4845, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 2.5192797209390028, | |
| "grad_norm": 0.8075452446937561, | |
| "learning_rate": 3.568967123032505e-06, | |
| "loss": 1.5204, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 2.5217937839791333, | |
| "grad_norm": 0.7337312698364258, | |
| "learning_rate": 3.5503399459811865e-06, | |
| "loss": 1.4357, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 2.524307847019264, | |
| "grad_norm": 0.8625326156616211, | |
| "learning_rate": 3.531712768929869e-06, | |
| "loss": 1.4941, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 2.526821910059395, | |
| "grad_norm": 0.7863226532936096, | |
| "learning_rate": 3.5130855918785513e-06, | |
| "loss": 1.5326, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 2.5293359730995255, | |
| "grad_norm": 0.7965732216835022, | |
| "learning_rate": 3.494458414827233e-06, | |
| "loss": 1.5509, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 2.531850036139656, | |
| "grad_norm": 0.7403382658958435, | |
| "learning_rate": 3.475831237775915e-06, | |
| "loss": 1.5435, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 2.534364099179787, | |
| "grad_norm": 0.7773928046226501, | |
| "learning_rate": 3.4572040607245977e-06, | |
| "loss": 1.5095, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 2.5368781622199177, | |
| "grad_norm": 0.830807626247406, | |
| "learning_rate": 3.4385768836732795e-06, | |
| "loss": 1.5964, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 2.5393922252600483, | |
| "grad_norm": 0.7817598581314087, | |
| "learning_rate": 3.4199497066219616e-06, | |
| "loss": 1.5955, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 2.5419062883001793, | |
| "grad_norm": 0.8067037463188171, | |
| "learning_rate": 3.401322529570644e-06, | |
| "loss": 1.5766, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 2.54442035134031, | |
| "grad_norm": 0.8080165982246399, | |
| "learning_rate": 3.3826953525193264e-06, | |
| "loss": 1.5293, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 2.5469344143804404, | |
| "grad_norm": 0.8234786987304688, | |
| "learning_rate": 3.364068175468008e-06, | |
| "loss": 1.5426, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 2.5494484774205715, | |
| "grad_norm": 0.7958806157112122, | |
| "learning_rate": 3.3454409984166903e-06, | |
| "loss": 1.5332, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 2.551962540460702, | |
| "grad_norm": 0.820973813533783, | |
| "learning_rate": 3.3268138213653724e-06, | |
| "loss": 1.7311, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 2.5544766035008326, | |
| "grad_norm": 0.7028027176856995, | |
| "learning_rate": 3.308186644314054e-06, | |
| "loss": 1.5361, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 2.5569906665409636, | |
| "grad_norm": 0.8628289103507996, | |
| "learning_rate": 3.2895594672627368e-06, | |
| "loss": 1.6191, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 2.559504729581094, | |
| "grad_norm": 0.7829732298851013, | |
| "learning_rate": 3.270932290211419e-06, | |
| "loss": 1.6002, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 2.562018792621225, | |
| "grad_norm": 0.7660000920295715, | |
| "learning_rate": 3.2523051131601007e-06, | |
| "loss": 1.4104, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 2.564532855661356, | |
| "grad_norm": 0.7466659545898438, | |
| "learning_rate": 3.233677936108783e-06, | |
| "loss": 1.8376, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 2.5670469187014864, | |
| "grad_norm": 0.7803143858909607, | |
| "learning_rate": 3.2150507590574654e-06, | |
| "loss": 1.5806, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 2.569560981741617, | |
| "grad_norm": 0.7472306489944458, | |
| "learning_rate": 3.196423582006147e-06, | |
| "loss": 1.6114, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 2.572075044781748, | |
| "grad_norm": 0.7997450828552246, | |
| "learning_rate": 3.1777964049548293e-06, | |
| "loss": 1.5325, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 2.5745891078218786, | |
| "grad_norm": 0.7580299973487854, | |
| "learning_rate": 3.1591692279035115e-06, | |
| "loss": 1.4993, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 2.577103170862009, | |
| "grad_norm": 0.813434898853302, | |
| "learning_rate": 3.140542050852194e-06, | |
| "loss": 1.448, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 2.57961723390214, | |
| "grad_norm": 0.8723161816596985, | |
| "learning_rate": 3.1219148738008758e-06, | |
| "loss": 1.5736, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 2.5821312969422707, | |
| "grad_norm": 0.7978937029838562, | |
| "learning_rate": 3.103287696749558e-06, | |
| "loss": 1.5218, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 2.5846453599824013, | |
| "grad_norm": 0.8265032768249512, | |
| "learning_rate": 3.0846605196982397e-06, | |
| "loss": 1.5379, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 2.5871594230225323, | |
| "grad_norm": 0.8026888370513916, | |
| "learning_rate": 3.066033342646922e-06, | |
| "loss": 1.6105, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 2.589673486062663, | |
| "grad_norm": 0.7922202944755554, | |
| "learning_rate": 3.0474061655956044e-06, | |
| "loss": 1.6819, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 2.5921875491027935, | |
| "grad_norm": 0.737577497959137, | |
| "learning_rate": 3.0287789885442866e-06, | |
| "loss": 1.6383, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 2.5947016121429245, | |
| "grad_norm": 0.7946397662162781, | |
| "learning_rate": 3.0101518114929683e-06, | |
| "loss": 1.5465, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 2.597215675183055, | |
| "grad_norm": 0.7973936200141907, | |
| "learning_rate": 2.9915246344416505e-06, | |
| "loss": 1.5287, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 2.599729738223186, | |
| "grad_norm": 0.7663348317146301, | |
| "learning_rate": 2.972897457390333e-06, | |
| "loss": 1.4991, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 2.6022438012633167, | |
| "grad_norm": 0.8832723498344421, | |
| "learning_rate": 2.9542702803390148e-06, | |
| "loss": 1.621, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 2.6047578643034472, | |
| "grad_norm": 0.7298182845115662, | |
| "learning_rate": 2.935643103287697e-06, | |
| "loss": 1.4962, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 2.6072719273435783, | |
| "grad_norm": 0.7656169533729553, | |
| "learning_rate": 2.917015926236379e-06, | |
| "loss": 1.4906, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 2.609785990383709, | |
| "grad_norm": 0.7603759765625, | |
| "learning_rate": 2.898388749185061e-06, | |
| "loss": 1.4963, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 2.61230005342384, | |
| "grad_norm": 0.8271380066871643, | |
| "learning_rate": 2.8797615721337434e-06, | |
| "loss": 1.5281, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 2.6148141164639704, | |
| "grad_norm": 0.7871230244636536, | |
| "learning_rate": 2.8611343950824256e-06, | |
| "loss": 1.7273, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 2.617328179504101, | |
| "grad_norm": 0.7614349126815796, | |
| "learning_rate": 2.8425072180311073e-06, | |
| "loss": 1.5016, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 2.619842242544232, | |
| "grad_norm": 0.8483031988143921, | |
| "learning_rate": 2.8238800409797895e-06, | |
| "loss": 1.5846, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 2.6223563055843626, | |
| "grad_norm": 0.7907611727714539, | |
| "learning_rate": 2.805252863928472e-06, | |
| "loss": 1.5693, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 2.624870368624493, | |
| "grad_norm": 0.7884009480476379, | |
| "learning_rate": 2.7866256868771542e-06, | |
| "loss": 1.5513, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 2.627384431664624, | |
| "grad_norm": 0.7602415084838867, | |
| "learning_rate": 2.767998509825836e-06, | |
| "loss": 1.527, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 2.629898494704755, | |
| "grad_norm": 0.8889303207397461, | |
| "learning_rate": 2.749371332774518e-06, | |
| "loss": 1.6174, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 2.6324125577448854, | |
| "grad_norm": 0.8100164532661438, | |
| "learning_rate": 2.7307441557232007e-06, | |
| "loss": 1.4436, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 2.6349266207850164, | |
| "grad_norm": 0.7836177945137024, | |
| "learning_rate": 2.7121169786718824e-06, | |
| "loss": 1.5595, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 2.637440683825147, | |
| "grad_norm": 0.8696681261062622, | |
| "learning_rate": 2.6934898016205646e-06, | |
| "loss": 1.5472, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 2.6399547468652775, | |
| "grad_norm": 0.7463527321815491, | |
| "learning_rate": 2.6748626245692467e-06, | |
| "loss": 1.5281, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 2.6424688099054086, | |
| "grad_norm": 0.8131633996963501, | |
| "learning_rate": 2.6562354475179285e-06, | |
| "loss": 1.4763, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 2.644982872945539, | |
| "grad_norm": 0.8555880784988403, | |
| "learning_rate": 2.637608270466611e-06, | |
| "loss": 1.5852, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 2.6474969359856697, | |
| "grad_norm": 0.8033715486526489, | |
| "learning_rate": 2.6189810934152932e-06, | |
| "loss": 1.5112, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 2.6500109990258007, | |
| "grad_norm": 0.7655378580093384, | |
| "learning_rate": 2.600353916363975e-06, | |
| "loss": 1.4734, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 2.6525250620659313, | |
| "grad_norm": 0.8321971297264099, | |
| "learning_rate": 2.581726739312657e-06, | |
| "loss": 1.5646, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 2.655039125106062, | |
| "grad_norm": 0.875377893447876, | |
| "learning_rate": 2.5630995622613397e-06, | |
| "loss": 1.5337, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 2.657553188146193, | |
| "grad_norm": 0.8315068483352661, | |
| "learning_rate": 2.544472385210022e-06, | |
| "loss": 1.5367, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 2.6600672511863235, | |
| "grad_norm": 0.7578769326210022, | |
| "learning_rate": 2.5258452081587036e-06, | |
| "loss": 1.4552, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 2.662581314226454, | |
| "grad_norm": 0.8909817934036255, | |
| "learning_rate": 2.5072180311073858e-06, | |
| "loss": 1.5756, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 2.665095377266585, | |
| "grad_norm": 0.8263925313949585, | |
| "learning_rate": 2.488590854056068e-06, | |
| "loss": 1.5541, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 2.6676094403067157, | |
| "grad_norm": 0.7844119071960449, | |
| "learning_rate": 2.46996367700475e-06, | |
| "loss": 1.6211, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 2.6701235033468462, | |
| "grad_norm": 0.8034044504165649, | |
| "learning_rate": 2.4513364999534322e-06, | |
| "loss": 1.5567, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 2.6726375663869772, | |
| "grad_norm": 0.7900787591934204, | |
| "learning_rate": 2.4327093229021144e-06, | |
| "loss": 1.5479, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 2.675151629427108, | |
| "grad_norm": 0.7971326112747192, | |
| "learning_rate": 2.4140821458507966e-06, | |
| "loss": 1.6068, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 2.6776656924672384, | |
| "grad_norm": 0.7867228388786316, | |
| "learning_rate": 2.3954549687994787e-06, | |
| "loss": 1.5397, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 2.6801797555073694, | |
| "grad_norm": 0.7369357347488403, | |
| "learning_rate": 2.376827791748161e-06, | |
| "loss": 1.6437, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 2.6826938185475, | |
| "grad_norm": 0.8075833320617676, | |
| "learning_rate": 2.3582006146968426e-06, | |
| "loss": 1.4811, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 2.6852078815876306, | |
| "grad_norm": 0.765251636505127, | |
| "learning_rate": 2.339573437645525e-06, | |
| "loss": 1.5732, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 2.6877219446277616, | |
| "grad_norm": 0.774592399597168, | |
| "learning_rate": 2.320946260594207e-06, | |
| "loss": 1.5642, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 2.690236007667892, | |
| "grad_norm": 0.7817278504371643, | |
| "learning_rate": 2.3023190835428895e-06, | |
| "loss": 1.4891, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 2.6927500707080227, | |
| "grad_norm": 0.8499379754066467, | |
| "learning_rate": 2.2836919064915712e-06, | |
| "loss": 1.4755, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 2.6952641337481538, | |
| "grad_norm": 0.9244359731674194, | |
| "learning_rate": 2.265064729440254e-06, | |
| "loss": 1.5124, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 2.6977781967882843, | |
| "grad_norm": 0.8041820526123047, | |
| "learning_rate": 2.2464375523889356e-06, | |
| "loss": 1.4596, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 2.700292259828415, | |
| "grad_norm": 0.8049500584602356, | |
| "learning_rate": 2.2278103753376177e-06, | |
| "loss": 1.5357, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 2.702806322868546, | |
| "grad_norm": 0.7844170331954956, | |
| "learning_rate": 2.2091831982863e-06, | |
| "loss": 1.6723, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 2.7053203859086765, | |
| "grad_norm": 0.7466855645179749, | |
| "learning_rate": 2.190556021234982e-06, | |
| "loss": 1.5109, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 2.7078344489488075, | |
| "grad_norm": 0.8488561511039734, | |
| "learning_rate": 2.171928844183664e-06, | |
| "loss": 1.546, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 2.710348511988938, | |
| "grad_norm": 0.7554918527603149, | |
| "learning_rate": 2.1533016671323464e-06, | |
| "loss": 1.4912, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 2.7128625750290687, | |
| "grad_norm": 0.7726461887359619, | |
| "learning_rate": 2.1346744900810285e-06, | |
| "loss": 1.5052, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 2.7153766380691997, | |
| "grad_norm": 0.7806633710861206, | |
| "learning_rate": 2.1160473130297103e-06, | |
| "loss": 1.617, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 2.7178907011093303, | |
| "grad_norm": 0.7801744937896729, | |
| "learning_rate": 2.097420135978393e-06, | |
| "loss": 1.5671, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 2.7204047641494613, | |
| "grad_norm": 0.7251957058906555, | |
| "learning_rate": 2.0787929589270746e-06, | |
| "loss": 1.4395, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 2.722918827189592, | |
| "grad_norm": 0.7631467580795288, | |
| "learning_rate": 2.060165781875757e-06, | |
| "loss": 1.5552, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 2.7254328902297225, | |
| "grad_norm": 0.9560774564743042, | |
| "learning_rate": 2.041538604824439e-06, | |
| "loss": 1.618, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 2.7279469532698535, | |
| "grad_norm": 0.8071618676185608, | |
| "learning_rate": 2.0229114277731215e-06, | |
| "loss": 1.4513, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 2.730461016309984, | |
| "grad_norm": 0.7830699682235718, | |
| "learning_rate": 2.0042842507218032e-06, | |
| "loss": 1.5453, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 2.7329750793501146, | |
| "grad_norm": 0.8035955429077148, | |
| "learning_rate": 1.9856570736704854e-06, | |
| "loss": 1.5796, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 2.7354891423902457, | |
| "grad_norm": 0.7925291061401367, | |
| "learning_rate": 1.9670298966191675e-06, | |
| "loss": 1.5597, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 2.7380032054303762, | |
| "grad_norm": 0.7249788045883179, | |
| "learning_rate": 1.9484027195678497e-06, | |
| "loss": 1.5207, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 2.740517268470507, | |
| "grad_norm": 0.7651757001876831, | |
| "learning_rate": 1.929775542516532e-06, | |
| "loss": 1.5689, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 2.743031331510638, | |
| "grad_norm": 0.8019152283668518, | |
| "learning_rate": 1.911148365465214e-06, | |
| "loss": 1.5936, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 2.7455453945507684, | |
| "grad_norm": 0.8409039378166199, | |
| "learning_rate": 1.8925211884138962e-06, | |
| "loss": 1.5477, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 2.748059457590899, | |
| "grad_norm": 0.7393688559532166, | |
| "learning_rate": 1.8738940113625781e-06, | |
| "loss": 1.4469, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 2.75057352063103, | |
| "grad_norm": 0.7847771048545837, | |
| "learning_rate": 1.8552668343112603e-06, | |
| "loss": 1.5982, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 2.7530875836711606, | |
| "grad_norm": 0.769858717918396, | |
| "learning_rate": 1.8366396572599424e-06, | |
| "loss": 1.6525, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 2.755601646711291, | |
| "grad_norm": 0.7851884961128235, | |
| "learning_rate": 1.8180124802086246e-06, | |
| "loss": 1.5243, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 2.758115709751422, | |
| "grad_norm": 0.8757625222206116, | |
| "learning_rate": 1.7993853031573065e-06, | |
| "loss": 1.6054, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 2.7606297727915527, | |
| "grad_norm": 0.7572917342185974, | |
| "learning_rate": 1.780758126105989e-06, | |
| "loss": 1.4794, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 2.7631438358316833, | |
| "grad_norm": 0.8647504448890686, | |
| "learning_rate": 1.7621309490546709e-06, | |
| "loss": 1.5396, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 2.7656578988718143, | |
| "grad_norm": 0.7155243754386902, | |
| "learning_rate": 1.7435037720033528e-06, | |
| "loss": 1.4566, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 2.768171961911945, | |
| "grad_norm": 0.7836328148841858, | |
| "learning_rate": 1.7248765949520352e-06, | |
| "loss": 1.4417, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 2.7706860249520755, | |
| "grad_norm": 0.7726582884788513, | |
| "learning_rate": 1.7062494179007171e-06, | |
| "loss": 1.5117, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 2.7732000879922065, | |
| "grad_norm": 0.832272469997406, | |
| "learning_rate": 1.6876222408493995e-06, | |
| "loss": 1.5008, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 2.775714151032337, | |
| "grad_norm": 0.7522630095481873, | |
| "learning_rate": 1.6689950637980814e-06, | |
| "loss": 1.4934, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 2.7782282140724677, | |
| "grad_norm": 0.7478271722793579, | |
| "learning_rate": 1.6503678867467638e-06, | |
| "loss": 1.5404, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 2.7807422771125987, | |
| "grad_norm": 0.7310355305671692, | |
| "learning_rate": 1.6317407096954458e-06, | |
| "loss": 1.5752, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 2.7832563401527293, | |
| "grad_norm": 0.8934334516525269, | |
| "learning_rate": 1.613113532644128e-06, | |
| "loss": 1.6908, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 2.78577040319286, | |
| "grad_norm": 0.804051399230957, | |
| "learning_rate": 1.59448635559281e-06, | |
| "loss": 1.5459, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 2.788284466232991, | |
| "grad_norm": 0.751595139503479, | |
| "learning_rate": 1.5758591785414922e-06, | |
| "loss": 1.4128, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 2.7907985292731214, | |
| "grad_norm": 0.7245124578475952, | |
| "learning_rate": 1.5572320014901742e-06, | |
| "loss": 1.4824, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 2.793312592313252, | |
| "grad_norm": 0.7615929841995239, | |
| "learning_rate": 1.5386048244388563e-06, | |
| "loss": 1.5747, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 2.795826655353383, | |
| "grad_norm": 0.7810664772987366, | |
| "learning_rate": 1.5199776473875385e-06, | |
| "loss": 1.4505, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 2.7983407183935136, | |
| "grad_norm": 0.7689558267593384, | |
| "learning_rate": 1.5013504703362205e-06, | |
| "loss": 1.4434, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 2.800854781433644, | |
| "grad_norm": 0.7829539775848389, | |
| "learning_rate": 1.4827232932849028e-06, | |
| "loss": 1.6225, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 2.803368844473775, | |
| "grad_norm": 0.8438304662704468, | |
| "learning_rate": 1.4640961162335848e-06, | |
| "loss": 1.5357, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 2.805882907513906, | |
| "grad_norm": 0.8361050486564636, | |
| "learning_rate": 1.4454689391822671e-06, | |
| "loss": 1.476, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 2.808396970554037, | |
| "grad_norm": 0.660135805606842, | |
| "learning_rate": 1.426841762130949e-06, | |
| "loss": 1.4644, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 2.8109110335941674, | |
| "grad_norm": 0.7874066829681396, | |
| "learning_rate": 1.4082145850796315e-06, | |
| "loss": 1.5938, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 2.813425096634298, | |
| "grad_norm": 0.7539405822753906, | |
| "learning_rate": 1.3895874080283134e-06, | |
| "loss": 1.6334, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 2.815939159674429, | |
| "grad_norm": 0.7350710034370422, | |
| "learning_rate": 1.3709602309769956e-06, | |
| "loss": 1.3995, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 2.8184532227145596, | |
| "grad_norm": 0.7598622441291809, | |
| "learning_rate": 1.3523330539256777e-06, | |
| "loss": 1.5987, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 2.82096728575469, | |
| "grad_norm": 0.8682858943939209, | |
| "learning_rate": 1.3337058768743599e-06, | |
| "loss": 1.5247, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 2.823481348794821, | |
| "grad_norm": 0.8003500699996948, | |
| "learning_rate": 1.3150786998230418e-06, | |
| "loss": 1.5929, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 2.8259954118349517, | |
| "grad_norm": 0.7463173270225525, | |
| "learning_rate": 1.296451522771724e-06, | |
| "loss": 1.5971, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 2.8285094748750828, | |
| "grad_norm": 0.7628188729286194, | |
| "learning_rate": 1.2778243457204062e-06, | |
| "loss": 1.6148, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 2.8310235379152133, | |
| "grad_norm": 0.7805269360542297, | |
| "learning_rate": 1.259197168669088e-06, | |
| "loss": 1.4666, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 2.833537600955344, | |
| "grad_norm": 0.8004319667816162, | |
| "learning_rate": 1.2405699916177705e-06, | |
| "loss": 1.5483, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 2.836051663995475, | |
| "grad_norm": 0.8070306181907654, | |
| "learning_rate": 1.2219428145664526e-06, | |
| "loss": 1.5359, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 2.8385657270356055, | |
| "grad_norm": 0.7626564502716064, | |
| "learning_rate": 1.2033156375151348e-06, | |
| "loss": 1.5101, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 2.841079790075736, | |
| "grad_norm": 0.7793872952461243, | |
| "learning_rate": 1.1846884604638167e-06, | |
| "loss": 1.5074, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 2.843593853115867, | |
| "grad_norm": 0.9144117832183838, | |
| "learning_rate": 1.166061283412499e-06, | |
| "loss": 1.6289, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 2.8461079161559977, | |
| "grad_norm": 0.7660481333732605, | |
| "learning_rate": 1.147434106361181e-06, | |
| "loss": 1.5565, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 2.8486219791961283, | |
| "grad_norm": 0.7611154317855835, | |
| "learning_rate": 1.1288069293098632e-06, | |
| "loss": 1.6396, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 2.8511360422362593, | |
| "grad_norm": 0.7642408013343811, | |
| "learning_rate": 1.1101797522585454e-06, | |
| "loss": 1.6081, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 2.85365010527639, | |
| "grad_norm": 0.7889783978462219, | |
| "learning_rate": 1.0915525752072275e-06, | |
| "loss": 1.5266, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 2.8561641683165204, | |
| "grad_norm": 0.7776032090187073, | |
| "learning_rate": 1.0729253981559095e-06, | |
| "loss": 1.4774, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 2.8586782313566514, | |
| "grad_norm": 0.8267963528633118, | |
| "learning_rate": 1.0542982211045916e-06, | |
| "loss": 1.5205, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 2.861192294396782, | |
| "grad_norm": 0.7250455021858215, | |
| "learning_rate": 1.0356710440532738e-06, | |
| "loss": 1.5041, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 2.8637063574369126, | |
| "grad_norm": 0.9626948237419128, | |
| "learning_rate": 1.017043867001956e-06, | |
| "loss": 1.5902, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 2.8662204204770436, | |
| "grad_norm": 0.7590466737747192, | |
| "learning_rate": 9.984166899506381e-07, | |
| "loss": 1.6308, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 2.868734483517174, | |
| "grad_norm": 0.8073099255561829, | |
| "learning_rate": 9.797895128993203e-07, | |
| "loss": 1.4904, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 2.8712485465573048, | |
| "grad_norm": 0.7953347563743591, | |
| "learning_rate": 9.611623358480022e-07, | |
| "loss": 1.5647, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 2.873762609597436, | |
| "grad_norm": 0.8464999198913574, | |
| "learning_rate": 9.425351587966844e-07, | |
| "loss": 1.5286, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 2.8762766726375664, | |
| "grad_norm": 0.770683765411377, | |
| "learning_rate": 9.239079817453665e-07, | |
| "loss": 1.6167, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 2.878790735677697, | |
| "grad_norm": 0.7723478078842163, | |
| "learning_rate": 9.052808046940487e-07, | |
| "loss": 1.5449, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 2.881304798717828, | |
| "grad_norm": 0.8382723927497864, | |
| "learning_rate": 8.866536276427308e-07, | |
| "loss": 1.6116, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 2.8838188617579585, | |
| "grad_norm": 0.772556483745575, | |
| "learning_rate": 8.680264505914129e-07, | |
| "loss": 1.5731, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 2.886332924798089, | |
| "grad_norm": 0.7866445183753967, | |
| "learning_rate": 8.493992735400951e-07, | |
| "loss": 1.5398, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 2.88884698783822, | |
| "grad_norm": 0.7631253004074097, | |
| "learning_rate": 8.307720964887772e-07, | |
| "loss": 1.6066, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 2.8913610508783507, | |
| "grad_norm": 0.7707316279411316, | |
| "learning_rate": 8.121449194374594e-07, | |
| "loss": 1.4257, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 2.8938751139184813, | |
| "grad_norm": 0.7535357475280762, | |
| "learning_rate": 7.935177423861415e-07, | |
| "loss": 1.5386, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 2.8963891769586123, | |
| "grad_norm": 0.7189772129058838, | |
| "learning_rate": 7.748905653348236e-07, | |
| "loss": 1.5996, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 2.898903239998743, | |
| "grad_norm": 0.8205345869064331, | |
| "learning_rate": 7.562633882835058e-07, | |
| "loss": 1.5438, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 2.9014173030388735, | |
| "grad_norm": 0.8040661811828613, | |
| "learning_rate": 7.376362112321877e-07, | |
| "loss": 1.6112, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 2.9039313660790045, | |
| "grad_norm": 0.8184328675270081, | |
| "learning_rate": 7.190090341808699e-07, | |
| "loss": 1.4874, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 2.906445429119135, | |
| "grad_norm": 0.7930591702461243, | |
| "learning_rate": 7.00381857129552e-07, | |
| "loss": 1.5348, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 2.9089594921592656, | |
| "grad_norm": 0.7524926066398621, | |
| "learning_rate": 6.817546800782342e-07, | |
| "loss": 1.5519, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 2.9114735551993967, | |
| "grad_norm": 0.8091217875480652, | |
| "learning_rate": 6.631275030269164e-07, | |
| "loss": 1.4673, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 2.9139876182395272, | |
| "grad_norm": 0.9316702485084534, | |
| "learning_rate": 6.445003259755984e-07, | |
| "loss": 1.5268, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 2.9165016812796583, | |
| "grad_norm": 0.7710354328155518, | |
| "learning_rate": 6.258731489242806e-07, | |
| "loss": 1.5533, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 2.919015744319789, | |
| "grad_norm": 0.7505325078964233, | |
| "learning_rate": 6.072459718729627e-07, | |
| "loss": 1.5224, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 2.9215298073599194, | |
| "grad_norm": 0.8271064162254333, | |
| "learning_rate": 5.886187948216449e-07, | |
| "loss": 1.4507, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 2.9240438704000504, | |
| "grad_norm": 0.7393507957458496, | |
| "learning_rate": 5.699916177703269e-07, | |
| "loss": 1.5227, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 2.926557933440181, | |
| "grad_norm": 0.8115874528884888, | |
| "learning_rate": 5.513644407190091e-07, | |
| "loss": 1.555, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 2.9290719964803116, | |
| "grad_norm": 0.7850735187530518, | |
| "learning_rate": 5.327372636676912e-07, | |
| "loss": 1.4828, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 2.9315860595204426, | |
| "grad_norm": 0.7837382555007935, | |
| "learning_rate": 5.141100866163733e-07, | |
| "loss": 1.4959, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 2.934100122560573, | |
| "grad_norm": 0.7817039489746094, | |
| "learning_rate": 4.954829095650555e-07, | |
| "loss": 1.5883, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 2.936614185600704, | |
| "grad_norm": 0.7486652731895447, | |
| "learning_rate": 4.768557325137376e-07, | |
| "loss": 1.4956, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 2.9391282486408348, | |
| "grad_norm": 0.7530533075332642, | |
| "learning_rate": 4.582285554624197e-07, | |
| "loss": 1.4744, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 2.9416423116809653, | |
| "grad_norm": 0.738166332244873, | |
| "learning_rate": 4.396013784111018e-07, | |
| "loss": 1.648, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 2.9441563747210964, | |
| "grad_norm": 0.7319310903549194, | |
| "learning_rate": 4.2097420135978395e-07, | |
| "loss": 1.4923, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 2.946670437761227, | |
| "grad_norm": 0.8141875863075256, | |
| "learning_rate": 4.023470243084661e-07, | |
| "loss": 1.5988, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 2.9491845008013575, | |
| "grad_norm": 0.7643289566040039, | |
| "learning_rate": 3.837198472571482e-07, | |
| "loss": 1.5786, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 2.9516985638414885, | |
| "grad_norm": 0.7373230457305908, | |
| "learning_rate": 3.650926702058304e-07, | |
| "loss": 1.5729, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 2.954212626881619, | |
| "grad_norm": 0.7662407755851746, | |
| "learning_rate": 3.4646549315451243e-07, | |
| "loss": 1.7458, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 2.9567266899217497, | |
| "grad_norm": 0.7732263803482056, | |
| "learning_rate": 3.278383161031946e-07, | |
| "loss": 1.4873, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 2.9592407529618807, | |
| "grad_norm": 0.7245255708694458, | |
| "learning_rate": 3.092111390518767e-07, | |
| "loss": 1.4992, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 2.9617548160020113, | |
| "grad_norm": 0.8597062826156616, | |
| "learning_rate": 2.9058396200055885e-07, | |
| "loss": 1.5634, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 2.964268879042142, | |
| "grad_norm": 0.7334901690483093, | |
| "learning_rate": 2.7195678494924096e-07, | |
| "loss": 1.5118, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 2.966782942082273, | |
| "grad_norm": 0.6824560761451721, | |
| "learning_rate": 2.5332960789792307e-07, | |
| "loss": 1.5404, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 2.9692970051224035, | |
| "grad_norm": 0.7791404128074646, | |
| "learning_rate": 2.3470243084660523e-07, | |
| "loss": 1.4702, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 2.971811068162534, | |
| "grad_norm": 0.9101436138153076, | |
| "learning_rate": 2.1607525379528736e-07, | |
| "loss": 1.5914, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 2.974325131202665, | |
| "grad_norm": 0.7947549223899841, | |
| "learning_rate": 1.9744807674396947e-07, | |
| "loss": 1.5456, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 2.9768391942427956, | |
| "grad_norm": 0.7972986102104187, | |
| "learning_rate": 1.788208996926516e-07, | |
| "loss": 1.5378, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 2.979353257282926, | |
| "grad_norm": 0.8576259016990662, | |
| "learning_rate": 1.6019372264133373e-07, | |
| "loss": 1.4729, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 2.9818673203230572, | |
| "grad_norm": 0.7566127777099609, | |
| "learning_rate": 1.4156654559001586e-07, | |
| "loss": 1.5241, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 2.984381383363188, | |
| "grad_norm": 0.7280460000038147, | |
| "learning_rate": 1.2293936853869797e-07, | |
| "loss": 1.5953, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 2.9868954464033184, | |
| "grad_norm": 0.8045029044151306, | |
| "learning_rate": 1.0431219148738009e-07, | |
| "loss": 1.592, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 2.9894095094434494, | |
| "grad_norm": 0.8725329637527466, | |
| "learning_rate": 8.568501443606222e-08, | |
| "loss": 1.477, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 2.99192357248358, | |
| "grad_norm": 0.7431342005729675, | |
| "learning_rate": 6.705783738474434e-08, | |
| "loss": 1.5088, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 2.9944376355237106, | |
| "grad_norm": 0.7301717400550842, | |
| "learning_rate": 4.8430660333426476e-08, | |
| "loss": 1.5579, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 2.9969516985638416, | |
| "grad_norm": 0.8561310172080994, | |
| "learning_rate": 2.98034832821086e-08, | |
| "loss": 1.6326, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 2.999465761603972, | |
| "grad_norm": 0.755133330821991, | |
| "learning_rate": 1.1176306230790725e-08, | |
| "loss": 1.5326, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 2.9997171679079853, | |
| "eval_loss": 0.19324611127376556, | |
| "eval_runtime": 323.0089, | |
| "eval_samples_per_second": 33.761, | |
| "eval_steps_per_second": 4.223, | |
| "step": 11931 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 11931, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.6616926469947392e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |