{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 4570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02188183807439825,
      "grad_norm": 7.835834503173828,
      "learning_rate": 4.3668122270742355e-06,
      "loss": 1.1502,
      "step": 10
    },
    {
      "epoch": 0.0437636761487965,
      "grad_norm": 5.089853286743164,
      "learning_rate": 8.733624454148471e-06,
      "loss": 1.0181,
      "step": 20
    },
    {
      "epoch": 0.06564551422319474,
      "grad_norm": 2.486280918121338,
      "learning_rate": 1.3100436681222708e-05,
      "loss": 0.6402,
      "step": 30
    },
    {
      "epoch": 0.087527352297593,
      "grad_norm": 2.2392938137054443,
      "learning_rate": 1.7467248908296942e-05,
      "loss": 0.4384,
      "step": 40
    },
    {
      "epoch": 0.10940919037199125,
      "grad_norm": 2.0446348190307617,
      "learning_rate": 2.183406113537118e-05,
      "loss": 0.3294,
      "step": 50
    },
    {
      "epoch": 0.13129102844638948,
      "grad_norm": 1.6032756567001343,
      "learning_rate": 2.6200873362445416e-05,
      "loss": 0.2657,
      "step": 60
    },
    {
      "epoch": 0.15317286652078774,
      "grad_norm": 1.2855844497680664,
      "learning_rate": 3.056768558951965e-05,
      "loss": 0.2188,
      "step": 70
    },
    {
      "epoch": 0.175054704595186,
      "grad_norm": 2.869873046875,
      "learning_rate": 3.4934497816593884e-05,
      "loss": 0.1833,
      "step": 80
    },
    {
      "epoch": 0.19693654266958424,
      "grad_norm": 1.311296820640564,
      "learning_rate": 3.930131004366812e-05,
      "loss": 0.1776,
      "step": 90
    },
    {
      "epoch": 0.2188183807439825,
      "grad_norm": 0.9431630373001099,
      "learning_rate": 4.366812227074236e-05,
      "loss": 0.1691,
      "step": 100
    },
    {
      "epoch": 0.24070021881838075,
      "grad_norm": 0.9962027668952942,
      "learning_rate": 4.8034934497816594e-05,
      "loss": 0.1551,
      "step": 110
    },
    {
      "epoch": 0.26258205689277897,
      "grad_norm": 0.8225952982902527,
      "learning_rate": 5.240174672489083e-05,
      "loss": 0.1378,
      "step": 120
    },
    {
      "epoch": 0.2844638949671772,
      "grad_norm": 0.662451982498169,
      "learning_rate": 5.6768558951965065e-05,
      "loss": 0.1357,
      "step": 130
    },
    {
      "epoch": 0.3063457330415755,
      "grad_norm": 0.7660549283027649,
      "learning_rate": 6.11353711790393e-05,
      "loss": 0.1335,
      "step": 140
    },
    {
      "epoch": 0.3282275711159737,
      "grad_norm": 0.6745500564575195,
      "learning_rate": 6.550218340611354e-05,
      "loss": 0.1284,
      "step": 150
    },
    {
      "epoch": 0.350109409190372,
      "grad_norm": 0.7069956064224243,
      "learning_rate": 6.986899563318777e-05,
      "loss": 0.1116,
      "step": 160
    },
    {
      "epoch": 0.37199124726477023,
      "grad_norm": 1.5347901582717896,
      "learning_rate": 7.423580786026201e-05,
      "loss": 0.1157,
      "step": 170
    },
    {
      "epoch": 0.3938730853391685,
      "grad_norm": 0.6622304320335388,
      "learning_rate": 7.860262008733625e-05,
      "loss": 0.1137,
      "step": 180
    },
    {
      "epoch": 0.41575492341356673,
      "grad_norm": 0.8169113993644714,
      "learning_rate": 8.296943231441049e-05,
      "loss": 0.1099,
      "step": 190
    },
    {
      "epoch": 0.437636761487965,
      "grad_norm": 1.2949796915054321,
      "learning_rate": 8.733624454148472e-05,
      "loss": 0.1047,
      "step": 200
    },
    {
      "epoch": 0.45951859956236324,
      "grad_norm": 1.0924999713897705,
      "learning_rate": 9.170305676855896e-05,
      "loss": 0.1077,
      "step": 210
    },
    {
      "epoch": 0.4814004376367615,
      "grad_norm": 1.032711386680603,
      "learning_rate": 9.606986899563319e-05,
      "loss": 0.093,
      "step": 220
    },
    {
      "epoch": 0.5032822757111597,
      "grad_norm": 0.8792279958724976,
      "learning_rate": 9.999998690636703e-05,
      "loss": 0.0948,
      "step": 230
    },
    {
      "epoch": 0.5251641137855579,
      "grad_norm": 0.7732746601104736,
      "learning_rate": 9.99984156787068e-05,
      "loss": 0.0929,
      "step": 240
    },
    {
      "epoch": 0.5470459518599562,
      "grad_norm": 1.3531904220581055,
      "learning_rate": 9.999422581874277e-05,
      "loss": 0.1034,
      "step": 250
    },
    {
      "epoch": 0.5689277899343544,
      "grad_norm": 42.917991638183594,
      "learning_rate": 9.998741754591594e-05,
      "loss": 0.0949,
      "step": 260
    },
    {
      "epoch": 0.5908096280087527,
      "grad_norm": 13.660039901733398,
      "learning_rate": 9.997799121680488e-05,
      "loss": 0.1149,
      "step": 270
    },
    {
      "epoch": 0.612691466083151,
      "grad_norm": 5.179759979248047,
      "learning_rate": 9.996594732510703e-05,
      "loss": 0.1119,
      "step": 280
    },
    {
      "epoch": 0.6345733041575492,
      "grad_norm": 7.586740970611572,
      "learning_rate": 9.995128650161283e-05,
      "loss": 0.0992,
      "step": 290
    },
    {
      "epoch": 0.6564551422319475,
      "grad_norm": 3.8440942764282227,
      "learning_rate": 9.993400951417277e-05,
      "loss": 0.1064,
      "step": 300
    },
    {
      "epoch": 0.6783369803063457,
      "grad_norm": 2.224613666534424,
      "learning_rate": 9.991411726765704e-05,
      "loss": 0.1033,
      "step": 310
    },
    {
      "epoch": 0.700218818380744,
      "grad_norm": 63.815773010253906,
      "learning_rate": 9.989161080390825e-05,
      "loss": 0.1072,
      "step": 320
    },
    {
      "epoch": 0.7221006564551422,
      "grad_norm": 52.77833938598633,
      "learning_rate": 9.986649130168682e-05,
      "loss": 0.0984,
      "step": 330
    },
    {
      "epoch": 0.7439824945295405,
      "grad_norm": 15.396129608154297,
      "learning_rate": 9.983876007660922e-05,
      "loss": 0.0979,
      "step": 340
    },
    {
      "epoch": 0.7658643326039387,
      "grad_norm": 1.6834031343460083,
      "learning_rate": 9.980841858107912e-05,
      "loss": 0.0956,
      "step": 350
    },
    {
      "epoch": 0.787746170678337,
      "grad_norm": 2.0496537685394287,
      "learning_rate": 9.977546840421128e-05,
      "loss": 0.0932,
      "step": 360
    },
    {
      "epoch": 0.8096280087527352,
      "grad_norm": 1.0929217338562012,
      "learning_rate": 9.973991127174833e-05,
      "loss": 0.0864,
      "step": 370
    },
    {
      "epoch": 0.8315098468271335,
      "grad_norm": 7.530645847320557,
      "learning_rate": 9.970174904597038e-05,
      "loss": 0.095,
      "step": 380
    },
    {
      "epoch": 0.8533916849015317,
      "grad_norm": 1.3216899633407593,
      "learning_rate": 9.966098372559754e-05,
      "loss": 0.1115,
      "step": 390
    },
    {
      "epoch": 0.87527352297593,
      "grad_norm": 1.4777535200119019,
      "learning_rate": 9.961761744568512e-05,
      "loss": 0.1035,
      "step": 400
    },
    {
      "epoch": 0.8971553610503282,
      "grad_norm": 1.4249495267868042,
      "learning_rate": 9.957165247751192e-05,
      "loss": 0.0945,
      "step": 410
    },
    {
      "epoch": 0.9190371991247265,
      "grad_norm": 5.402189254760742,
      "learning_rate": 9.952309122846128e-05,
      "loss": 0.0972,
      "step": 420
    },
    {
      "epoch": 0.9409190371991247,
      "grad_norm": 2.5458638668060303,
      "learning_rate": 9.947193624189485e-05,
      "loss": 0.0933,
      "step": 430
    },
    {
      "epoch": 0.962800875273523,
      "grad_norm": 1.0579487085342407,
      "learning_rate": 9.941819019701954e-05,
      "loss": 0.1069,
      "step": 440
    },
    {
      "epoch": 0.9846827133479212,
      "grad_norm": 2.8812167644500732,
      "learning_rate": 9.936185590874716e-05,
      "loss": 0.0873,
      "step": 450
    },
    {
      "epoch": 1.0065645514223194,
      "grad_norm": 1.7286851406097412,
      "learning_rate": 9.93029363275469e-05,
      "loss": 0.098,
      "step": 460
    },
    {
      "epoch": 1.0284463894967177,
      "grad_norm": 2.46840763092041,
      "learning_rate": 9.924143453929095e-05,
      "loss": 0.097,
      "step": 470
    },
    {
      "epoch": 1.0503282275711159,
      "grad_norm": 3.4682812690734863,
      "learning_rate": 9.917735376509274e-05,
      "loss": 0.0899,
      "step": 480
    },
    {
      "epoch": 1.0722100656455142,
      "grad_norm": 1.5925973653793335,
      "learning_rate": 9.911069736113832e-05,
      "loss": 0.0864,
      "step": 490
    },
    {
      "epoch": 1.0940919037199124,
      "grad_norm": 1.5992704629898071,
      "learning_rate": 9.904146881851059e-05,
      "loss": 0.0819,
      "step": 500
    },
    {
      "epoch": 1.1159737417943107,
      "grad_norm": 1.0037175416946411,
      "learning_rate": 9.896967176300638e-05,
      "loss": 0.0833,
      "step": 510
    },
    {
      "epoch": 1.1378555798687089,
      "grad_norm": 1.681389570236206,
      "learning_rate": 9.88953099549466e-05,
      "loss": 0.0934,
      "step": 520
    },
    {
      "epoch": 1.1597374179431073,
      "grad_norm": 1.4893583059310913,
      "learning_rate": 9.881838728897936e-05,
      "loss": 0.093,
      "step": 530
    },
    {
      "epoch": 1.1816192560175054,
      "grad_norm": 1.2456260919570923,
      "learning_rate": 9.873890779387584e-05,
      "loss": 0.1026,
      "step": 540
    },
    {
      "epoch": 1.2035010940919038,
      "grad_norm": 0.8705942630767822,
      "learning_rate": 9.865687563231943e-05,
      "loss": 0.092,
      "step": 550
    },
    {
      "epoch": 1.225382932166302,
      "grad_norm": 2.0043041706085205,
      "learning_rate": 9.857229510068761e-05,
      "loss": 0.0949,
      "step": 560
    },
    {
      "epoch": 1.2472647702407003,
      "grad_norm": 1.574006199836731,
      "learning_rate": 9.848517062882703e-05,
      "loss": 0.0843,
      "step": 570
    },
    {
      "epoch": 1.2691466083150984,
      "grad_norm": 0.98636794090271,
      "learning_rate": 9.839550677982135e-05,
      "loss": 0.0875,
      "step": 580
    },
    {
      "epoch": 1.2910284463894968,
      "grad_norm": 0.5821505784988403,
      "learning_rate": 9.830330824975245e-05,
      "loss": 0.0923,
      "step": 590
    },
    {
      "epoch": 1.312910284463895,
      "grad_norm": 0.5067301392555237,
      "learning_rate": 9.820857986745431e-05,
      "loss": 0.0812,
      "step": 600
    },
    {
      "epoch": 1.3347921225382933,
      "grad_norm": 0.5871260762214661,
      "learning_rate": 9.811132659426016e-05,
      "loss": 0.0804,
      "step": 610
    },
    {
      "epoch": 1.3566739606126914,
      "grad_norm": 0.6894322633743286,
      "learning_rate": 9.801155352374268e-05,
      "loss": 0.0909,
      "step": 620
    },
    {
      "epoch": 1.3785557986870898,
      "grad_norm": 0.7414800524711609,
      "learning_rate": 9.790926588144714e-05,
      "loss": 0.0901,
      "step": 630
    },
    {
      "epoch": 1.400437636761488,
      "grad_norm": 0.633738100528717,
      "learning_rate": 9.780446902461777e-05,
      "loss": 0.092,
      "step": 640
    },
    {
      "epoch": 1.4223194748358863,
      "grad_norm": 0.9708178639411926,
      "learning_rate": 9.769716844191722e-05,
      "loss": 0.0905,
      "step": 650
    },
    {
      "epoch": 1.4442013129102844,
      "grad_norm": 0.6483299732208252,
      "learning_rate": 9.758736975313895e-05,
      "loss": 0.0835,
      "step": 660
    },
    {
      "epoch": 1.4660831509846828,
      "grad_norm": 0.6239913702011108,
      "learning_rate": 9.747507870891311e-05,
      "loss": 0.084,
      "step": 670
    },
    {
      "epoch": 1.487964989059081,
      "grad_norm": 0.5222530364990234,
      "learning_rate": 9.736030119040508e-05,
      "loss": 0.0883,
      "step": 680
    },
    {
      "epoch": 1.509846827133479,
      "grad_norm": 0.7400548458099365,
      "learning_rate": 9.724304320900779e-05,
      "loss": 0.0894,
      "step": 690
    },
    {
      "epoch": 1.5317286652078774,
      "grad_norm": 0.5438825488090515,
      "learning_rate": 9.712331090602654e-05,
      "loss": 0.0848,
      "step": 700
    },
    {
      "epoch": 1.5536105032822758,
      "grad_norm": 0.351309210062027,
      "learning_rate": 9.700111055235754e-05,
      "loss": 0.0796,
      "step": 710
    },
    {
      "epoch": 1.575492341356674,
      "grad_norm": 0.5478343963623047,
      "learning_rate": 9.687644854815952e-05,
      "loss": 0.0812,
      "step": 720
    },
    {
      "epoch": 1.597374179431072,
      "grad_norm": 0.5228798985481262,
      "learning_rate": 9.674933142251837e-05,
      "loss": 0.0802,
      "step": 730
    },
    {
      "epoch": 1.6192560175054704,
      "grad_norm": 0.7279865145683289,
      "learning_rate": 9.661976583310524e-05,
      "loss": 0.0834,
      "step": 740
    },
    {
      "epoch": 1.6411378555798688,
      "grad_norm": 0.653016209602356,
      "learning_rate": 9.648775856582795e-05,
      "loss": 0.0818,
      "step": 750
    },
    {
      "epoch": 1.663019693654267,
      "grad_norm": 0.5135919451713562,
      "learning_rate": 9.635331653447545e-05,
      "loss": 0.075,
      "step": 760
    },
    {
      "epoch": 1.684901531728665,
      "grad_norm": 0.6700458526611328,
      "learning_rate": 9.621644678035577e-05,
      "loss": 0.0752,
      "step": 770
    },
    {
      "epoch": 1.7067833698030634,
      "grad_norm": 0.7173268795013428,
      "learning_rate": 9.607715647192726e-05,
      "loss": 0.0829,
      "step": 780
    },
    {
      "epoch": 1.7286652078774618,
      "grad_norm": 0.5226753354072571,
      "learning_rate": 9.593545290442311e-05,
      "loss": 0.0796,
      "step": 790
    },
    {
      "epoch": 1.75054704595186,
      "grad_norm": 0.6492887139320374,
      "learning_rate": 9.579134349946926e-05,
      "loss": 0.0759,
      "step": 800
    },
    {
      "epoch": 1.772428884026258,
      "grad_norm": 0.6919318437576294,
      "learning_rate": 9.564483580469571e-05,
      "loss": 0.0815,
      "step": 810
    },
    {
      "epoch": 1.7943107221006565,
      "grad_norm": 0.8271254301071167,
      "learning_rate": 9.549593749334128e-05,
      "loss": 0.0796,
      "step": 820
    },
    {
      "epoch": 1.8161925601750548,
      "grad_norm": 0.4964686930179596,
      "learning_rate": 9.534465636385162e-05,
      "loss": 0.087,
      "step": 830
    },
    {
      "epoch": 1.838074398249453,
      "grad_norm": 0.5696905255317688,
      "learning_rate": 9.519100033947083e-05,
      "loss": 0.0822,
      "step": 840
    },
    {
      "epoch": 1.859956236323851,
      "grad_norm": 0.849829375743866,
      "learning_rate": 9.503497746782653e-05,
      "loss": 0.0727,
      "step": 850
    },
    {
      "epoch": 1.8818380743982495,
      "grad_norm": 0.5354284644126892,
      "learning_rate": 9.487659592050824e-05,
      "loss": 0.0902,
      "step": 860
    },
    {
      "epoch": 1.9037199124726478,
      "grad_norm": 0.38756123185157776,
      "learning_rate": 9.471586399263956e-05,
      "loss": 0.0775,
      "step": 870
    },
    {
      "epoch": 1.925601750547046,
      "grad_norm": 0.7026869058609009,
      "learning_rate": 9.45527901024436e-05,
      "loss": 0.0788,
      "step": 880
    },
    {
      "epoch": 1.947483588621444,
      "grad_norm": 0.5695210695266724,
      "learning_rate": 9.43873827908021e-05,
      "loss": 0.0757,
      "step": 890
    },
    {
      "epoch": 1.9693654266958425,
      "grad_norm": 0.40557512640953064,
      "learning_rate": 9.421965072080817e-05,
      "loss": 0.0722,
      "step": 900
    },
    {
      "epoch": 1.9912472647702408,
      "grad_norm": 0.42085573077201843,
      "learning_rate": 9.404960267731251e-05,
      "loss": 0.0689,
      "step": 910
    },
    {
      "epoch": 2.0131291028446388,
      "grad_norm": 0.4493652880191803,
      "learning_rate": 9.387724756646332e-05,
      "loss": 0.0774,
      "step": 920
    },
    {
      "epoch": 2.035010940919037,
      "grad_norm": 0.49804505705833435,
      "learning_rate": 9.370259441523987e-05,
      "loss": 0.0771,
      "step": 930
    },
    {
      "epoch": 2.0568927789934355,
      "grad_norm": 0.6483808755874634,
      "learning_rate": 9.352565237097965e-05,
      "loss": 0.078,
      "step": 940
    },
    {
      "epoch": 2.078774617067834,
      "grad_norm": 0.5233718156814575,
      "learning_rate": 9.334643070089937e-05,
      "loss": 0.0709,
      "step": 950
    },
    {
      "epoch": 2.1006564551422318,
      "grad_norm": 0.6516150832176208,
      "learning_rate": 9.316493879160957e-05,
      "loss": 0.079,
      "step": 960
    },
    {
      "epoch": 2.12253829321663,
      "grad_norm": 0.692732036113739,
      "learning_rate": 9.298118614862297e-05,
      "loss": 0.0775,
      "step": 970
    },
    {
      "epoch": 2.1444201312910285,
      "grad_norm": 0.5700138807296753,
      "learning_rate": 9.279518239585667e-05,
      "loss": 0.0782,
      "step": 980
    },
    {
      "epoch": 2.166301969365427,
      "grad_norm": 0.3617083728313446,
      "learning_rate": 9.260693727512803e-05,
      "loss": 0.0714,
      "step": 990
    },
    {
      "epoch": 2.1881838074398248,
      "grad_norm": 0.4216122031211853,
      "learning_rate": 9.241646064564457e-05,
      "loss": 0.0672,
      "step": 1000
    },
    {
      "epoch": 2.210065645514223,
      "grad_norm": 0.38087522983551025,
      "learning_rate": 9.222376248348747e-05,
      "loss": 0.08,
      "step": 1010
    },
    {
      "epoch": 2.2319474835886215,
      "grad_norm": 0.8693896532058716,
      "learning_rate": 9.20288528810892e-05,
      "loss": 0.0778,
      "step": 1020
    },
    {
      "epoch": 2.25382932166302,
      "grad_norm": 0.6329444050788879,
      "learning_rate": 9.183174204670483e-05,
      "loss": 0.0722,
      "step": 1030
    },
    {
      "epoch": 2.2757111597374178,
      "grad_norm": 0.9332349896430969,
      "learning_rate": 9.163244030387743e-05,
      "loss": 0.0705,
      "step": 1040
    },
    {
      "epoch": 2.297592997811816,
      "grad_norm": 0.8268052935600281,
      "learning_rate": 9.143095809089741e-05,
      "loss": 0.0719,
      "step": 1050
    },
    {
      "epoch": 2.3194748358862145,
      "grad_norm": 1.8830317258834839,
      "learning_rate": 9.122730596025579e-05,
      "loss": 0.0733,
      "step": 1060
    },
    {
      "epoch": 2.341356673960613,
      "grad_norm": 0.4748838245868683,
      "learning_rate": 9.102149457809146e-05,
      "loss": 0.0692,
      "step": 1070
    },
    {
      "epoch": 2.363238512035011,
      "grad_norm": 0.6225442290306091,
      "learning_rate": 9.081353472363265e-05,
      "loss": 0.0724,
      "step": 1080
    },
    {
      "epoch": 2.385120350109409,
      "grad_norm": 0.3627307415008545,
      "learning_rate": 9.060343728863239e-05,
      "loss": 0.064,
      "step": 1090
    },
    {
      "epoch": 2.4070021881838075,
      "grad_norm": 0.5193670392036438,
      "learning_rate": 9.039121327679791e-05,
      "loss": 0.079,
      "step": 1100
    },
    {
      "epoch": 2.428884026258206,
      "grad_norm": 0.8696744441986084,
      "learning_rate": 9.017687380321449e-05,
      "loss": 0.0715,
      "step": 1110
    },
    {
      "epoch": 2.450765864332604,
      "grad_norm": 0.6547001004219055,
      "learning_rate": 8.996043009376319e-05,
      "loss": 0.0691,
      "step": 1120
    },
    {
      "epoch": 2.472647702407002,
      "grad_norm": 0.5175451040267944,
      "learning_rate": 8.974189348453306e-05,
      "loss": 0.0749,
      "step": 1130
    },
    {
      "epoch": 2.4945295404814005,
      "grad_norm": 0.5802105665206909,
      "learning_rate": 8.952127542122722e-05,
      "loss": 0.0717,
      "step": 1140
    },
    {
      "epoch": 2.516411378555799,
      "grad_norm": 0.7577764987945557,
      "learning_rate": 8.929858745856354e-05,
      "loss": 0.0816,
      "step": 1150
    },
    {
      "epoch": 2.538293216630197,
      "grad_norm": 0.5405493378639221,
      "learning_rate": 8.907384125966945e-05,
      "loss": 0.0738,
      "step": 1160
    },
    {
      "epoch": 2.560175054704595,
      "grad_norm": 0.3944590985774994,
      "learning_rate": 8.884704859547108e-05,
      "loss": 0.071,
      "step": 1170
    },
    {
      "epoch": 2.5820568927789935,
      "grad_norm": 0.5056865215301514,
      "learning_rate": 8.861822134407672e-05,
      "loss": 0.0692,
      "step": 1180
    },
    {
      "epoch": 2.6039387308533914,
      "grad_norm": 0.4500432312488556,
      "learning_rate": 8.838737149015477e-05,
      "loss": 0.0723,
      "step": 1190
    },
    {
      "epoch": 2.62582056892779,
      "grad_norm": 0.36721524596214294,
      "learning_rate": 8.815451112430604e-05,
      "loss": 0.0686,
      "step": 1200
    },
    {
      "epoch": 2.647702407002188,
      "grad_norm": 0.7576287984848022,
      "learning_rate": 8.79196524424305e-05,
      "loss": 0.0769,
      "step": 1210
    },
    {
      "epoch": 2.6695842450765865,
      "grad_norm": 0.6370827555656433,
      "learning_rate": 8.768280774508853e-05,
      "loss": 0.0815,
      "step": 1220
    },
    {
      "epoch": 2.691466083150985,
      "grad_norm": 0.3080037236213684,
      "learning_rate": 8.74439894368567e-05,
      "loss": 0.0607,
      "step": 1230
    },
    {
      "epoch": 2.713347921225383,
      "grad_norm": 0.46508321166038513,
      "learning_rate": 8.720321002567807e-05,
      "loss": 0.0696,
      "step": 1240
    },
    {
      "epoch": 2.735229759299781,
      "grad_norm": 0.6237578988075256,
      "learning_rate": 8.696048212220711e-05,
      "loss": 0.0716,
      "step": 1250
    },
    {
      "epoch": 2.7571115973741795,
      "grad_norm": 0.4095085561275482,
      "learning_rate": 8.671581843914923e-05,
      "loss": 0.0635,
      "step": 1260
    },
    {
      "epoch": 2.7789934354485775,
      "grad_norm": 0.6405256986618042,
      "learning_rate": 8.646923179059494e-05,
      "loss": 0.0648,
      "step": 1270
    },
    {
      "epoch": 2.800875273522976,
      "grad_norm": 0.48811593651771545,
      "learning_rate": 8.622073509134874e-05,
      "loss": 0.0645,
      "step": 1280
    },
    {
      "epoch": 2.822757111597374,
      "grad_norm": 0.5660180449485779,
      "learning_rate": 8.597034135625268e-05,
      "loss": 0.0697,
      "step": 1290
    },
    {
      "epoch": 2.8446389496717726,
      "grad_norm": 0.5524961352348328,
      "learning_rate": 8.571806369950481e-05,
      "loss": 0.0721,
      "step": 1300
    },
    {
      "epoch": 2.866520787746171,
      "grad_norm": 0.38668200373649597,
      "learning_rate": 8.54639153339722e-05,
      "loss": 0.0666,
      "step": 1310
    },
    {
      "epoch": 2.888402625820569,
      "grad_norm": 0.5835091471672058,
      "learning_rate": 8.520790957049904e-05,
      "loss": 0.0613,
      "step": 1320
    },
    {
      "epoch": 2.910284463894967,
      "grad_norm": 0.5212987661361694,
      "learning_rate": 8.49500598172094e-05,
      "loss": 0.0732,
      "step": 1330
    },
    {
      "epoch": 2.9321663019693656,
      "grad_norm": 0.28042611479759216,
      "learning_rate": 8.46903795788051e-05,
      "loss": 0.0582,
      "step": 1340
    },
    {
      "epoch": 2.9540481400437635,
      "grad_norm": 0.5811691880226135,
      "learning_rate": 8.442888245585828e-05,
      "loss": 0.0683,
      "step": 1350
    },
    {
      "epoch": 2.975929978118162,
      "grad_norm": 0.4917229115962982,
      "learning_rate": 8.41655821440992e-05,
      "loss": 0.0669,
      "step": 1360
    },
    {
      "epoch": 2.99781181619256,
      "grad_norm": 0.5715236663818359,
      "learning_rate": 8.390049243369886e-05,
      "loss": 0.064,
      "step": 1370
    },
    {
      "epoch": 3.0196936542669586,
      "grad_norm": 0.7416530251502991,
      "learning_rate": 8.363362720854677e-05,
      "loss": 0.0701,
      "step": 1380
    },
    {
      "epoch": 3.0415754923413565,
      "grad_norm": 0.6221567988395691,
      "learning_rate": 8.33650004455238e-05,
      "loss": 0.0707,
      "step": 1390
    },
    {
      "epoch": 3.063457330415755,
      "grad_norm": 0.5047647953033447,
      "learning_rate": 8.309462621377013e-05,
      "loss": 0.0787,
      "step": 1400
    },
    {
      "epoch": 3.085339168490153,
      "grad_norm": 0.4984498918056488,
      "learning_rate": 8.28225186739484e-05,
      "loss": 0.0629,
      "step": 1410
    },
    {
      "epoch": 3.1072210065645516,
      "grad_norm": 0.37575212121009827,
      "learning_rate": 8.254869207750206e-05,
      "loss": 0.0638,
      "step": 1420
    },
    {
      "epoch": 3.1291028446389495,
      "grad_norm": 0.5403359532356262,
      "learning_rate": 8.227316076590898e-05,
      "loss": 0.0645,
      "step": 1430
    },
    {
      "epoch": 3.150984682713348,
      "grad_norm": 0.4255600869655609,
      "learning_rate": 8.19959391699303e-05,
      "loss": 0.0615,
      "step": 1440
    },
    {
      "epoch": 3.1728665207877462,
      "grad_norm": 0.41742101311683655,
      "learning_rate": 8.171704180885457e-05,
      "loss": 0.0614,
      "step": 1450
    },
    {
      "epoch": 3.1947483588621446,
      "grad_norm": 0.36597940325737,
      "learning_rate": 8.143648328973746e-05,
      "loss": 0.0662,
      "step": 1460
    },
    {
      "epoch": 3.2166301969365425,
      "grad_norm": 0.34130623936653137,
      "learning_rate": 8.115427830663658e-05,
      "loss": 0.0705,
      "step": 1470
    },
    {
      "epoch": 3.238512035010941,
      "grad_norm": 0.45353439450263977,
      "learning_rate": 8.087044163984197e-05,
      "loss": 0.062,
      "step": 1480
    },
    {
      "epoch": 3.2603938730853392,
      "grad_norm": 0.525982677936554,
      "learning_rate": 8.058498815510196e-05,
      "loss": 0.0691,
      "step": 1490
    },
    {
      "epoch": 3.2822757111597376,
      "grad_norm": 0.520045280456543,
      "learning_rate": 8.029793280284459e-05,
      "loss": 0.0612,
      "step": 1500
    },
    {
      "epoch": 3.3041575492341355,
      "grad_norm": 0.5012158155441284,
      "learning_rate": 8.000929061739464e-05,
      "loss": 0.067,
      "step": 1510
    },
    {
      "epoch": 3.326039387308534,
      "grad_norm": 0.36634936928749084,
      "learning_rate": 7.971907671618612e-05,
      "loss": 0.0672,
      "step": 1520
    },
    {
      "epoch": 3.3479212253829322,
      "grad_norm": 0.43757519125938416,
      "learning_rate": 7.942730629897063e-05,
      "loss": 0.067,
      "step": 1530
    },
    {
      "epoch": 3.3698030634573306,
      "grad_norm": 0.44617760181427,
      "learning_rate": 7.913399464702113e-05,
      "loss": 0.0623,
      "step": 1540
    },
    {
      "epoch": 3.3916849015317285,
      "grad_norm": 0.6319367289543152,
      "learning_rate": 7.883915712233177e-05,
      "loss": 0.063,
      "step": 1550
    },
    {
      "epoch": 3.413566739606127,
      "grad_norm": 0.3538963496685028,
      "learning_rate": 7.854280916681314e-05,
      "loss": 0.066,
      "step": 1560
    },
    {
      "epoch": 3.4354485776805253,
      "grad_norm": 0.4608595371246338,
      "learning_rate": 7.824496630148364e-05,
      "loss": 0.0621,
      "step": 1570
    },
    {
      "epoch": 3.4573304157549236,
      "grad_norm": 0.5707418322563171,
      "learning_rate": 7.794564412565655e-05,
      "loss": 0.0696,
      "step": 1580
    },
    {
      "epoch": 3.4792122538293215,
      "grad_norm": 0.37221798300743103,
      "learning_rate": 7.764485831612299e-05,
      "loss": 0.066,
      "step": 1590
    },
    {
      "epoch": 3.50109409190372,
      "grad_norm": 0.6436975598335266,
      "learning_rate": 7.734262462633085e-05,
      "loss": 0.0597,
      "step": 1600
    },
    {
      "epoch": 3.5229759299781183,
      "grad_norm": 0.7743731141090393,
      "learning_rate": 7.703895888555978e-05,
      "loss": 0.0696,
      "step": 1610
    },
    {
      "epoch": 3.544857768052516,
      "grad_norm": 0.561302900314331,
      "learning_rate": 7.673387699809211e-05,
      "loss": 0.0613,
      "step": 1620
    },
    {
      "epoch": 3.5667396061269145,
      "grad_norm": 0.42251160740852356,
      "learning_rate": 7.642739494237986e-05,
      "loss": 0.0642,
      "step": 1630
    },
    {
      "epoch": 3.588621444201313,
      "grad_norm": 0.2583286166191101,
      "learning_rate": 7.611952877020787e-05,
      "loss": 0.0648,
      "step": 1640
    },
    {
      "epoch": 3.6105032822757113,
      "grad_norm": 0.6360892057418823,
      "learning_rate": 7.581029460585313e-05,
      "loss": 0.0621,
      "step": 1650
    },
    {
      "epoch": 3.6323851203501096,
      "grad_norm": 0.4245406687259674,
      "learning_rate": 7.54997086452403e-05,
      "loss": 0.0618,
      "step": 1660
    },
    {
      "epoch": 3.6542669584245075,
      "grad_norm": 0.3884272277355194,
      "learning_rate": 7.518778715509341e-05,
      "loss": 0.0651,
      "step": 1670
    },
    {
      "epoch": 3.676148796498906,
      "grad_norm": 0.4515193700790405,
      "learning_rate": 7.487454647208387e-05,
      "loss": 0.0614,
      "step": 1680
    },
    {
      "epoch": 3.6980306345733043,
      "grad_norm": 0.32720717787742615,
      "learning_rate": 7.456000300197498e-05,
      "loss": 0.0632,
      "step": 1690
    },
    {
      "epoch": 3.719912472647702,
      "grad_norm": 0.3803008198738098,
      "learning_rate": 7.424417321876258e-05,
      "loss": 0.0688,
      "step": 1700
    },
    {
      "epoch": 3.7417943107221006,
      "grad_norm": 0.4703556299209595,
      "learning_rate": 7.392707366381226e-05,
      "loss": 0.0565,
      "step": 1710
    },
    {
      "epoch": 3.763676148796499,
      "grad_norm": 0.3876951336860657,
      "learning_rate": 7.360872094499303e-05,
      "loss": 0.0629,
      "step": 1720
    },
    {
      "epoch": 3.7855579868708973,
      "grad_norm": 0.6968600749969482,
      "learning_rate": 7.328913173580745e-05,
      "loss": 0.0654,
      "step": 1730
    },
    {
      "epoch": 3.8074398249452956,
      "grad_norm": 0.3983824551105499,
      "learning_rate": 7.29683227745185e-05,
      "loss": 0.0665,
      "step": 1740
    },
    {
      "epoch": 3.8293216630196936,
      "grad_norm": 0.6152142286300659,
      "learning_rate": 7.264631086327273e-05,
      "loss": 0.0665,
      "step": 1750
    },
    {
      "epoch": 3.851203501094092,
      "grad_norm": 0.45532792806625366,
      "learning_rate": 7.232311286722038e-05,
      "loss": 0.0644,
      "step": 1760
    },
    {
      "epoch": 3.8730853391684903,
      "grad_norm": 0.5433072447776794,
      "learning_rate": 7.199874571363212e-05,
      "loss": 0.0669,
      "step": 1770
    },
    {
      "epoch": 3.894967177242888,
      "grad_norm": 0.393799364566803,
      "learning_rate": 7.167322639101234e-05,
      "loss": 0.0609,
      "step": 1780
    },
    {
      "epoch": 3.9168490153172866,
      "grad_norm": 0.37970104813575745,
      "learning_rate": 7.134657194820957e-05,
      "loss": 0.0613,
      "step": 1790
    },
    {
      "epoch": 3.938730853391685,
      "grad_norm": 0.4533461928367615,
      "learning_rate": 7.101879949352343e-05,
      "loss": 0.0661,
      "step": 1800
    },
    {
      "epoch": 3.9606126914660833,
      "grad_norm": 0.5303936004638672,
      "learning_rate": 7.068992619380859e-05,
      "loss": 0.0604,
      "step": 1810
    },
    {
      "epoch": 3.9824945295404817,
      "grad_norm": 0.43903806805610657,
      "learning_rate": 7.035996927357583e-05,
      "loss": 0.0575,
      "step": 1820
    },
    {
      "epoch": 4.00437636761488,
      "grad_norm": 0.4638700485229492,
      "learning_rate": 7.00289460140897e-05,
      "loss": 0.0675,
      "step": 1830
    },
    {
      "epoch": 4.0262582056892775,
      "grad_norm": 0.6604804396629333,
      "learning_rate": 6.969687375246355e-05,
      "loss": 0.066,
      "step": 1840
    },
    {
      "epoch": 4.048140043763676,
      "grad_norm": 0.39075350761413574,
      "learning_rate": 6.936376988075155e-05,
      "loss": 0.0642,
      "step": 1850
    },
    {
      "epoch": 4.070021881838074,
      "grad_norm": 0.5355797410011292,
      "learning_rate": 6.902965184503763e-05,
      "loss": 0.0618,
      "step": 1860
    },
    {
      "epoch": 4.091903719912473,
      "grad_norm": 0.70420241355896,
      "learning_rate": 6.869453714452194e-05,
      "loss": 0.0613,
      "step": 1870
    },
    {
      "epoch": 4.113785557986871,
      "grad_norm": 0.5154333114624023,
      "learning_rate": 6.835844333060423e-05,
      "loss": 0.058,
      "step": 1880
    },
    {
      "epoch": 4.135667396061269,
      "grad_norm": 0.5481790900230408,
      "learning_rate": 6.802138800596462e-05,
      "loss": 0.0602,
      "step": 1890
    },
    {
      "epoch": 4.157549234135668,
      "grad_norm": 1.0096722841262817,
      "learning_rate": 6.768338882364168e-05,
      "loss": 0.0693,
      "step": 1900
    },
    {
      "epoch": 4.179431072210066,
      "grad_norm": 0.4854465126991272,
      "learning_rate": 6.734446348610792e-05,
      "loss": 0.0612,
      "step": 1910
    },
    {
      "epoch": 4.2013129102844635,
      "grad_norm": 0.33401617407798767,
      "learning_rate": 6.700462974434251e-05,
      "loss": 0.0612,
      "step": 1920
    },
    {
      "epoch": 4.223194748358862,
      "grad_norm": 0.44405895471572876,
      "learning_rate": 6.666390539690179e-05,
      "loss": 0.0644,
      "step": 1930
    },
    {
      "epoch": 4.24507658643326,
      "grad_norm": 0.43215858936309814,
      "learning_rate": 6.632230828898677e-05,
      "loss": 0.0592,
      "step": 1940
    },
    {
      "epoch": 4.266958424507659,
      "grad_norm": 0.3804273307323456,
      "learning_rate": 6.597985631150884e-05,
      "loss": 0.0565,
      "step": 1950
    },
    {
      "epoch": 4.288840262582057,
      "grad_norm": 0.43090665340423584,
      "learning_rate": 6.56365674001525e-05,
      "loss": 0.0579,
      "step": 1960
    },
    {
      "epoch": 4.310722100656455,
      "grad_norm": 0.5440065264701843,
      "learning_rate": 6.529245953443615e-05,
      "loss": 0.0575,
      "step": 1970
    },
    {
      "epoch": 4.332603938730854,
      "grad_norm": 0.36599451303482056,
      "learning_rate": 6.49475507367703e-05,
      "loss": 0.0598,
      "step": 1980
    },
    {
      "epoch": 4.354485776805252,
      "grad_norm": 0.33587977290153503,
      "learning_rate": 6.460185907151371e-05,
      "loss": 0.0583,
      "step": 1990
    },
    {
      "epoch": 4.3763676148796495,
      "grad_norm": 0.3317202627658844,
      "learning_rate": 6.425540264402735e-05,
      "loss": 0.0601,
      "step": 2000
    },
    {
      "epoch": 4.398249452954048,
      "grad_norm": 0.36118966341018677,
      "learning_rate": 6.390819959972603e-05,
      "loss": 0.0531,
      "step": 2010
    },
    {
      "epoch": 4.420131291028446,
      "grad_norm": 0.3242454528808594,
      "learning_rate": 6.356026812312809e-05,
      "loss": 0.0585,
      "step": 2020
    },
    {
      "epoch": 4.442013129102845,
      "grad_norm": 0.4215313494205475,
      "learning_rate": 6.321162643690304e-05,
      "loss": 0.0637,
      "step": 2030
    },
    {
      "epoch": 4.463894967177243,
      "grad_norm": 0.34491801261901855,
      "learning_rate": 6.286229280091711e-05,
      "loss": 0.0642,
      "step": 2040
    },
    {
      "epoch": 4.485776805251641,
      "grad_norm": 0.5286457538604736,
      "learning_rate": 6.251228551127691e-05,
      "loss": 0.0654,
      "step": 2050
    },
    {
      "epoch": 4.50765864332604,
      "grad_norm": 0.357379674911499,
      "learning_rate": 6.216162289937119e-05,
      "loss": 0.0529,
      "step": 2060
    },
    {
      "epoch": 4.529540481400438,
      "grad_norm": 0.4211725890636444,
      "learning_rate": 6.181032333091078e-05,
      "loss": 0.0574,
      "step": 2070
    },
    {
      "epoch": 4.5514223194748356,
      "grad_norm": 0.4051927328109741,
      "learning_rate": 6.145840520496666e-05,
      "loss": 0.0555,
      "step": 2080
    },
    {
      "epoch": 4.573304157549234,
      "grad_norm": 0.4655281603336334,
      "learning_rate": 6.11058869530063e-05,
      "loss": 0.061,
      "step": 2090
    },
    {
      "epoch": 4.595185995623632,
      "grad_norm": 0.3950660526752472,
      "learning_rate": 6.0752787037928336e-05,
      "loss": 0.061,
      "step": 2100
    },
    {
      "epoch": 4.61706783369803,
      "grad_norm": 0.5274171233177185,
      "learning_rate": 6.039912395309568e-05,
      "loss": 0.0605,
      "step": 2110
    },
    {
      "epoch": 4.638949671772429,
      "grad_norm": 0.3764810860157013,
      "learning_rate": 6.004491622136681e-05,
      "loss": 0.0555,
      "step": 2120
    },
    {
      "epoch": 4.660831509846827,
      "grad_norm": 0.3669235110282898,
      "learning_rate": 5.969018239412573e-05,
      "loss": 0.0657,
      "step": 2130
    },
    {
      "epoch": 4.682713347921226,
      "grad_norm": 0.4793848395347595,
      "learning_rate": 5.933494105031032e-05,
      "loss": 0.0625,
      "step": 2140
    },
    {
      "epoch": 4.704595185995624,
      "grad_norm": 0.7370563745498657,
      "learning_rate": 5.897921079543931e-05,
      "loss": 0.0553,
      "step": 2150
    },
    {
      "epoch": 4.726477024070022,
      "grad_norm": 0.32018744945526123,
      "learning_rate": 5.862301026063777e-05,
      "loss": 0.055,
      "step": 2160
    },
    {
      "epoch": 4.74835886214442,
      "grad_norm": 0.39296776056289673,
      "learning_rate": 5.826635810166137e-05,
      "loss": 0.052,
      "step": 2170
    },
    {
      "epoch": 4.770240700218818,
      "grad_norm": 0.4568535089492798,
      "learning_rate": 5.7909272997919305e-05,
      "loss": 0.056,
      "step": 2180
    },
    {
      "epoch": 4.792122538293217,
      "grad_norm": 0.39834004640579224,
      "learning_rate": 5.7551773651495953e-05,
      "loss": 0.0586,
      "step": 2190
    },
    {
      "epoch": 4.814004376367615,
      "grad_norm": 0.47946035861968994,
      "learning_rate": 5.719387878617131e-05,
      "loss": 0.0609,
      "step": 2200
    },
    {
      "epoch": 4.835886214442013,
      "grad_norm": 0.3279586434364319,
      "learning_rate": 5.683560714644044e-05,
      "loss": 0.059,
      "step": 2210
    },
    {
      "epoch": 4.857768052516412,
      "grad_norm": 0.48775798082351685,
      "learning_rate": 5.647697749653172e-05,
      "loss": 0.0565,
      "step": 2220
    },
    {
      "epoch": 4.87964989059081,
      "grad_norm": 0.42671310901641846,
      "learning_rate": 5.6118008619424036e-05,
      "loss": 0.0605,
      "step": 2230
    },
    {
      "epoch": 4.901531728665208,
      "grad_norm": 0.3488156497478485,
      "learning_rate": 5.5758719315863076e-05,
      "loss": 0.0548,
      "step": 2240
    },
    {
      "epoch": 4.923413566739606,
      "grad_norm": 0.4897012710571289,
      "learning_rate": 5.539912840337661e-05,
      "loss": 0.0587,
      "step": 2250
    },
    {
      "epoch": 4.945295404814004,
      "grad_norm": 0.40971580147743225,
      "learning_rate": 5.5039254715289e-05,
      "loss": 0.0608,
      "step": 2260
    },
    {
      "epoch": 4.967177242888402,
      "grad_norm": 0.5318540334701538,
      "learning_rate": 5.46791170997348e-05,
      "loss": 0.0497,
      "step": 2270
    },
    {
      "epoch": 4.989059080962801,
      "grad_norm": 0.42819133400917053,
      "learning_rate": 5.4318734418671525e-05,
      "loss": 0.0636,
      "step": 2280
    },
    {
      "epoch": 5.010940919037199,
      "grad_norm": 0.4624245762825012,
      "learning_rate": 5.395812554689188e-05,
      "loss": 0.0613,
      "step": 2290
    },
    {
      "epoch": 5.032822757111598,
      "grad_norm": 0.4868052005767822,
      "learning_rate": 5.359730937103512e-05,
      "loss": 0.0523,
      "step": 2300
    },
    {
      "epoch": 5.054704595185996,
      "grad_norm": 0.6195753216743469,
      "learning_rate": 5.323630478859789e-05,
      "loss": 0.0604,
      "step": 2310
    },
    {
      "epoch": 5.076586433260394,
      "grad_norm": 0.38792890310287476,
      "learning_rate": 5.2875130706944574e-05,
      "loss": 0.0539,
      "step": 2320
    },
    {
      "epoch": 5.098468271334792,
      "grad_norm": 0.5414655208587646,
      "learning_rate": 5.251380604231687e-05,
      "loss": 0.0602,
      "step": 2330
    },
    {
      "epoch": 5.12035010940919,
      "grad_norm": 0.5284174680709839,
      "learning_rate": 5.215234971884321e-05,
      "loss": 0.0569,
      "step": 2340
    },
    {
      "epoch": 5.142231947483588,
      "grad_norm": 0.4137500822544098,
      "learning_rate": 5.1790780667547564e-05,
      "loss": 0.0595,
      "step": 2350
    },
    {
      "epoch": 5.164113785557987,
      "grad_norm": 0.456150084733963,
      "learning_rate": 5.1429117825357894e-05,
      "loss": 0.0652,
      "step": 2360
    },
    {
      "epoch": 5.185995623632385,
      "grad_norm": 0.45625388622283936,
      "learning_rate": 5.1067380134114484e-05,
      "loss": 0.0638,
      "step": 2370
    },
    {
      "epoch": 5.207877461706784,
      "grad_norm": 0.4778393507003784,
      "learning_rate": 5.0705586539577687e-05,
      "loss": 0.0562,
      "step": 2380
    },
    {
      "epoch": 5.229759299781182,
      "grad_norm": 0.45723408460617065,
      "learning_rate": 5.034375599043582e-05,
      "loss": 0.0578,
      "step": 2390
    },
    {
      "epoch": 5.25164113785558,
      "grad_norm": 0.4665261507034302,
      "learning_rate": 4.998190743731265e-05,
      "loss": 0.0483,
      "step": 2400
    },
    {
      "epoch": 5.273522975929978,
      "grad_norm": 0.37840256094932556,
      "learning_rate": 4.96200598317749e-05,
      "loss": 0.06,
      "step": 2410
    },
    {
      "epoch": 5.295404814004376,
      "grad_norm": 0.4239036738872528,
      "learning_rate": 4.925823212533963e-05,
      "loss": 0.0575,
      "step": 2420
    },
    {
      "epoch": 5.317286652078774,
      "grad_norm": 0.45726969838142395,
      "learning_rate": 4.8896443268481715e-05,
      "loss": 0.0523,
      "step": 2430
    },
    {
      "epoch": 5.339168490153173,
      "grad_norm": 0.3009950816631317,
      "learning_rate": 4.853471220964137e-05,
      "loss": 0.0573,
      "step": 2440
    },
    {
      "epoch": 5.361050328227571,
      "grad_norm": 0.5033520460128784,
      "learning_rate": 4.8173057894231585e-05,
      "loss": 0.0576,
      "step": 2450
    },
    {
      "epoch": 5.38293216630197,
      "grad_norm": 0.6416246294975281,
      "learning_rate": 4.7811499263646024e-05,
      "loss": 0.0566,
      "step": 2460
    },
    {
      "epoch": 5.404814004376368,
      "grad_norm": 0.45454248785972595,
      "learning_rate": 4.7450055254266875e-05,
      "loss": 0.0603,
      "step": 2470
    },
    {
      "epoch": 5.426695842450766,
      "grad_norm": 0.4150170087814331,
      "learning_rate": 4.708874479647319e-05,
      "loss": 0.061,
      "step": 2480
    },
    {
      "epoch": 5.4485776805251644,
      "grad_norm": 0.7388213276863098,
      "learning_rate": 4.6727586813649324e-05,
      "loss": 0.0559,
      "step": 2490
    },
    {
      "epoch": 5.470459518599562,
      "grad_norm": 0.42447715997695923,
      "learning_rate": 4.6366600221193815e-05,
      "loss": 0.0523,
      "step": 2500
    },
    {
      "epoch": 5.49234135667396,
      "grad_norm": 0.35511597990989685,
      "learning_rate": 4.60058039255288e-05,
      "loss": 0.0494,
      "step": 2510
    },
    {
      "epoch": 5.514223194748359,
      "grad_norm": 0.47702649235725403,
      "learning_rate": 4.5645216823109727e-05,
      "loss": 0.0621,
      "step": 2520
    },
    {
      "epoch": 5.536105032822757,
      "grad_norm": 0.5418275594711304,
      "learning_rate": 4.5284857799435726e-05,
      "loss": 0.0569,
      "step": 2530
    },
    {
      "epoch": 5.557986870897155,
      "grad_norm": 0.2867294251918793,
      "learning_rate": 4.4924745728060444e-05,
      "loss": 0.0575,
      "step": 2540
    },
    {
      "epoch": 5.579868708971554,
      "grad_norm": 0.3692294657230377,
      "learning_rate": 4.456489946960353e-05,
      "loss": 0.0621,
      "step": 2550
    },
    {
      "epoch": 5.601750547045952,
      "grad_norm": 0.5166872143745422,
      "learning_rate": 4.4205337870762944e-05,
      "loss": 0.0514,
      "step": 2560
    },
    {
      "epoch": 5.6236323851203505,
      "grad_norm": 0.25781774520874023,
      "learning_rate": 4.384607976332773e-05,
      "loss": 0.0526,
      "step": 2570
    },
    {
      "epoch": 5.645514223194748,
      "grad_norm": 0.458324670791626,
      "learning_rate": 4.348714396319182e-05,
      "loss": 0.0618,
      "step": 2580
    },
    {
      "epoch": 5.667396061269146,
      "grad_norm": 0.5077418088912964,
      "learning_rate": 4.3128549269368525e-05,
      "loss": 0.057,
      "step": 2590
    },
    {
      "epoch": 5.689277899343545,
      "grad_norm": 0.3836349844932556,
      "learning_rate": 4.27703144630059e-05,
      "loss": 0.0548,
      "step": 2600
    },
    {
      "epoch": 5.711159737417943,
      "grad_norm": 0.29659900069236755,
      "learning_rate": 4.2412458306403224e-05,
      "loss": 0.0535,
      "step": 2610
    },
    {
      "epoch": 5.733041575492342,
      "grad_norm": 0.41197437047958374,
      "learning_rate": 4.2054999542028174e-05,
      "loss": 0.0553,
      "step": 2620
    },
    {
      "epoch": 5.75492341356674,
      "grad_norm": 0.49191319942474365,
      "learning_rate": 4.169795689153535e-05,
      "loss": 0.0565,
      "step": 2630
    },
    {
      "epoch": 5.776805251641138,
      "grad_norm": 0.5994260907173157,
      "learning_rate": 4.134134905478566e-05,
      "loss": 0.0608,
      "step": 2640
    },
    {
      "epoch": 5.7986870897155365,
      "grad_norm": 0.5599576234817505,
      "learning_rate": 4.0985194708866904e-05,
      "loss": 0.0552,
      "step": 2650
    },
    {
      "epoch": 5.820568927789934,
      "grad_norm": 0.48255977034568787,
      "learning_rate": 4.06295125071157e-05,
      "loss": 0.0587,
      "step": 2660
    },
    {
      "epoch": 5.842450765864332,
      "grad_norm": 0.5754146575927734,
      "learning_rate": 4.027432107814032e-05,
      "loss": 0.0604,
      "step": 2670
    },
    {
      "epoch": 5.864332603938731,
      "grad_norm": 0.6725056767463684,
      "learning_rate": 3.991963902484527e-05,
      "loss": 0.0594,
      "step": 2680
    },
    {
      "epoch": 5.886214442013129,
      "grad_norm": 0.5410199761390686,
      "learning_rate": 3.956548492345681e-05,
      "loss": 0.0606,
      "step": 2690
    },
    {
      "epoch": 5.908096280087527,
      "grad_norm": 0.2633671164512634,
      "learning_rate": 3.921187732255006e-05,
      "loss": 0.053,
      "step": 2700
    },
    {
      "epoch": 5.929978118161926,
      "grad_norm": 0.3890124261379242,
      "learning_rate": 3.8858834742077624e-05,
      "loss": 0.056,
      "step": 2710
    },
    {
      "epoch": 5.951859956236324,
      "grad_norm": 0.4105083644390106,
      "learning_rate": 3.8506375672399466e-05,
      "loss": 0.0513,
      "step": 2720
    },
    {
      "epoch": 5.9737417943107225,
      "grad_norm": 0.4586251378059387,
      "learning_rate": 3.815451857331465e-05,
      "loss": 0.0557,
      "step": 2730
    },
    {
      "epoch": 5.99562363238512,
      "grad_norm": 0.23562565445899963,
      "learning_rate": 3.780328187309443e-05,
      "loss": 0.0536,
      "step": 2740
    },
    {
      "epoch": 6.017505470459518,
      "grad_norm": 0.4445761740207672,
      "learning_rate": 3.7452683967517057e-05,
      "loss": 0.0475,
      "step": 2750
    },
    {
      "epoch": 6.039387308533917,
      "grad_norm": 0.7048770785331726,
      "learning_rate": 3.710274321890443e-05,
      "loss": 0.0506,
      "step": 2760
    },
    {
      "epoch": 6.061269146608315,
      "grad_norm": 0.41764745116233826,
      "learning_rate": 3.6753477955160244e-05,
      "loss": 0.0533,
      "step": 2770
    },
    {
      "epoch": 6.083150984682713,
      "grad_norm": 0.30127957463264465,
      "learning_rate": 3.640490646881018e-05,
      "loss": 0.0571,
      "step": 2780
    },
    {
      "epoch": 6.105032822757112,
      "grad_norm": 0.3346746563911438,
      "learning_rate": 3.605704701604381e-05,
      "loss": 0.051,
      "step": 2790
    },
    {
      "epoch": 6.12691466083151,
      "grad_norm": 0.5310165882110596,
      "learning_rate": 3.5709917815758385e-05,
      "loss": 0.0546,
      "step": 2800
    },
    {
      "epoch": 6.1487964989059085,
      "grad_norm": 0.3638152778148651,
      "learning_rate": 3.536353704860479e-05,
      "loss": 0.049,
      "step": 2810
    },
    {
      "epoch": 6.170678336980306,
      "grad_norm": 0.37336069345474243,
      "learning_rate": 3.5017922856035155e-05,
      "loss": 0.0582,
      "step": 2820
    },
    {
      "epoch": 6.192560175054704,
      "grad_norm": 0.5262767672538757,
      "learning_rate": 3.467309333935283e-05,
      "loss": 0.0517,
      "step": 2830
    },
    {
      "epoch": 6.214442013129103,
      "grad_norm": 0.28704574704170227,
      "learning_rate": 3.4329066558764355e-05,
      "loss": 0.0458,
      "step": 2840
    },
    {
      "epoch": 6.236323851203501,
      "grad_norm": 0.4320286810398102,
      "learning_rate": 3.398586053243341e-05,
      "loss": 0.0489,
      "step": 2850
    },
    {
      "epoch": 6.258205689277899,
      "grad_norm": 0.43195730447769165,
      "learning_rate": 3.364349323553738e-05,
      "loss": 0.054,
      "step": 2860
    },
    {
      "epoch": 6.280087527352298,
      "grad_norm": 0.3144959509372711,
      "learning_rate": 3.330198259932567e-05,
      "loss": 0.0537,
      "step": 2870
    },
    {
      "epoch": 6.301969365426696,
      "grad_norm": 0.4727848172187805,
      "learning_rate": 3.2961346510180716e-05,
      "loss": 0.0599,
      "step": 2880
    },
    {
      "epoch": 6.3238512035010945,
      "grad_norm": 0.2918757498264313,
      "learning_rate": 3.26216028086812e-05,
      "loss": 0.057,
      "step": 2890
    },
    {
      "epoch": 6.3457330415754925,
      "grad_norm": 0.8075538873672485,
      "learning_rate": 3.2282769288667525e-05,
      "loss": 0.0583,
      "step": 2900
    },
    {
      "epoch": 6.36761487964989,
      "grad_norm": 0.25191056728363037,
      "learning_rate": 3.194486369631007e-05,
      "loss": 0.0558,
      "step": 2910
    },
    {
      "epoch": 6.389496717724289,
      "grad_norm": 0.3775458335876465,
      "learning_rate": 3.1607903729179576e-05,
      "loss": 0.0508,
      "step": 2920
    },
    {
      "epoch": 6.411378555798687,
      "grad_norm": 0.4212424159049988,
      "learning_rate": 3.127190703532036e-05,
      "loss": 0.0581,
      "step": 2930
    },
    {
      "epoch": 6.433260393873085,
      "grad_norm": 0.3469920754432678,
      "learning_rate": 3.0936891212325966e-05,
      "loss": 0.0537,
      "step": 2940
    },
    {
      "epoch": 6.455142231947484,
      "grad_norm": 0.3886861801147461,
      "learning_rate": 3.0602873806417484e-05,
      "loss": 0.0601,
      "step": 2950
    },
    {
      "epoch": 6.477024070021882,
      "grad_norm": 0.4389936923980713,
      "learning_rate": 3.0269872311524626e-05,
      "loss": 0.0554,
      "step": 2960
    },
    {
      "epoch": 6.49890590809628,
      "grad_norm": 0.4016413390636444,
      "learning_rate": 2.9937904168369446e-05,
      "loss": 0.0505,
      "step": 2970
    },
    {
      "epoch": 6.5207877461706785,
      "grad_norm": 0.4214434325695038,
      "learning_rate": 2.9606986763552935e-05,
      "loss": 0.0484,
      "step": 2980
    },
    {
      "epoch": 6.542669584245076,
      "grad_norm": 0.432650625705719,
      "learning_rate": 2.927713742864442e-05,
      "loss": 0.0544,
      "step": 2990
    },
    {
      "epoch": 6.564551422319475,
      "grad_norm": 0.22795255482196808,
      "learning_rate": 2.8948373439273734e-05,
      "loss": 0.0496,
      "step": 3000
    },
    {
      "epoch": 6.586433260393873,
      "grad_norm": 0.3206609785556793,
      "learning_rate": 2.862071201422659e-05,
      "loss": 0.0478,
      "step": 3010
    },
    {
      "epoch": 6.608315098468271,
      "grad_norm": 0.4075492024421692,
      "learning_rate": 2.8294170314542567e-05,
      "loss": 0.0528,
      "step": 3020
    },
    {
      "epoch": 6.63019693654267,
      "grad_norm": 0.2718966007232666,
      "learning_rate": 2.796876544261645e-05,
      "loss": 0.0492,
      "step": 3030
    },
    {
      "epoch": 6.652078774617068,
      "grad_norm": 0.33011844754219055,
      "learning_rate": 2.7644514441302466e-05,
      "loss": 0.0595,
      "step": 3040
    },
    {
      "epoch": 6.673960612691467,
      "grad_norm": 0.3843638002872467,
      "learning_rate": 2.732143429302165e-05,
      "loss": 0.0598,
      "step": 3050
    },
    {
      "epoch": 6.6958424507658645,
      "grad_norm": 0.2999420166015625,
      "learning_rate": 2.699954191887244e-05,
      "loss": 0.0517,
      "step": 3060
    },
    {
      "epoch": 6.717724288840262,
      "grad_norm": 0.4345828890800476,
      "learning_rate": 2.6678854177744416e-05,
      "loss": 0.0623,
      "step": 3070
    },
    {
      "epoch": 6.739606126914661,
      "grad_norm": 0.4258360266685486,
      "learning_rate": 2.6359387865435346e-05,
      "loss": 0.0461,
      "step": 3080
    },
    {
      "epoch": 6.761487964989059,
      "grad_norm": 0.44895169138908386,
      "learning_rate": 2.6041159713771522e-05,
      "loss": 0.0558,
      "step": 3090
    },
    {
      "epoch": 6.783369803063457,
      "grad_norm": 0.47770532965660095,
      "learning_rate": 2.5724186389731364e-05,
      "loss": 0.0556,
      "step": 3100
    },
    {
      "epoch": 6.805251641137856,
      "grad_norm": 0.32761141657829285,
      "learning_rate": 2.5408484494572704e-05,
      "loss": 0.0593,
      "step": 3110
    },
    {
      "epoch": 6.827133479212254,
      "grad_norm": 0.47967493534088135,
      "learning_rate": 2.509407056296303e-05,
      "loss": 0.0563,
      "step": 3120
    },
    {
      "epoch": 6.849015317286652,
      "grad_norm": 0.3491036295890808,
      "learning_rate": 2.4780961062113684e-05,
      "loss": 0.0527,
      "step": 3130
    },
    {
      "epoch": 6.8708971553610505,
      "grad_norm": 0.32572147250175476,
      "learning_rate": 2.4469172390917417e-05,
      "loss": 0.0497,
      "step": 3140
    },
    {
      "epoch": 6.892778993435448,
      "grad_norm": 0.42841556668281555,
      "learning_rate": 2.4158720879089337e-05,
      "loss": 0.0487,
      "step": 3150
    },
    {
      "epoch": 6.914660831509847,
      "grad_norm": 0.3758542835712433,
      "learning_rate": 2.384962278631182e-05,
      "loss": 0.0582,
      "step": 3160
    },
    {
      "epoch": 6.936542669584245,
      "grad_norm": 0.32348281145095825,
      "learning_rate": 2.354189430138285e-05,
      "loss": 0.0558,
      "step": 3170
    },
    {
      "epoch": 6.958424507658643,
      "grad_norm": 0.3324337601661682,
      "learning_rate": 2.323555154136815e-05,
      "loss": 0.0486,
      "step": 3180
    },
    {
      "epoch": 6.980306345733042,
      "grad_norm": 0.3877069652080536,
      "learning_rate": 2.293061055075707e-05,
      "loss": 0.0534,
      "step": 3190
    },
    {
      "epoch": 7.00218818380744,
      "grad_norm": 0.3602635860443115,
      "learning_rate": 2.2627087300622208e-05,
      "loss": 0.0528,
      "step": 3200
    },
    {
      "epoch": 7.024070021881838,
      "grad_norm": 0.4806849956512451,
      "learning_rate": 2.2324997687783102e-05,
      "loss": 0.0529,
      "step": 3210
    },
    {
      "epoch": 7.0459518599562365,
      "grad_norm": 0.4764333963394165,
      "learning_rate": 2.20243575339734e-05,
      "loss": 0.0532,
      "step": 3220
    },
    {
      "epoch": 7.067833698030634,
      "grad_norm": 0.4383291006088257,
      "learning_rate": 2.172518258501241e-05,
      "loss": 0.0482,
      "step": 3230
    },
    {
      "epoch": 7.089715536105033,
      "grad_norm": 0.4792105257511139,
      "learning_rate": 2.1427488509980402e-05,
      "loss": 0.0508,
      "step": 3240
    },
    {
      "epoch": 7.111597374179431,
      "grad_norm": 0.4630696475505829,
      "learning_rate": 2.1131290900397793e-05,
      "loss": 0.0534,
      "step": 3250
    },
    {
      "epoch": 7.133479212253829,
      "grad_norm": 0.5735715627670288,
      "learning_rate": 2.0836605269408722e-05,
      "loss": 0.0517,
      "step": 3260
    },
    {
      "epoch": 7.155361050328228,
      "grad_norm": 0.3468039929866791,
      "learning_rate": 2.05434470509685e-05,
      "loss": 0.0485,
      "step": 3270
    },
    {
      "epoch": 7.177242888402626,
      "grad_norm": 0.27370214462280273,
      "learning_rate": 2.025183159903526e-05,
      "loss": 0.0564,
      "step": 3280
    },
    {
      "epoch": 7.199124726477024,
      "grad_norm": 0.43867847323417664,
      "learning_rate": 1.9961774186765796e-05,
      "loss": 0.0454,
      "step": 3290
    },
    {
      "epoch": 7.2210065645514225,
      "grad_norm": 0.36127227544784546,
      "learning_rate": 1.9673290005715683e-05,
      "loss": 0.0539,
      "step": 3300
    },
    {
      "epoch": 7.2428884026258205,
      "grad_norm": 0.3754895329475403,
      "learning_rate": 1.93863941650436e-05,
      "loss": 0.0466,
      "step": 3310
    },
    {
      "epoch": 7.264770240700219,
      "grad_norm": 0.3637754023075104,
      "learning_rate": 1.910110169071995e-05,
      "loss": 0.0494,
      "step": 3320
    },
    {
      "epoch": 7.286652078774617,
      "grad_norm": 0.5620509386062622,
      "learning_rate": 1.8817427524739988e-05,
      "loss": 0.046,
      "step": 3330
    },
    {
      "epoch": 7.308533916849015,
      "grad_norm": 0.5723752379417419,
      "learning_rate": 1.8535386524341225e-05,
      "loss": 0.0489,
      "step": 3340
    },
    {
      "epoch": 7.330415754923414,
      "grad_norm": 0.5323576927185059,
      "learning_rate": 1.8254993461225172e-05,
      "loss": 0.0522,
      "step": 3350
    },
    {
      "epoch": 7.352297592997812,
      "grad_norm": 0.40650686621665955,
      "learning_rate": 1.797626302078383e-05,
      "loss": 0.0489,
      "step": 3360
    },
    {
      "epoch": 7.37417943107221,
      "grad_norm": 0.4388726055622101,
      "learning_rate": 1.7699209801330468e-05,
      "loss": 0.0486,
      "step": 3370
    },
    {
      "epoch": 7.3960612691466086,
      "grad_norm": 0.292429119348526,
      "learning_rate": 1.7423848313335084e-05,
      "loss": 0.0464,
      "step": 3380
    },
    {
      "epoch": 7.4179431072210065,
      "grad_norm": 0.37241464853286743,
      "learning_rate": 1.715019297866441e-05,
      "loss": 0.0509,
      "step": 3390
    },
    {
      "epoch": 7.439824945295404,
      "grad_norm": 0.30430710315704346,
      "learning_rate": 1.6878258129826575e-05,
| "loss": 0.0501, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 7.461706783369803, | |
| "grad_norm": 0.3546319305896759, | |
| "learning_rate": 1.660805800922049e-05, | |
| "loss": 0.0563, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 7.483588621444201, | |
| "grad_norm": 0.3407646417617798, | |
| "learning_rate": 1.6339606768389792e-05, | |
| "loss": 0.048, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 7.5054704595186, | |
| "grad_norm": 0.31406334042549133, | |
| "learning_rate": 1.6072918467281872e-05, | |
| "loss": 0.0481, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 7.527352297592998, | |
| "grad_norm": 0.4938783645629883, | |
| "learning_rate": 1.5808007073511317e-05, | |
| "loss": 0.0477, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 7.549234135667396, | |
| "grad_norm": 0.3935304880142212, | |
| "learning_rate": 1.55448864616284e-05, | |
| "loss": 0.0478, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 7.571115973741795, | |
| "grad_norm": 0.5982438921928406, | |
| "learning_rate": 1.528357041239248e-05, | |
| "loss": 0.0552, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 7.5929978118161925, | |
| "grad_norm": 0.5672005414962769, | |
| "learning_rate": 1.502407261205019e-05, | |
| "loss": 0.0483, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 7.614879649890591, | |
| "grad_norm": 0.539139986038208, | |
| "learning_rate": 1.4766406651618626e-05, | |
| "loss": 0.0503, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 7.636761487964989, | |
| "grad_norm": 0.3828607201576233, | |
| "learning_rate": 1.4510586026173557e-05, | |
| "loss": 0.0547, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 7.658643326039387, | |
| "grad_norm": 0.40856969356536865, | |
| "learning_rate": 1.4256624134142604e-05, | |
| "loss": 0.046, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 7.680525164113786, | |
| "grad_norm": 0.8226433396339417, | |
| "learning_rate": 1.4004534276603538e-05, | |
| "loss": 0.0531, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 7.702407002188184, | |
| "grad_norm": 0.393865704536438, | |
| "learning_rate": 1.3754329656587556e-05, | |
| "loss": 0.0487, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 7.724288840262582, | |
| "grad_norm": 0.2557559907436371, | |
| "learning_rate": 1.3506023378387944e-05, | |
| "loss": 0.0459, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 7.746170678336981, | |
| "grad_norm": 0.3025050461292267, | |
| "learning_rate": 1.3259628446873601e-05, | |
| "loss": 0.0516, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 7.7680525164113785, | |
| "grad_norm": 0.46884235739707947, | |
| "learning_rate": 1.3015157766807939e-05, | |
| "loss": 0.0541, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 7.789934354485776, | |
| "grad_norm": 0.4090341627597809, | |
| "learning_rate": 1.2772624142173123e-05, | |
| "loss": 0.0547, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 7.811816192560175, | |
| "grad_norm": 0.39957642555236816, | |
| "learning_rate": 1.2532040275499301e-05, | |
| "loss": 0.0463, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 7.833698030634573, | |
| "grad_norm": 0.4493909776210785, | |
| "learning_rate": 1.2293418767199449e-05, | |
| "loss": 0.0522, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 7.855579868708972, | |
| "grad_norm": 0.40386393666267395, | |
| "learning_rate": 1.2056772114909365e-05, | |
| "loss": 0.0483, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 7.87746170678337, | |
| "grad_norm": 0.4808233380317688, | |
| "learning_rate": 1.182211271283315e-05, | |
| "loss": 0.0485, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 7.899343544857768, | |
| "grad_norm": 0.35203149914741516, | |
| "learning_rate": 1.1589452851094062e-05, | |
| "loss": 0.0489, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 7.921225382932167, | |
| "grad_norm": 0.37248197197914124, | |
| "learning_rate": 1.135880471509076e-05, | |
| "loss": 0.055, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 7.9431072210065645, | |
| "grad_norm": 0.36168205738067627, | |
| "learning_rate": 1.1130180384859256e-05, | |
| "loss": 0.0445, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 7.964989059080962, | |
| "grad_norm": 0.6294784545898438, | |
| "learning_rate": 1.0903591834440097e-05, | |
| "loss": 0.0494, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 7.986870897155361, | |
| "grad_norm": 0.46873342990875244, | |
| "learning_rate": 1.0679050931251238e-05, | |
| "loss": 0.0508, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 8.00875273522976, | |
| "grad_norm": 0.4308367967605591, | |
| "learning_rate": 1.0456569435466617e-05, | |
| "loss": 0.0524, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 8.030634573304157, | |
| "grad_norm": 0.45153898000717163, | |
| "learning_rate": 1.0236158999400053e-05, | |
| "loss": 0.0475, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 8.052516411378555, | |
| "grad_norm": 0.6072208881378174, | |
| "learning_rate": 1.0017831166895104e-05, | |
| "loss": 0.0478, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 8.074398249452955, | |
| "grad_norm": 0.34941720962524414, | |
| "learning_rate": 9.801597372720395e-06, | |
| "loss": 0.0466, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 8.096280087527353, | |
| "grad_norm": 0.32349899411201477, | |
| "learning_rate": 9.587468941970751e-06, | |
| "loss": 0.0434, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 8.11816192560175, | |
| "grad_norm": 0.4987752437591553, | |
| "learning_rate": 9.375457089474043e-06, | |
| "loss": 0.0457, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 8.140043763676148, | |
| "grad_norm": 0.3384380638599396, | |
| "learning_rate": 9.165572919203847e-06, | |
| "loss": 0.0539, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 8.161925601750546, | |
| "grad_norm": 0.4982927739620209, | |
| "learning_rate": 8.957827423697823e-06, | |
| "loss": 0.0482, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 8.183807439824946, | |
| "grad_norm": 0.26903849840164185, | |
| "learning_rate": 8.75223148348207e-06, | |
| "loss": 0.0529, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 8.205689277899344, | |
| "grad_norm": 0.49081680178642273, | |
| "learning_rate": 8.548795866501164e-06, | |
| "loss": 0.0444, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 8.227571115973742, | |
| "grad_norm": 0.37901756167411804, | |
| "learning_rate": 8.347531227554323e-06, | |
| "loss": 0.0457, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 8.24945295404814, | |
| "grad_norm": 0.40646421909332275, | |
| "learning_rate": 8.148448107737227e-06, | |
| "loss": 0.0447, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 8.271334792122538, | |
| "grad_norm": 0.6366869211196899, | |
| "learning_rate": 7.951556933890047e-06, | |
| "loss": 0.0555, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 8.293216630196937, | |
| "grad_norm": 0.508129358291626, | |
| "learning_rate": 7.756868018051322e-06, | |
| "loss": 0.0461, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 8.315098468271335, | |
| "grad_norm": 0.317501425743103, | |
| "learning_rate": 7.56439155691781e-06, | |
| "loss": 0.0456, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 8.336980306345733, | |
| "grad_norm": 0.34288403391838074, | |
| "learning_rate": 7.374137631310512e-06, | |
| "loss": 0.0498, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 8.358862144420131, | |
| "grad_norm": 0.44967618584632874, | |
| "learning_rate": 7.186116205646687e-06, | |
| "loss": 0.0475, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 8.38074398249453, | |
| "grad_norm": 0.6020100712776184, | |
| "learning_rate": 7.000337127417938e-06, | |
| "loss": 0.0489, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 8.402625820568927, | |
| "grad_norm": 0.37260666489601135, | |
| "learning_rate": 6.816810126674494e-06, | |
| "loss": 0.0483, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 8.424507658643327, | |
| "grad_norm": 0.4538766145706177, | |
| "learning_rate": 6.635544815515576e-06, | |
| "loss": 0.0448, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 8.446389496717725, | |
| "grad_norm": 0.4207398593425751, | |
| "learning_rate": 6.456550687586016e-06, | |
| "loss": 0.0484, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 8.468271334792123, | |
| "grad_norm": 0.3660431504249573, | |
| "learning_rate": 6.2798371175789405e-06, | |
| "loss": 0.0557, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 8.49015317286652, | |
| "grad_norm": 0.3074619472026825, | |
| "learning_rate": 6.105413360744883e-06, | |
| "loss": 0.0506, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 8.512035010940918, | |
| "grad_norm": 0.41881874203681946, | |
| "learning_rate": 5.933288552407018e-06, | |
| "loss": 0.0468, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 8.533916849015318, | |
| "grad_norm": 0.6764019131660461, | |
| "learning_rate": 5.763471707482638e-06, | |
| "loss": 0.0459, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 8.555798687089716, | |
| "grad_norm": 0.5430788993835449, | |
| "learning_rate": 5.5959717200110896e-06, | |
| "loss": 0.047, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 8.577680525164114, | |
| "grad_norm": 0.256409227848053, | |
| "learning_rate": 5.430797362687906e-06, | |
| "loss": 0.0458, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 8.599562363238512, | |
| "grad_norm": 0.3550792932510376, | |
| "learning_rate": 5.267957286405368e-06, | |
| "loss": 0.0461, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 8.62144420131291, | |
| "grad_norm": 0.27031660079956055, | |
| "learning_rate": 5.107460019799387e-06, | |
| "loss": 0.044, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 8.643326039387308, | |
| "grad_norm": 0.5709757208824158, | |
| "learning_rate": 4.949313968802871e-06, | |
| "loss": 0.0519, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 8.665207877461707, | |
| "grad_norm": 0.42753544449806213, | |
| "learning_rate": 4.793527416205429e-06, | |
| "loss": 0.0422, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 8.687089715536105, | |
| "grad_norm": 0.6058006286621094, | |
| "learning_rate": 4.640108521219561e-06, | |
| "loss": 0.0518, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 8.708971553610503, | |
| "grad_norm": 0.3088013827800751, | |
| "learning_rate": 4.489065319053376e-06, | |
| "loss": 0.0475, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 8.730853391684901, | |
| "grad_norm": 0.6196874976158142, | |
| "learning_rate": 4.340405720489721e-06, | |
| "loss": 0.0455, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 8.752735229759299, | |
| "grad_norm": 0.3027147054672241, | |
| "learning_rate": 4.194137511471824e-06, | |
| "loss": 0.0433, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 8.774617067833699, | |
| "grad_norm": 0.34994643926620483, | |
| "learning_rate": 4.0502683526955745e-06, | |
| "loss": 0.0438, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 8.796498905908097, | |
| "grad_norm": 0.3832307457923889, | |
| "learning_rate": 3.908805779208269e-06, | |
| "loss": 0.0415, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 8.818380743982495, | |
| "grad_norm": 0.2572057545185089, | |
| "learning_rate": 3.7697572000139626e-06, | |
| "loss": 0.0491, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 8.840262582056893, | |
| "grad_norm": 0.5285372138023376, | |
| "learning_rate": 3.633129897685439e-06, | |
| "loss": 0.0498, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 8.86214442013129, | |
| "grad_norm": 0.44389864802360535, | |
| "learning_rate": 3.4989310279827938e-06, | |
| "loss": 0.0495, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 8.88402625820569, | |
| "grad_norm": 0.3135242760181427, | |
| "learning_rate": 3.3671676194786504e-06, | |
| "loss": 0.0454, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 8.905908096280088, | |
| "grad_norm": 0.29390549659729004, | |
| "learning_rate": 3.2378465731900064e-06, | |
| "loss": 0.0438, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 8.927789934354486, | |
| "grad_norm": 0.4052577018737793, | |
| "learning_rate": 3.1109746622168922e-06, | |
| "loss": 0.0454, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 8.949671772428884, | |
| "grad_norm": 0.7273414134979248, | |
| "learning_rate": 2.9865585313875565e-06, | |
| "loss": 0.0463, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 8.971553610503282, | |
| "grad_norm": 0.3757275640964508, | |
| "learning_rate": 2.8646046969104335e-06, | |
| "loss": 0.0457, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 8.99343544857768, | |
| "grad_norm": 0.4105775058269501, | |
| "learning_rate": 2.7451195460329315e-06, | |
| "loss": 0.0497, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 9.01531728665208, | |
| "grad_norm": 0.3794170916080475, | |
| "learning_rate": 2.6281093367068743e-06, | |
| "loss": 0.0423, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 9.037199124726477, | |
| "grad_norm": 0.28719809651374817, | |
| "learning_rate": 2.513580197260712e-06, | |
| "loss": 0.0441, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 9.059080962800875, | |
| "grad_norm": 0.38277560472488403, | |
| "learning_rate": 2.401538126078606e-06, | |
| "loss": 0.0423, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 9.080962800875273, | |
| "grad_norm": 0.43671509623527527, | |
| "learning_rate": 2.2919889912862313e-06, | |
| "loss": 0.0436, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 9.102844638949671, | |
| "grad_norm": 0.3907598853111267, | |
| "learning_rate": 2.1849385304434644e-06, | |
| "loss": 0.0485, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 9.12472647702407, | |
| "grad_norm": 0.38813626766204834, | |
| "learning_rate": 2.080392350243837e-06, | |
| "loss": 0.0443, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 9.146608315098469, | |
| "grad_norm": 0.4304605722427368, | |
| "learning_rate": 1.978355926220965e-06, | |
| "loss": 0.0478, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 9.168490153172867, | |
| "grad_norm": 0.3653641939163208, | |
| "learning_rate": 1.8788346024617044e-06, | |
| "loss": 0.0481, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 9.190371991247265, | |
| "grad_norm": 0.49059316515922546, | |
| "learning_rate": 1.7818335913262708e-06, | |
| "loss": 0.0482, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 9.212253829321662, | |
| "grad_norm": 0.6487129330635071, | |
| "learning_rate": 1.6873579731752798e-06, | |
| "loss": 0.0439, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 9.23413566739606, | |
| "grad_norm": 0.3429771959781647, | |
| "learning_rate": 1.5954126961036208e-06, | |
| "loss": 0.0452, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 9.25601750547046, | |
| "grad_norm": 0.23548723757266998, | |
| "learning_rate": 1.5060025756813423e-06, | |
| "loss": 0.0419, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 9.277899343544858, | |
| "grad_norm": 0.3810988962650299, | |
| "learning_rate": 1.4191322947014197e-06, | |
| "loss": 0.0467, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 9.299781181619256, | |
| "grad_norm": 0.4621908664703369, | |
| "learning_rate": 1.3348064029344908e-06, | |
| "loss": 0.0521, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 9.321663019693654, | |
| "grad_norm": 0.6988676190376282, | |
| "learning_rate": 1.2530293168905916e-06, | |
| "loss": 0.0502, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 9.343544857768052, | |
| "grad_norm": 0.2640683352947235, | |
| "learning_rate": 1.1738053195878174e-06, | |
| "loss": 0.0494, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 9.365426695842451, | |
| "grad_norm": 0.3481920063495636, | |
| "learning_rate": 1.097138560328015e-06, | |
| "loss": 0.0477, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 9.38730853391685, | |
| "grad_norm": 0.42929619550704956, | |
| "learning_rate": 1.023033054479472e-06, | |
| "loss": 0.0487, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 9.409190371991247, | |
| "grad_norm": 0.5334005951881409, | |
| "learning_rate": 9.51492683266586e-07, | |
| "loss": 0.051, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 9.431072210065645, | |
| "grad_norm": 0.3841243088245392, | |
| "learning_rate": 8.825211935666433e-07, | |
| "loss": 0.0478, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 9.452954048140043, | |
| "grad_norm": 0.34734562039375305, | |
| "learning_rate": 8.161221977135092e-07, | |
| "loss": 0.0438, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 9.474835886214443, | |
| "grad_norm": 0.6244210004806519, | |
| "learning_rate": 7.522991733084905e-07, | |
| "loss": 0.0473, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 9.49671772428884, | |
| "grad_norm": 0.4562375247478485, | |
| "learning_rate": 6.910554630381815e-07, | |
| "loss": 0.0486, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 9.518599562363239, | |
| "grad_norm": 0.8920162916183472, | |
| "learning_rate": 6.323942744993761e-07, | |
| "loss": 0.0479, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 9.540481400437637, | |
| "grad_norm": 0.46890518069267273, | |
| "learning_rate": 5.763186800310849e-07, | |
| "loss": 0.0473, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 9.562363238512035, | |
| "grad_norm": 0.37694522738456726, | |
| "learning_rate": 5.228316165536429e-07, | |
| "loss": 0.0464, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 9.584245076586432, | |
| "grad_norm": 0.42408043146133423, | |
| "learning_rate": 4.7193588541484813e-07, | |
| "loss": 0.0494, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 9.606126914660832, | |
| "grad_norm": 0.3436243534088135, | |
| "learning_rate": 4.2363415224329075e-07, | |
| "loss": 0.0446, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 9.62800875273523, | |
| "grad_norm": 0.32050296664237976, | |
| "learning_rate": 3.779289468086922e-07, | |
| "loss": 0.0506, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 9.649890590809628, | |
| "grad_norm": 0.5982491970062256, | |
| "learning_rate": 3.3482266288946686e-07, | |
| "loss": 0.0458, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 9.671772428884026, | |
| "grad_norm": 0.5433162450790405, | |
| "learning_rate": 2.9431755814729457e-07, | |
| "loss": 0.0465, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 9.693654266958424, | |
| "grad_norm": 0.4410666823387146, | |
| "learning_rate": 2.5641575400890403e-07, | |
| "loss": 0.0452, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 9.715536105032823, | |
| "grad_norm": 0.2922728955745697, | |
| "learning_rate": 2.2111923555498405e-07, | |
| "loss": 0.0464, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 9.737417943107221, | |
| "grad_norm": 0.4933525323867798, | |
| "learning_rate": 1.8842985141617197e-07, | |
| "loss": 0.048, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 9.75929978118162, | |
| "grad_norm": 0.7095863223075867, | |
| "learning_rate": 1.5834931367625928e-07, | |
| "loss": 0.0462, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 9.781181619256017, | |
| "grad_norm": 0.5306093096733093, | |
| "learning_rate": 1.3087919778252966e-07, | |
| "loss": 0.0496, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 9.803063457330415, | |
| "grad_norm": 0.31279975175857544, | |
| "learning_rate": 1.060209424632308e-07, | |
| "loss": 0.0431, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 9.824945295404813, | |
| "grad_norm": 0.5052768588066101, | |
| "learning_rate": 8.377584965221785e-08, | |
| "loss": 0.0493, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 9.846827133479213, | |
| "grad_norm": 0.4014386534690857, | |
| "learning_rate": 6.414508442078026e-08, | |
| "loss": 0.0478, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 9.86870897155361, | |
| "grad_norm": 0.2997543215751648, | |
| "learning_rate": 4.7129674916618346e-08, | |
| "loss": 0.0451, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 9.890590809628009, | |
| "grad_norm": 0.5213238000869751, | |
| "learning_rate": 3.273051230999191e-08, | |
| "loss": 0.0436, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 9.912472647702407, | |
| "grad_norm": 0.5938975811004639, | |
| "learning_rate": 2.0948350747046487e-08, | |
| "loss": 0.0417, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 9.934354485776804, | |
| "grad_norm": 0.26146259903907776, | |
| "learning_rate": 1.1783807310300488e-08, | |
| "loss": 0.0456, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 9.956236323851204, | |
| "grad_norm": 0.238664910197258, | |
| "learning_rate": 5.237361986365486e-09, | |
| "loss": 0.0456, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 9.978118161925602, | |
| "grad_norm": 0.3414470851421356, | |
| "learning_rate": 1.30935764076634e-09, | |
| "loss": 0.0471, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.492483526468277, | |
| "learning_rate": 0.0, | |
| "loss": 0.0444, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "step": 4570, | |
| "total_flos": 4.682127244666608e+17, | |
| "train_loss": 0.07182003624199255, | |
| "train_runtime": 4395.4484, | |
| "train_samples_per_second": 50.928, | |
| "train_steps_per_second": 1.04 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4570, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 10000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4.682127244666608e+17, | |
| "train_batch_size": 49, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
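
The state above is the `trainer_state.json` that Hugging Face's `Trainer` writes into its output/checkpoint directory. A minimal sketch of how one might consume it is below — the filename/path is an assumption, and the throughput cross-check simply re-derives the summary numbers already present in the file (1.04 steps/s × batch size 49 ≈ 50.93 samples/s, matching the reported 50.928).

```python
import json

# Path is an assumption: Hugging Face's Trainer saves this file as
# trainer_state.json in the checkpoint / output directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"/"learning_rate"; the last entry of
# log_history holds the aggregate summary instead, so filter on "loss".
logs = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

print(f"logged steps: {len(logs)}, final step loss: {logs[-1]['loss']}")
print(f"mean train loss: {summary['train_loss']:.4f}")

# Sanity-check the reported throughput: steps/sec * batch size should
# roughly equal samples/sec (here 1.04 * 49 ≈ 50.93 vs. 50.928 reported).
derived = summary["train_steps_per_second"] * state["train_batch_size"]
print(f"reported {summary['train_samples_per_second']} samples/s, "
      f"derived ≈ {derived:.2f} samples/s")
```

The same `logs` list (step vs. loss vs. learning rate) is what one would feed to a plotting library to visualize the loss curve and the linear learning-rate decay visible in the entries above.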