| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.7378939277478862, |
| "eval_steps": 500, |
| "global_step": 3000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004919292851652575, |
| "grad_norm": 2.650216579437256, |
| "learning_rate": 1.557377049180328e-06, |
| "loss": 1.9055, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.00983858570330515, |
| "grad_norm": 2.133456230163574, |
| "learning_rate": 3.1967213114754097e-06, |
| "loss": 1.8043, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.014757878554957724, |
| "grad_norm": 2.4590532779693604, |
| "learning_rate": 4.836065573770492e-06, |
| "loss": 1.7683, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0196771714066103, |
| "grad_norm": 1.522373080253601, |
| "learning_rate": 6.475409836065574e-06, |
| "loss": 1.6977, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.024596464258262875, |
| "grad_norm": 1.6893309354782104, |
| "learning_rate": 8.114754098360657e-06, |
| "loss": 1.6298, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.02951575710991545, |
| "grad_norm": 1.5092189311981201, |
| "learning_rate": 9.754098360655738e-06, |
| "loss": 1.4915, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.03443504996156802, |
| "grad_norm": 1.3016860485076904, |
| "learning_rate": 1.139344262295082e-05, |
| "loss": 1.3592, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.0393543428132206, |
| "grad_norm": 1.8532716035842896, |
| "learning_rate": 1.3032786885245902e-05, |
| "loss": 1.2786, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.044273635664873176, |
| "grad_norm": 1.5091300010681152, |
| "learning_rate": 1.4672131147540986e-05, |
| "loss": 1.1882, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.04919292851652575, |
| "grad_norm": 2.3855371475219727, |
| "learning_rate": 1.6311475409836068e-05, |
| "loss": 1.2969, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.05411222136817832, |
| "grad_norm": 1.8920749425888062, |
| "learning_rate": 1.7950819672131146e-05, |
| "loss": 1.1571, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.0590315142198309, |
| "grad_norm": 1.7724186182022095, |
| "learning_rate": 1.9590163934426232e-05, |
| "loss": 1.1782, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.06395080707148347, |
| "grad_norm": 1.53757643699646, |
| "learning_rate": 2.122950819672131e-05, |
| "loss": 1.1917, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.06887009992313604, |
| "grad_norm": 1.773545503616333, |
| "learning_rate": 2.2868852459016393e-05, |
| "loss": 1.1343, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.07378939277478862, |
| "grad_norm": 2.211615800857544, |
| "learning_rate": 2.4508196721311478e-05, |
| "loss": 1.1242, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.0787086856264412, |
| "grad_norm": 1.4129002094268799, |
| "learning_rate": 2.614754098360656e-05, |
| "loss": 1.0075, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.08362797847809378, |
| "grad_norm": 1.8837412595748901, |
| "learning_rate": 2.778688524590164e-05, |
| "loss": 1.0791, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.08854727132974635, |
| "grad_norm": 2.0091936588287354, |
| "learning_rate": 2.9426229508196725e-05, |
| "loss": 1.0921, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.09346656418139893, |
| "grad_norm": 2.447704792022705, |
| "learning_rate": 3.106557377049181e-05, |
| "loss": 1.0776, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.0983858570330515, |
| "grad_norm": 2.063652753829956, |
| "learning_rate": 3.2704918032786885e-05, |
| "loss": 1.0928, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.10330514988470407, |
| "grad_norm": 1.985748529434204, |
| "learning_rate": 3.434426229508197e-05, |
| "loss": 1.1167, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.10822444273635665, |
| "grad_norm": 2.069704532623291, |
| "learning_rate": 3.5983606557377056e-05, |
| "loss": 1.1432, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.11314373558800922, |
| "grad_norm": 1.5993927717208862, |
| "learning_rate": 3.762295081967213e-05, |
| "loss": 1.0484, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.1180630284396618, |
| "grad_norm": 1.8127434253692627, |
| "learning_rate": 3.9262295081967214e-05, |
| "loss": 1.0379, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.12298232129131437, |
| "grad_norm": 1.3972879648208618, |
| "learning_rate": 4.09016393442623e-05, |
| "loss": 1.0084, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.12790161414296694, |
| "grad_norm": 2.6233468055725098, |
| "learning_rate": 4.254098360655738e-05, |
| "loss": 0.9489, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.13282090699461951, |
| "grad_norm": 1.9032576084136963, |
| "learning_rate": 4.4180327868852463e-05, |
| "loss": 0.9932, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.1377401998462721, |
| "grad_norm": 2.0064117908477783, |
| "learning_rate": 4.581967213114754e-05, |
| "loss": 0.987, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.14265949269792466, |
| "grad_norm": 1.6437580585479736, |
| "learning_rate": 4.745901639344262e-05, |
| "loss": 1.0212, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.14757878554957723, |
| "grad_norm": 1.3102104663848877, |
| "learning_rate": 4.9098360655737706e-05, |
| "loss": 1.0538, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.15249807840122984, |
| "grad_norm": 1.6655702590942383, |
| "learning_rate": 5.073770491803279e-05, |
| "loss": 0.9762, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.1574173712528824, |
| "grad_norm": 1.488173007965088, |
| "learning_rate": 5.237704918032788e-05, |
| "loss": 1.0271, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.16233666410453498, |
| "grad_norm": 2.1534035205841064, |
| "learning_rate": 5.401639344262295e-05, |
| "loss": 1.0124, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.16725595695618756, |
| "grad_norm": 2.0473647117614746, |
| "learning_rate": 5.5655737704918035e-05, |
| "loss": 1.0228, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.17217524980784013, |
| "grad_norm": 1.5801570415496826, |
| "learning_rate": 5.7295081967213114e-05, |
| "loss": 0.9668, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.1770945426594927, |
| "grad_norm": 1.749709963798523, |
| "learning_rate": 5.89344262295082e-05, |
| "loss": 1.0632, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.18201383551114528, |
| "grad_norm": 2.072209596633911, |
| "learning_rate": 6.0573770491803284e-05, |
| "loss": 0.9709, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.18693312836279785, |
| "grad_norm": 1.6242225170135498, |
| "learning_rate": 6.221311475409836e-05, |
| "loss": 1.0601, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.19185242121445042, |
| "grad_norm": 1.41958749294281, |
| "learning_rate": 6.385245901639346e-05, |
| "loss": 0.9846, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.196771714066103, |
| "grad_norm": 1.4413362741470337, |
| "learning_rate": 6.549180327868852e-05, |
| "loss": 1.0331, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.20169100691775557, |
| "grad_norm": 1.8630566596984863, |
| "learning_rate": 6.713114754098361e-05, |
| "loss": 1.0214, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.20661029976940815, |
| "grad_norm": 1.689773678779602, |
| "learning_rate": 6.877049180327869e-05, |
| "loss": 0.9745, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.21152959262106072, |
| "grad_norm": 1.9186772108078003, |
| "learning_rate": 7.040983606557377e-05, |
| "loss": 1.0628, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.2164488854727133, |
| "grad_norm": 1.933374285697937, |
| "learning_rate": 7.204918032786886e-05, |
| "loss": 0.9864, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.22136817832436587, |
| "grad_norm": 1.5824335813522339, |
| "learning_rate": 7.368852459016394e-05, |
| "loss": 1.0016, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.22628747117601844, |
| "grad_norm": 1.591445803642273, |
| "learning_rate": 7.532786885245902e-05, |
| "loss": 1.005, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.231206764027671, |
| "grad_norm": 1.7586395740509033, |
| "learning_rate": 7.69672131147541e-05, |
| "loss": 0.9775, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.2361260568793236, |
| "grad_norm": 2.071493148803711, |
| "learning_rate": 7.860655737704918e-05, |
| "loss": 1.0228, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.24104534973097616, |
| "grad_norm": 1.3450851440429688, |
| "learning_rate": 8.024590163934427e-05, |
| "loss": 1.0164, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.24596464258262873, |
| "grad_norm": 1.8442620038986206, |
| "learning_rate": 8.188524590163935e-05, |
| "loss": 0.9911, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.25088393543428134, |
| "grad_norm": 1.098396897315979, |
| "learning_rate": 8.352459016393444e-05, |
| "loss": 0.9718, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.2558032282859339, |
| "grad_norm": 1.005915880203247, |
| "learning_rate": 8.51639344262295e-05, |
| "loss": 0.9436, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.2607225211375865, |
| "grad_norm": 1.113786220550537, |
| "learning_rate": 8.68032786885246e-05, |
| "loss": 1.0079, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.26564181398923903, |
| "grad_norm": 1.1104156970977783, |
| "learning_rate": 8.844262295081968e-05, |
| "loss": 0.9534, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.27056110684089163, |
| "grad_norm": 0.9686466455459595, |
| "learning_rate": 9.008196721311476e-05, |
| "loss": 0.9199, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.2754803996925442, |
| "grad_norm": 1.78913414478302, |
| "learning_rate": 9.172131147540985e-05, |
| "loss": 0.9132, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.2803996925441968, |
| "grad_norm": 1.5726219415664673, |
| "learning_rate": 9.336065573770493e-05, |
| "loss": 1.053, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.2853189853958493, |
| "grad_norm": 1.7471468448638916, |
| "learning_rate": 9.5e-05, |
| "loss": 0.9108, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.2902382782475019, |
| "grad_norm": 1.045501947402954, |
| "learning_rate": 9.663934426229508e-05, |
| "loss": 1.0033, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.29515757109915447, |
| "grad_norm": 1.3126252889633179, |
| "learning_rate": 9.827868852459016e-05, |
| "loss": 0.9574, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.30007686395080707, |
| "grad_norm": 1.1474781036376953, |
| "learning_rate": 9.991803278688525e-05, |
| "loss": 0.9781, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.3049961568024597, |
| "grad_norm": 1.435939073562622, |
| "learning_rate": 9.999926090641448e-05, |
| "loss": 0.9573, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.3099154496541122, |
| "grad_norm": 2.488921642303467, |
| "learning_rate": 9.999688600430955e-05, |
| "loss": 1.02, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.3148347425057648, |
| "grad_norm": 1.1515672206878662, |
| "learning_rate": 9.999287332412564e-05, |
| "loss": 0.9107, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.31975403535741737, |
| "grad_norm": 1.6522116661071777, |
| "learning_rate": 9.998722299730854e-05, |
| "loss": 0.8688, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.32467332820906997, |
| "grad_norm": 1.14382803440094, |
| "learning_rate": 9.997993520894937e-05, |
| "loss": 0.9889, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.3295926210607225, |
| "grad_norm": 1.205665111541748, |
| "learning_rate": 9.997101019777864e-05, |
| "loss": 1.014, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.3345119139123751, |
| "grad_norm": 1.1363279819488525, |
| "learning_rate": 9.996044825615826e-05, |
| "loss": 0.9774, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.33943120676402766, |
| "grad_norm": 1.490625023841858, |
| "learning_rate": 9.994824973007218e-05, |
| "loss": 1.0072, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.34435049961568026, |
| "grad_norm": 1.5652107000350952, |
| "learning_rate": 9.993441501911479e-05, |
| "loss": 1.0104, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.3492697924673328, |
| "grad_norm": 1.1277583837509155, |
| "learning_rate": 9.991894457647808e-05, |
| "loss": 0.9207, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.3541890853189854, |
| "grad_norm": 1.0140403509140015, |
| "learning_rate": 9.990183890893663e-05, |
| "loss": 0.9267, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.35910837817063795, |
| "grad_norm": 1.0925248861312866, |
| "learning_rate": 9.98830985768311e-05, |
| "loss": 0.989, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.36402767102229056, |
| "grad_norm": 1.3068804740905762, |
| "learning_rate": 9.986272419404982e-05, |
| "loss": 0.9893, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.3689469638739431, |
| "grad_norm": 1.398160457611084, |
| "learning_rate": 9.98407164280087e-05, |
| "loss": 0.9825, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.3738662567255957, |
| "grad_norm": 1.2048150300979614, |
| "learning_rate": 9.981707599962937e-05, |
| "loss": 0.916, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.37878554957724825, |
| "grad_norm": 0.9273918271064758, |
| "learning_rate": 9.979180368331558e-05, |
| "loss": 0.9421, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.38370484242890085, |
| "grad_norm": 1.247428297996521, |
| "learning_rate": 9.97649003069278e-05, |
| "loss": 0.9482, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.3886241352805534, |
| "grad_norm": 1.952083706855774, |
| "learning_rate": 9.973636675175613e-05, |
| "loss": 0.9189, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.393543428132206, |
| "grad_norm": 0.979943037033081, |
| "learning_rate": 9.970620395249137e-05, |
| "loss": 0.9506, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.39846272098385854, |
| "grad_norm": 0.9653918147087097, |
| "learning_rate": 9.967441289719452e-05, |
| "loss": 0.927, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.40338201383551114, |
| "grad_norm": 1.7574175596237183, |
| "learning_rate": 9.96409946272643e-05, |
| "loss": 0.9486, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.40830130668716375, |
| "grad_norm": 0.8972555994987488, |
| "learning_rate": 9.960595023740307e-05, |
| "loss": 0.9739, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.4132205995388163, |
| "grad_norm": 0.8190364837646484, |
| "learning_rate": 9.956928087558102e-05, |
| "loss": 0.943, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.4181398923904689, |
| "grad_norm": 0.9662739634513855, |
| "learning_rate": 9.953098774299847e-05, |
| "loss": 0.982, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.42305918524212144, |
| "grad_norm": 0.9831930994987488, |
| "learning_rate": 9.949107209404665e-05, |
| "loss": 1.0335, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.42797847809377404, |
| "grad_norm": 0.8506195545196533, |
| "learning_rate": 9.944953523626643e-05, |
| "loss": 0.9193, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.4328977709454266, |
| "grad_norm": 0.9357776641845703, |
| "learning_rate": 9.940637853030572e-05, |
| "loss": 0.9683, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.4378170637970792, |
| "grad_norm": 1.3345489501953125, |
| "learning_rate": 9.936160338987466e-05, |
| "loss": 0.956, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.44273635664873173, |
| "grad_norm": 0.8419784307479858, |
| "learning_rate": 9.931521128169947e-05, |
| "loss": 0.9407, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.44765564950038433, |
| "grad_norm": 0.9009922742843628, |
| "learning_rate": 9.926720372547438e-05, |
| "loss": 0.9174, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.4525749423520369, |
| "grad_norm": 1.1509584188461304, |
| "learning_rate": 9.921758229381177e-05, |
| "loss": 0.9644, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.4574942352036895, |
| "grad_norm": 1.1815032958984375, |
| "learning_rate": 9.916634861219076e-05, |
| "loss": 0.9572, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.462413528055342, |
| "grad_norm": 1.1411418914794922, |
| "learning_rate": 9.911350435890386e-05, |
| "loss": 0.9424, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.46733282090699463, |
| "grad_norm": 1.2081893682479858, |
| "learning_rate": 9.905905126500208e-05, |
| "loss": 0.9643, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.4722521137586472, |
| "grad_norm": 1.0384275913238525, |
| "learning_rate": 9.900299111423819e-05, |
| "loss": 0.9573, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.4771714066102998, |
| "grad_norm": 1.0825401544570923, |
| "learning_rate": 9.894532574300827e-05, |
| "loss": 0.9533, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.4820906994619523, |
| "grad_norm": 0.9118188619613647, |
| "learning_rate": 9.888605704029161e-05, |
| "loss": 0.952, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.4870099923136049, |
| "grad_norm": 1.1044224500656128, |
| "learning_rate": 9.882518694758875e-05, |
| "loss": 0.8827, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.49192928516525747, |
| "grad_norm": 0.9803332090377808, |
| "learning_rate": 9.876271745885792e-05, |
| "loss": 0.9254, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.49684857801691007, |
| "grad_norm": 1.0061184167861938, |
| "learning_rate": 9.869865062044979e-05, |
| "loss": 0.9428, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.5017678708685627, |
| "grad_norm": 0.9520462155342102, |
| "learning_rate": 9.863298853104032e-05, |
| "loss": 0.9292, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.5066871637202153, |
| "grad_norm": 1.5129801034927368, |
| "learning_rate": 9.856573334156209e-05, |
| "loss": 0.9351, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.5116064565718678, |
| "grad_norm": 1.4102259874343872, |
| "learning_rate": 9.84968872551338e-05, |
| "loss": 0.908, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.5165257494235204, |
| "grad_norm": 1.0974596738815308, |
| "learning_rate": 9.842645252698813e-05, |
| "loss": 0.9548, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.521445042275173, |
| "grad_norm": 0.9075796008110046, |
| "learning_rate": 9.835443146439786e-05, |
| "loss": 0.8862, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.5263643351268256, |
| "grad_norm": 1.2785767316818237, |
| "learning_rate": 9.828082642660026e-05, |
| "loss": 0.9333, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.5312836279784781, |
| "grad_norm": 1.128395915031433, |
| "learning_rate": 9.820563982471987e-05, |
| "loss": 0.9688, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.5362029208301307, |
| "grad_norm": 1.3578219413757324, |
| "learning_rate": 9.812887412168941e-05, |
| "loss": 0.9747, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.5411222136817833, |
| "grad_norm": 1.431168794631958, |
| "learning_rate": 9.805053183216923e-05, |
| "loss": 0.9358, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.5460415065334359, |
| "grad_norm": 0.8082225918769836, |
| "learning_rate": 9.797061552246486e-05, |
| "loss": 0.9133, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.5509607993850884, |
| "grad_norm": 1.1131757497787476, |
| "learning_rate": 9.788912781044292e-05, |
| "loss": 0.9679, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.555880092236741, |
| "grad_norm": 1.163212776184082, |
| "learning_rate": 9.780607136544543e-05, |
| "loss": 0.9928, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.5607993850883936, |
| "grad_norm": 1.0671954154968262, |
| "learning_rate": 9.772144890820234e-05, |
| "loss": 0.8596, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.5657186779400462, |
| "grad_norm": 1.1239769458770752, |
| "learning_rate": 9.763526321074241e-05, |
| "loss": 0.9382, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.5706379707916986, |
| "grad_norm": 1.2082741260528564, |
| "learning_rate": 9.754751709630237e-05, |
| "loss": 0.9354, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.5755572636433512, |
| "grad_norm": 1.0991088151931763, |
| "learning_rate": 9.745821343923451e-05, |
| "loss": 0.9695, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.5804765564950038, |
| "grad_norm": 1.2947261333465576, |
| "learning_rate": 9.736735516491247e-05, |
| "loss": 0.9162, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.5853958493466564, |
| "grad_norm": 1.3943086862564087, |
| "learning_rate": 9.727494524963536e-05, |
| "loss": 0.9511, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.5903151421983089, |
| "grad_norm": 1.0838426351547241, |
| "learning_rate": 9.718098672053044e-05, |
| "loss": 0.983, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.5952344350499615, |
| "grad_norm": 1.285778522491455, |
| "learning_rate": 9.708548265545375e-05, |
| "loss": 0.8961, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.6001537279016141, |
| "grad_norm": 0.9513759016990662, |
| "learning_rate": 9.698843618288942e-05, |
| "loss": 0.9459, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.6050730207532667, |
| "grad_norm": 0.8295150399208069, |
| "learning_rate": 9.688985048184717e-05, |
| "loss": 0.8766, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.6099923136049193, |
| "grad_norm": 0.8916402459144592, |
| "learning_rate": 9.67897287817581e-05, |
| "loss": 0.9322, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.6149116064565718, |
| "grad_norm": 1.0418959856033325, |
| "learning_rate": 9.6688074362369e-05, |
| "loss": 1.0029, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.6198308993082244, |
| "grad_norm": 1.5775642395019531, |
| "learning_rate": 9.658489055363486e-05, |
| "loss": 0.9241, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.624750192159877, |
| "grad_norm": 1.4654349088668823, |
| "learning_rate": 9.648018073560976e-05, |
| "loss": 0.9755, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.6296694850115296, |
| "grad_norm": 1.1109009981155396, |
| "learning_rate": 9.637394833833627e-05, |
| "loss": 0.9492, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.6345887778631821, |
| "grad_norm": 1.0579159259796143, |
| "learning_rate": 9.626619684173288e-05, |
| "loss": 0.9327, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.6395080707148347, |
| "grad_norm": 1.344231128692627, |
| "learning_rate": 9.615692977548026e-05, |
| "loss": 0.9245, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.6444273635664873, |
| "grad_norm": 1.1978901624679565, |
| "learning_rate": 9.604615071890541e-05, |
| "loss": 0.8989, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.6493466564181399, |
| "grad_norm": 1.2430976629257202, |
| "learning_rate": 9.593386330086458e-05, |
| "loss": 0.922, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.6542659492697924, |
| "grad_norm": 0.8552722334861755, |
| "learning_rate": 9.582007119962424e-05, |
| "loss": 0.9013, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.659185242121445, |
| "grad_norm": 0.8925425410270691, |
| "learning_rate": 9.570477814274077e-05, |
| "loss": 1.0079, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.6641045349730976, |
| "grad_norm": 1.0146712064743042, |
| "learning_rate": 9.558798790693817e-05, |
| "loss": 0.9355, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.6690238278247502, |
| "grad_norm": 1.0288127660751343, |
| "learning_rate": 9.54697043179845e-05, |
| "loss": 0.9069, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.6739431206764027, |
| "grad_norm": 1.180992603302002, |
| "learning_rate": 9.534993125056643e-05, |
| "loss": 0.9002, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.6788624135280553, |
| "grad_norm": 1.1395238637924194, |
| "learning_rate": 9.522867262816243e-05, |
| "loss": 0.9654, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.6837817063797079, |
| "grad_norm": 1.1949700117111206, |
| "learning_rate": 9.510593242291414e-05, |
| "loss": 0.8939, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.6887009992313605, |
| "grad_norm": 1.1609656810760498, |
| "learning_rate": 9.49817146554963e-05, |
| "loss": 0.9344, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.693620292083013, |
| "grad_norm": 1.0648009777069092, |
| "learning_rate": 9.485602339498509e-05, |
| "loss": 0.969, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.6985395849346656, |
| "grad_norm": 0.7220922112464905, |
| "learning_rate": 9.472886275872475e-05, |
| "loss": 0.8687, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.7034588777863182, |
| "grad_norm": 1.2389453649520874, |
| "learning_rate": 9.460023691219277e-05, |
| "loss": 0.8678, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.7083781706379708, |
| "grad_norm": 0.894183874130249, |
| "learning_rate": 9.447015006886338e-05, |
| "loss": 0.9072, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.7132974634896234, |
| "grad_norm": 1.036689281463623, |
| "learning_rate": 9.433860649006961e-05, |
| "loss": 0.9594, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.7182167563412759, |
| "grad_norm": 1.0629465579986572, |
| "learning_rate": 9.420561048486359e-05, |
| "loss": 0.9467, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.7231360491929285, |
| "grad_norm": 1.2931263446807861, |
| "learning_rate": 9.40711664098755e-05, |
| "loss": 0.9452, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.7280553420445811, |
| "grad_norm": 1.4071450233459473, |
| "learning_rate": 9.393527866917082e-05, |
| "loss": 0.8886, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.7329746348962337, |
| "grad_norm": 0.9684018492698669, |
| "learning_rate": 9.379795171410601e-05, |
| "loss": 0.8815, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.7378939277478862, |
| "grad_norm": 1.0788882970809937, |
| "learning_rate": 9.36591900431828e-05, |
| "loss": 0.9255, |
| "step": 3000 |
| } |
| ], |
| "logging_steps": 20, |
| "max_steps": 12198, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.19351181312e+16, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |