Training log: loss, gradient norm and learning rate were recorded every 50 steps over 18,000 steps (12 epochs); the learning rate decayed linearly from 2e-05 to 0, and the logged training loss fell from 0.4904 at step 50 to 0.0146 at step 18,000. End-of-epoch results:

| Training Loss | Epoch | Step  | Validation Loss | Accuracy | F1     | Precision | Recall |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|:------:|:---------:|:------:|
| 0.1324        | 1.0   | 1500  | 0.1229          | 0.4353   | 0.8653 | 0.9544    | 0.7914 |
| 0.0838        | 2.0   | 3000  | 0.0779          | 0.6040   | 0.9254 | 0.9693    | 0.8852 |
| 0.0589        | 3.0   | 4500  | 0.0593          | 0.7013   | 0.9476 | 0.9778    | 0.9191 |
| 0.0486        | 4.0   | 6000  | 0.0502          | 0.7253   | 0.9527 | 0.9730    | 0.9333 |
| 0.0405        | 5.0   | 7500  | 0.0422          | 0.7713   | 0.9617 | 0.9788    | 0.9452 |
| 0.0290        | 6.0   | 9000  | 0.0387          | 0.7900   | 0.9634 | 0.9759    | 0.9512 |
| 0.0264        | 7.0   | 10500 | 0.0363          | 0.7973   | 0.9652 | 0.9791    | 0.9516 |
| 0.0239        | 8.0   | 12000 | 0.0350          | 0.8213   | 0.9685 | 0.9816    | 0.9558 |
| 0.0191        | 9.0   | 13500 | 0.0338          | 0.8140   | 0.9678 | 0.9796    | 0.9564 |
| 0.0172        | 10.0  | 15000 | 0.0327          | 0.8160   | 0.9687 | 0.9792    | 0.9584 |
| 0.0147        | 11.0  | 16500 | 0.0326          | 0.8220   | 0.9696 | 0.9782    | 0.9612 |
| 0.0146        | 12.0  | 18000 | 0.0321          | 0.8293   | 0.9707 | 0.9795    | 0.9621 |
Overall training summary (12.0 epochs, 18,000 steps):

| Metric                   | Value                 |
|:-------------------------|:----------------------|
| Train loss               | 0.0506                |
| Train runtime            | 94,832.1 s (≈ 26.3 h) |
| Train samples per second | 1.518                 |
| Train steps per second   | 0.19                  |
| Total FLOs               | 3.7897517039616e+16   |
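The results above come from the step-by-step `log_history` list that the Hugging Face `Trainer` records (usually saved inside `trainer_state.json`): entries carrying `eval_*` keys are end-of-epoch evaluation records, and the rest are per-50-step training records. Below is a minimal sketch of how such a list can be reduced to the per-epoch view; the file name `log_history.json` and the assumption that the raw JSON array is saved on its own are illustrative, not part of the original log.

```python
import json

# Assumed file name: the raw log array from this document, saved verbatim.
with open("log_history.json") as f:
    history = json.load(f)

# Records with an "eval_loss" key are end-of-epoch evaluations;
# records with a plain "loss" key are the per-50-step training entries.
train_rows = [r for r in history if "loss" in r]
eval_rows = [r for r in history if "eval_loss" in r]

print(f"{len(train_rows)} training records, {len(eval_rows)} evaluation records")

for r in eval_rows:
    print(f"epoch {r['epoch']:>4}  step {r['step']:>5}  "
          f"val_loss {r['eval_loss']:.4f}  accuracy {r['eval_accuracy']:.4f}  "
          f"f1 {r['eval_f1']:.4f}")

# Checkpoint with the lowest validation loss (here the final epoch, step 18000).
best = min(eval_rows, key=lambda r: r["eval_loss"])
print("best:", best["epoch"], best["step"], round(best["eval_loss"], 4))
```

The same list can be fed to pandas or matplotlib for a loss curve, but the standard-library version above is enough to reproduce the per-epoch table.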