{
  "best_metric": 1.699568748474121,
  "best_model_checkpoint": "microsoft/resnet-50/checkpoint-812",
  "epoch": 30.0,
  "eval_steps": 500,
  "global_step": 870,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 3.913442611694336,
      "learning_rate": 2.2988505747126437e-06,
      "loss": 1.9412,
      "step": 2
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 3.096372127532959,
      "learning_rate": 4.5977011494252875e-06,
      "loss": 1.9546,
      "step": 4
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 2.676558256149292,
      "learning_rate": 6.896551724137932e-06,
      "loss": 1.9636,
      "step": 6
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 2.6550958156585693,
      "learning_rate": 9.195402298850575e-06,
      "loss": 1.9466,
      "step": 8
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 2.819706439971924,
      "learning_rate": 1.1494252873563218e-05,
      "loss": 1.9396,
      "step": 10
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 2.5083816051483154,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 1.9505,
      "step": 12
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 2.886678695678711,
      "learning_rate": 1.6091954022988507e-05,
      "loss": 1.9462,
      "step": 14
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 3.9515256881713867,
      "learning_rate": 1.839080459770115e-05,
      "loss": 1.9433,
      "step": 16
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 3.4381752014160156,
      "learning_rate": 2.0689655172413793e-05,
      "loss": 1.9534,
      "step": 18
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 4.001620769500732,
      "learning_rate": 2.2988505747126437e-05,
      "loss": 1.9313,
      "step": 20
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 2.762974739074707,
      "learning_rate": 2.5287356321839083e-05,
      "loss": 1.9397,
      "step": 22
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 5.667189121246338,
      "learning_rate": 2.7586206896551727e-05,
      "loss": 1.9348,
      "step": 24
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 5.329193115234375,
      "learning_rate": 2.988505747126437e-05,
      "loss": 1.9332,
      "step": 26
    },
    {
      "epoch": 0.9655172413793104,
      "grad_norm": 3.3163201808929443,
      "learning_rate": 3.218390804597701e-05,
      "loss": 1.9401,
      "step": 28
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.13636363636363635,
      "eval_f1_macro": 0.06727120053346067,
      "eval_f1_micro": 0.13636363636363635,
      "eval_f1_weighted": 0.0897925692701812,
      "eval_loss": 1.9375746250152588,
      "eval_precision_macro": 0.052353460181064904,
      "eval_precision_micro": 0.13636363636363635,
      "eval_precision_weighted": 0.06933679572307391,
      "eval_recall_macro": 0.10204081632653063,
      "eval_recall_micro": 0.13636363636363635,
      "eval_recall_weighted": 0.13636363636363635,
      "eval_runtime": 2.2775,
      "eval_samples_per_second": 57.957,
      "eval_steps_per_second": 7.464,
      "step": 29
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 2.9045445919036865,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 1.9282,
      "step": 30
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 3.9900588989257812,
      "learning_rate": 3.67816091954023e-05,
      "loss": 1.9278,
      "step": 32
    },
    {
      "epoch": 1.1724137931034484,
      "grad_norm": 3.0278475284576416,
      "learning_rate": 3.908045977011495e-05,
      "loss": 1.9327,
      "step": 34
    },
    {
      "epoch": 1.2413793103448276,
      "grad_norm": 3.1938633918762207,
      "learning_rate": 4.1379310344827587e-05,
      "loss": 1.9388,
      "step": 36
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 2.677551031112671,
      "learning_rate": 4.367816091954024e-05,
      "loss": 1.9254,
      "step": 38
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 2.9068210124969482,
      "learning_rate": 4.597701149425287e-05,
      "loss": 1.9446,
      "step": 40
    },
    {
      "epoch": 1.4482758620689655,
      "grad_norm": 3.1965651512145996,
      "learning_rate": 4.827586206896552e-05,
      "loss": 1.924,
      "step": 42
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 4.181434631347656,
      "learning_rate": 5.057471264367817e-05,
      "loss": 1.9081,
      "step": 44
    },
    {
      "epoch": 1.5862068965517242,
      "grad_norm": 5.315581798553467,
      "learning_rate": 5.287356321839081e-05,
      "loss": 1.9181,
      "step": 46
    },
    {
      "epoch": 1.6551724137931034,
      "grad_norm": 3.4278039932250977,
      "learning_rate": 5.517241379310345e-05,
      "loss": 1.9183,
      "step": 48
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 4.40513801574707,
      "learning_rate": 5.747126436781609e-05,
      "loss": 1.9459,
      "step": 50
    },
    {
      "epoch": 1.793103448275862,
      "grad_norm": 3.497673273086548,
      "learning_rate": 5.977011494252874e-05,
      "loss": 1.9038,
      "step": 52
    },
    {
      "epoch": 1.8620689655172413,
      "grad_norm": 2.5953779220581055,
      "learning_rate": 6.206896551724138e-05,
      "loss": 1.9386,
      "step": 54
    },
    {
      "epoch": 1.9310344827586206,
      "grad_norm": 3.231996536254883,
      "learning_rate": 6.436781609195403e-05,
      "loss": 1.9218,
      "step": 56
    },
    {
      "epoch": 2.0,
      "grad_norm": 4.272862434387207,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.9122,
      "step": 58
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.20454545454545456,
      "eval_f1_macro": 0.0601113172541744,
      "eval_f1_micro": 0.20454545454545456,
      "eval_f1_weighted": 0.08520267611176702,
      "eval_loss": 1.9165151119232178,
      "eval_precision_macro": 0.04630541871921182,
      "eval_precision_micro": 0.20454545454545456,
      "eval_precision_weighted": 0.06478578892371997,
      "eval_recall_macro": 0.14328042328042326,
      "eval_recall_micro": 0.20454545454545456,
      "eval_recall_weighted": 0.20454545454545456,
      "eval_runtime": 2.2116,
      "eval_samples_per_second": 59.685,
      "eval_steps_per_second": 7.687,
      "step": 58
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 2.822770595550537,
      "learning_rate": 6.896551724137931e-05,
      "loss": 1.9253,
      "step": 60
    },
    {
      "epoch": 2.1379310344827585,
      "grad_norm": 3.1101579666137695,
      "learning_rate": 7.126436781609196e-05,
      "loss": 1.9119,
      "step": 62
    },
    {
      "epoch": 2.206896551724138,
      "grad_norm": 3.2235467433929443,
      "learning_rate": 7.35632183908046e-05,
      "loss": 1.9116,
      "step": 64
    },
    {
      "epoch": 2.2758620689655173,
      "grad_norm": 2.7700700759887695,
      "learning_rate": 7.586206896551724e-05,
      "loss": 1.9201,
      "step": 66
    },
    {
      "epoch": 2.344827586206897,
      "grad_norm": 2.837595224380493,
      "learning_rate": 7.81609195402299e-05,
      "loss": 1.8847,
      "step": 68
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 3.7949655055999756,
      "learning_rate": 8.045977011494253e-05,
      "loss": 1.8844,
      "step": 70
    },
    {
      "epoch": 2.4827586206896552,
      "grad_norm": 3.6746275424957275,
      "learning_rate": 8.275862068965517e-05,
      "loss": 1.8925,
      "step": 72
    },
    {
      "epoch": 2.5517241379310347,
      "grad_norm": 3.2281265258789062,
      "learning_rate": 8.505747126436782e-05,
      "loss": 1.9014,
      "step": 74
    },
    {
      "epoch": 2.6206896551724137,
      "grad_norm": 3.065439224243164,
      "learning_rate": 8.735632183908047e-05,
      "loss": 1.8918,
      "step": 76
    },
    {
      "epoch": 2.689655172413793,
      "grad_norm": 3.09468674659729,
      "learning_rate": 8.96551724137931e-05,
      "loss": 1.9022,
      "step": 78
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 3.9154744148254395,
      "learning_rate": 9.195402298850575e-05,
      "loss": 1.931,
      "step": 80
    },
    {
      "epoch": 2.8275862068965516,
      "grad_norm": 3.829075813293457,
      "learning_rate": 9.425287356321839e-05,
      "loss": 1.8685,
      "step": 82
    },
    {
      "epoch": 2.896551724137931,
      "grad_norm": 3.3776695728302,
      "learning_rate": 9.655172413793105e-05,
      "loss": 1.8864,
      "step": 84
    },
    {
      "epoch": 2.9655172413793105,
      "grad_norm": 2.836928129196167,
      "learning_rate": 9.885057471264369e-05,
      "loss": 1.9226,
      "step": 86
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.2196969696969697,
      "eval_f1_macro": 0.07285668948633087,
      "eval_f1_micro": 0.2196969696969697,
      "eval_f1_weighted": 0.10234124808862988,
      "eval_loss": 1.8973513841629028,
      "eval_precision_macro": 0.05417439703153989,
      "eval_precision_micro": 0.2196969696969697,
      "eval_precision_weighted": 0.07540338449429358,
      "eval_recall_macro": 0.1547089947089947,
      "eval_recall_micro": 0.2196969696969697,
      "eval_recall_weighted": 0.2196969696969697,
      "eval_runtime": 2.2116,
      "eval_samples_per_second": 59.686,
      "eval_steps_per_second": 7.687,
      "step": 87
    },
    {
      "epoch": 3.0344827586206895,
      "grad_norm": 3.60206937789917,
      "learning_rate": 9.987228607918264e-05,
      "loss": 1.9435,
      "step": 88
    },
    {
      "epoch": 3.103448275862069,
      "grad_norm": 4.266343116760254,
      "learning_rate": 9.96168582375479e-05,
      "loss": 1.8453,
      "step": 90
    },
    {
      "epoch": 3.1724137931034484,
      "grad_norm": 4.432826995849609,
      "learning_rate": 9.936143039591316e-05,
      "loss": 1.918,
      "step": 92
    },
    {
      "epoch": 3.2413793103448274,
      "grad_norm": 3.0959718227386475,
      "learning_rate": 9.910600255427843e-05,
      "loss": 1.8782,
      "step": 94
    },
    {
      "epoch": 3.310344827586207,
      "grad_norm": 3.3647584915161133,
      "learning_rate": 9.885057471264369e-05,
      "loss": 1.9114,
      "step": 96
    },
    {
      "epoch": 3.3793103448275863,
      "grad_norm": 3.9723000526428223,
      "learning_rate": 9.859514687100895e-05,
      "loss": 1.8632,
      "step": 98
    },
    {
      "epoch": 3.4482758620689653,
      "grad_norm": 3.6927733421325684,
      "learning_rate": 9.833971902937422e-05,
      "loss": 1.8405,
      "step": 100
    },
    {
      "epoch": 3.5172413793103448,
      "grad_norm": 3.0916285514831543,
      "learning_rate": 9.808429118773947e-05,
      "loss": 1.8844,
      "step": 102
    },
    {
      "epoch": 3.586206896551724,
      "grad_norm": 2.760744333267212,
      "learning_rate": 9.782886334610473e-05,
      "loss": 1.8906,
      "step": 104
    },
    {
      "epoch": 3.655172413793103,
      "grad_norm": 3.5320417881011963,
      "learning_rate": 9.757343550446999e-05,
      "loss": 1.8918,
      "step": 106
    },
    {
      "epoch": 3.7241379310344827,
      "grad_norm": 3.566591739654541,
      "learning_rate": 9.731800766283526e-05,
      "loss": 1.8234,
      "step": 108
    },
    {
      "epoch": 3.793103448275862,
      "grad_norm": 4.444185256958008,
      "learning_rate": 9.706257982120052e-05,
      "loss": 1.9105,
      "step": 110
    },
    {
      "epoch": 3.862068965517241,
      "grad_norm": 3.0570766925811768,
      "learning_rate": 9.680715197956578e-05,
      "loss": 1.9211,
      "step": 112
    },
    {
      "epoch": 3.9310344827586206,
      "grad_norm": 3.4903857707977295,
      "learning_rate": 9.655172413793105e-05,
      "loss": 1.8656,
      "step": 114
    },
    {
      "epoch": 4.0,
      "grad_norm": 4.608846664428711,
      "learning_rate": 9.62962962962963e-05,
      "loss": 1.8609,
      "step": 116
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.1893939393939394,
      "eval_f1_macro": 0.04793863854266539,
      "eval_f1_micro": 0.1893939393939394,
      "eval_f1_weighted": 0.06863941427699817,
      "eval_loss": 1.887947678565979,
      "eval_precision_macro": 0.02927400468384075,
      "eval_precision_micro": 0.1893939393939394,
      "eval_precision_weighted": 0.04191505216095381,
      "eval_recall_macro": 0.13227513227513227,
      "eval_recall_micro": 0.1893939393939394,
      "eval_recall_weighted": 0.1893939393939394,
      "eval_runtime": 2.2141,
      "eval_samples_per_second": 59.618,
      "eval_steps_per_second": 7.678,
      "step": 116
    },
    {
      "epoch": 4.068965517241379,
      "grad_norm": 3.2423195838928223,
      "learning_rate": 9.604086845466156e-05,
      "loss": 1.8838,
      "step": 118
    },
    {
      "epoch": 4.137931034482759,
      "grad_norm": 2.8173322677612305,
      "learning_rate": 9.578544061302682e-05,
      "loss": 1.8437,
      "step": 120
    },
    {
      "epoch": 4.206896551724138,
      "grad_norm": 3.843899965286255,
      "learning_rate": 9.553001277139209e-05,
      "loss": 1.8115,
      "step": 122
    },
    {
      "epoch": 4.275862068965517,
      "grad_norm": 6.2886810302734375,
      "learning_rate": 9.527458492975735e-05,
      "loss": 1.8327,
      "step": 124
    },
    {
      "epoch": 4.344827586206897,
      "grad_norm": 3.9665699005126953,
      "learning_rate": 9.501915708812261e-05,
      "loss": 1.858,
      "step": 126
    },
    {
      "epoch": 4.413793103448276,
      "grad_norm": 4.808972358703613,
      "learning_rate": 9.476372924648788e-05,
      "loss": 1.8519,
      "step": 128
    },
    {
      "epoch": 4.482758620689655,
      "grad_norm": 3.586610794067383,
      "learning_rate": 9.450830140485314e-05,
      "loss": 1.9904,
      "step": 130
    },
    {
      "epoch": 4.551724137931035,
      "grad_norm": 3.563556671142578,
      "learning_rate": 9.425287356321839e-05,
      "loss": 1.9138,
      "step": 132
    },
    {
      "epoch": 4.620689655172414,
      "grad_norm": 4.1946282386779785,
      "learning_rate": 9.399744572158365e-05,
      "loss": 1.9292,
      "step": 134
    },
    {
      "epoch": 4.689655172413794,
      "grad_norm": 5.027905464172363,
      "learning_rate": 9.374201787994892e-05,
      "loss": 1.9085,
      "step": 136
    },
    {
      "epoch": 4.758620689655173,
      "grad_norm": 3.239557981491089,
      "learning_rate": 9.348659003831418e-05,
      "loss": 1.854,
      "step": 138
    },
    {
      "epoch": 4.827586206896552,
      "grad_norm": 4.160371780395508,
      "learning_rate": 9.323116219667944e-05,
      "loss": 1.8438,
      "step": 140
    },
    {
      "epoch": 4.896551724137931,
      "grad_norm": 2.8599472045898438,
      "learning_rate": 9.29757343550447e-05,
      "loss": 1.8436,
      "step": 142
    },
    {
      "epoch": 4.9655172413793105,
      "grad_norm": 5.530289649963379,
      "learning_rate": 9.272030651340997e-05,
      "loss": 1.8345,
      "step": 144
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.20454545454545456,
      "eval_f1_macro": 0.04976958525345622,
      "eval_f1_micro": 0.20454545454545456,
      "eval_f1_weighted": 0.07126099706744868,
      "eval_loss": 1.8808202743530273,
      "eval_precision_macro": 0.030133928571428572,
      "eval_precision_micro": 0.20454545454545456,
      "eval_precision_weighted": 0.043146306818181816,
      "eval_recall_macro": 0.14285714285714285,
      "eval_recall_micro": 0.20454545454545456,
      "eval_recall_weighted": 0.20454545454545456,
      "eval_runtime": 2.2084,
      "eval_samples_per_second": 59.772,
      "eval_steps_per_second": 7.698,
      "step": 145
    },
    {
      "epoch": 5.0344827586206895,
      "grad_norm": 4.05312442779541,
      "learning_rate": 9.246487867177522e-05,
      "loss": 1.9188,
      "step": 146
    },
    {
      "epoch": 5.103448275862069,
      "grad_norm": 4.95611047744751,
      "learning_rate": 9.220945083014048e-05,
      "loss": 1.9234,
      "step": 148
    },
    {
      "epoch": 5.172413793103448,
      "grad_norm": 4.1448140144348145,
      "learning_rate": 9.195402298850575e-05,
      "loss": 1.893,
      "step": 150
    },
    {
      "epoch": 5.241379310344827,
      "grad_norm": 4.208975791931152,
      "learning_rate": 9.169859514687101e-05,
      "loss": 1.8953,
      "step": 152
    },
    {
      "epoch": 5.310344827586207,
      "grad_norm": 2.88334059715271,
      "learning_rate": 9.144316730523627e-05,
      "loss": 1.7655,
      "step": 154
    },
    {
      "epoch": 5.379310344827586,
      "grad_norm": 3.308145046234131,
      "learning_rate": 9.118773946360154e-05,
      "loss": 1.8691,
      "step": 156
    },
    {
      "epoch": 5.448275862068965,
      "grad_norm": 4.54261589050293,
      "learning_rate": 9.09323116219668e-05,
      "loss": 1.8627,
      "step": 158
    },
    {
      "epoch": 5.517241379310345,
      "grad_norm": 3.5887720584869385,
      "learning_rate": 9.067688378033205e-05,
      "loss": 1.8774,
      "step": 160
    },
    {
      "epoch": 5.586206896551724,
      "grad_norm": 3.7684900760650635,
      "learning_rate": 9.042145593869731e-05,
      "loss": 1.8427,
      "step": 162
    },
    {
      "epoch": 5.655172413793103,
      "grad_norm": 4.024008274078369,
      "learning_rate": 9.016602809706258e-05,
      "loss": 1.856,
      "step": 164
    },
    {
      "epoch": 5.724137931034483,
      "grad_norm": 4.352729320526123,
      "learning_rate": 8.991060025542784e-05,
      "loss": 1.8458,
      "step": 166
    },
    {
      "epoch": 5.793103448275862,
      "grad_norm": 3.1746506690979004,
      "learning_rate": 8.96551724137931e-05,
      "loss": 1.7398,
      "step": 168
    },
    {
      "epoch": 5.862068965517241,
      "grad_norm": 3.6843760013580322,
      "learning_rate": 8.939974457215837e-05,
      "loss": 1.7648,
      "step": 170
    },
    {
      "epoch": 5.931034482758621,
      "grad_norm": 4.048587799072266,
      "learning_rate": 8.914431673052363e-05,
      "loss": 1.9072,
      "step": 172
    },
    {
      "epoch": 6.0,
      "grad_norm": 4.465461730957031,
      "learning_rate": 8.888888888888889e-05,
      "loss": 1.8965,
      "step": 174
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.1893939393939394,
      "eval_f1_macro": 0.05547309833024119,
      "eval_f1_micro": 0.1893939393939394,
      "eval_f1_weighted": 0.07873868555686739,
      "eval_loss": 1.8802686929702759,
      "eval_precision_macro": 0.037860137068334555,
      "eval_precision_micro": 0.1893939393939394,
      "eval_precision_weighted": 0.053411384454700714,
      "eval_recall_macro": 0.1326984126984127,
      "eval_recall_micro": 0.1893939393939394,
      "eval_recall_weighted": 0.1893939393939394,
      "eval_runtime": 2.4165,
      "eval_samples_per_second": 54.624,
      "eval_steps_per_second": 7.035,
      "step": 174
    },
    {
      "epoch": 6.068965517241379,
      "grad_norm": 2.9984512329101562,
      "learning_rate": 8.863346104725416e-05,
      "loss": 1.772,
      "step": 176
    },
    {
      "epoch": 6.137931034482759,
      "grad_norm": 4.410613059997559,
      "learning_rate": 8.837803320561942e-05,
      "loss": 1.8404,
      "step": 178
    },
    {
      "epoch": 6.206896551724138,
      "grad_norm": 3.2313921451568604,
      "learning_rate": 8.812260536398468e-05,
      "loss": 1.8435,
      "step": 180
    },
    {
      "epoch": 6.275862068965517,
      "grad_norm": 4.506382465362549,
      "learning_rate": 8.786717752234995e-05,
      "loss": 1.8467,
      "step": 182
    },
    {
      "epoch": 6.344827586206897,
      "grad_norm": 4.841159820556641,
      "learning_rate": 8.761174968071521e-05,
      "loss": 1.777,
      "step": 184
    },
    {
      "epoch": 6.413793103448276,
      "grad_norm": 2.8692665100097656,
      "learning_rate": 8.735632183908047e-05,
      "loss": 1.8875,
      "step": 186
    },
    {
      "epoch": 6.482758620689655,
      "grad_norm": 2.7914023399353027,
      "learning_rate": 8.710089399744572e-05,
      "loss": 1.8494,
      "step": 188
    },
    {
      "epoch": 6.551724137931035,
      "grad_norm": 4.644817352294922,
      "learning_rate": 8.684546615581099e-05,
      "loss": 1.9091,
      "step": 190
    },
    {
      "epoch": 6.620689655172414,
      "grad_norm": 3.3066277503967285,
      "learning_rate": 8.659003831417625e-05,
      "loss": 1.8223,
      "step": 192
    },
    {
      "epoch": 6.689655172413794,
      "grad_norm": 4.384652614593506,
      "learning_rate": 8.633461047254151e-05,
      "loss": 1.8276,
      "step": 194
    },
    {
      "epoch": 6.758620689655173,
      "grad_norm": 3.6653411388397217,
      "learning_rate": 8.607918263090678e-05,
      "loss": 1.8887,
      "step": 196
    },
    {
      "epoch": 6.827586206896552,
      "grad_norm": 4.581198692321777,
      "learning_rate": 8.582375478927204e-05,
      "loss": 1.9237,
      "step": 198
    },
    {
      "epoch": 6.896551724137931,
      "grad_norm": 3.545809268951416,
      "learning_rate": 8.55683269476373e-05,
      "loss": 1.8695,
      "step": 200
    },
    {
      "epoch": 6.9655172413793105,
      "grad_norm": 4.356478691101074,
      "learning_rate": 8.531289910600255e-05,
      "loss": 1.8651,
      "step": 202
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.22727272727272727,
      "eval_f1_macro": 0.07867494824016563,
      "eval_f1_micro": 0.22727272727272727,
      "eval_f1_weighted": 0.11001317523056654,
      "eval_loss": 1.8731662034988403,
      "eval_precision_macro": 0.060672917815774954,
      "eval_precision_micro": 0.22727272727272727,
      "eval_precision_weighted": 0.08398658398658398,
      "eval_recall_macro": 0.1604232804232804,
      "eval_recall_micro": 0.22727272727272727,
      "eval_recall_weighted": 0.22727272727272727,
      "eval_runtime": 2.2271,
      "eval_samples_per_second": 59.271,
      "eval_steps_per_second": 7.633,
      "step": 203
    },
    {
      "epoch": 7.0344827586206895,
      "grad_norm": 3.142268419265747,
      "learning_rate": 8.505747126436782e-05,
      "loss": 1.7537,
      "step": 204
    },
    {
      "epoch": 7.103448275862069,
      "grad_norm": 3.638953924179077,
      "learning_rate": 8.480204342273308e-05,
      "loss": 1.8558,
      "step": 206
    },
    {
      "epoch": 7.172413793103448,
      "grad_norm": 4.003501892089844,
      "learning_rate": 8.454661558109834e-05,
      "loss": 1.9323,
      "step": 208
    },
    {
      "epoch": 7.241379310344827,
      "grad_norm": 6.639110088348389,
      "learning_rate": 8.42911877394636e-05,
      "loss": 1.8732,
      "step": 210
    },
    {
      "epoch": 7.310344827586207,
      "grad_norm": 6.879024982452393,
      "learning_rate": 8.403575989782887e-05,
      "loss": 1.9893,
      "step": 212
    },
    {
      "epoch": 7.379310344827586,
      "grad_norm": 2.855379819869995,
      "learning_rate": 8.378033205619413e-05,
      "loss": 1.8506,
      "step": 214
    },
    {
      "epoch": 7.448275862068965,
      "grad_norm": 10.893568992614746,
      "learning_rate": 8.35249042145594e-05,
      "loss": 1.9504,
      "step": 216
    },
    {
      "epoch": 7.517241379310345,
      "grad_norm": 3.859220027923584,
      "learning_rate": 8.326947637292465e-05,
      "loss": 1.8594,
      "step": 218
    },
    {
      "epoch": 7.586206896551724,
      "grad_norm": 3.4826343059539795,
      "learning_rate": 8.301404853128991e-05,
      "loss": 1.7486,
      "step": 220
    },
    {
      "epoch": 7.655172413793103,
      "grad_norm": 6.5416364669799805,
      "learning_rate": 8.275862068965517e-05,
      "loss": 1.8768,
      "step": 222
    },
    {
      "epoch": 7.724137931034483,
      "grad_norm": 4.05438232421875,
      "learning_rate": 8.250319284802044e-05,
      "loss": 1.7699,
      "step": 224
    },
    {
      "epoch": 7.793103448275862,
      "grad_norm": 4.8271307945251465,
      "learning_rate": 8.22477650063857e-05,
      "loss": 1.7961,
      "step": 226
    },
    {
      "epoch": 7.862068965517241,
      "grad_norm": 3.0313990116119385,
      "learning_rate": 8.199233716475096e-05,
      "loss": 1.8403,
      "step": 228
    },
    {
      "epoch": 7.931034482758621,
      "grad_norm": 3.440063714981079,
      "learning_rate": 8.173690932311623e-05,
      "loss": 1.8251,
      "step": 230
    },
    {
      "epoch": 8.0,
      "grad_norm": 4.2107744216918945,
      "learning_rate": 8.148148148148148e-05,
      "loss": 1.8235,
      "step": 232
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.19696969696969696,
      "eval_f1_macro": 0.057303079569284425,
      "eval_f1_micro": 0.19696969696969696,
      "eval_f1_weighted": 0.08134286956652123,
      "eval_loss": 1.8692715167999268,
      "eval_precision_macro": 0.03926482873851295,
      "eval_precision_micro": 0.19696969696969696,
      "eval_precision_weighted": 0.05537834485202906,
      "eval_recall_macro": 0.137989417989418,
      "eval_recall_micro": 0.19696969696969696,
      "eval_recall_weighted": 0.19696969696969696,
      "eval_runtime": 2.2071,
      "eval_samples_per_second": 59.807,
      "eval_steps_per_second": 7.702,
      "step": 232
    },
    {
      "epoch": 8.068965517241379,
      "grad_norm": 3.2934532165527344,
      "learning_rate": 8.122605363984674e-05,
      "loss": 1.8304,
      "step": 234
    },
    {
      "epoch": 8.137931034482758,
      "grad_norm": 3.333961009979248,
      "learning_rate": 8.0970625798212e-05,
      "loss": 1.8438,
      "step": 236
    },
    {
      "epoch": 8.206896551724139,
      "grad_norm": 2.9383113384246826,
      "learning_rate": 8.071519795657727e-05,
      "loss": 1.8447,
      "step": 238
    },
    {
      "epoch": 8.275862068965518,
      "grad_norm": 3.416475772857666,
      "learning_rate": 8.045977011494253e-05,
      "loss": 1.8325,
      "step": 240
    },
    {
      "epoch": 8.344827586206897,
      "grad_norm": 3.6379287242889404,
      "learning_rate": 8.020434227330779e-05,
      "loss": 1.8463,
      "step": 242
    },
    {
      "epoch": 8.413793103448276,
      "grad_norm": 4.145707130432129,
      "learning_rate": 7.994891443167306e-05,
      "loss": 1.9343,
      "step": 244
    },
    {
      "epoch": 8.482758620689655,
      "grad_norm": 3.06123948097229,
      "learning_rate": 7.969348659003832e-05,
      "loss": 1.8845,
      "step": 246
    },
    {
      "epoch": 8.551724137931034,
      "grad_norm": 4.507969856262207,
      "learning_rate": 7.943805874840358e-05,
      "loss": 1.6875,
      "step": 248
    },
    {
      "epoch": 8.620689655172415,
      "grad_norm": 2.797962188720703,
      "learning_rate": 7.918263090676885e-05,
      "loss": 1.7689,
      "step": 250
    },
    {
      "epoch": 8.689655172413794,
      "grad_norm": 3.4082255363464355,
      "learning_rate": 7.892720306513411e-05,
      "loss": 1.7913,
      "step": 252
    },
    {
      "epoch": 8.758620689655173,
      "grad_norm": 5.635084629058838,
      "learning_rate": 7.867177522349937e-05,
      "loss": 1.8701,
      "step": 254
    },
    {
      "epoch": 8.827586206896552,
      "grad_norm": 4.354236602783203,
      "learning_rate": 7.841634738186464e-05,
      "loss": 1.8421,
      "step": 256
    },
    {
      "epoch": 8.89655172413793,
      "grad_norm": 4.307262420654297,
      "learning_rate": 7.81609195402299e-05,
      "loss": 1.836,
      "step": 258
    },
    {
      "epoch": 8.96551724137931,
      "grad_norm": 4.296491622924805,
      "learning_rate": 7.790549169859515e-05,
      "loss": 1.7786,
      "step": 260
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.25,
      "eval_f1_macro": 0.11116724617868783,
      "eval_f1_micro": 0.25,
      "eval_f1_weighted": 0.15020813502194819,
      "eval_loss": 1.8612825870513916,
      "eval_precision_macro": 0.21305418719211824,
      "eval_precision_micro": 0.25,
      "eval_precision_weighted": 0.2558124346917451,
      "eval_recall_macro": 0.18077097505668935,
      "eval_recall_micro": 0.25,
      "eval_recall_weighted": 0.25,
      "eval_runtime": 2.2133,
      "eval_samples_per_second": 59.639,
      "eval_steps_per_second": 7.681,
      "step": 261
    },
    {
      "epoch": 9.03448275862069,
      "grad_norm": 3.7005295753479004,
      "learning_rate": 7.765006385696041e-05,
      "loss": 1.8614,
      "step": 262
    },
    {
      "epoch": 9.10344827586207,
      "grad_norm": 4.111121654510498,
      "learning_rate": 7.739463601532568e-05,
      "loss": 1.8188,
      "step": 264
    },
    {
      "epoch": 9.172413793103448,
      "grad_norm": 3.6179349422454834,
      "learning_rate": 7.713920817369094e-05,
      "loss": 1.8447,
      "step": 266
    },
    {
      "epoch": 9.241379310344827,
      "grad_norm": 3.207977294921875,
      "learning_rate": 7.68837803320562e-05,
      "loss": 1.8243,
      "step": 268
    },
    {
      "epoch": 9.310344827586206,
      "grad_norm": 5.118077278137207,
      "learning_rate": 7.662835249042147e-05,
      "loss": 1.9831,
      "step": 270
    },
    {
      "epoch": 9.379310344827585,
      "grad_norm": 4.01741361618042,
      "learning_rate": 7.637292464878673e-05,
      "loss": 1.8093,
      "step": 272
    },
    {
      "epoch": 9.448275862068966,
      "grad_norm": 3.3068385124206543,
      "learning_rate": 7.611749680715198e-05,
      "loss": 1.8369,
      "step": 274
    },
    {
      "epoch": 9.517241379310345,
      "grad_norm": 4.520148754119873,
      "learning_rate": 7.586206896551724e-05,
      "loss": 1.7552,
      "step": 276
    },
    {
      "epoch": 9.586206896551724,
      "grad_norm": 3.8634729385375977,
      "learning_rate": 7.56066411238825e-05,
      "loss": 1.7714,
      "step": 278
    },
    {
      "epoch": 9.655172413793103,
      "grad_norm": 4.389949798583984,
      "learning_rate": 7.535121328224777e-05,
      "loss": 1.755,
      "step": 280
    },
    {
      "epoch": 9.724137931034482,
      "grad_norm": 4.070067882537842,
      "learning_rate": 7.509578544061303e-05,
      "loss": 1.8678,
      "step": 282
    },
    {
      "epoch": 9.793103448275861,
      "grad_norm": 3.557446002960205,
      "learning_rate": 7.48403575989783e-05,
      "loss": 1.7598,
      "step": 284
    },
    {
      "epoch": 9.862068965517242,
      "grad_norm": 4.155707836151123,
      "learning_rate": 7.458492975734356e-05,
      "loss": 1.8237,
      "step": 286
    },
    {
      "epoch": 9.931034482758621,
      "grad_norm": 4.28460693359375,
      "learning_rate": 7.432950191570882e-05,
      "loss": 1.8855,
      "step": 288
    },
    {
      "epoch": 10.0,
      "grad_norm": 5.5480217933654785,
      "learning_rate": 7.407407407407407e-05,
      "loss": 1.9601,
      "step": 290
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.25757575757575757,
      "eval_f1_macro": 0.11443496370631899,
      "eval_f1_micro": 0.25757575757575757,
      "eval_f1_weighted": 0.15491617245063757,
      "eval_loss": 1.853498101234436,
      "eval_precision_macro": 0.1437282229965157,
      "eval_precision_micro": 0.25757575757575757,
      "eval_precision_weighted": 0.17927753141167774,
      "eval_recall_macro": 0.18648526077097508,
      "eval_recall_micro": 0.25757575757575757,
      "eval_recall_weighted": 0.25757575757575757,
      "eval_runtime": 2.2163,
      "eval_samples_per_second": 59.557,
      "eval_steps_per_second": 7.67,
      "step": 290
    },
    {
      "epoch": 10.068965517241379,
      "grad_norm": 4.041858673095703,
      "learning_rate": 7.381864623243934e-05,
      "loss": 1.8905,
      "step": 292
    },
    {
      "epoch": 10.137931034482758,
      "grad_norm": 3.6557087898254395,
      "learning_rate": 7.35632183908046e-05,
      "loss": 1.8761,
      "step": 294
    },
    {
      "epoch": 10.206896551724139,
      "grad_norm": 5.162230968475342,
      "learning_rate": 7.330779054916986e-05,
      "loss": 1.8182,
      "step": 296
    },
    {
      "epoch": 10.275862068965518,
      "grad_norm": 3.189119338989258,
      "learning_rate": 7.305236270753513e-05,
      "loss": 1.739,
      "step": 298
    },
    {
      "epoch": 10.344827586206897,
      "grad_norm": 4.540651321411133,
      "learning_rate": 7.279693486590039e-05,
      "loss": 1.8742,
      "step": 300
    },
    {
      "epoch": 10.413793103448276,
      "grad_norm": 4.213813781738281,
      "learning_rate": 7.254150702426565e-05,
      "loss": 1.7482,
      "step": 302
    },
    {
      "epoch": 10.482758620689655,
      "grad_norm": 4.052529335021973,
      "learning_rate": 7.22860791826309e-05,
      "loss": 1.8047,
      "step": 304
    },
    {
      "epoch": 10.551724137931034,
      "grad_norm": 3.5931994915008545,
      "learning_rate": 7.203065134099617e-05,
      "loss": 1.8343,
      "step": 306
    },
    {
      "epoch": 10.620689655172415,
      "grad_norm": 5.292781829833984,
      "learning_rate": 7.177522349936143e-05,
      "loss": 1.8535,
      "step": 308
    },
    {
      "epoch": 10.689655172413794,
      "grad_norm": 4.688461780548096,
      "learning_rate": 7.151979565772669e-05,
      "loss": 1.7891,
      "step": 310
    },
    {
      "epoch": 10.758620689655173,
      "grad_norm": 4.041657447814941,
      "learning_rate": 7.126436781609196e-05,
      "loss": 1.8033,
      "step": 312
    },
    {
      "epoch": 10.827586206896552,
      "grad_norm": 4.2664875984191895,
      "learning_rate": 7.100893997445722e-05,
      "loss": 1.8376,
      "step": 314
    },
    {
      "epoch": 10.89655172413793,
      "grad_norm": 3.7928178310394287,
      "learning_rate": 7.075351213282248e-05,
      "loss": 1.8396,
      "step": 316
    },
    {
      "epoch": 10.96551724137931,
      "grad_norm": 3.8722920417785645,
      "learning_rate": 7.049808429118773e-05,
      "loss": 1.7922,
      "step": 318
    },
    {
      "epoch": 11.0,
      "eval_accuracy": 0.2727272727272727,
      "eval_f1_macro": 0.12215041024932503,
      "eval_f1_micro": 0.2727272727272727,
      "eval_f1_weighted": 0.16519086921061796,
      "eval_loss": 1.8491514921188354,
      "eval_precision_macro": 0.14870509607351715,
      "eval_precision_micro": 0.2727272727272727,
      "eval_precision_weighted": 0.1859826333510544,
      "eval_recall_macro": 0.19833711262282688,
      "eval_recall_micro": 0.2727272727272727,
      "eval_recall_weighted": 0.2727272727272727,
      "eval_runtime": 2.2213,
      "eval_samples_per_second": 59.426,
      "eval_steps_per_second": 7.653,
      "step": 319
    },
    {
      "epoch": 11.03448275862069,
      "grad_norm": 3.136098623275757,
      "learning_rate": 7.0242656449553e-05,
      "loss": 1.7807,
      "step": 320
    },
    {
      "epoch": 11.10344827586207,
      "grad_norm": 4.442017078399658,
      "learning_rate": 6.998722860791826e-05,
      "loss": 1.8544,
      "step": 322
    },
    {
      "epoch": 11.172413793103448,
      "grad_norm": 3.425168752670288,
      "learning_rate": 6.973180076628352e-05,
      "loss": 1.7325,
      "step": 324
    },
    {
      "epoch": 11.241379310344827,
      "grad_norm": 3.21671462059021,
      "learning_rate": 6.947637292464879e-05,
      "loss": 1.7336,
      "step": 326
    },
    {
      "epoch": 11.310344827586206,
      "grad_norm": 5.034449100494385,
      "learning_rate": 6.922094508301405e-05,
      "loss": 1.8505,
      "step": 328
    },
    {
      "epoch": 11.379310344827585,
      "grad_norm": 4.4944586753845215,
      "learning_rate": 6.896551724137931e-05,
      "loss": 1.8836,
      "step": 330
    },
    {
      "epoch": 11.448275862068966,
      "grad_norm": 4.909271717071533,
      "learning_rate": 6.871008939974458e-05,
      "loss": 1.9185,
      "step": 332
    },
    {
      "epoch": 11.517241379310345,
      "grad_norm": 4.58135461807251,
      "learning_rate": 6.845466155810984e-05,
      "loss": 1.7199,
      "step": 334
    },
    {
      "epoch": 11.586206896551724,
      "grad_norm": 4.4748101234436035,
      "learning_rate": 6.81992337164751e-05,
      "loss": 1.7971,
      "step": 336
    },
    {
      "epoch": 11.655172413793103,
      "grad_norm": 3.573788642883301,
      "learning_rate": 6.794380587484037e-05,
      "loss": 1.8574,
      "step": 338
    },
    {
      "epoch": 11.724137931034482,
      "grad_norm": 6.425858020782471,
      "learning_rate": 6.768837803320563e-05,
      "loss": 1.801,
      "step": 340
    },
    {
      "epoch": 11.793103448275861,
      "grad_norm": 2.9800500869750977,
      "learning_rate": 6.74329501915709e-05,
      "loss": 1.7888,
      "step": 342
    },
    {
      "epoch": 11.862068965517242,
      "grad_norm": 5.478114128112793,
      "learning_rate": 6.717752234993616e-05,
      "loss": 1.8653,
      "step": 344
    },
    {
      "epoch": 11.931034482758621,
      "grad_norm": 5.272279262542725,
      "learning_rate": 6.69220945083014e-05,
      "loss": 1.7173,
      "step": 346
    },
    {
      "epoch": 12.0,
      "grad_norm": 5.45868444442749,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.8398,
      "step": 348
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.2727272727272727,
      "eval_f1_macro": 0.13714358730970028,
      "eval_f1_micro": 0.2727272727272727,
      "eval_f1_weighted": 0.1809634164285327,
      "eval_loss": 1.8496934175491333,
      "eval_precision_macro": 0.13676194668930747,
      "eval_precision_micro": 0.2727272727272727,
      "eval_precision_weighted": 0.17243505937650777,
      "eval_recall_macro": 0.20220710506424794,
      "eval_recall_micro": 0.2727272727272727,
      "eval_recall_weighted": 0.2727272727272727,
      "eval_runtime": 2.2325,
      "eval_samples_per_second": 59.127,
      "eval_steps_per_second": 7.615,
      "step": 348
    },
    {
      "epoch": 12.068965517241379,
      "grad_norm": 6.196179389953613,
      "learning_rate": 6.641123882503193e-05,
      "loss": 1.7649,
      "step": 350
    },
    {
      "epoch": 12.137931034482758,
      "grad_norm": 6.698978900909424,
      "learning_rate": 6.61558109833972e-05,
      "loss": 1.6722,
      "step": 352
    },
    {
      "epoch": 12.206896551724139,
      "grad_norm": 5.01690149307251,
      "learning_rate": 6.590038314176246e-05,
      "loss": 1.8412,
      "step": 354
    },
    {
      "epoch": 12.275862068965518,
      "grad_norm": 3.192455768585205,
      "learning_rate": 6.564495530012772e-05,
      "loss": 1.6607,
      "step": 356
    },
    {
      "epoch": 12.344827586206897,
      "grad_norm": 4.327828407287598,
      "learning_rate": 6.538952745849299e-05,
      "loss": 1.7991,
      "step": 358
    },
    {
      "epoch": 12.413793103448276,
      "grad_norm": 5.359017848968506,
      "learning_rate": 6.513409961685824e-05,
      "loss": 1.8934,
      "step": 360
    },
    {
      "epoch": 12.482758620689655,
      "grad_norm": 3.2725300788879395,
      "learning_rate": 6.48786717752235e-05,
      "loss": 1.7054,
      "step": 362
    },
    {
      "epoch": 12.551724137931034,
      "grad_norm": 4.094532489776611,
      "learning_rate": 6.462324393358876e-05,
      "loss": 1.8921,
      "step": 364
    },
    {
      "epoch": 12.620689655172415,
      "grad_norm": 5.216372013092041,
      "learning_rate": 6.436781609195403e-05,
      "loss": 1.7687,
      "step": 366
    },
    {
      "epoch": 12.689655172413794,
      "grad_norm": 3.914337158203125,
      "learning_rate": 6.411238825031929e-05,
      "loss": 1.8095,
      "step": 368
    },
    {
      "epoch": 12.758620689655173,
      "grad_norm": 6.219154357910156,
      "learning_rate": 6.385696040868455e-05,
      "loss": 1.902,
      "step": 370
    },
    {
      "epoch": 12.827586206896552,
      "grad_norm": 4.809194564819336,
      "learning_rate": 6.360153256704982e-05,
      "loss": 1.7825,
      "step": 372
    },
    {
      "epoch": 12.89655172413793,
      "grad_norm": 3.219548463821411,
      "learning_rate": 6.334610472541508e-05,
      "loss": 1.8521,
      "step": 374
    },
    {
      "epoch": 12.96551724137931,
      "grad_norm": 6.680731296539307,
      "learning_rate": 6.309067688378033e-05,
      "loss": 1.8811,
      "step": 376
    },
    {
      "epoch": 13.0,
      "eval_accuracy": 0.24242424242424243,
      "eval_f1_macro": 0.10985890652557319,
      "eval_f1_micro": 0.24242424242424243,
      "eval_f1_weighted": 0.14835858585858586,
      "eval_loss": 1.835381269454956,
      "eval_precision_macro": 0.11703703703703702,
      "eval_precision_micro": 0.24242424242424243,
      "eval_precision_weighted": 0.14898989898989898,
      "eval_recall_macro": 0.1780196523053666,
      "eval_recall_micro": 0.24242424242424243,
      "eval_recall_weighted": 0.24242424242424243,
      "eval_runtime": 2.2298,
      "eval_samples_per_second": 59.199,
      "eval_steps_per_second": 7.624,
      "step": 377
    },
    {
      "epoch": 13.03448275862069,
      "grad_norm": 4.401123523712158,
      "learning_rate": 6.283524904214559e-05,
      "loss": 1.9167,
      "step": 378
    },
    {
      "epoch": 13.10344827586207,
      "grad_norm": 3.634641170501709,
      "learning_rate": 6.257982120051086e-05,
      "loss": 1.8415,
      "step": 380
    },
    {
      "epoch": 13.172413793103448,
      "grad_norm": 6.587606430053711,
      "learning_rate": 6.232439335887612e-05,
      "loss": 1.8362,
      "step": 382
    },
    {
      "epoch": 13.241379310344827,
      "grad_norm": 5.334833145141602,
      "learning_rate": 6.206896551724138e-05,
      "loss": 1.8492,
      "step": 384
    },
    {
      "epoch": 13.310344827586206,
      "grad_norm": 3.7885360717773438,
      "learning_rate": 6.181353767560665e-05,
      "loss": 1.7684,
      "step": 386
    },
    {
      "epoch": 13.379310344827585,
      "grad_norm": 4.235292434692383,
      "learning_rate": 6.155810983397191e-05,
      "loss": 1.75,
      "step": 388
    },
    {
      "epoch": 13.448275862068966,
      "grad_norm": 4.52523946762085,
      "learning_rate": 6.130268199233716e-05,
      "loss": 1.95,
      "step": 390
    },
    {
      "epoch": 13.517241379310345,
      "grad_norm": 5.339160442352295,
      "learning_rate": 6.104725415070242e-05,
      "loss": 1.7418,
      "step": 392
    },
    {
      "epoch": 13.586206896551724,
      "grad_norm": 4.966550827026367,
      "learning_rate": 6.0791826309067686e-05,
      "loss": 1.7327,
      "step": 394
    },
    {
      "epoch": 13.655172413793103,
      "grad_norm": 6.578965187072754,
      "learning_rate": 6.053639846743295e-05,
      "loss": 1.7523,
      "step": 396
    },
    {
      "epoch": 13.724137931034482,
      "grad_norm": 3.6884572505950928,
      "learning_rate": 6.028097062579821e-05,
      "loss": 1.7669,
      "step": 398
    },
    {
      "epoch": 13.793103448275861,
      "grad_norm": 4.842553615570068,
      "learning_rate": 6.0025542784163477e-05,
      "loss": 1.8003,
      "step": 400
    },
    {
      "epoch": 13.862068965517242,
      "grad_norm": 3.7844247817993164,
      "learning_rate": 5.977011494252874e-05,
      "loss": 1.757,
      "step": 402
    },
    {
      "epoch": 13.931034482758621,
      "grad_norm": 3.4938206672668457,
      "learning_rate": 5.9514687100893996e-05,
      "loss": 1.7313,
      "step": 404
    },
    {
      "epoch": 14.0,
      "grad_norm": 6.437168598175049,
      "learning_rate": 5.925925925925926e-05,
      "loss": 1.7813,
      "step": 406
    },
    {
      "epoch": 14.0,
      "eval_accuracy": 0.29545454545454547,
      "eval_f1_macro": 0.1445112714675598,
      "eval_f1_micro": 0.29545454545454547,
      "eval_f1_weighted": 0.19248850637826523,
      "eval_loss": 1.8299483060836792,
      "eval_precision_macro": 0.12736006683375103,
      "eval_precision_micro": 0.29545454545454547,
      "eval_precision_weighted": 0.16434299131667554,
      "eval_recall_macro": 0.2163869992441421,
      "eval_recall_micro": 0.29545454545454547,
      "eval_recall_weighted": 0.29545454545454547,
      "eval_runtime": 2.2129,
      "eval_samples_per_second": 59.65,
      "eval_steps_per_second": 7.682,
      "step": 406
    },
    {
      "epoch": 14.068965517241379,
      "grad_norm": 4.038322925567627,
      "learning_rate": 5.900383141762452e-05,
      "loss": 1.7202,
      "step": 408
    },
    {
      "epoch": 14.137931034482758,
      "grad_norm": 4.393911838531494,
      "learning_rate": 5.8748403575989787e-05,
      "loss": 1.6778,
      "step": 410
    },
    {
      "epoch": 14.206896551724139,
      "grad_norm": 4.293457508087158,
      "learning_rate": 5.849297573435505e-05,
      "loss": 1.7775,
      "step": 412
    },
    {
      "epoch": 14.275862068965518,
      "grad_norm": 4.4200029373168945,
      "learning_rate": 5.823754789272031e-05,
      "loss": 1.7642,
      "step": 414
    },
    {
      "epoch": 14.344827586206897,
      "grad_norm": 4.388584613800049,
      "learning_rate": 5.798212005108558e-05,
      "loss": 1.7514,
      "step": 416
    },
    {
      "epoch": 14.413793103448276,
      "grad_norm": 3.827840566635132,
      "learning_rate": 5.7726692209450826e-05,
      "loss": 1.7411,
      "step": 418
    },
    {
      "epoch": 14.482758620689655,
      "grad_norm": 4.515402793884277,
      "learning_rate": 5.747126436781609e-05,
      "loss": 1.7257,
      "step": 420
    },
    {
      "epoch": 14.551724137931034,
      "grad_norm": 5.480957508087158,
      "learning_rate": 5.721583652618135e-05,
      "loss": 1.8772,
      "step": 422
    },
    {
      "epoch": 14.620689655172415,
      "grad_norm": 3.7635657787323,
      "learning_rate": 5.6960408684546617e-05,
      "loss": 1.7874,
      "step": 424
    },
    {
      "epoch": 14.689655172413794,
      "grad_norm": 4.324371814727783,
      "learning_rate": 5.670498084291188e-05,
      "loss": 1.8347,
      "step": 426
    },
    {
      "epoch": 14.758620689655173,
      "grad_norm": 3.4559121131896973,
      "learning_rate": 5.644955300127714e-05,
      "loss": 1.7796,
      "step": 428
    },
    {
      "epoch": 14.827586206896552,
      "grad_norm": 4.614096164703369,
      "learning_rate": 5.6194125159642407e-05,
      "loss": 1.7538,
      "step": 430
    },
    {
      "epoch": 14.89655172413793,
      "grad_norm": 3.789339303970337,
      "learning_rate": 5.593869731800766e-05,
      "loss": 1.7835,
      "step": 432
    },
    {
      "epoch": 14.96551724137931,
      "grad_norm": 4.958347797393799,
      "learning_rate": 5.5683269476372927e-05,
      "loss": 1.8719,
      "step": 434
    },
    {
      "epoch": 15.0,
      "eval_accuracy": 0.29545454545454547,
      "eval_f1_macro": 0.16080977845683728,
      "eval_f1_micro": 0.29545454545454547,
      "eval_f1_weighted": 0.2083153279677344,
      "eval_loss": 1.821338176727295,
      "eval_precision_macro": 0.14618567012184033,
      "eval_precision_micro": 0.29545454545454547,
      "eval_precision_weighted": 0.18381906214353025,
      "eval_recall_macro": 0.22134542705971277,
      "eval_recall_micro": 0.29545454545454547,
      "eval_recall_weighted": 0.29545454545454547,
      "eval_runtime": 2.2044,
      "eval_samples_per_second": 59.881,
      "eval_steps_per_second": 7.712,
      "step": 435
    },
    {
      "epoch": 15.03448275862069,
      "grad_norm": 4.79347038269043,
      "learning_rate": 5.542784163473819e-05,
      "loss": 1.8913,
      "step": 436
    },
    {
      "epoch": 15.10344827586207,
      "grad_norm": 5.953100204467773,
      "learning_rate": 5.517241379310345e-05,
      "loss": 1.7682,
      "step": 438
    },
    {
      "epoch": 15.172413793103448,
      "grad_norm": 3.8751885890960693,
      "learning_rate": 5.491698595146872e-05,
      "loss": 1.7279,
      "step": 440
    },
    {
      "epoch": 15.241379310344827,
      "grad_norm": 3.8575809001922607,
      "learning_rate": 5.466155810983398e-05,
      "loss": 1.7103,
      "step": 442
    },
    {
      "epoch": 15.310344827586206,
      "grad_norm": 4.599151611328125,
      "learning_rate": 5.440613026819924e-05,
      "loss": 1.8022,
      "step": 444
    },
    {
      "epoch": 15.379310344827585,
      "grad_norm": 5.619429111480713,
      "learning_rate": 5.415070242656451e-05,
      "loss": 1.9189,
      "step": 446
    },
    {
      "epoch": 15.448275862068966,
      "grad_norm": 6.087721347808838,
      "learning_rate": 5.3895274584929756e-05,
      "loss": 1.7547,
      "step": 448
    },
    {
      "epoch": 15.517241379310345,
      "grad_norm": 4.4260029792785645,
      "learning_rate": 5.363984674329502e-05,
      "loss": 1.8112,
      "step": 450
    },
    {
      "epoch": 15.586206896551724,
      "grad_norm": 5.496058940887451,
      "learning_rate": 5.338441890166028e-05,
      "loss": 1.7604,
      "step": 452
    },
    {
      "epoch": 15.655172413793103,
      "grad_norm": 5.748514175415039,
      "learning_rate": 5.3128991060025547e-05,
      "loss": 1.7788,
      "step": 454
    },
    {
      "epoch": 15.724137931034482,
      "grad_norm": 3.9288198947906494,
      "learning_rate": 5.287356321839081e-05,
      "loss": 1.7339,
      "step": 456
    },
    {
      "epoch": 15.793103448275861,
      "grad_norm": 4.473173141479492,
      "learning_rate": 5.261813537675607e-05,
      "loss": 1.7975,
      "step": 458
    },
    {
      "epoch": 15.862068965517242,
      "grad_norm": 4.549223899841309,
      "learning_rate": 5.236270753512134e-05,
      "loss": 1.6772,
      "step": 460
    },
    {
      "epoch": 15.931034482758621,
      "grad_norm": 4.444834232330322,
      "learning_rate": 5.2107279693486586e-05,
      "loss": 1.7763,
      "step": 462
    },
    {
      "epoch": 16.0,
      "grad_norm": 4.395754814147949,
      "learning_rate": 5.185185185185185e-05,
      "loss": 1.7755,
      "step": 464
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.3181818181818182,
      "eval_f1_macro": 0.1734886862546437,
      "eval_f1_micro": 0.3181818181818182,
      "eval_f1_weighted": 0.22466688158177517,
      "eval_loss": 1.805698037147522,
      "eval_precision_macro": 0.15216773276474768,
      "eval_precision_micro": 0.3181818181818182,
      "eval_precision_weighted": 0.19211895070104024,
      "eval_recall_macro": 0.23915343915343915,
      "eval_recall_micro": 0.3181818181818182,
      "eval_recall_weighted": 0.3181818181818182,
      "eval_runtime": 2.2163,
      "eval_samples_per_second": 59.56,
      "eval_steps_per_second": 7.671,
      "step": 464
    },
    {
      "epoch": 16.06896551724138,
      "grad_norm": 6.367030620574951,
      "learning_rate": 5.159642401021711e-05,
      "loss": 1.8807,
      "step": 466
    },
    {
      "epoch": 16.137931034482758,
      "grad_norm": 4.813172340393066,
      "learning_rate": 5.1340996168582377e-05,
      "loss": 1.7648,
      "step": 468
    },
    {
      "epoch": 16.20689655172414,
      "grad_norm": 4.275113582611084,
      "learning_rate": 5.108556832694764e-05,
      "loss": 1.7831,
      "step": 470
    },
    {
      "epoch": 16.275862068965516,
      "grad_norm": 4.675595283508301,
      "learning_rate": 5.08301404853129e-05,
      "loss": 1.7815,
      "step": 472
    },
    {
      "epoch": 16.344827586206897,
      "grad_norm": 4.324057102203369,
      "learning_rate": 5.057471264367817e-05,
      "loss": 1.6777,
      "step": 474
    },
    {
      "epoch": 16.413793103448278,
      "grad_norm": 3.6266305446624756,
      "learning_rate": 5.031928480204342e-05,
      "loss": 1.8723,
      "step": 476
    },
    {
      "epoch": 16.482758620689655,
      "grad_norm": 5.731067657470703,
      "learning_rate": 5.0063856960408687e-05,
      "loss": 1.7068,
      "step": 478
    },
    {
      "epoch": 16.551724137931036,
      "grad_norm": 4.540092468261719,
      "learning_rate": 4.980842911877395e-05,
      "loss": 1.6402,
      "step": 480
    },
    {
      "epoch": 16.620689655172413,
      "grad_norm": 4.705528259277344,
      "learning_rate": 4.955300127713921e-05,
      "loss": 1.7596,
      "step": 482
    },
    {
      "epoch": 16.689655172413794,
      "grad_norm": 6.0757222175598145,
      "learning_rate": 4.929757343550448e-05,
      "loss": 1.7725,
      "step": 484
    },
    {
      "epoch": 16.75862068965517,
      "grad_norm": 5.314311504364014,
      "learning_rate": 4.904214559386973e-05,
      "loss": 1.6774,
      "step": 486
    },
    {
      "epoch": 16.82758620689655,
      "grad_norm": 5.615202903747559,
      "learning_rate": 4.8786717752234997e-05,
      "loss": 1.7108,
      "step": 488
    },
    {
      "epoch": 16.896551724137932,
      "grad_norm": 4.7275261878967285,
      "learning_rate": 4.853128991060026e-05,
      "loss": 1.7545,
      "step": 490
    },
    {
      "epoch": 16.96551724137931,
      "grad_norm": 5.45782470703125,
      "learning_rate": 4.827586206896552e-05,
      "loss": 1.7729,
      "step": 492
    },
    {
      "epoch": 17.0,
      "eval_accuracy": 0.3106060606060606,
      "eval_f1_macro": 0.16254470167513646,
      "eval_f1_micro": 0.3106060606060606,
      "eval_f1_weighted": 0.21289974847287096,
      "eval_loss": 1.7963801622390747,
      "eval_precision_macro": 0.14503829503829505,
      "eval_precision_micro": 0.3106060606060606,
      "eval_precision_weighted": 0.1843213604577241,
      "eval_recall_macro": 0.2312622826908541,
      "eval_recall_micro": 0.3106060606060606,
      "eval_recall_weighted": 0.3106060606060606,
      "eval_runtime": 2.2292,
      "eval_samples_per_second": 59.215,
      "eval_steps_per_second": 7.626,
      "step": 493
    },
    {
      "epoch": 17.03448275862069,
      "grad_norm": 4.644923210144043,
      "learning_rate": 4.802043422733078e-05,
      "loss": 1.747,
      "step": 494
    },
    {
      "epoch": 17.103448275862068,
      "grad_norm": 4.358439922332764,
      "learning_rate": 4.776500638569604e-05,
      "loss": 1.7769,
      "step": 496
    },
    {
      "epoch": 17.17241379310345,
      "grad_norm": 3.751288414001465,
      "learning_rate": 4.7509578544061307e-05,
      "loss": 1.6642,
      "step": 498
    },
    {
      "epoch": 17.24137931034483,
      "grad_norm": 4.00644588470459,
      "learning_rate": 4.725415070242657e-05,
      "loss": 1.6688,
      "step": 500
    },
    {
      "epoch": 17.310344827586206,
      "grad_norm": 3.841867208480835,
      "learning_rate": 4.6998722860791827e-05,
      "loss": 1.6797,
      "step": 502
    },
    {
      "epoch": 17.379310344827587,
      "grad_norm": 4.297330856323242,
      "learning_rate": 4.674329501915709e-05,
      "loss": 1.7787,
      "step": 504
    },
    {
      "epoch": 17.448275862068964,
      "grad_norm": 5.752788543701172,
      "learning_rate": 4.648786717752235e-05,
      "loss": 1.7996,
      "step": 506
    },
    {
      "epoch": 17.517241379310345,
      "grad_norm": 5.417765140533447,
      "learning_rate": 4.623243933588761e-05,
      "loss": 1.7516,
      "step": 508
    },
    {
      "epoch": 17.586206896551722,
      "grad_norm": 4.176225662231445,
      "learning_rate": 4.597701149425287e-05,
      "loss": 1.8538,
      "step": 510
    },
    {
      "epoch": 17.655172413793103,
      "grad_norm": 4.021208763122559,
      "learning_rate": 4.5721583652618137e-05,
      "loss": 1.7611,
      "step": 512
    },
    {
      "epoch": 17.724137931034484,
      "grad_norm": 5.078979969024658,
      "learning_rate": 4.54661558109834e-05,
      "loss": 1.8121,
      "step": 514
    },
    {
      "epoch": 17.79310344827586,
      "grad_norm": 6.557715892791748,
      "learning_rate": 4.5210727969348656e-05,
      "loss": 1.7093,
      "step": 516
    },
    {
      "epoch": 17.862068965517242,
      "grad_norm": 4.687191009521484,
      "learning_rate": 4.495530012771392e-05,
      "loss": 1.6313,
      "step": 518
    },
    {
      "epoch": 17.93103448275862,
      "grad_norm": 4.512192249298096,
      "learning_rate": 4.469987228607918e-05,
      "loss": 1.679,
      "step": 520
    },
    {
      "epoch": 18.0,
      "grad_norm": 6.884960174560547,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.687,
      "step": 522
    },
    {
      "epoch": 18.0,
      "eval_accuracy": 0.3181818181818182,
      "eval_f1_macro": 0.171902638331797,
      "eval_f1_micro": 0.3181818181818182,
      "eval_f1_weighted": 0.22369161104555607,
      "eval_loss": 1.7864856719970703,
      "eval_precision_macro": 0.1576117713638936,
      "eval_precision_micro": 0.3181818181818182,
      "eval_precision_weighted": 0.19869107029548458,
      "eval_recall_macro": 0.2380650037792895,
      "eval_recall_micro": 0.3181818181818182,
      "eval_recall_weighted": 0.3181818181818182,
      "eval_runtime": 2.211,
      "eval_samples_per_second": 59.701,
      "eval_steps_per_second": 7.689,
      "step": 522
    },
    {
      "epoch": 18.06896551724138,
      "grad_norm": 4.3594160079956055,
      "learning_rate": 4.418901660280971e-05,
      "loss": 1.8299,
      "step": 524
    },
    {
      "epoch": 18.137931034482758,
      "grad_norm": 5.051939010620117,
      "learning_rate": 4.393358876117497e-05,
      "loss": 1.757,
      "step": 526
    },
    {
      "epoch": 18.20689655172414,
      "grad_norm": 4.713520050048828,
      "learning_rate": 4.367816091954024e-05,
      "loss": 1.6771,
      "step": 528
    },
    {
      "epoch": 18.275862068965516,
      "grad_norm": 5.125997543334961,
      "learning_rate": 4.342273307790549e-05,
      "loss": 1.7797,
      "step": 530
    },
    {
      "epoch": 18.344827586206897,
      "grad_norm": 4.016047954559326,
      "learning_rate": 4.3167305236270757e-05,
      "loss": 1.6967,
      "step": 532
    },
    {
      "epoch": 18.413793103448278,
      "grad_norm": 5.102571487426758,
      "learning_rate": 4.291187739463602e-05,
      "loss": 1.6547,
      "step": 534
    },
    {
      "epoch": 18.482758620689655,
      "grad_norm": 6.265757083892822,
      "learning_rate": 4.2656449553001277e-05,
      "loss": 1.8296,
      "step": 536
    },
    {
      "epoch": 18.551724137931036,
      "grad_norm": 6.232301712036133,
      "learning_rate": 4.240102171136654e-05,
      "loss": 1.691,
      "step": 538
    },
    {
      "epoch": 18.620689655172413,
      "grad_norm": 3.5012550354003906,
      "learning_rate": 4.21455938697318e-05,
      "loss": 1.6889,
      "step": 540
    },
    {
      "epoch": 18.689655172413794,
      "grad_norm": 4.482182502746582,
      "learning_rate": 4.189016602809707e-05,
      "loss": 1.7312,
      "step": 542
    },
    {
      "epoch": 18.75862068965517,
      "grad_norm": 5.147761821746826,
      "learning_rate": 4.163473818646232e-05,
      "loss": 1.7701,
      "step": 544
    },
    {
      "epoch": 18.82758620689655,
      "grad_norm": 4.915091514587402,
      "learning_rate": 4.1379310344827587e-05,
      "loss": 1.6374,
      "step": 546
    },
    {
      "epoch": 18.896551724137932,
      "grad_norm": 4.5154595375061035,
      "learning_rate": 4.112388250319285e-05,
      "loss": 1.7651,
      "step": 548
| }, | |
| { | |
| "epoch": 18.96551724137931, | |
| "grad_norm": 4.551173210144043, | |
| "learning_rate": 4.086845466155811e-05, | |
| "loss": 1.7207, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 19.0, | |
| "eval_accuracy": 0.3484848484848485, | |
| "eval_f1_macro": 0.182312925170068, | |
| "eval_f1_micro": 0.3484848484848485, | |
| "eval_f1_weighted": 0.2393939393939394, | |
| "eval_loss": 1.7770634889602661, | |
| "eval_precision_macro": 0.1572047000618429, | |
| "eval_precision_micro": 0.3484848484848485, | |
| "eval_precision_weighted": 0.20120031483667847, | |
| "eval_recall_macro": 0.2592290249433106, | |
| "eval_recall_micro": 0.3484848484848485, | |
| "eval_recall_weighted": 0.3484848484848485, | |
| "eval_runtime": 2.221, | |
| "eval_samples_per_second": 59.432, | |
| "eval_steps_per_second": 7.654, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 19.03448275862069, | |
| "grad_norm": 4.037620544433594, | |
| "learning_rate": 4.061302681992337e-05, | |
| "loss": 1.6686, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 19.103448275862068, | |
| "grad_norm": 3.9484567642211914, | |
| "learning_rate": 4.035759897828863e-05, | |
| "loss": 1.8479, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 19.17241379310345, | |
| "grad_norm": 5.732690811157227, | |
| "learning_rate": 4.0102171136653897e-05, | |
| "loss": 1.7569, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 19.24137931034483, | |
| "grad_norm": 4.171787261962891, | |
| "learning_rate": 3.984674329501916e-05, | |
| "loss": 1.7518, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 19.310344827586206, | |
| "grad_norm": 5.715792655944824, | |
| "learning_rate": 3.959131545338442e-05, | |
| "loss": 1.6642, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 19.379310344827587, | |
| "grad_norm": 7.237026214599609, | |
| "learning_rate": 3.933588761174969e-05, | |
| "loss": 1.7571, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 19.448275862068964, | |
| "grad_norm": 3.543245315551758, | |
| "learning_rate": 3.908045977011495e-05, | |
| "loss": 1.5834, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 19.517241379310345, | |
| "grad_norm": 4.249347686767578, | |
| "learning_rate": 3.8825031928480207e-05, | |
| "loss": 1.5733, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 19.586206896551722, | |
| "grad_norm": 5.662441253662109, | |
| "learning_rate": 3.856960408684547e-05, | |
| "loss": 1.6864, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 19.655172413793103, | |
| "grad_norm": 6.215047359466553, | |
| "learning_rate": 3.831417624521073e-05, | |
| "loss": 1.6699, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 19.724137931034484, | |
| "grad_norm": 5.711146831512451, | |
| "learning_rate": 3.805874840357599e-05, | |
| "loss": 1.8693, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 19.79310344827586, | |
| "grad_norm": 4.606837272644043, | |
| "learning_rate": 3.780332056194125e-05, | |
| "loss": 1.5954, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 19.862068965517242, | |
| "grad_norm": 6.943862438201904, | |
| "learning_rate": 3.7547892720306517e-05, | |
| "loss": 1.7872, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 19.93103448275862, | |
| "grad_norm": 5.065245628356934, | |
| "learning_rate": 3.729246487867178e-05, | |
| "loss": 1.6941, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 5.687169075012207, | |
| "learning_rate": 3.7037037037037037e-05, | |
| "loss": 1.7066, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "eval_accuracy": 0.3484848484848485, | |
| "eval_f1_macro": 0.18567798567798569, | |
| "eval_f1_micro": 0.3484848484848485, | |
| "eval_f1_weighted": 0.24244209244209244, | |
| "eval_loss": 1.7671573162078857, | |
| "eval_precision_macro": 0.15778087964745646, | |
| "eval_precision_micro": 0.3484848484848485, | |
| "eval_precision_weighted": 0.20145312627623949, | |
| "eval_recall_macro": 0.2607407407407408, | |
| "eval_recall_micro": 0.3484848484848485, | |
| "eval_recall_weighted": 0.3484848484848485, | |
| "eval_runtime": 2.2113, | |
| "eval_samples_per_second": 59.694, | |
| "eval_steps_per_second": 7.688, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 20.06896551724138, | |
| "grad_norm": 5.638570785522461, | |
| "learning_rate": 3.67816091954023e-05, | |
| "loss": 1.6791, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 20.137931034482758, | |
| "grad_norm": 4.699033260345459, | |
| "learning_rate": 3.652618135376756e-05, | |
| "loss": 1.7742, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 20.20689655172414, | |
| "grad_norm": 4.162917137145996, | |
| "learning_rate": 3.627075351213283e-05, | |
| "loss": 1.8177, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 20.275862068965516, | |
| "grad_norm": 5.143803119659424, | |
| "learning_rate": 3.601532567049808e-05, | |
| "loss": 1.7313, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 20.344827586206897, | |
| "grad_norm": 4.561467170715332, | |
| "learning_rate": 3.5759897828863347e-05, | |
| "loss": 1.6146, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 20.413793103448278, | |
| "grad_norm": 4.308231830596924, | |
| "learning_rate": 3.550446998722861e-05, | |
| "loss": 1.6044, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 20.482758620689655, | |
| "grad_norm": 6.317595481872559, | |
| "learning_rate": 3.5249042145593867e-05, | |
| "loss": 1.7754, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 20.551724137931036, | |
| "grad_norm": 4.666116714477539, | |
| "learning_rate": 3.499361430395913e-05, | |
| "loss": 1.6148, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 20.620689655172413, | |
| "grad_norm": 5.611302852630615, | |
| "learning_rate": 3.473818646232439e-05, | |
| "loss": 1.7056, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 20.689655172413794, | |
| "grad_norm": 5.928587913513184, | |
| "learning_rate": 3.4482758620689657e-05, | |
| "loss": 1.7124, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 20.75862068965517, | |
| "grad_norm": 6.1681318283081055, | |
| "learning_rate": 3.422733077905492e-05, | |
| "loss": 1.5859, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 20.82758620689655, | |
| "grad_norm": 4.444830894470215, | |
| "learning_rate": 3.397190293742018e-05, | |
| "loss": 1.7249, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 20.896551724137932, | |
| "grad_norm": 3.6225712299346924, | |
| "learning_rate": 3.371647509578545e-05, | |
| "loss": 1.5999, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 20.96551724137931, | |
| "grad_norm": 5.365250587463379, | |
| "learning_rate": 3.34610472541507e-05, | |
| "loss": 1.7726, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 21.0, | |
| "eval_accuracy": 0.36363636363636365, | |
| "eval_f1_macro": 0.21467827328369093, | |
| "eval_f1_micro": 0.36363636363636365, | |
| "eval_f1_weighted": 0.271005070392237, | |
| "eval_loss": 1.7596381902694702, | |
| "eval_precision_macro": 0.25301995798319327, | |
| "eval_precision_micro": 0.36363636363636365, | |
| "eval_precision_weighted": 0.29305694815805106, | |
| "eval_recall_macro": 0.27661375661375665, | |
| "eval_recall_micro": 0.36363636363636365, | |
| "eval_recall_weighted": 0.36363636363636365, | |
| "eval_runtime": 2.2081, | |
| "eval_samples_per_second": 59.781, | |
| "eval_steps_per_second": 7.699, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 21.03448275862069, | |
| "grad_norm": 3.9493439197540283, | |
| "learning_rate": 3.3205619412515967e-05, | |
| "loss": 1.6921, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 21.103448275862068, | |
| "grad_norm": 5.00963020324707, | |
| "learning_rate": 3.295019157088123e-05, | |
| "loss": 1.696, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 21.17241379310345, | |
| "grad_norm": 4.348812103271484, | |
| "learning_rate": 3.269476372924649e-05, | |
| "loss": 1.6821, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 21.24137931034483, | |
| "grad_norm": 4.707080364227295, | |
| "learning_rate": 3.243933588761175e-05, | |
| "loss": 1.666, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 21.310344827586206, | |
| "grad_norm": 4.679585933685303, | |
| "learning_rate": 3.218390804597701e-05, | |
| "loss": 1.6141, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 21.379310344827587, | |
| "grad_norm": 5.1318206787109375, | |
| "learning_rate": 3.192848020434228e-05, | |
| "loss": 1.6043, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 21.448275862068964, | |
| "grad_norm": 6.021131992340088, | |
| "learning_rate": 3.167305236270754e-05, | |
| "loss": 1.7097, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 21.517241379310345, | |
| "grad_norm": 4.765815258026123, | |
| "learning_rate": 3.1417624521072797e-05, | |
| "loss": 1.6913, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 21.586206896551722, | |
| "grad_norm": 5.306324005126953, | |
| "learning_rate": 3.116219667943806e-05, | |
| "loss": 1.6404, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 21.655172413793103, | |
| "grad_norm": 5.016156196594238, | |
| "learning_rate": 3.090676883780332e-05, | |
| "loss": 1.7227, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 21.724137931034484, | |
| "grad_norm": 4.8009467124938965, | |
| "learning_rate": 3.065134099616858e-05, | |
| "loss": 1.6554, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 21.79310344827586, | |
| "grad_norm": 7.088425636291504, | |
| "learning_rate": 3.0395913154533843e-05, | |
| "loss": 1.8347, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 21.862068965517242, | |
| "grad_norm": 6.788226127624512, | |
| "learning_rate": 3.0140485312899107e-05, | |
| "loss": 1.7245, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 21.93103448275862, | |
| "grad_norm": 6.722936630249023, | |
| "learning_rate": 2.988505747126437e-05, | |
| "loss": 1.6157, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "grad_norm": 7.610160827636719, | |
| "learning_rate": 2.962962962962963e-05, | |
| "loss": 1.7349, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "eval_accuracy": 0.3484848484848485, | |
| "eval_f1_macro": 0.2080950684531247, | |
| "eval_f1_micro": 0.3484848484848485, | |
| "eval_f1_weighted": 0.2626815716329783, | |
| "eval_loss": 1.7516624927520752, | |
| "eval_precision_macro": 0.21449367971001024, | |
| "eval_precision_micro": 0.3484848484848485, | |
| "eval_precision_weighted": 0.25538281727489603, | |
| "eval_recall_macro": 0.26603174603174606, | |
| "eval_recall_micro": 0.3484848484848485, | |
| "eval_recall_weighted": 0.3484848484848485, | |
| "eval_runtime": 2.2067, | |
| "eval_samples_per_second": 59.819, | |
| "eval_steps_per_second": 7.704, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 22.06896551724138, | |
| "grad_norm": 4.454886436462402, | |
| "learning_rate": 2.9374201787994893e-05, | |
| "loss": 1.7828, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 22.137931034482758, | |
| "grad_norm": 5.4481024742126465, | |
| "learning_rate": 2.9118773946360157e-05, | |
| "loss": 1.5865, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 22.20689655172414, | |
| "grad_norm": 4.0386786460876465, | |
| "learning_rate": 2.8863346104725413e-05, | |
| "loss": 1.5736, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 22.275862068965516, | |
| "grad_norm": 5.739555835723877, | |
| "learning_rate": 2.8607918263090677e-05, | |
| "loss": 1.6133, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 22.344827586206897, | |
| "grad_norm": 4.515993118286133, | |
| "learning_rate": 2.835249042145594e-05, | |
| "loss": 1.6482, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 22.413793103448278, | |
| "grad_norm": 4.678834438323975, | |
| "learning_rate": 2.8097062579821203e-05, | |
| "loss": 1.668, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 22.482758620689655, | |
| "grad_norm": 6.044206142425537, | |
| "learning_rate": 2.7841634738186463e-05, | |
| "loss": 1.7136, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 22.551724137931036, | |
| "grad_norm": 5.093865871429443, | |
| "learning_rate": 2.7586206896551727e-05, | |
| "loss": 1.5431, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 22.620689655172413, | |
| "grad_norm": 7.351655960083008, | |
| "learning_rate": 2.733077905491699e-05, | |
| "loss": 1.7295, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 22.689655172413794, | |
| "grad_norm": 5.317907810211182, | |
| "learning_rate": 2.7075351213282253e-05, | |
| "loss": 1.7802, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 22.75862068965517, | |
| "grad_norm": 4.974449634552002, | |
| "learning_rate": 2.681992337164751e-05, | |
| "loss": 1.6687, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 22.82758620689655, | |
| "grad_norm": 5.984342098236084, | |
| "learning_rate": 2.6564495530012773e-05, | |
| "loss": 1.7008, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 22.896551724137932, | |
| "grad_norm": 4.135770797729492, | |
| "learning_rate": 2.6309067688378037e-05, | |
| "loss": 1.7022, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 22.96551724137931, | |
| "grad_norm": 4.533845901489258, | |
| "learning_rate": 2.6053639846743293e-05, | |
| "loss": 1.7956, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 23.0, | |
| "eval_accuracy": 0.3560606060606061, | |
| "eval_f1_macro": 0.20183300183300185, | |
| "eval_f1_micro": 0.3560606060606061, | |
| "eval_f1_weighted": 0.2590387908569726, | |
| "eval_loss": 1.7437478303909302, | |
| "eval_precision_macro": 0.1970157110463233, | |
| "eval_precision_micro": 0.3560606060606061, | |
| "eval_precision_weighted": 0.2402114254792826, | |
| "eval_recall_macro": 0.2686772486772487, | |
| "eval_recall_micro": 0.3560606060606061, | |
| "eval_recall_weighted": 0.3560606060606061, | |
| "eval_runtime": 2.2042, | |
| "eval_samples_per_second": 59.886, | |
| "eval_steps_per_second": 7.713, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 23.03448275862069, | |
| "grad_norm": 5.614207744598389, | |
| "learning_rate": 2.5798212005108557e-05, | |
| "loss": 1.6665, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 23.103448275862068, | |
| "grad_norm": 5.001101016998291, | |
| "learning_rate": 2.554278416347382e-05, | |
| "loss": 1.5931, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 23.17241379310345, | |
| "grad_norm": 4.395004749298096, | |
| "learning_rate": 2.5287356321839083e-05, | |
| "loss": 1.6153, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 23.24137931034483, | |
| "grad_norm": 5.067261219024658, | |
| "learning_rate": 2.5031928480204343e-05, | |
| "loss": 1.6054, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 23.310344827586206, | |
| "grad_norm": 6.150606155395508, | |
| "learning_rate": 2.4776500638569607e-05, | |
| "loss": 1.6065, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 23.379310344827587, | |
| "grad_norm": 4.856930732727051, | |
| "learning_rate": 2.4521072796934867e-05, | |
| "loss": 1.6605, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 23.448275862068964, | |
| "grad_norm": 4.914318561553955, | |
| "learning_rate": 2.426564495530013e-05, | |
| "loss": 1.5948, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 23.517241379310345, | |
| "grad_norm": 5.621053218841553, | |
| "learning_rate": 2.401021711366539e-05, | |
| "loss": 1.569, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 23.586206896551722, | |
| "grad_norm": 5.078169345855713, | |
| "learning_rate": 2.3754789272030653e-05, | |
| "loss": 1.7492, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 23.655172413793103, | |
| "grad_norm": 6.632299900054932, | |
| "learning_rate": 2.3499361430395913e-05, | |
| "loss": 1.7247, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 23.724137931034484, | |
| "grad_norm": 5.01677942276001, | |
| "learning_rate": 2.3243933588761177e-05, | |
| "loss": 1.6018, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 23.79310344827586, | |
| "grad_norm": 4.650204181671143, | |
| "learning_rate": 2.2988505747126437e-05, | |
| "loss": 1.7903, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 23.862068965517242, | |
| "grad_norm": 6.075940132141113, | |
| "learning_rate": 2.27330779054917e-05, | |
| "loss": 1.7968, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 23.93103448275862, | |
| "grad_norm": 3.862063407897949, | |
| "learning_rate": 2.247765006385696e-05, | |
| "loss": 1.574, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "grad_norm": 6.3330512046813965, | |
| "learning_rate": 2.2222222222222223e-05, | |
| "loss": 1.4672, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "eval_accuracy": 0.36363636363636365, | |
| "eval_f1_macro": 0.2032920447014192, | |
| "eval_f1_micro": 0.36363636363636365, | |
| "eval_f1_weighted": 0.26106471449416574, | |
| "eval_loss": 1.7264463901519775, | |
| "eval_precision_macro": 0.29745508048728786, | |
| "eval_precision_micro": 0.36363636363636365, | |
| "eval_precision_weighted": 0.33558615746361226, | |
| "eval_recall_macro": 0.27396825396825397, | |
| "eval_recall_micro": 0.36363636363636365, | |
| "eval_recall_weighted": 0.36363636363636365, | |
| "eval_runtime": 2.2072, | |
| "eval_samples_per_second": 59.805, | |
| "eval_steps_per_second": 7.702, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 24.06896551724138, | |
| "grad_norm": 4.276838779449463, | |
| "learning_rate": 2.1966794380587487e-05, | |
| "loss": 1.7189, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 24.137931034482758, | |
| "grad_norm": 5.133126258850098, | |
| "learning_rate": 2.1711366538952747e-05, | |
| "loss": 1.7539, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 24.20689655172414, | |
| "grad_norm": 4.256742477416992, | |
| "learning_rate": 2.145593869731801e-05, | |
| "loss": 1.6211, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 24.275862068965516, | |
| "grad_norm": 4.8324360847473145, | |
| "learning_rate": 2.120051085568327e-05, | |
| "loss": 1.6121, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 24.344827586206897, | |
| "grad_norm": 4.822171688079834, | |
| "learning_rate": 2.0945083014048533e-05, | |
| "loss": 1.7022, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 24.413793103448278, | |
| "grad_norm": 4.1980438232421875, | |
| "learning_rate": 2.0689655172413793e-05, | |
| "loss": 1.6148, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 24.482758620689655, | |
| "grad_norm": 5.895715236663818, | |
| "learning_rate": 2.0434227330779057e-05, | |
| "loss": 1.5644, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 24.551724137931036, | |
| "grad_norm": 5.411371231079102, | |
| "learning_rate": 2.0178799489144317e-05, | |
| "loss": 1.551, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 24.620689655172413, | |
| "grad_norm": 4.361970901489258, | |
| "learning_rate": 1.992337164750958e-05, | |
| "loss": 1.5996, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 24.689655172413794, | |
| "grad_norm": 4.542662143707275, | |
| "learning_rate": 1.9667943805874843e-05, | |
| "loss": 1.5886, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 24.75862068965517, | |
| "grad_norm": 3.855168104171753, | |
| "learning_rate": 1.9412515964240103e-05, | |
| "loss": 1.5883, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 24.82758620689655, | |
| "grad_norm": 6.18347692489624, | |
| "learning_rate": 1.9157088122605367e-05, | |
| "loss": 1.7291, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 24.896551724137932, | |
| "grad_norm": 4.442874431610107, | |
| "learning_rate": 1.8901660280970627e-05, | |
| "loss": 1.5939, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 24.96551724137931, | |
| "grad_norm": 4.181361675262451, | |
| "learning_rate": 1.864623243933589e-05, | |
| "loss": 1.6008, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 25.0, | |
| "eval_accuracy": 0.3787878787878788, | |
| "eval_f1_macro": 0.23228074444970742, | |
| "eval_f1_micro": 0.3787878787878788, | |
| "eval_f1_weighted": 0.29046808994833084, | |
| "eval_loss": 1.723252773284912, | |
| "eval_precision_macro": 0.25331848384173966, | |
| "eval_precision_micro": 0.3787878787878788, | |
| "eval_precision_weighted": 0.2962727833301301, | |
| "eval_recall_macro": 0.28984126984126984, | |
| "eval_recall_micro": 0.3787878787878788, | |
| "eval_recall_weighted": 0.3787878787878788, | |
| "eval_runtime": 2.2165, | |
| "eval_samples_per_second": 59.553, | |
| "eval_steps_per_second": 7.67, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 25.03448275862069, | |
| "grad_norm": 7.012096405029297, | |
| "learning_rate": 1.839080459770115e-05, | |
| "loss": 1.8382, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 25.103448275862068, | |
| "grad_norm": 4.398463726043701, | |
| "learning_rate": 1.8135376756066413e-05, | |
| "loss": 1.6287, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 25.17241379310345, | |
| "grad_norm": 6.731529235839844, | |
| "learning_rate": 1.7879948914431673e-05, | |
| "loss": 1.7079, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 25.24137931034483, | |
| "grad_norm": 4.462522506713867, | |
| "learning_rate": 1.7624521072796933e-05, | |
| "loss": 1.5696, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 25.310344827586206, | |
| "grad_norm": 4.669303894042969, | |
| "learning_rate": 1.7369093231162197e-05, | |
| "loss": 1.6303, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 25.379310344827587, | |
| "grad_norm": 4.23646354675293, | |
| "learning_rate": 1.711366538952746e-05, | |
| "loss": 1.751, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 25.448275862068964, | |
| "grad_norm": 6.706507205963135, | |
| "learning_rate": 1.6858237547892723e-05, | |
| "loss": 1.6732, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 25.517241379310345, | |
| "grad_norm": 5.986099720001221, | |
| "learning_rate": 1.6602809706257983e-05, | |
| "loss": 1.6258, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 25.586206896551722, | |
| "grad_norm": 5.9361395835876465, | |
| "learning_rate": 1.6347381864623247e-05, | |
| "loss": 1.6484, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 25.655172413793103, | |
| "grad_norm": 5.526943206787109, | |
| "learning_rate": 1.6091954022988507e-05, | |
| "loss": 1.5664, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 25.724137931034484, | |
| "grad_norm": 6.878753662109375, | |
| "learning_rate": 1.583652618135377e-05, | |
| "loss": 1.6655, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 25.79310344827586, | |
| "grad_norm": 6.105263710021973, | |
| "learning_rate": 1.558109833971903e-05, | |
| "loss": 1.7125, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 25.862068965517242, | |
| "grad_norm": 5.406219959259033, | |
| "learning_rate": 1.532567049808429e-05, | |
| "loss": 1.5567, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 25.93103448275862, | |
| "grad_norm": 6.4433417320251465, | |
| "learning_rate": 1.5070242656449553e-05, | |
| "loss": 1.5778, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "grad_norm": 6.992711067199707, | |
| "learning_rate": 1.4814814814814815e-05, | |
| "loss": 1.6899, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "eval_accuracy": 0.3787878787878788, | |
| "eval_f1_macro": 0.22610970879960327, | |
| "eval_f1_micro": 0.3787878787878788, | |
| "eval_f1_weighted": 0.2851857523136639, | |
| "eval_loss": 1.7198569774627686, | |
| "eval_precision_macro": 0.24255156608097783, | |
| "eval_precision_micro": 0.3787878787878788, | |
| "eval_precision_weighted": 0.28629881704748017, | |
| "eval_recall_macro": 0.2887074829931973, | |
| "eval_recall_micro": 0.3787878787878788, | |
| "eval_recall_weighted": 0.3787878787878788, | |
| "eval_runtime": 2.2071, | |
| "eval_samples_per_second": 59.807, | |
| "eval_steps_per_second": 7.702, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 26.06896551724138, | |
| "grad_norm": 6.103085517883301, | |
| "learning_rate": 1.4559386973180078e-05, | |
| "loss": 1.6087, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 26.137931034482758, | |
| "grad_norm": 4.45341157913208, | |
| "learning_rate": 1.4303959131545338e-05, | |
| "loss": 1.6056, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 26.20689655172414, | |
| "grad_norm": 4.392666339874268, | |
| "learning_rate": 1.4048531289910602e-05, | |
| "loss": 1.5663, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 26.275862068965516, | |
| "grad_norm": 5.263143062591553, | |
| "learning_rate": 1.3793103448275863e-05, | |
| "loss": 1.5499, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 26.344827586206897, | |
| "grad_norm": 4.569793701171875, | |
| "learning_rate": 1.3537675606641127e-05, | |
| "loss": 1.5761, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 26.413793103448278, | |
| "grad_norm": 3.877281427383423, | |
| "learning_rate": 1.3282247765006387e-05, | |
| "loss": 1.5345, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 26.482758620689655, | |
| "grad_norm": 9.873696327209473, | |
| "learning_rate": 1.3026819923371647e-05, | |
| "loss": 1.7392, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 26.551724137931036, | |
| "grad_norm": 5.7916131019592285, | |
| "learning_rate": 1.277139208173691e-05, | |
| "loss": 1.684, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 26.620689655172413, | |
| "grad_norm": 5.101012229919434, | |
| "learning_rate": 1.2515964240102172e-05, | |
| "loss": 1.6279, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 26.689655172413794, | |
| "grad_norm": 5.251717567443848, | |
| "learning_rate": 1.2260536398467433e-05, | |
| "loss": 1.7422, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 26.75862068965517, | |
| "grad_norm": 4.35264778137207, | |
| "learning_rate": 1.2005108556832695e-05, | |
| "loss": 1.611, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 26.82758620689655, | |
| "grad_norm": 7.342212200164795, | |
| "learning_rate": 1.1749680715197957e-05, | |
| "loss": 1.7272, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 26.896551724137932, | |
| "grad_norm": 4.656854152679443, | |
| "learning_rate": 1.1494252873563218e-05, | |
| "loss": 1.4436, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 26.96551724137931, | |
| "grad_norm": 6.768021583557129, | |
| "learning_rate": 1.123882503192848e-05, | |
| "loss": 1.7073, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 27.0, | |
| "eval_accuracy": 0.3712121212121212, | |
| "eval_f1_macro": 0.21709636119574008, | |
| "eval_f1_micro": 0.3712121212121212, | |
| "eval_f1_weighted": 0.27520239970832855, | |
| "eval_loss": 1.7113381624221802, | |
| "eval_precision_macro": 0.23052503052503054, | |
| "eval_precision_micro": 0.3712121212121212, | |
| "eval_precision_weighted": 0.27291504791504795, | |
| "eval_recall_macro": 0.28190476190476194, | |
| "eval_recall_micro": 0.3712121212121212, | |
| "eval_recall_weighted": 0.3712121212121212, | |
| "eval_runtime": 2.2009, | |
| "eval_samples_per_second": 59.976, | |
| "eval_steps_per_second": 7.724, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 27.03448275862069, | |
| "grad_norm": 8.927560806274414, | |
| "learning_rate": 1.0983397190293743e-05, | |
| "loss": 1.7602, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 27.103448275862068, | |
| "grad_norm": 6.733327865600586, | |
| "learning_rate": 1.0727969348659005e-05, | |
| "loss": 1.4922, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 27.17241379310345, | |
| "grad_norm": 6.046846389770508, | |
| "learning_rate": 1.0472541507024267e-05, | |
| "loss": 1.615, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 27.24137931034483, | |
| "grad_norm": 4.5763373374938965, | |
| "learning_rate": 1.0217113665389528e-05, | |
| "loss": 1.6602, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 27.310344827586206, | |
| "grad_norm": 3.9738235473632812, | |
| "learning_rate": 9.96168582375479e-06, | |
| "loss": 1.6482, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 27.379310344827587, | |
| "grad_norm": 5.073862552642822, | |
| "learning_rate": 9.706257982120052e-06, | |
| "loss": 1.6868, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 27.448275862068964, | |
| "grad_norm": 3.963151693344116, | |
| "learning_rate": 9.450830140485313e-06, | |
| "loss": 1.5909, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 27.517241379310345, | |
| "grad_norm": 8.115581512451172, | |
| "learning_rate": 9.195402298850575e-06, | |
| "loss": 1.7228, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 27.586206896551722, | |
| "grad_norm": 6.24854040145874, | |
| "learning_rate": 8.939974457215837e-06, | |
| "loss": 1.5304, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 27.655172413793103, | |
| "grad_norm": 6.349370956420898, | |
| "learning_rate": 8.684546615581098e-06, | |
| "loss": 1.621, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 27.724137931034484, | |
| "grad_norm": 5.329178333282471, | |
| "learning_rate": 8.429118773946362e-06, | |
| "loss": 1.5858, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 27.79310344827586, | |
| "grad_norm": 7.521311283111572, | |
| "learning_rate": 8.173690932311623e-06, | |
| "loss": 1.6764, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 27.862068965517242, | |
| "grad_norm": 7.599664211273193, | |
| "learning_rate": 7.918263090676885e-06, | |
| "loss": 1.6693, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 27.93103448275862, | |
| "grad_norm": 4.939934730529785, | |
| "learning_rate": 7.662835249042145e-06, | |
| "loss": 1.6308, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "grad_norm": 4.710191249847412, | |
| "learning_rate": 7.4074074074074075e-06, | |
| "loss": 1.6558, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "eval_accuracy": 0.38636363636363635, | |
| "eval_f1_macro": 0.23114458131879734, | |
| "eval_f1_micro": 0.38636363636363635, | |
| "eval_f1_weighted": 0.2922915345310024, | |
| "eval_loss": 1.699568748474121, | |
| "eval_precision_macro": 0.2212253193960511, | |
| "eval_precision_micro": 0.38636363636363635, | |
| "eval_precision_weighted": 0.26765983000739096, | |
| "eval_recall_macro": 0.29551020408163264, | |
| "eval_recall_micro": 0.38636363636363635, | |
| "eval_recall_weighted": 0.38636363636363635, | |
| "eval_runtime": 2.2967, | |
| "eval_samples_per_second": 57.473, | |
| "eval_steps_per_second": 7.402, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 28.06896551724138, | |
| "grad_norm": 6.4893717765808105, | |
| "learning_rate": 7.151979565772669e-06, | |
| "loss": 1.6501, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 28.137931034482758, | |
| "grad_norm": 4.598632335662842, | |
| "learning_rate": 6.896551724137932e-06, | |
| "loss": 1.6385, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 28.20689655172414, | |
| "grad_norm": 4.8138837814331055, | |
| "learning_rate": 6.641123882503193e-06, | |
| "loss": 1.472, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 28.275862068965516, | |
| "grad_norm": 5.637802600860596, | |
| "learning_rate": 6.385696040868455e-06, | |
| "loss": 1.5484, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 28.344827586206897, | |
| "grad_norm": 7.790553092956543, | |
| "learning_rate": 6.130268199233717e-06, | |
| "loss": 1.8, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 28.413793103448278, | |
| "grad_norm": 7.774055004119873, | |
| "learning_rate": 5.874840357598978e-06, | |
| "loss": 1.5973, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 28.482758620689655, | |
| "grad_norm": 5.696927070617676, | |
| "learning_rate": 5.61941251596424e-06, | |
| "loss": 1.6791, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 28.551724137931036, | |
| "grad_norm": 7.728368759155273, | |
| "learning_rate": 5.3639846743295025e-06, | |
| "loss": 1.7006, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 28.620689655172413, | |
| "grad_norm": 4.742649555206299, | |
| "learning_rate": 5.108556832694764e-06, | |
| "loss": 1.581, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 28.689655172413794, | |
| "grad_norm": 4.426894187927246, | |
| "learning_rate": 4.853128991060026e-06, | |
| "loss": 1.6707, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 28.75862068965517, | |
| "grad_norm": 4.937201499938965, | |
| "learning_rate": 4.5977011494252875e-06, | |
| "loss": 1.5522, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 28.82758620689655, | |
| "grad_norm": 4.303575038909912, | |
| "learning_rate": 4.342273307790549e-06, | |
| "loss": 1.6571, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 28.896551724137932, | |
| "grad_norm": 6.8073296546936035, | |
| "learning_rate": 4.086845466155812e-06, | |
| "loss": 1.5614, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 28.96551724137931, | |
| "grad_norm": 4.116281509399414, | |
| "learning_rate": 3.8314176245210725e-06, | |
| "loss": 1.4732, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 29.0, | |
| "eval_accuracy": 0.3787878787878788, | |
| "eval_f1_macro": 0.23203850973587817, | |
| "eval_f1_micro": 0.3787878787878788, | |
| "eval_f1_weighted": 0.2901230293828978, | |
| "eval_loss": 1.7078348398208618, | |
| "eval_precision_macro": 0.2300817763002637, | |
| "eval_precision_micro": 0.3787878787878788, | |
| "eval_precision_weighted": 0.274184965361436, | |
| "eval_recall_macro": 0.2909297052154195, | |
| "eval_recall_micro": 0.3787878787878788, | |
| "eval_recall_weighted": 0.3787878787878788, | |
| "eval_runtime": 2.2263, | |
| "eval_samples_per_second": 59.292, | |
| "eval_steps_per_second": 7.636, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 29.03448275862069, | |
| "grad_norm": 5.656902313232422, | |
| "learning_rate": 3.5759897828863346e-06, | |
| "loss": 1.5156, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 29.103448275862068, | |
| "grad_norm": 7.74232292175293, | |
| "learning_rate": 3.3205619412515967e-06, | |
| "loss": 1.7414, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 29.17241379310345, | |
| "grad_norm": 5.7601799964904785, | |
| "learning_rate": 3.0651340996168583e-06, | |
| "loss": 1.5953, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 29.24137931034483, | |
| "grad_norm": 5.634422779083252, | |
| "learning_rate": 2.80970625798212e-06, | |
| "loss": 1.6147, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 29.310344827586206, | |
| "grad_norm": 5.589435577392578, | |
| "learning_rate": 2.554278416347382e-06, | |
| "loss": 1.5521, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 29.379310344827587, | |
| "grad_norm": 5.7706403732299805, | |
| "learning_rate": 2.2988505747126437e-06, | |
| "loss": 1.6071, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 29.448275862068964, | |
| "grad_norm": 5.294156074523926, | |
| "learning_rate": 2.043422733077906e-06, | |
| "loss": 1.6954, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 29.517241379310345, | |
| "grad_norm": 4.2079243659973145, | |
| "learning_rate": 1.7879948914431673e-06, | |
| "loss": 1.5138, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 29.586206896551722, | |
| "grad_norm": 4.128150939941406, | |
| "learning_rate": 1.5325670498084292e-06, | |
| "loss": 1.5856, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 29.655172413793103, | |
| "grad_norm": 8.734487533569336, | |
| "learning_rate": 1.277139208173691e-06, | |
| "loss": 1.5864, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 29.724137931034484, | |
| "grad_norm": 5.597517967224121, | |
| "learning_rate": 1.021711366538953e-06, | |
| "loss": 1.6514, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 29.79310344827586, | |
| "grad_norm": 5.811728000640869, | |
| "learning_rate": 7.662835249042146e-07, | |
| "loss": 1.615, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 29.862068965517242, | |
| "grad_norm": 5.624536514282227, | |
| "learning_rate": 5.108556832694765e-07, | |
| "loss": 1.5311, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 29.93103448275862, | |
| "grad_norm": 4.456348419189453, | |
| "learning_rate": 2.5542784163473823e-07, | |
| "loss": 1.5631, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 5.324625015258789, | |
| "learning_rate": 0.0, | |
| "loss": 1.6134, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "eval_accuracy": 0.3787878787878788, | |
| "eval_f1_macro": 0.22475201631593583, | |
| "eval_f1_micro": 0.3787878787878788, | |
| "eval_f1_weighted": 0.28452795311977663, | |
| "eval_loss": 1.7131966352462769, | |
| "eval_precision_macro": 0.22446225453744248, | |
| "eval_precision_micro": 0.3787878787878788, | |
| "eval_precision_weighted": 0.26920564815301656, | |
| "eval_recall_macro": 0.2887074829931973, | |
| "eval_recall_micro": 0.3787878787878788, | |
| "eval_recall_weighted": 0.3787878787878788, | |
| "eval_runtime": 2.2248, | |
| "eval_samples_per_second": 59.332, | |
| "eval_steps_per_second": 7.641, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "step": 870, | |
| "total_flos": 2.95765450596e+17, | |
| "train_loss": 1.7638174569469758, | |
| "train_runtime": 786.4902, | |
| "train_samples_per_second": 17.623, | |
| "train_steps_per_second": 1.106 | |
| } | |
| ], | |
| "logging_steps": 2, | |
| "max_steps": 870, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 30, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.95765450596e+17, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
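
The state above follows the standard Hugging Face Trainer `trainer_state.json` layout: `log_history` interleaves training entries (`loss`, `grad_norm`, `learning_rate`, emitted every `logging_steps` = 2 optimizer steps) with one block of `eval_*` metrics per epoch, and the top-level fields (`max_steps`, `num_train_epochs`, `train_batch_size`, `stateful_callbacks`, ...) describe the run itself. Below is a minimal sketch of how such a file can be read back with the Python standard library; the path `trainer_state.json` is an assumption for illustration, and none of this code comes from the trainer output itself.

import json

# Load the dumped trainer state; adjust the path to wherever the
# checkpoint directory actually lives (assumed path, for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

# Eval entries are the log_history items carrying eval_* keys; training
# entries carry loss/learning_rate instead, and the final train-summary
# entry (train_loss, train_runtime, ...) is excluded by the same filter.
evals = [e for e in state["log_history"] if "eval_loss" in e]
for e in evals:
    print(f"epoch {e['epoch']:>4.0f}: "
          f"eval_loss={e['eval_loss']:.4f}  "
          f"accuracy={e['eval_accuracy']:.4f}")

# best_metric is the best (lowest) eval_loss seen during training; it
# matches the epoch-28 eval block above (step 812, eval_loss ~1.6996).
print("best:", state["best_metric"], "->", state["best_model_checkpoint"])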