| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.5, |
| "eval_steps": 500, |
| "global_step": 5000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.001, |
| "grad_norm": 25.875, |
| "learning_rate": 1e-05, |
| "loss": 5.3992, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.002, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 4.1384, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.003, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 4.0857, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.004, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 3.5263, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.005, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 3.7144, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.006, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 3.6529, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.007, |
| "grad_norm": 27.625, |
| "learning_rate": 1e-05, |
| "loss": 3.4289, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.008, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 3.1815, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.009, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 3.3833, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 3.1461, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.011, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 3.166, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.012, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 3.255, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.013, |
| "grad_norm": 30.625, |
| "learning_rate": 1e-05, |
| "loss": 3.4299, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.014, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 3.308, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.015, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.9661, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.016, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.76, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.017, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.951, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.018, |
| "grad_norm": 24.375, |
| "learning_rate": 1e-05, |
| "loss": 2.9323, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.019, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8869, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8807, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.021, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6979, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.022, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 3.2043, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.023, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 3.1682, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.024, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 3.0403, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.025, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.9605, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.026, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7015, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.027, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.92, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.028, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.9273, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.029, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 3.0933, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 3.0358, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.031, |
| "grad_norm": 14.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6916, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.032, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 3.0805, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.033, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8995, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.034, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8352, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.035, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 3.1113, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.036, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 3.144, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.037, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 3.1943, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.038, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 3.0268, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.039, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.9316, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 27.875, |
| "learning_rate": 1e-05, |
| "loss": 3.0486, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.041, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8194, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.042, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7822, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.043, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.945, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.044, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.552, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.045, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.762, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.046, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6553, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.047, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8581, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.048, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7065, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.049, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5793, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7661, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.051, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 3.034, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.052, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7611, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.053, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.672, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.054, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 3.2146, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.055, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.9011, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.056, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8977, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.057, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8751, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.058, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8186, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.059, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.85, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5925, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.061, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8579, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.062, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.823, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.063, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9454, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.064, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6088, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.065, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7468, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.066, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5891, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.067, |
| "grad_norm": 22.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9447, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.068, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.91, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.069, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7367, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.9169, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.071, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.7177, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.072, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.7527, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.073, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8683, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.074, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8062, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.075, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 3.0231, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.076, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.642, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.077, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.9741, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.078, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6832, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.079, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5637, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4617, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.081, |
| "grad_norm": 24.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8996, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.082, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8013, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.083, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7277, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.084, |
| "grad_norm": 21.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8149, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.085, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6265, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.086, |
| "grad_norm": 23.125, |
| "learning_rate": 1e-05, |
| "loss": 2.7303, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.087, |
| "grad_norm": 16.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6056, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.088, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5529, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.089, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9246, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6179, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.091, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8344, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.092, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3406, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.093, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9006, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.094, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6893, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.095, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.664, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.096, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6405, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.097, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4632, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.098, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7559, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.099, |
| "grad_norm": 14.1875, |
| "learning_rate": 1e-05, |
| "loss": 2.7806, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5489, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.101, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6177, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.102, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8924, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.103, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.9148, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.104, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6254, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.105, |
| "grad_norm": 22.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7462, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.106, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6232, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.107, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7208, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.108, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7076, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.109, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7055, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7597, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.111, |
| "grad_norm": 15.1875, |
| "learning_rate": 1e-05, |
| "loss": 2.4683, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.112, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3723, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.113, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8105, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.114, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5635, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.115, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6278, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.116, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.797, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.117, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7868, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.118, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.888, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.119, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7304, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6133, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.121, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.9734, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.122, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.9616, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.123, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6913, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.124, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7537, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.125, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8173, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.126, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.9766, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.127, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6977, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.128, |
| "grad_norm": 15.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.8323, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.129, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6251, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7067, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.131, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7888, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.132, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.618, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.133, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6424, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.134, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5045, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.135, |
| "grad_norm": 22.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5698, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.136, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4595, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.137, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9727, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.138, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.81, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.139, |
| "grad_norm": 15.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.7279, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4741, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.141, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8221, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.142, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8687, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.143, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7914, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.144, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.699, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.145, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4823, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.146, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5993, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.147, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4884, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.148, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8256, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.149, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6345, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 13.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5908, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.151, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8215, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.152, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8448, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.153, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8291, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.154, |
| "grad_norm": 14.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3605, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.155, |
| "grad_norm": 15.0625, |
| "learning_rate": 1e-05, |
| "loss": 2.7866, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.156, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7259, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.157, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 3.1778, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.158, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7275, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.159, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5353, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5505, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.161, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4199, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.162, |
| "grad_norm": 15.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5307, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.163, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5556, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.164, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6736, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.165, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4342, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.166, |
| "grad_norm": 22.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6509, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.167, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7432, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.168, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8303, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.169, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6912, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6406, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.171, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6771, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.172, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8519, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.173, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8109, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.174, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6078, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.175, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8467, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.176, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 3.0072, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.177, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7086, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.178, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.9887, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.179, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3727, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6543, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.181, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4249, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.182, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3869, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.183, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6094, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.184, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5353, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.185, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5744, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.186, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5109, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.187, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.923, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.188, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.1993, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.189, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6776, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.728, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.191, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4932, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.192, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6727, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.193, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4958, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.194, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8153, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.195, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6896, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.196, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.776, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.197, |
| "grad_norm": 21.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8323, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.198, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5435, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.199, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.9077, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5479, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.201, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5075, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.202, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5987, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.203, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8348, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.204, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5709, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.205, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7491, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.206, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7234, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.207, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8423, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.208, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8345, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.209, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4739, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4002, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.211, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.945, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.212, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4584, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.213, |
| "grad_norm": 15.4375, |
| "learning_rate": 1e-05, |
| "loss": 2.758, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.214, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7057, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.215, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8857, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.216, |
| "grad_norm": 23.25, |
| "learning_rate": 1e-05, |
| "loss": 2.9281, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.217, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.9082, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.218, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6641, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.219, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5807, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 23.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7514, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.221, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6935, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.222, |
| "grad_norm": 23.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8666, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.223, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6467, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.224, |
| "grad_norm": 23.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5768, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.225, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5218, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.226, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4148, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.227, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.63, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.228, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.862, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.229, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6363, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4642, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.231, |
| "grad_norm": 15.0625, |
| "learning_rate": 1e-05, |
| "loss": 2.5706, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.232, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3966, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.233, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7149, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.234, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7477, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.235, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3279, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.236, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6766, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.237, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8714, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.238, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7685, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.239, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5915, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3483, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.241, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6826, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.242, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3133, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.243, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6807, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.244, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7883, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.245, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4904, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.246, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4732, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.247, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6669, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.248, |
| "grad_norm": 23.25, |
| "learning_rate": 1e-05, |
| "loss": 2.814, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.249, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7448, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 23.0, |
| "learning_rate": 1e-05, |
| "loss": 2.9254, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.251, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5313, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.252, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6543, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.253, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6981, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.254, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8172, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.255, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8452, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.256, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5752, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.257, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8001, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.258, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.6752, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.259, |
| "grad_norm": 16.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5199, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 23.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5651, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.261, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2722, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.262, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4842, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.263, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3899, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.264, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6523, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.265, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7928, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.266, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4716, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.267, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4442, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.268, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5541, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.269, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4891, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 21.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4502, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.271, |
| "grad_norm": 21.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7109, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.272, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5166, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.273, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5219, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.274, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7278, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.275, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4764, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.276, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.398, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.277, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5402, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.278, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5222, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.279, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5831, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6964, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.281, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7975, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.282, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6105, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.283, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5746, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.284, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.562, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.285, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.319, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.286, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.9131, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.287, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.617, |
| "step": 2870 |
| }, |
| { |
| "epoch": 0.288, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7058, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.289, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5933, |
| "step": 2890 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 39.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5761, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.291, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5418, |
| "step": 2910 |
| }, |
| { |
| "epoch": 0.292, |
| "grad_norm": 21.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3706, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.293, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6748, |
| "step": 2930 |
| }, |
| { |
| "epoch": 0.294, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4772, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.295, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3348, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.296, |
| "grad_norm": 15.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7895, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.297, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5933, |
| "step": 2970 |
| }, |
| { |
| "epoch": 0.298, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.8536, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.299, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4427, |
| "step": 2990 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5157, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.301, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8311, |
| "step": 3010 |
| }, |
| { |
| "epoch": 0.302, |
| "grad_norm": 21.625, |
| "learning_rate": 1e-05, |
| "loss": 2.602, |
| "step": 3020 |
| }, |
| { |
| "epoch": 0.303, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6318, |
| "step": 3030 |
| }, |
| { |
| "epoch": 0.304, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.471, |
| "step": 3040 |
| }, |
| { |
| "epoch": 0.305, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.706, |
| "step": 3050 |
| }, |
| { |
| "epoch": 0.306, |
| "grad_norm": 14.1875, |
| "learning_rate": 1e-05, |
| "loss": 2.6716, |
| "step": 3060 |
| }, |
| { |
| "epoch": 0.307, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2474, |
| "step": 3070 |
| }, |
| { |
| "epoch": 0.308, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6439, |
| "step": 3080 |
| }, |
| { |
| "epoch": 0.309, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.7511, |
| "step": 3090 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.485, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.311, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.711, |
| "step": 3110 |
| }, |
| { |
| "epoch": 0.312, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3507, |
| "step": 3120 |
| }, |
| { |
| "epoch": 0.313, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8057, |
| "step": 3130 |
| }, |
| { |
| "epoch": 0.314, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8168, |
| "step": 3140 |
| }, |
| { |
| "epoch": 0.315, |
| "grad_norm": 15.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6906, |
| "step": 3150 |
| }, |
| { |
| "epoch": 0.316, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4336, |
| "step": 3160 |
| }, |
| { |
| "epoch": 0.317, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4364, |
| "step": 3170 |
| }, |
| { |
| "epoch": 0.318, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 3.0738, |
| "step": 3180 |
| }, |
| { |
| "epoch": 0.319, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6299, |
| "step": 3190 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4558, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.321, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3969, |
| "step": 3210 |
| }, |
| { |
| "epoch": 0.322, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3913, |
| "step": 3220 |
| }, |
| { |
| "epoch": 0.323, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4314, |
| "step": 3230 |
| }, |
| { |
| "epoch": 0.324, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6117, |
| "step": 3240 |
| }, |
| { |
| "epoch": 0.325, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3895, |
| "step": 3250 |
| }, |
| { |
| "epoch": 0.326, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5207, |
| "step": 3260 |
| }, |
| { |
| "epoch": 0.327, |
| "grad_norm": 15.75, |
| "learning_rate": 1e-05, |
| "loss": 2.281, |
| "step": 3270 |
| }, |
| { |
| "epoch": 0.328, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6651, |
| "step": 3280 |
| }, |
| { |
| "epoch": 0.329, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6336, |
| "step": 3290 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4576, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.331, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3937, |
| "step": 3310 |
| }, |
| { |
| "epoch": 0.332, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6475, |
| "step": 3320 |
| }, |
| { |
| "epoch": 0.333, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3746, |
| "step": 3330 |
| }, |
| { |
| "epoch": 0.334, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4735, |
| "step": 3340 |
| }, |
| { |
| "epoch": 0.335, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3351, |
| "step": 3350 |
| }, |
| { |
| "epoch": 0.336, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8694, |
| "step": 3360 |
| }, |
| { |
| "epoch": 0.337, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4649, |
| "step": 3370 |
| }, |
| { |
| "epoch": 0.338, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4623, |
| "step": 3380 |
| }, |
| { |
| "epoch": 0.339, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3888, |
| "step": 3390 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8593, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.341, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5711, |
| "step": 3410 |
| }, |
| { |
| "epoch": 0.342, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5877, |
| "step": 3420 |
| }, |
| { |
| "epoch": 0.343, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4582, |
| "step": 3430 |
| }, |
| { |
| "epoch": 0.344, |
| "grad_norm": 21.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6229, |
| "step": 3440 |
| }, |
| { |
| "epoch": 0.345, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5171, |
| "step": 3450 |
| }, |
| { |
| "epoch": 0.346, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3955, |
| "step": 3460 |
| }, |
| { |
| "epoch": 0.347, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3764, |
| "step": 3470 |
| }, |
| { |
| "epoch": 0.348, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6765, |
| "step": 3480 |
| }, |
| { |
| "epoch": 0.349, |
| "grad_norm": 24.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5731, |
| "step": 3490 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.36, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.351, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5466, |
| "step": 3510 |
| }, |
| { |
| "epoch": 0.352, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4738, |
| "step": 3520 |
| }, |
| { |
| "epoch": 0.353, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4195, |
| "step": 3530 |
| }, |
| { |
| "epoch": 0.354, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7505, |
| "step": 3540 |
| }, |
| { |
| "epoch": 0.355, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.671, |
| "step": 3550 |
| }, |
| { |
| "epoch": 0.356, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7635, |
| "step": 3560 |
| }, |
| { |
| "epoch": 0.357, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4406, |
| "step": 3570 |
| }, |
| { |
| "epoch": 0.358, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4645, |
| "step": 3580 |
| }, |
| { |
| "epoch": 0.359, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5336, |
| "step": 3590 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2931, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.361, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4669, |
| "step": 3610 |
| }, |
| { |
| "epoch": 0.362, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3572, |
| "step": 3620 |
| }, |
| { |
| "epoch": 0.363, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5048, |
| "step": 3630 |
| }, |
| { |
| "epoch": 0.364, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3659, |
| "step": 3640 |
| }, |
| { |
| "epoch": 0.365, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3389, |
| "step": 3650 |
| }, |
| { |
| "epoch": 0.366, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6051, |
| "step": 3660 |
| }, |
| { |
| "epoch": 0.367, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7483, |
| "step": 3670 |
| }, |
| { |
| "epoch": 0.368, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6497, |
| "step": 3680 |
| }, |
| { |
| "epoch": 0.369, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3905, |
| "step": 3690 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4356, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.371, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6108, |
| "step": 3710 |
| }, |
| { |
| "epoch": 0.372, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5823, |
| "step": 3720 |
| }, |
| { |
| "epoch": 0.373, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4461, |
| "step": 3730 |
| }, |
| { |
| "epoch": 0.374, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4841, |
| "step": 3740 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5193, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.376, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.4734, |
| "step": 3760 |
| }, |
| { |
| "epoch": 0.377, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8992, |
| "step": 3770 |
| }, |
| { |
| "epoch": 0.378, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.8839, |
| "step": 3780 |
| }, |
| { |
| "epoch": 0.379, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.481, |
| "step": 3790 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3662, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.381, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7295, |
| "step": 3810 |
| }, |
| { |
| "epoch": 0.382, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5414, |
| "step": 3820 |
| }, |
| { |
| "epoch": 0.383, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4315, |
| "step": 3830 |
| }, |
| { |
| "epoch": 0.384, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3002, |
| "step": 3840 |
| }, |
| { |
| "epoch": 0.385, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3859, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.386, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3491, |
| "step": 3860 |
| }, |
| { |
| "epoch": 0.387, |
| "grad_norm": 14.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.6115, |
| "step": 3870 |
| }, |
| { |
| "epoch": 0.388, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6592, |
| "step": 3880 |
| }, |
| { |
| "epoch": 0.389, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4516, |
| "step": 3890 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.64, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.391, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4264, |
| "step": 3910 |
| }, |
| { |
| "epoch": 0.392, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8704, |
| "step": 3920 |
| }, |
| { |
| "epoch": 0.393, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3054, |
| "step": 3930 |
| }, |
| { |
| "epoch": 0.394, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6544, |
| "step": 3940 |
| }, |
| { |
| "epoch": 0.395, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5953, |
| "step": 3950 |
| }, |
| { |
| "epoch": 0.396, |
| "grad_norm": 14.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2923, |
| "step": 3960 |
| }, |
| { |
| "epoch": 0.397, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.417, |
| "step": 3970 |
| }, |
| { |
| "epoch": 0.398, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3225, |
| "step": 3980 |
| }, |
| { |
| "epoch": 0.399, |
| "grad_norm": 13.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.3487, |
| "step": 3990 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7811, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.401, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2618, |
| "step": 4010 |
| }, |
| { |
| "epoch": 0.402, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3628, |
| "step": 4020 |
| }, |
| { |
| "epoch": 0.403, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5471, |
| "step": 4030 |
| }, |
| { |
| "epoch": 0.404, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.627, |
| "step": 4040 |
| }, |
| { |
| "epoch": 0.405, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5027, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.406, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.165, |
| "step": 4060 |
| }, |
| { |
| "epoch": 0.407, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2934, |
| "step": 4070 |
| }, |
| { |
| "epoch": 0.408, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3613, |
| "step": 4080 |
| }, |
| { |
| "epoch": 0.409, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3609, |
| "step": 4090 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4449, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.411, |
| "grad_norm": 14.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4496, |
| "step": 4110 |
| }, |
| { |
| "epoch": 0.412, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4611, |
| "step": 4120 |
| }, |
| { |
| "epoch": 0.413, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6379, |
| "step": 4130 |
| }, |
| { |
| "epoch": 0.414, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6281, |
| "step": 4140 |
| }, |
| { |
| "epoch": 0.415, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5068, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.416, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3529, |
| "step": 4160 |
| }, |
| { |
| "epoch": 0.417, |
| "grad_norm": 15.6875, |
| "learning_rate": 1e-05, |
| "loss": 2.4111, |
| "step": 4170 |
| }, |
| { |
| "epoch": 0.418, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5589, |
| "step": 4180 |
| }, |
| { |
| "epoch": 0.419, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3388, |
| "step": 4190 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.1583, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.421, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.288, |
| "step": 4210 |
| }, |
| { |
| "epoch": 0.422, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4598, |
| "step": 4220 |
| }, |
| { |
| "epoch": 0.423, |
| "grad_norm": 14.5, |
| "learning_rate": 1e-05, |
| "loss": 2.7717, |
| "step": 4230 |
| }, |
| { |
| "epoch": 0.424, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6044, |
| "step": 4240 |
| }, |
| { |
| "epoch": 0.425, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8087, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.426, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3632, |
| "step": 4260 |
| }, |
| { |
| "epoch": 0.427, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3376, |
| "step": 4270 |
| }, |
| { |
| "epoch": 0.428, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5305, |
| "step": 4280 |
| }, |
| { |
| "epoch": 0.429, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2954, |
| "step": 4290 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5584, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.431, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.684, |
| "step": 4310 |
| }, |
| { |
| "epoch": 0.432, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4819, |
| "step": 4320 |
| }, |
| { |
| "epoch": 0.433, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5119, |
| "step": 4330 |
| }, |
| { |
| "epoch": 0.434, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5513, |
| "step": 4340 |
| }, |
| { |
| "epoch": 0.435, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4193, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.436, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4771, |
| "step": 4360 |
| }, |
| { |
| "epoch": 0.437, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3714, |
| "step": 4370 |
| }, |
| { |
| "epoch": 0.438, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3421, |
| "step": 4380 |
| }, |
| { |
| "epoch": 0.439, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7361, |
| "step": 4390 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2974, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.441, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3927, |
| "step": 4410 |
| }, |
| { |
| "epoch": 0.442, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5367, |
| "step": 4420 |
| }, |
| { |
| "epoch": 0.443, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6905, |
| "step": 4430 |
| }, |
| { |
| "epoch": 0.444, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7203, |
| "step": 4440 |
| }, |
| { |
| "epoch": 0.445, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4527, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.446, |
| "grad_norm": 24.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6364, |
| "step": 4460 |
| }, |
| { |
| "epoch": 0.447, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.1915, |
| "step": 4470 |
| }, |
| { |
| "epoch": 0.448, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5231, |
| "step": 4480 |
| }, |
| { |
| "epoch": 0.449, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4303, |
| "step": 4490 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2239, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.451, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4674, |
| "step": 4510 |
| }, |
| { |
| "epoch": 0.452, |
| "grad_norm": 15.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2829, |
| "step": 4520 |
| }, |
| { |
| "epoch": 0.453, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3622, |
| "step": 4530 |
| }, |
| { |
| "epoch": 0.454, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6612, |
| "step": 4540 |
| }, |
| { |
| "epoch": 0.455, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4915, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.456, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6999, |
| "step": 4560 |
| }, |
| { |
| "epoch": 0.457, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4066, |
| "step": 4570 |
| }, |
| { |
| "epoch": 0.458, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5309, |
| "step": 4580 |
| }, |
| { |
| "epoch": 0.459, |
| "grad_norm": 23.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4408, |
| "step": 4590 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 15.1875, |
| "learning_rate": 1e-05, |
| "loss": 2.2914, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.461, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8961, |
| "step": 4610 |
| }, |
| { |
| "epoch": 0.462, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4698, |
| "step": 4620 |
| }, |
| { |
| "epoch": 0.463, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3865, |
| "step": 4630 |
| }, |
| { |
| "epoch": 0.464, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6814, |
| "step": 4640 |
| }, |
| { |
| "epoch": 0.465, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5154, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.466, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4509, |
| "step": 4660 |
| }, |
| { |
| "epoch": 0.467, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5722, |
| "step": 4670 |
| }, |
| { |
| "epoch": 0.468, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5152, |
| "step": 4680 |
| }, |
| { |
| "epoch": 0.469, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3603, |
| "step": 4690 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4957, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.471, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5935, |
| "step": 4710 |
| }, |
| { |
| "epoch": 0.472, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5148, |
| "step": 4720 |
| }, |
| { |
| "epoch": 0.473, |
| "grad_norm": 24.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8182, |
| "step": 4730 |
| }, |
| { |
| "epoch": 0.474, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3348, |
| "step": 4740 |
| }, |
| { |
| "epoch": 0.475, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4282, |
| "step": 4750 |
| }, |
| { |
| "epoch": 0.476, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5341, |
| "step": 4760 |
| }, |
| { |
| "epoch": 0.477, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4703, |
| "step": 4770 |
| }, |
| { |
| "epoch": 0.478, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6941, |
| "step": 4780 |
| }, |
| { |
| "epoch": 0.479, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5162, |
| "step": 4790 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5827, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.481, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2945, |
| "step": 4810 |
| }, |
| { |
| "epoch": 0.482, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2565, |
| "step": 4820 |
| }, |
| { |
| "epoch": 0.483, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6442, |
| "step": 4830 |
| }, |
| { |
| "epoch": 0.484, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5784, |
| "step": 4840 |
| }, |
| { |
| "epoch": 0.485, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.2645, |
| "step": 4850 |
| }, |
| { |
| "epoch": 0.486, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4722, |
| "step": 4860 |
| }, |
| { |
| "epoch": 0.487, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4258, |
| "step": 4870 |
| }, |
| { |
| "epoch": 0.488, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5565, |
| "step": 4880 |
| }, |
| { |
| "epoch": 0.489, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4211, |
| "step": 4890 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3477, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.491, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.432, |
| "step": 4910 |
| }, |
| { |
| "epoch": 0.492, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.5616, |
| "step": 4920 |
| }, |
| { |
| "epoch": 0.493, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3926, |
| "step": 4930 |
| }, |
| { |
| "epoch": 0.494, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6728, |
| "step": 4940 |
| }, |
| { |
| "epoch": 0.495, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7499, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.496, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.7322, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.497, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3811, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.498, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6949, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.499, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7396, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7146, |
| "step": 5000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 10000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |