| { | |
| "best_metric": 2.328753709793091, | |
| "best_model_checkpoint": "./cifar10_hbfp2,64/checkpoint-1328", | |
| "epoch": 9.999623635679338, | |
| "global_step": 6640, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.992469879518072e-05, | |
| "loss": 2.303, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.984939759036145e-05, | |
| "loss": 2.3059, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9774096385542175e-05, | |
| "loss": 2.308, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9698795180722894e-05, | |
| "loss": 2.3078, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.962349397590362e-05, | |
| "loss": 2.3086, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.954819277108434e-05, | |
| "loss": 2.3078, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.947289156626506e-05, | |
| "loss": 2.3041, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.9397590361445786e-05, | |
| "loss": 2.3138, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.932228915662651e-05, | |
| "loss": 2.3229, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.924698795180723e-05, | |
| "loss": 2.308, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.917168674698795e-05, | |
| "loss": 2.3083, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.909638554216868e-05, | |
| "loss": 2.311, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.90210843373494e-05, | |
| "loss": 2.3162, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.8945783132530124e-05, | |
| "loss": 2.3042, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.887048192771085e-05, | |
| "loss": 2.3084, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.879518072289157e-05, | |
| "loss": 2.3077, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.871987951807229e-05, | |
| "loss": 2.3108, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.8644578313253016e-05, | |
| "loss": 2.3062, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.8569277108433736e-05, | |
| "loss": 2.3112, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.8493975903614455e-05, | |
| "loss": 2.318, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.841867469879519e-05, | |
| "loss": 2.319, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 4.834337349397591e-05, | |
| "loss": 2.3082, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 4.826807228915663e-05, | |
| "loss": 2.3207, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 4.8192771084337354e-05, | |
| "loss": 2.3087, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 4.8117469879518074e-05, | |
| "loss": 2.3204, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 4.804216867469879e-05, | |
| "loss": 2.3026, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 4.796686746987952e-05, | |
| "loss": 2.3105, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 4.7891566265060246e-05, | |
| "loss": 2.3089, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 4.7816265060240965e-05, | |
| "loss": 2.3197, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 4.774096385542169e-05, | |
| "loss": 2.3203, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 4.766566265060241e-05, | |
| "loss": 2.3131, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 4.759036144578313e-05, | |
| "loss": 2.3141, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 4.751506024096386e-05, | |
| "loss": 2.3076, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.7439759036144584e-05, | |
| "loss": 2.3162, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.73644578313253e-05, | |
| "loss": 2.3079, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 4.728915662650602e-05, | |
| "loss": 2.301, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 4.721385542168675e-05, | |
| "loss": 2.3247, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.713855421686747e-05, | |
| "loss": 2.319, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 4.7063253012048195e-05, | |
| "loss": 2.3075, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 4.698795180722892e-05, | |
| "loss": 2.3267, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 4.691265060240964e-05, | |
| "loss": 2.3128, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 4.683734939759036e-05, | |
| "loss": 2.3059, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 4.676204819277109e-05, | |
| "loss": 2.3161, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 4.668674698795181e-05, | |
| "loss": 2.3169, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.661144578313253e-05, | |
| "loss": 2.3265, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.653614457831326e-05, | |
| "loss": 2.3258, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.646084337349398e-05, | |
| "loss": 2.3263, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 4.63855421686747e-05, | |
| "loss": 2.3256, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 4.6310240963855425e-05, | |
| "loss": 2.3216, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 4.6234939759036145e-05, | |
| "loss": 2.3122, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 4.615963855421687e-05, | |
| "loss": 2.327, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.608433734939759e-05, | |
| "loss": 2.3187, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.600903614457832e-05, | |
| "loss": 2.312, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5933734939759037e-05, | |
| "loss": 2.3274, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 4.585843373493976e-05, | |
| "loss": 2.3279, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 4.578313253012048e-05, | |
| "loss": 2.3307, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 4.570783132530121e-05, | |
| "loss": 2.324, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 4.563253012048193e-05, | |
| "loss": 2.3216, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 4.5557228915662655e-05, | |
| "loss": 2.3184, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.5481927710843374e-05, | |
| "loss": 2.3159, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 4.5406626506024094e-05, | |
| "loss": 2.3238, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 4.533132530120482e-05, | |
| "loss": 2.3233, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 4.525602409638555e-05, | |
| "loss": 2.3299, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 4.5180722891566266e-05, | |
| "loss": 2.3401, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.510542168674699e-05, | |
| "loss": 2.3291, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 4.503012048192771e-05, | |
| "loss": 2.3324, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_accuracy": 0.09666666666666666, | |
| "eval_loss": 2.3351855278015137, | |
| "eval_runtime": 90.5384, | |
| "eval_samples_per_second": 82.838, | |
| "eval_steps_per_second": 5.18, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 4.495481927710843e-05, | |
| "loss": 2.3837, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.487951807228916e-05, | |
| "loss": 2.326, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 4.4804216867469885e-05, | |
| "loss": 2.3356, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 4.4728915662650604e-05, | |
| "loss": 2.3464, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 4.465361445783133e-05, | |
| "loss": 2.3346, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 4.457831325301205e-05, | |
| "loss": 2.3325, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 4.450301204819277e-05, | |
| "loss": 2.3444, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 4.4427710843373496e-05, | |
| "loss": 2.3249, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 4.435240963855422e-05, | |
| "loss": 2.3472, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 4.427710843373494e-05, | |
| "loss": 2.3406, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 4.420180722891566e-05, | |
| "loss": 2.3482, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 4.412650602409639e-05, | |
| "loss": 2.3323, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 4.405120481927711e-05, | |
| "loss": 2.3407, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 4.3975903614457834e-05, | |
| "loss": 2.3466, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.390060240963856e-05, | |
| "loss": 2.3505, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.382530120481928e-05, | |
| "loss": 2.3289, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.375e-05, | |
| "loss": 2.3513, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.3674698795180726e-05, | |
| "loss": 2.3546, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.3599397590361446e-05, | |
| "loss": 2.3134, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 4.352409638554217e-05, | |
| "loss": 2.3286, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 4.34487951807229e-05, | |
| "loss": 2.3346, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 4.337349397590362e-05, | |
| "loss": 2.3357, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 4.329819277108434e-05, | |
| "loss": 2.3316, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 4.3222891566265064e-05, | |
| "loss": 2.3363, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 4.3147590361445783e-05, | |
| "loss": 2.3385, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 4.307228915662651e-05, | |
| "loss": 2.354, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 4.299698795180723e-05, | |
| "loss": 2.3349, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 4.2921686746987956e-05, | |
| "loss": 2.3247, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 4.2846385542168675e-05, | |
| "loss": 2.3473, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 4.27710843373494e-05, | |
| "loss": 2.339, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 4.269578313253012e-05, | |
| "loss": 2.3523, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 4.262048192771085e-05, | |
| "loss": 2.3478, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 4.254518072289157e-05, | |
| "loss": 2.3442, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.2469879518072294e-05, | |
| "loss": 2.3359, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.239457831325301e-05, | |
| "loss": 2.3298, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.231927710843373e-05, | |
| "loss": 2.3438, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.224397590361446e-05, | |
| "loss": 2.3356, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.2168674698795186e-05, | |
| "loss": 2.3476, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.2093373493975905e-05, | |
| "loss": 2.3233, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.201807228915663e-05, | |
| "loss": 2.319, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 4.194277108433735e-05, | |
| "loss": 2.334, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 4.186746987951807e-05, | |
| "loss": 2.3372, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 4.17921686746988e-05, | |
| "loss": 2.3215, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 4.1716867469879523e-05, | |
| "loss": 2.3351, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 4.164156626506024e-05, | |
| "loss": 2.3215, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 4.156626506024097e-05, | |
| "loss": 2.3382, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 4.149096385542169e-05, | |
| "loss": 2.3395, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 4.141566265060241e-05, | |
| "loss": 2.331, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 4.1340361445783135e-05, | |
| "loss": 2.3181, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 4.126506024096386e-05, | |
| "loss": 2.3452, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 4.118975903614458e-05, | |
| "loss": 2.3292, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 4.11144578313253e-05, | |
| "loss": 2.3285, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 4.103915662650603e-05, | |
| "loss": 2.3236, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.0963855421686746e-05, | |
| "loss": 2.3347, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.088855421686747e-05, | |
| "loss": 2.3231, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.08132530120482e-05, | |
| "loss": 2.3289, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 4.073795180722892e-05, | |
| "loss": 2.3383, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 4.066265060240964e-05, | |
| "loss": 2.3344, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 4.0587349397590365e-05, | |
| "loss": 2.3504, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 4.0512048192771084e-05, | |
| "loss": 2.3235, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 4.0436746987951804e-05, | |
| "loss": 2.3281, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 4.036144578313254e-05, | |
| "loss": 2.3367, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 4.028614457831326e-05, | |
| "loss": 2.3364, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 4.0210843373493976e-05, | |
| "loss": 2.3225, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 4.01355421686747e-05, | |
| "loss": 2.3343, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 4.006024096385542e-05, | |
| "loss": 2.3489, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_accuracy": 0.10493333333333334, | |
| "eval_loss": 2.328753709793091, | |
| "eval_runtime": 88.1948, | |
| "eval_samples_per_second": 85.039, | |
| "eval_steps_per_second": 5.318, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 3.998493975903614e-05, | |
| "loss": 2.3911, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 3.9909638554216875e-05, | |
| "loss": 2.336, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 3.9834337349397595e-05, | |
| "loss": 2.3294, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 3.9759036144578314e-05, | |
| "loss": 2.3399, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 3.968373493975904e-05, | |
| "loss": 2.3437, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 3.960843373493976e-05, | |
| "loss": 2.3291, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 3.953313253012048e-05, | |
| "loss": 2.3548, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 3.9457831325301206e-05, | |
| "loss": 2.317, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 3.938253012048193e-05, | |
| "loss": 2.3428, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 3.930722891566265e-05, | |
| "loss": 2.3414, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 3.923192771084337e-05, | |
| "loss": 2.3572, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 3.91566265060241e-05, | |
| "loss": 2.3479, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 3.908132530120482e-05, | |
| "loss": 2.3257, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 3.9006024096385544e-05, | |
| "loss": 2.324, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 3.893072289156627e-05, | |
| "loss": 2.3178, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 3.885542168674699e-05, | |
| "loss": 2.3505, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 3.878012048192771e-05, | |
| "loss": 2.3403, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 3.8704819277108436e-05, | |
| "loss": 2.3233, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 3.8629518072289155e-05, | |
| "loss": 2.3791, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 3.855421686746988e-05, | |
| "loss": 2.3549, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 3.847891566265061e-05, | |
| "loss": 2.3709, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 3.840361445783133e-05, | |
| "loss": 2.3481, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 3.832831325301205e-05, | |
| "loss": 2.3613, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 3.8253012048192774e-05, | |
| "loss": 2.3564, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 3.817771084337349e-05, | |
| "loss": 2.3781, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 3.810240963855422e-05, | |
| "loss": 2.3624, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 3.802710843373494e-05, | |
| "loss": 2.3582, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 3.7951807228915666e-05, | |
| "loss": 2.342, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 3.7876506024096385e-05, | |
| "loss": 2.3774, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.780120481927711e-05, | |
| "loss": 2.341, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.772590361445783e-05, | |
| "loss": 2.3589, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.765060240963856e-05, | |
| "loss": 2.334, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.757530120481928e-05, | |
| "loss": 2.3761, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.7500000000000003e-05, | |
| "loss": 2.3596, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.742469879518072e-05, | |
| "loss": 2.4082, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.734939759036144e-05, | |
| "loss": 2.3813, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 3.7274096385542176e-05, | |
| "loss": 2.3404, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 3.7198795180722895e-05, | |
| "loss": 2.3859, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 3.7123493975903615e-05, | |
| "loss": 2.4133, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 3.704819277108434e-05, | |
| "loss": 2.4011, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 3.697289156626506e-05, | |
| "loss": 2.4124, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 3.689759036144578e-05, | |
| "loss": 2.4081, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 3.6822289156626514e-05, | |
| "loss": 2.4022, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 3.674698795180723e-05, | |
| "loss": 2.4333, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 3.667168674698795e-05, | |
| "loss": 2.4422, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 3.659638554216868e-05, | |
| "loss": 2.4196, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 3.65210843373494e-05, | |
| "loss": 2.3988, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 3.644578313253012e-05, | |
| "loss": 2.4066, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 3.6370481927710845e-05, | |
| "loss": 2.3854, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 3.629518072289157e-05, | |
| "loss": 2.4092, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 3.621987951807229e-05, | |
| "loss": 2.4533, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.614457831325301e-05, | |
| "loss": 2.4181, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 3.606927710843374e-05, | |
| "loss": 2.4229, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 3.5993975903614456e-05, | |
| "loss": 2.4473, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 3.591867469879518e-05, | |
| "loss": 2.4258, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 3.584337349397591e-05, | |
| "loss": 2.4076, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.576807228915663e-05, | |
| "loss": 2.4527, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 3.569277108433735e-05, | |
| "loss": 2.4653, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 3.5617469879518075e-05, | |
| "loss": 2.4161, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.5542168674698794e-05, | |
| "loss": 2.4817, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 3.546686746987952e-05, | |
| "loss": 2.4586, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 3.539156626506025e-05, | |
| "loss": 2.4455, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 3.5316265060240967e-05, | |
| "loss": 2.4666, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 3.5240963855421686e-05, | |
| "loss": 2.4678, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 3.516566265060241e-05, | |
| "loss": 2.5005, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 3.509036144578313e-05, | |
| "loss": 2.4819, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 3.501506024096386e-05, | |
| "loss": 2.4899, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_accuracy": 0.09893333333333333, | |
| "eval_loss": 2.447309970855713, | |
| "eval_runtime": 87.9258, | |
| "eval_samples_per_second": 85.299, | |
| "eval_steps_per_second": 5.334, | |
| "step": 1992 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 3.4939759036144585e-05, | |
| "loss": 2.4848, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 3.4864457831325304e-05, | |
| "loss": 2.5148, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 3.4789156626506024e-05, | |
| "loss": 2.4835, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 3.471385542168675e-05, | |
| "loss": 2.5572, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 3.463855421686747e-05, | |
| "loss": 2.5478, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 3.4563253012048196e-05, | |
| "loss": 2.5037, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 3.4487951807228916e-05, | |
| "loss": 2.5438, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 3.441265060240964e-05, | |
| "loss": 2.5037, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 3.433734939759036e-05, | |
| "loss": 2.5474, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 3.426204819277108e-05, | |
| "loss": 2.5127, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 3.418674698795181e-05, | |
| "loss": 2.4961, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 3.4111445783132534e-05, | |
| "loss": 2.526, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 3.4036144578313254e-05, | |
| "loss": 2.5455, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 3.396084337349398e-05, | |
| "loss": 2.5263, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 3.38855421686747e-05, | |
| "loss": 2.4951, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 3.381024096385542e-05, | |
| "loss": 2.5009, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 3.3734939759036146e-05, | |
| "loss": 2.5164, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 3.365963855421687e-05, | |
| "loss": 2.5079, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 3.358433734939759e-05, | |
| "loss": 2.4612, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 3.350903614457832e-05, | |
| "loss": 2.4669, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 3.343373493975904e-05, | |
| "loss": 2.4784, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 3.335843373493976e-05, | |
| "loss": 2.4726, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 3.3283132530120484e-05, | |
| "loss": 2.5054, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 3.320783132530121e-05, | |
| "loss": 2.5058, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 3.313253012048193e-05, | |
| "loss": 2.4783, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 3.305722891566265e-05, | |
| "loss": 2.5145, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 3.2981927710843376e-05, | |
| "loss": 2.5295, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 3.2906626506024095e-05, | |
| "loss": 2.4965, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 3.283132530120482e-05, | |
| "loss": 2.4859, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 3.275602409638555e-05, | |
| "loss": 2.4612, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 3.268072289156627e-05, | |
| "loss": 2.5565, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 3.260542168674699e-05, | |
| "loss": 2.5422, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 3.253012048192771e-05, | |
| "loss": 2.5718, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 3.245481927710843e-05, | |
| "loss": 2.5312, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 3.237951807228915e-05, | |
| "loss": 2.541, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 3.2304216867469886e-05, | |
| "loss": 2.507, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 3.2228915662650605e-05, | |
| "loss": 2.52, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 3.2153614457831325e-05, | |
| "loss": 2.5591, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 3.207831325301205e-05, | |
| "loss": 2.5054, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 3.200301204819277e-05, | |
| "loss": 2.4614, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 3.192771084337349e-05, | |
| "loss": 2.5055, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 3.1852409638554224e-05, | |
| "loss": 2.5534, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 3.177710843373494e-05, | |
| "loss": 2.4795, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.170180722891566e-05, | |
| "loss": 2.4741, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.162650602409639e-05, | |
| "loss": 2.4908, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 3.155120481927711e-05, | |
| "loss": 2.4974, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.147590361445783e-05, | |
| "loss": 2.4895, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.1400602409638555e-05, | |
| "loss": 2.492, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 3.132530120481928e-05, | |
| "loss": 2.4952, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.125e-05, | |
| "loss": 2.4629, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.117469879518072e-05, | |
| "loss": 2.5128, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.1099397590361447e-05, | |
| "loss": 2.4906, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.102409638554217e-05, | |
| "loss": 2.5128, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.094879518072289e-05, | |
| "loss": 2.5232, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.087349397590362e-05, | |
| "loss": 2.497, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 3.079819277108434e-05, | |
| "loss": 2.4846, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 3.072289156626506e-05, | |
| "loss": 2.5096, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.0647590361445784e-05, | |
| "loss": 2.5142, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.057228915662651e-05, | |
| "loss": 2.485, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.049698795180723e-05, | |
| "loss": 2.5429, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.0421686746987953e-05, | |
| "loss": 2.4776, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.0346385542168676e-05, | |
| "loss": 2.5325, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.02710843373494e-05, | |
| "loss": 2.4744, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.019578313253012e-05, | |
| "loss": 2.5357, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.012048192771085e-05, | |
| "loss": 2.4785, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.004518072289157e-05, | |
| "loss": 2.479, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_accuracy": 0.1, | |
| "eval_loss": 2.4893734455108643, | |
| "eval_runtime": 88.3349, | |
| "eval_samples_per_second": 84.904, | |
| "eval_steps_per_second": 5.309, | |
| "step": 2656 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 2.996987951807229e-05, | |
| "loss": 2.5363, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 2.9894578313253014e-05, | |
| "loss": 2.4889, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 2.9819277108433734e-05, | |
| "loss": 2.4864, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 2.9743975903614457e-05, | |
| "loss": 2.4692, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 2.9668674698795183e-05, | |
| "loss": 2.517, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 2.9593373493975906e-05, | |
| "loss": 2.4686, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 2.951807228915663e-05, | |
| "loss": 2.5523, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 2.9442771084337352e-05, | |
| "loss": 2.4604, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 2.9367469879518072e-05, | |
| "loss": 2.481, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 2.9292168674698795e-05, | |
| "loss": 2.4952, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 2.921686746987952e-05, | |
| "loss": 2.4523, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 2.9141566265060244e-05, | |
| "loss": 2.4656, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.9066265060240967e-05, | |
| "loss": 2.5053, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 2.8990963855421687e-05, | |
| "loss": 2.4706, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 2.891566265060241e-05, | |
| "loss": 2.4952, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 2.8840361445783133e-05, | |
| "loss": 2.4745, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.876506024096386e-05, | |
| "loss": 2.4846, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.8689759036144582e-05, | |
| "loss": 2.4638, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.86144578313253e-05, | |
| "loss": 2.4963, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.8539156626506025e-05, | |
| "loss": 2.5034, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.8463855421686748e-05, | |
| "loss": 2.4673, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.838855421686747e-05, | |
| "loss": 2.4521, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.8313253012048197e-05, | |
| "loss": 2.5127, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.823795180722892e-05, | |
| "loss": 2.5123, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.816265060240964e-05, | |
| "loss": 2.4752, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.8087349397590362e-05, | |
| "loss": 2.5014, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.8012048192771085e-05, | |
| "loss": 2.5098, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 2.7936746987951805e-05, | |
| "loss": 2.4752, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.7861445783132535e-05, | |
| "loss": 2.4531, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.7786144578313254e-05, | |
| "loss": 2.4507, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.7710843373493977e-05, | |
| "loss": 2.4901, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.76355421686747e-05, | |
| "loss": 2.4274, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.756024096385542e-05, | |
| "loss": 2.4702, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 2.7484939759036143e-05, | |
| "loss": 2.4793, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 2.7409638554216873e-05, | |
| "loss": 2.5091, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 2.7334337349397592e-05, | |
| "loss": 2.4586, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 2.7259036144578315e-05, | |
| "loss": 2.4695, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 2.7183734939759038e-05, | |
| "loss": 2.4831, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 2.7108433734939758e-05, | |
| "loss": 2.4624, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 2.703313253012048e-05, | |
| "loss": 2.4333, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 2.6957831325301207e-05, | |
| "loss": 2.4513, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 2.688253012048193e-05, | |
| "loss": 2.436, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 2.6807228915662653e-05, | |
| "loss": 2.4721, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 2.6731927710843373e-05, | |
| "loss": 2.4648, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 2.6656626506024096e-05, | |
| "loss": 2.4092, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 2.658132530120482e-05, | |
| "loss": 2.4654, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 2.6506024096385545e-05, | |
| "loss": 2.4912, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "learning_rate": 2.6430722891566268e-05, | |
| "loss": 2.4248, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 2.635542168674699e-05, | |
| "loss": 2.4756, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 2.628012048192771e-05, | |
| "loss": 2.4355, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 2.6204819277108434e-05, | |
| "loss": 2.4313, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 2.6129518072289157e-05, | |
| "loss": 2.458, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 2.6054216867469883e-05, | |
| "loss": 2.4521, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 2.5978915662650606e-05, | |
| "loss": 2.4578, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 2.5903614457831325e-05, | |
| "loss": 2.4403, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 2.582831325301205e-05, | |
| "loss": 2.4692, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 2.575301204819277e-05, | |
| "loss": 2.4578, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 2.567771084337349e-05, | |
| "loss": 2.4624, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 2.560240963855422e-05, | |
| "loss": 2.4869, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 2.552710843373494e-05, | |
| "loss": 2.4656, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 2.5451807228915663e-05, | |
| "loss": 2.4149, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 2.5376506024096386e-05, | |
| "loss": 2.452, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 2.530120481927711e-05, | |
| "loss": 2.4622, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 2.522590361445783e-05, | |
| "loss": 2.4479, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 2.515060240963856e-05, | |
| "loss": 2.4514, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 2.5075301204819278e-05, | |
| "loss": 2.4341, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 2.5e-05, | |
| "loss": 2.4179, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "eval_accuracy": 0.09466666666666666, | |
| "eval_loss": 2.4404096603393555, | |
| "eval_runtime": 89.296, | |
| "eval_samples_per_second": 83.99, | |
| "eval_steps_per_second": 5.252, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 2.4924698795180724e-05, | |
| "loss": 2.4961, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "learning_rate": 2.4849397590361447e-05, | |
| "loss": 2.4329, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "learning_rate": 2.477409638554217e-05, | |
| "loss": 2.4589, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 2.4698795180722893e-05, | |
| "loss": 2.4571, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 2.4623493975903616e-05, | |
| "loss": 2.4124, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 2.454819277108434e-05, | |
| "loss": 2.4009, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 2.4472891566265062e-05, | |
| "loss": 2.4346, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 2.4397590361445785e-05, | |
| "loss": 2.4584, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "learning_rate": 2.4322289156626508e-05, | |
| "loss": 2.4309, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 2.4246987951807228e-05, | |
| "loss": 2.4128, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 2.4171686746987954e-05, | |
| "loss": 2.4343, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "learning_rate": 2.4096385542168677e-05, | |
| "loss": 2.4824, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "learning_rate": 2.4021084337349397e-05, | |
| "loss": 2.4489, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 2.3945783132530123e-05, | |
| "loss": 2.4503, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 2.3870481927710846e-05, | |
| "loss": 2.4152, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "learning_rate": 2.3795180722891565e-05, | |
| "loss": 2.4467, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 2.3719879518072292e-05, | |
| "loss": 2.4164, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "learning_rate": 2.364457831325301e-05, | |
| "loss": 2.4403, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "learning_rate": 2.3569277108433734e-05, | |
| "loss": 2.4458, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 2.349397590361446e-05, | |
| "loss": 2.4409, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 2.341867469879518e-05, | |
| "loss": 2.3775, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "learning_rate": 2.3343373493975903e-05, | |
| "loss": 2.4205, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 2.326807228915663e-05, | |
| "loss": 2.4296, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 2.319277108433735e-05, | |
| "loss": 2.4372, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 2.3117469879518072e-05, | |
| "loss": 2.4454, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 2.3042168674698795e-05, | |
| "loss": 2.4135, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 2.2966867469879518e-05, | |
| "loss": 2.4442, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 2.289156626506024e-05, | |
| "loss": 2.3982, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "learning_rate": 2.2816265060240964e-05, | |
| "loss": 2.4006, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 2.2740963855421687e-05, | |
| "loss": 2.4232, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 2.266566265060241e-05, | |
| "loss": 2.4277, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 2.2590361445783133e-05, | |
| "loss": 2.4212, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 2.2515060240963856e-05, | |
| "loss": 2.429, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 2.243975903614458e-05, | |
| "loss": 2.4011, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 2.2364457831325302e-05, | |
| "loss": 2.3976, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 2.2289156626506025e-05, | |
| "loss": 2.4228, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 2.2213855421686748e-05, | |
| "loss": 2.4326, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 2.213855421686747e-05, | |
| "loss": 2.4325, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 2.2063253012048194e-05, | |
| "loss": 2.3598, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 2.1987951807228917e-05, | |
| "loss": 2.4177, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 2.191265060240964e-05, | |
| "loss": 2.3965, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 2.1837349397590363e-05, | |
| "loss": 2.4322, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 2.1762048192771086e-05, | |
| "loss": 2.4419, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.168674698795181e-05, | |
| "loss": 2.3857, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.1611445783132532e-05, | |
| "loss": 2.3885, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 2.1536144578313255e-05, | |
| "loss": 2.3996, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 2.1460843373493978e-05, | |
| "loss": 2.4086, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 2.13855421686747e-05, | |
| "loss": 2.4339, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "learning_rate": 2.1310240963855424e-05, | |
| "loss": 2.4346, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.1234939759036147e-05, | |
| "loss": 2.4393, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.1159638554216866e-05, | |
| "loss": 2.4097, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 5.78, | |
| "learning_rate": 2.1084337349397593e-05, | |
| "loss": 2.4278, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 2.1009036144578316e-05, | |
| "loss": 2.4109, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "learning_rate": 2.0933734939759035e-05, | |
| "loss": 2.4208, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 2.0858433734939762e-05, | |
| "loss": 2.4159, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "learning_rate": 2.0783132530120485e-05, | |
| "loss": 2.4191, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 5.86, | |
| "learning_rate": 2.0707831325301204e-05, | |
| "loss": 2.3934, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 2.063253012048193e-05, | |
| "loss": 2.4151, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 2.055722891566265e-05, | |
| "loss": 2.4172, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "learning_rate": 2.0481927710843373e-05, | |
| "loss": 2.4317, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 2.04066265060241e-05, | |
| "loss": 2.3632, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 2.033132530120482e-05, | |
| "loss": 2.3705, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "learning_rate": 2.0256024096385542e-05, | |
| "loss": 2.4036, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 2.018072289156627e-05, | |
| "loss": 2.3888, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 2.0105421686746988e-05, | |
| "loss": 2.4079, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "learning_rate": 2.003012048192771e-05, | |
| "loss": 2.3881, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_accuracy": 0.102, | |
| "eval_loss": 2.3931188583374023, | |
| "eval_runtime": 89.7706, | |
| "eval_samples_per_second": 83.546, | |
| "eval_steps_per_second": 5.224, | |
| "step": 3984 | |
| }, | |
| { | |
| "epoch": 6.01, | |
| "learning_rate": 1.9954819277108437e-05, | |
| "loss": 2.4873, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "learning_rate": 1.9879518072289157e-05, | |
| "loss": 2.385, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 6.04, | |
| "learning_rate": 1.980421686746988e-05, | |
| "loss": 2.3956, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 1.9728915662650603e-05, | |
| "loss": 2.3882, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 1.9653614457831326e-05, | |
| "loss": 2.4048, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "learning_rate": 1.957831325301205e-05, | |
| "loss": 2.3765, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "learning_rate": 1.9503012048192772e-05, | |
| "loss": 2.3934, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 1.9427710843373495e-05, | |
| "loss": 2.3784, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 1.9352409638554218e-05, | |
| "loss": 2.3991, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "learning_rate": 1.927710843373494e-05, | |
| "loss": 2.4025, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "learning_rate": 1.9201807228915664e-05, | |
| "loss": 2.4011, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "learning_rate": 1.9126506024096387e-05, | |
| "loss": 2.3931, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "learning_rate": 1.905120481927711e-05, | |
| "loss": 2.4194, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "learning_rate": 1.8975903614457833e-05, | |
| "loss": 2.3913, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "learning_rate": 1.8900602409638556e-05, | |
| "loss": 2.3892, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.882530120481928e-05, | |
| "loss": 2.4361, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "learning_rate": 1.8750000000000002e-05, | |
| "loss": 2.3755, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.867469879518072e-05, | |
| "loss": 2.4297, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.8599397590361448e-05, | |
| "loss": 2.3606, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.852409638554217e-05, | |
| "loss": 2.3626, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 6.31, | |
| "learning_rate": 1.844879518072289e-05, | |
| "loss": 2.3632, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "learning_rate": 1.8373493975903617e-05, | |
| "loss": 2.381, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 6.34, | |
| "learning_rate": 1.829819277108434e-05, | |
| "loss": 2.4075, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "learning_rate": 1.822289156626506e-05, | |
| "loss": 2.4029, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 1.8147590361445786e-05, | |
| "loss": 2.3773, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 6.39, | |
| "learning_rate": 1.8072289156626505e-05, | |
| "loss": 2.3792, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "learning_rate": 1.7996987951807228e-05, | |
| "loss": 2.3947, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.7921686746987955e-05, | |
| "loss": 2.3803, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.7846385542168674e-05, | |
| "loss": 2.3772, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.7771084337349397e-05, | |
| "loss": 2.403, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "learning_rate": 1.7695783132530123e-05, | |
| "loss": 2.3912, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "learning_rate": 1.7620481927710843e-05, | |
| "loss": 2.4002, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.7545180722891566e-05, | |
| "loss": 2.3728, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.7469879518072292e-05, | |
| "loss": 2.3921, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "learning_rate": 1.7394578313253012e-05, | |
| "loss": 2.404, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 6.54, | |
| "learning_rate": 1.7319277108433735e-05, | |
| "loss": 2.4003, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.7243975903614458e-05, | |
| "loss": 2.381, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "learning_rate": 1.716867469879518e-05, | |
| "loss": 2.4061, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.7093373493975904e-05, | |
| "loss": 2.3938, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.7018072289156627e-05, | |
| "loss": 2.3488, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 6.61, | |
| "learning_rate": 1.694277108433735e-05, | |
| "loss": 2.374, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 6.63, | |
| "learning_rate": 1.6867469879518073e-05, | |
| "loss": 2.4029, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "learning_rate": 1.6792168674698796e-05, | |
| "loss": 2.3884, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "learning_rate": 1.671686746987952e-05, | |
| "loss": 2.3727, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.6641566265060242e-05, | |
| "loss": 2.3766, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.6566265060240965e-05, | |
| "loss": 2.3937, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "learning_rate": 1.6490963855421688e-05, | |
| "loss": 2.3661, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.641566265060241e-05, | |
| "loss": 2.3744, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "learning_rate": 1.6340361445783134e-05, | |
| "loss": 2.3885, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "learning_rate": 1.6265060240963857e-05, | |
| "loss": 2.3679, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "learning_rate": 1.6189759036144576e-05, | |
| "loss": 2.398, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 6.78, | |
| "learning_rate": 1.6114457831325303e-05, | |
| "loss": 2.3671, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.6039156626506026e-05, | |
| "loss": 2.3837, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.5963855421686745e-05, | |
| "loss": 2.3591, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 6.82, | |
| "learning_rate": 1.588855421686747e-05, | |
| "loss": 2.3798, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "learning_rate": 1.5813253012048195e-05, | |
| "loss": 2.3887, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.5737951807228914e-05, | |
| "loss": 2.3971, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 1.566265060240964e-05, | |
| "loss": 2.3582, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "learning_rate": 1.558734939759036e-05, | |
| "loss": 2.3698, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "learning_rate": 1.5512048192771086e-05, | |
| "loss": 2.3878, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "learning_rate": 1.543674698795181e-05, | |
| "loss": 2.3941, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 6.93, | |
| "learning_rate": 1.536144578313253e-05, | |
| "loss": 2.3655, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.5286144578313255e-05, | |
| "loss": 2.3902, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.5210843373493977e-05, | |
| "loss": 2.3656, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "learning_rate": 1.51355421686747e-05, | |
| "loss": 2.372, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.5060240963855424e-05, | |
| "loss": 2.3597, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_accuracy": 0.09666666666666666, | |
| "eval_loss": 2.3744025230407715, | |
| "eval_runtime": 88.5662, | |
| "eval_samples_per_second": 84.682, | |
| "eval_steps_per_second": 5.295, | |
| "step": 4648 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "learning_rate": 1.4984939759036146e-05, | |
| "loss": 2.4019, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.4909638554216867e-05, | |
| "loss": 2.3753, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "learning_rate": 1.4834337349397592e-05, | |
| "loss": 2.3541, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.4759036144578315e-05, | |
| "loss": 2.3742, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.4683734939759036e-05, | |
| "loss": 2.3839, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.460843373493976e-05, | |
| "loss": 2.3662, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.4533132530120484e-05, | |
| "loss": 2.3624, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.4457831325301205e-05, | |
| "loss": 2.396, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.438253012048193e-05, | |
| "loss": 2.3903, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.430722891566265e-05, | |
| "loss": 2.362, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.4231927710843374e-05, | |
| "loss": 2.3778, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.4156626506024098e-05, | |
| "loss": 2.3875, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "learning_rate": 1.408132530120482e-05, | |
| "loss": 2.3775, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.4006024096385543e-05, | |
| "loss": 2.3776, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.3930722891566267e-05, | |
| "loss": 2.3979, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "learning_rate": 1.3855421686746989e-05, | |
| "loss": 2.3477, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.378012048192771e-05, | |
| "loss": 2.3558, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 1.3704819277108436e-05, | |
| "loss": 2.4065, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.3629518072289158e-05, | |
| "loss": 2.3601, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.3554216867469879e-05, | |
| "loss": 2.362, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.3478915662650604e-05, | |
| "loss": 2.3862, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 1.3403614457831327e-05, | |
| "loss": 2.3734, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "learning_rate": 1.3328313253012048e-05, | |
| "loss": 2.3674, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "learning_rate": 1.3253012048192772e-05, | |
| "loss": 2.3781, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.3177710843373495e-05, | |
| "loss": 2.3554, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "learning_rate": 1.3102409638554217e-05, | |
| "loss": 2.3918, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.3027108433734941e-05, | |
| "loss": 2.34, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "learning_rate": 1.2951807228915663e-05, | |
| "loss": 2.3702, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "learning_rate": 1.2876506024096386e-05, | |
| "loss": 2.3564, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "learning_rate": 1.280120481927711e-05, | |
| "loss": 2.3614, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 1.2725903614457832e-05, | |
| "loss": 2.3495, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "learning_rate": 1.2650602409638555e-05, | |
| "loss": 2.3732, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 1.257530120481928e-05, | |
| "loss": 2.3572, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 1.25e-05, | |
| "loss": 2.3752, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 1.2424698795180724e-05, | |
| "loss": 2.3437, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 1.2349397590361447e-05, | |
| "loss": 2.3856, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 1.227409638554217e-05, | |
| "loss": 2.3955, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 1.2198795180722893e-05, | |
| "loss": 2.4012, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.2123493975903614e-05, | |
| "loss": 2.3732, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 1.2048192771084338e-05, | |
| "loss": 2.3588, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 1.1972891566265061e-05, | |
| "loss": 2.3589, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "learning_rate": 1.1897590361445783e-05, | |
| "loss": 2.368, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 1.1822289156626506e-05, | |
| "loss": 2.3614, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 1.174698795180723e-05, | |
| "loss": 2.3616, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "learning_rate": 1.1671686746987952e-05, | |
| "loss": 2.3572, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 1.1596385542168675e-05, | |
| "loss": 2.3828, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 1.1521084337349398e-05, | |
| "loss": 2.3688, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 1.144578313253012e-05, | |
| "loss": 2.3892, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 1.1370481927710844e-05, | |
| "loss": 2.3528, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 1.1295180722891567e-05, | |
| "loss": 2.3768, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "learning_rate": 1.121987951807229e-05, | |
| "loss": 2.3506, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 1.1144578313253013e-05, | |
| "loss": 2.3728, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 1.1069277108433736e-05, | |
| "loss": 2.3518, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 1.0993975903614459e-05, | |
| "loss": 2.3489, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 1.0918674698795181e-05, | |
| "loss": 2.3682, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "learning_rate": 1.0843373493975904e-05, | |
| "loss": 2.3764, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 1.0768072289156627e-05, | |
| "loss": 2.3499, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 1.069277108433735e-05, | |
| "loss": 2.3569, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "learning_rate": 1.0617469879518073e-05, | |
| "loss": 2.3602, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 1.0542168674698796e-05, | |
| "loss": 2.3609, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "learning_rate": 1.0466867469879518e-05, | |
| "loss": 2.3573, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 1.0391566265060242e-05, | |
| "loss": 2.3624, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 1.0316265060240965e-05, | |
| "loss": 2.3676, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 1.0240963855421687e-05, | |
| "loss": 2.3665, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 1.016566265060241e-05, | |
| "loss": 2.3585, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 1.0090361445783134e-05, | |
| "loss": 2.3498, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 1.0015060240963856e-05, | |
| "loss": 2.3721, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_accuracy": 0.09346666666666667, | |
| "eval_loss": 2.366736888885498, | |
| "eval_runtime": 88.9858, | |
| "eval_samples_per_second": 84.283, | |
| "eval_steps_per_second": 5.271, | |
| "step": 5312 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 9.939759036144579e-06, | |
| "loss": 2.4001, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 9.864457831325302e-06, | |
| "loss": 2.3524, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 9.789156626506024e-06, | |
| "loss": 2.3508, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 9.713855421686747e-06, | |
| "loss": 2.3609, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 9.63855421686747e-06, | |
| "loss": 2.3533, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 9.563253012048193e-06, | |
| "loss": 2.3546, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 9.487951807228916e-06, | |
| "loss": 2.3648, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 9.41265060240964e-06, | |
| "loss": 2.3627, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 9.33734939759036e-06, | |
| "loss": 2.3538, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 9.262048192771085e-06, | |
| "loss": 2.3581, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 9.186746987951808e-06, | |
| "loss": 2.3473, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 9.11144578313253e-06, | |
| "loss": 2.3438, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 9.036144578313253e-06, | |
| "loss": 2.3473, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 8.960843373493977e-06, | |
| "loss": 2.3603, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "learning_rate": 8.885542168674699e-06, | |
| "loss": 2.3731, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 8.810240963855422e-06, | |
| "loss": 2.3714, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 8.734939759036146e-06, | |
| "loss": 2.3507, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "learning_rate": 8.659638554216867e-06, | |
| "loss": 2.3747, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 8.58433734939759e-06, | |
| "loss": 2.3499, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 8.509036144578313e-06, | |
| "loss": 2.3481, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 8.433734939759036e-06, | |
| "loss": 2.3652, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 8.35843373493976e-06, | |
| "loss": 2.3737, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 8.283132530120482e-06, | |
| "loss": 2.3401, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 8.207831325301205e-06, | |
| "loss": 2.3348, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 8.132530120481928e-06, | |
| "loss": 2.3438, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 8.057228915662651e-06, | |
| "loss": 2.3618, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 7.981927710843373e-06, | |
| "loss": 2.3633, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 7.906626506024097e-06, | |
| "loss": 2.3791, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 7.83132530120482e-06, | |
| "loss": 2.3659, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 7.756024096385543e-06, | |
| "loss": 2.3663, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 7.680722891566265e-06, | |
| "loss": 2.3675, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 7.605421686746988e-06, | |
| "loss": 2.357, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 7.530120481927712e-06, | |
| "loss": 2.3479, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 7.4548192771084335e-06, | |
| "loss": 2.3548, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 7.379518072289157e-06, | |
| "loss": 2.363, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 7.30421686746988e-06, | |
| "loss": 2.37, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 7.228915662650602e-06, | |
| "loss": 2.3748, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 7.153614457831325e-06, | |
| "loss": 2.3464, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 7.078313253012049e-06, | |
| "loss": 2.3478, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 7.003012048192771e-06, | |
| "loss": 2.3466, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "learning_rate": 6.927710843373494e-06, | |
| "loss": 2.3452, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 6.852409638554218e-06, | |
| "loss": 2.3445, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 6.7771084337349394e-06, | |
| "loss": 2.3432, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "learning_rate": 6.701807228915663e-06, | |
| "loss": 2.3437, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 6.626506024096386e-06, | |
| "loss": 2.3583, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 6.551204819277108e-06, | |
| "loss": 2.3792, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 6.475903614457831e-06, | |
| "loss": 2.3393, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 6.400602409638555e-06, | |
| "loss": 2.3634, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 6.325301204819277e-06, | |
| "loss": 2.3552, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 6.25e-06, | |
| "loss": 2.3372, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 6.174698795180723e-06, | |
| "loss": 2.3421, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 6.099397590361446e-06, | |
| "loss": 2.3555, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 6.024096385542169e-06, | |
| "loss": 2.3465, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 5.948795180722891e-06, | |
| "loss": 2.3619, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "learning_rate": 5.873493975903615e-06, | |
| "loss": 2.3394, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 5.798192771084337e-06, | |
| "loss": 2.3813, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "learning_rate": 5.72289156626506e-06, | |
| "loss": 2.34, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 5.647590361445783e-06, | |
| "loss": 2.3559, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 5.572289156626506e-06, | |
| "loss": 2.3413, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 5.496987951807229e-06, | |
| "loss": 2.3469, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 5.421686746987952e-06, | |
| "loss": 2.35, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 5.346385542168675e-06, | |
| "loss": 2.358, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "learning_rate": 5.271084337349398e-06, | |
| "loss": 2.357, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 5.195783132530121e-06, | |
| "loss": 2.3736, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 5.120481927710843e-06, | |
| "loss": 2.3486, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "learning_rate": 5.045180722891567e-06, | |
| "loss": 2.3456, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "eval_accuracy": 0.1036, | |
| "eval_loss": 2.34945011138916, | |
| "eval_runtime": 89.3165, | |
| "eval_samples_per_second": 83.971, | |
| "eval_steps_per_second": 5.251, | |
| "step": 5976 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "learning_rate": 4.969879518072289e-06, | |
| "loss": 2.3932, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 4.894578313253012e-06, | |
| "loss": 2.3381, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 4.819277108433735e-06, | |
| "loss": 2.3458, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 9.05, | |
| "learning_rate": 4.743975903614458e-06, | |
| "loss": 2.3483, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 9.07, | |
| "learning_rate": 4.66867469879518e-06, | |
| "loss": 2.3492, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 4.593373493975904e-06, | |
| "loss": 2.363, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 9.1, | |
| "learning_rate": 4.518072289156626e-06, | |
| "loss": 2.3461, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 4.442771084337349e-06, | |
| "loss": 2.3263, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "learning_rate": 4.367469879518073e-06, | |
| "loss": 2.3446, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 9.14, | |
| "learning_rate": 4.292168674698795e-06, | |
| "loss": 2.3492, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "learning_rate": 4.216867469879518e-06, | |
| "loss": 2.3547, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 4.141566265060241e-06, | |
| "loss": 2.3578, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 4.066265060240964e-06, | |
| "loss": 2.3575, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "learning_rate": 3.990963855421686e-06, | |
| "loss": 2.37, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "learning_rate": 3.91566265060241e-06, | |
| "loss": 2.3309, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 3.840361445783132e-06, | |
| "loss": 2.3272, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 3.765060240963856e-06, | |
| "loss": 2.3461, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "learning_rate": 3.6897590361445786e-06, | |
| "loss": 2.3622, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "learning_rate": 3.614457831325301e-06, | |
| "loss": 2.3574, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "learning_rate": 3.5391566265060246e-06, | |
| "loss": 2.3448, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "learning_rate": 3.463855421686747e-06, | |
| "loss": 2.3468, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 3.3885542168674697e-06, | |
| "loss": 2.3576, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 3.313253012048193e-06, | |
| "loss": 2.3443, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 9.35, | |
| "learning_rate": 3.2379518072289157e-06, | |
| "loss": 2.3561, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 3.1626506024096387e-06, | |
| "loss": 2.3564, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 3.0873493975903616e-06, | |
| "loss": 2.3243, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 3.0120481927710846e-06, | |
| "loss": 2.3521, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "learning_rate": 2.9367469879518076e-06, | |
| "loss": 2.3574, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 2.86144578313253e-06, | |
| "loss": 2.3711, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "learning_rate": 2.786144578313253e-06, | |
| "loss": 2.3457, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "learning_rate": 2.710843373493976e-06, | |
| "loss": 2.3374, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 2.635542168674699e-06, | |
| "loss": 2.3487, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "learning_rate": 2.5602409638554217e-06, | |
| "loss": 2.3415, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "learning_rate": 2.4849397590361446e-06, | |
| "loss": 2.356, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "learning_rate": 2.4096385542168676e-06, | |
| "loss": 2.3409, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 2.33433734939759e-06, | |
| "loss": 2.3431, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 2.259036144578313e-06, | |
| "loss": 2.3382, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 2.1837349397590366e-06, | |
| "loss": 2.3546, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "learning_rate": 2.108433734939759e-06, | |
| "loss": 2.3461, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 2.033132530120482e-06, | |
| "loss": 2.355, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "learning_rate": 1.957831325301205e-06, | |
| "loss": 2.3541, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 1.882530120481928e-06, | |
| "loss": 2.3402, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "learning_rate": 1.8072289156626506e-06, | |
| "loss": 2.3381, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 1.7319277108433736e-06, | |
| "loss": 2.3463, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "learning_rate": 1.6566265060240966e-06, | |
| "loss": 2.3386, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 1.5813253012048193e-06, | |
| "loss": 2.3251, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 1.5060240963855423e-06, | |
| "loss": 2.3473, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 9.71, | |
| "learning_rate": 1.430722891566265e-06, | |
| "loss": 2.3398, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 9.73, | |
| "learning_rate": 1.355421686746988e-06, | |
| "loss": 2.347, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 1.2801204819277108e-06, | |
| "loss": 2.3439, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 1.2048192771084338e-06, | |
| "loss": 2.372, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 1.1295180722891566e-06, | |
| "loss": 2.3419, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "learning_rate": 1.0542168674698796e-06, | |
| "loss": 2.3514, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "learning_rate": 9.789156626506025e-07, | |
| "loss": 2.3408, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "learning_rate": 9.036144578313253e-07, | |
| "loss": 2.3531, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "learning_rate": 8.283132530120483e-07, | |
| "loss": 2.3511, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.530120481927712e-07, | |
| "loss": 2.351, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 9.86, | |
| "learning_rate": 6.77710843373494e-07, | |
| "loss": 2.3652, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 9.88, | |
| "learning_rate": 6.024096385542169e-07, | |
| "loss": 2.3332, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 5.271084337349398e-07, | |
| "loss": 2.3325, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 4.5180722891566265e-07, | |
| "loss": 2.3646, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 3.765060240963856e-07, | |
| "loss": 2.3587, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 3.0120481927710845e-07, | |
| "loss": 2.3622, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "learning_rate": 2.2590361445783133e-07, | |
| "loss": 2.345, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "learning_rate": 1.5060240963855423e-07, | |
| "loss": 2.3493, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 7.530120481927711e-08, | |
| "loss": 2.3601, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 0.0, | |
| "loss": 2.3361, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "eval_accuracy": 0.10253333333333334, | |
| "eval_loss": 2.347257375717163, | |
| "eval_runtime": 88.4974, | |
| "eval_samples_per_second": 84.748, | |
| "eval_steps_per_second": 5.3, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "step": 6640, | |
| "total_flos": 3.2936147019070783e+19, | |
| "train_loss": 2.389983481671437, | |
| "train_runtime": 10181.4642, | |
| "train_samples_per_second": 41.743, | |
| "train_steps_per_second": 0.652 | |
| } | |
| ], | |
| "max_steps": 6640, | |
| "num_train_epochs": 10, | |
| "total_flos": 3.2936147019070783e+19, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
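
The file above appears to be a Hugging Face `Trainer` state log (`trainer_state.json`): `log_history` interleaves per-step training records (keyed by `loss`) with per-epoch evaluation records (keyed by `eval_loss` / `eval_accuracy`), and closes with a run summary (`train_loss`, `train_runtime`, `total_flos`). Below is a minimal sketch of reading the log back and summarizing the curves; the file path is illustrative, not taken from the log itself.

```python
import json

# Load the trainer state log; the path here is an assumption for illustration.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training records carry a "loss" key; per-epoch evaluation records
# carry "eval_loss"/"eval_accuracy"; the final summary entry has neither key.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

print(f"run: {state['num_train_epochs']} epochs, {state['max_steps']} steps")
print(f"last logged training loss: {train_log[-1]['loss']:.4f}")
for e in eval_log:
    print(f"epoch {e['epoch']:>4}: eval_loss={e['eval_loss']:.4f}  "
          f"eval_accuracy={e['eval_accuracy']:.4f}")
```

For reference, the evaluation records in this run show accuracy staying near 10% (chance level for 10-class CIFAR-10) with the loss hovering around 2.33-2.40, which a summary like the one above surfaces directly.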