{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 5796,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003450655624568668,
"grad_norm": 2.7905118465423584,
"learning_rate": 4.993098688750863e-05,
"loss": 1.7703,
"step": 10
},
{
"epoch": 0.006901311249137336,
"grad_norm": 1.9586756229400635,
"learning_rate": 4.984472049689442e-05,
"loss": 0.9224,
"step": 20
},
{
"epoch": 0.010351966873706004,
"grad_norm": 1.6500365734100342,
"learning_rate": 4.9758454106280194e-05,
"loss": 0.7301,
"step": 30
},
{
"epoch": 0.013802622498274672,
"grad_norm": 1.4835381507873535,
"learning_rate": 4.967218771566598e-05,
"loss": 0.6398,
"step": 40
},
{
"epoch": 0.01725327812284334,
"grad_norm": 1.4787698984146118,
"learning_rate": 4.958592132505176e-05,
"loss": 0.6192,
"step": 50
},
{
"epoch": 0.020703933747412008,
"grad_norm": 1.2620238065719604,
"learning_rate": 4.949965493443754e-05,
"loss": 0.5913,
"step": 60
},
{
"epoch": 0.024154589371980676,
"grad_norm": 1.2006996870040894,
"learning_rate": 4.941338854382333e-05,
"loss": 0.5527,
"step": 70
},
{
"epoch": 0.027605244996549344,
"grad_norm": 1.395799160003662,
"learning_rate": 4.932712215320911e-05,
"loss": 0.5465,
"step": 80
},
{
"epoch": 0.031055900621118012,
"grad_norm": 1.2285804748535156,
"learning_rate": 4.92408557625949e-05,
"loss": 0.5123,
"step": 90
},
{
"epoch": 0.03450655624568668,
"grad_norm": 1.2713665962219238,
"learning_rate": 4.915458937198068e-05,
"loss": 0.5129,
"step": 100
},
{
"epoch": 0.03795721187025535,
"grad_norm": 1.1799707412719727,
"learning_rate": 4.906832298136646e-05,
"loss": 0.5046,
"step": 110
},
{
"epoch": 0.041407867494824016,
"grad_norm": 1.0650718212127686,
"learning_rate": 4.8982056590752246e-05,
"loss": 0.4909,
"step": 120
},
{
"epoch": 0.04485852311939269,
"grad_norm": 1.0782257318496704,
"learning_rate": 4.889579020013803e-05,
"loss": 0.4822,
"step": 130
},
{
"epoch": 0.04830917874396135,
"grad_norm": 1.0218424797058105,
"learning_rate": 4.880952380952381e-05,
"loss": 0.4791,
"step": 140
},
{
"epoch": 0.051759834368530024,
"grad_norm": 0.9384006261825562,
"learning_rate": 4.8723257418909594e-05,
"loss": 0.4811,
"step": 150
},
{
"epoch": 0.05521048999309869,
"grad_norm": 1.0220593214035034,
"learning_rate": 4.8636991028295384e-05,
"loss": 0.4718,
"step": 160
},
{
"epoch": 0.05866114561766736,
"grad_norm": 1.1225941181182861,
"learning_rate": 4.855072463768116e-05,
"loss": 0.449,
"step": 170
},
{
"epoch": 0.062111801242236024,
"grad_norm": 0.9393426775932312,
"learning_rate": 4.846445824706695e-05,
"loss": 0.4644,
"step": 180
},
{
"epoch": 0.06556245686680469,
"grad_norm": 0.9687050580978394,
"learning_rate": 4.8378191856452725e-05,
"loss": 0.4554,
"step": 190
},
{
"epoch": 0.06901311249137336,
"grad_norm": 0.9987794160842896,
"learning_rate": 4.829192546583851e-05,
"loss": 0.4487,
"step": 200
},
{
"epoch": 0.07246376811594203,
"grad_norm": 0.8862098455429077,
"learning_rate": 4.82056590752243e-05,
"loss": 0.4507,
"step": 210
},
{
"epoch": 0.0759144237405107,
"grad_norm": 0.8340906500816345,
"learning_rate": 4.8119392684610074e-05,
"loss": 0.438,
"step": 220
},
{
"epoch": 0.07936507936507936,
"grad_norm": 0.8599636554718018,
"learning_rate": 4.803312629399586e-05,
"loss": 0.4406,
"step": 230
},
{
"epoch": 0.08281573498964803,
"grad_norm": 0.8934054970741272,
"learning_rate": 4.7946859903381646e-05,
"loss": 0.4351,
"step": 240
},
{
"epoch": 0.0862663906142167,
"grad_norm": 0.7076365947723389,
"learning_rate": 4.786059351276743e-05,
"loss": 0.434,
"step": 250
},
{
"epoch": 0.08971704623878538,
"grad_norm": 0.8420814871788025,
"learning_rate": 4.777432712215321e-05,
"loss": 0.434,
"step": 260
},
{
"epoch": 0.09316770186335403,
"grad_norm": 0.768744170665741,
"learning_rate": 4.7688060731538994e-05,
"loss": 0.4317,
"step": 270
},
{
"epoch": 0.0966183574879227,
"grad_norm": 0.8374963998794556,
"learning_rate": 4.760179434092478e-05,
"loss": 0.4297,
"step": 280
},
{
"epoch": 0.10006901311249138,
"grad_norm": 0.7579261064529419,
"learning_rate": 4.751552795031056e-05,
"loss": 0.4304,
"step": 290
},
{
"epoch": 0.10351966873706005,
"grad_norm": 0.7822970151901245,
"learning_rate": 4.742926155969634e-05,
"loss": 0.4323,
"step": 300
},
{
"epoch": 0.1069703243616287,
"grad_norm": 0.7832859754562378,
"learning_rate": 4.7342995169082125e-05,
"loss": 0.4235,
"step": 310
},
{
"epoch": 0.11042097998619738,
"grad_norm": 0.8547880053520203,
"learning_rate": 4.7256728778467915e-05,
"loss": 0.4295,
"step": 320
},
{
"epoch": 0.11387163561076605,
"grad_norm": 0.7879784107208252,
"learning_rate": 4.717046238785369e-05,
"loss": 0.4281,
"step": 330
},
{
"epoch": 0.11732229123533472,
"grad_norm": 0.7858608961105347,
"learning_rate": 4.708419599723948e-05,
"loss": 0.4162,
"step": 340
},
{
"epoch": 0.12077294685990338,
"grad_norm": 0.730283796787262,
"learning_rate": 4.699792960662526e-05,
"loss": 0.4167,
"step": 350
},
{
"epoch": 0.12422360248447205,
"grad_norm": 0.7356073260307312,
"learning_rate": 4.6911663216011046e-05,
"loss": 0.4156,
"step": 360
},
{
"epoch": 0.1276742581090407,
"grad_norm": 0.6808834075927734,
"learning_rate": 4.682539682539683e-05,
"loss": 0.4095,
"step": 370
},
{
"epoch": 0.13112491373360938,
"grad_norm": 0.7014838457107544,
"learning_rate": 4.673913043478261e-05,
"loss": 0.4163,
"step": 380
},
{
"epoch": 0.13457556935817805,
"grad_norm": 0.8114644289016724,
"learning_rate": 4.6652864044168394e-05,
"loss": 0.403,
"step": 390
},
{
"epoch": 0.13802622498274672,
"grad_norm": 0.7766273021697998,
"learning_rate": 4.656659765355418e-05,
"loss": 0.4144,
"step": 400
},
{
"epoch": 0.1414768806073154,
"grad_norm": 0.7870580554008484,
"learning_rate": 4.6480331262939966e-05,
"loss": 0.4079,
"step": 410
},
{
"epoch": 0.14492753623188406,
"grad_norm": 0.6845405101776123,
"learning_rate": 4.639406487232574e-05,
"loss": 0.4108,
"step": 420
},
{
"epoch": 0.14837819185645273,
"grad_norm": 0.725813627243042,
"learning_rate": 4.6307798481711525e-05,
"loss": 0.4085,
"step": 430
},
{
"epoch": 0.1518288474810214,
"grad_norm": 0.7153050899505615,
"learning_rate": 4.622153209109731e-05,
"loss": 0.4059,
"step": 440
},
{
"epoch": 0.15527950310559005,
"grad_norm": 0.7423162460327148,
"learning_rate": 4.613526570048309e-05,
"loss": 0.4022,
"step": 450
},
{
"epoch": 0.15873015873015872,
"grad_norm": 0.7227168083190918,
"learning_rate": 4.604899930986888e-05,
"loss": 0.4103,
"step": 460
},
{
"epoch": 0.1621808143547274,
"grad_norm": 0.7159332633018494,
"learning_rate": 4.5962732919254656e-05,
"loss": 0.4114,
"step": 470
},
{
"epoch": 0.16563146997929606,
"grad_norm": 0.7028360366821289,
"learning_rate": 4.5876466528640446e-05,
"loss": 0.4052,
"step": 480
},
{
"epoch": 0.16908212560386474,
"grad_norm": 0.6726347208023071,
"learning_rate": 4.579020013802623e-05,
"loss": 0.4114,
"step": 490
},
{
"epoch": 0.1725327812284334,
"grad_norm": 0.694276750087738,
"learning_rate": 4.570393374741201e-05,
"loss": 0.3981,
"step": 500
},
{
"epoch": 0.17598343685300208,
"grad_norm": 0.6861996650695801,
"learning_rate": 4.5617667356797794e-05,
"loss": 0.391,
"step": 510
},
{
"epoch": 0.17943409247757075,
"grad_norm": 0.6844497919082642,
"learning_rate": 4.553140096618358e-05,
"loss": 0.389,
"step": 520
},
{
"epoch": 0.1828847481021394,
"grad_norm": 0.7536503076553345,
"learning_rate": 4.544513457556936e-05,
"loss": 0.4013,
"step": 530
},
{
"epoch": 0.18633540372670807,
"grad_norm": 0.6106601357460022,
"learning_rate": 4.535886818495514e-05,
"loss": 0.3913,
"step": 540
},
{
"epoch": 0.18978605935127674,
"grad_norm": 0.6462461948394775,
"learning_rate": 4.5272601794340925e-05,
"loss": 0.3887,
"step": 550
},
{
"epoch": 0.1932367149758454,
"grad_norm": 0.6759409308433533,
"learning_rate": 4.518633540372671e-05,
"loss": 0.3972,
"step": 560
},
{
"epoch": 0.19668737060041408,
"grad_norm": 0.6945746541023254,
"learning_rate": 4.51000690131125e-05,
"loss": 0.3865,
"step": 570
},
{
"epoch": 0.20013802622498275,
"grad_norm": 0.6554960608482361,
"learning_rate": 4.5013802622498274e-05,
"loss": 0.3902,
"step": 580
},
{
"epoch": 0.20358868184955142,
"grad_norm": 0.6467313170433044,
"learning_rate": 4.492753623188406e-05,
"loss": 0.3889,
"step": 590
},
{
"epoch": 0.2070393374741201,
"grad_norm": 0.6078060865402222,
"learning_rate": 4.4841269841269846e-05,
"loss": 0.3898,
"step": 600
},
{
"epoch": 0.21048999309868874,
"grad_norm": 0.8312433362007141,
"learning_rate": 4.475500345065562e-05,
"loss": 0.3897,
"step": 610
},
{
"epoch": 0.2139406487232574,
"grad_norm": 0.5776800513267517,
"learning_rate": 4.466873706004141e-05,
"loss": 0.3885,
"step": 620
},
{
"epoch": 0.21739130434782608,
"grad_norm": 0.6104868054389954,
"learning_rate": 4.4582470669427194e-05,
"loss": 0.3992,
"step": 630
},
{
"epoch": 0.22084195997239475,
"grad_norm": 0.6720201373100281,
"learning_rate": 4.449620427881298e-05,
"loss": 0.3865,
"step": 640
},
{
"epoch": 0.22429261559696342,
"grad_norm": 0.6955760717391968,
"learning_rate": 4.440993788819876e-05,
"loss": 0.3878,
"step": 650
},
{
"epoch": 0.2277432712215321,
"grad_norm": 0.6438873410224915,
"learning_rate": 4.432367149758454e-05,
"loss": 0.3881,
"step": 660
},
{
"epoch": 0.23119392684610077,
"grad_norm": 0.622840940952301,
"learning_rate": 4.4237405106970325e-05,
"loss": 0.3939,
"step": 670
},
{
"epoch": 0.23464458247066944,
"grad_norm": 0.6797952055931091,
"learning_rate": 4.415113871635611e-05,
"loss": 0.401,
"step": 680
},
{
"epoch": 0.23809523809523808,
"grad_norm": 0.6357337832450867,
"learning_rate": 4.406487232574189e-05,
"loss": 0.391,
"step": 690
},
{
"epoch": 0.24154589371980675,
"grad_norm": 0.612765908241272,
"learning_rate": 4.3978605935127674e-05,
"loss": 0.389,
"step": 700
},
{
"epoch": 0.24499654934437542,
"grad_norm": 0.5818207859992981,
"learning_rate": 4.389233954451346e-05,
"loss": 0.3835,
"step": 710
},
{
"epoch": 0.2484472049689441,
"grad_norm": 0.530667245388031,
"learning_rate": 4.380607315389924e-05,
"loss": 0.3789,
"step": 720
},
{
"epoch": 0.25189786059351277,
"grad_norm": 0.6901742815971375,
"learning_rate": 4.371980676328503e-05,
"loss": 0.3832,
"step": 730
},
{
"epoch": 0.2553485162180814,
"grad_norm": 0.6087227463722229,
"learning_rate": 4.363354037267081e-05,
"loss": 0.3766,
"step": 740
},
{
"epoch": 0.2587991718426501,
"grad_norm": 0.5868121981620789,
"learning_rate": 4.3547273982056594e-05,
"loss": 0.3717,
"step": 750
},
{
"epoch": 0.26224982746721875,
"grad_norm": 0.646168053150177,
"learning_rate": 4.346100759144238e-05,
"loss": 0.3882,
"step": 760
},
{
"epoch": 0.26570048309178745,
"grad_norm": 0.6688451170921326,
"learning_rate": 4.337474120082816e-05,
"loss": 0.3791,
"step": 770
},
{
"epoch": 0.2691511387163561,
"grad_norm": 0.5901142358779907,
"learning_rate": 4.328847481021394e-05,
"loss": 0.3854,
"step": 780
},
{
"epoch": 0.2726017943409248,
"grad_norm": 0.6211585402488708,
"learning_rate": 4.3202208419599725e-05,
"loss": 0.3829,
"step": 790
},
{
"epoch": 0.27605244996549344,
"grad_norm": 0.6081684231758118,
"learning_rate": 4.3115942028985515e-05,
"loss": 0.3859,
"step": 800
},
{
"epoch": 0.2795031055900621,
"grad_norm": 0.6079261898994446,
"learning_rate": 4.302967563837129e-05,
"loss": 0.3766,
"step": 810
},
{
"epoch": 0.2829537612146308,
"grad_norm": 0.5618354082107544,
"learning_rate": 4.294340924775708e-05,
"loss": 0.3846,
"step": 820
},
{
"epoch": 0.2864044168391994,
"grad_norm": 0.5210685729980469,
"learning_rate": 4.2857142857142856e-05,
"loss": 0.3706,
"step": 830
},
{
"epoch": 0.2898550724637681,
"grad_norm": 0.587040901184082,
"learning_rate": 4.277087646652864e-05,
"loss": 0.3771,
"step": 840
},
{
"epoch": 0.29330572808833677,
"grad_norm": 0.6124225854873657,
"learning_rate": 4.268461007591443e-05,
"loss": 0.3818,
"step": 850
},
{
"epoch": 0.29675638371290547,
"grad_norm": 0.6286453008651733,
"learning_rate": 4.2598343685300205e-05,
"loss": 0.3712,
"step": 860
},
{
"epoch": 0.3002070393374741,
"grad_norm": 0.5031912922859192,
"learning_rate": 4.2512077294685994e-05,
"loss": 0.3737,
"step": 870
},
{
"epoch": 0.3036576949620428,
"grad_norm": 0.56924968957901,
"learning_rate": 4.242581090407178e-05,
"loss": 0.3675,
"step": 880
},
{
"epoch": 0.30710835058661146,
"grad_norm": 0.5326061248779297,
"learning_rate": 4.233954451345756e-05,
"loss": 0.3724,
"step": 890
},
{
"epoch": 0.3105590062111801,
"grad_norm": 0.6027249097824097,
"learning_rate": 4.225327812284334e-05,
"loss": 0.3752,
"step": 900
},
{
"epoch": 0.3140096618357488,
"grad_norm": 0.48393580317497253,
"learning_rate": 4.2167011732229125e-05,
"loss": 0.3696,
"step": 910
},
{
"epoch": 0.31746031746031744,
"grad_norm": 0.5405918955802917,
"learning_rate": 4.208074534161491e-05,
"loss": 0.3798,
"step": 920
},
{
"epoch": 0.32091097308488614,
"grad_norm": 0.5802543759346008,
"learning_rate": 4.199447895100069e-05,
"loss": 0.3644,
"step": 930
},
{
"epoch": 0.3243616287094548,
"grad_norm": 0.5545613169670105,
"learning_rate": 4.1908212560386474e-05,
"loss": 0.3627,
"step": 940
},
{
"epoch": 0.3278122843340235,
"grad_norm": 0.536221444606781,
"learning_rate": 4.1821946169772256e-05,
"loss": 0.3648,
"step": 950
},
{
"epoch": 0.33126293995859213,
"grad_norm": 0.5735536217689514,
"learning_rate": 4.1735679779158046e-05,
"loss": 0.3694,
"step": 960
},
{
"epoch": 0.33471359558316083,
"grad_norm": 0.5893762707710266,
"learning_rate": 4.164941338854382e-05,
"loss": 0.3691,
"step": 970
},
{
"epoch": 0.33816425120772947,
"grad_norm": 0.5472971796989441,
"learning_rate": 4.156314699792961e-05,
"loss": 0.3697,
"step": 980
},
{
"epoch": 0.3416149068322981,
"grad_norm": 0.5291191935539246,
"learning_rate": 4.1476880607315394e-05,
"loss": 0.378,
"step": 990
},
{
"epoch": 0.3450655624568668,
"grad_norm": 0.5894433259963989,
"learning_rate": 4.139061421670117e-05,
"loss": 0.3676,
"step": 1000
},
{
"epoch": 0.34851621808143546,
"grad_norm": 0.519848644733429,
"learning_rate": 4.130434782608696e-05,
"loss": 0.366,
"step": 1010
},
{
"epoch": 0.35196687370600416,
"grad_norm": 0.620188295841217,
"learning_rate": 4.121808143547274e-05,
"loss": 0.3569,
"step": 1020
},
{
"epoch": 0.3554175293305728,
"grad_norm": 0.589487612247467,
"learning_rate": 4.1131815044858525e-05,
"loss": 0.3599,
"step": 1030
},
{
"epoch": 0.3588681849551415,
"grad_norm": 0.5304994583129883,
"learning_rate": 4.104554865424431e-05,
"loss": 0.3622,
"step": 1040
},
{
"epoch": 0.36231884057971014,
"grad_norm": 0.5731646418571472,
"learning_rate": 4.09592822636301e-05,
"loss": 0.3641,
"step": 1050
},
{
"epoch": 0.3657694962042788,
"grad_norm": 0.527119517326355,
"learning_rate": 4.0873015873015874e-05,
"loss": 0.3639,
"step": 1060
},
{
"epoch": 0.3692201518288475,
"grad_norm": 0.4605042636394501,
"learning_rate": 4.0786749482401656e-05,
"loss": 0.3685,
"step": 1070
},
{
"epoch": 0.37267080745341613,
"grad_norm": 0.5230356454849243,
"learning_rate": 4.070048309178744e-05,
"loss": 0.369,
"step": 1080
},
{
"epoch": 0.37612146307798483,
"grad_norm": 0.5160967111587524,
"learning_rate": 4.061421670117322e-05,
"loss": 0.3577,
"step": 1090
},
{
"epoch": 0.3795721187025535,
"grad_norm": 0.5491387248039246,
"learning_rate": 4.052795031055901e-05,
"loss": 0.3786,
"step": 1100
},
{
"epoch": 0.3830227743271222,
"grad_norm": 0.5626661777496338,
"learning_rate": 4.044168391994479e-05,
"loss": 0.3676,
"step": 1110
},
{
"epoch": 0.3864734299516908,
"grad_norm": 0.5415998697280884,
"learning_rate": 4.035541752933058e-05,
"loss": 0.3608,
"step": 1120
},
{
"epoch": 0.3899240855762595,
"grad_norm": 0.6332689523696899,
"learning_rate": 4.026915113871636e-05,
"loss": 0.3707,
"step": 1130
},
{
"epoch": 0.39337474120082816,
"grad_norm": 0.5432307124137878,
"learning_rate": 4.018288474810214e-05,
"loss": 0.364,
"step": 1140
},
{
"epoch": 0.3968253968253968,
"grad_norm": 0.498445987701416,
"learning_rate": 4.0096618357487925e-05,
"loss": 0.3609,
"step": 1150
},
{
"epoch": 0.4002760524499655,
"grad_norm": 0.5753563046455383,
"learning_rate": 4.001035196687371e-05,
"loss": 0.3625,
"step": 1160
},
{
"epoch": 0.40372670807453415,
"grad_norm": 0.5175043940544128,
"learning_rate": 3.992408557625949e-05,
"loss": 0.3602,
"step": 1170
},
{
"epoch": 0.40717736369910285,
"grad_norm": 0.521827220916748,
"learning_rate": 3.9837819185645274e-05,
"loss": 0.3523,
"step": 1180
},
{
"epoch": 0.4106280193236715,
"grad_norm": 0.538490355014801,
"learning_rate": 3.9751552795031056e-05,
"loss": 0.3702,
"step": 1190
},
{
"epoch": 0.4140786749482402,
"grad_norm": 0.5001350045204163,
"learning_rate": 3.966528640441684e-05,
"loss": 0.3455,
"step": 1200
},
{
"epoch": 0.41752933057280883,
"grad_norm": 0.47586187720298767,
"learning_rate": 3.957902001380263e-05,
"loss": 0.3569,
"step": 1210
},
{
"epoch": 0.4209799861973775,
"grad_norm": 0.5283018946647644,
"learning_rate": 3.9492753623188405e-05,
"loss": 0.3691,
"step": 1220
},
{
"epoch": 0.4244306418219462,
"grad_norm": 0.6038020253181458,
"learning_rate": 3.940648723257419e-05,
"loss": 0.3611,
"step": 1230
},
{
"epoch": 0.4278812974465148,
"grad_norm": 0.4514884054660797,
"learning_rate": 3.932022084195998e-05,
"loss": 0.367,
"step": 1240
},
{
"epoch": 0.4313319530710835,
"grad_norm": 0.5189118385314941,
"learning_rate": 3.923395445134575e-05,
"loss": 0.3553,
"step": 1250
},
{
"epoch": 0.43478260869565216,
"grad_norm": 0.5897815227508545,
"learning_rate": 3.914768806073154e-05,
"loss": 0.3658,
"step": 1260
},
{
"epoch": 0.43823326432022086,
"grad_norm": 0.5077459812164307,
"learning_rate": 3.9061421670117325e-05,
"loss": 0.3734,
"step": 1270
},
{
"epoch": 0.4416839199447895,
"grad_norm": 0.5408049821853638,
"learning_rate": 3.897515527950311e-05,
"loss": 0.3516,
"step": 1280
},
{
"epoch": 0.4451345755693582,
"grad_norm": 0.5270346999168396,
"learning_rate": 3.888888888888889e-05,
"loss": 0.3645,
"step": 1290
},
{
"epoch": 0.44858523119392685,
"grad_norm": 0.5303778052330017,
"learning_rate": 3.8802622498274674e-05,
"loss": 0.3657,
"step": 1300
},
{
"epoch": 0.4520358868184955,
"grad_norm": 0.47278884053230286,
"learning_rate": 3.8716356107660456e-05,
"loss": 0.3535,
"step": 1310
},
{
"epoch": 0.4554865424430642,
"grad_norm": 0.5036910772323608,
"learning_rate": 3.863008971704624e-05,
"loss": 0.3541,
"step": 1320
},
{
"epoch": 0.45893719806763283,
"grad_norm": 0.4585624635219574,
"learning_rate": 3.854382332643202e-05,
"loss": 0.3623,
"step": 1330
},
{
"epoch": 0.46238785369220153,
"grad_norm": 0.5310036540031433,
"learning_rate": 3.8457556935817805e-05,
"loss": 0.3586,
"step": 1340
},
{
"epoch": 0.4658385093167702,
"grad_norm": 0.5218350291252136,
"learning_rate": 3.8371290545203594e-05,
"loss": 0.3412,
"step": 1350
},
{
"epoch": 0.4692891649413389,
"grad_norm": 0.4900505244731903,
"learning_rate": 3.828502415458937e-05,
"loss": 0.3589,
"step": 1360
},
{
"epoch": 0.4727398205659075,
"grad_norm": 0.526491105556488,
"learning_rate": 3.819875776397516e-05,
"loss": 0.3509,
"step": 1370
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.4933297634124756,
"learning_rate": 3.811249137336094e-05,
"loss": 0.3551,
"step": 1380
},
{
"epoch": 0.47964113181504486,
"grad_norm": 0.5509902834892273,
"learning_rate": 3.8026224982746725e-05,
"loss": 0.3455,
"step": 1390
},
{
"epoch": 0.4830917874396135,
"grad_norm": 0.4903262257575989,
"learning_rate": 3.793995859213251e-05,
"loss": 0.3604,
"step": 1400
},
{
"epoch": 0.4865424430641822,
"grad_norm": 0.5923327207565308,
"learning_rate": 3.785369220151829e-05,
"loss": 0.3507,
"step": 1410
},
{
"epoch": 0.48999309868875085,
"grad_norm": 0.4920748472213745,
"learning_rate": 3.7767425810904074e-05,
"loss": 0.3544,
"step": 1420
},
{
"epoch": 0.49344375431331955,
"grad_norm": 0.5345553755760193,
"learning_rate": 3.7681159420289856e-05,
"loss": 0.3551,
"step": 1430
},
{
"epoch": 0.4968944099378882,
"grad_norm": 0.532013475894928,
"learning_rate": 3.759489302967564e-05,
"loss": 0.3457,
"step": 1440
},
{
"epoch": 0.5003450655624568,
"grad_norm": 0.5193171501159668,
"learning_rate": 3.750862663906142e-05,
"loss": 0.3527,
"step": 1450
},
{
"epoch": 0.5037957211870255,
"grad_norm": 0.4619022309780121,
"learning_rate": 3.742236024844721e-05,
"loss": 0.363,
"step": 1460
},
{
"epoch": 0.5072463768115942,
"grad_norm": 0.5606441497802734,
"learning_rate": 3.733609385783299e-05,
"loss": 0.3547,
"step": 1470
},
{
"epoch": 0.5106970324361628,
"grad_norm": 0.5041471719741821,
"learning_rate": 3.724982746721877e-05,
"loss": 0.3504,
"step": 1480
},
{
"epoch": 0.5141476880607315,
"grad_norm": 0.46617457270622253,
"learning_rate": 3.716356107660456e-05,
"loss": 0.3492,
"step": 1490
},
{
"epoch": 0.5175983436853002,
"grad_norm": 0.4503689706325531,
"learning_rate": 3.7077294685990336e-05,
"loss": 0.3585,
"step": 1500
},
{
"epoch": 0.5210489993098689,
"grad_norm": 0.4509599506855011,
"learning_rate": 3.6991028295376125e-05,
"loss": 0.351,
"step": 1510
},
{
"epoch": 0.5244996549344375,
"grad_norm": 0.46619853377342224,
"learning_rate": 3.690476190476191e-05,
"loss": 0.3588,
"step": 1520
},
{
"epoch": 0.5279503105590062,
"grad_norm": 0.528289794921875,
"learning_rate": 3.681849551414769e-05,
"loss": 0.3499,
"step": 1530
},
{
"epoch": 0.5314009661835749,
"grad_norm": 0.5260491371154785,
"learning_rate": 3.6732229123533474e-05,
"loss": 0.3487,
"step": 1540
},
{
"epoch": 0.5348516218081435,
"grad_norm": 0.5708985924720764,
"learning_rate": 3.6645962732919256e-05,
"loss": 0.3587,
"step": 1550
},
{
"epoch": 0.5383022774327122,
"grad_norm": 0.4738638401031494,
"learning_rate": 3.655969634230504e-05,
"loss": 0.3527,
"step": 1560
},
{
"epoch": 0.5417529330572809,
"grad_norm": 0.447976291179657,
"learning_rate": 3.647342995169082e-05,
"loss": 0.3457,
"step": 1570
},
{
"epoch": 0.5452035886818496,
"grad_norm": 0.44627007842063904,
"learning_rate": 3.6387163561076605e-05,
"loss": 0.3524,
"step": 1580
},
{
"epoch": 0.5486542443064182,
"grad_norm": 0.4650660753250122,
"learning_rate": 3.630089717046239e-05,
"loss": 0.3569,
"step": 1590
},
{
"epoch": 0.5521048999309869,
"grad_norm": 0.4500880837440491,
"learning_rate": 3.621463077984818e-05,
"loss": 0.341,
"step": 1600
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.5702126622200012,
"learning_rate": 3.612836438923395e-05,
"loss": 0.3519,
"step": 1610
},
{
"epoch": 0.5590062111801242,
"grad_norm": 0.4988945424556732,
"learning_rate": 3.604209799861974e-05,
"loss": 0.3427,
"step": 1620
},
{
"epoch": 0.5624568668046929,
"grad_norm": 0.5189987421035767,
"learning_rate": 3.5955831608005525e-05,
"loss": 0.3473,
"step": 1630
},
{
"epoch": 0.5659075224292616,
"grad_norm": 0.45712926983833313,
"learning_rate": 3.58695652173913e-05,
"loss": 0.3612,
"step": 1640
},
{
"epoch": 0.5693581780538303,
"grad_norm": 0.5472447276115417,
"learning_rate": 3.578329882677709e-05,
"loss": 0.3456,
"step": 1650
},
{
"epoch": 0.5728088336783989,
"grad_norm": 0.4873930811882019,
"learning_rate": 3.5697032436162874e-05,
"loss": 0.3692,
"step": 1660
},
{
"epoch": 0.5762594893029676,
"grad_norm": 0.503449022769928,
"learning_rate": 3.5610766045548656e-05,
"loss": 0.3771,
"step": 1670
},
{
"epoch": 0.5797101449275363,
"grad_norm": 0.5346298217773438,
"learning_rate": 3.552449965493444e-05,
"loss": 0.3543,
"step": 1680
},
{
"epoch": 0.583160800552105,
"grad_norm": 0.44163742661476135,
"learning_rate": 3.543823326432023e-05,
"loss": 0.3439,
"step": 1690
},
{
"epoch": 0.5866114561766735,
"grad_norm": 0.46102967858314514,
"learning_rate": 3.5351966873706005e-05,
"loss": 0.3433,
"step": 1700
},
{
"epoch": 0.5900621118012422,
"grad_norm": 0.4693676233291626,
"learning_rate": 3.526570048309179e-05,
"loss": 0.3445,
"step": 1710
},
{
"epoch": 0.5935127674258109,
"grad_norm": 0.4793757200241089,
"learning_rate": 3.517943409247757e-05,
"loss": 0.3594,
"step": 1720
},
{
"epoch": 0.5969634230503795,
"grad_norm": 0.5180776119232178,
"learning_rate": 3.509316770186335e-05,
"loss": 0.3379,
"step": 1730
},
{
"epoch": 0.6004140786749482,
"grad_norm": 0.48090022802352905,
"learning_rate": 3.500690131124914e-05,
"loss": 0.3528,
"step": 1740
},
{
"epoch": 0.6038647342995169,
"grad_norm": 0.5032299757003784,
"learning_rate": 3.492063492063492e-05,
"loss": 0.35,
"step": 1750
},
{
"epoch": 0.6073153899240856,
"grad_norm": 0.4861588478088379,
"learning_rate": 3.483436853002071e-05,
"loss": 0.3417,
"step": 1760
},
{
"epoch": 0.6107660455486542,
"grad_norm": 0.43041691184043884,
"learning_rate": 3.474810213940649e-05,
"loss": 0.3456,
"step": 1770
},
{
"epoch": 0.6142167011732229,
"grad_norm": 0.4854036569595337,
"learning_rate": 3.4661835748792274e-05,
"loss": 0.348,
"step": 1780
},
{
"epoch": 0.6176673567977916,
"grad_norm": 0.4714268445968628,
"learning_rate": 3.4575569358178056e-05,
"loss": 0.3384,
"step": 1790
},
{
"epoch": 0.6211180124223602,
"grad_norm": 0.4720168113708496,
"learning_rate": 3.448930296756384e-05,
"loss": 0.3437,
"step": 1800
},
{
"epoch": 0.6245686680469289,
"grad_norm": 0.4411432147026062,
"learning_rate": 3.440303657694962e-05,
"loss": 0.352,
"step": 1810
},
{
"epoch": 0.6280193236714976,
"grad_norm": 0.4829714000225067,
"learning_rate": 3.4316770186335405e-05,
"loss": 0.3246,
"step": 1820
},
{
"epoch": 0.6314699792960663,
"grad_norm": 0.5485419034957886,
"learning_rate": 3.423050379572119e-05,
"loss": 0.3462,
"step": 1830
},
{
"epoch": 0.6349206349206349,
"grad_norm": 0.43877512216567993,
"learning_rate": 3.414423740510697e-05,
"loss": 0.3441,
"step": 1840
},
{
"epoch": 0.6383712905452036,
"grad_norm": 0.5296888947486877,
"learning_rate": 3.405797101449276e-05,
"loss": 0.3419,
"step": 1850
},
{
"epoch": 0.6418219461697723,
"grad_norm": 0.4342154860496521,
"learning_rate": 3.3971704623878536e-05,
"loss": 0.3479,
"step": 1860
},
{
"epoch": 0.6452726017943409,
"grad_norm": 0.5657359957695007,
"learning_rate": 3.388543823326432e-05,
"loss": 0.3461,
"step": 1870
},
{
"epoch": 0.6487232574189096,
"grad_norm": 0.48667094111442566,
"learning_rate": 3.379917184265011e-05,
"loss": 0.3489,
"step": 1880
},
{
"epoch": 0.6521739130434783,
"grad_norm": 0.5117901563644409,
"learning_rate": 3.3712905452035884e-05,
"loss": 0.3412,
"step": 1890
},
{
"epoch": 0.655624568668047,
"grad_norm": 0.5122795104980469,
"learning_rate": 3.3626639061421674e-05,
"loss": 0.3464,
"step": 1900
},
{
"epoch": 0.6590752242926156,
"grad_norm": 0.44816821813583374,
"learning_rate": 3.3540372670807456e-05,
"loss": 0.3404,
"step": 1910
},
{
"epoch": 0.6625258799171843,
"grad_norm": 0.5073073506355286,
"learning_rate": 3.345410628019324e-05,
"loss": 0.3435,
"step": 1920
},
{
"epoch": 0.665976535541753,
"grad_norm": 0.506574273109436,
"learning_rate": 3.336783988957902e-05,
"loss": 0.3419,
"step": 1930
},
{
"epoch": 0.6694271911663217,
"grad_norm": 0.47071290016174316,
"learning_rate": 3.3281573498964805e-05,
"loss": 0.3357,
"step": 1940
},
{
"epoch": 0.6728778467908902,
"grad_norm": 0.4610745310783386,
"learning_rate": 3.319530710835059e-05,
"loss": 0.3529,
"step": 1950
},
{
"epoch": 0.6763285024154589,
"grad_norm": 0.496146023273468,
"learning_rate": 3.310904071773637e-05,
"loss": 0.3315,
"step": 1960
},
{
"epoch": 0.6797791580400276,
"grad_norm": 0.5223260521888733,
"learning_rate": 3.302277432712215e-05,
"loss": 0.3409,
"step": 1970
},
{
"epoch": 0.6832298136645962,
"grad_norm": 0.46770042181015015,
"learning_rate": 3.2936507936507936e-05,
"loss": 0.3323,
"step": 1980
},
{
"epoch": 0.6866804692891649,
"grad_norm": 0.43604782223701477,
"learning_rate": 3.2850241545893725e-05,
"loss": 0.3395,
"step": 1990
},
{
"epoch": 0.6901311249137336,
"grad_norm": 0.48399487137794495,
"learning_rate": 3.27639751552795e-05,
"loss": 0.338,
"step": 2000
},
{
"epoch": 0.6935817805383023,
"grad_norm": 0.4736705720424652,
"learning_rate": 3.267770876466529e-05,
"loss": 0.3461,
"step": 2010
},
{
"epoch": 0.6970324361628709,
"grad_norm": 0.42121419310569763,
"learning_rate": 3.2591442374051074e-05,
"loss": 0.3438,
"step": 2020
},
{
"epoch": 0.7004830917874396,
"grad_norm": 0.4169461131095886,
"learning_rate": 3.2505175983436856e-05,
"loss": 0.335,
"step": 2030
},
{
"epoch": 0.7039337474120083,
"grad_norm": 0.4755675792694092,
"learning_rate": 3.241890959282264e-05,
"loss": 0.3434,
"step": 2040
},
{
"epoch": 0.7073844030365769,
"grad_norm": 0.4242471754550934,
"learning_rate": 3.233264320220842e-05,
"loss": 0.3409,
"step": 2050
},
{
"epoch": 0.7108350586611456,
"grad_norm": 0.488776296377182,
"learning_rate": 3.2246376811594205e-05,
"loss": 0.3427,
"step": 2060
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.45658791065216064,
"learning_rate": 3.216011042097999e-05,
"loss": 0.3405,
"step": 2070
},
{
"epoch": 0.717736369910283,
"grad_norm": 0.5127744078636169,
"learning_rate": 3.207384403036577e-05,
"loss": 0.3377,
"step": 2080
},
{
"epoch": 0.7211870255348516,
"grad_norm": 0.44740578532218933,
"learning_rate": 3.198757763975155e-05,
"loss": 0.3439,
"step": 2090
},
{
"epoch": 0.7246376811594203,
"grad_norm": 0.49259352684020996,
"learning_rate": 3.1901311249137336e-05,
"loss": 0.3413,
"step": 2100
},
{
"epoch": 0.728088336783989,
"grad_norm": 0.48841023445129395,
"learning_rate": 3.181504485852312e-05,
"loss": 0.3574,
"step": 2110
},
{
"epoch": 0.7315389924085576,
"grad_norm": 0.4867888391017914,
"learning_rate": 3.17287784679089e-05,
"loss": 0.3462,
"step": 2120
},
{
"epoch": 0.7349896480331263,
"grad_norm": 0.49781835079193115,
"learning_rate": 3.164251207729469e-05,
"loss": 0.3275,
"step": 2130
},
{
"epoch": 0.738440303657695,
"grad_norm": 0.4472957253456116,
"learning_rate": 3.155624568668047e-05,
"loss": 0.3486,
"step": 2140
},
{
"epoch": 0.7418909592822637,
"grad_norm": 0.47612202167510986,
"learning_rate": 3.1469979296066256e-05,
"loss": 0.3534,
"step": 2150
},
{
"epoch": 0.7453416149068323,
"grad_norm": 0.4207702577114105,
"learning_rate": 3.138371290545204e-05,
"loss": 0.3426,
"step": 2160
},
{
"epoch": 0.748792270531401,
"grad_norm": 0.49038171768188477,
"learning_rate": 3.129744651483782e-05,
"loss": 0.3371,
"step": 2170
},
{
"epoch": 0.7522429261559697,
"grad_norm": 0.503973662853241,
"learning_rate": 3.1211180124223605e-05,
"loss": 0.3304,
"step": 2180
},
{
"epoch": 0.7556935817805382,
"grad_norm": 0.3980702757835388,
"learning_rate": 3.112491373360939e-05,
"loss": 0.3374,
"step": 2190
},
{
"epoch": 0.759144237405107,
"grad_norm": 0.4417726397514343,
"learning_rate": 3.103864734299517e-05,
"loss": 0.3359,
"step": 2200
},
{
"epoch": 0.7625948930296756,
"grad_norm": 0.47529205679893494,
"learning_rate": 3.095238095238095e-05,
"loss": 0.334,
"step": 2210
},
{
"epoch": 0.7660455486542443,
"grad_norm": 0.44546717405319214,
"learning_rate": 3.0866114561766736e-05,
"loss": 0.3338,
"step": 2220
},
{
"epoch": 0.7694962042788129,
"grad_norm": 0.45016613602638245,
"learning_rate": 3.077984817115252e-05,
"loss": 0.3457,
"step": 2230
},
{
"epoch": 0.7729468599033816,
"grad_norm": 0.48992106318473816,
"learning_rate": 3.069358178053831e-05,
"loss": 0.3372,
"step": 2240
},
{
"epoch": 0.7763975155279503,
"grad_norm": 0.47713831067085266,
"learning_rate": 3.0607315389924084e-05,
"loss": 0.3491,
"step": 2250
},
{
"epoch": 0.779848171152519,
"grad_norm": 0.4510743319988251,
"learning_rate": 3.0521048999309874e-05,
"loss": 0.3348,
"step": 2260
},
{
"epoch": 0.7832988267770876,
"grad_norm": 0.43214091658592224,
"learning_rate": 3.0434782608695656e-05,
"loss": 0.3406,
"step": 2270
},
{
"epoch": 0.7867494824016563,
"grad_norm": 0.4679895341396332,
"learning_rate": 3.0348516218081436e-05,
"loss": 0.3369,
"step": 2280
},
{
"epoch": 0.790200138026225,
"grad_norm": 0.5620041489601135,
"learning_rate": 3.0262249827467222e-05,
"loss": 0.3399,
"step": 2290
},
{
"epoch": 0.7936507936507936,
"grad_norm": 0.4891965091228485,
"learning_rate": 3.0175983436853005e-05,
"loss": 0.3323,
"step": 2300
},
{
"epoch": 0.7971014492753623,
"grad_norm": 0.44797247648239136,
"learning_rate": 3.0089717046238784e-05,
"loss": 0.3369,
"step": 2310
},
{
"epoch": 0.800552104899931,
"grad_norm": 0.4653630554676056,
"learning_rate": 3.000345065562457e-05,
"loss": 0.3304,
"step": 2320
},
{
"epoch": 0.8040027605244997,
"grad_norm": 0.4232509434223175,
"learning_rate": 2.991718426501035e-05,
"loss": 0.3361,
"step": 2330
},
{
"epoch": 0.8074534161490683,
"grad_norm": 0.42916807532310486,
"learning_rate": 2.9830917874396136e-05,
"loss": 0.338,
"step": 2340
},
{
"epoch": 0.810904071773637,
"grad_norm": 0.44847798347473145,
"learning_rate": 2.9744651483781922e-05,
"loss": 0.3388,
"step": 2350
},
{
"epoch": 0.8143547273982057,
"grad_norm": 0.41225308179855347,
"learning_rate": 2.96583850931677e-05,
"loss": 0.3352,
"step": 2360
},
{
"epoch": 0.8178053830227743,
"grad_norm": 0.5075202584266663,
"learning_rate": 2.9572118702553487e-05,
"loss": 0.333,
"step": 2370
},
{
"epoch": 0.821256038647343,
"grad_norm": 0.42286792397499084,
"learning_rate": 2.948585231193927e-05,
"loss": 0.3324,
"step": 2380
},
{
"epoch": 0.8247066942719117,
"grad_norm": 0.4466201663017273,
"learning_rate": 2.9399585921325053e-05,
"loss": 0.3338,
"step": 2390
},
{
"epoch": 0.8281573498964804,
"grad_norm": 0.3980891704559326,
"learning_rate": 2.9313319530710836e-05,
"loss": 0.3414,
"step": 2400
},
{
"epoch": 0.831608005521049,
"grad_norm": 0.4684590697288513,
"learning_rate": 2.9227053140096622e-05,
"loss": 0.34,
"step": 2410
},
{
"epoch": 0.8350586611456177,
"grad_norm": 0.43318429589271545,
"learning_rate": 2.91407867494824e-05,
"loss": 0.3221,
"step": 2420
},
{
"epoch": 0.8385093167701864,
"grad_norm": 0.4576144218444824,
"learning_rate": 2.9054520358868187e-05,
"loss": 0.341,
"step": 2430
},
{
"epoch": 0.841959972394755,
"grad_norm": 0.4950040280818939,
"learning_rate": 2.8968253968253974e-05,
"loss": 0.3331,
"step": 2440
},
{
"epoch": 0.8454106280193237,
"grad_norm": 0.5012326836585999,
"learning_rate": 2.8881987577639753e-05,
"loss": 0.3269,
"step": 2450
},
{
"epoch": 0.8488612836438924,
"grad_norm": 0.4127346873283386,
"learning_rate": 2.8795721187025536e-05,
"loss": 0.3313,
"step": 2460
},
{
"epoch": 0.852311939268461,
"grad_norm": 0.4913102388381958,
"learning_rate": 2.870945479641132e-05,
"loss": 0.3329,
"step": 2470
},
{
"epoch": 0.8557625948930296,
"grad_norm": 0.4684026837348938,
"learning_rate": 2.86231884057971e-05,
"loss": 0.3374,
"step": 2480
},
{
"epoch": 0.8592132505175983,
"grad_norm": 0.4605260491371155,
"learning_rate": 2.8536922015182887e-05,
"loss": 0.3398,
"step": 2490
},
{
"epoch": 0.862663906142167,
"grad_norm": 0.4554629325866699,
"learning_rate": 2.8450655624568667e-05,
"loss": 0.333,
"step": 2500
},
{
"epoch": 0.8661145617667356,
"grad_norm": 0.4555531442165375,
"learning_rate": 2.8364389233954453e-05,
"loss": 0.3503,
"step": 2510
},
{
"epoch": 0.8695652173913043,
"grad_norm": 0.46253401041030884,
"learning_rate": 2.827812284334024e-05,
"loss": 0.3282,
"step": 2520
},
{
"epoch": 0.873015873015873,
"grad_norm": 0.523013174533844,
"learning_rate": 2.819185645272602e-05,
"loss": 0.336,
"step": 2530
},
{
"epoch": 0.8764665286404417,
"grad_norm": 0.4696257710456848,
"learning_rate": 2.8105590062111805e-05,
"loss": 0.346,
"step": 2540
},
{
"epoch": 0.8799171842650103,
"grad_norm": 0.44945546984672546,
"learning_rate": 2.8019323671497587e-05,
"loss": 0.3297,
"step": 2550
},
{
"epoch": 0.883367839889579,
"grad_norm": 0.40458500385284424,
"learning_rate": 2.7933057280883367e-05,
"loss": 0.3391,
"step": 2560
},
{
"epoch": 0.8868184955141477,
"grad_norm": 0.4970335364341736,
"learning_rate": 2.7846790890269153e-05,
"loss": 0.342,
"step": 2570
},
{
"epoch": 0.8902691511387164,
"grad_norm": 0.4015578627586365,
"learning_rate": 2.776052449965494e-05,
"loss": 0.33,
"step": 2580
},
{
"epoch": 0.893719806763285,
"grad_norm": 0.482063889503479,
"learning_rate": 2.767425810904072e-05,
"loss": 0.3366,
"step": 2590
},
{
"epoch": 0.8971704623878537,
"grad_norm": 0.3847281336784363,
"learning_rate": 2.7587991718426505e-05,
"loss": 0.3528,
"step": 2600
},
{
"epoch": 0.9006211180124224,
"grad_norm": 0.4189392626285553,
"learning_rate": 2.7501725327812284e-05,
"loss": 0.3346,
"step": 2610
},
{
"epoch": 0.904071773636991,
"grad_norm": 0.44489413499832153,
"learning_rate": 2.741545893719807e-05,
"loss": 0.3279,
"step": 2620
},
{
"epoch": 0.9075224292615597,
"grad_norm": 0.43097779154777527,
"learning_rate": 2.7329192546583853e-05,
"loss": 0.3318,
"step": 2630
},
{
"epoch": 0.9109730848861284,
"grad_norm": 0.4077935516834259,
"learning_rate": 2.7242926155969632e-05,
"loss": 0.3248,
"step": 2640
},
{
"epoch": 0.9144237405106971,
"grad_norm": 0.39787349104881287,
"learning_rate": 2.715665976535542e-05,
"loss": 0.3398,
"step": 2650
},
{
"epoch": 0.9178743961352657,
"grad_norm": 0.4495849609375,
"learning_rate": 2.7070393374741205e-05,
"loss": 0.3406,
"step": 2660
},
{
"epoch": 0.9213250517598344,
"grad_norm": 0.4294304847717285,
"learning_rate": 2.6984126984126984e-05,
"loss": 0.3375,
"step": 2670
},
{
"epoch": 0.9247757073844031,
"grad_norm": 0.40841785073280334,
"learning_rate": 2.689786059351277e-05,
"loss": 0.3198,
"step": 2680
},
{
"epoch": 0.9282263630089717,
"grad_norm": 0.44023633003234863,
"learning_rate": 2.6811594202898553e-05,
"loss": 0.3344,
"step": 2690
},
{
"epoch": 0.9316770186335404,
"grad_norm": 0.4349792003631592,
"learning_rate": 2.6725327812284336e-05,
"loss": 0.3357,
"step": 2700
},
{
"epoch": 0.935127674258109,
"grad_norm": 0.4780796766281128,
"learning_rate": 2.663906142167012e-05,
"loss": 0.3297,
"step": 2710
},
{
"epoch": 0.9385783298826778,
"grad_norm": 0.45114386081695557,
"learning_rate": 2.6552795031055898e-05,
"loss": 0.3289,
"step": 2720
},
{
"epoch": 0.9420289855072463,
"grad_norm": 0.4433158040046692,
"learning_rate": 2.6466528640441684e-05,
"loss": 0.3241,
"step": 2730
},
{
"epoch": 0.945479641131815,
"grad_norm": 0.4354463219642639,
"learning_rate": 2.638026224982747e-05,
"loss": 0.3249,
"step": 2740
},
{
"epoch": 0.9489302967563837,
"grad_norm": 0.4203226864337921,
"learning_rate": 2.629399585921325e-05,
"loss": 0.329,
"step": 2750
},
{
"epoch": 0.9523809523809523,
"grad_norm": 0.42780405282974243,
"learning_rate": 2.6207729468599036e-05,
"loss": 0.337,
"step": 2760
},
{
"epoch": 0.955831608005521,
"grad_norm": 0.44693320989608765,
"learning_rate": 2.6121463077984822e-05,
"loss": 0.3307,
"step": 2770
},
{
"epoch": 0.9592822636300897,
"grad_norm": 0.3886381983757019,
"learning_rate": 2.60351966873706e-05,
"loss": 0.3332,
"step": 2780
},
{
"epoch": 0.9627329192546584,
"grad_norm": 0.40549805760383606,
"learning_rate": 2.5948930296756384e-05,
"loss": 0.3259,
"step": 2790
},
{
"epoch": 0.966183574879227,
"grad_norm": 0.4425898492336273,
"learning_rate": 2.586266390614217e-05,
"loss": 0.3337,
"step": 2800
},
{
"epoch": 0.9696342305037957,
"grad_norm": 0.4614519774913788,
"learning_rate": 2.577639751552795e-05,
"loss": 0.3223,
"step": 2810
},
{
"epoch": 0.9730848861283644,
"grad_norm": 0.47300979495048523,
"learning_rate": 2.5690131124913736e-05,
"loss": 0.3382,
"step": 2820
},
{
"epoch": 0.9765355417529331,
"grad_norm": 0.39775508642196655,
"learning_rate": 2.5603864734299522e-05,
"loss": 0.335,
"step": 2830
},
{
"epoch": 0.9799861973775017,
"grad_norm": 0.4713541865348816,
"learning_rate": 2.55175983436853e-05,
"loss": 0.3312,
"step": 2840
},
{
"epoch": 0.9834368530020704,
"grad_norm": 0.41885653138160706,
"learning_rate": 2.5431331953071087e-05,
"loss": 0.3256,
"step": 2850
},
{
"epoch": 0.9868875086266391,
"grad_norm": 0.41549861431121826,
"learning_rate": 2.5345065562456867e-05,
"loss": 0.3389,
"step": 2860
},
{
"epoch": 0.9903381642512077,
"grad_norm": 0.4274648427963257,
"learning_rate": 2.525879917184265e-05,
"loss": 0.3285,
"step": 2870
},
{
"epoch": 0.9937888198757764,
"grad_norm": 0.48823943734169006,
"learning_rate": 2.5172532781228436e-05,
"loss": 0.3225,
"step": 2880
},
{
"epoch": 0.9972394755003451,
"grad_norm": 0.4019075930118561,
"learning_rate": 2.5086266390614215e-05,
"loss": 0.3258,
"step": 2890
},
{
"epoch": 1.0006901311249137,
"grad_norm": 0.40041089057922363,
"learning_rate": 2.5e-05,
"loss": 0.3275,
"step": 2900
},
{
"epoch": 1.0041407867494825,
"grad_norm": 0.43522876501083374,
"learning_rate": 2.4913733609385784e-05,
"loss": 0.3229,
"step": 2910
},
{
"epoch": 1.007591442374051,
"grad_norm": 0.4160279333591461,
"learning_rate": 2.482746721877157e-05,
"loss": 0.3241,
"step": 2920
},
{
"epoch": 1.0110420979986197,
"grad_norm": 0.4505654573440552,
"learning_rate": 2.4741200828157353e-05,
"loss": 0.3302,
"step": 2930
},
{
"epoch": 1.0144927536231885,
"grad_norm": 0.4337722957134247,
"learning_rate": 2.4654934437543136e-05,
"loss": 0.3232,
"step": 2940
},
{
"epoch": 1.017943409247757,
"grad_norm": 0.44694915413856506,
"learning_rate": 2.4568668046928915e-05,
"loss": 0.3167,
"step": 2950
},
{
"epoch": 1.0213940648723256,
"grad_norm": 0.4304366707801819,
"learning_rate": 2.44824016563147e-05,
"loss": 0.3247,
"step": 2960
},
{
"epoch": 1.0248447204968945,
"grad_norm": 0.3924005329608917,
"learning_rate": 2.4396135265700484e-05,
"loss": 0.3203,
"step": 2970
},
{
"epoch": 1.028295376121463,
"grad_norm": 0.4276975393295288,
"learning_rate": 2.4309868875086267e-05,
"loss": 0.3222,
"step": 2980
},
{
"epoch": 1.0317460317460316,
"grad_norm": 0.41446569561958313,
"learning_rate": 2.4223602484472053e-05,
"loss": 0.3219,
"step": 2990
},
{
"epoch": 1.0351966873706004,
"grad_norm": 0.4318532347679138,
"learning_rate": 2.4137336093857836e-05,
"loss": 0.3315,
"step": 3000
},
{
"epoch": 1.038647342995169,
"grad_norm": 0.48855313658714294,
"learning_rate": 2.405106970324362e-05,
"loss": 0.3213,
"step": 3010
},
{
"epoch": 1.0420979986197378,
"grad_norm": 0.46925896406173706,
"learning_rate": 2.39648033126294e-05,
"loss": 0.3296,
"step": 3020
},
{
"epoch": 1.0455486542443064,
"grad_norm": 0.435977041721344,
"learning_rate": 2.3878536922015184e-05,
"loss": 0.3159,
"step": 3030
},
{
"epoch": 1.048999309868875,
"grad_norm": 0.5048639178276062,
"learning_rate": 2.3792270531400967e-05,
"loss": 0.3195,
"step": 3040
},
{
"epoch": 1.0524499654934438,
"grad_norm": 0.43628114461898804,
"learning_rate": 2.370600414078675e-05,
"loss": 0.3288,
"step": 3050
},
{
"epoch": 1.0559006211180124,
"grad_norm": 0.49825018644332886,
"learning_rate": 2.3619737750172532e-05,
"loss": 0.3182,
"step": 3060
},
{
"epoch": 1.059351276742581,
"grad_norm": 0.4279234707355499,
"learning_rate": 2.353347135955832e-05,
"loss": 0.3196,
"step": 3070
},
{
"epoch": 1.0628019323671498,
"grad_norm": 0.45079633593559265,
"learning_rate": 2.34472049689441e-05,
"loss": 0.3252,
"step": 3080
},
{
"epoch": 1.0662525879917184,
"grad_norm": 0.43655019998550415,
"learning_rate": 2.3360938578329884e-05,
"loss": 0.3163,
"step": 3090
},
{
"epoch": 1.069703243616287,
"grad_norm": 0.4613361954689026,
"learning_rate": 2.3274672187715667e-05,
"loss": 0.3281,
"step": 3100
},
{
"epoch": 1.0731538992408558,
"grad_norm": 0.4135753810405731,
"learning_rate": 2.318840579710145e-05,
"loss": 0.3157,
"step": 3110
},
{
"epoch": 1.0766045548654244,
"grad_norm": 0.4026079773902893,
"learning_rate": 2.3102139406487232e-05,
"loss": 0.3188,
"step": 3120
},
{
"epoch": 1.0800552104899932,
"grad_norm": 0.4352892339229584,
"learning_rate": 2.3015873015873015e-05,
"loss": 0.3104,
"step": 3130
},
{
"epoch": 1.0835058661145618,
"grad_norm": 0.3982253074645996,
"learning_rate": 2.29296066252588e-05,
"loss": 0.3245,
"step": 3140
},
{
"epoch": 1.0869565217391304,
"grad_norm": 0.3897712230682373,
"learning_rate": 2.2843340234644584e-05,
"loss": 0.3136,
"step": 3150
},
{
"epoch": 1.0904071773636992,
"grad_norm": 0.37550920248031616,
"learning_rate": 2.2757073844030367e-05,
"loss": 0.3256,
"step": 3160
},
{
"epoch": 1.0938578329882678,
"grad_norm": 0.41109180450439453,
"learning_rate": 2.2670807453416153e-05,
"loss": 0.317,
"step": 3170
},
{
"epoch": 1.0973084886128364,
"grad_norm": 0.46931174397468567,
"learning_rate": 2.2584541062801932e-05,
"loss": 0.318,
"step": 3180
},
{
"epoch": 1.1007591442374052,
"grad_norm": 0.41200053691864014,
"learning_rate": 2.2498274672187715e-05,
"loss": 0.3292,
"step": 3190
},
{
"epoch": 1.1042097998619738,
"grad_norm": 0.45627737045288086,
"learning_rate": 2.2412008281573498e-05,
"loss": 0.3235,
"step": 3200
},
{
"epoch": 1.1076604554865424,
"grad_norm": 0.4548512101173401,
"learning_rate": 2.2325741890959284e-05,
"loss": 0.3239,
"step": 3210
},
{
"epoch": 1.1111111111111112,
"grad_norm": 0.37763795256614685,
"learning_rate": 2.2239475500345067e-05,
"loss": 0.3183,
"step": 3220
},
{
"epoch": 1.1145617667356797,
"grad_norm": 0.4083724319934845,
"learning_rate": 2.215320910973085e-05,
"loss": 0.32,
"step": 3230
},
{
"epoch": 1.1180124223602483,
"grad_norm": 0.3961089551448822,
"learning_rate": 2.2066942719116636e-05,
"loss": 0.309,
"step": 3240
},
{
"epoch": 1.1214630779848171,
"grad_norm": 0.44931307435035706,
"learning_rate": 2.198067632850242e-05,
"loss": 0.312,
"step": 3250
},
{
"epoch": 1.1249137336093857,
"grad_norm": 0.360643595457077,
"learning_rate": 2.1894409937888198e-05,
"loss": 0.3279,
"step": 3260
},
{
"epoch": 1.1283643892339545,
"grad_norm": 0.37495937943458557,
"learning_rate": 2.180814354727398e-05,
"loss": 0.3123,
"step": 3270
},
{
"epoch": 1.1318150448585231,
"grad_norm": 0.3907097280025482,
"learning_rate": 2.1721877156659767e-05,
"loss": 0.3256,
"step": 3280
},
{
"epoch": 1.1352657004830917,
"grad_norm": 0.3720877468585968,
"learning_rate": 2.163561076604555e-05,
"loss": 0.329,
"step": 3290
},
{
"epoch": 1.1387163561076605,
"grad_norm": 0.4681912660598755,
"learning_rate": 2.1549344375431332e-05,
"loss": 0.3267,
"step": 3300
},
{
"epoch": 1.1421670117322291,
"grad_norm": 0.5137665271759033,
"learning_rate": 2.146307798481712e-05,
"loss": 0.3228,
"step": 3310
},
{
"epoch": 1.1456176673567977,
"grad_norm": 0.41128090023994446,
"learning_rate": 2.13768115942029e-05,
"loss": 0.3209,
"step": 3320
},
{
"epoch": 1.1490683229813665,
"grad_norm": 0.3766096532344818,
"learning_rate": 2.1290545203588684e-05,
"loss": 0.3228,
"step": 3330
},
{
"epoch": 1.152518978605935,
"grad_norm": 0.37945806980133057,
"learning_rate": 2.1204278812974467e-05,
"loss": 0.3179,
"step": 3340
},
{
"epoch": 1.155969634230504,
"grad_norm": 0.46118223667144775,
"learning_rate": 2.111801242236025e-05,
"loss": 0.3281,
"step": 3350
},
{
"epoch": 1.1594202898550725,
"grad_norm": 0.3962058126926422,
"learning_rate": 2.1031746031746032e-05,
"loss": 0.3263,
"step": 3360
},
{
"epoch": 1.162870945479641,
"grad_norm": 0.4236689805984497,
"learning_rate": 2.0945479641131815e-05,
"loss": 0.3198,
"step": 3370
},
{
"epoch": 1.16632160110421,
"grad_norm": 0.39470767974853516,
"learning_rate": 2.0859213250517598e-05,
"loss": 0.3177,
"step": 3380
},
{
"epoch": 1.1697722567287785,
"grad_norm": 0.3914918005466461,
"learning_rate": 2.0772946859903384e-05,
"loss": 0.3149,
"step": 3390
},
{
"epoch": 1.173222912353347,
"grad_norm": 0.4191271960735321,
"learning_rate": 2.0686680469289167e-05,
"loss": 0.3234,
"step": 3400
},
{
"epoch": 1.176673567977916,
"grad_norm": 0.40341272950172424,
"learning_rate": 2.060041407867495e-05,
"loss": 0.3314,
"step": 3410
},
{
"epoch": 1.1801242236024845,
"grad_norm": 0.4099895656108856,
"learning_rate": 2.0514147688060732e-05,
"loss": 0.3251,
"step": 3420
},
{
"epoch": 1.183574879227053,
"grad_norm": 0.4049317240715027,
"learning_rate": 2.0427881297446515e-05,
"loss": 0.3151,
"step": 3430
},
{
"epoch": 1.1870255348516219,
"grad_norm": 0.4270973205566406,
"learning_rate": 2.0341614906832298e-05,
"loss": 0.3314,
"step": 3440
},
{
"epoch": 1.1904761904761905,
"grad_norm": 0.40859755873680115,
"learning_rate": 2.025534851621808e-05,
"loss": 0.3204,
"step": 3450
},
{
"epoch": 1.193926846100759,
"grad_norm": 0.4711279273033142,
"learning_rate": 2.0169082125603867e-05,
"loss": 0.3175,
"step": 3460
},
{
"epoch": 1.1973775017253279,
"grad_norm": 0.458126038312912,
"learning_rate": 2.008281573498965e-05,
"loss": 0.3215,
"step": 3470
},
{
"epoch": 1.2008281573498965,
"grad_norm": 0.39996790885925293,
"learning_rate": 1.9996549344375432e-05,
"loss": 0.3231,
"step": 3480
},
{
"epoch": 1.204278812974465,
"grad_norm": 0.43717774748802185,
"learning_rate": 1.991028295376122e-05,
"loss": 0.3096,
"step": 3490
},
{
"epoch": 1.2077294685990339,
"grad_norm": 0.4430061876773834,
"learning_rate": 1.9824016563146998e-05,
"loss": 0.3145,
"step": 3500
},
{
"epoch": 1.2111801242236024,
"grad_norm": 0.4095977544784546,
"learning_rate": 1.973775017253278e-05,
"loss": 0.3203,
"step": 3510
},
{
"epoch": 1.2146307798481712,
"grad_norm": 0.3994373679161072,
"learning_rate": 1.9651483781918563e-05,
"loss": 0.3237,
"step": 3520
},
{
"epoch": 1.2180814354727398,
"grad_norm": 0.39703670144081116,
"learning_rate": 1.956521739130435e-05,
"loss": 0.323,
"step": 3530
},
{
"epoch": 1.2215320910973084,
"grad_norm": 0.49238279461860657,
"learning_rate": 1.9478951000690132e-05,
"loss": 0.3201,
"step": 3540
},
{
"epoch": 1.2249827467218772,
"grad_norm": 0.4226069450378418,
"learning_rate": 1.9392684610075915e-05,
"loss": 0.3182,
"step": 3550
},
{
"epoch": 1.2284334023464458,
"grad_norm": 0.3932894170284271,
"learning_rate": 1.93064182194617e-05,
"loss": 0.3076,
"step": 3560
},
{
"epoch": 1.2318840579710144,
"grad_norm": 0.41728973388671875,
"learning_rate": 1.9220151828847484e-05,
"loss": 0.3146,
"step": 3570
},
{
"epoch": 1.2353347135955832,
"grad_norm": 0.37457820773124695,
"learning_rate": 1.9133885438233263e-05,
"loss": 0.3214,
"step": 3580
},
{
"epoch": 1.2387853692201518,
"grad_norm": 0.4085715711116791,
"learning_rate": 1.9047619047619046e-05,
"loss": 0.3136,
"step": 3590
},
{
"epoch": 1.2422360248447206,
"grad_norm": 0.3940805196762085,
"learning_rate": 1.8961352657004832e-05,
"loss": 0.319,
"step": 3600
},
{
"epoch": 1.2456866804692892,
"grad_norm": 0.43771472573280334,
"learning_rate": 1.8875086266390615e-05,
"loss": 0.3206,
"step": 3610
},
{
"epoch": 1.2491373360938578,
"grad_norm": 0.43656811118125916,
"learning_rate": 1.8788819875776398e-05,
"loss": 0.3143,
"step": 3620
},
{
"epoch": 1.2525879917184266,
"grad_norm": 0.44114479422569275,
"learning_rate": 1.8702553485162184e-05,
"loss": 0.3213,
"step": 3630
},
{
"epoch": 1.2560386473429952,
"grad_norm": 0.5253772139549255,
"learning_rate": 1.8616287094547967e-05,
"loss": 0.3144,
"step": 3640
},
{
"epoch": 1.2594893029675638,
"grad_norm": 0.4206532835960388,
"learning_rate": 1.853002070393375e-05,
"loss": 0.3111,
"step": 3650
},
{
"epoch": 1.2629399585921326,
"grad_norm": 0.4311043620109558,
"learning_rate": 1.8443754313319532e-05,
"loss": 0.3158,
"step": 3660
},
{
"epoch": 1.2663906142167012,
"grad_norm": 0.41564512252807617,
"learning_rate": 1.8357487922705315e-05,
"loss": 0.3353,
"step": 3670
},
{
"epoch": 1.2698412698412698,
"grad_norm": 0.43433383107185364,
"learning_rate": 1.8271221532091098e-05,
"loss": 0.3224,
"step": 3680
},
{
"epoch": 1.2732919254658386,
"grad_norm": 0.436788946390152,
"learning_rate": 1.818495514147688e-05,
"loss": 0.3232,
"step": 3690
},
{
"epoch": 1.2767425810904072,
"grad_norm": 0.42806386947631836,
"learning_rate": 1.8098688750862663e-05,
"loss": 0.3124,
"step": 3700
},
{
"epoch": 1.2801932367149758,
"grad_norm": 0.40641355514526367,
"learning_rate": 1.801242236024845e-05,
"loss": 0.3203,
"step": 3710
},
{
"epoch": 1.2836438923395446,
"grad_norm": 0.418039470911026,
"learning_rate": 1.7926155969634232e-05,
"loss": 0.3125,
"step": 3720
},
{
"epoch": 1.2870945479641132,
"grad_norm": 0.41426384449005127,
"learning_rate": 1.7839889579020015e-05,
"loss": 0.3225,
"step": 3730
},
{
"epoch": 1.2905452035886817,
"grad_norm": 0.40283921360969543,
"learning_rate": 1.7753623188405798e-05,
"loss": 0.3084,
"step": 3740
},
{
"epoch": 1.2939958592132506,
"grad_norm": 0.42750898003578186,
"learning_rate": 1.766735679779158e-05,
"loss": 0.3135,
"step": 3750
},
{
"epoch": 1.2974465148378191,
"grad_norm": 0.4350372552871704,
"learning_rate": 1.7581090407177363e-05,
"loss": 0.3161,
"step": 3760
},
{
"epoch": 1.3008971704623877,
"grad_norm": 0.3901418447494507,
"learning_rate": 1.7494824016563146e-05,
"loss": 0.3192,
"step": 3770
},
{
"epoch": 1.3043478260869565,
"grad_norm": 0.4102122187614441,
"learning_rate": 1.7408557625948932e-05,
"loss": 0.3168,
"step": 3780
},
{
"epoch": 1.3077984817115251,
"grad_norm": 0.40640777349472046,
"learning_rate": 1.7322291235334715e-05,
"loss": 0.3288,
"step": 3790
},
{
"epoch": 1.311249137336094,
"grad_norm": 0.39230260252952576,
"learning_rate": 1.7236024844720498e-05,
"loss": 0.3179,
"step": 3800
},
{
"epoch": 1.3146997929606625,
"grad_norm": 0.462086945772171,
"learning_rate": 1.714975845410628e-05,
"loss": 0.31,
"step": 3810
},
{
"epoch": 1.3181504485852311,
"grad_norm": 0.4115633964538574,
"learning_rate": 1.7063492063492063e-05,
"loss": 0.324,
"step": 3820
},
{
"epoch": 1.3216011042098,
"grad_norm": 0.37690791487693787,
"learning_rate": 1.6977225672877846e-05,
"loss": 0.3082,
"step": 3830
},
{
"epoch": 1.3250517598343685,
"grad_norm": 0.4259274899959564,
"learning_rate": 1.689095928226363e-05,
"loss": 0.3205,
"step": 3840
},
{
"epoch": 1.3285024154589373,
"grad_norm": 0.39965760707855225,
"learning_rate": 1.6804692891649415e-05,
"loss": 0.3288,
"step": 3850
},
{
"epoch": 1.331953071083506,
"grad_norm": 0.4190365970134735,
"learning_rate": 1.6718426501035198e-05,
"loss": 0.3144,
"step": 3860
},
{
"epoch": 1.3354037267080745,
"grad_norm": 0.3663720488548279,
"learning_rate": 1.663216011042098e-05,
"loss": 0.3167,
"step": 3870
},
{
"epoch": 1.3388543823326433,
"grad_norm": 0.4190429747104645,
"learning_rate": 1.6545893719806767e-05,
"loss": 0.3091,
"step": 3880
},
{
"epoch": 1.342305037957212,
"grad_norm": 0.42641833424568176,
"learning_rate": 1.645962732919255e-05,
"loss": 0.3108,
"step": 3890
},
{
"epoch": 1.3457556935817805,
"grad_norm": 0.39976075291633606,
"learning_rate": 1.637336093857833e-05,
"loss": 0.3066,
"step": 3900
},
{
"epoch": 1.3492063492063493,
"grad_norm": 0.41375914216041565,
"learning_rate": 1.628709454796411e-05,
"loss": 0.3193,
"step": 3910
},
{
"epoch": 1.3526570048309179,
"grad_norm": 0.3997116982936859,
"learning_rate": 1.6200828157349898e-05,
"loss": 0.3126,
"step": 3920
},
{
"epoch": 1.3561076604554865,
"grad_norm": 0.3594307601451874,
"learning_rate": 1.611456176673568e-05,
"loss": 0.315,
"step": 3930
},
{
"epoch": 1.3595583160800553,
"grad_norm": 0.3919355273246765,
"learning_rate": 1.6028295376121463e-05,
"loss": 0.313,
"step": 3940
},
{
"epoch": 1.3630089717046239,
"grad_norm": 0.436056911945343,
"learning_rate": 1.5942028985507246e-05,
"loss": 0.3185,
"step": 3950
},
{
"epoch": 1.3664596273291925,
"grad_norm": 0.41378381848335266,
"learning_rate": 1.5855762594893032e-05,
"loss": 0.3174,
"step": 3960
},
{
"epoch": 1.3699102829537613,
"grad_norm": 0.4355195164680481,
"learning_rate": 1.5769496204278815e-05,
"loss": 0.3227,
"step": 3970
},
{
"epoch": 1.3733609385783299,
"grad_norm": 0.41318216919898987,
"learning_rate": 1.5683229813664594e-05,
"loss": 0.3169,
"step": 3980
},
{
"epoch": 1.3768115942028984,
"grad_norm": 0.47554564476013184,
"learning_rate": 1.559696342305038e-05,
"loss": 0.3034,
"step": 3990
},
{
"epoch": 1.3802622498274673,
"grad_norm": 0.4184069037437439,
"learning_rate": 1.5510697032436163e-05,
"loss": 0.3205,
"step": 4000
},
{
"epoch": 1.3837129054520358,
"grad_norm": 0.5330416560173035,
"learning_rate": 1.5424430641821946e-05,
"loss": 0.3282,
"step": 4010
},
{
"epoch": 1.3871635610766044,
"grad_norm": 0.43783238530158997,
"learning_rate": 1.533816425120773e-05,
"loss": 0.3246,
"step": 4020
},
{
"epoch": 1.3906142167011732,
"grad_norm": 0.41294294595718384,
"learning_rate": 1.5251897860593515e-05,
"loss": 0.3202,
"step": 4030
},
{
"epoch": 1.3940648723257418,
"grad_norm": 0.34258291125297546,
"learning_rate": 1.5165631469979296e-05,
"loss": 0.3195,
"step": 4040
},
{
"epoch": 1.3975155279503104,
"grad_norm": 0.3889847695827484,
"learning_rate": 1.5079365079365079e-05,
"loss": 0.3142,
"step": 4050
},
{
"epoch": 1.4009661835748792,
"grad_norm": 0.3962327539920807,
"learning_rate": 1.4993098688750865e-05,
"loss": 0.3208,
"step": 4060
},
{
"epoch": 1.4044168391994478,
"grad_norm": 0.4308435022830963,
"learning_rate": 1.4906832298136648e-05,
"loss": 0.3176,
"step": 4070
},
{
"epoch": 1.4078674948240166,
"grad_norm": 0.4349764585494995,
"learning_rate": 1.4820565907522429e-05,
"loss": 0.3258,
"step": 4080
},
{
"epoch": 1.4113181504485852,
"grad_norm": 0.44505828619003296,
"learning_rate": 1.4734299516908212e-05,
"loss": 0.3211,
"step": 4090
},
{
"epoch": 1.414768806073154,
"grad_norm": 0.4607914984226227,
"learning_rate": 1.4648033126293998e-05,
"loss": 0.3166,
"step": 4100
},
{
"epoch": 1.4182194616977226,
"grad_norm": 0.40675053000450134,
"learning_rate": 1.456176673567978e-05,
"loss": 0.3171,
"step": 4110
},
{
"epoch": 1.4216701173222912,
"grad_norm": 0.36485347151756287,
"learning_rate": 1.4475500345065562e-05,
"loss": 0.3143,
"step": 4120
},
{
"epoch": 1.42512077294686,
"grad_norm": 0.39989733695983887,
"learning_rate": 1.4389233954451348e-05,
"loss": 0.3176,
"step": 4130
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.4142070412635803,
"learning_rate": 1.430296756383713e-05,
"loss": 0.3232,
"step": 4140
},
{
"epoch": 1.4320220841959972,
"grad_norm": 0.41662535071372986,
"learning_rate": 1.4216701173222913e-05,
"loss": 0.3154,
"step": 4150
},
{
"epoch": 1.435472739820566,
"grad_norm": 0.39152923226356506,
"learning_rate": 1.4130434782608694e-05,
"loss": 0.3141,
"step": 4160
},
{
"epoch": 1.4389233954451346,
"grad_norm": 0.41523277759552,
"learning_rate": 1.404416839199448e-05,
"loss": 0.3175,
"step": 4170
},
{
"epoch": 1.4423740510697032,
"grad_norm": 0.40196311473846436,
"learning_rate": 1.3957902001380263e-05,
"loss": 0.3161,
"step": 4180
},
{
"epoch": 1.445824706694272,
"grad_norm": 0.441524863243103,
"learning_rate": 1.3871635610766046e-05,
"loss": 0.3147,
"step": 4190
},
{
"epoch": 1.4492753623188406,
"grad_norm": 0.4273381531238556,
"learning_rate": 1.378536922015183e-05,
"loss": 0.3195,
"step": 4200
},
{
"epoch": 1.4527260179434092,
"grad_norm": 0.41128963232040405,
"learning_rate": 1.3699102829537613e-05,
"loss": 0.3301,
"step": 4210
},
{
"epoch": 1.456176673567978,
"grad_norm": 0.39311736822128296,
"learning_rate": 1.3612836438923396e-05,
"loss": 0.3242,
"step": 4220
},
{
"epoch": 1.4596273291925466,
"grad_norm": 0.4288855493068695,
"learning_rate": 1.3526570048309179e-05,
"loss": 0.3328,
"step": 4230
},
{
"epoch": 1.4630779848171152,
"grad_norm": 0.38766637444496155,
"learning_rate": 1.3440303657694963e-05,
"loss": 0.3178,
"step": 4240
},
{
"epoch": 1.466528640441684,
"grad_norm": 0.48711350560188293,
"learning_rate": 1.3354037267080746e-05,
"loss": 0.3231,
"step": 4250
},
{
"epoch": 1.4699792960662525,
"grad_norm": 0.3988294303417206,
"learning_rate": 1.3267770876466529e-05,
"loss": 0.314,
"step": 4260
},
{
"epoch": 1.4734299516908211,
"grad_norm": 0.46785667538642883,
"learning_rate": 1.3181504485852312e-05,
"loss": 0.3071,
"step": 4270
},
{
"epoch": 1.47688060731539,
"grad_norm": 0.413526326417923,
"learning_rate": 1.3095238095238096e-05,
"loss": 0.3253,
"step": 4280
},
{
"epoch": 1.4803312629399585,
"grad_norm": 0.38375306129455566,
"learning_rate": 1.3008971704623879e-05,
"loss": 0.3172,
"step": 4290
},
{
"epoch": 1.4837819185645271,
"grad_norm": 0.4046788513660431,
"learning_rate": 1.2922705314009662e-05,
"loss": 0.3189,
"step": 4300
},
{
"epoch": 1.487232574189096,
"grad_norm": 0.4205913245677948,
"learning_rate": 1.2836438923395446e-05,
"loss": 0.3103,
"step": 4310
},
{
"epoch": 1.4906832298136645,
"grad_norm": 0.36907699704170227,
"learning_rate": 1.2750172532781229e-05,
"loss": 0.3134,
"step": 4320
},
{
"epoch": 1.4941338854382333,
"grad_norm": 0.48446428775787354,
"learning_rate": 1.2663906142167012e-05,
"loss": 0.3148,
"step": 4330
},
{
"epoch": 1.497584541062802,
"grad_norm": 0.4103284776210785,
"learning_rate": 1.2577639751552794e-05,
"loss": 0.3091,
"step": 4340
},
{
"epoch": 1.5010351966873707,
"grad_norm": 0.44808661937713623,
"learning_rate": 1.2491373360938579e-05,
"loss": 0.3182,
"step": 4350
},
{
"epoch": 1.5044858523119393,
"grad_norm": 0.3726388216018677,
"learning_rate": 1.2405106970324362e-05,
"loss": 0.3189,
"step": 4360
},
{
"epoch": 1.507936507936508,
"grad_norm": 0.41934582591056824,
"learning_rate": 1.2318840579710146e-05,
"loss": 0.3045,
"step": 4370
},
{
"epoch": 1.5113871635610767,
"grad_norm": 0.4093073904514313,
"learning_rate": 1.2232574189095929e-05,
"loss": 0.3122,
"step": 4380
},
{
"epoch": 1.5148378191856453,
"grad_norm": 0.4008806049823761,
"learning_rate": 1.2146307798481713e-05,
"loss": 0.3199,
"step": 4390
},
{
"epoch": 1.518288474810214,
"grad_norm": 0.4053995609283447,
"learning_rate": 1.2060041407867494e-05,
"loss": 0.3151,
"step": 4400
},
{
"epoch": 1.5217391304347827,
"grad_norm": 0.3917613923549652,
"learning_rate": 1.1973775017253279e-05,
"loss": 0.3179,
"step": 4410
},
{
"epoch": 1.5251897860593513,
"grad_norm": 0.40496399998664856,
"learning_rate": 1.1887508626639062e-05,
"loss": 0.3139,
"step": 4420
},
{
"epoch": 1.5286404416839199,
"grad_norm": 0.46660828590393066,
"learning_rate": 1.1801242236024846e-05,
"loss": 0.3246,
"step": 4430
},
{
"epoch": 1.5320910973084887,
"grad_norm": 0.4034900963306427,
"learning_rate": 1.1714975845410629e-05,
"loss": 0.3151,
"step": 4440
},
{
"epoch": 1.5355417529330573,
"grad_norm": 0.4394216537475586,
"learning_rate": 1.1628709454796412e-05,
"loss": 0.3139,
"step": 4450
},
{
"epoch": 1.5389924085576259,
"grad_norm": 0.390280157327652,
"learning_rate": 1.1542443064182196e-05,
"loss": 0.3066,
"step": 4460
},
{
"epoch": 1.5424430641821947,
"grad_norm": 0.4169969856739044,
"learning_rate": 1.1456176673567979e-05,
"loss": 0.306,
"step": 4470
},
{
"epoch": 1.5458937198067633,
"grad_norm": 0.41443514823913574,
"learning_rate": 1.1369910282953762e-05,
"loss": 0.3162,
"step": 4480
},
{
"epoch": 1.5493443754313319,
"grad_norm": 0.3956752121448517,
"learning_rate": 1.1283643892339544e-05,
"loss": 0.3119,
"step": 4490
},
{
"epoch": 1.5527950310559007,
"grad_norm": 0.5450183153152466,
"learning_rate": 1.1197377501725329e-05,
"loss": 0.3179,
"step": 4500
},
{
"epoch": 1.5562456866804693,
"grad_norm": 0.3975428640842438,
"learning_rate": 1.1111111111111112e-05,
"loss": 0.3236,
"step": 4510
},
{
"epoch": 1.5596963423050378,
"grad_norm": 0.40076789259910583,
"learning_rate": 1.1024844720496894e-05,
"loss": 0.3149,
"step": 4520
},
{
"epoch": 1.5631469979296067,
"grad_norm": 0.3667759597301483,
"learning_rate": 1.0938578329882679e-05,
"loss": 0.3225,
"step": 4530
},
{
"epoch": 1.5665976535541752,
"grad_norm": 0.38204970955848694,
"learning_rate": 1.0852311939268462e-05,
"loss": 0.3028,
"step": 4540
},
{
"epoch": 1.5700483091787438,
"grad_norm": 0.39774495363235474,
"learning_rate": 1.0766045548654244e-05,
"loss": 0.3172,
"step": 4550
},
{
"epoch": 1.5734989648033126,
"grad_norm": 0.4597463011741638,
"learning_rate": 1.0679779158040027e-05,
"loss": 0.3094,
"step": 4560
},
{
"epoch": 1.5769496204278814,
"grad_norm": 0.4128337800502777,
"learning_rate": 1.0593512767425812e-05,
"loss": 0.3104,
"step": 4570
},
{
"epoch": 1.5804002760524498,
"grad_norm": 0.3733654022216797,
"learning_rate": 1.0507246376811594e-05,
"loss": 0.3183,
"step": 4580
},
{
"epoch": 1.5838509316770186,
"grad_norm": 0.4163801670074463,
"learning_rate": 1.0420979986197379e-05,
"loss": 0.3103,
"step": 4590
},
{
"epoch": 1.5873015873015874,
"grad_norm": 0.39067670702934265,
"learning_rate": 1.0334713595583162e-05,
"loss": 0.3174,
"step": 4600
},
{
"epoch": 1.590752242926156,
"grad_norm": 0.4127908945083618,
"learning_rate": 1.0248447204968944e-05,
"loss": 0.3164,
"step": 4610
},
{
"epoch": 1.5942028985507246,
"grad_norm": 0.43183374404907227,
"learning_rate": 1.0162180814354729e-05,
"loss": 0.3158,
"step": 4620
},
{
"epoch": 1.5976535541752934,
"grad_norm": 0.419487863779068,
"learning_rate": 1.0075914423740512e-05,
"loss": 0.3131,
"step": 4630
},
{
"epoch": 1.601104209799862,
"grad_norm": 0.38240429759025574,
"learning_rate": 9.989648033126294e-06,
"loss": 0.3186,
"step": 4640
},
{
"epoch": 1.6045548654244306,
"grad_norm": 0.36253178119659424,
"learning_rate": 9.903381642512077e-06,
"loss": 0.3185,
"step": 4650
},
{
"epoch": 1.6080055210489994,
"grad_norm": 0.43411344289779663,
"learning_rate": 9.817115251897862e-06,
"loss": 0.3097,
"step": 4660
},
{
"epoch": 1.611456176673568,
"grad_norm": 0.4432518482208252,
"learning_rate": 9.730848861283644e-06,
"loss": 0.3216,
"step": 4670
},
{
"epoch": 1.6149068322981366,
"grad_norm": 0.4278890788555145,
"learning_rate": 9.644582470669427e-06,
"loss": 0.3057,
"step": 4680
},
{
"epoch": 1.6183574879227054,
"grad_norm": 0.4062660336494446,
"learning_rate": 9.558316080055212e-06,
"loss": 0.3157,
"step": 4690
},
{
"epoch": 1.621808143547274,
"grad_norm": 0.4200899302959442,
"learning_rate": 9.472049689440994e-06,
"loss": 0.3277,
"step": 4700
},
{
"epoch": 1.6252587991718426,
"grad_norm": 0.464057058095932,
"learning_rate": 9.385783298826777e-06,
"loss": 0.3145,
"step": 4710
},
{
"epoch": 1.6287094547964114,
"grad_norm": 0.47834956645965576,
"learning_rate": 9.29951690821256e-06,
"loss": 0.3194,
"step": 4720
},
{
"epoch": 1.63216011042098,
"grad_norm": 0.37987402081489563,
"learning_rate": 9.213250517598344e-06,
"loss": 0.311,
"step": 4730
},
{
"epoch": 1.6356107660455486,
"grad_norm": 0.3906196653842926,
"learning_rate": 9.126984126984127e-06,
"loss": 0.3143,
"step": 4740
},
{
"epoch": 1.6390614216701174,
"grad_norm": 0.4006254971027374,
"learning_rate": 9.040717736369912e-06,
"loss": 0.3211,
"step": 4750
},
{
"epoch": 1.642512077294686,
"grad_norm": 0.3769705593585968,
"learning_rate": 8.954451345755693e-06,
"loss": 0.3111,
"step": 4760
},
{
"epoch": 1.6459627329192545,
"grad_norm": 0.416151762008667,
"learning_rate": 8.868184955141477e-06,
"loss": 0.3132,
"step": 4770
},
{
"epoch": 1.6494133885438234,
"grad_norm": 0.3826710879802704,
"learning_rate": 8.781918564527262e-06,
"loss": 0.3072,
"step": 4780
},
{
"epoch": 1.652864044168392,
"grad_norm": 0.4245678782463074,
"learning_rate": 8.695652173913044e-06,
"loss": 0.3173,
"step": 4790
},
{
"epoch": 1.6563146997929605,
"grad_norm": 0.4384019374847412,
"learning_rate": 8.609385783298827e-06,
"loss": 0.3248,
"step": 4800
},
{
"epoch": 1.6597653554175293,
"grad_norm": 0.3771851360797882,
"learning_rate": 8.52311939268461e-06,
"loss": 0.3183,
"step": 4810
},
{
"epoch": 1.6632160110420982,
"grad_norm": 0.3873949944972992,
"learning_rate": 8.436853002070394e-06,
"loss": 0.3041,
"step": 4820
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.3559800684452057,
"learning_rate": 8.350586611456177e-06,
"loss": 0.3044,
"step": 4830
},
{
"epoch": 1.6701173222912353,
"grad_norm": 0.3813839852809906,
"learning_rate": 8.26432022084196e-06,
"loss": 0.3239,
"step": 4840
},
{
"epoch": 1.6735679779158041,
"grad_norm": 0.3791593015193939,
"learning_rate": 8.178053830227744e-06,
"loss": 0.3135,
"step": 4850
},
{
"epoch": 1.6770186335403725,
"grad_norm": 0.4418281018733978,
"learning_rate": 8.091787439613527e-06,
"loss": 0.3097,
"step": 4860
},
{
"epoch": 1.6804692891649413,
"grad_norm": 0.3764910101890564,
"learning_rate": 8.00552104899931e-06,
"loss": 0.3183,
"step": 4870
},
{
"epoch": 1.6839199447895101,
"grad_norm": 0.3997323513031006,
"learning_rate": 7.919254658385093e-06,
"loss": 0.3011,
"step": 4880
},
{
"epoch": 1.6873706004140787,
"grad_norm": 0.3994773030281067,
"learning_rate": 7.832988267770877e-06,
"loss": 0.306,
"step": 4890
},
{
"epoch": 1.6908212560386473,
"grad_norm": 0.4935210943222046,
"learning_rate": 7.74672187715666e-06,
"loss": 0.313,
"step": 4900
},
{
"epoch": 1.6942719116632161,
"grad_norm": 0.4665015935897827,
"learning_rate": 7.660455486542443e-06,
"loss": 0.3174,
"step": 4910
},
{
"epoch": 1.6977225672877847,
"grad_norm": 0.376712828874588,
"learning_rate": 7.574189095928226e-06,
"loss": 0.3196,
"step": 4920
},
{
"epoch": 1.7011732229123533,
"grad_norm": 0.49038994312286377,
"learning_rate": 7.48792270531401e-06,
"loss": 0.3106,
"step": 4930
},
{
"epoch": 1.704623878536922,
"grad_norm": 0.3488839566707611,
"learning_rate": 7.4016563146997936e-06,
"loss": 0.3168,
"step": 4940
},
{
"epoch": 1.7080745341614907,
"grad_norm": 0.523395836353302,
"learning_rate": 7.315389924085576e-06,
"loss": 0.3145,
"step": 4950
},
{
"epoch": 1.7115251897860593,
"grad_norm": 0.37245577573776245,
"learning_rate": 7.22912353347136e-06,
"loss": 0.3145,
"step": 4960
},
{
"epoch": 1.714975845410628,
"grad_norm": 0.37012121081352234,
"learning_rate": 7.142857142857143e-06,
"loss": 0.3063,
"step": 4970
},
{
"epoch": 1.7184265010351967,
"grad_norm": 0.42040547728538513,
"learning_rate": 7.056590752242926e-06,
"loss": 0.3175,
"step": 4980
},
{
"epoch": 1.7218771566597653,
"grad_norm": 0.40232813358306885,
"learning_rate": 6.970324361628709e-06,
"loss": 0.3244,
"step": 4990
},
{
"epoch": 1.725327812284334,
"grad_norm": 0.39817917346954346,
"learning_rate": 6.884057971014493e-06,
"loss": 0.3105,
"step": 5000
},
{
"epoch": 1.7287784679089027,
"grad_norm": 0.3830488622188568,
"learning_rate": 6.797791580400277e-06,
"loss": 0.3149,
"step": 5010
},
{
"epoch": 1.7322291235334712,
"grad_norm": 0.3827120363712311,
"learning_rate": 6.71152518978606e-06,
"loss": 0.325,
"step": 5020
},
{
"epoch": 1.73567977915804,
"grad_norm": 0.39774030447006226,
"learning_rate": 6.6252587991718436e-06,
"loss": 0.3075,
"step": 5030
},
{
"epoch": 1.7391304347826086,
"grad_norm": 0.3926418423652649,
"learning_rate": 6.538992408557626e-06,
"loss": 0.3181,
"step": 5040
},
{
"epoch": 1.7425810904071772,
"grad_norm": 0.44530630111694336,
"learning_rate": 6.45272601794341e-06,
"loss": 0.3221,
"step": 5050
},
{
"epoch": 1.746031746031746,
"grad_norm": 0.465223103761673,
"learning_rate": 6.366459627329193e-06,
"loss": 0.3081,
"step": 5060
},
{
"epoch": 1.7494824016563149,
"grad_norm": 0.37095168232917786,
"learning_rate": 6.280193236714976e-06,
"loss": 0.3143,
"step": 5070
},
{
"epoch": 1.7529330572808832,
"grad_norm": 0.38144221901893616,
"learning_rate": 6.193926846100759e-06,
"loss": 0.309,
"step": 5080
},
{
"epoch": 1.756383712905452,
"grad_norm": 0.3875439763069153,
"learning_rate": 6.107660455486543e-06,
"loss": 0.3164,
"step": 5090
},
{
"epoch": 1.7598343685300208,
"grad_norm": 0.3756479322910309,
"learning_rate": 6.021394064872326e-06,
"loss": 0.3034,
"step": 5100
},
{
"epoch": 1.7632850241545892,
"grad_norm": 0.3956107795238495,
"learning_rate": 5.935127674258109e-06,
"loss": 0.3159,
"step": 5110
},
{
"epoch": 1.766735679779158,
"grad_norm": 0.42773470282554626,
"learning_rate": 5.848861283643893e-06,
"loss": 0.3121,
"step": 5120
},
{
"epoch": 1.7701863354037268,
"grad_norm": 0.36983245611190796,
"learning_rate": 5.762594893029676e-06,
"loss": 0.3186,
"step": 5130
},
{
"epoch": 1.7736369910282954,
"grad_norm": 0.3716372549533844,
"learning_rate": 5.676328502415459e-06,
"loss": 0.3081,
"step": 5140
},
{
"epoch": 1.777087646652864,
"grad_norm": 0.44651734828948975,
"learning_rate": 5.590062111801243e-06,
"loss": 0.3059,
"step": 5150
},
{
"epoch": 1.7805383022774328,
"grad_norm": 0.4011802077293396,
"learning_rate": 5.5037957211870255e-06,
"loss": 0.3184,
"step": 5160
},
{
"epoch": 1.7839889579020014,
"grad_norm": 0.4085049331188202,
"learning_rate": 5.417529330572809e-06,
"loss": 0.3108,
"step": 5170
},
{
"epoch": 1.78743961352657,
"grad_norm": 0.42412784695625305,
"learning_rate": 5.331262939958592e-06,
"loss": 0.309,
"step": 5180
},
{
"epoch": 1.7908902691511388,
"grad_norm": 0.39434853196144104,
"learning_rate": 5.2449965493443755e-06,
"loss": 0.31,
"step": 5190
},
{
"epoch": 1.7943409247757074,
"grad_norm": 0.40047556161880493,
"learning_rate": 5.158730158730159e-06,
"loss": 0.3137,
"step": 5200
},
{
"epoch": 1.797791580400276,
"grad_norm": 0.37244585156440735,
"learning_rate": 5.072463768115943e-06,
"loss": 0.3142,
"step": 5210
},
{
"epoch": 1.8012422360248448,
"grad_norm": 0.4294069707393646,
"learning_rate": 4.9861973775017255e-06,
"loss": 0.3165,
"step": 5220
},
{
"epoch": 1.8046928916494134,
"grad_norm": 0.4544520974159241,
"learning_rate": 4.899930986887509e-06,
"loss": 0.3102,
"step": 5230
},
{
"epoch": 1.808143547273982,
"grad_norm": 0.4094906151294708,
"learning_rate": 4.813664596273292e-06,
"loss": 0.3139,
"step": 5240
},
{
"epoch": 1.8115942028985508,
"grad_norm": 0.39041367173194885,
"learning_rate": 4.7273982056590755e-06,
"loss": 0.3166,
"step": 5250
},
{
"epoch": 1.8150448585231194,
"grad_norm": 0.3962461054325104,
"learning_rate": 4.641131815044858e-06,
"loss": 0.3143,
"step": 5260
},
{
"epoch": 1.818495514147688,
"grad_norm": 0.377625972032547,
"learning_rate": 4.554865424430642e-06,
"loss": 0.3049,
"step": 5270
},
{
"epoch": 1.8219461697722568,
"grad_norm": 0.3768768012523651,
"learning_rate": 4.468599033816425e-06,
"loss": 0.3037,
"step": 5280
},
{
"epoch": 1.8253968253968254,
"grad_norm": 0.4099718928337097,
"learning_rate": 4.382332643202209e-06,
"loss": 0.3005,
"step": 5290
},
{
"epoch": 1.828847481021394,
"grad_norm": 0.4395196735858917,
"learning_rate": 4.296066252587992e-06,
"loss": 0.3204,
"step": 5300
},
{
"epoch": 1.8322981366459627,
"grad_norm": 0.42731696367263794,
"learning_rate": 4.2097998619737755e-06,
"loss": 0.3068,
"step": 5310
},
{
"epoch": 1.8357487922705316,
"grad_norm": 0.4241114854812622,
"learning_rate": 4.123533471359558e-06,
"loss": 0.3161,
"step": 5320
},
{
"epoch": 1.8391994478951,
"grad_norm": 0.42963647842407227,
"learning_rate": 4.037267080745342e-06,
"loss": 0.3165,
"step": 5330
},
{
"epoch": 1.8426501035196687,
"grad_norm": 0.3871797025203705,
"learning_rate": 3.951000690131125e-06,
"loss": 0.3134,
"step": 5340
},
{
"epoch": 1.8461007591442375,
"grad_norm": 0.42624005675315857,
"learning_rate": 3.864734299516908e-06,
"loss": 0.2984,
"step": 5350
},
{
"epoch": 1.849551414768806,
"grad_norm": 0.41070929169654846,
"learning_rate": 3.7784679089026914e-06,
"loss": 0.3188,
"step": 5360
},
{
"epoch": 1.8530020703933747,
"grad_norm": 0.3936339020729065,
"learning_rate": 3.6922015182884755e-06,
"loss": 0.3197,
"step": 5370
},
{
"epoch": 1.8564527260179435,
"grad_norm": 0.36642739176750183,
"learning_rate": 3.6059351276742587e-06,
"loss": 0.3144,
"step": 5380
},
{
"epoch": 1.8599033816425121,
"grad_norm": 0.3942832350730896,
"learning_rate": 3.519668737060042e-06,
"loss": 0.3074,
"step": 5390
},
{
"epoch": 1.8633540372670807,
"grad_norm": 0.40915966033935547,
"learning_rate": 3.433402346445825e-06,
"loss": 0.3113,
"step": 5400
},
{
"epoch": 1.8668046928916495,
"grad_norm": 0.4114089608192444,
"learning_rate": 3.3471359558316083e-06,
"loss": 0.3126,
"step": 5410
},
{
"epoch": 1.870255348516218,
"grad_norm": 0.39819249510765076,
"learning_rate": 3.2608695652173914e-06,
"loss": 0.3096,
"step": 5420
},
{
"epoch": 1.8737060041407867,
"grad_norm": 0.36793217062950134,
"learning_rate": 3.1746031746031746e-06,
"loss": 0.3044,
"step": 5430
},
{
"epoch": 1.8771566597653555,
"grad_norm": 0.37002408504486084,
"learning_rate": 3.0883367839889582e-06,
"loss": 0.3048,
"step": 5440
},
{
"epoch": 1.880607315389924,
"grad_norm": 0.3860247731208801,
"learning_rate": 3.0020703933747414e-06,
"loss": 0.322,
"step": 5450
},
{
"epoch": 1.8840579710144927,
"grad_norm": 0.3954015374183655,
"learning_rate": 2.9158040027605246e-06,
"loss": 0.3123,
"step": 5460
},
{
"epoch": 1.8875086266390615,
"grad_norm": 0.38128572702407837,
"learning_rate": 2.829537612146308e-06,
"loss": 0.3137,
"step": 5470
},
{
"epoch": 1.89095928226363,
"grad_norm": 0.3847271502017975,
"learning_rate": 2.7432712215320914e-06,
"loss": 0.3088,
"step": 5480
},
{
"epoch": 1.8944099378881987,
"grad_norm": 0.3671834170818329,
"learning_rate": 2.6570048309178746e-06,
"loss": 0.3183,
"step": 5490
},
{
"epoch": 1.8978605935127675,
"grad_norm": 0.37888962030410767,
"learning_rate": 2.570738440303658e-06,
"loss": 0.3082,
"step": 5500
},
{
"epoch": 1.901311249137336,
"grad_norm": 0.44054657220840454,
"learning_rate": 2.484472049689441e-06,
"loss": 0.3183,
"step": 5510
},
{
"epoch": 1.9047619047619047,
"grad_norm": 0.3805292844772339,
"learning_rate": 2.3982056590752246e-06,
"loss": 0.3132,
"step": 5520
},
{
"epoch": 1.9082125603864735,
"grad_norm": 0.435813844203949,
"learning_rate": 2.311939268461008e-06,
"loss": 0.3112,
"step": 5530
},
{
"epoch": 1.911663216011042,
"grad_norm": 0.43616998195648193,
"learning_rate": 2.225672877846791e-06,
"loss": 0.3012,
"step": 5540
},
{
"epoch": 1.9151138716356106,
"grad_norm": 0.3860926032066345,
"learning_rate": 2.139406487232574e-06,
"loss": 0.3107,
"step": 5550
},
{
"epoch": 1.9185645272601795,
"grad_norm": 0.362621009349823,
"learning_rate": 2.053140096618358e-06,
"loss": 0.318,
"step": 5560
},
{
"epoch": 1.9220151828847483,
"grad_norm": 0.36867082118988037,
"learning_rate": 1.966873706004141e-06,
"loss": 0.3058,
"step": 5570
},
{
"epoch": 1.9254658385093166,
"grad_norm": 0.411702036857605,
"learning_rate": 1.880607315389924e-06,
"loss": 0.3176,
"step": 5580
},
{
"epoch": 1.9289164941338854,
"grad_norm": 0.38144487142562866,
"learning_rate": 1.7943409247757074e-06,
"loss": 0.3185,
"step": 5590
},
{
"epoch": 1.9323671497584543,
"grad_norm": 0.4448649287223816,
"learning_rate": 1.7080745341614908e-06,
"loss": 0.31,
"step": 5600
},
{
"epoch": 1.9358178053830226,
"grad_norm": 0.4115146994590759,
"learning_rate": 1.621808143547274e-06,
"loss": 0.306,
"step": 5610
},
{
"epoch": 1.9392684610075914,
"grad_norm": 0.42539817094802856,
"learning_rate": 1.5355417529330572e-06,
"loss": 0.3113,
"step": 5620
},
{
"epoch": 1.9427191166321602,
"grad_norm": 0.4134668707847595,
"learning_rate": 1.4492753623188406e-06,
"loss": 0.3105,
"step": 5630
},
{
"epoch": 1.9461697722567288,
"grad_norm": 0.3997168242931366,
"learning_rate": 1.3630089717046238e-06,
"loss": 0.3131,
"step": 5640
},
{
"epoch": 1.9496204278812974,
"grad_norm": 0.38052627444267273,
"learning_rate": 1.2767425810904072e-06,
"loss": 0.3221,
"step": 5650
},
{
"epoch": 1.9530710835058662,
"grad_norm": 0.38866785168647766,
"learning_rate": 1.1904761904761904e-06,
"loss": 0.3075,
"step": 5660
},
{
"epoch": 1.9565217391304348,
"grad_norm": 0.43774518370628357,
"learning_rate": 1.1042097998619738e-06,
"loss": 0.313,
"step": 5670
},
{
"epoch": 1.9599723947550034,
"grad_norm": 0.3998454511165619,
"learning_rate": 1.017943409247757e-06,
"loss": 0.3201,
"step": 5680
},
{
"epoch": 1.9634230503795722,
"grad_norm": 0.4336646795272827,
"learning_rate": 9.316770186335405e-07,
"loss": 0.3028,
"step": 5690
},
{
"epoch": 1.9668737060041408,
"grad_norm": 0.44575753808021545,
"learning_rate": 8.454106280193237e-07,
"loss": 0.2932,
"step": 5700
},
{
"epoch": 1.9703243616287094,
"grad_norm": 0.3937937319278717,
"learning_rate": 7.591442374051071e-07,
"loss": 0.3154,
"step": 5710
},
{
"epoch": 1.9737750172532782,
"grad_norm": 0.36204755306243896,
"learning_rate": 6.728778467908903e-07,
"loss": 0.3036,
"step": 5720
},
{
"epoch": 1.9772256728778468,
"grad_norm": 0.346011757850647,
"learning_rate": 5.866114561766736e-07,
"loss": 0.3113,
"step": 5730
},
{
"epoch": 1.9806763285024154,
"grad_norm": 0.4556833803653717,
"learning_rate": 5.003450655624569e-07,
"loss": 0.3087,
"step": 5740
},
{
"epoch": 1.9841269841269842,
"grad_norm": 0.35639697313308716,
"learning_rate": 4.140786749482402e-07,
"loss": 0.3016,
"step": 5750
},
{
"epoch": 1.9875776397515528,
"grad_norm": 0.38764461874961853,
"learning_rate": 3.2781228433402347e-07,
"loss": 0.3147,
"step": 5760
},
{
"epoch": 1.9910282953761214,
"grad_norm": 0.45488855242729187,
"learning_rate": 2.4154589371980677e-07,
"loss": 0.3146,
"step": 5770
},
{
"epoch": 1.9944789510006902,
"grad_norm": 0.38507527112960815,
"learning_rate": 1.5527950310559006e-07,
"loss": 0.3222,
"step": 5780
},
{
"epoch": 1.9979296066252588,
"grad_norm": 0.3804410994052887,
"learning_rate": 6.901311249137336e-08,
"loss": 0.312,
"step": 5790
}
],
"logging_steps": 10,
"max_steps": 5796,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.444959504309944e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}