{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.4,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"grad_norm": 0.5184409618377686,
"learning_rate": 3.6e-06,
"loss": 1.0866,
"step": 10
},
{
"grad_norm": 0.13902701437473297,
"learning_rate": 7.6e-06,
"loss": 1.0761,
"step": 20
},
{
"grad_norm": 0.0735388919711113,
"learning_rate": 1.16e-05,
"loss": 1.0654,
"step": 30
},
{
"grad_norm": 0.0823279619216919,
"learning_rate": 1.56e-05,
"loss": 1.0577,
"step": 40
},
{
"grad_norm": 0.0818614587187767,
"learning_rate": 1.9600000000000002e-05,
"loss": 1.0471,
"step": 50
},
{
"grad_norm": 0.07863084971904755,
"learning_rate": 2.36e-05,
"loss": 1.0344,
"step": 60
},
{
"grad_norm": 0.08459152281284332,
"learning_rate": 2.7600000000000003e-05,
"loss": 1.0274,
"step": 70
},
{
"grad_norm": 0.09263663738965988,
"learning_rate": 3.16e-05,
"loss": 1.0254,
"step": 80
},
{
"grad_norm": 0.11630931496620178,
"learning_rate": 3.56e-05,
"loss": 1.0188,
"step": 90
},
{
"grad_norm": 0.2673693001270294,
"learning_rate": 3.960000000000001e-05,
"loss": 0.9947,
"step": 100
},
{
"grad_norm": 0.8061618208885193,
"learning_rate": 4.36e-05,
"loss": 0.933,
"step": 110
},
{
"grad_norm": 0.6079975962638855,
"learning_rate": 4.76e-05,
"loss": 0.843,
"step": 120
},
{
"grad_norm": 0.5954048037528992,
"learning_rate": 5.16e-05,
"loss": 0.7637,
"step": 130
},
{
"grad_norm": 0.856939971446991,
"learning_rate": 5.560000000000001e-05,
"loss": 0.6892,
"step": 140
},
{
"grad_norm": 0.987891435623169,
"learning_rate": 5.96e-05,
"loss": 0.624,
"step": 150
},
{
"grad_norm": 0.7554893493652344,
"learning_rate": 6.36e-05,
"loss": 0.5657,
"step": 160
},
{
"grad_norm": 0.9280672669410706,
"learning_rate": 6.76e-05,
"loss": 0.5029,
"step": 170
},
{
"grad_norm": 1.0552935600280762,
"learning_rate": 7.16e-05,
"loss": 0.4461,
"step": 180
},
{
"grad_norm": 0.9269078373908997,
"learning_rate": 7.560000000000001e-05,
"loss": 0.3999,
"step": 190
},
{
"grad_norm": 0.7659823894500732,
"learning_rate": 7.960000000000001e-05,
"loss": 0.3686,
"step": 200
},
{
"grad_norm": 0.8024225831031799,
"learning_rate": 8.36e-05,
"loss": 0.3347,
"step": 210
},
{
"grad_norm": 0.9907840490341187,
"learning_rate": 8.76e-05,
"loss": 0.3038,
"step": 220
},
{
"grad_norm": 1.0437959432601929,
"learning_rate": 9.16e-05,
"loss": 0.2782,
"step": 230
},
{
"grad_norm": 0.7399183511734009,
"learning_rate": 9.56e-05,
"loss": 0.2469,
"step": 240
},
{
"grad_norm": 1.2706823348999023,
"learning_rate": 9.960000000000001e-05,
"loss": 0.2156,
"step": 250
},
{
"grad_norm": 1.0207575559616089,
"learning_rate": 9.999911419878559e-05,
"loss": 0.2059,
"step": 260
},
{
"grad_norm": 0.8645009398460388,
"learning_rate": 9.999605221019081e-05,
"loss": 0.1927,
"step": 270
},
{
"grad_norm": 1.3531432151794434,
"learning_rate": 9.999080323230761e-05,
"loss": 0.1758,
"step": 280
},
{
"grad_norm": 0.8455585837364197,
"learning_rate": 9.998336749474329e-05,
"loss": 0.1719,
"step": 290
},
{
"grad_norm": 1.9373207092285156,
"learning_rate": 9.997374532276107e-05,
"loss": 0.1467,
"step": 300
},
{
"grad_norm": 0.7995489239692688,
"learning_rate": 9.996193713726596e-05,
"loss": 0.1327,
"step": 310
},
{
"grad_norm": 0.8662717938423157,
"learning_rate": 9.994794345478624e-05,
"loss": 0.1254,
"step": 320
},
{
"grad_norm": 0.9200862646102905,
"learning_rate": 9.99317648874509e-05,
"loss": 0.1229,
"step": 330
},
{
"grad_norm": 0.8963688611984253,
"learning_rate": 9.991340214296292e-05,
"loss": 0.1151,
"step": 340
},
{
"grad_norm": 1.0278245210647583,
"learning_rate": 9.989285602456819e-05,
"loss": 0.1043,
"step": 350
},
{
"grad_norm": 0.8198051452636719,
"learning_rate": 9.98701274310205e-05,
"loss": 0.092,
"step": 360
},
{
"grad_norm": 0.9469745755195618,
"learning_rate": 9.984521735654218e-05,
"loss": 0.0756,
"step": 370
},
{
"grad_norm": 0.8919957280158997,
"learning_rate": 9.981812689078057e-05,
"loss": 0.0762,
"step": 380
},
{
"grad_norm": 0.8211736083030701,
"learning_rate": 9.978885721876041e-05,
"loss": 0.0691,
"step": 390
},
{
"grad_norm": 0.8344348073005676,
"learning_rate": 9.975740962083198e-05,
"loss": 0.0675,
"step": 400
},
{
"grad_norm": 0.8462538719177246,
"learning_rate": 9.972378547261504e-05,
"loss": 0.0643,
"step": 410
},
{
"grad_norm": 0.7656953930854797,
"learning_rate": 9.968798624493885e-05,
"loss": 0.0612,
"step": 420
},
{
"grad_norm": 0.7218711972236633,
"learning_rate": 9.965001350377753e-05,
"loss": 0.0644,
"step": 430
},
{
"grad_norm": 1.0755572319030762,
"learning_rate": 9.960986891018183e-05,
"loss": 0.0616,
"step": 440
},
{
"grad_norm": 0.7578702569007874,
"learning_rate": 9.95675542202063e-05,
"loss": 0.0564,
"step": 450
},
{
"grad_norm": 0.7378067374229431,
"learning_rate": 9.952307128483256e-05,
"loss": 0.0524,
"step": 460
},
{
"grad_norm": 0.7673323750495911,
"learning_rate": 9.947642204988835e-05,
"loss": 0.0534,
"step": 470
},
{
"grad_norm": 0.8811172842979431,
"learning_rate": 9.942760855596226e-05,
"loss": 0.056,
"step": 480
},
{
"grad_norm": 0.717313826084137,
"learning_rate": 9.937663293831471e-05,
"loss": 0.0563,
"step": 490
},
{
"grad_norm": 0.8490850925445557,
"learning_rate": 9.932349742678433e-05,
"loss": 0.0586,
"step": 500
},
{
"grad_norm": 0.7520468235015869,
"learning_rate": 9.926820434569051e-05,
"loss": 0.0532,
"step": 510
},
{
"grad_norm": 0.7413885593414307,
"learning_rate": 9.921075611373179e-05,
"loss": 0.056,
"step": 520
},
{
"grad_norm": 0.7074100375175476,
"learning_rate": 9.915115524387988e-05,
"loss": 0.0517,
"step": 530
},
{
"grad_norm": 0.7114731073379517,
"learning_rate": 9.908940434326997e-05,
"loss": 0.048,
"step": 540
},
{
"grad_norm": 0.8586488962173462,
"learning_rate": 9.902550611308645e-05,
"loss": 0.0509,
"step": 550
},
{
"grad_norm": 0.7280430197715759,
"learning_rate": 9.895946334844494e-05,
"loss": 0.0485,
"step": 560
},
{
"grad_norm": 0.7949817180633545,
"learning_rate": 9.889127893826989e-05,
"loss": 0.05,
"step": 570
},
{
"grad_norm": 0.6108531355857849,
"learning_rate": 9.882095586516831e-05,
"loss": 0.0499,
"step": 580
},
{
"grad_norm": 0.6878312826156616,
"learning_rate": 9.874849720529921e-05,
"loss": 0.0501,
"step": 590
},
{
"grad_norm": 0.7149339914321899,
"learning_rate": 9.867390612823914e-05,
"loss": 0.0505,
"step": 600
},
{
"grad_norm": 0.8408756256103516,
"learning_rate": 9.859718589684344e-05,
"loss": 0.0448,
"step": 610
},
{
"grad_norm": 0.8319779634475708,
"learning_rate": 9.851833986710353e-05,
"loss": 0.0477,
"step": 620
},
{
"grad_norm": 0.7106873393058777,
"learning_rate": 9.843737148800023e-05,
"loss": 0.0458,
"step": 630
},
{
"grad_norm": 0.7426978349685669,
"learning_rate": 9.835428430135271e-05,
"loss": 0.0494,
"step": 640
},
{
"grad_norm": 0.8612025380134583,
"learning_rate": 9.82690819416637e-05,
"loss": 0.0469,
"step": 650
},
{
"grad_norm": 0.7378700971603394,
"learning_rate": 9.818176813596041e-05,
"loss": 0.0483,
"step": 660
},
{
"grad_norm": 0.6754602789878845,
"learning_rate": 9.809234670363159e-05,
"loss": 0.0417,
"step": 670
},
{
"grad_norm": 0.6960591673851013,
"learning_rate": 9.800082155626034e-05,
"loss": 0.0483,
"step": 680
},
{
"grad_norm": 0.7274064421653748,
"learning_rate": 9.790719669745312e-05,
"loss": 0.042,
"step": 690
},
{
"grad_norm": 0.6894716620445251,
"learning_rate": 9.781147622266455e-05,
"loss": 0.0389,
"step": 700
},
{
"grad_norm": 0.776299238204956,
"learning_rate": 9.771366431901831e-05,
"loss": 0.0415,
"step": 710
},
{
"grad_norm": 0.6639792919158936,
"learning_rate": 9.761376526512394e-05,
"loss": 0.0463,
"step": 720
},
{
"grad_norm": 0.6171531677246094,
"learning_rate": 9.751178343088963e-05,
"loss": 0.0413,
"step": 730
},
{
"grad_norm": 0.715189516544342,
"learning_rate": 9.740772327733123e-05,
"loss": 0.0393,
"step": 740
},
{
"grad_norm": 0.6551257371902466,
"learning_rate": 9.730158935637697e-05,
"loss": 0.0417,
"step": 750
},
{
"grad_norm": 0.6576980948448181,
"learning_rate": 9.719338631066834e-05,
"loss": 0.0423,
"step": 760
},
{
"grad_norm": 0.6665154099464417,
"learning_rate": 9.708311887335713e-05,
"loss": 0.0414,
"step": 770
},
{
"grad_norm": 0.753499448299408,
"learning_rate": 9.697079186789823e-05,
"loss": 0.0412,
"step": 780
},
{
"grad_norm": 0.6803208589553833,
"learning_rate": 9.685641020783876e-05,
"loss": 0.042,
"step": 790
},
{
"grad_norm": 0.6924328804016113,
"learning_rate": 9.67399788966031e-05,
"loss": 0.0415,
"step": 800
},
{
"grad_norm": 0.7184621691703796,
"learning_rate": 9.662150302727395e-05,
"loss": 0.0377,
"step": 810
},
{
"grad_norm": 0.6149929761886597,
"learning_rate": 9.650098778236968e-05,
"loss": 0.0405,
"step": 820
},
{
"grad_norm": 0.6302839517593384,
"learning_rate": 9.637843843361749e-05,
"loss": 0.0371,
"step": 830
},
{
"grad_norm": 0.5969130396842957,
"learning_rate": 9.62538603417229e-05,
"loss": 0.0404,
"step": 840
},
{
"grad_norm": 0.56196528673172,
"learning_rate": 9.612725895613526e-05,
"loss": 0.04,
"step": 850
},
{
"grad_norm": 0.7191771864891052,
"learning_rate": 9.599863981480926e-05,
"loss": 0.039,
"step": 860
},
{
"grad_norm": 0.5886449813842773,
"learning_rate": 9.586800854396283e-05,
"loss": 0.0369,
"step": 870
},
{
"grad_norm": 0.5173853039741516,
"learning_rate": 9.573537085783095e-05,
"loss": 0.0358,
"step": 880
},
{
"grad_norm": 0.5850680470466614,
"learning_rate": 9.560073255841571e-05,
"loss": 0.0427,
"step": 890
},
{
"grad_norm": 0.758872926235199,
"learning_rate": 9.546409953523247e-05,
"loss": 0.04,
"step": 900
},
{
"grad_norm": 0.631585955619812,
"learning_rate": 9.532547776505229e-05,
"loss": 0.0385,
"step": 910
},
{
"grad_norm": 0.42788979411125183,
"learning_rate": 9.518487331164048e-05,
"loss": 0.0359,
"step": 920
},
{
"grad_norm": 0.8113523125648499,
"learning_rate": 9.504229232549134e-05,
"loss": 0.036,
"step": 930
},
{
"grad_norm": 0.5256606340408325,
"learning_rate": 9.489774104355909e-05,
"loss": 0.0369,
"step": 940
},
{
"grad_norm": 0.680260956287384,
"learning_rate": 9.475122578898507e-05,
"loss": 0.0356,
"step": 950
},
{
"grad_norm": 0.6082437038421631,
"learning_rate": 9.460275297082119e-05,
"loss": 0.035,
"step": 960
},
{
"grad_norm": 0.6395261883735657,
"learning_rate": 9.445232908374948e-05,
"loss": 0.0398,
"step": 970
},
{
"grad_norm": 0.5504397749900818,
"learning_rate": 9.429996070779808e-05,
"loss": 0.0347,
"step": 980
},
{
"grad_norm": 0.6658892035484314,
"learning_rate": 9.414565450805333e-05,
"loss": 0.038,
"step": 990
},
{
"grad_norm": 0.6586282253265381,
"learning_rate": 9.398941723436831e-05,
"loss": 0.0348,
"step": 1000
},
{
"grad_norm": 0.563089907169342,
"learning_rate": 9.383125572106752e-05,
"loss": 0.0365,
"step": 1010
},
{
"grad_norm": 0.6453968286514282,
"learning_rate": 9.367117688664791e-05,
"loss": 0.0352,
"step": 1020
},
{
"grad_norm": 0.7110365629196167,
"learning_rate": 9.35091877334763e-05,
"loss": 0.0373,
"step": 1030
},
{
"grad_norm": 0.8032221794128418,
"learning_rate": 9.334529534748297e-05,
"loss": 0.0373,
"step": 1040
},
{
"grad_norm": 0.5992754697799683,
"learning_rate": 9.317950689785188e-05,
"loss": 0.0372,
"step": 1050
},
{
"grad_norm": 0.5257222652435303,
"learning_rate": 9.301182963670688e-05,
"loss": 0.0349,
"step": 1060
},
{
"grad_norm": 0.5869795083999634,
"learning_rate": 9.284227089879456e-05,
"loss": 0.0379,
"step": 1070
},
{
"grad_norm": 0.6808982491493225,
"learning_rate": 9.26708381011634e-05,
"loss": 0.0321,
"step": 1080
},
{
"grad_norm": 0.8073207139968872,
"learning_rate": 9.249753874283937e-05,
"loss": 0.0363,
"step": 1090
},
{
"grad_norm": 0.6191753149032593,
"learning_rate": 9.232238040449779e-05,
"loss": 0.0338,
"step": 1100
},
{
"grad_norm": 0.6077655553817749,
"learning_rate": 9.214537074813181e-05,
"loss": 0.034,
"step": 1110
},
{
"grad_norm": 0.5800149440765381,
"learning_rate": 9.196651751671724e-05,
"loss": 0.0342,
"step": 1120
},
{
"grad_norm": 0.5005490183830261,
"learning_rate": 9.178582853387384e-05,
"loss": 0.0396,
"step": 1130
},
{
"grad_norm": 0.5275071859359741,
"learning_rate": 9.160331170352304e-05,
"loss": 0.0364,
"step": 1140
},
{
"grad_norm": 0.5516709685325623,
"learning_rate": 9.141897500954229e-05,
"loss": 0.0326,
"step": 1150
},
{
"grad_norm": 0.6132859587669373,
"learning_rate": 9.123282651541576e-05,
"loss": 0.0345,
"step": 1160
},
{
"grad_norm": 0.6618711948394775,
"learning_rate": 9.104487436388161e-05,
"loss": 0.0364,
"step": 1170
},
{
"grad_norm": 0.48607364296913147,
"learning_rate": 9.085512677657582e-05,
"loss": 0.0347,
"step": 1180
},
{
"grad_norm": 0.6589488983154297,
"learning_rate": 9.066359205367258e-05,
"loss": 0.0343,
"step": 1190
},
{
"grad_norm": 0.5488317012786865,
"learning_rate": 9.047027857352112e-05,
"loss": 0.0344,
"step": 1200
},
{
"grad_norm": 0.4591907262802124,
"learning_rate": 9.027519479227935e-05,
"loss": 0.0314,
"step": 1210
},
{
"grad_norm": 0.6019390821456909,
"learning_rate": 9.007834924354383e-05,
"loss": 0.0338,
"step": 1220
},
{
"grad_norm": 0.5816202759742737,
"learning_rate": 8.987975053797655e-05,
"loss": 0.0351,
"step": 1230
},
{
"grad_norm": 0.5801373720169067,
"learning_rate": 8.967940736292825e-05,
"loss": 0.0345,
"step": 1240
},
{
"grad_norm": 0.6135644316673279,
"learning_rate": 8.947732848205846e-05,
"loss": 0.0341,
"step": 1250
},
{
"grad_norm": 0.5733956098556519,
"learning_rate": 8.927352273495204e-05,
"loss": 0.0347,
"step": 1260
},
{
"grad_norm": 0.5448894500732422,
"learning_rate": 8.906799903673265e-05,
"loss": 0.0314,
"step": 1270
},
{
"grad_norm": 0.474997878074646,
"learning_rate": 8.88607663776726e-05,
"loss": 0.0337,
"step": 1280
},
{
"grad_norm": 0.5049208998680115,
"learning_rate": 8.865183382279978e-05,
"loss": 0.0334,
"step": 1290
},
{
"grad_norm": 0.6067247986793518,
"learning_rate": 8.844121051150096e-05,
"loss": 0.0325,
"step": 1300
},
{
"grad_norm": 0.5232999920845032,
"learning_rate": 8.822890565712211e-05,
"loss": 0.0318,
"step": 1310
},
{
"grad_norm": 0.5644145607948303,
"learning_rate": 8.801492854656536e-05,
"loss": 0.0333,
"step": 1320
},
{
"grad_norm": 0.5336611866950989,
"learning_rate": 8.779928853988268e-05,
"loss": 0.0322,
"step": 1330
},
{
"grad_norm": 0.6173704862594604,
"learning_rate": 8.758199506986655e-05,
"loss": 0.0318,
"step": 1340
},
{
"grad_norm": 0.5638954639434814,
"learning_rate": 8.73630576416373e-05,
"loss": 0.0327,
"step": 1350
},
{
"grad_norm": 0.5919230580329895,
"learning_rate": 8.714248583222726e-05,
"loss": 0.0337,
"step": 1360
},
{
"grad_norm": 0.4524107575416565,
"learning_rate": 8.692028929016196e-05,
"loss": 0.0329,
"step": 1370
},
{
"grad_norm": 0.5694531798362732,
"learning_rate": 8.669647773503797e-05,
"loss": 0.0326,
"step": 1380
},
{
"grad_norm": 0.5108136534690857,
"learning_rate": 8.647106095709773e-05,
"loss": 0.031,
"step": 1390
},
{
"grad_norm": 0.6212553977966309,
"learning_rate": 8.624404881680139e-05,
"loss": 0.0324,
"step": 1400
},
{
"grad_norm": 0.4589623808860779,
"learning_rate": 8.601545124439535e-05,
"loss": 0.0281,
"step": 1410
},
{
"grad_norm": 0.4673466682434082,
"learning_rate": 8.5785278239478e-05,
"loss": 0.0328,
"step": 1420
},
{
"grad_norm": 0.6234567165374756,
"learning_rate": 8.555353987056224e-05,
"loss": 0.0301,
"step": 1430
},
{
"grad_norm": 0.7200560569763184,
"learning_rate": 8.532024627463505e-05,
"loss": 0.0302,
"step": 1440
},
{
"grad_norm": 0.6383592486381531,
"learning_rate": 8.508540765671407e-05,
"loss": 0.0291,
"step": 1450
},
{
"grad_norm": 0.43400031328201294,
"learning_rate": 8.484903428940121e-05,
"loss": 0.0294,
"step": 1460
},
{
"grad_norm": 0.4296083152294159,
"learning_rate": 8.461113651243334e-05,
"loss": 0.0329,
"step": 1470
},
{
"grad_norm": 0.5975046157836914,
"learning_rate": 8.437172473222987e-05,
"loss": 0.0344,
"step": 1480
},
{
"grad_norm": 0.5191563367843628,
"learning_rate": 8.413080942143767e-05,
"loss": 0.0294,
"step": 1490
},
{
"grad_norm": 0.4823097884654999,
"learning_rate": 8.388840111847288e-05,
"loss": 0.0304,
"step": 1500
},
{
"grad_norm": 0.6224477887153625,
"learning_rate": 8.364451042705998e-05,
"loss": 0.0324,
"step": 1510
},
{
"grad_norm": 0.520642876625061,
"learning_rate": 8.33991480157679e-05,
"loss": 0.0282,
"step": 1520
},
{
"grad_norm": 0.5692772269248962,
"learning_rate": 8.315232461754338e-05,
"loss": 0.0296,
"step": 1530
},
{
"grad_norm": 0.41501984000205994,
"learning_rate": 8.290405102924144e-05,
"loss": 0.031,
"step": 1540
},
{
"grad_norm": 0.5292897820472717,
"learning_rate": 8.265433811115316e-05,
"loss": 0.0312,
"step": 1550
},
{
"grad_norm": 0.4766462445259094,
"learning_rate": 8.240319678653049e-05,
"loss": 0.0314,
"step": 1560
},
{
"grad_norm": 0.625045895576477,
"learning_rate": 8.215063804110857e-05,
"loss": 0.0327,
"step": 1570
},
{
"grad_norm": 0.4520655572414398,
"learning_rate": 8.189667292262512e-05,
"loss": 0.0297,
"step": 1580
},
{
"grad_norm": 0.4334840178489685,
"learning_rate": 8.164131254033716e-05,
"loss": 0.0302,
"step": 1590
},
{
"grad_norm": 0.5579179525375366,
"learning_rate": 8.138456806453503e-05,
"loss": 0.0304,
"step": 1600
},
{
"grad_norm": 0.5523282289505005,
"learning_rate": 8.112645072605386e-05,
"loss": 0.0291,
"step": 1610
},
{
"grad_norm": 0.47829750180244446,
"learning_rate": 8.086697181578222e-05,
"loss": 0.0332,
"step": 1620
},
{
"grad_norm": 0.5201600790023804,
"learning_rate": 8.060614268416823e-05,
"loss": 0.0305,
"step": 1630
},
{
"grad_norm": 0.5666531920433044,
"learning_rate": 8.034397474072309e-05,
"loss": 0.0288,
"step": 1640
},
{
"grad_norm": 0.5095893144607544,
"learning_rate": 8.008047945352193e-05,
"loss": 0.03,
"step": 1650
},
{
"grad_norm": 0.4521785080432892,
"learning_rate": 7.981566834870225e-05,
"loss": 0.0315,
"step": 1660
},
{
"grad_norm": 0.4474700093269348,
"learning_rate": 7.954955300995961e-05,
"loss": 0.0289,
"step": 1670
},
{
"grad_norm": 0.5986289978027344,
"learning_rate": 7.928214507804104e-05,
"loss": 0.0293,
"step": 1680
},
{
"grad_norm": 0.5038372278213501,
"learning_rate": 7.901345625023576e-05,
"loss": 0.028,
"step": 1690
},
{
"grad_norm": 0.4206600785255432,
"learning_rate": 7.874349827986354e-05,
"loss": 0.0305,
"step": 1700
},
{
"grad_norm": 0.4808930456638336,
"learning_rate": 7.847228297576053e-05,
"loss": 0.0311,
"step": 1710
},
{
"grad_norm": 0.4412468671798706,
"learning_rate": 7.819982220176276e-05,
"loss": 0.0284,
"step": 1720
},
{
"grad_norm": 0.47601237893104553,
"learning_rate": 7.792612787618714e-05,
"loss": 0.0276,
"step": 1730
},
{
"grad_norm": 0.475136399269104,
"learning_rate": 7.765121197131009e-05,
"loss": 0.0312,
"step": 1740
},
{
"grad_norm": 0.4572272300720215,
"learning_rate": 7.737508651284391e-05,
"loss": 0.0274,
"step": 1750
},
{
"grad_norm": 0.4734186828136444,
"learning_rate": 7.709776357941069e-05,
"loss": 0.0301,
"step": 1760
},
{
"grad_norm": 0.5564247965812683,
"learning_rate": 7.681925530201392e-05,
"loss": 0.0289,
"step": 1770
},
{
"grad_norm": 0.513393759727478,
"learning_rate": 7.65395738635079e-05,
"loss": 0.0292,
"step": 1780
},
{
"grad_norm": 0.4534357488155365,
"learning_rate": 7.62587314980648e-05,
"loss": 0.0296,
"step": 1790
},
{
"grad_norm": 0.4454066753387451,
"learning_rate": 7.597674049063947e-05,
"loss": 0.0274,
"step": 1800
},
{
"grad_norm": 0.46227017045021057,
"learning_rate": 7.569361317643211e-05,
"loss": 0.0273,
"step": 1810
},
{
"grad_norm": 0.38878199458122253,
"learning_rate": 7.540936194034865e-05,
"loss": 0.0287,
"step": 1820
},
{
"grad_norm": 0.5070212483406067,
"learning_rate": 7.512399921645901e-05,
"loss": 0.0261,
"step": 1830
},
{
"grad_norm": 0.41168951988220215,
"learning_rate": 7.483753748745317e-05,
"loss": 0.0265,
"step": 1840
},
{
"grad_norm": 0.43798619508743286,
"learning_rate": 7.454998928409516e-05,
"loss": 0.028,
"step": 1850
},
{
"grad_norm": 0.5544193387031555,
"learning_rate": 7.426136718467493e-05,
"loss": 0.0264,
"step": 1860
},
{
"grad_norm": 0.6372482180595398,
"learning_rate": 7.397168381445812e-05,
"loss": 0.0295,
"step": 1870
},
{
"grad_norm": 0.41932424902915955,
"learning_rate": 7.368095184513377e-05,
"loss": 0.0298,
"step": 1880
},
{
"grad_norm": 0.4176802933216095,
"learning_rate": 7.338918399426005e-05,
"loss": 0.0259,
"step": 1890
},
{
"grad_norm": 0.47394803166389465,
"learning_rate": 7.309639302470801e-05,
"loss": 0.0272,
"step": 1900
},
{
"grad_norm": 0.5122660398483276,
"learning_rate": 7.280259174410312e-05,
"loss": 0.031,
"step": 1910
},
{
"grad_norm": 0.5513008236885071,
"learning_rate": 7.250779300426517e-05,
"loss": 0.0261,
"step": 1920
},
{
"grad_norm": 0.39191436767578125,
"learning_rate": 7.22120097006461e-05,
"loss": 0.0287,
"step": 1930
},
{
"grad_norm": 0.48576945066452026,
"learning_rate": 7.191525477176577e-05,
"loss": 0.0262,
"step": 1940
},
{
"grad_norm": 0.47672203183174133,
"learning_rate": 7.161754119864616e-05,
"loss": 0.0308,
"step": 1950
},
{
"grad_norm": 0.5489500164985657,
"learning_rate": 7.131888200424339e-05,
"loss": 0.0304,
"step": 1960
},
{
"grad_norm": 0.5475249290466309,
"learning_rate": 7.101929025287816e-05,
"loss": 0.0284,
"step": 1970
},
{
"grad_norm": 0.3720989227294922,
"learning_rate": 7.071877904966423e-05,
"loss": 0.0268,
"step": 1980
},
{
"grad_norm": 0.45520398020744324,
"learning_rate": 7.04173615399351e-05,
"loss": 0.0245,
"step": 1990
},
{
"grad_norm": 0.4413450360298157,
"learning_rate": 7.011505090866913e-05,
"loss": 0.0267,
"step": 2000
}
],
"logging_steps": 10,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 96,
"trial_name": null,
"trial_params": null
}