{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 4824,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0020730759264058047,
"grad_norm": 0.9091788530349731,
"learning_rate": 0.0001996268656716418,
"loss": 2.3429805755615236,
"step": 10
},
{
"epoch": 0.004146151852811609,
"grad_norm": 1.014137625694275,
"learning_rate": 0.000199212271973466,
"loss": 2.083993911743164,
"step": 20
},
{
"epoch": 0.006219227779217414,
"grad_norm": 0.9813042283058167,
"learning_rate": 0.00019879767827529022,
"loss": 2.0440767288208006,
"step": 30
},
{
"epoch": 0.008292303705623219,
"grad_norm": 0.8600534200668335,
"learning_rate": 0.00019838308457711444,
"loss": 2.0040666580200197,
"step": 40
},
{
"epoch": 0.010365379632029024,
"grad_norm": 0.9713129997253418,
"learning_rate": 0.00019796849087893865,
"loss": 1.8833141326904297,
"step": 50
},
{
"epoch": 0.012438455558434827,
"grad_norm": 0.8239923119544983,
"learning_rate": 0.00019755389718076287,
"loss": 1.9487165451049804,
"step": 60
},
{
"epoch": 0.014511531484840632,
"grad_norm": 0.970037579536438,
"learning_rate": 0.00019713930348258708,
"loss": 1.8362905502319335,
"step": 70
},
{
"epoch": 0.016584607411246437,
"grad_norm": 0.8923717141151428,
"learning_rate": 0.0001967247097844113,
"loss": 1.8545101165771485,
"step": 80
},
{
"epoch": 0.01865768333765224,
"grad_norm": 1.0183212757110596,
"learning_rate": 0.0001963101160862355,
"loss": 1.8110658645629882,
"step": 90
},
{
"epoch": 0.020730759264058048,
"grad_norm": 0.8115273118019104,
"learning_rate": 0.0001958955223880597,
"loss": 1.8285026550292969,
"step": 100
},
{
"epoch": 0.02280383519046385,
"grad_norm": 0.9811990857124329,
"learning_rate": 0.0001954809286898839,
"loss": 1.8877086639404297,
"step": 110
},
{
"epoch": 0.024876911116869654,
"grad_norm": 0.9637799859046936,
"learning_rate": 0.00019506633499170815,
"loss": 1.767138671875,
"step": 120
},
{
"epoch": 0.02694998704327546,
"grad_norm": 0.8544086217880249,
"learning_rate": 0.00019465174129353234,
"loss": 1.8863643646240233,
"step": 130
},
{
"epoch": 0.029023062969681265,
"grad_norm": 0.8133947253227234,
"learning_rate": 0.00019423714759535655,
"loss": 1.763634490966797,
"step": 140
},
{
"epoch": 0.031096138896087068,
"grad_norm": 0.8301745057106018,
"learning_rate": 0.00019382255389718077,
"loss": 1.7863468170166015,
"step": 150
},
{
"epoch": 0.033169214822492875,
"grad_norm": 1.0789601802825928,
"learning_rate": 0.00019340796019900498,
"loss": 1.7523063659667968,
"step": 160
},
{
"epoch": 0.03524229074889868,
"grad_norm": 0.9485411047935486,
"learning_rate": 0.0001929933665008292,
"loss": 1.782190704345703,
"step": 170
},
{
"epoch": 0.03731536667530448,
"grad_norm": 0.9296794533729553,
"learning_rate": 0.0001925787728026534,
"loss": 1.7600175857543945,
"step": 180
},
{
"epoch": 0.03938844260171029,
"grad_norm": 0.8409883379936218,
"learning_rate": 0.00019216417910447763,
"loss": 1.7022655487060547,
"step": 190
},
{
"epoch": 0.041461518528116095,
"grad_norm": 1.083732008934021,
"learning_rate": 0.00019174958540630184,
"loss": 1.798708152770996,
"step": 200
},
{
"epoch": 0.043534594454521895,
"grad_norm": 0.9614971280097961,
"learning_rate": 0.00019133499170812605,
"loss": 1.755731201171875,
"step": 210
},
{
"epoch": 0.0456076703809277,
"grad_norm": 0.8356192708015442,
"learning_rate": 0.00019092039800995024,
"loss": 1.7372133255004882,
"step": 220
},
{
"epoch": 0.04768074630733351,
"grad_norm": 0.8859673142433167,
"learning_rate": 0.00019050580431177448,
"loss": 1.709942626953125,
"step": 230
},
{
"epoch": 0.04975382223373931,
"grad_norm": 0.8417310118675232,
"learning_rate": 0.0001900912106135987,
"loss": 1.7145137786865234,
"step": 240
},
{
"epoch": 0.051826898160145116,
"grad_norm": 1.0017660856246948,
"learning_rate": 0.00018967661691542289,
"loss": 1.7842830657958983,
"step": 250
},
{
"epoch": 0.05389997408655092,
"grad_norm": 0.8389541506767273,
"learning_rate": 0.0001892620232172471,
"loss": 1.696818733215332,
"step": 260
},
{
"epoch": 0.05597305001295672,
"grad_norm": 0.9801367521286011,
"learning_rate": 0.00018884742951907134,
"loss": 1.6806442260742187,
"step": 270
},
{
"epoch": 0.05804612593936253,
"grad_norm": 1.0814357995986938,
"learning_rate": 0.00018843283582089553,
"loss": 1.7625520706176758,
"step": 280
},
{
"epoch": 0.060119201865768336,
"grad_norm": 1.0982401371002197,
"learning_rate": 0.00018801824212271974,
"loss": 1.7068971633911132,
"step": 290
},
{
"epoch": 0.062192277792174136,
"grad_norm": 0.8793812394142151,
"learning_rate": 0.00018760364842454396,
"loss": 1.6139469146728516,
"step": 300
},
{
"epoch": 0.06426535371857994,
"grad_norm": 1.1414549350738525,
"learning_rate": 0.00018718905472636817,
"loss": 1.7397516250610352,
"step": 310
},
{
"epoch": 0.06633842964498575,
"grad_norm": 0.8776243329048157,
"learning_rate": 0.00018677446102819239,
"loss": 1.7217279434204102,
"step": 320
},
{
"epoch": 0.06841150557139156,
"grad_norm": 1.0756258964538574,
"learning_rate": 0.0001863598673300166,
"loss": 1.757959747314453,
"step": 330
},
{
"epoch": 0.07048458149779736,
"grad_norm": 0.9709790349006653,
"learning_rate": 0.0001859452736318408,
"loss": 1.7738918304443358,
"step": 340
},
{
"epoch": 0.07255765742420316,
"grad_norm": 0.852245569229126,
"learning_rate": 0.00018553067993366503,
"loss": 1.718656349182129,
"step": 350
},
{
"epoch": 0.07463073335060896,
"grad_norm": 0.9301595091819763,
"learning_rate": 0.00018511608623548924,
"loss": 1.6480951309204102,
"step": 360
},
{
"epoch": 0.07670380927701477,
"grad_norm": 0.9946972131729126,
"learning_rate": 0.00018470149253731343,
"loss": 1.6211790084838866,
"step": 370
},
{
"epoch": 0.07877688520342058,
"grad_norm": 0.9088757038116455,
"learning_rate": 0.00018428689883913764,
"loss": 1.6915576934814454,
"step": 380
},
{
"epoch": 0.08084996112982638,
"grad_norm": 1.0271170139312744,
"learning_rate": 0.0001838723051409619,
"loss": 1.7419628143310546,
"step": 390
},
{
"epoch": 0.08292303705623219,
"grad_norm": 0.850945770740509,
"learning_rate": 0.00018345771144278607,
"loss": 1.7296173095703125,
"step": 400
},
{
"epoch": 0.08499611298263798,
"grad_norm": 0.8624365925788879,
"learning_rate": 0.0001830431177446103,
"loss": 1.6714574813842773,
"step": 410
},
{
"epoch": 0.08706918890904379,
"grad_norm": 0.9081392288208008,
"learning_rate": 0.0001826285240464345,
"loss": 1.7895374298095703,
"step": 420
},
{
"epoch": 0.0891422648354496,
"grad_norm": 0.8685234189033508,
"learning_rate": 0.00018221393034825872,
"loss": 1.6459678649902343,
"step": 430
},
{
"epoch": 0.0912153407618554,
"grad_norm": 0.9042524695396423,
"learning_rate": 0.00018179933665008293,
"loss": 1.652775764465332,
"step": 440
},
{
"epoch": 0.09328841668826121,
"grad_norm": 0.9973405003547668,
"learning_rate": 0.00018138474295190715,
"loss": 1.7169090270996095,
"step": 450
},
{
"epoch": 0.09536149261466702,
"grad_norm": 0.8893020749092102,
"learning_rate": 0.00018097014925373136,
"loss": 1.6703657150268554,
"step": 460
},
{
"epoch": 0.09743456854107281,
"grad_norm": 0.8917216658592224,
"learning_rate": 0.00018055555555555557,
"loss": 1.7180200576782227,
"step": 470
},
{
"epoch": 0.09950764446747862,
"grad_norm": 0.9904600381851196,
"learning_rate": 0.0001801409618573798,
"loss": 1.7480133056640625,
"step": 480
},
{
"epoch": 0.10158072039388442,
"grad_norm": 0.8615960478782654,
"learning_rate": 0.00017972636815920398,
"loss": 1.6338689804077149,
"step": 490
},
{
"epoch": 0.10365379632029023,
"grad_norm": 0.8724789619445801,
"learning_rate": 0.0001793117744610282,
"loss": 1.7188920974731445,
"step": 500
},
{
"epoch": 0.10572687224669604,
"grad_norm": 0.9289916753768921,
"learning_rate": 0.00017889718076285243,
"loss": 1.711186408996582,
"step": 510
},
{
"epoch": 0.10779994817310184,
"grad_norm": 0.9007911682128906,
"learning_rate": 0.00017848258706467662,
"loss": 1.691291046142578,
"step": 520
},
{
"epoch": 0.10987302409950764,
"grad_norm": 0.9181972742080688,
"learning_rate": 0.00017806799336650083,
"loss": 1.7039215087890625,
"step": 530
},
{
"epoch": 0.11194610002591344,
"grad_norm": 0.9199845194816589,
"learning_rate": 0.00017765339966832505,
"loss": 1.6865100860595703,
"step": 540
},
{
"epoch": 0.11401917595231925,
"grad_norm": 0.986028254032135,
"learning_rate": 0.00017723880597014926,
"loss": 1.6208017349243165,
"step": 550
},
{
"epoch": 0.11609225187872506,
"grad_norm": 1.0031794309616089,
"learning_rate": 0.00017682421227197348,
"loss": 1.7129045486450196,
"step": 560
},
{
"epoch": 0.11816532780513087,
"grad_norm": 0.9297766089439392,
"learning_rate": 0.0001764096185737977,
"loss": 1.6912664413452148,
"step": 570
},
{
"epoch": 0.12023840373153667,
"grad_norm": 0.9929037690162659,
"learning_rate": 0.0001759950248756219,
"loss": 1.5491928100585937,
"step": 580
},
{
"epoch": 0.12231147965794248,
"grad_norm": 0.9651626944541931,
"learning_rate": 0.00017558043117744612,
"loss": 1.611497688293457,
"step": 590
},
{
"epoch": 0.12438455558434827,
"grad_norm": 0.9021316766738892,
"learning_rate": 0.00017516583747927033,
"loss": 1.67208251953125,
"step": 600
},
{
"epoch": 0.1264576315107541,
"grad_norm": 1.012869954109192,
"learning_rate": 0.00017475124378109452,
"loss": 1.6646055221557616,
"step": 610
},
{
"epoch": 0.12853070743715989,
"grad_norm": 0.9233406186103821,
"learning_rate": 0.00017433665008291876,
"loss": 1.7033281326293945,
"step": 620
},
{
"epoch": 0.13060378336356568,
"grad_norm": 0.9368692636489868,
"learning_rate": 0.00017392205638474298,
"loss": 1.705314826965332,
"step": 630
},
{
"epoch": 0.1326768592899715,
"grad_norm": 1.06294846534729,
"learning_rate": 0.00017350746268656716,
"loss": 1.637823486328125,
"step": 640
},
{
"epoch": 0.1347499352163773,
"grad_norm": 1.02590811252594,
"learning_rate": 0.00017309286898839138,
"loss": 1.5813873291015625,
"step": 650
},
{
"epoch": 0.1368230111427831,
"grad_norm": 1.0086368322372437,
"learning_rate": 0.00017267827529021562,
"loss": 1.670191764831543,
"step": 660
},
{
"epoch": 0.1388960870691889,
"grad_norm": 0.9091687798500061,
"learning_rate": 0.0001722636815920398,
"loss": 1.6596086502075196,
"step": 670
},
{
"epoch": 0.14096916299559473,
"grad_norm": 0.9156310558319092,
"learning_rate": 0.00017184908789386402,
"loss": 1.636850929260254,
"step": 680
},
{
"epoch": 0.14304223892200052,
"grad_norm": 0.920555055141449,
"learning_rate": 0.00017143449419568824,
"loss": 1.6622644424438477,
"step": 690
},
{
"epoch": 0.1451153148484063,
"grad_norm": 0.8024299740791321,
"learning_rate": 0.00017101990049751245,
"loss": 1.6317838668823241,
"step": 700
},
{
"epoch": 0.14718839077481213,
"grad_norm": 0.9355529546737671,
"learning_rate": 0.00017060530679933667,
"loss": 1.6054956436157226,
"step": 710
},
{
"epoch": 0.14926146670121793,
"grad_norm": 0.9528997540473938,
"learning_rate": 0.00017019071310116088,
"loss": 1.5931829452514648,
"step": 720
},
{
"epoch": 0.15133454262762375,
"grad_norm": 0.8880454301834106,
"learning_rate": 0.00016977611940298507,
"loss": 1.7470476150512695,
"step": 730
},
{
"epoch": 0.15340761855402954,
"grad_norm": 0.9789003133773804,
"learning_rate": 0.0001693615257048093,
"loss": 1.5642525672912597,
"step": 740
},
{
"epoch": 0.15548069448043533,
"grad_norm": 0.9722698330879211,
"learning_rate": 0.00016894693200663352,
"loss": 1.5884984016418457,
"step": 750
},
{
"epoch": 0.15755377040684115,
"grad_norm": 0.9086152911186218,
"learning_rate": 0.0001685323383084577,
"loss": 1.699564552307129,
"step": 760
},
{
"epoch": 0.15962684633324695,
"grad_norm": 0.9152286648750305,
"learning_rate": 0.00016811774461028192,
"loss": 1.6350950241088866,
"step": 770
},
{
"epoch": 0.16169992225965277,
"grad_norm": 0.999038815498352,
"learning_rate": 0.00016770315091210617,
"loss": 1.6626720428466797,
"step": 780
},
{
"epoch": 0.16377299818605856,
"grad_norm": 0.9354605674743652,
"learning_rate": 0.00016728855721393035,
"loss": 1.6083240509033203,
"step": 790
},
{
"epoch": 0.16584607411246438,
"grad_norm": 0.9644563794136047,
"learning_rate": 0.00016687396351575457,
"loss": 1.584075927734375,
"step": 800
},
{
"epoch": 0.16791915003887017,
"grad_norm": 1.0484062433242798,
"learning_rate": 0.00016645936981757878,
"loss": 1.5805594444274902,
"step": 810
},
{
"epoch": 0.16999222596527597,
"grad_norm": 0.9263212084770203,
"learning_rate": 0.000166044776119403,
"loss": 1.5858475685119628,
"step": 820
},
{
"epoch": 0.1720653018916818,
"grad_norm": 0.8885413408279419,
"learning_rate": 0.0001656301824212272,
"loss": 1.6233335494995118,
"step": 830
},
{
"epoch": 0.17413837781808758,
"grad_norm": 1.003126621246338,
"learning_rate": 0.0001652155887230514,
"loss": 1.5958114624023438,
"step": 840
},
{
"epoch": 0.1762114537444934,
"grad_norm": 0.9760786294937134,
"learning_rate": 0.00016480099502487564,
"loss": 1.5831708908081055,
"step": 850
},
{
"epoch": 0.1782845296708992,
"grad_norm": 0.9633323550224304,
"learning_rate": 0.00016438640132669985,
"loss": 1.6611177444458007,
"step": 860
},
{
"epoch": 0.180357605597305,
"grad_norm": 0.9381558895111084,
"learning_rate": 0.00016397180762852404,
"loss": 1.6489791870117188,
"step": 870
},
{
"epoch": 0.1824306815237108,
"grad_norm": 0.9603164196014404,
"learning_rate": 0.00016355721393034826,
"loss": 1.5972728729248047,
"step": 880
},
{
"epoch": 0.1845037574501166,
"grad_norm": 1.02583909034729,
"learning_rate": 0.00016314262023217247,
"loss": 1.5938024520874023,
"step": 890
},
{
"epoch": 0.18657683337652242,
"grad_norm": 1.0646976232528687,
"learning_rate": 0.00016272802653399668,
"loss": 1.6365236282348632,
"step": 900
},
{
"epoch": 0.18864990930292821,
"grad_norm": 1.0200999975204468,
"learning_rate": 0.0001623134328358209,
"loss": 1.6409852981567383,
"step": 910
},
{
"epoch": 0.19072298522933404,
"grad_norm": 0.911371648311615,
"learning_rate": 0.0001618988391376451,
"loss": 1.647599220275879,
"step": 920
},
{
"epoch": 0.19279606115573983,
"grad_norm": 1.0000650882720947,
"learning_rate": 0.00016148424543946933,
"loss": 1.6000936508178711,
"step": 930
},
{
"epoch": 0.19486913708214562,
"grad_norm": 0.9260374307632446,
"learning_rate": 0.00016106965174129354,
"loss": 1.6430105209350585,
"step": 940
},
{
"epoch": 0.19694221300855144,
"grad_norm": 0.9303072690963745,
"learning_rate": 0.00016065505804311776,
"loss": 1.650974655151367,
"step": 950
},
{
"epoch": 0.19901528893495724,
"grad_norm": 0.9936369061470032,
"learning_rate": 0.00016024046434494194,
"loss": 1.6197996139526367,
"step": 960
},
{
"epoch": 0.20108836486136306,
"grad_norm": 1.029943823814392,
"learning_rate": 0.00015982587064676618,
"loss": 1.6034399032592774,
"step": 970
},
{
"epoch": 0.20316144078776885,
"grad_norm": 0.9719572067260742,
"learning_rate": 0.0001594112769485904,
"loss": 1.6465234756469727,
"step": 980
},
{
"epoch": 0.20523451671417467,
"grad_norm": 0.950306236743927,
"learning_rate": 0.00015899668325041459,
"loss": 1.5650517463684082,
"step": 990
},
{
"epoch": 0.20730759264058046,
"grad_norm": 0.9367968440055847,
"learning_rate": 0.0001585820895522388,
"loss": 1.5462970733642578,
"step": 1000
},
{
"epoch": 0.20938066856698626,
"grad_norm": 0.9189471006393433,
"learning_rate": 0.00015816749585406304,
"loss": 1.5616193771362306,
"step": 1010
},
{
"epoch": 0.21145374449339208,
"grad_norm": 0.9336299896240234,
"learning_rate": 0.00015775290215588723,
"loss": 1.5709722518920899,
"step": 1020
},
{
"epoch": 0.21352682041979787,
"grad_norm": 0.842507004737854,
"learning_rate": 0.00015733830845771144,
"loss": 1.5571642875671388,
"step": 1030
},
{
"epoch": 0.2155998963462037,
"grad_norm": 1.0135430097579956,
"learning_rate": 0.00015692371475953566,
"loss": 1.5800284385681151,
"step": 1040
},
{
"epoch": 0.21767297227260948,
"grad_norm": 1.0689647197723389,
"learning_rate": 0.00015650912106135987,
"loss": 1.621286392211914,
"step": 1050
},
{
"epoch": 0.21974604819901528,
"grad_norm": 0.9640220999717712,
"learning_rate": 0.0001560945273631841,
"loss": 1.6154445648193358,
"step": 1060
},
{
"epoch": 0.2218191241254211,
"grad_norm": 1.0506129264831543,
"learning_rate": 0.0001556799336650083,
"loss": 1.6239688873291016,
"step": 1070
},
{
"epoch": 0.2238922000518269,
"grad_norm": 0.9619534015655518,
"learning_rate": 0.0001552653399668325,
"loss": 1.6208015441894532,
"step": 1080
},
{
"epoch": 0.2259652759782327,
"grad_norm": 0.9947149753570557,
"learning_rate": 0.00015485074626865673,
"loss": 1.559681224822998,
"step": 1090
},
{
"epoch": 0.2280383519046385,
"grad_norm": 0.9842751622200012,
"learning_rate": 0.00015443615257048094,
"loss": 1.593973731994629,
"step": 1100
},
{
"epoch": 0.23011142783104432,
"grad_norm": 1.0848839282989502,
"learning_rate": 0.00015402155887230513,
"loss": 1.6429691314697266,
"step": 1110
},
{
"epoch": 0.23218450375745012,
"grad_norm": 0.9902291297912598,
"learning_rate": 0.00015360696517412935,
"loss": 1.560030174255371,
"step": 1120
},
{
"epoch": 0.2342575796838559,
"grad_norm": 1.0478103160858154,
"learning_rate": 0.0001531923714759536,
"loss": 1.5744388580322266,
"step": 1130
},
{
"epoch": 0.23633065561026173,
"grad_norm": 1.0551514625549316,
"learning_rate": 0.00015277777777777777,
"loss": 1.5343515396118164,
"step": 1140
},
{
"epoch": 0.23840373153666752,
"grad_norm": 0.9508061408996582,
"learning_rate": 0.000152363184079602,
"loss": 1.64904842376709,
"step": 1150
},
{
"epoch": 0.24047680746307334,
"grad_norm": 1.0218685865402222,
"learning_rate": 0.0001519485903814262,
"loss": 1.598756980895996,
"step": 1160
},
{
"epoch": 0.24254988338947914,
"grad_norm": 1.0358250141143799,
"learning_rate": 0.00015153399668325042,
"loss": 1.5701421737670898,
"step": 1170
},
{
"epoch": 0.24462295931588496,
"grad_norm": 0.9931305646896362,
"learning_rate": 0.00015111940298507463,
"loss": 1.6070541381835937,
"step": 1180
},
{
"epoch": 0.24669603524229075,
"grad_norm": 0.9626891016960144,
"learning_rate": 0.00015070480928689885,
"loss": 1.4579319953918457,
"step": 1190
},
{
"epoch": 0.24876911116869654,
"grad_norm": 0.9033750295639038,
"learning_rate": 0.00015029021558872306,
"loss": 1.5091530799865722,
"step": 1200
},
{
"epoch": 0.25084218709510236,
"grad_norm": 0.9851518869400024,
"learning_rate": 0.00014987562189054728,
"loss": 1.5484381675720216,
"step": 1210
},
{
"epoch": 0.2529152630215082,
"grad_norm": 1.124658226966858,
"learning_rate": 0.0001494610281923715,
"loss": 1.6561983108520508,
"step": 1220
},
{
"epoch": 0.25498833894791395,
"grad_norm": 1.1919487714767456,
"learning_rate": 0.00014904643449419568,
"loss": 1.5920031547546387,
"step": 1230
},
{
"epoch": 0.25706141487431977,
"grad_norm": 1.0409331321716309,
"learning_rate": 0.00014863184079601992,
"loss": 1.5242692947387695,
"step": 1240
},
{
"epoch": 0.2591344908007256,
"grad_norm": 0.9168948531150818,
"learning_rate": 0.00014821724709784413,
"loss": 1.523965549468994,
"step": 1250
},
{
"epoch": 0.26120756672713136,
"grad_norm": 0.9807767868041992,
"learning_rate": 0.00014780265339966832,
"loss": 1.555476188659668,
"step": 1260
},
{
"epoch": 0.2632806426535372,
"grad_norm": 0.905844509601593,
"learning_rate": 0.00014738805970149253,
"loss": 1.526420497894287,
"step": 1270
},
{
"epoch": 0.265353718579943,
"grad_norm": 0.9614242315292358,
"learning_rate": 0.00014697346600331675,
"loss": 1.5558547973632812,
"step": 1280
},
{
"epoch": 0.2674267945063488,
"grad_norm": 0.8911798596382141,
"learning_rate": 0.00014655887230514096,
"loss": 1.5765740394592285,
"step": 1290
},
{
"epoch": 0.2694998704327546,
"grad_norm": 0.9637242555618286,
"learning_rate": 0.00014614427860696518,
"loss": 1.5548508644104004,
"step": 1300
},
{
"epoch": 0.2715729463591604,
"grad_norm": 1.0161306858062744,
"learning_rate": 0.0001457296849087894,
"loss": 1.5633068084716797,
"step": 1310
},
{
"epoch": 0.2736460222855662,
"grad_norm": 0.9732279181480408,
"learning_rate": 0.0001453150912106136,
"loss": 1.5484658241271974,
"step": 1320
},
{
"epoch": 0.275719098211972,
"grad_norm": 1.1542972326278687,
"learning_rate": 0.00014490049751243782,
"loss": 1.661750030517578,
"step": 1330
},
{
"epoch": 0.2777921741383778,
"grad_norm": 0.9862635731697083,
"learning_rate": 0.00014448590381426204,
"loss": 1.6615022659301757,
"step": 1340
},
{
"epoch": 0.27986525006478363,
"grad_norm": 0.9343971014022827,
"learning_rate": 0.00014407131011608622,
"loss": 1.5562618255615235,
"step": 1350
},
{
"epoch": 0.28193832599118945,
"grad_norm": 0.9783313870429993,
"learning_rate": 0.00014365671641791046,
"loss": 1.6128976821899415,
"step": 1360
},
{
"epoch": 0.2840114019175952,
"grad_norm": 0.9503439664840698,
"learning_rate": 0.00014324212271973468,
"loss": 1.6037235260009766,
"step": 1370
},
{
"epoch": 0.28608447784400104,
"grad_norm": 1.033592700958252,
"learning_rate": 0.00014282752902155887,
"loss": 1.4874424934387207,
"step": 1380
},
{
"epoch": 0.28815755377040686,
"grad_norm": 1.0737738609313965,
"learning_rate": 0.00014241293532338308,
"loss": 1.5970182418823242,
"step": 1390
},
{
"epoch": 0.2902306296968126,
"grad_norm": 1.0103057622909546,
"learning_rate": 0.00014199834162520732,
"loss": 1.5805618286132812,
"step": 1400
},
{
"epoch": 0.29230370562321845,
"grad_norm": 0.9714734554290771,
"learning_rate": 0.0001415837479270315,
"loss": 1.4544689178466796,
"step": 1410
},
{
"epoch": 0.29437678154962427,
"grad_norm": 1.0983737707138062,
"learning_rate": 0.00014116915422885572,
"loss": 1.4936376571655274,
"step": 1420
},
{
"epoch": 0.29644985747603003,
"grad_norm": 1.0537210702896118,
"learning_rate": 0.00014075456053067994,
"loss": 1.531502628326416,
"step": 1430
},
{
"epoch": 0.29852293340243585,
"grad_norm": 1.0152987241744995,
"learning_rate": 0.00014033996683250415,
"loss": 1.5243349075317383,
"step": 1440
},
{
"epoch": 0.3005960093288417,
"grad_norm": 0.9748072028160095,
"learning_rate": 0.00013992537313432837,
"loss": 1.552549457550049,
"step": 1450
},
{
"epoch": 0.3026690852552475,
"grad_norm": 0.9957873225212097,
"learning_rate": 0.00013951077943615258,
"loss": 1.5652660369873046,
"step": 1460
},
{
"epoch": 0.30474216118165326,
"grad_norm": 1.0181517601013184,
"learning_rate": 0.00013909618573797677,
"loss": 1.5933172225952148,
"step": 1470
},
{
"epoch": 0.3068152371080591,
"grad_norm": 1.013455867767334,
"learning_rate": 0.000138681592039801,
"loss": 1.6291587829589844,
"step": 1480
},
{
"epoch": 0.3088883130344649,
"grad_norm": 1.0690240859985352,
"learning_rate": 0.00013826699834162522,
"loss": 1.5185072898864747,
"step": 1490
},
{
"epoch": 0.31096138896087067,
"grad_norm": 1.0201985836029053,
"learning_rate": 0.0001378524046434494,
"loss": 1.6345909118652344,
"step": 1500
},
{
"epoch": 0.3130344648872765,
"grad_norm": 1.205460548400879,
"learning_rate": 0.00013743781094527363,
"loss": 1.4923598289489746,
"step": 1510
},
{
"epoch": 0.3151075408136823,
"grad_norm": 1.0943695306777954,
"learning_rate": 0.00013702321724709787,
"loss": 1.551824951171875,
"step": 1520
},
{
"epoch": 0.31718061674008813,
"grad_norm": 0.9574096202850342,
"learning_rate": 0.00013660862354892205,
"loss": 1.563786506652832,
"step": 1530
},
{
"epoch": 0.3192536926664939,
"grad_norm": 1.01716947555542,
"learning_rate": 0.00013619402985074627,
"loss": 1.5811141014099122,
"step": 1540
},
{
"epoch": 0.3213267685928997,
"grad_norm": 1.0344488620758057,
"learning_rate": 0.00013577943615257048,
"loss": 1.4693257331848144,
"step": 1550
},
{
"epoch": 0.32339984451930553,
"grad_norm": 1.1056243181228638,
"learning_rate": 0.0001353648424543947,
"loss": 1.5346773147583008,
"step": 1560
},
{
"epoch": 0.3254729204457113,
"grad_norm": 1.1026792526245117,
"learning_rate": 0.0001349502487562189,
"loss": 1.4362568855285645,
"step": 1570
},
{
"epoch": 0.3275459963721171,
"grad_norm": 1.0443516969680786,
"learning_rate": 0.00013453565505804313,
"loss": 1.5090986251831056,
"step": 1580
},
{
"epoch": 0.32961907229852294,
"grad_norm": 1.006152629852295,
"learning_rate": 0.00013412106135986734,
"loss": 1.5561832427978515,
"step": 1590
},
{
"epoch": 0.33169214822492876,
"grad_norm": 1.0812287330627441,
"learning_rate": 0.00013370646766169155,
"loss": 1.5622328758239745,
"step": 1600
},
{
"epoch": 0.3337652241513345,
"grad_norm": 1.0125651359558105,
"learning_rate": 0.00013329187396351577,
"loss": 1.5935446739196777,
"step": 1610
},
{
"epoch": 0.33583830007774035,
"grad_norm": 1.0166149139404297,
"learning_rate": 0.00013287728026533996,
"loss": 1.5660451889038085,
"step": 1620
},
{
"epoch": 0.33791137600414617,
"grad_norm": 1.0273667573928833,
"learning_rate": 0.0001324626865671642,
"loss": 1.556844139099121,
"step": 1630
},
{
"epoch": 0.33998445193055193,
"grad_norm": 1.0821034908294678,
"learning_rate": 0.0001320480928689884,
"loss": 1.5019357681274415,
"step": 1640
},
{
"epoch": 0.34205752785695775,
"grad_norm": 1.082353949546814,
"learning_rate": 0.0001316334991708126,
"loss": 1.5400747299194335,
"step": 1650
},
{
"epoch": 0.3441306037833636,
"grad_norm": 1.059366226196289,
"learning_rate": 0.00013121890547263681,
"loss": 1.5414251327514648,
"step": 1660
},
{
"epoch": 0.3462036797097694,
"grad_norm": 0.996767520904541,
"learning_rate": 0.00013080431177446103,
"loss": 1.5605695724487305,
"step": 1670
},
{
"epoch": 0.34827675563617516,
"grad_norm": 0.9471081495285034,
"learning_rate": 0.00013038971807628524,
"loss": 1.550748634338379,
"step": 1680
},
{
"epoch": 0.350349831562581,
"grad_norm": 1.0039408206939697,
"learning_rate": 0.00012997512437810946,
"loss": 1.5467573165893556,
"step": 1690
},
{
"epoch": 0.3524229074889868,
"grad_norm": 1.1023355722427368,
"learning_rate": 0.00012956053067993367,
"loss": 1.6264640808105468,
"step": 1700
},
{
"epoch": 0.35449598341539257,
"grad_norm": 0.98204505443573,
"learning_rate": 0.00012914593698175789,
"loss": 1.5399457931518554,
"step": 1710
},
{
"epoch": 0.3565690593417984,
"grad_norm": 1.1038545370101929,
"learning_rate": 0.0001287313432835821,
"loss": 1.5933405876159668,
"step": 1720
},
{
"epoch": 0.3586421352682042,
"grad_norm": 1.0126818418502808,
"learning_rate": 0.00012831674958540631,
"loss": 1.5504828453063966,
"step": 1730
},
{
"epoch": 0.36071521119461,
"grad_norm": 1.2247607707977295,
"learning_rate": 0.0001279021558872305,
"loss": 1.5725330352783202,
"step": 1740
},
{
"epoch": 0.3627882871210158,
"grad_norm": 1.027273416519165,
"learning_rate": 0.00012748756218905474,
"loss": 1.5412575721740722,
"step": 1750
},
{
"epoch": 0.3648613630474216,
"grad_norm": 1.1408429145812988,
"learning_rate": 0.00012707296849087896,
"loss": 1.5795598030090332,
"step": 1760
},
{
"epoch": 0.36693443897382744,
"grad_norm": 0.9655275344848633,
"learning_rate": 0.00012665837479270315,
"loss": 1.5225658416748047,
"step": 1770
},
{
"epoch": 0.3690075149002332,
"grad_norm": 1.0792255401611328,
"learning_rate": 0.00012624378109452736,
"loss": 1.4800280570983886,
"step": 1780
},
{
"epoch": 0.371080590826639,
"grad_norm": 1.1403069496154785,
"learning_rate": 0.0001258291873963516,
"loss": 1.5279869079589843,
"step": 1790
},
{
"epoch": 0.37315366675304484,
"grad_norm": 0.9717866778373718,
"learning_rate": 0.0001254145936981758,
"loss": 1.5097025871276855,
"step": 1800
},
{
"epoch": 0.3752267426794506,
"grad_norm": 1.2142555713653564,
"learning_rate": 0.000125,
"loss": 1.5517022132873535,
"step": 1810
},
{
"epoch": 0.37729981860585643,
"grad_norm": 1.0863559246063232,
"learning_rate": 0.00012458540630182422,
"loss": 1.6459320068359375,
"step": 1820
},
{
"epoch": 0.37937289453226225,
"grad_norm": 0.935858428478241,
"learning_rate": 0.00012417081260364843,
"loss": 1.4541780471801757,
"step": 1830
},
{
"epoch": 0.38144597045866807,
"grad_norm": 1.0745552778244019,
"learning_rate": 0.00012375621890547265,
"loss": 1.5062150001525878,
"step": 1840
},
{
"epoch": 0.38351904638507384,
"grad_norm": 0.98747718334198,
"learning_rate": 0.00012334162520729686,
"loss": 1.450760841369629,
"step": 1850
},
{
"epoch": 0.38559212231147966,
"grad_norm": 1.017136573791504,
"learning_rate": 0.00012292703150912105,
"loss": 1.5897736549377441,
"step": 1860
},
{
"epoch": 0.3876651982378855,
"grad_norm": 1.0018948316574097,
"learning_rate": 0.0001225124378109453,
"loss": 1.5147670745849608,
"step": 1870
},
{
"epoch": 0.38973827416429124,
"grad_norm": 1.0614641904830933,
"learning_rate": 0.0001220978441127695,
"loss": 1.558132266998291,
"step": 1880
},
{
"epoch": 0.39181135009069706,
"grad_norm": 1.192165732383728,
"learning_rate": 0.00012168325041459369,
"loss": 1.5617985725402832,
"step": 1890
},
{
"epoch": 0.3938844260171029,
"grad_norm": 1.1147258281707764,
"learning_rate": 0.00012126865671641792,
"loss": 1.5168773651123046,
"step": 1900
},
{
"epoch": 0.3959575019435087,
"grad_norm": 1.129460096359253,
"learning_rate": 0.00012085406301824213,
"loss": 1.5103120803833008,
"step": 1910
},
{
"epoch": 0.39803057786991447,
"grad_norm": 1.0588513612747192,
"learning_rate": 0.00012043946932006633,
"loss": 1.5003737449645995,
"step": 1920
},
{
"epoch": 0.4001036537963203,
"grad_norm": 0.9957575798034668,
"learning_rate": 0.00012002487562189055,
"loss": 1.5337603569030762,
"step": 1930
},
{
"epoch": 0.4021767297227261,
"grad_norm": 0.996632993221283,
"learning_rate": 0.00011961028192371478,
"loss": 1.6394582748413087,
"step": 1940
},
{
"epoch": 0.4042498056491319,
"grad_norm": 1.086541771888733,
"learning_rate": 0.00011919568822553898,
"loss": 1.5516023635864258,
"step": 1950
},
{
"epoch": 0.4063228815755377,
"grad_norm": 0.9926400184631348,
"learning_rate": 0.00011878109452736319,
"loss": 1.5786477088928224,
"step": 1960
},
{
"epoch": 0.4083959575019435,
"grad_norm": 1.1575102806091309,
"learning_rate": 0.0001183665008291874,
"loss": 1.5651155471801759,
"step": 1970
},
{
"epoch": 0.41046903342834934,
"grad_norm": 0.964144766330719,
"learning_rate": 0.0001179519071310116,
"loss": 1.5103749275207519,
"step": 1980
},
{
"epoch": 0.4125421093547551,
"grad_norm": 1.1728180646896362,
"learning_rate": 0.00011753731343283582,
"loss": 1.5683252334594726,
"step": 1990
},
{
"epoch": 0.4146151852811609,
"grad_norm": 1.0341147184371948,
"learning_rate": 0.00011712271973466005,
"loss": 1.5328519821166993,
"step": 2000
},
{
"epoch": 0.41668826120756675,
"grad_norm": 1.220893383026123,
"learning_rate": 0.00011670812603648425,
"loss": 1.569212055206299,
"step": 2010
},
{
"epoch": 0.4187613371339725,
"grad_norm": 1.287847638130188,
"learning_rate": 0.00011629353233830846,
"loss": 1.4834338188171388,
"step": 2020
},
{
"epoch": 0.42083441306037833,
"grad_norm": 0.9762222766876221,
"learning_rate": 0.00011587893864013268,
"loss": 1.5161984443664551,
"step": 2030
},
{
"epoch": 0.42290748898678415,
"grad_norm": 1.0173159837722778,
"learning_rate": 0.00011546434494195688,
"loss": 1.531379795074463,
"step": 2040
},
{
"epoch": 0.42498056491319,
"grad_norm": 1.1367406845092773,
"learning_rate": 0.00011504975124378111,
"loss": 1.4666010856628418,
"step": 2050
},
{
"epoch": 0.42705364083959574,
"grad_norm": 1.034964919090271,
"learning_rate": 0.00011463515754560532,
"loss": 1.4785846710205077,
"step": 2060
},
{
"epoch": 0.42912671676600156,
"grad_norm": 1.1348105669021606,
"learning_rate": 0.00011422056384742952,
"loss": 1.5278627395629882,
"step": 2070
},
{
"epoch": 0.4311997926924074,
"grad_norm": 1.092238426208496,
"learning_rate": 0.00011380597014925374,
"loss": 1.54183349609375,
"step": 2080
},
{
"epoch": 0.43327286861881315,
"grad_norm": 1.1732505559921265,
"learning_rate": 0.00011339137645107795,
"loss": 1.5574755668640137,
"step": 2090
},
{
"epoch": 0.43534594454521897,
"grad_norm": 1.108764410018921,
"learning_rate": 0.00011297678275290215,
"loss": 1.5414604187011718,
"step": 2100
},
{
"epoch": 0.4374190204716248,
"grad_norm": 0.9649912118911743,
"learning_rate": 0.00011256218905472638,
"loss": 1.4722467422485352,
"step": 2110
},
{
"epoch": 0.43949209639803055,
"grad_norm": 1.0946617126464844,
"learning_rate": 0.0001121475953565506,
"loss": 1.5284259796142579,
"step": 2120
},
{
"epoch": 0.4415651723244364,
"grad_norm": 0.9634913206100464,
"learning_rate": 0.0001117330016583748,
"loss": 1.517714500427246,
"step": 2130
},
{
"epoch": 0.4436382482508422,
"grad_norm": 1.1210685968399048,
"learning_rate": 0.00011131840796019901,
"loss": 1.463743305206299,
"step": 2140
},
{
"epoch": 0.445711324177248,
"grad_norm": 1.065474510192871,
"learning_rate": 0.00011090381426202324,
"loss": 1.5272763252258301,
"step": 2150
},
{
"epoch": 0.4477844001036538,
"grad_norm": 1.0351336002349854,
"learning_rate": 0.00011048922056384742,
"loss": 1.4388788223266602,
"step": 2160
},
{
"epoch": 0.4498574760300596,
"grad_norm": 1.0451213121414185,
"learning_rate": 0.00011007462686567165,
"loss": 1.495101261138916,
"step": 2170
},
{
"epoch": 0.4519305519564654,
"grad_norm": 1.2555310726165771,
"learning_rate": 0.00010966003316749587,
"loss": 1.4896994590759278,
"step": 2180
},
{
"epoch": 0.4540036278828712,
"grad_norm": 1.059262752532959,
"learning_rate": 0.00010924543946932007,
"loss": 1.4970458030700684,
"step": 2190
},
{
"epoch": 0.456076703809277,
"grad_norm": 0.9911238551139832,
"learning_rate": 0.00010883084577114428,
"loss": 1.5557072639465332,
"step": 2200
},
{
"epoch": 0.4581497797356828,
"grad_norm": 1.1385858058929443,
"learning_rate": 0.00010841625207296851,
"loss": 1.4651107788085938,
"step": 2210
},
{
"epoch": 0.46022285566208865,
"grad_norm": 1.1184098720550537,
"learning_rate": 0.0001080016583747927,
"loss": 1.5804038047790527,
"step": 2220
},
{
"epoch": 0.4622959315884944,
"grad_norm": 1.0929696559906006,
"learning_rate": 0.00010758706467661693,
"loss": 1.5022403717041015,
"step": 2230
},
{
"epoch": 0.46436900751490023,
"grad_norm": 0.9907273650169373,
"learning_rate": 0.00010717247097844114,
"loss": 1.4441984176635743,
"step": 2240
},
{
"epoch": 0.46644208344130605,
"grad_norm": 0.9875295758247375,
"learning_rate": 0.00010675787728026534,
"loss": 1.4747949600219727,
"step": 2250
},
{
"epoch": 0.4685151593677118,
"grad_norm": 0.9860086441040039,
"learning_rate": 0.00010634328358208955,
"loss": 1.506369400024414,
"step": 2260
},
{
"epoch": 0.47058823529411764,
"grad_norm": 1.272979497909546,
"learning_rate": 0.00010592868988391378,
"loss": 1.4900493621826172,
"step": 2270
},
{
"epoch": 0.47266131122052346,
"grad_norm": 1.1522997617721558,
"learning_rate": 0.00010551409618573797,
"loss": 1.4898492813110351,
"step": 2280
},
{
"epoch": 0.4747343871469293,
"grad_norm": 1.0061860084533691,
"learning_rate": 0.0001050995024875622,
"loss": 1.5466057777404785,
"step": 2290
},
{
"epoch": 0.47680746307333505,
"grad_norm": 1.1039366722106934,
"learning_rate": 0.00010468490878938641,
"loss": 1.4958653450012207,
"step": 2300
},
{
"epoch": 0.47888053899974087,
"grad_norm": 1.064855933189392,
"learning_rate": 0.00010427031509121061,
"loss": 1.4066000938415528,
"step": 2310
},
{
"epoch": 0.4809536149261467,
"grad_norm": 1.2152620553970337,
"learning_rate": 0.00010385572139303483,
"loss": 1.532679271697998,
"step": 2320
},
{
"epoch": 0.48302669085255245,
"grad_norm": 1.1034210920333862,
"learning_rate": 0.00010344112769485906,
"loss": 1.4218317985534668,
"step": 2330
},
{
"epoch": 0.4850997667789583,
"grad_norm": 1.0778026580810547,
"learning_rate": 0.00010302653399668326,
"loss": 1.4759449005126952,
"step": 2340
},
{
"epoch": 0.4871728427053641,
"grad_norm": 1.0956099033355713,
"learning_rate": 0.00010261194029850747,
"loss": 1.510129165649414,
"step": 2350
},
{
"epoch": 0.4892459186317699,
"grad_norm": 1.026975393295288,
"learning_rate": 0.00010219734660033168,
"loss": 1.450872802734375,
"step": 2360
},
{
"epoch": 0.4913189945581757,
"grad_norm": 1.0078274011611938,
"learning_rate": 0.00010178275290215589,
"loss": 1.546852207183838,
"step": 2370
},
{
"epoch": 0.4933920704845815,
"grad_norm": 0.9396551847457886,
"learning_rate": 0.0001013681592039801,
"loss": 1.4956705093383789,
"step": 2380
},
{
"epoch": 0.4954651464109873,
"grad_norm": 1.0600014925003052,
"learning_rate": 0.00010095356550580433,
"loss": 1.4916232109069825,
"step": 2390
},
{
"epoch": 0.4975382223373931,
"grad_norm": 0.9873512387275696,
"learning_rate": 0.00010053897180762853,
"loss": 1.383552074432373,
"step": 2400
},
{
"epoch": 0.4996112982637989,
"grad_norm": 1.2188613414764404,
"learning_rate": 0.00010012437810945274,
"loss": 1.529955005645752,
"step": 2410
},
{
"epoch": 0.5016843741902047,
"grad_norm": 1.0167559385299683,
"learning_rate": 9.970978441127696e-05,
"loss": 1.5507872581481934,
"step": 2420
},
{
"epoch": 0.5037574501166106,
"grad_norm": 1.1081862449645996,
"learning_rate": 9.929519071310116e-05,
"loss": 1.4936115264892578,
"step": 2430
},
{
"epoch": 0.5058305260430164,
"grad_norm": 1.160117268562317,
"learning_rate": 9.888059701492539e-05,
"loss": 1.479538917541504,
"step": 2440
},
{
"epoch": 0.5079036019694221,
"grad_norm": 1.0124058723449707,
"learning_rate": 9.846600331674959e-05,
"loss": 1.407692050933838,
"step": 2450
},
{
"epoch": 0.5099766778958279,
"grad_norm": 0.9970583319664001,
"learning_rate": 9.80514096185738e-05,
"loss": 1.5096822738647462,
"step": 2460
},
{
"epoch": 0.5120497538222337,
"grad_norm": 1.0601921081542969,
"learning_rate": 9.763681592039802e-05,
"loss": 1.5124183654785157,
"step": 2470
},
{
"epoch": 0.5141228297486395,
"grad_norm": 1.2017216682434082,
"learning_rate": 9.722222222222223e-05,
"loss": 1.4738014221191407,
"step": 2480
},
{
"epoch": 0.5161959056750454,
"grad_norm": 1.1182701587677002,
"learning_rate": 9.680762852404643e-05,
"loss": 1.4870125770568847,
"step": 2490
},
{
"epoch": 0.5182689816014512,
"grad_norm": 0.9587567448616028,
"learning_rate": 9.639303482587066e-05,
"loss": 1.5147294998168945,
"step": 2500
},
{
"epoch": 0.520342057527857,
"grad_norm": 1.177620530128479,
"learning_rate": 9.597844112769486e-05,
"loss": 1.519681739807129,
"step": 2510
},
{
"epoch": 0.5224151334542627,
"grad_norm": 1.1010137796401978,
"learning_rate": 9.556384742951907e-05,
"loss": 1.4474271774291991,
"step": 2520
},
{
"epoch": 0.5244882093806685,
"grad_norm": 0.9620181918144226,
"learning_rate": 9.514925373134329e-05,
"loss": 1.5070289611816405,
"step": 2530
},
{
"epoch": 0.5265612853070744,
"grad_norm": 1.0856534242630005,
"learning_rate": 9.47346600331675e-05,
"loss": 1.5071434020996093,
"step": 2540
},
{
"epoch": 0.5286343612334802,
"grad_norm": 1.1289268732070923,
"learning_rate": 9.43200663349917e-05,
"loss": 1.6005094528198243,
"step": 2550
},
{
"epoch": 0.530707437159886,
"grad_norm": 1.063301682472229,
"learning_rate": 9.390547263681593e-05,
"loss": 1.490926742553711,
"step": 2560
},
{
"epoch": 0.5327805130862918,
"grad_norm": 1.0895897150039673,
"learning_rate": 9.349087893864013e-05,
"loss": 1.4283631324768067,
"step": 2570
},
{
"epoch": 0.5348535890126976,
"grad_norm": 1.0446993112564087,
"learning_rate": 9.307628524046435e-05,
"loss": 1.5121649742126464,
"step": 2580
},
{
"epoch": 0.5369266649391033,
"grad_norm": 1.118340253829956,
"learning_rate": 9.266169154228856e-05,
"loss": 1.5203582763671875,
"step": 2590
},
{
"epoch": 0.5389997408655092,
"grad_norm": 0.9693984389305115,
"learning_rate": 9.224709784411278e-05,
"loss": 1.493481731414795,
"step": 2600
},
{
"epoch": 0.541072816791915,
"grad_norm": 1.027601957321167,
"learning_rate": 9.183250414593698e-05,
"loss": 1.3991009712219238,
"step": 2610
},
{
"epoch": 0.5431458927183208,
"grad_norm": 1.0012259483337402,
"learning_rate": 9.14179104477612e-05,
"loss": 1.4645806312561036,
"step": 2620
},
{
"epoch": 0.5452189686447266,
"grad_norm": 1.021893858909607,
"learning_rate": 9.10033167495854e-05,
"loss": 1.4260982513427733,
"step": 2630
},
{
"epoch": 0.5472920445711325,
"grad_norm": 1.1559139490127563,
"learning_rate": 9.058872305140962e-05,
"loss": 1.436918354034424,
"step": 2640
},
{
"epoch": 0.5493651204975383,
"grad_norm": 1.0691229104995728,
"learning_rate": 9.017412935323383e-05,
"loss": 1.5417165756225586,
"step": 2650
},
{
"epoch": 0.551438196423944,
"grad_norm": 1.0778980255126953,
"learning_rate": 8.975953565505805e-05,
"loss": 1.4646015167236328,
"step": 2660
},
{
"epoch": 0.5535112723503498,
"grad_norm": 1.128537654876709,
"learning_rate": 8.934494195688225e-05,
"loss": 1.4791038513183594,
"step": 2670
},
{
"epoch": 0.5555843482767556,
"grad_norm": 1.1227152347564697,
"learning_rate": 8.893034825870648e-05,
"loss": 1.477587890625,
"step": 2680
},
{
"epoch": 0.5576574242031614,
"grad_norm": 1.1616007089614868,
"learning_rate": 8.851575456053068e-05,
"loss": 1.5081703186035156,
"step": 2690
},
{
"epoch": 0.5597305001295673,
"grad_norm": 1.071717381477356,
"learning_rate": 8.810116086235489e-05,
"loss": 1.478490161895752,
"step": 2700
},
{
"epoch": 0.5618035760559731,
"grad_norm": 0.953491747379303,
"learning_rate": 8.76865671641791e-05,
"loss": 1.486307144165039,
"step": 2710
},
{
"epoch": 0.5638766519823789,
"grad_norm": 1.107778787612915,
"learning_rate": 8.727197346600332e-05,
"loss": 1.4863730430603028,
"step": 2720
},
{
"epoch": 0.5659497279087846,
"grad_norm": 1.1062201261520386,
"learning_rate": 8.685737976782754e-05,
"loss": 1.4807310104370117,
"step": 2730
},
{
"epoch": 0.5680228038351904,
"grad_norm": 1.1240142583847046,
"learning_rate": 8.644278606965175e-05,
"loss": 1.4995529174804687,
"step": 2740
},
{
"epoch": 0.5700958797615963,
"grad_norm": 1.0423475503921509,
"learning_rate": 8.602819237147596e-05,
"loss": 1.4720562934875487,
"step": 2750
},
{
"epoch": 0.5721689556880021,
"grad_norm": 1.1449542045593262,
"learning_rate": 8.561359867330017e-05,
"loss": 1.5412587165832519,
"step": 2760
},
{
"epoch": 0.5742420316144079,
"grad_norm": 1.1100146770477295,
"learning_rate": 8.519900497512438e-05,
"loss": 1.4932291030883789,
"step": 2770
},
{
"epoch": 0.5763151075408137,
"grad_norm": 1.066078543663025,
"learning_rate": 8.47844112769486e-05,
"loss": 1.4360157012939454,
"step": 2780
},
{
"epoch": 0.5783881834672195,
"grad_norm": 1.0685060024261475,
"learning_rate": 8.436981757877281e-05,
"loss": 1.5007810592651367,
"step": 2790
},
{
"epoch": 0.5804612593936253,
"grad_norm": 1.337509274482727,
"learning_rate": 8.395522388059702e-05,
"loss": 1.4985486030578614,
"step": 2800
},
{
"epoch": 0.5825343353200311,
"grad_norm": 1.0489015579223633,
"learning_rate": 8.354063018242124e-05,
"loss": 1.4878012657165527,
"step": 2810
},
{
"epoch": 0.5846074112464369,
"grad_norm": 1.1977568864822388,
"learning_rate": 8.312603648424544e-05,
"loss": 1.4938278198242188,
"step": 2820
},
{
"epoch": 0.5866804871728427,
"grad_norm": 1.0384935140609741,
"learning_rate": 8.271144278606967e-05,
"loss": 1.484629249572754,
"step": 2830
},
{
"epoch": 0.5887535630992485,
"grad_norm": 1.0145131349563599,
"learning_rate": 8.229684908789387e-05,
"loss": 1.4890990257263184,
"step": 2840
},
{
"epoch": 0.5908266390256544,
"grad_norm": 1.0456562042236328,
"learning_rate": 8.188225538971808e-05,
"loss": 1.4158708572387695,
"step": 2850
},
{
"epoch": 0.5928997149520601,
"grad_norm": 1.0825212001800537,
"learning_rate": 8.14676616915423e-05,
"loss": 1.456554126739502,
"step": 2860
},
{
"epoch": 0.5949727908784659,
"grad_norm": 1.0650807619094849,
"learning_rate": 8.105306799336651e-05,
"loss": 1.473994255065918,
"step": 2870
},
{
"epoch": 0.5970458668048717,
"grad_norm": 1.060115098953247,
"learning_rate": 8.063847429519071e-05,
"loss": 1.4481081008911132,
"step": 2880
},
{
"epoch": 0.5991189427312775,
"grad_norm": 0.9253358840942383,
"learning_rate": 8.022388059701494e-05,
"loss": 1.4642643928527832,
"step": 2890
},
{
"epoch": 0.6011920186576833,
"grad_norm": 1.0225350856781006,
"learning_rate": 7.980928689883914e-05,
"loss": 1.3740483283996583,
"step": 2900
},
{
"epoch": 0.6032650945840892,
"grad_norm": 1.0211741924285889,
"learning_rate": 7.939469320066335e-05,
"loss": 1.4753257751464843,
"step": 2910
},
{
"epoch": 0.605338170510495,
"grad_norm": 1.0062146186828613,
"learning_rate": 7.898009950248757e-05,
"loss": 1.4432376861572265,
"step": 2920
},
{
"epoch": 0.6074112464369007,
"grad_norm": 1.1036884784698486,
"learning_rate": 7.856550580431178e-05,
"loss": 1.3941972732543946,
"step": 2930
},
{
"epoch": 0.6094843223633065,
"grad_norm": 1.0829540491104126,
"learning_rate": 7.815091210613598e-05,
"loss": 1.5212538719177247,
"step": 2940
},
{
"epoch": 0.6115573982897123,
"grad_norm": 1.1836856603622437,
"learning_rate": 7.773631840796021e-05,
"loss": 1.5671442031860352,
"step": 2950
},
{
"epoch": 0.6136304742161182,
"grad_norm": 0.9901530742645264,
"learning_rate": 7.732172470978441e-05,
"loss": 1.4176819801330567,
"step": 2960
},
{
"epoch": 0.615703550142524,
"grad_norm": 1.0980446338653564,
"learning_rate": 7.690713101160863e-05,
"loss": 1.47709379196167,
"step": 2970
},
{
"epoch": 0.6177766260689298,
"grad_norm": 1.0742297172546387,
"learning_rate": 7.649253731343284e-05,
"loss": 1.3669164657592774,
"step": 2980
},
{
"epoch": 0.6198497019953356,
"grad_norm": 1.0716501474380493,
"learning_rate": 7.607794361525706e-05,
"loss": 1.5048974990844726,
"step": 2990
},
{
"epoch": 0.6219227779217413,
"grad_norm": 1.2103937864303589,
"learning_rate": 7.566334991708126e-05,
"loss": 1.4739879608154296,
"step": 3000
},
{
"epoch": 0.6239958538481472,
"grad_norm": 1.0737628936767578,
"learning_rate": 7.524875621890548e-05,
"loss": 1.493875503540039,
"step": 3010
},
{
"epoch": 0.626068929774553,
"grad_norm": 1.0744723081588745,
"learning_rate": 7.483416252072968e-05,
"loss": 1.50384578704834,
"step": 3020
},
{
"epoch": 0.6281420057009588,
"grad_norm": 1.070535659790039,
"learning_rate": 7.44195688225539e-05,
"loss": 1.4305116653442382,
"step": 3030
},
{
"epoch": 0.6302150816273646,
"grad_norm": 1.0274264812469482,
"learning_rate": 7.400497512437811e-05,
"loss": 1.452561092376709,
"step": 3040
},
{
"epoch": 0.6322881575537704,
"grad_norm": 1.1508046388626099,
"learning_rate": 7.359038142620233e-05,
"loss": 1.4898262023925781,
"step": 3050
},
{
"epoch": 0.6343612334801763,
"grad_norm": 1.0203231573104858,
"learning_rate": 7.317578772802653e-05,
"loss": 1.490403175354004,
"step": 3060
},
{
"epoch": 0.636434309406582,
"grad_norm": 1.3003417253494263,
"learning_rate": 7.276119402985076e-05,
"loss": 1.4818619728088378,
"step": 3070
},
{
"epoch": 0.6385073853329878,
"grad_norm": 1.1255285739898682,
"learning_rate": 7.234660033167496e-05,
"loss": 1.5162216186523438,
"step": 3080
},
{
"epoch": 0.6405804612593936,
"grad_norm": 1.0838392972946167,
"learning_rate": 7.193200663349917e-05,
"loss": 1.4879429817199707,
"step": 3090
},
{
"epoch": 0.6426535371857994,
"grad_norm": 1.036298394203186,
"learning_rate": 7.151741293532339e-05,
"loss": 1.5392193794250488,
"step": 3100
},
{
"epoch": 0.6447266131122052,
"grad_norm": 1.0307782888412476,
"learning_rate": 7.11028192371476e-05,
"loss": 1.4172185897827148,
"step": 3110
},
{
"epoch": 0.6467996890386111,
"grad_norm": 1.0826961994171143,
"learning_rate": 7.068822553897181e-05,
"loss": 1.5217445373535157,
"step": 3120
},
{
"epoch": 0.6488727649650169,
"grad_norm": 1.1925781965255737,
"learning_rate": 7.027363184079603e-05,
"loss": 1.4902685165405274,
"step": 3130
},
{
"epoch": 0.6509458408914226,
"grad_norm": 1.1316440105438232,
"learning_rate": 6.985903814262023e-05,
"loss": 1.4429686546325684,
"step": 3140
},
{
"epoch": 0.6530189168178284,
"grad_norm": 1.0991287231445312,
"learning_rate": 6.944444444444444e-05,
"loss": 1.5057510375976562,
"step": 3150
},
{
"epoch": 0.6550919927442342,
"grad_norm": 0.997954249382019,
"learning_rate": 6.902985074626866e-05,
"loss": 1.4608537673950195,
"step": 3160
},
{
"epoch": 0.6571650686706401,
"grad_norm": 1.3616468906402588,
"learning_rate": 6.861525704809287e-05,
"loss": 1.4557350158691407,
"step": 3170
},
{
"epoch": 0.6592381445970459,
"grad_norm": 1.106767177581787,
"learning_rate": 6.820066334991709e-05,
"loss": 1.486179733276367,
"step": 3180
},
{
"epoch": 0.6613112205234517,
"grad_norm": 1.0956076383590698,
"learning_rate": 6.77860696517413e-05,
"loss": 1.4668097496032715,
"step": 3190
},
{
"epoch": 0.6633842964498575,
"grad_norm": 1.1206140518188477,
"learning_rate": 6.737147595356552e-05,
"loss": 1.4768742561340331,
"step": 3200
},
{
"epoch": 0.6654573723762632,
"grad_norm": 0.9833910465240479,
"learning_rate": 6.695688225538972e-05,
"loss": 1.5204994201660156,
"step": 3210
},
{
"epoch": 0.667530448302669,
"grad_norm": 1.1817854642868042,
"learning_rate": 6.654228855721395e-05,
"loss": 1.5230501174926758,
"step": 3220
},
{
"epoch": 0.6696035242290749,
"grad_norm": 1.0882384777069092,
"learning_rate": 6.612769485903815e-05,
"loss": 1.3979837417602539,
"step": 3230
},
{
"epoch": 0.6716766001554807,
"grad_norm": 1.1264276504516602,
"learning_rate": 6.571310116086236e-05,
"loss": 1.4921886444091796,
"step": 3240
},
{
"epoch": 0.6737496760818865,
"grad_norm": 1.1087210178375244,
"learning_rate": 6.529850746268657e-05,
"loss": 1.4735729217529296,
"step": 3250
},
{
"epoch": 0.6758227520082923,
"grad_norm": 1.0597937107086182,
"learning_rate": 6.488391376451079e-05,
"loss": 1.5197007179260253,
"step": 3260
},
{
"epoch": 0.6778958279346982,
"grad_norm": 1.0482463836669922,
"learning_rate": 6.446932006633499e-05,
"loss": 1.465884017944336,
"step": 3270
},
{
"epoch": 0.6799689038611039,
"grad_norm": 1.035261869430542,
"learning_rate": 6.405472636815922e-05,
"loss": 1.4476487159729003,
"step": 3280
},
{
"epoch": 0.6820419797875097,
"grad_norm": 1.1454598903656006,
"learning_rate": 6.364013266998342e-05,
"loss": 1.453122329711914,
"step": 3290
},
{
"epoch": 0.6841150557139155,
"grad_norm": 1.1053162813186646,
"learning_rate": 6.322553897180763e-05,
"loss": 1.419975471496582,
"step": 3300
},
{
"epoch": 0.6861881316403213,
"grad_norm": 1.046738624572754,
"learning_rate": 6.281094527363185e-05,
"loss": 1.4719730377197267,
"step": 3310
},
{
"epoch": 0.6882612075667272,
"grad_norm": 1.2806482315063477,
"learning_rate": 6.239635157545606e-05,
"loss": 1.4755671501159668,
"step": 3320
},
{
"epoch": 0.690334283493133,
"grad_norm": 0.9969695210456848,
"learning_rate": 6.198175787728026e-05,
"loss": 1.4757104873657227,
"step": 3330
},
{
"epoch": 0.6924073594195388,
"grad_norm": 1.1298997402191162,
"learning_rate": 6.156716417910448e-05,
"loss": 1.39326171875,
"step": 3340
},
{
"epoch": 0.6944804353459445,
"grad_norm": 1.1446443796157837,
"learning_rate": 6.115257048092869e-05,
"loss": 1.518898105621338,
"step": 3350
},
{
"epoch": 0.6965535112723503,
"grad_norm": 1.145512580871582,
"learning_rate": 6.0737976782752906e-05,
"loss": 1.533563995361328,
"step": 3360
},
{
"epoch": 0.6986265871987561,
"grad_norm": 1.1053916215896606,
"learning_rate": 6.032338308457711e-05,
"loss": 1.5256379127502442,
"step": 3370
},
{
"epoch": 0.700699663125162,
"grad_norm": 1.1185383796691895,
"learning_rate": 5.9908789386401334e-05,
"loss": 1.4893531799316406,
"step": 3380
},
{
"epoch": 0.7027727390515678,
"grad_norm": 1.0875242948532104,
"learning_rate": 5.949419568822554e-05,
"loss": 1.5119455337524415,
"step": 3390
},
{
"epoch": 0.7048458149779736,
"grad_norm": 0.9813434481620789,
"learning_rate": 5.907960199004975e-05,
"loss": 1.4083710670471192,
"step": 3400
},
{
"epoch": 0.7069188909043794,
"grad_norm": 1.216704249382019,
"learning_rate": 5.866500829187397e-05,
"loss": 1.3952042579650878,
"step": 3410
},
{
"epoch": 0.7089919668307851,
"grad_norm": 0.9959679841995239,
"learning_rate": 5.825041459369818e-05,
"loss": 1.4536603927612304,
"step": 3420
},
{
"epoch": 0.711065042757191,
"grad_norm": 1.088835597038269,
"learning_rate": 5.7835820895522386e-05,
"loss": 1.5083325386047364,
"step": 3430
},
{
"epoch": 0.7131381186835968,
"grad_norm": 1.0195202827453613,
"learning_rate": 5.742122719734661e-05,
"loss": 1.3630632400512694,
"step": 3440
},
{
"epoch": 0.7152111946100026,
"grad_norm": 1.1602747440338135,
"learning_rate": 5.7006633499170815e-05,
"loss": 1.4151906967163086,
"step": 3450
},
{
"epoch": 0.7172842705364084,
"grad_norm": 1.0450451374053955,
"learning_rate": 5.659203980099502e-05,
"loss": 1.4249334335327148,
"step": 3460
},
{
"epoch": 0.7193573464628142,
"grad_norm": 1.100573182106018,
"learning_rate": 5.6177446102819243e-05,
"loss": 1.4444709777832032,
"step": 3470
},
{
"epoch": 0.72143042238922,
"grad_norm": 1.190826654434204,
"learning_rate": 5.576285240464345e-05,
"loss": 1.4221847534179688,
"step": 3480
},
{
"epoch": 0.7235034983156258,
"grad_norm": 1.1273324489593506,
"learning_rate": 5.534825870646766e-05,
"loss": 1.4116867065429688,
"step": 3490
},
{
"epoch": 0.7255765742420316,
"grad_norm": 1.132863163948059,
"learning_rate": 5.493366500829188e-05,
"loss": 1.5136717796325683,
"step": 3500
},
{
"epoch": 0.7276496501684374,
"grad_norm": 1.256491780281067,
"learning_rate": 5.451907131011609e-05,
"loss": 1.4937585830688476,
"step": 3510
},
{
"epoch": 0.7297227260948432,
"grad_norm": 1.0635231733322144,
"learning_rate": 5.4104477611940295e-05,
"loss": 1.5216801643371582,
"step": 3520
},
{
"epoch": 0.731795802021249,
"grad_norm": 1.0570056438446045,
"learning_rate": 5.3689883913764516e-05,
"loss": 1.4815841674804688,
"step": 3530
},
{
"epoch": 0.7338688779476549,
"grad_norm": 1.1593701839447021,
"learning_rate": 5.3275290215588724e-05,
"loss": 1.4530771255493165,
"step": 3540
},
{
"epoch": 0.7359419538740606,
"grad_norm": 1.1562939882278442,
"learning_rate": 5.286069651741293e-05,
"loss": 1.5589415550231933,
"step": 3550
},
{
"epoch": 0.7380150298004664,
"grad_norm": 1.1924159526824951,
"learning_rate": 5.244610281923715e-05,
"loss": 1.3856592178344727,
"step": 3560
},
{
"epoch": 0.7400881057268722,
"grad_norm": 1.2092247009277344,
"learning_rate": 5.203150912106136e-05,
"loss": 1.402101993560791,
"step": 3570
},
{
"epoch": 0.742161181653278,
"grad_norm": 1.155090093612671,
"learning_rate": 5.161691542288557e-05,
"loss": 1.4879738807678222,
"step": 3580
},
{
"epoch": 0.7442342575796839,
"grad_norm": 0.9951611161231995,
"learning_rate": 5.120232172470979e-05,
"loss": 1.463954544067383,
"step": 3590
},
{
"epoch": 0.7463073335060897,
"grad_norm": 1.038360595703125,
"learning_rate": 5.0787728026533996e-05,
"loss": 1.514687728881836,
"step": 3600
},
{
"epoch": 0.7483804094324955,
"grad_norm": 1.1276081800460815,
"learning_rate": 5.0373134328358204e-05,
"loss": 1.383653736114502,
"step": 3610
},
{
"epoch": 0.7504534853589012,
"grad_norm": 1.2908998727798462,
"learning_rate": 4.995854063018242e-05,
"loss": 1.431437873840332,
"step": 3620
},
{
"epoch": 0.752526561285307,
"grad_norm": 1.1836334466934204,
"learning_rate": 4.954394693200663e-05,
"loss": 1.495081615447998,
"step": 3630
},
{
"epoch": 0.7545996372117129,
"grad_norm": 1.0293713808059692,
"learning_rate": 4.912935323383085e-05,
"loss": 1.4396793365478515,
"step": 3640
},
{
"epoch": 0.7566727131381187,
"grad_norm": 1.1716481447219849,
"learning_rate": 4.8714759535655055e-05,
"loss": 1.5675559997558595,
"step": 3650
},
{
"epoch": 0.7587457890645245,
"grad_norm": 1.090667963027954,
"learning_rate": 4.830016583747927e-05,
"loss": 1.4259960174560546,
"step": 3660
},
{
"epoch": 0.7608188649909303,
"grad_norm": 1.1595834493637085,
"learning_rate": 4.7885572139303484e-05,
"loss": 1.506321907043457,
"step": 3670
},
{
"epoch": 0.7628919409173361,
"grad_norm": 1.2258275747299194,
"learning_rate": 4.74709784411277e-05,
"loss": 1.4344300270080566,
"step": 3680
},
{
"epoch": 0.7649650168437419,
"grad_norm": 1.059180498123169,
"learning_rate": 4.705638474295191e-05,
"loss": 1.4194180488586425,
"step": 3690
},
{
"epoch": 0.7670380927701477,
"grad_norm": 1.0283405780792236,
"learning_rate": 4.664179104477612e-05,
"loss": 1.466190242767334,
"step": 3700
},
{
"epoch": 0.7691111686965535,
"grad_norm": 1.199347734451294,
"learning_rate": 4.6227197346600334e-05,
"loss": 1.502589797973633,
"step": 3710
},
{
"epoch": 0.7711842446229593,
"grad_norm": 1.0684884786605835,
"learning_rate": 4.581260364842455e-05,
"loss": 1.3968034744262696,
"step": 3720
},
{
"epoch": 0.7732573205493651,
"grad_norm": 1.240468978881836,
"learning_rate": 4.539800995024876e-05,
"loss": 1.4589550971984864,
"step": 3730
},
{
"epoch": 0.775330396475771,
"grad_norm": 1.2167885303497314,
"learning_rate": 4.498341625207297e-05,
"loss": 1.4414511680603028,
"step": 3740
},
{
"epoch": 0.7774034724021768,
"grad_norm": 1.1880028247833252,
"learning_rate": 4.4568822553897185e-05,
"loss": 1.4229560852050782,
"step": 3750
},
{
"epoch": 0.7794765483285825,
"grad_norm": 1.023346185684204,
"learning_rate": 4.41542288557214e-05,
"loss": 1.414274311065674,
"step": 3760
},
{
"epoch": 0.7815496242549883,
"grad_norm": 1.0751137733459473,
"learning_rate": 4.373963515754561e-05,
"loss": 1.4005146980285645,
"step": 3770
},
{
"epoch": 0.7836227001813941,
"grad_norm": 1.1147372722625732,
"learning_rate": 4.332504145936982e-05,
"loss": 1.4554076194763184,
"step": 3780
},
{
"epoch": 0.7856957761078,
"grad_norm": 1.1774779558181763,
"learning_rate": 4.2910447761194036e-05,
"loss": 1.4009012222290038,
"step": 3790
},
{
"epoch": 0.7877688520342058,
"grad_norm": 1.0493589639663696,
"learning_rate": 4.249585406301824e-05,
"loss": 1.4634977340698243,
"step": 3800
},
{
"epoch": 0.7898419279606116,
"grad_norm": 1.1655833721160889,
"learning_rate": 4.208126036484246e-05,
"loss": 1.4797652244567872,
"step": 3810
},
{
"epoch": 0.7919150038870174,
"grad_norm": 1.1000230312347412,
"learning_rate": 4.166666666666667e-05,
"loss": 1.4450549125671386,
"step": 3820
},
{
"epoch": 0.7939880798134231,
"grad_norm": 1.0554734468460083,
"learning_rate": 4.125207296849088e-05,
"loss": 1.4720548629760741,
"step": 3830
},
{
"epoch": 0.7960611557398289,
"grad_norm": 1.1296530961990356,
"learning_rate": 4.0837479270315094e-05,
"loss": 1.5152462005615235,
"step": 3840
},
{
"epoch": 0.7981342316662348,
"grad_norm": 1.1453872919082642,
"learning_rate": 4.042288557213931e-05,
"loss": 1.3691265106201171,
"step": 3850
},
{
"epoch": 0.8002073075926406,
"grad_norm": 1.1940151453018188,
"learning_rate": 4.0008291873963516e-05,
"loss": 1.3959809303283692,
"step": 3860
},
{
"epoch": 0.8022803835190464,
"grad_norm": 1.1130040884017944,
"learning_rate": 3.959369817578773e-05,
"loss": 1.4111416816711426,
"step": 3870
},
{
"epoch": 0.8043534594454522,
"grad_norm": 1.1695324182510376,
"learning_rate": 3.9179104477611945e-05,
"loss": 1.507277011871338,
"step": 3880
},
{
"epoch": 0.806426535371858,
"grad_norm": 1.2983999252319336,
"learning_rate": 3.876451077943615e-05,
"loss": 1.3935456275939941,
"step": 3890
},
{
"epoch": 0.8084996112982638,
"grad_norm": 1.1417698860168457,
"learning_rate": 3.834991708126037e-05,
"loss": 1.464134407043457,
"step": 3900
},
{
"epoch": 0.8105726872246696,
"grad_norm": 1.0782017707824707,
"learning_rate": 3.793532338308458e-05,
"loss": 1.4875127792358398,
"step": 3910
},
{
"epoch": 0.8126457631510754,
"grad_norm": 1.2534525394439697,
"learning_rate": 3.752072968490879e-05,
"loss": 1.5353063583374023,
"step": 3920
},
{
"epoch": 0.8147188390774812,
"grad_norm": 1.2145850658416748,
"learning_rate": 3.7106135986733e-05,
"loss": 1.4504884719848632,
"step": 3930
},
{
"epoch": 0.816791915003887,
"grad_norm": 1.1368614435195923,
"learning_rate": 3.669154228855722e-05,
"loss": 1.4631189346313476,
"step": 3940
},
{
"epoch": 0.8188649909302929,
"grad_norm": 1.1054575443267822,
"learning_rate": 3.6276948590381425e-05,
"loss": 1.44279203414917,
"step": 3950
},
{
"epoch": 0.8209380668566987,
"grad_norm": 1.202141523361206,
"learning_rate": 3.586235489220564e-05,
"loss": 1.5331624031066895,
"step": 3960
},
{
"epoch": 0.8230111427831044,
"grad_norm": 1.2692559957504272,
"learning_rate": 3.5447761194029854e-05,
"loss": 1.4537653923034668,
"step": 3970
},
{
"epoch": 0.8250842187095102,
"grad_norm": 1.2438832521438599,
"learning_rate": 3.503316749585406e-05,
"loss": 1.4415541648864747,
"step": 3980
},
{
"epoch": 0.827157294635916,
"grad_norm": 1.1327502727508545,
"learning_rate": 3.4618573797678276e-05,
"loss": 1.4588258743286133,
"step": 3990
},
{
"epoch": 0.8292303705623219,
"grad_norm": 1.1558505296707153,
"learning_rate": 3.420398009950249e-05,
"loss": 1.449343776702881,
"step": 4000
},
{
"epoch": 0.8313034464887277,
"grad_norm": 1.146558165550232,
"learning_rate": 0.00011689469320066336,
"loss": 1.5187210083007812,
"step": 4010
},
{
"epoch": 0.8333765224151335,
"grad_norm": 1.154318928718567,
"learning_rate": 0.00011668739635157546,
"loss": 1.4608290672302247,
"step": 4020
},
{
"epoch": 0.8354495983415393,
"grad_norm": 1.3546490669250488,
"learning_rate": 0.00011648009950248757,
"loss": 1.5246877670288086,
"step": 4030
},
{
"epoch": 0.837522674267945,
"grad_norm": 1.1111828088760376,
"learning_rate": 0.00011627280265339968,
"loss": 1.3747848510742187,
"step": 4040
},
{
"epoch": 0.8395957501943508,
"grad_norm": 1.0534316301345825,
"learning_rate": 0.00011606550580431177,
"loss": 1.4230124473571777,
"step": 4050
},
{
"epoch": 0.8416688261207567,
"grad_norm": 1.1041001081466675,
"learning_rate": 0.00011585820895522388,
"loss": 1.5086248397827149,
"step": 4060
},
{
"epoch": 0.8437419020471625,
"grad_norm": 1.1933906078338623,
"learning_rate": 0.00011565091210613599,
"loss": 1.401639175415039,
"step": 4070
},
{
"epoch": 0.8458149779735683,
"grad_norm": 1.0717347860336304,
"learning_rate": 0.0001154436152570481,
"loss": 1.4698259353637695,
"step": 4080
},
{
"epoch": 0.8478880538999741,
"grad_norm": 0.955698549747467,
"learning_rate": 0.00011523631840796021,
"loss": 1.375072193145752,
"step": 4090
},
{
"epoch": 0.84996112982638,
"grad_norm": 1.2389768362045288,
"learning_rate": 0.00011502902155887232,
"loss": 1.379574966430664,
"step": 4100
},
{
"epoch": 0.8520342057527857,
"grad_norm": 1.278762698173523,
"learning_rate": 0.0001148217247097844,
"loss": 1.4193490982055663,
"step": 4110
},
{
"epoch": 0.8541072816791915,
"grad_norm": 1.1002545356750488,
"learning_rate": 0.00011461442786069652,
"loss": 1.3836387634277343,
"step": 4120
},
{
"epoch": 0.8561803576055973,
"grad_norm": 1.1513651609420776,
"learning_rate": 0.00011440713101160863,
"loss": 1.5280473709106446,
"step": 4130
},
{
"epoch": 0.8582534335320031,
"grad_norm": 0.9518608450889587,
"learning_rate": 0.00011419983416252074,
"loss": 1.4227890968322754,
"step": 4140
},
{
"epoch": 0.8603265094584089,
"grad_norm": 1.2304730415344238,
"learning_rate": 0.00011399253731343284,
"loss": 1.5353459358215331,
"step": 4150
},
{
"epoch": 0.8623995853848148,
"grad_norm": 1.1575299501419067,
"learning_rate": 0.00011378524046434495,
"loss": 1.362869644165039,
"step": 4160
},
{
"epoch": 0.8644726613112205,
"grad_norm": 1.1317075490951538,
"learning_rate": 0.00011357794361525704,
"loss": 1.4479413986206056,
"step": 4170
},
{
"epoch": 0.8665457372376263,
"grad_norm": 1.2143278121948242,
"learning_rate": 0.00011337064676616915,
"loss": 1.4484575271606446,
"step": 4180
},
{
"epoch": 0.8686188131640321,
"grad_norm": 1.1178550720214844,
"learning_rate": 0.00011316334991708126,
"loss": 1.5304969787597655,
"step": 4190
},
{
"epoch": 0.8706918890904379,
"grad_norm": 1.2381987571716309,
"learning_rate": 0.00011295605306799337,
"loss": 1.398491668701172,
"step": 4200
},
{
"epoch": 0.8727649650168438,
"grad_norm": 1.288427710533142,
"learning_rate": 0.00011274875621890549,
"loss": 1.481517219543457,
"step": 4210
},
{
"epoch": 0.8748380409432496,
"grad_norm": 1.2424670457839966,
"learning_rate": 0.0001125414593698176,
"loss": 1.4245829582214355,
"step": 4220
},
{
"epoch": 0.8769111168696554,
"grad_norm": 1.0825245380401611,
"learning_rate": 0.00011233416252072967,
"loss": 1.5052209854125977,
"step": 4230
},
{
"epoch": 0.8789841927960611,
"grad_norm": 1.1469889879226685,
"learning_rate": 0.0001121268656716418,
"loss": 1.432518768310547,
"step": 4240
},
{
"epoch": 0.8810572687224669,
"grad_norm": 1.1925941705703735,
"learning_rate": 0.0001119195688225539,
"loss": 1.460960865020752,
"step": 4250
},
{
"epoch": 0.8831303446488727,
"grad_norm": 1.0974957942962646,
"learning_rate": 0.00011171227197346601,
"loss": 1.4701688766479493,
"step": 4260
},
{
"epoch": 0.8852034205752786,
"grad_norm": 1.19304358959198,
"learning_rate": 0.00011150497512437812,
"loss": 1.4184856414794922,
"step": 4270
},
{
"epoch": 0.8872764965016844,
"grad_norm": 1.2636064291000366,
"learning_rate": 0.00011129767827529022,
"loss": 1.4618330001831055,
"step": 4280
},
{
"epoch": 0.8893495724280902,
"grad_norm": 1.1047471761703491,
"learning_rate": 0.00011109038142620232,
"loss": 1.4464719772338868,
"step": 4290
},
{
"epoch": 0.891422648354496,
"grad_norm": 1.1815509796142578,
"learning_rate": 0.00011088308457711442,
"loss": 1.4445013999938965,
"step": 4300
},
{
"epoch": 0.8934957242809017,
"grad_norm": 1.15771484375,
"learning_rate": 0.00011067578772802653,
"loss": 1.4567338943481445,
"step": 4310
},
{
"epoch": 0.8955688002073076,
"grad_norm": 1.094064474105835,
"learning_rate": 0.00011046849087893865,
"loss": 1.4705141067504883,
"step": 4320
},
{
"epoch": 0.8976418761337134,
"grad_norm": 1.1300767660140991,
"learning_rate": 0.00011026119402985076,
"loss": 1.3876087188720703,
"step": 4330
},
{
"epoch": 0.8997149520601192,
"grad_norm": 1.255149483680725,
"learning_rate": 0.00011005389718076287,
"loss": 1.3945764541625976,
"step": 4340
},
{
"epoch": 0.901788027986525,
"grad_norm": 1.1853265762329102,
"learning_rate": 0.00010984660033167495,
"loss": 1.4600879669189453,
"step": 4350
},
{
"epoch": 0.9038611039129308,
"grad_norm": 1.3775529861450195,
"learning_rate": 0.00010963930348258707,
"loss": 1.3908918380737305,
"step": 4360
},
{
"epoch": 0.9059341798393367,
"grad_norm": 1.178391695022583,
"learning_rate": 0.00010943200663349918,
"loss": 1.418002223968506,
"step": 4370
},
{
"epoch": 0.9080072557657424,
"grad_norm": 1.3530340194702148,
"learning_rate": 0.00010922470978441128,
"loss": 1.360008716583252,
"step": 4380
},
{
"epoch": 0.9100803316921482,
"grad_norm": 1.3142609596252441,
"learning_rate": 0.00010901741293532339,
"loss": 1.3430272102355958,
"step": 4390
},
{
"epoch": 0.912153407618554,
"grad_norm": 1.1689645051956177,
"learning_rate": 0.0001088101160862355,
"loss": 1.5633913040161134,
"step": 4400
},
{
"epoch": 0.9142264835449598,
"grad_norm": 1.2114495038986206,
"learning_rate": 0.00010860281923714759,
"loss": 1.4697650909423827,
"step": 4410
},
{
"epoch": 0.9162995594713657,
"grad_norm": 1.2312393188476562,
"learning_rate": 0.0001083955223880597,
"loss": 1.4190072059631347,
"step": 4420
},
{
"epoch": 0.9183726353977715,
"grad_norm": 1.3181791305541992,
"learning_rate": 0.0001081882255389718,
"loss": 1.3760835647583007,
"step": 4430
},
{
"epoch": 0.9204457113241773,
"grad_norm": 1.1612746715545654,
"learning_rate": 0.00010798092868988393,
"loss": 1.4294224739074708,
"step": 4440
},
{
"epoch": 0.922518787250583,
"grad_norm": 1.0622650384902954,
"learning_rate": 0.00010777363184079603,
"loss": 1.3979280471801758,
"step": 4450
},
{
"epoch": 0.9245918631769888,
"grad_norm": 1.1344703435897827,
"learning_rate": 0.00010756633499170814,
"loss": 1.432724380493164,
"step": 4460
},
{
"epoch": 0.9266649391033946,
"grad_norm": 1.3609569072723389,
"learning_rate": 0.00010735903814262023,
"loss": 1.4266067504882813,
"step": 4470
},
{
"epoch": 0.9287380150298005,
"grad_norm": 1.2317551374435425,
"learning_rate": 0.00010715174129353234,
"loss": 1.3972136497497558,
"step": 4480
},
{
"epoch": 0.9308110909562063,
"grad_norm": 1.1485719680786133,
"learning_rate": 0.00010694444444444445,
"loss": 1.4437122344970703,
"step": 4490
},
{
"epoch": 0.9328841668826121,
"grad_norm": 1.1265562772750854,
"learning_rate": 0.00010673714759535656,
"loss": 1.3951814651489258,
"step": 4500
},
{
"epoch": 0.9349572428090179,
"grad_norm": 1.2382943630218506,
"learning_rate": 0.00010652985074626866,
"loss": 1.47876033782959,
"step": 4510
},
{
"epoch": 0.9370303187354236,
"grad_norm": 1.1904460191726685,
"learning_rate": 0.00010632255389718078,
"loss": 1.5116327285766602,
"step": 4520
},
{
"epoch": 0.9391033946618295,
"grad_norm": 1.2784380912780762,
"learning_rate": 0.00010611525704809286,
"loss": 1.4328707695007323,
"step": 4530
},
{
"epoch": 0.9411764705882353,
"grad_norm": 1.1920058727264404,
"learning_rate": 0.00010590796019900497,
"loss": 1.4105209350585937,
"step": 4540
},
{
"epoch": 0.9432495465146411,
"grad_norm": 1.3006956577301025,
"learning_rate": 0.00010570066334991708,
"loss": 1.4261651039123535,
"step": 4550
},
{
"epoch": 0.9453226224410469,
"grad_norm": 1.1092578172683716,
"learning_rate": 0.0001054933665008292,
"loss": 1.3674508094787599,
"step": 4560
},
{
"epoch": 0.9473956983674527,
"grad_norm": 1.2003893852233887,
"learning_rate": 0.0001052860696517413,
"loss": 1.411275577545166,
"step": 4570
},
{
"epoch": 0.9494687742938586,
"grad_norm": 1.1365995407104492,
"learning_rate": 0.00010507877280265341,
"loss": 1.4263607025146485,
"step": 4580
},
{
"epoch": 0.9515418502202643,
"grad_norm": 1.1568050384521484,
"learning_rate": 0.0001048714759535655,
"loss": 1.4921070098876954,
"step": 4590
},
{
"epoch": 0.9536149261466701,
"grad_norm": 1.249493956565857,
"learning_rate": 0.00010466417910447761,
"loss": 1.4664103507995605,
"step": 4600
},
{
"epoch": 0.9556880020730759,
"grad_norm": 1.1990628242492676,
"learning_rate": 0.00010445688225538972,
"loss": 1.4022604942321777,
"step": 4610
},
{
"epoch": 0.9577610779994817,
"grad_norm": 1.198508858680725,
"learning_rate": 0.00010424958540630183,
"loss": 1.3575029373168945,
"step": 4620
},
{
"epoch": 0.9598341539258876,
"grad_norm": 1.4220664501190186,
"learning_rate": 0.00010404228855721393,
"loss": 1.506014347076416,
"step": 4630
},
{
"epoch": 0.9619072298522934,
"grad_norm": 1.1849695444107056,
"learning_rate": 0.00010383499170812606,
"loss": 1.3853163719177246,
"step": 4640
},
{
"epoch": 0.9639803057786992,
"grad_norm": 1.183779001235962,
"learning_rate": 0.00010362769485903814,
"loss": 1.378060531616211,
"step": 4650
},
{
"epoch": 0.9660533817051049,
"grad_norm": 1.159185767173767,
"learning_rate": 0.00010342039800995024,
"loss": 1.5042824745178223,
"step": 4660
},
{
"epoch": 0.9681264576315107,
"grad_norm": 1.225111484527588,
"learning_rate": 0.00010321310116086236,
"loss": 1.4617128372192383,
"step": 4670
},
{
"epoch": 0.9701995335579165,
"grad_norm": 1.1813310384750366,
"learning_rate": 0.00010300580431177447,
"loss": 1.4024372100830078,
"step": 4680
},
{
"epoch": 0.9722726094843224,
"grad_norm": 1.261643648147583,
"learning_rate": 0.00010279850746268658,
"loss": 1.360646915435791,
"step": 4690
},
{
"epoch": 0.9743456854107282,
"grad_norm": 1.0765501260757446,
"learning_rate": 0.00010259121061359869,
"loss": 1.3550106048583985,
"step": 4700
},
{
"epoch": 0.976418761337134,
"grad_norm": 1.1051616668701172,
"learning_rate": 0.00010238391376451078,
"loss": 1.395486831665039,
"step": 4710
},
{
"epoch": 0.9784918372635398,
"grad_norm": 1.2162697315216064,
"learning_rate": 0.00010217661691542289,
"loss": 1.4161664962768554,
"step": 4720
},
{
"epoch": 0.9805649131899455,
"grad_norm": 1.2934092283248901,
"learning_rate": 0.000101969320066335,
"loss": 1.4680258750915527,
"step": 4730
},
{
"epoch": 0.9826379891163514,
"grad_norm": 1.1861159801483154,
"learning_rate": 0.0001017620232172471,
"loss": 1.4943526268005372,
"step": 4740
},
{
"epoch": 0.9847110650427572,
"grad_norm": 1.2565635442733765,
"learning_rate": 0.00010155472636815921,
"loss": 1.4541117668151855,
"step": 4750
},
{
"epoch": 0.986784140969163,
"grad_norm": 1.38951575756073,
"learning_rate": 0.00010134742951907133,
"loss": 1.3703868865966797,
"step": 4760
},
{
"epoch": 0.9888572168955688,
"grad_norm": 1.1929153203964233,
"learning_rate": 0.00010114013266998341,
"loss": 1.4121877670288085,
"step": 4770
},
{
"epoch": 0.9909302928219746,
"grad_norm": 1.1389548778533936,
"learning_rate": 0.00010093283582089552,
"loss": 1.371178436279297,
"step": 4780
},
{
"epoch": 0.9930033687483804,
"grad_norm": 0.9990752339363098,
"learning_rate": 0.00010072553897180764,
"loss": 1.394187831878662,
"step": 4790
},
{
"epoch": 0.9950764446747862,
"grad_norm": 1.1418439149856567,
"learning_rate": 0.00010051824212271974,
"loss": 1.3987725257873536,
"step": 4800
},
{
"epoch": 0.9952837522674268,
"grad_norm": 1.3999691009521484,
"learning_rate": 0.00010049751243781096,
"loss": 1.56894850730896,
"step": 4801
},
{
"epoch": 0.9954910598600674,
"grad_norm": 1.1055763959884644,
"learning_rate": 9.535655058043119e-07,
"loss": 1.3579680919647217,
"step": 4802
},
{
"epoch": 0.995698367452708,
"grad_norm": 1.2521191835403442,
"learning_rate": 9.121061359867331e-07,
"loss": 1.6727182865142822,
"step": 4803
},
{
"epoch": 0.9959056750453485,
"grad_norm": 1.1679822206497192,
"learning_rate": 8.706467661691543e-07,
"loss": 1.5550488233566284,
"step": 4804
},
{
"epoch": 0.9961129826379891,
"grad_norm": 1.414447546005249,
"learning_rate": 8.291873963515756e-07,
"loss": 1.2959469556808472,
"step": 4805
},
{
"epoch": 0.9963202902306297,
"grad_norm": 1.2053686380386353,
"learning_rate": 7.877280265339968e-07,
"loss": 1.6059751510620117,
"step": 4806
},
{
"epoch": 0.9965275978232703,
"grad_norm": 1.3234610557556152,
"learning_rate": 7.462686567164179e-07,
"loss": 1.3246705532073975,
"step": 4807
},
{
"epoch": 0.9967349054159108,
"grad_norm": 1.275701880455017,
"learning_rate": 7.048092868988392e-07,
"loss": 1.456787109375,
"step": 4808
},
{
"epoch": 0.9969422130085515,
"grad_norm": 1.0960595607757568,
"learning_rate": 6.633499170812604e-07,
"loss": 1.6712136268615723,
"step": 4809
},
{
"epoch": 0.997149520601192,
"grad_norm": 1.1167244911193848,
"learning_rate": 6.218905472636816e-07,
"loss": 1.2081254720687866,
"step": 4810
},
{
"epoch": 0.9973568281938326,
"grad_norm": 1.1857860088348389,
"learning_rate": 5.804311774461028e-07,
"loss": 1.4792537689208984,
"step": 4811
},
{
"epoch": 0.9975641357864732,
"grad_norm": 1.1021283864974976,
"learning_rate": 5.38971807628524e-07,
"loss": 1.2333704233169556,
"step": 4812
},
{
"epoch": 0.9977714433791137,
"grad_norm": 1.0850183963775635,
"learning_rate": 4.975124378109453e-07,
"loss": 1.3954815864562988,
"step": 4813
},
{
"epoch": 0.9979787509717544,
"grad_norm": 1.2729003429412842,
"learning_rate": 4.5605306799336654e-07,
"loss": 1.5298696756362915,
"step": 4814
},
{
"epoch": 0.9981860585643949,
"grad_norm": 1.2735594511032104,
"learning_rate": 4.145936981757878e-07,
"loss": 1.3652870655059814,
"step": 4815
},
{
"epoch": 0.9983933661570356,
"grad_norm": 1.3938478231430054,
"learning_rate": 3.7313432835820895e-07,
"loss": 1.5470430850982666,
"step": 4816
},
{
"epoch": 0.9986006737496761,
"grad_norm": 1.2034921646118164,
"learning_rate": 3.316749585406302e-07,
"loss": 1.539093017578125,
"step": 4817
},
{
"epoch": 0.9988079813423166,
"grad_norm": 1.165834903717041,
"learning_rate": 2.902155887230514e-07,
"loss": 1.3995617628097534,
"step": 4818
},
{
"epoch": 0.9990152889349573,
"grad_norm": 1.1757153272628784,
"learning_rate": 2.4875621890547267e-07,
"loss": 1.4894084930419922,
"step": 4819
},
{
"epoch": 0.9992225965275978,
"grad_norm": 1.3626255989074707,
"learning_rate": 2.072968490878939e-07,
"loss": 1.5547418594360352,
"step": 4820
},
{
"epoch": 0.9994299041202384,
"grad_norm": 1.1462852954864502,
"learning_rate": 1.658374792703151e-07,
"loss": 1.5449320077896118,
"step": 4821
},
{
"epoch": 0.999637211712879,
"grad_norm": 1.122637152671814,
"learning_rate": 1.2437810945273633e-07,
"loss": 1.3381319046020508,
"step": 4822
},
{
"epoch": 0.9998445193055195,
"grad_norm": 1.303135871887207,
"learning_rate": 8.291873963515755e-08,
"loss": 1.4987614154815674,
"step": 4823
},
{
"epoch": 1.0,
"grad_norm": 1.30817711353302,
"learning_rate": 4.1459369817578775e-08,
"loss": 1.4066091775894165,
"step": 4824
}
],
"logging_steps": 1,
"max_steps": 4824,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 9.698521011117097e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}