{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.7104477611940299,
"eval_steps": 500,
"global_step": 238,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0029850746268656717,
"grad_norm": 8.253287036233855,
"learning_rate": 9.090909090909091e-07,
"loss": 2.2247,
"step": 1
},
{
"epoch": 0.005970149253731343,
"grad_norm": 7.395570676912434,
"learning_rate": 1.8181818181818183e-06,
"loss": 2.1255,
"step": 2
},
{
"epoch": 0.008955223880597015,
"grad_norm": 8.228093047465732,
"learning_rate": 2.7272727272727272e-06,
"loss": 2.1028,
"step": 3
},
{
"epoch": 0.011940298507462687,
"grad_norm": 7.134217440402169,
"learning_rate": 3.6363636363636366e-06,
"loss": 2.0029,
"step": 4
},
{
"epoch": 0.014925373134328358,
"grad_norm": 5.937798020763942,
"learning_rate": 4.5454545454545455e-06,
"loss": 1.907,
"step": 5
},
{
"epoch": 0.01791044776119403,
"grad_norm": 5.559897081671283,
"learning_rate": 5.4545454545454545e-06,
"loss": 1.859,
"step": 6
},
{
"epoch": 0.020895522388059702,
"grad_norm": 5.0306441137723485,
"learning_rate": 6.363636363636364e-06,
"loss": 1.8198,
"step": 7
},
{
"epoch": 0.023880597014925373,
"grad_norm": 3.514372505025641,
"learning_rate": 7.272727272727273e-06,
"loss": 1.5112,
"step": 8
},
{
"epoch": 0.026865671641791045,
"grad_norm": 3.1293116769998894,
"learning_rate": 8.181818181818183e-06,
"loss": 1.4412,
"step": 9
},
{
"epoch": 0.029850746268656716,
"grad_norm": 2.7656396768471456,
"learning_rate": 9.090909090909091e-06,
"loss": 1.4058,
"step": 10
},
{
"epoch": 0.03283582089552239,
"grad_norm": 5.352646410247892,
"learning_rate": 1e-05,
"loss": 1.4371,
"step": 11
},
{
"epoch": 0.03582089552238806,
"grad_norm": 4.381930202952016,
"learning_rate": 9.99976495753613e-06,
"loss": 1.4176,
"step": 12
},
{
"epoch": 0.03880597014925373,
"grad_norm": 3.1514253661202765,
"learning_rate": 9.999059852242508e-06,
"loss": 1.2973,
"step": 13
},
{
"epoch": 0.041791044776119404,
"grad_norm": 2.6485142204402696,
"learning_rate": 9.997884750411004e-06,
"loss": 1.1784,
"step": 14
},
{
"epoch": 0.04477611940298507,
"grad_norm": 2.7946518315041007,
"learning_rate": 9.996239762521152e-06,
"loss": 1.3108,
"step": 15
},
{
"epoch": 0.04776119402985075,
"grad_norm": 2.6114018043960003,
"learning_rate": 9.994125043229753e-06,
"loss": 1.102,
"step": 16
},
{
"epoch": 0.050746268656716415,
"grad_norm": 2.3027638181918495,
"learning_rate": 9.991540791356342e-06,
"loss": 1.0699,
"step": 17
},
{
"epoch": 0.05373134328358209,
"grad_norm": 2.204185879725156,
"learning_rate": 9.98848724986449e-06,
"loss": 1.1473,
"step": 18
},
{
"epoch": 0.056716417910447764,
"grad_norm": 2.2090761147091817,
"learning_rate": 9.98496470583896e-06,
"loss": 1.1816,
"step": 19
},
{
"epoch": 0.05970149253731343,
"grad_norm": 2.0223479706782737,
"learning_rate": 9.980973490458728e-06,
"loss": 1.1217,
"step": 20
},
{
"epoch": 0.0626865671641791,
"grad_norm": 2.080473927553376,
"learning_rate": 9.976513978965829e-06,
"loss": 1.0251,
"step": 21
},
{
"epoch": 0.06567164179104477,
"grad_norm": 2.3488846800450744,
"learning_rate": 9.971586590630094e-06,
"loss": 1.0279,
"step": 22
},
{
"epoch": 0.06865671641791045,
"grad_norm": 2.1532960090217212,
"learning_rate": 9.966191788709716e-06,
"loss": 1.0126,
"step": 23
},
{
"epoch": 0.07164179104477612,
"grad_norm": 2.055129377186085,
"learning_rate": 9.960330080407712e-06,
"loss": 1.0072,
"step": 24
},
{
"epoch": 0.07462686567164178,
"grad_norm": 2.156877426415811,
"learning_rate": 9.954002016824226e-06,
"loss": 1.0349,
"step": 25
},
{
"epoch": 0.07761194029850746,
"grad_norm": 1.9914221696004062,
"learning_rate": 9.947208192904722e-06,
"loss": 1.0244,
"step": 26
},
{
"epoch": 0.08059701492537313,
"grad_norm": 1.9887204088063186,
"learning_rate": 9.939949247384046e-06,
"loss": 0.9912,
"step": 27
},
{
"epoch": 0.08358208955223881,
"grad_norm": 2.110799590941847,
"learning_rate": 9.93222586272637e-06,
"loss": 1.0377,
"step": 28
},
{
"epoch": 0.08656716417910448,
"grad_norm": 2.0583073599588473,
"learning_rate": 9.924038765061042e-06,
"loss": 1.0139,
"step": 29
},
{
"epoch": 0.08955223880597014,
"grad_norm": 2.1723065119138196,
"learning_rate": 9.915388724114301e-06,
"loss": 0.9376,
"step": 30
},
{
"epoch": 0.09253731343283582,
"grad_norm": 2.064005981064233,
"learning_rate": 9.906276553136924e-06,
"loss": 1.0226,
"step": 31
},
{
"epoch": 0.0955223880597015,
"grad_norm": 2.122144912833922,
"learning_rate": 9.896703108827758e-06,
"loss": 0.9483,
"step": 32
},
{
"epoch": 0.09850746268656717,
"grad_norm": 2.1876975651489246,
"learning_rate": 9.886669291253178e-06,
"loss": 0.8942,
"step": 33
},
{
"epoch": 0.10149253731343283,
"grad_norm": 1.9606368757720636,
"learning_rate": 9.876176043762467e-06,
"loss": 0.885,
"step": 34
},
{
"epoch": 0.1044776119402985,
"grad_norm": 2.27926831852693,
"learning_rate": 9.86522435289912e-06,
"loss": 0.9491,
"step": 35
},
{
"epoch": 0.10746268656716418,
"grad_norm": 2.030550748336965,
"learning_rate": 9.853815248308101e-06,
"loss": 0.982,
"step": 36
},
{
"epoch": 0.11044776119402985,
"grad_norm": 2.0075365017841995,
"learning_rate": 9.841949802639031e-06,
"loss": 0.9843,
"step": 37
},
{
"epoch": 0.11343283582089553,
"grad_norm": 2.459727689278767,
"learning_rate": 9.829629131445342e-06,
"loss": 0.8796,
"step": 38
},
{
"epoch": 0.11641791044776119,
"grad_norm": 1.980360220017672,
"learning_rate": 9.816854393079402e-06,
"loss": 0.8793,
"step": 39
},
{
"epoch": 0.11940298507462686,
"grad_norm": 2.0803429090292433,
"learning_rate": 9.803626788583603e-06,
"loss": 0.8749,
"step": 40
},
{
"epoch": 0.12238805970149254,
"grad_norm": 2.0518013006773446,
"learning_rate": 9.789947561577445e-06,
"loss": 0.9123,
"step": 41
},
{
"epoch": 0.1253731343283582,
"grad_norm": 2.0868313477992695,
"learning_rate": 9.775817998140615e-06,
"loss": 0.884,
"step": 42
},
{
"epoch": 0.12835820895522387,
"grad_norm": 2.133757511141339,
"learning_rate": 9.761239426692077e-06,
"loss": 0.8846,
"step": 43
},
{
"epoch": 0.13134328358208955,
"grad_norm": 2.5042273900185332,
"learning_rate": 9.74621321786517e-06,
"loss": 0.9755,
"step": 44
},
{
"epoch": 0.13432835820895522,
"grad_norm": 1.9478417312433638,
"learning_rate": 9.730740784378755e-06,
"loss": 0.8857,
"step": 45
},
{
"epoch": 0.1373134328358209,
"grad_norm": 1.9676701142325428,
"learning_rate": 9.71482358090438e-06,
"loss": 0.8642,
"step": 46
},
{
"epoch": 0.14029850746268657,
"grad_norm": 1.920433325570768,
"learning_rate": 9.698463103929542e-06,
"loss": 0.8977,
"step": 47
},
{
"epoch": 0.14328358208955225,
"grad_norm": 1.9460499143315086,
"learning_rate": 9.681660891616967e-06,
"loss": 0.902,
"step": 48
},
{
"epoch": 0.14626865671641792,
"grad_norm": 1.968155074678708,
"learning_rate": 9.664418523660004e-06,
"loss": 0.9046,
"step": 49
},
{
"epoch": 0.14925373134328357,
"grad_norm": 1.9490820429685933,
"learning_rate": 9.646737621134112e-06,
"loss": 0.943,
"step": 50
},
{
"epoch": 0.15223880597014924,
"grad_norm": 2.08429080101051,
"learning_rate": 9.628619846344453e-06,
"loss": 0.9337,
"step": 51
},
{
"epoch": 0.15522388059701492,
"grad_norm": 1.930901797857114,
"learning_rate": 9.610066902669593e-06,
"loss": 0.8928,
"step": 52
},
{
"epoch": 0.1582089552238806,
"grad_norm": 1.9013217162350948,
"learning_rate": 9.591080534401371e-06,
"loss": 0.8905,
"step": 53
},
{
"epoch": 0.16119402985074627,
"grad_norm": 1.9982220077949842,
"learning_rate": 9.571662526580898e-06,
"loss": 0.8843,
"step": 54
},
{
"epoch": 0.16417910447761194,
"grad_norm": 1.9851369521318882,
"learning_rate": 9.551814704830734e-06,
"loss": 0.935,
"step": 55
},
{
"epoch": 0.16716417910447762,
"grad_norm": 2.1245353835937437,
"learning_rate": 9.531538935183252e-06,
"loss": 0.8666,
"step": 56
},
{
"epoch": 0.1701492537313433,
"grad_norm": 2.0723319083497245,
"learning_rate": 9.51083712390519e-06,
"loss": 0.9155,
"step": 57
},
{
"epoch": 0.17313432835820897,
"grad_norm": 1.7064471958774803,
"learning_rate": 9.48971121731844e-06,
"loss": 0.7976,
"step": 58
},
{
"epoch": 0.1761194029850746,
"grad_norm": 2.205553839755198,
"learning_rate": 9.468163201617063e-06,
"loss": 0.8575,
"step": 59
},
{
"epoch": 0.1791044776119403,
"grad_norm": 2.123341193411675,
"learning_rate": 9.446195102680531e-06,
"loss": 0.8779,
"step": 60
},
{
"epoch": 0.18208955223880596,
"grad_norm": 2.019140032242156,
"learning_rate": 9.423808985883289e-06,
"loss": 0.8087,
"step": 61
},
{
"epoch": 0.18507462686567164,
"grad_norm": 1.9693330173987202,
"learning_rate": 9.401006955900555e-06,
"loss": 0.8838,
"step": 62
},
{
"epoch": 0.1880597014925373,
"grad_norm": 1.7933631080982435,
"learning_rate": 9.377791156510456e-06,
"loss": 0.8881,
"step": 63
},
{
"epoch": 0.191044776119403,
"grad_norm": 2.064611639976522,
"learning_rate": 9.35416377039246e-06,
"loss": 0.9213,
"step": 64
},
{
"epoch": 0.19402985074626866,
"grad_norm": 2.108631144160596,
"learning_rate": 9.330127018922195e-06,
"loss": 0.8718,
"step": 65
},
{
"epoch": 0.19701492537313434,
"grad_norm": 2.0324645592765624,
"learning_rate": 9.305683161962569e-06,
"loss": 0.9008,
"step": 66
},
{
"epoch": 0.2,
"grad_norm": 2.433917861345567,
"learning_rate": 9.280834497651334e-06,
"loss": 0.8843,
"step": 67
},
{
"epoch": 0.20298507462686566,
"grad_norm": 2.1425405132153377,
"learning_rate": 9.255583362184998e-06,
"loss": 0.91,
"step": 68
},
{
"epoch": 0.20597014925373133,
"grad_norm": 2.1530253028764794,
"learning_rate": 9.229932129599206e-06,
"loss": 0.9011,
"step": 69
},
{
"epoch": 0.208955223880597,
"grad_norm": 1.883401707305491,
"learning_rate": 9.203883211545517e-06,
"loss": 0.8988,
"step": 70
},
{
"epoch": 0.21194029850746268,
"grad_norm": 1.9557201234597423,
"learning_rate": 9.177439057064684e-06,
"loss": 0.8674,
"step": 71
},
{
"epoch": 0.21492537313432836,
"grad_norm": 1.9754846823177397,
"learning_rate": 9.150602152356394e-06,
"loss": 0.8917,
"step": 72
},
{
"epoch": 0.21791044776119403,
"grad_norm": 2.1092131375976333,
"learning_rate": 9.123375020545534e-06,
"loss": 0.9798,
"step": 73
},
{
"epoch": 0.2208955223880597,
"grad_norm": 1.9481297510729838,
"learning_rate": 9.09576022144496e-06,
"loss": 0.8525,
"step": 74
},
{
"epoch": 0.22388059701492538,
"grad_norm": 2.1761431025018845,
"learning_rate": 9.067760351314838e-06,
"loss": 0.8726,
"step": 75
},
{
"epoch": 0.22686567164179106,
"grad_norm": 2.2204565282035276,
"learning_rate": 9.039378042618556e-06,
"loss": 1.0044,
"step": 76
},
{
"epoch": 0.2298507462686567,
"grad_norm": 1.9277579355394385,
"learning_rate": 9.01061596377522e-06,
"loss": 0.8209,
"step": 77
},
{
"epoch": 0.23283582089552238,
"grad_norm": 2.0131002774833076,
"learning_rate": 8.981476818908778e-06,
"loss": 0.9414,
"step": 78
},
{
"epoch": 0.23582089552238805,
"grad_norm": 1.8291261816939248,
"learning_rate": 8.951963347593797e-06,
"loss": 0.832,
"step": 79
},
{
"epoch": 0.23880597014925373,
"grad_norm": 2.174063739192292,
"learning_rate": 8.92207832459788e-06,
"loss": 0.9025,
"step": 80
},
{
"epoch": 0.2417910447761194,
"grad_norm": 2.1708714478771687,
"learning_rate": 8.891824559620801e-06,
"loss": 0.8268,
"step": 81
},
{
"epoch": 0.24477611940298508,
"grad_norm": 2.027241742869845,
"learning_rate": 8.861204897030346e-06,
"loss": 0.7233,
"step": 82
},
{
"epoch": 0.24776119402985075,
"grad_norm": 1.9258937377265744,
"learning_rate": 8.83022221559489e-06,
"loss": 0.7375,
"step": 83
},
{
"epoch": 0.2507462686567164,
"grad_norm": 1.9152686230971898,
"learning_rate": 8.798879428212748e-06,
"loss": 0.8811,
"step": 84
},
{
"epoch": 0.2537313432835821,
"grad_norm": 1.7807746386758827,
"learning_rate": 8.767179481638303e-06,
"loss": 0.8278,
"step": 85
},
{
"epoch": 0.25671641791044775,
"grad_norm": 1.8289188389465045,
"learning_rate": 8.735125356204982e-06,
"loss": 0.8347,
"step": 86
},
{
"epoch": 0.25970149253731345,
"grad_norm": 2.245431341266184,
"learning_rate": 8.702720065545024e-06,
"loss": 0.7732,
"step": 87
},
{
"epoch": 0.2626865671641791,
"grad_norm": 1.958611723882418,
"learning_rate": 8.669966656306176e-06,
"loss": 0.7941,
"step": 88
},
{
"epoch": 0.2656716417910448,
"grad_norm": 1.9764966388329304,
"learning_rate": 8.636868207865244e-06,
"loss": 0.8525,
"step": 89
},
{
"epoch": 0.26865671641791045,
"grad_norm": 2.1735209259103483,
"learning_rate": 8.603427832038574e-06,
"loss": 0.8746,
"step": 90
},
{
"epoch": 0.2716417910447761,
"grad_norm": 2.0038856668883813,
"learning_rate": 8.569648672789496e-06,
"loss": 0.8601,
"step": 91
},
{
"epoch": 0.2746268656716418,
"grad_norm": 2.055939688066048,
"learning_rate": 8.535533905932739e-06,
"loss": 0.85,
"step": 92
},
{
"epoch": 0.27761194029850744,
"grad_norm": 1.9848488069313877,
"learning_rate": 8.501086738835843e-06,
"loss": 0.8632,
"step": 93
},
{
"epoch": 0.28059701492537314,
"grad_norm": 2.113603493312776,
"learning_rate": 8.466310410117622e-06,
"loss": 0.8752,
"step": 94
},
{
"epoch": 0.2835820895522388,
"grad_norm": 2.1479673113866333,
"learning_rate": 8.43120818934367e-06,
"loss": 0.8588,
"step": 95
},
{
"epoch": 0.2865671641791045,
"grad_norm": 2.053881585055401,
"learning_rate": 8.395783376718967e-06,
"loss": 0.8122,
"step": 96
},
{
"epoch": 0.28955223880597014,
"grad_norm": 1.9411012651755073,
"learning_rate": 8.360039302777614e-06,
"loss": 0.8059,
"step": 97
},
{
"epoch": 0.29253731343283584,
"grad_norm": 1.8508350536791303,
"learning_rate": 8.323979328069689e-06,
"loss": 0.833,
"step": 98
},
{
"epoch": 0.2955223880597015,
"grad_norm": 1.8935253815844801,
"learning_rate": 8.28760684284532e-06,
"loss": 0.8401,
"step": 99
},
{
"epoch": 0.29850746268656714,
"grad_norm": 2.2781390311354577,
"learning_rate": 8.25092526673592e-06,
"loss": 0.909,
"step": 100
},
{
"epoch": 0.30149253731343284,
"grad_norm": 2.0985691350237876,
"learning_rate": 8.213938048432697e-06,
"loss": 0.8366,
"step": 101
},
{
"epoch": 0.3044776119402985,
"grad_norm": 2.0596354416831835,
"learning_rate": 8.176648665362426e-06,
"loss": 0.7292,
"step": 102
},
{
"epoch": 0.3074626865671642,
"grad_norm": 1.9898953284718084,
"learning_rate": 8.139060623360494e-06,
"loss": 0.8331,
"step": 103
},
{
"epoch": 0.31044776119402984,
"grad_norm": 2.1541238474940476,
"learning_rate": 8.101177456341301e-06,
"loss": 0.8119,
"step": 104
},
{
"epoch": 0.31343283582089554,
"grad_norm": 1.945555185400502,
"learning_rate": 8.063002725966014e-06,
"loss": 0.817,
"step": 105
},
{
"epoch": 0.3164179104477612,
"grad_norm": 1.7990902555417547,
"learning_rate": 8.024540021307709e-06,
"loss": 0.7763,
"step": 106
},
{
"epoch": 0.3194029850746269,
"grad_norm": 1.9977298309774547,
"learning_rate": 7.985792958513932e-06,
"loss": 0.857,
"step": 107
},
{
"epoch": 0.32238805970149254,
"grad_norm": 1.8852500490644626,
"learning_rate": 7.946765180466725e-06,
"loss": 0.7729,
"step": 108
},
{
"epoch": 0.3253731343283582,
"grad_norm": 2.076072697679506,
"learning_rate": 7.907460356440133e-06,
"loss": 0.8789,
"step": 109
},
{
"epoch": 0.3283582089552239,
"grad_norm": 1.9583549363047221,
"learning_rate": 7.86788218175523e-06,
"loss": 0.824,
"step": 110
},
{
"epoch": 0.33134328358208953,
"grad_norm": 1.8457221620577013,
"learning_rate": 7.828034377432694e-06,
"loss": 0.8298,
"step": 111
},
{
"epoch": 0.33432835820895523,
"grad_norm": 1.9836364032355813,
"learning_rate": 7.787920689842965e-06,
"loss": 0.8754,
"step": 112
},
{
"epoch": 0.3373134328358209,
"grad_norm": 1.9566209100212486,
"learning_rate": 7.747544890354031e-06,
"loss": 0.8364,
"step": 113
},
{
"epoch": 0.3402985074626866,
"grad_norm": 1.784255320047874,
"learning_rate": 7.706910774976849e-06,
"loss": 0.7379,
"step": 114
},
{
"epoch": 0.34328358208955223,
"grad_norm": 2.028920304841314,
"learning_rate": 7.666022164008458e-06,
"loss": 0.8175,
"step": 115
},
{
"epoch": 0.34626865671641793,
"grad_norm": 1.8633625964435614,
"learning_rate": 7.624882901672801e-06,
"loss": 0.8038,
"step": 116
},
{
"epoch": 0.3492537313432836,
"grad_norm": 2.0316374775972723,
"learning_rate": 7.5834968557593155e-06,
"loss": 0.8876,
"step": 117
},
{
"epoch": 0.3522388059701492,
"grad_norm": 1.9425263272714322,
"learning_rate": 7.541867917259278e-06,
"loss": 0.9399,
"step": 118
},
{
"epoch": 0.35522388059701493,
"grad_norm": 1.9570130825151233,
"learning_rate": 7.500000000000001e-06,
"loss": 0.7776,
"step": 119
},
{
"epoch": 0.3582089552238806,
"grad_norm": 1.9649795879633638,
"learning_rate": 7.457897040276853e-06,
"loss": 0.8869,
"step": 120
},
{
"epoch": 0.3611940298507463,
"grad_norm": 1.8568802293506739,
"learning_rate": 7.415562996483193e-06,
"loss": 0.8187,
"step": 121
},
{
"epoch": 0.3641791044776119,
"grad_norm": 1.978910446313143,
"learning_rate": 7.373001848738203e-06,
"loss": 0.9066,
"step": 122
},
{
"epoch": 0.36716417910447763,
"grad_norm": 1.8873874119153988,
"learning_rate": 7.330217598512696e-06,
"loss": 0.7271,
"step": 123
},
{
"epoch": 0.3701492537313433,
"grad_norm": 1.888623084115613,
"learning_rate": 7.2872142682529045e-06,
"loss": 0.7513,
"step": 124
},
{
"epoch": 0.373134328358209,
"grad_norm": 1.8720026536315466,
"learning_rate": 7.243995901002312e-06,
"loss": 0.8024,
"step": 125
},
{
"epoch": 0.3761194029850746,
"grad_norm": 1.8390581271914446,
"learning_rate": 7.200566560021525e-06,
"loss": 0.7723,
"step": 126
},
{
"epoch": 0.37910447761194027,
"grad_norm": 2.0123305936558484,
"learning_rate": 7.156930328406268e-06,
"loss": 0.829,
"step": 127
},
{
"epoch": 0.382089552238806,
"grad_norm": 2.0041614392292373,
"learning_rate": 7.113091308703498e-06,
"loss": 0.8019,
"step": 128
},
{
"epoch": 0.3850746268656716,
"grad_norm": 2.270729693984615,
"learning_rate": 7.069053622525697e-06,
"loss": 0.9292,
"step": 129
},
{
"epoch": 0.3880597014925373,
"grad_norm": 1.9441832516476927,
"learning_rate": 7.0248214101633685e-06,
"loss": 0.7743,
"step": 130
},
{
"epoch": 0.39104477611940297,
"grad_norm": 2.009374270876549,
"learning_rate": 6.980398830195785e-06,
"loss": 0.9518,
"step": 131
},
{
"epoch": 0.3940298507462687,
"grad_norm": 2.005999418159697,
"learning_rate": 6.9357900591000034e-06,
"loss": 0.8366,
"step": 132
},
{
"epoch": 0.3970149253731343,
"grad_norm": 2.169897702526253,
"learning_rate": 6.890999290858213e-06,
"loss": 0.9083,
"step": 133
},
{
"epoch": 0.4,
"grad_norm": 1.949356318876888,
"learning_rate": 6.8460307365634225e-06,
"loss": 0.776,
"step": 134
},
{
"epoch": 0.40298507462686567,
"grad_norm": 1.9220482972951043,
"learning_rate": 6.800888624023552e-06,
"loss": 0.8621,
"step": 135
},
{
"epoch": 0.4059701492537313,
"grad_norm": 2.0267035147753294,
"learning_rate": 6.755577197363945e-06,
"loss": 0.7289,
"step": 136
},
{
"epoch": 0.408955223880597,
"grad_norm": 2.0847187410786256,
"learning_rate": 6.710100716628345e-06,
"loss": 0.7804,
"step": 137
},
{
"epoch": 0.41194029850746267,
"grad_norm": 1.9579240662128552,
"learning_rate": 6.6644634573783825e-06,
"loss": 0.8094,
"step": 138
},
{
"epoch": 0.41492537313432837,
"grad_norm": 1.916563012250393,
"learning_rate": 6.618669710291607e-06,
"loss": 0.7142,
"step": 139
},
{
"epoch": 0.417910447761194,
"grad_norm": 1.870901157605362,
"learning_rate": 6.572723780758069e-06,
"loss": 0.813,
"step": 140
},
{
"epoch": 0.4208955223880597,
"grad_norm": 2.098672375632272,
"learning_rate": 6.526629988475567e-06,
"loss": 0.8118,
"step": 141
},
{
"epoch": 0.42388059701492536,
"grad_norm": 1.8621921035189892,
"learning_rate": 6.4803926670435e-06,
"loss": 0.8365,
"step": 142
},
{
"epoch": 0.42686567164179107,
"grad_norm": 2.1971062225166174,
"learning_rate": 6.434016163555452e-06,
"loss": 0.8093,
"step": 143
},
{
"epoch": 0.4298507462686567,
"grad_norm": 1.8982557358873016,
"learning_rate": 6.387504838190479e-06,
"loss": 0.7273,
"step": 144
},
{
"epoch": 0.43283582089552236,
"grad_norm": 1.9777996729800833,
"learning_rate": 6.340863063803187e-06,
"loss": 0.7419,
"step": 145
},
{
"epoch": 0.43582089552238806,
"grad_norm": 1.8932150919381014,
"learning_rate": 6.294095225512604e-06,
"loss": 0.8438,
"step": 146
},
{
"epoch": 0.4388059701492537,
"grad_norm": 2.0407591298114194,
"learning_rate": 6.247205720289907e-06,
"loss": 0.9269,
"step": 147
},
{
"epoch": 0.4417910447761194,
"grad_norm": 1.9531499021849996,
"learning_rate": 6.2001989565450305e-06,
"loss": 0.7671,
"step": 148
},
{
"epoch": 0.44477611940298506,
"grad_norm": 1.8831951496467338,
"learning_rate": 6.153079353712201e-06,
"loss": 0.7823,
"step": 149
},
{
"epoch": 0.44776119402985076,
"grad_norm": 2.0489484171650854,
"learning_rate": 6.105851341834439e-06,
"loss": 0.7584,
"step": 150
},
{
"epoch": 0.4507462686567164,
"grad_norm": 1.87067969405706,
"learning_rate": 6.058519361147055e-06,
"loss": 0.7742,
"step": 151
},
{
"epoch": 0.4537313432835821,
"grad_norm": 2.0213113461953496,
"learning_rate": 6.011087861660191e-06,
"loss": 0.7479,
"step": 152
},
{
"epoch": 0.45671641791044776,
"grad_norm": 1.7493168229640468,
"learning_rate": 5.9635613027404495e-06,
"loss": 0.7426,
"step": 153
},
{
"epoch": 0.4597014925373134,
"grad_norm": 1.9754114531221796,
"learning_rate": 5.915944152691634e-06,
"loss": 0.7226,
"step": 154
},
{
"epoch": 0.4626865671641791,
"grad_norm": 1.948171207980199,
"learning_rate": 5.8682408883346535e-06,
"loss": 0.7897,
"step": 155
},
{
"epoch": 0.46567164179104475,
"grad_norm": 1.9776492843993512,
"learning_rate": 5.820455994586621e-06,
"loss": 0.7434,
"step": 156
},
{
"epoch": 0.46865671641791046,
"grad_norm": 2.20247547233425,
"learning_rate": 5.772593964039203e-06,
"loss": 0.9538,
"step": 157
},
{
"epoch": 0.4716417910447761,
"grad_norm": 1.9889842141590872,
"learning_rate": 5.724659296536234e-06,
"loss": 0.7595,
"step": 158
},
{
"epoch": 0.4746268656716418,
"grad_norm": 1.829000182022995,
"learning_rate": 5.6766564987506564e-06,
"loss": 0.7261,
"step": 159
},
{
"epoch": 0.47761194029850745,
"grad_norm": 2.037463529529523,
"learning_rate": 5.628590083760815e-06,
"loss": 0.8,
"step": 160
},
{
"epoch": 0.48059701492537316,
"grad_norm": 2.022353172654603,
"learning_rate": 5.5804645706261515e-06,
"loss": 0.8731,
"step": 161
},
{
"epoch": 0.4835820895522388,
"grad_norm": 2.136106832359713,
"learning_rate": 5.532284483962341e-06,
"loss": 0.7802,
"step": 162
},
{
"epoch": 0.48656716417910445,
"grad_norm": 2.069422303756225,
"learning_rate": 5.484054353515896e-06,
"loss": 0.7963,
"step": 163
},
{
"epoch": 0.48955223880597015,
"grad_norm": 2.0794925348964646,
"learning_rate": 5.435778713738292e-06,
"loss": 0.8415,
"step": 164
},
{
"epoch": 0.4925373134328358,
"grad_norm": 1.91410514628332,
"learning_rate": 5.387462103359655e-06,
"loss": 0.7799,
"step": 165
},
{
"epoch": 0.4955223880597015,
"grad_norm": 1.9422373077070176,
"learning_rate": 5.339109064962047e-06,
"loss": 0.6946,
"step": 166
},
{
"epoch": 0.49850746268656715,
"grad_norm": 2.0654078417044315,
"learning_rate": 5.290724144552379e-06,
"loss": 0.6903,
"step": 167
},
{
"epoch": 0.5014925373134328,
"grad_norm": 2.0552919898301014,
"learning_rate": 5.242311891135016e-06,
"loss": 0.837,
"step": 168
},
{
"epoch": 0.5044776119402985,
"grad_norm": 1.831897299613568,
"learning_rate": 5.193876856284085e-06,
"loss": 0.7196,
"step": 169
},
{
"epoch": 0.5074626865671642,
"grad_norm": 2.0213876599750513,
"learning_rate": 5.145423593715558e-06,
"loss": 0.8028,
"step": 170
},
{
"epoch": 0.5104477611940299,
"grad_norm": 1.947091036424257,
"learning_rate": 5.096956658859122e-06,
"loss": 0.764,
"step": 171
},
{
"epoch": 0.5134328358208955,
"grad_norm": 2.115215847395665,
"learning_rate": 5.048480608429893e-06,
"loss": 0.714,
"step": 172
},
{
"epoch": 0.5164179104477612,
"grad_norm": 1.9050203954050413,
"learning_rate": 5e-06,
"loss": 0.7731,
"step": 173
},
{
"epoch": 0.5194029850746269,
"grad_norm": 1.7649786449685814,
"learning_rate": 4.951519391570108e-06,
"loss": 0.7809,
"step": 174
},
{
"epoch": 0.5223880597014925,
"grad_norm": 1.8465309022610246,
"learning_rate": 4.903043341140879e-06,
"loss": 0.7042,
"step": 175
},
{
"epoch": 0.5253731343283582,
"grad_norm": 2.061489348895462,
"learning_rate": 4.854576406284443e-06,
"loss": 0.6684,
"step": 176
},
{
"epoch": 0.5283582089552239,
"grad_norm": 1.9338806633059145,
"learning_rate": 4.806123143715916e-06,
"loss": 0.7897,
"step": 177
},
{
"epoch": 0.5313432835820896,
"grad_norm": 1.9624408870954388,
"learning_rate": 4.7576881088649865e-06,
"loss": 0.8011,
"step": 178
},
{
"epoch": 0.5343283582089552,
"grad_norm": 2.0011140432644607,
"learning_rate": 4.7092758554476215e-06,
"loss": 0.7884,
"step": 179
},
{
"epoch": 0.5373134328358209,
"grad_norm": 1.7743518925526351,
"learning_rate": 4.660890935037954e-06,
"loss": 0.7706,
"step": 180
},
{
"epoch": 0.5402985074626866,
"grad_norm": 1.8859032236696416,
"learning_rate": 4.6125378966403465e-06,
"loss": 0.757,
"step": 181
},
{
"epoch": 0.5432835820895522,
"grad_norm": 1.8534543798073886,
"learning_rate": 4.564221286261709e-06,
"loss": 0.7555,
"step": 182
},
{
"epoch": 0.5462686567164179,
"grad_norm": 2.076412269548539,
"learning_rate": 4.515945646484105e-06,
"loss": 0.8099,
"step": 183
},
{
"epoch": 0.5492537313432836,
"grad_norm": 1.7634075976440318,
"learning_rate": 4.467715516037659e-06,
"loss": 0.8092,
"step": 184
},
{
"epoch": 0.5522388059701493,
"grad_norm": 2.020399102687769,
"learning_rate": 4.4195354293738484e-06,
"loss": 0.8419,
"step": 185
},
{
"epoch": 0.5552238805970149,
"grad_norm": 2.092605975736391,
"learning_rate": 4.371409916239188e-06,
"loss": 0.7663,
"step": 186
},
{
"epoch": 0.5582089552238806,
"grad_norm": 2.1316734178623586,
"learning_rate": 4.323343501249346e-06,
"loss": 0.7356,
"step": 187
},
{
"epoch": 0.5611940298507463,
"grad_norm": 1.9603238608445972,
"learning_rate": 4.275340703463767e-06,
"loss": 0.6401,
"step": 188
},
{
"epoch": 0.564179104477612,
"grad_norm": 2.0131339419337175,
"learning_rate": 4.227406035960798e-06,
"loss": 0.797,
"step": 189
},
{
"epoch": 0.5671641791044776,
"grad_norm": 1.8445187921680968,
"learning_rate": 4.17954400541338e-06,
"loss": 0.7394,
"step": 190
},
{
"epoch": 0.5701492537313433,
"grad_norm": 1.8780187941749311,
"learning_rate": 4.131759111665349e-06,
"loss": 0.7891,
"step": 191
},
{
"epoch": 0.573134328358209,
"grad_norm": 2.094133953929907,
"learning_rate": 4.084055847308367e-06,
"loss": 0.7553,
"step": 192
},
{
"epoch": 0.5761194029850746,
"grad_norm": 1.8929679477011396,
"learning_rate": 4.036438697259551e-06,
"loss": 0.768,
"step": 193
},
{
"epoch": 0.5791044776119403,
"grad_norm": 2.0552239533604344,
"learning_rate": 3.988912138339812e-06,
"loss": 0.7863,
"step": 194
},
{
"epoch": 0.582089552238806,
"grad_norm": 2.121613918192119,
"learning_rate": 3.941480638852948e-06,
"loss": 0.7859,
"step": 195
},
{
"epoch": 0.5850746268656717,
"grad_norm": 1.9339648420487403,
"learning_rate": 3.894148658165562e-06,
"loss": 0.7675,
"step": 196
},
{
"epoch": 0.5880597014925373,
"grad_norm": 1.9640722713060483,
"learning_rate": 3.8469206462878e-06,
"loss": 0.7666,
"step": 197
},
{
"epoch": 0.591044776119403,
"grad_norm": 2.1219795442875182,
"learning_rate": 3.7998010434549716e-06,
"loss": 0.84,
"step": 198
},
{
"epoch": 0.5940298507462687,
"grad_norm": 1.9343866437274984,
"learning_rate": 3.752794279710094e-06,
"loss": 0.7326,
"step": 199
},
{
"epoch": 0.5970149253731343,
"grad_norm": 1.8202340923662594,
"learning_rate": 3.705904774487396e-06,
"loss": 0.7718,
"step": 200
},
{
"epoch": 0.6,
"grad_norm": 2.0809715823653363,
"learning_rate": 3.6591369361968127e-06,
"loss": 0.7816,
"step": 201
},
{
"epoch": 0.6029850746268657,
"grad_norm": 1.9308006072395687,
"learning_rate": 3.6124951618095224e-06,
"loss": 0.7459,
"step": 202
},
{
"epoch": 0.6059701492537314,
"grad_norm": 1.8429870643368742,
"learning_rate": 3.5659838364445505e-06,
"loss": 0.7512,
"step": 203
},
{
"epoch": 0.608955223880597,
"grad_norm": 2.1077065827924018,
"learning_rate": 3.519607332956502e-06,
"loss": 0.8128,
"step": 204
},
{
"epoch": 0.6119402985074627,
"grad_norm": 2.1055734923288476,
"learning_rate": 3.473370011524435e-06,
"loss": 0.8685,
"step": 205
},
{
"epoch": 0.6149253731343284,
"grad_norm": 1.834560299795974,
"learning_rate": 3.427276219241933e-06,
"loss": 0.7197,
"step": 206
},
{
"epoch": 0.6179104477611941,
"grad_norm": 1.8698556901460164,
"learning_rate": 3.3813302897083955e-06,
"loss": 0.819,
"step": 207
},
{
"epoch": 0.6208955223880597,
"grad_norm": 1.9594082131377109,
"learning_rate": 3.335536542621617e-06,
"loss": 0.7508,
"step": 208
},
{
"epoch": 0.6238805970149254,
"grad_norm": 2.035843145682815,
"learning_rate": 3.289899283371657e-06,
"loss": 0.7994,
"step": 209
},
{
"epoch": 0.6268656716417911,
"grad_norm": 2.114101737166481,
"learning_rate": 3.244422802636057e-06,
"loss": 0.8318,
"step": 210
},
{
"epoch": 0.6298507462686567,
"grad_norm": 2.029674065408155,
"learning_rate": 3.1991113759764493e-06,
"loss": 0.7739,
"step": 211
},
{
"epoch": 0.6328358208955224,
"grad_norm": 1.84191088358988,
"learning_rate": 3.1539692634365788e-06,
"loss": 0.7531,
"step": 212
},
{
"epoch": 0.6358208955223881,
"grad_norm": 2.0097456357386334,
"learning_rate": 3.1090007091417884e-06,
"loss": 0.6753,
"step": 213
},
{
"epoch": 0.6388059701492538,
"grad_norm": 1.9742568670170066,
"learning_rate": 3.0642099408999982e-06,
"loss": 0.8277,
"step": 214
},
{
"epoch": 0.6417910447761194,
"grad_norm": 2.372653630330257,
"learning_rate": 3.019601169804216e-06,
"loss": 0.8008,
"step": 215
},
{
"epoch": 0.6447761194029851,
"grad_norm": 2.09632486042523,
"learning_rate": 2.975178589836632e-06,
"loss": 0.7624,
"step": 216
},
{
"epoch": 0.6477611940298508,
"grad_norm": 1.803736481539904,
"learning_rate": 2.9309463774743047e-06,
"loss": 0.8489,
"step": 217
},
{
"epoch": 0.6507462686567164,
"grad_norm": 1.7954652148600287,
"learning_rate": 2.886908691296504e-06,
"loss": 0.7625,
"step": 218
},
{
"epoch": 0.6537313432835821,
"grad_norm": 1.888642430199233,
"learning_rate": 2.843069671593734e-06,
"loss": 0.7789,
"step": 219
},
{
"epoch": 0.6567164179104478,
"grad_norm": 1.9044723977602214,
"learning_rate": 2.7994334399784773e-06,
"loss": 0.7374,
"step": 220
},
{
"epoch": 0.6597014925373135,
"grad_norm": 1.8992523202945966,
"learning_rate": 2.7560040989976894e-06,
"loss": 0.7348,
"step": 221
},
{
"epoch": 0.6626865671641791,
"grad_norm": 1.8781569606708384,
"learning_rate": 2.7127857317470967e-06,
"loss": 0.7866,
"step": 222
},
{
"epoch": 0.6656716417910448,
"grad_norm": 2.0267725086442137,
"learning_rate": 2.6697824014873076e-06,
"loss": 0.7339,
"step": 223
},
{
"epoch": 0.6686567164179105,
"grad_norm": 1.7940944553566613,
"learning_rate": 2.626998151261798e-06,
"loss": 0.6928,
"step": 224
},
{
"epoch": 0.6716417910447762,
"grad_norm": 1.8272008842472593,
"learning_rate": 2.5844370035168077e-06,
"loss": 0.7085,
"step": 225
},
{
"epoch": 0.6746268656716418,
"grad_norm": 1.970820534028099,
"learning_rate": 2.5421029597231476e-06,
"loss": 0.7771,
"step": 226
},
{
"epoch": 0.6776119402985075,
"grad_norm": 1.9871980755676482,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.753,
"step": 227
},
{
"epoch": 0.6805970149253732,
"grad_norm": 2.034448775258997,
"learning_rate": 2.458132082740724e-06,
"loss": 0.7704,
"step": 228
},
{
"epoch": 0.6835820895522388,
"grad_norm": 2.0666793037099973,
"learning_rate": 2.4165031442406857e-06,
"loss": 0.7449,
"step": 229
},
{
"epoch": 0.6865671641791045,
"grad_norm": 1.9834975007043607,
"learning_rate": 2.3751170983272e-06,
"loss": 0.8116,
"step": 230
},
{
"epoch": 0.6895522388059702,
"grad_norm": 1.9415575425204292,
"learning_rate": 2.333977835991545e-06,
"loss": 0.7206,
"step": 231
},
{
"epoch": 0.6925373134328359,
"grad_norm": 1.9953311575761847,
"learning_rate": 2.293089225023152e-06,
"loss": 0.8639,
"step": 232
},
{
"epoch": 0.6955223880597015,
"grad_norm": 2.0209521491394415,
"learning_rate": 2.2524551096459703e-06,
"loss": 0.766,
"step": 233
},
{
"epoch": 0.6985074626865672,
"grad_norm": 2.039434830152999,
"learning_rate": 2.2120793101570366e-06,
"loss": 0.7189,
"step": 234
},
{
"epoch": 0.7014925373134329,
"grad_norm": 1.933645089749101,
"learning_rate": 2.171965622567308e-06,
"loss": 0.7954,
"step": 235
},
{
"epoch": 0.7044776119402985,
"grad_norm": 1.9859660370920094,
"learning_rate": 2.132117818244771e-06,
"loss": 0.7286,
"step": 236
},
{
"epoch": 0.7074626865671642,
"grad_norm": 1.919442506011092,
"learning_rate": 2.0925396435598665e-06,
"loss": 0.7344,
"step": 237
},
{
"epoch": 0.7104477611940299,
"grad_norm": 2.052417792417508,
"learning_rate": 2.053234819533276e-06,
"loss": 0.7068,
"step": 238
}
],
"logging_steps": 1.0,
"max_steps": 335,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 17,
"total_flos": 19855559786496.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}