{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.32786885245901637,
"eval_steps": 500,
"global_step": 100,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003278688524590164,
"grad_norm": 58.999969482421875,
"learning_rate": 0.0,
"loss": 3.8125,
"step": 1
},
{
"epoch": 0.006557377049180328,
"grad_norm": 56.5999870300293,
"learning_rate": 3.5714285714285716e-07,
"loss": 3.8984,
"step": 2
},
{
"epoch": 0.009836065573770493,
"grad_norm": 58.442317962646484,
"learning_rate": 7.142857142857143e-07,
"loss": 3.8359,
"step": 3
},
{
"epoch": 0.013114754098360656,
"grad_norm": 59.13441467285156,
"learning_rate": 1.0714285714285714e-06,
"loss": 3.7422,
"step": 4
},
{
"epoch": 0.01639344262295082,
"grad_norm": 59.55280685424805,
"learning_rate": 1.4285714285714286e-06,
"loss": 3.7383,
"step": 5
},
{
"epoch": 0.019672131147540985,
"grad_norm": 57.113956451416016,
"learning_rate": 1.7857142857142859e-06,
"loss": 3.8281,
"step": 6
},
{
"epoch": 0.022950819672131147,
"grad_norm": 44.69753646850586,
"learning_rate": 2.1428571428571427e-06,
"loss": 3.6562,
"step": 7
},
{
"epoch": 0.02622950819672131,
"grad_norm": 42.74599075317383,
"learning_rate": 2.5e-06,
"loss": 3.668,
"step": 8
},
{
"epoch": 0.029508196721311476,
"grad_norm": 27.557815551757812,
"learning_rate": 2.8571428571428573e-06,
"loss": 3.3438,
"step": 9
},
{
"epoch": 0.03278688524590164,
"grad_norm": 25.87570571899414,
"learning_rate": 3.2142857142857147e-06,
"loss": 3.3633,
"step": 10
},
{
"epoch": 0.036065573770491806,
"grad_norm": 25.204158782958984,
"learning_rate": 3.5714285714285718e-06,
"loss": 3.2852,
"step": 11
},
{
"epoch": 0.03934426229508197,
"grad_norm": 25.082277297973633,
"learning_rate": 3.928571428571429e-06,
"loss": 3.2188,
"step": 12
},
{
"epoch": 0.04262295081967213,
"grad_norm": 21.52399253845215,
"learning_rate": 4.2857142857142855e-06,
"loss": 3.1016,
"step": 13
},
{
"epoch": 0.04590163934426229,
"grad_norm": 21.302213668823242,
"learning_rate": 4.642857142857144e-06,
"loss": 3.0938,
"step": 14
},
{
"epoch": 0.04918032786885246,
"grad_norm": 17.48920249938965,
"learning_rate": 5e-06,
"loss": 3.0703,
"step": 15
},
{
"epoch": 0.05245901639344262,
"grad_norm": 14.723689079284668,
"learning_rate": 5.357142857142857e-06,
"loss": 2.7812,
"step": 16
},
{
"epoch": 0.05573770491803279,
"grad_norm": 13.425204277038574,
"learning_rate": 5.7142857142857145e-06,
"loss": 2.8398,
"step": 17
},
{
"epoch": 0.05901639344262295,
"grad_norm": 12.121846199035645,
"learning_rate": 6.071428571428571e-06,
"loss": 2.7539,
"step": 18
},
{
"epoch": 0.06229508196721312,
"grad_norm": 12.97847843170166,
"learning_rate": 6.4285714285714295e-06,
"loss": 2.6719,
"step": 19
},
{
"epoch": 0.06557377049180328,
"grad_norm": 12.588919639587402,
"learning_rate": 6.785714285714287e-06,
"loss": 2.6758,
"step": 20
},
{
"epoch": 0.06885245901639345,
"grad_norm": 10.492659568786621,
"learning_rate": 7.1428571428571436e-06,
"loss": 2.6562,
"step": 21
},
{
"epoch": 0.07213114754098361,
"grad_norm": 9.676924705505371,
"learning_rate": 7.500000000000001e-06,
"loss": 2.6094,
"step": 22
},
{
"epoch": 0.07540983606557378,
"grad_norm": 10.222829818725586,
"learning_rate": 7.857142857142858e-06,
"loss": 2.6211,
"step": 23
},
{
"epoch": 0.07868852459016394,
"grad_norm": 10.020594596862793,
"learning_rate": 8.214285714285714e-06,
"loss": 2.5273,
"step": 24
},
{
"epoch": 0.08196721311475409,
"grad_norm": 9.404995918273926,
"learning_rate": 8.571428571428571e-06,
"loss": 2.5352,
"step": 25
},
{
"epoch": 0.08524590163934426,
"grad_norm": 10.194731712341309,
"learning_rate": 8.92857142857143e-06,
"loss": 2.543,
"step": 26
},
{
"epoch": 0.08852459016393442,
"grad_norm": 9.045639991760254,
"learning_rate": 9.285714285714288e-06,
"loss": 2.4961,
"step": 27
},
{
"epoch": 0.09180327868852459,
"grad_norm": 9.108716011047363,
"learning_rate": 9.642857142857144e-06,
"loss": 2.4336,
"step": 28
},
{
"epoch": 0.09508196721311475,
"grad_norm": 9.336297988891602,
"learning_rate": 1e-05,
"loss": 2.457,
"step": 29
},
{
"epoch": 0.09836065573770492,
"grad_norm": 8.170738220214844,
"learning_rate": 9.999968638844173e-06,
"loss": 2.4688,
"step": 30
},
{
"epoch": 0.10163934426229508,
"grad_norm": 8.507989883422852,
"learning_rate": 9.999874555770099e-06,
"loss": 2.4062,
"step": 31
},
{
"epoch": 0.10491803278688525,
"grad_norm": 8.38835334777832,
"learning_rate": 9.999717751958002e-06,
"loss": 2.3711,
"step": 32
},
{
"epoch": 0.10819672131147541,
"grad_norm": 8.36460018157959,
"learning_rate": 9.9994982293749e-06,
"loss": 2.4805,
"step": 33
},
{
"epoch": 0.11147540983606558,
"grad_norm": 8.547989845275879,
"learning_rate": 9.999215990774584e-06,
"loss": 2.3906,
"step": 34
},
{
"epoch": 0.11475409836065574,
"grad_norm": 8.938812255859375,
"learning_rate": 9.998871039697589e-06,
"loss": 2.3906,
"step": 35
},
{
"epoch": 0.1180327868852459,
"grad_norm": 8.230469703674316,
"learning_rate": 9.998463380471138e-06,
"loss": 2.3164,
"step": 36
},
{
"epoch": 0.12131147540983607,
"grad_norm": 7.883755683898926,
"learning_rate": 9.997993018209098e-06,
"loss": 2.3633,
"step": 37
},
{
"epoch": 0.12459016393442623,
"grad_norm": 8.220748901367188,
"learning_rate": 9.997459958811911e-06,
"loss": 2.3516,
"step": 38
},
{
"epoch": 0.12786885245901639,
"grad_norm": 8.738327980041504,
"learning_rate": 9.996864208966522e-06,
"loss": 2.3398,
"step": 39
},
{
"epoch": 0.13114754098360656,
"grad_norm": 7.944454669952393,
"learning_rate": 9.996205776146288e-06,
"loss": 2.3867,
"step": 40
},
{
"epoch": 0.13442622950819672,
"grad_norm": 7.631208419799805,
"learning_rate": 9.995484668610897e-06,
"loss": 2.3242,
"step": 41
},
{
"epoch": 0.1377049180327869,
"grad_norm": 9.301526069641113,
"learning_rate": 9.994700895406258e-06,
"loss": 2.3203,
"step": 42
},
{
"epoch": 0.14098360655737704,
"grad_norm": 7.625834941864014,
"learning_rate": 9.99385446636438e-06,
"loss": 2.3398,
"step": 43
},
{
"epoch": 0.14426229508196722,
"grad_norm": 7.908705234527588,
"learning_rate": 9.992945392103264e-06,
"loss": 2.3477,
"step": 44
},
{
"epoch": 0.14754098360655737,
"grad_norm": 8.002193450927734,
"learning_rate": 9.991973684026755e-06,
"loss": 2.2852,
"step": 45
},
{
"epoch": 0.15081967213114755,
"grad_norm": 8.058524131774902,
"learning_rate": 9.99093935432441e-06,
"loss": 2.3711,
"step": 46
},
{
"epoch": 0.1540983606557377,
"grad_norm": 8.803773880004883,
"learning_rate": 9.989842415971338e-06,
"loss": 2.2344,
"step": 47
},
{
"epoch": 0.15737704918032788,
"grad_norm": 7.851852893829346,
"learning_rate": 9.988682882728043e-06,
"loss": 2.3047,
"step": 48
},
{
"epoch": 0.16065573770491803,
"grad_norm": 8.29660415649414,
"learning_rate": 9.987460769140242e-06,
"loss": 2.3047,
"step": 49
},
{
"epoch": 0.16393442622950818,
"grad_norm": 7.779457092285156,
"learning_rate": 9.986176090538697e-06,
"loss": 2.1992,
"step": 50
},
{
"epoch": 0.16721311475409836,
"grad_norm": 8.602660179138184,
"learning_rate": 9.984828863039008e-06,
"loss": 2.25,
"step": 51
},
{
"epoch": 0.17049180327868851,
"grad_norm": 8.079516410827637,
"learning_rate": 9.983419103541421e-06,
"loss": 2.3203,
"step": 52
},
{
"epoch": 0.1737704918032787,
"grad_norm": 8.443635940551758,
"learning_rate": 9.981946829730611e-06,
"loss": 2.3242,
"step": 53
},
{
"epoch": 0.17704918032786884,
"grad_norm": 7.873276710510254,
"learning_rate": 9.980412060075459e-06,
"loss": 2.2656,
"step": 54
},
{
"epoch": 0.18032786885245902,
"grad_norm": 8.633429527282715,
"learning_rate": 9.978814813828827e-06,
"loss": 2.2227,
"step": 55
},
{
"epoch": 0.18360655737704917,
"grad_norm": 9.135279655456543,
"learning_rate": 9.97715511102731e-06,
"loss": 2.2148,
"step": 56
},
{
"epoch": 0.18688524590163935,
"grad_norm": 7.584745407104492,
"learning_rate": 9.975432972490985e-06,
"loss": 2.2773,
"step": 57
},
{
"epoch": 0.1901639344262295,
"grad_norm": 9.086216926574707,
"learning_rate": 9.973648419823161e-06,
"loss": 2.2656,
"step": 58
},
{
"epoch": 0.19344262295081968,
"grad_norm": 8.223559379577637,
"learning_rate": 9.971801475410084e-06,
"loss": 2.2773,
"step": 59
},
{
"epoch": 0.19672131147540983,
"grad_norm": 8.726387023925781,
"learning_rate": 9.969892162420682e-06,
"loss": 2.3125,
"step": 60
},
{
"epoch": 0.2,
"grad_norm": 8.304938316345215,
"learning_rate": 9.96792050480626e-06,
"loss": 2.168,
"step": 61
},
{
"epoch": 0.20327868852459016,
"grad_norm": 8.20266342163086,
"learning_rate": 9.965886527300201e-06,
"loss": 2.25,
"step": 62
},
{
"epoch": 0.20655737704918034,
"grad_norm": 7.833676815032959,
"learning_rate": 9.963790255417663e-06,
"loss": 2.2422,
"step": 63
},
{
"epoch": 0.2098360655737705,
"grad_norm": 7.891449928283691,
"learning_rate": 9.961631715455245e-06,
"loss": 2.1758,
"step": 64
},
{
"epoch": 0.21311475409836064,
"grad_norm": 8.999512672424316,
"learning_rate": 9.959410934490673e-06,
"loss": 2.1484,
"step": 65
},
{
"epoch": 0.21639344262295082,
"grad_norm": 7.718410015106201,
"learning_rate": 9.95712794038245e-06,
"loss": 2.1172,
"step": 66
},
{
"epoch": 0.21967213114754097,
"grad_norm": 8.272541999816895,
"learning_rate": 9.954782761769509e-06,
"loss": 2.25,
"step": 67
},
{
"epoch": 0.22295081967213115,
"grad_norm": 9.607975006103516,
"learning_rate": 9.952375428070853e-06,
"loss": 2.2812,
"step": 68
},
{
"epoch": 0.2262295081967213,
"grad_norm": 8.297865867614746,
"learning_rate": 9.949905969485192e-06,
"loss": 2.1406,
"step": 69
},
{
"epoch": 0.22950819672131148,
"grad_norm": 8.192915916442871,
"learning_rate": 9.947374416990554e-06,
"loss": 2.2109,
"step": 70
},
{
"epoch": 0.23278688524590163,
"grad_norm": 8.489333152770996,
"learning_rate": 9.944780802343906e-06,
"loss": 2.1914,
"step": 71
},
{
"epoch": 0.2360655737704918,
"grad_norm": 8.244175910949707,
"learning_rate": 9.942125158080747e-06,
"loss": 2.1797,
"step": 72
},
{
"epoch": 0.23934426229508196,
"grad_norm": 8.009477615356445,
"learning_rate": 9.939407517514709e-06,
"loss": 2.2578,
"step": 73
},
{
"epoch": 0.24262295081967214,
"grad_norm": 8.284704208374023,
"learning_rate": 9.936627914737129e-06,
"loss": 2.2305,
"step": 74
},
{
"epoch": 0.2459016393442623,
"grad_norm": 9.479103088378906,
"learning_rate": 9.933786384616631e-06,
"loss": 2.1406,
"step": 75
},
{
"epoch": 0.24918032786885247,
"grad_norm": 8.558723449707031,
"learning_rate": 9.930882962798683e-06,
"loss": 2.2812,
"step": 76
},
{
"epoch": 0.25245901639344265,
"grad_norm": 8.608718872070312,
"learning_rate": 9.927917685705148e-06,
"loss": 2.2461,
"step": 77
},
{
"epoch": 0.25573770491803277,
"grad_norm": 7.863804817199707,
"learning_rate": 9.924890590533837e-06,
"loss": 2.1914,
"step": 78
},
{
"epoch": 0.25901639344262295,
"grad_norm": 8.842366218566895,
"learning_rate": 9.921801715258027e-06,
"loss": 2.0996,
"step": 79
},
{
"epoch": 0.26229508196721313,
"grad_norm": 8.273311614990234,
"learning_rate": 9.918651098626e-06,
"loss": 2.1641,
"step": 80
},
{
"epoch": 0.26557377049180325,
"grad_norm": 8.011392593383789,
"learning_rate": 9.915438780160547e-06,
"loss": 2.168,
"step": 81
},
{
"epoch": 0.26885245901639343,
"grad_norm": 7.946706771850586,
"learning_rate": 9.912164800158474e-06,
"loss": 2.2227,
"step": 82
},
{
"epoch": 0.2721311475409836,
"grad_norm": 9.032291412353516,
"learning_rate": 9.908829199690106e-06,
"loss": 2.2891,
"step": 83
},
{
"epoch": 0.2754098360655738,
"grad_norm": 8.33333969116211,
"learning_rate": 9.905432020598751e-06,
"loss": 2.2344,
"step": 84
},
{
"epoch": 0.2786885245901639,
"grad_norm": 7.967901229858398,
"learning_rate": 9.901973305500197e-06,
"loss": 2.1172,
"step": 85
},
{
"epoch": 0.2819672131147541,
"grad_norm": 8.24918270111084,
"learning_rate": 9.898453097782164e-06,
"loss": 2.1602,
"step": 86
},
{
"epoch": 0.28524590163934427,
"grad_norm": 8.877111434936523,
"learning_rate": 9.894871441603766e-06,
"loss": 2.2383,
"step": 87
},
{
"epoch": 0.28852459016393445,
"grad_norm": 8.32304573059082,
"learning_rate": 9.891228381894954e-06,
"loss": 2.1719,
"step": 88
},
{
"epoch": 0.29180327868852457,
"grad_norm": 9.091806411743164,
"learning_rate": 9.887523964355953e-06,
"loss": 2.168,
"step": 89
},
{
"epoch": 0.29508196721311475,
"grad_norm": 8.54736614227295,
"learning_rate": 9.88375823545669e-06,
"loss": 2.1211,
"step": 90
},
{
"epoch": 0.2983606557377049,
"grad_norm": 9.3049955368042,
"learning_rate": 9.879931242436208e-06,
"loss": 2.1719,
"step": 91
},
{
"epoch": 0.3016393442622951,
"grad_norm": 8.523479461669922,
"learning_rate": 9.876043033302079e-06,
"loss": 2.1719,
"step": 92
},
{
"epoch": 0.30491803278688523,
"grad_norm": 7.741846561431885,
"learning_rate": 9.872093656829792e-06,
"loss": 2.0391,
"step": 93
},
{
"epoch": 0.3081967213114754,
"grad_norm": 8.36119270324707,
"learning_rate": 9.868083162562155e-06,
"loss": 2.0938,
"step": 94
},
{
"epoch": 0.3114754098360656,
"grad_norm": 8.163023948669434,
"learning_rate": 9.864011600808663e-06,
"loss": 2.0938,
"step": 95
},
{
"epoch": 0.31475409836065577,
"grad_norm": 8.827863693237305,
"learning_rate": 9.859879022644865e-06,
"loss": 2.1406,
"step": 96
},
{
"epoch": 0.3180327868852459,
"grad_norm": 8.434041023254395,
"learning_rate": 9.855685479911736e-06,
"loss": 2.168,
"step": 97
},
{
"epoch": 0.32131147540983607,
"grad_norm": 8.144036293029785,
"learning_rate": 9.851431025215012e-06,
"loss": 2.0742,
"step": 98
},
{
"epoch": 0.32459016393442625,
"grad_norm": 8.843683242797852,
"learning_rate": 9.847115711924542e-06,
"loss": 2.1016,
"step": 99
},
{
"epoch": 0.32786885245901637,
"grad_norm": 8.300407409667969,
"learning_rate": 9.842739594173607e-06,
"loss": 2.1953,
"step": 100
}
],
"logging_steps": 1,
"max_steps": 915,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.1732503551279104e+16,
"train_batch_size": 512,
"trial_name": null,
"trial_params": null
}