{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 291,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003436426116838488,
      "grad_norm": 6.773601055145264,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 2.3294,
      "step": 1
    },
    {
      "epoch": 0.006872852233676976,
      "grad_norm": 7.0779709815979,
      "learning_rate": 6.666666666666667e-07,
      "loss": 2.343,
      "step": 2
    },
    {
      "epoch": 0.010309278350515464,
      "grad_norm": 6.805663108825684,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.3024,
      "step": 3
    },
    {
      "epoch": 0.013745704467353952,
      "grad_norm": 6.982776641845703,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 2.3704,
      "step": 4
    },
    {
      "epoch": 0.01718213058419244,
      "grad_norm": 6.750400543212891,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 2.3224,
      "step": 5
    },
    {
      "epoch": 0.020618556701030927,
      "grad_norm": 6.902153968811035,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.3783,
      "step": 6
    },
    {
      "epoch": 0.024054982817869417,
      "grad_norm": 6.6329731941223145,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 2.2336,
      "step": 7
    },
    {
      "epoch": 0.027491408934707903,
      "grad_norm": 6.543385982513428,
      "learning_rate": 2.666666666666667e-06,
      "loss": 2.2793,
      "step": 8
    },
    {
      "epoch": 0.030927835051546393,
      "grad_norm": 5.860514163970947,
      "learning_rate": 3e-06,
      "loss": 2.1503,
      "step": 9
    },
    {
      "epoch": 0.03436426116838488,
      "grad_norm": 5.754838466644287,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.0999,
      "step": 10
    },
    {
      "epoch": 0.037800687285223365,
      "grad_norm": 5.671762466430664,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 2.101,
      "step": 11
    },
    {
      "epoch": 0.041237113402061855,
      "grad_norm": 4.444242477416992,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.7642,
      "step": 12
    },
    {
      "epoch": 0.044673539518900345,
      "grad_norm": 4.336024284362793,
      "learning_rate": 4.333333333333334e-06,
      "loss": 1.6966,
      "step": 13
    },
    {
      "epoch": 0.048109965635738834,
      "grad_norm": 4.255687713623047,
      "learning_rate": 4.666666666666667e-06,
      "loss": 1.6585,
      "step": 14
    },
    {
      "epoch": 0.05154639175257732,
      "grad_norm": 3.6368279457092285,
      "learning_rate": 5e-06,
      "loss": 1.3429,
      "step": 15
    },
    {
      "epoch": 0.054982817869415807,
      "grad_norm": 3.6595640182495117,
      "learning_rate": 5.333333333333334e-06,
      "loss": 1.1194,
      "step": 16
    },
    {
      "epoch": 0.058419243986254296,
      "grad_norm": 3.600109338760376,
      "learning_rate": 5.666666666666667e-06,
      "loss": 1.05,
      "step": 17
    },
    {
      "epoch": 0.061855670103092786,
      "grad_norm": 3.134242296218872,
      "learning_rate": 6e-06,
      "loss": 0.9555,
      "step": 18
    },
    {
      "epoch": 0.06529209621993128,
      "grad_norm": 2.7783243656158447,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.8856,
      "step": 19
    },
    {
      "epoch": 0.06872852233676977,
      "grad_norm": 2.519134283065796,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.8207,
      "step": 20
    },
    {
      "epoch": 0.07216494845360824,
      "grad_norm": 2.0445821285247803,
      "learning_rate": 7e-06,
      "loss": 0.4981,
      "step": 21
    },
    {
      "epoch": 0.07560137457044673,
      "grad_norm": 1.3868145942687988,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.4683,
      "step": 22
    },
    {
      "epoch": 0.07903780068728522,
      "grad_norm": 1.092483639717102,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.5258,
      "step": 23
    },
    {
      "epoch": 0.08247422680412371,
      "grad_norm": 0.9057236313819885,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.4232,
      "step": 24
    },
    {
      "epoch": 0.0859106529209622,
      "grad_norm": 0.7798469662666321,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.4377,
      "step": 25
    },
    {
      "epoch": 0.08934707903780069,
      "grad_norm": 0.7577869892120361,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.3982,
      "step": 26
    },
    {
      "epoch": 0.09278350515463918,
      "grad_norm": 0.7064369320869446,
      "learning_rate": 9e-06,
      "loss": 0.4299,
      "step": 27
    },
    {
      "epoch": 0.09621993127147767,
      "grad_norm": 0.6497183442115784,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.4115,
      "step": 28
    },
    {
      "epoch": 0.09965635738831616,
      "grad_norm": 0.6399904489517212,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.4078,
      "step": 29
    },
    {
      "epoch": 0.10309278350515463,
      "grad_norm": 0.6032602190971375,
      "learning_rate": 1e-05,
      "loss": 0.3716,
      "step": 30
    },
    {
      "epoch": 0.10652920962199312,
      "grad_norm": 0.6270999908447266,
      "learning_rate": 9.999637795788383e-06,
      "loss": 0.3949,
      "step": 31
    },
    {
      "epoch": 0.10996563573883161,
      "grad_norm": 0.5787239074707031,
      "learning_rate": 9.99855123563029e-06,
      "loss": 0.3646,
      "step": 32
    },
    {
      "epoch": 0.1134020618556701,
      "grad_norm": 0.5683437585830688,
      "learning_rate": 9.996740476948386e-06,
      "loss": 0.3752,
      "step": 33
    },
    {
      "epoch": 0.11683848797250859,
      "grad_norm": 0.5568259954452515,
      "learning_rate": 9.994205782088438e-06,
      "loss": 0.3973,
      "step": 34
    },
    {
      "epoch": 0.12027491408934708,
      "grad_norm": 0.5675911903381348,
      "learning_rate": 9.990947518281312e-06,
      "loss": 0.3204,
      "step": 35
    },
    {
      "epoch": 0.12371134020618557,
      "grad_norm": 0.5186944603919983,
      "learning_rate": 9.986966157589751e-06,
      "loss": 0.3472,
      "step": 36
    },
    {
      "epoch": 0.12714776632302405,
      "grad_norm": 0.5074464082717896,
      "learning_rate": 9.982262276840002e-06,
      "loss": 0.3487,
      "step": 37
    },
    {
      "epoch": 0.13058419243986255,
      "grad_norm": 0.5508702397346497,
      "learning_rate": 9.976836557538234e-06,
      "loss": 0.3412,
      "step": 38
    },
    {
      "epoch": 0.13402061855670103,
      "grad_norm": 0.513957679271698,
      "learning_rate": 9.970689785771798e-06,
      "loss": 0.368,
      "step": 39
    },
    {
      "epoch": 0.13745704467353953,
      "grad_norm": 0.4993857145309448,
      "learning_rate": 9.963822852095344e-06,
      "loss": 0.365,
      "step": 40
    },
    {
      "epoch": 0.140893470790378,
      "grad_norm": 0.503234326839447,
      "learning_rate": 9.95623675140179e-06,
      "loss": 0.3657,
      "step": 41
    },
    {
      "epoch": 0.14432989690721648,
      "grad_norm": 0.47215989232063293,
      "learning_rate": 9.947932582778188e-06,
      "loss": 0.354,
      "step": 42
    },
    {
      "epoch": 0.14776632302405499,
      "grad_norm": 0.4891766607761383,
      "learning_rate": 9.938911549346473e-06,
      "loss": 0.3539,
      "step": 43
    },
    {
      "epoch": 0.15120274914089346,
      "grad_norm": 0.5077570080757141,
      "learning_rate": 9.929174958089167e-06,
      "loss": 0.36,
      "step": 44
    },
    {
      "epoch": 0.15463917525773196,
      "grad_norm": 0.4428570866584778,
      "learning_rate": 9.918724219660013e-06,
      "loss": 0.3325,
      "step": 45
    },
    {
      "epoch": 0.15807560137457044,
      "grad_norm": 0.46060407161712646,
      "learning_rate": 9.907560848179607e-06,
      "loss": 0.3391,
      "step": 46
    },
    {
      "epoch": 0.16151202749140894,
      "grad_norm": 0.46653836965560913,
      "learning_rate": 9.895686461016007e-06,
      "loss": 0.3469,
      "step": 47
    },
    {
      "epoch": 0.16494845360824742,
      "grad_norm": 0.6124569177627563,
      "learning_rate": 9.883102778550434e-06,
      "loss": 0.3625,
      "step": 48
    },
    {
      "epoch": 0.16838487972508592,
      "grad_norm": 0.47772711515426636,
      "learning_rate": 9.869811623928001e-06,
      "loss": 0.3559,
      "step": 49
    },
    {
      "epoch": 0.1718213058419244,
      "grad_norm": 0.4554615914821625,
      "learning_rate": 9.855814922793583e-06,
      "loss": 0.3236,
      "step": 50
    },
    {
      "epoch": 0.17525773195876287,
      "grad_norm": 0.4689699113368988,
      "learning_rate": 9.841114703012817e-06,
      "loss": 0.3441,
      "step": 51
    },
    {
      "epoch": 0.17869415807560138,
      "grad_norm": 0.4912395477294922,
      "learning_rate": 9.82571309437831e-06,
      "loss": 0.3505,
      "step": 52
    },
    {
      "epoch": 0.18213058419243985,
      "grad_norm": 0.48207777738571167,
      "learning_rate": 9.809612328301071e-06,
      "loss": 0.3444,
      "step": 53
    },
    {
      "epoch": 0.18556701030927836,
      "grad_norm": 0.4854188859462738,
      "learning_rate": 9.792814737487207e-06,
      "loss": 0.3574,
      "step": 54
    },
    {
      "epoch": 0.18900343642611683,
      "grad_norm": 0.4824926257133484,
      "learning_rate": 9.775322755599979e-06,
      "loss": 0.3577,
      "step": 55
    },
    {
      "epoch": 0.19243986254295534,
      "grad_norm": 0.467854768037796,
      "learning_rate": 9.757138916907184e-06,
      "loss": 0.3144,
      "step": 56
    },
    {
      "epoch": 0.1958762886597938,
      "grad_norm": 0.4348817765712738,
      "learning_rate": 9.738265855914014e-06,
      "loss": 0.3057,
      "step": 57
    },
    {
      "epoch": 0.19931271477663232,
      "grad_norm": 0.48006704449653625,
      "learning_rate": 9.718706306981332e-06,
      "loss": 0.3294,
      "step": 58
    },
    {
      "epoch": 0.2027491408934708,
      "grad_norm": 0.4489571750164032,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.3263,
      "step": 59
    },
    {
      "epoch": 0.20618556701030927,
      "grad_norm": 0.42884257435798645,
      "learning_rate": 9.677539179628005e-06,
      "loss": 0.3015,
      "step": 60
    },
    {
      "epoch": 0.20962199312714777,
      "grad_norm": 0.4882860779762268,
      "learning_rate": 9.655937565570124e-06,
      "loss": 0.3531,
      "step": 61
    },
    {
      "epoch": 0.21305841924398625,
      "grad_norm": 0.45341619849205017,
      "learning_rate": 9.63366139143413e-06,
      "loss": 0.3245,
      "step": 62
    },
    {
      "epoch": 0.21649484536082475,
      "grad_norm": 0.47687625885009766,
      "learning_rate": 9.610713884629667e-06,
      "loss": 0.3019,
      "step": 63
    },
    {
      "epoch": 0.21993127147766323,
      "grad_norm": 0.4295818507671356,
      "learning_rate": 9.587098369830171e-06,
      "loss": 0.2952,
      "step": 64
    },
    {
      "epoch": 0.22336769759450173,
      "grad_norm": 0.4635133743286133,
      "learning_rate": 9.562818268491216e-06,
      "loss": 0.3243,
      "step": 65
    },
    {
      "epoch": 0.2268041237113402,
      "grad_norm": 0.41768181324005127,
      "learning_rate": 9.537877098354787e-06,
      "loss": 0.2814,
      "step": 66
    },
    {
      "epoch": 0.23024054982817868,
      "grad_norm": 0.47494152188301086,
      "learning_rate": 9.512278472939627e-06,
      "loss": 0.3122,
      "step": 67
    },
    {
      "epoch": 0.23367697594501718,
      "grad_norm": 0.48896145820617676,
      "learning_rate": 9.486026101017711e-06,
      "loss": 0.3254,
      "step": 68
    },
    {
      "epoch": 0.23711340206185566,
      "grad_norm": 0.444621741771698,
      "learning_rate": 9.459123786076911e-06,
      "loss": 0.3092,
      "step": 69
    },
    {
      "epoch": 0.24054982817869416,
      "grad_norm": 0.49557366967201233,
      "learning_rate": 9.431575425769938e-06,
      "loss": 0.3571,
      "step": 70
    },
    {
      "epoch": 0.24398625429553264,
      "grad_norm": 0.4832090139389038,
      "learning_rate": 9.40338501134964e-06,
      "loss": 0.3053,
      "step": 71
    },
    {
      "epoch": 0.24742268041237114,
      "grad_norm": 0.49034735560417175,
      "learning_rate": 9.374556627090749e-06,
      "loss": 0.3264,
      "step": 72
    },
    {
      "epoch": 0.2508591065292096,
      "grad_norm": 0.4763636589050293,
      "learning_rate": 9.345094449698143e-06,
      "loss": 0.3601,
      "step": 73
    },
    {
      "epoch": 0.2542955326460481,
      "grad_norm": 0.4429002106189728,
      "learning_rate": 9.315002747701716e-06,
      "loss": 0.2926,
      "step": 74
    },
    {
      "epoch": 0.25773195876288657,
      "grad_norm": 0.44965827465057373,
      "learning_rate": 9.284285880837947e-06,
      "loss": 0.3413,
      "step": 75
    },
    {
      "epoch": 0.2611683848797251,
      "grad_norm": 0.43706703186035156,
      "learning_rate": 9.252948299418255e-06,
      "loss": 0.3132,
      "step": 76
    },
    {
      "epoch": 0.2646048109965636,
      "grad_norm": 0.446598082780838,
      "learning_rate": 9.220994543684225e-06,
      "loss": 0.3136,
      "step": 77
    },
    {
      "epoch": 0.26804123711340205,
      "grad_norm": 0.4524715542793274,
      "learning_rate": 9.188429243149824e-06,
      "loss": 0.3436,
      "step": 78
    },
    {
      "epoch": 0.27147766323024053,
      "grad_norm": 0.46307334303855896,
      "learning_rate": 9.155257115930651e-06,
      "loss": 0.3319,
      "step": 79
    },
    {
      "epoch": 0.27491408934707906,
      "grad_norm": 0.41813838481903076,
      "learning_rate": 9.121482968060384e-06,
      "loss": 0.2707,
      "step": 80
    },
    {
      "epoch": 0.27835051546391754,
      "grad_norm": 0.461912602186203,
      "learning_rate": 9.08711169279446e-06,
      "loss": 0.3477,
      "step": 81
    },
    {
      "epoch": 0.281786941580756,
      "grad_norm": 0.4281875789165497,
      "learning_rate": 9.052148269901145e-06,
      "loss": 0.2952,
      "step": 82
    },
    {
      "epoch": 0.2852233676975945,
      "grad_norm": 0.44423964619636536,
      "learning_rate": 9.01659776494005e-06,
      "loss": 0.3284,
      "step": 83
    },
    {
      "epoch": 0.28865979381443296,
      "grad_norm": 0.4334423243999481,
      "learning_rate": 8.98046532852822e-06,
      "loss": 0.3024,
      "step": 84
    },
    {
      "epoch": 0.2920962199312715,
      "grad_norm": 0.4503464698791504,
      "learning_rate": 8.943756195593916e-06,
      "loss": 0.3053,
      "step": 85
    },
    {
      "epoch": 0.29553264604810997,
      "grad_norm": 0.45096880197525024,
      "learning_rate": 8.90647568461816e-06,
      "loss": 0.2971,
      "step": 86
    },
    {
      "epoch": 0.29896907216494845,
      "grad_norm": 0.4469160735607147,
      "learning_rate": 8.868629196864182e-06,
      "loss": 0.3323,
      "step": 87
    },
    {
      "epoch": 0.3024054982817869,
      "grad_norm": 0.45453527569770813,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.3152,
      "step": 88
    },
    {
      "epoch": 0.30584192439862545,
      "grad_norm": 0.42542845010757446,
      "learning_rate": 8.791260305278434e-06,
      "loss": 0.2697,
      "step": 89
    },
    {
      "epoch": 0.30927835051546393,
      "grad_norm": 0.3987956643104553,
      "learning_rate": 8.751749110782013e-06,
      "loss": 0.2691,
      "step": 90
    },
    {
      "epoch": 0.3127147766323024,
      "grad_norm": 0.43505221605300903,
      "learning_rate": 8.71169435655405e-06,
      "loss": 0.3071,
      "step": 91
    },
    {
      "epoch": 0.3161512027491409,
      "grad_norm": 0.42406681180000305,
      "learning_rate": 8.671101845794816e-06,
      "loss": 0.3137,
      "step": 92
    },
    {
      "epoch": 0.31958762886597936,
      "grad_norm": 0.4488978981971741,
      "learning_rate": 8.629977459615655e-06,
      "loss": 0.3211,
      "step": 93
    },
    {
      "epoch": 0.3230240549828179,
      "grad_norm": 0.4613654613494873,
      "learning_rate": 8.588327156186915e-06,
      "loss": 0.3398,
      "step": 94
    },
    {
      "epoch": 0.32646048109965636,
      "grad_norm": 0.40654006600379944,
      "learning_rate": 8.546156969874723e-06,
      "loss": 0.3183,
      "step": 95
    },
    {
      "epoch": 0.32989690721649484,
      "grad_norm": 0.45063990354537964,
      "learning_rate": 8.503473010366713e-06,
      "loss": 0.3039,
      "step": 96
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.4447152316570282,
      "learning_rate": 8.460281461786848e-06,
      "loss": 0.283,
      "step": 97
    },
    {
      "epoch": 0.33676975945017185,
      "grad_norm": 0.4078894555568695,
      "learning_rate": 8.416588581799447e-06,
      "loss": 0.2948,
      "step": 98
    },
    {
      "epoch": 0.3402061855670103,
      "grad_norm": 0.42324382066726685,
      "learning_rate": 8.372400700702569e-06,
      "loss": 0.2622,
      "step": 99
    },
    {
      "epoch": 0.3436426116838488,
      "grad_norm": 0.46570709347724915,
      "learning_rate": 8.327724220510873e-06,
      "loss": 0.3214,
      "step": 100
    },
    {
      "epoch": 0.3470790378006873,
      "grad_norm": 0.46272405982017517,
      "learning_rate": 8.282565614028068e-06,
      "loss": 0.3264,
      "step": 101
    },
    {
      "epoch": 0.35051546391752575,
      "grad_norm": 0.4732251763343811,
      "learning_rate": 8.23693142390914e-06,
      "loss": 0.3502,
      "step": 102
    },
    {
      "epoch": 0.3539518900343643,
      "grad_norm": 0.4501849114894867,
      "learning_rate": 8.19082826171243e-06,
      "loss": 0.3211,
      "step": 103
    },
    {
      "epoch": 0.35738831615120276,
      "grad_norm": 0.44297897815704346,
      "learning_rate": 8.144262806941743e-06,
      "loss": 0.3162,
      "step": 104
    },
    {
      "epoch": 0.36082474226804123,
      "grad_norm": 0.4206938147544861,
      "learning_rate": 8.097241806078616e-06,
      "loss": 0.3029,
      "step": 105
    },
    {
      "epoch": 0.3642611683848797,
      "grad_norm": 0.4197562634944916,
      "learning_rate": 8.049772071604864e-06,
      "loss": 0.2688,
      "step": 106
    },
    {
      "epoch": 0.36769759450171824,
      "grad_norm": 0.44810447096824646,
      "learning_rate": 8.001860481015594e-06,
      "loss": 0.3005,
      "step": 107
    },
    {
      "epoch": 0.3711340206185567,
      "grad_norm": 0.4516235291957855,
      "learning_rate": 7.953513975822755e-06,
      "loss": 0.2865,
      "step": 108
    },
    {
      "epoch": 0.3745704467353952,
      "grad_norm": 0.4506089985370636,
      "learning_rate": 7.904739560549475e-06,
      "loss": 0.3204,
      "step": 109
    },
    {
      "epoch": 0.37800687285223367,
      "grad_norm": 0.45452532172203064,
      "learning_rate": 7.855544301715203e-06,
      "loss": 0.2961,
      "step": 110
    },
    {
      "epoch": 0.38144329896907214,
      "grad_norm": 0.5450659990310669,
      "learning_rate": 7.805935326811913e-06,
      "loss": 0.3216,
      "step": 111
    },
    {
      "epoch": 0.3848797250859107,
      "grad_norm": 0.4857536554336548,
      "learning_rate": 7.755919823271466e-06,
      "loss": 0.3348,
      "step": 112
    },
    {
      "epoch": 0.38831615120274915,
      "grad_norm": 0.4399779438972473,
      "learning_rate": 7.70550503742427e-06,
      "loss": 0.2682,
      "step": 113
    },
    {
      "epoch": 0.3917525773195876,
      "grad_norm": 0.4279418885707855,
      "learning_rate": 7.654698273449435e-06,
      "loss": 0.2986,
      "step": 114
    },
    {
      "epoch": 0.3951890034364261,
      "grad_norm": 0.45266300439834595,
      "learning_rate": 7.603506892316513e-06,
      "loss": 0.3116,
      "step": 115
    },
    {
      "epoch": 0.39862542955326463,
      "grad_norm": 0.4144250750541687,
      "learning_rate": 7.551938310719043e-06,
      "loss": 0.2864,
      "step": 116
    },
    {
      "epoch": 0.4020618556701031,
      "grad_norm": 0.5105399489402771,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3088,
      "step": 117
    },
    {
      "epoch": 0.4054982817869416,
      "grad_norm": 0.4652271568775177,
      "learning_rate": 7.447699485069342e-06,
      "loss": 0.2821,
      "step": 118
    },
    {
      "epoch": 0.40893470790378006,
      "grad_norm": 0.4278913140296936,
      "learning_rate": 7.395044343313777e-06,
      "loss": 0.2981,
      "step": 119
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 0.41499921679496765,
      "learning_rate": 7.342042203498952e-06,
      "loss": 0.283,
      "step": 120
    },
    {
      "epoch": 0.41580756013745707,
      "grad_norm": 0.4199381172657013,
      "learning_rate": 7.288700744664167e-06,
      "loss": 0.2977,
      "step": 121
    },
    {
      "epoch": 0.41924398625429554,
      "grad_norm": 0.4549289047718048,
      "learning_rate": 7.235027695009846e-06,
      "loss": 0.329,
      "step": 122
    },
    {
      "epoch": 0.422680412371134,
      "grad_norm": 0.43108847737312317,
      "learning_rate": 7.181030830777838e-06,
      "loss": 0.2919,
      "step": 123
    },
    {
      "epoch": 0.4261168384879725,
      "grad_norm": 0.42637932300567627,
      "learning_rate": 7.1267179751248005e-06,
      "loss": 0.304,
      "step": 124
    },
    {
      "epoch": 0.42955326460481097,
      "grad_norm": 0.4469113349914551,
      "learning_rate": 7.0720969969887595e-06,
      "loss": 0.2966,
      "step": 125
    },
    {
      "epoch": 0.4329896907216495,
      "grad_norm": 0.41228166222572327,
      "learning_rate": 7.017175809949044e-06,
      "loss": 0.2874,
      "step": 126
    },
    {
      "epoch": 0.436426116838488,
      "grad_norm": 0.45446354150772095,
      "learning_rate": 6.961962371079752e-06,
      "loss": 0.3129,
      "step": 127
    },
    {
      "epoch": 0.43986254295532645,
      "grad_norm": 0.4572490155696869,
      "learning_rate": 6.906464679796927e-06,
      "loss": 0.3398,
      "step": 128
    },
    {
      "epoch": 0.44329896907216493,
      "grad_norm": 0.4100750982761383,
      "learning_rate": 6.850690776699574e-06,
      "loss": 0.2885,
      "step": 129
    },
    {
      "epoch": 0.44673539518900346,
      "grad_norm": 0.4656309187412262,
      "learning_rate": 6.79464874240473e-06,
      "loss": 0.3347,
      "step": 130
    },
    {
      "epoch": 0.45017182130584193,
      "grad_norm": 0.4740864038467407,
      "learning_rate": 6.7383466963767386e-06,
      "loss": 0.3141,
      "step": 131
    },
    {
      "epoch": 0.4536082474226804,
      "grad_norm": 0.4347783029079437,
      "learning_rate": 6.681792795750876e-06,
      "loss": 0.2836,
      "step": 132
    },
    {
      "epoch": 0.4570446735395189,
      "grad_norm": 0.46490252017974854,
      "learning_rate": 6.624995234151539e-06,
      "loss": 0.3036,
      "step": 133
    },
    {
      "epoch": 0.46048109965635736,
      "grad_norm": 0.4635082185268402,
      "learning_rate": 6.567962240505136e-06,
      "loss": 0.2989,
      "step": 134
    },
    {
      "epoch": 0.4639175257731959,
      "grad_norm": 0.43528881669044495,
      "learning_rate": 6.510702077847864e-06,
      "loss": 0.3137,
      "step": 135
    },
    {
      "epoch": 0.46735395189003437,
      "grad_norm": 0.4689432978630066,
      "learning_rate": 6.453223042128556e-06,
      "loss": 0.2838,
      "step": 136
    },
    {
      "epoch": 0.47079037800687284,
      "grad_norm": 0.45231714844703674,
      "learning_rate": 6.395533461006736e-06,
      "loss": 0.2994,
      "step": 137
    },
    {
      "epoch": 0.4742268041237113,
      "grad_norm": 0.507282018661499,
      "learning_rate": 6.337641692646106e-06,
      "loss": 0.32,
      "step": 138
    },
    {
      "epoch": 0.47766323024054985,
      "grad_norm": 0.47324004769325256,
      "learning_rate": 6.2795561245035895e-06,
      "loss": 0.3178,
      "step": 139
    },
    {
      "epoch": 0.48109965635738833,
      "grad_norm": 0.4751383364200592,
      "learning_rate": 6.221285172114156e-06,
      "loss": 0.3112,
      "step": 140
    },
    {
      "epoch": 0.4845360824742268,
      "grad_norm": 0.47212111949920654,
      "learning_rate": 6.162837277871553e-06,
      "loss": 0.2964,
      "step": 141
    },
    {
      "epoch": 0.4879725085910653,
      "grad_norm": 0.48457249999046326,
      "learning_rate": 6.104220909805162e-06,
      "loss": 0.2869,
      "step": 142
    },
    {
      "epoch": 0.49140893470790376,
      "grad_norm": 0.4023948609828949,
      "learning_rate": 6.045444560353136e-06,
      "loss": 0.2437,
      "step": 143
    },
    {
      "epoch": 0.4948453608247423,
      "grad_norm": 0.433889776468277,
      "learning_rate": 5.986516745132e-06,
      "loss": 0.2714,
      "step": 144
    },
    {
      "epoch": 0.49828178694158076,
      "grad_norm": 0.47038504481315613,
      "learning_rate": 5.927446001702899e-06,
      "loss": 0.3033,
      "step": 145
    },
    {
      "epoch": 0.5017182130584192,
      "grad_norm": 0.4430581331253052,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.2951,
      "step": 146
    },
    {
      "epoch": 0.5051546391752577,
      "grad_norm": 0.4938088357448578,
      "learning_rate": 5.808909982763825e-06,
      "loss": 0.3056,
      "step": 147
    },
    {
      "epoch": 0.5085910652920962,
      "grad_norm": 0.4448769688606262,
      "learning_rate": 5.749461880951966e-06,
      "loss": 0.3015,
      "step": 148
    },
    {
      "epoch": 0.5120274914089347,
      "grad_norm": 0.44447487592697144,
      "learning_rate": 5.689905195840216e-06,
      "loss": 0.2846,
      "step": 149
    },
    {
      "epoch": 0.5154639175257731,
      "grad_norm": 0.411819189786911,
      "learning_rate": 5.630248556101448e-06,
      "loss": 0.272,
      "step": 150
    },
    {
      "epoch": 0.5189003436426117,
      "grad_norm": 0.46941104531288147,
      "learning_rate": 5.570500604890124e-06,
      "loss": 0.3198,
      "step": 151
    },
    {
      "epoch": 0.5223367697594502,
      "grad_norm": 0.49389877915382385,
      "learning_rate": 5.510669998590074e-06,
      "loss": 0.291,
      "step": 152
    },
    {
      "epoch": 0.5257731958762887,
      "grad_norm": 0.4679196774959564,
      "learning_rate": 5.450765405560328e-06,
      "loss": 0.2919,
      "step": 153
    },
    {
      "epoch": 0.5292096219931272,
      "grad_norm": 0.48511338233947754,
      "learning_rate": 5.390795504879243e-06,
      "loss": 0.3203,
      "step": 154
    },
    {
      "epoch": 0.5326460481099656,
      "grad_norm": 0.49272918701171875,
      "learning_rate": 5.330768985087059e-06,
      "loss": 0.3433,
      "step": 155
    },
    {
      "epoch": 0.5360824742268041,
      "grad_norm": 0.45313969254493713,
      "learning_rate": 5.270694542927089e-06,
      "loss": 0.3076,
      "step": 156
    },
    {
      "epoch": 0.5395189003436426,
      "grad_norm": 0.45044413208961487,
      "learning_rate": 5.2105808820857126e-06,
      "loss": 0.3174,
      "step": 157
    },
    {
      "epoch": 0.5429553264604811,
      "grad_norm": 0.4462970495223999,
      "learning_rate": 5.150436711931387e-06,
      "loss": 0.3114,
      "step": 158
    },
    {
      "epoch": 0.5463917525773195,
      "grad_norm": 0.40542709827423096,
      "learning_rate": 5.090270746252803e-06,
      "loss": 0.2906,
      "step": 159
    },
    {
      "epoch": 0.5498281786941581,
      "grad_norm": 0.44431909918785095,
      "learning_rate": 5.030091701996428e-06,
      "loss": 0.2672,
      "step": 160
    },
    {
      "epoch": 0.5532646048109966,
      "grad_norm": 0.4110300540924072,
      "learning_rate": 4.9699082980035735e-06,
      "loss": 0.2853,
      "step": 161
    },
    {
      "epoch": 0.5567010309278351,
      "grad_norm": 0.5007030367851257,
      "learning_rate": 4.909729253747197e-06,
      "loss": 0.3131,
      "step": 162
    },
    {
      "epoch": 0.5601374570446735,
      "grad_norm": 0.5697958469390869,
      "learning_rate": 4.8495632880686155e-06,
      "loss": 0.2935,
      "step": 163
    },
    {
      "epoch": 0.563573883161512,
      "grad_norm": 0.43557995557785034,
      "learning_rate": 4.789419117914288e-06,
      "loss": 0.3175,
      "step": 164
    },
    {
      "epoch": 0.5670103092783505,
      "grad_norm": 0.4249924123287201,
      "learning_rate": 4.729305457072913e-06,
      "loss": 0.3049,
      "step": 165
    },
    {
      "epoch": 0.570446735395189,
      "grad_norm": 0.4352003335952759,
      "learning_rate": 4.6692310149129425e-06,
      "loss": 0.2796,
      "step": 166
    },
    {
      "epoch": 0.5738831615120275,
      "grad_norm": 0.4522886872291565,
      "learning_rate": 4.609204495120759e-06,
      "loss": 0.2519,
      "step": 167
    },
    {
      "epoch": 0.5773195876288659,
      "grad_norm": 0.4559308588504791,
      "learning_rate": 4.549234594439674e-06,
      "loss": 0.3026,
      "step": 168
    },
    {
      "epoch": 0.5807560137457045,
      "grad_norm": 0.5845784544944763,
      "learning_rate": 4.489330001409929e-06,
      "loss": 0.2776,
      "step": 169
    },
    {
      "epoch": 0.584192439862543,
      "grad_norm": 0.46254462003707886,
      "learning_rate": 4.429499395109877e-06,
      "loss": 0.3038,
      "step": 170
    },
    {
      "epoch": 0.5876288659793815,
      "grad_norm": 0.42879557609558105,
      "learning_rate": 4.369751443898554e-06,
      "loss": 0.2798,
      "step": 171
    },
    {
      "epoch": 0.5910652920962199,
      "grad_norm": 0.46193256974220276,
      "learning_rate": 4.310094804159784e-06,
      "loss": 0.2779,
      "step": 172
    },
    {
      "epoch": 0.5945017182130584,
      "grad_norm": 0.5215131640434265,
      "learning_rate": 4.250538119048036e-06,
      "loss": 0.3455,
      "step": 173
    },
    {
      "epoch": 0.5979381443298969,
      "grad_norm": 0.44782477617263794,
      "learning_rate": 4.191090017236177e-06,
      "loss": 0.3083,
      "step": 174
    },
    {
      "epoch": 0.6013745704467354,
      "grad_norm": 0.48400431871414185,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.2975,
      "step": 175
    },
    {
      "epoch": 0.6048109965635738,
      "grad_norm": 0.41204163432121277,
      "learning_rate": 4.072553998297103e-06,
      "loss": 0.2757,
      "step": 176
    },
    {
      "epoch": 0.6082474226804123,
      "grad_norm": 0.4403284192085266,
      "learning_rate": 4.013483254868001e-06,
      "loss": 0.2673,
      "step": 177
    },
    {
      "epoch": 0.6116838487972509,
      "grad_norm": 0.44256702065467834,
      "learning_rate": 3.9545554396468655e-06,
      "loss": 0.2941,
      "step": 178
    },
    {
      "epoch": 0.6151202749140894,
      "grad_norm": 0.44513365626335144,
      "learning_rate": 3.89577909019484e-06,
      "loss": 0.2773,
      "step": 179
    },
    {
      "epoch": 0.6185567010309279,
      "grad_norm": 0.4568428695201874,
      "learning_rate": 3.8371627221284495e-06,
      "loss": 0.2946,
      "step": 180
    },
    {
      "epoch": 0.6219931271477663,
      "grad_norm": 0.4727785289287567,
      "learning_rate": 3.7787148278858453e-06,
      "loss": 0.3204,
      "step": 181
    },
    {
      "epoch": 0.6254295532646048,
      "grad_norm": 0.42963531613349915,
      "learning_rate": 3.7204438754964113e-06,
      "loss": 0.2498,
      "step": 182
    },
    {
      "epoch": 0.6288659793814433,
      "grad_norm": 0.42374187707901,
      "learning_rate": 3.662358307353897e-06,
      "loss": 0.2989,
      "step": 183
    },
    {
      "epoch": 0.6323024054982818,
      "grad_norm": 0.495601624250412,
      "learning_rate": 3.6044665389932663e-06,
      "loss": 0.3289,
      "step": 184
    },
    {
      "epoch": 0.6357388316151202,
      "grad_norm": 0.4799273610115051,
      "learning_rate": 3.5467769578714455e-06,
      "loss": 0.2627,
      "step": 185
    },
    {
      "epoch": 0.6391752577319587,
      "grad_norm": 0.4173251986503601,
      "learning_rate": 3.489297922152136e-06,
      "loss": 0.2576,
      "step": 186
    },
    {
      "epoch": 0.6426116838487973,
      "grad_norm": 0.4326072335243225,
      "learning_rate": 3.432037759494867e-06,
      "loss": 0.2963,
      "step": 187
    },
    {
      "epoch": 0.6460481099656358,
      "grad_norm": 0.4364977180957794,
      "learning_rate": 3.375004765848463e-06,
      "loss": 0.2613,
      "step": 188
    },
    {
      "epoch": 0.6494845360824743,
      "grad_norm": 0.4443832337856293,
      "learning_rate": 3.3182072042491244e-06,
      "loss": 0.3029,
      "step": 189
    },
    {
      "epoch": 0.6529209621993127,
      "grad_norm": 0.4171838164329529,
      "learning_rate": 3.2616533036232635e-06,
      "loss": 0.2758,
      "step": 190
    },
    {
      "epoch": 0.6563573883161512,
      "grad_norm": 0.44542646408081055,
      "learning_rate": 3.205351257595272e-06,
      "loss": 0.2947,
      "step": 191
    },
    {
      "epoch": 0.6597938144329897,
      "grad_norm": 0.42361024022102356,
      "learning_rate": 3.149309223300428e-06,
      "loss": 0.2644,
      "step": 192
    },
    {
      "epoch": 0.6632302405498282,
      "grad_norm": 0.42630866169929504,
      "learning_rate": 3.093535320203074e-06,
      "loss": 0.2739,
      "step": 193
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.45428791642189026,
      "learning_rate": 3.0380376289202497e-06,
      "loss": 0.3163,
      "step": 194
    },
    {
      "epoch": 0.6701030927835051,
      "grad_norm": 0.3921767771244049,
      "learning_rate": 2.982824190050958e-06,
      "loss": 0.2675,
      "step": 195
    },
    {
      "epoch": 0.6735395189003437,
      "grad_norm": 0.41902685165405273,
      "learning_rate": 2.927903003011241e-06,
      "loss": 0.2682,
      "step": 196
    },
    {
      "epoch": 0.6769759450171822,
      "grad_norm": 0.4761234521865845,
      "learning_rate": 2.8732820248752016e-06,
      "loss": 0.326,
      "step": 197
    },
    {
      "epoch": 0.6804123711340206,
      "grad_norm": 0.4254363775253296,
      "learning_rate": 2.8189691692221627e-06,
      "loss": 0.2915,
      "step": 198
    },
    {
      "epoch": 0.6838487972508591,
      "grad_norm": 0.41192832589149475,
      "learning_rate": 2.7649723049901554e-06,
      "loss": 0.2926,
      "step": 199
    },
    {
      "epoch": 0.6872852233676976,
      "grad_norm": 0.47728779911994934,
      "learning_rate": 2.711299255335833e-06,
      "loss": 0.2738,
      "step": 200
    },
    {
      "epoch": 0.6907216494845361,
      "grad_norm": 0.5057693123817444,
      "learning_rate": 2.65795779650105e-06,
      "loss": 0.2806,
      "step": 201
    },
    {
      "epoch": 0.6941580756013745,
      "grad_norm": 0.4783018231391907,
      "learning_rate": 2.6049556566862234e-06,
      "loss": 0.2891,
      "step": 202
    },
    {
      "epoch": 0.697594501718213,
      "grad_norm": 0.5034655928611755,
      "learning_rate": 2.552300514930657e-06,
      "loss": 0.2977,
      "step": 203
    },
    {
      "epoch": 0.7010309278350515,
      "grad_norm": 0.41531136631965637,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.275,
      "step": 204
    },
    {
      "epoch": 0.7044673539518901,
      "grad_norm": 0.4129270315170288,
      "learning_rate": 2.4480616892809593e-06,
      "loss": 0.2381,
      "step": 205
    },
    {
      "epoch": 0.7079037800687286,
      "grad_norm": 0.46732890605926514,
      "learning_rate": 2.396493107683488e-06,
      "loss": 0.2779,
      "step": 206
    },
    {
      "epoch": 0.711340206185567,
      "grad_norm": 0.4486265480518341,
      "learning_rate": 2.345301726550567e-06,
      "loss": 0.267,
      "step": 207
    },
    {
      "epoch": 0.7147766323024055,
      "grad_norm": 0.45522594451904297,
      "learning_rate": 2.2944949625757295e-06,
      "loss": 0.3015,
      "step": 208
    },
    {
      "epoch": 0.718213058419244,
      "grad_norm": 0.6432047486305237,
      "learning_rate": 2.244080176728536e-06,
      "loss": 0.2662,
      "step": 209
    },
    {
      "epoch": 0.7216494845360825,
      "grad_norm": 0.4285944104194641,
      "learning_rate": 2.1940646731880887e-06,
      "loss": 0.3036,
      "step": 210
    },
    {
      "epoch": 0.7250859106529209,
      "grad_norm": 0.4231526255607605,
      "learning_rate": 2.1444556982847996e-06,
      "loss": 0.2896,
      "step": 211
    },
    {
      "epoch": 0.7285223367697594,
      "grad_norm": 0.42347821593284607,
      "learning_rate": 2.095260439450526e-06,
      "loss": 0.2684,
      "step": 212
    },
    {
      "epoch": 0.7319587628865979,
      "grad_norm": 0.43296700716018677,
      "learning_rate": 2.0464860241772454e-06,
      "loss": 0.2788,
      "step": 213
    },
    {
      "epoch": 0.7353951890034365,
      "grad_norm": 0.42099738121032715,
      "learning_rate": 1.998139518984409e-06,
      "loss": 0.2873,
      "step": 214
    },
    {
      "epoch": 0.738831615120275,
      "grad_norm": 0.467330664396286,
      "learning_rate": 1.9502279283951363e-06,
      "loss": 0.3004,
      "step": 215
    },
    {
      "epoch": 0.7422680412371134,
      "grad_norm": 0.4013458788394928,
      "learning_rate": 1.9027581939213852e-06,
      "loss": 0.2443,
      "step": 216
    },
    {
      "epoch": 0.7457044673539519,
      "grad_norm": 0.4329524040222168,
      "learning_rate": 1.8557371930582579e-06,
      "loss": 0.289,
      "step": 217
    },
    {
      "epoch": 0.7491408934707904,
      "grad_norm": 0.4044208824634552,
      "learning_rate": 1.8091717382875723e-06,
      "loss": 0.2895,
      "step": 218
    },
    {
      "epoch": 0.7525773195876289,
      "grad_norm": 0.4422338008880615,
      "learning_rate": 1.7630685760908623e-06,
      "loss": 0.2801,
      "step": 219
    },
    {
      "epoch": 0.7560137457044673,
      "grad_norm": 0.4364713728427887,
      "learning_rate": 1.7174343859719334e-06,
      "loss": 0.279,
      "step": 220
    },
    {
      "epoch": 0.7594501718213058,
      "grad_norm": 0.4441254734992981,
      "learning_rate": 1.6722757794891287e-06,
      "loss": 0.271,
      "step": 221
    },
    {
      "epoch": 0.7628865979381443,
      "grad_norm": 0.446011483669281,
      "learning_rate": 1.627599299297431e-06,
      "loss": 0.2627,
      "step": 222
    },
    {
      "epoch": 0.7663230240549829,
      "grad_norm": 0.4614294469356537,
      "learning_rate": 1.5834114182005544e-06,
      "loss": 0.29,
      "step": 223
    },
    {
      "epoch": 0.7697594501718213,
      "grad_norm": 0.419196754693985,
      "learning_rate": 1.5397185382131524e-06,
      "loss": 0.2672,
      "step": 224
    },
    {
      "epoch": 0.7731958762886598,
      "grad_norm": 0.47108274698257446,
      "learning_rate": 1.4965269896332884e-06,
      "loss": 0.2684,
      "step": 225
    },
    {
      "epoch": 0.7766323024054983,
      "grad_norm": 0.48111191391944885,
      "learning_rate": 1.4538430301252783e-06,
      "loss": 0.327,
      "step": 226
    },
    {
      "epoch": 0.7800687285223368,
      "grad_norm": 0.4369780421257019,
      "learning_rate": 1.411672843813086e-06,
      "loss": 0.283,
      "step": 227
    },
    {
      "epoch": 0.7835051546391752,
      "grad_norm": 0.4390357732772827,
      "learning_rate": 1.370022540384347e-06,
      "loss": 0.3011,
      "step": 228
    },
    {
      "epoch": 0.7869415807560137,
      "grad_norm": 0.40961310267448425,
      "learning_rate": 1.3288981542051844e-06,
      "loss": 0.2714,
      "step": 229
    },
    {
      "epoch": 0.7903780068728522,
      "grad_norm": 0.4285667836666107,
      "learning_rate": 1.2883056434459506e-06,
      "loss": 0.2577,
      "step": 230
    },
    {
      "epoch": 0.7938144329896907,
      "grad_norm": 0.4189035892486572,
      "learning_rate": 1.2482508892179884e-06,
      "loss": 0.27,
      "step": 231
    },
    {
      "epoch": 0.7972508591065293,
      "grad_norm": 0.3942403495311737,
      "learning_rate": 1.2087396947215678e-06,
      "loss": 0.2605,
      "step": 232
    },
    {
      "epoch": 0.8006872852233677,
      "grad_norm": 0.49882808327674866,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.3014,
      "step": 233
    },
    {
      "epoch": 0.8041237113402062,
      "grad_norm": 0.3978383243083954,
      "learning_rate": 1.1313708031358183e-06,
      "loss": 0.2566,
      "step": 234
    },
    {
      "epoch": 0.8075601374570447,
      "grad_norm": 0.40835458040237427,
      "learning_rate": 1.0935243153818437e-06,
      "loss": 0.2851,
      "step": 235
    },
    {
      "epoch": 0.8109965635738832,
      "grad_norm": 0.37506747245788574,
      "learning_rate": 1.0562438044060846e-06,
      "loss": 0.2456,
      "step": 236
    },
    {
      "epoch": 0.8144329896907216,
      "grad_norm": 0.38689589500427246,
      "learning_rate": 1.0195346714717813e-06,
      "loss": 0.2514,
      "step": 237
    },
    {
      "epoch": 0.8178694158075601,
      "grad_norm": 0.4160013794898987,
      "learning_rate": 9.834022350599538e-07,
      "loss": 0.297,
      "step": 238
    },
    {
      "epoch": 0.8213058419243986,
      "grad_norm": 0.42283937335014343,
      "learning_rate": 9.47851730098856e-07,
      "loss": 0.2556,
      "step": 239
    },
    {
      "epoch": 0.8247422680412371,
      "grad_norm": 0.464019775390625,
      "learning_rate": 9.128883072055411e-07,
      "loss": 0.283,
      "step": 240
    },
    {
      "epoch": 0.8281786941580757,
      "grad_norm": 0.44965192675590515,
      "learning_rate": 8.785170319396174e-07,
      "loss": 0.2795,
      "step": 241
    },
    {
      "epoch": 0.8316151202749141,
      "grad_norm": 0.4575161039829254,
      "learning_rate": 8.447428840693489e-07,
      "loss": 0.2793,
      "step": 242
    },
    {
      "epoch": 0.8350515463917526,
      "grad_norm": 0.4319188594818115,
      "learning_rate": 8.115707568501768e-07,
      "loss": 0.2569,
      "step": 243
    },
    {
      "epoch": 0.8384879725085911,
      "grad_norm": 0.43016260862350464,
      "learning_rate": 7.790054563157745e-07,
      "loss": 0.3022,
      "step": 244
    },
    {
      "epoch": 0.8419243986254296,
      "grad_norm": 0.4093724489212036,
      "learning_rate": 7.470517005817473e-07,
      "loss": 0.251,
      "step": 245
    },
    {
      "epoch": 0.845360824742268,
      "grad_norm": 0.4726552963256836,
      "learning_rate": 7.157141191620548e-07,
      "loss": 0.3445,
      "step": 246
    },
    {
      "epoch": 0.8487972508591065,
      "grad_norm": 0.4052773118019104,
      "learning_rate": 6.849972522982845e-07,
      "loss": 0.2703,
      "step": 247
    },
    {
      "epoch": 0.852233676975945,
      "grad_norm": 0.4114190936088562,
      "learning_rate": 6.549055503018575e-07,
      "loss": 0.2632,
      "step": 248
    },
    {
      "epoch": 0.8556701030927835,
      "grad_norm": 0.43223080039024353,
      "learning_rate": 6.254433729092518e-07,
      "loss": 0.303,
      "step": 249
    },
    {
      "epoch": 0.8591065292096219,
      "grad_norm": 0.42186257243156433,
      "learning_rate": 5.966149886503614e-07,
      "loss": 0.2673,
      "step": 250
    },
    {
      "epoch": 0.8625429553264605,
      "grad_norm": 0.4302360713481903,
      "learning_rate": 5.684245742300625e-07,
      "loss": 0.2864,
      "step": 251
    },
    {
      "epoch": 0.865979381443299,
      "grad_norm": 0.4030810594558716,
      "learning_rate": 5.408762139230889e-07,
      "loss": 0.2608,
      "step": 252
    },
    {
      "epoch": 0.8694158075601375,
      "grad_norm": 0.4175474941730499,
      "learning_rate": 5.139738989822901e-07,
      "loss": 0.2626,
      "step": 253
    },
    {
      "epoch": 0.872852233676976,
      "grad_norm": 0.4360395669937134,
      "learning_rate": 4.877215270603752e-07,
      "loss": 0.2955,
      "step": 254
    },
    {
      "epoch": 0.8762886597938144,
      "grad_norm": 0.417015939950943,
      "learning_rate": 4.6212290164521554e-07,
      "loss": 0.2755,
      "step": 255
    },
    {
      "epoch": 0.8797250859106529,
      "grad_norm": 0.42125383019447327,
      "learning_rate": 4.371817315087845e-07,
      "loss": 0.2799,
      "step": 256
    },
    {
      "epoch": 0.8831615120274914,
      "grad_norm": 0.41367045044898987,
      "learning_rate": 4.1290163016982855e-07,
      "loss": 0.2639,
      "step": 257
    },
    {
      "epoch": 0.8865979381443299,
      "grad_norm": 0.44137805700302124,
      "learning_rate": 3.8928611537033424e-07,
      "loss": 0.2726,
      "step": 258
    },
    {
      "epoch": 0.8900343642611683,
      "grad_norm": 0.4337911307811737,
      "learning_rate": 3.663386085658693e-07,
      "loss": 0.2758,
      "step": 259
    },
    {
      "epoch": 0.8934707903780069,
      "grad_norm": 0.3822688162326813,
      "learning_rate": 3.4406243442987765e-07,
      "loss": 0.2581,
      "step": 260
    },
    {
      "epoch": 0.8969072164948454,
      "grad_norm": 0.4298803508281708,
      "learning_rate": 3.224608203719953e-07,
      "loss": 0.281,
      "step": 261
    },
    {
      "epoch": 0.9003436426116839,
      "grad_norm": 0.46638035774230957,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.3077,
      "step": 262
    },
    {
      "epoch": 0.9037800687285223,
      "grad_norm": 0.4740549325942993,
      "learning_rate": 2.812936930186688e-07,
      "loss": 0.3094,
      "step": 263
    },
    {
      "epoch": 0.9072164948453608,
      "grad_norm": 0.4159590005874634,
      "learning_rate": 2.617341440859883e-07,
      "loss": 0.2729,
      "step": 264
    },
    {
      "epoch": 0.9106529209621993,
      "grad_norm": 0.4018743932247162,
      "learning_rate": 2.428610830928152e-07,
      "loss": 0.2579,
      "step": 265
    },
    {
      "epoch": 0.9140893470790378,
      "grad_norm": 0.4325768053531647,
      "learning_rate": 2.2467724440002336e-07,
      "loss": 0.3084,
      "step": 266
    },
    {
      "epoch": 0.9175257731958762,
      "grad_norm": 0.4185415208339691,
      "learning_rate": 2.0718526251279346e-07,
      "loss": 0.269,
      "step": 267
    },
    {
      "epoch": 0.9209621993127147,
      "grad_norm": 0.41172951459884644,
      "learning_rate": 1.9038767169893058e-07,
      "loss": 0.2688,
      "step": 268
    },
    {
      "epoch": 0.9243986254295533,
      "grad_norm": 0.4149523675441742,
      "learning_rate": 1.7428690562169003e-07,
      "loss": 0.2528,
      "step": 269
    },
    {
      "epoch": 0.9278350515463918,
      "grad_norm": 0.4421743154525757,
      "learning_rate": 1.5888529698718347e-07,
      "loss": 0.2684,
      "step": 270
    },
    {
      "epoch": 0.9312714776632303,
      "grad_norm": 0.4239802360534668,
      "learning_rate": 1.4418507720641794e-07,
      "loss": 0.2632,
      "step": 271
    },
    {
      "epoch": 0.9347079037800687,
      "grad_norm": 0.4376448690891266,
      "learning_rate": 1.3018837607199909e-07,
      "loss": 0.2861,
      "step": 272
    },
    {
      "epoch": 0.9381443298969072,
      "grad_norm": 0.432050496339798,
      "learning_rate": 1.1689722144956672e-07,
      "loss": 0.287,
      "step": 273
    },
    {
      "epoch": 0.9415807560137457,
      "grad_norm": 0.5766061544418335,
      "learning_rate": 1.0431353898399388e-07,
      "loss": 0.3096,
      "step": 274
    },
    {
      "epoch": 0.9450171821305842,
      "grad_norm": 0.40074169635772705,
      "learning_rate": 9.243915182039431e-08,
      "loss": 0.2671,
      "step": 275
    },
    {
      "epoch": 0.9484536082474226,
      "grad_norm": 0.4120236933231354,
      "learning_rate": 8.127578033998663e-08,
      "loss": 0.2707,
      "step": 276
    },
    {
      "epoch": 0.9518900343642611,
      "grad_norm": 0.478630006313324,
      "learning_rate": 7.082504191083417e-08,
      "loss": 0.2886,
      "step": 277
    },
    {
      "epoch": 0.9553264604810997,
      "grad_norm": 0.4233369529247284,
      "learning_rate": 6.108845065352864e-08,
      "loss": 0.2715,
      "step": 278
    },
    {
      "epoch": 0.9587628865979382,
      "grad_norm": 0.43706801533699036,
      "learning_rate": 5.206741722181385e-08,
      "loss": 0.289,
      "step": 279
    },
    {
      "epoch": 0.9621993127147767,
      "grad_norm": 0.47116661071777344,
      "learning_rate": 4.376324859820924e-08,
      "loss": 0.3081,
      "step": 280
    },
    {
      "epoch": 0.9656357388316151,
      "grad_norm": 0.40870651602745056,
      "learning_rate": 3.617714790465576e-08,
      "loss": 0.2667,
      "step": 281
    },
    {
      "epoch": 0.9690721649484536,
      "grad_norm": 0.40682822465896606,
      "learning_rate": 2.9310214228202016e-08,
      "loss": 0.2728,
      "step": 282
    },
    {
      "epoch": 0.9725085910652921,
      "grad_norm": 0.4911557734012604,
      "learning_rate": 2.3163442461766604e-08,
      "loss": 0.3386,
      "step": 283
    },
    {
      "epoch": 0.9759450171821306,
      "grad_norm": 0.4293031394481659,
      "learning_rate": 1.7737723159999e-08,
      "loss": 0.2852,
      "step": 284
    },
    {
      "epoch": 0.979381443298969,
      "grad_norm": 0.39959046244621277,
      "learning_rate": 1.3033842410251074e-08,
      "loss": 0.2668,
      "step": 285
    },
    {
      "epoch": 0.9828178694158075,
      "grad_norm": 0.42357000708580017,
      "learning_rate": 9.052481718690998e-09,
      "loss": 0.2659,
      "step": 286
    },
    {
      "epoch": 0.9862542955326461,
      "grad_norm": 0.46328771114349365,
      "learning_rate": 5.794217911562205e-09,
      "loss": 0.2965,
      "step": 287
    },
    {
      "epoch": 0.9896907216494846,
      "grad_norm": 0.39359450340270996,
      "learning_rate": 3.2595230516152543e-09,
      "loss": 0.2548,
      "step": 288
    },
    {
      "epoch": 0.993127147766323,
      "grad_norm": 0.447672963142395,
      "learning_rate": 1.4487643697103092e-09,
      "loss": 0.2804,
      "step": 289
    },
    {
      "epoch": 0.9965635738831615,
      "grad_norm": 0.48000165820121765,
      "learning_rate": 3.6220421161692333e-10,
      "loss": 0.3062,
      "step": 290
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4716525673866272,
      "learning_rate": 0.0,
      "loss": 0.2894,
      "step": 291
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.39179933071136475,
      "eval_runtime": 1.5263,
      "eval_samples_per_second": 150.689,
      "eval_steps_per_second": 6.552,
      "step": 291
    },
    {
      "epoch": 1.0,
      "step": 291,
      "total_flos": 2.419849045213184e+16,
      "train_loss": 0.4077431839151481,
      "train_runtime": 379.788,
      "train_samples_per_second": 36.668,
      "train_steps_per_second": 0.766
    }
  ],
  "logging_steps": 1,
  "max_steps": 291,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 800,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.419849045213184e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}