{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008695652173913044,
      "grad_norm": 18.15669822692871,
      "learning_rate": 0.0,
      "loss": 4.5989,
      "step": 1
    },
    {
      "epoch": 0.017391304347826087,
      "grad_norm": 19.82965087890625,
      "learning_rate": 1e-05,
      "loss": 4.7595,
      "step": 2
    },
    {
      "epoch": 0.02608695652173913,
      "grad_norm": 18.204021453857422,
      "learning_rate": 2e-05,
      "loss": 4.5244,
      "step": 3
    },
    {
      "epoch": 0.034782608695652174,
      "grad_norm": 29.440780639648438,
      "learning_rate": 3e-05,
      "loss": 5.3648,
      "step": 4
    },
    {
      "epoch": 0.043478260869565216,
      "grad_norm": 16.630733489990234,
      "learning_rate": 4e-05,
      "loss": 4.8256,
      "step": 5
    },
    {
      "epoch": 0.05217391304347826,
      "grad_norm": 14.301246643066406,
      "learning_rate": 5e-05,
      "loss": 3.6577,
      "step": 6
    },
    {
      "epoch": 0.06086956521739131,
      "grad_norm": 15.203990936279297,
      "learning_rate": 6e-05,
      "loss": 3.7917,
      "step": 7
    },
    {
      "epoch": 0.06956521739130435,
      "grad_norm": 15.369929313659668,
      "learning_rate": 7e-05,
      "loss": 4.048,
      "step": 8
    },
    {
      "epoch": 0.0782608695652174,
      "grad_norm": 14.635960578918457,
      "learning_rate": 8e-05,
      "loss": 3.9196,
      "step": 9
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 25.854639053344727,
      "learning_rate": 9e-05,
      "loss": 4.3504,
      "step": 10
    },
    {
      "epoch": 0.09565217391304348,
      "grad_norm": 10.977479934692383,
      "learning_rate": 0.0001,
      "loss": 2.5819,
      "step": 11
    },
    {
      "epoch": 0.10434782608695652,
      "grad_norm": 9.749446868896484,
      "learning_rate": 9.999490215047167e-05,
      "loss": 2.165,
      "step": 12
    },
    {
      "epoch": 0.11304347826086956,
      "grad_norm": 12.812524795532227,
      "learning_rate": 9.997960964140947e-05,
      "loss": 2.5085,
      "step": 13
    },
    {
      "epoch": 0.12173913043478261,
      "grad_norm": 11.711141586303711,
      "learning_rate": 9.995412559116979e-05,
      "loss": 2.7094,
      "step": 14
    },
    {
      "epoch": 0.13043478260869565,
      "grad_norm": 14.878225326538086,
      "learning_rate": 9.991845519630678e-05,
      "loss": 3.0107,
      "step": 15
    },
    {
      "epoch": 0.1391304347826087,
      "grad_norm": 11.596781730651855,
      "learning_rate": 9.987260573051269e-05,
      "loss": 1.996,
      "step": 16
    },
    {
      "epoch": 0.14782608695652175,
      "grad_norm": 10.746283531188965,
      "learning_rate": 9.981658654313457e-05,
      "loss": 2.2152,
      "step": 17
    },
    {
      "epoch": 0.1565217391304348,
      "grad_norm": 10.082755088806152,
      "learning_rate": 9.975040905726798e-05,
      "loss": 1.384,
      "step": 18
    },
    {
      "epoch": 0.16521739130434782,
      "grad_norm": 9.694713592529297,
      "learning_rate": 9.967408676742751e-05,
      "loss": 1.8851,
      "step": 19
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 11.335524559020996,
      "learning_rate": 9.958763523679514e-05,
      "loss": 1.9028,
      "step": 20
    },
    {
      "epoch": 0.1826086956521739,
      "grad_norm": 13.664958953857422,
      "learning_rate": 9.949107209404665e-05,
      "loss": 1.4538,
      "step": 21
    },
    {
      "epoch": 0.19130434782608696,
      "grad_norm": 13.078099250793457,
      "learning_rate": 9.938441702975689e-05,
      "loss": 2.5061,
      "step": 22
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.856505393981934,
      "learning_rate": 9.926769179238466e-05,
      "loss": 1.1623,
      "step": 23
    },
    {
      "epoch": 0.20869565217391303,
      "grad_norm": 10.897032737731934,
      "learning_rate": 9.914092018383778e-05,
      "loss": 1.6242,
      "step": 24
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 10.83500862121582,
      "learning_rate": 9.900412805461967e-05,
      "loss": 1.9913,
      "step": 25
    },
    {
      "epoch": 0.22608695652173913,
      "grad_norm": 8.87816333770752,
      "learning_rate": 9.885734329855798e-05,
      "loss": 1.8755,
      "step": 26
    },
    {
      "epoch": 0.23478260869565218,
      "grad_norm": 14.880556106567383,
      "learning_rate": 9.870059584711668e-05,
      "loss": 1.005,
      "step": 27
    },
    {
      "epoch": 0.24347826086956523,
      "grad_norm": 11.756641387939453,
      "learning_rate": 9.853391766329263e-05,
      "loss": 1.9287,
      "step": 28
    },
    {
      "epoch": 0.25217391304347825,
      "grad_norm": 11.141681671142578,
      "learning_rate": 9.835734273509786e-05,
      "loss": 1.7336,
      "step": 29
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 8.97098159790039,
      "learning_rate": 9.817090706862895e-05,
      "loss": 1.1737,
      "step": 30
    },
    {
      "epoch": 0.26956521739130435,
      "grad_norm": 12.16342544555664,
      "learning_rate": 9.797464868072488e-05,
      "loss": 3.3643,
      "step": 31
    },
    {
      "epoch": 0.2782608695652174,
      "grad_norm": 15.482931137084961,
      "learning_rate": 9.776860759121484e-05,
      "loss": 2.0722,
      "step": 32
    },
    {
      "epoch": 0.28695652173913044,
      "grad_norm": 8.24347972869873,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.2189,
      "step": 33
    },
    {
      "epoch": 0.2956521739130435,
      "grad_norm": 9.246153831481934,
      "learning_rate": 9.73273473522745e-05,
      "loss": 1.295,
      "step": 34
    },
    {
      "epoch": 0.30434782608695654,
      "grad_norm": 13.356026649475098,
      "learning_rate": 9.709221818197624e-05,
      "loss": 1.6265,
      "step": 35
    },
    {
      "epoch": 0.3130434782608696,
      "grad_norm": 9.574901580810547,
      "learning_rate": 9.68474862499881e-05,
      "loss": 1.1326,
      "step": 36
    },
    {
      "epoch": 0.3217391304347826,
      "grad_norm": 15.934743881225586,
      "learning_rate": 9.659320146057262e-05,
      "loss": 1.5065,
      "step": 37
    },
    {
      "epoch": 0.33043478260869563,
      "grad_norm": 9.842390060424805,
      "learning_rate": 9.632941566595357e-05,
      "loss": 1.3366,
      "step": 38
    },
    {
      "epoch": 0.3391304347826087,
      "grad_norm": 12.335067749023438,
      "learning_rate": 9.60561826557425e-05,
      "loss": 0.7185,
      "step": 39
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 12.821420669555664,
      "learning_rate": 9.577355814597031e-05,
      "loss": 2.171,
      "step": 40
    },
    {
      "epoch": 0.3565217391304348,
      "grad_norm": 6.9211745262146,
      "learning_rate": 9.548159976772592e-05,
      "loss": 0.4694,
      "step": 41
    },
    {
      "epoch": 0.3652173913043478,
      "grad_norm": 10.683237075805664,
      "learning_rate": 9.518036705540458e-05,
      "loss": 1.688,
      "step": 42
    },
    {
      "epoch": 0.3739130434782609,
      "grad_norm": 13.161890983581543,
      "learning_rate": 9.486992143456792e-05,
      "loss": 0.8145,
      "step": 43
    },
    {
      "epoch": 0.3826086956521739,
      "grad_norm": 8.92729377746582,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.4569,
      "step": 44
    },
    {
      "epoch": 0.391304347826087,
      "grad_norm": 7.679493427276611,
      "learning_rate": 9.422164654989072e-05,
      "loss": 1.0301,
      "step": 45
    },
    {
      "epoch": 0.4,
      "grad_norm": 9.197248458862305,
      "learning_rate": 9.388394947836279e-05,
      "loss": 1.125,
      "step": 46
    },
    {
      "epoch": 0.40869565217391307,
      "grad_norm": 8.750134468078613,
      "learning_rate": 9.353730385598887e-05,
      "loss": 0.9901,
      "step": 47
    },
    {
      "epoch": 0.41739130434782606,
      "grad_norm": 8.447084426879883,
      "learning_rate": 9.318178036865785e-05,
      "loss": 0.2989,
      "step": 48
    },
    {
      "epoch": 0.4260869565217391,
      "grad_norm": 6.760093688964844,
      "learning_rate": 9.281745151257946e-05,
      "loss": 0.6226,
      "step": 49
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 12.86007022857666,
      "learning_rate": 9.244439157950114e-05,
      "loss": 1.6501,
      "step": 50
    },
    {
      "epoch": 0.4434782608695652,
      "grad_norm": 7.0637125968933105,
      "learning_rate": 9.206267664155907e-05,
      "loss": 0.8073,
      "step": 51
    },
    {
      "epoch": 0.45217391304347826,
      "grad_norm": 8.347302436828613,
      "learning_rate": 9.167238453576589e-05,
      "loss": 0.8773,
      "step": 52
    },
    {
      "epoch": 0.4608695652173913,
      "grad_norm": 12.007081031799316,
      "learning_rate": 9.12735948481387e-05,
      "loss": 1.7717,
      "step": 53
    },
    {
      "epoch": 0.46956521739130436,
      "grad_norm": 8.299239158630371,
      "learning_rate": 9.086638889747035e-05,
      "loss": 1.1201,
      "step": 54
    },
    {
      "epoch": 0.4782608695652174,
      "grad_norm": 9.381499290466309,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.8098,
      "step": 55
    },
    {
      "epoch": 0.48695652173913045,
      "grad_norm": 8.82516860961914,
      "learning_rate": 9.002706204621803e-05,
      "loss": 1.2713,
      "step": 56
    },
    {
      "epoch": 0.4956521739130435,
      "grad_norm": 12.698591232299805,
      "learning_rate": 8.959511229611376e-05,
      "loss": 1.4672,
      "step": 57
    },
    {
      "epoch": 0.5043478260869565,
      "grad_norm": 10.124267578125,
      "learning_rate": 8.915508854902778e-05,
      "loss": 0.7497,
      "step": 58
    },
    {
      "epoch": 0.5130434782608696,
      "grad_norm": 10.243094444274902,
      "learning_rate": 8.870708053195413e-05,
      "loss": 1.0571,
      "step": 59
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 15.050994873046875,
      "learning_rate": 8.825117959999116e-05,
      "loss": 1.1001,
      "step": 60
    },
    {
      "epoch": 0.5304347826086957,
      "grad_norm": 15.316854476928711,
      "learning_rate": 8.778747871771292e-05,
      "loss": 0.784,
      "step": 61
    },
    {
      "epoch": 0.5391304347826087,
      "grad_norm": 3.280144214630127,
      "learning_rate": 8.731607244021236e-05,
      "loss": 0.0856,
      "step": 62
    },
    {
      "epoch": 0.5478260869565217,
      "grad_norm": 8.658632278442383,
      "learning_rate": 8.683705689382024e-05,
      "loss": 1.2149,
      "step": 63
    },
    {
      "epoch": 0.5565217391304348,
      "grad_norm": 10.066658020019531,
      "learning_rate": 8.635052975650369e-05,
      "loss": 1.4377,
      "step": 64
    },
    {
      "epoch": 0.5652173913043478,
      "grad_norm": 10.947000503540039,
      "learning_rate": 8.585659023794818e-05,
      "loss": 1.6525,
      "step": 65
    },
    {
      "epoch": 0.5739130434782609,
      "grad_norm": 8.52258586883545,
      "learning_rate": 8.535533905932738e-05,
      "loss": 1.1801,
      "step": 66
    },
    {
      "epoch": 0.5826086956521739,
      "grad_norm": 9.919696807861328,
      "learning_rate": 8.484687843276469e-05,
      "loss": 1.176,
      "step": 67
    },
    {
      "epoch": 0.591304347826087,
      "grad_norm": 9.4547700881958,
      "learning_rate": 8.433131204049067e-05,
      "loss": 1.0911,
      "step": 68
    },
    {
      "epoch": 0.6,
      "grad_norm": 14.620793342590332,
      "learning_rate": 8.380874501370097e-05,
      "loss": 0.7491,
      "step": 69
    },
    {
      "epoch": 0.6086956521739131,
      "grad_norm": 7.218148708343506,
      "learning_rate": 8.327928391111841e-05,
      "loss": 0.1841,
      "step": 70
    },
    {
      "epoch": 0.6173913043478261,
      "grad_norm": 12.315970420837402,
      "learning_rate": 8.274303669726426e-05,
      "loss": 1.2871,
      "step": 71
    },
    {
      "epoch": 0.6260869565217392,
      "grad_norm": 12.200401306152344,
      "learning_rate": 8.220011272044277e-05,
      "loss": 0.4186,
      "step": 72
    },
    {
      "epoch": 0.6347826086956522,
      "grad_norm": 7.502784252166748,
      "learning_rate": 8.165062269044353e-05,
      "loss": 0.7866,
      "step": 73
    },
    {
      "epoch": 0.6434782608695652,
      "grad_norm": 8.83882999420166,
      "learning_rate": 8.109467865596612e-05,
      "loss": 1.1315,
      "step": 74
    },
    {
      "epoch": 0.6521739130434783,
      "grad_norm": 17.456348419189453,
      "learning_rate": 8.053239398177191e-05,
      "loss": 2.3548,
      "step": 75
    },
    {
      "epoch": 0.6608695652173913,
      "grad_norm": 12.876847267150879,
      "learning_rate": 7.996388332556735e-05,
      "loss": 1.7174,
      "step": 76
    },
    {
      "epoch": 0.6695652173913044,
      "grad_norm": 10.776142120361328,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.9081,
      "step": 77
    },
    {
      "epoch": 0.6782608695652174,
      "grad_norm": 14.020254135131836,
      "learning_rate": 7.880864902213765e-05,
      "loss": 1.9042,
      "step": 78
    },
    {
      "epoch": 0.6869565217391305,
      "grad_norm": 8.45417594909668,
      "learning_rate": 7.822216094333847e-05,
      "loss": 0.9913,
      "step": 79
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 13.904030799865723,
      "learning_rate": 7.762991797134514e-05,
      "loss": 2.1202,
      "step": 80
    },
    {
      "epoch": 0.7043478260869566,
      "grad_norm": 7.09511137008667,
      "learning_rate": 7.703204087277988e-05,
      "loss": 0.6924,
      "step": 81
    },
    {
      "epoch": 0.7130434782608696,
      "grad_norm": 10.574461936950684,
      "learning_rate": 7.64286515631421e-05,
      "loss": 0.667,
      "step": 82
    },
    {
      "epoch": 0.7217391304347827,
      "grad_norm": 17.19155502319336,
      "learning_rate": 7.58198730819481e-05,
      "loss": 2.4983,
      "step": 83
    },
    {
      "epoch": 0.7304347826086957,
      "grad_norm": 11.54223346710205,
      "learning_rate": 7.52058295676416e-05,
      "loss": 1.8265,
      "step": 84
    },
    {
      "epoch": 0.7391304347826086,
      "grad_norm": 7.497072219848633,
      "learning_rate": 7.45866462322802e-05,
      "loss": 0.9592,
      "step": 85
    },
    {
      "epoch": 0.7478260869565218,
      "grad_norm": 18.042118072509766,
      "learning_rate": 7.396244933600285e-05,
      "loss": 1.2019,
      "step": 86
    },
    {
      "epoch": 0.7565217391304347,
      "grad_norm": 8.521622657775879,
      "learning_rate": 7.333336616128369e-05,
      "loss": 0.9159,
      "step": 87
    },
    {
      "epoch": 0.7652173913043478,
      "grad_norm": 6.687935829162598,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.7739,
      "step": 88
    },
    {
      "epoch": 0.7739130434782608,
      "grad_norm": 11.469585418701172,
      "learning_rate": 7.206105506216106e-05,
      "loss": 1.5827,
      "step": 89
    },
    {
      "epoch": 0.782608695652174,
      "grad_norm": 8.240609169006348,
      "learning_rate": 7.141808657977907e-05,
      "loss": 1.0337,
      "step": 90
    },
    {
      "epoch": 0.7913043478260869,
      "grad_norm": 6.628414630889893,
      "learning_rate": 7.077075065009433e-05,
      "loss": 0.8117,
      "step": 91
    },
    {
      "epoch": 0.8,
      "grad_norm": 9.6090087890625,
      "learning_rate": 7.01191792739534e-05,
      "loss": 0.9952,
      "step": 92
    },
    {
      "epoch": 0.808695652173913,
      "grad_norm": 11.751654624938965,
      "learning_rate": 6.946350531586959e-05,
      "loss": 0.999,
      "step": 93
    },
    {
      "epoch": 0.8173913043478261,
      "grad_norm": 9.336933135986328,
      "learning_rate": 6.880386247692999e-05,
      "loss": 0.9874,
      "step": 94
    },
    {
      "epoch": 0.8260869565217391,
      "grad_norm": 10.022204399108887,
      "learning_rate": 6.814038526753205e-05,
      "loss": 1.5146,
      "step": 95
    },
    {
      "epoch": 0.8347826086956521,
      "grad_norm": 6.776736736297607,
      "learning_rate": 6.747320897995493e-05,
      "loss": 0.7498,
      "step": 96
    },
    {
      "epoch": 0.8434782608695652,
      "grad_norm": 3.5592079162597656,
      "learning_rate": 6.680246966077151e-05,
      "loss": 0.3334,
      "step": 97
    },
    {
      "epoch": 0.8521739130434782,
      "grad_norm": 9.817056655883789,
      "learning_rate": 6.61283040831067e-05,
      "loss": 2.1917,
      "step": 98
    },
    {
      "epoch": 0.8608695652173913,
      "grad_norm": 15.627985954284668,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.424,
      "step": 99
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 7.368549823760986,
      "learning_rate": 6.477024471011001e-05,
      "loss": 0.7012,
      "step": 100
    },
    {
      "epoch": 0.8782608695652174,
      "grad_norm": 11.648536682128906,
      "learning_rate": 6.408662784207149e-05,
      "loss": 2.1049,
      "step": 101
    },
    {
      "epoch": 0.8869565217391304,
      "grad_norm": 7.728333950042725,
      "learning_rate": 6.340013851366896e-05,
      "loss": 1.3175,
      "step": 102
    },
    {
      "epoch": 0.8956521739130435,
      "grad_norm": 10.55436897277832,
      "learning_rate": 6.271091670967436e-05,
      "loss": 1.196,
      "step": 103
    },
    {
      "epoch": 0.9043478260869565,
      "grad_norm": 8.97383975982666,
      "learning_rate": 6.201910297204962e-05,
      "loss": 1.0469,
      "step": 104
    },
    {
      "epoch": 0.9130434782608695,
      "grad_norm": 10.435872077941895,
      "learning_rate": 6.132483837128823e-05,
      "loss": 2.2582,
      "step": 105
    },
    {
      "epoch": 0.9217391304347826,
      "grad_norm": 5.654266834259033,
      "learning_rate": 6.062826447764883e-05,
      "loss": 0.3781,
      "step": 106
    },
    {
      "epoch": 0.9304347826086956,
      "grad_norm": 8.847671508789062,
      "learning_rate": 5.992952333228728e-05,
      "loss": 1.2343,
      "step": 107
    },
    {
      "epoch": 0.9391304347826087,
      "grad_norm": 6.885233402252197,
      "learning_rate": 5.9228757418292266e-05,
      "loss": 0.3779,
      "step": 108
    },
    {
      "epoch": 0.9478260869565217,
      "grad_norm": 18.239648818969727,
      "learning_rate": 5.85261096316312e-05,
      "loss": 2.1069,
      "step": 109
    },
    {
      "epoch": 0.9565217391304348,
      "grad_norm": 3.1215038299560547,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.1011,
      "step": 110
    },
    {
      "epoch": 0.9652173913043478,
      "grad_norm": 15.885011672973633,
      "learning_rate": 5.7115741913664264e-05,
      "loss": 1.4789,
      "step": 111
    },
    {
      "epoch": 0.9739130434782609,
      "grad_norm": 7.603321075439453,
      "learning_rate": 5.640830957605465e-05,
      "loss": 0.7941,
      "step": 112
    },
    {
      "epoch": 0.9826086956521739,
      "grad_norm": 11.354793548583984,
      "learning_rate": 5.569957049452703e-05,
      "loss": 2.0018,
      "step": 113
    },
    {
      "epoch": 0.991304347826087,
      "grad_norm": 8.468228340148926,
      "learning_rate": 5.4989669190889136e-05,
      "loss": 1.1619,
      "step": 114
    },
    {
      "epoch": 1.0,
      "grad_norm": 6.8398637771606445,
      "learning_rate": 5.427875042394199e-05,
      "loss": 0.7186,
      "step": 115
    },
    {
      "epoch": 1.008695652173913,
      "grad_norm": 7.124094009399414,
      "learning_rate": 5.3566959159961615e-05,
      "loss": 1.1124,
      "step": 116
    },
    {
      "epoch": 1.017391304347826,
      "grad_norm": 9.615336418151855,
      "learning_rate": 5.2854440543138406e-05,
      "loss": 0.8346,
      "step": 117
    },
    {
      "epoch": 1.0260869565217392,
      "grad_norm": 7.556457042694092,
      "learning_rate": 5.2141339865980134e-05,
      "loss": 0.7831,
      "step": 118
    },
    {
      "epoch": 1.0347826086956522,
      "grad_norm": 7.170627117156982,
      "learning_rate": 5.142780253968481e-05,
      "loss": 0.7333,
      "step": 119
    },
    {
      "epoch": 1.0434782608695652,
      "grad_norm": 8.967455863952637,
      "learning_rate": 5.0713974064489367e-05,
      "loss": 2.6066,
      "step": 120
    },
    {
      "epoch": 1.0521739130434782,
      "grad_norm": 8.012558937072754,
      "learning_rate": 5e-05,
      "loss": 0.6936,
      "step": 121
    },
    {
      "epoch": 1.0608695652173914,
      "grad_norm": 5.820160865783691,
      "learning_rate": 4.928602593551065e-05,
      "loss": 0.5577,
      "step": 122
    },
    {
      "epoch": 1.0695652173913044,
      "grad_norm": 5.0591559410095215,
      "learning_rate": 4.85721974603152e-05,
      "loss": 0.3028,
      "step": 123
    },
    {
      "epoch": 1.0782608695652174,
      "grad_norm": 8.6309814453125,
      "learning_rate": 4.7858660134019884e-05,
      "loss": 1.1837,
      "step": 124
    },
    {
      "epoch": 1.0869565217391304,
      "grad_norm": 11.295366287231445,
      "learning_rate": 4.71455594568616e-05,
      "loss": 1.8619,
      "step": 125
    },
    {
      "epoch": 1.0956521739130434,
      "grad_norm": 7.7538862228393555,
      "learning_rate": 4.643304084003839e-05,
      "loss": 0.8074,
      "step": 126
    },
    {
      "epoch": 1.1043478260869566,
      "grad_norm": 8.09078311920166,
      "learning_rate": 4.5721249576058027e-05,
      "loss": 0.6141,
      "step": 127
    },
    {
      "epoch": 1.1130434782608696,
      "grad_norm": 6.496982574462891,
      "learning_rate": 4.501033080911086e-05,
      "loss": 0.6054,
      "step": 128
    },
    {
      "epoch": 1.1217391304347826,
      "grad_norm": 8.538832664489746,
      "learning_rate": 4.4300429505472976e-05,
      "loss": 1.1904,
      "step": 129
    },
    {
      "epoch": 1.1304347826086956,
      "grad_norm": 9.956181526184082,
      "learning_rate": 4.359169042394536e-05,
      "loss": 1.1105,
      "step": 130
    },
    {
      "epoch": 1.1391304347826088,
      "grad_norm": 5.782883644104004,
      "learning_rate": 4.288425808633575e-05,
      "loss": 0.5066,
      "step": 131
    },
    {
      "epoch": 1.1478260869565218,
      "grad_norm": 7.502802848815918,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 1.662,
      "step": 132
    },
    {
      "epoch": 1.1565217391304348,
      "grad_norm": 5.598113059997559,
      "learning_rate": 4.147389036836881e-05,
      "loss": 0.5835,
      "step": 133
    },
    {
      "epoch": 1.1652173913043478,
      "grad_norm": 4.0234808921813965,
      "learning_rate": 4.077124258170774e-05,
      "loss": 0.0897,
      "step": 134
    },
    {
      "epoch": 1.1739130434782608,
      "grad_norm": 8.106297492980957,
      "learning_rate": 4.007047666771274e-05,
      "loss": 0.8356,
      "step": 135
    },
    {
      "epoch": 1.182608695652174,
      "grad_norm": 6.441939353942871,
      "learning_rate": 3.937173552235117e-05,
      "loss": 0.5413,
      "step": 136
    },
    {
      "epoch": 1.191304347826087,
      "grad_norm": 12.384346961975098,
      "learning_rate": 3.8675161628711776e-05,
      "loss": 0.7925,
      "step": 137
    },
    {
      "epoch": 1.2,
      "grad_norm": 8.959207534790039,
      "learning_rate": 3.798089702795038e-05,
      "loss": 0.715,
      "step": 138
    },
    {
      "epoch": 1.208695652173913,
      "grad_norm": 8.550363540649414,
      "learning_rate": 3.728908329032567e-05,
      "loss": 1.0794,
      "step": 139
    },
    {
      "epoch": 1.2173913043478262,
      "grad_norm": 15.134200096130371,
      "learning_rate": 3.659986148633107e-05,
      "loss": 1.3818,
      "step": 140
    },
    {
      "epoch": 1.2260869565217392,
      "grad_norm": 8.52934741973877,
      "learning_rate": 3.591337215792852e-05,
      "loss": 0.316,
      "step": 141
    },
    {
      "epoch": 1.2347826086956522,
      "grad_norm": 6.627355098724365,
      "learning_rate": 3.522975528989e-05,
      "loss": 0.4346,
      "step": 142
    },
    {
      "epoch": 1.2434782608695651,
      "grad_norm": 10.961922645568848,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.5048,
      "step": 143
    },
    {
      "epoch": 1.2521739130434781,
      "grad_norm": 8.977882385253906,
      "learning_rate": 3.3871695916893314e-05,
      "loss": 0.9586,
      "step": 144
    },
    {
      "epoch": 1.2608695652173914,
      "grad_norm": 8.683388710021973,
      "learning_rate": 3.3197530339228487e-05,
      "loss": 0.9217,
      "step": 145
    },
    {
      "epoch": 1.2695652173913043,
      "grad_norm": 14.419705390930176,
      "learning_rate": 3.2526791020045086e-05,
      "loss": 1.7538,
      "step": 146
    },
    {
      "epoch": 1.2782608695652173,
      "grad_norm": 7.970763206481934,
      "learning_rate": 3.1859614732467954e-05,
      "loss": 0.4354,
      "step": 147
    },
    {
      "epoch": 1.2869565217391306,
      "grad_norm": 8.308634757995605,
      "learning_rate": 3.119613752307002e-05,
      "loss": 0.8199,
      "step": 148
    },
    {
      "epoch": 1.2956521739130435,
      "grad_norm": 7.006349563598633,
      "learning_rate": 3.053649468413043e-05,
      "loss": 0.5617,
      "step": 149
    },
    {
      "epoch": 1.3043478260869565,
      "grad_norm": 8.015650749206543,
      "learning_rate": 2.988082072604661e-05,
      "loss": 0.7712,
      "step": 150
    },
    {
      "epoch": 1.3130434782608695,
      "grad_norm": 9.2400541305542,
      "learning_rate": 2.9229249349905684e-05,
      "loss": 1.4479,
      "step": 151
    },
    {
      "epoch": 1.3217391304347825,
      "grad_norm": 5.748171806335449,
      "learning_rate": 2.858191342022095e-05,
      "loss": 0.389,
      "step": 152
    },
    {
      "epoch": 1.3304347826086955,
      "grad_norm": 7.006728649139404,
      "learning_rate": 2.7938944937838923e-05,
      "loss": 0.5211,
      "step": 153
    },
    {
      "epoch": 1.3391304347826087,
      "grad_norm": 7.924238681793213,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.6353,
      "step": 154
    },
    {
      "epoch": 1.3478260869565217,
      "grad_norm": 10.750234603881836,
      "learning_rate": 2.6666633838716314e-05,
      "loss": 1.1832,
      "step": 155
    },
    {
      "epoch": 1.3565217391304347,
      "grad_norm": 5.400400161743164,
      "learning_rate": 2.603755066399718e-05,
      "loss": 0.1326,
      "step": 156
    },
    {
      "epoch": 1.365217391304348,
      "grad_norm": 8.878824234008789,
      "learning_rate": 2.5413353767719805e-05,
      "loss": 0.9578,
      "step": 157
    },
    {
      "epoch": 1.373913043478261,
      "grad_norm": 6.241739749908447,
      "learning_rate": 2.4794170432358415e-05,
      "loss": 0.4989,
      "step": 158
    },
    {
      "epoch": 1.382608695652174,
      "grad_norm": 19.65182113647461,
      "learning_rate": 2.418012691805191e-05,
      "loss": 2.3024,
      "step": 159
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 10.574295043945312,
      "learning_rate": 2.3571348436857904e-05,
      "loss": 0.8163,
      "step": 160
    },
    {
      "epoch": 1.4,
      "grad_norm": 4.394688606262207,
      "learning_rate": 2.296795912722014e-05,
      "loss": 0.1608,
      "step": 161
    },
    {
      "epoch": 1.4086956521739131,
      "grad_norm": 6.658543109893799,
      "learning_rate": 2.2370082028654866e-05,
      "loss": 0.6319,
      "step": 162
    },
    {
      "epoch": 1.4173913043478261,
      "grad_norm": 3.092381715774536,
      "learning_rate": 2.1777839056661554e-05,
      "loss": 0.0609,
      "step": 163
    },
    {
      "epoch": 1.4260869565217391,
      "grad_norm": 8.153127670288086,
      "learning_rate": 2.119135097786236e-05,
      "loss": 0.8792,
      "step": 164
    },
    {
      "epoch": 1.434782608695652,
      "grad_norm": 3.8503527641296387,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.0882,
      "step": 165
    },
    {
      "epoch": 1.4434782608695653,
      "grad_norm": 13.175821304321289,
      "learning_rate": 2.0036116674432654e-05,
      "loss": 1.2865,
      "step": 166
    },
    {
      "epoch": 1.4521739130434783,
      "grad_norm": 7.390486717224121,
      "learning_rate": 1.946760601822809e-05,
      "loss": 0.5875,
      "step": 167
    },
    {
      "epoch": 1.4608695652173913,
      "grad_norm": 8.266345024108887,
      "learning_rate": 1.8905321344033898e-05,
      "loss": 0.2723,
      "step": 168
    },
    {
      "epoch": 1.4695652173913043,
      "grad_norm": 8.177164077758789,
      "learning_rate": 1.8349377309556486e-05,
      "loss": 0.6457,
      "step": 169
    },
    {
      "epoch": 1.4782608695652173,
      "grad_norm": 6.325401306152344,
      "learning_rate": 1.7799887279557237e-05,
      "loss": 0.6313,
      "step": 170
    },
    {
      "epoch": 1.4869565217391305,
      "grad_norm": 11.269718170166016,
      "learning_rate": 1.725696330273575e-05,
      "loss": 0.8341,
      "step": 171
    },
    {
      "epoch": 1.4956521739130435,
      "grad_norm": 9.295413970947266,
      "learning_rate": 1.6720716088881594e-05,
      "loss": 1.0624,
      "step": 172
    },
    {
      "epoch": 1.5043478260869565,
      "grad_norm": 10.624412536621094,
      "learning_rate": 1.619125498629904e-05,
      "loss": 1.374,
      "step": 173
    },
    {
      "epoch": 1.5130434782608697,
      "grad_norm": 8.308755874633789,
      "learning_rate": 1.566868795950932e-05,
      "loss": 0.2568,
      "step": 174
    },
    {
      "epoch": 1.5217391304347827,
      "grad_norm": 9.137283325195312,
      "learning_rate": 1.5153121567235335e-05,
      "loss": 0.6905,
      "step": 175
    },
    {
      "epoch": 1.5304347826086957,
      "grad_norm": 8.51821517944336,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.733,
      "step": 176
    },
    {
      "epoch": 1.5391304347826087,
      "grad_norm": 8.871479034423828,
      "learning_rate": 1.414340976205183e-05,
      "loss": 0.8432,
      "step": 177
    },
    {
      "epoch": 1.5478260869565217,
      "grad_norm": 7.690972328186035,
      "learning_rate": 1.3649470243496326e-05,
      "loss": 0.5685,
      "step": 178
    },
    {
      "epoch": 1.5565217391304347,
      "grad_norm": 7.664311408996582,
      "learning_rate": 1.3162943106179749e-05,
      "loss": 0.6617,
      "step": 179
    },
    {
      "epoch": 1.5652173913043477,
      "grad_norm": 7.690062999725342,
      "learning_rate": 1.2683927559787655e-05,
      "loss": 0.5499,
      "step": 180
    },
    {
      "epoch": 1.5739130434782609,
      "grad_norm": 2.8447952270507812,
      "learning_rate": 1.2212521282287092e-05,
      "loss": 0.0586,
      "step": 181
    },
    {
      "epoch": 1.5826086956521739,
      "grad_norm": 7.366944313049316,
      "learning_rate": 1.1748820400008843e-05,
      "loss": 0.6262,
      "step": 182
    },
    {
      "epoch": 1.591304347826087,
      "grad_norm": 10.251835823059082,
      "learning_rate": 1.1292919468045877e-05,
      "loss": 0.7647,
      "step": 183
    },
    {
      "epoch": 1.6,
      "grad_norm": 10.744357109069824,
      "learning_rate": 1.0844911450972229e-05,
      "loss": 0.4428,
      "step": 184
    },
    {
      "epoch": 1.608695652173913,
      "grad_norm": 7.546830654144287,
      "learning_rate": 1.0404887703886251e-05,
      "loss": 0.7932,
      "step": 185
    },
    {
      "epoch": 1.617391304347826,
      "grad_norm": 5.92943000793457,
      "learning_rate": 9.972937953781986e-06,
      "loss": 0.3523,
      "step": 186
    },
    {
      "epoch": 1.626086956521739,
      "grad_norm": 8.205349922180176,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.5491,
      "step": 187
    },
    {
      "epoch": 1.634782608695652,
      "grad_norm": 3.2450342178344727,
      "learning_rate": 9.133611102529654e-06,
      "loss": 0.071,
      "step": 188
    },
    {
      "epoch": 1.643478260869565,
      "grad_norm": 3.5468215942382812,
      "learning_rate": 8.7264051518613e-06,
      "loss": 0.0861,
      "step": 189
    },
    {
      "epoch": 1.6521739130434783,
      "grad_norm": 7.733258247375488,
      "learning_rate": 8.327615464234129e-06,
      "loss": 0.6044,
      "step": 190
    },
    {
      "epoch": 1.6608695652173913,
      "grad_norm": 9.37753963470459,
      "learning_rate": 7.937323358440935e-06,
      "loss": 0.9109,
      "step": 191
    },
    {
      "epoch": 1.6695652173913045,
      "grad_norm": 5.947280406951904,
      "learning_rate": 7.555608420498872e-06,
      "loss": 0.5775,
      "step": 192
    },
    {
      "epoch": 1.6782608695652175,
      "grad_norm": 17.63241195678711,
      "learning_rate": 7.182548487420554e-06,
      "loss": 1.5021,
      "step": 193
    },
    {
      "epoch": 1.6869565217391305,
      "grad_norm": 5.9392194747924805,
      "learning_rate": 6.818219631342149e-06,
      "loss": 0.1202,
      "step": 194
    },
    {
      "epoch": 1.6956521739130435,
      "grad_norm": 10.098470687866211,
      "learning_rate": 6.462696144011149e-06,
      "loss": 0.9951,
      "step": 195
    },
    {
      "epoch": 1.7043478260869565,
      "grad_norm": 6.320773124694824,
      "learning_rate": 6.116050521637218e-06,
      "loss": 0.3461,
      "step": 196
    },
    {
      "epoch": 1.7130434782608694,
      "grad_norm": 6.652013778686523,
      "learning_rate": 5.778353450109286e-06,
      "loss": 0.5867,
      "step": 197
    },
    {
      "epoch": 1.7217391304347827,
      "grad_norm": 6.05897855758667,
      "learning_rate": 5.449673790581611e-06,
      "loss": 0.3891,
      "step": 198
    },
    {
      "epoch": 1.7304347826086957,
      "grad_norm": 3.006110668182373,
      "learning_rate": 5.13007856543209e-06,
      "loss": 0.0702,
      "step": 199
    },
    {
      "epoch": 1.7391304347826086,
      "grad_norm": 8.747302055358887,
      "learning_rate": 4.819632944595415e-06,
      "loss": 0.7109,
      "step": 200
    },
    {
      "epoch": 1.7478260869565219,
      "grad_norm": 12.901275634765625,
      "learning_rate": 4.5184002322740785e-06,
      "loss": 0.6487,
      "step": 201
    },
    {
      "epoch": 1.7565217391304349,
      "grad_norm": 9.19820499420166,
      "learning_rate": 4.2264418540297e-06,
      "loss": 1.3881,
      "step": 202
    },
    {
      "epoch": 1.7652173913043478,
      "grad_norm": 7.706724166870117,
      "learning_rate": 3.9438173442575e-06,
      "loss": 0.7404,
      "step": 203
    },
    {
      "epoch": 1.7739130434782608,
      "grad_norm": 7.71926736831665,
      "learning_rate": 3.6705843340464286e-06,
      "loss": 0.4732,
      "step": 204
    },
    {
      "epoch": 1.7826086956521738,
      "grad_norm": 5.502258777618408,
      "learning_rate": 3.406798539427386e-06,
      "loss": 0.1223,
      "step": 205
    },
    {
      "epoch": 1.7913043478260868,
      "grad_norm": 7.742398262023926,
      "learning_rate": 3.1525137500119207e-06,
      "loss": 0.5115,
      "step": 206
    },
    {
      "epoch": 1.8,
      "grad_norm": 10.397440910339355,
      "learning_rate": 2.9077818180237693e-06,
      "loss": 0.7478,
      "step": 207
    },
    {
      "epoch": 1.808695652173913,
      "grad_norm": 7.683396339416504,
      "learning_rate": 2.6726526477254987e-06,
      "loss": 0.7615,
      "step": 208
    },
    {
      "epoch": 1.8173913043478263,
      "grad_norm": 6.312471389770508,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.5505,
      "step": 209
    },
    {
      "epoch": 1.8260869565217392,
      "grad_norm": 7.305165767669678,
      "learning_rate": 2.2313924087851656e-06,
      "loss": 0.5583,
      "step": 210
    },
    {
      "epoch": 1.8347826086956522,
      "grad_norm": 6.8820929527282715,
      "learning_rate": 2.0253513192751373e-06,
      "loss": 0.3772,
      "step": 211
    },
    {
      "epoch": 1.8434782608695652,
      "grad_norm": 6.177343845367432,
      "learning_rate": 1.8290929313710513e-06,
      "loss": 0.3328,
      "step": 212
    },
    {
      "epoch": 1.8521739130434782,
      "grad_norm": 5.385695457458496,
      "learning_rate": 1.6426572649021476e-06,
      "loss": 0.1388,
      "step": 213
    },
    {
      "epoch": 1.8608695652173912,
      "grad_norm": 8.022261619567871,
      "learning_rate": 1.4660823367073751e-06,
      "loss": 0.565,
      "step": 214
    },
    {
      "epoch": 1.8695652173913042,
      "grad_norm": 9.438583374023438,
      "learning_rate": 1.2994041528833266e-06,
      "loss": 0.8478,
      "step": 215
    },
    {
      "epoch": 1.8782608695652174,
      "grad_norm": 7.929783344268799,
      "learning_rate": 1.1426567014420297e-06,
      "loss": 0.5489,
      "step": 216
    },
    {
      "epoch": 1.8869565217391304,
      "grad_norm": 12.173213958740234,
      "learning_rate": 9.958719453803278e-07,
      "loss": 1.9477,
      "step": 217
    },
    {
      "epoch": 1.8956521739130436,
      "grad_norm": 6.1458740234375,
      "learning_rate": 8.590798161622227e-07,
      "loss": 0.4285,
      "step": 218
    },
    {
      "epoch": 1.9043478260869566,
      "grad_norm": 11.562338829040527,
      "learning_rate": 7.323082076153509e-07,
      "loss": 1.2916,
      "step": 219
    },
    {
      "epoch": 1.9130434782608696,
      "grad_norm": 17.643951416015625,
      "learning_rate": 6.15582970243117e-07,
      "loss": 1.6767,
      "step": 220
    },
    {
      "epoch": 1.9217391304347826,
      "grad_norm": 10.295869827270508,
      "learning_rate": 5.089279059533658e-07,
      "loss": 0.8823,
      "step": 221
    },
    {
      "epoch": 1.9304347826086956,
      "grad_norm": 6.318659782409668,
      "learning_rate": 4.123647632048644e-07,
      "loss": 0.525,
      "step": 222
    },
    {
      "epoch": 1.9391304347826086,
      "grad_norm": 5.823322772979736,
      "learning_rate": 3.2591323257248893e-07,
      "loss": 0.1522,
      "step": 223
    },
    {
      "epoch": 1.9478260869565216,
      "grad_norm": 7.684267520904541,
      "learning_rate": 2.4959094273201977e-07,
      "loss": 0.4218,
      "step": 224
    },
    {
      "epoch": 1.9565217391304348,
      "grad_norm": 7.761279582977295,
      "learning_rate": 1.8341345686543332e-07,
      "loss": 0.4741,
      "step": 225
    },
    {
      "epoch": 1.9652173913043478,
      "grad_norm": 9.738239288330078,
      "learning_rate": 1.2739426948732424e-07,
      "loss": 1.1119,
      "step": 226
    },
    {
      "epoch": 1.973913043478261,
      "grad_norm": 8.011734962463379,
      "learning_rate": 8.15448036932176e-08,
      "loss": 0.8768,
      "step": 227
    },
    {
      "epoch": 1.982608695652174,
      "grad_norm": 3.6634368896484375,
      "learning_rate": 4.5874408830215434e-08,
      "loss": 0.0662,
      "step": 228
    },
    {
      "epoch": 1.991304347826087,
      "grad_norm": 9.118965148925781,
      "learning_rate": 2.0390358590538504e-08,
      "loss": 0.7163,
      "step": 229
    },
    {
      "epoch": 2.0,
      "grad_norm": 7.614067554473877,
      "learning_rate": 5.097849528334919e-09,
      "loss": 0.6342,
      "step": 230
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 230,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 33878645932032.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}