{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.9609375,
  "eval_steps": 500,
  "global_step": 340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01171875,
      "grad_norm": 36.23282241821289,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.3839,
      "step": 1
    },
    {
      "epoch": 0.0234375,
      "grad_norm": 35.918636322021484,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.3798,
      "step": 2
    },
    {
      "epoch": 0.03515625,
      "grad_norm": 35.62618637084961,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.386,
      "step": 3
    },
    {
      "epoch": 0.046875,
      "grad_norm": 35.966087341308594,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.3803,
      "step": 4
    },
    {
      "epoch": 0.05859375,
      "grad_norm": 35.38177490234375,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.3937,
      "step": 5
    },
    {
      "epoch": 0.0703125,
      "grad_norm": 35.99677658081055,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.3906,
      "step": 6
    },
    {
      "epoch": 0.08203125,
      "grad_norm": 35.44341278076172,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.3539,
      "step": 7
    },
    {
      "epoch": 0.09375,
      "grad_norm": 35.300697326660156,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.3459,
      "step": 8
    },
    {
      "epoch": 0.10546875,
      "grad_norm": 34.092952728271484,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.2959,
      "step": 9
    },
    {
      "epoch": 0.1171875,
      "grad_norm": 34.46371841430664,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.2661,
      "step": 10
    },
    {
      "epoch": 0.12890625,
      "grad_norm": 34.62260818481445,
      "learning_rate": 5.5e-07,
      "loss": 2.2918,
      "step": 11
    },
    {
      "epoch": 0.140625,
      "grad_norm": 33.790374755859375,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.223,
      "step": 12
    },
    {
      "epoch": 0.15234375,
      "grad_norm": 33.766536712646484,
      "learning_rate": 6.5e-07,
      "loss": 2.2267,
      "step": 13
    },
    {
      "epoch": 0.1640625,
      "grad_norm": 33.894081115722656,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.1465,
      "step": 14
    },
    {
      "epoch": 0.17578125,
      "grad_norm": 33.162452697753906,
      "learning_rate": 7.5e-07,
      "loss": 2.0495,
      "step": 15
    },
    {
      "epoch": 0.1875,
      "grad_norm": 32.954341888427734,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.9627,
      "step": 16
    },
    {
      "epoch": 0.19921875,
      "grad_norm": 33.96324157714844,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8867,
      "step": 17
    },
    {
      "epoch": 0.2109375,
      "grad_norm": 33.81139373779297,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7752,
      "step": 18
    },
    {
      "epoch": 0.22265625,
      "grad_norm": 34.87086868286133,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.6944,
      "step": 19
    },
    {
      "epoch": 0.234375,
      "grad_norm": 34.84965133666992,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.5707,
      "step": 20
    },
    {
      "epoch": 0.24609375,
      "grad_norm": 35.227317810058594,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.4369,
      "step": 21
    },
    {
      "epoch": 0.2578125,
      "grad_norm": 34.91344451904297,
      "learning_rate": 1.1e-06,
      "loss": 1.3202,
      "step": 22
    },
    {
      "epoch": 0.26953125,
      "grad_norm": 31.7376766204834,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.1398,
      "step": 23
    },
    {
      "epoch": 0.28125,
      "grad_norm": 30.24741554260254,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.0421,
      "step": 24
    },
    {
      "epoch": 0.29296875,
      "grad_norm": 28.292400360107422,
      "learning_rate": 1.25e-06,
      "loss": 0.8817,
      "step": 25
    },
    {
      "epoch": 0.3046875,
      "grad_norm": 30.44672393798828,
      "learning_rate": 1.3e-06,
      "loss": 0.7073,
      "step": 26
    },
    {
      "epoch": 0.31640625,
      "grad_norm": 29.416427612304688,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.5444,
      "step": 27
    },
    {
      "epoch": 0.328125,
      "grad_norm": 24.820096969604492,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.4025,
      "step": 28
    },
    {
      "epoch": 0.33984375,
      "grad_norm": 21.023277282714844,
      "learning_rate": 1.45e-06,
      "loss": 0.307,
      "step": 29
    },
    {
      "epoch": 0.3515625,
      "grad_norm": 19.656967163085938,
      "learning_rate": 1.5e-06,
      "loss": 0.2151,
      "step": 30
    },
    {
      "epoch": 0.36328125,
      "grad_norm": 14.91929817199707,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1448,
      "step": 31
    },
    {
      "epoch": 0.375,
      "grad_norm": 5.083199977874756,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.09,
      "step": 32
    },
    {
      "epoch": 0.38671875,
      "grad_norm": 2.320681571960449,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0641,
      "step": 33
    },
    {
      "epoch": 0.3984375,
      "grad_norm": 1.6233159303665161,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0584,
      "step": 34
    },
    {
      "epoch": 0.41015625,
      "grad_norm": 1.6057201623916626,
      "learning_rate": 1.75e-06,
      "loss": 0.0626,
      "step": 35
    },
    {
      "epoch": 0.421875,
      "grad_norm": 1.8360320329666138,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0563,
      "step": 36
    },
    {
      "epoch": 0.43359375,
      "grad_norm": 1.736350178718567,
      "learning_rate": 1.85e-06,
      "loss": 0.0609,
      "step": 37
    },
    {
      "epoch": 0.4453125,
      "grad_norm": 1.1473922729492188,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0541,
      "step": 38
    },
    {
      "epoch": 0.45703125,
      "grad_norm": 1.1722168922424316,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0534,
      "step": 39
    },
    {
      "epoch": 0.46875,
      "grad_norm": 1.356987714767456,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0496,
      "step": 40
    },
    {
      "epoch": 0.48046875,
      "grad_norm": 0.8023216724395752,
      "learning_rate": 2.05e-06,
      "loss": 0.0527,
      "step": 41
    },
    {
      "epoch": 0.4921875,
      "grad_norm": 0.9803515672683716,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0478,
      "step": 42
    },
    {
      "epoch": 0.50390625,
      "grad_norm": 0.8733468651771545,
      "learning_rate": 2.15e-06,
      "loss": 0.052,
      "step": 43
    },
    {
      "epoch": 0.515625,
      "grad_norm": 0.8213743567466736,
      "learning_rate": 2.2e-06,
      "loss": 0.0448,
      "step": 44
    },
    {
      "epoch": 0.52734375,
      "grad_norm": 0.843189537525177,
      "learning_rate": 2.25e-06,
      "loss": 0.0498,
      "step": 45
    },
    {
      "epoch": 0.5390625,
      "grad_norm": 0.8801079392433167,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0408,
      "step": 46
    },
    {
      "epoch": 0.55078125,
      "grad_norm": 0.7131401300430298,
      "learning_rate": 2.35e-06,
      "loss": 0.0405,
      "step": 47
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.8996126651763916,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0525,
      "step": 48
    },
    {
      "epoch": 0.57421875,
      "grad_norm": 0.8606986403465271,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0438,
      "step": 49
    },
    {
      "epoch": 0.5859375,
      "grad_norm": 0.6918051838874817,
      "learning_rate": 2.5e-06,
      "loss": 0.0394,
      "step": 50
    },
    {
      "epoch": 0.59765625,
      "grad_norm": 0.6177802085876465,
      "learning_rate": 2.55e-06,
      "loss": 0.0387,
      "step": 51
    },
    {
      "epoch": 0.609375,
      "grad_norm": 0.7042555809020996,
      "learning_rate": 2.6e-06,
      "loss": 0.0434,
      "step": 52
    },
    {
      "epoch": 0.62109375,
      "grad_norm": 0.6537717580795288,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0396,
      "step": 53
    },
    {
      "epoch": 0.6328125,
      "grad_norm": 0.7834082841873169,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0411,
      "step": 54
    },
    {
      "epoch": 0.64453125,
      "grad_norm": 0.7287272810935974,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0408,
      "step": 55
    },
    {
      "epoch": 0.65625,
      "grad_norm": 0.7186263203620911,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0394,
      "step": 56
    },
    {
      "epoch": 0.66796875,
      "grad_norm": 0.7264899611473083,
      "learning_rate": 2.85e-06,
      "loss": 0.0427,
      "step": 57
    },
    {
      "epoch": 0.6796875,
      "grad_norm": 0.7665618062019348,
      "learning_rate": 2.9e-06,
      "loss": 0.0368,
      "step": 58
    },
    {
      "epoch": 0.69140625,
      "grad_norm": 0.7222962379455566,
      "learning_rate": 2.95e-06,
      "loss": 0.0412,
      "step": 59
    },
    {
      "epoch": 0.703125,
      "grad_norm": 0.7061101794242859,
      "learning_rate": 3e-06,
      "loss": 0.0377,
      "step": 60
    },
    {
      "epoch": 0.71484375,
      "grad_norm": 0.5724324584007263,
      "learning_rate": 3.05e-06,
      "loss": 0.0387,
      "step": 61
    },
    {
      "epoch": 0.7265625,
      "grad_norm": 0.5535506010055542,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0403,
      "step": 62
    },
    {
      "epoch": 0.73828125,
      "grad_norm": 0.6553678512573242,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0415,
      "step": 63
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.6137285828590393,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0383,
      "step": 64
    },
    {
      "epoch": 0.76171875,
      "grad_norm": 0.5985754132270813,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0355,
      "step": 65
    },
    {
      "epoch": 0.7734375,
      "grad_norm": 0.5903909802436829,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0374,
      "step": 66
    },
    {
      "epoch": 0.78515625,
      "grad_norm": 0.5718765258789062,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0339,
      "step": 67
    },
    {
      "epoch": 0.796875,
      "grad_norm": 0.6844965815544128,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0405,
      "step": 68
    },
    {
      "epoch": 0.80859375,
      "grad_norm": 0.5959618091583252,
      "learning_rate": 3.45e-06,
      "loss": 0.0338,
      "step": 69
    },
    {
      "epoch": 0.8203125,
      "grad_norm": 0.6095123291015625,
      "learning_rate": 3.5e-06,
      "loss": 0.0362,
      "step": 70
    },
    {
      "epoch": 0.83203125,
      "grad_norm": 0.543708086013794,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0355,
      "step": 71
    },
    {
      "epoch": 0.84375,
      "grad_norm": 0.6969983577728271,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0325,
      "step": 72
    },
    {
      "epoch": 0.85546875,
      "grad_norm": 0.6022969484329224,
      "learning_rate": 3.65e-06,
      "loss": 0.0342,
      "step": 73
    },
    {
      "epoch": 0.8671875,
      "grad_norm": 0.6262147426605225,
      "learning_rate": 3.7e-06,
      "loss": 0.0348,
      "step": 74
    },
    {
      "epoch": 0.87890625,
      "grad_norm": 0.5729933381080627,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0318,
      "step": 75
    },
    {
      "epoch": 0.890625,
      "grad_norm": 0.5846775770187378,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0309,
      "step": 76
    },
    {
      "epoch": 0.90234375,
      "grad_norm": 0.6469219923019409,
      "learning_rate": 3.85e-06,
      "loss": 0.0324,
      "step": 77
    },
    {
      "epoch": 0.9140625,
      "grad_norm": 0.6574859023094177,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0325,
      "step": 78
    },
    {
      "epoch": 0.92578125,
      "grad_norm": 0.5833832025527954,
      "learning_rate": 3.95e-06,
      "loss": 0.0232,
      "step": 79
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.7503570318222046,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0267,
      "step": 80
    },
    {
      "epoch": 0.94921875,
      "grad_norm": 0.7181633114814758,
      "learning_rate": 4.05e-06,
      "loss": 0.0304,
      "step": 81
    },
    {
      "epoch": 0.9609375,
      "grad_norm": 0.6477274298667908,
      "learning_rate": 4.1e-06,
      "loss": 0.0297,
      "step": 82
    },
    {
      "epoch": 0.97265625,
      "grad_norm": 0.6768563389778137,
      "learning_rate": 4.15e-06,
      "loss": 0.0279,
      "step": 83
    },
    {
      "epoch": 0.984375,
      "grad_norm": 0.7905837297439575,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0301,
      "step": 84
    },
    {
      "epoch": 0.99609375,
      "grad_norm": 0.5576608777046204,
      "learning_rate": 4.25e-06,
      "loss": 0.0322,
      "step": 85
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5576608777046204,
      "learning_rate": 4.3e-06,
      "loss": 0.0226,
      "step": 86
    },
    {
      "epoch": 1.01171875,
      "grad_norm": 1.0774812698364258,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0215,
      "step": 87
    },
    {
      "epoch": 1.0234375,
      "grad_norm": 0.47373324632644653,
      "learning_rate": 4.4e-06,
      "loss": 0.0235,
      "step": 88
    },
    {
      "epoch": 1.03515625,
      "grad_norm": 0.7665970325469971,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0242,
      "step": 89
    },
    {
      "epoch": 1.046875,
      "grad_norm": 0.6290147304534912,
      "learning_rate": 4.5e-06,
      "loss": 0.0209,
      "step": 90
    },
    {
      "epoch": 1.05859375,
      "grad_norm": 0.5703024864196777,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0192,
      "step": 91
    },
    {
      "epoch": 1.0703125,
      "grad_norm": 0.6099259853363037,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0181,
      "step": 92
    },
    {
      "epoch": 1.08203125,
      "grad_norm": 0.6570988297462463,
      "learning_rate": 4.65e-06,
      "loss": 0.0201,
      "step": 93
    },
    {
      "epoch": 1.09375,
      "grad_norm": 0.7848325371742249,
      "learning_rate": 4.7e-06,
      "loss": 0.0253,
      "step": 94
    },
    {
      "epoch": 1.10546875,
      "grad_norm": 0.6759209036827087,
      "learning_rate": 4.75e-06,
      "loss": 0.0195,
      "step": 95
    },
    {
      "epoch": 1.1171875,
      "grad_norm": 0.4861151874065399,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0191,
      "step": 96
    },
    {
      "epoch": 1.12890625,
      "grad_norm": 0.6268576383590698,
      "learning_rate": 4.85e-06,
      "loss": 0.0211,
      "step": 97
    },
    {
      "epoch": 1.140625,
      "grad_norm": 0.5862017869949341,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0177,
      "step": 98
    },
    {
      "epoch": 1.15234375,
      "grad_norm": 0.4569724202156067,
      "learning_rate": 4.95e-06,
      "loss": 0.0164,
      "step": 99
    },
    {
      "epoch": 1.1640625,
      "grad_norm": 0.4539048969745636,
      "learning_rate": 5e-06,
      "loss": 0.0152,
      "step": 100
    },
    {
      "epoch": 1.17578125,
      "grad_norm": 0.4553528428077698,
      "learning_rate": 4.999926609487568e-06,
      "loss": 0.0208,
      "step": 101
    },
    {
      "epoch": 1.1875,
      "grad_norm": 0.5182592272758484,
      "learning_rate": 4.999706442259205e-06,
      "loss": 0.0154,
      "step": 102
    },
    {
      "epoch": 1.19921875,
      "grad_norm": 0.5602673888206482,
      "learning_rate": 4.999339511241458e-06,
      "loss": 0.0196,
      "step": 103
    },
    {
      "epoch": 1.2109375,
      "grad_norm": 0.7579494118690491,
      "learning_rate": 4.9988258379777334e-06,
      "loss": 0.0198,
      "step": 104
    },
    {
      "epoch": 1.22265625,
      "grad_norm": 0.603757381439209,
      "learning_rate": 4.998165452627025e-06,
      "loss": 0.0185,
      "step": 105
    },
    {
      "epoch": 1.234375,
      "grad_norm": 0.5520291924476624,
      "learning_rate": 4.99735839396215e-06,
      "loss": 0.018,
      "step": 106
    },
    {
      "epoch": 1.24609375,
      "grad_norm": 0.55808424949646,
      "learning_rate": 4.996404709367466e-06,
      "loss": 0.0159,
      "step": 107
    },
    {
      "epoch": 1.2578125,
      "grad_norm": 0.47174298763275146,
      "learning_rate": 4.995304454836095e-06,
      "loss": 0.0122,
      "step": 108
    },
    {
      "epoch": 1.26953125,
      "grad_norm": 0.5289337038993835,
      "learning_rate": 4.994057694966632e-06,
      "loss": 0.0168,
      "step": 109
    },
    {
      "epoch": 1.28125,
      "grad_norm": 0.5390430092811584,
      "learning_rate": 4.992664502959351e-06,
      "loss": 0.017,
      "step": 110
    },
    {
      "epoch": 1.29296875,
      "grad_norm": 0.4966451823711395,
      "learning_rate": 4.991124960611916e-06,
      "loss": 0.0145,
      "step": 111
    },
    {
      "epoch": 1.3046875,
      "grad_norm": 0.6148604154586792,
      "learning_rate": 4.989439158314566e-06,
      "loss": 0.0139,
      "step": 112
    },
    {
      "epoch": 1.31640625,
      "grad_norm": 0.6303534507751465,
      "learning_rate": 4.9876071950448185e-06,
      "loss": 0.0118,
      "step": 113
    },
    {
      "epoch": 1.328125,
      "grad_norm": 0.5410207509994507,
      "learning_rate": 4.98562917836165e-06,
      "loss": 0.0094,
      "step": 114
    },
    {
      "epoch": 1.33984375,
      "grad_norm": 0.5350080132484436,
      "learning_rate": 4.983505224399188e-06,
      "loss": 0.0158,
      "step": 115
    },
    {
      "epoch": 1.3515625,
      "grad_norm": 1.017317295074463,
      "learning_rate": 4.9812354578598876e-06,
      "loss": 0.0201,
      "step": 116
    },
    {
      "epoch": 1.36328125,
      "grad_norm": 0.6891007423400879,
      "learning_rate": 4.978820012007213e-06,
      "loss": 0.0127,
      "step": 117
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.4756389260292053,
      "learning_rate": 4.976259028657812e-06,
      "loss": 0.0188,
      "step": 118
    },
    {
      "epoch": 1.38671875,
      "grad_norm": 0.5957350730895996,
      "learning_rate": 4.973552658173186e-06,
      "loss": 0.011,
      "step": 119
    },
    {
      "epoch": 1.3984375,
      "grad_norm": 0.5012223720550537,
      "learning_rate": 4.970701059450872e-06,
      "loss": 0.0138,
      "step": 120
    },
    {
      "epoch": 1.41015625,
      "grad_norm": 0.4408419132232666,
      "learning_rate": 4.9677043999151e-06,
      "loss": 0.0144,
      "step": 121
    },
    {
      "epoch": 1.421875,
      "grad_norm": 0.5721736550331116,
      "learning_rate": 4.964562855506976e-06,
      "loss": 0.0135,
      "step": 122
    },
    {
      "epoch": 1.43359375,
      "grad_norm": 0.5479208827018738,
      "learning_rate": 4.961276610674141e-06,
      "loss": 0.0128,
      "step": 123
    },
    {
      "epoch": 1.4453125,
      "grad_norm": 1.0117675065994263,
      "learning_rate": 4.9578458583599495e-06,
      "loss": 0.0111,
      "step": 124
    },
    {
      "epoch": 1.45703125,
      "grad_norm": 0.5504026412963867,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.0083,
      "step": 125
    },
    {
      "epoch": 1.46875,
      "grad_norm": 0.48403099179267883,
      "learning_rate": 4.950551645470998e-06,
      "loss": 0.0083,
      "step": 126
    },
    {
      "epoch": 1.48046875,
      "grad_norm": 0.6866800785064697,
      "learning_rate": 4.9466886131570565e-06,
      "loss": 0.0085,
      "step": 127
    },
    {
      "epoch": 1.4921875,
      "grad_norm": 0.872557520866394,
      "learning_rate": 4.942681929858249e-06,
      "loss": 0.0102,
      "step": 128
    },
    {
      "epoch": 1.50390625,
      "grad_norm": 0.6924716234207153,
      "learning_rate": 4.9385318308166065e-06,
      "loss": 0.012,
      "step": 129
    },
    {
      "epoch": 1.515625,
      "grad_norm": 0.5060118436813354,
      "learning_rate": 4.934238559694448e-06,
      "loss": 0.0084,
      "step": 130
    },
    {
      "epoch": 1.52734375,
      "grad_norm": 0.6256171464920044,
      "learning_rate": 4.929802368560066e-06,
      "loss": 0.0081,
      "step": 131
    },
    {
      "epoch": 1.5390625,
      "grad_norm": 0.5422537922859192,
      "learning_rate": 4.925223517872934e-06,
      "loss": 0.0077,
      "step": 132
    },
    {
      "epoch": 1.55078125,
      "grad_norm": 0.953416109085083,
      "learning_rate": 4.920502276468408e-06,
      "loss": 0.0078,
      "step": 133
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.4540804624557495,
      "learning_rate": 4.915638921541952e-06,
      "loss": 0.0097,
      "step": 134
    },
    {
      "epoch": 1.57421875,
      "grad_norm": 0.3773641884326935,
      "learning_rate": 4.9106337386328524e-06,
      "loss": 0.0098,
      "step": 135
    },
    {
      "epoch": 1.5859375,
      "grad_norm": 0.7970175743103027,
      "learning_rate": 4.905487021607462e-06,
      "loss": 0.0056,
      "step": 136
    },
    {
      "epoch": 1.59765625,
      "grad_norm": 0.45197635889053345,
      "learning_rate": 4.900199072641937e-06,
      "loss": 0.0078,
      "step": 137
    },
    {
      "epoch": 1.609375,
      "grad_norm": 0.38231438398361206,
      "learning_rate": 4.894770202204509e-06,
      "loss": 0.0072,
      "step": 138
    },
    {
      "epoch": 1.62109375,
      "grad_norm": 0.2945426404476166,
      "learning_rate": 4.889200729037241e-06,
      "loss": 0.0086,
      "step": 139
    },
    {
      "epoch": 1.6328125,
      "grad_norm": 0.49699363112449646,
      "learning_rate": 4.883490980137327e-06,
      "loss": 0.0073,
      "step": 140
    },
    {
      "epoch": 1.64453125,
      "grad_norm": 0.38112956285476685,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.0056,
      "step": 141
    },
    {
      "epoch": 1.65625,
      "grad_norm": 0.46780407428741455,
      "learning_rate": 4.871652004288275e-06,
      "loss": 0.0078,
      "step": 142
    },
    {
      "epoch": 1.66796875,
      "grad_norm": 0.43764325976371765,
      "learning_rate": 4.865523472433942e-06,
      "loss": 0.005,
      "step": 143
    },
    {
      "epoch": 1.6796875,
      "grad_norm": 0.3445664644241333,
      "learning_rate": 4.859256054995758e-06,
      "loss": 0.0069,
      "step": 144
    },
    {
      "epoch": 1.69140625,
      "grad_norm": 0.40410447120666504,
      "learning_rate": 4.8528501199489045e-06,
      "loss": 0.0088,
      "step": 145
    },
    {
      "epoch": 1.703125,
      "grad_norm": 0.5876736640930176,
      "learning_rate": 4.846306043401268e-06,
      "loss": 0.0057,
      "step": 146
    },
    {
      "epoch": 1.71484375,
      "grad_norm": 0.5149250626564026,
      "learning_rate": 4.839624209571352e-06,
      "loss": 0.0056,
      "step": 147
    },
    {
      "epoch": 1.7265625,
      "grad_norm": 0.7009180784225464,
      "learning_rate": 4.832805010765724e-06,
      "loss": 0.0088,
      "step": 148
    },
    {
      "epoch": 1.73828125,
      "grad_norm": 0.42258334159851074,
      "learning_rate": 4.8258488473559794e-06,
      "loss": 0.004,
      "step": 149
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.39231887459754944,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.005,
      "step": 150
    },
    {
      "epoch": 1.76171875,
      "grad_norm": 0.3317432701587677,
      "learning_rate": 4.811527268394157e-06,
      "loss": 0.0038,
      "step": 151
    },
    {
      "epoch": 1.7734375,
      "grad_norm": 0.5022267699241638,
      "learning_rate": 4.804162693696494e-06,
      "loss": 0.0056,
      "step": 152
    },
    {
      "epoch": 1.78515625,
      "grad_norm": 0.39019322395324707,
      "learning_rate": 4.796662836054176e-06,
      "loss": 0.0053,
      "step": 153
    },
    {
      "epoch": 1.796875,
      "grad_norm": 0.5674042701721191,
      "learning_rate": 4.789028135801919e-06,
      "loss": 0.007,
      "step": 154
    },
    {
      "epoch": 1.80859375,
      "grad_norm": 0.5690024495124817,
      "learning_rate": 4.7812590411913755e-06,
      "loss": 0.0053,
      "step": 155
    },
    {
      "epoch": 1.8203125,
      "grad_norm": 0.23775412142276764,
      "learning_rate": 4.773356008364812e-06,
      "loss": 0.0031,
      "step": 156
    },
    {
      "epoch": 1.83203125,
      "grad_norm": 0.4698558747768402,
      "learning_rate": 4.765319501328332e-06,
      "loss": 0.0021,
      "step": 157
    },
    {
      "epoch": 1.84375,
      "grad_norm": 0.21603639423847198,
      "learning_rate": 4.757149991924633e-06,
      "loss": 0.0046,
      "step": 158
    },
    {
      "epoch": 1.85546875,
      "grad_norm": 0.33830726146698,
      "learning_rate": 4.748847959805297e-06,
      "loss": 0.0022,
      "step": 159
    },
    {
      "epoch": 1.8671875,
      "grad_norm": 0.44919782876968384,
      "learning_rate": 4.740413892402639e-06,
      "loss": 0.0032,
      "step": 160
    },
    {
      "epoch": 1.87890625,
      "grad_norm": 0.5119614601135254,
      "learning_rate": 4.731848284901082e-06,
      "loss": 0.006,
      "step": 161
    },
    {
      "epoch": 1.890625,
      "grad_norm": 0.3875437080860138,
      "learning_rate": 4.723151640208084e-06,
      "loss": 0.0024,
      "step": 162
    },
    {
      "epoch": 1.90234375,
      "grad_norm": 0.3179910182952881,
      "learning_rate": 4.714324468924614e-06,
      "loss": 0.0037,
      "step": 163
    },
    {
      "epoch": 1.9140625,
      "grad_norm": 0.43395644426345825,
      "learning_rate": 4.705367289315172e-06,
      "loss": 0.0027,
      "step": 164
    },
    {
      "epoch": 1.92578125,
      "grad_norm": 0.3703945577144623,
      "learning_rate": 4.696280627277356e-06,
      "loss": 0.0047,
      "step": 165
    },
    {
      "epoch": 1.9375,
      "grad_norm": 0.2503529191017151,
      "learning_rate": 4.687065016310996e-06,
      "loss": 0.0052,
      "step": 166
    },
    {
      "epoch": 1.94921875,
      "grad_norm": 0.3613075315952301,
      "learning_rate": 4.6777209974868194e-06,
      "loss": 0.0034,
      "step": 167
    },
    {
      "epoch": 1.9609375,
      "grad_norm": 0.3578515350818634,
      "learning_rate": 4.668249119414692e-06,
      "loss": 0.0021,
      "step": 168
    },
    {
      "epoch": 1.97265625,
      "grad_norm": 0.1784515529870987,
      "learning_rate": 4.6586499382113985e-06,
      "loss": 0.0018,
      "step": 169
    },
    {
      "epoch": 1.984375,
      "grad_norm": 0.259198397397995,
      "learning_rate": 4.648924017468003e-06,
      "loss": 0.0009,
      "step": 170
    },
    {
      "epoch": 1.99609375,
      "grad_norm": 0.7194133400917053,
      "learning_rate": 4.6390719282167515e-06,
      "loss": 0.0041,
      "step": 171
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.7194133400917053,
      "learning_rate": 4.629094248897546e-06,
      "loss": 0.0014,
      "step": 172
    },
    {
      "epoch": 2.01171875,
      "grad_norm": 0.5032601952552795,
      "learning_rate": 4.618991565323987e-06,
      "loss": 0.0028,
      "step": 173
    },
    {
      "epoch": 2.0234375,
      "grad_norm": 0.6387512683868408,
      "learning_rate": 4.608764470648971e-06,
      "loss": 0.0007,
      "step": 174
    },
    {
      "epoch": 2.03515625,
      "grad_norm": 0.23177844285964966,
      "learning_rate": 4.598413565329876e-06,
      "loss": 0.0006,
      "step": 175
    },
    {
      "epoch": 2.046875,
      "grad_norm": 0.1713147759437561,
      "learning_rate": 4.587939457093296e-06,
      "loss": 0.0003,
      "step": 176
    },
    {
      "epoch": 2.05859375,
      "grad_norm": 0.06128697097301483,
      "learning_rate": 4.577342760899368e-06,
      "loss": 0.0001,
      "step": 177
    },
    {
      "epoch": 2.0703125,
      "grad_norm": 0.538530170917511,
      "learning_rate": 4.566624098905665e-06,
      "loss": 0.0004,
      "step": 178
    },
    {
      "epoch": 2.08203125,
      "grad_norm": 0.03301696106791496,
      "learning_rate": 4.555784100430662e-06,
      "loss": 0.0004,
      "step": 179
    },
    {
      "epoch": 2.09375,
      "grad_norm": 0.21366432309150696,
      "learning_rate": 4.544823401916794e-06,
      "loss": 0.0014,
      "step": 180
    },
    {
      "epoch": 2.10546875,
      "grad_norm": 0.13440090417861938,
      "learning_rate": 4.533742646893086e-06,
      "loss": 0.0004,
      "step": 181
    },
    {
      "epoch": 2.1171875,
      "grad_norm": 0.531997799873352,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.0008,
      "step": 182
    },
    {
      "epoch": 2.12890625,
      "grad_norm": 0.2832719385623932,
      "learning_rate": 4.511223576638084e-06,
      "loss": 0.0023,
      "step": 183
    },
    {
      "epoch": 2.140625,
      "grad_norm": 0.3814002275466919,
      "learning_rate": 4.499786583555675e-06,
      "loss": 0.001,
      "step": 184
    },
    {
      "epoch": 2.15234375,
      "grad_norm": 0.2522885501384735,
      "learning_rate": 4.4882321781835666e-06,
      "loss": 0.0004,
      "step": 185
    },
    {
      "epoch": 2.1640625,
      "grad_norm": 0.3866797983646393,
      "learning_rate": 4.476561038908745e-06,
      "loss": 0.0007,
      "step": 186
    },
    {
      "epoch": 2.17578125,
      "grad_norm": 0.2128417044878006,
      "learning_rate": 4.464773850971924e-06,
      "loss": 0.0001,
      "step": 187
    },
    {
      "epoch": 2.1875,
      "grad_norm": 0.135880708694458,
      "learning_rate": 4.452871306427314e-06,
      "loss": 0.0031,
      "step": 188
    },
    {
      "epoch": 2.19921875,
      "grad_norm": 0.38835451006889343,
      "learning_rate": 4.440854104101988e-06,
      "loss": 0.0015,
      "step": 189
    },
    {
      "epoch": 2.2109375,
      "grad_norm": 0.18233123421669006,
      "learning_rate": 4.428722949554858e-06,
      "loss": 0.0001,
      "step": 190
    },
    {
      "epoch": 2.22265625,
      "grad_norm": 0.10753051191568375,
      "learning_rate": 4.416478555035241e-06,
      "loss": 0.0017,
      "step": 191
    },
    {
      "epoch": 2.234375,
      "grad_norm": 0.30138343572616577,
      "learning_rate": 4.404121639441047e-06,
      "loss": 0.0004,
      "step": 192
    },
    {
      "epoch": 2.24609375,
      "grad_norm": 0.12771356105804443,
      "learning_rate": 4.391652928276572e-06,
      "loss": 0.0022,
      "step": 193
    },
    {
      "epoch": 2.2578125,
      "grad_norm": 0.4173564612865448,
      "learning_rate": 4.379073153609896e-06,
      "loss": 0.0001,
      "step": 194
    },
    {
      "epoch": 2.26953125,
      "grad_norm": 0.08329658955335617,
      "learning_rate": 4.366383054029907e-06,
      "loss": 0.0009,
      "step": 195
    },
    {
      "epoch": 2.28125,
      "grad_norm": 0.21187439560890198,
      "learning_rate": 4.3535833746029335e-06,
      "loss": 0.0013,
      "step": 196
    },
    {
      "epoch": 2.29296875,
      "grad_norm": 0.046030864119529724,
      "learning_rate": 4.340674866829001e-06,
      "loss": 0.0004,
      "step": 197
    },
    {
      "epoch": 2.3046875,
      "grad_norm": 0.08373020589351654,
      "learning_rate": 4.32765828859771e-06,
      "loss": 0.0014,
      "step": 198
    },
    {
      "epoch": 2.31640625,
      "grad_norm": 0.4026390314102173,
      "learning_rate": 4.314534404143738e-06,
      "loss": 0.0003,
      "step": 199
    },
    {
      "epoch": 2.328125,
      "grad_norm": 0.24255593121051788,
      "learning_rate": 4.3013039840019675e-06,
      "loss": 0.0009,
      "step": 200
    },
    {
      "epoch": 2.33984375,
      "grad_norm": 0.2282780110836029,
      "learning_rate": 4.287967804962252e-06,
      "loss": 0.0025,
      "step": 201
    },
    {
      "epoch": 2.3515625,
      "grad_norm": 0.14743350446224213,
      "learning_rate": 4.274526650023801e-06,
      "loss": 0.0014,
      "step": 202
    },
    {
      "epoch": 2.36328125,
      "grad_norm": 0.17971713840961456,
      "learning_rate": 4.260981308349214e-06,
      "loss": 0.0003,
      "step": 203
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.03872796148061752,
      "learning_rate": 4.247332575218144e-06,
      "loss": 0.0003,
      "step": 204
    },
    {
      "epoch": 2.38671875,
      "grad_norm": 0.06636863946914673,
      "learning_rate": 4.233581251980604e-06,
      "loss": 0.0004,
      "step": 205
    },
    {
      "epoch": 2.3984375,
      "grad_norm": 0.1254304051399231,
      "learning_rate": 4.2197281460099245e-06,
      "loss": 0.0002,
      "step": 206
    },
    {
      "epoch": 2.41015625,
      "grad_norm": 0.03998701646924019,
      "learning_rate": 4.2057740706553415e-06,
      "loss": 0.0007,
      "step": 207
    },
    {
      "epoch": 2.421875,
      "grad_norm": 0.8734745979309082,
      "learning_rate": 4.191719845194246e-06,
      "loss": 0.0019,
      "step": 208
    },
    {
      "epoch": 2.43359375,
      "grad_norm": 0.34975236654281616,
      "learning_rate": 4.177566294784085e-06,
      "loss": 0.0006,
      "step": 209
    },
    {
      "epoch": 2.4453125,
      "grad_norm": 0.07566183060407639,
      "learning_rate": 4.163314250413913e-06,
      "loss": 0.0003,
      "step": 210
    },
    {
      "epoch": 2.45703125,
      "grad_norm": 0.09056711941957474,
      "learning_rate": 4.148964548855603e-06,
      "loss": 0.0002,
      "step": 211
    },
    {
      "epoch": 2.46875,
      "grad_norm": 0.16160684823989868,
      "learning_rate": 4.134518032614713e-06,
      "loss": 0.0009,
      "step": 212
    },
    {
      "epoch": 2.48046875,
      "grad_norm": 0.0812753438949585,
      "learning_rate": 4.119975549881029e-06,
      "loss": 0.0002,
      "step": 213
    },
    {
      "epoch": 2.4921875,
      "grad_norm": 0.05827738344669342,
      "learning_rate": 4.105337954478756e-06,
      "loss": 0.0007,
      "step": 214
    },
    {
      "epoch": 2.50390625,
      "grad_norm": 0.2625848054885864,
      "learning_rate": 4.0906061058164e-06,
      "loss": 0.0003,
      "step": 215
    },
    {
      "epoch": 2.515625,
      "grad_norm": 0.1771923154592514,
      "learning_rate": 4.075780868836296e-06,
      "loss": 0.0005,
      "step": 216
    },
    {
      "epoch": 2.52734375,
      "grad_norm": 0.034166041761636734,
      "learning_rate": 4.060863113963835e-06,
      "loss": 0.0012,
      "step": 217
    },
    {
      "epoch": 2.5390625,
      "grad_norm": 0.14099521934986115,
      "learning_rate": 4.045853717056358e-06,
      "loss": 0.0,
      "step": 218
    },
    {
      "epoch": 2.55078125,
      "grad_norm": 0.34704917669296265,
      "learning_rate": 4.030753559351728e-06,
      "loss": 0.0006,
      "step": 219
    },
    {
      "epoch": 2.5625,
      "grad_norm": 0.25681111216545105,
      "learning_rate": 4.015563527416596e-06,
      "loss": 0.0004,
      "step": 220
    },
    {
      "epoch": 2.57421875,
      "grad_norm": 0.36212408542633057,
      "learning_rate": 4.000284513094342e-06,
      "loss": 0.0003,
      "step": 221
    },
    {
      "epoch": 2.5859375,
      "grad_norm": 0.13945375382900238,
      "learning_rate": 3.984917413452721e-06,
      "loss": 0.0001,
      "step": 222
    },
    {
      "epoch": 2.59765625,
      "grad_norm": 0.06798060238361359,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.0007,
      "step": 223
    },
    {
      "epoch": 2.609375,
      "grad_norm": 0.19848179817199707,
      "learning_rate": 3.953922572287915e-06,
      "loss": 0.0007,
      "step": 224
    },
    {
      "epoch": 2.62109375,
      "grad_norm": 0.5454645156860352,
      "learning_rate": 3.938296650546552e-06,
      "loss": 0.0018,
      "step": 225
    },
    {
      "epoch": 2.6328125,
      "grad_norm": 0.22043731808662415,
      "learning_rate": 3.9225862829426184e-06,
      "loss": 0.0036,
      "step": 226
    },
    {
      "epoch": 2.64453125,
      "grad_norm": 0.3086087107658386,
      "learning_rate": 3.906792391869657e-06,
      "loss": 0.0002,
      "step": 227
    },
    {
      "epoch": 2.65625,
      "grad_norm": 0.04387599974870682,
      "learning_rate": 3.890915904625075e-06,
      "loss": 0.0014,
      "step": 228
    },
    {
      "epoch": 2.66796875,
      "grad_norm": 0.3786030113697052,
      "learning_rate": 3.874957753355701e-06,
      "loss": 0.0014,
      "step": 229
    },
    {
      "epoch": 2.6796875,
      "grad_norm": 0.28310713171958923,
      "learning_rate": 3.858918875003053e-06,
      "loss": 0.0001,
      "step": 230
    },
    {
      "epoch": 2.69140625,
      "grad_norm": 0.0586460717022419,
      "learning_rate": 3.842800211248333e-06,
      "loss": 0.0001,
      "step": 231
    },
    {
      "epoch": 2.703125,
      "grad_norm": 0.11408677697181702,
      "learning_rate": 3.8266027084571335e-06,
      "loss": 0.001,
      "step": 232
    },
    {
      "epoch": 2.71484375,
      "grad_norm": 0.06875021010637283,
      "learning_rate": 3.810327317623881e-06,
      "loss": 0.0001,
      "step": 233
    },
    {
      "epoch": 2.7265625,
      "grad_norm": 0.037388525903224945,
      "learning_rate": 3.793974994315991e-06,
      "loss": 0.0002,
      "step": 234
    },
    {
      "epoch": 2.73828125,
      "grad_norm": 0.041430581361055374,
      "learning_rate": 3.7775466986177763e-06,
      "loss": 0.0015,
      "step": 235
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.26019373536109924,
      "learning_rate": 3.7610433950740667e-06,
      "loss": 0.0022,
      "step": 236
    },
    {
      "epoch": 2.76171875,
      "grad_norm": 0.16638831794261932,
      "learning_rate": 3.7444660526335853e-06,
      "loss": 0.0001,
      "step": 237
    },
    {
      "epoch": 2.7734375,
      "grad_norm": 0.11822371184825897,
      "learning_rate": 3.7278156445920584e-06,
      "loss": 0.0004,
      "step": 238
    },
    {
      "epoch": 2.78515625,
      "grad_norm": 0.055076126009225845,
      "learning_rate": 3.711093148535068e-06,
      "loss": 0.0001,
      "step": 239
    },
    {
      "epoch": 2.796875,
      "grad_norm": 0.08209875971078873,
      "learning_rate": 3.6942995462806574e-06,
      "loss": 0.0012,
      "step": 240
    },
    {
      "epoch": 2.80859375,
      "grad_norm": 0.10523220896720886,
      "learning_rate": 3.6774358238216878e-06,
      "loss": 0.0004,
      "step": 241
    },
    {
      "epoch": 2.8203125,
      "grad_norm": 0.09211058169603348,
      "learning_rate": 3.660502971267945e-06,
      "loss": 0.0007,
      "step": 242
    },
    {
      "epoch": 2.83203125,
      "grad_norm": 0.6209844946861267,
      "learning_rate": 3.6435019827880093e-06,
      "loss": 0.0004,
      "step": 243
    },
    {
      "epoch": 2.84375,
      "grad_norm": 0.030900023877620697,
      "learning_rate": 3.626433856550886e-06,
      "loss": 0.0002,
      "step": 244
    },
    {
      "epoch": 2.85546875,
      "grad_norm": 0.041130077093839645,
      "learning_rate": 3.6092995946673996e-06,
      "loss": 0.0003,
      "step": 245
    },
    {
      "epoch": 2.8671875,
      "grad_norm": 0.052536819130182266,
      "learning_rate": 3.5921002031313586e-06,
      "loss": 0.0001,
      "step": 246
    },
    {
      "epoch": 2.87890625,
      "grad_norm": 0.027478178963065147,
      "learning_rate": 3.574836691760489e-06,
      "loss": 0.0011,
      "step": 247
    },
    {
      "epoch": 2.890625,
      "grad_norm": 0.11695867031812668,
      "learning_rate": 3.557510074137147e-06,
      "loss": 0.0002,
      "step": 248
    },
    {
      "epoch": 2.90234375,
      "grad_norm": 0.08782754838466644,
      "learning_rate": 3.540121367548811e-06,
      "loss": 0.001,
      "step": 249
    },
    {
      "epoch": 2.9140625,
      "grad_norm": 0.19123269617557526,
      "learning_rate": 3.5226715929283507e-06,
      "loss": 0.0001,
      "step": 250
    },
    {
      "epoch": 2.92578125,
      "grad_norm": 0.020774945616722107,
      "learning_rate": 3.505161774794085e-06,
      "loss": 0.0006,
      "step": 251
    },
    {
      "epoch": 2.9375,
      "grad_norm": 0.12062892317771912,
      "learning_rate": 3.487592941189636e-06,
      "loss": 0.0001,
      "step": 252
    },
    {
      "epoch": 2.94921875,
      "grad_norm": 0.013076180592179298,
      "learning_rate": 3.469966123623563e-06,
      "loss": 0.0011,
      "step": 253
    },
    {
      "epoch": 2.9609375,
      "grad_norm": 0.22065430879592896,
      "learning_rate": 3.4522823570088073e-06,
      "loss": 0.0001,
      "step": 254
    },
    {
      "epoch": 2.97265625,
      "grad_norm": 0.027459079399704933,
      "learning_rate": 3.434542679601922e-06,
      "loss": 0.0003,
      "step": 255
    },
    {
      "epoch": 2.984375,
      "grad_norm": 0.07469172775745392,
      "learning_rate": 3.4167481329421204e-06,
      "loss": 0.0005,
      "step": 256
    },
    {
      "epoch": 2.99609375,
      "grad_norm": 0.544292688369751,
      "learning_rate": 3.39889976179012e-06,
      "loss": 0.0001,
      "step": 257
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.02610701508820057,
      "learning_rate": 3.380998614066805e-06,
      "loss": 0.0,
      "step": 258
    },
    {
      "epoch": 3.01171875,
      "grad_norm": 0.016433028504252434,
      "learning_rate": 3.363045740791698e-06,
      "loss": 0.0,
      "step": 259
    },
    {
      "epoch": 3.0234375,
      "grad_norm": 0.009407744742929935,
      "learning_rate": 3.345042196021257e-06,
      "loss": 0.0,
      "step": 260
    },
    {
      "epoch": 3.03515625,
      "grad_norm": 0.009587760083377361,
      "learning_rate": 3.326989036786981e-06,
      "loss": 0.0,
      "step": 261
    },
    {
      "epoch": 3.046875,
      "grad_norm": 0.021458568051457405,
      "learning_rate": 3.3088873230333562e-06,
      "loss": 0.0001,
      "step": 262
    },
    {
      "epoch": 3.05859375,
      "grad_norm": 1.3090940713882446,
      "learning_rate": 3.290738117555622e-06,
      "loss": 0.0007,
      "step": 263
    },
    {
      "epoch": 3.0703125,
      "grad_norm": 0.008000005036592484,
      "learning_rate": 3.272542485937369e-06,
      "loss": 0.0,
      "step": 264
    },
    {
      "epoch": 3.08203125,
      "grad_norm": 0.11048968136310577,
      "learning_rate": 3.2543014964879814e-06,
      "loss": 0.0004,
      "step": 265
    },
    {
      "epoch": 3.09375,
      "grad_norm": 0.010688518173992634,
      "learning_rate": 3.2360162201799085e-06,
      "loss": 0.0,
      "step": 266
    },
    {
      "epoch": 3.10546875,
      "grad_norm": 0.0585443377494812,
      "learning_rate": 3.21768773058579e-06,
      "loss": 0.0001,
      "step": 267
    },
    {
      "epoch": 3.1171875,
      "grad_norm": 0.12098421901464462,
      "learning_rate": 3.1993171038154203e-06,
      "loss": 0.0002,
      "step": 268
    },
    {
      "epoch": 3.12890625,
      "grad_norm": 0.01194986142218113,
      "learning_rate": 3.180905418452569e-06,
      "loss": 0.0,
      "step": 269
    },
    {
      "epoch": 3.140625,
      "grad_norm": 0.0898946076631546,
      "learning_rate": 3.162453755491655e-06,
      "loss": 0.0011,
      "step": 270
    },
    {
      "epoch": 3.15234375,
      "grad_norm": 0.04248907417058945,
      "learning_rate": 3.143963198274278e-06,
      "loss": 0.0001,
      "step": 271
    },
    {
      "epoch": 3.1640625,
      "grad_norm": 0.11775418370962143,
      "learning_rate": 3.125434832425613e-06,
      "loss": 0.0002,
      "step": 272
    },
    {
      "epoch": 3.17578125,
      "grad_norm": 0.009955376386642456,
      "learning_rate": 3.1068697457906736e-06,
      "loss": 0.0,
      "step": 273
    },
    {
      "epoch": 3.1875,
      "grad_norm": 0.010195266455411911,
      "learning_rate": 3.0882690283704355e-06,
      "loss": 0.0,
      "step": 274
    },
    {
      "epoch": 3.19921875,
      "grad_norm": 0.0036824019625782967,
      "learning_rate": 3.0696337722578444e-06,
      "loss": 0.0,
      "step": 275
    },
    {
      "epoch": 3.2109375,
      "grad_norm": 0.004132798407226801,
      "learning_rate": 3.0509650715736977e-06,
      "loss": 0.0,
      "step": 276
    },
    {
      "epoch": 3.22265625,
      "grad_norm": 0.0651523619890213,
      "learning_rate": 3.0322640224024024e-06,
      "loss": 0.0001,
      "step": 277
    },
    {
      "epoch": 3.234375,
      "grad_norm": 0.015174048021435738,
      "learning_rate": 3.0135317227276247e-06,
      "loss": 0.0,
      "step": 278
    },
    {
      "epoch": 3.24609375,
      "grad_norm": 0.004420771263539791,
      "learning_rate": 2.994769272367822e-06,
      "loss": 0.0,
      "step": 279
    },
    {
      "epoch": 3.2578125,
      "grad_norm": 0.019537663087248802,
      "learning_rate": 2.975977772911671e-06,
      "loss": 0.0001,
      "step": 280
    },
    {
      "epoch": 3.26953125,
      "grad_norm": 0.005312444642186165,
      "learning_rate": 2.9571583276533923e-06,
      "loss": 0.0,
      "step": 281
    },
    {
      "epoch": 3.28125,
      "grad_norm": 0.005001228302717209,
      "learning_rate": 2.93831204152797e-06,
      "loss": 0.0,
      "step": 282
    },
    {
      "epoch": 3.29296875,
      "grad_norm": 0.02515912428498268,
      "learning_rate": 2.9194400210462808e-06,
      "loss": 0.0,
      "step": 283
    },
    {
      "epoch": 3.3046875,
      "grad_norm": 0.0026461018715053797,
      "learning_rate": 2.9005433742301274e-06,
      "loss": 0.0,
      "step": 284
    },
    {
      "epoch": 3.31640625,
      "grad_norm": 0.008561859838664532,
      "learning_rate": 2.8816232105471864e-06,
      "loss": 0.0,
      "step": 285
    },
    {
      "epoch": 3.328125,
      "grad_norm": 0.0016494860174134374,
      "learning_rate": 2.8626806408458626e-06,
      "loss": 0.0,
      "step": 286
    },
    {
      "epoch": 3.33984375,
      "grad_norm": 0.13021136820316315,
      "learning_rate": 2.843716777290074e-06,
      "loss": 0.0007,
      "step": 287
    },
    {
      "epoch": 3.3515625,
      "grad_norm": 0.0030203904025256634,
      "learning_rate": 2.8247327332939512e-06,
      "loss": 0.0,
      "step": 288
    },
    {
      "epoch": 3.36328125,
      "grad_norm": 0.03953886777162552,
      "learning_rate": 2.805729623456469e-06,
      "loss": 0.0,
      "step": 289
    },
    {
      "epoch": 3.375,
      "grad_norm": 0.016400372609496117,
      "learning_rate": 2.786708563496002e-06,
      "loss": 0.0,
      "step": 290
    },
    {
      "epoch": 3.38671875,
      "grad_norm": 0.0036580052692443132,
      "learning_rate": 2.7676706701848187e-06,
      "loss": 0.0,
      "step": 291
    },
    {
      "epoch": 3.3984375,
      "grad_norm": 0.013516291044652462,
      "learning_rate": 2.748617061283518e-06,
      "loss": 0.0,
      "step": 292
    },
    {
      "epoch": 3.41015625,
      "grad_norm": 0.0161955077201128,
      "learning_rate": 2.7295488554753957e-06,
      "loss": 0.0,
      "step": 293
    },
    {
      "epoch": 3.421875,
      "grad_norm": 0.030412085354328156,
      "learning_rate": 2.710467172300768e-06,
      "loss": 0.0,
      "step": 294
    },
    {
      "epoch": 3.43359375,
      "grad_norm": 0.009741670452058315,
      "learning_rate": 2.69137313209124e-06,
      "loss": 0.0,
      "step": 295
    },
    {
      "epoch": 3.4453125,
      "grad_norm": 0.0022640388924628496,
      "learning_rate": 2.672267855903927e-06,
      "loss": 0.0,
      "step": 296
    },
    {
      "epoch": 3.45703125,
      "grad_norm": 0.004546131007373333,
      "learning_rate": 2.653152465455639e-06,
      "loss": 0.0,
      "step": 297
    },
    {
      "epoch": 3.46875,
      "grad_norm": 0.00977818388491869,
      "learning_rate": 2.6340280830570142e-06,
      "loss": 0.0,
      "step": 298
    },
    {
      "epoch": 3.48046875,
      "grad_norm": 0.00292399013414979,
      "learning_rate": 2.614895831546633e-06,
      "loss": 0.0,
      "step": 299
    },
    {
      "epoch": 3.4921875,
      "grad_norm": 0.02362428605556488,
      "learning_rate": 2.595756834225089e-06,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 3.50390625,
      "grad_norm": 0.05170333385467529,
      "learning_rate": 2.576612214789039e-06,
      "loss": 0.0001,
      "step": 301
    },
    {
      "epoch": 3.515625,
      "grad_norm": 0.002428271807730198,
      "learning_rate": 2.5574630972652263e-06,
      "loss": 0.0,
      "step": 302
    },
    {
      "epoch": 3.52734375,
      "grad_norm": 0.0020236221607774496,
      "learning_rate": 2.538310605944491e-06,
      "loss": 0.0,
      "step": 303
    },
    {
      "epoch": 3.5390625,
      "grad_norm": 0.0026413940358906984,
      "learning_rate": 2.5191558653157542e-06,
      "loss": 0.0,
      "step": 304
    },
    {
      "epoch": 3.55078125,
      "grad_norm": 0.001937767956405878,
      "learning_rate": 2.5e-06,
      "loss": 0.0,
      "step": 305
    },
    {
      "epoch": 3.5625,
      "grad_norm": 0.013072842732071877,
      "learning_rate": 2.480844134684246e-06,
      "loss": 0.0,
      "step": 306
    },
    {
      "epoch": 3.57421875,
      "grad_norm": 0.07046481966972351,
      "learning_rate": 2.4616893940555094e-06,
      "loss": 0.0003,
      "step": 307
    },
    {
      "epoch": 3.5859375,
      "grad_norm": 0.002507950412109494,
      "learning_rate": 2.4425369027347746e-06,
      "loss": 0.0,
      "step": 308
    },
    {
      "epoch": 3.59765625,
      "grad_norm": 0.0024932159576565027,
      "learning_rate": 2.423387785210962e-06,
      "loss": 0.0,
      "step": 309
    },
    {
      "epoch": 3.609375,
      "grad_norm": 0.007839293219149113,
      "learning_rate": 2.404243165774912e-06,
      "loss": 0.0,
      "step": 310
    },
    {
      "epoch": 3.62109375,
      "grad_norm": 0.008749544620513916,
      "learning_rate": 2.3851041684533677e-06,
      "loss": 0.0,
      "step": 311
    },
    {
      "epoch": 3.6328125,
      "grad_norm": 0.00224123802036047,
      "learning_rate": 2.3659719169429866e-06,
      "loss": 0.0,
      "step": 312
    },
    {
      "epoch": 3.64453125,
      "grad_norm": 0.0036495248787105083,
      "learning_rate": 2.346847534544362e-06,
      "loss": 0.0,
      "step": 313
    },
    {
      "epoch": 3.65625,
      "grad_norm": 0.008617470040917397,
      "learning_rate": 2.3277321440960733e-06,
      "loss": 0.0,
      "step": 314
    },
    {
      "epoch": 3.66796875,
      "grad_norm": 0.20711803436279297,
      "learning_rate": 2.308626867908761e-06,
      "loss": 0.0004,
      "step": 315
    },
    {
      "epoch": 3.6796875,
      "grad_norm": 0.002029536757618189,
      "learning_rate": 2.2895328276992325e-06,
      "loss": 0.0,
      "step": 316
    },
    {
      "epoch": 3.69140625,
      "grad_norm": 0.0029692472890019417,
      "learning_rate": 2.270451144524605e-06,
      "loss": 0.0,
      "step": 317
    },
    {
      "epoch": 3.703125,
      "grad_norm": 0.003482841420918703,
      "learning_rate": 2.251382938716482e-06,
      "loss": 0.0,
      "step": 318
    },
    {
      "epoch": 3.71484375,
      "grad_norm": 0.004736272618174553,
      "learning_rate": 2.2323293298151817e-06,
      "loss": 0.0,
      "step": 319
    },
    {
      "epoch": 3.7265625,
      "grad_norm": 0.002524860203266144,
      "learning_rate": 2.2132914365039993e-06,
      "loss": 0.0,
      "step": 320
    },
    {
      "epoch": 3.73828125,
      "grad_norm": 0.0024032641667872667,
      "learning_rate": 2.1942703765435317e-06,
      "loss": 0.0,
      "step": 321
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.06402894109487534,
      "learning_rate": 2.1752672667060488e-06,
      "loss": 0.0002,
      "step": 322
    },
    {
      "epoch": 3.76171875,
      "grad_norm": 0.0013841127511113882,
      "learning_rate": 2.1562832227099266e-06,
      "loss": 0.0,
      "step": 323
    },
    {
      "epoch": 3.7734375,
      "grad_norm": 0.002198501257225871,
      "learning_rate": 2.137319359154138e-06,
      "loss": 0.0,
      "step": 324
    },
    {
      "epoch": 3.78515625,
      "grad_norm": 0.004288461524993181,
      "learning_rate": 2.1183767894528135e-06,
      "loss": 0.0,
      "step": 325
    },
    {
      "epoch": 3.796875,
      "grad_norm": 0.16602352261543274,
      "learning_rate": 2.099456625769872e-06,
      "loss": 0.0003,
      "step": 326
    },
    {
      "epoch": 3.80859375,
      "grad_norm": 0.001620235969312489,
      "learning_rate": 2.08055997895372e-06,
      "loss": 0.0,
      "step": 327
    },
    {
      "epoch": 3.8203125,
      "grad_norm": 0.004387021530419588,
      "learning_rate": 2.0616879584720305e-06,
      "loss": 0.0,
      "step": 328
    },
    {
      "epoch": 3.83203125,
      "grad_norm": 0.040472231805324554,
      "learning_rate": 2.042841672346608e-06,
      "loss": 0.0001,
      "step": 329
    },
    {
      "epoch": 3.84375,
      "grad_norm": 0.03627858683466911,
      "learning_rate": 2.024022227088329e-06,
      "loss": 0.0001,
      "step": 330
    },
    {
      "epoch": 3.85546875,
      "grad_norm": 0.0029672810342162848,
      "learning_rate": 2.0052307276321793e-06,
      "loss": 0.0,
      "step": 331
    },
    {
      "epoch": 3.8671875,
      "grad_norm": 0.0023526407312601805,
      "learning_rate": 1.9864682772723757e-06,
      "loss": 0.0,
      "step": 332
    },
    {
      "epoch": 3.87890625,
      "grad_norm": 0.001383278169669211,
      "learning_rate": 1.967735977597598e-06,
      "loss": 0.0,
      "step": 333
    },
    {
      "epoch": 3.890625,
      "grad_norm": 0.002337483922019601,
      "learning_rate": 1.9490349284263036e-06,
      "loss": 0.0,
      "step": 334
    },
    {
      "epoch": 3.90234375,
      "grad_norm": 0.02629532851278782,
      "learning_rate": 1.930366227742157e-06,
      "loss": 0.0,
      "step": 335
    },
    {
      "epoch": 3.9140625,
      "grad_norm": 0.03508671000599861,
      "learning_rate": 1.9117309716295658e-06,
      "loss": 0.0001,
      "step": 336
    },
    {
      "epoch": 3.92578125,
      "grad_norm": 0.0021862757857888937,
      "learning_rate": 1.8931302542093274e-06,
      "loss": 0.0,
      "step": 337
    },
    {
      "epoch": 3.9375,
      "grad_norm": 0.002468815306201577,
      "learning_rate": 1.8745651675743876e-06,
      "loss": 0.0,
      "step": 338
    },
    {
      "epoch": 3.94921875,
      "grad_norm": 0.028530335053801537,
      "learning_rate": 1.8560368017257229e-06,
      "loss": 0.0001,
      "step": 339
    },
    {
      "epoch": 3.9609375,
      "grad_norm": 0.004602192435413599,
      "learning_rate": 1.8375462445083464e-06,
      "loss": 0.0,
      "step": 340
    }
  ],
  "logging_steps": 1,
  "max_steps": 510,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 85,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.538004035049882e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}