{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9784300644874361,
  "eval_steps": 500,
  "global_step": 550,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.45256540179252625,
      "learning_rate": 2.366863905325444e-07,
      "loss": 1.4569,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.4529481828212738,
      "learning_rate": 4.733727810650888e-07,
      "loss": 1.3644,
      "step": 2
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.6197301149368286,
      "learning_rate": 7.100591715976332e-07,
      "loss": 1.4401,
      "step": 3
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.4604271352291107,
      "learning_rate": 9.467455621301776e-07,
      "loss": 1.4268,
      "step": 4
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.43845024704933167,
      "learning_rate": 1.183431952662722e-06,
      "loss": 1.3566,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.411244660615921,
      "learning_rate": 1.4201183431952664e-06,
      "loss": 1.3928,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.44185420870780945,
      "learning_rate": 1.656804733727811e-06,
      "loss": 1.3219,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.35254159569740295,
      "learning_rate": 1.8934911242603552e-06,
      "loss": 1.2633,
      "step": 8
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6924590468406677,
      "learning_rate": 2.1301775147929e-06,
      "loss": 1.3981,
      "step": 9
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.35222283005714417,
      "learning_rate": 2.366863905325444e-06,
      "loss": 1.3974,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.30887699127197266,
      "learning_rate": 2.603550295857988e-06,
      "loss": 1.3804,
      "step": 11
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.2868764102458954,
      "learning_rate": 2.840236686390533e-06,
      "loss": 1.3667,
      "step": 12
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.266529381275177,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 1.3539,
      "step": 13
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.2709053158760071,
      "learning_rate": 3.313609467455622e-06,
      "loss": 1.3328,
      "step": 14
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2658722698688507,
      "learning_rate": 3.550295857988166e-06,
      "loss": 1.3057,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.25174638628959656,
      "learning_rate": 3.7869822485207104e-06,
      "loss": 1.3251,
      "step": 16
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.3282093405723572,
      "learning_rate": 4.023668639053255e-06,
      "loss": 1.3731,
      "step": 17
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2421874701976776,
      "learning_rate": 4.2603550295858e-06,
      "loss": 1.3182,
      "step": 18
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2390267550945282,
      "learning_rate": 4.497041420118343e-06,
      "loss": 1.3427,
      "step": 19
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.2561015486717224,
      "learning_rate": 4.733727810650888e-06,
      "loss": 1.2169,
      "step": 20
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.2676621079444885,
      "learning_rate": 4.970414201183432e-06,
      "loss": 1.281,
      "step": 21
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.26723212003707886,
      "learning_rate": 5.207100591715976e-06,
      "loss": 1.315,
      "step": 22
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.25090569257736206,
      "learning_rate": 5.443786982248521e-06,
      "loss": 1.3735,
      "step": 23
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.23309403657913208,
      "learning_rate": 5.680473372781066e-06,
      "loss": 1.4455,
      "step": 24
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.22344529628753662,
      "learning_rate": 5.91715976331361e-06,
      "loss": 1.2653,
      "step": 25
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.2396487593650818,
      "learning_rate": 6.153846153846155e-06,
      "loss": 1.2975,
      "step": 26
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.20989473164081573,
      "learning_rate": 6.3905325443786995e-06,
      "loss": 1.2818,
      "step": 27
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.25289615988731384,
      "learning_rate": 6.627218934911244e-06,
      "loss": 1.3461,
      "step": 28
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.21861045062541962,
      "learning_rate": 6.863905325443787e-06,
      "loss": 1.2251,
      "step": 29
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.34595948457717896,
      "learning_rate": 7.100591715976332e-06,
      "loss": 1.3674,
      "step": 30
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.22662296891212463,
      "learning_rate": 7.337278106508876e-06,
      "loss": 1.3098,
      "step": 31
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.23772552609443665,
      "learning_rate": 7.573964497041421e-06,
      "loss": 1.3131,
      "step": 32
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.2229236513376236,
      "learning_rate": 7.810650887573965e-06,
      "loss": 1.2146,
      "step": 33
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.2346654236316681,
      "learning_rate": 8.04733727810651e-06,
      "loss": 1.2461,
      "step": 34
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.239919513463974,
      "learning_rate": 8.284023668639054e-06,
      "loss": 1.3061,
      "step": 35
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.28290900588035583,
      "learning_rate": 8.5207100591716e-06,
      "loss": 1.3061,
      "step": 36
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.24613362550735474,
      "learning_rate": 8.757396449704143e-06,
      "loss": 1.3081,
      "step": 37
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.2197539210319519,
      "learning_rate": 8.994082840236687e-06,
      "loss": 1.314,
      "step": 38
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.22429563105106354,
      "learning_rate": 9.230769230769232e-06,
      "loss": 1.3043,
      "step": 39
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.22450979053974152,
      "learning_rate": 9.467455621301776e-06,
      "loss": 1.2923,
      "step": 40
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.27412596344947815,
      "learning_rate": 9.70414201183432e-06,
      "loss": 1.3158,
      "step": 41
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.23781456053256989,
      "learning_rate": 9.940828402366864e-06,
      "loss": 1.2986,
      "step": 42
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.21080109477043152,
      "learning_rate": 1.017751479289941e-05,
      "loss": 1.309,
      "step": 43
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.22735567390918732,
      "learning_rate": 1.0414201183431953e-05,
      "loss": 1.2832,
      "step": 44
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.22945533692836761,
      "learning_rate": 1.0650887573964498e-05,
      "loss": 1.277,
      "step": 45
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.22165744006633759,
      "learning_rate": 1.0887573964497042e-05,
      "loss": 1.2381,
      "step": 46
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.23516800999641418,
      "learning_rate": 1.1124260355029586e-05,
      "loss": 1.2913,
      "step": 47
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.19824980199337006,
      "learning_rate": 1.1360946745562131e-05,
      "loss": 1.1926,
      "step": 48
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.3040119707584381,
      "learning_rate": 1.1597633136094675e-05,
      "loss": 1.3153,
      "step": 49
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.2223094254732132,
      "learning_rate": 1.183431952662722e-05,
      "loss": 1.2184,
      "step": 50
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.2515048682689667,
      "learning_rate": 1.2071005917159764e-05,
      "loss": 1.336,
      "step": 51
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.23539452254772186,
      "learning_rate": 1.230769230769231e-05,
      "loss": 1.4118,
      "step": 52
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.23905791342258453,
      "learning_rate": 1.2544378698224854e-05,
      "loss": 1.331,
      "step": 53
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.2439602166414261,
      "learning_rate": 1.2781065088757399e-05,
      "loss": 1.3149,
      "step": 54
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.22920997440814972,
      "learning_rate": 1.3017751479289941e-05,
      "loss": 1.2606,
      "step": 55
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.2133115828037262,
      "learning_rate": 1.3254437869822488e-05,
      "loss": 1.3365,
      "step": 56
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.2142357975244522,
      "learning_rate": 1.349112426035503e-05,
      "loss": 1.3224,
      "step": 57
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.20909257233142853,
      "learning_rate": 1.3727810650887574e-05,
      "loss": 1.2912,
      "step": 58
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.23404663801193237,
      "learning_rate": 1.396449704142012e-05,
      "loss": 1.3027,
      "step": 59
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.21893072128295898,
      "learning_rate": 1.4201183431952663e-05,
      "loss": 1.2553,
      "step": 60
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.22096726298332214,
      "learning_rate": 1.4437869822485209e-05,
      "loss": 1.281,
      "step": 61
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.081170916557312,
      "learning_rate": 1.4674556213017752e-05,
      "loss": 1.2884,
      "step": 62
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.22385230660438538,
      "learning_rate": 1.4911242603550298e-05,
      "loss": 1.2623,
      "step": 63
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.23298712074756622,
      "learning_rate": 1.5147928994082842e-05,
      "loss": 1.2821,
      "step": 64
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.3878341615200043,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 1.2506,
      "step": 65
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.22243380546569824,
      "learning_rate": 1.562130177514793e-05,
      "loss": 1.2227,
      "step": 66
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2637002468109131,
      "learning_rate": 1.5857988165680475e-05,
      "loss": 1.2881,
      "step": 67
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2298763245344162,
      "learning_rate": 1.609467455621302e-05,
      "loss": 1.3599,
      "step": 68
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2115241140127182,
      "learning_rate": 1.6331360946745562e-05,
      "loss": 1.1884,
      "step": 69
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2874203026294708,
      "learning_rate": 1.6568047337278108e-05,
      "loss": 1.3397,
      "step": 70
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.22124260663986206,
      "learning_rate": 1.6804733727810653e-05,
      "loss": 1.3417,
      "step": 71
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.2512863874435425,
      "learning_rate": 1.70414201183432e-05,
      "loss": 1.2949,
      "step": 72
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3238716423511505,
      "learning_rate": 1.727810650887574e-05,
      "loss": 1.2081,
      "step": 73
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.20584481954574585,
      "learning_rate": 1.7514792899408286e-05,
      "loss": 1.2029,
      "step": 74
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.21194683015346527,
      "learning_rate": 1.7751479289940828e-05,
      "loss": 1.2889,
      "step": 75
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.21375629305839539,
      "learning_rate": 1.7988165680473374e-05,
      "loss": 1.1978,
      "step": 76
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.3396347463130951,
      "learning_rate": 1.822485207100592e-05,
      "loss": 1.2245,
      "step": 77
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.22037793695926666,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 1.2168,
      "step": 78
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.20697161555290222,
      "learning_rate": 1.8698224852071007e-05,
      "loss": 1.28,
      "step": 79
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.22741861641407013,
      "learning_rate": 1.8934911242603552e-05,
      "loss": 1.2906,
      "step": 80
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.22307828068733215,
      "learning_rate": 1.9171597633136098e-05,
      "loss": 1.2696,
      "step": 81
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.20539595186710358,
      "learning_rate": 1.940828402366864e-05,
      "loss": 1.2793,
      "step": 82
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.35372307896614075,
      "learning_rate": 1.9644970414201185e-05,
      "loss": 1.2929,
      "step": 83
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.22638989984989166,
      "learning_rate": 1.9881656804733727e-05,
      "loss": 1.3448,
      "step": 84
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.23473680019378662,
      "learning_rate": 2.0118343195266276e-05,
      "loss": 1.4049,
      "step": 85
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.40462109446525574,
      "learning_rate": 2.035502958579882e-05,
      "loss": 1.2063,
      "step": 86
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.5088987946510315,
      "learning_rate": 2.059171597633136e-05,
      "loss": 1.3173,
      "step": 87
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.2960086464881897,
      "learning_rate": 2.0828402366863906e-05,
      "loss": 1.1658,
      "step": 88
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.18364708125591278,
      "learning_rate": 2.106508875739645e-05,
      "loss": 1.2131,
      "step": 89
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.20984740555286407,
      "learning_rate": 2.1301775147928997e-05,
      "loss": 1.2959,
      "step": 90
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.23264534771442413,
      "learning_rate": 2.153846153846154e-05,
      "loss": 1.3718,
      "step": 91
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.18781857192516327,
      "learning_rate": 2.1775147928994084e-05,
      "loss": 1.2442,
      "step": 92
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.21347913146018982,
      "learning_rate": 2.201183431952663e-05,
      "loss": 1.2354,
      "step": 93
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.19755306839942932,
      "learning_rate": 2.224852071005917e-05,
      "loss": 1.21,
      "step": 94
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.6877259612083435,
      "learning_rate": 2.2485207100591717e-05,
      "loss": 1.2694,
      "step": 95
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.21010373532772064,
      "learning_rate": 2.2721893491124263e-05,
      "loss": 1.2075,
      "step": 96
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.18228934705257416,
      "learning_rate": 2.2958579881656808e-05,
      "loss": 1.2062,
      "step": 97
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.2007497102022171,
      "learning_rate": 2.319526627218935e-05,
      "loss": 1.2616,
      "step": 98
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.21043969690799713,
      "learning_rate": 2.3431952662721896e-05,
      "loss": 1.2297,
      "step": 99
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.19528649747371674,
      "learning_rate": 2.366863905325444e-05,
      "loss": 1.2951,
      "step": 100
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.2004949301481247,
      "learning_rate": 2.3905325443786986e-05,
      "loss": 1.3297,
      "step": 101
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.18860860168933868,
      "learning_rate": 2.414201183431953e-05,
      "loss": 1.2922,
      "step": 102
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.19460582733154297,
      "learning_rate": 2.4378698224852074e-05,
      "loss": 1.2539,
      "step": 103
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.23081618547439575,
      "learning_rate": 2.461538461538462e-05,
      "loss": 1.3242,
      "step": 104
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.18269965052604675,
      "learning_rate": 2.485207100591716e-05,
      "loss": 1.2574,
      "step": 105
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.1838914453983307,
      "learning_rate": 2.5088757396449707e-05,
      "loss": 1.282,
      "step": 106
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.19195106625556946,
      "learning_rate": 2.5325443786982252e-05,
      "loss": 1.3306,
      "step": 107
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.18179951608181,
      "learning_rate": 2.5562130177514798e-05,
      "loss": 1.2826,
      "step": 108
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.18123595416545868,
      "learning_rate": 2.5798816568047337e-05,
      "loss": 1.1557,
      "step": 109
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.17565734684467316,
      "learning_rate": 2.6035502958579882e-05,
      "loss": 1.2967,
      "step": 110
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.17343786358833313,
      "learning_rate": 2.6272189349112428e-05,
      "loss": 1.2091,
      "step": 111
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.18770839273929596,
      "learning_rate": 2.6508875739644976e-05,
      "loss": 1.2598,
      "step": 112
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.18156233429908752,
      "learning_rate": 2.6745562130177515e-05,
      "loss": 1.4203,
      "step": 113
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.17836841940879822,
      "learning_rate": 2.698224852071006e-05,
      "loss": 1.3028,
      "step": 114
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.18834306299686432,
      "learning_rate": 2.7218934911242606e-05,
      "loss": 1.3476,
      "step": 115
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.17323559522628784,
      "learning_rate": 2.7455621301775148e-05,
      "loss": 1.2906,
      "step": 116
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.17727844417095184,
      "learning_rate": 2.7692307692307694e-05,
      "loss": 1.2975,
      "step": 117
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.17880651354789734,
      "learning_rate": 2.792899408284024e-05,
      "loss": 1.2702,
      "step": 118
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.16582255065441132,
      "learning_rate": 2.8165680473372784e-05,
      "loss": 1.1503,
      "step": 119
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.17850655317306519,
      "learning_rate": 2.8402366863905327e-05,
      "loss": 1.2997,
      "step": 120
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.18331943452358246,
      "learning_rate": 2.8639053254437872e-05,
      "loss": 1.3637,
      "step": 121
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1787254512310028,
      "learning_rate": 2.8875739644970417e-05,
      "loss": 1.3294,
      "step": 122
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1663455218076706,
      "learning_rate": 2.9112426035502963e-05,
      "loss": 1.2493,
      "step": 123
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.18791569769382477,
      "learning_rate": 2.9349112426035505e-05,
      "loss": 1.3098,
      "step": 124
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.20719879865646362,
      "learning_rate": 2.958579881656805e-05,
      "loss": 1.2525,
      "step": 125
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.18029142916202545,
      "learning_rate": 2.9822485207100596e-05,
      "loss": 1.2253,
      "step": 126
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.1783473640680313,
      "learning_rate": 3.0059171597633138e-05,
      "loss": 1.2442,
      "step": 127
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.17132772505283356,
      "learning_rate": 3.0295857988165683e-05,
      "loss": 1.2608,
      "step": 128
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.18309135735034943,
      "learning_rate": 3.0532544378698226e-05,
      "loss": 1.2042,
      "step": 129
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.16952255368232727,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 1.1857,
      "step": 130
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.17200875282287598,
      "learning_rate": 3.1005917159763316e-05,
      "loss": 1.3099,
      "step": 131
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.1655825823545456,
      "learning_rate": 3.124260355029586e-05,
      "loss": 1.1104,
      "step": 132
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1321977376937866,
      "learning_rate": 3.147928994082841e-05,
      "loss": 1.2752,
      "step": 133
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.17164289951324463,
      "learning_rate": 3.171597633136095e-05,
      "loss": 1.3419,
      "step": 134
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.1645852029323578,
      "learning_rate": 3.195266272189349e-05,
      "loss": 1.277,
      "step": 135
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.18138067424297333,
      "learning_rate": 3.218934911242604e-05,
      "loss": 1.3829,
      "step": 136
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.17257159948349,
      "learning_rate": 3.242603550295858e-05,
      "loss": 1.2158,
      "step": 137
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.17873308062553406,
      "learning_rate": 3.2662721893491124e-05,
      "loss": 1.3722,
      "step": 138
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.18967239558696747,
      "learning_rate": 3.289940828402367e-05,
      "loss": 1.3189,
      "step": 139
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.17044439911842346,
      "learning_rate": 3.3136094674556215e-05,
      "loss": 1.2405,
      "step": 140
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.17479783296585083,
      "learning_rate": 3.3372781065088764e-05,
      "loss": 1.2111,
      "step": 141
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.17302677035331726,
      "learning_rate": 3.3609467455621306e-05,
      "loss": 1.2284,
      "step": 142
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.195891872048378,
      "learning_rate": 3.384615384615385e-05,
      "loss": 1.2887,
      "step": 143
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.18167690932750702,
      "learning_rate": 3.40828402366864e-05,
      "loss": 1.2402,
      "step": 144
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.17309151589870453,
      "learning_rate": 3.431952662721894e-05,
      "loss": 1.2908,
      "step": 145
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.16847188770771027,
      "learning_rate": 3.455621301775148e-05,
      "loss": 1.232,
      "step": 146
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.15291230380535126,
      "learning_rate": 3.4792899408284023e-05,
      "loss": 1.2175,
      "step": 147
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.17138057947158813,
      "learning_rate": 3.502958579881657e-05,
      "loss": 1.2425,
      "step": 148
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.1690928041934967,
      "learning_rate": 3.5266272189349114e-05,
      "loss": 1.2559,
      "step": 149
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.16478750109672546,
      "learning_rate": 3.5502958579881656e-05,
      "loss": 1.2275,
      "step": 150
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.16980737447738647,
      "learning_rate": 3.5739644970414205e-05,
      "loss": 1.2615,
      "step": 151
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.18318678438663483,
      "learning_rate": 3.597633136094675e-05,
      "loss": 1.1944,
      "step": 152
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.16001984477043152,
      "learning_rate": 3.621301775147929e-05,
      "loss": 1.2195,
      "step": 153
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.16806018352508545,
      "learning_rate": 3.644970414201184e-05,
      "loss": 1.2447,
      "step": 154
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.165884330868721,
      "learning_rate": 3.668639053254438e-05,
      "loss": 1.2678,
      "step": 155
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1888454109430313,
      "learning_rate": 3.692307692307693e-05,
      "loss": 1.1565,
      "step": 156
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.17629367113113403,
      "learning_rate": 3.715976331360947e-05,
      "loss": 1.2655,
      "step": 157
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.15656840801239014,
      "learning_rate": 3.739644970414201e-05,
      "loss": 1.255,
      "step": 158
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1642792522907257,
      "learning_rate": 3.763313609467456e-05,
      "loss": 1.2761,
      "step": 159
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1748429834842682,
      "learning_rate": 3.7869822485207104e-05,
      "loss": 1.2886,
      "step": 160
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.18779656291007996,
      "learning_rate": 3.8106508875739646e-05,
      "loss": 1.329,
      "step": 161
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1711338311433792,
      "learning_rate": 3.8343195266272195e-05,
      "loss": 1.217,
      "step": 162
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.2066163867712021,
      "learning_rate": 3.857988165680474e-05,
      "loss": 1.2237,
      "step": 163
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1761993169784546,
      "learning_rate": 3.881656804733728e-05,
      "loss": 1.3078,
      "step": 164
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1665651649236679,
      "learning_rate": 3.905325443786983e-05,
      "loss": 1.282,
      "step": 165
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.16825369000434875,
      "learning_rate": 3.928994082840237e-05,
      "loss": 1.2816,
      "step": 166
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1534135639667511,
      "learning_rate": 3.952662721893492e-05,
      "loss": 1.1642,
      "step": 167
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.16484308242797852,
      "learning_rate": 3.9763313609467454e-05,
      "loss": 1.2835,
      "step": 168
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1610719859600067,
      "learning_rate": 4e-05,
      "loss": 1.3346,
      "step": 169
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1510305106639862,
      "learning_rate": 3.999995711272738e-05,
      "loss": 1.2378,
      "step": 170
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.15529176592826843,
      "learning_rate": 3.9999828451093426e-05,
      "loss": 1.2229,
      "step": 171
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1719708889722824,
      "learning_rate": 3.999961401564995e-05,
      "loss": 1.384,
      "step": 172
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1716768890619278,
      "learning_rate": 3.999931380731659e-05,
      "loss": 1.2835,
      "step": 173
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.174550399184227,
      "learning_rate": 3.999892782738088e-05,
      "loss": 1.271,
      "step": 174
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.15711328387260437,
      "learning_rate": 3.999845607749817e-05,
      "loss": 1.2114,
      "step": 175
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.15875279903411865,
      "learning_rate": 3.999789855969166e-05,
      "loss": 1.2691,
      "step": 176
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.15693986415863037,
      "learning_rate": 3.999725527635241e-05,
      "loss": 1.2357,
      "step": 177
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.16238917410373688,
      "learning_rate": 3.999652623023927e-05,
      "loss": 1.1509,
      "step": 178
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1467331349849701,
      "learning_rate": 3.9995711424478924e-05,
      "loss": 1.1546,
      "step": 179
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.16689354181289673,
      "learning_rate": 3.999481086256586e-05,
      "loss": 1.3141,
      "step": 180
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.16529196500778198,
      "learning_rate": 3.999382454836233e-05,
      "loss": 1.2067,
      "step": 181
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.15258309245109558,
      "learning_rate": 3.999275248609838e-05,
      "loss": 1.155,
      "step": 182
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1617356687784195,
      "learning_rate": 3.9991594680371777e-05,
      "loss": 1.2189,
      "step": 183
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.15302208065986633,
      "learning_rate": 3.999035113614805e-05,
      "loss": 1.1984,
      "step": 184
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.20816399157047272,
      "learning_rate": 3.998902185876041e-05,
      "loss": 1.2542,
      "step": 185
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.16472405195236206,
      "learning_rate": 3.998760685390977e-05,
      "loss": 1.2054,
      "step": 186
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1601923108100891,
      "learning_rate": 3.9986106127664694e-05,
      "loss": 1.2957,
      "step": 187
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.15352420508861542,
      "learning_rate": 3.99845196864614e-05,
      "loss": 1.2051,
      "step": 188
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.17676082253456116,
      "learning_rate": 3.998284753710369e-05,
      "loss": 1.2261,
      "step": 189
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1508205682039261,
      "learning_rate": 3.998108968676296e-05,
      "loss": 1.2634,
      "step": 190
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1601751744747162,
      "learning_rate": 3.997924614297815e-05,
      "loss": 1.2006,
      "step": 191
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.2529548108577728,
      "learning_rate": 3.997731691365572e-05,
      "loss": 1.3508,
      "step": 192
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.17672798037528992,
      "learning_rate": 3.99753020070696e-05,
      "loss": 1.2879,
      "step": 193
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.15853144228458405,
      "learning_rate": 3.997320143186119e-05,
      "loss": 1.2504,
      "step": 194
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.14928783476352692,
      "learning_rate": 3.997101519703927e-05,
      "loss": 1.1055,
      "step": 195
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.17145495116710663,
      "learning_rate": 3.9968743311980015e-05,
      "loss": 1.2717,
      "step": 196
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.15577386319637299,
      "learning_rate": 3.996638578642691e-05,
      "loss": 1.2165,
      "step": 197
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.18255503475666046,
      "learning_rate": 3.996394263049074e-05,
      "loss": 1.2062,
      "step": 198
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.17394264042377472,
      "learning_rate": 3.996141385464955e-05,
      "loss": 1.2499,
      "step": 199
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.20845308899879456,
      "learning_rate": 3.9958799469748547e-05,
      "loss": 1.353,
      "step": 200
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.16469226777553558,
      "learning_rate": 3.9956099487000135e-05,
      "loss": 1.2376,
      "step": 201
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.16727469861507416,
      "learning_rate": 3.995331391798379e-05,
      "loss": 1.2536,
      "step": 202
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1603512018918991,
      "learning_rate": 3.9950442774646065e-05,
      "loss": 1.2753,
      "step": 203
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.18711543083190918,
      "learning_rate": 3.994748606930051e-05,
      "loss": 1.2629,
      "step": 204
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1531892865896225,
      "learning_rate": 3.994444381462763e-05,
      "loss": 1.2333,
      "step": 205
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.17141614854335785,
      "learning_rate": 3.994131602367481e-05,
      "loss": 1.2556,
      "step": 206
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.17776872217655182,
      "learning_rate": 3.9938102709856316e-05,
      "loss": 1.1875,
      "step": 207
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.15654616057872772,
      "learning_rate": 3.9934803886953153e-05,
      "loss": 1.281,
      "step": 208
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.16635853052139282,
      "learning_rate": 3.993141956911309e-05,
      "loss": 1.1977,
      "step": 209
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.1581771969795227,
      "learning_rate": 3.9927949770850535e-05,
      "loss": 1.2556,
      "step": 210
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.19136467576026917,
      "learning_rate": 3.99243945070465e-05,
      "loss": 1.2549,
      "step": 211
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.18221412599086761,
      "learning_rate": 3.992075379294856e-05,
      "loss": 1.2468,
      "step": 212
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1619117110967636,
      "learning_rate": 3.991702764417073e-05,
      "loss": 1.2866,
      "step": 213
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1579107940196991,
      "learning_rate": 3.9913216076693446e-05,
      "loss": 1.2815,
      "step": 214
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.15225648880004883,
      "learning_rate": 3.9909319106863485e-05,
      "loss": 1.1889,
      "step": 215
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.16033805906772614,
      "learning_rate": 3.990533675139389e-05,
      "loss": 1.2241,
      "step": 216
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1602856069803238,
      "learning_rate": 3.990126902736389e-05,
      "loss": 1.2266,
      "step": 217
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1666950285434723,
      "learning_rate": 3.989711595221886e-05,
      "loss": 1.2292,
      "step": 218
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1629578322172165,
      "learning_rate": 3.989287754377019e-05,
      "loss": 1.2544,
      "step": 219
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.15922550857067108,
      "learning_rate": 3.988855382019526e-05,
      "loss": 1.2395,
      "step": 220
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1633135825395584,
      "learning_rate": 3.988414480003735e-05,
      "loss": 1.3399,
      "step": 221
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.16409821808338165,
      "learning_rate": 3.9879650502205537e-05,
      "loss": 1.1643,
      "step": 222
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1624230146408081,
      "learning_rate": 3.987507094597464e-05,
      "loss": 1.3067,
      "step": 223
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.16975589096546173,
      "learning_rate": 3.9870406150985134e-05,
      "loss": 1.1727,
      "step": 224
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1849404126405716,
      "learning_rate": 3.986565613724304e-05,
      "loss": 1.3096,
      "step": 225
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.18090464174747467,
      "learning_rate": 3.986082092511988e-05,
      "loss": 1.2235,
      "step": 226
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1610763967037201,
      "learning_rate": 3.985590053535256e-05,
      "loss": 1.2425,
      "step": 227
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1640045940876007,
      "learning_rate": 3.9850894989043284e-05,
      "loss": 1.3407,
      "step": 228
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.16807672381401062,
      "learning_rate": 3.9845804307659485e-05,
      "loss": 1.3529,
      "step": 229
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.16140711307525635,
      "learning_rate": 3.984062851303369e-05,
      "loss": 1.3073,
      "step": 230
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.15345898270606995,
      "learning_rate": 3.983536762736349e-05,
      "loss": 1.2497,
      "step": 231
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1554725468158722,
      "learning_rate": 3.983002167321138e-05,
      "loss": 1.2908,
      "step": 232
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.16261020302772522,
      "learning_rate": 3.98245906735047e-05,
      "loss": 1.1978,
      "step": 233
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.15362827479839325,
      "learning_rate": 3.981907465153552e-05,
      "loss": 1.1768,
      "step": 234
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.15314264595508575,
      "learning_rate": 3.981347363096056e-05,
      "loss": 1.1572,
      "step": 235
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.1712978333234787,
      "learning_rate": 3.980778763580108e-05,
      "loss": 1.3071,
      "step": 236
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.15101170539855957,
      "learning_rate": 3.980201669044274e-05,
      "loss": 1.1953,
      "step": 237
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.16669559478759766,
      "learning_rate": 3.9796160819635566e-05,
      "loss": 1.3362,
      "step": 238
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.23853600025177002,
      "learning_rate": 3.979022004849379e-05,
      "loss": 1.2309,
      "step": 239
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.15961961448192596,
      "learning_rate": 3.9784194402495746e-05,
      "loss": 1.2157,
      "step": 240
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.16380910575389862,
      "learning_rate": 3.977808390748381e-05,
      "loss": 1.1569,
      "step": 241
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.17208293080329895,
      "learning_rate": 3.977188858966421e-05,
      "loss": 1.349,
      "step": 242
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1566724181175232,
      "learning_rate": 3.976560847560697e-05,
      "loss": 1.2158,
      "step": 243
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1605634242296219,
      "learning_rate": 3.975924359224581e-05,
      "loss": 1.167,
      "step": 244
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.21259568631649017,
      "learning_rate": 3.9752793966877956e-05,
      "loss": 1.2877,
      "step": 245
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.1743762195110321,
      "learning_rate": 3.97462596271641e-05,
      "loss": 1.3,
      "step": 246
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.1705716997385025,
      "learning_rate": 3.9739640601128255e-05,
      "loss": 1.2658,
      "step": 247
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.1837138682603836,
      "learning_rate": 3.97329369171576e-05,
      "loss": 1.2626,
      "step": 248
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.22162184119224548,
      "learning_rate": 3.9726148604002414e-05,
      "loss": 1.1911,
      "step": 249
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.16128268837928772,
      "learning_rate": 3.9719275690775924e-05,
      "loss": 1.2447,
      "step": 250
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.16500110924243927,
      "learning_rate": 3.971231820695417e-05,
      "loss": 1.2036,
      "step": 251
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.16154707968235016,
      "learning_rate": 3.970527618237592e-05,
      "loss": 1.1957,
      "step": 252
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.16126224398612976,
      "learning_rate": 3.969814964724248e-05,
      "loss": 1.2647,
      "step": 253
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.16112996637821198,
      "learning_rate": 3.969093863211762e-05,
      "loss": 1.2127,
      "step": 254
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.1538519561290741,
      "learning_rate": 3.968364316792743e-05,
      "loss": 1.1234,
      "step": 255
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.16396304965019226,
      "learning_rate": 3.967626328596015e-05,
      "loss": 1.2309,
      "step": 256
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.18015865981578827,
      "learning_rate": 3.966879901786608e-05,
      "loss": 1.1613,
      "step": 257
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.16599513590335846,
      "learning_rate": 3.966125039565745e-05,
      "loss": 1.2543,
      "step": 258
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.19396306574344635,
      "learning_rate": 3.965361745170821e-05,
      "loss": 1.2597,
      "step": 259
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.1652296930551529,
      "learning_rate": 3.964590021875401e-05,
      "loss": 1.2601,
      "step": 260
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.15919215977191925,
      "learning_rate": 3.963809872989193e-05,
      "loss": 1.1675,
      "step": 261
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.16900300979614258,
      "learning_rate": 3.963021301858045e-05,
      "loss": 1.295,
      "step": 262
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.18816988170146942,
      "learning_rate": 3.9622243118639215e-05,
      "loss": 1.3135,
      "step": 263
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.17363770306110382,
      "learning_rate": 3.961418906424897e-05,
      "loss": 1.3225,
      "step": 264
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1609456092119217,
      "learning_rate": 3.960605088995134e-05,
      "loss": 1.2022,
      "step": 265
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1848209649324417,
      "learning_rate": 3.959782863064874e-05,
      "loss": 1.2134,
      "step": 266
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1635885089635849,
      "learning_rate": 3.9589522321604205e-05,
      "loss": 1.2537,
      "step": 267
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.21050697565078735,
      "learning_rate": 3.958113199844123e-05,
      "loss": 1.3015,
      "step": 268
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.1821780800819397,
      "learning_rate": 3.9572657697143614e-05,
      "loss": 1.1287,
      "step": 269
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.16301009058952332,
      "learning_rate": 3.9564099454055325e-05,
      "loss": 1.2451,
      "step": 270
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.17613324522972107,
      "learning_rate": 3.955545730588034e-05,
      "loss": 1.3656,
      "step": 271
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.16889138519763947,
      "learning_rate": 3.954673128968247e-05,
      "loss": 1.3678,
      "step": 272
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.16508743166923523,
      "learning_rate": 3.953792144288523e-05,
      "loss": 1.3248,
      "step": 273
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.16305625438690186,
      "learning_rate": 3.952902780327163e-05,
      "loss": 1.2358,
      "step": 274
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.1744881272315979,
      "learning_rate": 3.9520050408984076e-05,
      "loss": 1.2405,
      "step": 275
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.16882789134979248,
      "learning_rate": 3.951098929852417e-05,
      "loss": 1.2694,
      "step": 276
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.15946689248085022,
      "learning_rate": 3.950184451075252e-05,
      "loss": 1.2312,
      "step": 277
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.16837111115455627,
      "learning_rate": 3.949261608488866e-05,
      "loss": 1.2121,
      "step": 278
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.16055358946323395,
      "learning_rate": 3.948330406051077e-05,
      "loss": 1.251,
      "step": 279
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.16870392858982086,
      "learning_rate": 3.947390847755559e-05,
      "loss": 1.2926,
      "step": 280
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.1582109034061432,
      "learning_rate": 3.94644293763182e-05,
      "loss": 1.2649,
      "step": 281
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.19230706989765167,
      "learning_rate": 3.9454866797451895e-05,
      "loss": 1.2991,
      "step": 282
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.15807847678661346,
      "learning_rate": 3.9445220781967963e-05,
      "loss": 1.1764,
      "step": 283
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.1773795485496521,
      "learning_rate": 3.9435491371235534e-05,
      "loss": 1.3195,
      "step": 284
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.16266267001628876,
      "learning_rate": 3.94256786069814e-05,
      "loss": 1.2765,
      "step": 285
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.1656751185655594,
      "learning_rate": 3.941578253128982e-05,
      "loss": 1.2727,
      "step": 286
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.15894421935081482,
      "learning_rate": 3.940580318660238e-05,
      "loss": 1.2083,
      "step": 287
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.16084228456020355,
      "learning_rate": 3.939574061571775e-05,
      "loss": 1.232,
      "step": 288
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.16208983957767487,
      "learning_rate": 3.938559486179156e-05,
      "loss": 1.2277,
      "step": 289
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.16037632524967194,
      "learning_rate": 3.937536596833618e-05,
      "loss": 1.1273,
      "step": 290
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.2040693312883377,
      "learning_rate": 3.9365053979220555e-05,
      "loss": 1.2405,
      "step": 291
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1898617446422577,
      "learning_rate": 3.935465893866998e-05,
      "loss": 1.222,
      "step": 292
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.16039510071277618,
      "learning_rate": 3.934418089126595e-05,
      "loss": 1.2946,
      "step": 293
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.21441307663917542,
      "learning_rate": 3.933361988194596e-05,
      "loss": 1.2268,
      "step": 294
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.15417777001857758,
      "learning_rate": 3.9322975956003297e-05,
      "loss": 1.145,
      "step": 295
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.16220960021018982,
      "learning_rate": 3.9312249159086855e-05,
      "loss": 1.3189,
      "step": 296
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.1598338782787323,
      "learning_rate": 3.9301439537200936e-05,
      "loss": 1.2083,
      "step": 297
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.18459190428256989,
      "learning_rate": 3.929054713670506e-05,
      "loss": 1.2136,
      "step": 298
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.15777897834777832,
      "learning_rate": 3.927957200431377e-05,
      "loss": 1.2017,
      "step": 299
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.177915558218956,
      "learning_rate": 3.926851418709641e-05,
      "loss": 1.3244,
      "step": 300
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.1588701456785202,
      "learning_rate": 3.925737373247694e-05,
      "loss": 1.3037,
      "step": 301
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.1676599532365799,
      "learning_rate": 3.9246150688233745e-05,
      "loss": 1.2729,
      "step": 302
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.20505991578102112,
      "learning_rate": 3.923484510249938e-05,
      "loss": 1.241,
      "step": 303
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.15723706781864166,
      "learning_rate": 3.922345702376044e-05,
      "loss": 1.2241,
      "step": 304
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.18037424981594086,
      "learning_rate": 3.921198650085726e-05,
      "loss": 1.2395,
      "step": 305
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.16490055620670319,
      "learning_rate": 3.9200433582983825e-05,
      "loss": 1.2515,
      "step": 306
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.16806964576244354,
      "learning_rate": 3.9188798319687406e-05,
      "loss": 1.2211,
      "step": 307
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.1634434014558792,
      "learning_rate": 3.917708076086851e-05,
      "loss": 1.298,
      "step": 308
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.16614285111427307,
      "learning_rate": 3.916528095678053e-05,
      "loss": 1.3037,
      "step": 309
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.18700821697711945,
      "learning_rate": 3.9153398958029606e-05,
      "loss": 1.2613,
      "step": 310
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.16272488236427307,
      "learning_rate": 3.91414348155744e-05,
      "loss": 1.2064,
      "step": 311
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16149508953094482,
      "learning_rate": 3.9129388580725855e-05,
      "loss": 1.2886,
      "step": 312
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.1734437793493271,
      "learning_rate": 3.911726030514698e-05,
      "loss": 1.2709,
      "step": 313
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16677477955818176,
      "learning_rate": 3.9105050040852646e-05,
      "loss": 1.2614,
      "step": 314
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.17172600328922272,
      "learning_rate": 3.9092757840209347e-05,
      "loss": 1.184,
      "step": 315
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.15502072870731354,
      "learning_rate": 3.908038375593498e-05,
      "loss": 1.1066,
      "step": 316
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.16084939241409302,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 1.2861,
      "step": 317
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.16080649197101593,
      "learning_rate": 3.905539014912027e-05,
      "loss": 1.2096,
      "step": 318
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.16113747656345367,
      "learning_rate": 3.90427707337707e-05,
      "loss": 1.1859,
      "step": 319
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.1573685258626938,
      "learning_rate": 3.903006964917111e-05,
      "loss": 1.1895,
      "step": 320
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.1588548868894577,
      "learning_rate": 3.901728694979301e-05,
      "loss": 1.2548,
      "step": 321
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.15512435138225555,
      "learning_rate": 3.900442269045791e-05,
      "loss": 1.2825,
      "step": 322
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.17400631308555603,
      "learning_rate": 3.899147692633711e-05,
      "loss": 1.3224,
      "step": 323
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.19142161309719086,
      "learning_rate": 3.897844971295144e-05,
      "loss": 1.1845,
      "step": 324
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.18297170102596283,
      "learning_rate": 3.89653411061711e-05,
      "loss": 1.198,
      "step": 325
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.17008209228515625,
      "learning_rate": 3.895215116221529e-05,
      "loss": 1.2946,
      "step": 326
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.16479219496250153,
      "learning_rate": 3.893887993765211e-05,
      "loss": 1.2793,
      "step": 327
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.16869719326496124,
      "learning_rate": 3.892552748939823e-05,
      "loss": 1.3061,
      "step": 328
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.16528654098510742,
      "learning_rate": 3.891209387471863e-05,
      "loss": 1.2766,
      "step": 329
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.1654234379529953,
      "learning_rate": 3.889857915122644e-05,
      "loss": 1.1604,
      "step": 330
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.1730952113866806,
      "learning_rate": 3.888498337688261e-05,
      "loss": 1.2526,
      "step": 331
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.16318488121032715,
      "learning_rate": 3.8871306609995715e-05,
      "loss": 1.2352,
      "step": 332
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.17201673984527588,
      "learning_rate": 3.885754890922169e-05,
      "loss": 1.331,
      "step": 333
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.1656893938779831,
      "learning_rate": 3.8843710333563536e-05,
      "loss": 1.2122,
      "step": 334
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.15819483995437622,
      "learning_rate": 3.882979094237115e-05,
      "loss": 1.1374,
      "step": 335
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.17912957072257996,
      "learning_rate": 3.8815790795341e-05,
      "loss": 1.1865,
      "step": 336
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.17538312077522278,
      "learning_rate": 3.8801709952515894e-05,
      "loss": 1.2847,
      "step": 337
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.16031011939048767,
      "learning_rate": 3.878754847428473e-05,
      "loss": 1.1983,
      "step": 338
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.15993796288967133,
      "learning_rate": 3.8773306421382215e-05,
      "loss": 1.2416,
      "step": 339
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.16199611127376556,
      "learning_rate": 3.875898385488864e-05,
      "loss": 1.2562,
      "step": 340
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.159291073679924,
      "learning_rate": 3.87445808362296e-05,
      "loss": 1.2262,
      "step": 341
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.15733957290649414,
      "learning_rate": 3.8730097427175685e-05,
      "loss": 1.1643,
      "step": 342
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.16530875861644745,
      "learning_rate": 3.8715533689842303e-05,
      "loss": 1.2809,
      "step": 343
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.31174418330192566,
      "learning_rate": 3.870088968668936e-05,
      "loss": 1.3185,
      "step": 344
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.1655522733926773,
      "learning_rate": 3.8686165480520964e-05,
      "loss": 1.3158,
      "step": 345
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.17282016575336456,
      "learning_rate": 3.867136113448524e-05,
      "loss": 1.3052,
      "step": 346
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.16425450146198273,
      "learning_rate": 3.865647671207399e-05,
      "loss": 1.3419,
      "step": 347
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.15989594161510468,
      "learning_rate": 3.864151227712244e-05,
      "loss": 1.1391,
      "step": 348
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.1693430244922638,
      "learning_rate": 3.8626467893808956e-05,
      "loss": 1.1643,
      "step": 349
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.15857893228530884,
      "learning_rate": 3.861134362665482e-05,
      "loss": 1.172,
      "step": 350
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.1693955510854721,
      "learning_rate": 3.859613954052387e-05,
      "loss": 1.3536,
      "step": 351
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.1622908115386963,
      "learning_rate": 3.85808557006223e-05,
      "loss": 1.2033,
      "step": 352
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.17406344413757324,
      "learning_rate": 3.8565492172498314e-05,
      "loss": 1.3955,
      "step": 353
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.17219895124435425,
      "learning_rate": 3.85500490220419e-05,
      "loss": 1.2801,
      "step": 354
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.25802117586135864,
      "learning_rate": 3.853452631548452e-05,
      "loss": 1.2639,
      "step": 355
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.16231444478034973,
      "learning_rate": 3.851892411939883e-05,
      "loss": 1.263,
      "step": 356
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.16031944751739502,
      "learning_rate": 3.8503242500698396e-05,
      "loss": 1.2532,
      "step": 357
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.15626895427703857,
      "learning_rate": 3.848748152663739e-05,
      "loss": 1.2321,
      "step": 358
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.15723131597042084,
      "learning_rate": 3.847164126481035e-05,
      "loss": 1.16,
      "step": 359
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.16541031002998352,
      "learning_rate": 3.845572178315183e-05,
      "loss": 1.2644,
      "step": 360
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.17055818438529968,
      "learning_rate": 3.843972314993614e-05,
      "loss": 1.2573,
      "step": 361
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.16403935849666595,
      "learning_rate": 3.8423645433777064e-05,
      "loss": 1.2026,
      "step": 362
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.157577782869339,
      "learning_rate": 3.840748870362753e-05,
      "loss": 1.2497,
      "step": 363
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.17598943412303925,
      "learning_rate": 3.8391253028779365e-05,
      "loss": 1.2226,
      "step": 364
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.15772323310375214,
      "learning_rate": 3.8374938478862936e-05,
      "loss": 1.1726,
      "step": 365
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.18102790415287018,
      "learning_rate": 3.83585451238469e-05,
      "loss": 1.2938,
      "step": 366
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.17216327786445618,
      "learning_rate": 3.834207303403789e-05,
      "loss": 1.3421,
      "step": 367
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.16786645352840424,
      "learning_rate": 3.83255222800802e-05,
      "loss": 1.2734,
      "step": 368
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.15548373758792877,
      "learning_rate": 3.83088929329555e-05,
      "loss": 1.2656,
      "step": 369
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.16076824069023132,
      "learning_rate": 3.8292185063982536e-05,
      "loss": 1.1429,
      "step": 370
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.16665169596672058,
      "learning_rate": 3.827539874481678e-05,
      "loss": 1.2775,
      "step": 371
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.1681457906961441,
      "learning_rate": 3.82585340474502e-05,
      "loss": 1.267,
      "step": 372
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.16220974922180176,
      "learning_rate": 3.824159104421087e-05,
      "loss": 1.2049,
      "step": 373
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.1736627221107483,
      "learning_rate": 3.822456980776272e-05,
      "loss": 1.2997,
      "step": 374
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.17877478897571564,
      "learning_rate": 3.820747041110517e-05,
      "loss": 1.2733,
      "step": 375
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.17471949756145477,
      "learning_rate": 3.8190292927572896e-05,
      "loss": 1.2488,
      "step": 376
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.1678047776222229,
      "learning_rate": 3.817303743083542e-05,
      "loss": 1.2193,
      "step": 377
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.18162204325199127,
      "learning_rate": 3.8155703994896866e-05,
      "loss": 1.1816,
      "step": 378
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.1719117909669876,
      "learning_rate": 3.813829269409562e-05,
      "loss": 1.3535,
      "step": 379
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.19797664880752563,
      "learning_rate": 3.812080360310399e-05,
      "loss": 1.3568,
      "step": 380
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.17989763617515564,
      "learning_rate": 3.810323679692793e-05,
      "loss": 1.3152,
      "step": 381
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.1681927740573883,
      "learning_rate": 3.808559235090667e-05,
      "loss": 1.2814,
      "step": 382
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.7965008616447449,
      "learning_rate": 3.806787034071244e-05,
      "loss": 1.3065,
      "step": 383
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.17025770246982574,
      "learning_rate": 3.805007084235009e-05,
      "loss": 1.1674,
      "step": 384
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.1730756014585495,
      "learning_rate": 3.803219393215683e-05,
      "loss": 1.1614,
      "step": 385
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.16721907258033752,
      "learning_rate": 3.801423968680185e-05,
      "loss": 1.2812,
      "step": 386
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.17217952013015747,
      "learning_rate": 3.7996208183286e-05,
      "loss": 1.229,
      "step": 387
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.1746724396944046,
      "learning_rate": 3.79780994989415e-05,
      "loss": 1.3108,
      "step": 388
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.1702830046415329,
      "learning_rate": 3.795991371143153e-05,
      "loss": 1.2668,
      "step": 389
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.1598566621541977,
      "learning_rate": 3.794165089875e-05,
      "loss": 1.1801,
      "step": 390
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.17259812355041504,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 1.2354,
      "step": 391
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2091628760099411,
      "learning_rate": 3.7904894511499115e-05,
      "loss": 1.1883,
      "step": 392
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.22369763255119324,
      "learning_rate": 3.788640109456788e-05,
      "loss": 1.2687,
      "step": 393
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.17732800543308258,
      "learning_rate": 3.7867830967740643e-05,
      "loss": 1.3269,
      "step": 394
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.28118595480918884,
      "learning_rate": 3.7849184210659614e-05,
      "loss": 1.2424,
      "step": 395
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.18187764286994934,
      "learning_rate": 3.7830460903295634e-05,
      "loss": 1.1875,
      "step": 396
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.16909223794937134,
      "learning_rate": 3.781166112594788e-05,
      "loss": 1.2701,
      "step": 397
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.17135021090507507,
      "learning_rate": 3.779278495924345e-05,
      "loss": 1.1831,
      "step": 398
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.17674005031585693,
      "learning_rate": 3.777383248413709e-05,
      "loss": 1.2732,
      "step": 399
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.198370561003685,
      "learning_rate": 3.775480378191079e-05,
      "loss": 1.1929,
      "step": 400
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.18088266253471375,
      "learning_rate": 3.773569893417347e-05,
      "loss": 1.2575,
      "step": 401
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.16227401793003082,
      "learning_rate": 3.7716518022860606e-05,
      "loss": 1.1559,
      "step": 402
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.1971830427646637,
      "learning_rate": 3.769726113023389e-05,
      "loss": 1.211,
| "step": 403 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.1707913875579834, |
| "learning_rate": 3.76779283388809e-05, |
| "loss": 1.1535, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.1668080985546112, |
| "learning_rate": 3.765851973171469e-05, |
| "loss": 1.1838, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.16925190389156342, |
| "learning_rate": 3.763903539197348e-05, |
| "loss": 1.2518, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.1722031980752945, |
| "learning_rate": 3.76194754032203e-05, |
| "loss": 1.2118, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.17410987615585327, |
| "learning_rate": 3.759983984934261e-05, |
| "loss": 1.232, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.1774168461561203, |
| "learning_rate": 3.758012881455192e-05, |
| "loss": 1.1537, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.16939586400985718, |
| "learning_rate": 3.756034238338352e-05, |
| "loss": 1.1585, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.17839783430099487, |
| "learning_rate": 3.754048064069599e-05, |
| "loss": 1.3063, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.16792502999305725, |
| "learning_rate": 3.7520543671670936e-05, |
| "loss": 1.2978, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 0.189891517162323, |
| "learning_rate": 3.7500531561812576e-05, |
| "loss": 1.235, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.18355625867843628, |
| "learning_rate": 3.7480444396947396e-05, |
| "loss": 1.284, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.1714622974395752, |
| "learning_rate": 3.7460282263223764e-05, |
| "loss": 1.1672, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.1876678168773651, |
| "learning_rate": 3.744004524711158e-05, |
| "loss": 1.2938, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.16813495755195618, |
| "learning_rate": 3.741973343540188e-05, |
| "loss": 1.1843, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.2258601039648056, |
| "learning_rate": 3.739934691520648e-05, |
| "loss": 1.2196, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.17664629220962524, |
| "learning_rate": 3.7378885773957614e-05, |
| "loss": 1.1806, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.1906002312898636, |
| "learning_rate": 3.735835009940754e-05, |
| "loss": 1.2252, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.1772574931383133, |
| "learning_rate": 3.733773997962815e-05, |
| "loss": 1.2304, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.17094944417476654, |
| "learning_rate": 3.731705550301064e-05, |
| "loss": 1.2695, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.19538728892803192, |
| "learning_rate": 3.729629675826507e-05, |
| "loss": 1.2349, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.18487848341464996, |
| "learning_rate": 3.727546383442007e-05, |
| "loss": 1.2724, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.1757872849702835, |
| "learning_rate": 3.725455682082233e-05, |
| "loss": 1.2322, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.161984384059906, |
| "learning_rate": 3.7233575807136354e-05, |
| "loss": 1.1426, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.1794055700302124, |
| "learning_rate": 3.7212520883343974e-05, |
| "loss": 1.3217, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.17491760849952698, |
| "learning_rate": 3.719139213974403e-05, |
| "loss": 1.2233, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.21414370834827423, |
| "learning_rate": 3.717018966695192e-05, |
| "loss": 1.2594, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.1733601987361908, |
| "learning_rate": 3.714891355589929e-05, |
| "loss": 1.2172, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 0.1574425846338272, |
| "learning_rate": 3.712756389783356e-05, |
| "loss": 1.2312, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 0.18264387547969818, |
| "learning_rate": 3.7106140784317594e-05, |
| "loss": 1.2142, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 0.17057283222675323, |
| "learning_rate": 3.708464430722929e-05, |
| "loss": 1.1942, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 0.17199979722499847, |
| "learning_rate": 3.706307455876118e-05, |
| "loss": 1.2263, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 0.1813754439353943, |
| "learning_rate": 3.704143163142001e-05, |
| "loss": 1.2384, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.16909797489643097, |
| "learning_rate": 3.701971561802642e-05, |
| "loss": 1.1905, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.16116483509540558, |
| "learning_rate": 3.699792661171444e-05, |
| "loss": 1.1849, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.17043234407901764, |
| "learning_rate": 3.69760647059312e-05, |
| "loss": 1.2199, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.17161297798156738, |
| "learning_rate": 3.695412999443643e-05, |
| "loss": 1.2092, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.1632567048072815, |
| "learning_rate": 3.693212257130215e-05, |
| "loss": 1.202, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.17327187955379486, |
| "learning_rate": 3.691004253091217e-05, |
| "loss": 1.2268, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 0.17322610318660736, |
| "learning_rate": 3.688788996796179e-05, |
| "loss": 1.2197, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 0.16746391355991364, |
| "learning_rate": 3.686566497745728e-05, |
| "loss": 1.2467, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 0.16351164877414703, |
| "learning_rate": 3.6843367654715584e-05, |
| "loss": 1.2082, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 0.17086975276470184, |
| "learning_rate": 3.6820998095363834e-05, |
| "loss": 1.2093, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 0.18365664780139923, |
| "learning_rate": 3.679855639533895e-05, |
| "loss": 1.2005, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.17057694494724274, |
| "learning_rate": 3.677604265088729e-05, |
| "loss": 1.1776, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.1794084757566452, |
| "learning_rate": 3.675345695856415e-05, |
| "loss": 1.208, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.1862630546092987, |
| "learning_rate": 3.6730799415233414e-05, |
| "loss": 1.1669, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.19876237213611603, |
| "learning_rate": 3.670807011806709e-05, |
| "loss": 1.2048, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.1671525239944458, |
| "learning_rate": 3.668526916454495e-05, |
| "loss": 1.2165, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.18071943521499634, |
| "learning_rate": 3.666239665245405e-05, |
| "loss": 1.2544, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.17689716815948486, |
| "learning_rate": 3.6639452679888365e-05, |
| "loss": 1.2845, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.16965340077877045, |
| "learning_rate": 3.661643734524834e-05, |
| "loss": 1.2231, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.18019068241119385, |
| "learning_rate": 3.6593350747240456e-05, |
| "loss": 1.2223, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.18523040413856506, |
| "learning_rate": 3.657019298487685e-05, |
| "loss": 1.2486, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.19133590161800385, |
| "learning_rate": 3.654696415747483e-05, |
| "loss": 1.1961, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.17477400600910187, |
| "learning_rate": 3.652366436465652e-05, |
| "loss": 1.2621, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.21932773292064667, |
| "learning_rate": 3.650029370634837e-05, |
| "loss": 1.2104, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.18244686722755432, |
| "learning_rate": 3.6476852282780755e-05, |
| "loss": 1.2468, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.17326635122299194, |
| "learning_rate": 3.645334019448755e-05, |
| "loss": 1.1609, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.17420323193073273, |
| "learning_rate": 3.6429757542305686e-05, |
| "loss": 1.2156, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.1676824539899826, |
| "learning_rate": 3.6406104427374744e-05, |
| "loss": 1.1739, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.1757155805826187, |
| "learning_rate": 3.638238095113646e-05, |
| "loss": 1.187, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.18094448745250702, |
| "learning_rate": 3.6358587215334355e-05, |
| "loss": 1.2271, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.1914701908826828, |
| "learning_rate": 3.633472332201329e-05, |
| "loss": 1.0756, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.17444278299808502, |
| "learning_rate": 3.631078937351898e-05, |
| "loss": 1.2323, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.17253823578357697, |
| "learning_rate": 3.628678547249761e-05, |
| "loss": 1.2408, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 0.1650070697069168, |
| "learning_rate": 3.626271172189536e-05, |
| "loss": 1.1771, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.16705216467380524, |
| "learning_rate": 3.623856822495798e-05, |
| "loss": 1.2229, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.1749449521303177, |
| "learning_rate": 3.6214355085230346e-05, |
| "loss": 1.3156, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.1721351593732834, |
| "learning_rate": 3.6190072406556016e-05, |
| "loss": 1.2198, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.17338605225086212, |
| "learning_rate": 3.616572029307676e-05, |
| "loss": 1.2696, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.17081210017204285, |
| "learning_rate": 3.614129884923217e-05, |
| "loss": 1.1272, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.17321552336215973, |
| "learning_rate": 3.611680817975915e-05, |
| "loss": 1.0944, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.1784401535987854, |
| "learning_rate": 3.609224838969149e-05, |
| "loss": 1.2999, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.17898234724998474, |
| "learning_rate": 3.606761958435945e-05, |
| "loss": 1.2288, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.1713172197341919, |
| "learning_rate": 3.6042921869389255e-05, |
| "loss": 1.2837, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.16900764405727386, |
| "learning_rate": 3.601815535070266e-05, |
| "loss": 1.1852, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 0.16487158834934235, |
| "learning_rate": 3.599332013451651e-05, |
| "loss": 1.2508, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.1718226671218872, |
| "learning_rate": 3.596841632734228e-05, |
| "loss": 1.2721, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.17556148767471313, |
| "learning_rate": 3.594344403598561e-05, |
| "loss": 1.1579, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.1776830554008484, |
| "learning_rate": 3.591840336754584e-05, |
| "loss": 1.3364, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.17868830263614655, |
| "learning_rate": 3.589329442941556e-05, |
| "loss": 1.2904, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.16877397894859314, |
| "learning_rate": 3.586811732928017e-05, |
| "loss": 1.1634, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.29319822788238525, |
| "learning_rate": 3.5842872175117386e-05, |
| "loss": 1.1825, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.17021752893924713, |
| "learning_rate": 3.5817559075196775e-05, |
| "loss": 1.2404, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.18304027616977692, |
| "learning_rate": 3.579217813807934e-05, |
| "loss": 1.208, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.18062491714954376, |
| "learning_rate": 3.576672947261698e-05, |
| "loss": 1.3094, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.18421950936317444, |
| "learning_rate": 3.574121318795208e-05, |
| "loss": 1.2645, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.17431406676769257, |
| "learning_rate": 3.571562939351705e-05, |
| "loss": 1.241, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.1877688467502594, |
| "learning_rate": 3.568997819903377e-05, |
| "loss": 1.279, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.17437636852264404, |
| "learning_rate": 3.566425971451324e-05, |
| "loss": 1.1178, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.16442453861236572, |
| "learning_rate": 3.5638474050255024e-05, |
| "loss": 1.0344, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.1917441338300705, |
| "learning_rate": 3.5612621316846805e-05, |
| "loss": 1.2959, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.1886010766029358, |
| "learning_rate": 3.55867016251639e-05, |
| "loss": 1.187, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.20641809701919556, |
| "learning_rate": 3.556071508636879e-05, |
| "loss": 1.1979, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.1747037172317505, |
| "learning_rate": 3.553466181191067e-05, |
| "loss": 1.18, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.1825367510318756, |
| "learning_rate": 3.550854191352492e-05, |
| "loss": 1.293, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.17783713340759277, |
| "learning_rate": 3.5482355503232656e-05, |
| "loss": 1.2322, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.17146770656108856, |
| "learning_rate": 3.5456102693340255e-05, |
| "loss": 1.2166, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.1839715540409088, |
| "learning_rate": 3.5429783596438864e-05, |
| "loss": 1.2572, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 0.17346954345703125, |
| "learning_rate": 3.54033983254039e-05, |
| "loss": 1.1569, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.19239090383052826, |
| "learning_rate": 3.53769469933946e-05, |
| "loss": 1.1428, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.21293193101882935, |
| "learning_rate": 3.53504297138535e-05, |
| "loss": 1.3067, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.18217827379703522, |
| "learning_rate": 3.532384660050601e-05, |
| "loss": 1.3452, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.18584825098514557, |
| "learning_rate": 3.529719776735982e-05, |
| "loss": 1.3022, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.17959220707416534, |
| "learning_rate": 3.527048332870453e-05, |
| "loss": 1.2488, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.1819523572921753, |
| "learning_rate": 3.524370339911107e-05, |
| "loss": 1.1128, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.20110684633255005, |
| "learning_rate": 3.521685809343126e-05, |
| "loss": 1.2031, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.17916655540466309, |
| "learning_rate": 3.518994752679728e-05, |
| "loss": 1.2172, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.17345504462718964, |
| "learning_rate": 3.5162971814621234e-05, |
| "loss": 1.2089, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.5167191624641418, |
| "learning_rate": 3.513593107259458e-05, |
| "loss": 1.2281, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 0.17433781921863556, |
| "learning_rate": 3.510882541668769e-05, |
| "loss": 1.2192, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.1887521892786026, |
| "learning_rate": 3.508165496314931e-05, |
| "loss": 1.2763, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.1756310611963272, |
| "learning_rate": 3.505441982850615e-05, |
| "loss": 1.2834, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.17292964458465576, |
| "learning_rate": 3.502712012956223e-05, |
| "loss": 1.2629, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.18053486943244934, |
| "learning_rate": 3.4999755983398537e-05, |
| "loss": 1.2334, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.16213248670101166, |
| "learning_rate": 3.4972327507372415e-05, |
| "loss": 1.1131, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.18039150536060333, |
| "learning_rate": 3.494483481911713e-05, |
| "loss": 1.2284, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.17420712113380432, |
| "learning_rate": 3.491727803654132e-05, |
| "loss": 1.2631, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.17673422396183014, |
| "learning_rate": 3.488965727782851e-05, |
| "loss": 1.2223, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.17651581764221191, |
| "learning_rate": 3.486197266143659e-05, |
| "loss": 1.2411, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.18939518928527832, |
| "learning_rate": 3.483422430609735e-05, |
| "loss": 1.2125, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.1721743494272232, |
| "learning_rate": 3.48064123308159e-05, |
| "loss": 1.2753, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.18775472044944763, |
| "learning_rate": 3.477853685487023e-05, |
| "loss": 1.2722, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.17603406310081482, |
| "learning_rate": 3.4750597997810644e-05, |
| "loss": 1.3061, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.22322580218315125, |
| "learning_rate": 3.472259587945928e-05, |
| "loss": 1.1812, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.16920214891433716, |
| "learning_rate": 3.469453061990959e-05, |
| "loss": 1.2284, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.1781124472618103, |
| "learning_rate": 3.466640233952582e-05, |
| "loss": 1.1115, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.17156866192817688, |
| "learning_rate": 3.4638211158942493e-05, |
| "loss": 1.1759, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 0.18725483119487762, |
| "learning_rate": 3.460995719906389e-05, |
| "loss": 1.3214, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 0.17117691040039062, |
| "learning_rate": 3.458164058106353e-05, |
| "loss": 1.1723, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 0.18169349431991577, |
| "learning_rate": 3.455326142638369e-05, |
| "loss": 1.1945, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 0.17365239560604095, |
| "learning_rate": 3.45248198567348e-05, |
| "loss": 1.2686, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 0.23987340927124023, |
| "learning_rate": 3.4496315994094995e-05, |
| "loss": 1.2492, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.18593370914459229, |
| "learning_rate": 3.446774996070959e-05, |
| "loss": 1.209, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.1767171174287796, |
| "learning_rate": 3.443912187909049e-05, |
| "loss": 1.2453, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.17480580508708954, |
| "learning_rate": 3.441043187201574e-05, |
| "loss": 1.236, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.22028052806854248, |
| "learning_rate": 3.4381680062528957e-05, |
| "loss": 1.2248, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.18207862973213196, |
| "learning_rate": 3.43528665739388e-05, |
| "loss": 1.2862, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.17260274291038513, |
| "learning_rate": 3.432399152981847e-05, |
| "loss": 1.1853, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.18698668479919434, |
| "learning_rate": 3.429505505400516e-05, |
| "loss": 1.3278, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.18749062716960907, |
| "learning_rate": 3.426605727059952e-05, |
| "loss": 1.283, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.17615492641925812, |
| "learning_rate": 3.423699830396512e-05, |
| "loss": 1.1874, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.21085886657238007, |
| "learning_rate": 3.4207878278727956e-05, |
| "loss": 1.1462, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.1712028980255127, |
| "learning_rate": 3.417869731977588e-05, |
| "loss": 1.1746, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 0.17687669396400452, |
| "learning_rate": 3.4149455552258054e-05, |
| "loss": 1.2707, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.98, |
| "grad_norm": 0.20794951915740967, |
| "learning_rate": 3.412015310158444e-05, |
| "loss": 1.2805, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.98, |
| "grad_norm": 0.18447402119636536, |
| "learning_rate": 3.4090790093425276e-05, |
| "loss": 1.2066, |
| "step": 550 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 1686, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 50, |
| "total_flos": 2.2559932560074342e+19, |
| "train_batch_size": 3, |
| "trial_name": null, |
| "trial_params": null |
| } |
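The log above ends at global step 550 of 1,686 (epoch ≈0.98 of 3), with a checkpoint written every 50 steps. Below is a minimal sketch, added for illustration and not part of the original log, of how one might load this state and summarize the logged loss curve. It assumes the standard Hugging Face Trainer layout, in which this JSON is saved as `trainer_state.json` inside a checkpoint directory; the path used here is a placeholder.

```python
# Minimal sketch: load a Trainer state file and summarize the loss curve.
# Assumes the usual Hugging Face layout, i.e. this JSON lives at
# <output_dir>/checkpoint-<step>/trainer_state.json (path is a placeholder).
import json

with open("checkpoint-550/trainer_state.json") as f:  # hypothetical path
    state = json.load(f)

# Keep only training entries; eval entries (if any) lack a "loss" key.
losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]

first_step, first_loss = losses[0]
last_step, last_loss = losses[-1]
tail = [loss for _, loss in losses[-50:]]  # average the most recent steps

print(f"step {first_step}: loss {first_loss:.4f}")
print(f"step {last_step}: loss {last_loss:.4f}")
print(f"mean loss over last {len(tail)} steps: {sum(tail) / len(tail):.4f}")
```

Because entries are logged every step (`logging_steps` is 1.0), a tail average like this smooths the per-step noise visible in the raw `loss` values.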