{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.993252361673415,
  "eval_steps": 500,
  "global_step": 925,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005398110661268556,
      "grad_norm": 5.8547901781698295,
      "learning_rate": 4.301075268817205e-07,
      "loss": 0.9789,
      "step": 1
    },
    {
      "epoch": 0.010796221322537112,
      "grad_norm": 6.195900478096068,
      "learning_rate": 8.60215053763441e-07,
      "loss": 1.0263,
      "step": 2
    },
    {
      "epoch": 0.016194331983805668,
      "grad_norm": 5.844104649074486,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 1.3526,
      "step": 3
    },
    {
      "epoch": 0.021592442645074223,
      "grad_norm": 7.368611794635692,
      "learning_rate": 1.720430107526882e-06,
      "loss": 1.049,
      "step": 4
    },
    {
      "epoch": 0.02699055330634278,
      "grad_norm": 5.672738441304507,
      "learning_rate": 2.1505376344086023e-06,
      "loss": 0.9437,
      "step": 5
    },
    {
      "epoch": 0.032388663967611336,
      "grad_norm": 4.775325426134859,
      "learning_rate": 2.580645161290323e-06,
      "loss": 0.9776,
      "step": 6
    },
    {
      "epoch": 0.037786774628879895,
      "grad_norm": 4.685418851624996,
      "learning_rate": 3.0107526881720433e-06,
      "loss": 1.1554,
      "step": 7
    },
    {
      "epoch": 0.043184885290148446,
      "grad_norm": 3.259634449429892,
      "learning_rate": 3.440860215053764e-06,
      "loss": 0.8474,
      "step": 8
    },
    {
      "epoch": 0.048582995951417005,
      "grad_norm": 3.2715012435354347,
      "learning_rate": 3.870967741935484e-06,
      "loss": 0.845,
      "step": 9
    },
    {
      "epoch": 0.05398110661268556,
      "grad_norm": 2.518263916966614,
      "learning_rate": 4.3010752688172045e-06,
      "loss": 0.8109,
      "step": 10
    },
    {
      "epoch": 0.059379217273954114,
      "grad_norm": 1.931792216640399,
      "learning_rate": 4.731182795698925e-06,
      "loss": 0.77,
      "step": 11
    },
    {
      "epoch": 0.06477732793522267,
      "grad_norm": 1.917478295407825,
      "learning_rate": 5.161290322580646e-06,
      "loss": 0.7055,
      "step": 12
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 1.9342362297971811,
      "learning_rate": 5.591397849462365e-06,
      "loss": 0.6897,
      "step": 13
    },
    {
      "epoch": 0.07557354925775979,
      "grad_norm": 2.335686007359441,
      "learning_rate": 6.021505376344087e-06,
      "loss": 0.8362,
      "step": 14
    },
    {
      "epoch": 0.08097165991902834,
      "grad_norm": 2.899927706834149,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.7419,
      "step": 15
    },
    {
      "epoch": 0.08636977058029689,
      "grad_norm": 2.320716820106864,
      "learning_rate": 6.881720430107528e-06,
      "loss": 0.7808,
      "step": 16
    },
    {
      "epoch": 0.09176788124156546,
      "grad_norm": 1.9233741520153684,
      "learning_rate": 7.311827956989248e-06,
      "loss": 0.7005,
      "step": 17
    },
    {
      "epoch": 0.09716599190283401,
      "grad_norm": 1.4890943029905404,
      "learning_rate": 7.741935483870968e-06,
      "loss": 0.737,
      "step": 18
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 1.6346157704701012,
      "learning_rate": 8.172043010752689e-06,
      "loss": 0.753,
      "step": 19
    },
    {
      "epoch": 0.10796221322537113,
      "grad_norm": 1.3923927391716204,
      "learning_rate": 8.602150537634409e-06,
      "loss": 0.662,
      "step": 20
    },
    {
      "epoch": 0.11336032388663968,
      "grad_norm": 1.4868588771935882,
      "learning_rate": 9.03225806451613e-06,
      "loss": 0.6965,
      "step": 21
    },
    {
      "epoch": 0.11875843454790823,
      "grad_norm": 1.5586239228768057,
      "learning_rate": 9.46236559139785e-06,
      "loss": 0.6918,
      "step": 22
    },
    {
      "epoch": 0.1241565452091768,
      "grad_norm": 1.6467617716036158,
      "learning_rate": 9.89247311827957e-06,
      "loss": 0.7206,
      "step": 23
    },
    {
      "epoch": 0.12955465587044535,
      "grad_norm": 1.5431165209998874,
      "learning_rate": 1.0322580645161291e-05,
      "loss": 0.7401,
      "step": 24
    },
    {
      "epoch": 0.1349527665317139,
      "grad_norm": 1.262214429142841,
      "learning_rate": 1.0752688172043012e-05,
      "loss": 0.7939,
      "step": 25
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 1.342395990639865,
      "learning_rate": 1.118279569892473e-05,
      "loss": 0.7074,
      "step": 26
    },
    {
      "epoch": 0.145748987854251,
      "grad_norm": 1.392051362880004,
      "learning_rate": 1.1612903225806453e-05,
      "loss": 0.6546,
      "step": 27
    },
    {
      "epoch": 0.15114709851551958,
      "grad_norm": 1.3560822319733044,
      "learning_rate": 1.2043010752688173e-05,
      "loss": 0.6785,
      "step": 28
    },
    {
      "epoch": 0.15654520917678813,
      "grad_norm": 1.2795645343244073,
      "learning_rate": 1.2473118279569894e-05,
      "loss": 0.6751,
      "step": 29
    },
    {
      "epoch": 0.16194331983805668,
      "grad_norm": 1.1855435088179302,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 0.7002,
      "step": 30
    },
    {
      "epoch": 0.16734143049932523,
      "grad_norm": 1.1407557409125308,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6543,
      "step": 31
    },
    {
      "epoch": 0.17273954116059378,
      "grad_norm": 1.2413150424813355,
      "learning_rate": 1.3763440860215056e-05,
      "loss": 0.9318,
      "step": 32
    },
    {
      "epoch": 0.17813765182186234,
      "grad_norm": 1.2499110019395852,
      "learning_rate": 1.4193548387096776e-05,
      "loss": 0.6477,
      "step": 33
    },
    {
      "epoch": 0.18353576248313092,
      "grad_norm": 1.4008520106194233,
      "learning_rate": 1.4623655913978497e-05,
      "loss": 0.711,
      "step": 34
    },
    {
      "epoch": 0.18893387314439947,
      "grad_norm": 1.1891772900416815,
      "learning_rate": 1.5053763440860215e-05,
      "loss": 0.6406,
      "step": 35
    },
    {
      "epoch": 0.19433198380566802,
      "grad_norm": 1.147501365646657,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 0.6624,
      "step": 36
    },
    {
      "epoch": 0.19973009446693657,
      "grad_norm": 1.1751858805701079,
      "learning_rate": 1.5913978494623657e-05,
      "loss": 0.7633,
      "step": 37
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 1.1864282998965006,
      "learning_rate": 1.6344086021505377e-05,
      "loss": 0.7109,
      "step": 38
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.0272367928001183,
      "learning_rate": 1.6774193548387098e-05,
      "loss": 0.5869,
      "step": 39
    },
    {
      "epoch": 0.21592442645074225,
      "grad_norm": 1.2112435393460614,
      "learning_rate": 1.7204301075268818e-05,
      "loss": 0.6915,
      "step": 40
    },
    {
      "epoch": 0.2213225371120108,
      "grad_norm": 1.1750727844511175,
      "learning_rate": 1.763440860215054e-05,
      "loss": 0.6758,
      "step": 41
    },
    {
      "epoch": 0.22672064777327935,
      "grad_norm": 1.175639613071845,
      "learning_rate": 1.806451612903226e-05,
      "loss": 0.6514,
      "step": 42
    },
    {
      "epoch": 0.2321187584345479,
      "grad_norm": 1.1903426022335029,
      "learning_rate": 1.849462365591398e-05,
      "loss": 0.6711,
      "step": 43
    },
    {
      "epoch": 0.23751686909581646,
      "grad_norm": 1.13775486347414,
      "learning_rate": 1.89247311827957e-05,
      "loss": 0.6484,
      "step": 44
    },
    {
      "epoch": 0.242914979757085,
      "grad_norm": 1.1018523962648061,
      "learning_rate": 1.935483870967742e-05,
      "loss": 0.6413,
      "step": 45
    },
    {
      "epoch": 0.2483130904183536,
      "grad_norm": 1.1536291053738799,
      "learning_rate": 1.978494623655914e-05,
      "loss": 0.6382,
      "step": 46
    },
    {
      "epoch": 0.25371120107962214,
      "grad_norm": 1.2874232917542905,
      "learning_rate": 2.0215053763440862e-05,
      "loss": 0.716,
      "step": 47
    },
    {
      "epoch": 0.2591093117408907,
      "grad_norm": 1.0667615653454148,
      "learning_rate": 2.0645161290322582e-05,
      "loss": 0.6627,
      "step": 48
    },
    {
      "epoch": 0.26450742240215924,
      "grad_norm": 1.1268887691690699,
      "learning_rate": 2.1075268817204303e-05,
      "loss": 0.6586,
      "step": 49
    },
    {
      "epoch": 0.2699055330634278,
      "grad_norm": 1.1645388991541576,
      "learning_rate": 2.1505376344086024e-05,
      "loss": 0.6267,
      "step": 50
    },
    {
      "epoch": 0.27530364372469635,
      "grad_norm": 1.0147829233748185,
      "learning_rate": 2.193548387096774e-05,
      "loss": 0.6659,
      "step": 51
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 1.0582569825253139,
      "learning_rate": 2.236559139784946e-05,
      "loss": 0.64,
      "step": 52
    },
    {
      "epoch": 0.28609986504723345,
      "grad_norm": 1.1030609601483023,
      "learning_rate": 2.2795698924731185e-05,
      "loss": 0.6531,
      "step": 53
    },
    {
      "epoch": 0.291497975708502,
      "grad_norm": 1.0689716886960958,
      "learning_rate": 2.3225806451612906e-05,
      "loss": 0.6584,
      "step": 54
    },
    {
      "epoch": 0.2968960863697706,
      "grad_norm": 1.0511582991478239,
      "learning_rate": 2.3655913978494626e-05,
      "loss": 0.6263,
      "step": 55
    },
    {
      "epoch": 0.30229419703103916,
      "grad_norm": 1.096934265784751,
      "learning_rate": 2.4086021505376347e-05,
      "loss": 0.8141,
      "step": 56
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 1.0264643074156008,
      "learning_rate": 2.4516129032258067e-05,
      "loss": 0.6414,
      "step": 57
    },
    {
      "epoch": 0.31309041835357626,
      "grad_norm": 1.1419883911045894,
      "learning_rate": 2.4946236559139788e-05,
      "loss": 0.6248,
      "step": 58
    },
    {
      "epoch": 0.3184885290148448,
      "grad_norm": 1.119342396183509,
      "learning_rate": 2.537634408602151e-05,
      "loss": 0.6696,
      "step": 59
    },
    {
      "epoch": 0.32388663967611336,
      "grad_norm": 1.1524685035070938,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 0.6926,
      "step": 60
    },
    {
      "epoch": 0.3292847503373819,
      "grad_norm": 1.1023493942950338,
      "learning_rate": 2.6236559139784946e-05,
      "loss": 0.7087,
      "step": 61
    },
    {
      "epoch": 0.33468286099865047,
      "grad_norm": 1.084679917232489,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6711,
      "step": 62
    },
    {
      "epoch": 0.340080971659919,
      "grad_norm": 0.9988990454591712,
      "learning_rate": 2.7096774193548387e-05,
      "loss": 0.6966,
      "step": 63
    },
    {
      "epoch": 0.34547908232118757,
      "grad_norm": 1.0505889482507405,
      "learning_rate": 2.752688172043011e-05,
      "loss": 0.6916,
      "step": 64
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 1.061750405323965,
      "learning_rate": 2.795698924731183e-05,
      "loss": 0.6824,
      "step": 65
    },
    {
      "epoch": 0.3562753036437247,
      "grad_norm": 0.9984273646688296,
      "learning_rate": 2.8387096774193552e-05,
      "loss": 0.666,
      "step": 66
    },
    {
      "epoch": 0.3616734143049933,
      "grad_norm": 1.0894465182196065,
      "learning_rate": 2.8817204301075273e-05,
      "loss": 0.698,
      "step": 67
    },
    {
      "epoch": 0.36707152496626183,
      "grad_norm": 1.061814212390062,
      "learning_rate": 2.9247311827956993e-05,
      "loss": 0.6498,
      "step": 68
    },
    {
      "epoch": 0.3724696356275304,
      "grad_norm": 1.1225275383063609,
      "learning_rate": 2.9677419354838714e-05,
      "loss": 0.865,
      "step": 69
    },
    {
      "epoch": 0.37786774628879893,
      "grad_norm": 1.0101520932943957,
      "learning_rate": 3.010752688172043e-05,
      "loss": 0.635,
      "step": 70
    },
    {
      "epoch": 0.3832658569500675,
      "grad_norm": 1.1042840779826546,
      "learning_rate": 3.053763440860215e-05,
      "loss": 0.6602,
      "step": 71
    },
    {
      "epoch": 0.38866396761133604,
      "grad_norm": 0.9951373477683033,
      "learning_rate": 3.096774193548387e-05,
      "loss": 0.6181,
      "step": 72
    },
    {
      "epoch": 0.3940620782726046,
      "grad_norm": 1.0419770417033245,
      "learning_rate": 3.139784946236559e-05,
      "loss": 0.6591,
      "step": 73
    },
    {
      "epoch": 0.39946018893387314,
      "grad_norm": 1.1235300688912213,
      "learning_rate": 3.182795698924731e-05,
      "loss": 0.7067,
      "step": 74
    },
    {
      "epoch": 0.4048582995951417,
      "grad_norm": 1.032338142893847,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.6232,
      "step": 75
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 1.0941631682251065,
      "learning_rate": 3.2688172043010754e-05,
      "loss": 0.7227,
      "step": 76
    },
    {
      "epoch": 0.4156545209176788,
      "grad_norm": 1.056284707482023,
      "learning_rate": 3.3118279569892475e-05,
      "loss": 0.7325,
      "step": 77
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.1196455822802613,
      "learning_rate": 3.3548387096774195e-05,
      "loss": 0.7219,
      "step": 78
    },
    {
      "epoch": 0.42645074224021595,
      "grad_norm": 0.9854925794840517,
      "learning_rate": 3.3978494623655916e-05,
      "loss": 0.6219,
      "step": 79
    },
    {
      "epoch": 0.4318488529014845,
      "grad_norm": 1.0865931108282347,
      "learning_rate": 3.4408602150537636e-05,
      "loss": 0.7059,
      "step": 80
    },
    {
      "epoch": 0.43724696356275305,
      "grad_norm": 0.9699541750890343,
      "learning_rate": 3.483870967741936e-05,
      "loss": 0.661,
      "step": 81
    },
    {
      "epoch": 0.4426450742240216,
      "grad_norm": 3.1013378557995916,
      "learning_rate": 3.526881720430108e-05,
      "loss": 0.6909,
      "step": 82
    },
    {
      "epoch": 0.44804318488529016,
      "grad_norm": 2.2538980648127542,
      "learning_rate": 3.56989247311828e-05,
      "loss": 0.6949,
      "step": 83
    },
    {
      "epoch": 0.4534412955465587,
      "grad_norm": 2.4075652108349894,
      "learning_rate": 3.612903225806452e-05,
      "loss": 0.659,
      "step": 84
    },
    {
      "epoch": 0.45883940620782726,
      "grad_norm": 7.56367856019384,
      "learning_rate": 3.655913978494624e-05,
      "loss": 0.828,
      "step": 85
    },
    {
      "epoch": 0.4642375168690958,
      "grad_norm": 2.895205886929977,
      "learning_rate": 3.698924731182796e-05,
      "loss": 0.9367,
      "step": 86
    },
    {
      "epoch": 0.46963562753036436,
      "grad_norm": 3.887496158116938,
      "learning_rate": 3.741935483870968e-05,
      "loss": 0.7459,
      "step": 87
    },
    {
      "epoch": 0.4750337381916329,
      "grad_norm": 2.274394089418949,
      "learning_rate": 3.78494623655914e-05,
      "loss": 0.7433,
      "step": 88
    },
    {
      "epoch": 0.48043184885290147,
      "grad_norm": 2.236395643430774,
      "learning_rate": 3.827956989247312e-05,
      "loss": 0.6513,
      "step": 89
    },
    {
      "epoch": 0.48582995951417,
      "grad_norm": 2.266924080857405,
      "learning_rate": 3.870967741935484e-05,
      "loss": 0.6671,
      "step": 90
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 1.2054447808759345,
      "learning_rate": 3.913978494623656e-05,
      "loss": 0.7066,
      "step": 91
    },
    {
      "epoch": 0.4966261808367072,
      "grad_norm": 60.63347204888335,
      "learning_rate": 3.956989247311828e-05,
      "loss": 0.6467,
      "step": 92
    },
    {
      "epoch": 0.5020242914979757,
      "grad_norm": 1.6457989694054762,
      "learning_rate": 4e-05,
      "loss": 0.7459,
      "step": 93
    },
    {
      "epoch": 0.5074224021592443,
      "grad_norm": 0.9244181817569254,
      "learning_rate": 3.999985742198083e-05,
      "loss": 0.7123,
      "step": 94
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.9024731727062596,
      "learning_rate": 3.999942968995616e-05,
      "loss": 0.6764,
      "step": 95
    },
    {
      "epoch": 0.5182186234817814,
      "grad_norm": 1.0014763698382203,
      "learning_rate": 3.99987168100245e-05,
      "loss": 0.7774,
      "step": 96
    },
    {
      "epoch": 0.5236167341430499,
      "grad_norm": 1.1017315016983185,
      "learning_rate": 3.9997718792349965e-05,
      "loss": 0.8379,
      "step": 97
    },
    {
      "epoch": 0.5290148448043185,
      "grad_norm": 0.9754394024215393,
      "learning_rate": 3.9996435651162085e-05,
      "loss": 0.7148,
      "step": 98
    },
    {
      "epoch": 0.5344129554655871,
      "grad_norm": 0.9585971972404963,
      "learning_rate": 3.999486740475564e-05,
      "loss": 0.699,
      "step": 99
    },
    {
      "epoch": 0.5398110661268556,
      "grad_norm": 1.1343058374086679,
      "learning_rate": 3.999301407549037e-05,
      "loss": 0.7095,
      "step": 100
    },
    {
      "epoch": 0.5452091767881242,
      "grad_norm": 1.06892995056798,
      "learning_rate": 3.9990875689790674e-05,
      "loss": 0.7254,
      "step": 101
    },
    {
      "epoch": 0.5506072874493927,
      "grad_norm": 1.0016896561256128,
      "learning_rate": 3.998845227814524e-05,
      "loss": 0.6704,
      "step": 102
    },
    {
      "epoch": 0.5560053981106613,
      "grad_norm": 1.0363960250258362,
      "learning_rate": 3.9985743875106584e-05,
      "loss": 0.6784,
      "step": 103
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 0.9134306930019722,
      "learning_rate": 3.9982750519290587e-05,
      "loss": 0.6061,
      "step": 104
    },
    {
      "epoch": 0.5668016194331984,
      "grad_norm": 1.010882912730621,
      "learning_rate": 3.997947225337592e-05,
      "loss": 0.797,
      "step": 105
    },
    {
      "epoch": 0.5721997300944669,
      "grad_norm": 1.0664043173199367,
      "learning_rate": 3.997590912410345e-05,
      "loss": 0.7478,
      "step": 106
    },
    {
      "epoch": 0.5775978407557355,
      "grad_norm": 0.8939136934200366,
      "learning_rate": 3.997206118227557e-05,
      "loss": 0.6959,
      "step": 107
    },
    {
      "epoch": 0.582995951417004,
      "grad_norm": 0.9519988505798721,
      "learning_rate": 3.996792848275546e-05,
      "loss": 0.7078,
      "step": 108
    },
    {
      "epoch": 0.5883940620782726,
      "grad_norm": 1.0090649283064343,
      "learning_rate": 3.996351108446635e-05,
      "loss": 0.8817,
      "step": 109
    },
    {
      "epoch": 0.5937921727395412,
      "grad_norm": 1.0131570568985884,
      "learning_rate": 3.9958809050390626e-05,
      "loss": 0.6692,
      "step": 110
    },
    {
      "epoch": 0.5991902834008097,
      "grad_norm": 1.0231276046162419,
      "learning_rate": 3.995382244756895e-05,
      "loss": 0.6858,
      "step": 111
    },
    {
      "epoch": 0.6045883940620783,
      "grad_norm": 1.064585741979068,
      "learning_rate": 3.994855134709931e-05,
      "loss": 0.7636,
      "step": 112
    },
    {
      "epoch": 0.6099865047233468,
      "grad_norm": 1.1130479996632003,
      "learning_rate": 3.994299582413603e-05,
      "loss": 0.762,
      "step": 113
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 1.0079939261061786,
      "learning_rate": 3.9937155957888646e-05,
      "loss": 0.7667,
      "step": 114
    },
    {
      "epoch": 0.6207827260458839,
      "grad_norm": 0.9882446367570781,
      "learning_rate": 3.9931031831620816e-05,
      "loss": 0.6575,
      "step": 115
    },
    {
      "epoch": 0.6261808367071525,
      "grad_norm": 0.9116587927342452,
      "learning_rate": 3.9924623532649124e-05,
      "loss": 0.6581,
      "step": 116
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 1.048971008752528,
      "learning_rate": 3.991793115234182e-05,
      "loss": 0.7607,
      "step": 117
    },
    {
      "epoch": 0.6369770580296896,
      "grad_norm": 0.9629592207299862,
      "learning_rate": 3.991095478611755e-05,
      "loss": 0.6741,
      "step": 118
    },
    {
      "epoch": 0.6423751686909581,
      "grad_norm": 1.0602574291233269,
      "learning_rate": 3.990369453344394e-05,
      "loss": 0.7037,
      "step": 119
    },
    {
      "epoch": 0.6477732793522267,
      "grad_norm": 0.8986022296429939,
      "learning_rate": 3.989615049783625e-05,
      "loss": 0.7306,
      "step": 120
    },
    {
      "epoch": 0.6531713900134952,
      "grad_norm": 0.872613255131903,
      "learning_rate": 3.9888322786855846e-05,
      "loss": 0.6873,
      "step": 121
    },
    {
      "epoch": 0.6585695006747638,
      "grad_norm": 0.9970544195947879,
      "learning_rate": 3.9880211512108674e-05,
      "loss": 0.7597,
      "step": 122
    },
    {
      "epoch": 0.6639676113360324,
      "grad_norm": 0.9142253988898471,
      "learning_rate": 3.987181678924369e-05,
      "loss": 0.8754,
      "step": 123
    },
    {
      "epoch": 0.6693657219973009,
      "grad_norm": 0.8824260469924733,
      "learning_rate": 3.9863138737951175e-05,
      "loss": 0.6773,
      "step": 124
    },
    {
      "epoch": 0.6747638326585695,
      "grad_norm": 0.9378213861156586,
      "learning_rate": 3.985417748196108e-05,
      "loss": 0.6998,
      "step": 125
    },
    {
      "epoch": 0.680161943319838,
      "grad_norm": 0.9258545529411903,
      "learning_rate": 3.984493314904122e-05,
      "loss": 0.6635,
      "step": 126
    },
    {
      "epoch": 0.6855600539811066,
      "grad_norm": 0.942025251769537,
      "learning_rate": 3.983540587099545e-05,
      "loss": 0.7178,
      "step": 127
    },
    {
      "epoch": 0.6909581646423751,
      "grad_norm": 0.9024931118371414,
      "learning_rate": 3.982559578366182e-05,
      "loss": 0.8362,
      "step": 128
    },
    {
      "epoch": 0.6963562753036437,
      "grad_norm": 0.9148733733577575,
      "learning_rate": 3.98155030269106e-05,
      "loss": 0.6316,
      "step": 129
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 0.9421278891685188,
      "learning_rate": 3.980512774464235e-05,
      "loss": 0.7145,
      "step": 130
    },
    {
      "epoch": 0.7071524966261808,
      "grad_norm": 0.8421736450675255,
      "learning_rate": 3.979447008478575e-05,
      "loss": 0.6264,
      "step": 131
    },
    {
      "epoch": 0.7125506072874493,
      "grad_norm": 0.9448713733876393,
      "learning_rate": 3.978353019929562e-05,
      "loss": 0.7043,
      "step": 132
    },
    {
      "epoch": 0.717948717948718,
      "grad_norm": 1.0328624262343067,
      "learning_rate": 3.977230824415069e-05,
      "loss": 0.7935,
      "step": 133
    },
    {
      "epoch": 0.7233468286099866,
      "grad_norm": 0.955045550419709,
      "learning_rate": 3.9760804379351354e-05,
      "loss": 0.7184,
      "step": 134
    },
    {
      "epoch": 0.728744939271255,
      "grad_norm": 1.023688730851197,
      "learning_rate": 3.974901876891745e-05,
      "loss": 0.7233,
      "step": 135
    },
    {
      "epoch": 0.7341430499325237,
      "grad_norm": 0.9868216577806593,
      "learning_rate": 3.973695158088588e-05,
      "loss": 0.7037,
      "step": 136
    },
    {
      "epoch": 0.7395411605937922,
      "grad_norm": 0.9306844020240326,
      "learning_rate": 3.972460298730822e-05,
      "loss": 0.6593,
      "step": 137
    },
    {
      "epoch": 0.7449392712550608,
      "grad_norm": 1.0925332318325522,
      "learning_rate": 3.9711973164248255e-05,
      "loss": 0.7787,
      "step": 138
    },
    {
      "epoch": 0.7503373819163293,
      "grad_norm": 0.940936448827497,
      "learning_rate": 3.9699062291779516e-05,
      "loss": 0.6946,
      "step": 139
    },
    {
      "epoch": 0.7557354925775979,
      "grad_norm": 0.9658235331624828,
      "learning_rate": 3.9685870553982665e-05,
      "loss": 0.7574,
      "step": 140
    },
    {
      "epoch": 0.7611336032388664,
      "grad_norm": 0.9638079015407056,
      "learning_rate": 3.967239813894288e-05,
      "loss": 0.6656,
      "step": 141
    },
    {
      "epoch": 0.766531713900135,
      "grad_norm": 1.0035425179888644,
      "learning_rate": 3.965864523874718e-05,
      "loss": 0.7013,
      "step": 142
    },
    {
      "epoch": 0.7719298245614035,
      "grad_norm": 1.0201587225092634,
      "learning_rate": 3.964461204948171e-05,
      "loss": 0.6561,
      "step": 143
    },
    {
      "epoch": 0.7773279352226721,
      "grad_norm": 1.057632934842731,
      "learning_rate": 3.963029877122889e-05,
      "loss": 0.688,
      "step": 144
    },
    {
      "epoch": 0.7827260458839406,
      "grad_norm": 1.0799406339392832,
      "learning_rate": 3.961570560806461e-05,
      "loss": 0.7369,
      "step": 145
    },
    {
      "epoch": 0.7881241565452092,
      "grad_norm": 0.9807351275702001,
      "learning_rate": 3.96008327680553e-05,
      "loss": 0.755,
      "step": 146
    },
    {
      "epoch": 0.7935222672064778,
      "grad_norm": 0.8684222059026471,
      "learning_rate": 3.9585680463254965e-05,
      "loss": 0.6737,
      "step": 147
    },
    {
      "epoch": 0.7989203778677463,
      "grad_norm": 0.9291286386684726,
      "learning_rate": 3.9570248909702165e-05,
      "loss": 0.6869,
      "step": 148
    },
    {
      "epoch": 0.8043184885290149,
      "grad_norm": 0.9334466133420474,
      "learning_rate": 3.955453832741694e-05,
      "loss": 0.6368,
      "step": 149
    },
    {
      "epoch": 0.8097165991902834,
      "grad_norm": 0.9009663811050453,
      "learning_rate": 3.9538548940397654e-05,
      "loss": 0.9644,
      "step": 150
    },
    {
      "epoch": 0.815114709851552,
      "grad_norm": 1.0313471913166774,
      "learning_rate": 3.952228097661782e-05,
      "loss": 0.7582,
      "step": 151
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 0.9131908569576266,
      "learning_rate": 3.9505734668022844e-05,
      "loss": 0.7117,
      "step": 152
    },
    {
      "epoch": 0.8259109311740891,
      "grad_norm": 1.0165555550946475,
      "learning_rate": 3.948891025052672e-05,
      "loss": 0.6754,
      "step": 153
    },
    {
      "epoch": 0.8313090418353576,
      "grad_norm": 0.8870549778530541,
      "learning_rate": 3.947180796400865e-05,
      "loss": 0.6987,
      "step": 154
    },
    {
      "epoch": 0.8367071524966262,
      "grad_norm": 1.003808717947859,
      "learning_rate": 3.9454428052309654e-05,
      "loss": 0.6747,
      "step": 155
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.9111662680379845,
      "learning_rate": 3.943677076322908e-05,
      "loss": 0.787,
      "step": 156
    },
    {
      "epoch": 0.8475033738191633,
      "grad_norm": 0.9356146773942343,
      "learning_rate": 3.9418836348521045e-05,
      "loss": 0.7076,
      "step": 157
    },
    {
      "epoch": 0.8529014844804319,
      "grad_norm": 1.012979493517214,
      "learning_rate": 3.940062506389089e-05,
      "loss": 0.6868,
      "step": 158
    },
    {
      "epoch": 0.8582995951417004,
      "grad_norm": 0.9312587006321174,
      "learning_rate": 3.938213716899149e-05,
      "loss": 0.749,
      "step": 159
    },
    {
      "epoch": 0.863697705802969,
      "grad_norm": 0.9154261608012028,
      "learning_rate": 3.93633729274196e-05,
      "loss": 0.7358,
      "step": 160
    },
    {
      "epoch": 0.8690958164642375,
      "grad_norm": 0.9832755364311611,
      "learning_rate": 3.934433260671206e-05,
      "loss": 0.6628,
      "step": 161
    },
    {
      "epoch": 0.8744939271255061,
      "grad_norm": 0.9865516851317341,
      "learning_rate": 3.9325016478341986e-05,
      "loss": 0.7243,
      "step": 162
    },
    {
      "epoch": 0.8798920377867746,
      "grad_norm": 0.9580622704730626,
      "learning_rate": 3.9305424817714905e-05,
      "loss": 0.7155,
      "step": 163
    },
    {
      "epoch": 0.8852901484480432,
      "grad_norm": 0.886627340978247,
      "learning_rate": 3.928555790416485e-05,
      "loss": 0.8743,
      "step": 164
    },
    {
      "epoch": 0.8906882591093117,
      "grad_norm": 0.9369073382530939,
      "learning_rate": 3.926541602095033e-05,
      "loss": 0.8004,
      "step": 165
    },
    {
      "epoch": 0.8960863697705803,
      "grad_norm": 0.8967904070054883,
      "learning_rate": 3.9244999455250324e-05,
      "loss": 0.6837,
      "step": 166
    },
    {
      "epoch": 0.9014844804318488,
      "grad_norm": 0.8918453654014739,
      "learning_rate": 3.922430849816018e-05,
      "loss": 0.7069,
      "step": 167
    },
    {
      "epoch": 0.9068825910931174,
      "grad_norm": 0.9468000825480711,
      "learning_rate": 3.9203343444687475e-05,
      "loss": 0.6991,
      "step": 168
    },
    {
      "epoch": 0.9122807017543859,
      "grad_norm": 0.8690439611058536,
      "learning_rate": 3.918210459374778e-05,
      "loss": 0.6544,
      "step": 169
    },
    {
      "epoch": 0.9176788124156545,
      "grad_norm": 0.9203492753195474,
      "learning_rate": 3.916059224816043e-05,
      "loss": 0.8887,
      "step": 170
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.9400256924968273,
      "learning_rate": 3.913880671464418e-05,
      "loss": 0.6707,
      "step": 171
    },
    {
      "epoch": 0.9284750337381916,
      "grad_norm": 0.9929868868660712,
      "learning_rate": 3.911674830381286e-05,
      "loss": 0.746,
      "step": 172
    },
    {
      "epoch": 0.9338731443994602,
      "grad_norm": 0.8996290548917789,
      "learning_rate": 3.909441733017092e-05,
      "loss": 0.6638,
      "step": 173
    },
    {
      "epoch": 0.9392712550607287,
      "grad_norm": 1.0383078650677402,
      "learning_rate": 3.907181411210895e-05,
      "loss": 0.8804,
      "step": 174
    },
    {
      "epoch": 0.9446693657219973,
      "grad_norm": 0.8736475053968872,
      "learning_rate": 3.904893897189917e-05,
      "loss": 0.7383,
      "step": 175
    },
    {
      "epoch": 0.9500674763832658,
      "grad_norm": 0.8879797003351837,
      "learning_rate": 3.9025792235690787e-05,
      "loss": 0.686,
      "step": 176
    },
    {
      "epoch": 0.9554655870445344,
      "grad_norm": 0.8852717088467212,
      "learning_rate": 3.900237423350539e-05,
      "loss": 0.692,
      "step": 177
    },
    {
      "epoch": 0.9608636977058029,
      "grad_norm": 0.856393902188942,
      "learning_rate": 3.89786852992322e-05,
      "loss": 0.6664,
      "step": 178
    },
    {
      "epoch": 0.9662618083670715,
      "grad_norm": 0.9778728102030992,
      "learning_rate": 3.895472577062337e-05,
      "loss": 0.6959,
      "step": 179
    },
    {
      "epoch": 0.97165991902834,
      "grad_norm": 0.9807365431125,
      "learning_rate": 3.89304959892891e-05,
      "loss": 0.6826,
      "step": 180
    },
    {
      "epoch": 0.9770580296896086,
      "grad_norm": 0.9397890000073238,
      "learning_rate": 3.8905996300692806e-05,
      "loss": 0.8002,
      "step": 181
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 0.9314290560742274,
      "learning_rate": 3.888122705414621e-05,
      "loss": 0.6628,
      "step": 182
    },
    {
      "epoch": 0.9878542510121457,
      "grad_norm": 0.9348683094207633,
      "learning_rate": 3.885618860280433e-05,
      "loss": 0.7152,
      "step": 183
    },
    {
      "epoch": 0.9932523616734144,
      "grad_norm": 0.9283592796406461,
      "learning_rate": 3.883088130366042e-05,
      "loss": 0.6817,
      "step": 184
    },
    {
      "epoch": 0.9986504723346828,
      "grad_norm": 0.8957200626385793,
      "learning_rate": 3.880530551754095e-05,
      "loss": 0.6965,
      "step": 185
    },
    {
      "epoch": 1.0040485829959513,
      "grad_norm": 2.135429932537726,
      "learning_rate": 3.877946160910042e-05,
      "loss": 0.9106,
      "step": 186
    },
    {
      "epoch": 1.00944669365722,
      "grad_norm": 0.9324649166463199,
      "learning_rate": 3.8753349946816154e-05,
      "loss": 0.5466,
      "step": 187
    },
    {
      "epoch": 1.0148448043184886,
      "grad_norm": 0.889079188745322,
      "learning_rate": 3.8726970902983046e-05,
      "loss": 0.4034,
      "step": 188
    },
    {
      "epoch": 1.0202429149797572,
      "grad_norm": 0.9818393519416749,
      "learning_rate": 3.8700324853708304e-05,
      "loss": 0.3707,
      "step": 189
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 0.9989694298887677,
      "learning_rate": 3.867341217890599e-05,
      "loss": 0.3552,
      "step": 190
    },
    {
      "epoch": 1.0310391363022942,
      "grad_norm": 1.0964706430273843,
      "learning_rate": 3.864623326229172e-05,
      "loss": 0.3406,
      "step": 191
    },
    {
      "epoch": 1.0364372469635628,
      "grad_norm": 1.041322435377106,
      "learning_rate": 3.861878849137708e-05,
      "loss": 0.3507,
      "step": 192
    },
    {
      "epoch": 1.0418353576248314,
      "grad_norm": 1.0000512392448195,
      "learning_rate": 3.859107825746419e-05,
      "loss": 0.3974,
      "step": 193
    },
    {
      "epoch": 1.0472334682861,
      "grad_norm": 0.9147739098501848,
      "learning_rate": 3.8563102955640076e-05,
      "loss": 0.5635,
      "step": 194
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.8437219756968998,
      "learning_rate": 3.853486298477105e-05,
      "loss": 0.4385,
      "step": 195
    },
    {
      "epoch": 1.058029689608637,
      "grad_norm": 0.8292889033394908,
      "learning_rate": 3.850635874749701e-05,
      "loss": 0.3399,
      "step": 196
    },
    {
      "epoch": 1.0634278002699056,
      "grad_norm": 0.7709540617055263,
      "learning_rate": 3.8477590650225735e-05,
      "loss": 0.359,
      "step": 197
    },
    {
      "epoch": 1.0688259109311742,
      "grad_norm": 0.834537390618175,
      "learning_rate": 3.8448559103127065e-05,
      "loss": 0.5317,
      "step": 198
    },
    {
      "epoch": 1.0742240215924426,
      "grad_norm": 0.9152705040904082,
      "learning_rate": 3.841926452012704e-05,
      "loss": 0.3868,
      "step": 199
    },
    {
      "epoch": 1.0796221322537112,
      "grad_norm": 0.9881357548735162,
      "learning_rate": 3.838970731890202e-05,
      "loss": 0.391,
      "step": 200
    },
    {
      "epoch": 1.0850202429149798,
      "grad_norm": 0.9213113248556943,
      "learning_rate": 3.835988792087272e-05,
      "loss": 0.5105,
      "step": 201
    },
    {
      "epoch": 1.0904183535762484,
      "grad_norm": 0.9932763071736556,
      "learning_rate": 3.832980675119823e-05,
      "loss": 0.44,
      "step": 202
    },
    {
      "epoch": 1.0958164642375168,
      "grad_norm": 0.9979438465584454,
      "learning_rate": 3.8299464238769883e-05,
      "loss": 0.4094,
      "step": 203
    },
    {
      "epoch": 1.1012145748987854,
      "grad_norm": 0.7593268974099039,
      "learning_rate": 3.826886081620523e-05,
      "loss": 0.3163,
      "step": 204
    },
    {
      "epoch": 1.106612685560054,
      "grad_norm": 0.8549555976031787,
      "learning_rate": 3.82379969198418e-05,
      "loss": 0.5373,
      "step": 205
    },
    {
      "epoch": 1.1120107962213226,
      "grad_norm": 0.8485934704490189,
      "learning_rate": 3.8206872989730926e-05,
      "loss": 0.3291,
      "step": 206
    },
    {
      "epoch": 1.117408906882591,
      "grad_norm": 1.1808502925030266,
      "learning_rate": 3.817548946963142e-05,
      "loss": 0.3446,
      "step": 207
    },
    {
      "epoch": 1.1228070175438596,
      "grad_norm": 0.8718322044501718,
      "learning_rate": 3.81438468070033e-05,
      "loss": 0.3496,
      "step": 208
    },
    {
      "epoch": 1.1282051282051282,
      "grad_norm": 0.8269069896746979,
      "learning_rate": 3.811194545300139e-05,
      "loss": 0.3637,
      "step": 209
    },
    {
      "epoch": 1.1336032388663968,
      "grad_norm": 0.8241831789016307,
      "learning_rate": 3.807978586246887e-05,
      "loss": 0.329,
      "step": 210
    },
    {
      "epoch": 1.1390013495276654,
      "grad_norm": 0.8440307299715044,
      "learning_rate": 3.8047368493930814e-05,
      "loss": 0.3695,
      "step": 211
    },
    {
      "epoch": 1.1443994601889338,
      "grad_norm": 0.9057523221019308,
      "learning_rate": 3.8014693809587635e-05,
      "loss": 0.4349,
      "step": 212
    },
    {
      "epoch": 1.1497975708502024,
      "grad_norm": 0.7812716885128814,
      "learning_rate": 3.798176227530852e-05,
      "loss": 0.319,
      "step": 213
    },
    {
      "epoch": 1.155195681511471,
      "grad_norm": 0.754153255972741,
      "learning_rate": 3.794857436062476e-05,
      "loss": 0.3145,
      "step": 214
    },
    {
      "epoch": 1.1605937921727396,
      "grad_norm": 0.8752509690112213,
      "learning_rate": 3.791513053872306e-05,
      "loss": 0.3811,
      "step": 215
    },
    {
      "epoch": 1.165991902834008,
      "grad_norm": 0.8620173420159217,
      "learning_rate": 3.788143128643881e-05,
      "loss": 0.3402,
      "step": 216
    },
    {
      "epoch": 1.1713900134952766,
      "grad_norm": 0.8574889394982422,
      "learning_rate": 3.784747708424929e-05,
      "loss": 0.3523,
      "step": 217
    },
    {
      "epoch": 1.1767881241565452,
      "grad_norm": 0.9171855250942973,
      "learning_rate": 3.781326841626677e-05,
      "loss": 0.3733,
      "step": 218
    },
    {
      "epoch": 1.1821862348178138,
      "grad_norm": 0.8459848989881751,
      "learning_rate": 3.777880577023167e-05,
      "loss": 0.3391,
      "step": 219
    },
    {
      "epoch": 1.1875843454790824,
      "grad_norm": 1.7633085482731143,
      "learning_rate": 3.7744089637505565e-05,
      "loss": 0.6142,
      "step": 220
    },
    {
      "epoch": 1.1929824561403508,
      "grad_norm": 0.8882381136541697,
      "learning_rate": 3.7709120513064196e-05,
      "loss": 0.3793,
      "step": 221
    },
    {
      "epoch": 1.1983805668016194,
      "grad_norm": 0.9069761964495866,
      "learning_rate": 3.7673898895490435e-05,
      "loss": 0.3425,
      "step": 222
    },
    {
      "epoch": 1.203778677462888,
      "grad_norm": 0.9280379690711708,
      "learning_rate": 3.76384252869671e-05,
      "loss": 0.4051,
      "step": 223
    },
    {
      "epoch": 1.2091767881241566,
      "grad_norm": 0.8271806961908379,
      "learning_rate": 3.760270019326989e-05,
      "loss": 0.3556,
      "step": 224
    },
    {
      "epoch": 1.214574898785425,
      "grad_norm": 0.9136083147626765,
      "learning_rate": 3.7566724123760126e-05,
      "loss": 0.3273,
      "step": 225
    },
    {
      "epoch": 1.2199730094466936,
      "grad_norm": 0.9337801648538931,
      "learning_rate": 3.753049759137745e-05,
      "loss": 0.3721,
      "step": 226
    },
    {
      "epoch": 1.2253711201079622,
      "grad_norm": 0.8801415414022893,
      "learning_rate": 3.749402111263261e-05,
      "loss": 0.5007,
      "step": 227
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.8738904307010843,
      "learning_rate": 3.745729520760001e-05,
      "loss": 0.543,
      "step": 228
    },
    {
      "epoch": 1.2361673414304994,
      "grad_norm": 0.9215671196261831,
      "learning_rate": 3.7420320399910315e-05,
      "loss": 0.4912,
      "step": 229
    },
    {
      "epoch": 1.2415654520917678,
      "grad_norm": 0.7882682925458486,
      "learning_rate": 3.738309721674302e-05,
      "loss": 0.3524,
      "step": 230
    },
    {
      "epoch": 1.2469635627530364,
      "grad_norm": 0.9028612409679585,
      "learning_rate": 3.7345626188818906e-05,
      "loss": 0.4339,
      "step": 231
    },
    {
      "epoch": 1.252361673414305,
      "grad_norm": 0.8352453883537844,
      "learning_rate": 3.730790785039245e-05,
      "loss": 0.3874,
      "step": 232
    },
    {
      "epoch": 1.2577597840755734,
      "grad_norm": 0.8506796880369457,
      "learning_rate": 3.726994273924426e-05,
      "loss": 0.3473,
      "step": 233
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 0.794622563861647,
      "learning_rate": 3.7231731396673365e-05,
      "loss": 0.3548,
      "step": 234
    },
    {
      "epoch": 1.2685560053981106,
      "grad_norm": 0.8169696872933733,
      "learning_rate": 3.7193274367489524e-05,
      "loss": 0.3366,
      "step": 235
    },
    {
      "epoch": 1.2739541160593792,
      "grad_norm": 0.9414753748124672,
      "learning_rate": 3.7154572200005446e-05,
      "loss": 0.3352,
      "step": 236
    },
    {
      "epoch": 1.2793522267206479,
      "grad_norm": 0.8524753438779967,
      "learning_rate": 3.711562544602895e-05,
      "loss": 0.3668,
      "step": 237
    },
    {
      "epoch": 1.2847503373819165,
      "grad_norm": 0.8494604337265534,
      "learning_rate": 3.707643466085516e-05,
      "loss": 0.3519,
      "step": 238
    },
    {
      "epoch": 1.2901484480431848,
      "grad_norm": 1.017755403367373,
      "learning_rate": 3.703700040325852e-05,
      "loss": 0.4038,
      "step": 239
    },
    {
      "epoch": 1.2955465587044535,
      "grad_norm": 0.7951998743639334,
      "learning_rate": 3.699732323548485e-05,
      "loss": 0.3344,
      "step": 240
    },
    {
      "epoch": 1.300944669365722,
      "grad_norm": 0.8451484427027826,
      "learning_rate": 3.695740372324337e-05,
      "loss": 0.3604,
      "step": 241
    },
    {
      "epoch": 1.3063427800269904,
      "grad_norm": 0.9474589070464138,
      "learning_rate": 3.691724243569857e-05,
      "loss": 0.3783,
      "step": 242
    },
    {
      "epoch": 1.311740890688259,
      "grad_norm": 0.9167363147150677,
      "learning_rate": 3.687683994546213e-05,
      "loss": 0.3336,
      "step": 243
    },
    {
      "epoch": 1.3171390013495277,
      "grad_norm": 0.8499771648824146,
      "learning_rate": 3.683619682858476e-05,
      "loss": 0.3706,
      "step": 244
    },
    {
      "epoch": 1.3225371120107963,
      "grad_norm": 0.8642561348231351,
      "learning_rate": 3.6795313664547965e-05,
      "loss": 0.3615,
      "step": 245
    },
    {
      "epoch": 1.3279352226720649,
      "grad_norm": 0.7524096422021259,
      "learning_rate": 3.675419103625579e-05,
      "loss": 0.328,
      "step": 246
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.8817200727149742,
      "learning_rate": 3.6712829530026535e-05,
      "loss": 0.3983,
      "step": 247
    },
    {
      "epoch": 1.3387314439946019,
      "grad_norm": 0.7586442964430375,
      "learning_rate": 3.6671229735584365e-05,
      "loss": 0.2935,
      "step": 248
    },
    {
      "epoch": 1.3441295546558705,
      "grad_norm": 0.9626722935944538,
      "learning_rate": 3.662939224605091e-05,
      "loss": 0.567,
      "step": 249
    },
    {
      "epoch": 1.349527665317139,
      "grad_norm": 0.7968343661737369,
      "learning_rate": 3.658731765793679e-05,
      "loss": 0.3566,
      "step": 250
    },
    {
      "epoch": 1.3549257759784075,
      "grad_norm": 0.899257227621249,
      "learning_rate": 3.6545006571133174e-05,
      "loss": 0.4005,
      "step": 251
    },
    {
      "epoch": 1.360323886639676,
      "grad_norm": 0.8787913231865052,
      "learning_rate": 3.650245958890314e-05,
      "loss": 0.4003,
      "step": 252
    },
    {
      "epoch": 1.3657219973009447,
      "grad_norm": 0.9125900215239395,
      "learning_rate": 3.645967731787313e-05,
      "loss": 0.4386,
      "step": 253
    },
    {
      "epoch": 1.3711201079622133,
      "grad_norm": 0.8331834311620439,
      "learning_rate": 3.64166603680243e-05,
      "loss": 0.3307,
      "step": 254
    },
    {
      "epoch": 1.376518218623482,
      "grad_norm": 0.8425535041307437,
      "learning_rate": 3.63734093526838e-05,
      "loss": 0.3347,
      "step": 255
    },
    {
      "epoch": 1.3819163292847503,
      "grad_norm": 0.8290080988756854,
      "learning_rate": 3.632992488851603e-05,
      "loss": 0.3312,
      "step": 256
    },
    {
      "epoch": 1.3873144399460189,
      "grad_norm": 0.9378460105642437,
      "learning_rate": 3.6286207595513884e-05,
      "loss": 0.4643,
      "step": 257
    },
    {
      "epoch": 1.3927125506072875,
      "grad_norm": 0.8267549903762161,
      "learning_rate": 3.624225809698984e-05,
      "loss": 0.3539,
      "step": 258
    },
    {
      "epoch": 1.3981106612685559,
      "grad_norm": 0.8768268582940559,
      "learning_rate": 3.619807701956717e-05,
      "loss": 0.6197,
      "step": 259
    },
    {
      "epoch": 1.4035087719298245,
      "grad_norm": 0.8906787557494711,
      "learning_rate": 3.61536649931709e-05,
      "loss": 0.3794,
      "step": 260
    },
    {
      "epoch": 1.408906882591093,
      "grad_norm": 0.8376396972815752,
      "learning_rate": 3.610902265101892e-05,
      "loss": 0.3374,
      "step": 261
    },
    {
      "epoch": 1.4143049932523617,
      "grad_norm": 0.9702245743999838,
      "learning_rate": 3.60641506296129e-05,
      "loss": 0.3682,
      "step": 262
    },
    {
      "epoch": 1.4197031039136303,
      "grad_norm": 0.8599060721053028,
      "learning_rate": 3.601904956872923e-05,
      "loss": 0.4175,
      "step": 263
    },
    {
      "epoch": 1.425101214574899,
      "grad_norm": 0.8379932139189499,
      "learning_rate": 3.5973720111409904e-05,
      "loss": 0.3271,
      "step": 264
    },
    {
      "epoch": 1.4304993252361673,
      "grad_norm": 0.8606949214353987,
      "learning_rate": 3.5928162903953347e-05,
      "loss": 0.3449,
      "step": 265
    },
    {
      "epoch": 1.435897435897436,
      "grad_norm": 0.8688526476474914,
      "learning_rate": 3.5882378595905195e-05,
      "loss": 0.4602,
      "step": 266
    },
    {
      "epoch": 1.4412955465587045,
      "grad_norm": 0.7995357785557587,
      "learning_rate": 3.583636784004904e-05,
      "loss": 0.5264,
      "step": 267
    },
    {
      "epoch": 1.446693657219973,
      "grad_norm": 0.8388725595998526,
      "learning_rate": 3.5790131292397135e-05,
      "loss": 0.3738,
      "step": 268
    },
    {
      "epoch": 1.4520917678812415,
      "grad_norm": 0.845571143136271,
      "learning_rate": 3.5743669612181004e-05,
      "loss": 0.3907,
      "step": 269
    },
    {
      "epoch": 1.45748987854251,
      "grad_norm": 0.8086293381970037,
      "learning_rate": 3.569698346184209e-05,
      "loss": 0.5639,
      "step": 270
    },
    {
      "epoch": 1.4628879892037787,
      "grad_norm": 0.9383168628054654,
      "learning_rate": 3.5650073507022286e-05,
      "loss": 0.3871,
      "step": 271
    },
    {
      "epoch": 1.4682860998650473,
      "grad_norm": 0.8681442872940774,
      "learning_rate": 3.560294041655442e-05,
      "loss": 0.397,
      "step": 272
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 0.8268737914017257,
      "learning_rate": 3.555558486245277e-05,
      "loss": 0.3486,
      "step": 273
    },
    {
      "epoch": 1.4790823211875843,
      "grad_norm": 0.8680158503834453,
      "learning_rate": 3.550800751990343e-05,
      "loss": 0.3518,
      "step": 274
    },
    {
      "epoch": 1.484480431848853,
      "grad_norm": 0.7711969794181421,
      "learning_rate": 3.546020906725474e-05,
      "loss": 0.3297,
      "step": 275
    },
    {
      "epoch": 1.4898785425101215,
      "grad_norm": 0.7746266967179702,
      "learning_rate": 3.5412190186007573e-05,
      "loss": 0.3296,
      "step": 276
    },
    {
      "epoch": 1.49527665317139,
      "grad_norm": 0.866167286472648,
      "learning_rate": 3.5363951560805615e-05,
      "loss": 0.4019,
      "step": 277
    },
    {
      "epoch": 1.5006747638326585,
      "grad_norm": 0.8349494942186633,
      "learning_rate": 3.5315493879425634e-05,
      "loss": 0.4084,
      "step": 278
    },
    {
      "epoch": 1.5060728744939271,
      "grad_norm": 0.7622596623778285,
      "learning_rate": 3.526681783276765e-05,
      "loss": 0.4291,
      "step": 279
    },
    {
      "epoch": 1.5114709851551957,
      "grad_norm": 0.8720420315217544,
      "learning_rate": 3.52179241148451e-05,
      "loss": 0.37,
      "step": 280
    },
    {
      "epoch": 1.5168690958164643,
      "grad_norm": 0.920897251888379,
      "learning_rate": 3.5168813422774923e-05,
      "loss": 0.3914,
      "step": 281
    },
    {
      "epoch": 1.522267206477733,
      "grad_norm": 0.8228842239801039,
      "learning_rate": 3.511948645676764e-05,
      "loss": 0.3532,
      "step": 282
    },
    {
      "epoch": 1.5276653171390013,
      "grad_norm": 0.7888080706333876,
      "learning_rate": 3.506994392011737e-05,
      "loss": 0.5181,
      "step": 283
    },
    {
      "epoch": 1.53306342780027,
      "grad_norm": 0.8705748415419389,
      "learning_rate": 3.5020186519191775e-05,
      "loss": 0.3917,
      "step": 284
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.8946816906145529,
      "learning_rate": 3.497021496342203e-05,
      "loss": 0.4199,
      "step": 285
    },
    {
      "epoch": 1.543859649122807,
      "grad_norm": 0.7859844266157671,
      "learning_rate": 3.492002996529267e-05,
      "loss": 0.3159,
      "step": 286
    },
    {
      "epoch": 1.5492577597840755,
      "grad_norm": 0.8271181630355136,
      "learning_rate": 3.486963224033146e-05,
      "loss": 0.5559,
      "step": 287
    },
    {
      "epoch": 1.5546558704453441,
      "grad_norm": 0.8337970276185057,
      "learning_rate": 3.4819022507099184e-05,
      "loss": 0.3672,
      "step": 288
    },
    {
      "epoch": 1.5600539811066128,
      "grad_norm": 0.9863741535233492,
      "learning_rate": 3.4768201487179395e-05,
      "loss": 0.3333,
      "step": 289
    },
    {
      "epoch": 1.5654520917678814,
      "grad_norm": 1.0399201093786894,
      "learning_rate": 3.471716990516812e-05,
      "loss": 0.4489,
      "step": 290
    },
    {
      "epoch": 1.5708502024291497,
      "grad_norm": 0.8592335687596742,
      "learning_rate": 3.466592848866356e-05,
      "loss": 0.3636,
      "step": 291
    },
    {
      "epoch": 1.5762483130904184,
      "grad_norm": 0.9016848617465185,
      "learning_rate": 3.4614477968255663e-05,
      "loss": 0.4327,
      "step": 292
    },
    {
      "epoch": 1.581646423751687,
      "grad_norm": 0.8819320121086326,
      "learning_rate": 3.456281907751577e-05,
      "loss": 0.351,
      "step": 293
    },
    {
      "epoch": 1.5870445344129553,
      "grad_norm": 0.8025348430113183,
      "learning_rate": 3.4510952552986114e-05,
      "loss": 0.3266,
      "step": 294
    },
    {
      "epoch": 1.592442645074224,
      "grad_norm": 0.8800970701171718,
      "learning_rate": 3.445887913416932e-05,
      "loss": 0.3621,
      "step": 295
    },
    {
      "epoch": 1.5978407557354926,
      "grad_norm": 0.8808067572947517,
      "learning_rate": 3.440659956351788e-05,
      "loss": 0.3376,
      "step": 296
    },
    {
      "epoch": 1.6032388663967612,
      "grad_norm": 1.1199823914819425,
      "learning_rate": 3.435411458642357e-05,
      "loss": 0.3798,
      "step": 297
    },
    {
      "epoch": 1.6086369770580298,
      "grad_norm": 1.0162058885262826,
      "learning_rate": 3.430142495120678e-05,
      "loss": 0.3566,
      "step": 298
    },
    {
      "epoch": 1.6140350877192984,
      "grad_norm": 0.8782900680409256,
      "learning_rate": 3.4248531409105896e-05,
      "loss": 0.3372,
      "step": 299
    },
    {
      "epoch": 1.6194331983805668,
      "grad_norm": 0.8000888023404262,
      "learning_rate": 3.419543471426657e-05,
      "loss": 0.3644,
      "step": 300
    },
    {
      "epoch": 1.6248313090418354,
      "grad_norm": 0.843844017800044,
      "learning_rate": 3.4142135623730954e-05,
      "loss": 0.385,
      "step": 301
    },
    {
      "epoch": 1.6302294197031038,
      "grad_norm": 0.8497338959321703,
      "learning_rate": 3.408863489742692e-05,
      "loss": 0.3938,
      "step": 302
    },
    {
      "epoch": 1.6356275303643724,
      "grad_norm": 0.774530442372388,
      "learning_rate": 3.403493329815724e-05,
      "loss": 0.3193,
      "step": 303
    },
    {
      "epoch": 1.641025641025641,
      "grad_norm": 0.8713896929383775,
      "learning_rate": 3.3981031591588665e-05,
      "loss": 0.352,
      "step": 304
    },
    {
      "epoch": 1.6464237516869096,
      "grad_norm": 0.8388021660084379,
      "learning_rate": 3.392693054624106e-05,
      "loss": 0.348,
      "step": 305
    },
    {
      "epoch": 1.6518218623481782,
      "grad_norm": 0.8436318600036422,
      "learning_rate": 3.387263093347641e-05,
      "loss": 0.367,
      "step": 306
    },
    {
      "epoch": 1.6572199730094468,
      "grad_norm": 0.8392694994613278,
      "learning_rate": 3.381813352748783e-05,
      "loss": 0.3621,
      "step": 307
    },
    {
      "epoch": 1.6626180836707154,
      "grad_norm": 0.8188401843130375,
      "learning_rate": 3.376343910528855e-05,
      "loss": 0.3918,
      "step": 308
    },
    {
      "epoch": 1.6680161943319838,
      "grad_norm": 0.8043575662262005,
      "learning_rate": 3.37085484467008e-05,
      "loss": 0.3546,
      "step": 309
    },
    {
      "epoch": 1.6734143049932524,
      "grad_norm": 0.8941674035447104,
      "learning_rate": 3.365346233434472e-05,
      "loss": 0.3941,
      "step": 310
    },
    {
      "epoch": 1.6788124156545208,
      "grad_norm": 0.8580597773997315,
      "learning_rate": 3.359818155362719e-05,
      "loss": 0.4437,
      "step": 311
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 0.9187515726131583,
      "learning_rate": 3.354270689273064e-05,
      "loss": 0.3914,
      "step": 312
    },
    {
      "epoch": 1.689608636977058,
      "grad_norm": 0.8126321379065723,
      "learning_rate": 3.348703914260178e-05,
      "loss": 0.3402,
      "step": 313
    },
    {
      "epoch": 1.6950067476383266,
      "grad_norm": 0.9023277271934033,
      "learning_rate": 3.3431179096940375e-05,
      "loss": 0.3702,
      "step": 314
    },
    {
      "epoch": 1.7004048582995952,
      "grad_norm": 0.9134916790344669,
      "learning_rate": 3.3375127552187885e-05,
      "loss": 0.399,
      "step": 315
    },
    {
      "epoch": 1.7058029689608638,
      "grad_norm": 0.7631514166634741,
      "learning_rate": 3.331888530751613e-05,
      "loss": 0.3151,
      "step": 316
    },
    {
      "epoch": 1.7112010796221324,
      "grad_norm": 0.9238661037906718,
      "learning_rate": 3.326245316481591e-05,
      "loss": 0.4818,
      "step": 317
    },
    {
      "epoch": 1.7165991902834008,
      "grad_norm": 0.7902959093954609,
      "learning_rate": 3.320583192868552e-05,
      "loss": 0.3446,
      "step": 318
    },
    {
      "epoch": 1.7219973009446694,
      "grad_norm": 0.8651279342148734,
      "learning_rate": 3.3149022406419335e-05,
      "loss": 0.3842,
      "step": 319
    },
    {
      "epoch": 1.7273954116059378,
      "grad_norm": 0.872911869425239,
      "learning_rate": 3.309202540799628e-05,
      "loss": 0.4333,
      "step": 320
    },
    {
      "epoch": 1.7327935222672064,
      "grad_norm": 0.819055468707437,
      "learning_rate": 3.303484174606824e-05,
      "loss": 0.3727,
      "step": 321
    },
    {
      "epoch": 1.738191632928475,
      "grad_norm": 0.7643068590729667,
      "learning_rate": 3.297747223594858e-05,
      "loss": 0.3481,
      "step": 322
    },
    {
      "epoch": 1.7435897435897436,
      "grad_norm": 0.827195670933345,
      "learning_rate": 3.291991769560038e-05,
      "loss": 0.355,
      "step": 323
    },
    {
      "epoch": 1.7489878542510122,
      "grad_norm": 0.883234143840346,
      "learning_rate": 3.286217894562489e-05,
      "loss": 0.4227,
      "step": 324
    },
    {
      "epoch": 1.7543859649122808,
      "grad_norm": 0.8280893038921171,
      "learning_rate": 3.280425680924976e-05,
      "loss": 0.3337,
      "step": 325
    },
    {
      "epoch": 1.7597840755735492,
      "grad_norm": 0.8012629303209649,
      "learning_rate": 3.274615211231735e-05,
      "loss": 0.3394,
      "step": 326
    },
    {
      "epoch": 1.7651821862348178,
      "grad_norm": 0.8237991067683915,
      "learning_rate": 3.268786568327291e-05,
      "loss": 0.3799,
      "step": 327
    },
    {
      "epoch": 1.7705802968960864,
      "grad_norm": 0.9040042326048502,
      "learning_rate": 3.262939835315281e-05,
      "loss": 0.4133,
      "step": 328
    },
    {
      "epoch": 1.7759784075573548,
      "grad_norm": 0.8013478185913319,
      "learning_rate": 3.2570750955572643e-05,
      "loss": 0.3828,
      "step": 329
    },
    {
      "epoch": 1.7813765182186234,
      "grad_norm": 0.7411195677522415,
      "learning_rate": 3.25119243267154e-05,
      "loss": 0.3108,
      "step": 330
    },
    {
      "epoch": 1.786774628879892,
      "grad_norm": 0.8217449129947968,
      "learning_rate": 3.2452919305319505e-05,
      "loss": 0.3499,
      "step": 331
    },
    {
      "epoch": 1.7921727395411606,
      "grad_norm": 0.8595509733708185,
      "learning_rate": 3.239373673266686e-05,
      "loss": 0.3666,
      "step": 332
    },
    {
      "epoch": 1.7975708502024292,
      "grad_norm": 0.8868926190097939,
      "learning_rate": 3.2334377452570866e-05,
      "loss": 0.4031,
      "step": 333
    },
    {
      "epoch": 1.8029689608636978,
      "grad_norm": 0.7623837903935355,
      "learning_rate": 3.227484231136437e-05,
      "loss": 0.3092,
      "step": 334
    },
    {
      "epoch": 1.8083670715249662,
      "grad_norm": 0.8231066068295543,
      "learning_rate": 3.2215132157887636e-05,
      "loss": 0.3553,
      "step": 335
    },
    {
      "epoch": 1.8137651821862348,
      "grad_norm": 0.8069543975515238,
      "learning_rate": 3.2155247843476204e-05,
      "loss": 0.353,
      "step": 336
    },
    {
      "epoch": 1.8191632928475032,
      "grad_norm": 0.7982465506547287,
      "learning_rate": 3.209519022194875e-05,
      "loss": 0.3698,
      "step": 337
    },
    {
      "epoch": 1.8245614035087718,
      "grad_norm": 0.8918760369983304,
      "learning_rate": 3.203496014959497e-05,
      "loss": 0.4392,
      "step": 338
    },
    {
      "epoch": 1.8299595141700404,
      "grad_norm": 0.9160858315953113,
      "learning_rate": 3.197455848516328e-05,
      "loss": 0.3949,
      "step": 339
    },
    {
      "epoch": 1.835357624831309,
      "grad_norm": 0.9222161272336913,
      "learning_rate": 3.191398608984867e-05,
      "loss": 0.3974,
      "step": 340
    },
    {
      "epoch": 1.8407557354925776,
      "grad_norm": 0.7564011344843046,
      "learning_rate": 3.185324382728034e-05,
| "loss": 0.3199, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.8272808795137335, | |
| "learning_rate": 3.179233256350944e-05, | |
| "loss": 0.3794, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.8515519568151149, | |
| "grad_norm": 0.8965111151969724, | |
| "learning_rate": 3.173125316699671e-05, | |
| "loss": 0.5687, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.8569500674763832, | |
| "grad_norm": 0.8269731890692298, | |
| "learning_rate": 3.1670006508600076e-05, | |
| "loss": 0.3222, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.8623481781376519, | |
| "grad_norm": 0.9313561418996097, | |
| "learning_rate": 3.160859346156227e-05, | |
| "loss": 0.3745, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.8677462887989202, | |
| "grad_norm": 0.8236123893552746, | |
| "learning_rate": 3.1547014901498344e-05, | |
| "loss": 0.3848, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.8731443994601888, | |
| "grad_norm": 0.8881682095506358, | |
| "learning_rate": 3.148527170638322e-05, | |
| "loss": 0.437, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.8785425101214575, | |
| "grad_norm": 0.816673941404442, | |
| "learning_rate": 3.1423364756539135e-05, | |
| "loss": 0.3823, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.883940620782726, | |
| "grad_norm": 0.8353539656538781, | |
| "learning_rate": 3.136129493462312e-05, | |
| "loss": 0.3817, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.8893387314439947, | |
| "grad_norm": 0.8623642136451524, | |
| "learning_rate": 3.12990631256144e-05, | |
| "loss": 0.3773, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.8947368421052633, | |
| "grad_norm": 0.8785883532356465, | |
| "learning_rate": 3.1236670216801786e-05, | |
| "loss": 0.3782, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.9001349527665317, | |
| "grad_norm": 0.9116852292901764, | |
| "learning_rate": 3.117411709777101e-05, | |
| "loss": 0.4707, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.9055330634278003, | |
| "grad_norm": 0.8258672453724147, | |
| "learning_rate": 3.111140466039205e-05, | |
| "loss": 0.3295, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.9109311740890689, | |
| "grad_norm": 0.8047035314518469, | |
| "learning_rate": 3.104853379880641e-05, | |
| "loss": 0.5049, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.9163292847503373, | |
| "grad_norm": 0.8224201466858146, | |
| "learning_rate": 3.0985505409414395e-05, | |
| "loss": 0.3459, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.9217273954116059, | |
| "grad_norm": 0.8194601649707134, | |
| "learning_rate": 3.09223203908623e-05, | |
| "loss": 0.5184, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.9271255060728745, | |
| "grad_norm": 1.9213742164403345, | |
| "learning_rate": 3.085897964402958e-05, | |
| "loss": 0.4168, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.932523616734143, | |
| "grad_norm": 0.7976219658784348, | |
| "learning_rate": 3.0795484072016066e-05, | |
| "loss": 0.3383, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.9379217273954117, | |
| "grad_norm": 0.8829544955168173, | |
| "learning_rate": 3.073183458012906e-05, | |
| "loss": 0.4246, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.9433198380566803, | |
| "grad_norm": 0.7999199853428338, | |
| "learning_rate": 3.0668032075870394e-05, | |
| "loss": 0.3316, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.9487179487179487, | |
| "grad_norm": 0.7754126352865167, | |
| "learning_rate": 3.060407746892354e-05, | |
| "loss": 0.3532, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.9541160593792173, | |
| "grad_norm": 0.8116757582096946, | |
| "learning_rate": 3.053997167114062e-05, | |
| "loss": 0.3592, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.9595141700404857, | |
| "grad_norm": 0.780747731868162, | |
| "learning_rate": 3.0475715596529402e-05, | |
| "loss": 0.3831, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.9649122807017543, | |
| "grad_norm": 0.7784545333341477, | |
| "learning_rate": 3.041131016124026e-05, | |
| "loss": 0.3469, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.9703103913630229, | |
| "grad_norm": 0.780104268381351, | |
| "learning_rate": 3.0346756283553138e-05, | |
| "loss": 0.3173, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.9757085020242915, | |
| "grad_norm": 0.868243693709775, | |
| "learning_rate": 3.0282054883864434e-05, | |
| "loss": 0.4914, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.98110661268556, | |
| "grad_norm": 0.8092251365777564, | |
| "learning_rate": 3.0217206884673898e-05, | |
| "loss": 0.4763, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.9865047233468287, | |
| "grad_norm": 0.9194927920738091, | |
| "learning_rate": 3.015221321057145e-05, | |
| "loss": 0.4446, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.9919028340080973, | |
| "grad_norm": 0.8487891788474984, | |
| "learning_rate": 3.0087074788224016e-05, | |
| "loss": 0.5507, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.9973009446693657, | |
| "grad_norm": 0.8108830919109473, | |
| "learning_rate": 3.0021792546362332e-05, | |
| "loss": 0.3367, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.002699055330634, | |
| "grad_norm": 1.8729539106932147, | |
| "learning_rate": 2.9956367415767657e-05, | |
| "loss": 0.4987, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.0080971659919027, | |
| "grad_norm": 0.7462995058419852, | |
| "learning_rate": 2.9890800329258554e-05, | |
| "loss": 0.1457, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.0134952766531713, | |
| "grad_norm": 0.6392949061952808, | |
| "learning_rate": 2.982509222167755e-05, | |
| "loss": 0.1257, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.01889338731444, | |
| "grad_norm": 0.7022587732356804, | |
| "learning_rate": 2.975924402987783e-05, | |
| "loss": 0.1274, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.0242914979757085, | |
| "grad_norm": 0.77290317342361, | |
| "learning_rate": 2.969325669270987e-05, | |
| "loss": 0.1244, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.029689608636977, | |
| "grad_norm": 0.7674422966027143, | |
| "learning_rate": 2.9627131151008046e-05, | |
| "loss": 0.1215, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.0350877192982457, | |
| "grad_norm": 0.819257096752639, | |
| "learning_rate": 2.9560868347577235e-05, | |
| "loss": 0.1414, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.0404858299595143, | |
| "grad_norm": 0.7484653317083918, | |
| "learning_rate": 2.9494469227179375e-05, | |
| "loss": 0.2481, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.045883940620783, | |
| "grad_norm": 0.6741305314820067, | |
| "learning_rate": 2.9427934736519962e-05, | |
| "loss": 0.1336, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.051282051282051, | |
| "grad_norm": 0.6164943712448056, | |
| "learning_rate": 2.9361265824234575e-05, | |
| "loss": 0.1222, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.0566801619433197, | |
| "grad_norm": 0.6852123731952208, | |
| "learning_rate": 2.9294463440875375e-05, | |
| "loss": 0.1449, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.0620782726045883, | |
| "grad_norm": 0.6244781256553339, | |
| "learning_rate": 2.9227528538897503e-05, | |
| "loss": 0.1723, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.067476383265857, | |
| "grad_norm": 0.7978024679197341, | |
| "learning_rate": 2.916046207264554e-05, | |
| "loss": 0.1326, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.0728744939271255, | |
| "grad_norm": 0.6147819567342936, | |
| "learning_rate": 2.9093264998339875e-05, | |
| "loss": 0.1294, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.078272604588394, | |
| "grad_norm": 0.6787811592156624, | |
| "learning_rate": 2.9025938274063077e-05, | |
| "loss": 0.1497, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.0836707152496627, | |
| "grad_norm": 0.7317408188133651, | |
| "learning_rate": 2.8958482859746242e-05, | |
| "loss": 0.1887, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.0890688259109313, | |
| "grad_norm": 0.6723746847129357, | |
| "learning_rate": 2.889089971715532e-05, | |
| "loss": 0.139, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.0944669365722, | |
| "grad_norm": 0.6818331924468926, | |
| "learning_rate": 2.8823189809877365e-05, | |
| "loss": 0.1342, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.099865047233468, | |
| "grad_norm": 0.5980820718094894, | |
| "learning_rate": 2.8755354103306808e-05, | |
| "loss": 0.2167, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.1052631578947367, | |
| "grad_norm": 0.6648551106554124, | |
| "learning_rate": 2.868739356463174e-05, | |
| "loss": 0.1224, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.1106612685560053, | |
| "grad_norm": 0.7486161553352669, | |
| "learning_rate": 2.8619309162820048e-05, | |
| "loss": 0.1267, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.116059379217274, | |
| "grad_norm": 0.6867674315095388, | |
| "learning_rate": 2.8551101868605644e-05, | |
| "loss": 0.2035, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.1214574898785425, | |
| "grad_norm": 0.6878121126213758, | |
| "learning_rate": 2.8482772654474624e-05, | |
| "loss": 0.2658, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.126855600539811, | |
| "grad_norm": 0.6678383543442121, | |
| "learning_rate": 2.841432249465138e-05, | |
| "loss": 0.1294, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.1322537112010798, | |
| "grad_norm": 0.6372763697558534, | |
| "learning_rate": 2.8345752365084744e-05, | |
| "loss": 0.131, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.1376518218623484, | |
| "grad_norm": 0.6334966824967888, | |
| "learning_rate": 2.8277063243434036e-05, | |
| "loss": 0.1268, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.1430499325236165, | |
| "grad_norm": 0.6404580338280741, | |
| "learning_rate": 2.820825610905514e-05, | |
| "loss": 0.1153, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.148448043184885, | |
| "grad_norm": 0.6466950437091068, | |
| "learning_rate": 2.8139331942986552e-05, | |
| "loss": 0.1215, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.1538461538461537, | |
| "grad_norm": 0.6714487489560431, | |
| "learning_rate": 2.8070291727935385e-05, | |
| "loss": 0.1218, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.1592442645074224, | |
| "grad_norm": 0.6006007696911629, | |
| "learning_rate": 2.8001136448263345e-05, | |
| "loss": 0.1183, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.164642375168691, | |
| "grad_norm": 0.6386900747836864, | |
| "learning_rate": 2.7931867089972703e-05, | |
| "loss": 0.3012, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.1700404858299596, | |
| "grad_norm": 0.6241422911928288, | |
| "learning_rate": 2.7862484640692265e-05, | |
| "loss": 0.1267, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.175438596491228, | |
| "grad_norm": 0.698965066265789, | |
| "learning_rate": 2.7792990089663244e-05, | |
| "loss": 0.2367, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.180836707152497, | |
| "grad_norm": 0.6577790993693683, | |
| "learning_rate": 2.772338442772517e-05, | |
| "loss": 0.1247, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.1862348178137654, | |
| "grad_norm": 0.7057930356119323, | |
| "learning_rate": 2.7653668647301797e-05, | |
| "loss": 0.1416, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.1916329284750335, | |
| "grad_norm": 0.6283432520769865, | |
| "learning_rate": 2.758384374238691e-05, | |
| "loss": 0.1171, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.197031039136302, | |
| "grad_norm": 0.6758905973282627, | |
| "learning_rate": 2.751391070853017e-05, | |
| "loss": 0.1277, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.2024291497975708, | |
| "grad_norm": 0.6936284196657481, | |
| "learning_rate": 2.744387054282293e-05, | |
| "loss": 0.1423, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.2078272604588394, | |
| "grad_norm": 0.6341950745676999, | |
| "learning_rate": 2.737372424388398e-05, | |
| "loss": 0.2242, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.213225371120108, | |
| "grad_norm": 0.6693004983980072, | |
| "learning_rate": 2.7303472811845373e-05, | |
| "loss": 0.1232, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.2186234817813766, | |
| "grad_norm": 0.6629406429715882, | |
| "learning_rate": 2.72331172483381e-05, | |
| "loss": 0.1658, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.224021592442645, | |
| "grad_norm": 0.6127381303453924, | |
| "learning_rate": 2.7162658556477856e-05, | |
| "loss": 0.1371, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.229419703103914, | |
| "grad_norm": 0.679698173745626, | |
| "learning_rate": 2.7092097740850712e-05, | |
| "loss": 0.1316, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.234817813765182, | |
| "grad_norm": 0.5950073990369081, | |
| "learning_rate": 2.7021435807498803e-05, | |
| "loss": 0.1207, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.2402159244264506, | |
| "grad_norm": 0.6677093728594548, | |
| "learning_rate": 2.6950673763905976e-05, | |
| "loss": 0.1327, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.245614035087719, | |
| "grad_norm": 0.9128432310383863, | |
| "learning_rate": 2.6879812618983427e-05, | |
| "loss": 0.1654, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.251012145748988, | |
| "grad_norm": 0.6476391269524641, | |
| "learning_rate": 2.680885338305532e-05, | |
| "loss": 0.1275, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.2564102564102564, | |
| "grad_norm": 0.5686092680345144, | |
| "learning_rate": 2.6737797067844403e-05, | |
| "loss": 0.1103, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.261808367071525, | |
| "grad_norm": 0.7170335546600183, | |
| "learning_rate": 2.6666644686457527e-05, | |
| "loss": 0.2701, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.2672064777327936, | |
| "grad_norm": 0.6208498042067502, | |
| "learning_rate": 2.6595397253371263e-05, | |
| "loss": 0.1189, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.272604588394062, | |
| "grad_norm": 0.6398812216990059, | |
| "learning_rate": 2.652405578441739e-05, | |
| "loss": 0.1232, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.278002699055331, | |
| "grad_norm": 0.7138464146301546, | |
| "learning_rate": 2.6452621296768444e-05, | |
| "loss": 0.1549, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.283400809716599, | |
| "grad_norm": 0.6592603480537171, | |
| "learning_rate": 2.63810948089232e-05, | |
| "loss": 0.1369, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.2887989203778676, | |
| "grad_norm": 0.6546216583647926, | |
| "learning_rate": 2.630947734069216e-05, | |
| "loss": 0.1276, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.294197031039136, | |
| "grad_norm": 0.5970453793673439, | |
| "learning_rate": 2.623776991318298e-05, | |
| "loss": 0.1096, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.299595141700405, | |
| "grad_norm": 0.6022630941533801, | |
| "learning_rate": 2.6165973548785985e-05, | |
| "loss": 0.1358, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.3049932523616734, | |
| "grad_norm": 0.5754057954915315, | |
| "learning_rate": 2.6094089271159497e-05, | |
| "loss": 0.1076, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.310391363022942, | |
| "grad_norm": 0.6455725563135731, | |
| "learning_rate": 2.6022118105215314e-05, | |
| "loss": 0.1219, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.3157894736842106, | |
| "grad_norm": 0.6533947853819206, | |
| "learning_rate": 2.595006107710406e-05, | |
| "loss": 0.1349, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.3211875843454792, | |
| "grad_norm": 0.5646029755765858, | |
| "learning_rate": 2.5877919214200575e-05, | |
| "loss": 0.1105, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.326585695006748, | |
| "grad_norm": 0.6305035499092596, | |
| "learning_rate": 2.580569354508925e-05, | |
| "loss": 0.1233, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.331983805668016, | |
| "grad_norm": 0.7303006034253096, | |
| "learning_rate": 2.5733385099549365e-05, | |
| "loss": 0.1488, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.3373819163292846, | |
| "grad_norm": 0.6328447736616827, | |
| "learning_rate": 2.566099490854041e-05, | |
| "loss": 0.1229, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.342780026990553, | |
| "grad_norm": 0.8566015327759697, | |
| "learning_rate": 2.5588524004187395e-05, | |
| "loss": 0.1509, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.348178137651822, | |
| "grad_norm": 0.6624968711715733, | |
| "learning_rate": 2.5515973419766117e-05, | |
| "loss": 0.3453, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.3535762483130904, | |
| "grad_norm": 0.6945361115581712, | |
| "learning_rate": 2.5443344189688442e-05, | |
| "loss": 0.2186, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.358974358974359, | |
| "grad_norm": 0.6241503828356365, | |
| "learning_rate": 2.5370637349487537e-05, | |
| "loss": 0.1316, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.3643724696356276, | |
| "grad_norm": 0.6349270911136394, | |
| "learning_rate": 2.5297853935803134e-05, | |
| "loss": 0.1438, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.3697705802968962, | |
| "grad_norm": 0.6264630421591376, | |
| "learning_rate": 2.522499498636673e-05, | |
| "loss": 0.1287, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.375168690958165, | |
| "grad_norm": 0.6494294150925718, | |
| "learning_rate": 2.5152061539986786e-05, | |
| "loss": 0.1237, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.380566801619433, | |
| "grad_norm": 0.6590711569125003, | |
| "learning_rate": 2.5079054636533943e-05, | |
| "loss": 0.1477, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.3859649122807016, | |
| "grad_norm": 0.6017175013652537, | |
| "learning_rate": 2.5005975316926155e-05, | |
| "loss": 0.1102, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.3913630229419702, | |
| "grad_norm": 0.6640068999885245, | |
| "learning_rate": 2.4932824623113904e-05, | |
| "loss": 0.169, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.396761133603239, | |
| "grad_norm": 0.5766339469065881, | |
| "learning_rate": 2.485960359806528e-05, | |
| "loss": 0.2012, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.4021592442645074, | |
| "grad_norm": 0.7291306101333911, | |
| "learning_rate": 2.4786313285751158e-05, | |
| "loss": 0.1579, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.407557354925776, | |
| "grad_norm": 0.6380383823229943, | |
| "learning_rate": 2.4712954731130295e-05, | |
| "loss": 0.1661, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.4129554655870447, | |
| "grad_norm": 0.6659006653830499, | |
| "learning_rate": 2.4639528980134424e-05, | |
| "loss": 0.128, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.4183535762483133, | |
| "grad_norm": 0.6099007883870388, | |
| "learning_rate": 2.4566037079653372e-05, | |
| "loss": 0.1083, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.423751686909582, | |
| "grad_norm": 0.613692855683778, | |
| "learning_rate": 2.4492480077520084e-05, | |
| "loss": 0.1223, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.42914979757085, | |
| "grad_norm": 0.6715299948015367, | |
| "learning_rate": 2.441885902249573e-05, | |
| "loss": 0.1248, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.4345479082321186, | |
| "grad_norm": 0.610328390420646, | |
| "learning_rate": 2.434517496425474e-05, | |
| "loss": 0.1181, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.4399460188933872, | |
| "grad_norm": 0.6479329176924008, | |
| "learning_rate": 2.4271428953369805e-05, | |
| "loss": 0.3185, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.445344129554656, | |
| "grad_norm": 0.700649740705671, | |
| "learning_rate": 2.419762204129695e-05, | |
| "loss": 0.138, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.4507422402159245, | |
| "grad_norm": 0.5760283063402104, | |
| "learning_rate": 2.412375528036051e-05, | |
| "loss": 0.1096, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.456140350877193, | |
| "grad_norm": 0.6796450613672332, | |
| "learning_rate": 2.4049829723738127e-05, | |
| "loss": 0.1323, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.4615384615384617, | |
| "grad_norm": 0.5519373684819371, | |
| "learning_rate": 2.3975846425445745e-05, | |
| "loss": 0.104, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.4669365721997303, | |
| "grad_norm": 0.5941230543506764, | |
| "learning_rate": 2.390180644032257e-05, | |
| "loss": 0.1159, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.472334682860999, | |
| "grad_norm": 0.6005019267580625, | |
| "learning_rate": 2.382771082401605e-05, | |
| "loss": 0.1127, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.477732793522267, | |
| "grad_norm": 0.6231210450690562, | |
| "learning_rate": 2.3753560632966803e-05, | |
| "loss": 0.2765, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.4831309041835357, | |
| "grad_norm": 0.6761993902876848, | |
| "learning_rate": 2.367935692439357e-05, | |
| "loss": 0.1311, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.4885290148448043, | |
| "grad_norm": 0.6296854883290703, | |
| "learning_rate": 2.360510075627812e-05, | |
| "loss": 0.1246, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.493927125506073, | |
| "grad_norm": 0.631923430747307, | |
| "learning_rate": 2.3530793187350196e-05, | |
| "loss": 0.1194, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.4993252361673415, | |
| "grad_norm": 0.5962340880765427, | |
| "learning_rate": 2.3456435277072392e-05, | |
| "loss": 0.1114, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.50472334682861, | |
| "grad_norm": 0.611293805000405, | |
| "learning_rate": 2.3382028085625074e-05, | |
| "loss": 0.1192, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.5101214574898787, | |
| "grad_norm": 0.6017071362047015, | |
| "learning_rate": 2.3307572673891227e-05, | |
| "loss": 0.1186, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.515519568151147, | |
| "grad_norm": 0.6608480467314211, | |
| "learning_rate": 2.3233070103441372e-05, | |
| "loss": 0.1269, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.520917678812416, | |
| "grad_norm": 0.6286315055894832, | |
| "learning_rate": 2.3158521436518395e-05, | |
| "loss": 0.1439, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.526315789473684, | |
| "grad_norm": 0.5941211446808222, | |
| "learning_rate": 2.3083927736022423e-05, | |
| "loss": 0.1163, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.5317139001349527, | |
| "grad_norm": 0.6014018923806088, | |
| "learning_rate": 2.3009290065495663e-05, | |
| "loss": 0.1165, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.5371120107962213, | |
| "grad_norm": 0.6364853987180407, | |
| "learning_rate": 2.2934609489107236e-05, | |
| "loss": 0.1201, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.54251012145749, | |
| "grad_norm": 0.6044054475261765, | |
| "learning_rate": 2.285988707163801e-05, | |
| "loss": 0.1091, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.5479082321187585, | |
| "grad_norm": 0.6087359767421359, | |
| "learning_rate": 2.2785123878465414e-05, | |
| "loss": 0.1148, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.553306342780027, | |
| "grad_norm": 0.69235536929551, | |
| "learning_rate": 2.2710320975548238e-05, | |
| "loss": 0.1299, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.5587044534412957, | |
| "grad_norm": 0.6088709234303951, | |
| "learning_rate": 2.2635479429411463e-05, | |
| "loss": 0.1171, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.564102564102564, | |
| "grad_norm": 0.6626427945597948, | |
| "learning_rate": 2.256060030713102e-05, | |
| "loss": 0.1271, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.569500674763833, | |
| "grad_norm": 0.6307597674316254, | |
| "learning_rate": 2.248568467631862e-05, | |
| "loss": 0.2658, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.574898785425101, | |
| "grad_norm": 0.6601911176181631, | |
| "learning_rate": 2.2410733605106462e-05, | |
| "loss": 0.1292, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.5802968960863697, | |
| "grad_norm": 0.7416111139812673, | |
| "learning_rate": 2.2335748162132093e-05, | |
| "loss": 0.146, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.5856950067476383, | |
| "grad_norm": 0.589360245246151, | |
| "learning_rate": 2.2260729416523103e-05, | |
| "loss": 0.1111, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.591093117408907, | |
| "grad_norm": 0.654143642296817, | |
| "learning_rate": 2.2185678437881898e-05, | |
| "loss": 0.1198, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.5964912280701755, | |
| "grad_norm": 0.6608861416350591, | |
| "learning_rate": 2.2110596296270472e-05, | |
| "loss": 0.12, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.601889338731444, | |
| "grad_norm": 0.6681912905025659, | |
| "learning_rate": 2.2035484062195124e-05, | |
| "loss": 0.1309, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.6072874493927127, | |
| "grad_norm": 0.631378882096793, | |
| "learning_rate": 2.196034280659122e-05, | |
| "loss": 0.1181, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.612685560053981, | |
| "grad_norm": 0.6527132040126822, | |
| "learning_rate": 2.188517360080788e-05, | |
| "loss": 0.1394, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.6180836707152495, | |
| "grad_norm": 0.6227805441719897, | |
| "learning_rate": 2.180997751659276e-05, | |
| "loss": 0.1195, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.623481781376518, | |
| "grad_norm": 0.6802847699912662, | |
| "learning_rate": 2.1734755626076733e-05, | |
| "loss": 0.1395, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.6288798920377867, | |
| "grad_norm": 0.6888202624358717, | |
| "learning_rate": 2.1659509001758616e-05, | |
| "loss": 0.1325, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.6342780026990553, | |
| "grad_norm": 0.593385764404308, | |
| "learning_rate": 2.158423871648988e-05, | |
| "loss": 0.1113, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.639676113360324, | |
| "grad_norm": 0.6774506983126284, | |
| "learning_rate": 2.1508945843459322e-05, | |
| "loss": 0.1804, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.6450742240215925, | |
| "grad_norm": 0.6654559325765059, | |
| "learning_rate": 2.1433631456177823e-05, | |
| "loss": 0.307, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.650472334682861, | |
| "grad_norm": 0.5979358796010572, | |
| "learning_rate": 2.1358296628463008e-05, | |
| "loss": 0.1167, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.6558704453441297, | |
| "grad_norm": 0.7087596924028456, | |
| "learning_rate": 2.1282942434423917e-05, | |
| "loss": 0.152, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.661268556005398, | |
| "grad_norm": 0.5914031045078398, | |
| "learning_rate": 2.1207569948445724e-05, | |
| "loss": 0.2739, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.6479960919806412, | |
| "learning_rate": 2.11321802451744e-05, | |
| "loss": 0.1252, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.672064777327935, | |
| "grad_norm": 0.6359038955310647, | |
| "learning_rate": 2.1056774399501413e-05, | |
| "loss": 0.1181, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.6774628879892037, | |
| "grad_norm": 0.5845117483373671, | |
| "learning_rate": 2.0981353486548363e-05, | |
| "loss": 0.1045, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.6828609986504723, | |
| "grad_norm": 0.6312095850180834, | |
| "learning_rate": 2.0905918581651692e-05, | |
| "loss": 0.1247, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.688259109311741, | |
| "grad_norm": 0.7064036700451566, | |
| "learning_rate": 2.0830470760347326e-05, | |
| "loss": 0.1468, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.6936572199730096, | |
| "grad_norm": 0.6425598356622161, | |
| "learning_rate": 2.0755011098355366e-05, | |
| "loss": 0.1269, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.699055330634278, | |
| "grad_norm": 0.6089176429064123, | |
| "learning_rate": 2.067954067156472e-05, | |
| "loss": 0.1221, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.7044534412955468, | |
| "grad_norm": 0.6709598340951207, | |
| "learning_rate": 2.060406055601778e-05, | |
| "loss": 0.1408, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.709851551956815, | |
| "grad_norm": 0.6038478330611535, | |
| "learning_rate": 2.052857182789509e-05, | |
| "loss": 0.1069, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.7152496626180835, | |
| "grad_norm": 0.6385055235648402, | |
| "learning_rate": 2.0453075563499975e-05, | |
| "loss": 0.114, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.720647773279352, | |
| "grad_norm": 0.6961725778048614, | |
| "learning_rate": 2.0377572839243225e-05, | |
| "loss": 0.2043, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.7260458839406208, | |
| "grad_norm": 0.5770500121000621, | |
| "learning_rate": 2.030206473162772e-05, | |
| "loss": 0.2564, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.7314439946018894, | |
| "grad_norm": 0.5369215916183971, | |
| "learning_rate": 2.022655231723311e-05, | |
| "loss": 0.2308, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.736842105263158, | |
| "grad_norm": 0.6775333454012917, | |
| "learning_rate": 2.0151036672700437e-05, | |
| "loss": 0.1325, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.7422402159244266, | |
| "grad_norm": 0.627884242138156, | |
| "learning_rate": 2.0075518874716797e-05, | |
| "loss": 0.1085, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.7476383265856947, | |
| "grad_norm": 0.7044141840918825, | |
| "learning_rate": 2e-05, | |
| "loss": 0.124, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.753036437246964, | |
| "grad_norm": 0.6810850215625832, | |
| "learning_rate": 1.9924481125283203e-05, | |
| "loss": 0.5196, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.758434547908232, | |
| "grad_norm": 0.6985941640179243, | |
| "learning_rate": 1.984896332729957e-05, | |
| "loss": 0.1378, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.7638326585695006, | |
| "grad_norm": 0.6208939763658017, | |
| "learning_rate": 1.9773447682766894e-05, | |
| "loss": 0.1199, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.769230769230769, | |
| "grad_norm": 0.6960421491040145, | |
| "learning_rate": 1.9697935268372285e-05, | |
| "loss": 0.1435, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.7746288798920378, | |
| "grad_norm": 0.6479313802993825, | |
| "learning_rate": 1.9622427160756778e-05, | |
| "loss": 0.1189, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.7800269905533064, | |
| "grad_norm": 0.6257589956418593, | |
| "learning_rate": 1.9546924436500028e-05, | |
| "loss": 0.1192, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.785425101214575, | |
| "grad_norm": 0.612853830087127, | |
| "learning_rate": 1.9471428172104916e-05, | |
| "loss": 0.1295, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.7908232118758436, | |
| "grad_norm": 0.6341457718175875, | |
| "learning_rate": 1.9395939443982228e-05, | |
| "loss": 0.2511, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.7962213225371118, | |
| "grad_norm": 0.6179614260256036, | |
| "learning_rate": 1.9320459328435287e-05, | |
| "loss": 0.1408, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.801619433198381, | |
| "grad_norm": 0.6465167698062426, | |
| "learning_rate": 1.924498890164464e-05, | |
| "loss": 0.2563, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.807017543859649, | |
| "grad_norm": 0.6556794701046659, | |
| "learning_rate": 1.9169529239652678e-05, | |
| "loss": 0.2109, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.8124156545209176, | |
| "grad_norm": 0.5822517488639858, | |
| "learning_rate": 1.9094081418348318e-05, | |
| "loss": 0.1149, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.817813765182186, | |
| "grad_norm": 0.6311322525747073, | |
| "learning_rate": 1.901864651345164e-05, | |
| "loss": 0.1308, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.823211875843455, | |
| "grad_norm": 0.5898219875972766, | |
| "learning_rate": 1.894322560049859e-05, | |
| "loss": 0.1253, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.8286099865047234, | |
| "grad_norm": 0.5936183602331494, | |
| "learning_rate": 1.8867819754825602e-05, | |
| "loss": 0.1254, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.834008097165992, | |
| "grad_norm": 0.6117641560994483, | |
| "learning_rate": 1.879243005155428e-05, | |
| "loss": 0.1232, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.8394062078272606, | |
| "grad_norm": 0.6500880335763644, | |
| "learning_rate": 1.871705756557609e-05, | |
| "loss": 0.2541, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.8448043184885288, | |
| "grad_norm": 0.6058813838626976, | |
| "learning_rate": 1.8641703371536995e-05, | |
| "loss": 0.1253, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.850202429149798, | |
| "grad_norm": 0.6680465461222095, | |
| "learning_rate": 1.856636854382218e-05, | |
| "loss": 0.2805, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.855600539811066, | |
| "grad_norm": 0.6134739260909959, | |
| "learning_rate": 1.849105415654068e-05, | |
| "loss": 0.1168, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.8609986504723346, | |
| "grad_norm": 0.6617449814395124, | |
| "learning_rate": 1.841576128351013e-05, | |
| "loss": 0.1461, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.866396761133603, | |
| "grad_norm": 0.6389843047318983, | |
| "learning_rate": 1.8340490998241387e-05, | |
| "loss": 0.128, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.871794871794872, | |
| "grad_norm": 0.5738756222417001, | |
| "learning_rate": 1.8265244373923274e-05, | |
| "loss": 0.1077, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.8771929824561404, | |
| "grad_norm": 0.5919186419431655, | |
| "learning_rate": 1.8190022483407246e-05, | |
| "loss": 0.1072, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.882591093117409, | |
| "grad_norm": 0.683056671095403, | |
| "learning_rate": 1.8114826399192133e-05, | |
| "loss": 0.1212, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.8879892037786776, | |
| "grad_norm": 0.6129763131639642, | |
| "learning_rate": 1.8039657193408788e-05, | |
| "loss": 0.1171, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.893387314439946, | |
| "grad_norm": 0.5627763381998306, | |
| "learning_rate": 1.7964515937804875e-05, | |
| "loss": 0.1069, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.898785425101215, | |
| "grad_norm": 0.602936257523276, | |
| "learning_rate": 1.7889403703729535e-05, | |
| "loss": 0.1075, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.904183535762483, | |
| "grad_norm": 0.6225600024019384, | |
| "learning_rate": 1.7814321562118105e-05, | |
| "loss": 0.1192, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.9095816464237516, | |
| "grad_norm": 0.6232613214009397, | |
| "learning_rate": 1.7739270583476904e-05, | |
| "loss": 0.1129, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.91497975708502, | |
| "grad_norm": 0.7106160834112466, | |
| "learning_rate": 1.766425183786791e-05, | |
| "loss": 0.1325, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.920377867746289, | |
| "grad_norm": 0.6215307665098612, | |
| "learning_rate": 1.758926639489354e-05, | |
| "loss": 0.1152, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.9257759784075574, | |
| "grad_norm": 0.6493142964042798, | |
| "learning_rate": 1.7514315323681386e-05, | |
| "loss": 0.1351, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.931174089068826, | |
| "grad_norm": 0.5928462030642684, | |
| "learning_rate": 1.7439399692868985e-05, | |
| "loss": 0.1127, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.9365721997300946, | |
| "grad_norm": 0.7261667290131814, | |
| "learning_rate": 1.7364520570588543e-05, | |
| "loss": 0.1588, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.941970310391363, | |
| "grad_norm": 0.630870393655033, | |
| "learning_rate": 1.7289679024451772e-05, | |
| "loss": 0.1088, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.9473684210526314, | |
| "grad_norm": 0.6335205983688524, | |
| "learning_rate": 1.7214876121534592e-05, | |
| "loss": 0.0999, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.9527665317139, | |
| "grad_norm": 0.6865001571083343, | |
| "learning_rate": 1.7140112928361996e-05, | |
| "loss": 0.1231, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.9581646423751686, | |
| "grad_norm": 0.6326899562878843, | |
| "learning_rate": 1.7065390510892767e-05, | |
| "loss": 0.115, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.9635627530364372, | |
| "grad_norm": 0.6095222445393947, | |
| "learning_rate": 1.699070993450434e-05, | |
| "loss": 0.1022, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.968960863697706, | |
| "grad_norm": 0.671598010718977, | |
| "learning_rate": 1.6916072263977583e-05, | |
| "loss": 0.1718, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.9743589743589745, | |
| "grad_norm": 0.5878250512418469, | |
| "learning_rate": 1.6841478563481612e-05, | |
| "loss": 0.0996, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.979757085020243, | |
| "grad_norm": 0.7503089305640249, | |
| "learning_rate": 1.6766929896558638e-05, | |
| "loss": 0.2841, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.9851551956815117, | |
| "grad_norm": 0.6989279699107632, | |
| "learning_rate": 1.6692427326108776e-05, | |
| "loss": 0.1517, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.99055330634278, | |
| "grad_norm": 0.6049149193808451, | |
| "learning_rate": 1.6617971914374933e-05, | |
| "loss": 0.2593, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.9959514170040484, | |
| "grad_norm": 0.6071511813101513, | |
| "learning_rate": 1.654356472292761e-05, | |
| "loss": 0.119, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 3.001349527665317, | |
| "grad_norm": 2.191278573121276, | |
| "learning_rate": 1.6469206812649818e-05, | |
| "loss": 0.2184, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 3.0067476383265856, | |
| "grad_norm": 0.3436912254691727, | |
| "learning_rate": 1.6394899243721887e-05, | |
| "loss": 0.0354, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 3.0121457489878543, | |
| "grad_norm": 0.34881471094295025, | |
| "learning_rate": 1.632064307560644e-05, | |
| "loss": 0.0332, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 3.017543859649123, | |
| "grad_norm": 0.3692217535663024, | |
| "learning_rate": 1.6246439367033207e-05, | |
| "loss": 0.0408, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 3.0229419703103915, | |
| "grad_norm": 0.3280449705906347, | |
| "learning_rate": 1.617228917598396e-05, | |
| "loss": 0.0319, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 3.02834008097166, | |
| "grad_norm": 0.33260821933202167, | |
| "learning_rate": 1.609819355967744e-05, | |
| "loss": 0.0337, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 3.0337381916329287, | |
| "grad_norm": 0.405541501347011, | |
| "learning_rate": 1.602415357455426e-05, | |
| "loss": 0.0535, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 3.039136302294197, | |
| "grad_norm": 0.47557751817634936, | |
| "learning_rate": 1.5950170276261876e-05, | |
| "loss": 0.0402, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 3.0445344129554655, | |
| "grad_norm": 0.42113145547242353, | |
| "learning_rate": 1.587624471963949e-05, | |
| "loss": 0.0368, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 3.049932523616734, | |
| "grad_norm": 0.4587759167556131, | |
| "learning_rate": 1.5802377958703054e-05, | |
| "loss": 0.0396, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 3.0553306342780027, | |
| "grad_norm": 0.4093864304314231, | |
| "learning_rate": 1.5728571046630195e-05, | |
| "loss": 0.0328, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 3.0607287449392713, | |
| "grad_norm": 0.5852288088536234, | |
| "learning_rate": 1.5654825035745268e-05, | |
| "loss": 0.0692, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 3.06612685560054, | |
| "grad_norm": 0.4076610339068381, | |
| "learning_rate": 1.5581140977504273e-05, | |
| "loss": 0.0347, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 3.0715249662618085, | |
| "grad_norm": 0.48848968423165967, | |
| "learning_rate": 1.550751992247993e-05, | |
| "loss": 0.036, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 3.076923076923077, | |
| "grad_norm": 0.47155692827014656, | |
| "learning_rate": 1.5433962920346638e-05, | |
| "loss": 0.036, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 3.0823211875843457, | |
| "grad_norm": 0.380639334036154, | |
| "learning_rate": 1.5360471019865583e-05, | |
| "loss": 0.0296, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 3.087719298245614, | |
| "grad_norm": 0.43221784870918734, | |
| "learning_rate": 1.5287045268869712e-05, | |
| "loss": 0.0351, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 3.0931174089068825, | |
| "grad_norm": 0.49905359599899657, | |
| "learning_rate": 1.5213686714248852e-05, | |
| "loss": 0.039, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 3.098515519568151, | |
| "grad_norm": 0.41331595216842526, | |
| "learning_rate": 1.5140396401934725e-05, | |
| "loss": 0.0309, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 3.1039136302294197, | |
| "grad_norm": 0.6043807596540545, | |
| "learning_rate": 1.5067175376886099e-05, | |
| "loss": 0.0274, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.1093117408906883, | |
| "grad_norm": 0.5014002726682666, | |
| "learning_rate": 1.4994024683073848e-05, | |
| "loss": 0.0458, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 3.114709851551957, | |
| "grad_norm": 0.40613279392800844, | |
| "learning_rate": 1.4920945363466062e-05, | |
| "loss": 0.0324, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 3.1201079622132255, | |
| "grad_norm": 0.4886303787102555, | |
| "learning_rate": 1.4847938460013219e-05, | |
| "loss": 0.2548, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 3.125506072874494, | |
| "grad_norm": 0.3825952135908576, | |
| "learning_rate": 1.4775005013633277e-05, | |
| "loss": 0.0319, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 3.1309041835357623, | |
| "grad_norm": 0.3654848769695381, | |
| "learning_rate": 1.4702146064196875e-05, | |
| "loss": 0.0351, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 3.136302294197031, | |
| "grad_norm": 0.38630959354240707, | |
| "learning_rate": 1.4629362650512464e-05, | |
| "loss": 0.0403, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 3.1417004048582995, | |
| "grad_norm": 0.4225028608467292, | |
| "learning_rate": 1.4556655810311566e-05, | |
| "loss": 0.1626, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 3.147098515519568, | |
| "grad_norm": 0.3827994232164537, | |
| "learning_rate": 1.4484026580233888e-05, | |
| "loss": 0.0344, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 3.1524966261808367, | |
| "grad_norm": 0.3310555630534496, | |
| "learning_rate": 1.4411475995812613e-05, | |
| "loss": 0.0275, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 3.1578947368421053, | |
| "grad_norm": 0.3632971332190463, | |
| "learning_rate": 1.4339005091459595e-05, | |
| "loss": 0.0839, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 3.163292847503374, | |
| "grad_norm": 0.3602691812823302, | |
| "learning_rate": 1.4266614900450645e-05, | |
| "loss": 0.0291, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 3.1686909581646425, | |
| "grad_norm": 0.40629829870227013, | |
| "learning_rate": 1.4194306454910757e-05, | |
| "loss": 0.0425, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 3.174089068825911, | |
| "grad_norm": 0.3521781479995114, | |
| "learning_rate": 1.4122080785799423e-05, | |
| "loss": 0.0311, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 3.1794871794871793, | |
| "grad_norm": 0.5300855991321622, | |
| "learning_rate": 1.4049938922895945e-05, | |
| "loss": 0.1332, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 3.184885290148448, | |
| "grad_norm": 0.47747106483553625, | |
| "learning_rate": 1.3977881894784689e-05, | |
| "loss": 0.1839, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 3.1902834008097165, | |
| "grad_norm": 0.5073414782013317, | |
| "learning_rate": 1.390591072884051e-05, | |
| "loss": 0.0332, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 3.195681511470985, | |
| "grad_norm": 0.36797349854225664, | |
| "learning_rate": 1.383402645121402e-05, | |
| "loss": 0.0297, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 3.2010796221322537, | |
| "grad_norm": 0.39930020559853535, | |
| "learning_rate": 1.3762230086817024e-05, | |
| "loss": 0.0311, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 3.2064777327935223, | |
| "grad_norm": 0.41828193288392684, | |
| "learning_rate": 1.3690522659307846e-05, | |
| "loss": 0.1682, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 3.211875843454791, | |
| "grad_norm": 0.4254248832162874, | |
| "learning_rate": 1.3618905191076806e-05, | |
| "loss": 0.0431, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 3.2172739541160595, | |
| "grad_norm": 0.38086605557209824, | |
| "learning_rate": 1.3547378703231559e-05, | |
| "loss": 0.0275, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 3.2226720647773277, | |
| "grad_norm": 0.4245476891916035, | |
| "learning_rate": 1.3475944215582619e-05, | |
| "loss": 0.0391, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 3.2280701754385963, | |
| "grad_norm": 0.43518229539768466, | |
| "learning_rate": 1.340460274662874e-05, | |
| "loss": 0.0378, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 3.233468286099865, | |
| "grad_norm": 0.3780375093099488, | |
| "learning_rate": 1.3333355313542478e-05, | |
| "loss": 0.0302, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 3.2388663967611335, | |
| "grad_norm": 0.3629192307455684, | |
| "learning_rate": 1.3262202932155602e-05, | |
| "loss": 0.0344, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 3.244264507422402, | |
| "grad_norm": 0.3699894264323716, | |
| "learning_rate": 1.3191146616944676e-05, | |
| "loss": 0.0289, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 3.2496626180836707, | |
| "grad_norm": 0.35188609379499725, | |
| "learning_rate": 1.312018738101658e-05, | |
| "loss": 0.0339, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 3.2550607287449393, | |
| "grad_norm": 0.4491270054593502, | |
| "learning_rate": 1.304932623609403e-05, | |
| "loss": 0.1386, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 3.260458839406208, | |
| "grad_norm": 0.38745106173266436, | |
| "learning_rate": 1.2978564192501203e-05, | |
| "loss": 0.0402, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 3.2658569500674766, | |
| "grad_norm": 0.3773073501230217, | |
| "learning_rate": 1.2907902259149287e-05, | |
| "loss": 0.0308, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 3.2712550607287447, | |
| "grad_norm": 0.39839198674044035, | |
| "learning_rate": 1.2837341443522147e-05, | |
| "loss": 0.031, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 3.2766531713900133, | |
| "grad_norm": 0.38676509058198444, | |
| "learning_rate": 1.2766882751661905e-05, | |
| "loss": 0.034, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 3.282051282051282, | |
| "grad_norm": 0.37175755499128865, | |
| "learning_rate": 1.2696527188154639e-05, | |
| "loss": 0.0319, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 3.2874493927125505, | |
| "grad_norm": 0.364975396498793, | |
| "learning_rate": 1.2626275756116027e-05, | |
| "loss": 0.0291, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 3.292847503373819, | |
| "grad_norm": 0.4410768716884598, | |
| "learning_rate": 1.2556129457177084e-05, | |
| "loss": 0.0351, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 3.2982456140350878, | |
| "grad_norm": 0.3686080508993296, | |
| "learning_rate": 1.2486089291469835e-05, | |
| "loss": 0.0304, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 3.3036437246963564, | |
| "grad_norm": 0.3825335449989966, | |
| "learning_rate": 1.2416156257613098e-05, | |
| "loss": 0.0367, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 3.309041835357625, | |
| "grad_norm": 0.34218361939907355, | |
| "learning_rate": 1.2346331352698206e-05, | |
| "loss": 0.0315, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 3.3144399460188936, | |
| "grad_norm": 0.4478836143104432, | |
| "learning_rate": 1.227661557227483e-05, | |
| "loss": 0.0427, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 3.3198380566801617, | |
| "grad_norm": 0.37407337511538385, | |
| "learning_rate": 1.2207009910336764e-05, | |
| "loss": 0.0327, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 3.3252361673414303, | |
| "grad_norm": 0.35934207428825143, | |
| "learning_rate": 1.2137515359307737e-05, | |
| "loss": 0.0312, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 3.330634278002699, | |
| "grad_norm": 0.43937992207581134, | |
| "learning_rate": 1.2068132910027299e-05, | |
| "loss": 0.0354, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 3.3360323886639676, | |
| "grad_norm": 0.33294700463710736, | |
| "learning_rate": 1.1998863551736659e-05, | |
| "loss": 0.0283, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 3.341430499325236, | |
| "grad_norm": 0.39301352026371505, | |
| "learning_rate": 1.192970827206462e-05, | |
| "loss": 0.0331, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 3.3468286099865048, | |
| "grad_norm": 0.36498619637281343, | |
| "learning_rate": 1.1860668057013451e-05, | |
| "loss": 0.0328, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 3.3522267206477734, | |
| "grad_norm": 0.3559476591751902, | |
| "learning_rate": 1.1791743890944869e-05, | |
| "loss": 0.0343, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 3.357624831309042, | |
| "grad_norm": 0.43684774915670094, | |
| "learning_rate": 1.1722936756565969e-05, | |
| "loss": 0.2334, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 3.3630229419703106, | |
| "grad_norm": 0.4400139001073612, | |
| "learning_rate": 1.1654247634915261e-05, | |
| "loss": 0.0323, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 3.3684210526315788, | |
| "grad_norm": 0.3993508002078833, | |
| "learning_rate": 1.1585677505348618e-05, | |
| "loss": 0.1723, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 3.3738191632928474, | |
| "grad_norm": 0.39545396795136123, | |
| "learning_rate": 1.151722734552538e-05, | |
| "loss": 0.136, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 3.379217273954116, | |
| "grad_norm": 0.3784737302500327, | |
| "learning_rate": 1.1448898131394364e-05, | |
| "loss": 0.0385, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 3.3846153846153846, | |
| "grad_norm": 0.3633996380704606, | |
| "learning_rate": 1.1380690837179955e-05, | |
| "loss": 0.0297, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 3.390013495276653, | |
| "grad_norm": 0.38600677874076367, | |
| "learning_rate": 1.1312606435368266e-05, | |
| "loss": 0.0292, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 3.395411605937922, | |
| "grad_norm": 0.3685107386955438, | |
| "learning_rate": 1.124464589669319e-05, | |
| "loss": 0.0283, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 3.4008097165991904, | |
| "grad_norm": 0.3796999501062114, | |
| "learning_rate": 1.1176810190122644e-05, | |
| "loss": 0.0345, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 3.406207827260459, | |
| "grad_norm": 0.39313860478281704, | |
| "learning_rate": 1.110910028284468e-05, | |
| "loss": 0.0315, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 3.4116059379217276, | |
| "grad_norm": 0.38729735419763556, | |
| "learning_rate": 1.1041517140253761e-05, | |
| "loss": 0.0538, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 3.417004048582996, | |
| "grad_norm": 0.3796093100412225, | |
| "learning_rate": 1.0974061725936935e-05, | |
| "loss": 0.0305, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 3.4224021592442644, | |
| "grad_norm": 0.3654235584300773, | |
| "learning_rate": 1.0906735001660138e-05, | |
| "loss": 0.0283, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 3.427800269905533, | |
| "grad_norm": 0.409937460602826, | |
| "learning_rate": 1.0839537927354466e-05, | |
| "loss": 0.0379, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 3.4331983805668016, | |
| "grad_norm": 0.3917028036928748, | |
| "learning_rate": 1.0772471461102505e-05, | |
| "loss": 0.0269, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 3.43859649122807, | |
| "grad_norm": 0.3625369529682563, | |
| "learning_rate": 1.070553655912463e-05, | |
| "loss": 0.0283, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 3.443994601889339, | |
| "grad_norm": 0.38788057237068674, | |
| "learning_rate": 1.0638734175765433e-05, | |
| "loss": 0.0314, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 3.4493927125506074, | |
| "grad_norm": 0.3847399681910376, | |
| "learning_rate": 1.0572065263480046e-05, | |
| "loss": 0.0324, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 3.454790823211876, | |
| "grad_norm": 0.4382934253068173, | |
| "learning_rate": 1.0505530772820625e-05, | |
| "loss": 0.0894, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 3.4601889338731446, | |
| "grad_norm": 0.419058031320498, | |
| "learning_rate": 1.0439131652422763e-05, | |
| "loss": 0.2127, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 3.465587044534413, | |
| "grad_norm": 0.44649997683190956, | |
| "learning_rate": 1.037286884899196e-05, | |
| "loss": 0.1479, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 3.4709851551956814, | |
| "grad_norm": 0.40480754300934557, | |
| "learning_rate": 1.030674330729014e-05, | |
| "loss": 0.0352, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 3.47638326585695, | |
| "grad_norm": 0.30967102679189645, | |
| "learning_rate": 1.0240755970122173e-05, | |
| "loss": 0.0218, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 3.4817813765182186, | |
| "grad_norm": 0.37838596103828964, | |
| "learning_rate": 1.0174907778322458e-05, | |
| "loss": 0.0288, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 3.4871794871794872, | |
| "grad_norm": 0.4146619345330915, | |
| "learning_rate": 1.0109199670741447e-05, | |
| "loss": 0.0362, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 3.492577597840756, | |
| "grad_norm": 0.4117765119433817, | |
| "learning_rate": 1.004363258423235e-05, | |
| "loss": 0.2213, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 3.4979757085020244, | |
| "grad_norm": 0.34377868810679235, | |
| "learning_rate": 9.978207453637671e-06, | |
| "loss": 0.0269, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 3.5033738191632926, | |
| "grad_norm": 0.38450416903251206, | |
| "learning_rate": 9.912925211775989e-06, | |
| "loss": 0.0284, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 3.5087719298245617, | |
| "grad_norm": 0.3681687532508216, | |
| "learning_rate": 9.84778678942856e-06, | |
| "loss": 0.0319, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 3.51417004048583, | |
| "grad_norm": 0.3964437963905117, | |
| "learning_rate": 9.782793115326112e-06, | |
| "loss": 0.0347, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 3.5195681511470984, | |
| "grad_norm": 0.3655533421389497, | |
| "learning_rate": 9.717945116135568e-06, | |
| "loss": 0.0333, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 3.524966261808367, | |
| "grad_norm": 0.3224001401289986, | |
| "learning_rate": 9.653243716446862e-06, | |
| "loss": 0.0276, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 3.5303643724696356, | |
| "grad_norm": 0.3452946278683242, | |
| "learning_rate": 9.588689838759744e-06, | |
| "loss": 0.0281, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 3.5357624831309042, | |
| "grad_norm": 0.36042207805510473, | |
| "learning_rate": 9.524284403470598e-06, | |
| "loss": 0.0299, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 3.541160593792173, | |
| "grad_norm": 0.38055305365429554, | |
| "learning_rate": 9.460028328859379e-06, | |
| "loss": 0.0252, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 3.5465587044534415, | |
| "grad_norm": 0.3771448822244628, | |
| "learning_rate": 9.395922531076462e-06, | |
| "loss": 0.0331, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 3.5519568151147096, | |
| "grad_norm": 0.3462359501161187, | |
| "learning_rate": 9.331967924129615e-06, | |
| "loss": 0.0264, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 3.5573549257759787, | |
| "grad_norm": 0.39941647689494786, | |
| "learning_rate": 9.268165419870947e-06, | |
| "loss": 0.0333, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 3.562753036437247, | |
| "grad_norm": 0.4205128793287871, | |
| "learning_rate": 9.20451592798394e-06, | |
| "loss": 0.0307, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 3.5681511470985154, | |
| "grad_norm": 0.42077068867710476, | |
| "learning_rate": 9.141020355970427e-06, | |
| "loss": 0.0325, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 3.573549257759784, | |
| "grad_norm": 0.3622122750826694, | |
| "learning_rate": 9.077679609137714e-06, | |
| "loss": 0.0285, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 3.5789473684210527, | |
| "grad_norm": 0.4360493505456192, | |
| "learning_rate": 9.014494590585603e-06, | |
| "loss": 0.0327, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 3.5843454790823213, | |
| "grad_norm": 0.40930157708444925, | |
| "learning_rate": 8.95146620119359e-06, | |
| "loss": 0.1576, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 3.58974358974359, | |
| "grad_norm": 0.38811598863254687, | |
| "learning_rate": 8.888595339607961e-06, | |
| "loss": 0.0302, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 3.5951417004048585, | |
| "grad_norm": 0.36493375147907436, | |
| "learning_rate": 8.825882902228998e-06, | |
| "loss": 0.0342, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 3.6005398110661266, | |
| "grad_norm": 0.3459341178116053, | |
| "learning_rate": 8.763329783198222e-06, | |
| "loss": 0.0306, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 3.6059379217273952, | |
| "grad_norm": 0.39037878669889553, | |
| "learning_rate": 8.700936874385601e-06, | |
| "loss": 0.0316, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 3.611336032388664, | |
| "grad_norm": 0.39416938896362874, | |
| "learning_rate": 8.638705065376887e-06, | |
| "loss": 0.0307, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 3.6167341430499325, | |
| "grad_norm": 0.34015438708553125, | |
| "learning_rate": 8.576635243460868e-06, | |
| "loss": 0.0257, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 3.622132253711201, | |
| "grad_norm": 0.3899592691796045, | |
| "learning_rate": 8.514728293616788e-06, | |
| "loss": 0.0295, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 3.6275303643724697, | |
| "grad_norm": 0.43563748789771956, | |
| "learning_rate": 8.452985098501659e-06, | |
| "loss": 0.0586, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 3.6329284750337383, | |
| "grad_norm": 0.3664811599053362, | |
| "learning_rate": 8.391406538437737e-06, | |
| "loss": 0.027, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 3.638326585695007, | |
| "grad_norm": 0.3616556776179159, | |
| "learning_rate": 8.329993491399933e-06, | |
| "loss": 0.0317, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 3.6437246963562755, | |
| "grad_norm": 0.3948186866229346, | |
| "learning_rate": 8.268746833003304e-06, | |
| "loss": 0.0287, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 3.6491228070175437, | |
| "grad_norm": 0.4064305102650153, | |
| "learning_rate": 8.207667436490564e-06, | |
| "loss": 0.0318, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 3.6545209176788123, | |
| "grad_norm": 0.3887421174083186, | |
| "learning_rate": 8.146756172719668e-06, | |
| "loss": 0.0337, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 3.659919028340081, | |
| "grad_norm": 0.35086318209010237, | |
| "learning_rate": 8.086013910151334e-06, | |
| "loss": 0.0303, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 3.6653171390013495, | |
| "grad_norm": 0.3875957934712957, | |
| "learning_rate": 8.025441514836715e-06, | |
| "loss": 0.0309, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 3.670715249662618, | |
| "grad_norm": 0.36661840834858717, | |
| "learning_rate": 7.965039850405037e-06, | |
| "loss": 0.027, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 3.6761133603238867, | |
| "grad_norm": 0.34118151803342256, | |
| "learning_rate": 7.904809778051252e-06, | |
| "loss": 0.0296, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 3.6815114709851553, | |
| "grad_norm": 0.37213000529906315, | |
| "learning_rate": 7.84475215652381e-06, | |
| "loss": 0.1303, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 3.686909581646424, | |
| "grad_norm": 0.3896509711111651, | |
| "learning_rate": 7.784867842112367e-06, | |
| "loss": 0.1687, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 3.6923076923076925, | |
| "grad_norm": 0.3426839606336355, | |
| "learning_rate": 7.725157688635634e-06, | |
| "loss": 0.0312, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 3.6977058029689607, | |
| "grad_norm": 0.35402053366618513, | |
| "learning_rate": 7.665622547429139e-06, | |
| "loss": 0.0293, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 3.7031039136302293, | |
| "grad_norm": 0.3546158448227337, | |
| "learning_rate": 7.606263267333145e-06, | |
| "loss": 0.0249, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 3.708502024291498, | |
| "grad_norm": 0.33157529931614793, | |
| "learning_rate": 7.547080694680495e-06, | |
| "loss": 0.0219, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 3.7139001349527665, | |
| "grad_norm": 0.39512925735292526, | |
| "learning_rate": 7.4880756732846e-06, | |
| "loss": 0.0356, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 3.719298245614035, | |
| "grad_norm": 0.3714005261676109, | |
| "learning_rate": 7.429249044427362e-06, | |
| "loss": 0.0352, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 3.7246963562753037, | |
| "grad_norm": 0.3613957102307198, | |
| "learning_rate": 7.3706016468472045e-06, | |
| "loss": 0.0306, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 3.7300944669365723, | |
| "grad_norm": 0.31816258283900495, | |
| "learning_rate": 7.312134316727093e-06, | |
| "loss": 0.0266, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 3.7354925775978405, | |
| "grad_norm": 0.41925403759376656, | |
| "learning_rate": 7.25384788768265e-06, | |
| "loss": 0.1886, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 3.7408906882591095, | |
| "grad_norm": 0.4045239613978412, | |
| "learning_rate": 7.195743190750241e-06, | |
| "loss": 0.0321, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 3.7462887989203777, | |
| "grad_norm": 0.38175252409850824, | |
| "learning_rate": 7.137821054375114e-06, | |
| "loss": 0.0622, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 3.7516869095816463, | |
| "grad_norm": 0.34465054751860885, | |
| "learning_rate": 7.080082304399625e-06, | |
| "loss": 0.0271, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 3.757085020242915, | |
| "grad_norm": 0.33989881366203534, | |
| "learning_rate": 7.022527764051423e-06, | |
| "loss": 0.0286, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 3.7624831309041835, | |
| "grad_norm": 0.3412805918296631, | |
| "learning_rate": 6.9651582539317565e-06, | |
| "loss": 0.027, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 3.767881241565452, | |
| "grad_norm": 0.3334336573527621, | |
| "learning_rate": 6.907974592003732e-06, | |
| "loss": 0.0265, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 3.7732793522267207, | |
| "grad_norm": 0.4270420508919924, | |
| "learning_rate": 6.850977593580675e-06, | |
| "loss": 0.034, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.7786774628879893, | |
| "grad_norm": 0.3297327960112233, | |
| "learning_rate": 6.794168071314486e-06, | |
| "loss": 0.0238, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.7840755735492575, | |
| "grad_norm": 0.36163451391560975, | |
| "learning_rate": 6.737546835184101e-06, | |
| "loss": 0.0279, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.7894736842105265, | |
| "grad_norm": 0.4411471545722498, | |
| "learning_rate": 6.6811146924838725e-06, | |
| "loss": 0.0666, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.7948717948717947, | |
| "grad_norm": 0.35871421378374524, | |
| "learning_rate": 6.6248724478121254e-06, | |
| "loss": 0.0291, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.8002699055330633, | |
| "grad_norm": 0.36756705518749777, | |
| "learning_rate": 6.568820903059632e-06, | |
| "loss": 0.0367, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.805668016194332, | |
| "grad_norm": 0.47277824056660106, | |
| "learning_rate": 6.512960857398227e-06, | |
| "loss": 0.0392, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.8110661268556005, | |
| "grad_norm": 0.4010852296701039, | |
| "learning_rate": 6.457293107269371e-06, | |
| "loss": 0.0308, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.816464237516869, | |
| "grad_norm": 0.3680019544802103, | |
| "learning_rate": 6.401818446372809e-06, | |
| "loss": 0.0489, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.8218623481781377, | |
| "grad_norm": 0.3820591256150461, | |
| "learning_rate": 6.346537665655286e-06, | |
| "loss": 0.1109, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.8272604588394064, | |
| "grad_norm": 0.29674819102746314, | |
| "learning_rate": 6.291451553299204e-06, | |
| "loss": 0.0221, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.8326585695006745, | |
| "grad_norm": 0.38107534212823674, | |
| "learning_rate": 6.236560894711459e-06, | |
| "loss": 0.0317, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.8380566801619436, | |
| "grad_norm": 0.4300573413232902, | |
| "learning_rate": 6.181866472512175e-06, | |
| "loss": 0.1339, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.8434547908232117, | |
| "grad_norm": 0.4653463809534711, | |
| "learning_rate": 6.127369066523599e-06, | |
| "loss": 0.3112, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.8488529014844803, | |
| "grad_norm": 0.3828688005837094, | |
| "learning_rate": 6.073069453758946e-06, | |
| "loss": 0.0259, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.854251012145749, | |
| "grad_norm": 0.35459961651464167, | |
| "learning_rate": 6.018968408411341e-06, | |
| "loss": 0.0267, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.8596491228070176, | |
| "grad_norm": 0.35643103172233276, | |
| "learning_rate": 5.965066701842766e-06, | |
| "loss": 0.1439, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.865047233468286, | |
| "grad_norm": 0.34409226399099263, | |
| "learning_rate": 5.9113651025730835e-06, | |
| "loss": 0.0292, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.8704453441295548, | |
| "grad_norm": 0.35933383547027126, | |
| "learning_rate": 5.857864376269051e-06, | |
| "loss": 0.0804, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.8758434547908234, | |
| "grad_norm": 0.3739973480129211, | |
| "learning_rate": 5.804565285733432e-06, | |
| "loss": 0.035, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.8812415654520915, | |
| "grad_norm": 0.3722765784519681, | |
| "learning_rate": 5.751468590894107e-06, | |
| "loss": 0.1128, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.8866396761133606, | |
| "grad_norm": 0.3713792988536375, | |
| "learning_rate": 5.698575048793223e-06, | |
| "loss": 0.0277, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.8920377867746287, | |
| "grad_norm": 0.4568426442621641, | |
| "learning_rate": 5.645885413576433e-06, | |
| "loss": 0.1226, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.8974358974358974, | |
| "grad_norm": 0.37950864411226903, | |
| "learning_rate": 5.593400436482119e-06, | |
| "loss": 0.043, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.902834008097166, | |
| "grad_norm": 0.3925818680423842, | |
| "learning_rate": 5.541120865830687e-06, | |
| "loss": 0.0307, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.9082321187584346, | |
| "grad_norm": 0.36496534398830294, | |
| "learning_rate": 5.489047447013891e-06, | |
| "loss": 0.0312, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.913630229419703, | |
| "grad_norm": 0.3411659648761055, | |
| "learning_rate": 5.4371809224842354e-06, | |
| "loss": 0.0249, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.919028340080972, | |
| "grad_norm": 0.35092445194976446, | |
| "learning_rate": 5.3855220317443416e-06, | |
| "loss": 0.0274, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.9244264507422404, | |
| "grad_norm": 0.3489498272274411, | |
| "learning_rate": 5.334071511336449e-06, | |
| "loss": 0.0252, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.9298245614035086, | |
| "grad_norm": 0.36114128102871357, | |
| "learning_rate": 5.282830094831881e-06, | |
| "loss": 0.0294, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.9352226720647776, | |
| "grad_norm": 0.3284749937819038, | |
| "learning_rate": 5.231798512820612e-06, | |
| "loss": 0.025, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.9406207827260458, | |
| "grad_norm": 0.386218354934746, | |
| "learning_rate": 5.180977492900823e-06, | |
| "loss": 0.1581, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.9460188933873144, | |
| "grad_norm": 0.3924605685249013, | |
| "learning_rate": 5.130367759668544e-06, | |
| "loss": 0.1108, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.951417004048583, | |
| "grad_norm": 0.35950299892176835, | |
| "learning_rate": 5.07997003470734e-06, | |
| "loss": 0.0269, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 3.9568151147098516, | |
| "grad_norm": 0.35019384697385253, | |
| "learning_rate": 5.029785036577976e-06, | |
| "loss": 0.0262, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 3.96221322537112, | |
| "grad_norm": 0.3432029539479147, | |
| "learning_rate": 4.979813480808231e-06, | |
| "loss": 0.0254, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 3.967611336032389, | |
| "grad_norm": 0.35144365016547774, | |
| "learning_rate": 4.930056079882632e-06, | |
| "loss": 0.0298, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 3.9730094466936574, | |
| "grad_norm": 0.3541877009200062, | |
| "learning_rate": 4.880513543232361e-06, | |
| "loss": 0.0267, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 3.9784075573549256, | |
| "grad_norm": 0.40028293095466, | |
| "learning_rate": 4.831186577225082e-06, | |
| "loss": 0.0359, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 3.983805668016194, | |
| "grad_norm": 0.40302496611767596, | |
| "learning_rate": 4.782075885154909e-06, | |
| "loss": 0.0301, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.989203778677463, | |
| "grad_norm": 0.4237077479286715, | |
| "learning_rate": 4.733182167232356e-06, | |
| "loss": 0.0346, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.9946018893387314, | |
| "grad_norm": 0.3547714604476633, | |
| "learning_rate": 4.684506120574375e-06, | |
| "loss": 0.0258, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 2.102540355528973, | |
| "learning_rate": 4.636048439194392e-06, | |
| "loss": 0.0685, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 4.005398110661268, | |
| "grad_norm": 0.15325932809564183, | |
| "learning_rate": 4.587809813992437e-06, | |
| "loss": 0.0121, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 4.010796221322537, | |
| "grad_norm": 0.13530023614474354, | |
| "learning_rate": 4.53979093274526e-06, | |
| "loss": 0.008, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 4.016194331983805, | |
| "grad_norm": 0.13502016529933805, | |
| "learning_rate": 4.4919924800965696e-06, | |
| "loss": 0.0087, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 4.021592442645074, | |
| "grad_norm": 0.18254596131179956, | |
| "learning_rate": 4.444415137547238e-06, | |
| "loss": 0.0103, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 4.026990553306343, | |
| "grad_norm": 0.17029406387858442, | |
| "learning_rate": 4.3970595834455845e-06, | |
| "loss": 0.0089, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 4.032388663967612, | |
| "grad_norm": 0.2522421603518247, | |
| "learning_rate": 4.349926492977719e-06, | |
| "loss": 0.0828, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 4.03778677462888, | |
| "grad_norm": 0.14277344736187908, | |
| "learning_rate": 4.303016538157907e-06, | |
| "loss": 0.0287, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 4.043184885290149, | |
| "grad_norm": 0.2642123078469134, | |
| "learning_rate": 4.256330387818999e-06, | |
| "loss": 0.1838, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 4.048582995951417, | |
| "grad_norm": 0.271440865327848, | |
| "learning_rate": 4.209868707602871e-06, | |
| "loss": 0.1105, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 4.053981106612685, | |
| "grad_norm": 0.14472235533108238, | |
| "learning_rate": 4.163632159950965e-06, | |
| "loss": 0.0073, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 4.059379217273954, | |
| "grad_norm": 0.20035179292259456, | |
| "learning_rate": 4.117621404094811e-06, | |
| "loss": 0.0085, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 4.064777327935222, | |
| "grad_norm": 0.14362507519164897, | |
| "learning_rate": 4.07183709604666e-06, | |
| "loss": 0.008, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 4.0701754385964914, | |
| "grad_norm": 0.15431708802792676, | |
| "learning_rate": 4.026279888590101e-06, | |
| "loss": 0.0092, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 4.07557354925776, | |
| "grad_norm": 0.13999009793334247, | |
| "learning_rate": 3.980950431270776e-06, | |
| "loss": 0.0077, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 4.080971659919029, | |
| "grad_norm": 0.15652426490564023, | |
| "learning_rate": 3.935849370387104e-06, | |
| "loss": 0.0079, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 4.086369770580297, | |
| "grad_norm": 0.14077430214547057, | |
| "learning_rate": 3.89097734898108e-06, | |
| "loss": 0.0073, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 4.091767881241566, | |
| "grad_norm": 0.149289451771641, | |
| "learning_rate": 3.846335006829103e-06, | |
| "loss": 0.0077, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 4.097165991902834, | |
| "grad_norm": 0.12481268750534742, | |
| "learning_rate": 3.801922980432835e-06, | |
| "loss": 0.0079, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 4.102564102564102, | |
| "grad_norm": 0.17656054970348686, | |
| "learning_rate": 3.7577419030101615e-06, | |
| "loss": 0.0088, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 4.107962213225371, | |
| "grad_norm": 0.2610557267049597, | |
| "learning_rate": 3.7137924044861273e-06, | |
| "loss": 0.2539, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 4.113360323886639, | |
| "grad_norm": 0.1838290356805346, | |
| "learning_rate": 3.670075111483975e-06, | |
| "loss": 0.0284, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 4.1187584345479085, | |
| "grad_norm": 0.20610335600698715, | |
| "learning_rate": 3.626590647316206e-06, | |
| "loss": 0.0099, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 4.124156545209177, | |
| "grad_norm": 0.1630204215332218, | |
| "learning_rate": 3.583339631975704e-06, | |
| "loss": 0.0083, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 4.129554655870446, | |
| "grad_norm": 0.6353797431357658, | |
| "learning_rate": 3.5403226821268734e-06, | |
| "loss": 0.0068, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 4.134952766531714, | |
| "grad_norm": 0.12893014852172485, | |
| "learning_rate": 3.497540411096869e-06, | |
| "loss": 0.0062, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 4.140350877192983, | |
| "grad_norm": 0.15853943942991885, | |
| "learning_rate": 3.454993428866831e-06, | |
| "loss": 0.0078, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 4.145748987854251, | |
| "grad_norm": 0.16815858876201803, | |
| "learning_rate": 3.4126823420632095e-06, | |
| "loss": 0.0085, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 4.151147098515519, | |
| "grad_norm": 0.17322201023818573, | |
| "learning_rate": 3.3706077539490933e-06, | |
| "loss": 0.0499, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 4.156545209176788, | |
| "grad_norm": 0.301026674845652, | |
| "learning_rate": 3.328770264415635e-06, | |
| "loss": 0.0338, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 4.161943319838056, | |
| "grad_norm": 0.1778290826516132, | |
| "learning_rate": 3.287170469973466e-06, | |
| "loss": 0.0069, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 4.1673414304993255, | |
| "grad_norm": 0.1656358924994505, | |
| "learning_rate": 3.2458089637442124e-06, | |
| "loss": 0.0079, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 4.172739541160594, | |
| "grad_norm": 0.17053375220760866, | |
| "learning_rate": 3.204686335452043e-06, | |
| "loss": 0.0076, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 4.178137651821863, | |
| "grad_norm": 0.1620063244322808, | |
| "learning_rate": 3.163803171415243e-06, | |
| "loss": 0.0073, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 4.183535762483131, | |
| "grad_norm": 0.16390709001023246, | |
| "learning_rate": 3.1231600545378703e-06, | |
| "loss": 0.0069, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 4.1889338731444, | |
| "grad_norm": 0.1894766297474302, | |
| "learning_rate": 3.0827575643014283e-06, | |
| "loss": 0.0092, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 4.194331983805668, | |
| "grad_norm": 0.18523445493915092, | |
| "learning_rate": 3.0425962767566307e-06, | |
| "loss": 0.0074, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 4.199730094466936, | |
| "grad_norm": 0.1846148108584596, | |
| "learning_rate": 3.0026767645151532e-06, | |
| "loss": 0.0104, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 4.205128205128205, | |
| "grad_norm": 0.12575311717937954, | |
| "learning_rate": 2.9629995967414914e-06, | |
| "loss": 0.0054, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 4.2105263157894735, | |
| "grad_norm": 0.18798854404124588, | |
| "learning_rate": 2.9235653391448448e-06, | |
| "loss": 0.0075, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 4.2159244264507425, | |
| "grad_norm": 0.1627294340931128, | |
| "learning_rate": 2.8843745539710523e-06, | |
| "loss": 0.0085, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 4.221322537112011, | |
| "grad_norm": 0.16326469476459096, | |
| "learning_rate": 2.8454277999945603e-06, | |
| "loss": 0.0072, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 4.22672064777328, | |
| "grad_norm": 0.35487427857462644, | |
| "learning_rate": 2.806725632510472e-06, | |
| "loss": 0.2504, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 4.232118758434548, | |
| "grad_norm": 0.18235633910436322, | |
| "learning_rate": 2.7682686033266337e-06, | |
| "loss": 0.007, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 4.237516869095816, | |
| "grad_norm": 0.15004129859723508, | |
| "learning_rate": 2.730057260755743e-06, | |
| "loss": 0.0064, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 4.242914979757085, | |
| "grad_norm": 0.27399522146899674, | |
| "learning_rate": 2.6920921496075545e-06, | |
| "loss": 0.1879, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 4.248313090418353, | |
| "grad_norm": 0.17249001942659825, | |
| "learning_rate": 2.654373811181099e-06, | |
| "loss": 0.0074, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 4.253711201079622, | |
| "grad_norm": 0.25235961908712856, | |
| "learning_rate": 2.6169027832569825e-06, | |
| "loss": 0.1351, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 4.2591093117408905, | |
| "grad_norm": 0.13122161765075682, | |
| "learning_rate": 2.5796796000896882e-06, | |
| "loss": 0.0058, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 4.2645074224021595, | |
| "grad_norm": 0.13879501710771264, | |
| "learning_rate": 2.5427047923999993e-06, | |
| "loss": 0.0066, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 4.269905533063428, | |
| "grad_norm": 0.17241689078274697, | |
| "learning_rate": 2.505978887367393e-06, | |
| "loss": 0.0092, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 4.275303643724697, | |
| "grad_norm": 0.2389164870342563, | |
| "learning_rate": 2.4695024086225507e-06, | |
| "loss": 0.2019, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 4.280701754385965, | |
| "grad_norm": 0.20120093721542376, | |
| "learning_rate": 2.4332758762398846e-06, | |
| "loss": 0.0087, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 4.286099865047233, | |
| "grad_norm": 0.1859035713215901, | |
| "learning_rate": 2.3972998067301113e-06, | |
| "loss": 0.0087, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 4.291497975708502, | |
| "grad_norm": 0.24334555738366437, | |
| "learning_rate": 2.3615747130329013e-06, | |
| "loss": 0.0658, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 4.29689608636977, | |
| "grad_norm": 0.11224893182038939, | |
| "learning_rate": 2.3261011045095706e-06, | |
| "loss": 0.0052, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 4.302294197031039, | |
| "grad_norm": 0.18820534336508035, | |
| "learning_rate": 2.2908794869358044e-06, | |
| "loss": 0.0076, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 4.3076923076923075, | |
| "grad_norm": 0.1754336848612205, | |
| "learning_rate": 2.25591036249444e-06, | |
| "loss": 0.0072, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 4.3130904183535765, | |
| "grad_norm": 0.141399435578848, | |
| "learning_rate": 2.22119422976834e-06, | |
| "loss": 0.0063, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 4.318488529014845, | |
| "grad_norm": 0.15027110262927373, | |
| "learning_rate": 2.186731583733235e-06, | |
| "loss": 0.0065, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 4.323886639676114, | |
| "grad_norm": 0.1912745070493108, | |
| "learning_rate": 2.1525229157507142e-06, | |
| "loss": 0.0085, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 4.329284750337382, | |
| "grad_norm": 0.17652416759862824, | |
| "learning_rate": 2.118568713561189e-06, | |
| "loss": 0.0097, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 4.33468286099865, | |
| "grad_norm": 0.17840550642036743, | |
| "learning_rate": 2.084869461276946e-06, | |
| "loss": 0.0071, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 4.340080971659919, | |
| "grad_norm": 0.12186032825306584, | |
| "learning_rate": 2.0514256393752465e-06, | |
| "loss": 0.006, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 4.345479082321187, | |
| "grad_norm": 0.18333270183994504, | |
| "learning_rate": 2.018237724691483e-06, | |
| "loss": 0.0096, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 4.350877192982456, | |
| "grad_norm": 0.15649804149725655, | |
| "learning_rate": 1.9853061904123662e-06, | |
| "loss": 0.0066, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 4.3562753036437245, | |
| "grad_norm": 0.14219357084240056, | |
| "learning_rate": 1.9526315060691915e-06, | |
| "loss": 0.0066, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 4.361673414304994, | |
| "grad_norm": 0.16645031776290997, | |
| "learning_rate": 1.9202141375311335e-06, | |
| "loss": 0.0069, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 4.367071524966262, | |
| "grad_norm": 0.16646923239933129, | |
| "learning_rate": 1.8880545469986144e-06, | |
| "loss": 0.0076, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 4.372469635627531, | |
| "grad_norm": 0.25569705111064256, | |
| "learning_rate": 1.8561531929967057e-06, | |
| "loss": 0.0084, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 4.377867746288799, | |
| "grad_norm": 0.2319287519781518, | |
| "learning_rate": 1.8245105303685861e-06, | |
| "loss": 0.1204, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 4.383265856950067, | |
| "grad_norm": 0.1978562124884058, | |
| "learning_rate": 1.7931270102690822e-06, | |
| "loss": 0.0095, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 4.388663967611336, | |
| "grad_norm": 0.24941829732701262, | |
| "learning_rate": 1.7620030801581988e-06, | |
| "loss": 0.0067, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 4.394062078272604, | |
| "grad_norm": 0.1802122557412193, | |
| "learning_rate": 1.7311391837947723e-06, | |
| "loss": 0.0081, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 4.399460188933873, | |
| "grad_norm": 0.1494705404024646, | |
| "learning_rate": 1.7005357612301176e-06, | |
| "loss": 0.0067, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 4.4048582995951415, | |
| "grad_norm": 0.1649691836281055, | |
| "learning_rate": 1.6701932488017791e-06, | |
| "loss": 0.0101, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 4.410256410256411, | |
| "grad_norm": 0.20415445318740522, | |
| "learning_rate": 1.6401120791272829e-06, | |
| "loss": 0.0083, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 4.415654520917679, | |
| "grad_norm": 0.2904972849430406, | |
| "learning_rate": 1.6102926810979912e-06, | |
| "loss": 0.0133, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 4.421052631578947, | |
| "grad_norm": 0.17455678640910202, | |
| "learning_rate": 1.5807354798729678e-06, | |
| "loss": 0.0075, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 4.426450742240216, | |
| "grad_norm": 0.20286904970203815, | |
| "learning_rate": 1.5514408968729399e-06, | |
| "loss": 0.0723, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 4.431848852901484, | |
| "grad_norm": 0.141866348848916, | |
| "learning_rate": 1.5224093497742654e-06, | |
| "loss": 0.0066, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 4.437246963562753, | |
| "grad_norm": 0.17699214194765778, | |
| "learning_rate": 1.493641252502993e-06, | |
| "loss": 0.0083, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 4.442645074224021, | |
| "grad_norm": 0.2305318180070288, | |
| "learning_rate": 1.465137015228959e-06, | |
| "loss": 0.0629, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 4.44804318488529, | |
| "grad_norm": 0.12688029124502123, | |
| "learning_rate": 1.4368970443599261e-06, | |
| "loss": 0.0062, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 4.4534412955465585, | |
| "grad_norm": 0.14573022719889014, | |
| "learning_rate": 1.4089217425358115e-06, | |
| "loss": 0.0054, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 4.458839406207828, | |
| "grad_norm": 0.1754574359245676, | |
| "learning_rate": 1.381211508622924e-06, | |
| "loss": 0.0069, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 4.464237516869096, | |
| "grad_norm": 0.17767870785629436, | |
| "learning_rate": 1.3537667377082885e-06, | |
| "loss": 0.0074, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 4.469635627530364, | |
| "grad_norm": 0.1679498826516171, | |
| "learning_rate": 1.3265878210940098e-06, | |
| "loss": 0.0078, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 4.475033738191633, | |
| "grad_norm": 0.16389733624024713, | |
| "learning_rate": 1.2996751462917057e-06, | |
| "loss": 0.0071, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 4.480431848852901, | |
| "grad_norm": 0.1597873001757078, | |
| "learning_rate": 1.2730290970169535e-06, | |
| "loss": 0.0066, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 4.48582995951417, | |
| "grad_norm": 0.1357039185716906, | |
| "learning_rate": 1.2466500531838532e-06, | |
| "loss": 0.006, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 4.491228070175438, | |
| "grad_norm": 0.2159108023936079, | |
| "learning_rate": 1.2205383908995815e-06, | |
| "loss": 0.2062, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 4.496626180836707, | |
| "grad_norm": 0.14914685798519492, | |
| "learning_rate": 1.1946944824590513e-06, | |
| "loss": 0.0066, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 4.502024291497976, | |
| "grad_norm": 0.1431530220204283, | |
| "learning_rate": 1.1691186963395861e-06, | |
| "loss": 0.0055, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 4.507422402159245, | |
| "grad_norm": 0.1786336644306138, | |
| "learning_rate": 1.1438113971956776e-06, | |
| "loss": 0.0084, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 4.512820512820513, | |
| "grad_norm": 0.17770533748705705, | |
| "learning_rate": 1.11877294585379e-06, | |
| "loss": 0.0075, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 4.518218623481781, | |
| "grad_norm": 0.15129590466594908, | |
| "learning_rate": 1.0940036993071934e-06, | |
| "loss": 0.0081, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 4.52361673414305, | |
| "grad_norm": 0.19105021139758796, | |
| "learning_rate": 1.0695040107109089e-06, | |
| "loss": 0.0065, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 4.529014844804318, | |
| "grad_norm": 0.14979723404668785, | |
| "learning_rate": 1.045274229376636e-06, | |
| "loss": 0.006, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 4.534412955465587, | |
| "grad_norm": 0.1846638960067091, | |
| "learning_rate": 1.0213147007678036e-06, | |
| "loss": 0.0075, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 4.539811066126855, | |
| "grad_norm": 0.1454433452689002, | |
| "learning_rate": 9.97625766494621e-07, | |
| "loss": 0.0073, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 4.545209176788124, | |
| "grad_norm": 0.18488942655661209, | |
| "learning_rate": 9.742077643092207e-07, | |
| "loss": 0.0096, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 4.550607287449393, | |
| "grad_norm": 0.2006493023154337, | |
| "learning_rate": 9.51061028100837e-07, | |
| "loss": 0.0074, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 4.556005398110662, | |
| "grad_norm": 0.18899821986914248, | |
| "learning_rate": 9.281858878910555e-07, | |
| "loss": 0.0091, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 4.56140350877193, | |
| "grad_norm": 0.21143375715615836, | |
| "learning_rate": 9.055826698290881e-07, | |
| "loss": 0.0075, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 4.566801619433198, | |
| "grad_norm": 0.15198860228197414, | |
| "learning_rate": 8.832516961871462e-07, | |
| "loss": 0.0053, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 4.572199730094467, | |
| "grad_norm": 0.22751271824950478, | |
| "learning_rate": 8.611932853558236e-07, | |
| "loss": 0.0084, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 4.577597840755735, | |
| "grad_norm": 0.16335646731283246, | |
| "learning_rate": 8.394077518395738e-07, | |
| "loss": 0.0079, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 4.582995951417004, | |
| "grad_norm": 0.16417859671024335, | |
| "learning_rate": 8.178954062522226e-07, | |
| "loss": 0.0082, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 4.588394062078272, | |
| "grad_norm": 0.1371732934582776, | |
| "learning_rate": 7.966565553125294e-07, | |
| "loss": 0.0059, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 4.593792172739541, | |
| "grad_norm": 0.1718988181824814, | |
| "learning_rate": 7.756915018398214e-07, | |
| "loss": 0.0068, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 4.59919028340081, | |
| "grad_norm": 0.15416993412420898, | |
| "learning_rate": 7.5500054474968e-07, | |
| "loss": 0.0062, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 4.604588394062079, | |
| "grad_norm": 0.17416110080710895, | |
| "learning_rate": 7.345839790496745e-07, | |
| "loss": 0.0068, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 4.609986504723347, | |
| "grad_norm": 0.16830986049559513, | |
| "learning_rate": 7.144420958351506e-07, | |
| "loss": 0.0075, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 4.615384615384615, | |
| "grad_norm": 0.17367183744141607, | |
| "learning_rate": 6.945751822850955e-07, | |
| "loss": 0.009, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 4.620782726045884, | |
| "grad_norm": 0.19142013846945674, | |
| "learning_rate": 6.749835216580214e-07, | |
| "loss": 0.0092, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 4.626180836707152, | |
| "grad_norm": 0.2067536155528566, | |
| "learning_rate": 6.556673932879488e-07, | |
| "loss": 0.0072, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 4.631578947368421, | |
| "grad_norm": 0.14797804422750227, | |
| "learning_rate": 6.366270725804069e-07, | |
| "loss": 0.0068, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 4.636977058029689, | |
| "grad_norm": 0.19586304150902226, | |
| "learning_rate": 6.178628310085178e-07, | |
| "loss": 0.0103, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 4.6423751686909585, | |
| "grad_norm": 0.24693313978740186, | |
| "learning_rate": 5.993749361091206e-07, | |
| "loss": 0.133, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 4.647773279352227, | |
| "grad_norm": 0.15229371755436824, | |
| "learning_rate": 5.811636514789598e-07, | |
| "loss": 0.0068, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 4.653171390013496, | |
| "grad_norm": 0.2247760397976527, | |
| "learning_rate": 5.632292367709236e-07, | |
| "loss": 0.1055, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 4.658569500674764, | |
| "grad_norm": 0.14424338258050534, | |
| "learning_rate": 5.455719476903442e-07, | |
| "loss": 0.0063, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 4.663967611336032, | |
| "grad_norm": 0.15937610135438382, | |
| "learning_rate": 5.281920359913528e-07, | |
| "loss": 0.0176, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 4.669365721997301, | |
| "grad_norm": 0.1679601387250827, | |
| "learning_rate": 5.110897494732881e-07, | |
| "loss": 0.0064, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 4.674763832658569, | |
| "grad_norm": 0.1526193669278446, | |
| "learning_rate": 4.942653319771618e-07, | |
| "loss": 0.0071, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 4.680161943319838, | |
| "grad_norm": 0.13442330611159828, | |
| "learning_rate": 4.777190233821838e-07, | |
| "loss": 0.0061, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 4.685560053981106, | |
| "grad_norm": 0.1636822162108528, | |
| "learning_rate": 4.6145105960235136e-07, | |
| "loss": 0.0071, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 4.6909581646423755, | |
| "grad_norm": 0.1596200679398143, | |
| "learning_rate": 4.4546167258306296e-07, | |
| "loss": 0.0075, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 4.696356275303644, | |
| "grad_norm": 0.16839642747934516, | |
| "learning_rate": 4.297510902978341e-07, | |
| "loss": 0.0083, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 4.701754385964913, | |
| "grad_norm": 0.159063406884678, | |
| "learning_rate": 4.1431953674503586e-07, | |
| "loss": 0.0065, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 4.707152496626181, | |
| "grad_norm": 0.16942375232844864, | |
| "learning_rate": 3.991672319447015e-07, | |
| "loss": 0.0073, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 4.712550607287449, | |
| "grad_norm": 0.15555943118350962, | |
| "learning_rate": 3.842943919353914e-07, | |
| "loss": 0.0073, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 4.717948717948718, | |
| "grad_norm": 0.25614964211315966, | |
| "learning_rate": 3.6970122877111104e-07, | |
| "loss": 0.2021, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 4.723346828609986, | |
| "grad_norm": 0.19433096617350248, | |
| "learning_rate": 3.5538795051829336e-07, | |
| "loss": 0.0068, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 4.728744939271255, | |
| "grad_norm": 0.16794302830857327, | |
| "learning_rate": 3.413547612528212e-07, | |
| "loss": 0.0068, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 4.734143049932523, | |
| "grad_norm": 0.16974503840122634, | |
| "learning_rate": 3.2760186105712964e-07, | |
| "loss": 0.0083, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 4.7395411605937925, | |
| "grad_norm": 0.13212568788120146, | |
| "learning_rate": 3.1412944601734165e-07, | |
| "loss": 0.0057, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 4.744939271255061, | |
| "grad_norm": 0.1976283645393667, | |
| "learning_rate": 3.0093770822048786e-07, | |
| "loss": 0.0079, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 4.75033738191633, | |
| "grad_norm": 0.15836118640095823, | |
| "learning_rate": 2.8802683575174907e-07, | |
| "loss": 0.0073, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 4.755735492577598, | |
| "grad_norm": 0.14238697443763573, | |
| "learning_rate": 2.753970126917871e-07, | |
| "loss": 0.0055, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 4.761133603238866, | |
| "grad_norm": 0.26842025770841327, | |
| "learning_rate": 2.6304841911412027e-07, | |
| "loss": 0.0182, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 4.766531713900135, | |
| "grad_norm": 0.15757318285422225, | |
| "learning_rate": 2.509812310825499e-07, | |
| "loss": 0.0066, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 4.771929824561403, | |
| "grad_norm": 0.1618576241796593, | |
| "learning_rate": 2.391956206486468e-07, | |
| "loss": 0.0071, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 4.777327935222672, | |
| "grad_norm": 0.1946044303403527, | |
| "learning_rate": 2.2769175584931746e-07, | |
| "loss": 0.0153, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 4.7827260458839405, | |
| "grad_norm": 0.2296158932271888, | |
| "learning_rate": 2.1646980070437973e-07, | |
| "loss": 0.0081, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 4.7881241565452095, | |
| "grad_norm": 0.3387688201949446, | |
| "learning_rate": 2.0552991521425537e-07, | |
| "loss": 0.2726, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 4.793522267206478, | |
| "grad_norm": 0.2482321920693236, | |
| "learning_rate": 1.9487225535765873e-07, | |
| "loss": 0.0211, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 4.798920377867747, | |
| "grad_norm": 0.15086049070317792, | |
| "learning_rate": 1.8449697308939863e-07, | |
| "loss": 0.0062, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 4.804318488529015, | |
| "grad_norm": 0.15240141938966198, | |
| "learning_rate": 1.7440421633818872e-07, | |
| "loss": 0.0062, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 4.809716599190283, | |
| "grad_norm": 0.23578299035015984, | |
| "learning_rate": 1.6459412900455607e-07, | |
| "loss": 0.0125, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 4.815114709851552, | |
| "grad_norm": 0.1470906953689497, | |
| "learning_rate": 1.5506685095878715e-07, | |
| "loss": 0.0065, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 4.82051282051282, | |
| "grad_norm": 0.15212434477146647, | |
| "learning_rate": 1.4582251803892055e-07, | |
| "loss": 0.0065, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 4.825910931174089, | |
| "grad_norm": 0.14747099109095105, | |
| "learning_rate": 1.3686126204882634e-07, | |
| "loss": 0.007, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 4.8313090418353575, | |
| "grad_norm": 0.16444206201175393, | |
| "learning_rate": 1.2818321075631635e-07, | |
| "loss": 0.0059, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 4.8367071524966265, | |
| "grad_norm": 0.15611273866535655, | |
| "learning_rate": 1.19788487891328e-07, | |
| "loss": 0.0075, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 4.842105263157895, | |
| "grad_norm": 0.1781583785611643, | |
| "learning_rate": 1.1167721314415681e-07, | |
| "loss": 0.0079, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 4.847503373819164, | |
| "grad_norm": 0.20606188899572905, | |
| "learning_rate": 1.038495021637509e-07, | |
| "loss": 0.0108, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 4.852901484480432, | |
| "grad_norm": 0.17844153813529373, | |
| "learning_rate": 9.630546655606365e-08, | |
| "loss": 0.0087, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 4.8582995951417, | |
| "grad_norm": 0.1560764016762354, | |
| "learning_rate": 8.904521388245713e-08, | |
| "loss": 0.0068, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 4.863697705802969, | |
| "grad_norm": 0.1487169516366418, | |
| "learning_rate": 8.206884765818102e-08, | |
| "loss": 0.0068, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 4.869095816464237, | |
| "grad_norm": 0.2463461690457611, | |
| "learning_rate": 7.537646735087833e-08, | |
| "loss": 0.1513, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 4.874493927125506, | |
| "grad_norm": 0.2102978007590072, | |
| "learning_rate": 6.896816837918651e-08, | |
| "loss": 0.0084, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 4.8798920377867745, | |
| "grad_norm": 0.20095267792043717, | |
| "learning_rate": 6.284404211135853e-08, | |
| "loss": 0.0065, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 4.8852901484480435, | |
| "grad_norm": 0.15528097841618993, | |
| "learning_rate": 5.700417586397722e-08, | |
| "loss": 0.0065, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 4.890688259109312, | |
| "grad_norm": 0.16940230756369326, | |
| "learning_rate": 5.144865290069634e-08, | |
| "loss": 0.0087, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 4.896086369770581, | |
| "grad_norm": 0.1378627208589382, | |
| "learning_rate": 4.61775524310637e-08, | |
| "loss": 0.0078, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 4.901484480431849, | |
| "grad_norm": 0.20709815634537396, | |
| "learning_rate": 4.119094960938208e-08, | |
| "loss": 0.0138, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 4.906882591093117, | |
| "grad_norm": 0.2007039062473555, | |
| "learning_rate": 3.648891553365008e-08, | |
| "loss": 0.0083, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 4.912280701754386, | |
| "grad_norm": 0.19819856548790862, | |
| "learning_rate": 3.20715172445385e-08, | |
| "loss": 0.1599, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 4.917678812415654, | |
| "grad_norm": 0.1719664928908639, | |
| "learning_rate": 2.793881772443552e-08, | |
| "loss": 0.0071, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 4.923076923076923, | |
| "grad_norm": 0.22465213945689566, | |
| "learning_rate": 2.4090875896551903e-08, | |
| "loss": 0.1584, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 4.9284750337381915, | |
| "grad_norm": 0.16971530754399908, | |
| "learning_rate": 2.0527746624081635e-08, | |
| "loss": 0.0062, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 4.933873144399461, | |
| "grad_norm": 0.16730279275755114, | |
| "learning_rate": 1.72494807094159e-08, | |
| "loss": 0.0064, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 4.939271255060729, | |
| "grad_norm": 0.15712693233721395, | |
| "learning_rate": 1.425612489341921e-08, | |
| "loss": 0.0087, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 4.944669365721998, | |
| "grad_norm": 0.15485321834215096, | |
| "learning_rate": 1.1547721854767712e-08, | |
| "loss": 0.0071, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 4.950067476383266, | |
| "grad_norm": 0.1633322503676924, | |
| "learning_rate": 9.12431020933191e-09, | |
| "loss": 0.0079, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 4.955465587044534, | |
| "grad_norm": 0.13338657253580138, | |
| "learning_rate": 6.985924509639308e-09, | |
| "loss": 0.0054, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 4.960863697705803, | |
| "grad_norm": 0.1615631056618365, | |
| "learning_rate": 5.1325952443681544e-09, | |
| "loss": 0.0072, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 4.966261808367071, | |
| "grad_norm": 0.18025545983832483, | |
| "learning_rate": 3.5643488379211167e-09, | |
| "loss": 0.0069, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 4.97165991902834, | |
| "grad_norm": 0.19857679729061328, | |
| "learning_rate": 2.2812076500411397e-09, | |
| "loss": 0.1414, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 4.9770580296896085, | |
| "grad_norm": 0.20801482450926878, | |
| "learning_rate": 1.283189975505028e-09, | |
| "loss": 0.0678, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 4.982456140350877, | |
| "grad_norm": 0.15867502405159886, | |
| "learning_rate": 5.703100438481102e-10, | |
| "loss": 0.0065, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 4.987854251012146, | |
| "grad_norm": 0.1615039310082322, | |
| "learning_rate": 1.4257801917549884e-10, | |
| "loss": 0.0068, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 4.993252361673415, | |
| "grad_norm": 0.1853912335035784, | |
| "learning_rate": 0.0, | |
| "loss": 0.0084, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 4.993252361673415, | |
| "step": 925, | |
| "total_flos": 1.797075321619333e+17, | |
| "train_loss": 0.26902934940078777, | |
| "train_runtime": 3110.9836, | |
| "train_samples_per_second": 38.091, | |
| "train_steps_per_second": 0.297 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 925, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.797075321619333e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |