{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992531740104555,
  "eval_steps": 500,
  "global_step": 892,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011202389843166542,
      "grad_norm": 2.8858156204223633,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.9141,
      "step": 1
    },
    {
      "epoch": 0.0022404779686333084,
      "grad_norm": 2.804832935333252,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.9218,
      "step": 2
    },
    {
      "epoch": 0.0033607169529499626,
      "grad_norm": 2.7312190532684326,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.9026,
      "step": 3
    },
    {
      "epoch": 0.004480955937266617,
      "grad_norm": 2.466888904571533,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.9196,
      "step": 4
    },
    {
      "epoch": 0.005601194921583271,
      "grad_norm": 2.629023551940918,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.8756,
      "step": 5
    },
    {
      "epoch": 0.006721433905899925,
      "grad_norm": 2.747882604598999,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.9368,
      "step": 6
    },
    {
      "epoch": 0.00784167289021658,
      "grad_norm": 2.7748570442199707,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.9265,
      "step": 7
    },
    {
      "epoch": 0.008961911874533234,
      "grad_norm": 2.38838267326355,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.9154,
      "step": 8
    },
    {
      "epoch": 0.010082150858849889,
      "grad_norm": 2.9623191356658936,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.9322,
      "step": 9
    },
    {
      "epoch": 0.011202389843166542,
      "grad_norm": 2.4339418411254883,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9117,
      "step": 10
    },
    {
      "epoch": 0.012322628827483197,
      "grad_norm": 2.7546939849853516,
      "learning_rate": 5.5e-07,
      "loss": 0.9146,
      "step": 11
    },
    {
      "epoch": 0.01344286781179985,
      "grad_norm": 2.2417454719543457,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.8947,
      "step": 12
    },
    {
      "epoch": 0.014563106796116505,
      "grad_norm": 2.443024158477783,
      "learning_rate": 6.5e-07,
      "loss": 0.8952,
      "step": 13
    },
    {
      "epoch": 0.01568334578043316,
      "grad_norm": 2.1837103366851807,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.9105,
      "step": 14
    },
    {
      "epoch": 0.016803584764749812,
      "grad_norm": 2.1527903079986572,
      "learning_rate": 7.5e-07,
      "loss": 0.8899,
      "step": 15
    },
    {
      "epoch": 0.017923823749066467,
      "grad_norm": 2.161756753921509,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.8655,
      "step": 16
    },
    {
      "epoch": 0.019044062733383122,
      "grad_norm": 1.898628830909729,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.85,
      "step": 17
    },
    {
      "epoch": 0.020164301717699777,
      "grad_norm": 1.7848337888717651,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8654,
      "step": 18
    },
    {
      "epoch": 0.02128454070201643,
      "grad_norm": 1.9204438924789429,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.8685,
      "step": 19
    },
    {
      "epoch": 0.022404779686333084,
      "grad_norm": 1.6950091123580933,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.8981,
      "step": 20
    },
    {
      "epoch": 0.02352501867064974,
      "grad_norm": 1.774417519569397,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.8953,
      "step": 21
    },
    {
      "epoch": 0.024645257654966394,
      "grad_norm": 1.7463773488998413,
      "learning_rate": 1.1e-06,
      "loss": 0.8668,
      "step": 22
    },
    {
      "epoch": 0.025765496639283045,
      "grad_norm": 1.5141345262527466,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.8523,
      "step": 23
    },
    {
      "epoch": 0.0268857356235997,
      "grad_norm": 1.5256919860839844,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.837,
      "step": 24
    },
    {
      "epoch": 0.028005974607916356,
      "grad_norm": 1.3926639556884766,
      "learning_rate": 1.25e-06,
      "loss": 0.8498,
      "step": 25
    },
    {
      "epoch": 0.02912621359223301,
      "grad_norm": 1.2967628240585327,
      "learning_rate": 1.3e-06,
      "loss": 0.7906,
      "step": 26
    },
    {
      "epoch": 0.030246452576549662,
      "grad_norm": 1.2432132959365845,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.8143,
      "step": 27
    },
    {
      "epoch": 0.03136669156086632,
      "grad_norm": 1.1862553358078003,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.7959,
      "step": 28
    },
    {
      "epoch": 0.03248693054518297,
      "grad_norm": 1.18585205078125,
      "learning_rate": 1.45e-06,
      "loss": 0.8263,
      "step": 29
    },
    {
      "epoch": 0.033607169529499624,
      "grad_norm": 1.118574857711792,
      "learning_rate": 1.5e-06,
      "loss": 0.7765,
      "step": 30
    },
    {
      "epoch": 0.03472740851381628,
      "grad_norm": 1.1179440021514893,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.8037,
      "step": 31
    },
    {
      "epoch": 0.035847647498132934,
      "grad_norm": 1.100803017616272,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.8113,
      "step": 32
    },
    {
      "epoch": 0.03696788648244959,
      "grad_norm": 1.097655177116394,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.7993,
      "step": 33
    },
    {
      "epoch": 0.038088125466766244,
      "grad_norm": 1.0662972927093506,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.8029,
      "step": 34
    },
    {
      "epoch": 0.039208364451082896,
      "grad_norm": 1.109535574913025,
      "learning_rate": 1.75e-06,
      "loss": 0.8218,
      "step": 35
    },
    {
      "epoch": 0.040328603435399554,
      "grad_norm": 1.0223349332809448,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.7871,
      "step": 36
    },
    {
      "epoch": 0.041448842419716206,
      "grad_norm": 1.0227162837982178,
      "learning_rate": 1.85e-06,
      "loss": 0.7944,
      "step": 37
    },
    {
      "epoch": 0.04256908140403286,
      "grad_norm": 0.9250116348266602,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7522,
      "step": 38
    },
    {
      "epoch": 0.043689320388349516,
      "grad_norm": 0.9528149366378784,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.7645,
      "step": 39
    },
    {
      "epoch": 0.04480955937266617,
      "grad_norm": 0.8865862488746643,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7861,
      "step": 40
    },
    {
      "epoch": 0.045929798356982826,
      "grad_norm": 0.9337865114212036,
      "learning_rate": 2.05e-06,
      "loss": 0.7427,
      "step": 41
    },
    {
      "epoch": 0.04705003734129948,
      "grad_norm": 0.880203127861023,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.7411,
      "step": 42
    },
    {
      "epoch": 0.04817027632561613,
      "grad_norm": 0.8911599516868591,
      "learning_rate": 2.15e-06,
      "loss": 0.7434,
      "step": 43
    },
    {
      "epoch": 0.04929051530993279,
      "grad_norm": 0.8979325294494629,
      "learning_rate": 2.2e-06,
      "loss": 0.7598,
      "step": 44
    },
    {
      "epoch": 0.05041075429424944,
      "grad_norm": 0.8859411478042603,
      "learning_rate": 2.25e-06,
      "loss": 0.7646,
      "step": 45
    },
    {
      "epoch": 0.05153099327856609,
      "grad_norm": 0.8641071915626526,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.7559,
      "step": 46
    },
    {
      "epoch": 0.05265123226288275,
      "grad_norm": 2.738354444503784,
      "learning_rate": 2.35e-06,
      "loss": 0.7826,
      "step": 47
    },
    {
      "epoch": 0.0537714712471994,
      "grad_norm": 0.847560465335846,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.7469,
      "step": 48
    },
    {
      "epoch": 0.05489171023151606,
      "grad_norm": 0.8350309729576111,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.7878,
      "step": 49
    },
    {
      "epoch": 0.05601194921583271,
      "grad_norm": 0.8858978748321533,
      "learning_rate": 2.5e-06,
      "loss": 0.7154,
      "step": 50
    },
    {
      "epoch": 0.05713218820014936,
      "grad_norm": 0.8486636877059937,
      "learning_rate": 2.55e-06,
      "loss": 0.7521,
      "step": 51
    },
    {
      "epoch": 0.05825242718446602,
      "grad_norm": 0.8095760345458984,
      "learning_rate": 2.6e-06,
      "loss": 0.7355,
      "step": 52
    },
    {
      "epoch": 0.05937266616878267,
      "grad_norm": 1.1256295442581177,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.7312,
      "step": 53
    },
    {
      "epoch": 0.060492905153099324,
      "grad_norm": 0.8093723058700562,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.738,
      "step": 54
    },
    {
      "epoch": 0.06161314413741598,
      "grad_norm": 0.807765543460846,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.7055,
      "step": 55
    },
    {
      "epoch": 0.06273338312173264,
      "grad_norm": 0.8036230206489563,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.7354,
      "step": 56
    },
    {
      "epoch": 0.0638536221060493,
      "grad_norm": 0.809104859828949,
      "learning_rate": 2.85e-06,
      "loss": 0.7469,
      "step": 57
    },
    {
      "epoch": 0.06497386109036594,
      "grad_norm": 0.8124133944511414,
      "learning_rate": 2.9e-06,
      "loss": 0.7039,
      "step": 58
    },
    {
      "epoch": 0.0660941000746826,
      "grad_norm": 0.8039435744285583,
      "learning_rate": 2.95e-06,
      "loss": 0.7209,
      "step": 59
    },
    {
      "epoch": 0.06721433905899925,
      "grad_norm": 0.8367541432380676,
      "learning_rate": 3e-06,
      "loss": 0.7759,
      "step": 60
    },
    {
      "epoch": 0.06833457804331591,
      "grad_norm": 0.7983250021934509,
      "learning_rate": 3.05e-06,
      "loss": 0.7251,
      "step": 61
    },
    {
      "epoch": 0.06945481702763256,
      "grad_norm": 0.7989967465400696,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.7164,
      "step": 62
    },
    {
      "epoch": 0.07057505601194922,
      "grad_norm": 0.7835646867752075,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.7124,
      "step": 63
    },
    {
      "epoch": 0.07169529499626587,
      "grad_norm": 0.7820047736167908,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6997,
      "step": 64
    },
    {
      "epoch": 0.07281553398058252,
      "grad_norm": 0.8633654713630676,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.7248,
      "step": 65
    },
    {
      "epoch": 0.07393577296489919,
      "grad_norm": 0.7743967175483704,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.7237,
      "step": 66
    },
    {
      "epoch": 0.07505601194921584,
      "grad_norm": 0.7750105857849121,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.6874,
      "step": 67
    },
    {
      "epoch": 0.07617625093353249,
      "grad_norm": 0.7676857113838196,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.6771,
      "step": 68
    },
    {
      "epoch": 0.07729648991784914,
      "grad_norm": 0.7877640724182129,
      "learning_rate": 3.45e-06,
      "loss": 0.7063,
      "step": 69
    },
    {
      "epoch": 0.07841672890216579,
      "grad_norm": 0.7854837775230408,
      "learning_rate": 3.5e-06,
      "loss": 0.6957,
      "step": 70
    },
    {
      "epoch": 0.07953696788648244,
      "grad_norm": 0.8387333750724792,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.6923,
      "step": 71
    },
    {
      "epoch": 0.08065720687079911,
      "grad_norm": 0.7769498229026794,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.7229,
      "step": 72
    },
    {
      "epoch": 0.08177744585511576,
      "grad_norm": 0.8273895978927612,
      "learning_rate": 3.65e-06,
      "loss": 0.7011,
      "step": 73
    },
    {
      "epoch": 0.08289768483943241,
      "grad_norm": 0.7759560346603394,
      "learning_rate": 3.7e-06,
      "loss": 0.7246,
      "step": 74
    },
    {
      "epoch": 0.08401792382374906,
      "grad_norm": 0.7503573298454285,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6711,
      "step": 75
    },
    {
      "epoch": 0.08513816280806571,
      "grad_norm": 0.7817795276641846,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.6771,
      "step": 76
    },
    {
      "epoch": 0.08625840179238238,
      "grad_norm": 0.7503968477249146,
      "learning_rate": 3.85e-06,
      "loss": 0.6903,
      "step": 77
    },
    {
      "epoch": 0.08737864077669903,
      "grad_norm": 0.7876687049865723,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.6963,
      "step": 78
    },
    {
      "epoch": 0.08849887976101568,
      "grad_norm": 0.7441173791885376,
      "learning_rate": 3.95e-06,
      "loss": 0.6872,
      "step": 79
    },
    {
      "epoch": 0.08961911874533234,
      "grad_norm": 0.7741108536720276,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6563,
      "step": 80
    },
    {
      "epoch": 0.09073935772964899,
      "grad_norm": 0.7747477889060974,
      "learning_rate": 4.05e-06,
      "loss": 0.7077,
      "step": 81
    },
    {
      "epoch": 0.09185959671396565,
      "grad_norm": 0.8173837065696716,
      "learning_rate": 4.1e-06,
      "loss": 0.708,
      "step": 82
    },
    {
      "epoch": 0.0929798356982823,
      "grad_norm": 0.7583895325660706,
      "learning_rate": 4.15e-06,
      "loss": 0.6696,
      "step": 83
    },
    {
      "epoch": 0.09410007468259896,
      "grad_norm": 0.7949450612068176,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.7041,
      "step": 84
    },
    {
      "epoch": 0.09522031366691561,
      "grad_norm": 0.7842839360237122,
      "learning_rate": 4.25e-06,
      "loss": 0.6552,
      "step": 85
    },
    {
      "epoch": 0.09634055265123226,
      "grad_norm": 0.769949734210968,
      "learning_rate": 4.3e-06,
      "loss": 0.7012,
      "step": 86
    },
    {
      "epoch": 0.09746079163554892,
      "grad_norm": 0.7680435180664062,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.7026,
      "step": 87
    },
    {
      "epoch": 0.09858103061986558,
      "grad_norm": 0.7704882621765137,
      "learning_rate": 4.4e-06,
      "loss": 0.676,
      "step": 88
    },
    {
      "epoch": 0.09970126960418223,
      "grad_norm": 0.7687551379203796,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.662,
      "step": 89
    },
    {
      "epoch": 0.10082150858849888,
      "grad_norm": 0.8296576738357544,
      "learning_rate": 4.5e-06,
      "loss": 0.6815,
      "step": 90
    },
    {
      "epoch": 0.10194174757281553,
      "grad_norm": 0.807574987411499,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.6959,
      "step": 91
    },
    {
      "epoch": 0.10306198655713218,
      "grad_norm": 0.8537940979003906,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.6568,
      "step": 92
    },
    {
      "epoch": 0.10418222554144885,
      "grad_norm": 0.776905357837677,
      "learning_rate": 4.65e-06,
      "loss": 0.7027,
      "step": 93
    },
    {
      "epoch": 0.1053024645257655,
      "grad_norm": 0.7668991684913635,
      "learning_rate": 4.7e-06,
      "loss": 0.6537,
      "step": 94
    },
    {
      "epoch": 0.10642270351008215,
      "grad_norm": 0.7789910435676575,
      "learning_rate": 4.75e-06,
      "loss": 0.6637,
      "step": 95
    },
    {
      "epoch": 0.1075429424943988,
      "grad_norm": 0.7807944416999817,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.7103,
      "step": 96
    },
    {
      "epoch": 0.10866318147871545,
      "grad_norm": 0.7590618133544922,
      "learning_rate": 4.85e-06,
      "loss": 0.6765,
      "step": 97
    },
    {
      "epoch": 0.10978342046303212,
      "grad_norm": 0.7463409900665283,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.6795,
      "step": 98
    },
    {
      "epoch": 0.11090365944734877,
      "grad_norm": 0.8291321992874146,
      "learning_rate": 4.95e-06,
      "loss": 0.7311,
      "step": 99
    },
    {
      "epoch": 0.11202389843166542,
      "grad_norm": 0.7995114326477051,
      "learning_rate": 5e-06,
      "loss": 0.684,
      "step": 100
    },
    {
      "epoch": 0.11314413741598207,
      "grad_norm": 0.7649957537651062,
      "learning_rate": 4.9999995527397415e-06,
      "loss": 0.6645,
      "step": 101
    },
    {
      "epoch": 0.11426437640029873,
      "grad_norm": 0.8313412070274353,
      "learning_rate": 4.999998210959126e-06,
      "loss": 0.6656,
      "step": 102
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 0.7994295358657837,
      "learning_rate": 4.999995974658632e-06,
      "loss": 0.6562,
      "step": 103
    },
    {
      "epoch": 0.11650485436893204,
      "grad_norm": 0.8024242520332336,
      "learning_rate": 4.999992843839062e-06,
      "loss": 0.6946,
      "step": 104
    },
    {
      "epoch": 0.1176250933532487,
      "grad_norm": 0.7950218319892883,
      "learning_rate": 4.999988818501535e-06,
      "loss": 0.6833,
      "step": 105
    },
    {
      "epoch": 0.11874533233756535,
      "grad_norm": 0.8187536597251892,
      "learning_rate": 4.9999838986474925e-06,
      "loss": 0.6869,
      "step": 106
    },
    {
      "epoch": 0.119865571321882,
      "grad_norm": 0.7608385682106018,
      "learning_rate": 4.999978084278693e-06,
      "loss": 0.6536,
      "step": 107
    },
    {
      "epoch": 0.12098581030619865,
      "grad_norm": 0.9910794496536255,
      "learning_rate": 4.999971375397218e-06,
      "loss": 0.6444,
      "step": 108
    },
    {
      "epoch": 0.12210604929051531,
      "grad_norm": 0.8221011757850647,
      "learning_rate": 4.9999637720054675e-06,
      "loss": 0.6986,
      "step": 109
    },
    {
      "epoch": 0.12322628827483197,
      "grad_norm": 0.8212369680404663,
      "learning_rate": 4.999955274106163e-06,
      "loss": 0.6559,
      "step": 110
    },
    {
      "epoch": 0.12434652725914862,
      "grad_norm": 0.7603834867477417,
      "learning_rate": 4.999945881702344e-06,
      "loss": 0.6644,
      "step": 111
    },
    {
      "epoch": 0.12546676624346528,
      "grad_norm": 0.864615261554718,
      "learning_rate": 4.999935594797372e-06,
      "loss": 0.7045,
      "step": 112
    },
    {
      "epoch": 0.12658700522778193,
      "grad_norm": 0.7939414381980896,
      "learning_rate": 4.999924413394926e-06,
      "loss": 0.6506,
      "step": 113
    },
    {
      "epoch": 0.1277072442120986,
      "grad_norm": 0.7965752482414246,
      "learning_rate": 4.99991233749901e-06,
      "loss": 0.6634,
      "step": 114
    },
    {
      "epoch": 0.12882748319641524,
      "grad_norm": 0.809953510761261,
      "learning_rate": 4.999899367113943e-06,
      "loss": 0.6613,
      "step": 115
    },
    {
      "epoch": 0.1299477221807319,
      "grad_norm": 0.7917526960372925,
      "learning_rate": 4.999885502244365e-06,
      "loss": 0.6503,
      "step": 116
    },
    {
      "epoch": 0.13106796116504854,
      "grad_norm": 0.7986327409744263,
      "learning_rate": 4.999870742895239e-06,
      "loss": 0.6607,
      "step": 117
    },
    {
      "epoch": 0.1321882001493652,
      "grad_norm": 1.0296788215637207,
      "learning_rate": 4.999855089071844e-06,
      "loss": 0.6705,
      "step": 118
    },
    {
      "epoch": 0.13330843913368184,
      "grad_norm": 0.7908150553703308,
      "learning_rate": 4.999838540779782e-06,
      "loss": 0.6622,
      "step": 119
    },
    {
      "epoch": 0.1344286781179985,
      "grad_norm": 0.8663723468780518,
      "learning_rate": 4.999821098024975e-06,
      "loss": 0.6592,
      "step": 120
    },
    {
      "epoch": 0.13554891710231515,
      "grad_norm": 0.8147454857826233,
      "learning_rate": 4.999802760813662e-06,
      "loss": 0.6478,
      "step": 121
    },
    {
      "epoch": 0.13666915608663183,
      "grad_norm": 0.810529351234436,
      "learning_rate": 4.9997835291524065e-06,
      "loss": 0.6438,
      "step": 122
    },
    {
      "epoch": 0.13778939507094848,
      "grad_norm": 0.7966436743736267,
      "learning_rate": 4.999763403048089e-06,
      "loss": 0.6425,
      "step": 123
    },
    {
      "epoch": 0.13890963405526513,
      "grad_norm": 0.789413571357727,
      "learning_rate": 4.999742382507909e-06,
      "loss": 0.6227,
      "step": 124
    },
    {
      "epoch": 0.14002987303958178,
      "grad_norm": 0.7998204827308655,
      "learning_rate": 4.999720467539391e-06,
      "loss": 0.6635,
      "step": 125
    },
    {
      "epoch": 0.14115011202389843,
      "grad_norm": 0.8009337782859802,
      "learning_rate": 4.999697658150374e-06,
      "loss": 0.6673,
      "step": 126
    },
    {
      "epoch": 0.14227035100821508,
      "grad_norm": 0.7708600163459778,
      "learning_rate": 4.99967395434902e-06,
      "loss": 0.6336,
      "step": 127
    },
    {
      "epoch": 0.14339058999253174,
      "grad_norm": 0.7975388765335083,
      "learning_rate": 4.999649356143811e-06,
      "loss": 0.6673,
      "step": 128
    },
    {
      "epoch": 0.1445108289768484,
      "grad_norm": 0.83116215467453,
      "learning_rate": 4.999623863543548e-06,
      "loss": 0.6902,
      "step": 129
    },
    {
      "epoch": 0.14563106796116504,
      "grad_norm": 0.8246234655380249,
      "learning_rate": 4.999597476557352e-06,
      "loss": 0.6586,
      "step": 130
    },
    {
      "epoch": 0.1467513069454817,
      "grad_norm": 0.7604647278785706,
      "learning_rate": 4.999570195194665e-06,
      "loss": 0.6455,
      "step": 131
    },
    {
      "epoch": 0.14787154592979837,
      "grad_norm": 0.7587922215461731,
      "learning_rate": 4.9995420194652475e-06,
      "loss": 0.6107,
      "step": 132
    },
    {
      "epoch": 0.14899178491411502,
      "grad_norm": 0.909156858921051,
      "learning_rate": 4.999512949379183e-06,
      "loss": 0.679,
      "step": 133
    },
    {
      "epoch": 0.15011202389843167,
      "grad_norm": 0.8901562690734863,
      "learning_rate": 4.999482984946872e-06,
      "loss": 0.6887,
      "step": 134
    },
    {
      "epoch": 0.15123226288274833,
      "grad_norm": 0.7636528611183167,
      "learning_rate": 4.999452126179035e-06,
      "loss": 0.6514,
      "step": 135
    },
    {
      "epoch": 0.15235250186706498,
      "grad_norm": 0.8096560835838318,
      "learning_rate": 4.999420373086715e-06,
      "loss": 0.6937,
      "step": 136
    },
    {
      "epoch": 0.15347274085138163,
      "grad_norm": 0.8607243299484253,
      "learning_rate": 4.999387725681274e-06,
      "loss": 0.6812,
      "step": 137
    },
    {
      "epoch": 0.15459297983569828,
      "grad_norm": 0.8863621354103088,
      "learning_rate": 4.9993541839743906e-06,
      "loss": 0.6829,
      "step": 138
    },
    {
      "epoch": 0.15571321882001493,
      "grad_norm": 0.8292368054389954,
      "learning_rate": 4.99931974797807e-06,
      "loss": 0.6714,
      "step": 139
    },
    {
      "epoch": 0.15683345780433158,
      "grad_norm": 0.8053487539291382,
      "learning_rate": 4.999284417704631e-06,
      "loss": 0.656,
      "step": 140
    },
    {
      "epoch": 0.15795369678864823,
      "grad_norm": 0.8153090476989746,
      "learning_rate": 4.999248193166715e-06,
      "loss": 0.6529,
      "step": 141
    },
    {
      "epoch": 0.1590739357729649,
      "grad_norm": 0.8820142149925232,
      "learning_rate": 4.999211074377285e-06,
      "loss": 0.6507,
      "step": 142
    },
    {
      "epoch": 0.16019417475728157,
      "grad_norm": 0.8246594071388245,
      "learning_rate": 4.999173061349623e-06,
      "loss": 0.6767,
      "step": 143
    },
    {
      "epoch": 0.16131441374159822,
      "grad_norm": 0.7902995944023132,
      "learning_rate": 4.999134154097328e-06,
      "loss": 0.6596,
      "step": 144
    },
    {
      "epoch": 0.16243465272591487,
      "grad_norm": 0.8101757764816284,
      "learning_rate": 4.999094352634322e-06,
      "loss": 0.6462,
      "step": 145
    },
    {
      "epoch": 0.16355489171023152,
      "grad_norm": 0.8051759600639343,
      "learning_rate": 4.999053656974847e-06,
      "loss": 0.6659,
      "step": 146
    },
    {
      "epoch": 0.16467513069454817,
      "grad_norm": 0.8006038665771484,
      "learning_rate": 4.999012067133465e-06,
      "loss": 0.6865,
      "step": 147
    },
    {
      "epoch": 0.16579536967886482,
      "grad_norm": 0.8025104999542236,
      "learning_rate": 4.998969583125055e-06,
      "loss": 0.6558,
      "step": 148
    },
    {
      "epoch": 0.16691560866318148,
      "grad_norm": 0.7755445241928101,
      "learning_rate": 4.99892620496482e-06,
      "loss": 0.6373,
      "step": 149
    },
    {
      "epoch": 0.16803584764749813,
      "grad_norm": 0.8465038537979126,
      "learning_rate": 4.99888193266828e-06,
      "loss": 0.6476,
      "step": 150
    },
    {
      "epoch": 0.16915608663181478,
      "grad_norm": 0.8304318189620972,
      "learning_rate": 4.9988367662512775e-06,
      "loss": 0.6636,
      "step": 151
    },
    {
      "epoch": 0.17027632561613143,
      "grad_norm": 1.0744889974594116,
      "learning_rate": 4.99879070572997e-06,
      "loss": 0.6329,
      "step": 152
    },
    {
      "epoch": 0.1713965646004481,
      "grad_norm": 0.8096840381622314,
      "learning_rate": 4.998743751120843e-06,
      "loss": 0.6515,
      "step": 153
    },
    {
      "epoch": 0.17251680358476476,
      "grad_norm": 0.8647177219390869,
      "learning_rate": 4.998695902440693e-06,
      "loss": 0.669,
      "step": 154
    },
    {
      "epoch": 0.1736370425690814,
      "grad_norm": 0.8000180125236511,
      "learning_rate": 4.9986471597066435e-06,
      "loss": 0.6353,
      "step": 155
    },
    {
      "epoch": 0.17475728155339806,
      "grad_norm": 0.7994810938835144,
      "learning_rate": 4.998597522936134e-06,
      "loss": 0.6312,
      "step": 156
    },
    {
      "epoch": 0.17587752053771472,
      "grad_norm": 0.8086467385292053,
      "learning_rate": 4.9985469921469245e-06,
      "loss": 0.6426,
      "step": 157
    },
    {
      "epoch": 0.17699775952203137,
      "grad_norm": 0.8089617490768433,
      "learning_rate": 4.998495567357097e-06,
      "loss": 0.6671,
      "step": 158
    },
    {
      "epoch": 0.17811799850634802,
      "grad_norm": 0.7937355041503906,
      "learning_rate": 4.9984432485850495e-06,
      "loss": 0.6433,
      "step": 159
    },
    {
      "epoch": 0.17923823749066467,
      "grad_norm": 0.7971526980400085,
      "learning_rate": 4.998390035849503e-06,
      "loss": 0.6371,
      "step": 160
    },
    {
      "epoch": 0.18035847647498132,
      "grad_norm": 0.8417288064956665,
      "learning_rate": 4.998335929169498e-06,
      "loss": 0.6607,
      "step": 161
    },
    {
      "epoch": 0.18147871545929797,
      "grad_norm": 2.7477307319641113,
      "learning_rate": 4.998280928564394e-06,
      "loss": 0.6445,
      "step": 162
    },
    {
      "epoch": 0.18259895444361463,
      "grad_norm": 0.8154010772705078,
      "learning_rate": 4.998225034053871e-06,
      "loss": 0.6518,
      "step": 163
    },
    {
      "epoch": 0.1837191934279313,
      "grad_norm": 0.8180714249610901,
      "learning_rate": 4.9981682456579275e-06,
      "loss": 0.6524,
      "step": 164
    },
    {
      "epoch": 0.18483943241224796,
      "grad_norm": 0.8008469939231873,
      "learning_rate": 4.998110563396883e-06,
      "loss": 0.6508,
      "step": 165
    },
    {
      "epoch": 0.1859596713965646,
      "grad_norm": 0.7731476426124573,
      "learning_rate": 4.998051987291378e-06,
      "loss": 0.6262,
      "step": 166
    },
    {
      "epoch": 0.18707991038088126,
      "grad_norm": 0.7730596661567688,
      "learning_rate": 4.99799251736237e-06,
      "loss": 0.6356,
      "step": 167
    },
    {
      "epoch": 0.1882001493651979,
      "grad_norm": 0.8305966854095459,
      "learning_rate": 4.997932153631139e-06,
      "loss": 0.6584,
      "step": 168
    },
    {
      "epoch": 0.18932038834951456,
      "grad_norm": 0.8761897683143616,
      "learning_rate": 4.997870896119282e-06,
      "loss": 0.6593,
      "step": 169
    },
    {
      "epoch": 0.19044062733383121,
      "grad_norm": 0.8341963887214661,
      "learning_rate": 4.9978087448487204e-06,
      "loss": 0.6293,
      "step": 170
    },
    {
      "epoch": 0.19156086631814787,
      "grad_norm": 0.8244994282722473,
      "learning_rate": 4.997745699841689e-06,
      "loss": 0.6512,
      "step": 171
    },
    {
      "epoch": 0.19268110530246452,
      "grad_norm": 0.8051140904426575,
      "learning_rate": 4.997681761120749e-06,
      "loss": 0.6535,
      "step": 172
    },
    {
      "epoch": 0.19380134428678117,
      "grad_norm": 0.7865309119224548,
      "learning_rate": 4.997616928708774e-06,
      "loss": 0.6457,
      "step": 173
    },
    {
      "epoch": 0.19492158327109785,
      "grad_norm": 0.7985602021217346,
      "learning_rate": 4.997551202628966e-06,
      "loss": 0.6562,
      "step": 174
    },
    {
      "epoch": 0.1960418222554145,
      "grad_norm": 0.7812699675559998,
      "learning_rate": 4.99748458290484e-06,
      "loss": 0.6505,
      "step": 175
    },
    {
      "epoch": 0.19716206123973115,
      "grad_norm": 0.7868354916572571,
      "learning_rate": 4.997417069560234e-06,
      "loss": 0.6295,
      "step": 176
    },
    {
      "epoch": 0.1982823002240478,
      "grad_norm": 0.8139058947563171,
      "learning_rate": 4.997348662619305e-06,
      "loss": 0.6637,
      "step": 177
    },
    {
      "epoch": 0.19940253920836445,
      "grad_norm": 0.7864823937416077,
      "learning_rate": 4.997279362106528e-06,
      "loss": 0.6478,
      "step": 178
    },
    {
      "epoch": 0.2005227781926811,
      "grad_norm": 0.8095872402191162,
      "learning_rate": 4.9972091680467e-06,
      "loss": 0.668,
      "step": 179
    },
    {
      "epoch": 0.20164301717699776,
      "grad_norm": 0.8363040089607239,
      "learning_rate": 4.997138080464938e-06,
      "loss": 0.6757,
      "step": 180
    },
    {
      "epoch": 0.2027632561613144,
      "grad_norm": 0.8066425323486328,
      "learning_rate": 4.9970660993866756e-06,
      "loss": 0.6617,
      "step": 181
    },
    {
      "epoch": 0.20388349514563106,
      "grad_norm": 0.8129490613937378,
      "learning_rate": 4.996993224837672e-06,
      "loss": 0.6463,
      "step": 182
    },
    {
      "epoch": 0.2050037341299477,
      "grad_norm": 0.8092365264892578,
      "learning_rate": 4.996919456843998e-06,
      "loss": 0.6441,
      "step": 183
    },
    {
      "epoch": 0.20612397311426436,
      "grad_norm": 0.7847099900245667,
      "learning_rate": 4.996844795432051e-06,
      "loss": 0.6608,
      "step": 184
    },
    {
      "epoch": 0.20724421209858104,
      "grad_norm": 0.7771169543266296,
      "learning_rate": 4.9967692406285454e-06,
      "loss": 0.6198,
      "step": 185
    },
    {
      "epoch": 0.2083644510828977,
      "grad_norm": 0.8057869076728821,
      "learning_rate": 4.996692792460513e-06,
      "loss": 0.6312,
      "step": 186
    },
    {
      "epoch": 0.20948469006721435,
      "grad_norm": 0.8194419145584106,
      "learning_rate": 4.996615450955311e-06,
      "loss": 0.66,
      "step": 187
    },
    {
      "epoch": 0.210604929051531,
      "grad_norm": 0.8123747110366821,
      "learning_rate": 4.99653721614061e-06,
      "loss": 0.6527,
      "step": 188
    },
    {
      "epoch": 0.21172516803584765,
      "grad_norm": 0.8024148344993591,
      "learning_rate": 4.996458088044405e-06,
      "loss": 0.6426,
      "step": 189
    },
    {
      "epoch": 0.2128454070201643,
      "grad_norm": 0.8563250303268433,
      "learning_rate": 4.996378066695008e-06,
      "loss": 0.6444,
      "step": 190
    },
    {
      "epoch": 0.21396564600448095,
      "grad_norm": 0.8127154111862183,
      "learning_rate": 4.99629715212105e-06,
      "loss": 0.6478,
      "step": 191
    },
    {
      "epoch": 0.2150858849887976,
      "grad_norm": 0.816790759563446,
      "learning_rate": 4.996215344351485e-06,
      "loss": 0.6304,
      "step": 192
    },
    {
      "epoch": 0.21620612397311426,
      "grad_norm": 0.8222427368164062,
      "learning_rate": 4.9961326434155834e-06,
      "loss": 0.6418,
      "step": 193
    },
    {
      "epoch": 0.2173263629574309,
      "grad_norm": 0.8139140605926514,
      "learning_rate": 4.996049049342936e-06,
      "loss": 0.6305,
      "step": 194
    },
    {
      "epoch": 0.21844660194174756,
      "grad_norm": 0.8003841042518616,
      "learning_rate": 4.995964562163455e-06,
      "loss": 0.6474,
      "step": 195
    },
    {
      "epoch": 0.21956684092606424,
      "grad_norm": 0.8458191156387329,
      "learning_rate": 4.9958791819073684e-06,
      "loss": 0.6637,
      "step": 196
    },
    {
      "epoch": 0.2206870799103809,
      "grad_norm": 0.801009476184845,
      "learning_rate": 4.995792908605227e-06,
      "loss": 0.6388,
      "step": 197
    },
    {
      "epoch": 0.22180731889469754,
      "grad_norm": 0.8174487948417664,
      "learning_rate": 4.995705742287901e-06,
      "loss": 0.653,
      "step": 198
    },
    {
      "epoch": 0.2229275578790142,
      "grad_norm": 0.814853310585022,
      "learning_rate": 4.995617682986578e-06,
      "loss": 0.6217,
      "step": 199
    },
    {
      "epoch": 0.22404779686333084,
      "grad_norm": 0.8986819982528687,
      "learning_rate": 4.995528730732766e-06,
      "loss": 0.6547,
      "step": 200
    },
    {
      "epoch": 0.2251680358476475,
      "grad_norm": 0.8147602081298828,
      "learning_rate": 4.995438885558294e-06,
      "loss": 0.6475,
      "step": 201
    },
    {
      "epoch": 0.22628827483196415,
      "grad_norm": 0.8108645081520081,
      "learning_rate": 4.995348147495309e-06,
      "loss": 0.6434,
      "step": 202
    },
    {
      "epoch": 0.2274085138162808,
      "grad_norm": 0.8559293746948242,
      "learning_rate": 4.995256516576278e-06,
      "loss": 0.6559,
      "step": 203
    },
    {
      "epoch": 0.22852875280059745,
      "grad_norm": 0.810727596282959,
      "learning_rate": 4.995163992833986e-06,
      "loss": 0.6365,
      "step": 204
    },
    {
      "epoch": 0.2296489917849141,
      "grad_norm": 0.8541222214698792,
      "learning_rate": 4.99507057630154e-06,
      "loss": 0.6624,
      "step": 205
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.8096019625663757,
      "learning_rate": 4.994976267012366e-06,
      "loss": 0.6213,
      "step": 206
    },
    {
      "epoch": 0.23188946975354743,
      "grad_norm": 0.868386447429657,
      "learning_rate": 4.994881065000206e-06,
      "loss": 0.6397,
      "step": 207
    },
    {
      "epoch": 0.23300970873786409,
      "grad_norm": 0.8383010029792786,
      "learning_rate": 4.9947849702991275e-06,
      "loss": 0.6477,
      "step": 208
    },
    {
      "epoch": 0.23412994772218074,
      "grad_norm": 0.8286524415016174,
      "learning_rate": 4.994687982943511e-06,
      "loss": 0.6321,
      "step": 209
    },
    {
      "epoch": 0.2352501867064974,
      "grad_norm": 0.7969548106193542,
      "learning_rate": 4.994590102968061e-06,
      "loss": 0.6284,
      "step": 210
    },
    {
      "epoch": 0.23637042569081404,
      "grad_norm": 0.8124633431434631,
      "learning_rate": 4.994491330407799e-06,
      "loss": 0.6508,
      "step": 211
    },
    {
      "epoch": 0.2374906646751307,
      "grad_norm": 0.8430159687995911,
      "learning_rate": 4.994391665298066e-06,
      "loss": 0.6392,
      "step": 212
    },
    {
      "epoch": 0.23861090365944734,
      "grad_norm": 1.1165465116500854,
      "learning_rate": 4.994291107674525e-06,
      "loss": 0.6609,
      "step": 213
    },
    {
      "epoch": 0.239731142643764,
      "grad_norm": 0.7861813902854919,
      "learning_rate": 4.9941896575731555e-06,
      "loss": 0.627,
      "step": 214
    },
    {
      "epoch": 0.24085138162808065,
      "grad_norm": 0.8439143300056458,
      "learning_rate": 4.994087315030256e-06,
      "loss": 0.6511,
      "step": 215
    },
    {
      "epoch": 0.2419716206123973,
      "grad_norm": 0.9279224276542664,
      "learning_rate": 4.993984080082447e-06,
      "loss": 0.6276,
      "step": 216
    },
    {
      "epoch": 0.24309185959671398,
      "grad_norm": 0.8302883505821228,
      "learning_rate": 4.993879952766666e-06,
      "loss": 0.6399,
      "step": 217
    },
    {
      "epoch": 0.24421209858103063,
      "grad_norm": 0.772523820400238,
      "learning_rate": 4.993774933120171e-06,
      "loss": 0.6284,
      "step": 218
    },
    {
      "epoch": 0.24533233756534728,
      "grad_norm": 0.8319805264472961,
      "learning_rate": 4.993669021180539e-06,
      "loss": 0.6407,
      "step": 219
    },
    {
      "epoch": 0.24645257654966393,
      "grad_norm": 0.8110228776931763,
      "learning_rate": 4.993562216985665e-06,
      "loss": 0.6598,
      "step": 220
    },
    {
      "epoch": 0.24757281553398058,
      "grad_norm": 0.8476558327674866,
      "learning_rate": 4.9934545205737665e-06,
      "loss": 0.6105,
      "step": 221
    },
    {
      "epoch": 0.24869305451829724,
      "grad_norm": 0.8189626932144165,
      "learning_rate": 4.993345931983376e-06,
      "loss": 0.6224,
      "step": 222
    },
    {
      "epoch": 0.2498132935026139,
      "grad_norm": 0.8342490792274475,
      "learning_rate": 4.9932364512533485e-06,
      "loss": 0.6217,
      "step": 223
    },
    {
      "epoch": 0.25093353248693057,
      "grad_norm": 0.838699460029602,
      "learning_rate": 4.993126078422856e-06,
      "loss": 0.6432,
      "step": 224
    },
    {
      "epoch": 0.2520537714712472,
      "grad_norm": 0.801331102848053,
      "learning_rate": 4.9930148135313935e-06,
      "loss": 0.6219,
      "step": 225
    },
    {
      "epoch": 0.25317401045556387,
      "grad_norm": 0.8075932860374451,
      "learning_rate": 4.99290265661877e-06,
      "loss": 0.649,
      "step": 226
    },
    {
      "epoch": 0.2542942494398805,
      "grad_norm": 0.8010944724082947,
      "learning_rate": 4.992789607725116e-06,
      "loss": 0.6159,
      "step": 227
    },
    {
      "epoch": 0.2554144884241972,
      "grad_norm": 0.8093600273132324,
      "learning_rate": 4.992675666890884e-06,
      "loss": 0.6162,
      "step": 228
    },
    {
      "epoch": 0.2565347274085138,
      "grad_norm": 0.8651375770568848,
      "learning_rate": 4.99256083415684e-06,
      "loss": 0.6391,
      "step": 229
    },
    {
      "epoch": 0.2576549663928305,
      "grad_norm": 0.8180515766143799,
      "learning_rate": 4.992445109564074e-06,
      "loss": 0.6627,
      "step": 230
    },
    {
      "epoch": 0.2587752053771471,
      "grad_norm": 0.8492463231086731,
      "learning_rate": 4.992328493153992e-06,
      "loss": 0.6533,
      "step": 231
    },
    {
      "epoch": 0.2598954443614638,
      "grad_norm": 0.7910256385803223,
      "learning_rate": 4.992210984968321e-06,
      "loss": 0.6212,
      "step": 232
    },
    {
      "epoch": 0.26101568334578046,
      "grad_norm": 0.8081009984016418,
      "learning_rate": 4.992092585049106e-06,
      "loss": 0.6464,
      "step": 233
    },
    {
      "epoch": 0.2621359223300971,
      "grad_norm": 0.8275237083435059,
      "learning_rate": 4.991973293438712e-06,
      "loss": 0.6447,
      "step": 234
    },
    {
      "epoch": 0.26325616131441376,
      "grad_norm": 0.7819504141807556,
      "learning_rate": 4.991853110179822e-06,
      "loss": 0.6278,
      "step": 235
    },
    {
      "epoch": 0.2643764002987304,
      "grad_norm": 0.7954742908477783,
      "learning_rate": 4.991732035315439e-06,
      "loss": 0.6259,
      "step": 236
    },
    {
      "epoch": 0.26549663928304706,
      "grad_norm": 0.7841144800186157,
      "learning_rate": 4.9916100688888835e-06,
      "loss": 0.6209,
      "step": 237
    },
    {
      "epoch": 0.2666168782673637,
      "grad_norm": 0.783143937587738,
      "learning_rate": 4.9914872109437975e-06,
      "loss": 0.6015,
      "step": 238
    },
    {
      "epoch": 0.26773711725168037,
      "grad_norm": 0.8399513363838196,
      "learning_rate": 4.9913634615241394e-06,
      "loss": 0.6624,
      "step": 239
    },
    {
      "epoch": 0.268857356235997,
      "grad_norm": 0.8158865571022034,
      "learning_rate": 4.9912388206741895e-06,
      "loss": 0.6607,
      "step": 240
    },
    {
      "epoch": 0.26997759522031367,
      "grad_norm": 0.8276694416999817,
      "learning_rate": 4.991113288438543e-06,
      "loss": 0.6474,
      "step": 241
    },
    {
      "epoch": 0.2710978342046303,
      "grad_norm": 0.8164044618606567,
      "learning_rate": 4.9909868648621185e-06,
      "loss": 0.6352,
      "step": 242
    },
    {
      "epoch": 0.272218073188947,
      "grad_norm": 0.8372899889945984,
      "learning_rate": 4.99085954999015e-06,
      "loss": 0.638,
      "step": 243
    },
    {
      "epoch": 0.27333831217326365,
      "grad_norm": 0.8520254492759705,
      "learning_rate": 4.990731343868192e-06,
      "loss": 0.6591,
      "step": 244
    },
    {
      "epoch": 0.2744585511575803,
      "grad_norm": 0.8587496876716614,
      "learning_rate": 4.990602246542118e-06,
      "loss": 0.6442,
      "step": 245
    },
    {
      "epoch": 0.27557879014189696,
      "grad_norm": 0.8117641806602478,
      "learning_rate": 4.99047225805812e-06,
      "loss": 0.6169,
      "step": 246
    },
    {
      "epoch": 0.2766990291262136,
      "grad_norm": 0.7888590693473816,
      "learning_rate": 4.99034137846271e-06,
      "loss": 0.6345,
      "step": 247
    },
    {
      "epoch": 0.27781926811053026,
      "grad_norm": 0.8112688064575195,
      "learning_rate": 4.990209607802715e-06,
      "loss": 0.6246,
      "step": 248
    },
    {
      "epoch": 0.2789395070948469,
      "grad_norm": 0.8172190189361572,
      "learning_rate": 4.990076946125286e-06,
      "loss": 0.6323,
      "step": 249
    },
    {
      "epoch": 0.28005974607916356,
      "grad_norm": 0.7784557938575745,
      "learning_rate": 4.9899433934778905e-06,
      "loss": 0.6186,
      "step": 250
    },
    {
      "epoch": 0.2811799850634802,
      "grad_norm": 0.8571611642837524,
      "learning_rate": 4.9898089499083135e-06,
      "loss": 0.6157,
      "step": 251
    },
    {
      "epoch": 0.28230022404779687,
      "grad_norm": 0.798240065574646,
      "learning_rate": 4.989673615464661e-06,
      "loss": 0.6201,
      "step": 252
    },
    {
      "epoch": 0.2834204630321135,
      "grad_norm": 0.8300586342811584,
      "learning_rate": 4.989537390195356e-06,
      "loss": 0.6481,
      "step": 253
    },
    {
      "epoch": 0.28454070201643017,
      "grad_norm": 0.9545755982398987,
      "learning_rate": 4.989400274149141e-06,
      "loss": 0.6315,
      "step": 254
    },
    {
      "epoch": 0.28566094100074685,
      "grad_norm": 0.80037522315979,
      "learning_rate": 4.989262267375078e-06,
      "loss": 0.6206,
      "step": 255
    },
    {
      "epoch": 0.2867811799850635,
      "grad_norm": 0.8587538003921509,
      "learning_rate": 4.989123369922547e-06,
      "loss": 0.622,
      "step": 256
    },
    {
      "epoch": 0.28790141896938015,
      "grad_norm": 0.7835617661476135,
      "learning_rate": 4.988983581841246e-06,
      "loss": 0.6255,
      "step": 257
    },
    {
      "epoch": 0.2890216579536968,
      "grad_norm": 0.8115828037261963,
      "learning_rate": 4.988842903181192e-06,
      "loss": 0.6122,
      "step": 258
    },
    {
      "epoch": 0.29014189693801345,
      "grad_norm": 0.7731125950813293,
      "learning_rate": 4.988701333992722e-06,
      "loss": 0.5881,
      "step": 259
    },
    {
      "epoch": 0.2912621359223301,
      "grad_norm": 0.8486718535423279,
      "learning_rate": 4.988558874326489e-06,
      "loss": 0.676,
      "step": 260
    },
    {
      "epoch": 0.29238237490664676,
      "grad_norm": 0.7868174910545349,
      "learning_rate": 4.988415524233469e-06,
      "loss": 0.6267,
      "step": 261
    },
    {
      "epoch": 0.2935026138909634,
      "grad_norm": 0.7890263795852661,
      "learning_rate": 4.9882712837649515e-06,
      "loss": 0.6171,
      "step": 262
    },
    {
      "epoch": 0.29462285287528006,
      "grad_norm": 0.799913763999939,
      "learning_rate": 4.988126152972548e-06,
      "loss": 0.6353,
      "step": 263
    },
    {
      "epoch": 0.29574309185959674,
      "grad_norm": 0.8258731961250305,
      "learning_rate": 4.987980131908186e-06,
      "loss": 0.6371,
      "step": 264
    },
    {
      "epoch": 0.29686333084391336,
      "grad_norm": 0.8256173729896545,
      "learning_rate": 4.987833220624116e-06,
      "loss": 0.609,
      "step": 265
    },
    {
      "epoch": 0.29798356982823004,
      "grad_norm": 0.7728785872459412,
      "learning_rate": 4.9876854191729e-06,
      "loss": 0.6021,
      "step": 266
    },
    {
      "epoch": 0.29910380881254667,
      "grad_norm": 0.8612363338470459,
      "learning_rate": 4.987536727607426e-06,
      "loss": 0.6031,
      "step": 267
    },
    {
      "epoch": 0.30022404779686335,
      "grad_norm": 0.8287588357925415,
      "learning_rate": 4.987387145980896e-06,
      "loss": 0.6365,
      "step": 268
    },
    {
      "epoch": 0.30134428678117997,
      "grad_norm": 0.7820507884025574,
      "learning_rate": 4.9872366743468305e-06,
      "loss": 0.6257,
      "step": 269
    },
    {
      "epoch": 0.30246452576549665,
      "grad_norm": 0.8158690929412842,
      "learning_rate": 4.987085312759071e-06,
      "loss": 0.6465,
      "step": 270
    },
    {
      "epoch": 0.3035847647498133,
      "grad_norm": 0.804691731929779,
      "learning_rate": 4.986933061271776e-06,
      "loss": 0.6384,
      "step": 271
    },
    {
      "epoch": 0.30470500373412995,
      "grad_norm": 0.8142663240432739,
      "learning_rate": 4.9867799199394205e-06,
      "loss": 0.6061,
      "step": 272
    },
    {
      "epoch": 0.3058252427184466,
      "grad_norm": 0.8177836537361145,
      "learning_rate": 4.9866258888168e-06,
      "loss": 0.6186,
      "step": 273
    },
    {
      "epoch": 0.30694548170276326,
      "grad_norm": 0.8021143078804016,
      "learning_rate": 4.986470967959031e-06,
      "loss": 0.6262,
      "step": 274
    },
    {
      "epoch": 0.30806572068707994,
      "grad_norm": 0.7913358807563782,
      "learning_rate": 4.986315157421543e-06,
      "loss": 0.6011,
      "step": 275
    },
    {
      "epoch": 0.30918595967139656,
      "grad_norm": 0.8460528254508972,
      "learning_rate": 4.986158457260086e-06,
      "loss": 0.5983,
      "step": 276
    },
    {
      "epoch": 0.31030619865571324,
      "grad_norm": 0.8357203006744385,
      "learning_rate": 4.986000867530729e-06,
      "loss": 0.6081,
      "step": 277
    },
    {
      "epoch": 0.31142643764002986,
      "grad_norm": 0.8164505362510681,
      "learning_rate": 4.9858423882898595e-06,
      "loss": 0.6408,
      "step": 278
    },
    {
      "epoch": 0.31254667662434654,
      "grad_norm": 0.8584355115890503,
      "learning_rate": 4.985683019594183e-06,
      "loss": 0.6206,
      "step": 279
    },
    {
      "epoch": 0.31366691560866317,
      "grad_norm": 0.8428062796592712,
      "learning_rate": 4.985522761500722e-06,
      "loss": 0.6622,
      "step": 280
    },
    {
      "epoch": 0.31478715459297985,
      "grad_norm": 0.9295979142189026,
      "learning_rate": 4.985361614066818e-06,
      "loss": 0.6374,
      "step": 281
    },
    {
      "epoch": 0.31590739357729647,
      "grad_norm": 0.7724334597587585,
      "learning_rate": 4.985199577350132e-06,
      "loss": 0.6148,
      "step": 282
    },
    {
      "epoch": 0.31702763256161315,
      "grad_norm": 0.785025954246521,
      "learning_rate": 4.9850366514086415e-06,
      "loss": 0.6299,
      "step": 283
    },
    {
      "epoch": 0.3181478715459298,
      "grad_norm": 0.8610864877700806,
      "learning_rate": 4.984872836300641e-06,
      "loss": 0.6464,
      "step": 284
    },
    {
      "epoch": 0.31926811053024645,
      "grad_norm": 0.8281784653663635,
      "learning_rate": 4.9847081320847475e-06,
      "loss": 0.6297,
      "step": 285
    },
    {
      "epoch": 0.32038834951456313,
      "grad_norm": 0.8076677322387695,
      "learning_rate": 4.984542538819893e-06,
      "loss": 0.6144,
      "step": 286
    },
    {
      "epoch": 0.32150858849887975,
      "grad_norm": 1.0354986190795898,
      "learning_rate": 4.984376056565328e-06,
      "loss": 0.6246,
      "step": 287
    },
    {
      "epoch": 0.32262882748319643,
      "grad_norm": 0.7936146259307861,
      "learning_rate": 4.98420868538062e-06,
      "loss": 0.6276,
      "step": 288
    },
    {
      "epoch": 0.32374906646751306,
      "grad_norm": 0.8247389793395996,
      "learning_rate": 4.984040425325657e-06,
      "loss": 0.6326,
      "step": 289
    },
    {
      "epoch": 0.32486930545182974,
      "grad_norm": 0.8053764700889587,
      "learning_rate": 4.983871276460643e-06,
      "loss": 0.6298,
      "step": 290
    },
    {
      "epoch": 0.32598954443614636,
      "grad_norm": 0.7672686576843262,
      "learning_rate": 4.983701238846102e-06,
      "loss": 0.6192,
      "step": 291
    },
    {
      "epoch": 0.32710978342046304,
      "grad_norm": 0.8261781930923462,
      "learning_rate": 4.983530312542874e-06,
      "loss": 0.6571,
      "step": 292
    },
    {
      "epoch": 0.32823002240477966,
      "grad_norm": 0.8620947599411011,
      "learning_rate": 4.983358497612118e-06,
      "loss": 0.6195,
      "step": 293
    },
    {
      "epoch": 0.32935026138909634,
      "grad_norm": 0.8011974692344666,
      "learning_rate": 4.983185794115311e-06,
      "loss": 0.6157,
      "step": 294
    },
    {
      "epoch": 0.33047050037341297,
      "grad_norm": 0.8179512619972229,
      "learning_rate": 4.983012202114247e-06,
      "loss": 0.6086,
      "step": 295
    },
    {
      "epoch": 0.33159073935772965,
      "grad_norm": 0.82745760679245,
      "learning_rate": 4.98283772167104e-06,
      "loss": 0.651,
      "step": 296
    },
    {
      "epoch": 0.3327109783420463,
      "grad_norm": 0.8494473695755005,
      "learning_rate": 4.982662352848119e-06,
      "loss": 0.6114,
      "step": 297
    },
    {
      "epoch": 0.33383121732636295,
      "grad_norm": 0.80657559633255,
      "learning_rate": 4.982486095708234e-06,
      "loss": 0.6105,
      "step": 298
    },
    {
      "epoch": 0.33495145631067963,
      "grad_norm": 0.8035582900047302,
      "learning_rate": 4.982308950314449e-06,
      "loss": 0.6373,
      "step": 299
    },
    {
      "epoch": 0.33607169529499625,
      "grad_norm": 0.7871240377426147,
      "learning_rate": 4.982130916730151e-06,
      "loss": 0.6004,
      "step": 300
    },
    {
      "epoch": 0.33719193427931293,
      "grad_norm": 0.8356077075004578,
      "learning_rate": 4.98195199501904e-06,
      "loss": 0.6164,
      "step": 301
    },
    {
      "epoch": 0.33831217326362956,
      "grad_norm": 0.7815148234367371,
      "learning_rate": 4.981772185245135e-06,
      "loss": 0.6173,
      "step": 302
    },
    {
      "epoch": 0.33943241224794624,
      "grad_norm": 0.8669472336769104,
      "learning_rate": 4.981591487472776e-06,
      "loss": 0.6209,
      "step": 303
    },
    {
      "epoch": 0.34055265123226286,
      "grad_norm": 0.7833518385887146,
      "learning_rate": 4.981409901766615e-06,
      "loss": 0.6178,
      "step": 304
    },
    {
      "epoch": 0.34167289021657954,
      "grad_norm": 0.79278165102005,
      "learning_rate": 4.9812274281916275e-06,
      "loss": 0.6268,
      "step": 305
    },
    {
      "epoch": 0.3427931292008962,
      "grad_norm": 0.802302360534668,
      "learning_rate": 4.981044066813102e-06,
      "loss": 0.5878,
      "step": 306
    },
    {
      "epoch": 0.34391336818521284,
      "grad_norm": 0.8093838691711426,
      "learning_rate": 4.9808598176966485e-06,
      "loss": 0.6419,
      "step": 307
    },
    {
      "epoch": 0.3450336071695295,
      "grad_norm": 0.7981029152870178,
      "learning_rate": 4.980674680908192e-06,
      "loss": 0.6203,
      "step": 308
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.8186250329017639,
      "learning_rate": 4.980488656513976e-06,
      "loss": 0.6346,
      "step": 309
    },
    {
      "epoch": 0.3472740851381628,
      "grad_norm": 0.7659736275672913,
      "learning_rate": 4.980301744580561e-06,
      "loss": 0.5925,
      "step": 310
    },
    {
      "epoch": 0.34839432412247945,
      "grad_norm": 0.8284393548965454,
      "learning_rate": 4.980113945174826e-06,
      "loss": 0.6471,
      "step": 311
    },
    {
      "epoch": 0.34951456310679613,
      "grad_norm": 0.7763761878013611,
      "learning_rate": 4.979925258363968e-06,
      "loss": 0.6095,
      "step": 312
    },
    {
      "epoch": 0.35063480209111275,
      "grad_norm": 0.820344090461731,
      "learning_rate": 4.979735684215499e-06,
      "loss": 0.6044,
      "step": 313
    },
    {
      "epoch": 0.35175504107542943,
      "grad_norm": 0.8457436561584473,
      "learning_rate": 4.979545222797252e-06,
      "loss": 0.6176,
      "step": 314
    },
    {
      "epoch": 0.35287528005974605,
      "grad_norm": 0.8047285079956055,
      "learning_rate": 4.979353874177375e-06,
      "loss": 0.5912,
      "step": 315
    },
    {
      "epoch": 0.35399551904406273,
      "grad_norm": 0.8075456619262695,
      "learning_rate": 4.9791616384243335e-06,
      "loss": 0.5807,
      "step": 316
    },
    {
      "epoch": 0.3551157580283794,
      "grad_norm": 0.7939711809158325,
      "learning_rate": 4.978968515606912e-06,
      "loss": 0.5862,
      "step": 317
    },
    {
      "epoch": 0.35623599701269604,
      "grad_norm": 0.8410739302635193,
      "learning_rate": 4.97877450579421e-06,
      "loss": 0.6144,
      "step": 318
    },
    {
      "epoch": 0.3573562359970127,
      "grad_norm": 0.8608905076980591,
      "learning_rate": 4.9785796090556485e-06,
      "loss": 0.6428,
      "step": 319
    },
    {
      "epoch": 0.35847647498132934,
      "grad_norm": 0.8229684233665466,
      "learning_rate": 4.978383825460959e-06,
      "loss": 0.6124,
      "step": 320
    },
    {
      "epoch": 0.359596713965646,
      "grad_norm": 0.790482223033905,
      "learning_rate": 4.978187155080199e-06,
      "loss": 0.6127,
      "step": 321
    },
    {
      "epoch": 0.36071695294996264,
      "grad_norm": 0.7994086146354675,
      "learning_rate": 4.9779895979837355e-06,
      "loss": 0.6337,
      "step": 322
    },
    {
      "epoch": 0.3618371919342793,
      "grad_norm": 0.8186279535293579,
      "learning_rate": 4.977791154242257e-06,
      "loss": 0.6257,
      "step": 323
    },
    {
      "epoch": 0.36295743091859595,
      "grad_norm": 0.7845602631568909,
      "learning_rate": 4.977591823926769e-06,
      "loss": 0.5702,
      "step": 324
    },
    {
      "epoch": 0.3640776699029126,
      "grad_norm": 0.7722570300102234,
      "learning_rate": 4.977391607108593e-06,
      "loss": 0.5897,
      "step": 325
    },
    {
      "epoch": 0.36519790888722925,
      "grad_norm": 0.8029158115386963,
      "learning_rate": 4.977190503859368e-06,
      "loss": 0.6142,
      "step": 326
    },
    {
      "epoch": 0.36631814787154593,
      "grad_norm": 0.801633894443512,
      "learning_rate": 4.976988514251051e-06,
      "loss": 0.6298,
      "step": 327
    },
    {
      "epoch": 0.3674383868558626,
      "grad_norm": 0.8345485925674438,
      "learning_rate": 4.976785638355914e-06,
      "loss": 0.6351,
      "step": 328
    },
    {
      "epoch": 0.36855862584017923,
      "grad_norm": 0.793038547039032,
      "learning_rate": 4.97658187624655e-06,
      "loss": 0.6463,
      "step": 329
    },
    {
      "epoch": 0.3696788648244959,
      "grad_norm": 0.8270999789237976,
      "learning_rate": 4.976377227995866e-06,
      "loss": 0.6193,
      "step": 330
    },
    {
      "epoch": 0.37079910380881254,
      "grad_norm": 0.8013745546340942,
      "learning_rate": 4.9761716936770855e-06,
      "loss": 0.6047,
      "step": 331
    },
    {
      "epoch": 0.3719193427931292,
      "grad_norm": 0.8266115784645081,
      "learning_rate": 4.975965273363751e-06,
      "loss": 0.6226,
      "step": 332
    },
    {
      "epoch": 0.37303958177744584,
      "grad_norm": 0.8072906732559204,
      "learning_rate": 4.975757967129722e-06,
      "loss": 0.615,
      "step": 333
    },
    {
      "epoch": 0.3741598207617625,
      "grad_norm": 0.7997392416000366,
      "learning_rate": 4.975549775049174e-06,
      "loss": 0.6271,
      "step": 334
    },
    {
      "epoch": 0.37528005974607914,
      "grad_norm": 0.8856087923049927,
      "learning_rate": 4.9753406971966e-06,
      "loss": 0.6066,
      "step": 335
    },
    {
      "epoch": 0.3764002987303958,
      "grad_norm": 0.7950431704521179,
      "learning_rate": 4.975130733646809e-06,
      "loss": 0.6001,
      "step": 336
    },
    {
      "epoch": 0.37752053771471245,
      "grad_norm": 0.8051393032073975,
      "learning_rate": 4.974919884474929e-06,
      "loss": 0.6316,
      "step": 337
    },
    {
      "epoch": 0.3786407766990291,
      "grad_norm": 0.8180161714553833,
      "learning_rate": 4.974708149756403e-06,
      "loss": 0.6362,
      "step": 338
    },
    {
      "epoch": 0.3797610156833458,
      "grad_norm": 0.8257321119308472,
      "learning_rate": 4.974495529566991e-06,
      "loss": 0.626,
      "step": 339
    },
    {
      "epoch": 0.38088125466766243,
      "grad_norm": 0.8660753965377808,
      "learning_rate": 4.9742820239827705e-06,
      "loss": 0.6191,
      "step": 340
    },
    {
      "epoch": 0.3820014936519791,
      "grad_norm": 0.7817429900169373,
      "learning_rate": 4.974067633080135e-06,
      "loss": 0.6297,
      "step": 341
    },
    {
      "epoch": 0.38312173263629573,
      "grad_norm": 0.807461678981781,
      "learning_rate": 4.973852356935797e-06,
      "loss": 0.6356,
      "step": 342
    },
    {
      "epoch": 0.3842419716206124,
      "grad_norm": 0.8301308155059814,
| "learning_rate": 4.973636195626783e-06, | |
| "loss": 0.6205, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.38536221060492903, | |
| "grad_norm": 0.897199809551239, | |
| "learning_rate": 4.973419149230437e-06, | |
| "loss": 0.6455, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.3864824495892457, | |
| "grad_norm": 0.8273870944976807, | |
| "learning_rate": 4.9732012178244194e-06, | |
| "loss": 0.6458, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.38760268857356234, | |
| "grad_norm": 0.9393578767776489, | |
| "learning_rate": 4.972982401486709e-06, | |
| "loss": 0.61, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.388722927557879, | |
| "grad_norm": 0.8364170789718628, | |
| "learning_rate": 4.9727627002956e-06, | |
| "loss": 0.6195, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.3898431665421957, | |
| "grad_norm": 0.8208838701248169, | |
| "learning_rate": 4.972542114329704e-06, | |
| "loss": 0.6246, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.3909634055265123, | |
| "grad_norm": 0.8595433831214905, | |
| "learning_rate": 4.972320643667946e-06, | |
| "loss": 0.6251, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.392083644510829, | |
| "grad_norm": 0.7956632375717163, | |
| "learning_rate": 4.972098288389572e-06, | |
| "loss": 0.608, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.3932038834951456, | |
| "grad_norm": 0.8434047698974609, | |
| "learning_rate": 4.9718750485741425e-06, | |
| "loss": 0.6137, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.3943241224794623, | |
| "grad_norm": 0.8272572755813599, | |
| "learning_rate": 4.971650924301534e-06, | |
| "loss": 0.623, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.3954443614637789, | |
| "grad_norm": 0.8429244756698608, | |
| "learning_rate": 4.9714259156519406e-06, | |
| "loss": 0.6325, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.3965646004480956, | |
| "grad_norm": 0.8181564807891846, | |
| "learning_rate": 4.971200022705871e-06, | |
| "loss": 0.6421, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.39768483943241223, | |
| "grad_norm": 0.7761826515197754, | |
| "learning_rate": 4.970973245544153e-06, | |
| "loss": 0.6063, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.3988050784167289, | |
| "grad_norm": 0.8283432126045227, | |
| "learning_rate": 4.970745584247928e-06, | |
| "loss": 0.6125, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.39992531740104553, | |
| "grad_norm": 0.8041115999221802, | |
| "learning_rate": 4.9705170388986565e-06, | |
| "loss": 0.6227, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.4010455563853622, | |
| "grad_norm": 0.792184054851532, | |
| "learning_rate": 4.970287609578112e-06, | |
| "loss": 0.624, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.4021657953696789, | |
| "grad_norm": 0.8069051504135132, | |
| "learning_rate": 4.970057296368388e-06, | |
| "loss": 0.6162, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.4032860343539955, | |
| "grad_norm": 0.7888614535331726, | |
| "learning_rate": 4.969826099351892e-06, | |
| "loss": 0.6088, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.4044062733383122, | |
| "grad_norm": 0.8092378377914429, | |
| "learning_rate": 4.969594018611348e-06, | |
| "loss": 0.6062, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.4055265123226288, | |
| "grad_norm": 0.8325386047363281, | |
| "learning_rate": 4.969361054229796e-06, | |
| "loss": 0.6277, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.4066467513069455, | |
| "grad_norm": 0.860455334186554, | |
| "learning_rate": 4.9691272062905935e-06, | |
| "loss": 0.6202, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.4077669902912621, | |
| "grad_norm": 0.8035297989845276, | |
| "learning_rate": 4.968892474877412e-06, | |
| "loss": 0.6087, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.4088872292755788, | |
| "grad_norm": 0.792544424533844, | |
| "learning_rate": 4.968656860074241e-06, | |
| "loss": 0.5919, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.4100074682598954, | |
| "grad_norm": 0.8256428837776184, | |
| "learning_rate": 4.968420361965386e-06, | |
| "loss": 0.5922, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.4111277072442121, | |
| "grad_norm": 0.8024367094039917, | |
| "learning_rate": 4.968182980635468e-06, | |
| "loss": 0.6332, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.4122479462285287, | |
| "grad_norm": 0.7686941027641296, | |
| "learning_rate": 4.967944716169421e-06, | |
| "loss": 0.5954, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.4133681852128454, | |
| "grad_norm": 0.814935028553009, | |
| "learning_rate": 4.967705568652502e-06, | |
| "loss": 0.627, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.4144884241971621, | |
| "grad_norm": 0.7745496034622192, | |
| "learning_rate": 4.967465538170279e-06, | |
| "loss": 0.6051, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.4156086631814787, | |
| "grad_norm": 0.8013162612915039, | |
| "learning_rate": 4.9672246248086344e-06, | |
| "loss": 0.6005, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.4167289021657954, | |
| "grad_norm": 0.8392379879951477, | |
| "learning_rate": 4.966982828653772e-06, | |
| "loss": 0.624, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.417849141150112, | |
| "grad_norm": 0.781700849533081, | |
| "learning_rate": 4.966740149792206e-06, | |
| "loss": 0.6064, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.4189693801344287, | |
| "grad_norm": 0.8092135190963745, | |
| "learning_rate": 4.966496588310771e-06, | |
| "loss": 0.6355, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.4200896191187453, | |
| "grad_norm": 0.7815409898757935, | |
| "learning_rate": 4.966252144296614e-06, | |
| "loss": 0.6039, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.421209858103062, | |
| "grad_norm": 0.7987800240516663, | |
| "learning_rate": 4.9660068178372e-06, | |
| "loss": 0.6341, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.4223300970873786, | |
| "grad_norm": 0.8279643058776855, | |
| "learning_rate": 4.965760609020308e-06, | |
| "loss": 0.6119, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4234503360716953, | |
| "grad_norm": 0.79878830909729, | |
| "learning_rate": 4.965513517934033e-06, | |
| "loss": 0.6334, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.4245705750560119, | |
| "grad_norm": 0.8361204862594604, | |
| "learning_rate": 4.965265544666788e-06, | |
| "loss": 0.6078, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.4256908140403286, | |
| "grad_norm": 0.8225858211517334, | |
| "learning_rate": 4.965016689307298e-06, | |
| "loss": 0.6264, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.4268110530246453, | |
| "grad_norm": 0.8204994201660156, | |
| "learning_rate": 4.964766951944607e-06, | |
| "loss": 0.606, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.4279312920089619, | |
| "grad_norm": 0.8047780990600586, | |
| "learning_rate": 4.964516332668073e-06, | |
| "loss": 0.6053, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.4290515309932786, | |
| "grad_norm": 0.8498148322105408, | |
| "learning_rate": 4.964264831567368e-06, | |
| "loss": 0.6211, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.4301717699775952, | |
| "grad_norm": 0.8710355758666992, | |
| "learning_rate": 4.964012448732482e-06, | |
| "loss": 0.6134, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4312920089619119, | |
| "grad_norm": 0.8357451558113098, | |
| "learning_rate": 4.963759184253721e-06, | |
| "loss": 0.6339, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.4324122479462285, | |
| "grad_norm": 0.8333609700202942, | |
| "learning_rate": 4.963505038221704e-06, | |
| "loss": 0.623, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.4335324869305452, | |
| "grad_norm": 0.8006998300552368, | |
| "learning_rate": 4.963250010727366e-06, | |
| "loss": 0.621, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.4346527259148618, | |
| "grad_norm": 0.8830387592315674, | |
| "learning_rate": 4.962994101861959e-06, | |
| "loss": 0.5945, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.4357729648991785, | |
| "grad_norm": 0.8361951112747192, | |
| "learning_rate": 4.9627373117170476e-06, | |
| "loss": 0.6042, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.4368932038834951, | |
| "grad_norm": 0.8356959819793701, | |
| "learning_rate": 4.962479640384516e-06, | |
| "loss": 0.6423, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.4380134428678118, | |
| "grad_norm": 0.8058585524559021, | |
| "learning_rate": 4.962221087956558e-06, | |
| "loss": 0.622, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4391336818521285, | |
| "grad_norm": 0.8438712358474731, | |
| "learning_rate": 4.961961654525689e-06, | |
| "loss": 0.619, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.4402539208364451, | |
| "grad_norm": 0.8207183480262756, | |
| "learning_rate": 4.961701340184734e-06, | |
| "loss": 0.6095, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.4413741598207618, | |
| "grad_norm": 0.8222430348396301, | |
| "learning_rate": 4.9614401450268364e-06, | |
| "loss": 0.6266, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.4424943988050784, | |
| "grad_norm": 0.8192475438117981, | |
| "learning_rate": 4.961178069145455e-06, | |
| "loss": 0.6146, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.4436146377893951, | |
| "grad_norm": 0.7848315238952637, | |
| "learning_rate": 4.960915112634362e-06, | |
| "loss": 0.6046, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4447348767737117, | |
| "grad_norm": 0.8019006848335266, | |
| "learning_rate": 4.960651275587644e-06, | |
| "loss": 0.6253, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.4458551157580284, | |
| "grad_norm": 0.8097095489501953, | |
| "learning_rate": 4.960386558099706e-06, | |
| "loss": 0.6284, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.446975354742345, | |
| "grad_norm": 0.796343207359314, | |
| "learning_rate": 4.9601209602652644e-06, | |
| "loss": 0.6051, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.4480955937266617, | |
| "grad_norm": 0.8076500296592712, | |
| "learning_rate": 4.959854482179354e-06, | |
| "loss": 0.6215, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.44921583271097837, | |
| "grad_norm": 0.8721750974655151, | |
| "learning_rate": 4.9595871239373215e-06, | |
| "loss": 0.6534, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.450336071695295, | |
| "grad_norm": 0.8260684013366699, | |
| "learning_rate": 4.959318885634831e-06, | |
| "loss": 0.6073, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.45145631067961167, | |
| "grad_norm": 0.8113908767700195, | |
| "learning_rate": 4.95904976736786e-06, | |
| "loss": 0.6004, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4525765496639283, | |
| "grad_norm": 0.8291077017784119, | |
| "learning_rate": 4.9587797692327e-06, | |
| "loss": 0.622, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.453696788648245, | |
| "grad_norm": 0.8331876993179321, | |
| "learning_rate": 4.95850889132596e-06, | |
| "loss": 0.6367, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.4548170276325616, | |
| "grad_norm": 0.8167732357978821, | |
| "learning_rate": 4.9582371337445624e-06, | |
| "loss": 0.6191, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.4559372666168783, | |
| "grad_norm": 0.8163003921508789, | |
| "learning_rate": 4.957964496585743e-06, | |
| "loss": 0.6197, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.4570575056011949, | |
| "grad_norm": 0.8027840852737427, | |
| "learning_rate": 4.957690979947055e-06, | |
| "loss": 0.6119, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.4581777445855116, | |
| "grad_norm": 0.8352331519126892, | |
| "learning_rate": 4.957416583926364e-06, | |
| "loss": 0.6176, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.4592979835698282, | |
| "grad_norm": 0.846203625202179, | |
| "learning_rate": 4.957141308621851e-06, | |
| "loss": 0.608, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.4604182225541449, | |
| "grad_norm": 0.8850703239440918, | |
| "learning_rate": 4.956865154132013e-06, | |
| "loss": 0.6359, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.46153846153846156, | |
| "grad_norm": 0.7965673804283142, | |
| "learning_rate": 4.95658812055566e-06, | |
| "loss": 0.5849, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.4626587005227782, | |
| "grad_norm": 0.862568199634552, | |
| "learning_rate": 4.956310207991916e-06, | |
| "loss": 0.6176, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.46377893950709487, | |
| "grad_norm": 0.8031529784202576, | |
| "learning_rate": 4.95603141654022e-06, | |
| "loss": 0.5987, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.4648991784914115, | |
| "grad_norm": 0.8270638585090637, | |
| "learning_rate": 4.9557517463003275e-06, | |
| "loss": 0.6611, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.46601941747572817, | |
| "grad_norm": 0.7914713025093079, | |
| "learning_rate": 4.9554711973723055e-06, | |
| "loss": 0.5944, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.4671396564600448, | |
| "grad_norm": 0.8575369119644165, | |
| "learning_rate": 4.9551897698565376e-06, | |
| "loss": 0.6282, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4682598954443615, | |
| "grad_norm": 0.766158401966095, | |
| "learning_rate": 4.9549074638537205e-06, | |
| "loss": 0.5798, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.4693801344286781, | |
| "grad_norm": 0.7910577654838562, | |
| "learning_rate": 4.954624279464866e-06, | |
| "loss": 0.5881, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.4705003734129948, | |
| "grad_norm": 0.8028966188430786, | |
| "learning_rate": 4.9543402167912985e-06, | |
| "loss": 0.6126, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.4716206123973114, | |
| "grad_norm": 0.8077728748321533, | |
| "learning_rate": 4.9540552759346596e-06, | |
| "loss": 0.6072, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.4727408513816281, | |
| "grad_norm": 0.8463570475578308, | |
| "learning_rate": 4.953769456996902e-06, | |
| "loss": 0.5721, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.47386109036594476, | |
| "grad_norm": 0.7810856699943542, | |
| "learning_rate": 4.9534827600802956e-06, | |
| "loss": 0.5852, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.4749813293502614, | |
| "grad_norm": 0.7997978329658508, | |
| "learning_rate": 4.953195185287421e-06, | |
| "loss": 0.5766, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.47610156833457806, | |
| "grad_norm": 0.7943705320358276, | |
| "learning_rate": 4.952906732721177e-06, | |
| "loss": 0.5915, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.4772218073188947, | |
| "grad_norm": 0.7930023670196533, | |
| "learning_rate": 4.952617402484773e-06, | |
| "loss": 0.6025, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.47834204630321137, | |
| "grad_norm": 0.7801458835601807, | |
| "learning_rate": 4.952327194681734e-06, | |
| "loss": 0.5947, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.479462285287528, | |
| "grad_norm": 0.8124021291732788, | |
| "learning_rate": 4.952036109415899e-06, | |
| "loss": 0.6175, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.48058252427184467, | |
| "grad_norm": 0.8290462493896484, | |
| "learning_rate": 4.9517441467914195e-06, | |
| "loss": 0.6229, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.4817027632561613, | |
| "grad_norm": 0.7960165739059448, | |
| "learning_rate": 4.951451306912764e-06, | |
| "loss": 0.6078, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.48282300224047797, | |
| "grad_norm": 0.8182939887046814, | |
| "learning_rate": 4.951157589884711e-06, | |
| "loss": 0.6145, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.4839432412247946, | |
| "grad_norm": 0.7847172021865845, | |
| "learning_rate": 4.950862995812356e-06, | |
| "loss": 0.592, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.4850634802091113, | |
| "grad_norm": 0.8396735191345215, | |
| "learning_rate": 4.9505675248011085e-06, | |
| "loss": 0.6323, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.48618371919342795, | |
| "grad_norm": 0.8656349778175354, | |
| "learning_rate": 4.950271176956688e-06, | |
| "loss": 0.6237, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.4873039581777446, | |
| "grad_norm": 0.8108944296836853, | |
| "learning_rate": 4.949973952385131e-06, | |
| "loss": 0.6148, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.48842419716206126, | |
| "grad_norm": 0.839131236076355, | |
| "learning_rate": 4.949675851192786e-06, | |
| "loss": 0.6475, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.4895444361463779, | |
| "grad_norm": 0.8033477067947388, | |
| "learning_rate": 4.949376873486319e-06, | |
| "loss": 0.6095, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.49066467513069456, | |
| "grad_norm": 0.8268027901649475, | |
| "learning_rate": 4.949077019372704e-06, | |
| "loss": 0.6096, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.4917849141150112, | |
| "grad_norm": 0.8278724551200867, | |
| "learning_rate": 4.948776288959233e-06, | |
| "loss": 0.6225, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.49290515309932786, | |
| "grad_norm": 0.8136724233627319, | |
| "learning_rate": 4.948474682353508e-06, | |
| "loss": 0.6063, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.4940253920836445, | |
| "grad_norm": 0.821102499961853, | |
| "learning_rate": 4.948172199663448e-06, | |
| "loss": 0.6214, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.49514563106796117, | |
| "grad_norm": 0.8143325448036194, | |
| "learning_rate": 4.947868840997282e-06, | |
| "loss": 0.6021, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.49626587005227785, | |
| "grad_norm": 0.8387938141822815, | |
| "learning_rate": 4.947564606463556e-06, | |
| "loss": 0.6299, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.49738610903659447, | |
| "grad_norm": 0.8075438737869263, | |
| "learning_rate": 4.9472594961711266e-06, | |
| "loss": 0.603, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.49850634802091115, | |
| "grad_norm": 0.8184303641319275, | |
| "learning_rate": 4.946953510229164e-06, | |
| "loss": 0.6126, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.4996265870052278, | |
| "grad_norm": 0.8168706893920898, | |
| "learning_rate": 4.946646648747155e-06, | |
| "loss": 0.5987, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.5007468259895445, | |
| "grad_norm": 0.8080923557281494, | |
| "learning_rate": 4.946338911834895e-06, | |
| "loss": 0.5998, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.5018670649738611, | |
| "grad_norm": 0.8014161586761475, | |
| "learning_rate": 4.946030299602496e-06, | |
| "loss": 0.6279, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.5029873039581777, | |
| "grad_norm": 0.7952543497085571, | |
| "learning_rate": 4.94572081216038e-06, | |
| "loss": 0.5883, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.5041075429424944, | |
| "grad_norm": 0.8177149295806885, | |
| "learning_rate": 4.945410449619286e-06, | |
| "loss": 0.5988, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.5052277819268111, | |
| "grad_norm": 0.8595210909843445, | |
| "learning_rate": 4.945099212090265e-06, | |
| "loss": 0.6387, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.5063480209111277, | |
| "grad_norm": 0.8385729193687439, | |
| "learning_rate": 4.944787099684678e-06, | |
| "loss": 0.6276, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.5074682598954443, | |
| "grad_norm": 0.7771918177604675, | |
| "learning_rate": 4.944474112514204e-06, | |
| "loss": 0.5967, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.508588498879761, | |
| "grad_norm": 0.7973781228065491, | |
| "learning_rate": 4.9441602506908295e-06, | |
| "loss": 0.6055, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.5097087378640777, | |
| "grad_norm": 0.8014892935752869, | |
| "learning_rate": 4.9438455143268596e-06, | |
| "loss": 0.6018, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.5108289768483943, | |
| "grad_norm": 0.8037407994270325, | |
| "learning_rate": 4.943529903534907e-06, | |
| "loss": 0.6117, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.511949215832711, | |
| "grad_norm": 0.8133897185325623, | |
| "learning_rate": 4.943213418427901e-06, | |
| "loss": 0.5902, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.5130694548170276, | |
| "grad_norm": 0.81773841381073, | |
| "learning_rate": 4.9428960591190834e-06, | |
| "loss": 0.6017, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.5141896938013443, | |
| "grad_norm": 0.8633530735969543, | |
| "learning_rate": 4.942577825722007e-06, | |
| "loss": 0.597, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.515309932785661, | |
| "grad_norm": 0.7943804264068604, | |
| "learning_rate": 4.942258718350539e-06, | |
| "loss": 0.605, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.5164301717699776, | |
| "grad_norm": 0.8280025124549866, | |
| "learning_rate": 4.941938737118857e-06, | |
| "loss": 0.5855, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.5175504107542942, | |
| "grad_norm": 0.7869625091552734, | |
| "learning_rate": 4.941617882141455e-06, | |
| "loss": 0.6035, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.5186706497386109, | |
| "grad_norm": 0.8214724659919739, | |
| "learning_rate": 4.9412961535331365e-06, | |
| "loss": 0.6402, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.5197908887229276, | |
| "grad_norm": 0.8125516772270203, | |
| "learning_rate": 4.940973551409018e-06, | |
| "loss": 0.6135, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.5209111277072442, | |
| "grad_norm": 0.8744713068008423, | |
| "learning_rate": 4.9406500758845305e-06, | |
| "loss": 0.5904, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.5220313666915609, | |
| "grad_norm": 0.8251977562904358, | |
| "learning_rate": 4.940325727075415e-06, | |
| "loss": 0.6185, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.5231516056758775, | |
| "grad_norm": 0.7986443638801575, | |
| "learning_rate": 4.940000505097728e-06, | |
| "loss": 0.6177, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.5242718446601942, | |
| "grad_norm": 0.8290914297103882, | |
| "learning_rate": 4.939674410067834e-06, | |
| "loss": 0.6531, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.5253920836445108, | |
| "grad_norm": 0.7911964058876038, | |
| "learning_rate": 4.939347442102415e-06, | |
| "loss": 0.6278, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.5265123226288275, | |
| "grad_norm": 0.7842428088188171, | |
| "learning_rate": 4.9390196013184614e-06, | |
| "loss": 0.6014, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5276325616131441, | |
| "grad_norm": 0.8033813834190369, | |
| "learning_rate": 4.9386908878332775e-06, | |
| "loss": 0.5975, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.5287528005974608, | |
| "grad_norm": 0.7822708487510681, | |
| "learning_rate": 4.9383613017644796e-06, | |
| "loss": 0.5812, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.5298730395817774, | |
| "grad_norm": 0.8360676169395447, | |
| "learning_rate": 4.9380308432299976e-06, | |
| "loss": 0.6112, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.5309932785660941, | |
| "grad_norm": 0.8052594065666199, | |
| "learning_rate": 4.93769951234807e-06, | |
| "loss": 0.6044, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.5321135175504108, | |
| "grad_norm": 0.7881698608398438, | |
| "learning_rate": 4.937367309237251e-06, | |
| "loss": 0.5778, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.5332337565347274, | |
| "grad_norm": 0.8374946117401123, | |
| "learning_rate": 4.937034234016406e-06, | |
| "loss": 0.6213, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.5343539955190441, | |
| "grad_norm": 0.8044434189796448, | |
| "learning_rate": 4.936700286804711e-06, | |
| "loss": 0.5984, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.5354742345033607, | |
| "grad_norm": 0.7946317195892334, | |
| "learning_rate": 4.936365467721655e-06, | |
| "loss": 0.6027, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.5365944734876774, | |
| "grad_norm": 0.8196859955787659, | |
| "learning_rate": 4.93602977688704e-06, | |
| "loss": 0.6009, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.537714712471994, | |
| "grad_norm": 0.8330888748168945, | |
| "learning_rate": 4.935693214420979e-06, | |
| "loss": 0.6396, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.5388349514563107, | |
| "grad_norm": 0.7970585823059082, | |
| "learning_rate": 4.935355780443896e-06, | |
| "loss": 0.6134, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.5399551904406273, | |
| "grad_norm": 0.8149996399879456, | |
| "learning_rate": 4.935017475076529e-06, | |
| "loss": 0.6214, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.541075429424944, | |
| "grad_norm": 0.7842663526535034, | |
| "learning_rate": 4.9346782984399246e-06, | |
| "loss": 0.6084, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.5421956684092606, | |
| "grad_norm": 0.8166002035140991, | |
| "learning_rate": 4.934338250655444e-06, | |
| "loss": 0.5939, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.5433159073935773, | |
| "grad_norm": 0.8198996186256409, | |
| "learning_rate": 4.933997331844758e-06, | |
| "loss": 0.5925, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.544436146377894, | |
| "grad_norm": 0.8149311542510986, | |
| "learning_rate": 4.933655542129852e-06, | |
| "loss": 0.618, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.5455563853622106, | |
| "grad_norm": 0.8132028579711914, | |
| "learning_rate": 4.93331288163302e-06, | |
| "loss": 0.5923, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.5466766243465273, | |
| "grad_norm": 0.8155807852745056, | |
| "learning_rate": 4.9329693504768686e-06, | |
| "loss": 0.5966, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.5477968633308439, | |
| "grad_norm": 0.8111668229103088, | |
| "learning_rate": 4.932624948784317e-06, | |
| "loss": 0.613, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.5489171023151606, | |
| "grad_norm": 0.8030702471733093, | |
| "learning_rate": 4.932279676678594e-06, | |
| "loss": 0.6077, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.5500373412994772, | |
| "grad_norm": 0.8306952118873596, | |
| "learning_rate": 4.931933534283241e-06, | |
| "loss": 0.6182, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.5511575802837939, | |
| "grad_norm": 0.8086608052253723, | |
| "learning_rate": 4.931586521722112e-06, | |
| "loss": 0.5989, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.5522778192681105, | |
| "grad_norm": 0.8051784634590149, | |
| "learning_rate": 4.931238639119368e-06, | |
| "loss": 0.595, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.5533980582524272, | |
| "grad_norm": 0.7761525511741638, | |
| "learning_rate": 4.930889886599486e-06, | |
| "loss": 0.5773, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.5545182972367438, | |
| "grad_norm": 0.7963784337043762, | |
| "learning_rate": 4.9305402642872534e-06, | |
| "loss": 0.578, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.5556385362210605, | |
| "grad_norm": 0.7898165583610535, | |
| "learning_rate": 4.9301897723077664e-06, | |
| "loss": 0.5954, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.5567587752053772, | |
| "grad_norm": 0.7823036909103394, | |
| "learning_rate": 4.929838410786435e-06, | |
| "loss": 0.5871, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.5578790141896938, | |
| "grad_norm": 0.8508079648017883, | |
| "learning_rate": 4.929486179848979e-06, | |
| "loss": 0.6044, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.5589992531740104, | |
| "grad_norm": 0.8375908136367798, | |
| "learning_rate": 4.9291330796214285e-06, | |
| "loss": 0.6175, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.5601194921583271, | |
| "grad_norm": 0.8124059438705444, | |
| "learning_rate": 4.928779110230126e-06, | |
| "loss": 0.5857, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5612397311426438, | |
| "grad_norm": 0.8159031271934509, | |
| "learning_rate": 4.928424271801726e-06, | |
| "loss": 0.6013, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.5623599701269604, | |
| "grad_norm": 0.7889281511306763, | |
| "learning_rate": 4.928068564463192e-06, | |
| "loss": 0.5959, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.563480209111277, | |
| "grad_norm": 0.8010871410369873, | |
| "learning_rate": 4.927711988341798e-06, | |
| "loss": 0.591, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5646004480955937, | |
| "grad_norm": 0.8424665927886963, | |
| "learning_rate": 4.927354543565131e-06, | |
| "loss": 0.5844, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.5657206870799104, | |
| "grad_norm": 0.7999626398086548, | |
| "learning_rate": 4.9269962302610866e-06, | |
| "loss": 0.5836, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.566840926064227, | |
| "grad_norm": 0.8242966532707214, | |
| "learning_rate": 4.926637048557873e-06, | |
| "loss": 0.5929, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.5679611650485437, | |
| "grad_norm": 0.8201645016670227, | |
| "learning_rate": 4.926276998584008e-06, | |
| "loss": 0.5937, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.5690814040328603, | |
| "grad_norm": 0.8664683699607849, | |
| "learning_rate": 4.925916080468321e-06, | |
| "loss": 0.6258, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.570201643017177, | |
| "grad_norm": 0.8183404207229614, | |
| "learning_rate": 4.925554294339951e-06, | |
| "loss": 0.6088, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.5713218820014937, | |
| "grad_norm": 0.8237192034721375, | |
| "learning_rate": 4.925191640328348e-06, | |
| "loss": 0.6054, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.5724421209858103, | |
| "grad_norm": 0.8951362371444702, | |
| "learning_rate": 4.924828118563273e-06, | |
| "loss": 0.5847, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.573562359970127, | |
| "grad_norm": 0.8432679176330566, | |
| "learning_rate": 4.924463729174796e-06, | |
| "loss": 0.6185, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.5746825989544436, | |
| "grad_norm": 0.8540483117103577, | |
| "learning_rate": 4.924098472293301e-06, | |
| "loss": 0.6119, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.5758028379387603, | |
| "grad_norm": 0.8214981555938721, | |
| "learning_rate": 4.923732348049477e-06, | |
| "loss": 0.5815, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.5769230769230769, | |
| "grad_norm": 0.8091721534729004, | |
| "learning_rate": 4.9233653565743276e-06, | |
| "loss": 0.5912, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.5780433159073936, | |
| "grad_norm": 0.8201042413711548, | |
| "learning_rate": 4.922997497999166e-06, | |
| "loss": 0.5754, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.5791635548917102, | |
| "grad_norm": 0.8555132746696472, | |
| "learning_rate": 4.922628772455613e-06, | |
| "loss": 0.6227, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5802837938760269, | |
| "grad_norm": 0.805891752243042, | |
| "learning_rate": 4.922259180075604e-06, | |
| "loss": 0.5782, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5814040328603436, | |
| "grad_norm": 0.8111713528633118, | |
| "learning_rate": 4.921888720991381e-06, | |
| "loss": 0.5918, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5825242718446602, | |
| "grad_norm": 0.8208431601524353, | |
| "learning_rate": 4.921517395335497e-06, | |
| "loss": 0.6038, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5836445108289768, | |
| "grad_norm": 0.826924204826355, | |
| "learning_rate": 4.921145203240816e-06, | |
| "loss": 0.5922, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.5847647498132935, | |
| "grad_norm": 0.8484541177749634, | |
| "learning_rate": 4.92077214484051e-06, | |
| "loss": 0.596, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5858849887976102, | |
| "grad_norm": 0.8284783363342285, | |
| "learning_rate": 4.920398220268064e-06, | |
| "loss": 0.599, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.5870052277819268, | |
| "grad_norm": 0.8378069400787354, | |
| "learning_rate": 4.920023429657273e-06, | |
| "loss": 0.6259, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5881254667662434, | |
| "grad_norm": 0.8292249441146851, | |
| "learning_rate": 4.919647773142235e-06, | |
| "loss": 0.5925, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.5892457057505601, | |
| "grad_norm": 0.8202025890350342, | |
| "learning_rate": 4.919271250857367e-06, | |
| "loss": 0.5932, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.5903659447348768, | |
| "grad_norm": 0.8431236147880554, | |
| "learning_rate": 4.9188938629373915e-06, | |
| "loss": 0.5958, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.5914861837191935, | |
| "grad_norm": 0.8396396040916443, | |
| "learning_rate": 4.9185156095173395e-06, | |
| "loss": 0.6148, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.59260642270351, | |
| "grad_norm": 0.8345813155174255, | |
| "learning_rate": 4.9181364907325535e-06, | |
| "loss": 0.5651, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.5937266616878267, | |
| "grad_norm": 0.8040415644645691, | |
| "learning_rate": 4.917756506718686e-06, | |
| "loss": 0.6102, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.5948469006721434, | |
| "grad_norm": 0.7991846799850464, | |
| "learning_rate": 4.917375657611697e-06, | |
| "loss": 0.6, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.5959671396564601, | |
| "grad_norm": 0.8031482696533203, | |
| "learning_rate": 4.9169939435478595e-06, | |
| "loss": 0.5883, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.5970873786407767, | |
| "grad_norm": 0.8042899370193481, | |
| "learning_rate": 4.9166113646637534e-06, | |
| "loss": 0.561, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.5982076176250933, | |
| "grad_norm": 0.8222240209579468, | |
| "learning_rate": 4.916227921096267e-06, | |
| "loss": 0.5649, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.59932785660941, | |
| "grad_norm": 0.7763756513595581, | |
| "learning_rate": 4.915843612982601e-06, | |
| "loss": 0.577, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.6004480955937267, | |
| "grad_norm": 0.8542895913124084, | |
| "learning_rate": 4.915458440460265e-06, | |
| "loss": 0.6099, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.6015683345780433, | |
| "grad_norm": 0.8213333487510681, | |
| "learning_rate": 4.915072403667074e-06, | |
| "loss": 0.6131, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.6026885735623599, | |
| "grad_norm": 0.8075094819068909, | |
| "learning_rate": 4.914685502741157e-06, | |
| "loss": 0.5938, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.6038088125466766, | |
| "grad_norm": 0.843874454498291, | |
| "learning_rate": 4.91429773782095e-06, | |
| "loss": 0.5921, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.6049290515309933, | |
| "grad_norm": 0.8286192417144775, | |
| "learning_rate": 4.913909109045198e-06, | |
| "loss": 0.6035, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.60604929051531, | |
| "grad_norm": 0.7961990237236023, | |
| "learning_rate": 4.913519616552957e-06, | |
| "loss": 0.6176, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.6071695294996265, | |
| "grad_norm": 0.7807387709617615, | |
| "learning_rate": 4.913129260483589e-06, | |
| "loss": 0.5905, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.6082897684839432, | |
| "grad_norm": 0.8079019784927368, | |
| "learning_rate": 4.912738040976767e-06, | |
| "loss": 0.614, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.6094100074682599, | |
| "grad_norm": 0.7950747609138489, | |
| "learning_rate": 4.912345958172474e-06, | |
| "loss": 0.6129, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.6105302464525766, | |
| "grad_norm": 0.7880637645721436, | |
| "learning_rate": 4.911953012210998e-06, | |
| "loss": 0.5745, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.6116504854368932, | |
| "grad_norm": 0.797683596611023, | |
| "learning_rate": 4.911559203232941e-06, | |
| "loss": 0.5704, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.6127707244212098, | |
| "grad_norm": 0.7976189851760864, | |
| "learning_rate": 4.911164531379208e-06, | |
| "loss": 0.5753, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.6138909634055265, | |
| "grad_norm": 0.797524631023407, | |
| "learning_rate": 4.910768996791018e-06, | |
| "loss": 0.5965, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.6150112023898432, | |
| "grad_norm": 0.7966054677963257, | |
| "learning_rate": 4.910372599609896e-06, | |
| "loss": 0.5926, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.6161314413741599, | |
| "grad_norm": 0.8061427474021912, | |
| "learning_rate": 4.909975339977676e-06, | |
| "loss": 0.6076, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.6172516803584764, | |
| "grad_norm": 0.8192665576934814, | |
| "learning_rate": 4.909577218036501e-06, | |
| "loss": 0.5846, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.6183719193427931, | |
| "grad_norm": 0.8142986297607422, | |
| "learning_rate": 4.909178233928822e-06, | |
| "loss": 0.6269, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.6194921583271098, | |
| "grad_norm": 0.8129419684410095, | |
| "learning_rate": 4.908778387797399e-06, | |
| "loss": 0.6161, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.6206123973114265, | |
| "grad_norm": 0.7790163159370422, | |
| "learning_rate": 4.9083776797853015e-06, | |
| "loss": 0.6039, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.621732636295743, | |
| "grad_norm": 0.8370928168296814, | |
| "learning_rate": 4.907976110035903e-06, | |
| "loss": 0.5813, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.6228528752800597, | |
| "grad_norm": 0.8461244702339172, | |
| "learning_rate": 4.907573678692891e-06, | |
| "loss": 0.5879, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.6239731142643764, | |
| "grad_norm": 0.7943955659866333, | |
| "learning_rate": 4.907170385900258e-06, | |
| "loss": 0.579, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.6250933532486931, | |
| "grad_norm": 0.8056941628456116, | |
| "learning_rate": 4.9067662318023056e-06, | |
| "loss": 0.6096, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.6262135922330098, | |
| "grad_norm": 0.8091793060302734, | |
| "learning_rate": 4.906361216543643e-06, | |
| "loss": 0.5824, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.6273338312173263, | |
| "grad_norm": 0.8035262227058411, | |
| "learning_rate": 4.905955340269188e-06, | |
| "loss": 0.6232, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.628454070201643, | |
| "grad_norm": 0.7793251276016235, | |
| "learning_rate": 4.9055486031241675e-06, | |
| "loss": 0.5901, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.6295743091859597, | |
| "grad_norm": 0.7671579122543335, | |
| "learning_rate": 4.9051410052541135e-06, | |
| "loss": 0.5843, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.6306945481702764, | |
| "grad_norm": 0.8493455052375793, | |
| "learning_rate": 4.90473254680487e-06, | |
| "loss": 0.6254, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.6318147871545929, | |
| "grad_norm": 0.8092489838600159, | |
| "learning_rate": 4.904323227922586e-06, | |
| "loss": 0.5906, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.6329350261389096, | |
| "grad_norm": 0.8145341873168945, | |
| "learning_rate": 4.903913048753719e-06, | |
| "loss": 0.6129, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.6340552651232263, | |
| "grad_norm": 0.8953230381011963, | |
| "learning_rate": 4.903502009445033e-06, | |
| "loss": 0.5975, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.635175504107543, | |
| "grad_norm": 0.8133732676506042, | |
| "learning_rate": 4.903090110143605e-06, | |
| "loss": 0.558, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.6362957430918595, | |
| "grad_norm": 0.8249391317367554, | |
| "learning_rate": 4.902677350996812e-06, | |
| "loss": 0.5893, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.6374159820761762, | |
| "grad_norm": 0.8067794442176819, | |
| "learning_rate": 4.902263732152345e-06, | |
| "loss": 0.5823, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.6385362210604929, | |
| "grad_norm": 0.8032618761062622, | |
| "learning_rate": 4.901849253758199e-06, | |
| "loss": 0.6003, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.6396564600448096, | |
| "grad_norm": 0.8413413166999817, | |
| "learning_rate": 4.901433915962678e-06, | |
| "loss": 0.5742, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.6407766990291263, | |
| "grad_norm": 0.8067041039466858, | |
| "learning_rate": 4.9010177189143935e-06, | |
| "loss": 0.5902, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.6418969380134428, | |
| "grad_norm": 0.8752678632736206, | |
| "learning_rate": 4.900600662762265e-06, | |
| "loss": 0.6143, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.6430171769977595, | |
| "grad_norm": 0.8311488628387451, | |
| "learning_rate": 4.9001827476555166e-06, | |
| "loss": 0.5901, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.6441374159820762, | |
| "grad_norm": 0.8206403255462646, | |
| "learning_rate": 4.899763973743684e-06, | |
| "loss": 0.6056, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.6452576549663929, | |
| "grad_norm": 0.7974233627319336, | |
| "learning_rate": 4.899344341176606e-06, | |
| "loss": 0.5889, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.6463778939507094, | |
| "grad_norm": 0.8065476417541504, | |
| "learning_rate": 4.898923850104432e-06, | |
| "loss": 0.6, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.6474981329350261, | |
| "grad_norm": 0.8313128352165222, | |
| "learning_rate": 4.898502500677617e-06, | |
| "loss": 0.5973, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.6486183719193428, | |
| "grad_norm": 0.8816797733306885, | |
| "learning_rate": 4.898080293046923e-06, | |
| "loss": 0.5843, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.6497386109036595, | |
| "grad_norm": 0.816006600856781, | |
| "learning_rate": 4.897657227363419e-06, | |
| "loss": 0.6189, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.6508588498879762, | |
| "grad_norm": 0.8070967197418213, | |
| "learning_rate": 4.897233303778481e-06, | |
| "loss": 0.5872, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.6519790888722927, | |
| "grad_norm": 0.8039142489433289, | |
| "learning_rate": 4.896808522443794e-06, | |
| "loss": 0.5747, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.6530993278566094, | |
| "grad_norm": 0.8226941227912903, | |
| "learning_rate": 4.896382883511347e-06, | |
| "loss": 0.5845, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.6542195668409261, | |
| "grad_norm": 0.8548035025596619, | |
| "learning_rate": 4.895956387133437e-06, | |
| "loss": 0.5959, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.6553398058252428, | |
| "grad_norm": 0.8523133993148804, | |
| "learning_rate": 4.895529033462669e-06, | |
| "loss": 0.6107, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.6564600448095593, | |
| "grad_norm": 0.8199976086616516, | |
| "learning_rate": 4.8951008226519525e-06, | |
| "loss": 0.5981, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.657580283793876, | |
| "grad_norm": 0.8249738812446594, | |
| "learning_rate": 4.894671754854505e-06, | |
| "loss": 0.5888, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.6587005227781927, | |
| "grad_norm": 0.8519604206085205, | |
| "learning_rate": 4.894241830223851e-06, | |
| "loss": 0.6223, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.6598207617625094, | |
| "grad_norm": 0.8221259713172913, | |
| "learning_rate": 4.8938110489138205e-06, | |
| "loss": 0.5774, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.6609410007468259, | |
| "grad_norm": 0.8143469095230103, | |
| "learning_rate": 4.893379411078552e-06, | |
| "loss": 0.6049, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.6620612397311426, | |
| "grad_norm": 0.7872767448425293, | |
| "learning_rate": 4.892946916872486e-06, | |
| "loss": 0.5796, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.6631814787154593, | |
| "grad_norm": 0.8111532926559448, | |
| "learning_rate": 4.8925135664503755e-06, | |
| "loss": 0.6041, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.664301717699776, | |
| "grad_norm": 0.8038745522499084, | |
| "learning_rate": 4.892079359967276e-06, | |
| "loss": 0.5929, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.6654219566840927, | |
| "grad_norm": 0.8693163990974426, | |
| "learning_rate": 4.89164429757855e-06, | |
| "loss": 0.5892, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.6665421956684092, | |
| "grad_norm": 0.7890527248382568, | |
| "learning_rate": 4.891208379439866e-06, | |
| "loss": 0.5973, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.6676624346527259, | |
| "grad_norm": 0.8141840100288391, | |
| "learning_rate": 4.890771605707199e-06, | |
| "loss": 0.6031, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.6687826736370426, | |
| "grad_norm": 0.8265807032585144, | |
| "learning_rate": 4.890333976536831e-06, | |
| "loss": 0.5893, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.6699029126213593, | |
| "grad_norm": 0.8645585179328918, | |
| "learning_rate": 4.8898954920853484e-06, | |
| "loss": 0.6189, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.6710231516056758, | |
| "grad_norm": 0.8112260699272156, | |
| "learning_rate": 4.889456152509646e-06, | |
| "loss": 0.589, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.6721433905899925, | |
| "grad_norm": 0.8134514093399048, | |
| "learning_rate": 4.8890159579669214e-06, | |
| "loss": 0.5703, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6732636295743092, | |
| "grad_norm": 0.8513573408126831, | |
| "learning_rate": 4.888574908614681e-06, | |
| "loss": 0.5971, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.6743838685586259, | |
| "grad_norm": 0.8489837646484375, | |
| "learning_rate": 4.888133004610735e-06, | |
| "loss": 0.6084, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.6755041075429425, | |
| "grad_norm": 0.8141038417816162, | |
| "learning_rate": 4.887690246113201e-06, | |
| "loss": 0.596, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6766243465272591, | |
| "grad_norm": 0.8250095844268799, | |
| "learning_rate": 4.887246633280501e-06, | |
| "loss": 0.5535, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6777445855115758, | |
| "grad_norm": 0.8543534874916077, | |
| "learning_rate": 4.886802166271365e-06, | |
| "loss": 0.6056, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.6788648244958925, | |
| "grad_norm": 0.8766425848007202, | |
| "learning_rate": 4.886356845244824e-06, | |
| "loss": 0.5984, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.6799850634802092, | |
| "grad_norm": 0.8217388391494751, | |
| "learning_rate": 4.88591067036022e-06, | |
| "loss": 0.6037, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.6811053024645257, | |
| "grad_norm": 0.7816200852394104, | |
| "learning_rate": 4.885463641777197e-06, | |
| "loss": 0.5451, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.6822255414488424, | |
| "grad_norm": 0.8485130071640015, | |
| "learning_rate": 4.885015759655706e-06, | |
| "loss": 0.5917, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.6833457804331591, | |
| "grad_norm": 0.8265321850776672, | |
| "learning_rate": 4.884567024156002e-06, | |
| "loss": 0.5676, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.6844660194174758, | |
| "grad_norm": 0.8090165853500366, | |
| "learning_rate": 4.884117435438646e-06, | |
| "loss": 0.616, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.6855862584017924, | |
| "grad_norm": 0.8049173951148987, | |
| "learning_rate": 4.883666993664507e-06, | |
| "loss": 0.5814, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.686706497386109, | |
| "grad_norm": 0.8055282831192017, | |
| "learning_rate": 4.883215698994755e-06, | |
| "loss": 0.5947, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.6878267363704257, | |
| "grad_norm": 0.8200820088386536, | |
| "learning_rate": 4.882763551590865e-06, | |
| "loss": 0.6262, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.6889469753547424, | |
| "grad_norm": 0.808571994304657, | |
| "learning_rate": 4.882310551614623e-06, | |
| "loss": 0.6029, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.690067214339059, | |
| "grad_norm": 0.8104593753814697, | |
| "learning_rate": 4.881856699228114e-06, | |
| "loss": 0.5979, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.6911874533233756, | |
| "grad_norm": 0.7940874695777893, | |
| "learning_rate": 4.88140199459373e-06, | |
| "loss": 0.5818, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.6923076923076923, | |
| "grad_norm": 0.7887553572654724, | |
| "learning_rate": 4.880946437874169e-06, | |
| "loss": 0.573, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.693427931292009, | |
| "grad_norm": 0.8413677215576172, | |
| "learning_rate": 4.880490029232431e-06, | |
| "loss": 0.6158, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.6945481702763256, | |
| "grad_norm": 0.8174417614936829, | |
| "learning_rate": 4.880032768831825e-06, | |
| "loss": 0.57, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6956684092606422, | |
| "grad_norm": 0.8150051236152649, | |
| "learning_rate": 4.879574656835962e-06, | |
| "loss": 0.6071, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.6967886482449589, | |
| "grad_norm": 0.8536598682403564, | |
| "learning_rate": 4.879115693408757e-06, | |
| "loss": 0.6294, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6979088872292756, | |
| "grad_norm": 0.8469665050506592, | |
| "learning_rate": 4.8786558787144314e-06, | |
| "loss": 0.6146, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.6990291262135923, | |
| "grad_norm": 0.7985464930534363, | |
| "learning_rate": 4.878195212917511e-06, | |
| "loss": 0.5528, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.7001493651979089, | |
| "grad_norm": 0.8789647817611694, | |
| "learning_rate": 4.877733696182827e-06, | |
| "loss": 0.5926, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.7012696041822255, | |
| "grad_norm": 0.7992759943008423, | |
| "learning_rate": 4.877271328675511e-06, | |
| "loss": 0.5912, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.7023898431665422, | |
| "grad_norm": 0.7898268699645996, | |
| "learning_rate": 4.876808110561004e-06, | |
| "loss": 0.5722, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.7035100821508589, | |
| "grad_norm": 0.8306590914726257, | |
| "learning_rate": 4.87634404200505e-06, | |
| "loss": 0.627, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.7046303211351755, | |
| "grad_norm": 0.8404473662376404, | |
| "learning_rate": 4.875879123173694e-06, | |
| "loss": 0.5841, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.7057505601194921, | |
| "grad_norm": 0.8321950435638428, | |
| "learning_rate": 4.87541335423329e-06, | |
| "loss": 0.6141, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.7068707991038088, | |
| "grad_norm": 0.7946979403495789, | |
| "learning_rate": 4.874946735350492e-06, | |
| "loss": 0.5849, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.7079910380881255, | |
| "grad_norm": 0.8189645409584045, | |
| "learning_rate": 4.874479266692261e-06, | |
| "loss": 0.6146, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.7091112770724421, | |
| "grad_norm": 0.8169258832931519, | |
| "learning_rate": 4.8740109484258625e-06, | |
| "loss": 0.5926, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.7102315160567588, | |
| "grad_norm": 0.7945389151573181, | |
| "learning_rate": 4.873541780718863e-06, | |
| "loss": 0.5929, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.7113517550410754, | |
| "grad_norm": 0.7935434579849243, | |
| "learning_rate": 4.873071763739135e-06, | |
| "loss": 0.5942, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.7124719940253921, | |
| "grad_norm": 0.8001875877380371, | |
| "learning_rate": 4.872600897654853e-06, | |
| "loss": 0.5957, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.7135922330097088, | |
| "grad_norm": 0.8338243961334229, | |
| "learning_rate": 4.872129182634499e-06, | |
| "loss": 0.5908, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.7147124719940254, | |
| "grad_norm": 0.8988234400749207, | |
| "learning_rate": 4.871656618846855e-06, | |
| "loss": 0.5955, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.715832710978342, | |
| "grad_norm": 0.7992603778839111, | |
| "learning_rate": 4.87118320646101e-06, | |
| "loss": 0.5852, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.7169529499626587, | |
| "grad_norm": 0.8259397745132446, | |
| "learning_rate": 4.870708945646353e-06, | |
| "loss": 0.6156, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.7180731889469754, | |
| "grad_norm": 0.893584132194519, | |
| "learning_rate": 4.870233836572579e-06, | |
| "loss": 0.6117, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.719193427931292, | |
| "grad_norm": 0.8482956290245056, | |
| "learning_rate": 4.869757879409686e-06, | |
| "loss": 0.6018, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.7203136669156086, | |
| "grad_norm": 0.7782022356987, | |
| "learning_rate": 4.869281074327975e-06, | |
| "loss": 0.5761, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.7214339058999253, | |
| "grad_norm": 0.8154391050338745, | |
| "learning_rate": 4.8688034214980515e-06, | |
| "loss": 0.5846, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.722554144884242, | |
| "grad_norm": 0.8113802671432495, | |
| "learning_rate": 4.868324921090822e-06, | |
| "loss": 0.5616, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.7236743838685586, | |
| "grad_norm": 0.830033540725708, | |
| "learning_rate": 4.867845573277501e-06, | |
| "loss": 0.63, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.7247946228528753, | |
| "grad_norm": 0.8050087690353394, | |
| "learning_rate": 4.867365378229601e-06, | |
| "loss": 0.5894, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.7259148618371919, | |
| "grad_norm": 0.8076449036598206, | |
| "learning_rate": 4.8668843361189385e-06, | |
| "loss": 0.6145, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.7270351008215086, | |
| "grad_norm": 0.8889169692993164, | |
| "learning_rate": 4.8664024471176366e-06, | |
| "loss": 0.6154, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.7281553398058253, | |
| "grad_norm": 0.8604964017868042, | |
| "learning_rate": 4.865919711398118e-06, | |
| "loss": 0.5984, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.7292755787901419, | |
| "grad_norm": 0.828212320804596, | |
| "learning_rate": 4.86543612913311e-06, | |
| "loss": 0.5785, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.7303958177744585, | |
| "grad_norm": 0.786229133605957, | |
| "learning_rate": 4.864951700495642e-06, | |
| "loss": 0.5983, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.7315160567587752, | |
| "grad_norm": 0.8467420935630798, | |
| "learning_rate": 4.864466425659046e-06, | |
| "loss": 0.6037, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.7326362957430919, | |
| "grad_norm": 0.7861534953117371, | |
| "learning_rate": 4.863980304796958e-06, | |
| "loss": 0.564, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.7337565347274085, | |
| "grad_norm": 0.7819376587867737, | |
| "learning_rate": 4.863493338083316e-06, | |
| "loss": 0.5783, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.7348767737117252, | |
| "grad_norm": 0.7864521741867065, | |
| "learning_rate": 4.863005525692362e-06, | |
| "loss": 0.594, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.7359970126960418, | |
| "grad_norm": 0.7910285592079163, | |
| "learning_rate": 4.862516867798636e-06, | |
| "loss": 0.5888, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.7371172516803585, | |
| "grad_norm": 0.8056003451347351, | |
| "learning_rate": 4.862027364576987e-06, | |
| "loss": 0.5895, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.7382374906646751, | |
| "grad_norm": 0.8140441179275513, | |
| "learning_rate": 4.861537016202561e-06, | |
| "loss": 0.6015, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.7393577296489918, | |
| "grad_norm": 0.8237465620040894, | |
| "learning_rate": 4.861045822850811e-06, | |
| "loss": 0.6072, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.7404779686333084, | |
| "grad_norm": 0.832146942615509, | |
| "learning_rate": 4.860553784697487e-06, | |
| "loss": 0.6171, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.7415982076176251, | |
| "grad_norm": 0.8073914051055908, | |
| "learning_rate": 4.860060901918648e-06, | |
| "loss": 0.5931, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.7427184466019418, | |
| "grad_norm": 0.8193264007568359, | |
| "learning_rate": 4.859567174690647e-06, | |
| "loss": 0.5948, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.7438386855862584, | |
| "grad_norm": 0.8580235838890076, | |
| "learning_rate": 4.859072603190148e-06, | |
| "loss": 0.6157, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.7449589245705751, | |
| "grad_norm": 0.7729256749153137, | |
| "learning_rate": 4.8585771875941105e-06, | |
| "loss": 0.5876, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.7460791635548917, | |
| "grad_norm": 0.844573438167572, | |
| "learning_rate": 4.858080928079798e-06, | |
| "loss": 0.6206, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.7471994025392084, | |
| "grad_norm": 0.816228449344635, | |
| "learning_rate": 4.857583824824777e-06, | |
| "loss": 0.6247, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.748319641523525, | |
| "grad_norm": 0.7984215021133423, | |
| "learning_rate": 4.8570858780069155e-06, | |
| "loss": 0.6038, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.7494398805078417, | |
| "grad_norm": 0.850842297077179, | |
| "learning_rate": 4.856587087804382e-06, | |
| "loss": 0.6283, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.7505601194921583, | |
| "grad_norm": 0.7965848445892334, | |
| "learning_rate": 4.856087454395648e-06, | |
| "loss": 0.6018, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.751680358476475, | |
| "grad_norm": 0.7891208529472351, | |
| "learning_rate": 4.855586977959487e-06, | |
| "loss": 0.5965, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.7528005974607916, | |
| "grad_norm": 0.7875372767448425, | |
| "learning_rate": 4.855085658674973e-06, | |
| "loss": 0.6004, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.7539208364451083, | |
| "grad_norm": 0.7898383736610413, | |
| "learning_rate": 4.854583496721482e-06, | |
| "loss": 0.5566, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.7550410754294249, | |
| "grad_norm": 0.8215773701667786, | |
| "learning_rate": 4.854080492278693e-06, | |
| "loss": 0.5837, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.7561613144137416, | |
| "grad_norm": 0.810920000076294, | |
| "learning_rate": 4.853576645526584e-06, | |
| "loss": 0.5893, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.7572815533980582, | |
| "grad_norm": 0.7786296606063843, | |
| "learning_rate": 4.8530719566454345e-06, | |
| "loss": 0.5604, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.7584017923823749, | |
| "grad_norm": 0.7917574048042297, | |
| "learning_rate": 4.852566425815829e-06, | |
| "loss": 0.582, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.7595220313666916, | |
| "grad_norm": 0.8070117235183716, | |
| "learning_rate": 4.8520600532186475e-06, | |
| "loss": 0.604, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.7606422703510082, | |
| "grad_norm": 0.8068091869354248, | |
| "learning_rate": 4.8515528390350765e-06, | |
| "loss": 0.5896, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.7617625093353249, | |
| "grad_norm": 0.8220896124839783, | |
| "learning_rate": 4.8510447834466e-06, | |
| "loss": 0.593, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.7628827483196415, | |
| "grad_norm": 0.7868808507919312, | |
| "learning_rate": 4.850535886635006e-06, | |
| "loss": 0.6, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.7640029873039582, | |
| "grad_norm": 0.8102167248725891, | |
| "learning_rate": 4.850026148782381e-06, | |
| "loss": 0.5813, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.7651232262882748, | |
| "grad_norm": 0.8151317834854126, | |
| "learning_rate": 4.849515570071113e-06, | |
| "loss": 0.5747, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.7662434652725915, | |
| "grad_norm": 0.8038537502288818, | |
| "learning_rate": 4.849004150683892e-06, | |
| "loss": 0.6192, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.7673637042569081, | |
| "grad_norm": 0.8006526827812195, | |
| "learning_rate": 4.848491890803708e-06, | |
| "loss": 0.5866, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.7684839432412248, | |
| "grad_norm": 0.7970767617225647, | |
| "learning_rate": 4.847978790613851e-06, | |
| "loss": 0.6098, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.7696041822255415, | |
| "grad_norm": 0.7953868508338928, | |
| "learning_rate": 4.847464850297914e-06, | |
| "loss": 0.5758, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.7707244212098581, | |
| "grad_norm": 0.8159863352775574, | |
| "learning_rate": 4.8469500700397875e-06, | |
| "loss": 0.5949, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.7718446601941747, | |
| "grad_norm": 0.7919224500656128, | |
| "learning_rate": 4.8464344500236645e-06, | |
| "loss": 0.5769, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.7729648991784914, | |
| "grad_norm": 0.7827619314193726, | |
| "learning_rate": 4.845917990434039e-06, | |
| "loss": 0.5923, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.7740851381628081, | |
| "grad_norm": 0.7937460541725159, | |
| "learning_rate": 4.845400691455703e-06, | |
| "loss": 0.5759, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.7752053771471247, | |
| "grad_norm": 0.7901214361190796, | |
| "learning_rate": 4.844882553273752e-06, | |
| "loss": 0.5657, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.7763256161314414, | |
| "grad_norm": 0.8160180449485779, | |
| "learning_rate": 4.844363576073579e-06, | |
| "loss": 0.5971, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.777445855115758, | |
| "grad_norm": 0.8019487261772156, | |
| "learning_rate": 4.843843760040877e-06, | |
| "loss": 0.5804, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.7785660941000747, | |
| "grad_norm": 0.7898805141448975, | |
| "learning_rate": 4.843323105361643e-06, | |
| "loss": 0.5854, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.7796863330843914, | |
| "grad_norm": 0.829105794429779, | |
| "learning_rate": 4.842801612222171e-06, | |
| "loss": 0.5914, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.780806572068708, | |
| "grad_norm": 0.7974743843078613, | |
| "learning_rate": 4.842279280809054e-06, | |
| "loss": 0.5856, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.7819268110530246, | |
| "grad_norm": 0.7757387161254883, | |
| "learning_rate": 4.841756111309188e-06, | |
| "loss": 0.5679, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.7830470500373413, | |
| "grad_norm": 0.827438473701477, | |
| "learning_rate": 4.841232103909766e-06, | |
| "loss": 0.5657, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.784167289021658, | |
| "grad_norm": 0.8150497078895569, | |
| "learning_rate": 4.840707258798284e-06, | |
| "loss": 0.6065, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7852875280059746, | |
| "grad_norm": 0.7733539342880249, | |
| "learning_rate": 4.8401815761625345e-06, | |
| "loss": 0.5806, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.7864077669902912, | |
| "grad_norm": 0.8224393129348755, | |
| "learning_rate": 4.839655056190611e-06, | |
| "loss": 0.5723, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.7875280059746079, | |
| "grad_norm": 0.8295096755027771, | |
| "learning_rate": 4.839127699070907e-06, | |
| "loss": 0.609, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.7886482449589246, | |
| "grad_norm": 0.7875826358795166, | |
| "learning_rate": 4.838599504992116e-06, | |
| "loss": 0.5625, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.7897684839432412, | |
| "grad_norm": 0.8078942894935608, | |
| "learning_rate": 4.838070474143228e-06, | |
| "loss": 0.6059, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.7908887229275579, | |
| "grad_norm": 0.8128692507743835, | |
| "learning_rate": 4.837540606713538e-06, | |
| "loss": 0.5878, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.7920089619118745, | |
| "grad_norm": 0.7869188785552979, | |
| "learning_rate": 4.837009902892633e-06, | |
| "loss": 0.5741, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.7931292008961912, | |
| "grad_norm": 0.7958232164382935, | |
| "learning_rate": 4.836478362870406e-06, | |
| "loss": 0.5879, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.7942494398805079, | |
| "grad_norm": 0.7908474802970886, | |
| "learning_rate": 4.835945986837045e-06, | |
| "loss": 0.5766, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.7953696788648245, | |
| "grad_norm": 0.8136919736862183, | |
| "learning_rate": 4.83541277498304e-06, | |
| "loss": 0.587, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.7964899178491411, | |
| "grad_norm": 0.8330440521240234, | |
| "learning_rate": 4.8348787274991765e-06, | |
| "loss": 0.5935, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.7976101568334578, | |
| "grad_norm": 0.8112781643867493, | |
| "learning_rate": 4.834343844576543e-06, | |
| "loss": 0.5952, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.7987303958177745, | |
| "grad_norm": 0.8082567453384399, | |
| "learning_rate": 4.8338081264065235e-06, | |
| "loss": 0.5717, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.7998506348020911, | |
| "grad_norm": 0.9035626649856567, | |
| "learning_rate": 4.8332715731808035e-06, | |
| "loss": 0.583, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.8009708737864077, | |
| "grad_norm": 0.8441537022590637, | |
| "learning_rate": 4.8327341850913645e-06, | |
| "loss": 0.6096, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.8020911127707244, | |
| "grad_norm": 0.8210310339927673, | |
| "learning_rate": 4.832195962330491e-06, | |
| "loss": 0.6072, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.8032113517550411, | |
| "grad_norm": 0.8119834065437317, | |
| "learning_rate": 4.831656905090761e-06, | |
| "loss": 0.5864, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.8043315907393578, | |
| "grad_norm": 0.8333593606948853, | |
| "learning_rate": 4.831117013565056e-06, | |
| "loss": 0.6003, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.8054518297236744, | |
| "grad_norm": 0.8569757342338562, | |
| "learning_rate": 4.83057628794655e-06, | |
| "loss": 0.5936, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.806572068707991, | |
| "grad_norm": 0.7970093488693237, | |
| "learning_rate": 4.830034728428723e-06, | |
| "loss": 0.5776, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.8076923076923077, | |
| "grad_norm": 0.8280375003814697, | |
| "learning_rate": 4.829492335205347e-06, | |
| "loss": 0.5952, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.8088125466766244, | |
| "grad_norm": 0.8139337301254272, | |
| "learning_rate": 4.828949108470496e-06, | |
| "loss": 0.6314, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.809932785660941, | |
| "grad_norm": 0.8315449357032776, | |
| "learning_rate": 4.82840504841854e-06, | |
| "loss": 0.5978, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.8110530246452576, | |
| "grad_norm": 0.8375460505485535, | |
| "learning_rate": 4.827860155244149e-06, | |
| "loss": 0.588, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.8121732636295743, | |
| "grad_norm": 0.7952830791473389, | |
| "learning_rate": 4.827314429142291e-06, | |
| "loss": 0.582, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.813293502613891, | |
| "grad_norm": 0.8073609471321106, | |
| "learning_rate": 4.826767870308228e-06, | |
| "loss": 0.5678, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.8144137415982076, | |
| "grad_norm": 0.8218507766723633, | |
| "learning_rate": 4.826220478937527e-06, | |
| "loss": 0.6072, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.8155339805825242, | |
| "grad_norm": 0.7980933785438538, | |
| "learning_rate": 4.825672255226047e-06, | |
| "loss": 0.5524, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.8166542195668409, | |
| "grad_norm": 0.8054032921791077, | |
| "learning_rate": 4.825123199369947e-06, | |
| "loss": 0.5913, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.8177744585511576, | |
| "grad_norm": 0.8572160601615906, | |
| "learning_rate": 4.824573311565685e-06, | |
| "loss": 0.5954, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.8188946975354743, | |
| "grad_norm": 0.8065678477287292, | |
| "learning_rate": 4.824022592010013e-06, | |
| "loss": 0.5966, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.8200149365197908, | |
| "grad_norm": 0.812080442905426, | |
| "learning_rate": 4.8234710408999865e-06, | |
| "loss": 0.5666, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.8211351755041075, | |
| "grad_norm": 0.8508359789848328, | |
| "learning_rate": 4.822918658432952e-06, | |
| "loss": 0.6137, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.8222554144884242, | |
| "grad_norm": 0.7996507287025452, | |
| "learning_rate": 4.8223654448065585e-06, | |
| "loss": 0.6038, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.8233756534727409, | |
| "grad_norm": 0.8098906874656677, | |
| "learning_rate": 4.821811400218748e-06, | |
| "loss": 0.5759, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.8244958924570575, | |
| "grad_norm": 0.8356794118881226, | |
| "learning_rate": 4.821256524867764e-06, | |
| "loss": 0.6151, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.8256161314413741, | |
| "grad_norm": 0.8081963658332825, | |
| "learning_rate": 4.820700818952145e-06, | |
| "loss": 0.5943, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.8267363704256908, | |
| "grad_norm": 0.825581967830658, | |
| "learning_rate": 4.820144282670728e-06, | |
| "loss": 0.5769, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.8278566094100075, | |
| "grad_norm": 0.8103387355804443, | |
| "learning_rate": 4.819586916222644e-06, | |
| "loss": 0.5789, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.8289768483943242, | |
| "grad_norm": 0.8200883865356445, | |
| "learning_rate": 4.819028719807326e-06, | |
| "loss": 0.602, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.8300970873786407, | |
| "grad_norm": 0.8113400340080261, | |
| "learning_rate": 4.818469693624499e-06, | |
| "loss": 0.5951, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.8312173263629574, | |
| "grad_norm": 0.8407019972801208, | |
| "learning_rate": 4.817909837874189e-06, | |
| "loss": 0.5909, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.8323375653472741, | |
| "grad_norm": 0.8122291564941406, | |
| "learning_rate": 4.8173491527567155e-06, | |
| "loss": 0.5964, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.8334578043315908, | |
| "grad_norm": 0.8028641939163208, | |
| "learning_rate": 4.816787638472697e-06, | |
| "loss": 0.5926, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.8345780433159073, | |
| "grad_norm": 0.8061012625694275, | |
| "learning_rate": 4.816225295223048e-06, | |
| "loss": 0.5835, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.835698282300224, | |
| "grad_norm": 0.8372674584388733, | |
| "learning_rate": 4.815662123208979e-06, | |
| "loss": 0.6048, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.8368185212845407, | |
| "grad_norm": 0.7943447828292847, | |
| "learning_rate": 4.815098122631998e-06, | |
| "loss": 0.5999, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.8379387602688574, | |
| "grad_norm": 0.7981782555580139, | |
| "learning_rate": 4.814533293693909e-06, | |
| "loss": 0.5838, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.8390589992531741, | |
| "grad_norm": 0.8036529421806335, | |
| "learning_rate": 4.8139676365968125e-06, | |
| "loss": 0.59, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.8401792382374906, | |
| "grad_norm": 0.8037401437759399, | |
| "learning_rate": 4.813401151543105e-06, | |
| "loss": 0.5603, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.8412994772218073, | |
| "grad_norm": 0.8377910852432251, | |
| "learning_rate": 4.812833838735479e-06, | |
| "loss": 0.5984, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.842419716206124, | |
| "grad_norm": 0.7984040975570679, | |
| "learning_rate": 4.812265698376924e-06, | |
| "loss": 0.5719, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.8435399551904407, | |
| "grad_norm": 0.8077371716499329, | |
| "learning_rate": 4.811696730670726e-06, | |
| "loss": 0.5824, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.8446601941747572, | |
| "grad_norm": 0.7634063959121704, | |
| "learning_rate": 4.811126935820466e-06, | |
| "loss": 0.5584, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.8457804331590739, | |
| "grad_norm": 0.8120932579040527, | |
| "learning_rate": 4.810556314030021e-06, | |
| "loss": 0.5729, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.8469006721433906, | |
| "grad_norm": 0.7915536165237427, | |
| "learning_rate": 4.8099848655035655e-06, | |
| "loss": 0.6039, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.8480209111277073, | |
| "grad_norm": 0.8074361085891724, | |
| "learning_rate": 4.809412590445566e-06, | |
| "loss": 0.5888, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.8491411501120238, | |
| "grad_norm": 0.8198902010917664, | |
| "learning_rate": 4.808839489060788e-06, | |
| "loss": 0.5895, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.8502613890963405, | |
| "grad_norm": 0.8113036751747131, | |
| "learning_rate": 4.808265561554293e-06, | |
| "loss": 0.5938, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.8513816280806572, | |
| "grad_norm": 0.7734106183052063, | |
| "learning_rate": 4.807690808131437e-06, | |
| "loss": 0.5981, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.8525018670649739, | |
| "grad_norm": 0.7905157208442688, | |
| "learning_rate": 4.80711522899787e-06, | |
| "loss": 0.5862, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.8536221060492906, | |
| "grad_norm": 0.7930572628974915, | |
| "learning_rate": 4.80653882435954e-06, | |
| "loss": 0.6078, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.8547423450336071, | |
| "grad_norm": 0.7793716788291931, | |
| "learning_rate": 4.8059615944226885e-06, | |
| "loss": 0.6005, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.8558625840179238, | |
| "grad_norm": 0.7978621125221252, | |
| "learning_rate": 4.8053835393938545e-06, | |
| "loss": 0.5989, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.8569828230022405, | |
| "grad_norm": 0.8089454770088196, | |
| "learning_rate": 4.804804659479869e-06, | |
| "loss": 0.5576, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.8581030619865572, | |
| "grad_norm": 0.7966460585594177, | |
| "learning_rate": 4.804224954887862e-06, | |
| "loss": 0.5777, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.8592233009708737, | |
| "grad_norm": 0.8103785514831543, | |
| "learning_rate": 4.8036444258252555e-06, | |
| "loss": 0.5965, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.8603435399551904, | |
| "grad_norm": 0.8107985854148865, | |
| "learning_rate": 4.803063072499768e-06, | |
| "loss": 0.5909, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.8614637789395071, | |
| "grad_norm": 0.8095008730888367, | |
| "learning_rate": 4.8024808951194105e-06, | |
| "loss": 0.565, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.8625840179238238, | |
| "grad_norm": 0.8257828950881958, | |
| "learning_rate": 4.801897893892493e-06, | |
| "loss": 0.5825, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.8637042569081405, | |
| "grad_norm": 0.8019899725914001, | |
| "learning_rate": 4.8013140690276186e-06, | |
| "loss": 0.5806, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.864824495892457, | |
| "grad_norm": 0.809320330619812, | |
| "learning_rate": 4.800729420733683e-06, | |
| "loss": 0.5778, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.8659447348767737, | |
| "grad_norm": 0.8071535229682922, | |
| "learning_rate": 4.800143949219879e-06, | |
| "loss": 0.5702, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.8670649738610904, | |
| "grad_norm": 0.8018308281898499, | |
| "learning_rate": 4.799557654695692e-06, | |
| "loss": 0.6004, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.8681852128454071, | |
| "grad_norm": 0.8141055107116699, | |
| "learning_rate": 4.798970537370904e-06, | |
| "loss": 0.57, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.8693054518297236, | |
| "grad_norm": 0.7862063050270081, | |
| "learning_rate": 4.798382597455591e-06, | |
| "loss": 0.5528, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.8704256908140403, | |
| "grad_norm": 0.8331758975982666, | |
| "learning_rate": 4.797793835160122e-06, | |
| "loss": 0.6008, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.871545929798357, | |
| "grad_norm": 0.823685884475708, | |
| "learning_rate": 4.797204250695161e-06, | |
| "loss": 0.6154, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.8726661687826737, | |
| "grad_norm": 0.7857784032821655, | |
| "learning_rate": 4.796613844271665e-06, | |
| "loss": 0.5994, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.8737864077669902, | |
| "grad_norm": 0.8060506582260132, | |
| "learning_rate": 4.796022616100889e-06, | |
| "loss": 0.5633, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.8749066467513069, | |
| "grad_norm": 0.8090853095054626, | |
| "learning_rate": 4.795430566394377e-06, | |
| "loss": 0.5573, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.8760268857356236, | |
| "grad_norm": 0.8793733716011047, | |
| "learning_rate": 4.7948376953639695e-06, | |
| "loss": 0.619, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.8771471247199403, | |
| "grad_norm": 0.8061932921409607, | |
| "learning_rate": 4.794244003221801e-06, | |
| "loss": 0.6047, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.878267363704257, | |
| "grad_norm": 0.8307778239250183, | |
| "learning_rate": 4.7936494901803e-06, | |
| "loss": 0.5825, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.8793876026885735, | |
| "grad_norm": 0.8569943308830261, | |
| "learning_rate": 4.793054156452187e-06, | |
| "loss": 0.5754, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.8805078416728902, | |
| "grad_norm": 0.793869137763977, | |
| "learning_rate": 4.792458002250479e-06, | |
| "loss": 0.5864, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.8816280806572069, | |
| "grad_norm": 0.796796977519989, | |
| "learning_rate": 4.791861027788483e-06, | |
| "loss": 0.5988, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.8827483196415236, | |
| "grad_norm": 0.8137564063072205, | |
| "learning_rate": 4.791263233279802e-06, | |
| "loss": 0.5768, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.8838685586258401, | |
| "grad_norm": 0.7946325540542603, | |
| "learning_rate": 4.790664618938332e-06, | |
| "loss": 0.5674, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.8849887976101568, | |
| "grad_norm": 0.8001543283462524, | |
| "learning_rate": 4.790065184978263e-06, | |
| "loss": 0.5705, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.8861090365944735, | |
| "grad_norm": 0.8289190530776978, | |
| "learning_rate": 4.789464931614075e-06, | |
| "loss": 0.6023, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.8872292755787902, | |
| "grad_norm": 0.8041342496871948, | |
| "learning_rate": 4.788863859060546e-06, | |
| "loss": 0.5963, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.8883495145631068, | |
| "grad_norm": 0.8325103521347046, | |
| "learning_rate": 4.788261967532743e-06, | |
| "loss": 0.6005, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.8894697535474234, | |
| "grad_norm": 0.7777535319328308, | |
| "learning_rate": 4.787659257246029e-06, | |
| "loss": 0.5701, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.8905899925317401, | |
| "grad_norm": 0.8024612069129944, | |
| "learning_rate": 4.787055728416058e-06, | |
| "loss": 0.593, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.8917102315160568, | |
| "grad_norm": 0.7804939150810242, | |
| "learning_rate": 4.786451381258777e-06, | |
| "loss": 0.5547, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.8928304705003735, | |
| "grad_norm": 0.8120240569114685, | |
| "learning_rate": 4.7858462159904284e-06, | |
| "loss": 0.5955, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.89395070948469, | |
| "grad_norm": 0.7861335873603821, | |
| "learning_rate": 4.785240232827543e-06, | |
| "loss": 0.5769, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.8950709484690067, | |
| "grad_norm": 0.7677375674247742, | |
| "learning_rate": 4.784633431986948e-06, | |
| "loss": 0.572, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.8961911874533234, | |
| "grad_norm": 0.8227370381355286, | |
| "learning_rate": 4.784025813685761e-06, | |
| "loss": 0.5968, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.8973114264376401, | |
| "grad_norm": 0.8148797750473022, | |
| "learning_rate": 4.783417378141393e-06, | |
| "loss": 0.587, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.8984316654219567, | |
| "grad_norm": 0.8100507855415344, | |
| "learning_rate": 4.782808125571547e-06, | |
| "loss": 0.581, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.8995519044062733, | |
| "grad_norm": 0.8154464960098267, | |
| "learning_rate": 4.782198056194219e-06, | |
| "loss": 0.5932, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.90067214339059, | |
| "grad_norm": 0.7960717678070068, | |
| "learning_rate": 4.781587170227698e-06, | |
| "loss": 0.568, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.9017923823749067, | |
| "grad_norm": 0.7840545177459717, | |
| "learning_rate": 4.780975467890561e-06, | |
| "loss": 0.5709, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.9029126213592233, | |
| "grad_norm": 0.7957537174224854, | |
| "learning_rate": 4.780362949401683e-06, | |
| "loss": 0.5946, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.9040328603435399, | |
| "grad_norm": 0.8008480668067932, | |
| "learning_rate": 4.779749614980225e-06, | |
| "loss": 0.6082, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.9051530993278566, | |
| "grad_norm": 0.8121342062950134, | |
| "learning_rate": 4.779135464845647e-06, | |
| "loss": 0.5799, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.9062733383121733, | |
| "grad_norm": 0.7985235452651978, | |
| "learning_rate": 4.778520499217694e-06, | |
| "loss": 0.6025, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.90739357729649, | |
| "grad_norm": 0.7896565198898315, | |
| "learning_rate": 4.7779047183164075e-06, | |
| "loss": 0.5954, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.9085138162808065, | |
| "grad_norm": 0.807032585144043, | |
| "learning_rate": 4.7772881223621175e-06, | |
| "loss": 0.6022, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.9096340552651232, | |
| "grad_norm": 0.8102108836174011, | |
| "learning_rate": 4.7766707115754485e-06, | |
| "loss": 0.5775, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.9107542942494399, | |
| "grad_norm": 0.8097600340843201, | |
| "learning_rate": 4.776052486177314e-06, | |
| "loss": 0.5877, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.9118745332337566, | |
| "grad_norm": 0.7986764907836914, | |
| "learning_rate": 4.775433446388921e-06, | |
| "loss": 0.5625, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.9129947722180732, | |
| "grad_norm": 0.8128889203071594, | |
| "learning_rate": 4.7748135924317655e-06, | |
| "loss": 0.6031, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.9141150112023898, | |
| "grad_norm": 0.7872529029846191, | |
| "learning_rate": 4.774192924527638e-06, | |
| "loss": 0.5624, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.9152352501867065, | |
| "grad_norm": 0.8138707280158997, | |
| "learning_rate": 4.773571442898618e-06, | |
| "loss": 0.5848, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.9163554891710232, | |
| "grad_norm": 0.8301820158958435, | |
| "learning_rate": 4.772949147767077e-06, | |
| "loss": 0.6031, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.9174757281553398, | |
| "grad_norm": 0.8014585971832275, | |
| "learning_rate": 4.772326039355677e-06, | |
| "loss": 0.5575, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.9185959671396564, | |
| "grad_norm": 0.8259865641593933, | |
| "learning_rate": 4.771702117887371e-06, | |
| "loss": 0.596, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.9197162061239731, | |
| "grad_norm": 0.8028122782707214, | |
| "learning_rate": 4.771077383585404e-06, | |
| "loss": 0.5851, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.9208364451082898, | |
| "grad_norm": 0.7960208058357239, | |
| "learning_rate": 4.7704518366733095e-06, | |
| "loss": 0.5999, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.9219566840926064, | |
| "grad_norm": 0.797467827796936, | |
| "learning_rate": 4.769825477374915e-06, | |
| "loss": 0.5809, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.8268144130706787, | |
| "learning_rate": 4.7691983059143374e-06, | |
| "loss": 0.5821, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.9241971620612397, | |
| "grad_norm": 0.803213357925415, | |
| "learning_rate": 4.768570322515982e-06, | |
| "loss": 0.5606, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.9253174010455564, | |
| "grad_norm": 0.8422443866729736, | |
| "learning_rate": 4.767941527404547e-06, | |
| "loss": 0.5813, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.926437640029873, | |
| "grad_norm": 0.7983731031417847, | |
| "learning_rate": 4.767311920805021e-06, | |
| "loss": 0.5747, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.9275578790141897, | |
| "grad_norm": 0.827704906463623, | |
| "learning_rate": 4.766681502942681e-06, | |
| "loss": 0.5587, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.9286781179985063, | |
| "grad_norm": 0.832988977432251, | |
| "learning_rate": 4.7660502740430985e-06, | |
| "loss": 0.572, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.929798356982823, | |
| "grad_norm": 0.8249674439430237, | |
| "learning_rate": 4.76541823433213e-06, | |
| "loss": 0.5801, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.9309185959671397, | |
| "grad_norm": 0.8318766355514526, | |
| "learning_rate": 4.764785384035925e-06, | |
| "loss": 0.5793, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.9320388349514563, | |
| "grad_norm": 0.8395798206329346, | |
| "learning_rate": 4.764151723380923e-06, | |
| "loss": 0.587, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.933159073935773, | |
| "grad_norm": 0.8279564380645752, | |
| "learning_rate": 4.763517252593852e-06, | |
| "loss": 0.597, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.9342793129200896, | |
| "grad_norm": 0.856611967086792, | |
| "learning_rate": 4.762881971901732e-06, | |
| "loss": 0.5488, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.9353995519044063, | |
| "grad_norm": 0.8096729516983032, | |
| "learning_rate": 4.762245881531872e-06, | |
| "loss": 0.577, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.936519790888723, | |
| "grad_norm": 0.8170201778411865, | |
| "learning_rate": 4.761608981711868e-06, | |
| "loss": 0.5602, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.9376400298730396, | |
| "grad_norm": 0.8169147968292236, | |
| "learning_rate": 4.76097127266961e-06, | |
| "loss": 0.5974, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.9387602688573562, | |
| "grad_norm": 0.806937038898468, | |
| "learning_rate": 4.760332754633276e-06, | |
| "loss": 0.5795, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.9398805078416729, | |
| "grad_norm": 0.8193455934524536, | |
| "learning_rate": 4.759693427831331e-06, | |
| "loss": 0.5886, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.9410007468259896, | |
| "grad_norm": 0.8178059458732605, | |
| "learning_rate": 4.759053292492532e-06, | |
| "loss": 0.5951, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.9421209858103062, | |
| "grad_norm": 0.8102588057518005, | |
| "learning_rate": 4.758412348845925e-06, | |
| "loss": 0.5724, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.9432412247946228, | |
| "grad_norm": 0.8178951144218445, | |
| "learning_rate": 4.7577705971208466e-06, | |
| "loss": 0.5849, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.9443614637789395, | |
| "grad_norm": 0.8069207668304443, | |
| "learning_rate": 4.757128037546918e-06, | |
| "loss": 0.5978, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.9454817027632562, | |
| "grad_norm": 0.8033669590950012, | |
| "learning_rate": 4.756484670354054e-06, | |
| "loss": 0.5794, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.9466019417475728, | |
| "grad_norm": 0.8094858527183533, | |
| "learning_rate": 4.755840495772455e-06, | |
| "loss": 0.6082, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.9477221807318895, | |
| "grad_norm": 0.8048908114433289, | |
| "learning_rate": 4.755195514032614e-06, | |
| "loss": 0.5644, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.9488424197162061, | |
| "grad_norm": 0.8288162350654602, | |
| "learning_rate": 4.754549725365311e-06, | |
| "loss": 0.5665, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.9499626587005228, | |
| "grad_norm": 0.8059208989143372, | |
| "learning_rate": 4.753903130001612e-06, | |
| "loss": 0.6111, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.9510828976848394, | |
| "grad_norm": 0.7926841974258423, | |
| "learning_rate": 4.753255728172875e-06, | |
| "loss": 0.5632, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.9522031366691561, | |
| "grad_norm": 0.8072546720504761, | |
| "learning_rate": 4.752607520110748e-06, | |
| "loss": 0.5953, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.9533233756534727, | |
| "grad_norm": 0.8185967803001404, | |
| "learning_rate": 4.7519585060471616e-06, | |
| "loss": 0.6158, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.9544436146377894, | |
| "grad_norm": 0.8182018399238586, | |
| "learning_rate": 4.751308686214341e-06, | |
| "loss": 0.586, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.955563853622106, | |
| "grad_norm": 0.8257445693016052, | |
| "learning_rate": 4.750658060844796e-06, | |
| "loss": 0.5818, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.9566840926064227, | |
| "grad_norm": 0.7723025679588318, | |
| "learning_rate": 4.750006630171325e-06, | |
| "loss": 0.5754, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.9578043315907394, | |
| "grad_norm": 0.7873339653015137, | |
| "learning_rate": 4.7493543944270174e-06, | |
| "loss": 0.5641, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.958924570575056, | |
| "grad_norm": 0.8600917458534241, | |
| "learning_rate": 4.748701353845247e-06, | |
| "loss": 0.5993, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.9600448095593727, | |
| "grad_norm": 0.7842808365821838, | |
| "learning_rate": 4.748047508659677e-06, | |
| "loss": 0.5928, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.9611650485436893, | |
| "grad_norm": 0.7855350375175476, | |
| "learning_rate": 4.747392859104258e-06, | |
| "loss": 0.5814, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.962285287528006, | |
| "grad_norm": 0.8112151026725769, | |
| "learning_rate": 4.746737405413231e-06, | |
| "loss": 0.5753, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.9634055265123226, | |
| "grad_norm": 0.8161853551864624, | |
| "learning_rate": 4.746081147821121e-06, | |
| "loss": 0.5853, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.9645257654966393, | |
| "grad_norm": 0.7908380627632141, | |
| "learning_rate": 4.745424086562743e-06, | |
| "loss": 0.5854, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.9656460044809559, | |
| "grad_norm": 0.8022582530975342, | |
| "learning_rate": 4.7447662218731995e-06, | |
| "loss": 0.5602, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.9667662434652726, | |
| "grad_norm": 0.8379324674606323, | |
| "learning_rate": 4.744107553987878e-06, | |
| "loss": 0.5921, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.9678864824495892, | |
| "grad_norm": 0.8459264636039734, | |
| "learning_rate": 4.743448083142458e-06, | |
| "loss": 0.5984, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.9690067214339059, | |
| "grad_norm": 0.8297575116157532, | |
| "learning_rate": 4.742787809572901e-06, | |
| "loss": 0.5897, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.9701269604182226, | |
| "grad_norm": 0.8278453946113586, | |
| "learning_rate": 4.742126733515459e-06, | |
| "loss": 0.597, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.9712471994025392, | |
| "grad_norm": 0.8626736402511597, | |
| "learning_rate": 4.741464855206671e-06, | |
| "loss": 0.6084, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.9723674383868559, | |
| "grad_norm": 0.8282763957977295, | |
| "learning_rate": 4.740802174883363e-06, | |
| "loss": 0.5668, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.9734876773711725, | |
| "grad_norm": 0.8218764066696167, | |
| "learning_rate": 4.740138692782646e-06, | |
| "loss": 0.5937, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.9746079163554892, | |
| "grad_norm": 0.7809569835662842, | |
| "learning_rate": 4.73947440914192e-06, | |
| "loss": 0.5895, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.9757281553398058, | |
| "grad_norm": 0.8076401948928833, | |
| "learning_rate": 4.738809324198872e-06, | |
| "loss": 0.5757, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.9768483943241225, | |
| "grad_norm": 0.8134318590164185, | |
| "learning_rate": 4.738143438191473e-06, | |
| "loss": 0.5326, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.9779686333084391, | |
| "grad_norm": 0.7635218501091003, | |
| "learning_rate": 4.737476751357985e-06, | |
| "loss": 0.5476, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.9790888722927558, | |
| "grad_norm": 0.8077232241630554, | |
| "learning_rate": 4.736809263936952e-06, | |
| "loss": 0.5722, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.9802091112770724, | |
| "grad_norm": 0.8017358183860779, | |
| "learning_rate": 4.736140976167206e-06, | |
| "loss": 0.5907, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.9813293502613891, | |
| "grad_norm": 0.8405149579048157, | |
| "learning_rate": 4.735471888287868e-06, | |
| "loss": 0.6333, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.9824495892457058, | |
| "grad_norm": 0.7879468202590942, | |
| "learning_rate": 4.734802000538342e-06, | |
| "loss": 0.5496, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.9835698282300224, | |
| "grad_norm": 0.8601053357124329, | |
| "learning_rate": 4.734131313158319e-06, | |
| "loss": 0.5751, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.984690067214339, | |
| "grad_norm": 0.8143794536590576, | |
| "learning_rate": 4.733459826387777e-06, | |
| "loss": 0.583, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.9858103061986557, | |
| "grad_norm": 0.8531054258346558, | |
| "learning_rate": 4.732787540466979e-06, | |
| "loss": 0.6068, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.9869305451829724, | |
| "grad_norm": 0.8039345145225525, | |
| "learning_rate": 4.732114455636475e-06, | |
| "loss": 0.5873, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.988050784167289, | |
| "grad_norm": 0.8260403275489807, | |
| "learning_rate": 4.7314405721371e-06, | |
| "loss": 0.5922, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.9891710231516057, | |
| "grad_norm": 0.8372035622596741, | |
| "learning_rate": 4.7307658902099755e-06, | |
| "loss": 0.5862, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.9902912621359223, | |
| "grad_norm": 0.8514466881752014, | |
| "learning_rate": 4.730090410096507e-06, | |
| "loss": 0.5981, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.991411501120239, | |
| "grad_norm": 0.7753927111625671, | |
| "learning_rate": 4.7294141320383875e-06, | |
| "loss": 0.6009, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.9925317401045557, | |
| "grad_norm": 0.8559523820877075, | |
| "learning_rate": 4.7287370562775955e-06, | |
| "loss": 0.5903, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.9936519790888723, | |
| "grad_norm": 0.7929627895355225, | |
| "learning_rate": 4.728059183056394e-06, | |
| "loss": 0.5727, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.9947722180731889, | |
| "grad_norm": 0.8660935759544373, | |
| "learning_rate": 4.72738051261733e-06, | |
| "loss": 0.5701, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.9958924570575056, | |
| "grad_norm": 0.8257469534873962, | |
| "learning_rate": 4.72670104520324e-06, | |
| "loss": 0.5592, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.9970126960418223, | |
| "grad_norm": 0.8579026460647583, | |
| "learning_rate": 4.726020781057241e-06, | |
| "loss": 0.5888, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.9981329350261389, | |
| "grad_norm": 0.791415810585022, | |
| "learning_rate": 4.7253397204227385e-06, | |
| "loss": 0.5572, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.9992531740104555, | |
| "grad_norm": 0.8314563035964966, | |
| "learning_rate": 4.724657863543419e-06, | |
| "loss": 0.5832, | |
| "step": 892 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 5352, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 892, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.2532247335102054e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
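
The object above is the complete checkpoint state in the format the Hugging Face `transformers` `Trainer` writes as `trainer_state.json`: per-step records in `log_history` (one every `logging_steps` = 1 step) plus run-level metadata. The trailer is internally consistent: `max_steps` 5352 over `num_train_epochs` 6 gives 892 steps per epoch, which matches both `save_steps` = 892 and the final record at `global_step` 892, `epoch` ≈ 0.9993, i.e. this checkpoint was saved at the end of the first epoch. Below is a minimal sketch of how the log could be read back for inspection; the file path, the `matplotlib` dependency, and the output file name are illustrative assumptions, not part of the original state file.

```python
# Minimal sketch: load a Hugging Face trainer_state.json and inspect the
# logged training curves. Field names match the keys shown above.
import json

with open("trainer_state.json") as f:  # assumed save location
    state = json.load(f)

# Keep only training records (evaluation records would carry eval_* keys).
train_log = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]
lrs = [e["learning_rate"] for e in train_log]

print(f"global_step={state['global_step']}, epoch={state['epoch']:.4f}")
print(f"final loss={losses[-1]:.4f}, final lr={lrs[-1]:.3e}")

# Optional: plot loss and learning rate on twin axes (requires matplotlib).
try:
    import matplotlib.pyplot as plt

    fig, ax1 = plt.subplots()
    ax1.plot(steps, losses, label="loss")
    ax1.set_xlabel("step")
    ax1.set_ylabel("loss")
    ax2 = ax1.twinx()
    ax2.plot(steps, lrs, color="tab:orange", label="learning rate")
    ax2.set_ylabel("learning rate")
    fig.tight_layout()
    plt.savefig("training_curves.png")  # assumed output name
except ImportError:
    pass  # plotting is optional; the printed summary above still works
```

Over the span shown here (steps 624-892) the loss hovers around 0.55-0.63 while the learning rate decays only slightly (≈4.878e-6 to ≈4.725e-6), consistent with a long schedule that is still near its plateau at the end of epoch 1.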