{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.992,
  "eval_steps": 500,
  "global_step": 498,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006,
      "grad_norm": 8.54271125793457,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.3108,
      "step": 1
    },
    {
      "epoch": 0.012,
      "grad_norm": 8.63392162322998,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.3289,
      "step": 2
    },
    {
      "epoch": 0.018,
      "grad_norm": 8.526409149169922,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.3149,
      "step": 3
    },
    {
      "epoch": 0.024,
      "grad_norm": 8.731363296508789,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.3429,
      "step": 4
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.385260581970215,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.2979,
      "step": 5
    },
    {
      "epoch": 0.036,
      "grad_norm": 8.243244171142578,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.2892,
      "step": 6
    },
    {
      "epoch": 0.042,
      "grad_norm": 8.073339462280273,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.3137,
      "step": 7
    },
    {
      "epoch": 0.048,
      "grad_norm": 6.883190155029297,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.2973,
      "step": 8
    },
    {
      "epoch": 0.054,
      "grad_norm": 6.144676208496094,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.2344,
      "step": 9
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.110970973968506,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.2714,
      "step": 10
    },
    {
      "epoch": 0.066,
      "grad_norm": 3.4984993934631348,
      "learning_rate": 2.2e-06,
      "loss": 1.1777,
      "step": 11
    },
    {
      "epoch": 0.072,
      "grad_norm": 3.4035353660583496,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.2093,
      "step": 12
    },
    {
      "epoch": 0.078,
      "grad_norm": 2.914463520050049,
      "learning_rate": 2.6e-06,
      "loss": 1.1692,
      "step": 13
    },
    {
      "epoch": 0.084,
      "grad_norm": 2.596907138824463,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.1505,
      "step": 14
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.722634792327881,
      "learning_rate": 3e-06,
      "loss": 1.108,
      "step": 15
    },
    {
      "epoch": 0.096,
      "grad_norm": 3.9575209617614746,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.0893,
      "step": 16
    },
    {
      "epoch": 0.102,
      "grad_norm": 3.614138126373291,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 1.0595,
      "step": 17
    },
    {
      "epoch": 0.108,
      "grad_norm": 3.3580446243286133,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.0657,
      "step": 18
    },
    {
      "epoch": 0.114,
      "grad_norm": 2.600682020187378,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 1.0683,
      "step": 19
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.0540666580200195,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.0472,
      "step": 20
    },
    {
      "epoch": 0.126,
      "grad_norm": 1.9993228912353516,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.0222,
      "step": 21
    },
    {
      "epoch": 0.132,
      "grad_norm": 1.9911528825759888,
      "learning_rate": 4.4e-06,
      "loss": 0.9912,
      "step": 22
    },
    {
      "epoch": 0.138,
      "grad_norm": 1.6965489387512207,
      "learning_rate": 4.600000000000001e-06,
      "loss": 1.008,
      "step": 23
    },
    {
      "epoch": 0.144,
      "grad_norm": 1.6771678924560547,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.9709,
      "step": 24
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4323240518569946,
      "learning_rate": 5e-06,
      "loss": 1.0129,
      "step": 25
    },
    {
      "epoch": 0.156,
      "grad_norm": 1.2269575595855713,
      "learning_rate": 5.2e-06,
      "loss": 0.9489,
      "step": 26
    },
    {
      "epoch": 0.162,
      "grad_norm": 1.1984143257141113,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.9647,
      "step": 27
    },
    {
      "epoch": 0.168,
      "grad_norm": 1.229371190071106,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.9509,
      "step": 28
    },
    {
      "epoch": 0.174,
      "grad_norm": 1.2915998697280884,
      "learning_rate": 5.8e-06,
      "loss": 0.9432,
      "step": 29
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.2094134092330933,
      "learning_rate": 6e-06,
      "loss": 0.9398,
      "step": 30
    },
    {
      "epoch": 0.186,
      "grad_norm": 1.0977376699447632,
      "learning_rate": 6.200000000000001e-06,
      "loss": 0.9257,
      "step": 31
    },
    {
      "epoch": 0.192,
      "grad_norm": 0.9972071647644043,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.9248,
      "step": 32
    },
    {
      "epoch": 0.198,
      "grad_norm": 1.0568616390228271,
      "learning_rate": 6.600000000000001e-06,
      "loss": 0.9179,
      "step": 33
    },
    {
      "epoch": 0.204,
      "grad_norm": 0.9405488967895508,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.8877,
      "step": 34
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.0385318994522095,
      "learning_rate": 7e-06,
      "loss": 0.9216,
      "step": 35
    },
    {
      "epoch": 0.216,
      "grad_norm": 1.0187602043151855,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.9055,
      "step": 36
    },
    {
      "epoch": 0.222,
      "grad_norm": 1.0075594186782837,
      "learning_rate": 7.4e-06,
      "loss": 0.9413,
      "step": 37
    },
    {
      "epoch": 0.228,
      "grad_norm": 1.1520859003067017,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.9438,
      "step": 38
    },
    {
      "epoch": 0.234,
      "grad_norm": 1.013381004333496,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.93,
      "step": 39
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.0709172487258911,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.8885,
      "step": 40
    },
    {
      "epoch": 0.246,
      "grad_norm": 1.071448802947998,
      "learning_rate": 8.2e-06,
      "loss": 0.8915,
      "step": 41
    },
    {
      "epoch": 0.252,
      "grad_norm": 0.9831300973892212,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.9103,
      "step": 42
    },
    {
      "epoch": 0.258,
      "grad_norm": 0.8900676965713501,
      "learning_rate": 8.6e-06,
      "loss": 0.8616,
      "step": 43
    },
    {
      "epoch": 0.264,
      "grad_norm": 0.9614577293395996,
      "learning_rate": 8.8e-06,
      "loss": 0.8986,
      "step": 44
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.92194664478302,
      "learning_rate": 9e-06,
      "loss": 0.9231,
      "step": 45
    },
    {
      "epoch": 0.276,
      "grad_norm": 0.9245752096176147,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.8654,
      "step": 46
    },
    {
      "epoch": 0.282,
      "grad_norm": 0.8902332186698914,
      "learning_rate": 9.4e-06,
      "loss": 0.8937,
      "step": 47
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.9520232677459717,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.8673,
      "step": 48
    },
    {
      "epoch": 0.294,
      "grad_norm": 1.0402417182922363,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.9088,
      "step": 49
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8294693231582642,
      "learning_rate": 1e-05,
      "loss": 0.8828,
      "step": 50
    },
    {
      "epoch": 0.306,
      "grad_norm": 0.934485137462616,
      "learning_rate": 9.999877063188124e-06,
      "loss": 0.8798,
      "step": 51
    },
    {
      "epoch": 0.312,
      "grad_norm": 1.0290961265563965,
      "learning_rate": 9.999508258797876e-06,
      "loss": 0.8696,
      "step": 52
    },
    {
      "epoch": 0.318,
      "grad_norm": 0.8552939891815186,
      "learning_rate": 9.998893604965111e-06,
      "loss": 0.8565,
      "step": 53
    },
    {
      "epoch": 0.324,
      "grad_norm": 0.929507315158844,
      "learning_rate": 9.998033131915266e-06,
      "loss": 0.8877,
      "step": 54
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8685278296470642,
      "learning_rate": 9.996926881961862e-06,
      "loss": 0.8574,
      "step": 55
    },
    {
      "epoch": 0.336,
      "grad_norm": 0.9464159607887268,
      "learning_rate": 9.995574909504434e-06,
      "loss": 0.8796,
      "step": 56
    },
    {
      "epoch": 0.342,
      "grad_norm": 0.9430830478668213,
      "learning_rate": 9.993977281025862e-06,
      "loss": 0.8923,
      "step": 57
    },
    {
      "epoch": 0.348,
      "grad_norm": 0.904295027256012,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.8489,
      "step": 58
    },
    {
      "epoch": 0.354,
      "grad_norm": 0.9471361041069031,
      "learning_rate": 9.990045382333243e-06,
      "loss": 0.8846,
      "step": 59
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9903223514556885,
      "learning_rate": 9.987711305469232e-06,
      "loss": 0.8861,
      "step": 60
    },
    {
      "epoch": 0.366,
      "grad_norm": 0.9374863505363464,
      "learning_rate": 9.985131959274637e-06,
      "loss": 0.8806,
      "step": 61
    },
    {
      "epoch": 0.372,
      "grad_norm": 0.8957496881484985,
      "learning_rate": 9.982307470588097e-06,
      "loss": 0.8314,
      "step": 62
    },
    {
      "epoch": 0.378,
      "grad_norm": 0.9793574213981628,
      "learning_rate": 9.979237978303066e-06,
      "loss": 0.8684,
      "step": 63
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.9214521050453186,
      "learning_rate": 9.975923633360985e-06,
      "loss": 0.8772,
      "step": 64
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.8720438480377197,
      "learning_rate": 9.972364598743851e-06,
      "loss": 0.848,
      "step": 65
    },
    {
      "epoch": 0.396,
      "grad_norm": 1.052175521850586,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.877,
      "step": 66
    },
    {
      "epoch": 0.402,
      "grad_norm": 0.8388480544090271,
      "learning_rate": 9.96451317256656e-06,
      "loss": 0.8656,
      "step": 67
    },
    {
      "epoch": 0.408,
      "grad_norm": 0.909453809261322,
      "learning_rate": 9.960221167098124e-06,
      "loss": 0.8489,
      "step": 68
    },
    {
      "epoch": 0.414,
      "grad_norm": 0.9141209721565247,
      "learning_rate": 9.955685244119092e-06,
      "loss": 0.8678,
      "step": 69
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.8349696397781372,
      "learning_rate": 9.950905626682229e-06,
      "loss": 0.8537,
      "step": 70
    },
    {
      "epoch": 0.426,
      "grad_norm": 0.8247827887535095,
      "learning_rate": 9.945882549823906e-06,
      "loss": 0.8778,
      "step": 71
    },
    {
      "epoch": 0.432,
      "grad_norm": 0.889093816280365,
      "learning_rate": 9.940616260552545e-06,
      "loss": 0.8683,
      "step": 72
    },
    {
      "epoch": 0.438,
      "grad_norm": 0.947341799736023,
      "learning_rate": 9.935107017836472e-06,
      "loss": 0.8524,
      "step": 73
    },
    {
      "epoch": 0.444,
      "grad_norm": 1.007067084312439,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.9062,
      "step": 74
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.9058011174201965,
      "learning_rate": 9.92336076766601e-06,
      "loss": 0.855,
      "step": 75
    },
    {
      "epoch": 0.456,
      "grad_norm": 1.0678291320800781,
      "learning_rate": 9.917124337830242e-06,
      "loss": 0.8343,
      "step": 76
    },
    {
      "epoch": 0.462,
      "grad_norm": 1.0028138160705566,
      "learning_rate": 9.910646109758596e-06,
      "loss": 0.8131,
      "step": 77
    },
    {
      "epoch": 0.468,
      "grad_norm": 0.889417827129364,
      "learning_rate": 9.903926402016153e-06,
      "loss": 0.8635,
      "step": 78
    },
    {
      "epoch": 0.474,
      "grad_norm": 1.00179922580719,
      "learning_rate": 9.896965545042692e-06,
      "loss": 0.8447,
      "step": 79
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.9413260221481323,
      "learning_rate": 9.889763881136439e-06,
      "loss": 0.8269,
      "step": 80
    },
    {
      "epoch": 0.486,
      "grad_norm": 0.9819712042808533,
      "learning_rate": 9.882321764437234e-06,
      "loss": 0.867,
      "step": 81
    },
    {
      "epoch": 0.492,
      "grad_norm": 0.951102614402771,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.8697,
      "step": 82
    },
    {
      "epoch": 0.498,
      "grad_norm": 0.9580561518669128,
      "learning_rate": 9.866717648322335e-06,
      "loss": 0.847,
      "step": 83
    },
    {
      "epoch": 0.504,
      "grad_norm": 0.9333314895629883,
      "learning_rate": 9.858556416234755e-06,
      "loss": 0.8418,
      "step": 84
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.028032660484314,
      "learning_rate": 9.850156265972722e-06,
      "loss": 0.8564,
      "step": 85
    },
    {
      "epoch": 0.516,
      "grad_norm": 0.9514734745025635,
      "learning_rate": 9.841517610611309e-06,
      "loss": 0.8373,
      "step": 86
    },
    {
      "epoch": 0.522,
      "grad_norm": 1.0056345462799072,
      "learning_rate": 9.832640874954017e-06,
      "loss": 0.8433,
      "step": 87
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.9009577631950378,
      "learning_rate": 9.82352649551188e-06,
      "loss": 0.8322,
      "step": 88
    },
    {
      "epoch": 0.534,
      "grad_norm": 0.9138115048408508,
      "learning_rate": 9.814174920481999e-06,
      "loss": 0.8508,
      "step": 89
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.971014142036438,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.8666,
      "step": 90
    },
    {
      "epoch": 0.546,
      "grad_norm": 0.8792802095413208,
      "learning_rate": 9.794762034744925e-06,
      "loss": 0.8403,
      "step": 91
    },
    {
      "epoch": 0.552,
      "grad_norm": 0.9411885738372803,
      "learning_rate": 9.784701678661045e-06,
      "loss": 0.8349,
      "step": 92
    },
    {
      "epoch": 0.558,
      "grad_norm": 0.9519578218460083,
      "learning_rate": 9.774406036189104e-06,
      "loss": 0.8573,
      "step": 93
    },
    {
      "epoch": 0.564,
      "grad_norm": 0.9149671792984009,
      "learning_rate": 9.763875613614482e-06,
      "loss": 0.8541,
      "step": 94
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.9249022006988525,
      "learning_rate": 9.753110928767816e-06,
      "loss": 0.856,
      "step": 95
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.9328562617301941,
      "learning_rate": 9.742112510999516e-06,
      "loss": 0.8221,
      "step": 96
    },
    {
      "epoch": 0.582,
      "grad_norm": 1.0939850807189941,
      "learning_rate": 9.730880901153747e-06,
      "loss": 0.8485,
      "step": 97
    },
    {
      "epoch": 0.588,
      "grad_norm": 0.9670605659484863,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.8507,
      "step": 98
    },
    {
      "epoch": 0.594,
      "grad_norm": 1.015991449356079,
      "learning_rate": 9.707720325915105e-06,
      "loss": 0.8672,
      "step": 99
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8745937943458557,
      "learning_rate": 9.69579249943714e-06,
      "loss": 0.834,
      "step": 100
    },
    {
      "epoch": 0.606,
      "grad_norm": 0.9689687490463257,
      "learning_rate": 9.683633758655529e-06,
      "loss": 0.8634,
      "step": 101
    },
    {
      "epoch": 0.612,
      "grad_norm": 0.8708063364028931,
      "learning_rate": 9.671244701472999e-06,
      "loss": 0.8648,
      "step": 102
    },
    {
      "epoch": 0.618,
      "grad_norm": 0.868241012096405,
      "learning_rate": 9.658625937118033e-06,
      "loss": 0.8587,
      "step": 103
    },
    {
      "epoch": 0.624,
      "grad_norm": 0.871407687664032,
      "learning_rate": 9.645778086114892e-06,
      "loss": 0.8501,
      "step": 104
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.0273360013961792,
      "learning_rate": 9.632701780253111e-06,
      "loss": 0.8492,
      "step": 105
    },
    {
      "epoch": 0.636,
      "grad_norm": 0.9795374274253845,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.8397,
      "step": 106
    },
    {
      "epoch": 0.642,
      "grad_norm": 0.8975308537483215,
      "learning_rate": 9.605866387251186e-06,
      "loss": 0.8408,
      "step": 107
    },
    {
      "epoch": 0.648,
      "grad_norm": 0.9039437770843506,
      "learning_rate": 9.592108619734107e-06,
      "loss": 0.8453,
      "step": 108
    },
    {
      "epoch": 0.654,
      "grad_norm": 0.9673753976821899,
      "learning_rate": 9.578125036539625e-06,
      "loss": 0.8514,
      "step": 109
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.8944799900054932,
      "learning_rate": 9.563916325306595e-06,
      "loss": 0.8583,
      "step": 110
    },
    {
      "epoch": 0.666,
      "grad_norm": 0.8239108324050903,
      "learning_rate": 9.549483184744483e-06,
      "loss": 0.8347,
      "step": 111
    },
    {
      "epoch": 0.672,
      "grad_norm": 0.9192603826522827,
      "learning_rate": 9.534826324599002e-06,
      "loss": 0.8498,
      "step": 112
    },
    {
      "epoch": 0.678,
      "grad_norm": 0.9150791168212891,
      "learning_rate": 9.519946465617217e-06,
      "loss": 0.8466,
      "step": 113
    },
    {
      "epoch": 0.684,
      "grad_norm": 0.8976817727088928,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.8005,
      "step": 114
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.983814001083374,
      "learning_rate": 9.489520688926534e-06,
      "loss": 0.8555,
      "step": 115
    },
    {
      "epoch": 0.696,
      "grad_norm": 0.8755694031715393,
      "learning_rate": 9.473976267396831e-06,
      "loss": 0.8272,
      "step": 116
    },
    {
      "epoch": 0.702,
      "grad_norm": 0.8284350037574768,
      "learning_rate": 9.458211839315636e-06,
      "loss": 0.8437,
      "step": 117
    },
    {
      "epoch": 0.708,
      "grad_norm": 1.0080901384353638,
      "learning_rate": 9.442228179894362e-06,
      "loss": 0.8574,
      "step": 118
    },
    {
      "epoch": 0.714,
      "grad_norm": 0.9424903392791748,
      "learning_rate": 9.426026075125062e-06,
      "loss": 0.8311,
      "step": 119
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.9614003896713257,
      "learning_rate": 9.409606321741776e-06,
      "loss": 0.8328,
      "step": 120
    },
    {
      "epoch": 0.726,
      "grad_norm": 0.9206553101539612,
      "learning_rate": 9.39296972718136e-06,
      "loss": 0.8299,
      "step": 121
    },
    {
      "epoch": 0.732,
      "grad_norm": 0.8070399761199951,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.7928,
      "step": 122
    },
    {
      "epoch": 0.738,
      "grad_norm": 0.9403797388076782,
      "learning_rate": 9.35904929755184e-06,
      "loss": 0.8318,
      "step": 123
    },
    {
      "epoch": 0.744,
      "grad_norm": 0.9790809154510498,
      "learning_rate": 9.341767130510529e-06,
      "loss": 0.8271,
      "step": 124
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8472377061843872,
      "learning_rate": 9.324271458265642e-06,
      "loss": 0.8397,
      "step": 125
    },
    {
      "epoch": 0.756,
      "grad_norm": 0.9861665368080139,
      "learning_rate": 9.306563141162046e-06,
      "loss": 0.8385,
      "step": 126
    },
    {
      "epoch": 0.762,
      "grad_norm": 0.9910692572593689,
      "learning_rate": 9.288643050001362e-06,
      "loss": 0.8268,
      "step": 127
    },
    {
      "epoch": 0.768,
      "grad_norm": 0.8648113012313843,
      "learning_rate": 9.270512065999139e-06,
      "loss": 0.8317,
      "step": 128
    },
    {
      "epoch": 0.774,
      "grad_norm": 1.017806887626648,
      "learning_rate": 9.252171080741525e-06,
      "loss": 0.8338,
      "step": 129
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.91566401720047,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.8524,
      "step": 130
    },
    {
      "epoch": 0.786,
      "grad_norm": 0.7790189385414124,
      "learning_rate": 9.214862724394133e-06,
      "loss": 0.7918,
      "step": 131
    },
    {
      "epoch": 0.792,
      "grad_norm": 0.9569950103759766,
      "learning_rate": 9.195897187932513e-06,
      "loss": 0.8557,
      "step": 132
    },
    {
      "epoch": 0.798,
      "grad_norm": 1.0062651634216309,
      "learning_rate": 9.176725319381589e-06,
      "loss": 0.8198,
      "step": 133
    },
    {
      "epoch": 0.804,
      "grad_norm": 0.8560892343521118,
      "learning_rate": 9.157348061512728e-06,
      "loss": 0.809,
      "step": 134
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8932763338088989,
      "learning_rate": 9.137766367197246e-06,
      "loss": 0.8393,
      "step": 135
    },
    {
      "epoch": 0.816,
      "grad_norm": 0.988492488861084,
      "learning_rate": 9.117981199359575e-06,
      "loss": 0.8424,
      "step": 136
    },
    {
      "epoch": 0.822,
      "grad_norm": 0.9165382981300354,
      "learning_rate": 9.097993530929895e-06,
      "loss": 0.851,
      "step": 137
    },
    {
      "epoch": 0.828,
      "grad_norm": 0.8335132598876953,
      "learning_rate": 9.077804344796302e-06,
      "loss": 0.8015,
      "step": 138
    },
    {
      "epoch": 0.834,
      "grad_norm": 0.9975520372390747,
      "learning_rate": 9.057414633756466e-06,
      "loss": 0.8619,
      "step": 139
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9830179810523987,
      "learning_rate": 9.036825400468814e-06,
      "loss": 0.8534,
      "step": 140
    },
    {
      "epoch": 0.846,
      "grad_norm": 0.8578035831451416,
      "learning_rate": 9.016037657403225e-06,
      "loss": 0.8411,
      "step": 141
    },
    {
      "epoch": 0.852,
      "grad_norm": 0.954033374786377,
      "learning_rate": 8.995052426791247e-06,
      "loss": 0.8253,
      "step": 142
    },
    {
      "epoch": 0.858,
      "grad_norm": 0.8898175954818726,
      "learning_rate": 8.973870740575814e-06,
      "loss": 0.8109,
      "step": 143
    },
    {
      "epoch": 0.864,
      "grad_norm": 0.8814643621444702,
      "learning_rate": 8.952493640360518e-06,
      "loss": 0.8511,
      "step": 144
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.8422505855560303,
      "learning_rate": 8.930922177358379e-06,
      "loss": 0.8317,
      "step": 145
    },
    {
      "epoch": 0.876,
      "grad_norm": 0.9142546057701111,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.8673,
      "step": 146
    },
    {
      "epoch": 0.882,
      "grad_norm": 0.8921462893486023,
      "learning_rate": 8.88720041558216e-06,
      "loss": 0.8562,
      "step": 147
    },
    {
      "epoch": 0.888,
      "grad_norm": 0.8459780812263489,
      "learning_rate": 8.865052266813686e-06,
      "loss": 0.8278,
      "step": 148
    },
    {
      "epoch": 0.894,
      "grad_norm": 0.8853496313095093,
      "learning_rate": 8.842714055163841e-06,
      "loss": 0.8534,
      "step": 149
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8529783487319946,
      "learning_rate": 8.820186879108038e-06,
      "loss": 0.8157,
      "step": 150
    },
    {
      "epoch": 0.906,
      "grad_norm": 2.1111347675323486,
      "learning_rate": 8.797471846413957e-06,
      "loss": 0.9127,
      "step": 151
    },
    {
      "epoch": 0.912,
      "grad_norm": 0.9344639778137207,
      "learning_rate": 8.77457007408708e-06,
      "loss": 0.8456,
      "step": 152
    },
    {
      "epoch": 0.918,
      "grad_norm": 0.885231077671051,
      "learning_rate": 8.751482688315758e-06,
      "loss": 0.8514,
      "step": 153
    },
    {
      "epoch": 0.924,
      "grad_norm": 0.8927384614944458,
      "learning_rate": 8.728210824415829e-06,
      "loss": 0.8016,
      "step": 154
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8285719752311707,
      "learning_rate": 8.704755626774796e-06,
      "loss": 0.8097,
      "step": 155
    },
    {
      "epoch": 0.936,
      "grad_norm": 0.8650528788566589,
      "learning_rate": 8.681118248795548e-06,
      "loss": 0.8119,
      "step": 156
    },
    {
      "epoch": 0.942,
      "grad_norm": 0.9040648937225342,
      "learning_rate": 8.65729985283964e-06,
      "loss": 0.8307,
      "step": 157
    },
    {
      "epoch": 0.948,
      "grad_norm": 0.8652745485305786,
      "learning_rate": 8.633301610170136e-06,
      "loss": 0.8523,
      "step": 158
    },
    {
      "epoch": 0.954,
      "grad_norm": 0.8268901109695435,
      "learning_rate": 8.609124700894017e-06,
      "loss": 0.8131,
      "step": 159
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8694038987159729,
      "learning_rate": 8.584770313904138e-06,
      "loss": 0.8059,
      "step": 160
    },
    {
      "epoch": 0.966,
      "grad_norm": 0.8299560546875,
      "learning_rate": 8.560239646820779e-06,
      "loss": 0.8473,
      "step": 161
    },
    {
      "epoch": 0.972,
      "grad_norm": 0.8134430646896362,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.8056,
      "step": 162
    },
    {
      "epoch": 0.978,
      "grad_norm": 0.8414528369903564,
      "learning_rate": 8.510654306138028e-06,
      "loss": 0.8283,
      "step": 163
    },
    {
      "epoch": 0.984,
      "grad_norm": 0.8133756518363953,
      "learning_rate": 8.485602070884118e-06,
      "loss": 0.8317,
      "step": 164
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.9140997529029846,
      "learning_rate": 8.460378432107779e-06,
      "loss": 0.815,
      "step": 165
    },
    {
      "epoch": 0.996,
      "grad_norm": 0.8432086706161499,
      "learning_rate": 8.43498463017451e-06,
      "loss": 0.8581,
      "step": 166
    },
    {
      "epoch": 1.004,
      "grad_norm": 0.9441366791725159,
      "learning_rate": 8.409421913817526e-06,
      "loss": 0.734,
      "step": 167
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.012139916419983,
      "learning_rate": 8.383691540076372e-06,
      "loss": 0.7089,
      "step": 168
    },
    {
      "epoch": 1.016,
      "grad_norm": 0.8592427372932434,
      "learning_rate": 8.357794774235094e-06,
      "loss": 0.6723,
      "step": 169
    },
    {
      "epoch": 1.022,
      "grad_norm": 0.9001071453094482,
      "learning_rate": 8.331732889760021e-06,
      "loss": 0.6969,
      "step": 170
    },
    {
      "epoch": 1.028,
      "grad_norm": 0.8972833156585693,
      "learning_rate": 8.305507168237152e-06,
      "loss": 0.6898,
      "step": 171
    },
    {
      "epoch": 1.034,
      "grad_norm": 0.9920740723609924,
      "learning_rate": 8.279118899309121e-06,
      "loss": 0.6635,
      "step": 172
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.8686314225196838,
      "learning_rate": 8.252569380611793e-06,
      "loss": 0.6745,
      "step": 173
    },
    {
      "epoch": 1.046,
      "grad_norm": 0.9142981767654419,
      "learning_rate": 8.22585991771044e-06,
      "loss": 0.6924,
      "step": 174
    },
    {
      "epoch": 1.052,
      "grad_norm": 0.9121496677398682,
      "learning_rate": 8.198991824035546e-06,
      "loss": 0.6961,
      "step": 175
    },
    {
      "epoch": 1.058,
      "grad_norm": 0.9670741558074951,
      "learning_rate": 8.171966420818227e-06,
      "loss": 0.672,
      "step": 176
    },
    {
      "epoch": 1.064,
      "grad_norm": 0.8759296536445618,
      "learning_rate": 8.144785037025246e-06,
      "loss": 0.6714,
      "step": 177
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.8616648316383362,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.6745,
      "step": 178
    },
    {
      "epoch": 1.076,
      "grad_norm": 0.8725053668022156,
      "learning_rate": 8.089959681865134e-06,
      "loss": 0.6709,
      "step": 179
    },
    {
      "epoch": 1.082,
      "grad_norm": 0.8588338494300842,
      "learning_rate": 8.062318406519751e-06,
      "loss": 0.6537,
      "step": 180
    },
    {
      "epoch": 1.088,
      "grad_norm": 0.8224848508834839,
      "learning_rate": 8.034526542509629e-06,
      "loss": 0.66,
      "step": 181
    },
    {
      "epoch": 1.094,
      "grad_norm": 0.8267282843589783,
      "learning_rate": 8.00658545649203e-06,
      "loss": 0.6623,
      "step": 182
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.9191231727600098,
      "learning_rate": 7.978496522462167e-06,
      "loss": 0.6889,
      "step": 183
    },
    {
      "epoch": 1.106,
      "grad_norm": 0.7937670946121216,
      "learning_rate": 7.950261121685642e-06,
      "loss": 0.697,
      "step": 184
    },
    {
      "epoch": 1.112,
      "grad_norm": 0.7790123224258423,
      "learning_rate": 7.921880642630514e-06,
      "loss": 0.6588,
      "step": 185
    },
    {
      "epoch": 1.1179999999999999,
      "grad_norm": 0.8752031922340393,
      "learning_rate": 7.89335648089903e-06,
      "loss": 0.6785,
      "step": 186
    },
    {
      "epoch": 1.124,
      "grad_norm": 0.8321321606636047,
      "learning_rate": 7.864690039158991e-06,
      "loss": 0.6681,
      "step": 187
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.8590896725654602,
      "learning_rate": 7.835882727074779e-06,
      "loss": 0.697,
      "step": 188
    },
    {
      "epoch": 1.1360000000000001,
      "grad_norm": 0.8286581039428711,
      "learning_rate": 7.806935961238041e-06,
      "loss": 0.7075,
      "step": 189
    },
    {
      "epoch": 1.142,
      "grad_norm": 0.8149038553237915,
      "learning_rate": 7.777851165098012e-06,
      "loss": 0.6758,
      "step": 190
    },
    {
      "epoch": 1.148,
      "grad_norm": 0.8983557224273682,
      "learning_rate": 7.748629768891542e-06,
      "loss": 0.7238,
      "step": 191
    },
    {
      "epoch": 1.154,
      "grad_norm": 0.794880747795105,
      "learning_rate": 7.719273209572745e-06,
      "loss": 0.6923,
      "step": 192
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.805933952331543,
      "learning_rate": 7.689782930742345e-06,
      "loss": 0.6845,
      "step": 193
    },
    {
      "epoch": 1.166,
      "grad_norm": 0.7863611578941345,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.659,
      "step": 194
    },
    {
      "epoch": 1.172,
      "grad_norm": 0.8232173323631287,
      "learning_rate": 7.630407021756419e-06,
      "loss": 0.6899,
      "step": 195
    },
    {
      "epoch": 1.178,
      "grad_norm": 0.7703882455825806,
      "learning_rate": 7.600524311394873e-06,
      "loss": 0.7069,
      "step": 196
    },
    {
      "epoch": 1.184,
      "grad_norm": 0.9021191596984863,
      "learning_rate": 7.570513720966108e-06,
      "loss": 0.6774,
      "step": 197
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.8564242720603943,
      "learning_rate": 7.540376726232648e-06,
      "loss": 0.6456,
      "step": 198
    },
    {
      "epoch": 1.196,
      "grad_norm": 0.8424244523048401,
      "learning_rate": 7.51011480917291e-06,
      "loss": 0.6932,
      "step": 199
    },
    {
      "epoch": 1.202,
      "grad_norm": 0.8924134373664856,
      "learning_rate": 7.4797294579083405e-06,
      "loss": 0.68,
      "step": 200
    },
    {
      "epoch": 1.208,
      "grad_norm": 0.8763133883476257,
      "learning_rate": 7.449222166630218e-06,
      "loss": 0.7039,
      "step": 201
    },
    {
      "epoch": 1.214,
      "grad_norm": 0.7990080714225769,
      "learning_rate": 7.4185944355261996e-06,
      "loss": 0.6744,
      "step": 202
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.8522285223007202,
      "learning_rate": 7.3878477707065314e-06,
      "loss": 0.6779,
      "step": 203
    },
    {
      "epoch": 1.226,
      "grad_norm": 0.8528336882591248,
      "learning_rate": 7.3569836841299905e-06,
      "loss": 0.6932,
      "step": 204
    },
    {
      "epoch": 1.232,
      "grad_norm": 0.818355143070221,
      "learning_rate": 7.326003693529538e-06,
      "loss": 0.6669,
      "step": 205
    },
    {
      "epoch": 1.238,
      "grad_norm": 0.7781950831413269,
      "learning_rate": 7.294909322337689e-06,
      "loss": 0.659,
      "step": 206
    },
    {
      "epoch": 1.244,
      "grad_norm": 0.7805776000022888,
      "learning_rate": 7.263702099611586e-06,
      "loss": 0.6605,
      "step": 207
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.7630364894866943,
      "learning_rate": 7.232383559957815e-06,
      "loss": 0.6659,
      "step": 208
    },
    {
      "epoch": 1.256,
      "grad_norm": 0.803504467010498,
      "learning_rate": 7.200955243456946e-06,
      "loss": 0.706,
      "step": 209
    },
    {
      "epoch": 1.262,
      "grad_norm": 0.8165573477745056,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.721,
      "step": 210
    },
    {
      "epoch": 1.268,
      "grad_norm": 0.7878592014312744,
      "learning_rate": 7.137775467151411e-06,
      "loss": 0.6484,
      "step": 211
    },
    {
      "epoch": 1.274,
      "grad_norm": 0.7369374632835388,
      "learning_rate": 7.106027114194856e-06,
      "loss": 0.6934,
      "step": 212
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.8104023933410645,
      "learning_rate": 7.0741751979346395e-06,
      "loss": 0.6614,
      "step": 213
    },
    {
      "epoch": 1.286,
      "grad_norm": 0.8349857926368713,
      "learning_rate": 7.042221284679982e-06,
      "loss": 0.6364,
      "step": 214
    },
    {
      "epoch": 1.292,
      "grad_norm": 0.7562000751495361,
      "learning_rate": 7.010166945755768e-06,
      "loss": 0.6673,
      "step": 215
    },
    {
      "epoch": 1.298,
      "grad_norm": 0.8273193836212158,
      "learning_rate": 6.978013757425295e-06,
      "loss": 0.6646,
      "step": 216
    },
    {
      "epoch": 1.304,
      "grad_norm": 0.8832178711891174,
      "learning_rate": 6.945763300812746e-06,
      "loss": 0.7209,
      "step": 217
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.7850519418716431,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.7085,
      "step": 218
    },
    {
      "epoch": 1.316,
      "grad_norm": 0.829532265663147,
      "learning_rate": 6.880976931075887e-06,
      "loss": 0.7104,
      "step": 219
    },
    {
      "epoch": 1.322,
      "grad_norm": 0.7981274724006653,
      "learning_rate": 6.848444203803476e-06,
      "loss": 0.6908,
      "step": 220
    },
    {
      "epoch": 1.328,
      "grad_norm": 0.841297447681427,
      "learning_rate": 6.8158205797961265e-06,
      "loss": 0.7219,
      "step": 221
    },
    {
      "epoch": 1.334,
      "grad_norm": 0.7972620129585266,
      "learning_rate": 6.783107663311566e-06,
      "loss": 0.7184,
      "step": 222
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.7526700496673584,
      "learning_rate": 6.750307062998462e-06,
      "loss": 0.6812,
      "step": 223
    },
    {
      "epoch": 1.346,
      "grad_norm": 0.7620211839675903,
      "learning_rate": 6.717420391817306e-06,
      "loss": 0.6946,
      "step": 224
    },
    {
      "epoch": 1.3519999999999999,
      "grad_norm": 0.7766720056533813,
      "learning_rate": 6.684449266961101e-06,
      "loss": 0.6735,
      "step": 225
    },
    {
      "epoch": 1.358,
      "grad_norm": 0.7742501497268677,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.6738,
      "step": 226
    },
    {
      "epoch": 1.3639999999999999,
      "grad_norm": 0.8207322359085083,
      "learning_rate": 6.618260145680758e-06,
      "loss": 0.6819,
      "step": 227
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.83060622215271,
      "learning_rate": 6.585045404088442e-06,
      "loss": 0.6754,
      "step": 228
    },
    {
      "epoch": 1.376,
      "grad_norm": 0.7977469563484192,
      "learning_rate": 6.55175271832466e-06,
      "loss": 0.6799,
      "step": 229
    },
    {
      "epoch": 1.3820000000000001,
      "grad_norm": 0.7801655530929565,
      "learning_rate": 6.518383725548074e-06,
      "loss": 0.6779,
      "step": 230
    },
    {
      "epoch": 1.388,
      "grad_norm": 0.8043777942657471,
      "learning_rate": 6.484940066669718e-06,
      "loss": 0.6801,
      "step": 231
    },
    {
      "epoch": 1.3940000000000001,
      "grad_norm": 0.7748513221740723,
      "learning_rate": 6.451423386272312e-06,
      "loss": 0.6717,
      "step": 232
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.7956733703613281,
      "learning_rate": 6.417835332529389e-06,
      "loss": 0.667,
      "step": 233
    },
    {
      "epoch": 1.4060000000000001,
      "grad_norm": 0.7375237941741943,
      "learning_rate": 6.384177557124247e-06,
      "loss": 0.69,
      "step": 234
    },
    {
      "epoch": 1.412,
      "grad_norm": 0.8032549619674683,
      "learning_rate": 6.350451715168728e-06,
      "loss": 0.6841,
      "step": 235
    },
    {
      "epoch": 1.418,
      "grad_norm": 0.7697210311889648,
      "learning_rate": 6.3166594651218235e-06,
      "loss": 0.6318,
      "step": 236
    },
    {
      "epoch": 1.424,
      "grad_norm": 0.7588868737220764,
      "learning_rate": 6.282802468708133e-06,
      "loss": 0.6766,
      "step": 237
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.7812048196792603,
      "learning_rate": 6.248882390836135e-06,
      "loss": 0.6898,
      "step": 238
    },
    {
      "epoch": 1.436,
      "grad_norm": 1.3364125490188599,
      "learning_rate": 6.21490089951632e-06,
      "loss": 0.7079,
      "step": 239
    },
    {
      "epoch": 1.442,
      "grad_norm": 0.7737557291984558,
      "learning_rate": 6.180859665779173e-06,
      "loss": 0.7081,
      "step": 240
    },
    {
      "epoch": 1.448,
      "grad_norm": 0.7614575624465942,
      "learning_rate": 6.14676036359299e-06,
      "loss": 0.6961,
      "step": 241
    },
    {
      "epoch": 1.454,
      "grad_norm": 0.7251644134521484,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.6963,
      "step": 242
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.9040883183479309,
      "learning_rate": 6.078394263941762e-06,
      "loss": 0.681,
      "step": 243
    },
    {
      "epoch": 1.466,
      "grad_norm": 0.7830565571784973,
      "learning_rate": 6.04413082836085e-06,
      "loss": 0.6858,
      "step": 244
    },
    {
      "epoch": 1.472,
      "grad_norm": 0.7758118510246277,
      "learning_rate": 6.009816047933849e-06,
      "loss": 0.6731,
      "step": 245
    },
    {
      "epoch": 1.478,
      "grad_norm": 0.7957639694213867,
      "learning_rate": 5.975451610080643e-06,
      "loss": 0.6876,
      "step": 246
    },
    {
      "epoch": 1.484,
      "grad_norm": 0.811799168586731,
      "learning_rate": 5.941039204663001e-06,
      "loss": 0.6749,
      "step": 247
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.746426522731781,
      "learning_rate": 5.906580523901493e-06,
      "loss": 0.6598,
      "step": 248
    },
    {
      "epoch": 1.496,
      "grad_norm": 0.7993324398994446,
      "learning_rate": 5.872077262292255e-06,
      "loss": 0.7097,
      "step": 249
    },
    {
      "epoch": 1.502,
      "grad_norm": 0.7769522666931152,
      "learning_rate": 5.837531116523683e-06,
      "loss": 0.685,
      "step": 250
    },
    {
      "epoch": 1.508,
      "grad_norm": 0.7561690807342529,
      "learning_rate": 5.802943785392986e-06,
      "loss": 0.6682,
      "step": 251
    },
    {
      "epoch": 1.514,
      "grad_norm": 0.8304836750030518,
      "learning_rate": 5.768316969722651e-06,
      "loss": 0.6801,
      "step": 252
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.7912241816520691,
      "learning_rate": 5.733652372276809e-06,
      "loss": 0.6948,
      "step": 253
    },
    {
      "epoch": 1.526,
      "grad_norm": 0.7652627229690552,
      "learning_rate": 5.698951697677498e-06,
      "loss": 0.6661,
      "step": 254
    },
    {
      "epoch": 1.532,
      "grad_norm": 0.768621027469635,
      "learning_rate": 5.66421665232084e-06,
      "loss": 0.6916,
      "step": 255
    },
    {
      "epoch": 1.538,
      "grad_norm": 0.8317710757255554,
      "learning_rate": 5.629448944293128e-06,
      "loss": 0.6582,
      "step": 256
    },
    {
      "epoch": 1.544,
      "grad_norm": 0.8172629475593567,
      "learning_rate": 5.594650283286835e-06,
      "loss": 0.6784,
      "step": 257
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.7961453199386597,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.7056,
      "step": 258
    },
    {
      "epoch": 1.556,
      "grad_norm": 0.8242630362510681,
      "learning_rate": 5.524966948634774e-06,
      "loss": 0.6847,
      "step": 259
    },
    {
      "epoch": 1.562,
      "grad_norm": 0.7545662522315979,
      "learning_rate": 5.490085701647805e-06,
      "loss": 0.6895,
      "step": 260
    },
    {
      "epoch": 1.568,
      "grad_norm": 0.8344311714172363,
      "learning_rate": 5.4551803548313505e-06,
      "loss": 0.6649,
      "step": 261
    },
    {
      "epoch": 1.5739999999999998,
      "grad_norm": 0.7777344584465027,
      "learning_rate": 5.420252624646238e-06,
      "loss": 0.6506,
      "step": 262
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.7646704912185669,
      "learning_rate": 5.385304228653983e-06,
      "loss": 0.6727,
      "step": 263
    },
    {
      "epoch": 1.5859999999999999,
      "grad_norm": 0.7944283485412598,
      "learning_rate": 5.350336885432337e-06,
      "loss": 0.6856,
      "step": 264
    },
    {
      "epoch": 1.592,
      "grad_norm": 0.7944362163543701,
      "learning_rate": 5.315352314490781e-06,
      "loss": 0.6925,
      "step": 265
    },
    {
      "epoch": 1.5979999999999999,
      "grad_norm": 0.7388291358947754,
      "learning_rate": 5.2803522361859596e-06,
      "loss": 0.6796,
      "step": 266
    },
    {
      "epoch": 1.604,
      "grad_norm": 0.7791934013366699,
      "learning_rate": 5.245338371637091e-06,
      "loss": 0.662,
      "step": 267
    },
    {
      "epoch": 1.6099999999999999,
      "grad_norm": 0.7636281847953796,
      "learning_rate": 5.210312442641327e-06,
      "loss": 0.6804,
      "step": 268
    },
    {
      "epoch": 1.616,
      "grad_norm": 0.8123326897621155,
      "learning_rate": 5.175276171589082e-06,
      "loss": 0.6736,
      "step": 269
    },
    {
      "epoch": 1.6219999999999999,
      "grad_norm": 0.7895820140838623,
      "learning_rate": 5.140231281379345e-06,
      "loss": 0.6859,
      "step": 270
    },
    {
      "epoch": 1.6280000000000001,
      "grad_norm": 0.8226007223129272,
      "learning_rate": 5.1051794953349445e-06,
      "loss": 0.685,
      "step": 271
    },
    {
      "epoch": 1.634,
      "grad_norm": 0.7907382845878601,
      "learning_rate": 5.070122537117812e-06,
      "loss": 0.6772,
      "step": 272
    },
    {
      "epoch": 1.6400000000000001,
      "grad_norm": 0.8296628594398499,
      "learning_rate": 5.0350621306442185e-06,
      "loss": 0.6418,
      "step": 273
    },
    {
      "epoch": 1.646,
      "grad_norm": 0.7698683142662048,
      "learning_rate": 5e-06,
      "loss": 0.6596,
      "step": 274
    },
    {
      "epoch": 1.6520000000000001,
      "grad_norm": 0.7878702282905579,
      "learning_rate": 4.964937869355782e-06,
      "loss": 0.7012,
      "step": 275
    },
    {
      "epoch": 1.658,
      "grad_norm": 0.7555845975875854,
      "learning_rate": 4.92987746288219e-06,
      "loss": 0.6765,
      "step": 276
    },
    {
      "epoch": 1.6640000000000001,
      "grad_norm": 0.8005223274230957,
      "learning_rate": 4.894820504665056e-06,
      "loss": 0.6803,
      "step": 277
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.8159884810447693,
      "learning_rate": 4.859768718620656e-06,
      "loss": 0.6846,
      "step": 278
    },
    {
      "epoch": 1.6760000000000002,
      "grad_norm": 0.7637518644332886,
      "learning_rate": 4.82472382841092e-06,
      "loss": 0.6545,
      "step": 279
    },
    {
      "epoch": 1.682,
      "grad_norm": 0.7744002938270569,
      "learning_rate": 4.789687557358676e-06,
      "loss": 0.6855,
      "step": 280
    },
    {
      "epoch": 1.688,
      "grad_norm": 0.7353812456130981,
      "learning_rate": 4.75466162836291e-06,
      "loss": 0.6675,
      "step": 281
    },
    {
      "epoch": 1.694,
      "grad_norm": 0.7721124887466431,
      "learning_rate": 4.719647763814041e-06,
      "loss": 0.6768,
      "step": 282
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.7478278875350952,
      "learning_rate": 4.684647685509221e-06,
      "loss": 0.6887,
      "step": 283
    },
    {
      "epoch": 1.706,
      "grad_norm": 0.7152284383773804,
      "learning_rate": 4.649663114567663e-06,
      "loss": 0.6822,
      "step": 284
    },
    {
      "epoch": 1.712,
      "grad_norm": 0.7599402666091919,
      "learning_rate": 4.61469577134602e-06,
      "loss": 0.6839,
      "step": 285
    },
    {
      "epoch": 1.718,
      "grad_norm": 0.746227502822876,
      "learning_rate": 4.579747375353763e-06,
      "loss": 0.6959,
      "step": 286
    },
    {
      "epoch": 1.724,
      "grad_norm": 0.7657206654548645,
      "learning_rate": 4.54481964516865e-06,
      "loss": 0.6896,
      "step": 287
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.8069854378700256,
      "learning_rate": 4.509914298352197e-06,
      "loss": 0.7345,
      "step": 288
    },
    {
      "epoch": 1.736,
      "grad_norm": 0.7241007089614868,
      "learning_rate": 4.475033051365228e-06,
      "loss": 0.6619,
      "step": 289
    },
    {
      "epoch": 1.742,
      "grad_norm": 0.7458640933036804,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.6781,
      "step": 290
    },
    {
      "epoch": 1.748,
      "grad_norm": 0.729795515537262,
      "learning_rate": 4.405349716713165e-06,
      "loss": 0.6945,
      "step": 291
    },
    {
      "epoch": 1.754,
      "grad_norm": 0.7799696922302246,
      "learning_rate": 4.3705510557068746e-06,
      "loss": 0.6747,
      "step": 292
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.7436068058013916,
      "learning_rate": 4.335783347679162e-06,
      "loss": 0.7001,
      "step": 293
    },
    {
      "epoch": 1.766,
      "grad_norm": 0.7503911256790161,
      "learning_rate": 4.3010483023225045e-06,
      "loss": 0.6559,
      "step": 294
    },
    {
      "epoch": 1.772,
      "grad_norm": 0.7682201862335205,
      "learning_rate": 4.266347627723192e-06,
      "loss": 0.6751,
      "step": 295
    },
    {
      "epoch": 1.778,
      "grad_norm": 0.746093213558197,
      "learning_rate": 4.231683030277349e-06,
      "loss": 0.6741,
      "step": 296
    },
    {
      "epoch": 1.784,
      "grad_norm": 0.7442725896835327,
      "learning_rate": 4.197056214607016e-06,
      "loss": 0.6728,
      "step": 297
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.7680650353431702,
      "learning_rate": 4.162468883476319e-06,
      "loss": 0.7068,
      "step": 298
    },
    {
      "epoch": 1.796,
      "grad_norm": 0.7224968671798706,
      "learning_rate": 4.1279227377077465e-06,
      "loss": 0.6816,
      "step": 299
    },
    {
      "epoch": 1.802,
      "grad_norm": 0.7245790958404541,
      "learning_rate": 4.0934194760985095e-06,
      "loss": 0.6683,
      "step": 300
    },
    {
      "epoch": 1.808,
      "grad_norm": 0.7421197295188904,
      "learning_rate": 4.058960795337001e-06,
      "loss": 0.6746,
      "step": 301
    },
    {
      "epoch": 1.814,
      "grad_norm": 0.7313238978385925,
      "learning_rate": 4.02454838991936e-06,
      "loss": 0.6548,
      "step": 302
    },
    {
      "epoch": 1.8199999999999998,
      "grad_norm": 0.753065288066864,
      "learning_rate": 3.990183952066151e-06,
      "loss": 0.6917,
      "step": 303
    },
    {
      "epoch": 1.826,
      "grad_norm": 0.7868438363075256,
      "learning_rate": 3.955869171639151e-06,
      "loss": 0.6947,
      "step": 304
    },
    {
      "epoch": 1.8319999999999999,
      "grad_norm": 0.7543407082557678,
      "learning_rate": 3.921605736058238e-06,
      "loss": 0.7005,
      "step": 305
    },
    {
      "epoch": 1.838,
      "grad_norm": 0.8221840262413025,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.695,
      "step": 306
    },
    {
      "epoch": 1.8439999999999999,
      "grad_norm": 0.7984438538551331,
      "learning_rate": 3.853239636407012e-06,
      "loss": 0.6685,
      "step": 307
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.7780439853668213,
      "learning_rate": 3.81914033422083e-06,
      "loss": 0.6696,
      "step": 308
    },
    {
      "epoch": 1.8559999999999999,
      "grad_norm": 0.7559566497802734,
      "learning_rate": 3.7850991004836813e-06,
      "loss": 0.6746,
      "step": 309
    },
    {
      "epoch": 1.862,
      "grad_norm": 0.7471140623092651,
      "learning_rate": 3.751117609163865e-06,
      "loss": 0.6844,
      "step": 310
    },
    {
      "epoch": 1.8679999999999999,
      "grad_norm": 0.715210497379303,
      "learning_rate": 3.7171975312918674e-06,
      "loss": 0.6563,
      "step": 311
    },
    {
      "epoch": 1.874,
      "grad_norm": 0.7327157258987427,
      "learning_rate": 3.683340534878176e-06,
      "loss": 0.6649,
      "step": 312
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.8228614926338196,
      "learning_rate": 3.6495482848312745e-06,
      "loss": 0.689,
      "step": 313
    },
    {
      "epoch": 1.8860000000000001,
      "grad_norm": 0.781579315662384,
      "learning_rate": 3.6158224428757538e-06,
      "loss": 0.6855,
      "step": 314
    },
    {
      "epoch": 1.892,
      "grad_norm": 0.7461678981781006,
      "learning_rate": 3.5821646674706124e-06,
      "loss": 0.6935,
      "step": 315
    },
    {
      "epoch": 1.8980000000000001,
      "grad_norm": 0.714884877204895,
      "learning_rate": 3.5485766137276894e-06,
      "loss": 0.6649,
      "step": 316
    },
    {
      "epoch": 1.904,
      "grad_norm": 0.7565520405769348,
      "learning_rate": 3.5150599333302826e-06,
      "loss": 0.6641,
      "step": 317
    },
    {
      "epoch": 1.9100000000000001,
      "grad_norm": 0.7854803800582886,
      "learning_rate": 3.4816162744519266e-06,
      "loss": 0.6612,
      "step": 318
    },
    {
      "epoch": 1.916,
      "grad_norm": 0.7642549276351929,
      "learning_rate": 3.4482472816753404e-06,
      "loss": 0.6706,
      "step": 319
    },
    {
      "epoch": 1.9220000000000002,
      "grad_norm": 0.75054532289505,
      "learning_rate": 3.4149545959115604e-06,
      "loss": 0.6749,
      "step": 320
    },
    {
      "epoch": 1.928,
      "grad_norm": 0.7272456288337708,
      "learning_rate": 3.3817398543192426e-06,
      "loss": 0.6938,
      "step": 321
    },
    {
      "epoch": 1.9340000000000002,
      "grad_norm": 0.7687782645225525,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.6755,
      "step": 322
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.7556332945823669,
      "learning_rate": 3.3155507330389004e-06,
      "loss": 0.6792,
      "step": 323
    },
    {
      "epoch": 1.946,
      "grad_norm": 0.746389627456665,
      "learning_rate": 3.2825796081826943e-06,
      "loss": 0.6462,
      "step": 324
    },
    {
      "epoch": 1.952,
      "grad_norm": 0.7972975969314575,
      "learning_rate": 3.2496929370015383e-06,
      "loss": 0.6986,
      "step": 325
    },
    {
      "epoch": 1.958,
      "grad_norm": 0.7260416150093079,
      "learning_rate": 3.216892336688435e-06,
      "loss": 0.6673,
      "step": 326
    },
    {
      "epoch": 1.964,
      "grad_norm": 0.7445573210716248,
      "learning_rate": 3.184179420203877e-06,
      "loss": 0.6936,
      "step": 327
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.6925731301307678,
      "learning_rate": 3.1515557961965254e-06,
      "loss": 0.6606,
      "step": 328
    },
    {
      "epoch": 1.976,
      "grad_norm": 0.7348446846008301,
      "learning_rate": 3.119023068924115e-06,
      "loss": 0.712,
      "step": 329
    },
    {
      "epoch": 1.982,
      "grad_norm": 0.7268365621566772,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.677,
      "step": 330
    },
    {
      "epoch": 1.988,
      "grad_norm": 0.7770018577575684,
      "learning_rate": 3.0542366991872546e-06,
      "loss": 0.634,
      "step": 331
    },
    {
      "epoch": 1.994,
      "grad_norm": 0.731056809425354,
      "learning_rate": 3.021986242574707e-06,
      "loss": 0.6733,
      "step": 332
    },
    {
      "epoch": 2.002,
      "grad_norm": 0.7408381700515747,
      "learning_rate": 2.989833054244232e-06,
      "loss": 0.65,
      "step": 333
    },
    {
      "epoch": 2.008,
      "grad_norm": 0.8942451477050781,
      "learning_rate": 2.95777871532002e-06,
      "loss": 0.5536,
      "step": 334
    },
    {
      "epoch": 2.014,
      "grad_norm": 0.7516184449195862,
      "learning_rate": 2.9258248020653617e-06,
      "loss": 0.5792,
      "step": 335
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.7563628554344177,
      "learning_rate": 2.893972885805148e-06,
      "loss": 0.5295,
      "step": 336
    },
    {
      "epoch": 2.026,
      "grad_norm": 0.7296295762062073,
      "learning_rate": 2.862224532848591e-06,
      "loss": 0.5701,
      "step": 337
    },
    {
      "epoch": 2.032,
      "grad_norm": 0.7129521369934082,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.5654,
      "step": 338
    },
    {
      "epoch": 2.038,
      "grad_norm": 0.7977523803710938,
      "learning_rate": 2.799044756543054e-06,
      "loss": 0.5577,
      "step": 339
    },
    {
      "epoch": 2.044,
      "grad_norm": 0.8266711831092834,
      "learning_rate": 2.7676164400421864e-06,
      "loss": 0.5548,
      "step": 340
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.8824355602264404,
      "learning_rate": 2.7362979003884154e-06,
      "loss": 0.5602,
      "step": 341
    },
    {
      "epoch": 2.056,
      "grad_norm": 0.7907860279083252,
      "learning_rate": 2.705090677662311e-06,
      "loss": 0.5572,
      "step": 342
    },
    {
      "epoch": 2.062,
      "grad_norm": 0.8235220313072205,
      "learning_rate": 2.6739963064704634e-06,
      "loss": 0.5586,
      "step": 343
    },
    {
      "epoch": 2.068,
      "grad_norm": 0.738960862159729,
      "learning_rate": 2.6430163158700116e-06,
      "loss": 0.5513,
      "step": 344
    },
    {
      "epoch": 2.074,
      "grad_norm": 0.7408565282821655,
      "learning_rate": 2.61215222929347e-06,
      "loss": 0.5735,
      "step": 345
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.7393279671669006,
      "learning_rate": 2.5814055644738013e-06,
      "loss": 0.5994,
      "step": 346
    },
    {
      "epoch": 2.086,
      "grad_norm": 0.7686851024627686,
      "learning_rate": 2.5507778333697817e-06,
      "loss": 0.5517,
      "step": 347
    },
    {
      "epoch": 2.092,
      "grad_norm": 0.7830207347869873,
      "learning_rate": 2.520270542091663e-06,
      "loss": 0.5594,
      "step": 348
    },
    {
      "epoch": 2.098,
      "grad_norm": 0.7608020901679993,
      "learning_rate": 2.4898851908270903e-06,
      "loss": 0.5625,
      "step": 349
    },
    {
      "epoch": 2.104,
      "grad_norm": 0.739809513092041,
      "learning_rate": 2.4596232737673544e-06,
      "loss": 0.5675,
      "step": 350
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.7285906076431274,
      "learning_rate": 2.429486279033892e-06,
      "loss": 0.5804,
      "step": 351
    },
    {
      "epoch": 2.116,
      "grad_norm": 0.7156224250793457,
      "learning_rate": 2.3994756886051267e-06,
      "loss": 0.5979,
      "step": 352
    },
    {
      "epoch": 2.122,
      "grad_norm": 0.744182825088501,
      "learning_rate": 2.3695929782435845e-06,
      "loss": 0.5875,
      "step": 353
    },
    {
      "epoch": 2.128,
      "grad_norm": 0.7503982782363892,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.5684,
      "step": 354
    },
    {
      "epoch": 2.134,
      "grad_norm": 0.746688187122345,
      "learning_rate": 2.3102170692576593e-06,
      "loss": 0.5312,
      "step": 355
    },
    {
      "epoch": 2.14,
      "grad_norm": 0.7300651669502258,
      "learning_rate": 2.280726790427258e-06,
      "loss": 0.5567,
      "step": 356
    },
    {
      "epoch": 2.146,
      "grad_norm": 0.7481258511543274,
      "learning_rate": 2.2513702311084595e-06,
      "loss": 0.5706,
      "step": 357
    },
    {
      "epoch": 2.152,
      "grad_norm": 0.7340971827507019,
      "learning_rate": 2.2221488349019903e-06,
      "loss": 0.5687,
      "step": 358
    },
    {
      "epoch": 2.158,
      "grad_norm": 0.7326511740684509,
      "learning_rate": 2.1930640387619616e-06,
      "loss": 0.5518,
      "step": 359
    },
    {
      "epoch": 2.164,
      "grad_norm": 0.7539912462234497,
      "learning_rate": 2.1641172729252206e-06,
      "loss": 0.5512,
      "step": 360
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.6984777450561523,
      "learning_rate": 2.1353099608410095e-06,
      "loss": 0.5774,
      "step": 361
    },
    {
      "epoch": 2.176,
      "grad_norm": 0.6927710771560669,
      "learning_rate": 2.1066435191009717e-06,
      "loss": 0.5359,
      "step": 362
    },
    {
      "epoch": 2.182,
      "grad_norm": 0.720655083656311,
      "learning_rate": 2.078119357369487e-06,
      "loss": 0.5491,
      "step": 363
    },
    {
      "epoch": 2.188,
      "grad_norm": 0.7237579822540283,
      "learning_rate": 2.04973887831436e-06,
      "loss": 0.5621,
      "step": 364
    },
    {
      "epoch": 2.194,
      "grad_norm": 0.7292655110359192,
      "learning_rate": 2.0215034775378336e-06,
      "loss": 0.5387,
      "step": 365
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.7011332511901855,
      "learning_rate": 1.9934145435079705e-06,
      "loss": 0.5512,
      "step": 366
    },
    {
      "epoch": 2.206,
      "grad_norm": 0.7132775187492371,
      "learning_rate": 1.965473457490372e-06,
      "loss": 0.5293,
      "step": 367
    },
    {
      "epoch": 2.212,
      "grad_norm": 0.6869281530380249,
      "learning_rate": 1.9376815934802496e-06,
      "loss": 0.5333,
      "step": 368
    },
    {
      "epoch": 2.218,
      "grad_norm": 0.7164115309715271,
      "learning_rate": 1.9100403181348687e-06,
      "loss": 0.5491,
      "step": 369
    },
    {
      "epoch": 2.224,
      "grad_norm": 0.684960663318634,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.5677,
      "step": 370
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.7094606757164001,
      "learning_rate": 1.8552149629747557e-06,
      "loss": 0.5518,
      "step": 371
    },
    {
      "epoch": 2.2359999999999998,
      "grad_norm": 0.7112196683883667,
      "learning_rate": 1.8280335791817733e-06,
      "loss": 0.5481,
      "step": 372
    },
    {
      "epoch": 2.242,
      "grad_norm": 0.7124966979026794,
      "learning_rate": 1.8010081759644538e-06,
      "loss": 0.5525,
      "step": 373
    },
    {
      "epoch": 2.248,
      "grad_norm": 0.7253919839859009,
      "learning_rate": 1.7741400822895633e-06,
      "loss": 0.5471,
      "step": 374
    },
    {
      "epoch": 2.254,
      "grad_norm": 0.7053412795066833,
      "learning_rate": 1.747430619388209e-06,
      "loss": 0.5639,
      "step": 375
    },
    {
      "epoch": 2.26,
      "grad_norm": 0.7297880053520203,
      "learning_rate": 1.7208811006908798e-06,
      "loss": 0.5564,
      "step": 376
    },
    {
      "epoch": 2.266,
      "grad_norm": 0.6979199647903442,
      "learning_rate": 1.6944928317628512e-06,
      "loss": 0.5746,
      "step": 377
    },
    {
      "epoch": 2.2720000000000002,
      "grad_norm": 0.6711254119873047,
      "learning_rate": 1.6682671102399806e-06,
      "loss": 0.5514,
      "step": 378
    },
    {
      "epoch": 2.278,
      "grad_norm": 0.6962975263595581,
      "learning_rate": 1.642205225764908e-06,
      "loss": 0.5484,
      "step": 379
    },
    {
      "epoch": 2.284,
      "grad_norm": 0.7515308260917664,
      "learning_rate": 1.6163084599236278e-06,
      "loss": 0.5719,
      "step": 380
    },
    {
      "epoch": 2.29,
      "grad_norm": 0.7150830626487732,
      "learning_rate": 1.5905780861824748e-06,
      "loss": 0.5451,
      "step": 381
    },
    {
      "epoch": 2.296,
      "grad_norm": 0.6913665533065796,
      "learning_rate": 1.5650153698254916e-06,
      "loss": 0.5916,
      "step": 382
    },
    {
      "epoch": 2.302,
      "grad_norm": 0.6843773126602173,
      "learning_rate": 1.5396215678922222e-06,
      "loss": 0.5437,
      "step": 383
    },
    {
      "epoch": 2.308,
      "grad_norm": 0.6709427833557129,
      "learning_rate": 1.514397929115884e-06,
      "loss": 0.5556,
      "step": 384
    },
    {
      "epoch": 2.314,
      "grad_norm": 0.69564288854599,
      "learning_rate": 1.4893456938619743e-06,
      "loss": 0.5417,
      "step": 385
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.6761554479598999,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5794,
      "step": 386
    },
    {
      "epoch": 2.326,
      "grad_norm": 0.6663413643836975,
      "learning_rate": 1.439760353179223e-06,
      "loss": 0.5836,
      "step": 387
    },
    {
      "epoch": 2.332,
      "grad_norm": 0.6832196712493896,
      "learning_rate": 1.4152296860958641e-06,
      "loss": 0.5526,
      "step": 388
    },
    {
      "epoch": 2.338,
      "grad_norm": 0.7147595882415771,
      "learning_rate": 1.3908752991059854e-06,
      "loss": 0.5329,
      "step": 389
    },
    {
      "epoch": 2.344,
      "grad_norm": 0.7001447081565857,
      "learning_rate": 1.3666983898298659e-06,
      "loss": 0.56,
      "step": 390
    },
    {
      "epoch": 2.35,
      "grad_norm": 0.6908352375030518,
      "learning_rate": 1.3427001471603623e-06,
      "loss": 0.5241,
      "step": 391
    },
    {
      "epoch": 2.356,
      "grad_norm": 0.6571294665336609,
      "learning_rate": 1.3188817512044544e-06,
      "loss": 0.5491,
      "step": 392
    },
    {
      "epoch": 2.362,
      "grad_norm": 0.6830478310585022,
      "learning_rate": 1.2952443732252058e-06,
      "loss": 0.5623,
      "step": 393
    },
    {
      "epoch": 2.368,
      "grad_norm": 0.6655673384666443,
      "learning_rate": 1.2717891755841722e-06,
      "loss": 0.5415,
      "step": 394
    },
    {
      "epoch": 2.374,
      "grad_norm": 0.7237457036972046,
      "learning_rate": 1.2485173116842432e-06,
      "loss": 0.5854,
      "step": 395
    },
    {
      "epoch": 2.38,
      "grad_norm": 0.7015038132667542,
      "learning_rate": 1.225429925912921e-06,
      "loss": 0.5112,
      "step": 396
    },
    {
      "epoch": 2.386,
      "grad_norm": 0.6830282211303711,
      "learning_rate": 1.2025281535860433e-06,
      "loss": 0.5481,
      "step": 397
    },
    {
      "epoch": 2.392,
      "grad_norm": 0.6884271502494812,
      "learning_rate": 1.1798131208919628e-06,
      "loss": 0.5589,
      "step": 398
    },
    {
      "epoch": 2.398,
      "grad_norm": 0.7025271058082581,
      "learning_rate": 1.1572859448361602e-06,
      "loss": 0.5567,
      "step": 399
    },
    {
      "epoch": 2.404,
      "grad_norm": 0.6751793026924133,
      "learning_rate": 1.134947733186315e-06,
      "loss": 0.5657,
      "step": 400
    },
    {
      "epoch": 2.41,
      "grad_norm": 0.6852043867111206,
      "learning_rate": 1.1127995844178385e-06,
      "loss": 0.5663,
      "step": 401
    },
    {
      "epoch": 2.416,
      "grad_norm": 0.6937378644943237,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5723,
      "step": 402
    },
    {
      "epoch": 2.422,
| "grad_norm": 0.7152097225189209, |
| "learning_rate": 1.069077822641622e-06, |
| "loss": 0.5731, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.428, |
| "grad_norm": 0.6963691115379333, |
| "learning_rate": 1.047506359639483e-06, |
| "loss": 0.5455, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.434, |
| "grad_norm": 0.7146711945533752, |
| "learning_rate": 1.0261292594241873e-06, |
| "loss": 0.5474, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.44, |
| "grad_norm": 0.6817753314971924, |
| "learning_rate": 1.004947573208756e-06, |
| "loss": 0.5886, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.446, |
| "grad_norm": 0.6989074349403381, |
| "learning_rate": 9.83962342596776e-07, |
| "loss": 0.5396, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.452, |
| "grad_norm": 0.6894654631614685, |
| "learning_rate": 9.631745995311881e-07, |
| "loss": 0.545, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.458, |
| "grad_norm": 0.680069088935852, |
| "learning_rate": 9.42585366243537e-07, |
| "loss": 0.5562, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.464, |
| "grad_norm": 0.7130041718482971, |
| "learning_rate": 9.221956552036992e-07, |
| "loss": 0.548, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.4699999999999998, |
| "grad_norm": 0.6836442351341248, |
| "learning_rate": 9.020064690701069e-07, |
| "loss": 0.5563, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.476, |
| "grad_norm": 0.6889331936836243, |
| "learning_rate": 8.820188006404268e-07, |
| "loss": 0.5434, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.482, |
| "grad_norm": 0.6862911581993103, |
| "learning_rate": 8.622336328027553e-07, |
| "loss": 0.5553, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.488, |
| "grad_norm": 0.6937359571456909, |
| "learning_rate": 8.426519384872733e-07, |
| "loss": 0.5455, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.4939999999999998, |
| "grad_norm": 0.6683201789855957, |
| "learning_rate": 8.232746806184116e-07, |
| "loss": 0.5846, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.7328600287437439, |
| "learning_rate": 8.041028120674894e-07, |
| "loss": 0.5425, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.5060000000000002, |
| "grad_norm": 0.701684296131134, |
| "learning_rate": 7.851372756058662e-07, |
| "loss": 0.5687, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.512, |
| "grad_norm": 0.7107769846916199, |
| "learning_rate": 7.663790038585794e-07, |
| "loss": 0.5673, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.518, |
| "grad_norm": 0.6760324835777283, |
| "learning_rate": 7.478289192584765e-07, |
| "loss": 0.5816, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.524, |
| "grad_norm": 0.6865687966346741, |
| "learning_rate": 7.294879340008632e-07, |
| "loss": 0.5545, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.5300000000000002, |
| "grad_norm": 0.7050975561141968, |
| "learning_rate": 7.113569499986401e-07, |
| "loss": 0.5769, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.536, |
| "grad_norm": 0.6839458346366882, |
| "learning_rate": 6.934368588379553e-07, |
| "loss": 0.544, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.542, |
| "grad_norm": 0.6980892419815063, |
| "learning_rate": 6.757285417343601e-07, |
| "loss": 0.5533, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.548, |
| "grad_norm": 0.6971255540847778, |
| "learning_rate": 6.582328694894729e-07, |
| "loss": 0.5754, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.5540000000000003, |
| "grad_norm": 0.6847971081733704, |
| "learning_rate": 6.409507024481615e-07, |
| "loss": 0.5559, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.6890616416931152, |
| "learning_rate": 6.238828904562316e-07, |
| "loss": 0.5585, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.566, |
| "grad_norm": 0.6950772404670715, |
| "learning_rate": 6.070302728186428e-07, |
| "loss": 0.565, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.572, |
| "grad_norm": 0.7005242705345154, |
| "learning_rate": 5.903936782582253e-07, |
| "loss": 0.5786, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.578, |
| "grad_norm": 0.7018908262252808, |
| "learning_rate": 5.739739248749398e-07, |
| "loss": 0.581, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.584, |
| "grad_norm": 0.6644871234893799, |
| "learning_rate": 5.577718201056392e-07, |
| "loss": 0.5744, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.59, |
| "grad_norm": 0.6836162209510803, |
| "learning_rate": 5.41788160684365e-07, |
| "loss": 0.5542, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.596, |
| "grad_norm": 0.6840105652809143, |
| "learning_rate": 5.260237326031698e-07, |
| "loss": 0.5882, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.602, |
| "grad_norm": 0.6827991604804993, |
| "learning_rate": 5.104793110734668e-07, |
| "loss": 0.5772, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.608, |
| "grad_norm": 0.6655355095863342, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.5625, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.614, |
| "grad_norm": 0.6982548236846924, |
| "learning_rate": 4.800535343827834e-07, |
| "loss": 0.5428, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.62, |
| "grad_norm": 0.673215389251709, |
| "learning_rate": 4.651736754009972e-07, |
| "loss": 0.5442, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.626, |
| "grad_norm": 0.6609004139900208, |
| "learning_rate": 4.5051681525551726e-07, |
| "loss": 0.5575, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.632, |
| "grad_norm": 0.7088234424591064, |
| "learning_rate": 4.3608367469340553e-07, |
| "loss": 0.5542, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.638, |
| "grad_norm": 0.6825269460678101, |
| "learning_rate": 4.218749634603769e-07, |
| "loss": 0.5498, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.644, |
| "grad_norm": 0.690281093120575, |
| "learning_rate": 4.078913802658946e-07, |
| "loss": 0.5575, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.65, |
| "grad_norm": 0.7048803567886353, |
| "learning_rate": 3.941336127488149e-07, |
| "loss": 0.5742, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.656, |
| "grad_norm": 0.6868783831596375, |
| "learning_rate": 3.8060233744356634e-07, |
| "loss": 0.5414, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.662, |
| "grad_norm": 0.6864995956420898, |
| "learning_rate": 3.672982197468894e-07, |
| "loss": 0.5427, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.668, |
| "grad_norm": 0.6604911088943481, |
| "learning_rate": 3.542219138851094e-07, |
| "loss": 0.547, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.674, |
| "grad_norm": 0.7011156678199768, |
| "learning_rate": 3.413740628819673e-07, |
| "loss": 0.5674, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.68, |
| "grad_norm": 0.7000807523727417, |
| "learning_rate": 3.287552985270015e-07, |
| "loss": 0.5757, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.686, |
| "grad_norm": 0.6695931553840637, |
| "learning_rate": 3.1636624134447347e-07, |
| "loss": 0.5541, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.692, |
| "grad_norm": 0.6851454377174377, |
| "learning_rate": 3.0420750056286195e-07, |
| "loss": 0.5736, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.698, |
| "grad_norm": 0.6993318200111389, |
| "learning_rate": 2.9227967408489653e-07, |
| "loss": 0.5699, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.7039999999999997, |
| "grad_norm": 0.7027319669723511, |
| "learning_rate": 2.8058334845816214e-07, |
| "loss": 0.5419, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.71, |
| "grad_norm": 0.6792875528335571, |
| "learning_rate": 2.691190988462522e-07, |
| "loss": 0.5412, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.716, |
| "grad_norm": 0.666557252407074, |
| "learning_rate": 2.5788748900048676e-07, |
| "loss": 0.5618, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.722, |
| "grad_norm": 0.7048344612121582, |
| "learning_rate": 2.468890712321864e-07, |
| "loss": 0.5868, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.7279999999999998, |
| "grad_norm": 0.6760908961296082, |
| "learning_rate": 2.3612438638551837e-07, |
| "loss": 0.5655, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.734, |
| "grad_norm": 0.6793569922447205, |
| "learning_rate": 2.2559396381089836e-07, |
| "loss": 0.553, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.74, |
| "grad_norm": 0.6599664092063904, |
| "learning_rate": 2.152983213389559e-07, |
| "loss": 0.5816, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.746, |
| "grad_norm": 0.6561682820320129, |
| "learning_rate": 2.0523796525507622e-07, |
| "loss": 0.5366, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.752, |
| "grad_norm": 0.7191624641418457, |
| "learning_rate": 1.9541339027450256e-07, |
| "loss": 0.5784, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.758, |
| "grad_norm": 0.9608937501907349, |
| "learning_rate": 1.8582507951800277e-07, |
| "loss": 0.552, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.7640000000000002, |
| "grad_norm": 0.6913126707077026, |
| "learning_rate": 1.7647350448812105e-07, |
| "loss": 0.5472, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.77, |
| "grad_norm": 0.6845247745513916, |
| "learning_rate": 1.6735912504598384e-07, |
| "loss": 0.5574, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.776, |
| "grad_norm": 0.671147882938385, |
| "learning_rate": 1.5848238938869332e-07, |
| "loss": 0.5574, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.782, |
| "grad_norm": 0.6692343950271606, |
| "learning_rate": 1.4984373402728014e-07, |
| "loss": 0.5498, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.7880000000000003, |
| "grad_norm": 0.6610013246536255, |
| "learning_rate": 1.4144358376524504e-07, |
| "loss": 0.5869, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.794, |
| "grad_norm": 0.6857611536979675, |
| "learning_rate": 1.3328235167766545e-07, |
| "loss": 0.5603, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.6564115881919861, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.5336, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.806, |
| "grad_norm": 0.7015063166618347, |
| "learning_rate": 1.1767823556276648e-07, |
| "loss": 0.5575, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.8120000000000003, |
| "grad_norm": 0.6728578805923462, |
| "learning_rate": 1.10236118863562e-07, |
| "loss": 0.5405, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.818, |
| "grad_norm": 0.6682955026626587, |
| "learning_rate": 1.0303445495730868e-07, |
| "loss": 0.5556, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.824, |
| "grad_norm": 0.6954447627067566, |
| "learning_rate": 9.607359798384785e-08, |
| "loss": 0.5569, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.83, |
| "grad_norm": 0.6602838635444641, |
| "learning_rate": 8.935389024140485e-08, |
| "loss": 0.5386, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.836, |
| "grad_norm": 0.6650714874267578, |
| "learning_rate": 8.287566216975795e-08, |
| "loss": 0.548, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.842, |
| "grad_norm": 0.6559730768203735, |
| "learning_rate": 7.663923233398928e-08, |
| "loss": 0.5751, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.848, |
| "grad_norm": 0.6800832152366638, |
| "learning_rate": 7.064490740882057e-08, |
| "loss": 0.6029, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.854, |
| "grad_norm": 0.6990196704864502, |
| "learning_rate": 6.489298216352913e-08, |
| "loss": 0.5278, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.86, |
| "grad_norm": 0.6823257207870483, |
| "learning_rate": 5.938373944745612e-08, |
| "loss": 0.5787, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.866, |
| "grad_norm": 0.6999835968017578, |
| "learning_rate": 5.411745017609493e-08, |
| "loss": 0.5595, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.872, |
| "grad_norm": 0.7087691426277161, |
| "learning_rate": 4.909437331777178e-08, |
| "loss": 0.5809, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.878, |
| "grad_norm": 0.6622153520584106, |
| "learning_rate": 4.431475588090872e-08, |
| "loss": 0.5649, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.884, |
| "grad_norm": 0.6820440888404846, |
| "learning_rate": 3.977883290187667e-08, |
| "loss": 0.5612, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.89, |
| "grad_norm": 0.7198901772499084, |
| "learning_rate": 3.548682743344079e-08, |
| "loss": 0.5365, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.896, |
| "grad_norm": 0.6733301281929016, |
| "learning_rate": 3.143895053378698e-08, |
| "loss": 0.5443, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.902, |
| "grad_norm": 0.7018823623657227, |
| "learning_rate": 2.7635401256149163e-08, |
| "loss": 0.5623, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.908, |
| "grad_norm": 0.6777315139770508, |
| "learning_rate": 2.4076366639015914e-08, |
| "loss": 0.5427, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.914, |
| "grad_norm": 0.6694568395614624, |
| "learning_rate": 2.0762021696933975e-08, |
| "loss": 0.5376, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.92, |
| "grad_norm": 0.6854175925254822, |
| "learning_rate": 1.769252941190458e-08, |
| "loss": 0.5715, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.926, |
| "grad_norm": 0.6848807334899902, |
| "learning_rate": 1.4868040725365407e-08, |
| "loss": 0.5622, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.932, |
| "grad_norm": 0.6662968993186951, |
| "learning_rate": 1.2288694530769862e-08, |
| "loss": 0.5247, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.9379999999999997, |
| "grad_norm": 0.6799569725990295, |
| "learning_rate": 9.954617666758648e-09, |
| "loss": 0.5515, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.944, |
| "grad_norm": 0.672495424747467, |
| "learning_rate": 7.865924910916977e-09, |
| "loss": 0.5967, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.95, |
| "grad_norm": 0.7219680547714233, |
| "learning_rate": 6.022718974137976e-09, |
| "loss": 0.5707, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.956, |
| "grad_norm": 0.7312204241752625, |
| "learning_rate": 4.4250904955656095e-09, |
| "loss": 0.5734, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.9619999999999997, |
| "grad_norm": 0.6828036904335022, |
| "learning_rate": 3.0731180381399216e-09, |
| "loss": 0.5743, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.968, |
| "grad_norm": 0.6708048582077026, |
| "learning_rate": 1.9668680847356735e-09, |
| "loss": 0.5527, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.974, |
| "grad_norm": 0.6551077961921692, |
| "learning_rate": 1.1063950348888519e-09, |
| "loss": 0.5391, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.98, |
| "grad_norm": 0.68417888879776, |
| "learning_rate": 4.91741202124918e-10, |
| "loss": 0.5932, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.9859999999999998, |
| "grad_norm": 0.6624125242233276, |
| "learning_rate": 1.22936811877139e-10, |
| "loss": 0.5343, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.992, |
| "grad_norm": 0.6429402828216553, |
| "learning_rate": 0.0, |
| "loss": 0.5192, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.992, |
| "step": 498, |
| "total_flos": 2.130527528110981e+17, |
| "train_loss": 0.0, |
| "train_runtime": 5.1921, |
| "train_samples_per_second": 9244.875, |
| "train_steps_per_second": 95.916 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 498, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.130527528110981e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |